gitlab.com/gitlab-org/gitlab-foss.git

author     GitLab Bot <gitlab-bot@gitlab.com>  2020-03-19 12:35:24 +0300
committer  GitLab Bot <gitlab-bot@gitlab.com>  2020-03-19 12:35:24 +0300
commit     1d20d43661f3e98bde2bb5dc163a45fd90f8ac88 (patch)
tree       53feb6983348a364ed4f52a4108866bf4ff0a599
parent     2774ddc308f96f49a0f26871ff544681229f4eee (diff)

Add latest changes from gitlab-org/gitlab@12-9-stable-ee
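
For reference, a minimal way to reproduce this commit summary locally with standard git, assuming the gitlab.com repository named above is still reachable:

    git clone https://gitlab.com/gitlab-org/gitlab-foss.git
    cd gitlab-foss
    git show --stat 1d20d43661f3e98bde2bb5dc163a45fd90f8ac88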
Diffstat (each row: file mode, path, lines changed):
-rw-r--r--.editorconfig15
-rw-r--r--.gitignore4
-rw-r--r--.gitlab-ci.yml24
-rw-r--r--.gitlab/CODEOWNERS37
-rw-r--r--.gitlab/ci/dev-fixtures.gitlab-ci.yml2
-rw-r--r--.gitlab/ci/docs.gitlab-ci.yml14
-rw-r--r--.gitlab/ci/frontend.gitlab-ci.yml88
-rw-r--r--.gitlab/ci/global.gitlab-ci.yml23
-rw-r--r--.gitlab/ci/memory.gitlab-ci.yml14
-rw-r--r--.gitlab/ci/pages.gitlab-ci.yml3
-rw-r--r--.gitlab/ci/qa.gitlab-ci.yml3
-rw-r--r--.gitlab/ci/rails.gitlab-ci.yml37
-rw-r--r--.gitlab/ci/reports.gitlab-ci.yml9
-rw-r--r--.gitlab/ci/review.gitlab-ci.yml14
-rw-r--r--.gitlab/ci/rules.gitlab-ci.yml22
-rw-r--r--.gitlab/ci/setup.gitlab-ci.yml6
-rw-r--r--.gitlab/ci/test-metadata.gitlab-ci.yml19
-rw-r--r--.gitlab/ci/yaml.gitlab-ci.yml4
-rw-r--r--.gitlab/issue_templates/Feature proposal.md6
-rw-r--r--.gitlab/issue_templates/Problem_Validation.md2
-rw-r--r--.gitlab/issue_templates/QA failure.md5
-rw-r--r--.gitlab/issue_templates/Security Release.md34
-rw-r--r--.gitlab/issue_templates/Technical Evaluation.md9
-rw-r--r--.gitlab/merge_request_templates/Documentation.md1
-rw-r--r--.gitlab/merge_request_templates/Security Release.md2
-rw-r--r--.haml-lint_todo.yml107
-rw-r--r--.markdownlint.json3
-rw-r--r--.rubocop.yml16
-rw-r--r--.vale.ini39
-rw-r--r--CHANGELOG-EE.md4
-rw-r--r--CHANGELOG.md42
-rw-r--r--GITALY_SERVER_VERSION2
-rw-r--r--GITLAB_PAGES_VERSION2
-rw-r--r--GITLAB_SHELL_VERSION2
-rw-r--r--GITLAB_WORKHORSE_VERSION2
-rw-r--r--Gemfile31
-rw-r--r--Gemfile.lock123
-rw-r--r--app/assets/images/cluster_app_logos/modsecurity.png (binary: 0 -> 6235 bytes)
-rw-r--r--app/assets/javascripts/api.js16
-rw-r--r--app/assets/javascripts/badges/components/badge_form.vue6
-rw-r--r--app/assets/javascripts/behaviors/markdown/render_mermaid.js154
-rw-r--r--app/assets/javascripts/blob/balsamiq/balsamiq_viewer.js2
-rw-r--r--app/assets/javascripts/blob/components/blob_content.vue8
-rw-r--r--app/assets/javascripts/blob/components/blob_edit_content.vue49
-rw-r--r--app/assets/javascripts/blob/components/blob_edit_header.vue35
-rw-r--r--app/assets/javascripts/blob/file_template_mediator.js6
-rw-r--r--app/assets/javascripts/blob/notebook/index.js86
-rw-r--r--app/assets/javascripts/blob/notebook/notebook_viewer.vue81
-rw-r--r--app/assets/javascripts/blob/pdf/index.js54
-rw-r--r--app/assets/javascripts/blob/pdf/pdf_viewer.vue49
-rw-r--r--app/assets/javascripts/blob/pipeline_tour_success_modal.vue78
-rw-r--r--app/assets/javascripts/blob/suggest_gitlab_ci_yml/components/popover.vue121
-rw-r--r--app/assets/javascripts/blob/suggest_gitlab_ci_yml/index.js17
-rw-r--r--app/assets/javascripts/blob/template_selector.js4
-rw-r--r--app/assets/javascripts/blob/utils.js24
-rw-r--r--app/assets/javascripts/blob/viewer/index.js71
-rw-r--r--app/assets/javascripts/blob_edit/blob_bundle.js6
-rw-r--r--app/assets/javascripts/boards/components/board.js11
-rw-r--r--app/assets/javascripts/boards/components/board_card.vue9
-rw-r--r--app/assets/javascripts/boards/components/board_list.vue8
-rw-r--r--app/assets/javascripts/boards/components/board_sidebar.js8
-rw-r--r--app/assets/javascripts/boards/components/boards_selector.vue110
-rw-r--r--app/assets/javascripts/boards/components/issue_card_inner.vue51
-rw-r--r--app/assets/javascripts/boards/components/issue_card_inner_scoped_label.vue45
-rw-r--r--app/assets/javascripts/boards/components/issue_due_date.vue10
-rw-r--r--app/assets/javascripts/boards/components/project_select.vue4
-rw-r--r--app/assets/javascripts/boards/index.js13
-rw-r--r--app/assets/javascripts/boards/mixins/sortable_default_options.js1
-rw-r--r--app/assets/javascripts/boards/models/assignee.js4
-rw-r--r--app/assets/javascripts/boards/models/issue.js40
-rw-r--r--app/assets/javascripts/boards/models/list.js5
-rw-r--r--app/assets/javascripts/boards/mount_multiple_boards_switcher.js9
-rw-r--r--app/assets/javascripts/boards/queries/board.fragment.graphql4
-rw-r--r--app/assets/javascripts/boards/queries/group_boards.query.graphql13
-rw-r--r--app/assets/javascripts/boards/queries/project_boards.query.graphql13
-rw-r--r--app/assets/javascripts/boards/stores/boards_store.js71
-rw-r--r--app/assets/javascripts/broadcast_notification.js10
-rw-r--r--app/assets/javascripts/ci_variable_list/components/ci_variable_modal.vue224
-rw-r--r--app/assets/javascripts/ci_variable_list/components/ci_variable_popover.vue55
-rw-r--r--app/assets/javascripts/ci_variable_list/components/ci_variable_settings.vue32
-rw-r--r--app/assets/javascripts/ci_variable_list/components/ci_variable_table.vue184
-rw-r--r--app/assets/javascripts/ci_variable_list/constants.js16
-rw-r--r--app/assets/javascripts/ci_variable_list/index.js25
-rw-r--r--app/assets/javascripts/ci_variable_list/store/actions.js155
-rw-r--r--app/assets/javascripts/ci_variable_list/store/index.js17
-rw-r--r--app/assets/javascripts/ci_variable_list/store/mutation_types.js22
-rw-r--r--app/assets/javascripts/ci_variable_list/store/mutations.js86
-rw-r--r--app/assets/javascripts/ci_variable_list/store/state.js24
-rw-r--r--app/assets/javascripts/ci_variable_list/store/utils.js43
-rw-r--r--app/assets/javascripts/clusters/clusters_bundle.js24
-rw-r--r--app/assets/javascripts/clusters/components/applications.vue45
-rw-r--r--app/assets/javascripts/clusters/components/ingress_modsecurity_settings.vue164
-rw-r--r--app/assets/javascripts/clusters/stores/clusters_store.js11
-rw-r--r--app/assets/javascripts/clusters_list/components/clusters.vue99
-rw-r--r--app/assets/javascripts/clusters_list/constants.js15
-rw-r--r--app/assets/javascripts/clusters_list/index.js22
-rw-r--r--app/assets/javascripts/clusters_list/store/actions.js37
-rw-r--r--app/assets/javascripts/clusters_list/store/index.js16
-rw-r--r--app/assets/javascripts/clusters_list/store/mutation_types.js2
-rw-r--r--app/assets/javascripts/clusters_list/store/mutations.js12
-rw-r--r--app/assets/javascripts/clusters_list/store/state.js5
-rw-r--r--app/assets/javascripts/code_navigation/store/actions.js5
-rw-r--r--app/assets/javascripts/commons/polyfills.js19
-rw-r--r--app/assets/javascripts/commons/polyfills/custom_event.js7
-rw-r--r--app/assets/javascripts/commons/polyfills/element.js32
-rw-r--r--app/assets/javascripts/commons/polyfills/event.js8
-rw-r--r--app/assets/javascripts/commons/polyfills/nodelist.js7
-rw-r--r--app/assets/javascripts/commons/polyfills/request_idle_callback.js7
-rw-r--r--app/assets/javascripts/commons/polyfills/svg.js10
-rw-r--r--app/assets/javascripts/confirm_modal.js14
-rw-r--r--app/assets/javascripts/contributors/components/contributors.vue10
-rw-r--r--app/assets/javascripts/contributors/stores/getters.js10
-rw-r--r--app/assets/javascripts/create_cluster/components/cluster_form_dropdown.vue3
-rw-r--r--app/assets/javascripts/create_cluster/eks_cluster/components/eks_cluster_configuration_form.vue29
-rw-r--r--app/assets/javascripts/create_cluster/eks_cluster/components/service_credentials_form.vue6
-rw-r--r--app/assets/javascripts/create_cluster/eks_cluster/store/getters.js3
-rw-r--r--app/assets/javascripts/create_cluster/eks_cluster/store/state.js2
-rw-r--r--app/assets/javascripts/create_cluster/gke_cluster/components/gke_dropdown_mixin.js3
-rw-r--r--app/assets/javascripts/create_cluster/gke_cluster/components/gke_project_id_dropdown.vue6
-rw-r--r--app/assets/javascripts/create_merge_request_dropdown.js10
-rw-r--r--app/assets/javascripts/cycle_analytics/components/stage_nav_item.vue7
-rw-r--r--app/assets/javascripts/deploy_keys/components/key.vue9
-rw-r--r--app/assets/javascripts/diff_notes/components/resolve_discussion_btn.js70
-rw-r--r--app/assets/javascripts/diff_notes/diff_notes_bundle.js3
-rw-r--r--app/assets/javascripts/diffs/components/app.vue13
-rw-r--r--app/assets/javascripts/diffs/components/compare_versions_dropdown.vue7
-rw-r--r--app/assets/javascripts/diffs/components/diff_expansion_cell.vue26
-rw-r--r--app/assets/javascripts/diffs/components/diff_file.vue29
-rw-r--r--app/assets/javascripts/diffs/components/diff_file_header.vue6
-rw-r--r--app/assets/javascripts/diffs/components/diff_file_row.vue2
-rw-r--r--app/assets/javascripts/diffs/components/diff_stats.vue2
-rw-r--r--app/assets/javascripts/diffs/components/image_diff_overlay.vue4
-rw-r--r--app/assets/javascripts/diffs/components/inline_diff_comment_row.vue2
-rw-r--r--app/assets/javascripts/diffs/components/inline_diff_table_row.vue22
-rw-r--r--app/assets/javascripts/diffs/components/inline_diff_view.vue2
-rw-r--r--app/assets/javascripts/diffs/components/no_changes.vue6
-rw-r--r--app/assets/javascripts/diffs/components/parallel_diff_comment_row.vue4
-rw-r--r--app/assets/javascripts/diffs/components/parallel_diff_expansion_row.vue2
-rw-r--r--app/assets/javascripts/diffs/components/parallel_diff_table_row.vue33
-rw-r--r--app/assets/javascripts/diffs/components/parallel_diff_view.vue3
-rw-r--r--app/assets/javascripts/diffs/index.js2
-rw-r--r--app/assets/javascripts/diffs/store/actions.js26
-rw-r--r--app/assets/javascripts/diffs/store/getters.js24
-rw-r--r--app/assets/javascripts/diffs/store/modules/diff_state.js1
-rw-r--r--app/assets/javascripts/diffs/store/mutation_types.js1
-rw-r--r--app/assets/javascripts/diffs/store/mutations.js7
-rw-r--r--app/assets/javascripts/diffs/store/utils.js74
-rw-r--r--app/assets/javascripts/dropzone_input.js11
-rw-r--r--app/assets/javascripts/due_date_select.js1
-rw-r--r--app/assets/javascripts/editor/editor_lite.js9
-rw-r--r--app/assets/javascripts/emoji/index.js4
-rw-r--r--app/assets/javascripts/environments/components/enable_review_app_button.vue8
-rw-r--r--app/assets/javascripts/error_tracking/components/error_details.vue170
-rw-r--r--app/assets/javascripts/error_tracking/components/error_tracking_list.vue90
-rw-r--r--app/assets/javascripts/error_tracking/store/list/actions.js7
-rw-r--r--app/assets/javascripts/error_tracking/store/list/mutation_types.js1
-rw-r--r--app/assets/javascripts/error_tracking/store/list/mutations.js3
-rw-r--r--app/assets/javascripts/error_tracking/store/list/state.js1
-rw-r--r--app/assets/javascripts/filtered_search/available_dropdown_mappings.js4
-rw-r--r--app/assets/javascripts/filtered_search/dropdown_utils.js8
-rw-r--r--app/assets/javascripts/filtered_search/filtered_search_dropdown_manager.js20
-rw-r--r--app/assets/javascripts/filtered_search/filtered_search_manager.js54
-rw-r--r--app/assets/javascripts/filtered_search/issuable_filtered_search_token_keys.js4
-rw-r--r--app/assets/javascripts/filtered_search/stores/recent_searches_store.js4
-rw-r--r--app/assets/javascripts/filtered_search/visual_token_value.js39
-rw-r--r--app/assets/javascripts/frequent_items/components/frequent_items_list.vue2
-rw-r--r--app/assets/javascripts/frequent_items/components/frequent_items_list_item.vue19
-rw-r--r--app/assets/javascripts/frequent_items/components/frequent_items_search_input.vue4
-rw-r--r--app/assets/javascripts/frequent_items/utils.js4
-rw-r--r--app/assets/javascripts/grafana_integration/components/grafana_integration.vue4
-rw-r--r--app/assets/javascripts/groups/components/group_item.vue6
-rw-r--r--app/assets/javascripts/groups/components/item_actions.vue2
-rw-r--r--app/assets/javascripts/groups/constants.js4
-rw-r--r--app/assets/javascripts/ide/components/commit_sidebar/editor_header.vue30
-rw-r--r--app/assets/javascripts/ide/components/commit_sidebar/form.vue14
-rw-r--r--app/assets/javascripts/ide/components/commit_sidebar/list.vue57
-rw-r--r--app/assets/javascripts/ide/components/commit_sidebar/list_item.vue16
-rw-r--r--app/assets/javascripts/ide/components/file_row_extra.vue13
-rw-r--r--app/assets/javascripts/ide/components/nav_dropdown_button.vue4
-rw-r--r--app/assets/javascripts/ide/components/new_dropdown/upload.vue21
-rw-r--r--app/assets/javascripts/ide/components/pipelines/list.vue3
-rw-r--r--app/assets/javascripts/ide/components/preview/clientside.vue23
-rw-r--r--app/assets/javascripts/ide/components/repo_commit_section.vue22
-rw-r--r--app/assets/javascripts/ide/components/repo_editor.vue1
-rw-r--r--app/assets/javascripts/ide/index.js1
-rw-r--r--app/assets/javascripts/ide/lib/themes/white.js144
-rw-r--r--app/assets/javascripts/ide/stores/state.js1
-rw-r--r--app/assets/javascripts/ide/utils.js53
-rw-r--r--app/assets/javascripts/import_projects/components/import_projects_table.vue4
-rw-r--r--app/assets/javascripts/issuable_suggestions/components/app.vue3
-rw-r--r--app/assets/javascripts/issuable_suggestions/components/item.vue6
-rw-r--r--app/assets/javascripts/issuables_list/components/issuables_list_app.vue21
-rw-r--r--app/assets/javascripts/issue_show/stores/index.js3
-rw-r--r--app/assets/javascripts/issue_show/utils/update_description.js4
-rw-r--r--app/assets/javascripts/labels_select.js39
-rw-r--r--app/assets/javascripts/lib/utils/common_utils.js77
-rw-r--r--app/assets/javascripts/lib/utils/datetime_utility.js19
-rw-r--r--app/assets/javascripts/lib/utils/highlight.js7
-rw-r--r--app/assets/javascripts/lib/utils/icon_utils.js44
-rw-r--r--app/assets/javascripts/lib/utils/text_utility.js24
-rw-r--r--app/assets/javascripts/lib/utils/unit_format/formatter_factory.js139
-rw-r--r--app/assets/javascripts/lib/utils/unit_format/index.js257
-rw-r--r--app/assets/javascripts/locale/sprintf.js4
-rw-r--r--app/assets/javascripts/logs/components/environment_logs.vue278
-rw-r--r--app/assets/javascripts/logs/components/log_control_buttons.vue92
-rw-r--r--app/assets/javascripts/logs/index.js24
-rw-r--r--app/assets/javascripts/logs/stores/actions.js148
-rw-r--r--app/assets/javascripts/logs/stores/getters.js9
-rw-r--r--app/assets/javascripts/logs/stores/index.js23
-rw-r--r--app/assets/javascripts/logs/stores/mutation_types.js19
-rw-r--r--app/assets/javascripts/logs/stores/mutations.js93
-rw-r--r--app/assets/javascripts/logs/stores/state.js51
-rw-r--r--app/assets/javascripts/logs/utils.js28
-rw-r--r--app/assets/javascripts/main.js5
-rw-r--r--app/assets/javascripts/monitoring/components/charts/column.vue28
-rw-r--r--app/assets/javascripts/monitoring/components/charts/empty_chart.vue5
-rw-r--r--app/assets/javascripts/monitoring/components/charts/heatmap.vue5
-rw-r--r--app/assets/javascripts/monitoring/components/charts/options.js78
-rw-r--r--app/assets/javascripts/monitoring/components/charts/single_stat.vue5
-rw-r--r--app/assets/javascripts/monitoring/components/charts/stacked_column.vue6
-rw-r--r--app/assets/javascripts/monitoring/components/charts/time_series.vue49
-rw-r--r--app/assets/javascripts/monitoring/components/dashboard.vue26
-rw-r--r--app/assets/javascripts/monitoring/components/embed.vue9
-rw-r--r--app/assets/javascripts/monitoring/components/panel_type.vue228
-rw-r--r--app/assets/javascripts/monitoring/components/shared/prometheus_header.vue15
-rw-r--r--app/assets/javascripts/monitoring/constants.js36
-rw-r--r--app/assets/javascripts/monitoring/stores/actions.js44
-rw-r--r--app/assets/javascripts/monitoring/stores/getters.js4
-rw-r--r--app/assets/javascripts/monitoring/stores/mutations.js37
-rw-r--r--app/assets/javascripts/monitoring/stores/state.js2
-rw-r--r--app/assets/javascripts/monitoring/stores/utils.js110
-rw-r--r--app/assets/javascripts/monitoring/utils.js2
-rw-r--r--app/assets/javascripts/notes.js8
-rw-r--r--app/assets/javascripts/notes/components/discussion_actions.vue2
-rw-r--r--app/assets/javascripts/notes/components/discussion_counter.vue9
-rw-r--r--app/assets/javascripts/notes/components/discussion_jump_to_next_button.vue11
-rw-r--r--app/assets/javascripts/notes/components/discussion_resolve_button.vue12
-rw-r--r--app/assets/javascripts/notes/mixins/discussion_navigation.js152
-rw-r--r--app/assets/javascripts/notes/services/notes_service.js41
-rw-r--r--app/assets/javascripts/notes/stores/actions.js88
-rw-r--r--app/assets/javascripts/notes/stores/getters.js2
-rw-r--r--app/assets/javascripts/notes/stores/modules/index.js2
-rw-r--r--app/assets/javascripts/notes/stores/mutations.js8
-rw-r--r--app/assets/javascripts/notifications_dropdown.js2
-rw-r--r--app/assets/javascripts/operation_settings/components/external_dashboard.vue4
-rw-r--r--app/assets/javascripts/pages/admin/broadcast_messages/broadcast_message.js6
-rw-r--r--app/assets/javascripts/pages/admin/clusters/index/index.js2
-rw-r--r--app/assets/javascripts/pages/admin/index.js3
-rw-r--r--app/assets/javascripts/pages/admin/integrations/edit/index.js16
-rw-r--r--app/assets/javascripts/pages/admin/projects/index/components/delete_project_modal.vue8
-rw-r--r--app/assets/javascripts/pages/admin/sessions/index.js1
-rw-r--r--app/assets/javascripts/pages/admin/users/components/delete_user_modal.vue6
-rw-r--r--app/assets/javascripts/pages/groups/clusters/index/index.js2
-rw-r--r--app/assets/javascripts/pages/groups/new/group_path_validator.js4
-rw-r--r--app/assets/javascripts/pages/groups/settings/ci_cd/show/index.js26
-rw-r--r--app/assets/javascripts/pages/projects/blob/new/index.js11
-rw-r--r--app/assets/javascripts/pages/projects/blob/show/index.js22
-rw-r--r--app/assets/javascripts/pages/projects/clusters/index/index.js2
-rw-r--r--app/assets/javascripts/pages/projects/labels/components/promote_label_modal.vue4
-rw-r--r--app/assets/javascripts/pages/projects/logs/index.js3
-rw-r--r--app/assets/javascripts/pages/projects/pipeline_schedules/shared/components/interval_pattern_input.vue4
-rw-r--r--app/assets/javascripts/pages/projects/pipelines/init_pipelines.js28
-rw-r--r--app/assets/javascripts/pages/projects/registry/repositories/index.js10
-rw-r--r--app/assets/javascripts/pages/projects/releases/show/index.js3
-rw-r--r--app/assets/javascripts/pages/projects/settings/ci_cd/show/index.js29
-rw-r--r--app/assets/javascripts/pages/projects/settings/integrations/show/index.js6
-rw-r--r--app/assets/javascripts/pages/projects/settings/operations/show/index.js4
-rw-r--r--app/assets/javascripts/pages/projects/settings/repository/form.js2
-rw-r--r--app/assets/javascripts/pages/projects/shared/permissions/components/settings_panel.vue26
-rw-r--r--app/assets/javascripts/pages/projects/snippets/show/index.js1
-rw-r--r--app/assets/javascripts/pages/projects/wikis/components/delete_wiki_modal.vue4
-rw-r--r--app/assets/javascripts/pages/projects/wikis/wikis.js16
-rw-r--r--app/assets/javascripts/pages/registrations/new/index.js11
-rw-r--r--app/assets/javascripts/pages/sessions/new/username_validator.js4
-rw-r--r--app/assets/javascripts/pages/snippets/show/index.js1
-rw-r--r--app/assets/javascripts/pages/users/activity_calendar.js6
-rw-r--r--app/assets/javascripts/pipelines/pipeline_details_bundle.js12
-rw-r--r--app/assets/javascripts/profile/account/components/update_username.vue10
-rw-r--r--app/assets/javascripts/profile/gl_crop.js10
-rw-r--r--app/assets/javascripts/projects/project_new.js4
-rw-r--r--app/assets/javascripts/prometheus_metrics/prometheus_metrics.js6
-rw-r--r--app/assets/javascripts/registry/explorer/components/project_policy_alert.vue69
-rw-r--r--app/assets/javascripts/registry/explorer/constants.js47
-rw-r--r--app/assets/javascripts/registry/explorer/pages/details.vue267
-rw-r--r--app/assets/javascripts/registry/explorer/pages/index.vue2
-rw-r--r--app/assets/javascripts/registry/explorer/pages/list.vue147
-rw-r--r--app/assets/javascripts/registry/explorer/stores/actions.js13
-rw-r--r--app/assets/javascripts/registry/explorer/stores/getters.js6
-rw-r--r--app/assets/javascripts/registry/explorer/stores/index.js2
-rw-r--r--app/assets/javascripts/registry/explorer/stores/mutations.js1
-rw-r--r--app/assets/javascripts/registry/settings/components/registry_settings_app.vue44
-rw-r--r--app/assets/javascripts/related_merge_requests/components/related_merge_requests.vue16
-rw-r--r--app/assets/javascripts/releases/components/app_edit.vue23
-rw-r--r--app/assets/javascripts/releases/components/app_index.vue51
-rw-r--r--app/assets/javascripts/releases/components/app_show.vue29
-rw-r--r--app/assets/javascripts/releases/components/evidence_block.vue8
-rw-r--r--app/assets/javascripts/releases/components/release_block.vue23
-rw-r--r--app/assets/javascripts/releases/components/release_block_assets.vue2
-rw-r--r--app/assets/javascripts/releases/components/release_block_author.vue4
-rw-r--r--app/assets/javascripts/releases/components/release_block_footer.vue8
-rw-r--r--app/assets/javascripts/releases/components/release_block_header.vue14
-rw-r--r--app/assets/javascripts/releases/components/release_block_metadata.vue16
-rw-r--r--app/assets/javascripts/releases/components/release_block_milestone_info.vue60
-rw-r--r--app/assets/javascripts/releases/components/release_block_milestones.vue2
-rw-r--r--app/assets/javascripts/releases/constants.js8
-rw-r--r--app/assets/javascripts/releases/mount_edit.js10
-rw-r--r--app/assets/javascripts/releases/mount_index.js12
-rw-r--r--app/assets/javascripts/releases/mount_show.js21
-rw-r--r--app/assets/javascripts/releases/stores/index.js6
-rw-r--r--app/assets/javascripts/releases/stores/modules/detail/actions.js9
-rw-r--r--app/assets/javascripts/releases/stores/modules/list/actions.js12
-rw-r--r--app/assets/javascripts/reports/store/utils.js13
-rw-r--r--app/assets/javascripts/repository/components/last_commit.vue4
-rw-r--r--app/assets/javascripts/right_sidebar.js34
-rw-r--r--app/assets/javascripts/self_monitor/components/self_monitor_form.vue2
-rw-r--r--app/assets/javascripts/self_monitor/store/state.js2
-rw-r--r--app/assets/javascripts/shared/popover.js4
-rw-r--r--app/assets/javascripts/sidebar/components/assignees/assignee_title.vue6
-rw-r--r--app/assets/javascripts/sidebar/components/lock/lock_issue_sidebar.vue4
-rw-r--r--app/assets/javascripts/sidebar/components/time_tracking/sidebar_time_tracking.vue4
-rw-r--r--app/assets/javascripts/sidebar/lib/sidebar_move_issue.js4
-rw-r--r--app/assets/javascripts/sidebar/queries/sidebarDetails.query.graphql7
-rw-r--r--app/assets/javascripts/sidebar/queries/sidebarDetailsForHealthStatusFeatureFlag.query.graphql7
-rw-r--r--app/assets/javascripts/sidebar/queries/updateStatus.mutation.graphql7
-rw-r--r--app/assets/javascripts/sidebar/services/sidebar_service.js38
-rw-r--r--app/assets/javascripts/sidebar/sidebar_mediator.js6
-rw-r--r--app/assets/javascripts/snippet/snippet_bundle.js50
-rw-r--r--app/assets/javascripts/snippets/components/snippet_blob_edit.vue36
-rw-r--r--app/assets/javascripts/snippets/components/snippet_description_edit.vue72
-rw-r--r--app/assets/javascripts/snippets/components/snippet_title.vue2
-rw-r--r--app/assets/javascripts/snippets/components/snippet_visibility_edit.vue95
-rw-r--r--app/assets/javascripts/snippets/constants.js18
-rw-r--r--app/assets/javascripts/snippets/fragments/snippetBase.fragment.graphql1
-rw-r--r--app/assets/javascripts/terminal/terminal.js4
-rw-r--r--app/assets/javascripts/tracking.js2
-rw-r--r--app/assets/javascripts/u2f/authenticate.js6
-rw-r--r--app/assets/javascripts/u2f/register.js4
-rw-r--r--app/assets/javascripts/user_popovers.js1
-rw-r--r--app/assets/javascripts/vue_merge_request_widget/components/deployment/constants.js5
-rw-r--r--app/assets/javascripts/vue_merge_request_widget/components/deployment/deployment.vue57
-rw-r--r--app/assets/javascripts/vue_merge_request_widget/components/deployment/deployment_action_button.vue75
-rw-r--r--app/assets/javascripts/vue_merge_request_widget/components/deployment/deployment_actions.vue190
-rw-r--r--app/assets/javascripts/vue_merge_request_widget/components/deployment/deployment_stop_button.vue83
-rw-r--r--app/assets/javascripts/vue_merge_request_widget/components/deployment/deployment_view_button.vue21
-rw-r--r--app/assets/javascripts/vue_merge_request_widget/components/mr_widget_header.vue5
-rw-r--r--app/assets/javascripts/vue_merge_request_widget/components/mr_widget_pipeline_container.vue4
-rw-r--r--app/assets/javascripts/vue_merge_request_widget/components/mr_widget_suggest_pipeline.vue68
-rw-r--r--app/assets/javascripts/vue_merge_request_widget/components/states/commits_header.vue4
-rw-r--r--app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_auto_merge_enabled.vue4
-rw-r--r--app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_conflicts.vue4
-rw-r--r--app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_pipeline_tour.vue143
-rw-r--r--app/assets/javascripts/vue_merge_request_widget/components/states/ready_to_merge.vue34
-rw-r--r--app/assets/javascripts/vue_merge_request_widget/components/states/work_in_progress.vue42
-rw-r--r--app/assets/javascripts/vue_merge_request_widget/mr_widget_options.vue8
-rw-r--r--app/assets/javascripts/vue_merge_request_widget/services/mr_widget_service.js2
-rw-r--r--app/assets/javascripts/vue_merge_request_widget/stores/mr_widget_store.js25
-rw-r--r--app/assets/javascripts/vue_shared/components/blob_viewers/mixins.js4
-rw-r--r--app/assets/javascripts/vue_shared/components/blob_viewers/rich_viewer.vue6
-rw-r--r--app/assets/javascripts/vue_shared/components/changed_file_icon.vue22
-rw-r--r--app/assets/javascripts/vue_shared/components/confirm_modal.vue68
-rw-r--r--app/assets/javascripts/vue_shared/components/date_time_picker/date_time_picker.vue6
-rw-r--r--app/assets/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer.vue1
-rw-r--r--app/assets/javascripts/vue_shared/components/gl_mentions.vue101
-rw-r--r--app/assets/javascripts/vue_shared/components/loading_button.vue21
-rw-r--r--app/assets/javascripts/vue_shared/components/markdown/toolbar.vue7
-rw-r--r--app/assets/javascripts/vue_shared/components/notes/system_note.vue7
-rw-r--r--app/assets/javascripts/vue_shared/components/pagination/constants.js4
-rw-r--r--app/assets/javascripts/vue_shared/components/sidebar/labels_select/dropdown_title.vue7
-rw-r--r--app/assets/javascripts/vue_shared/components/sidebar/labels_select/dropdown_value.vue31
-rw-r--r--app/assets/javascripts/vue_shared/components/sidebar/labels_select/dropdown_value_regular_label.vue34
-rw-r--r--app/assets/javascripts/vue_shared/components/sidebar/labels_select/dropdown_value_scoped_label.vue47
-rw-r--r--app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/dropdown_button.vue21
-rw-r--r--app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/dropdown_contents.vue30
-rw-r--r--app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_create_view.vue124
-rw-r--r--app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view.vue178
-rw-r--r--app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/dropdown_title.vue39
-rw-r--r--app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/dropdown_value.vue53
-rw-r--r--app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/labels_select_root.vue195
-rw-r--r--app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/store/actions.js61
-rw-r--r--app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/store/getters.js30
-rw-r--r--app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/store/index.js12
-rw-r--r--app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/store/mutation_types.js20
-rw-r--r--app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/store/mutations.js76
-rw-r--r--app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/store/state.js27
-rw-r--r--app/assets/javascripts/vue_shared/components/user_popover/user_popover.vue54
-rw-r--r--app/assets/javascripts/vue_shared/constants.js56
-rw-r--r--app/assets/stylesheets/bootstrap_migration.scss4
-rw-r--r--app/assets/stylesheets/components/popover.scss10
-rw-r--r--app/assets/stylesheets/framework.scss1
-rw-r--r--app/assets/stylesheets/framework/blocks.scss14
-rw-r--r--app/assets/stylesheets/framework/broadcast_messages.scss9
-rw-r--r--app/assets/stylesheets/framework/buttons.scss7
-rw-r--r--app/assets/stylesheets/framework/common.scss4
-rw-r--r--app/assets/stylesheets/framework/dropdowns.scss85
-rw-r--r--app/assets/stylesheets/framework/editor-lite.scss5
-rw-r--r--app/assets/stylesheets/framework/filters.scss1
-rw-r--r--app/assets/stylesheets/framework/images.scss2
-rw-r--r--app/assets/stylesheets/framework/mixins.scss19
-rw-r--r--app/assets/stylesheets/framework/modal.scss10
-rw-r--r--app/assets/stylesheets/framework/spinner.scss3
-rw-r--r--app/assets/stylesheets/framework/typography.scss4
-rw-r--r--app/assets/stylesheets/framework/variables.scss2
-rw-r--r--app/assets/stylesheets/framework/vue_transitions.scss2
-rw-r--r--app/assets/stylesheets/highlight/common.scss12
-rw-r--r--app/assets/stylesheets/highlight/themes/dark.scss11
-rw-r--r--app/assets/stylesheets/highlight/themes/monokai.scss11
-rw-r--r--app/assets/stylesheets/highlight/themes/none.scss11
-rw-r--r--app/assets/stylesheets/highlight/themes/solarized-dark.scss11
-rw-r--r--app/assets/stylesheets/highlight/themes/solarized-light.scss11
-rw-r--r--app/assets/stylesheets/highlight/white_base.scss18
-rw-r--r--app/assets/stylesheets/notify.scss16
-rw-r--r--app/assets/stylesheets/page_bundles/_ide_monaco_overrides.scss146
-rw-r--r--app/assets/stylesheets/page_bundles/ide.scss231
-rw-r--r--app/assets/stylesheets/pages/boards.scss17
-rw-r--r--app/assets/stylesheets/pages/builds.scss57
-rw-r--r--app/assets/stylesheets/pages/clusters.scss6
-rw-r--r--app/assets/stylesheets/pages/cycle_analytics.scss4
-rw-r--r--app/assets/stylesheets/pages/diff.scss8
-rw-r--r--app/assets/stylesheets/pages/environments.scss8
-rw-r--r--app/assets/stylesheets/pages/error_details.scss20
-rw-r--r--app/assets/stylesheets/pages/error_list.scss52
-rw-r--r--app/assets/stylesheets/pages/error_tracking_list.scss2
-rw-r--r--app/assets/stylesheets/pages/groups.scss2
-rw-r--r--app/assets/stylesheets/pages/issuable.scss39
-rw-r--r--app/assets/stylesheets/pages/issues.scss19
-rw-r--r--app/assets/stylesheets/pages/labels.scss23
-rw-r--r--app/assets/stylesheets/pages/merge_requests.scss4
-rw-r--r--app/assets/stylesheets/pages/milestone.scss16
-rw-r--r--app/assets/stylesheets/pages/notes.scss14
-rw-r--r--app/assets/stylesheets/pages/pipelines.scss14
-rw-r--r--app/assets/stylesheets/pages/profile.scss27
-rw-r--r--app/assets/stylesheets/pages/prometheus.scss4
-rw-r--r--app/assets/stylesheets/pages/settings.scss32
-rw-r--r--app/assets/stylesheets/utilities.scss7
-rw-r--r--app/assets/stylesheets/vendors/tribute.scss41
-rw-r--r--app/controllers/admin/application_settings_controller.rb2
-rw-r--r--app/controllers/admin/broadcast_messages_controller.rb1
-rw-r--r--app/controllers/admin/concerns/authenticates_2fa_for_admin_mode.rb84
-rw-r--r--app/controllers/admin/dashboard_controller.rb1
-rw-r--r--app/controllers/admin/integrations_controller.rb67
-rw-r--r--app/controllers/admin/jobs_controller.rb4
-rw-r--r--app/controllers/admin/serverless/domains_controller.rb16
-rw-r--r--app/controllers/admin/sessions_controller.rb18
-rw-r--r--app/controllers/application_controller.rb13
-rw-r--r--app/controllers/concerns/authenticates_with_two_factor.rb2
-rw-r--r--app/controllers/concerns/clientside_preview_csp.rb17
-rw-r--r--app/controllers/concerns/cycle_analytics_params.rb2
-rw-r--r--app/controllers/concerns/invisible_captcha.rb51
-rw-r--r--app/controllers/concerns/invisible_captcha_on_signup.rb51
-rw-r--r--app/controllers/concerns/issuable_actions.rb2
-rw-r--r--app/controllers/concerns/lfs_request.rb4
-rw-r--r--app/controllers/concerns/notes_actions.rb2
-rw-r--r--app/controllers/concerns/sessionless_authentication.rb12
-rw-r--r--app/controllers/concerns/snippets_actions.rb13
-rw-r--r--app/controllers/concerns/spammable_actions.rb2
-rw-r--r--app/controllers/concerns/static_object_external_storage_csp.rb16
-rw-r--r--app/controllers/concerns/uploads_actions.rb5
-rw-r--r--app/controllers/explore/snippets_controller.rb4
-rw-r--r--app/controllers/graphql_controller.rb5
-rw-r--r--app/controllers/groups/deploy_tokens_controller.rb12
-rw-r--r--app/controllers/groups/registry/repositories_controller.rb2
-rw-r--r--app/controllers/groups/settings/ci_cd_controller.rb30
-rw-r--r--app/controllers/groups_controller.rb5
-rw-r--r--app/controllers/ide_controller.rb3
-rw-r--r--app/controllers/import/gitea_controller.rb23
-rw-r--r--app/controllers/import/gitlab_projects_controller.rb30
-rw-r--r--app/controllers/ldap/omniauth_callbacks_controller.rb8
-rw-r--r--app/controllers/omniauth_callbacks_controller.rb17
-rw-r--r--app/controllers/profiles/keys_controller.rb2
-rw-r--r--app/controllers/profiles_controller.rb1
-rw-r--r--app/controllers/projects/blob_controller.rb1
-rw-r--r--app/controllers/projects/commits_controller.rb3
-rw-r--r--app/controllers/projects/deploy_keys_controller.rb17
-rw-r--r--app/controllers/projects/deploy_tokens_controller.rb2
-rw-r--r--app/controllers/projects/forks_controller.rb27
-rw-r--r--app/controllers/projects/graphs_controller.rb3
-rw-r--r--app/controllers/projects/hooks_controller.rb11
-rw-r--r--app/controllers/projects/import/jira_controller.rb60
-rw-r--r--app/controllers/projects/issues_controller.rb1
-rw-r--r--app/controllers/projects/jobs_controller.rb4
-rw-r--r--app/controllers/projects/logs_controller.rb79
-rw-r--r--app/controllers/projects/merge_requests_controller.rb14
-rw-r--r--app/controllers/projects/milestones_controller.rb3
-rw-r--r--app/controllers/projects/performance_monitoring/dashboards_controller.rb48
-rw-r--r--app/controllers/projects/pipeline_schedules_controller.rb4
-rw-r--r--app/controllers/projects/pipelines_controller.rb8
-rw-r--r--app/controllers/projects/registry/repositories_controller.rb2
-rw-r--r--app/controllers/projects/releases_controller.rb20
-rw-r--r--app/controllers/projects/repositories_controller.rb8
-rw-r--r--app/controllers/projects/services_controller.rb30
-rw-r--r--app/controllers/projects/settings/ci_cd_controller.rb31
-rw-r--r--app/controllers/projects/settings/integrations_controller.rb4
-rw-r--r--app/controllers/projects/settings/operations_controller.rb5
-rw-r--r--app/controllers/projects/settings/repository_controller.rb22
-rw-r--r--app/controllers/projects/snippets_controller.rb22
-rw-r--r--app/controllers/projects/tags/releases_controller.rb6
-rw-r--r--app/controllers/projects_controller.rb2
-rw-r--r--app/controllers/registrations_controller.rb2
-rw-r--r--app/controllers/repositories/git_http_client_controller.rb9
-rw-r--r--app/controllers/repositories/git_http_controller.rb23
-rw-r--r--app/controllers/repositories/lfs_api_controller.rb14
-rw-r--r--app/controllers/search_controller.rb11
-rw-r--r--app/controllers/sessions_controller.rb6
-rw-r--r--app/controllers/snippets_controller.rb35
-rw-r--r--app/controllers/users/terms_controller.rb2
-rw-r--r--app/finders/award_emojis_finder.rb2
-rw-r--r--app/finders/ci/jobs_finder.rb53
-rw-r--r--app/finders/ci/pipeline_schedules_finder.rb28
-rw-r--r--app/finders/ci/pipelines_finder.rb158
-rw-r--r--app/finders/ci/pipelines_for_merge_request_finder.rb90
-rw-r--r--app/finders/ci/runner_jobs_finder.rb45
-rw-r--r--app/finders/fork_targets_finder.rb20
-rw-r--r--app/finders/jobs_finder.rb51
-rw-r--r--app/finders/pipeline_schedules_finder.rb26
-rw-r--r--app/finders/pipelines_finder.rb156
-rw-r--r--app/finders/projects/export_job_finder.rb29
-rw-r--r--app/finders/runner_jobs_finder.rb43
-rw-r--r--app/finders/serverless_domain_finder.rb35
-rw-r--r--app/finders/snippets_finder.rb4
-rw-r--r--app/graphql/gitlab_schema.rb3
-rw-r--r--app/graphql/mutations/admin/sidekiq_queues/delete_jobs.rb52
-rw-r--r--app/graphql/mutations/concerns/mutations/resolves_group.rb6
-rw-r--r--app/graphql/mutations/concerns/mutations/resolves_issuable.rb24
-rw-r--r--app/graphql/mutations/concerns/mutations/resolves_project.rb6
-rw-r--r--app/graphql/mutations/issues/base.rb8
-rw-r--r--app/graphql/mutations/issues/update.rb20
-rw-r--r--app/graphql/mutations/merge_requests/base.rb8
-rw-r--r--app/graphql/resolvers/base_resolver.rb4
-rw-r--r--app/graphql/resolvers/boards_resolver.rb18
-rw-r--r--app/graphql/resolvers/concerns/resolves_pipelines.rb2
-rw-r--r--app/graphql/resolvers/issues_resolver.rb9
-rw-r--r--app/graphql/resolvers/projects/snippets_resolver.rb5
-rw-r--r--app/graphql/types/admin/sidekiq_queues/delete_jobs_response_type.rb29
-rw-r--r--app/graphql/types/base_field.rb36
-rw-r--r--app/graphql/types/group_type.rb13
-rw-r--r--app/graphql/types/mutation_type.rb1
-rw-r--r--app/graphql/types/project_type.rb13
-rw-r--r--app/graphql/types/snippet_type.rb1
-rw-r--r--app/helpers/analytics_navbar_helper.rb9
-rw-r--r--app/helpers/application_helper.rb2
-rw-r--r--app/helpers/application_settings_helper.rb4
-rw-r--r--app/helpers/auth_helper.rb22
-rw-r--r--app/helpers/blob_helper.rb16
-rw-r--r--app/helpers/boards_helper.rb10
-rw-r--r--app/helpers/broadcast_messages_helper.rb16
-rw-r--r--app/helpers/ci_variables_helper.rb16
-rw-r--r--app/helpers/clusters_helper.rb27
-rw-r--r--app/helpers/environments_helper.rb9
-rw-r--r--app/helpers/form_helper.rb15
-rw-r--r--app/helpers/ide_helper.rb5
-rw-r--r--app/helpers/issuables_helper.rb1
-rw-r--r--app/helpers/labels_helper.rb92
-rw-r--r--app/helpers/markup_helper.rb6
-rw-r--r--app/helpers/milestones_helper.rb20
-rw-r--r--app/helpers/projects_helper.rb11
-rw-r--r--app/helpers/releases_helper.rb4
-rw-r--r--app/helpers/submodule_helper.rb13
-rw-r--r--app/helpers/suggest_pipeline_helper.rb9
-rw-r--r--app/helpers/system_note_helper.rb4
-rw-r--r--app/helpers/user_callouts_helper.rb5
-rw-r--r--app/helpers/users_helper.rb15
-rw-r--r--app/helpers/visibility_level_helper.rb2
-rw-r--r--app/mailers/emails/pipelines.rb4
-rw-r--r--app/mailers/previews/notify_preview.rb4
-rw-r--r--app/models/ability.rb2
-rw-r--r--app/models/appearance.rb2
-rw-r--r--app/models/application_setting.rb13
-rw-r--r--app/models/application_setting_implementation.rb56
-rw-r--r--app/models/ci/bridge.rb16
-rw-r--r--app/models/ci/build.rb53
-rw-r--r--app/models/ci/job_artifact.rb23
-rw-r--r--app/models/ci/job_variable.rb5
-rw-r--r--app/models/ci/pipeline.rb41
-rw-r--r--app/models/ci/processable.rb6
-rw-r--r--app/models/ci/ref.rb23
-rw-r--r--app/models/clusters/applications/cert_manager.rb4
-rw-r--r--app/models/clusters/applications/crossplane.rb3
-rw-r--r--app/models/clusters/applications/elastic_stack.rb5
-rw-r--r--app/models/clusters/applications/ingress.rb9
-rw-r--r--app/models/clusters/applications/knative.rb16
-rw-r--r--app/models/clusters/applications/prometheus.rb15
-rw-r--r--app/models/clusters/applications/runner.rb2
-rw-r--r--app/models/clusters/cluster.rb37
-rw-r--r--app/models/clusters/concerns/application_core.rb2
-rw-r--r--app/models/clusters/concerns/application_data.rb8
-rw-r--r--app/models/clusters/concerns/application_status.rb5
-rw-r--r--app/models/commit.rb19
-rw-r--r--app/models/concerns/blob_language_from_git_attributes.rb4
-rw-r--r--app/models/concerns/bulk_insert_safe.rb135
-rw-r--r--app/models/concerns/bulk_insertable_associations.rb115
-rw-r--r--app/models/concerns/cache_markdown_field.rb18
-rw-r--r--app/models/concerns/ci/has_ref.rb33
-rw-r--r--app/models/concerns/ci/metadatable.rb2
-rw-r--r--app/models/concerns/has_ref.rb31
-rw-r--r--app/models/concerns/has_repository.rb5
-rw-r--r--app/models/concerns/issuable.rb14
-rw-r--r--app/models/concerns/milestone_eventable.rb9
-rw-r--r--app/models/concerns/milestoneable.rb2
-rw-r--r--app/models/concerns/milestoneish.rb26
-rw-r--r--app/models/concerns/reactive_caching.rb23
-rw-r--r--app/models/concerns/spammable.rb4
-rw-r--r--app/models/concerns/time_trackable.rb2
-rw-r--r--app/models/concerns/usage_statistics.rb13
-rw-r--r--app/models/concerns/versioned_description.rb2
-rw-r--r--app/models/concerns/with_uploads.rb1
-rw-r--r--app/models/deploy_token.rb2
-rw-r--r--app/models/deployment.rb13
-rw-r--r--app/models/description_version.rb8
-rw-r--r--app/models/discussion.rb4
-rw-r--r--app/models/environment.rb17
-rw-r--r--app/models/environment_status.rb14
-rw-r--r--app/models/epic.rb2
-rw-r--r--app/models/error_tracking/project_error_tracking_setting.rb6
-rw-r--r--app/models/event.rb54
-rw-r--r--app/models/external_pull_request.rb2
-rw-r--r--app/models/group.rb17
-rw-r--r--app/models/group_deploy_token.rb2
-rw-r--r--app/models/hooks/project_hook.rb2
-rw-r--r--app/models/identity.rb2
-rw-r--r--app/models/internal_id.rb2
-rw-r--r--app/models/internal_id_enums.rb10
-rw-r--r--app/models/issue.rb14
-rw-r--r--app/models/jira_import_data.rb19
-rw-r--r--app/models/key.rb24
-rw-r--r--app/models/label.rb4
-rw-r--r--app/models/lfs_object.rb1
-rw-r--r--app/models/list.rb2
-rw-r--r--app/models/member.rb2
-rw-r--r--app/models/merge_request.rb77
-rw-r--r--app/models/merge_request/metrics.rb2
-rw-r--r--app/models/merge_request/pipelines.rb88
-rw-r--r--app/models/merge_request_diff.rb1
-rw-r--r--app/models/merge_request_diff_commit.rb2
-rw-r--r--app/models/milestone.rb17
-rw-r--r--app/models/milestone_note.rb50
-rw-r--r--app/models/milestone_release.rb2
-rw-r--r--app/models/namespace.rb14
-rw-r--r--app/models/note.rb38
-rw-r--r--app/models/notification_recipient.rb11
-rw-r--r--app/models/notification_setting.rb14
-rw-r--r--app/models/pages_domain.rb5
-rw-r--r--app/models/project.rb118
-rw-r--r--app/models/project_ci_cd_setting.rb2
-rw-r--r--app/models/project_export_job.rb26
-rw-r--r--app/models/project_import_state.rb4
-rw-r--r--app/models/project_services/chat_message/base_message.rb6
-rw-r--r--app/models/project_services/chat_message/pipeline_message.rb9
-rw-r--r--app/models/project_services/chat_message/push_message.rb13
-rw-r--r--app/models/project_services/chat_notification_service.rb4
-rw-r--r--app/models/project_services/issue_tracker_service.rb14
-rw-r--r--app/models/project_services/jira_service.rb37
-rw-r--r--app/models/project_services/microsoft_teams_service.rb2
-rw-r--r--app/models/project_services/pipelines_email_service.rb2
-rw-r--r--app/models/project_services/prometheus_service.rb4
-rw-r--r--app/models/project_services/slack_service.rb16
-rw-r--r--app/models/project_services/slack_slash_commands_service.rb2
-rw-r--r--app/models/project_services/youtrack_service.rb2
-rw-r--r--app/models/project_wiki.rb31
-rw-r--r--app/models/prometheus_alert.rb4
-rw-r--r--app/models/protected_branch.rb10
-rw-r--r--app/models/release.rb2
-rw-r--r--app/models/releases/link.rb3
-rw-r--r--app/models/repository.rb28
-rw-r--r--app/models/resource_event.rb45
-rw-r--r--app/models/resource_label_event.rb31
-rw-r--r--app/models/resource_milestone_event.rb24
-rw-r--r--app/models/resource_weight_event.rb19
-rw-r--r--app/models/sent_notification.rb5
-rw-r--r--app/models/serverless/domain.rb44
-rw-r--r--app/models/serverless/domain_cluster.rb11
-rw-r--r--app/models/serverless/lookup_path.rb30
-rw-r--r--app/models/serverless/virtual_domain.rb22
-rw-r--r--app/models/service.rb18
-rw-r--r--app/models/snippet.rb48
-rw-r--r--app/models/snippet_repository.rb63
-rw-r--r--app/models/timelog.rb4
-rw-r--r--app/models/user.rb44
-rw-r--r--app/models/user_bot_type_enums.rb1
-rw-r--r--app/models/user_callout_enums.rb3
-rw-r--r--app/models/user_detail.rb7
-rw-r--r--app/models/user_highest_role.rb7
-rw-r--r--app/models/user_type_enums.rb15
-rw-r--r--app/models/users_statistics.rb14
-rw-r--r--app/models/wiki_page.rb60
-rw-r--r--app/models/x509_certificate.rb7
-rw-r--r--app/models/zoom_meeting.rb2
-rw-r--r--app/policies/group_policy.rb3
-rw-r--r--app/policies/note_policy.rb52
-rw-r--r--app/policies/personal_snippet_policy.rb3
-rw-r--r--app/policies/project_policy.rb6
-rw-r--r--app/policies/project_snippet_policy.rb4
-rw-r--r--app/policies/snippet_policy.rb4
-rw-r--r--app/presenters/project_hook_presenter.rb (renamed from app/presenters/hooks/project_hook_presenter.rb)0
-rw-r--r--app/presenters/projects/import_export/project_export_presenter.rb40
-rw-r--r--app/presenters/projects/prometheus/alert_presenter.rb19
-rw-r--r--app/presenters/release_presenter.rb2
-rw-r--r--app/presenters/service_hook_presenter.rb (renamed from app/presenters/hooks/service_hook_presenter.rb)0
-rw-r--r--app/presenters/snippet_blob_presenter.rb29
-rw-r--r--app/presenters/snippet_presenter.rb8
-rw-r--r--app/serializers/cluster_application_entity.rb2
-rw-r--r--app/serializers/diff_file_entity.rb14
-rw-r--r--app/serializers/environment_entity.rb20
-rw-r--r--app/serializers/group_variable_entity.rb1
-rw-r--r--app/serializers/issue_board_entity.rb3
-rw-r--r--app/serializers/merge_request_widget_entity.rb10
-rw-r--r--app/serializers/pipeline_serializer.rb7
-rw-r--r--app/serializers/serverless/domain_entity.rb8
-rw-r--r--app/services/audit_event_service.rb24
-rw-r--r--app/services/boards/issues/list_service.rb2
-rw-r--r--app/services/ci/create_cross_project_pipeline_service.rb41
-rw-r--r--app/services/ci/create_job_artifacts_service.rb94
-rw-r--r--app/services/ci/create_pipeline_service.rb1
-rw-r--r--app/services/ci/find_exposed_artifacts_service.rb6
-rw-r--r--app/services/ci/generate_coverage_reports_service.rb30
-rw-r--r--app/services/ci/parse_dotenv_artifact_service.rb64
-rw-r--r--app/services/ci/pipeline_bridge_status_service.rb9
-rw-r--r--app/services/ci/register_job_service.rb10
-rw-r--r--app/services/ci/retry_build_service.rb5
-rw-r--r--app/services/ci/update_ci_ref_status_service.rb65
-rw-r--r--app/services/clusters/applications/base_helm_service.rb2
-rw-r--r--app/services/clusters/applications/base_service.rb6
-rw-r--r--app/services/clusters/kubernetes/configure_istio_ingress_service.rb4
-rw-r--r--app/services/commits/cherry_pick_service.rb2
-rw-r--r--app/services/concerns/akismet_methods.rb10
-rw-r--r--app/services/concerns/deploy_token_methods.rb11
-rw-r--r--app/services/concerns/incident_management/settings.rb15
-rw-r--r--app/services/deploy_tokens/create_service.rb11
-rw-r--r--app/services/deployments/link_merge_requests_service.rb4
-rw-r--r--app/services/error_tracking/issue_update_service.rb1
-rw-r--r--app/services/git/process_ref_changes_service.rb2
-rw-r--r--app/services/groups/deploy_tokens/create_service.rb13
-rw-r--r--app/services/groups/import_export/export_service.rb30
-rw-r--r--app/services/groups/import_export/import_service.rb45
-rw-r--r--app/services/issuable/clone/attributes_rewriter.rb18
-rw-r--r--app/services/issuable/common_system_notes_service.rb17
-rw-r--r--app/services/issues/base_service.rb12
-rw-r--r--app/services/issues/close_service.rb29
-rw-r--r--app/services/issues/create_service.rb1
-rw-r--r--app/services/issues/import_csv_service.rb15
-rw-r--r--app/services/issues/reopen_service.rb1
-rw-r--r--app/services/issues/update_service.rb20
-rw-r--r--app/services/labels/transfer_service.rb6
-rw-r--r--app/services/lfs/lock_file_service.rb4
-rw-r--r--app/services/lfs/unlock_file_service.rb4
-rw-r--r--app/services/merge_requests/after_create_service.rb19
-rw-r--r--app/services/merge_requests/create_pipeline_service.rb23
-rw-r--r--app/services/merge_requests/create_service.rb21
-rw-r--r--app/services/merge_requests/merge_to_ref_service.rb2
-rw-r--r--app/services/merge_requests/update_service.rb12
-rw-r--r--app/services/metrics/dashboard/base_embed_service.rb4
-rw-r--r--app/services/metrics/dashboard/clone_dashboard_service.rb10
-rw-r--r--app/services/metrics/dashboard/custom_metric_embed_service.rb2
-rw-r--r--app/services/metrics/dashboard/default_embed_service.rb2
-rw-r--r--app/services/metrics/dashboard/dynamic_embed_service.rb2
-rw-r--r--app/services/metrics/dashboard/grafana_metric_embed_service.rb24
-rw-r--r--app/services/metrics/dashboard/update_dashboard_service.rb109
-rw-r--r--app/services/milestones/closed_issues_count_service.rb17
-rw-r--r--app/services/milestones/issues_count_service.rb17
-rw-r--r--app/services/milestones/transfer_service.rb18
-rw-r--r--app/services/notification_recipient_service.rb429
-rw-r--r--app/services/notification_recipients/build_service.rb38
-rw-r--r--app/services/notification_recipients/builder/base.rb217
-rw-r--r--app/services/notification_recipients/builder/default.rb74
-rw-r--r--app/services/notification_recipients/builder/merge_request_unmergeable.rb26
-rw-r--r--app/services/notification_recipients/builder/new_note.rb56
-rw-r--r--app/services/notification_recipients/builder/new_release.rb25
-rw-r--r--app/services/notification_recipients/builder/project_maintainers.rb24
-rw-r--r--app/services/notification_service.rb45
-rw-r--r--app/services/pod_logs/base_service.rb134
-rw-r--r--app/services/pod_logs/elasticsearch_service.rb79
-rw-r--r--app/services/pod_logs/kubernetes_service.rb88
-rw-r--r--app/services/post_receive_service.rb18
-rw-r--r--app/services/projects/alerting/notify_service.rb14
-rw-r--r--app/services/projects/container_repository/cleanup_tags_service.rb9
-rw-r--r--app/services/projects/create_service.rb15
-rw-r--r--app/services/projects/deploy_tokens/create_service.rb13
-rw-r--r--app/services/projects/destroy_service.rb19
-rw-r--r--app/services/projects/fork_service.rb49
-rw-r--r--app/services/projects/import_export/export_service.rb39
-rw-r--r--app/services/projects/import_service.rb6
-rw-r--r--app/services/projects/lfs_pointers/lfs_download_link_list_service.rb30
-rw-r--r--app/services/projects/lfs_pointers/lfs_link_service.rb4
-rw-r--r--app/services/projects/lsif_data_service.rb28
-rw-r--r--app/services/projects/protect_default_branch_service.rb2
-rw-r--r--app/services/projects/update_pages_service.rb4
-rw-r--r--app/services/projects/update_repository_storage_service.rb125
-rw-r--r--app/services/projects/update_service.rb11
-rw-r--r--app/services/repositories/base_service.rb8
-rw-r--r--app/services/repositories/destroy_service.rb4
-rw-r--r--app/services/resource_events/change_milestone_service.rb36
-rw-r--r--app/services/resource_events/merge_into_notes_service.rb9
-rw-r--r--app/services/resource_events/synthetic_milestone_notes_builder_service.rb31
-rw-r--r--app/services/search_service.rb48
-rw-r--r--app/services/serverless/associate_domain_service.rb30
-rw-r--r--app/services/snippets/bulk_destroy_service.rb74
-rw-r--r--app/services/snippets/create_service.rb56
-rw-r--r--app/services/snippets/destroy_service.rb30
-rw-r--r--app/services/snippets/update_service.rb58
-rw-r--r--app/services/spam/ham_service.rb2
-rw-r--r--app/services/spam/mark_as_spam_service.rb14
-rw-r--r--app/services/spam/spam_check_service.rb22
-rw-r--r--app/services/system_note_service.rb4
-rw-r--r--app/services/system_notes/issuables_service.rb8
-rw-r--r--app/services/system_notes/merge_requests_service.rb2
-rw-r--r--app/services/test_hooks/base_service.rb2
-rw-r--r--app/services/test_hooks/project_service.rb14
-rw-r--r--app/services/test_hooks/system_service.rb2
-rw-r--r--app/services/users/build_service.rb10
-rw-r--r--app/services/users/destroy_service.rb5
-rw-r--r--app/services/x509_certificate_revoke_service.rb9
-rw-r--r--app/uploaders/attachment_uploader.rb11
-rw-r--r--app/uploaders/avatar_uploader.rb7
-rw-r--r--app/uploaders/content_type_whitelist.rb53
-rw-r--r--app/uploaders/favicon_uploader.rb9
-rw-r--r--app/uploaders/gitlab_uploader.rb2
-rw-r--r--app/uploaders/object_storage.rb2
-rw-r--r--app/uploaders/upload_type_check.rb98
-rw-r--r--app/views/admin/application_settings/_ci_cd.html.haml2
-rw-r--r--app/views/admin/application_settings/_signin.html.haml2
-rw-r--r--app/views/admin/application_settings/_signup.html.haml14
-rw-r--r--app/views/admin/application_settings/_visibility_and_access.html.haml5
-rw-r--r--app/views/admin/application_settings/ci_cd.html.haml2
-rw-r--r--app/views/admin/broadcast_messages/_form.html.haml7
-rw-r--r--app/views/admin/dashboard/index.html.haml8
-rw-r--r--app/views/admin/integrations/_form.html.haml12
-rw-r--r--app/views/admin/integrations/edit.html.haml5
-rw-r--r--app/views/admin/services/_form.html.haml2
-rw-r--r--app/views/admin/sessions/_new_base.html.haml4
-rw-r--r--app/views/admin/sessions/_tabs_normal.html.haml2
-rw-r--r--app/views/admin/sessions/_two_factor_otp.html.haml9
-rw-r--r--app/views/admin/sessions/_two_factor_u2f.html.haml17
-rw-r--r--app/views/admin/sessions/new.html.haml6
-rw-r--r--app/views/admin/sessions/two_factor.html.haml15
-rw-r--r--app/views/award_emoji/_awards_block.html.haml1
-rw-r--r--app/views/ci/variables/_index.html.haml53
-rw-r--r--app/views/clusters/clusters/_advanced_settings_container.html.haml6
-rw-r--r--app/views/clusters/clusters/_advanced_settings_tab.html.haml6
-rw-r--r--app/views/clusters/clusters/_applications.html.haml1
-rw-r--r--app/views/clusters/clusters/_applications_tab.html.haml5
-rw-r--r--app/views/clusters/clusters/_configure.html.haml26
-rw-r--r--app/views/clusters/clusters/_details.html.haml11
-rw-r--r--app/views/clusters/clusters/_details_tab.html.haml5
-rw-r--r--app/views/clusters/clusters/_form.html.haml40
-rw-r--r--app/views/clusters/clusters/_gitlab_integration_form.html.haml43
-rw-r--r--app/views/clusters/clusters/_namespace.html.haml2
-rw-r--r--app/views/clusters/clusters/_provider_details_form.html.haml58
-rw-r--r--app/views/clusters/clusters/index.html.haml23
-rw-r--r--app/views/clusters/clusters/show.html.haml22
-rw-r--r--app/views/clusters/clusters/user/_form.html.haml2
-rw-r--r--app/views/clusters/platforms/kubernetes/_form.html.haml58
-rw-r--r--app/views/dashboard/merge_requests.html.haml4
-rw-r--r--app/views/dashboard/todos/_todo.html.haml12
-rw-r--r--app/views/dashboard/todos/index.html.haml8
-rw-r--r--app/views/devise/shared/_omniauth_box.html.haml11
-rw-r--r--app/views/discussions/_resolve_all.html.haml8
-rw-r--r--app/views/groups/registry/repositories/index.html.haml3
-rw-r--r--app/views/groups/settings/_permanent_deletion.html.haml2
-rw-r--r--app/views/groups/settings/_permissions.html.haml1
-rw-r--r--app/views/groups/settings/ci_cd/show.html.haml3
-rw-r--r--app/views/groups/show.html.haml2
-rw-r--r--app/views/help/ui.html.haml23
-rw-r--r--app/views/ide/_show.html.haml2
-rw-r--r--app/views/import/shared/_new_project_form.html.haml2
-rw-r--r--app/views/layouts/_page.html.haml2
-rw-r--r--app/views/layouts/fullscreen.html.haml2
-rw-r--r--app/views/layouts/nav/sidebar/_analytics_links.html.haml5
-rw-r--r--app/views/layouts/nav/sidebar/_group.html.haml18
-rw-r--r--app/views/layouts/nav/sidebar/_project.html.haml55
-rw-r--r--app/views/notify/_successful_pipeline.html.haml118
-rw-r--r--app/views/notify/_successful_pipeline.text.erb32
-rw-r--r--app/views/notify/pipeline_fixed_email.html.haml1
-rw-r--r--app/views/notify/pipeline_fixed_email.text.erb1
-rw-r--r--app/views/notify/pipeline_success_email.html.haml118
-rw-r--r--app/views/notify/pipeline_success_email.text.erb33
-rw-r--r--app/views/profiles/_email_settings.html.haml2
-rw-r--r--app/views/profiles/keys/_form.html.haml13
-rw-r--r--app/views/profiles/keys/_key.html.haml41
-rw-r--r--app/views/profiles/keys/_key_details.html.haml5
-rw-r--r--app/views/profiles/keys/_key_table.html.haml2
-rw-r--r--app/views/profiles/notifications/_email_settings.html.haml2
-rw-r--r--app/views/profiles/notifications/_group_settings.html.haml2
-rw-r--r--app/views/profiles/show.html.haml2
-rw-r--r--app/views/projects/_activity.html.haml3
-rw-r--r--app/views/projects/_commit_button.html.haml3
-rw-r--r--app/views/projects/_wiki.html.haml2
-rw-r--r--app/views/projects/blob/_editor.html.haml8
-rw-r--r--app/views/projects/blob/_pipeline_tour_success.html.haml1
-rw-r--r--app/views/projects/blob/_template_selectors.html.haml5
-rw-r--r--app/views/projects/blob/new.html.haml6
-rw-r--r--app/views/projects/blob/preview.html.haml2
-rw-r--r--app/views/projects/blob/show.html.haml2
-rw-r--r--app/views/projects/blob/viewers/_markup.html.haml2
-rw-r--r--app/views/projects/ci/builds/_build.html.haml2
-rw-r--r--app/views/projects/commit/_commit_box.html.haml4
-rw-r--r--app/views/projects/commit/x509/_certificate_details.html.haml2
-rw-r--r--app/views/projects/deploy_tokens/_form.html.haml34
-rw-r--r--app/views/projects/deploy_tokens/_index.html.haml17
-rw-r--r--app/views/projects/deploy_tokens/_revoke_modal.html.haml17
-rw-r--r--app/views/projects/deploy_tokens/_table.html.haml31
-rw-r--r--app/views/projects/edit.html.haml2
-rw-r--r--app/views/projects/find_file/show.html.haml3
-rw-r--r--app/views/projects/forks/new.html.haml6
-rw-r--r--app/views/projects/graphs/charts.html.haml1
-rw-r--r--app/views/projects/hook_logs/show.html.haml4
-rw-r--r--app/views/projects/hooks/_index.html.haml10
-rw-r--r--app/views/projects/hooks/edit.html.haml5
-rw-r--r--app/views/projects/hooks/index.html.haml14
-rw-r--r--app/views/projects/import/jira/show.html.haml24
-rw-r--r--app/views/projects/issues/_discussion.html.haml2
-rw-r--r--app/views/projects/issues/_issue.html.haml2
-rw-r--r--app/views/projects/issues/_new_branch.html.haml2
-rw-r--r--app/views/projects/issues/import_csv/_button.html.haml2
-rw-r--r--app/views/projects/logs/empty_logs.html.haml14
-rw-r--r--app/views/projects/logs/index.html.haml1
-rw-r--r--app/views/projects/merge_requests/_merge_request.html.haml2
-rw-r--r--app/views/projects/merge_requests/_widget.html.haml1
-rw-r--r--app/views/projects/merge_requests/show.html.haml1
-rw-r--r--app/views/projects/milestones/show.html.haml4
-rw-r--r--app/views/projects/new.html.haml2
-rw-r--r--app/views/projects/notes/_actions.html.haml4
-rw-r--r--app/views/projects/pipeline_schedules/index.html.haml2
-rw-r--r--app/views/projects/pipelines/_with_tabs.html.haml2
-rw-r--r--app/views/projects/registry/repositories/index.html.haml7
-rw-r--r--app/views/projects/runners/_runner.html.haml2
-rw-r--r--app/views/projects/services/_form.html.haml2
-rw-r--r--app/views/projects/services/_index.html.haml6
-rw-r--r--app/views/projects/services/edit.html.haml3
-rw-r--r--app/views/projects/services/slack/_help.haml16
-rw-r--r--app/views/projects/settings/ci_cd/_form.html.haml21
-rw-r--r--app/views/projects/settings/ci_cd/show.html.haml5
-rw-r--r--app/views/projects/settings/integrations/show.html.haml14
-rw-r--r--app/views/projects/settings/operations/_configuration_banner.html.haml24
-rw-r--r--app/views/projects/settings/operations/_error_tracking.html.haml2
-rw-r--r--app/views/projects/settings/operations/_incidents.html.haml2
-rw-r--r--app/views/projects/settings/operations/_prometheus.html.haml19
-rw-r--r--app/views/projects/settings/operations/show.html.haml2
-rw-r--r--app/views/projects/settings/repository/show.html.haml3
-rw-r--r--app/views/projects/snippets/show.html.haml3
-rw-r--r--app/views/projects/tags/new.html.haml12
-rw-r--r--app/views/projects/wikis/_form.html.haml16
-rw-r--r--app/views/projects/wikis/show.html.haml2
-rw-r--r--app/views/search/_category.html.haml1
-rw-r--r--app/views/search/results/_snippet_blob.html.haml2
-rw-r--r--app/views/shared/_broadcast_message.html.haml10
-rw-r--r--app/views/shared/_default_branch_protection.html.haml3
-rw-r--r--app/views/shared/_delete_label_modal.html.haml2
-rw-r--r--app/views/shared/_no_ssh.html.haml17
-rw-r--r--app/views/shared/_outdated_browser.html.haml19
-rw-r--r--app/views/shared/_service_settings.html.haml2
-rw-r--r--app/views/shared/badges/_badge_settings.html.haml2
-rw-r--r--app/views/shared/boards/components/_board.html.haml13
-rw-r--r--app/views/shared/boards/components/sidebar/_labels.html.haml15
-rw-r--r--app/views/shared/deploy_tokens/_form.html.haml34
-rw-r--r--app/views/shared/deploy_tokens/_index.html.haml18
-rw-r--r--app/views/shared/deploy_tokens/_new_deploy_token.html.haml (renamed from app/views/projects/deploy_tokens/_new_deploy_token.html.haml)0
-rw-r--r--app/views/shared/deploy_tokens/_revoke_modal.html.haml15
-rw-r--r--app/views/shared/deploy_tokens/_table.html.haml31
-rw-r--r--app/views/shared/issuable/_search_bar.html.haml2
-rw-r--r--app/views/shared/issuable/_sidebar.html.haml3
-rw-r--r--app/views/shared/issuable/_sidebar_assignees.html.haml2
-rw-r--r--app/views/shared/milestones/_issuable.html.haml7
-rw-r--r--app/views/shared/milestones/_labels_tab.html.haml8
-rw-r--r--app/views/shared/milestones/_milestone.html.haml4
-rw-r--r--app/views/shared/milestones/_sidebar.html.haml4
-rw-r--r--app/views/shared/milestones/_top.html.haml2
-rw-r--r--app/views/shared/notes/_note.html.haml2
-rw-r--r--app/views/shared/notifications/_custom_notifications.html.haml1
-rw-r--r--app/views/shared/projects/_project.html.haml5
-rw-r--r--app/views/shared/snippets/_blob.html.haml20
-rw-r--r--app/views/shared/snippets/_embed.html.haml7
-rw-r--r--app/views/shared/snippets/_form.html.haml4
-rw-r--r--app/views/shared/snippets/_list.html.haml2
-rw-r--r--app/views/shared/web_hooks/_form.html.haml22
-rw-r--r--app/views/snippets/notes/_actions.html.haml1
-rw-r--r--app/views/snippets/show.html.haml3
-rw-r--r--app/views/u2f/_authenticate.html.haml1
-rw-r--r--app/views/users/_cover_controls.html.haml2
-rw-r--r--app/views/users/_profile_basic_info.html.haml2
-rw-r--r--app/views/users/show.html.haml47
-rw-r--r--app/workers/admin_email_worker.rb4
-rw-r--r--app/workers/all_queues.yml585
-rw-r--r--app/workers/archive_trace_worker.rb2
-rw-r--r--app/workers/authorized_keys_worker.rb29
-rw-r--r--app/workers/authorized_projects_worker.rb4
-rw-r--r--app/workers/auto_devops/disable_worker.rb2
-rw-r--r--app/workers/auto_merge_process_worker.rb2
-rw-r--r--app/workers/background_migration_worker.rb24
-rw-r--r--app/workers/build_coverage_worker.rb2
-rw-r--r--app/workers/build_finished_worker.rb4
-rw-r--r--app/workers/build_hooks_worker.rb4
-rw-r--r--app/workers/build_queue_worker.rb4
-rw-r--r--app/workers/build_success_worker.rb4
-rw-r--r--app/workers/build_trace_sections_worker.rb2
-rw-r--r--app/workers/chaos/cpu_spin_worker.rb2
-rw-r--r--app/workers/chaos/db_spin_worker.rb2
-rw-r--r--app/workers/chaos/kill_worker.rb2
-rw-r--r--app/workers/chaos/leak_mem_worker.rb2
-rw-r--r--app/workers/chaos/sleep_worker.rb2
-rw-r--r--app/workers/chat_notification_worker.rb10
-rw-r--r--app/workers/ci/archive_traces_cron_worker.rb2
-rw-r--r--app/workers/ci/build_prepare_worker.rb2
-rw-r--r--app/workers/ci/build_schedule_worker.rb2
-rw-r--r--app/workers/ci/build_trace_chunk_flush_worker.rb2
-rw-r--r--app/workers/ci/create_cross_project_pipeline_worker.rb2
-rw-r--r--app/workers/ci/pipeline_bridge_status_worker.rb4
-rw-r--r--app/workers/ci/resource_groups/assign_resource_from_resource_group_worker.rb2
-rw-r--r--app/workers/cleanup_container_repository_worker.rb2
-rw-r--r--app/workers/cluster_configure_istio_worker.rb2
-rw-r--r--app/workers/cluster_configure_worker.rb2
-rw-r--r--app/workers/cluster_install_app_worker.rb2
-rw-r--r--app/workers/cluster_patch_app_worker.rb2
-rw-r--r--app/workers/cluster_project_configure_worker.rb2
-rw-r--r--app/workers/cluster_provision_worker.rb2
-rw-r--r--app/workers/cluster_upgrade_app_worker.rb2
-rw-r--r--app/workers/cluster_wait_for_app_installation_worker.rb2
-rw-r--r--app/workers/cluster_wait_for_ingress_ip_address_worker.rb2
-rw-r--r--app/workers/clusters/applications/activate_service_worker.rb2
-rw-r--r--app/workers/clusters/applications/deactivate_service_worker.rb2
-rw-r--r--app/workers/clusters/applications/uninstall_worker.rb2
-rw-r--r--app/workers/clusters/applications/wait_for_uninstall_app_worker.rb2
-rw-r--r--app/workers/clusters/cleanup/app_worker.rb2
-rw-r--r--app/workers/clusters/cleanup/project_namespace_worker.rb2
-rw-r--r--app/workers/clusters/cleanup/service_account_worker.rb2
-rw-r--r--app/workers/concerns/application_worker.rb11
-rw-r--r--app/workers/concerns/gitlab/github_import/notify_upon_death.rb31
-rw-r--r--app/workers/concerns/gitlab/github_import/object_importer.rb2
-rw-r--r--app/workers/concerns/gitlab/notify_upon_death.rb29
-rw-r--r--app/workers/concerns/project_export_options.rb25
-rw-r--r--app/workers/concerns/waitable_worker.rb2
-rw-r--r--app/workers/concerns/worker_attributes.rb38
-rw-r--r--app/workers/container_expiration_policy_worker.rb2
-rw-r--r--app/workers/create_commit_signature_worker.rb2
-rw-r--r--app/workers/create_evidence_worker.rb4
-rw-r--r--app/workers/create_note_diff_file_worker.rb2
-rw-r--r--app/workers/create_pipeline_worker.rb4
-rw-r--r--app/workers/delete_container_repository_worker.rb2
-rw-r--r--app/workers/delete_diff_files_worker.rb2
-rw-r--r--app/workers/delete_merged_branches_worker.rb2
-rw-r--r--app/workers/delete_stored_files_worker.rb2
-rw-r--r--app/workers/delete_user_worker.rb2
-rw-r--r--app/workers/deployments/finished_worker.rb2
-rw-r--r--app/workers/deployments/forward_deployment_worker.rb2
-rw-r--r--app/workers/deployments/success_worker.rb2
-rw-r--r--app/workers/detect_repository_languages_worker.rb2
-rw-r--r--app/workers/email_receiver_worker.rb4
-rw-r--r--app/workers/emails_on_push_worker.rb4
-rw-r--r--app/workers/environments/auto_stop_cron_worker.rb2
-rw-r--r--app/workers/error_tracking_issue_link_worker.rb6
-rw-r--r--app/workers/expire_build_artifacts_worker.rb2
-rw-r--r--app/workers/expire_build_instance_artifacts_worker.rb2
-rw-r--r--app/workers/expire_job_cache_worker.rb3
-rw-r--r--app/workers/expire_pipeline_cache_worker.rb4
-rw-r--r--app/workers/file_hook_worker.rb2
-rw-r--r--app/workers/git_garbage_collect_worker.rb2
-rw-r--r--app/workers/gitlab/github_import/advance_stage_worker.rb53
-rw-r--r--app/workers/gitlab/github_import/import_diff_note_worker.rb2
-rw-r--r--app/workers/gitlab/github_import/import_issue_worker.rb2
-rw-r--r--app/workers/gitlab/github_import/import_lfs_object_worker.rb2
-rw-r--r--app/workers/gitlab/github_import/import_note_worker.rb2
-rw-r--r--app/workers/gitlab/github_import/import_pull_request_worker.rb2
-rw-r--r--app/workers/gitlab/github_import/refresh_import_jid_worker.rb2
-rw-r--r--app/workers/gitlab/github_import/stage/finish_import_worker.rb2
-rw-r--r--app/workers/gitlab/github_import/stage/import_base_data_worker.rb2
-rw-r--r--app/workers/gitlab/github_import/stage/import_issues_and_diff_notes_worker.rb2
-rw-r--r--app/workers/gitlab/github_import/stage/import_lfs_objects_worker.rb2
-rw-r--r--app/workers/gitlab/github_import/stage/import_notes_worker.rb2
-rw-r--r--app/workers/gitlab/github_import/stage/import_pull_requests_worker.rb2
-rw-r--r--app/workers/gitlab/github_import/stage/import_repository_worker.rb2
-rw-r--r--app/workers/gitlab/import/advance_stage.rb61
-rw-r--r--app/workers/gitlab/phabricator_import/base_worker.rb2
-rw-r--r--app/workers/gitlab/phabricator_import/import_tasks_worker.rb2
-rw-r--r--app/workers/gitlab_shell_worker.rb14
-rw-r--r--app/workers/gitlab_usage_ping_worker.rb4
-rw-r--r--app/workers/group_destroy_worker.rb2
-rw-r--r--app/workers/group_export_worker.rb2
-rw-r--r--app/workers/group_import_worker.rb2
-rw-r--r--app/workers/hashed_storage/base_worker.rb2
-rw-r--r--app/workers/hashed_storage/migrator_worker.rb2
-rw-r--r--app/workers/hashed_storage/project_migrate_worker.rb2
-rw-r--r--app/workers/hashed_storage/project_rollback_worker.rb2
-rw-r--r--app/workers/hashed_storage/rollbacker_worker.rb2
-rw-r--r--app/workers/import_export_project_cleanup_worker.rb2
-rw-r--r--app/workers/import_issues_csv_worker.rb2
-rw-r--r--app/workers/incident_management/process_alert_worker.rb2
-rw-r--r--app/workers/invalid_gpg_signature_update_worker.rb2
-rw-r--r--app/workers/irker_worker.rb2
-rw-r--r--app/workers/issue_due_scheduler_worker.rb2
-rw-r--r--app/workers/mail_scheduler/issue_due_worker.rb2
-rw-r--r--app/workers/mail_scheduler/notification_service_worker.rb2
-rw-r--r--app/workers/merge_request_mergeability_check_worker.rb2
-rw-r--r--app/workers/merge_worker.rb4
-rw-r--r--app/workers/migrate_external_diffs_worker.rb2
-rw-r--r--app/workers/namespaceless_project_destroy_worker.rb2
-rw-r--r--app/workers/namespaces/prune_aggregation_schedules_worker.rb2
-rw-r--r--app/workers/namespaces/root_statistics_worker.rb10
-rw-r--r--app/workers/namespaces/schedule_aggregation_worker.rb10
-rw-r--r--app/workers/new_issue_worker.rb4
-rw-r--r--app/workers/new_merge_request_worker.rb12
-rw-r--r--app/workers/new_note_worker.rb4
-rw-r--r--app/workers/new_release_worker.rb2
-rw-r--r--app/workers/object_pool/create_worker.rb2
-rw-r--r--app/workers/object_pool/destroy_worker.rb2
-rw-r--r--app/workers/object_pool/join_worker.rb2
-rw-r--r--app/workers/object_pool/schedule_join_worker.rb2
-rw-r--r--app/workers/object_storage/background_move_worker.rb2
-rw-r--r--app/workers/object_storage/migrate_uploads_worker.rb2
-rw-r--r--app/workers/pages_domain_removal_cron_worker.rb2
-rw-r--r--app/workers/pages_domain_ssl_renewal_cron_worker.rb2
-rw-r--r--app/workers/pages_domain_ssl_renewal_worker.rb2
-rw-r--r--app/workers/pages_domain_verification_cron_worker.rb2
-rw-r--r--app/workers/pages_domain_verification_worker.rb2
-rw-r--r--app/workers/pages_worker.rb2
-rw-r--r--app/workers/personal_access_tokens/expiring_worker.rb2
-rw-r--r--app/workers/pipeline_hooks_worker.rb4
-rw-r--r--app/workers/pipeline_metrics_worker.rb4
-rw-r--r--app/workers/pipeline_notification_worker.rb18
-rw-r--r--app/workers/pipeline_process_worker.rb4
-rw-r--r--app/workers/pipeline_schedule_worker.rb2
-rw-r--r--app/workers/pipeline_success_worker.rb4
-rw-r--r--app/workers/pipeline_update_ci_ref_status_worker.rb17
-rw-r--r--app/workers/pipeline_update_worker.rb4
-rw-r--r--app/workers/post_receive.rb73
-rw-r--r--app/workers/process_commit_worker.rb18
-rw-r--r--app/workers/project_cache_worker.rb4
-rw-r--r--app/workers/project_daily_statistics_worker.rb2
-rw-r--r--app/workers/project_destroy_worker.rb2
-rw-r--r--app/workers/project_export_worker.rb11
-rw-r--r--app/workers/project_service_worker.rb2
-rw-r--r--app/workers/project_update_repository_storage_worker.rb13
-rw-r--r--app/workers/propagate_service_template_worker.rb2
-rw-r--r--app/workers/prune_old_events_worker.rb4
-rw-r--r--app/workers/prune_web_hook_logs_worker.rb2
-rw-r--r--app/workers/reactive_caching_worker.rb8
-rw-r--r--app/workers/rebase_worker.rb2
-rw-r--r--app/workers/remote_mirror_notification_worker.rb2
-rw-r--r--app/workers/remove_expired_group_links_worker.rb2
-rw-r--r--app/workers/remove_expired_members_worker.rb2
-rw-r--r--app/workers/remove_unreferenced_lfs_objects_worker.rb2
-rw-r--r--app/workers/repository_archive_cache_worker.rb2
-rw-r--r--app/workers/repository_check/batch_worker.rb2
-rw-r--r--app/workers/repository_check/clear_worker.rb2
-rw-r--r--app/workers/repository_check/dispatch_worker.rb2
-rw-r--r--app/workers/repository_check/single_repository_worker.rb2
-rw-r--r--app/workers/repository_cleanup_worker.rb2
-rw-r--r--app/workers/repository_fork_worker.rb30
-rw-r--r--app/workers/repository_import_worker.rb2
-rw-r--r--app/workers/repository_remove_remote_worker.rb2
-rw-r--r--app/workers/repository_update_remote_mirror_worker.rb2
-rw-r--r--app/workers/requests_profiles_worker.rb2
-rw-r--r--app/workers/run_pipeline_schedule_worker.rb2
-rw-r--r--app/workers/schedule_migrate_external_diffs_worker.rb2
-rw-r--r--app/workers/self_monitoring_project_create_worker.rb2
-rw-r--r--app/workers/self_monitoring_project_delete_worker.rb2
-rw-r--r--app/workers/stage_update_worker.rb4
-rw-r--r--app/workers/stuck_ci_jobs_worker.rb2
-rw-r--r--app/workers/stuck_export_jobs_worker.rb54
-rw-r--r--app/workers/stuck_import_jobs_worker.rb2
-rw-r--r--app/workers/stuck_merge_jobs_worker.rb2
-rw-r--r--app/workers/system_hook_push_worker.rb2
-rw-r--r--app/workers/todos_destroyer/confidential_issue_worker.rb2
-rw-r--r--app/workers/todos_destroyer/entity_leave_worker.rb2
-rw-r--r--app/workers/todos_destroyer/group_private_worker.rb2
-rw-r--r--app/workers/todos_destroyer/private_features_worker.rb2
-rw-r--r--app/workers/todos_destroyer/project_private_worker.rb2
-rw-r--r--app/workers/trending_projects_worker.rb2
-rw-r--r--app/workers/update_external_pull_requests_worker.rb2
-rw-r--r--app/workers/update_head_pipeline_for_merge_request_worker.rb4
-rw-r--r--app/workers/update_merge_requests_worker.rb20
-rw-r--r--app/workers/update_project_statistics_worker.rb2
-rw-r--r--app/workers/upload_checksum_worker.rb2
-rw-r--r--app/workers/wait_for_cluster_creation_worker.rb2
-rw-r--r--app/workers/web_hook_worker.rb2
-rw-r--r--app/workers/x509_certificate_revoke_worker.rb17
-rw-r--r--babel.config.js2
-rwxr-xr-xbin/actioncable63
-rwxr-xr-xbin/background_jobs92
-rwxr-xr-xbin/background_jobs_sk67
-rwxr-xr-xbin/background_jobs_sk_cluster76
-rwxr-xr-xbin/mail_room2
-rwxr-xr-xbin/sidekiq-cluster19
-rw-r--r--cable/config.ru6
-rw-r--r--changelogs/unreleased/10429_set_dast_default_git_strategy_to_none.yml5
-rw-r--r--changelogs/unreleased/10526-smartcard_support_different_hostname.yml5
-rw-r--r--changelogs/unreleased/11821-insights-back-json-fix.yml5
-rw-r--r--changelogs/unreleased/118503.yml5
-rw-r--r--changelogs/unreleased/119107-respect-dnt-for-experiments.yml5
-rw-r--r--changelogs/unreleased/119429-decouple-webhooks-from-integrations-within-project-settings.yml5
-rw-r--r--changelogs/unreleased/12577-generate-smaller-image-sizes-for-designs.yml5
-rw-r--r--changelogs/unreleased/13005-allow-to-use-issue-issues-and-merge_request-merge_requests-for-issu.yml5
-rw-r--r--changelogs/unreleased/13717-es-indexing-without-index.yml5
-rw-r--r--changelogs/unreleased/13810-cluster-environments-table.yml5
-rw-r--r--changelogs/unreleased/14080-slack-multiple-channels.yml5
-rw-r--r--changelogs/unreleased/15103-markup-tips-for-markdown-shown-while-editing-wiki-pages-in-other-fo.yml5
-rw-r--r--changelogs/unreleased/16131-wiki-titles-with-special-characters.yml5
-rw-r--r--changelogs/unreleased/193170-fix-deployment-ref-validation.yml5
-rw-r--r--changelogs/unreleased/195871-fix-duplicate-weight-change-notes.yml5
-rw-r--r--changelogs/unreleased/195969-multi-select-on-issue-boards-inconsistent-erratic.yml5
-rw-r--r--changelogs/unreleased/196609-remove-staging.yml5
-rw-r--r--changelogs/unreleased/196646-replace-underscore-with-lodash-for-app-assets-javascripts-badges.yml5
-rw-r--r--changelogs/unreleased/196648-replace-_-with-lodash.yml5
-rw-r--r--changelogs/unreleased/196688-replace-underscore-with-lodash-for-app-assets-javascripts-deploy_k.yml5
-rw-r--r--changelogs/unreleased/196718-remove-filter-epic-counts.yml5
-rw-r--r--changelogs/unreleased/196832-drop-feature-toggle.yml5
-rw-r--r--changelogs/unreleased/196883-repository-link-grpc-graceful-failure.yml5
-rw-r--r--changelogs/unreleased/197227-milestone-tab-async.yml5
-rw-r--r--changelogs/unreleased/197480-remove-package-file_type.yml5
-rw-r--r--changelogs/unreleased/197790-hide-private-commit-emails-in-notification-settings.yml5
-rw-r--r--changelogs/unreleased/197918-add-package-type-param-to-group-packages-api.yml5
-rw-r--r--changelogs/unreleased/197918-add-package-type-to-project-packages-api.yml5
-rw-r--r--changelogs/unreleased/197960-package-detail-activity.yml5
-rw-r--r--changelogs/unreleased/198050-frontend-pagination-in-log-explorer.yml5
-rw-r--r--changelogs/unreleased/198052-in-the-jobs-page-the-allowed-to-fail-badge-looks-like-an-error.yml5
-rw-r--r--changelogs/unreleased/198325-migrate-design-mentions-to-db-table.yml5
-rw-r--r--changelogs/unreleased/198326-migrate-commit-notes-mentions-to-db-table.yml5
-rw-r--r--changelogs/unreleased/198338-migrate-mr-mentions-to-db-table.yml5
-rw-r--r--changelogs/unreleased/198391-add-user-plan-and-trial-status-to-api.yml5
-rw-r--r--changelogs/unreleased/198604-monaco-snippets.yml5
-rw-r--r--changelogs/unreleased/19880-sort-closed-issues-by-recently-closed.yml5
-rw-r--r--changelogs/unreleased/199197-make-has-been-reverted-check-cheaper.yml5
-rw-r--r--changelogs/unreleased/199220-hide-snippet-blob-search.yml5
-rw-r--r--changelogs/unreleased/199220-snippet-index-desc.yml5
-rw-r--r--changelogs/unreleased/199220-snippet-search.yml5
-rw-r--r--changelogs/unreleased/199370-move-deploy_keys-to-cicd.yml5
-rw-r--r--changelogs/unreleased/199400-send-email-notifications-for-generic-alerts.yml5
-rw-r--r--changelogs/unreleased/199422-maximum-size-for-gitlab-pages-says-to-set-it-to-0-for-unlimited-bu.yml6
-rw-r--r--changelogs/unreleased/199438-fix-logs-encoding.yml5
-rw-r--r--changelogs/unreleased/199442-explore-projects.yml5
-rw-r--r--changelogs/unreleased/199790-approval-settings-target-branch-api.yml5
-rw-r--r--changelogs/unreleased/199908-use-only-the-first-line-of-the-commit-message-on-chat-service-noti.yml5
-rw-r--r--changelogs/unreleased/199998-container-expiration-policy-settings-hide-form-on-api-error-2.yml5
-rw-r--r--changelogs/unreleased/200107-avatar-content-type-does-not-match-file-extension.yml5
-rw-r--r--changelogs/unreleased/20083-conflict-between-project-s-permission-settings-description-and-actu.yml5
-rw-r--r--changelogs/unreleased/201427-issue-board-due-date-picker-unable-to-select-custom-year-or-month.yml6
-rw-r--r--changelogs/unreleased/201771.yml5
-rw-r--r--changelogs/unreleased/201931-white-syntax-highlighting-theme-for-web-ide.yml5
-rw-r--r--changelogs/unreleased/201999-define-formatter-y-axis.yml5
-rw-r--r--changelogs/unreleased/201999-formatter-column-chart.yml5
-rw-r--r--changelogs/unreleased/202008-disable-drag-from-epic-tree-dropdown-button.yml5
-rw-r--r--changelogs/unreleased/202094-enable-ff-by-default.yml5
-rw-r--r--changelogs/unreleased/202233-migrating-fa-spinner-whithin-top-level-projects.yml5
-rw-r--r--changelogs/unreleased/202271-migrate-fa-spinner-for-notifications_dropdown-js.yml5
-rw-r--r--changelogs/unreleased/202274-migrate-fa-spinner-to-spinner.yml5
-rw-r--r--changelogs/unreleased/202426-editor-lite-theme-preference.yml5
-rw-r--r--changelogs/unreleased/204723-nomethoderror-undefined-method-term_agreements-for-nil-nilclass.yml5
-rw-r--r--changelogs/unreleased/204774-quick-actions-executed-in-multiline-inline-code.yml5
-rw-r--r--changelogs/unreleased/204801-add-instance-to-services.yml5
-rw-r--r--changelogs/unreleased/204858-update-self-monitor-environments.yml5
-rw-r--r--changelogs/unreleased/205184-change-omniauth-log-format-to-json.yml5
-rw-r--r--changelogs/unreleased/205399-add-tooltip-to-file-tree-state.yml5
-rw-r--r--changelogs/unreleased/205435.yml5
-rw-r--r--changelogs/unreleased/205596-empty-state-for-code-review-analytics.yml5
-rw-r--r--changelogs/unreleased/206899-move-system-metrics-chart-group-to-the-top-of-the-default-dashbord.yml5
-rw-r--r--changelogs/unreleased/207087-blocked-status-issue.yml5
-rw-r--r--changelogs/unreleased/207126-more-descriptive-error-messages-in-migration-helpers.yml5
-rw-r--r--changelogs/unreleased/207181-status-page-settings-backend.yml5
-rw-r--r--changelogs/unreleased/207203-forward-deployment-ui.yml5
-rw-r--r--changelogs/unreleased/207216-lfs-batch-upload-fix.yml5
-rw-r--r--changelogs/unreleased/207223-fix-self-monitoring-project.yml5
-rw-r--r--changelogs/unreleased/207237-snippet-edit-description-vue.yml5
-rw-r--r--changelogs/unreleased/207242-vsibility-level-vue.yml5
-rw-r--r--changelogs/unreleased/207249-prevent-editing-weight-to-scroll-to-the-top.yml5
-rw-r--r--changelogs/unreleased/207367-change-link-icons-on-security-configuration-page-to-follow-design-.yml5
-rw-r--r--changelogs/unreleased/207390-pages-api-case-insensitive-domain-lookup.yml5
-rw-r--r--changelogs/unreleased/207455-frontend-fix-epic-blform.yml5
-rw-r--r--changelogs/unreleased/207462-scoped-labels-rendering-is-broken-in-todos.yml5
-rw-r--r--changelogs/unreleased/207464-prevent-unauthorized-user-to-lock-an-issue-when-the-sidebar-is-col.yml5
-rw-r--r--changelogs/unreleased/207468-note-confidential-attribute.yml5
-rw-r--r--changelogs/unreleased/207536-retried-jobs-are-not-able-to-find-knapsack-report.yml5
-rw-r--r--changelogs/unreleased/207623-fix-code-search-pagination.yml5
-rw-r--r--changelogs/unreleased/207808-geo-bug-filedownloaddispatchworker-may-sometimes-excessively-resyn.yml5
-rw-r--r--changelogs/unreleased/207927-validate-actor-user-against-codeowners.yml5
-rw-r--r--changelogs/unreleased/207962-deploy-ecs.yml5
-rw-r--r--changelogs/unreleased/207976-stop-markdown-caching-of-non-markdown-snippet-content.yml5
-rw-r--r--changelogs/unreleased/208128-add-external-pull-request-to-existing-object-relations.yml5
-rw-r--r--changelogs/unreleased/208151-code-review-analytics-shows-no-data-for-mrs-in-review-for-less-tha.yml5
-rw-r--r--changelogs/unreleased/208153-add-anchor-to-related-issues-and-related-merge-requests.yml5
-rw-r--r--changelogs/unreleased/208167-bigfix-unable-to-fork-project-to-the-same-namespace.yml5
-rw-r--r--changelogs/unreleased/208242-scoped-label-rendering-in-emails-is-broken.yml5
-rw-r--r--changelogs/unreleased/208258-update-documentation-and-common_metrics-yml-to-match-new-y_axis-pr.yml5
-rw-r--r--changelogs/unreleased/208403-attachment-file-not-found.yml5
-rw-r--r--changelogs/unreleased/208453-add-title-to-analytics-sidebar-menus.yml5
-rw-r--r--changelogs/unreleased/208455-remove-analytics-suffixes-from-analytics-sidebar-menu-items.yml5
-rw-r--r--changelogs/unreleased/208471-actionview-template-error-undefined-method-concat-for-nil-nilclass.yml5
-rw-r--r--changelogs/unreleased/208473-fix-pipeline-tab-url.yml5
-rw-r--r--changelogs/unreleased/208479-requests-for-svgs-returning-404-in-issues-analytics-feature.yml5
-rw-r--r--changelogs/unreleased/208524-error-in-custom-dashboard-yml-file-breaks-the-dashboards-dropdown.yml5
-rw-r--r--changelogs/unreleased/208674-use-wh-acceleration-for-ui-project-import.yml5
-rw-r--r--changelogs/unreleased/208675-add-package_name-as-option-to-packages-api.yml5
-rw-r--r--changelogs/unreleased/208678-packages-project-and-group-api-will-return-processing-nuget-packag.yml5
-rw-r--r--changelogs/unreleased/208788-fix-avg_cycle_analytics-giving-an-uncaught-error.yml5
-rw-r--r--changelogs/unreleased/208798-replace-instances-of-the-issue-duplicate-icon-with-the-duplicate-i.yml5
-rw-r--r--changelogs/unreleased/208827-replace-issue-external-icon-with-external-link.yml5
-rw-r--r--changelogs/unreleased/208830-conan-package-reference-fix.yml5
-rw-r--r--changelogs/unreleased/208830-download-urls-conan-reference.yml6
-rw-r--r--changelogs/unreleased/208885-optimize-ci_pipeline-counters-related-to-the-ci-pipeline.yml5
-rw-r--r--changelogs/unreleased/208886-optimize-deployment-counters-related-to-the-deployment.yml5
-rw-r--r--changelogs/unreleased/208887-optimize-project-counters-mirrored-pipelines.yml5
-rw-r--r--changelogs/unreleased/208887-optimize-project-counters-projects_with_repositories_enabled.yml5
-rw-r--r--changelogs/unreleased/208887-optimize-project-counters-service-desk.yml5
-rw-r--r--changelogs/unreleased/208887-optimize-project-counters-with-slack-service.yml5
-rw-r--r--changelogs/unreleased/208889-optimize-event-counters.yml5
-rw-r--r--changelogs/unreleased/208890-optimize-notes-counters.yml5
-rw-r--r--changelogs/unreleased/208891-optimize-todos-counters.yml5
-rw-r--r--changelogs/unreleased/208914-crud-for-instance-level-integrations.yml5
-rw-r--r--changelogs/unreleased/208923-enable-batch-counting-for-some-individual-queries.yml5
-rw-r--r--changelogs/unreleased/208936.yml5
-rw-r--r--changelogs/unreleased/209002-change-evidence-sha-clipboard-button-hover-text.yml5
-rw-r--r--changelogs/unreleased/209207-spinner-appears-to-be-broken.yml5
-rw-r--r--changelogs/unreleased/209277-introduce-a-feature-flag-for-resolve-notifications-for-when-pipeli.yml5
-rw-r--r--changelogs/unreleased/209761-fix-wiki-directories-with-hyphens.yml5
-rw-r--r--changelogs/unreleased/209783-follow-up-from-resolve-notifications-for-when-pipelines-are-fixed.yml5
-rw-r--r--changelogs/unreleased/210007-optimize-services_usage-counters.yml5
-rw-r--r--changelogs/unreleased/210008-the-same-chart-appears-twice-for-different-embeds.yml5
-rw-r--r--changelogs/unreleased/210051-optimize-or-remove-ldap_users-counter.yml5
-rw-r--r--changelogs/unreleased/210332-approximate-counters-are-not-working-on-gitlab-com.yml5
-rw-r--r--changelogs/unreleased/210335-fix-handling-of-numeric-emoji-names.yml5
-rw-r--r--changelogs/unreleased/21765-group-token-refactor.yml5
-rw-r--r--changelogs/unreleased/21811-group-create-deploy-tokens.yml5
-rw-r--r--changelogs/unreleased/21811-group-delete-deploy-token.yml5
-rw-r--r--changelogs/unreleased/21811-group-list-deploy-tokens.yml5
-rw-r--r--changelogs/unreleased/21811-instance-deploy-tokens.yml5
-rw-r--r--changelogs/unreleased/21811-project-create-deploy-tokens.yml5
-rw-r--r--changelogs/unreleased/21811-project-delete-deploy-token.yml5
-rw-r--r--changelogs/unreleased/21811-project-list-deploy-tokens.yml5
-rw-r--r--changelogs/unreleased/22103-make-code-tags-consistent-in-discussions.yml5
-rw-r--r--changelogs/unreleased/24072-user-profile-add-job-title.yml5
-rw-r--r--changelogs/unreleased/24083-tableflip-quick-action-is-interpreted-even-if-inside-code-block.yml5
-rw-r--r--changelogs/unreleased/24309-notifications-for-when-pipelines-are-fixed.yml5
-rw-r--r--changelogs/unreleased/25095-remove-gitlab-shell-indirection-for-authorized-keys.yml5
-rw-r--r--changelogs/unreleased/25283-add-masked-param-group-vars-api.yml5
-rw-r--r--changelogs/unreleased/25334-update-rouge.yml5
-rw-r--r--changelogs/unreleased/25351-add-buttons.yml5
-rw-r--r--changelogs/unreleased/25550-there-is-a-drag-and-drop-bug-in-boards.yml5
-rw-r--r--changelogs/unreleased/25744-optional-custom-icon-in-omniauth-login-label.yml5
-rw-r--r--changelogs/unreleased/25838-include-full-upload-url-in-api-response.yml5
-rw-r--r--changelogs/unreleased/25995-default-relative-links-to-blobs.yml5
-rw-r--r--changelogs/unreleased/26111-fix-github-gist-links.yml5
-rw-r--r--changelogs/unreleased/26113-file-type-issue.yml6
-rw-r--r--changelogs/unreleased/26556-create-merge-request-button-extends-past-edge-on-mobile.yml6
-rw-r--r--changelogs/unreleased/26712-Update-GitLab-codeclimate-to-head.yml5
-rw-r--r--changelogs/unreleased/27072-name-regex-allow-bulk-api.yml5
-rw-r--r--changelogs/unreleased/27144-gitlab-hosted-codesandbox.yml5
-rw-r--r--changelogs/unreleased/27227-widget-showing-changed-pages-for-visual-reviews.yml5
-rw-r--r--changelogs/unreleased/27300-add-filepath-redirect-url.yml5
-rw-r--r--changelogs/unreleased/27300-add-filepath-to-release-links-api.yml5
-rw-r--r--changelogs/unreleased/27300-enable-a-direct-link-to-a-release-and-release-assets-2.yml5
-rw-r--r--changelogs/unreleased/27300-expose-filepath-url-on-ui.yml5
-rw-r--r--changelogs/unreleased/27880-clearing-release-note-from-the-tags-page-deletes-release.yml5
-rw-r--r--changelogs/unreleased/28085-index-options-tuning.yml5
-rw-r--r--changelogs/unreleased/28560_cleanup_optimistic_locking_db.yml5
-rw-r--r--changelogs/unreleased/28627-adjust-commit-stats-over-limit-indication.yml5
-rw-r--r--changelogs/unreleased/28725-paginate-lfs-object-import.yml5
-rw-r--r--changelogs/unreleased/31289-show-issue-summary-on-releases-page.yml5
-rw-r--r--changelogs/unreleased/32046-differentiate-between-errors-failures-in-xunit-result.yml5
-rw-r--r--changelogs/unreleased/32882-render-special-references-for-releases.yml5
-rw-r--r--changelogs/unreleased/33641-fix_smartcard_param_check_in_user_build.yml5
-rw-r--r--changelogs/unreleased/34086-es-bulk-incremental-index-updates.yml5
-rw-r--r--changelogs/unreleased/34420-optimize-pagination-on-explore-snippets.yml5
-rw-r--r--changelogs/unreleased/34525-update-custom-dashboard.yml5
-rw-r--r--changelogs/unreleased/35475-add-prometheus-ci-vars.yml5
-rw-r--r--changelogs/unreleased/36243-introduce-an-optional-expiration-date-for-ssh-keys.yml5
-rw-r--r--changelogs/unreleased/37256-bump-wh-version.yml5
-rw-r--r--changelogs/unreleased/37256-use-workhorse-acceleration-on-project-import.yml5
-rw-r--r--changelogs/unreleased/37320-ensure-project-snippet-api-status.yml5
-rw-r--r--changelogs/unreleased/37320-ensure-project-snippet-feature-status-in-project-snippet-api-endpoi.yml5
-rw-r--r--changelogs/unreleased/37951-project-settings-required-approval-input-not-sequential-order.yml5
-rw-r--r--changelogs/unreleased/38096-splitmr-write-resource-milestone-events-pd.yml5
-rw-r--r--changelogs/unreleased/38143-replace-labels-in-vue-with-gitlab-ui-component.yml5
-rw-r--r--changelogs/unreleased/38144-replace-labels-in-haml-with-gitlab-ui-css.yml5
-rw-r--r--changelogs/unreleased/38145-replace-labels-in-non-vue-js-with-gitlab-ui-component.yml5
-rw-r--r--changelogs/unreleased/38414.yml5
-rw-r--r--changelogs/unreleased/40585-token-disclaimer.yml5
-rw-r--r--changelogs/unreleased/55487-backfill-lfs-objects-projects.yml5
-rw-r--r--changelogs/unreleased/63-nudge-users-to-select-a-template-to-set-up-a-pipeline.yml5
-rw-r--r--changelogs/unreleased/7003-fail-to-start-server-without-ar-connection.yml5
-rw-r--r--changelogs/unreleased/7583-developer-cannot-push-to-projects-they-create-in-groups.yml5
-rw-r--r--changelogs/unreleased/Remove-refreshData-function-logic-from-issue-js.yml5
-rw-r--r--changelogs/unreleased/Resolve-Migrate--fa-spinner-app-assets-javascripts-notes-components-discu.yml5
-rw-r--r--changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-award_emoji.yml5
-rw-r--r--changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-help.yml5
-rw-r--r--changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-ide.yml5
-rw-r--r--changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-projects-find_file.yml5
-rw-r--r--changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-projects-forks.yml5
-rw-r--r--changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-projects-notes.yml5
-rw-r--r--changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-shared-badges.yml5
-rw-r--r--changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-snippets-notes.yml5
-rw-r--r--changelogs/unreleased/Resolve-Migrate--fa-spinner-ee-app-views-shared-members.yml5
-rw-r--r--changelogs/unreleased/a11y-ci-template.yml5
-rw-r--r--changelogs/unreleased/a11y-gitlab-script.yml5
-rw-r--r--changelogs/unreleased/ac-remove-track_mr_picking-ff.yml5
-rw-r--r--changelogs/unreleased/add-endpoint-to-remove-sidekiq-jobs-based-on-metadata.yml5
-rw-r--r--changelogs/unreleased/add-feature-flags-version.yml5
-rw-r--r--changelogs/unreleased/add-gatsby.yml5
-rw-r--r--changelogs/unreleased/add-missing-secure-queues-changelog.yml5
-rw-r--r--changelogs/unreleased/add-retiresjs-vars-to-dependency-scanning.yml5
-rw-r--r--changelogs/unreleased/add-shard-label-to-queue-timing-histogram-metric.yml5
-rw-r--r--changelogs/unreleased/add-sidekiq-metrics-for-gitaly-and-db-time.yml5
-rw-r--r--changelogs/unreleased/add-static-object-external-storage-url-to-csp-rules.yml5
-rw-r--r--changelogs/unreleased/add-trigger-include-artifact.yml5
-rw-r--r--changelogs/unreleased/add-user-job-title-column.yml5
-rw-r--r--changelogs/unreleased/ak-fix-multi-pod.yml5
-rw-r--r--changelogs/unreleased/ak-move-logs-to-core.yml5
-rw-r--r--changelogs/unreleased/ak-rescue-error.yml5
-rw-r--r--changelogs/unreleased/ak-upgrade-es.yml5
-rw-r--r--changelogs/unreleased/al-205505-fix-snippet-blob-viewers.yml5
-rw-r--r--changelogs/unreleased/allow-selecting-all-queues-with-sidekiq-cluster.yml5
-rw-r--r--changelogs/unreleased/allow-to-disable-defaults.yml5
-rw-r--r--changelogs/unreleased/allow_toggle_modsecurity_settings.yml5
-rw-r--r--changelogs/unreleased/auto-deploy-image-v0-12-0.yml5
-rw-r--r--changelogs/unreleased/broaden-access-scope-for-version-api.yml5
-rw-r--r--changelogs/unreleased/bvl-validate-changed-values.yml5
-rw-r--r--changelogs/unreleased/bw-board-query-by-id.yml5
-rw-r--r--changelogs/unreleased/bw-graphql-board-type.yml5
-rw-r--r--changelogs/unreleased/cat-fix-env-passthrough-sast-205694.yml5
-rw-r--r--changelogs/unreleased/cat-fix-namespaceid-import-39078.yml5
-rw-r--r--changelogs/unreleased/changed-pages-ci-var.yml5
-rw-r--r--changelogs/unreleased/closed-issue-weight-grey.yml5
-rw-r--r--changelogs/unreleased/cluster-apps-0-9-0.yml5
-rw-r--r--changelogs/unreleased/cluster-info-tabs.yml5
-rw-r--r--changelogs/unreleased/confapi-repo-push-mirror.yml5
-rw-r--r--changelogs/unreleased/create-approval-todos-on-update.yml5
-rw-r--r--changelogs/unreleased/create-puma-rugged-suboptimal-config-checker.yml6
-rw-r--r--changelogs/unreleased/create-remote-mirrors-docs.yml5
-rw-r--r--changelogs/unreleased/create_single_temporary_index_for_notes_mentions.yml5
-rw-r--r--changelogs/unreleased/create_users_statistics.yml5
-rw-r--r--changelogs/unreleased/creator-pairing-fix-alert.yml5
-rw-r--r--changelogs/unreleased/creator-pairing-group-list-padding.yml5
-rw-r--r--changelogs/unreleased/dblessing-add-scim-identities.yml6
-rw-r--r--changelogs/unreleased/dennis-update-ios-swift-project-template-logo.yml5
-rw-r--r--changelogs/unreleased/deploy-mr-once-take-2.yml5
-rw-r--r--changelogs/unreleased/descriptive_pages_error.yml5
-rw-r--r--changelogs/unreleased/dmishunov-rich-viewers.yml5
-rw-r--r--changelogs/unreleased/do-not-parse-undefined-severity-confidence.yml5
-rw-r--r--changelogs/unreleased/dotenv-report-artifact.yml5
-rw-r--r--changelogs/unreleased/drop_forked_project_links_table.yml5
-rw-r--r--changelogs/unreleased/drop_old_state_column_from_issues.yml5
-rw-r--r--changelogs/unreleased/dz-scope-issue-route-default.yml5
-rw-r--r--changelogs/unreleased/ee-insert-all-for-load-balancing.yml5
-rw-r--r--changelogs/unreleased/enable-customizable-cycle-analytics.yml5
-rw-r--r--changelogs/unreleased/feat-2fa-for-admin-mode.yml5
-rw-r--r--changelogs/unreleased/feat-broadcast-message-dimsiss.yml5
-rw-r--r--changelogs/unreleased/feat-mr-diff-coverage-visualisation.yml5
-rw-r--r--changelogs/unreleased/feat-x509-crl.yml5
-rw-r--r--changelogs/unreleased/feature-199458-track-jump-to-next-unresolved-thread.yml5
-rw-r--r--changelogs/unreleased/feature-enable-split-diffs-by-default.yml5
-rw-r--r--changelogs/unreleased/feature-image-diff-size.yml6
-rw-r--r--changelogs/unreleased/file-path-validator.yml5
-rw-r--r--changelogs/unreleased/filter-sentry-error-list.yml5
-rw-r--r--changelogs/unreleased/find-commits-by-author.yml5
-rw-r--r--changelogs/unreleased/fix-api.yml5
-rw-r--r--changelogs/unreleased/fix-ci-delete-variable-bug.yml5
-rw-r--r--changelogs/unreleased/fix-dependency-proxy-link.yml5
-rw-r--r--changelogs/unreleased/fix-deployment-namespace-resolution.yml5
-rw-r--r--changelogs/unreleased/fix-deployments-pagination.yml5
-rw-r--r--changelogs/unreleased/fix-duplicate-labels-when-moving-projects.yml5
-rw-r--r--changelogs/unreleased/fix-export-state-logic.yml5
-rw-r--r--changelogs/unreleased/fix-merge-to-ref-service-raise-command-error.yml5
-rw-r--r--changelogs/unreleased/fix-mermaid-flow-chart-width.yml5
-rw-r--r--changelogs/unreleased/fix-pipeline-creation-race-conditions.yml5
-rw-r--r--changelogs/unreleased/fix-pipeline-details-invalid-buttons.yml5
-rw-r--r--changelogs/unreleased/fix-pipeline-tooltip.yml5
-rw-r--r--changelogs/unreleased/fix-prevent-user-theme-color-api-overwrite.yml5
-rw-r--r--changelogs/unreleased/fixes-caret-position-after-pasting-an-image-15011.yml5
-rw-r--r--changelogs/unreleased/fj-195517-single-blob-snippet-view-render.yml5
-rw-r--r--changelogs/unreleased/fj-205646-fix-project-moved-message.yml5
-rw-r--r--changelogs/unreleased/fj-207803-fix-project-snippet-policy-bug.yml5
-rw-r--r--changelogs/unreleased/fj-208693-fix-bug-creating-snippet.yml5
-rw-r--r--changelogs/unreleased/fj-39176-create-project-snippet-repository.yml5
-rw-r--r--changelogs/unreleased/fj-39201-import-export-project-snippets.yml5
-rw-r--r--changelogs/unreleased/fj-39265-create-snippet-repository-content.yml5
-rw-r--r--changelogs/unreleased/fj-39265-update-snippet-repository-content.yml5
-rw-r--r--changelogs/unreleased/fj-39515-delete-snippet-repositories.yml5
-rw-r--r--changelogs/unreleased/fj-allow-create-snippet-default-branch.yml5
-rw-r--r--changelogs/unreleased/fj-fix-bug-hook-env.yml5
-rw-r--r--changelogs/unreleased/fj-fix-git-error-message-update-snippet.yml5
-rw-r--r--changelogs/unreleased/fj-fix-internal-api-return-code.yml5
-rw-r--r--changelogs/unreleased/fj-fix-snippet-update-error-message.yml5
-rw-r--r--changelogs/unreleased/fj-fix-snippet-url-to-repo.yml5
-rw-r--r--changelogs/unreleased/fj-remove-repository-storage-column-from-snippets.yml5
-rw-r--r--changelogs/unreleased/fj-rename-unauthorized-error-to-forbidden-error.yml5
-rw-r--r--changelogs/unreleased/fj-update-snippet-from-git-action.yml5
-rw-r--r--changelogs/unreleased/fooishbar-gitlab-fix-project-job-path-exposed-artifacts.yml5
-rw-r--r--changelogs/unreleased/georgekoltsov-196188-cleanup-temp-exports.yml5
-rw-r--r--changelogs/unreleased/georgekoltsov-27883-fix-import-pipeline-order.yml5
-rw-r--r--changelogs/unreleased/georgekoltsov-bump-project-import-limit.yml5
-rw-r--r--changelogs/unreleased/georgekoltsov-fix-500-on-gitea-importer.yml5
-rw-r--r--changelogs/unreleased/georgekoltsov-fix-epic-issues.yml5
-rw-r--r--changelogs/unreleased/georgekoltsov-fix-group-members-owner-access-level.yml5
-rw-r--r--changelogs/unreleased/georgekoltsov-fix-relations-order-on-export.yml5
-rw-r--r--changelogs/unreleased/gitaly_keepalive.yml5
-rw-r--r--changelogs/unreleased/gitlab-middleware-refactoring.yml5
-rw-r--r--changelogs/unreleased/group_milestones_n_1.yml5
-rw-r--r--changelogs/unreleased/groupapi-avatar-support.yml5
-rw-r--r--changelogs/unreleased/handle-object-storage-errors.yml5
-rw-r--r--changelogs/unreleased/insights-description-for-chart.yml5
-rw-r--r--changelogs/unreleased/introduce_highest_role_per_user_database_table.yml5
-rw-r--r--changelogs/unreleased/issue-updated_at-not-nil.yml5
-rw-r--r--changelogs/unreleased/issue_11391.yml5
-rw-r--r--changelogs/unreleased/issue_205500_1.yml5
-rw-r--r--changelogs/unreleased/issue_205690.yml5
-rw-r--r--changelogs/unreleased/jdb-display-base-label-versions-dropdown.yml5
-rw-r--r--changelogs/unreleased/jdb-fix-jump-to-next-unresolved-thread.yml5
-rw-r--r--changelogs/unreleased/jdb-hide-dont-remove-collapsed-files.yml5
-rw-r--r--changelogs/unreleased/jhyson-export_failures.yml5
-rw-r--r--changelogs/unreleased/jhyson-issue-import-export-consistency.yml5
-rw-r--r--changelogs/unreleased/jivanvl-add-edit-custom-metric-link.yml5
-rw-r--r--changelogs/unreleased/jivanvl-change-pod-logs-name.yml5
-rw-r--r--changelogs/unreleased/jivavnvl-add-refresh-button-monitoring-dashboard.yml5
-rw-r--r--changelogs/unreleased/jlouw-improve-audit-log-header-layout.yml5
-rw-r--r--changelogs/unreleased/jswain_update_renewal_link.yml5
-rw-r--r--changelogs/unreleased/kassio-fix-dev-seed.yml5
-rw-r--r--changelogs/unreleased/leipert-drop-node-8-support.yml5
-rw-r--r--changelogs/unreleased/leipert-polyfills-improvements.yml5
-rw-r--r--changelogs/unreleased/limit-broadcast-notifications-to-ui.yml5
-rw-r--r--changelogs/unreleased/lodash_blob.yml5
-rw-r--r--changelogs/unreleased/make_design_management_versions_created_at_not_null.yml5
-rw-r--r--changelogs/unreleased/mc-feature-trigger-pipelines-project-subscriptions.yml5
-rw-r--r--changelogs/unreleased/mermaid-fix.yml5
-rw-r--r--changelogs/unreleased/migrate-fa-spinner-in-views-dashboard-todos.yml5
-rw-r--r--changelogs/unreleased/migrate-security-scans.yml5
-rw-r--r--changelogs/unreleased/mk-hide-secondary-only-setting.yml5
-rw-r--r--changelogs/unreleased/mo-change-capybara-screenshots-name.yml5
-rw-r--r--changelogs/unreleased/mo-fix-capybara-screenshots-rails.yml5
-rw-r--r--changelogs/unreleased/mo-use-new-code-quality-image.yml5
-rw-r--r--changelogs/unreleased/move-storage-shards.yml5
-rw-r--r--changelogs/unreleased/mwaw-197871-improve-duplicated-dashboard-error-messages.yml5
-rw-r--r--changelogs/unreleased/mwaw-activate_shared_services_on_project_creation.yml6
-rw-r--r--changelogs/unreleased/mwaw-remove_logs_path_for_not_authorised_users.yml5
-rw-r--r--changelogs/unreleased/nfriend-create-release-through-ui.yml5
-rw-r--r--changelogs/unreleased/nfriend-enable-issues-summary.yml5
-rw-r--r--changelogs/unreleased/nfriend-enable-release-show-page-feature-flag.yml5
-rw-r--r--changelogs/unreleased/nicolasdular-email-restriction-regex.yml5
-rw-r--r--changelogs/unreleased/open-project-integration-2.yml5
-rw-r--r--changelogs/unreleased/osw-move-sidekiq-cluster-to-core.yml5
-rw-r--r--changelogs/unreleased/osw-support-opt-in-cluster-in-bg-jobs-script.yml5
-rw-r--r--changelogs/unreleased/pages-1-17.yml5
-rw-r--r--changelogs/unreleased/persist-expanded-environment-name-in-build-metadata.yml5
-rw-r--r--changelogs/unreleased/pokstad1-remove-branch-user-squash.yml5
-rw-r--r--changelogs/unreleased/public-api-for-merge-trains.yml5
-rw-r--r--changelogs/unreleased/rc-whitelist_ports.yml5
-rw-r--r--changelogs/unreleased/refactor-bypass-session-admin-mode.yml5
-rw-r--r--changelogs/unreleased/refactor-disable-csrf-in-session-destroy.yml5
-rw-r--r--changelogs/unreleased/remove-cs-kubernetes-workaround.yml5
-rw-r--r--changelogs/unreleased/remove-duplicate-auth-refresh-on-project-create.yml5
-rw-r--r--changelogs/unreleased/remove-merged-branch-names-ff.yml5
-rw-r--r--changelogs/unreleased/remove-puma-notices-from-admin-area-banner.yml5
-rw-r--r--changelogs/unreleased/replace-undefined-with-unknown.yml5
-rw-r--r--changelogs/unreleased/replace-undefined-with-unkown-vulnerabilities.yml5
-rw-r--r--changelogs/unreleased/replace_checkbox_by_toggle_for_modsecurity.yml5
-rw-r--r--changelogs/unreleased/repository-contributors-group-by-email.yml5
-rw-r--r--changelogs/unreleased/requirements-model.yml5
-rw-r--r--changelogs/unreleased/rk4bir-master-patch-60164.yml5
-rw-r--r--changelogs/unreleased/rk4bir-master-patch-77755.yml5
-rw-r--r--changelogs/unreleased/rk4bir-master-patch-85217.yml5
-rw-r--r--changelogs/unreleased/rk4bir-master-patch-92247.yml5
-rw-r--r--changelogs/unreleased/rk4bir-master-patch-97031.yml5
-rw-r--r--changelogs/unreleased/rs-commit-web_url.yml5
-rw-r--r--changelogs/unreleased/rs-keep-divergent-refs-column.yml5
-rw-r--r--changelogs/unreleased/rs-remote-mirrors-docs.yml5
-rw-r--r--changelogs/unreleased/sarnold-format-timestamps-locally.yml5
-rw-r--r--changelogs/unreleased/sh-avoid-current-settings-rake-task.yml5
-rw-r--r--changelogs/unreleased/sh-cache-ci-variables.yml5
-rw-r--r--changelogs/unreleased/sh-cleaup-prom-background-migrations.yml5
-rw-r--r--changelogs/unreleased/sh-enable-redis-key-compression.yml5
-rw-r--r--changelogs/unreleased/sh-ensure-fresh-project-settings.yml5
-rw-r--r--changelogs/unreleased/sh-feature-flag-persisted-process-cache.yml5
-rw-r--r--changelogs/unreleased/sh-optimize-pipeline-for-mrs.yml5
-rw-r--r--changelogs/unreleased/sh-rate-limit-archive-endpoint.yml5
-rw-r--r--changelogs/unreleased/sh-refresh-mr-widget-upon-cancel.yml5
-rw-r--r--changelogs/unreleased/sh-upgrade-bootsnap-1-4-6.yml5
-rw-r--r--changelogs/unreleased/sh-use-process-cache-for-feature-flags.yml5
-rw-r--r--changelogs/unreleased/sha-params-validator.yml5
-rw-r--r--changelogs/unreleased/show-cluster-status-fe.yml5
-rw-r--r--changelogs/unreleased/sidebar_not_expanding_at_certain_resolutions.yml5
-rw-r--r--changelogs/unreleased/simplify-ide-colors.yml5
-rw-r--r--changelogs/unreleased/stop_environments.yml5
-rw-r--r--changelogs/unreleased/support-airgap-in-dependency-scanning-template.yml5
-rw-r--r--changelogs/unreleased/switch-ff-ci-dynamic-child-pipeline.yml5
-rw-r--r--changelogs/unreleased/sy-alert-embeds.yml5
-rw-r--r--changelogs/unreleased/sy-auto-embed-alert.yml5
-rw-r--r--changelogs/unreleased/sy-global-integration.yml5
-rw-r--r--changelogs/unreleased/sy-grafana-default-panel.yml5
-rw-r--r--changelogs/unreleased/sy-grafana-default-times.yml5
-rw-r--r--changelogs/unreleased/tokenize-filtered-search.yml5
-rw-r--r--changelogs/unreleased/turn-on-new-variables-ui-ff.yml5
-rw-r--r--changelogs/unreleased/tweak-wiki-title-validation-message.yml5
-rw-r--r--changelogs/unreleased/udpate-cluster-application-image-to-0-11.yml6
-rw-r--r--changelogs/unreleased/udpate-cluster-application-image-to-0-12.yml5
-rw-r--r--changelogs/unreleased/unique-service-template-per-type.yml5
-rw-r--r--changelogs/unreleased/unlink-cache-deletions.yml5
-rw-r--r--changelogs/unreleased/update-ado-image-to-0-10-0.yml5
-rw-r--r--changelogs/unreleased/update-cert-manager-to-0-10-1.yml5
-rw-r--r--changelogs/unreleased/update-dast-ado-image-to-0-10-0.yml5
-rw-r--r--changelogs/unreleased/update-gitlab-runner-helm-chart-to-0-14-0.yml5
-rw-r--r--changelogs/unreleased/update-private-project-wording.yml5
-rw-r--r--changelogs/unreleased/update-puma-to-4-3-3.yml5
-rw-r--r--changelogs/unreleased/update-ruby-version-on-official-ci-templates.yml5
-rw-r--r--changelogs/unreleased/update_ingress_chart_version.yml5
-rw-r--r--changelogs/unreleased/update_repo_storage_checksum.yml5
-rw-r--r--changelogs/unreleased/upgrade-gitlab-ui.yml5
-rw-r--r--changelogs/unreleased/use-default-crossplane-stack-versions.yml5
-rw-r--r--changelogs/unreleased/use_replicate_repo_for_repo_move.yml5
-rw-r--r--changelogs/unreleased/validate-subnets-field.yml5
-rw-r--r--changelogs/unreleased/validate_service_project_id_nil_if_template.yml5
-rw-r--r--changelogs/unreleased/vh-snippets-content-types.yml5
-rw-r--r--changelogs/unreleased/vh-snippets-finder-tweak.yml5
-rw-r--r--config/application.rb17
-rw-r--r--config/environments/development.rb15
-rw-r--r--config/environments/test.rb5
-rw-r--r--config/feature_categories.yml17
-rw-r--r--config/gitlab.yml.example11
-rw-r--r--config/initializers/0_eager_load_http_cookie.rb10
-rw-r--r--config/initializers/0_inflections.rb31
-rw-r--r--config/initializers/0_license.rb20
-rw-r--r--config/initializers/1_settings.rb22
-rw-r--r--config/initializers/5_backend.rb2
-rw-r--r--config/initializers/8_devise.rb6
-rw-r--r--config/initializers/9_fast_gettext.rb7
-rw-r--r--config/initializers/actioncable.rb8
-rw-r--r--config/initializers/active_record_force_reconnects.rb7
-rw-r--r--config/initializers/active_record_lifecycle.rb2
-rw-r--r--config/initializers/console_message.rb2
-rw-r--r--config/initializers/graphql.rb6
-rw-r--r--config/initializers/lograge.rb34
-rw-r--r--config/initializers/omniauth.rb8
-rw-r--r--config/initializers/sidekiq.rb27
-rw-r--r--config/initializers/sidekiq_cluster.rb2
-rw-r--r--config/initializers/tracing.rb16
-rw-r--r--config/initializers_before_autoloader/000_inflections.rb32
-rw-r--r--config/initializers_before_autoloader/001_fast_gettext.rb8
-rw-r--r--config/initializers_before_autoloader/100_patch_omniauth_saml.rb28
-rw-r--r--config/locales/carrierwave.en.yml4
-rw-r--r--config/locales/en.yml2
-rw-r--r--config/prometheus/common_metrics.yml142
-rw-r--r--config/puma_actioncable.example.development.rb88
-rw-r--r--config/routes.rb11
-rw-r--r--config/routes/admin.rb7
-rw-r--r--config/routes/git_http.rb8
-rw-r--r--config/routes/group.rb7
-rw-r--r--config/routes/import.rb1
-rw-r--r--config/routes/merge_requests.rb1
-rw-r--r--config/routes/project.rb34
-rw-r--r--config/routes/user.rb4
-rw-r--r--config/settings.rb9
-rw-r--r--config/sidekiq_queues.yml10
-rw-r--r--config/webpack.config.js68
-rw-r--r--config/webpack.vendor.config.js3
-rw-r--r--danger/database/Dangerfile3
-rw-r--r--danger/gemfile/Dangerfile36
-rw-r--r--danger/karma/Dangerfile50
-rw-r--r--danger/metadata/Dangerfile3
-rw-r--r--danger/telemetry/Dangerfile19
-rw-r--r--db/fixtures/development/11_keys.rb6
-rw-r--r--db/migrate/20180305144721_add_privileged_to_runner.rb2
-rw-r--r--db/migrate/20180423204600_add_pages_access_level_to_project_feature.rb2
-rw-r--r--db/migrate/20180529093006_ensure_remote_mirror_columns.rb2
-rw-r--r--db/migrate/20180601213245_add_deploy_strategy_to_project_auto_devops.rb2
-rw-r--r--db/migrate/20180831164905_add_common_to_prometheus_metrics.rb2
-rw-r--r--db/migrate/20180907015926_add_legacy_abac_to_cluster_providers_gcp.rb2
-rw-r--r--db/migrate/20181017001059_add_cluster_type_to_clusters.rb2
-rw-r--r--db/migrate/20190218134158_add_masked_to_ci_variables.rb2
-rw-r--r--db/migrate/20190218134209_add_masked_to_ci_group_variables.rb2
-rw-r--r--db/migrate/20190220142344_add_email_header_and_footer_enabled_flag_to_appearances_table.rb2
-rw-r--r--db/migrate/20190228192410_add_multi_line_attributes_to_suggestion.rb2
-rw-r--r--db/migrate/20190322164830_add_auto_ssl_enabled_to_pages_domain.rb2
-rw-r--r--db/migrate/20190325165127_add_managed_to_cluster.rb2
-rw-r--r--db/migrate/20190415030217_add_variable_type_to_ci_variables.rb2
-rw-r--r--db/migrate/20190416185130_add_merge_train_enabled_to_ci_cd_settings.rb2
-rw-r--r--db/migrate/20190416213556_add_variable_type_to_ci_group_variables.rb2
-rw-r--r--db/migrate/20190416213631_add_variable_type_to_ci_pipeline_schedule_variables.rb2
-rw-r--r--db/migrate/20190426180107_add_deployment_events_to_services.rb2
-rw-r--r--db/migrate/20190520200123_add_rule_type_to_approval_merge_request_approval_rules.rb2
-rw-r--r--db/migrate/20190607085356_add_source_to_pages_domains.rb2
-rw-r--r--db/migrate/20190628145246_add_strategies_to_operations_feature_flag_scopes.rb2
-rw-r--r--db/migrate/20190709204413_add_rule_type_to_approval_project_rules.rb2
-rw-r--r--db/migrate/20190712064021_add_namespace_per_environment_flag_to_clusters.rb2
-rw-r--r--db/migrate/20190715173819_add_object_storage_flag_to_geo_node.rb2
-rw-r--r--db/migrate/20190729180447_add_merge_requests_require_code_owner_approval_to_protected_branches.rb2
-rw-r--r--db/migrate/20190816151221_add_active_jobs_limit_to_plans.rb2
-rw-r--r--db/migrate/20190901174200_add_max_issue_count_to_list.rb2
-rw-r--r--db/migrate/20190905140605_add_cloud_run_to_clusters_providers_gcp.rb2
-rw-r--r--db/migrate/20190907184714_add_show_whitespace_in_diffs_to_user_preferences.rb2
-rw-r--r--db/migrate/20190918104731_add_cleanup_status_to_cluster.rb2
-rw-r--r--db/migrate/20191001170300_create_ci_ref.rb19
-rw-r--r--db/migrate/20191014123159_add_expire_notification_delivered_to_personal_access_tokens.rb2
-rw-r--r--db/migrate/20191023093207_add_comment_actions_to_services.rb2
-rw-r--r--db/migrate/20191028130054_add_max_issue_weight_to_list.rb2
-rw-r--r--db/migrate/20191029191901_add_enabled_to_grafana_integrations.rb2
-rw-r--r--db/migrate/20191105155113_add_secret_to_snippet.rb2
-rw-r--r--db/migrate/20191106144901_add_state_to_merge_trains.rb2
-rw-r--r--db/migrate/20191111165017_add_fixed_pipeline_to_notification_settings.rb9
-rw-r--r--db/migrate/20191112090226_add_artifacts_to_ci_build_need.rb2
-rw-r--r--db/migrate/20191114201118_make_created_at_not_null_in_design_management_versions.rb15
-rw-r--r--db/migrate/20191121193110_add_issue_links_type.rb2
-rw-r--r--db/migrate/20191123081456_add_dismissable_to_broadcast_messages.rb9
-rw-r--r--db/migrate/20191127163053_add_confidential_to_doorkeeper_application.rb2
-rw-r--r--db/migrate/20191127221608_add_wildcard_and_domain_type_to_pages_domains.rb2
-rw-r--r--db/migrate/20191129134844_add_broadcast_type_to_broadcast_message.rb2
-rw-r--r--db/migrate/20191206014412_add_image_to_design_management_designs_versions.rb9
-rw-r--r--db/migrate/20191218124915_add_repository_storage_to_snippets.rb2
-rw-r--r--db/migrate/20191218125015_add_storage_version_to_snippets.rb2
-rw-r--r--db/migrate/20200102140148_add_expanded_environment_name_to_ci_build_metadata.rb13
-rw-r--r--db/migrate/20200122161638_add_deploy_token_type_to_deploy_tokens.rb2
-rw-r--r--db/migrate/20200128184209_add_usage_to_pages_domains.rb2
-rw-r--r--db/migrate/20200203015140_add_id_to_design_management_designs_versions.rb9
-rw-r--r--db/migrate/20200203025400_default_lock_version_to_zero_for_merge_requests.rb8
-rw-r--r--db/migrate/20200203025602_default_lock_version_to_zero_for_issues.rb8
-rw-r--r--db/migrate/20200203025619_default_lock_version_to_zero_for_epics.rb8
-rw-r--r--db/migrate/20200203025744_default_lock_version_to_zero_for_ci_builds.rb8
-rw-r--r--db/migrate/20200203025801_default_lock_version_to_zero_for_ci_stages.rb8
-rw-r--r--db/migrate/20200203025821_default_lock_version_to_zero_for_ci_pipelines.rb8
-rw-r--r--db/migrate/20200206141511_change_saml_provider_outer_forks_default.rb15
-rw-r--r--db/migrate/20200207062728_add_default_branch_protection_to_namespaces.rb19
-rw-r--r--db/migrate/20200211174946_add_auto_renew_to_gitlab_subscriptions.rb10
-rw-r--r--db/migrate/20200212014653_rename_security_dashboard_feature_flag_to_instance_security_dashboard.rb25
-rw-r--r--db/migrate/20200212133945_add_group_hooks_to_plan_limits.rb9
-rw-r--r--db/migrate/20200212134201_insert_group_hooks_plan_limits.rb23
-rw-r--r--db/migrate/20200213093702_add_email_restrictions_to_application_settings.rb15
-rw-r--r--db/migrate/20200213155311_add_npm_package_requests_forwarding_to_application_settings.rb20
-rw-r--r--db/migrate/20200215222507_drop_forked_project_links_fk.rb27
-rw-r--r--db/migrate/20200215225103_drop_forked_project_links_table.rb17
-rw-r--r--db/migrate/20200219105209_add_filepath_to_release_links.rb8
-rw-r--r--db/migrate/20200219133859_add_environment_id_to_deployment_merge_requests.rb9
-rw-r--r--db/migrate/20200219141307_add_environment_id_fk_to_deployment_merge_requests.rb17
-rw-r--r--db/migrate/20200219142522_add_environment_id_merge_request_id_uniq_idx_to_deployment_merge_requests.rb17
-rw-r--r--db/migrate/20200220180944_add_keep_divergent_refs.rb11
-rw-r--r--db/migrate/20200221023320_add_index_on_pages_domain_on_domain_lowercase.rb19
-rw-r--r--db/migrate/20200221074028_add_mr_metrics_first_approved_at.rb13
-rw-r--r--db/migrate/20200221100514_create_users_statistics.rb19
-rw-r--r--db/migrate/20200221105436_update_application_setting_npm_package_requests_forwarding_default.rb17
-rw-r--r--db/migrate/20200221144534_drop_activate_prometheus_services_background_jobs.rb26
-rw-r--r--db/migrate/20200222055543_add_confidential_to_note.rb18
-rw-r--r--db/migrate/20200224020219_add_status_page_settings.rb18
-rw-r--r--db/migrate/20200224163804_add_version_to_feature_flags_table.rb21
-rw-r--r--db/migrate/20200224185814_add_project_subscriptions_to_plan_limits.rb9
-rw-r--r--db/migrate/20200225111018_add_index_for_group_and_iid_search_to_epics.rb22
-rw-r--r--db/migrate/20200225123228_insert_project_subscriptions_plan_limits.rb25
-rw-r--r--db/migrate/20200226100614_create_requirements.rb28
-rw-r--r--db/migrate/20200226100624_requirements_add_project_fk.rb19
-rw-r--r--db/migrate/20200226100634_requirements_add_author_fk.rb19
-rw-r--r--db/migrate/20200226162156_rename_closed_at_to_dismissed_at_in_vulnerabilities.rb17
-rw-r--r--db/migrate/20200226162634_rename_closed_by_to_dismissed_by_in_vulnerabilities.rb17
-rw-r--r--db/migrate/20200227164113_create_scim_identities.rb18
-rw-r--r--db/migrate/20200227165129_create_user_details.rb24
-rw-r--r--db/migrate/20200228160542_create_ci_sources_projects.rb17
-rw-r--r--db/migrate/20200303055348_add_expires_at_to_keys.rb9
-rw-r--r--db/migrate/20200303074328_add_index_on_snippet_description.rb18
-rw-r--r--db/migrate/20200304085423_add_user_type.rb21
-rw-r--r--db/migrate/20200304090155_add_user_type_index.rb17
-rw-r--r--db/migrate/20200304121828_add_ci_sources_project_pipeline_foreign_key.rb19
-rw-r--r--db/migrate/20200304121844_add_ci_sources_project_source_project_foreign_key.rb19
-rw-r--r--db/migrate/20200304160800_add_index_services_on_template.rb19
-rw-r--r--db/migrate/20200304160801_delete_template_services_duplicated_by_type.rb25
-rw-r--r--db/migrate/20200304160823_add_index_to_service_unique_template_per_type.rb17
-rw-r--r--db/migrate/20200305121159_add_merge_request_metrics_first_reassigned_at.rb19
-rw-r--r--db/migrate/20200305151736_delete_template_project_services.rb19
-rw-r--r--db/migrate/20200306095654_add_merge_request_assignee_created_at.rb19
-rw-r--r--db/migrate/20200306160521_add_index_on_author_id_and_created_at_to_events.rb17
-rw-r--r--db/migrate/20200306170211_add_index_on_author_id_and_id_and_created_at_to_issues.rb17
-rw-r--r--db/migrate/20200306170321_add_index_on_user_id_and_created_at_to_ci_pipelines.rb19
-rw-r--r--db/migrate/20200306170531_add_index_on_author_id_and_created_at_to_todos.rb16
-rw-r--r--db/migrate/20200306192548_add_index_on_project_id_and_type_to_services.rb22
-rw-r--r--db/migrate/20200306193236_add_index_on_creator_id_and_created_at_to_projects.rb22
-rw-r--r--db/migrate/20200309140540_add_index_on_project_id_and_repository_access_level_to_project_features.rb18
-rw-r--r--db/migrate/20200309162244_add_open_project_tracker_data.rb23
-rw-r--r--db/migrate/20200309195209_add_index_on_project_id_and_builds_access_level_to_project_features.rb18
-rw-r--r--db/migrate/20200309195710_add_index_on_mirror_and_creator_id_and_created_at_to_projects.rb18
-rw-r--r--db/migrate/20200310123229_add_index_on_enabled_and_provider_type_and_id_to_clusters.rb19
-rw-r--r--db/migrate/20200310132654_add_instance_to_services.rb17
-rw-r--r--db/migrate/20200310133822_add_index_on_author_id_and_id_and_created_at_to_notes.rb19
-rw-r--r--db/migrate/20200310135823_add_index_to_service_unique_instance_per_type.rb17
-rw-r--r--db/migrate/20200310145304_add_runtime_created_to_ci_job_variables.rb19
-rw-r--r--db/migrate/20200311084025_add_index_on_user_id_status_created_at_to_deployments.rb17
-rw-r--r--db/migrate/20200311093210_create_user_highest_roles.rb25
-rw-r--r--db/migrate/20200311094020_add_index_on_id_and_status_to_deployments.rb17
-rw-r--r--db/migrate/20200311141053_add_ci_pipeline_schedules_to_plan_limits.rb17
-rw-r--r--db/migrate/20200311141943_insert_ci_pipeline_schedules_plan_limits.rb25
-rw-r--r--db/migrate/20200311165635_create_project_export_jobs.rb19
-rw-r--r--db/migrate/20200312163407_add_index_on_id_and_service_desk_enabled_to_projects.rb18
-rw-r--r--db/migrate/20200313123934_add_index_on_user_id_type_source_type_ldap_and_created_at_to_members.rb18
-rw-r--r--db/post_migrate/20200120083607_remove_storage_version_column_from_snippets.rb2
-rw-r--r--db/post_migrate/20200124110831_migrate_design_notes_mentions_to_db.rb61
-rw-r--r--db/post_migrate/20200128132510_add_temporary_index_for_notes_with_mentions.rb29
-rw-r--r--db/post_migrate/20200128133510_cleanup_empty_commit_user_mentions.rb28
-rw-r--r--db/post_migrate/20200128134110_migrate_commit_notes_mentions_to_db.rb37
-rw-r--r--db/post_migrate/20200128210353_cleanup_optimistic_locking_nulls.rb44
-rw-r--r--db/post_migrate/20200211155000_cleanup_empty_merge_request_mentions.rb27
-rw-r--r--db/post_migrate/20200211155100_add_temporary_merge_request_with_mentions_index.rb20
-rw-r--r--db/post_migrate/20200211155539_migrate_merge_request_mentions_to_db.rb35
-rw-r--r--db/post_migrate/20200214034836_remove_security_dashboard_feature_flag.rb25
-rw-r--r--db/post_migrate/20200214214934_create_environment_for_self_monitoring_project.rb28
-rw-r--r--db/post_migrate/20200217223651_add_index_to_job_artifact_secure_reports.rb24
-rw-r--r--db/post_migrate/20200217225719_schedule_migrate_security_scans.rb31
-rw-r--r--db/post_migrate/20200219183456_remove_issue_state_indexes.rb40
-rw-r--r--db/post_migrate/20200219184219_remove_merge_request_state_indexes.rb39
-rw-r--r--db/post_migrate/20200219193058_remove_state_from_issues.rb30
-rw-r--r--db/post_migrate/20200219193117_remove_state_from_merge_requests.rb30
-rw-r--r--db/post_migrate/20200221142216_remove_repository_storage_from_snippets.rb28
-rw-r--r--db/post_migrate/20200226162239_cleanup_closed_at_rename_in_vulnerabilities.rb17
-rw-r--r--db/post_migrate/20200226162723_cleanup_closed_by_rename_in_vulnerabilities.rb17
-rw-r--r--db/post_migrate/20200227140242_update_occurrence_severity_column.rb34
-rw-r--r--db/post_migrate/20200302142052_update_vulnerability_severity_column.rb31
-rw-r--r--db/post_migrate/20200304211738_remove_file_type_from_packages_package_files.rb9
-rw-r--r--db/post_migrate/20200310075115_schedule_link_lfs_objects_projects.rb29
-rw-r--r--db/post_migrate/20200310135818_remove_temporary_promoted_notes_index.rb22
-rw-r--r--db/schema.rb226
-rw-r--r--doc/.linting/vale/styles/gitlab/Contractions.yml76
-rw-r--r--doc/.linting/vale/styles/gitlab/LatinTerms.yml39
-rw-r--r--doc/.linting/vale/styles/gitlab/OxfordComma.yml6
-rw-r--r--doc/.linting/vale/styles/gitlab/SentenceSpacing.yml32
-rw-r--r--doc/.linting/vale/styles/gitlab/Substitutions.yml36
-rw-r--r--doc/.vale/gitlab/Contractions.yml53
-rw-r--r--doc/.vale/gitlab/FirstPerson.yml13
-rw-r--r--doc/.vale/gitlab/InternalLinkExtension.yml11
-rw-r--r--doc/.vale/gitlab/LatinTerms.yml17
-rw-r--r--doc/.vale/gitlab/OxfordComma.yml11
-rw-r--r--doc/.vale/gitlab/RelativeLinks.yml11
-rw-r--r--doc/.vale/gitlab/SentenceSpacing.yml15
-rw-r--r--doc/.vale/gitlab/Substitutions.yml13
-rw-r--r--doc/README.md12
-rw-r--r--doc/administration/audit_events.md62
-rw-r--r--doc/administration/auth/README.md1
-rw-r--r--doc/administration/auth/authentiq.md5
-rw-r--r--doc/administration/auth/cognito.md79
-rw-r--r--doc/administration/auth/crowd.md2
-rw-r--r--doc/administration/auth/how_to_configure_ldap_gitlab_ce/index.md6
-rw-r--r--doc/administration/auth/ldap-ee.md12
-rw-r--r--doc/administration/auth/ldap.md15
-rw-r--r--doc/administration/auth/oidc.md8
-rw-r--r--doc/administration/auth/okta.md27
-rw-r--r--doc/administration/compliance.md3
-rw-r--r--doc/administration/geo/disaster_recovery/background_verification.md16
-rw-r--r--doc/administration/geo/disaster_recovery/index.md10
-rw-r--r--doc/administration/geo/disaster_recovery/planned_failover.md22
-rw-r--r--doc/administration/geo/replication/configuration.md14
-rw-r--r--doc/administration/geo/replication/database.md4
-rw-r--r--doc/administration/geo/replication/datatypes.md59
-rw-r--r--doc/administration/geo/replication/docker_registry.md5
-rw-r--r--doc/administration/geo/replication/high_availability.md132
-rw-r--r--doc/administration/geo/replication/index.md8
-rw-r--r--doc/administration/geo/replication/location_aware_git_url.md2
-rw-r--r--doc/administration/geo/replication/object_storage.md2
-rw-r--r--doc/administration/geo/replication/remove_geo_node.md2
-rw-r--r--doc/administration/geo/replication/troubleshooting.md215
-rw-r--r--doc/administration/geo/replication/tuning.md4
-rw-r--r--doc/administration/geo/replication/updating_the_geo_nodes.md2
-rw-r--r--doc/administration/geo/replication/version_specific_updates.md16
-rw-r--r--doc/administration/git_annex.md18
-rw-r--r--doc/administration/git_protocol.md34
-rw-r--r--doc/administration/gitaly/img/praefect_architecture_v12_9.pngbin0 -> 44098 bytes
-rw-r--r--doc/administration/gitaly/index.md66
-rw-r--r--doc/administration/gitaly/praefect.md825
-rw-r--r--doc/administration/gitaly/reference.md2
-rw-r--r--doc/administration/high_availability/README.md419
-rw-r--r--doc/administration/high_availability/consul.md3
-rw-r--r--doc/administration/high_availability/database.md33
-rw-r--r--doc/administration/high_availability/gitaly.md9
-rw-r--r--doc/administration/high_availability/gitlab.md3
-rw-r--r--doc/administration/high_availability/monitoring_node.md4
-rw-r--r--doc/administration/high_availability/nfs.md17
-rw-r--r--doc/administration/high_availability/nfs_host_client_setup.md6
-rw-r--r--doc/administration/high_availability/object_storage.md32
-rw-r--r--doc/administration/high_availability/pgbouncer.md2
-rw-r--r--doc/administration/high_availability/redis.md32
-rw-r--r--doc/administration/housekeeping.md4
-rw-r--r--doc/administration/incoming_email.md69
-rw-r--r--doc/administration/index.md11
-rw-r--r--doc/administration/instance_limits.md67
-rw-r--r--doc/administration/integration/plantuml.md12
-rw-r--r--doc/administration/job_artifacts.md29
-rw-r--r--doc/administration/lfs/lfs_administration.md8
-rw-r--r--doc/administration/lfs/manage_large_binaries_with_git_lfs.md4
-rw-r--r--doc/administration/lfs/migrate_from_git_annex_to_git_lfs.md2
-rw-r--r--doc/administration/libravatar.md4
-rw-r--r--doc/administration/logs.md241
-rw-r--r--doc/administration/monitoring/gitlab_self_monitoring_project/index.md39
-rw-r--r--doc/administration/monitoring/performance/grafana_configuration.md9
-rw-r--r--doc/administration/monitoring/performance/influxdb_configuration.md2
-rw-r--r--doc/administration/monitoring/prometheus/gitlab_metrics.md20
-rw-r--r--doc/administration/monitoring/prometheus/index.md44
-rw-r--r--doc/administration/operations/extra_sidekiq_processes.md54
-rw-r--r--doc/administration/operations/fast_ssh_key_lookup.md6
-rw-r--r--doc/administration/operations/puma.md4
-rw-r--r--doc/administration/operations/ssh_certificates.md8
-rw-r--r--doc/administration/operations/unicorn.md18
-rw-r--r--doc/administration/packages/container_registry.md12
-rw-r--r--doc/administration/packages/dependency_proxy.md7
-rw-r--r--doc/administration/packages/index.md3
-rw-r--r--doc/administration/pages/index.md61
-rw-r--r--doc/administration/raketasks/maintenance.md1
-rw-r--r--doc/administration/raketasks/uploads/migrate.md5
-rw-r--r--doc/administration/reply_by_email_postfix_setup.md14
-rw-r--r--doc/administration/repository_storage_paths.md4
-rw-r--r--doc/administration/restart_gitlab.md10
-rw-r--r--doc/administration/server_hooks.md5
-rw-r--r--doc/administration/snippets/index.md2
-rw-r--r--doc/administration/static_objects_external_storage.md177
-rw-r--r--doc/administration/troubleshooting/debug.md20
-rw-r--r--doc/administration/troubleshooting/gdb-stuck-ruby.txt6
-rw-r--r--doc/administration/troubleshooting/gitlab_rails_cheat_sheet.md59
-rw-r--r--doc/administration/troubleshooting/img/AzureAD-basic_SAML.pngbin137104 -> 30365 bytes
-rw-r--r--doc/administration/troubleshooting/img/AzureAD-claims.pngbin55574 -> 14213 bytes
-rw-r--r--doc/administration/troubleshooting/img/OneLogin-SSOsettings.pngbin85242 -> 25397 bytes
-rw-r--r--doc/administration/troubleshooting/img/OneLogin-app_details.pngbin61390 -> 18305 bytes
-rw-r--r--doc/administration/troubleshooting/img/OneLogin-encryption.pngbin34245 -> 9980 bytes
-rw-r--r--doc/administration/troubleshooting/img/OneLogin-parameters.pngbin17139 -> 5051 bytes
-rw-r--r--doc/administration/troubleshooting/img/OneLogin-userAdd.pngbin32711 -> 10021 bytes
-rw-r--r--doc/administration/troubleshooting/kubernetes_cheat_sheet.md14
-rw-r--r--doc/administration/troubleshooting/postgresql.md6
-rw-r--r--doc/administration/troubleshooting/sidekiq.md40
-rw-r--r--doc/administration/uploads.md4
-rw-r--r--doc/api/README.md22
-rw-r--r--doc/api/admin_sidekiq_queues.md47
-rw-r--r--doc/api/api_resources.md71
-rw-r--r--doc/api/appearance.md4
-rw-r--r--doc/api/audit_events.md18
-rw-r--r--doc/api/boards.md20
-rw-r--r--doc/api/broadcast_messages.md48
-rw-r--r--doc/api/commits.md72
-rw-r--r--doc/api/container_registry.md32
-rw-r--r--doc/api/custom_attributes.md8
-rw-r--r--doc/api/dependencies.md2
-rw-r--r--doc/api/deploy_keys.md12
-rw-r--r--doc/api/deploy_tokens.md238
-rw-r--r--doc/api/deployments.md10
-rw-r--r--doc/api/discussions.md62
-rw-r--r--doc/api/environments.md12
-rw-r--r--doc/api/epic_issues.md22
-rw-r--r--doc/api/epic_links.md10
-rw-r--r--doc/api/epics.md20
-rw-r--r--doc/api/error_tracking.md4
-rw-r--r--doc/api/events.md8
-rw-r--r--doc/api/feature_flag_specs.md12
-rw-r--r--doc/api/feature_flags.md8
-rw-r--r--doc/api/features.md6
-rw-r--r--doc/api/geo_nodes.md18
-rw-r--r--doc/api/graphql/reference/gitlab_schema.graphql804
-rw-r--r--doc/api/graphql/reference/gitlab_schema.json23574
-rw-r--r--doc/api/graphql/reference/index.md136
-rw-r--r--doc/api/group_activity_analytics.md55
-rw-r--r--doc/api/group_badges.md12
-rw-r--r--doc/api/group_boards.md20
-rw-r--r--doc/api/group_clusters.md41
-rw-r--r--doc/api/group_import_export.md39
-rw-r--r--doc/api/group_labels.md14
-rw-r--r--doc/api/group_level_variables.md42
-rw-r--r--doc/api/group_milestones.md16
-rw-r--r--doc/api/groups.md73
-rw-r--r--doc/api/import.md2
-rw-r--r--doc/api/issue_links.md6
-rw-r--r--doc/api/issues.md65
-rw-r--r--doc/api/issues_statistics.md12
-rw-r--r--doc/api/jobs.md28
-rw-r--r--doc/api/keys.md2
-rw-r--r--doc/api/labels.md16
-rw-r--r--doc/api/license.md8
-rw-r--r--doc/api/lint.md2
-rw-r--r--doc/api/managed_licenses.md10
-rw-r--r--doc/api/markdown.md4
-rw-r--r--doc/api/members.md53
-rw-r--r--doc/api/merge_request_approvals.md34
-rw-r--r--doc/api/merge_request_context_commits.md28
-rw-r--r--doc/api/merge_requests.md84
-rw-r--r--doc/api/merge_trains.md80
-rw-r--r--doc/api/milestones.md18
-rw-r--r--doc/api/namespaces.md6
-rw-r--r--doc/api/notes.md40
-rw-r--r--doc/api/notification_settings.md14
-rw-r--r--doc/api/oauth2.md49
-rw-r--r--doc/api/packages.md21
-rw-r--r--doc/api/pipeline_schedules.md22
-rw-r--r--doc/api/pipeline_triggers.md20
-rw-r--r--doc/api/pipelines.md30
-rw-r--r--doc/api/project_aliases.md18
-rw-r--r--doc/api/project_badges.md12
-rw-r--r--doc/api/project_clusters.md38
-rw-r--r--doc/api/project_import_export.md16
-rw-r--r--doc/api/project_level_variables.md20
-rw-r--r--doc/api/project_snippets.md18
-rw-r--r--doc/api/project_statistics.md2
-rw-r--r--doc/api/project_templates.md5
-rw-r--r--doc/api/projects.md91
-rw-r--r--doc/api/protected_branches.md12
-rw-r--r--doc/api/protected_environments.md18
-rw-r--r--doc/api/protected_tags.md10
-rw-r--r--doc/api/releases/index.md66
-rw-r--r--doc/api/releases/links.md10
-rw-r--r--doc/api/remote_mirrors.md121
-rw-r--r--doc/api/repositories.md42
-rw-r--r--doc/api/repository_files.md24
-rw-r--r--doc/api/repository_submodules.md2
-rw-r--r--doc/api/resource_label_events.md14
-rw-r--r--doc/api/runners.md48
-rw-r--r--doc/api/scim.md4
-rw-r--r--doc/api/search.md8
-rw-r--r--doc/api/services.md212
-rw-r--r--doc/api/settings.md22
-rw-r--r--doc/api/sidekiq_metrics.md8
-rw-r--r--doc/api/statistics.md2
-rw-r--r--doc/api/suggestions.md2
-rw-r--r--doc/api/system_hooks.md8
-rw-r--r--doc/api/tags.md12
-rw-r--r--doc/api/templates/dockerfiles.md4
-rw-r--r--doc/api/templates/licenses.md4
-rw-r--r--doc/api/todos.md6
-rw-r--r--doc/api/users.md290
-rw-r--r--doc/api/version.md2
-rw-r--r--doc/api/visual_review_discussions.md2
-rw-r--r--doc/api/vulnerability_findings.md7
-rw-r--r--doc/api/wikis.md12
-rw-r--r--doc/ci/README.md13
-rw-r--r--doc/ci/caching/index.md13
-rw-r--r--doc/ci/cloud_deployment/index.md58
-rw-r--r--doc/ci/docker/using_docker_build.md20
-rw-r--r--doc/ci/docker/using_docker_images.md12
-rw-r--r--doc/ci/environments.md74
-rw-r--r--doc/ci/environments/img/incremental_rollouts_play_v12_7.pngbin94182 -> 26784 bytes
-rw-r--r--doc/ci/environments/img/timed_rollout_v12_7.pngbin73699 -> 24016 bytes
-rw-r--r--doc/ci/examples/deployment/composer-npm-deploy.md4
-rw-r--r--doc/ci/examples/devops_and_game_dev_with_gitlab_ci_cd/index.md8
-rw-r--r--doc/ci/examples/end_to_end_testing_webdriverio/index.md2
-rw-r--r--doc/ci/examples/laravel_with_gitlab_and_envoy/index.md6
-rw-r--r--doc/ci/examples/license_management.md4
-rw-r--r--doc/ci/examples/test-scala-application.md2
-rw-r--r--doc/ci/examples/test_phoenix_app_with_gitlab_ci_cd/img/select_template_v12_6.pngbin51825 -> 32253 bytes
-rw-r--r--doc/ci/examples/test_phoenix_app_with_gitlab_ci_cd/img/set_up_ci_v12_6.pngbin93168 -> 57203 bytes
-rw-r--r--doc/ci/img/collapsible_log_v12_6.pngbin96571 -> 0 bytes
-rw-r--r--doc/ci/img/ecs_dashboard_v12_9.pngbin0 -> 109395 bytes
-rw-r--r--doc/ci/img/environment_auto_stop_v12_8.pngbin43534 -> 16587 bytes
-rw-r--r--doc/ci/img/environments_deployment_cluster_v12_8.pngbin0 -> 17704 bytes
-rw-r--r--doc/ci/img/parent_pipeline_graph_expanded_v12_6.pngbin298902 -> 96087 bytes
-rw-r--r--doc/ci/interactive_web_terminal/index.md2
-rw-r--r--doc/ci/introduction/index.md2
-rw-r--r--doc/ci/jenkins/index.md129
-rw-r--r--doc/ci/junit_test_reports.md12
-rw-r--r--doc/ci/large_repositories/index.md2
-rw-r--r--doc/ci/merge_request_pipelines/index.md4
-rw-r--r--doc/ci/merge_request_pipelines/pipelines_for_merged_results/index.md30
-rw-r--r--doc/ci/merge_request_pipelines/pipelines_for_merged_results/merge_trains/index.md16
-rw-r--r--doc/ci/multi_project_pipelines.md62
-rw-r--r--doc/ci/parent_child_pipelines.md63
-rw-r--r--doc/ci/pipelines.md549
-rw-r--r--doc/ci/pipelines/img/collapsible_log_v12_6.pngbin0 -> 96471 bytes
-rw-r--r--doc/ci/pipelines/img/job_artifacts_browser.png (renamed from doc/user/project/pipelines/img/job_artifacts_browser.png)bin3944 -> 3944 bytes
-rw-r--r--doc/ci/pipelines/img/job_artifacts_browser_button.png (renamed from doc/user/project/pipelines/img/job_artifacts_browser_button.png)bin5534 -> 5534 bytes
-rw-r--r--doc/ci/pipelines/img/job_artifacts_builds_page.png (renamed from doc/user/project/pipelines/img/job_artifacts_builds_page.png)bin15191 -> 15191 bytes
-rw-r--r--doc/ci/pipelines/img/job_artifacts_pipelines_page.png (renamed from doc/user/project/pipelines/img/job_artifacts_pipelines_page.png)bin16403 -> 16403 bytes
-rw-r--r--doc/ci/pipelines/img/job_failure_reason.png (renamed from doc/ci/img/job_failure_reason.png)bin5288 -> 5288 bytes
-rw-r--r--doc/ci/pipelines/img/job_latest_artifacts_browser.png (renamed from doc/user/project/pipelines/img/job_latest_artifacts_browser.png)bin10551 -> 10551 bytes
-rw-r--r--doc/ci/pipelines/img/manual_job_variables.png (renamed from doc/ci/img/manual_job_variables.png)bin111239 -> 111239 bytes
-rw-r--r--doc/ci/pipelines/img/pipeline-delete.png (renamed from doc/ci/img/pipeline-delete.png)bin9658 -> 9658 bytes
-rw-r--r--doc/ci/pipelines/img/pipeline_incremental_rollout.png (renamed from doc/ci/img/pipeline_incremental_rollout.png)bin4794 -> 4794 bytes
-rw-r--r--doc/ci/pipelines/img/pipeline_schedule_play.png (renamed from doc/user/project/pipelines/img/pipeline_schedule_play.png)bin11400 -> 11400 bytes
-rw-r--r--doc/ci/pipelines/img/pipeline_schedule_variables.png (renamed from doc/user/project/pipelines/img/pipeline_schedule_variables.png)bin6300 -> 6300 bytes
-rw-r--r--doc/ci/pipelines/img/pipeline_schedules_list.png (renamed from doc/user/project/pipelines/img/pipeline_schedules_list.png)bin12948 -> 12948 bytes
-rw-r--r--doc/ci/pipelines/img/pipeline_schedules_new_form.png (renamed from doc/user/project/pipelines/img/pipeline_schedules_new_form.png)bin20090 -> 20090 bytes
-rw-r--r--doc/ci/pipelines/img/pipeline_schedules_ownership.png (renamed from doc/user/project/pipelines/img/pipeline_schedules_ownership.png)bin5004 -> 5004 bytes
-rw-r--r--doc/ci/pipelines/img/pipelines.png (renamed from doc/ci/img/pipelines.png)bin6298 -> 6298 bytes
-rw-r--r--doc/ci/pipelines/img/pipelines_duration_chart.pngbin0 -> 10587 bytes
-rw-r--r--doc/ci/pipelines/img/pipelines_grouped.png (renamed from doc/ci/img/pipelines_grouped.png)bin12888 -> 12888 bytes
-rw-r--r--doc/ci/pipelines/img/pipelines_index.png (renamed from doc/ci/img/pipelines_index.png)bin14896 -> 14896 bytes
-rw-r--r--doc/ci/pipelines/img/pipelines_mini_graph.png (renamed from doc/ci/img/pipelines_mini_graph.png)bin4671 -> 4671 bytes
-rw-r--r--doc/ci/pipelines/img/pipelines_mini_graph_simple.png (renamed from doc/ci/img/pipelines_mini_graph_simple.png)bin961 -> 961 bytes
-rw-r--r--doc/ci/pipelines/img/pipelines_mini_graph_sorting.png (renamed from doc/ci/img/pipelines_mini_graph_sorting.png)bin10742 -> 10742 bytes
-rw-r--r--doc/ci/pipelines/img/pipelines_settings_badges.png (renamed from doc/user/project/pipelines/img/pipelines_settings_badges.png)bin21137 -> 21137 bytes
-rw-r--r--doc/ci/pipelines/img/pipelines_settings_test_coverage.png (renamed from doc/user/project/pipelines/img/pipelines_settings_test_coverage.png)bin2549 -> 2549 bytes
-rw-r--r--doc/ci/pipelines/img/pipelines_success_chart.pngbin0 -> 23249 bytes
-rw-r--r--doc/ci/pipelines/img/pipelines_test_coverage_build.png (renamed from doc/user/project/pipelines/img/pipelines_test_coverage_build.png)bin4481 -> 4481 bytes
-rw-r--r--doc/ci/pipelines/img/pipelines_test_coverage_mr_widget.png (renamed from doc/user/project/pipelines/img/pipelines_test_coverage_mr_widget.png)bin6375 -> 6375 bytes
-rw-r--r--doc/ci/pipelines/index.md567
-rw-r--r--doc/ci/pipelines/job_artifacts.md215
-rw-r--r--doc/ci/pipelines/pipeline_architectures.md269
-rw-r--r--doc/ci/pipelines/schedules.md142
-rw-r--r--doc/ci/pipelines/settings.md309
-rw-r--r--doc/ci/quick_start/README.md7
-rw-r--r--doc/ci/review_apps/img/enable_review_app_v12_8.pngbin46424 -> 14013 bytes
-rw-r--r--doc/ci/review_apps/index.md6
-rw-r--r--doc/ci/runners/README.md14
-rw-r--r--doc/ci/services/postgres.md15
-rw-r--r--doc/ci/services/redis.md2
-rw-r--r--doc/ci/ssh_keys/README.md2
-rw-r--r--doc/ci/triggers/README.md6
-rw-r--r--doc/ci/variables/README.md34
-rw-r--r--doc/ci/variables/img/new_custom_variables_example.pngbin68252 -> 0 bytes
-rw-r--r--doc/ci/variables/predefined_variables.md5
-rw-r--r--doc/ci/yaml/README.md271
-rw-r--r--doc/development/README.md31
-rw-r--r--doc/development/adding_database_indexes.md2
-rw-r--r--doc/development/api_graphql_styleguide.md72
-rw-r--r--doc/development/api_styleguide.md8
-rw-r--r--doc/development/application_limits.md73
-rw-r--r--doc/development/architecture.md16
-rw-r--r--doc/development/background_migrations.md18
-rw-r--r--doc/development/chaos_endpoints.md10
-rw-r--r--doc/development/code_comments.md2
-rw-r--r--doc/development/code_review.md141
-rw-r--r--doc/development/contributing/design.md8
-rw-r--r--doc/development/contributing/issue_workflow.md47
-rw-r--r--doc/development/contributing/merge_request_workflow.md1
-rw-r--r--doc/development/contributing/style_guides.md8
-rw-r--r--doc/development/creating_enums.md2
-rw-r--r--doc/development/dangerbot.md22
-rw-r--r--doc/development/database/add_foreign_key_to_existing_column.md136
-rw-r--r--doc/development/database_debugging.md2
-rw-r--r--doc/development/database_review.md22
-rw-r--r--doc/development/db_dump.md8
-rw-r--r--doc/development/deleting_migrations.md2
-rw-r--r--doc/development/documentation/index.md72
-rw-r--r--doc/development/documentation/site_architecture/index.md46
-rw-r--r--doc/development/documentation/site_architecture/release_process.md16
-rw-r--r--doc/development/documentation/structure.md2
-rw-r--r--doc/development/documentation/styleguide.md201
-rw-r--r--doc/development/documentation/workflow.md148
-rw-r--r--doc/development/elasticsearch.md2
-rw-r--r--doc/development/event_tracking/backend.md37
-rw-r--r--doc/development/event_tracking/frontend.md146
-rw-r--r--doc/development/event_tracking/index.md75
-rw-r--r--doc/development/experiment_guide/index.md4
-rw-r--r--doc/development/fe_guide/dependencies.md40
-rw-r--r--doc/development/fe_guide/development_process.md4
-rw-r--r--doc/development/fe_guide/event_tracking.md4
-rw-r--r--doc/development/fe_guide/graphql.md130
-rw-r--r--doc/development/fe_guide/icons.md2
-rw-r--r--doc/development/fe_guide/index.md46
-rw-r--r--doc/development/fe_guide/principles.md4
-rw-r--r--doc/development/fe_guide/style/javascript.md15
-rw-r--r--doc/development/fe_guide/vuex.md11
-rw-r--r--doc/development/feature_flags/controls.md2
-rw-r--r--doc/development/feature_flags/process.md9
-rw-r--r--doc/development/file_storage.md4
-rw-r--r--doc/development/geo.md5
-rw-r--r--doc/development/geo/framework.md209
-rw-r--r--doc/development/gitaly.md8
-rw-r--r--doc/development/github_importer.md30
-rw-r--r--doc/development/go_guide/index.md4
-rw-r--r--doc/development/i18n/proofreader.md1
-rw-r--r--doc/development/img/reference_architecture.pngbin112468 -> 44285 bytes
-rw-r--r--doc/development/import_export.md47
-rw-r--r--doc/development/import_project.md18
-rw-r--r--doc/development/insert_into_tables_in_batches.md196
-rw-r--r--doc/development/instrumentation.md4
-rw-r--r--doc/development/integrations/jira_connect.md2
-rw-r--r--doc/development/integrations/secure.md11
-rw-r--r--doc/development/integrations/secure_partner_integration.md103
-rw-r--r--doc/development/issuable-like-models.md6
-rw-r--r--doc/development/kubernetes.md2
-rw-r--r--doc/development/licensing.md5
-rw-r--r--doc/development/logging.md61
-rw-r--r--doc/development/mass_insert.md2
-rw-r--r--doc/development/merge_request_performance_guidelines.md177
-rw-r--r--doc/development/migration_style_guide.md88
-rw-r--r--doc/development/module_with_instance_variables.md4
-rw-r--r--doc/development/namespaces_storage_statistics.md6
-rw-r--r--doc/development/new_fe_guide/development/accessibility.md2
-rw-r--r--doc/development/new_fe_guide/index.md2
-rw-r--r--doc/development/packages.md177
-rw-r--r--doc/development/performance.md54
-rw-r--r--doc/development/permissions.md4
-rw-r--r--doc/development/pipelines.md290
-rw-r--r--doc/development/profiling.md13
-rw-r--r--doc/development/prometheus_metrics.md5
-rw-r--r--doc/development/query_recorder.md110
-rw-r--r--doc/development/rails_initializers.md16
-rw-r--r--doc/development/redis.md2
-rw-r--r--doc/development/reference_processing.md157
-rw-r--r--doc/development/scalability.md2
-rw-r--r--doc/development/serializing_data.md2
-rw-r--r--doc/development/shell_commands.md2
-rw-r--r--doc/development/sidekiq_debugging.md19
-rw-r--r--doc/development/sidekiq_style_guide.md128
-rw-r--r--doc/development/testing_guide/best_practices.md7
-rw-r--r--doc/development/testing_guide/end_to_end/best_practices.md2
-rw-r--r--doc/development/testing_guide/end_to_end/index.md26
-rw-r--r--doc/development/testing_guide/end_to_end/quick_start_guide.md17
-rw-r--r--doc/development/testing_guide/end_to_end/rspec_metadata_tests.md15
-rw-r--r--doc/development/testing_guide/end_to_end/running_tests_that_require_special_setup.md50
-rw-r--r--doc/development/testing_guide/flaky_tests.md6
-rw-r--r--doc/development/testing_guide/frontend_testing.md6
-rw-r--r--doc/development/testing_guide/review_apps.md103
-rw-r--r--doc/development/testing_guide/testing_levels.md6
-rw-r--r--doc/development/testing_guide/testing_migrations_guide.md14
-rw-r--r--doc/development/what_requires_downtime.md5
-rw-r--r--doc/gitlab-basics/create-your-ssh-keys.md7
-rw-r--r--doc/install/aws/img/aws_ha_architecture_diagram.pngbin133747 -> 40323 bytes
-rw-r--r--doc/install/aws/index.md173
-rw-r--r--doc/install/installation.md38
-rw-r--r--doc/install/requirements.md34
-rw-r--r--doc/integration/bitbucket.md5
-rw-r--r--doc/integration/elasticsearch.md34
-rw-r--r--doc/integration/img/jira_dev_panel_jira_setup_1-1.pngbin45848 -> 13286 bytes
-rw-r--r--doc/integration/img/jira_dev_panel_setup_com_1.pngbin0 -> 15392 bytes
-rw-r--r--doc/integration/img/jira_dev_panel_setup_com_2.pngbin0 -> 22370 bytes
-rw-r--r--doc/integration/img/jira_dev_panel_setup_com_3.pngbin0 -> 7639 bytes
-rw-r--r--doc/integration/jira_development_panel.md54
-rw-r--r--doc/integration/kerberos.md3
-rw-r--r--doc/integration/omniauth.md6
-rw-r--r--doc/integration/saml.md15
-rw-r--r--doc/integration/shibboleth.md13
-rw-r--r--doc/integration/sourcegraph.md4
-rw-r--r--doc/integration/twitter.md2
-rw-r--r--doc/integration/ultra_auth.md2
-rw-r--r--doc/integration/vault.md137
-rw-r--r--doc/legal/corporate_contributor_license_agreement.md2
-rw-r--r--doc/policy/maintenance.md22
-rw-r--r--doc/raketasks/backup_restore.md45
-rw-r--r--doc/raketasks/cleanup.md13
-rw-r--r--doc/raketasks/features.md2
-rw-r--r--doc/raketasks/generate_sample_prometheus_data.md2
-rw-r--r--doc/raketasks/import.md12
-rw-r--r--doc/raketasks/list_repos.md4
-rw-r--r--doc/raketasks/user_management.md8
-rw-r--r--doc/security/crime_vulnerability.md2
-rw-r--r--doc/security/rack_attack.md6
-rw-r--r--doc/security/unlock_user.md4
-rw-r--r--doc/security/webhooks.md13
-rw-r--r--doc/ssh/README.md56
-rw-r--r--doc/subscriptions/index.md490
-rw-r--r--doc/system_hooks/system_hooks.md20
-rw-r--r--doc/telemetry/backend.md34
-rw-r--r--doc/telemetry/frontend.md167
-rw-r--r--doc/telemetry/index.md72
-rw-r--r--doc/topics/airgap/index.md10
-rw-r--r--doc/topics/application_development_platform/index.md2
-rw-r--r--doc/topics/autodevops/index.md93
-rw-r--r--doc/topics/autodevops/quick_start_guide.md2
-rw-r--r--doc/topics/git/numerous_undo_possibilities_in_git/index.md6
-rw-r--r--doc/topics/git/troubleshooting_git.md27
-rw-r--r--doc/topics/web_application_firewall/img/guide_waf_ingress_installation.pngbin54834 -> 0 bytes
-rw-r--r--doc/topics/web_application_firewall/img/guide_waf_ingress_installation_v12_9.pngbin0 -> 24830 bytes
-rw-r--r--doc/topics/web_application_firewall/img/guide_waf_ingress_save_changes_v12_9.pngbin0 -> 37120 bytes
-rw-r--r--doc/topics/web_application_firewall/index.md2
-rw-r--r--doc/topics/web_application_firewall/quick_start_guide.md77
-rw-r--r--doc/university/training/topics/subtree.md8
-rw-r--r--doc/update/README.md21
-rw-r--r--doc/update/mysql_to_postgresql.md16
-rw-r--r--doc/update/patch_versions.md2
-rw-r--r--doc/update/restore_after_failure.md10
-rw-r--r--doc/update/upgrading_from_ce_to_ee.md2
-rw-r--r--doc/user/admin_area/activating_deactivating_users.md4
-rw-r--r--doc/user/admin_area/appearance.md4
-rw-r--r--doc/user/admin_area/blocking_unblocking_users.md4
-rw-r--r--doc/user/admin_area/broadcast_messages.md2
-rw-r--r--doc/user/admin_area/index.md56
-rw-r--r--doc/user/admin_area/settings/account_and_limit_settings.md2
-rw-r--r--doc/user/admin_area/settings/continuous_integration.md19
-rw-r--r--doc/user/admin_area/settings/gitaly_timeouts.md34
-rw-r--r--doc/user/admin_area/settings/img/admin_package_registry_npm_package_requests_forward.pngbin0 -> 28630 bytes
-rw-r--r--doc/user/admin_area/settings/img/gitaly_timeouts.pngbin0 -> 24654 bytes
-rw-r--r--doc/user/admin_area/settings/index.md1
-rw-r--r--doc/user/admin_area/settings/usage_statistics.md295
-rw-r--r--doc/user/admin_area/settings/visibility_and_access_controls.md2
-rw-r--r--doc/user/analytics/img/code_review_analytics_v12_8.pngbin110305 -> 40082 bytes
-rw-r--r--doc/user/analytics/productivity_analytics.md6
-rw-r--r--doc/user/analytics/value_stream_analytics.md21
-rw-r--r--doc/user/application_security/compliance_dashboard/index.md30
-rw-r--r--doc/user/application_security/configuration/index.md4
-rw-r--r--doc/user/application_security/container_scanning/img/container_scanning.pngbin32549 -> 0 bytes
-rw-r--r--doc/user/application_security/container_scanning/img/container_scanning_v12_9.pngbin0 -> 73869 bytes
-rw-r--r--doc/user/application_security/container_scanning/index.md203
-rw-r--r--doc/user/application_security/dast/img/dast_all.pngbin25844 -> 0 bytes
-rw-r--r--doc/user/application_security/dast/img/dast_all_v12_9.pngbin0 -> 39268 bytes
-rw-r--r--doc/user/application_security/dast/img/dast_single.pngbin69353 -> 0 bytes
-rw-r--r--doc/user/application_security/dast/img/dast_single_v12_9.pngbin0 -> 96419 bytes
-rw-r--r--doc/user/application_security/dast/index.md39
-rw-r--r--doc/user/application_security/dependency_list/index.md4
-rw-r--r--doc/user/application_security/dependency_scanning/index.md67
-rw-r--r--doc/user/application_security/img/multi_select_v12_9.pngbin0 -> 32804 bytes
-rw-r--r--doc/user/application_security/img/outdated_report_branch_v12_9.pngbin0 -> 15172 bytes
-rw-r--r--doc/user/application_security/img/outdated_report_pipeline_v12_9.pngbin0 -> 16694 bytes
-rw-r--r--doc/user/application_security/img/security_configuration_page_v12_6.pngbin68210 -> 0 bytes
-rw-r--r--doc/user/application_security/img/security_configuration_page_v12_9.pngbin0 -> 51545 bytes
-rw-r--r--doc/user/application_security/index.md90
-rw-r--r--doc/user/application_security/license_compliance/index.md325
-rw-r--r--doc/user/application_security/license_management/index.md4
-rw-r--r--doc/user/application_security/offline_deployments/index.md55
-rw-r--r--doc/user/application_security/sast/img/sast.pngbin24876 -> 0 bytes
-rw-r--r--doc/user/application_security/sast/img/sast_v12_9.pngbin0 -> 40172 bytes
-rw-r--r--doc/user/application_security/sast/index.md82
-rw-r--r--doc/user/application_security/security_dashboard/index.md4
-rw-r--r--doc/user/asciidoc.md4
-rw-r--r--doc/user/clusters/applications.md300
-rw-r--r--doc/user/clusters/crossplane.md70
-rw-r--r--doc/user/clusters/img/threat_monitoring_v12_9.pngbin0 -> 53242 bytes
-rw-r--r--doc/user/compliance/compliance_dashboard/img/compliance_dashboard_v12_8.png (renamed from doc/user/application_security/compliance_dashboard/img/compliance_dashboard_v12_8.png)bin66215 -> 66215 bytes
-rw-r--r--doc/user/compliance/compliance_dashboard/index.md31
-rw-r--r--doc/user/compliance/index.md10
-rw-r--r--doc/user/compliance/license_compliance/img/license_compliance.png (renamed from doc/user/application_security/license_compliance/img/license_compliance.png)bin5184 -> 5184 bytes
-rw-r--r--doc/user/compliance/license_compliance/img/license_compliance_add_license_v12_3.png (renamed from doc/user/application_security/license_compliance/img/license_compliance_add_license_v12_3.png)bin14046 -> 14046 bytes
-rw-r--r--doc/user/compliance/license_compliance/img/license_compliance_decision.png (renamed from doc/user/application_security/license_compliance/img/license_compliance_decision.png)bin5975 -> 5975 bytes
-rw-r--r--doc/user/compliance/license_compliance/img/license_compliance_pipeline_tab_v12_3.png (renamed from doc/user/application_security/license_compliance/img/license_compliance_pipeline_tab_v12_3.png)bin16435 -> 16435 bytes
-rw-r--r--doc/user/compliance/license_compliance/img/license_compliance_search_v12_3.png (renamed from doc/user/application_security/license_compliance/img/license_compliance_search_v12_3.png)bin26074 -> 26074 bytes
-rw-r--r--doc/user/compliance/license_compliance/img/license_compliance_settings_v12_3.png (renamed from doc/user/application_security/license_compliance/img/license_compliance_settings_v12_3.png)bin14766 -> 14766 bytes
-rw-r--r--doc/user/compliance/license_compliance/img/license_list_v12_6.png (renamed from doc/user/application_security/license_compliance/img/license_list_v12_6.png)bin30154 -> 30154 bytes
-rw-r--r--doc/user/compliance/license_compliance/img/policies_maintainer_add_v12_9.pngbin0 -> 22423 bytes
-rw-r--r--doc/user/compliance/license_compliance/img/policies_maintainer_edit_v12_9.pngbin0 -> 35281 bytes
-rw-r--r--doc/user/compliance/license_compliance/img/policies_v12_9.pngbin0 -> 66698 bytes
-rw-r--r--doc/user/compliance/license_compliance/index.md342
-rw-r--r--doc/user/discussions/img/suggestion_code_block_output_v12_8.pngbin29769 -> 15870 bytes
-rw-r--r--doc/user/gitlab_com/index.md40
-rw-r--r--doc/user/group/epics/img/epic_view_roadmap_v12.3.pngbin50491 -> 0 bytes
-rw-r--r--doc/user/group/epics/img/epic_view_roadmap_v12_9.pngbin0 -> 127201 bytes
-rw-r--r--doc/user/group/epics/index.md60
-rw-r--r--doc/user/group/index.md20
-rw-r--r--doc/user/group/issues_analytics/img/issues_created_per_month_v12_8.pngbin66854 -> 26718 bytes
-rw-r--r--doc/user/group/issues_analytics/index.md5
-rw-r--r--doc/user/group/roadmap/img/roadmap_view.pngbin49757 -> 0 bytes
-rw-r--r--doc/user/group/roadmap/img/roadmap_view_v12_9.pngbin0 -> 118218 bytes
-rw-r--r--doc/user/group/roadmap/index.md21
-rw-r--r--doc/user/group/saml_sso/index.md84
-rw-r--r--doc/user/group/subgroups/index.md5
-rw-r--r--doc/user/img/markdown_copy_from_spreadsheet_v12_7.pngbin371532 -> 180083 bytes
-rw-r--r--doc/user/img/markdown_paste_table_v12_7.pngbin153855 -> 89728 bytes
-rw-r--r--doc/user/incident_management/index.md11
-rw-r--r--doc/user/index.md2
-rw-r--r--doc/user/instance_statistics/user_cohorts.md2
-rw-r--r--doc/user/markdown.md121
-rw-r--r--doc/user/packages/conan_repository/index.md4
-rw-r--r--doc/user/packages/container_registry/img/expiration-policy-app.pngbin93286 -> 32054 bytes
-rw-r--r--doc/user/packages/index.md2
-rw-r--r--doc/user/packages/maven_repository/index.md10
-rw-r--r--doc/user/packages/npm_registry/index.md22
-rw-r--r--doc/user/packages/nuget_repository/img/visual_studio_adding_nuget_source.pngbin115238 -> 36730 bytes
-rw-r--r--doc/user/packages/nuget_repository/img/visual_studio_nuget_source_added.pngbin75079 -> 23888 bytes
-rw-r--r--doc/user/packages/nuget_repository/index.md151
-rw-r--r--doc/user/packages/workflows/monorepo.md6
-rw-r--r--doc/user/permissions.md52
-rw-r--r--doc/user/profile/account/two_factor_authentication.md22
-rw-r--r--doc/user/profile/notifications.md28
-rw-r--r--doc/user/project/badges.md6
-rw-r--r--doc/user/project/clusters/add_remove_clusters.md2
-rw-r--r--doc/user/project/clusters/img/kubernetes_pod_logs_v12_8.pngbin152582 -> 0 bytes
-rw-r--r--doc/user/project/clusters/img/kubernetes_pod_logs_v12_9.pngbin0 -> 117938 bytes
-rw-r--r--doc/user/project/clusters/index.md30
-rw-r--r--doc/user/project/clusters/kubernetes_pod_logs.md23
-rw-r--r--doc/user/project/clusters/serverless/aws.md279
-rw-r--r--doc/user/project/clusters/serverless/img/deploy-stage.pngbin5036 -> 0 bytes
-rw-r--r--doc/user/project/clusters/serverless/img/function-list_v12_7.pngbin0 -> 18551 bytes
-rw-r--r--doc/user/project/clusters/serverless/img/knative-app.pngbin9440 -> 0 bytes
-rw-r--r--doc/user/project/clusters/serverless/img/sam-api-endpoint.pngbin0 -> 29991 bytes
-rw-r--r--doc/user/project/clusters/serverless/img/sam-complete-raw.pngbin0 -> 38847 bytes
-rw-r--r--doc/user/project/clusters/serverless/index.md64
-rw-r--r--doc/user/project/code_owners.md14
-rw-r--r--doc/user/project/deploy_tokens/index.md36
-rw-r--r--doc/user/project/description_templates.md2
-rw-r--r--doc/user/project/img/deploy_boards_landing_page.pngbin43047 -> 14454 bytes
-rw-r--r--doc/user/project/img/issue_boards_blocked_icon_v12_8.pngbin66310 -> 31841 bytes
-rw-r--r--doc/user/project/img/issue_boards_multi_select_v12_4.png (renamed from doc/user/project/img/issue_boards_multi_select.png)bin6205 -> 6205 bytes
-rw-r--r--doc/user/project/img/labels_key_value_v12_1.pngbin166480 -> 55495 bytes
-rw-r--r--doc/user/project/import/bitbucket_server.md2
-rw-r--r--doc/user/project/import/gemnasium.md4
-rw-r--r--doc/user/project/import/github.md23
-rw-r--r--doc/user/project/import/index.md29
-rw-r--r--doc/user/project/index.md14
-rw-r--r--doc/user/project/insights/index.md17
-rw-r--r--doc/user/project/integrations/gitlab_slack_application.md8
-rw-r--r--doc/user/project/integrations/hipchat.md2
-rw-r--r--doc/user/project/integrations/img/grafana_embedded.pngbin64082 -> 28071 bytes
-rw-r--r--doc/user/project/integrations/img/prometheus_cluster_health_embed_v12_9.pngbin0 -> 50178 bytes
-rw-r--r--doc/user/project/integrations/img/prometheus_dashboard_edit_metric_link_v_12_9.pngbin0 -> 29178 bytes
-rw-r--r--doc/user/project/integrations/img/prometheus_dashboard_label_variable_shorthand.pngbin0 -> 3897 bytes
-rw-r--r--doc/user/project/integrations/img/prometheus_dashboard_label_variables.pngbin0 -> 8076 bytes
-rw-r--r--doc/user/project/integrations/img/prometheus_dashboard_repeated_label.pngbin0 -> 3116 bytes
-rw-r--r--doc/user/project/integrations/irker.md2
-rw-r--r--doc/user/project/integrations/jira.md10
-rw-r--r--doc/user/project/integrations/prometheus.md187
-rw-r--r--doc/user/project/integrations/prometheus_library/kubernetes.md8
-rw-r--r--doc/user/project/integrations/prometheus_units.md110
-rw-r--r--doc/user/project/integrations/slack.md6
-rw-r--r--doc/user/project/integrations/webhooks.md29
-rw-r--r--doc/user/project/issue_board.md31
-rw-r--r--doc/user/project/issues/csv_import.md8
-rw-r--r--doc/user/project/issues/design_management.md50
-rw-r--r--doc/user/project/issues/img/design_drag_and_drop_uploads_v12_9.pngbin0 -> 402118 bytes
-rw-r--r--doc/user/project/issues/img/issue_health_status_v12_9.pngbin0 -> 10509 bytes
-rw-r--r--doc/user/project/issues/img/related_issue_block_v12_8.pngbin117786 -> 35817 bytes
-rw-r--r--doc/user/project/issues/img/related_issues_add_v12_8.pngbin105785 -> 32939 bytes
-rw-r--r--doc/user/project/issues/img/related_issues_remove_v12_8.pngbin36051 -> 10708 bytes
-rw-r--r--doc/user/project/issues/index.md45
-rw-r--r--doc/user/project/issues/issue_data_and_actions.md8
-rw-r--r--doc/user/project/labels.md2
-rw-r--r--doc/user/project/merge_requests/accessibility_testing.md57
-rw-r--r--doc/user/project/merge_requests/cherry_pick_changes.md14
-rw-r--r--doc/user/project/merge_requests/code_quality.md36
-rw-r--r--doc/user/project/merge_requests/img/approvals_premium_mr_widget_v12_7.pngbin198351 -> 50214 bytes
-rw-r--r--doc/user/project/merge_requests/img/cherry_pick_mr_timeline_v12_9.pngbin0 -> 29557 bytes
-rw-r--r--doc/user/project/merge_requests/img/mr_approvals_by_code_owners_v12_7.pngbin88749 -> 25594 bytes
-rw-r--r--doc/user/project/merge_requests/img/scoped_to_protected_branch_v12_8.pngbin91714 -> 32970 bytes
-rw-r--r--doc/user/project/merge_requests/img/test_coverage_visualization_v12_9.pngbin0 -> 55119 bytes
-rw-r--r--doc/user/project/merge_requests/index.md12
-rw-r--r--doc/user/project/merge_requests/license_management.md4
-rw-r--r--doc/user/project/merge_requests/reviewing_and_managing_merge_requests.md2
-rw-r--r--doc/user/project/merge_requests/squash_and_merge.md3
-rw-r--r--doc/user/project/merge_requests/test_coverage_visualization.md78
-rw-r--r--doc/user/project/milestones/burndown_charts.md2
-rw-r--r--doc/user/project/new_ci_build_permissions_model.md10
-rw-r--r--doc/user/project/operations/error_tracking.md4
-rw-r--r--doc/user/project/pages/custom_domains_ssl_tls_certification/ssl_tls_concepts.md2
-rw-r--r--doc/user/project/pages/getting_started/new_or_existing_website.md2
-rw-r--r--doc/user/project/pages/index.md2
-rw-r--r--doc/user/project/pages/introduction.md2
-rw-r--r--doc/user/project/pipelines/job_artifacts.md213
-rw-r--r--doc/user/project/pipelines/schedules.md135
-rw-r--r--doc/user/project/pipelines/settings.md275
-rw-r--r--doc/user/project/protected_branches.md2
-rw-r--r--doc/user/project/push_options.md4
-rw-r--r--doc/user/project/releases/img/release_with_milestone_v12_9.pngbin0 -> 27783 bytes
-rw-r--r--doc/user/project/releases/img/releases_count_v12_8.pngbin92444 -> 27622 bytes
-rw-r--r--doc/user/project/releases/img/releases_v12_9.pngbin0 -> 51974 bytes
-rw-r--r--doc/user/project/releases/img/upcoming_release_v12_7.pngbin87736 -> 23246 bytes
-rw-r--r--doc/user/project/releases/index.md93
-rw-r--r--doc/user/project/repository/forking_workflow.md20
-rw-r--r--doc/user/project/repository/img/forking_workflow_fork_button.pngbin27407 -> 10365 bytes
-rw-r--r--doc/user/project/repository/index.md2
-rw-r--r--doc/user/project/repository/jupyter_notebooks/index.md2
-rw-r--r--doc/user/project/repository/reducing_the_repo_size_using_git.md2
-rw-r--r--doc/user/project/repository/repository_mirroring.md5
-rw-r--r--doc/user/project/repository/web_editor.md25
-rw-r--r--doc/user/project/repository/x509_signed_commits/index.md2
-rw-r--r--doc/user/project/settings/import_export.md44
-rw-r--r--doc/user/project/settings/index.md14
-rw-r--r--doc/user/project/web_ide/img/commit_changes_v12_3.pngbin196689 -> 0 bytes
-rw-r--r--doc/user/project/web_ide/img/commit_changes_v12_9.pngbin0 -> 681399 bytes
-rw-r--r--doc/user/project/web_ide/index.md33
-rw-r--r--doc/user/project/wiki/index.md12
-rw-r--r--doc/user/reserved_names.md1
-rw-r--r--doc/user/search/index.md3
-rw-r--r--doc/user/shortcuts.md2
-rw-r--r--generator_templates/active_record/migration/create_table_migration.rb2
-rw-r--r--generator_templates/active_record/migration/migration.rb5
-rw-r--r--generator_templates/rails/post_deployment_migration/migration.rb5
-rw-r--r--jest.config.js14
-rw-r--r--lib/api/admin/sidekiq.rb36
-rw-r--r--lib/api/api.rb4
-rw-r--r--lib/api/api_guard.rb28
-rw-r--r--lib/api/broadcast_messages.rb2
-rw-r--r--lib/api/deploy_tokens.rb149
-rw-r--r--lib/api/deployments.rb3
-rw-r--r--lib/api/discussions.rb4
-rw-r--r--lib/api/entities/broadcast_message.rb2
-rw-r--r--lib/api/entities/commit.rb4
-rw-r--r--lib/api/entities/deploy_token.rb10
-rw-r--r--lib/api/entities/deploy_token_with_token.rb9
-rw-r--r--lib/api/entities/discussion.rb2
-rw-r--r--lib/api/entities/gpg_key.rb2
-rw-r--r--lib/api/entities/group.rb1
-rw-r--r--lib/api/entities/internal.rb19
-rw-r--r--lib/api/entities/internal/pages/lookup_path.rb14
-rw-r--r--lib/api/entities/internal/pages/virtual_domain.rb14
-rw-r--r--lib/api/entities/internal/serverless/lookup_path.rb13
-rw-r--r--lib/api/entities/internal/serverless/virtual_domain.rb14
-rw-r--r--lib/api/entities/milestone_with_stats.rb12
-rw-r--r--lib/api/entities/note_with_gitlab_employee_badge.rb10
-rw-r--r--lib/api/entities/project.rb3
-rw-r--r--lib/api/entities/project_upload.rb21
-rw-r--r--lib/api/entities/release.rb4
-rw-r--r--lib/api/entities/releases/link.rb10
-rw-r--r--lib/api/entities/remote_mirror.rb3
-rw-r--r--lib/api/entities/ssh_key.rb2
-rw-r--r--lib/api/entities/user.rb2
-rw-r--r--lib/api/entities/user_details_with_admin.rb2
-rw-r--r--lib/api/entities/user_with_gitlab_employee_badge.rb9
-rw-r--r--lib/api/files.rb14
-rw-r--r--lib/api/group_variables.rb2
-rw-r--r--lib/api/helpers.rb24
-rw-r--r--lib/api/helpers/custom_validators.rb24
-rw-r--r--lib/api/helpers/file_upload_helpers.rb5
-rw-r--r--lib/api/helpers/groups_helpers.rb3
-rw-r--r--lib/api/helpers/internal_helpers.rb18
-rw-r--r--lib/api/helpers/notes_helpers.rb2
-rw-r--r--lib/api/helpers/projects_helpers.rb2
-rw-r--r--lib/api/internal/base.rb16
-rw-r--r--lib/api/internal/pages.rb23
-rw-r--r--lib/api/issues.rb1
-rw-r--r--lib/api/lsif_data.rb10
-rw-r--r--lib/api/notes.rb2
-rw-r--r--lib/api/pipeline_schedules.rb2
-rw-r--r--lib/api/pipelines.rb4
-rw-r--r--lib/api/project_container_repositories.rb6
-rw-r--r--lib/api/project_import.rb35
-rw-r--r--lib/api/project_snippets.rb5
-rw-r--r--lib/api/projects.rb37
-rw-r--r--lib/api/release/links.rb2
-rw-r--r--lib/api/releases.rb2
-rw-r--r--lib/api/remote_mirrors.rb28
-rw-r--r--lib/api/repositories.rb6
-rw-r--r--lib/api/runner.rb6
-rw-r--r--lib/api/runners.rb4
-rw-r--r--lib/api/todos.rb11
-rw-r--r--lib/api/users.rb37
-rw-r--r--lib/api/version.rb3
-rw-r--r--lib/backup/manager.rb53
-rw-r--r--lib/backup/repository.rb34
-rw-r--r--lib/banzai/filter/broadcast_message_placeholders_filter.rb57
-rw-r--r--lib/banzai/filter/inline_embeds_filter.rb26
-rw-r--r--lib/banzai/filter/inline_grafana_metrics_filter.rb37
-rw-r--r--lib/banzai/filter/inline_metrics_filter.rb19
-rw-r--r--lib/banzai/filter/inline_metrics_redactor_filter.rb22
-rw-r--r--lib/banzai/filter/issuable_state_filter.rb4
-rw-r--r--lib/banzai/filter/label_reference_filter.rb17
-rw-r--r--lib/banzai/filter/reference_filter.rb3
-rw-r--r--lib/banzai/filter/repository_link_filter.rb15
-rw-r--r--lib/banzai/pipeline/gfm_pipeline.rb10
-rw-r--r--lib/banzai/pipeline/post_process_pipeline.rb3
-rw-r--r--lib/declarative_policy.rb2
-rw-r--r--lib/declarative_policy/preferred_scope.rb4
-rw-r--r--lib/feature.rb8
-rw-r--r--lib/gitlab/access/branch_protection.rb4
-rw-r--r--lib/gitlab/application_rate_limiter.rb3
-rw-r--r--lib/gitlab/auth.rb4
-rw-r--r--lib/gitlab/auth/current_user_mode.rb23
-rw-r--r--lib/gitlab/auth/key_status_checker.rb29
-rw-r--r--lib/gitlab/auth/ldap/access.rb16
-rw-r--r--lib/gitlab/auth/ldap/adapter.rb14
-rw-r--r--lib/gitlab/auth/ldap/auth_hash.rb6
-rw-r--r--lib/gitlab/auth/ldap/authentication.rb10
-rw-r--r--lib/gitlab/auth/ldap/config.rb4
-rw-r--r--lib/gitlab/auth/ldap/dn.rb2
-rw-r--r--lib/gitlab/auth/ldap/ldap_connection_error.rb4
-rw-r--r--lib/gitlab/auth/ldap/person.rb14
-rw-r--r--lib/gitlab/auth/ldap/user.rb10
-rw-r--r--lib/gitlab/auth/o_auth/provider.rb15
-rw-r--r--lib/gitlab/auth/o_auth/user.rb16
-rw-r--r--lib/gitlab/authorized_keys.rb2
-rw-r--r--lib/gitlab/background_migration/backfill_snippet_repositories.rb89
-rw-r--r--lib/gitlab/background_migration/cleanup_optimistic_locking_nulls.rb32
-rw-r--r--lib/gitlab/background_migration/link_lfs_objects_projects.rb82
-rw-r--r--lib/gitlab/background_migration/migrate_security_scans.rb13
-rw-r--r--lib/gitlab/background_migration/remove_undefined_occurrence_severity_level.rb13
-rw-r--r--lib/gitlab/background_migration/remove_undefined_vulnerability_severity_level.rb13
-rw-r--r--lib/gitlab/background_migration/update_authorized_keys_file_since.rb13
-rw-r--r--lib/gitlab/background_migration/user_mentions/create_resource_user_mention.rb5
-rw-r--r--lib/gitlab/background_migration/user_mentions/models/commit.rb35
-rw-r--r--lib/gitlab/background_migration/user_mentions/models/commit_user_mention.rb18
-rw-r--r--lib/gitlab/background_migration/user_mentions/models/concerns/isolated_mentionable.rb136
-rw-r--r--lib/gitlab/background_migration/user_mentions/models/concerns/mentionable_migration_methods.rb22
-rw-r--r--lib/gitlab/background_migration/user_mentions/models/design_management/design.rb32
-rw-r--r--lib/gitlab/background_migration/user_mentions/models/design_user_mention.rb18
-rw-r--r--lib/gitlab/background_migration/user_mentions/models/epic.rb4
-rw-r--r--lib/gitlab/background_migration/user_mentions/models/merge_request.rb45
-rw-r--r--lib/gitlab/background_migration/user_mentions/models/merge_request_user_mention.rb18
-rw-r--r--lib/gitlab/background_migration/user_mentions/models/note.rb16
-rw-r--r--lib/gitlab/bitbucket_import/importer.rb4
-rw-r--r--lib/gitlab/cache/import/caching.rb153
-rw-r--r--lib/gitlab/checks/branch_check.rb18
-rw-r--r--lib/gitlab/checks/diff_check.rb4
-rw-r--r--lib/gitlab/checks/lfs_check.rb2
-rw-r--r--lib/gitlab/checks/lfs_integrity.rb4
-rw-r--r--lib/gitlab/checks/post_push_message.rb15
-rw-r--r--lib/gitlab/checks/project_moved.rb4
-rw-r--r--lib/gitlab/checks/push_check.rb2
-rw-r--r--lib/gitlab/checks/push_file_count_check.rb37
-rw-r--r--lib/gitlab/checks/snippet_check.rb38
-rw-r--r--lib/gitlab/checks/tag_check.rb8
-rw-r--r--lib/gitlab/ci/artifact_file_reader.rb71
-rw-r--r--lib/gitlab/ci/config.rb19
-rw-r--r--lib/gitlab/ci/config/entry/artifacts.rb2
-rw-r--r--lib/gitlab/ci/config/entry/bridge.rb93
-rw-r--r--lib/gitlab/ci/config/entry/cache.rb2
-rw-r--r--lib/gitlab/ci/config/entry/default.rb2
-rw-r--r--lib/gitlab/ci/config/entry/include.rb2
-rw-r--r--lib/gitlab/ci/config/entry/inherit.rb30
-rw-r--r--lib/gitlab/ci/config/entry/inherit/default.rb51
-rw-r--r--lib/gitlab/ci/config/entry/inherit/variables.rb48
-rw-r--r--lib/gitlab/ci/config/entry/job.rb106
-rw-r--r--lib/gitlab/ci/config/entry/processable.rb125
-rw-r--r--lib/gitlab/ci/config/entry/release.rb2
-rw-r--r--lib/gitlab/ci/config/entry/release/assets.rb2
-rw-r--r--lib/gitlab/ci/config/entry/reports.rb7
-rw-r--r--lib/gitlab/ci/config/entry/root.rb15
-rw-r--r--lib/gitlab/ci/config/entry/service.rb34
-rw-r--r--lib/gitlab/ci/config/entry/workflow.rb1
-rw-r--r--lib/gitlab/ci/config/external/context.rb5
-rw-r--r--lib/gitlab/ci/config/external/file/artifact.rb93
-rw-r--r--lib/gitlab/ci/config/external/file/local.rb3
-rw-r--r--lib/gitlab/ci/config/external/file/project.rb3
-rw-r--r--lib/gitlab/ci/config/external/mapper.rb3
-rw-r--r--lib/gitlab/ci/parsers.rb3
-rw-r--r--lib/gitlab/ci/parsers/coverage/cobertura.rb64
-rw-r--r--lib/gitlab/ci/parsers/test/junit.rb12
-rw-r--r--lib/gitlab/ci/pipeline/chain/base.rb2
-rw-r--r--lib/gitlab/ci/pipeline/chain/build/associations.rb28
-rw-r--r--lib/gitlab/ci/pipeline/chain/command.rb4
-rw-r--r--lib/gitlab/ci/pipeline/chain/config/process.rb3
-rw-r--r--lib/gitlab/ci/pipeline/seed/build.rb33
-rw-r--r--lib/gitlab/ci/pipeline/seed/deployment.rb14
-rw-r--r--lib/gitlab/ci/pipeline/seed/environment.rb14
-rw-r--r--lib/gitlab/ci/reports/coverage_reports.rb43
-rw-r--r--lib/gitlab/ci/reports/test_case.rb9
-rw-r--r--lib/gitlab/ci/templates/Deploy-ECS.gitlab-ci.yml36
-rw-r--r--lib/gitlab/ci/templates/Jobs/Browser-Performance-Testing.gitlab-ci.yml2
-rw-r--r--lib/gitlab/ci/templates/Jobs/Build.gitlab-ci.yml1
-rw-r--r--lib/gitlab/ci/templates/Jobs/Code-Quality.gitlab-ci.yml4
-rw-r--r--lib/gitlab/ci/templates/Jobs/DAST-Default-Branch-Deploy.gitlab-ci.yml2
-rw-r--r--lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml3
-rw-r--r--lib/gitlab/ci/templates/Managed-Cluster-Applications.gitlab-ci.yml5
-rw-r--r--lib/gitlab/ci/templates/Pages/Jekyll.gitlab-ci.yml3
-rw-r--r--lib/gitlab/ci/templates/Pages/Middleman.gitlab-ci.yml2
-rw-r--r--lib/gitlab/ci/templates/Pages/Nanoc.gitlab-ci.yml2
-rw-r--r--lib/gitlab/ci/templates/Pages/Octopress.gitlab-ci.yml2
-rw-r--r--lib/gitlab/ci/templates/Security/Container-Scanning.gitlab-ci.yml9
-rw-r--r--lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml1
-rw-r--r--lib/gitlab/ci/templates/Security/Dependency-Scanning.gitlab-ci.yml7
-rw-r--r--lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml4
-rw-r--r--lib/gitlab/ci/templates/Verify/Accessibility.gitlab-ci.yml19
-rw-r--r--lib/gitlab/ci/yaml_processor.rb9
-rw-r--r--lib/gitlab/config/entry/attributable.rb2
-rw-r--r--lib/gitlab/config/entry/configurable.rb31
-rw-r--r--lib/gitlab/config_checker/puma_rugged_checker.rb28
-rw-r--r--lib/gitlab/cycle_analytics/usage_data.rb21
-rw-r--r--lib/gitlab/danger/commit_linter.rb7
-rw-r--r--lib/gitlab/danger/helper.rb7
-rw-r--r--lib/gitlab/data_builder/push.rb3
-rw-r--r--lib/gitlab/database/batch_count.rb2
-rw-r--r--lib/gitlab/database/connection_timer.rb50
-rw-r--r--lib/gitlab/database/migration_helpers.rb74
-rw-r--r--lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin.rb30
-rw-r--r--lib/gitlab/diff/highlight_cache.rb27
-rw-r--r--lib/gitlab/elasticsearch/logs.rb150
-rw-r--r--lib/gitlab/email.rb22
-rw-r--r--lib/gitlab/email/receiver.rb25
-rw-r--r--lib/gitlab/encoding_helper.rb13
-rw-r--r--lib/gitlab/experimentation.rb13
-rw-r--r--lib/gitlab/file_type_detection.rb14
-rw-r--r--lib/gitlab/git/blob.rb26
-rw-r--r--lib/gitlab/git/repository.rb26
-rw-r--r--lib/gitlab/git/rugged_impl/use_rugged.rb22
-rw-r--r--lib/gitlab/git_access.rb84
-rw-r--r--lib/gitlab/git_access_snippet.rb94
-rw-r--r--lib/gitlab/git_access_wiki.rb4
-rw-r--r--lib/gitlab/git_post_receive.rb10
-rw-r--r--lib/gitlab/gitaly_client.rb26
-rw-r--r--lib/gitlab/gitaly_client/commit_service.rb3
-rw-r--r--lib/gitlab/gitaly_client/operation_service.rb3
-rw-r--r--lib/gitlab/gitaly_client/remote_service.rb14
-rw-r--r--lib/gitlab/gitaly_client/repository_service.rb16
-rw-r--r--lib/gitlab/github_import.rb2
-rw-r--r--lib/gitlab/github_import/caching.rb151
-rw-r--r--lib/gitlab/github_import/importer/repository_importer.rb5
-rw-r--r--lib/gitlab/github_import/issuable_finder.rb4
-rw-r--r--lib/gitlab/github_import/label_finder.rb4
-rw-r--r--lib/gitlab/github_import/milestone_finder.rb4
-rw-r--r--lib/gitlab/github_import/page_counter.rb4
-rw-r--r--lib/gitlab/github_import/parallel_scheduling.rb6
-rw-r--r--lib/gitlab/github_import/user_finder.rb10
-rw-r--r--lib/gitlab/gl_repository.rb12
-rw-r--r--lib/gitlab/gl_repository/repo_type.rb18
-rw-r--r--lib/gitlab/gon_helper.rb1
-rw-r--r--lib/gitlab/grape_logging/formatters/lograge_with_timestamp.rb20
-rw-r--r--lib/gitlab/graphql/connections.rb4
-rw-r--r--lib/gitlab/graphql/docs/helper.rb22
-rw-r--r--lib/gitlab/graphql/docs/templates/default.md.haml5
-rw-r--r--lib/gitlab/graphql/pagination/offset_active_record_relation_connection.rb13
-rw-r--r--lib/gitlab/graphql/pagination/relations/offset_active_record_relation.rb12
-rw-r--r--lib/gitlab/graphql/timeout.rb11
-rw-r--r--lib/gitlab/import_export.rb16
-rw-r--r--lib/gitlab/import_export/after_export_strategies/move_file_strategy.rb19
-rw-r--r--lib/gitlab/import_export/attribute_cleaner.rb4
-rw-r--r--lib/gitlab/import_export/base/object_builder.rb105
-rw-r--r--lib/gitlab/import_export/base/relation_factory.rb312
-rw-r--r--lib/gitlab/import_export/base_object_builder.rb103
-rw-r--r--lib/gitlab/import_export/base_relation_factory.rb307
-rw-r--r--lib/gitlab/import_export/error.rb9
-rw-r--r--lib/gitlab/import_export/fast_hash_serializer.rb6
-rw-r--r--lib/gitlab/import_export/group/import_export.yml79
-rw-r--r--lib/gitlab/import_export/group/object_builder.rb57
-rw-r--r--lib/gitlab/import_export/group/relation_factory.rb42
-rw-r--r--lib/gitlab/import_export/group/tree_restorer.rb113
-rw-r--r--lib/gitlab/import_export/group/tree_saver.rb57
-rw-r--r--lib/gitlab/import_export/group_import_export.yml78
-rw-r--r--lib/gitlab/import_export/group_object_builder.rb55
-rw-r--r--lib/gitlab/import_export/group_project_object_builder.rb117
-rw-r--r--lib/gitlab/import_export/group_relation_factory.rb40
-rw-r--r--lib/gitlab/import_export/group_tree_restorer.rb116
-rw-r--r--lib/gitlab/import_export/group_tree_saver.rb55
-rw-r--r--lib/gitlab/import_export/import_export.yml379
-rw-r--r--lib/gitlab/import_export/importer.rb12
-rw-r--r--lib/gitlab/import_export/json/legacy_reader.rb104
-rw-r--r--lib/gitlab/import_export/json/legacy_writer.rb73
-rw-r--r--lib/gitlab/import_export/json/streaming_serializer.rb82
-rw-r--r--lib/gitlab/import_export/legacy_relation_tree_saver.rb27
-rw-r--r--lib/gitlab/import_export/members_mapper.rb16
-rw-r--r--lib/gitlab/import_export/project/base_task.rb41
-rw-r--r--lib/gitlab/import_export/project/export_task.rb43
-rw-r--r--lib/gitlab/import_export/project/import_export.yml387
-rw-r--r--lib/gitlab/import_export/project/import_task.rb110
-rw-r--r--lib/gitlab/import_export/project/legacy_tree_saver.rb68
-rw-r--r--lib/gitlab/import_export/project/object_builder.rb119
-rw-r--r--lib/gitlab/import_export/project/relation_factory.rb164
-rw-r--r--lib/gitlab/import_export/project/tree_restorer.rb75
-rw-r--r--lib/gitlab/import_export/project/tree_saver.rb56
-rw-r--r--lib/gitlab/import_export/project_relation_factory.rb184
-rw-r--r--lib/gitlab/import_export/project_tree_loader.rb72
-rw-r--r--lib/gitlab/import_export/project_tree_restorer.rb92
-rw-r--r--lib/gitlab/import_export/project_tree_saver.rb68
-rw-r--r--lib/gitlab/import_export/reader.rb8
-rw-r--r--lib/gitlab/import_export/relation_rename_service.rb48
-rw-r--r--lib/gitlab/import_export/relation_tree_restorer.rb72
-rw-r--r--lib/gitlab/import_export/relation_tree_saver.rb27
-rw-r--r--lib/gitlab/import_export/shared.rb8
-rw-r--r--lib/gitlab/import_export/snippet_repo_restorer.rb48
-rw-r--r--lib/gitlab/import_export/snippet_repo_saver.rb21
-rw-r--r--lib/gitlab/import_export/snippets_repo_restorer.rb36
-rw-r--r--lib/gitlab/import_export/snippets_repo_saver.rb34
-rw-r--r--lib/gitlab/incoming_email.rb8
-rw-r--r--lib/gitlab/jira/http_client.rb7
-rw-r--r--lib/gitlab/job_waiter.rb31
-rw-r--r--lib/gitlab/kubernetes/helm.rb8
-rw-r--r--lib/gitlab/kubernetes/helm/api.rb2
-rw-r--r--lib/gitlab/kubernetes/helm/client_command.rb2
-rw-r--r--lib/gitlab/kubernetes/namespace.rb8
-rw-r--r--lib/gitlab/legacy_github_import/importer.rb12
-rw-r--r--lib/gitlab/lograge/custom_options.rb43
-rw-r--r--lib/gitlab/markdown_cache.rb2
-rw-r--r--lib/gitlab/metrics/dashboard/finder.rb6
-rw-r--r--lib/gitlab/metrics/dashboard/service_selector.rb3
-rw-r--r--lib/gitlab/metrics/dashboard/stages/grafana_formatter.rb111
-rw-r--r--lib/gitlab/middleware/go.rb3
-rw-r--r--lib/gitlab/middleware/read_only/controller.rb8
-rw-r--r--lib/gitlab/object_hierarchy.rb2
-rw-r--r--lib/gitlab/omniauth_logging/json_formatter.rb13
-rw-r--r--lib/gitlab/path_regex.rb25
-rw-r--r--lib/gitlab/process_memory_cache.rb13
-rw-r--r--lib/gitlab/profiler.rb32
-rw-r--r--lib/gitlab/project_template.rb3
-rw-r--r--lib/gitlab/prometheus/query_variables.rb6
-rw-r--r--lib/gitlab/quick_actions/extractor.rb39
-rw-r--r--lib/gitlab/quick_actions/substitution_definition.rb2
-rw-r--r--lib/gitlab/rate_limit_helpers.rb35
-rw-r--r--lib/gitlab/reactive_cache_set_cache.rb34
-rw-r--r--lib/gitlab/redacted_search_results_logger.rb9
-rw-r--r--lib/gitlab/reference_counter.rb55
-rw-r--r--lib/gitlab/reference_extractor.rb2
-rw-r--r--lib/gitlab/regex.rb6
-rw-r--r--lib/gitlab/repo_path.rb48
-rw-r--r--lib/gitlab/repository_cache_adapter.rb2
-rw-r--r--lib/gitlab/repository_set_cache.rb24
-rw-r--r--lib/gitlab/request_profiler/middleware.rb6
-rw-r--r--lib/gitlab/search/found_blob.rb2
-rw-r--r--lib/gitlab/serverless/domain.rb13
-rw-r--r--lib/gitlab/serverless/function_uri.rb46
-rw-r--r--lib/gitlab/serverless/service.rb6
-rw-r--r--lib/gitlab/set_cache.rb71
-rw-r--r--lib/gitlab/setup_helper.rb2
-rw-r--r--lib/gitlab/shell.rb343
-rw-r--r--lib/gitlab/sidekiq_cluster.rb162
-rw-r--r--lib/gitlab/sidekiq_cluster/cli.rb184
-rw-r--r--lib/gitlab/sidekiq_config/cli_methods.rb6
-rw-r--r--lib/gitlab/sidekiq_config/dummy_worker.rb3
-rw-r--r--lib/gitlab/sidekiq_config/worker.rb9
-rw-r--r--lib/gitlab/sidekiq_logging/client_logger.rb11
-rw-r--r--lib/gitlab/sidekiq_logging/deduplication_logger.rb19
-rw-r--r--lib/gitlab/sidekiq_logging/json_formatter.rb3
-rw-r--r--lib/gitlab/sidekiq_logging/logs_jobs.rb25
-rw-r--r--lib/gitlab/sidekiq_logging/structured_logger.rb20
-rw-r--r--lib/gitlab/sidekiq_middleware.rb34
-rw-r--r--lib/gitlab/sidekiq_middleware/duplicate_jobs.rb13
-rw-r--r--lib/gitlab/sidekiq_middleware/duplicate_jobs/client.rb13
-rw-r--r--lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job.rb116
-rw-r--r--lib/gitlab/sidekiq_middleware/duplicate_jobs/server.rb13
-rw-r--r--lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies.rb21
-rw-r--r--lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing.rb41
-rw-r--r--lib/gitlab/sidekiq_middleware/metrics.rb4
-rw-r--r--lib/gitlab/sidekiq_middleware/request_store_middleware.rb10
-rw-r--r--lib/gitlab/sidekiq_middleware/server_metrics.rb8
-rw-r--r--lib/gitlab/sidekiq_queue.rb68
-rw-r--r--lib/gitlab/slash_commands/presenters/base.rb2
-rw-r--r--lib/gitlab/template/finders/global_template_finder.rb17
-rw-r--r--lib/gitlab/template/gitlab_ci_yml_template.rb11
-rw-r--r--lib/gitlab/testing/clear_thread_memory_cache_middleware.rb17
-rw-r--r--lib/gitlab/tracing.rb37
-rw-r--r--lib/gitlab/uploads/migration_helper.rb6
-rw-r--r--lib/gitlab/url_blocker.rb8
-rw-r--r--lib/gitlab/url_blockers/domain_whitelist_entry.rb21
-rw-r--r--lib/gitlab/url_blockers/ip_whitelist_entry.rb22
-rw-r--r--lib/gitlab/url_blockers/url_whitelist.rb12
-rw-r--r--lib/gitlab/url_builder.rb5
-rw-r--r--lib/gitlab/usage_counters/common.rb30
-rw-r--r--lib/gitlab/usage_counters/pod_logs.rb11
-rw-r--r--lib/gitlab/usage_data.rb35
-rw-r--r--lib/gitlab/user_access.rb4
-rw-r--r--lib/gitlab/user_access_snippet.rb49
-rw-r--r--lib/gitlab/utils.rb16
-rw-r--r--lib/gitlab/utils/json_size_estimator.rb104
-rw-r--r--lib/gitlab/utils/log_limited_array.rb10
-rw-r--r--lib/gitlab/utils/measuring.rb75
-rw-r--r--lib/gitlab/with_request_store.rb13
-rw-r--r--lib/gitlab/workhorse.rb2
-rw-r--r--lib/gitlab/x509/commit.rb6
-rw-r--r--lib/gitlab_danger.rb3
-rw-r--r--lib/grafana/time_window.rb130
-rw-r--r--lib/grafana/validator.rb96
-rw-r--r--lib/omni_auth/strategies/saml.rb29
-rw-r--r--lib/quality/kubernetes_client.rb3
-rw-r--r--lib/quality/test_level.rb9
-rw-r--r--lib/sentry/client/issue.rb18
-rwxr-xr-xlib/support/init.d/gitlab8
-rw-r--r--lib/support/init.d/gitlab.default.example4
-rw-r--r--lib/system_check/gitlab_shell_check.rb2
-rw-r--r--lib/system_check/ldap_check.rb6
-rw-r--r--lib/tasks/cleanup.rake33
-rw-r--r--lib/tasks/gitlab/backup.rake21
-rw-r--r--lib/tasks/gitlab/cleanup.rake2
-rw-r--r--lib/tasks/gitlab/graphql.rake20
-rw-r--r--lib/tasks/gitlab/import_export/export.rake46
-rw-r--r--lib/tasks/gitlab/import_export/import.rake208
-rw-r--r--lib/tasks/gitlab/info.rake9
-rw-r--r--lib/tasks/gitlab/shell.rake10
-rw-r--r--lib/tasks/gitlab/uploads/migrate.rake4
-rw-r--r--lib/tasks/sidekiq.rake2
-rw-r--r--locale/gitlab.pot1474
-rw-r--r--locale/unfound_translations.rb1
-rw-r--r--package.json90
-rw-r--r--qa/Dockerfile2
-rw-r--r--qa/Gemfile2
-rw-r--r--qa/Gemfile.lock14
-rw-r--r--qa/README.md15
-rw-r--r--qa/qa.rb6
-rw-r--r--qa/qa/fixtures/monitored_auto_devops/.gitlab-ci.yml2
-rw-r--r--qa/qa/git/repository.rb2
-rw-r--r--qa/qa/page/dashboard/snippet/new.rb2
-rw-r--r--qa/qa/page/group/menu.rb7
-rw-r--r--qa/qa/page/merge_request/show.rb5
-rw-r--r--qa/qa/page/project/issue/show.rb2
-rw-r--r--qa/qa/page/project/operations/kubernetes/add_existing.rb4
-rw-r--r--qa/qa/page/project/operations/kubernetes/show.rb36
-rw-r--r--qa/qa/page/project/operations/metrics.rb49
-rw-r--r--qa/qa/page/project/operations/metrics/show.rb50
-rw-r--r--qa/qa/page/project/settings/ci_cd.rb10
-rw-r--r--qa/qa/page/project/settings/deploy_tokens.rb4
-rw-r--r--qa/qa/page/project/settings/repository.rb6
-rw-r--r--qa/qa/page/project/web_ide/edit.rb15
-rw-r--r--qa/qa/resource/deploy_key.rb4
-rw-r--r--qa/qa/resource/deploy_token.rb16
-rw-r--r--qa/qa/resource/kubernetes_cluster.rb5
-rw-r--r--qa/qa/resource/ssh_key.rb31
-rw-r--r--qa/qa/runtime/browser.rb15
-rw-r--r--qa/qa/service/cluster_provider/k3s.rb94
-rw-r--r--qa/qa/service/docker_run/base.rb4
-rw-r--r--qa/qa/service/docker_run/k3s.rb46
-rw-r--r--qa/qa/specs/features/browser_ui/1_manage/project/view_project_activity_spec.rb5
-rw-r--r--qa/qa/specs/features/browser_ui/2_plan/issue/create_issue_spec.rb2
-rw-r--r--qa/qa/specs/features/browser_ui/3_create/merge_request/rebase_merge_request_spec.rb2
-rw-r--r--qa/qa/specs/features/browser_ui/3_create/merge_request/squash_merge_request_spec.rb2
-rw-r--r--qa/qa/specs/features/browser_ui/3_create/repository/add_list_delete_branches_spec.rb2
-rw-r--r--qa/qa/specs/features/browser_ui/3_create/repository/add_ssh_key_spec.rb2
-rw-r--r--qa/qa/specs/features/browser_ui/3_create/repository/protocol_v2_push_http_spec.rb5
-rw-r--r--qa/qa/specs/features/browser_ui/3_create/repository/protocol_v2_push_ssh_spec.rb20
-rw-r--r--qa/qa/specs/features/browser_ui/3_create/repository/push_mirroring_over_http_spec.rb2
-rw-r--r--qa/qa/specs/features/browser_ui/3_create/repository/push_over_http_spec.rb5
-rw-r--r--qa/qa/specs/features/browser_ui/3_create/repository/push_protected_branch_spec.rb2
-rw-r--r--qa/qa/specs/features/browser_ui/3_create/repository/use_ssh_key_spec.rb8
-rw-r--r--qa/qa/specs/features/browser_ui/3_create/snippet/create_snippet_spec.rb2
-rw-r--r--qa/qa/specs/features/browser_ui/3_create/web_ide/review_merge_request_spec.rb42
-rw-r--r--qa/qa/specs/features/browser_ui/3_create/wiki/create_edit_clone_push_wiki_spec.rb2
-rw-r--r--qa/qa/specs/features/browser_ui/4_verify/ci_variable/add_remove_ci_variable_spec.rb2
-rw-r--r--qa/qa/specs/features/browser_ui/6_release/deploy_key/add_deploy_key_spec.rb4
-rw-r--r--qa/qa/specs/features/browser_ui/6_release/deploy_key/clone_using_deploy_key_spec.rb2
-rw-r--r--qa/qa/specs/features/browser_ui/7_configure/kubernetes/kubernetes_integration_spec.rb19
-rw-r--r--qa/spec/git/repository_spec.rb8
-rw-r--r--qa/spec/service/docker_run/k3s_spec.rb32
-rw-r--r--rubocop/cop/ban_catch_throw.rb34
-rw-r--r--rubocop/cop/gitlab/keys-first-and-values-first.rb2
-rw-r--r--rubocop/cop/migration/add_column_with_default.rb44
-rw-r--r--rubocop/cop/migration/schedule_async.rb54
-rw-r--r--rubocop/cop/migration/with_lock_retries_with_change.rb30
-rw-r--r--rubocop/cop/scalability/idempotent_worker.rb59
-rw-r--r--rubocop/migration_helpers.rb4
-rwxr-xr-xscripts/clean-old-cached-assets2
-rw-r--r--scripts/frontend/block_dependencies.js21
-rw-r--r--scripts/frontend/merge_coverage_frontend.js31
-rw-r--r--scripts/frontend/parallel_ci_sequencer.js41
-rwxr-xr-xscripts/gemfile_lock_changed.sh26
-rwxr-xr-xscripts/lint-doc.sh59
-rwxr-xr-xscripts/lint-rugged6
-rwxr-xr-xscripts/review_apps/automated_cleanup.rb34
-rw-r--r--scripts/review_apps/base-config.yaml24
-rwxr-xr-xscripts/security-harness6
-rwxr-xr-xscripts/static-analysis4
-rwxr-xr-xscripts/trigger-build24
-rw-r--r--scripts/utils.sh94
-rw-r--r--spec/bin/sidekiq_cluster_spec.rb45
-rw-r--r--spec/config/settings_spec.rb6
-rw-r--r--spec/controllers/admin/integrations_controller_spec.rb86
-rw-r--r--spec/controllers/admin/serverless/domains_controller_spec.rb90
-rw-r--r--spec/controllers/admin/services_controller_spec.rb10
-rw-r--r--spec/controllers/admin/sessions_controller_spec.rb116
-rw-r--r--spec/controllers/application_controller_spec.rb37
-rw-r--r--spec/controllers/boards/issues_controller_spec.rb12
-rw-r--r--spec/controllers/explore/snippets_controller_spec.rb31
-rw-r--r--spec/controllers/groups/registry/repositories_controller_spec.rb2
-rw-r--r--spec/controllers/groups/settings/ci_cd_controller_spec.rb12
-rw-r--r--spec/controllers/groups_controller_spec.rb7
-rw-r--r--spec/controllers/import/gitea_controller_spec.rb16
-rw-r--r--spec/controllers/import/gitlab_projects_controller_spec.rb58
-rw-r--r--spec/controllers/profiles/keys_controller_spec.rb16
-rw-r--r--spec/controllers/profiles_controller_spec.rb10
-rw-r--r--spec/controllers/projects/clusters_controller_spec.rb3
-rw-r--r--spec/controllers/projects/deploy_keys_controller_spec.rb16
-rw-r--r--spec/controllers/projects/forks_controller_spec.rb11
-rw-r--r--spec/controllers/projects/hooks_controller_spec.rb9
-rw-r--r--spec/controllers/projects/import/jira_controller_spec.rb173
-rw-r--r--spec/controllers/projects/issues_controller_spec.rb16
-rw-r--r--spec/controllers/projects/logs_controller_spec.rb143
-rw-r--r--spec/controllers/projects/merge_requests_controller_spec.rb142
-rw-r--r--spec/controllers/projects/milestones_controller_spec.rb4
-rw-r--r--spec/controllers/projects/performance_monitoring/dashboards_controller_spec.rb126
-rw-r--r--spec/controllers/projects/pipelines_controller_spec.rb104
-rw-r--r--spec/controllers/projects/registry/repositories_controller_spec.rb4
-rw-r--r--spec/controllers/projects/releases_controller_spec.rb97
-rw-r--r--spec/controllers/projects/repositories_controller_spec.rb14
-rw-r--r--spec/controllers/projects/serverless/functions_controller_spec.rb2
-rw-r--r--spec/controllers/projects/services_controller_spec.rb10
-rw-r--r--spec/controllers/projects/settings/ci_cd_controller_spec.rb8
-rw-r--r--spec/controllers/projects/settings/repository_controller_spec.rb20
-rw-r--r--spec/controllers/projects/snippets_controller_spec.rb93
-rw-r--r--spec/controllers/projects/tags/releases_controller_spec.rb10
-rw-r--r--spec/controllers/projects_controller_spec.rb6
-rw-r--r--spec/controllers/repositories/git_http_controller_spec.rb82
-rw-r--r--spec/controllers/search_controller_spec.rb8
-rw-r--r--spec/controllers/sessions_controller_spec.rb61
-rw-r--r--spec/controllers/snippets/notes_controller_spec.rb2
-rw-r--r--spec/controllers/snippets_controller_spec.rb195
-rw-r--r--spec/controllers/users/terms_controller_spec.rb146
-rw-r--r--spec/db/schema_spec.rb1
-rw-r--r--spec/factories/broadcast_messages.rb6
-rw-r--r--spec/factories/ci/builds.rb19
-rw-r--r--spec/factories/ci/job_artifacts.rb44
-rw-r--r--spec/factories/ci/job_variables.rb4
-rw-r--r--spec/factories/ci/pipelines.rb9
-rw-r--r--spec/factories/ci/ref.rb16
-rw-r--r--spec/factories/ci/test_case.rb31
-rw-r--r--spec/factories/clusters/applications/helm.rb32
-rw-r--r--spec/factories/deployments.rb2
-rw-r--r--spec/factories/merge_requests.rb12
-rw-r--r--spec/factories/notes.rb4
-rw-r--r--spec/factories/project_export_jobs.rb8
-rw-r--r--spec/factories/releases/link.rb1
-rw-r--r--spec/factories/resource_milestone_event.rb12
-rw-r--r--spec/factories/serverless/domain.rb11
-rw-r--r--spec/factories/services.rb11
-rw-r--r--spec/factories/snippets.rb5
-rw-r--r--spec/factories/user_details.rb8
-rw-r--r--spec/factories/user_highest_roles.rb7
-rw-r--r--spec/factories/wiki_pages.rb2
-rw-r--r--spec/factories/x509_certificate.rb1
-rw-r--r--spec/features/admin/admin_health_check_spec.rb2
-rw-r--r--spec/features/admin/admin_mode/login_spec.rb184
-rw-r--r--spec/features/admin/admin_mode/logout_spec.rb42
-rw-r--r--spec/features/admin/admin_mode_spec.rb4
-rw-r--r--spec/features/admin/admin_runners_spec.rb24
-rw-r--r--spec/features/boards/boards_spec.rb6
-rw-r--r--spec/features/boards/issue_ordering_spec.rb25
-rw-r--r--spec/features/boards/modal_filter_spec.rb2
-rw-r--r--spec/features/boards/multiple_boards_spec.rb10
-rw-r--r--spec/features/boards/new_issue_spec.rb8
-rw-r--r--spec/features/boards/sidebar_spec.rb14
-rw-r--r--spec/features/broadcast_messages_spec.rb65
-rw-r--r--spec/features/clusters/cluster_detail_page_spec.rb14
-rw-r--r--spec/features/clusters/installing_applications_shared_examples.rb7
-rw-r--r--spec/features/commits_spec.rb41
-rw-r--r--spec/features/container_registry_spec.rb75
-rw-r--r--spec/features/dashboard/issues_filter_spec.rb6
-rw-r--r--spec/features/dashboard/issues_spec.rb2
-rw-r--r--spec/features/dashboard/merge_requests_spec.rb4
-rw-r--r--spec/features/dashboard/root_explore_spec.rb10
-rw-r--r--spec/features/discussion_comments/commit_spec.rb2
-rw-r--r--spec/features/discussion_comments/merge_request_spec.rb3
-rw-r--r--spec/features/discussion_comments/snippets_spec.rb6
-rw-r--r--spec/features/error_tracking/user_filters_errors_by_status_spec.rb40
-rw-r--r--spec/features/error_tracking/user_searches_sentry_errors_spec.rb2
-rw-r--r--spec/features/explore/user_explores_projects_spec.rb10
-rw-r--r--spec/features/group_variables_spec.rb2
-rw-r--r--spec/features/groups/clusters/user_spec.rb7
-rw-r--r--spec/features/groups/container_registry_spec.rb93
-rw-r--r--spec/features/groups/group_page_with_external_authorization_service_spec.rb2
-rw-r--r--spec/features/groups/issues_spec.rb2
-rw-r--r--spec/features/groups/labels/user_sees_links_to_issuables_spec.rb2
-rw-r--r--spec/features/groups/merge_requests_spec.rb2
-rw-r--r--spec/features/groups/navbar_spec.rb19
-rw-r--r--spec/features/groups/settings/ci_cd_spec.rb13
-rw-r--r--spec/features/ide/clientside_preview_csp_spec.rb35
-rw-r--r--spec/features/ide/static_object_external_storage_csp_spec.rb31
-rw-r--r--spec/features/issuables/issuable_list_spec.rb8
-rw-r--r--spec/features/issues/filtered_search/dropdown_assignee_spec.rb6
-rw-r--r--spec/features/issues/filtered_search/dropdown_author_spec.rb6
-rw-r--r--spec/features/issues/filtered_search/dropdown_base_spec.rb8
-rw-r--r--spec/features/issues/filtered_search/dropdown_emoji_spec.rb6
-rw-r--r--spec/features/issues/filtered_search/dropdown_label_spec.rb2
-rw-r--r--spec/features/issues/filtered_search/dropdown_milestone_spec.rb2
-rw-r--r--spec/features/issues/filtered_search/dropdown_release_spec.rb2
-rw-r--r--spec/features/issues/filtered_search/filter_issues_spec.rb74
-rw-r--r--spec/features/issues/filtered_search/visual_tokens_spec.rb16
-rw-r--r--spec/features/issues/issue_detail_spec.rb2
-rw-r--r--spec/features/issues/issue_sidebar_spec.rb23
-rw-r--r--spec/features/issues/user_creates_issue_spec.rb2
-rw-r--r--spec/features/issues/user_edits_issue_spec.rb7
-rw-r--r--spec/features/issues/user_views_issues_spec.rb2
-rw-r--r--spec/features/labels_hierarchy_spec.rb6
-rw-r--r--spec/features/markdown/mermaid_spec.rb67
-rw-r--r--spec/features/markdown/metrics_spec.rb74
-rw-r--r--spec/features/merge_request/maintainer_edits_fork_spec.rb2
-rw-r--r--spec/features/merge_request/user_creates_image_diff_notes_spec.rb2
-rw-r--r--spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb2
-rw-r--r--spec/features/merge_request/user_manages_subscription_spec.rb2
-rw-r--r--spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb4
-rw-r--r--spec/features/merge_request/user_posts_notes_spec.rb3
-rw-r--r--spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb3
-rw-r--r--spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_diff_spec.rb4
-rw-r--r--spec/features/merge_request/user_sees_merge_widget_spec.rb22
-rw-r--r--spec/features/merge_request/user_views_diffs_spec.rb27
-rw-r--r--spec/features/merge_request/user_views_merge_request_from_deleted_fork_spec.rb2
-rw-r--r--spec/features/merge_requests/filters_generic_behavior_spec.rb2
-rw-r--r--spec/features/merge_requests/user_filters_by_assignees_spec.rb4
-rw-r--r--spec/features/merge_requests/user_filters_by_labels_spec.rb6
-rw-r--r--spec/features/merge_requests/user_filters_by_milestones_spec.rb10
-rw-r--r--spec/features/merge_requests/user_filters_by_multiple_criteria_spec.rb4
-rw-r--r--spec/features/merge_requests/user_filters_by_target_branch_spec.rb6
-rw-r--r--spec/features/merge_requests/user_sorts_merge_requests_spec.rb8
-rw-r--r--spec/features/merge_requests/user_views_open_merge_requests_spec.rb6
-rw-r--r--spec/features/milestones/user_creates_milestone_spec.rb4
-rw-r--r--spec/features/milestones/user_edits_milestone_spec.rb6
-rw-r--r--spec/features/milestones/user_promotes_milestone_spec.rb8
-rw-r--r--spec/features/milestones/user_views_milestone_spec.rb8
-rw-r--r--spec/features/milestones/user_views_milestones_spec.rb26
-rw-r--r--spec/features/profiles/active_sessions_spec.rb138
-rw-r--r--spec/features/profiles/user_edit_profile_spec.rb52
-rw-r--r--spec/features/project_group_variables_spec.rb1
-rw-r--r--spec/features/project_variables_spec.rb2
-rw-r--r--spec/features/projects/active_tabs_spec.rb37
-rw-r--r--spec/features/projects/artifacts/user_downloads_artifacts_spec.rb6
-rw-r--r--spec/features/projects/badges/pipeline_badge_spec.rb2
-rw-r--r--spec/features/projects/blobs/blob_show_spec.rb42
-rw-r--r--spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb67
-rw-r--r--spec/features/projects/branches/user_deletes_branch_spec.rb2
-rw-r--r--spec/features/projects/branches/user_views_branches_spec.rb6
-rw-r--r--spec/features/projects/clusters/gcp_spec.rb7
-rw-r--r--spec/features/projects/clusters/user_spec.rb7
-rw-r--r--spec/features/projects/clusters_spec.rb1
-rw-r--r--spec/features/projects/commit/user_views_user_status_on_commit_spec.rb4
-rw-r--r--spec/features/projects/container_registry_spec.rb168
-rw-r--r--spec/features/projects/deploy_keys_spec.rb4
-rw-r--r--spec/features/projects/environments/environments_spec.rb2
-rw-r--r--spec/features/projects/labels/user_creates_labels_spec.rb6
-rw-r--r--spec/features/projects/labels/user_edits_labels_spec.rb6
-rw-r--r--spec/features/projects/labels/user_promotes_label_spec.rb8
-rw-r--r--spec/features/projects/labels/user_sees_links_to_issuables_spec.rb4
-rw-r--r--spec/features/projects/labels/user_views_labels_spec.rb5
-rw-r--r--spec/features/projects/navbar_spec.rb224
-rw-r--r--spec/features/projects/pipelines/pipeline_spec.rb40
-rw-r--r--spec/features/projects/releases/user_views_edit_release_spec.rb112
-rw-r--r--spec/features/projects/releases/user_views_release_spec.rb37
-rw-r--r--spec/features/projects/releases/user_views_releases_spec.rb17
-rw-r--r--spec/features/projects/services/user_views_services_spec.rb2
-rw-r--r--spec/features/projects/settings/ci_cd_settings_spec.rb112
-rw-r--r--spec/features/projects/settings/integration_settings_spec.rb144
-rw-r--r--spec/features/projects/settings/operations_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/pipelines_settings_spec.rb22
-rw-r--r--spec/features/projects/settings/project_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/registry_settings_spec.rb1
-rw-r--r--spec/features/projects/settings/repository_settings_spec.rb116
-rw-r--r--spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb4
-rw-r--r--spec/features/projects/settings/user_sees_revoke_deploy_token_modal_spec.rb2
-rw-r--r--spec/features/projects/settings/webhooks_settings_spec.rb143
-rw-r--r--spec/features/projects/show/user_manages_notifications_spec.rb20
-rw-r--r--spec/features/projects/show/user_sees_git_instructions_spec.rb12
-rw-r--r--spec/features/projects/show/user_sees_last_commit_ci_status_spec.rb4
-rw-r--r--spec/features/projects/show/user_sees_readme_spec.rb5
-rw-r--r--spec/features/projects/snippets/create_snippet_spec.rb61
-rw-r--r--spec/features/projects/snippets/user_comments_on_snippet_spec.rb6
-rw-r--r--spec/features/projects/snippets/user_updates_snippet_spec.rb25
-rw-r--r--spec/features/projects/sourcegraph_csp_spec.rb90
-rw-r--r--spec/features/projects/tags/user_edits_tags_spec.rb2
-rw-r--r--spec/features/projects/user_sees_user_popover_spec.rb3
-rw-r--r--spec/features/projects/user_uses_shortcuts_spec.rb20
-rw-r--r--spec/features/projects/wiki/markdown_preview_spec.rb2
-rw-r--r--spec/features/projects/wiki/user_creates_wiki_page_spec.rb12
-rw-r--r--spec/features/projects/wiki/user_updates_wiki_page_spec.rb7
-rw-r--r--spec/features/projects/wiki/user_views_wiki_page_spec.rb52
-rw-r--r--spec/features/read_only_spec.rb2
-rw-r--r--spec/features/reportable_note/snippets_spec.rb8
-rw-r--r--spec/features/security/project/internal_access_spec.rb2
-rw-r--r--spec/features/security/project/private_access_spec.rb2
-rw-r--r--spec/features/security/project/public_access_spec.rb2
-rw-r--r--spec/features/snippets/search_snippets_spec.rb41
-rw-r--r--spec/features/snippets/spam_snippets_spec.rb26
-rw-r--r--spec/features/snippets/user_creates_snippet_spec.rb62
-rw-r--r--spec/features/snippets/user_edits_snippet_spec.rb19
-rw-r--r--spec/features/uploads/user_uploads_file_to_note_spec.rb2
-rw-r--r--spec/features/user_sorts_things_spec.rb8
-rw-r--r--spec/features/users/show_spec.rb28
-rw-r--r--spec/finders/award_emojis_finder_spec.rb5
-rw-r--r--spec/finders/ci/jobs_finder_spec.rb89
-rw-r--r--spec/finders/ci/pipeline_schedules_finder_spec.rb43
-rw-r--r--spec/finders/ci/pipelines_finder_spec.rb271
-rw-r--r--spec/finders/ci/pipelines_for_merge_request_finder_spec.rb160
-rw-r--r--spec/finders/ci/runner_jobs_finder_spec.rb63
-rw-r--r--spec/finders/fork_targets_finder_spec.rb35
-rw-r--r--spec/finders/jobs_finder_spec.rb89
-rw-r--r--spec/finders/pipeline_schedules_finder_spec.rb43
-rw-r--r--spec/finders/pipelines_finder_spec.rb271
-rw-r--r--spec/finders/projects/export_job_finder_spec.rb51
-rw-r--r--spec/finders/runner_jobs_finder_spec.rb63
-rw-r--r--spec/finders/serverless_domain_finder_spec.rb103
-rw-r--r--spec/finders/snippets_finder_spec.rb11
-rw-r--r--spec/fixtures/api/schemas/cluster_status.json10
-rw-r--r--spec/fixtures/api/schemas/entities/issue_board.json1
-rw-r--r--spec/fixtures/api/schemas/entities/user.json3
-rw-r--r--spec/fixtures/api/schemas/environment.json4
-rw-r--r--spec/fixtures/api/schemas/internal/serverless/lookup_path.json28
-rw-r--r--spec/fixtures/api/schemas/internal/serverless/virtual_domain.json14
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/commit/basic.json6
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/deploy_token.json33
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/deploy_tokens.json6
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/merge_request_simple.json26
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/milestone_with_stats.json30
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/project/export_status.json3
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/release.json2
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/release/release_for_guest.json2
-rw-r--r--spec/fixtures/api/schemas/release/link.json2
-rw-r--r--spec/fixtures/api/schemas/remote_mirror.json5
-rw-r--r--spec/fixtures/build.env.gz bin 0 -> 46 bytes
-rw-r--r--spec/fixtures/ci_build_artifacts.zip bin 106365 -> 107464 bytes
-rw-r--r--spec/fixtures/cobertura/coverage.xml43
-rw-r--r--spec/fixtures/cobertura/coverage.xml.gz bin 0 -> 576 bytes
-rw-r--r--spec/fixtures/cobertura/coverage_gocov_xml.xml216
-rw-r--r--spec/fixtures/cobertura/coverage_gocov_xml.xml.gz bin 0 -> 1103 bytes
-rw-r--r--spec/fixtures/cobertura/coverage_with_corrupted_data.xml50
-rw-r--r--spec/fixtures/cobertura/coverage_with_corrupted_data.xml.gz bin 0 -> 571 bytes
-rw-r--r--spec/fixtures/csv_gitlab_export.csv5
-rw-r--r--spec/fixtures/group_export_invalid_subrelations.tar.gz bin 0 -> 3036 bytes
-rw-r--r--spec/fixtures/lib/elasticsearch/logs_response.json73
-rw-r--r--spec/fixtures/lib/elasticsearch/query.json39
-rw-r--r--spec/fixtures/lib/elasticsearch/query_with_container.json46
-rw-r--r--spec/fixtures/lib/elasticsearch/query_with_cursor.json43
-rw-r--r--spec/fixtures/lib/elasticsearch/query_with_end_time.json48
-rw-r--r--spec/fixtures/lib/elasticsearch/query_with_search.json48
-rw-r--r--spec/fixtures/lib/elasticsearch/query_with_start_time.json48
-rw-r--r--spec/fixtures/lib/elasticsearch/query_with_times.json49
-rw-r--r--spec/fixtures/lib/gitlab/import_export/complex/project.json8
-rw-r--r--spec/fixtures/lib/gitlab/import_export/group/project.json64
-rw-r--r--spec/fixtures/lib/gitlab/import_export/invalid_json/project.json3
-rw-r--r--spec/fixtures/lib/gitlab/import_export/light/project.json22
-rw-r--r--spec/fixtures/lib/gitlab/import_export/multi_pipeline_ref_one_external_pr/project.json144
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/schemas/axis.json10
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panels.json1
-rw-r--r--spec/fixtures/sentry/issues_sample_response.json42
-rw-r--r--spec/fixtures/trace/sample_trace4
-rw-r--r--spec/frontend/__mocks__/monaco-editor/index.js13
-rw-r--r--spec/frontend/__mocks__/mousetrap/index.js6
-rw-r--r--spec/frontend/badges/components/badge_form_spec.js197
-rw-r--r--spec/frontend/badges/components/badge_list_row_spec.js109
-rw-r--r--spec/frontend/badges/components/badge_list_spec.js95
-rw-r--r--spec/frontend/badges/components/badge_settings_spec.js117
-rw-r--r--spec/frontend/badges/components/badge_spec.js152
-rw-r--r--spec/frontend/badges/dummy_badge.js (renamed from spec/javascripts/badges/dummy_badge.js)0
-rw-r--r--spec/frontend/badges/store/actions_spec.js622
-rw-r--r--spec/frontend/badges/store/mutations_spec.js (renamed from spec/javascripts/badges/store/mutations_spec.js)0
-rw-r--r--spec/frontend/behaviors/quick_submit_spec.js153
-rw-r--r--spec/frontend/blob/3d_viewer/mesh_object_spec.js (renamed from spec/javascripts/blob/3d_viewer/mesh_object_spec.js)0
-rw-r--r--spec/frontend/blob/balsamiq/balsamiq_viewer_spec.js361
-rw-r--r--spec/frontend/blob/blob_file_dropzone_spec.js50
-rw-r--r--spec/frontend/blob/components/__snapshots__/blob_edit_content_spec.js.snap14
-rw-r--r--spec/frontend/blob/components/__snapshots__/blob_edit_header_spec.js.snap16
-rw-r--r--spec/frontend/blob/components/blob_edit_content_spec.js81
-rw-r--r--spec/frontend/blob/components/blob_edit_header_spec.js50
-rw-r--r--spec/frontend/blob/notebook/notebook_viever_spec.js108
-rw-r--r--spec/frontend/blob/pdf/pdf_viewer_spec.js67
-rw-r--r--spec/frontend/blob/pipeline_tour_success_spec.js40
-rw-r--r--spec/frontend/blob/sketch/index_spec.js92
-rw-r--r--spec/frontend/blob/suggest_gitlab_ci_yml/components/popover_spec.js100
-rw-r--r--spec/frontend/blob/utils_spec.js95
-rw-r--r--spec/frontend/blob/viewer/index_spec.js179
-rw-r--r--spec/frontend/blob_edit/blob_bundle_spec.js31
-rw-r--r--spec/frontend/boards/boards_store_spec.js17
-rw-r--r--spec/frontend/boards/components/boards_selector_spec.js210
-rw-r--r--spec/frontend/boards/components/issue_card_inner_scoped_label_spec.js43
-rw-r--r--spec/frontend/boards/components/issue_due_date_spec.js55
-rw-r--r--spec/frontend/boards/issue_card_spec.js51
-rw-r--r--spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js99
-rw-r--r--spec/frontend/ci_variable_list/components/ci_variable_popover_spec.js48
-rw-r--r--spec/frontend/ci_variable_list/components/ci_variable_settings_spec.js39
-rw-r--r--spec/frontend/ci_variable_list/components/ci_variable_table_spec.js84
-rw-r--r--spec/frontend/ci_variable_list/services/mock_data.js91
-rw-r--r--spec/frontend/ci_variable_list/store/actions_spec.js279
-rw-r--r--spec/frontend/ci_variable_list/store/mutations_spec.js64
-rw-r--r--spec/frontend/ci_variable_list/store/utils_spec.js49
-rw-r--r--spec/frontend/ci_variable_list/stubs.js14
-rw-r--r--spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap9
-rw-r--r--spec/frontend/clusters/components/applications_spec.js25
-rw-r--r--spec/frontend/clusters/components/ingress_modsecurity_settings_spec.js129
-rw-r--r--spec/frontend/clusters/services/mock_data.js1
-rw-r--r--spec/frontend/clusters/stores/clusters_store_spec.js2
-rw-r--r--spec/frontend/clusters_list/components/clusters_spec.js78
-rw-r--r--spec/frontend/clusters_list/mock_data.js47
-rw-r--r--spec/frontend/clusters_list/store/actions_spec.js50
-rw-r--r--spec/frontend/code_navigation/store/actions_spec.js26
-rw-r--r--spec/frontend/confirm_modal_spec.js126
-rw-r--r--spec/frontend/contributors/store/getters_spec.js32
-rw-r--r--spec/frontend/create_cluster/components/cluster_form_dropdown_spec.js142
-rw-r--r--spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js120
-rw-r--r--spec/frontend/create_cluster/eks_cluster/store/getters_spec.js13
-rw-r--r--spec/frontend/create_cluster/gke_cluster/components/gke_zone_dropdown_spec.js101
-rw-r--r--spec/frontend/create_cluster/gke_cluster/helpers.js64
-rw-r--r--spec/frontend/create_cluster/gke_cluster/stores/actions_spec.js140
-rw-r--r--spec/frontend/create_cluster/gke_cluster/stores/getters_spec.js (renamed from spec/javascripts/create_cluster/gke_cluster/stores/getters_spec.js)0
-rw-r--r--spec/frontend/create_cluster/gke_cluster/stores/mutations_spec.js (renamed from spec/javascripts/create_cluster/gke_cluster/stores/mutations_spec.js)0
-rw-r--r--spec/frontend/create_merge_request_dropdown_spec.js2
-rw-r--r--spec/frontend/diffs/components/app_spec.js728
-rw-r--r--spec/frontend/diffs/components/commit_item_spec.js181
-rw-r--r--spec/frontend/diffs/components/compare_versions_dropdown_spec.js179
-rw-r--r--spec/frontend/diffs/components/diff_file_row_spec.js2
-rw-r--r--spec/frontend/diffs/components/tree_list_spec.js138
-rw-r--r--spec/frontend/diffs/create_diffs_store.js15
-rw-r--r--spec/frontend/environments/environments_app_spec.js168
-rw-r--r--spec/frontend/error_tracking/components/error_details_spec.js110
-rw-r--r--spec/frontend/error_tracking/components/error_tracking_list_spec.js27
-rw-r--r--spec/frontend/error_tracking/store/list/actions_spec.js14
-rw-r--r--spec/frontend/error_tracking/store/list/mutation_spec.js11
-rw-r--r--spec/frontend/fixtures/groups.rb1
-rw-r--r--spec/frontend/fixtures/labels.rb17
-rw-r--r--spec/frontend/fixtures/metrics_dashboard.rb41
-rw-r--r--spec/frontend/fixtures/projects.rb1
-rw-r--r--spec/frontend/fixtures/static/notebook_viewer.html1
-rw-r--r--spec/frontend/fixtures/static/pdf_viewer.html1
-rw-r--r--spec/frontend/frequent_items/components/frequent_items_list_item_spec.js101
-rw-r--r--spec/frontend/frequent_items/components/frequent_items_list_spec.js101
-rw-r--r--spec/frontend/frequent_items/mock_data.js57
-rw-r--r--spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap6
-rw-r--r--spec/frontend/helpers/dom_shims/form_element.js1
-rw-r--r--spec/frontend/helpers/dom_shims/get_client_rects.js14
-rw-r--r--spec/frontend/helpers/dom_shims/image_element_properties.js12
-rw-r--r--spec/frontend/helpers/dom_shims/index.js4
-rw-r--r--spec/frontend/helpers/dom_shims/scroll_by.js7
-rw-r--r--spec/frontend/helpers/dom_shims/size_properties.js19
-rw-r--r--spec/frontend/helpers/tracking_helper.js2
-rw-r--r--spec/frontend/ide/components/commit_sidebar/editor_header_spec.js27
-rw-r--r--spec/frontend/ide/components/pipelines/__snapshots__/list_spec.js.snap1
-rw-r--r--spec/frontend/ide/components/preview/clientside_spec.js55
-rw-r--r--spec/frontend/ide/utils_spec.js60
-rw-r--r--spec/frontend/issuables_list/components/issuables_list_app_spec.js141
-rw-r--r--spec/frontend/labels_select_spec.js74
-rw-r--r--spec/frontend/lib/utils/common_utils_spec.js1001
-rw-r--r--spec/frontend/lib/utils/datetime_utility_spec.js39
-rw-r--r--spec/frontend/lib/utils/icon_utils_spec.js72
-rw-r--r--spec/frontend/lib/utils/mock_data.js8
-rw-r--r--spec/frontend/lib/utils/text_utility_spec.js23
-rw-r--r--spec/frontend/lib/utils/unit_format/formatter_factory_spec.js276
-rw-r--r--spec/frontend/lib/utils/unit_format/index_spec.js157
-rw-r--r--spec/frontend/logs/components/environment_logs_spec.js428
-rw-r--r--spec/frontend/logs/components/log_control_buttons_spec.js92
-rw-r--r--spec/frontend/logs/mock_data.js53
-rw-r--r--spec/frontend/logs/stores/actions_spec.js398
-rw-r--r--spec/frontend/logs/stores/getters_spec.js40
-rw-r--r--spec/frontend/logs/stores/mutations_spec.js259
-rw-r--r--spec/frontend/logs/utils_spec.js38
-rw-r--r--spec/frontend/mocks/ce/diffs/workers/tree_worker.js8
-rw-r--r--spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap18
-rw-r--r--spec/frontend/monitoring/components/charts/anomaly_spec.js4
-rw-r--r--spec/frontend/monitoring/components/charts/column_spec.js59
-rw-r--r--spec/frontend/monitoring/components/charts/empty_chart_spec.js8
-rw-r--r--spec/frontend/monitoring/components/charts/options_spec.js60
-rw-r--r--spec/frontend/monitoring/components/charts/time_series_spec.js812
-rw-r--r--spec/frontend/monitoring/components/dashboard_spec.js31
-rw-r--r--spec/frontend/monitoring/components/dashboard_url_time_spec.js2
-rw-r--r--spec/frontend/monitoring/components/panel_type_spec.js97
-rw-r--r--spec/frontend/monitoring/embed/embed_spec.js4
-rw-r--r--spec/frontend/monitoring/embed/mock_data.js37
-rw-r--r--spec/frontend/monitoring/mock_data.js351
-rw-r--r--spec/frontend/monitoring/shared/prometheus_header_spec.js26
-rw-r--r--spec/frontend/monitoring/store/actions_spec.js68
-rw-r--r--spec/frontend/monitoring/store/getters_spec.js79
-rw-r--r--spec/frontend/monitoring/store/mutations_spec.js55
-rw-r--r--spec/frontend/monitoring/store/utils_spec.js268
-rw-r--r--spec/frontend/notes/components/__snapshots__/discussion_jump_to_next_button_spec.js.snap3
-rw-r--r--spec/frontend/notes/components/discussion_counter_spec.js91
-rw-r--r--spec/frontend/notes/components/discussion_jump_to_next_button_spec.js27
-rw-r--r--spec/frontend/notes/components/discussion_keyboard_navigator_spec.js73
-rw-r--r--spec/frontend/notes/components/note_app_spec.js11
-rw-r--r--spec/frontend/notes/helpers.js12
-rw-r--r--spec/frontend/notes/mixins/discussion_navigation_spec.js178
-rw-r--r--spec/frontend/notes/old_notes_spec.js5
-rw-r--r--spec/frontend/notes/stores/actions_spec.js905
-rw-r--r--spec/frontend/notes/stores/getters_spec.js7
-rw-r--r--spec/frontend/notes/stores/mutation_spec.js24
-rw-r--r--spec/frontend/pages/projects/shared/permissions/components/project_feature_settings_spec.js124
-rw-r--r--spec/frontend/pages/projects/shared/permissions/components/project_setting_row_spec.js63
-rw-r--r--spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js434
-rw-r--r--spec/frontend/projects/project_import_gitlab_project_spec.js (renamed from spec/javascripts/projects/project_import_gitlab_project_spec.js)0
-rw-r--r--spec/frontend/projects/project_new_spec.js (renamed from spec/javascripts/projects/project_new_spec.js)0
-rw-r--r--spec/frontend/registry/explorer/components/project_policy_alert_spec.js132
-rw-r--r--spec/frontend/registry/explorer/pages/details_spec.js24
-rw-r--r--spec/frontend/registry/explorer/pages/list_spec.js10
-rw-r--r--spec/frontend/registry/explorer/stores/actions_spec.js24
-rw-r--r--spec/frontend/registry/explorer/stores/getters_spec.js34
-rw-r--r--spec/frontend/registry/explorer/stores/mutations_spec.js7
-rw-r--r--spec/frontend/registry/list/components/__snapshots__/project_empty_state_spec.js.snap6
-rw-r--r--spec/frontend/registry/settings/components/registry_settings_app_spec.js27
-rw-r--r--spec/frontend/releases/components/app_edit_spec.js136
-rw-r--r--spec/frontend/releases/components/app_show_spec.js61
-rw-r--r--spec/frontend/releases/components/evidence_block_spec.js18
-rw-r--r--spec/frontend/releases/components/release_block_footer_spec.js2
-rw-r--r--spec/frontend/releases/components/release_block_header_spec.js43
-rw-r--r--spec/frontend/releases/components/release_block_milestone_info_spec.js103
-rw-r--r--spec/frontend/releases/components/release_block_spec.js102
-rw-r--r--spec/frontend/releases/mock_data.js6
-rw-r--r--spec/frontend/releases/stores/modules/detail/actions_spec.js100
-rw-r--r--spec/frontend/reports/store/utils_spec.js34
-rw-r--r--spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap8
-rw-r--r--spec/frontend/search_spec.js42
-rw-r--r--spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap (renamed from spec/frontend/self_monitor/components/__snapshots__/self_monitor_spec.js.snap)0
-rw-r--r--spec/frontend/self_monitor/components/self_monitor_form_spec.js89
-rw-r--r--spec/frontend/self_monitor/components/self_monitor_spec.js83
-rw-r--r--spec/frontend/sidebar/assignee_title_spec.js116
-rw-r--r--spec/frontend/sidebar/mock_data.js10
-rw-r--r--spec/frontend/snippet/snippet_bundle_spec.js94
-rw-r--r--spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap24
-rw-r--r--spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap48
-rw-r--r--spec/frontend/snippets/components/__snapshots__/snippet_visibility_edit_spec.js.snap94
-rw-r--r--spec/frontend/snippets/components/snippet_blob_edit_spec.js40
-rw-r--r--spec/frontend/snippets/components/snippet_description_edit_spec.js52
-rw-r--r--spec/frontend/snippets/components/snippet_title_spec.js4
-rw-r--r--spec/frontend/snippets/components/snippet_visibility_edit_spec.js94
-rw-r--r--spec/frontend/tracking_spec.js8
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_suggest_pipeline_spec.js55
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_pipeline_tour_spec.js143
-rw-r--r--spec/frontend/vue_mr_widget/components/states/pipeline_tour_mock_data.js10
-rw-r--r--spec/frontend/vue_mr_widget/deployment/deployment_action_button_spec.js124
-rw-r--r--spec/frontend/vue_mr_widget/deployment/deployment_actions_spec.js220
-rw-r--r--spec/frontend/vue_mr_widget/deployment/deployment_mock_data.js45
-rw-r--r--spec/frontend/vue_mr_widget/deployment/deployment_spec.js69
-rw-r--r--spec/frontend/vue_mr_widget/deployment/deployment_view_button_spec.js2
-rw-r--r--spec/frontend/vue_mr_widget/mock_data.js319
-rw-r--r--spec/frontend/vue_mr_widget/mr_widget_options_spec.js858
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/expand_button_spec.js.snap8
-rw-r--r--spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js11
-rw-r--r--spec/frontend/vue_shared/components/blob_viewers/simple_viewer_spec.js1
-rw-r--r--spec/frontend/vue_shared/components/changed_file_icon_spec.js10
-rw-r--r--spec/frontend/vue_shared/components/confirm_modal_spec.js120
-rw-r--r--spec/frontend/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js194
-rw-r--r--spec/frontend/vue_shared/components/gl_mentions_spec.js34
-rw-r--r--spec/frontend/vue_shared/components/issue/related_issuable_mock_data.js121
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_title_spec.js31
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_value_spec.js61
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_button_spec.js55
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_create_view_spec.js223
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js265
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_spec.js54
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_title_spec.js61
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_value_spec.js84
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js127
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/mock_data.js66
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js276
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/getters_spec.js31
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js172
-rw-r--r--spec/frontend/vue_shared/components/user_popover/user_popover_spec.js107
-rw-r--r--spec/frontend/wikis_spec.js27
-rw-r--r--spec/graphql/features/authorization_spec.rb3
-rw-r--r--spec/graphql/gitlab_schema_spec.rb23
-rw-r--r--spec/graphql/mutations/concerns/mutations/resolves_group_spec.rb4
-rw-r--r--spec/graphql/mutations/concerns/mutations/resolves_issuable_spec.rb72
-rw-r--r--spec/graphql/mutations/concerns/mutations/resolves_project_spec.rb4
-rw-r--r--spec/graphql/mutations/issues/set_confidential_spec.rb2
-rw-r--r--spec/graphql/mutations/issues/set_due_date_spec.rb2
-rw-r--r--spec/graphql/mutations/issues/update_spec.rb53
-rw-r--r--spec/graphql/mutations/merge_requests/set_assignees_spec.rb2
-rw-r--r--spec/graphql/mutations/merge_requests/set_labels_spec.rb2
-rw-r--r--spec/graphql/mutations/merge_requests/set_locked_spec.rb2
-rw-r--r--spec/graphql/mutations/merge_requests/set_milestone_spec.rb2
-rw-r--r--spec/graphql/mutations/merge_requests/set_subscription_spec.rb2
-rw-r--r--spec/graphql/mutations/merge_requests/set_wip_spec.rb2
-rw-r--r--spec/graphql/mutations/todos/mark_all_done_spec.rb2
-rw-r--r--spec/graphql/mutations/todos/mark_done_spec.rb2
-rw-r--r--spec/graphql/mutations/todos/restore_many_spec.rb2
-rw-r--r--spec/graphql/mutations/todos/restore_spec.rb2
-rw-r--r--spec/graphql/resolvers/boards_resolver_spec.rb15
-rw-r--r--spec/graphql/resolvers/concerns/resolves_pipelines_spec.rb10
-rw-r--r--spec/graphql/resolvers/group_resolver_spec.rb4
-rw-r--r--spec/graphql/resolvers/issues_resolver_spec.rb32
-rw-r--r--spec/graphql/resolvers/merge_request_pipelines_resolver_spec.rb8
-rw-r--r--spec/graphql/resolvers/merge_requests_resolver_spec.rb13
-rw-r--r--spec/graphql/resolvers/project_pipelines_resolver_spec.rb6
-rw-r--r--spec/graphql/resolvers/project_resolver_spec.rb7
-rw-r--r--spec/graphql/resolvers/projects/snippets_resolver_spec.rb10
-rw-r--r--spec/graphql/types/base_field_spec.rb23
-rw-r--r--spec/graphql/types/board_type_spec.rb15
-rw-r--r--spec/graphql/types/group_type_spec.rb10
-rw-r--r--spec/graphql/types/project_type_spec.rb7
-rw-r--r--spec/graphql/types/snippet_type_spec.rb43
-rw-r--r--spec/helpers/auth_helper_spec.rb32
-rw-r--r--spec/helpers/auto_devops_helper_spec.rb4
-rw-r--r--spec/helpers/award_emoji_helper_spec.rb2
-rw-r--r--spec/helpers/blob_helper_spec.rb91
-rw-r--r--spec/helpers/boards_helper_spec.rb4
-rw-r--r--spec/helpers/broadcast_messages_helper_spec.rb7
-rw-r--r--spec/helpers/environments_helper_spec.rb6
-rw-r--r--spec/helpers/events_helper_spec.rb2
-rw-r--r--spec/helpers/form_helper_spec.rb19
-rw-r--r--spec/helpers/issuables_helper_spec.rb4
-rw-r--r--spec/helpers/issues_helper_spec.rb2
-rw-r--r--spec/helpers/labels_helper_spec.rb34
-rw-r--r--spec/helpers/markup_helper_spec.rb37
-rw-r--r--spec/helpers/notes_helper_spec.rb2
-rw-r--r--spec/helpers/notifications_helper_spec.rb1
-rw-r--r--spec/helpers/projects/error_tracking_helper_spec.rb4
-rw-r--r--spec/helpers/projects_helper_spec.rb52
-rw-r--r--spec/helpers/releases_helper_spec.rb21
-rw-r--r--spec/helpers/submodule_helper_spec.rb47
-rw-r--r--spec/helpers/users_helper_spec.rb38
-rw-r--r--spec/helpers/visibility_level_helper_spec.rb2
-rw-r--r--spec/initializers/100_patch_omniauth_saml_spec.rb26
-rw-r--r--spec/initializers/lograge_spec.rb2
-rw-r--r--spec/javascripts/badges/components/badge_form_spec.js192
-rw-r--r--spec/javascripts/badges/components/badge_list_row_spec.js106
-rw-r--r--spec/javascripts/badges/components/badge_list_spec.js91
-rw-r--r--spec/javascripts/badges/components/badge_settings_spec.js118
-rw-r--r--spec/javascripts/badges/components/badge_spec.js150
-rw-r--r--spec/javascripts/badges/store/actions_spec.js618
-rw-r--r--spec/javascripts/behaviors/quick_submit_spec.js143
-rw-r--r--spec/javascripts/blob/balsamiq/balsamiq_viewer_browser_spec.js59
-rw-r--r--spec/javascripts/blob/balsamiq/balsamiq_viewer_integration_spec.js57
-rw-r--r--spec/javascripts/blob/balsamiq/balsamiq_viewer_spec.js351
-rw-r--r--spec/javascripts/blob/blob_file_dropzone_spec.js39
-rw-r--r--spec/javascripts/blob/notebook/index_spec.js130
-rw-r--r--spec/javascripts/blob/pdf/index_spec.js72
-rw-r--r--spec/javascripts/blob/sketch/index_spec.js120
-rw-r--r--spec/javascripts/blob/viewer/index_spec.js180
-rw-r--r--spec/javascripts/blob_edit/blob_bundle_spec.js30
-rw-r--r--spec/javascripts/boards/board_card_spec.js8
-rw-r--r--spec/javascripts/boards/components/boards_selector_spec.js203
-rw-r--r--spec/javascripts/boards/list_spec.js17
-rw-r--r--spec/javascripts/collapsed_sidebar_todo_spec.js6
-rw-r--r--spec/javascripts/create_cluster/.eslintrc.yml3
-rw-r--r--spec/javascripts/create_cluster/gke_cluster/components/gke_zone_dropdown_spec.js94
-rw-r--r--spec/javascripts/create_cluster/gke_cluster/helpers.js49
-rw-r--r--spec/javascripts/create_cluster/gke_cluster/mock_data.js75
-rw-r--r--spec/javascripts/create_cluster/gke_cluster/stores/actions_spec.js139
-rw-r--r--spec/javascripts/diffs/components/app_spec.js718
-rw-r--r--spec/javascripts/diffs/components/commit_item_spec.js166
-rw-r--r--spec/javascripts/diffs/components/compare_versions_dropdown_spec.js160
-rw-r--r--spec/javascripts/diffs/components/diff_expansion_cell_spec.js203
-rw-r--r--spec/javascripts/diffs/components/diff_file_spec.js7
-rw-r--r--spec/javascripts/diffs/components/inline_diff_table_row_spec.js61
-rw-r--r--spec/javascripts/diffs/components/parallel_diff_table_row_spec.js62
-rw-r--r--spec/javascripts/diffs/components/tree_list_spec.js126
-rw-r--r--spec/javascripts/diffs/create_diffs_store.js16
-rw-r--r--spec/javascripts/diffs/mock_data/diff_discussions.js12
-rw-r--r--spec/javascripts/diffs/store/actions_spec.js43
-rw-r--r--spec/javascripts/diffs/store/getters_spec.js30
-rw-r--r--spec/javascripts/diffs/store/mutations_spec.js14
-rw-r--r--spec/javascripts/diffs/store/utils_spec.js124
-rw-r--r--spec/javascripts/dirty_submit/dirty_submit_form_spec.js6
-rw-r--r--spec/javascripts/editor/editor_lite_spec.js49
-rw-r--r--spec/javascripts/environments/environments_app_spec.js279
-rw-r--r--spec/javascripts/environments/mock_data.js66
-rw-r--r--spec/javascripts/filtered_search/visual_token_value_spec.js4
-rw-r--r--spec/javascripts/frequent_items/components/frequent_items_list_item_spec.js94
-rw-r--r--spec/javascripts/frequent_items/components/frequent_items_list_spec.js90
-rw-r--r--spec/javascripts/groups/mock_data.js3
-rw-r--r--spec/javascripts/helpers/tracking_helper.js28
-rw-r--r--spec/javascripts/ide/components/commit_sidebar/form_spec.js2
-rw-r--r--spec/javascripts/ide/components/file_row_extra_spec.js30
-rw-r--r--spec/javascripts/ide/components/repo_commit_section_spec.js14
-rw-r--r--spec/javascripts/issue_show/components/app_spec.js14
-rw-r--r--spec/javascripts/labels_issue_sidebar_spec.js4
-rw-r--r--spec/javascripts/lib/utils/browser_spec.js175
-rw-r--r--spec/javascripts/lib/utils/common_utils_spec.js981
-rw-r--r--spec/javascripts/lib/utils/mock_data.js9
-rw-r--r--spec/javascripts/monitoring/components/dashboard_resize_spec.js2
-rw-r--r--spec/javascripts/notes/components/discussion_counter_spec.js90
-rw-r--r--spec/javascripts/notes/components/noteable_note_spec.js4
-rw-r--r--spec/javascripts/notes/helpers.js13
-rw-r--r--spec/javascripts/notes/stores/actions_spec.js911
-rw-r--r--spec/javascripts/pdf/index_spec.js3
-rw-r--r--spec/javascripts/pdf/page_spec.js4
-rw-r--r--spec/javascripts/releases/components/app_index_spec.js133
-rw-r--r--spec/javascripts/releases/stores/modules/list/actions_spec.js6
-rw-r--r--spec/javascripts/reports/components/grouped_test_reports_app_spec.js20
-rw-r--r--spec/javascripts/search_spec.js41
-rw-r--r--spec/javascripts/sidebar/assignee_title_spec.js123
-rw-r--r--spec/javascripts/sidebar/lock/lock_issue_sidebar_spec.js13
-rw-r--r--spec/javascripts/sidebar/sidebar_mediator_spec.js10
-rw-r--r--spec/javascripts/smart_interval_spec.js4
-rw-r--r--spec/javascripts/vue_mr_widget/components/deployment_stop_button_spec.js95
-rw-r--r--spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js16
-rw-r--r--spec/javascripts/vue_mr_widget/components/states/mr_widget_wip_spec.js4
-rw-r--r--spec/javascripts/vue_mr_widget/mock_data.js319
-rw-r--r--spec/javascripts/vue_mr_widget/mr_widget_options_spec.js855
-rw-r--r--spec/javascripts/vue_mr_widget/stores/mr_widget_store_spec.js12
-rw-r--r--spec/javascripts/vue_shared/components/content_viewer/content_viewer_spec.js86
-rw-r--r--spec/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js176
-rw-r--r--spec/javascripts/vue_shared/components/issue/related_issuable_mock_data.js115
-rw-r--r--spec/lib/api/entities/release_spec.rb32
-rw-r--r--spec/lib/api/helpers/custom_validators_spec.rb91
-rw-r--r--spec/lib/backup/manager_spec.rb56
-rw-r--r--spec/lib/backup/repository_spec.rb6
-rw-r--r--spec/lib/banzai/filter/broadcast_message_placeholders_filter_spec.rb89
-rw-r--r--spec/lib/banzai/filter/inline_grafana_metrics_filter_spec.rb75
-rw-r--r--spec/lib/banzai/filter/inline_metrics_filter_spec.rb79
-rw-r--r--spec/lib/banzai/filter/inline_metrics_redactor_filter_spec.rb36
-rw-r--r--spec/lib/banzai/filter/label_reference_filter_spec.rb26
-rw-r--r--spec/lib/banzai/filter/repository_link_filter_spec.rb36
-rw-r--r--spec/lib/banzai/pipeline/gfm_pipeline_spec.rb2
-rw-r--r--spec/lib/event_filter_spec.rb21
-rw-r--r--spec/lib/feature_spec.rb12
-rw-r--r--spec/lib/gitlab/access/branch_protection_spec.rb17
-rw-r--r--spec/lib/gitlab/alerting/alert_spec.rb11
-rw-r--r--spec/lib/gitlab/auth/current_user_mode_spec.rb404
-rw-r--r--spec/lib/gitlab/auth/key_status_checker_spec.rb68
-rw-r--r--spec/lib/gitlab/auth/ldap/access_spec.rb10
-rw-r--r--spec/lib/gitlab/auth/ldap/adapter_spec.rb10
-rw-r--r--spec/lib/gitlab/auth/ldap/auth_hash_spec.rb4
-rw-r--r--spec/lib/gitlab/auth/ldap/authentication_spec.rb8
-rw-r--r--spec/lib/gitlab/auth/ldap/config_spec.rb2
-rw-r--r--spec/lib/gitlab/auth/ldap/dn_spec.rb50
-rw-r--r--spec/lib/gitlab/auth/ldap/person_spec.rb4
-rw-r--r--spec/lib/gitlab/auth/ldap/user_spec.rb2
-rw-r--r--spec/lib/gitlab/auth/o_auth/user_spec.rb36
-rw-r--r--spec/lib/gitlab/auth/saml/user_spec.rb16
-rw-r--r--spec/lib/gitlab/auth_spec.rb13
-rw-r--r--spec/lib/gitlab/authorized_keys_spec.rb4
-rw-r--r--spec/lib/gitlab/background_migration/add_merge_request_diff_commits_count_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/archive_legacy_traces_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/backfill_hashed_project_repositories_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/backfill_legacy_project_repositories_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/backfill_project_fullpath_in_repo_config_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/backfill_project_settings_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb154
-rw-r--r--spec/lib/gitlab/background_migration/digest_column_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/encrypt_columns_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/encrypt_runners_tokens_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/fix_cross_project_label_links_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/fix_projects_without_project_feature_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/fix_promoted_epics_discussion_ids_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/fix_user_namespace_names_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/fix_user_project_route_names_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb13
-rw-r--r--spec/lib/gitlab/background_migration/link_lfs_objects_projects_spec.rb113
-rw-r--r--spec/lib/gitlab/background_migration/migrate_build_stage_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/migrate_fingerprint_sha256_within_keys_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/migrate_legacy_artifacts_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/migrate_null_private_profile_to_false_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/migrate_pages_metadata_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/migrate_stage_index_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/populate_cluster_kubernetes_namespace_table_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/populate_merge_request_assignees_table_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/populate_untracked_uploads_dependencies/untracked_file_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/recalculate_project_authorizations_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/recalculate_project_authorizations_with_min_max_user_id_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/remove_restricted_todos_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/reset_merge_status_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/schedule_calculate_wiki_sizes_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/set_confidential_note_events_on_services_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/set_confidential_note_events_on_webhooks_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/update_existing_subgroup_to_match_visibility_level_of_parent_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb109
-rw-r--r--spec/lib/gitlab/bare_repository_import/repository_spec.rb9
-rw-r--r--spec/lib/gitlab/bitbucket_import/importer_spec.rb13
-rw-r--r--spec/lib/gitlab/cache/import/caching_spec.rb119
-rw-r--r--spec/lib/gitlab/checks/branch_check_spec.rb20
-rw-r--r--spec/lib/gitlab/checks/diff_check_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/force_push_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/lfs_check_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/project_created_spec.rb29
-rw-r--r--spec/lib/gitlab/checks/project_moved_spec.rb103
-rw-r--r--spec/lib/gitlab/checks/push_check_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/push_file_count_check_spec.rb53
-rw-r--r--spec/lib/gitlab/checks/snippet_check_spec.rb43
-rw-r--r--spec/lib/gitlab/checks/tag_check_spec.rb8
-rw-r--r--spec/lib/gitlab/ci/artifact_file_reader_spec.rb100
-rw-r--r--spec/lib/gitlab/ci/build/policy/changes_spec.rb5
-rw-r--r--spec/lib/gitlab/ci/build/policy/kubernetes_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/build/policy/variables_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config/entry/bridge_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config/entry/inherit/default_spec.rb42
-rw-r--r--spec/lib/gitlab/ci/config/entry/inherit/variables_spec.rb42
-rw-r--r--spec/lib/gitlab/ci/config/entry/job_spec.rb16
-rw-r--r--spec/lib/gitlab/ci/config/entry/jobs_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/config/entry/processable_spec.rb381
-rw-r--r--spec/lib/gitlab/ci/config/entry/reports_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config/entry/root_spec.rb17
-rw-r--r--spec/lib/gitlab/ci/config/external/file/artifact_spec.rb167
-rw-r--r--spec/lib/gitlab/ci/config/external/file/local_spec.rb23
-rw-r--r--spec/lib/gitlab/ci/config/external/file/project_spec.rb24
-rw-r--r--spec/lib/gitlab/ci/config/external/file/template_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/config/external/mapper_spec.rb5
-rw-r--r--spec/lib/gitlab/ci/config/external/processor_spec.rb7
-rw-r--r--spec/lib/gitlab/ci/config_spec.rb110
-rw-r--r--spec/lib/gitlab/ci/parsers/coverage/cobertura_spec.rb176
-rw-r--r--spec/lib/gitlab/ci/parsers/test/junit_spec.rb69
-rw-r--r--spec/lib/gitlab/ci/parsers_spec.rb10
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/build/associations_spec.rb62
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/build_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/command_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/create_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb5
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/skip_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/validate/abilities_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/validate/repository_spec.rb5
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/build_spec.rb88
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/deployment_spec.rb43
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb59
-rw-r--r--spec/lib/gitlab/ci/reports/coverage_reports_spec.rb66
-rw-r--r--spec/lib/gitlab/ci/reports/test_case_spec.rb12
-rw-r--r--spec/lib/gitlab/ci/status/composite_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/templates/templates_spec.rb50
-rw-r--r--spec/lib/gitlab/ci/trace/chunked_io_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/trace/stream_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb102
-rw-r--r--spec/lib/gitlab/config/entry/attributable_spec.rb2
-rw-r--r--spec/lib/gitlab/config_checker/puma_rugged_checker_spec.rb65
-rw-r--r--spec/lib/gitlab/danger/commit_linter_spec.rb47
-rw-r--r--spec/lib/gitlab/danger/helper_spec.rb6
-rw-r--r--spec/lib/gitlab/data_builder/wiki_page_spec.rb2
-rw-r--r--spec/lib/gitlab/database/connection_timer_spec.rb100
-rw-r--r--spec/lib/gitlab/database/migration_helpers_spec.rb157
-rw-r--r--spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb55
-rw-r--r--spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb28
-rw-r--r--spec/lib/gitlab/diff/highlight_cache_spec.rb40
-rw-r--r--spec/lib/gitlab/elasticsearch/logs_spec.rb89
-rw-r--r--spec/lib/gitlab/encoding_helper_spec.rb6
-rw-r--r--spec/lib/gitlab/experimentation_spec.rb43
-rw-r--r--spec/lib/gitlab/file_type_detection_spec.rb29
-rw-r--r--spec/lib/gitlab/git/blob_spec.rb62
-rw-r--r--spec/lib/gitlab/git/lfs_changes_spec.rb2
-rw-r--r--spec/lib/gitlab/git/merge_base_spec.rb2
-rw-r--r--spec/lib/gitlab/git/push_spec.rb3
-rw-r--r--spec/lib/gitlab/git/repository_spec.rb116
-rw-r--r--spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb37
-rw-r--r--spec/lib/gitlab/git_access_snippet_spec.rb212
-rw-r--r--spec/lib/gitlab/git_access_spec.rb82
-rw-r--r--spec/lib/gitlab/git_access_wiki_spec.rb4
-rw-r--r--spec/lib/gitlab/git_post_receive_spec.rb2
-rw-r--r--spec/lib/gitlab/gitaly_client/commit_service_spec.rb14
-rw-r--r--spec/lib/gitlab/gitaly_client/operation_service_spec.rb8
-rw-r--r--spec/lib/gitlab/gitaly_client/remote_service_spec.rb13
-rw-r--r--spec/lib/gitlab/gitaly_client/repository_service_spec.rb24
-rw-r--r--spec/lib/gitlab/github_import/caching_spec.rb119
-rw-r--r--spec/lib/gitlab/github_import/importer/repository_importer_spec.rb17
-rw-r--r--spec/lib/gitlab/github_import/issuable_finder_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/label_finder_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/milestone_finder_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/page_counter_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/parallel_scheduling_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/user_finder_spec.rb22
-rw-r--r--spec/lib/gitlab/github_import_spec.rb2
-rw-r--r--spec/lib/gitlab/gl_repository/repo_type_spec.rb78
-rw-r--r--spec/lib/gitlab/gl_repository_spec.rb9
-rw-r--r--spec/lib/gitlab/global_id_spec.rb2
-rw-r--r--spec/lib/gitlab/grape_logging/formatters/lograge_with_timestamp_spec.rb49
-rw-r--r--spec/lib/gitlab/graphql/docs/renderer_spec.rb96
-rw-r--r--spec/lib/gitlab/graphql/loaders/batch_lfs_oid_loader_spec.rb2
-rw-r--r--spec/lib/gitlab/graphql/pagination/offset_active_record_relation_connection_spec.rb9
-rw-r--r--spec/lib/gitlab/graphql/timeout_spec.rb23
-rw-r--r--spec/lib/gitlab/hashed_storage/migrator_spec.rb4
-rw-r--r--spec/lib/gitlab/hook_data/issuable_builder_spec.rb2
-rw-r--r--spec/lib/gitlab/hook_data/issue_builder_spec.rb4
-rw-r--r--spec/lib/gitlab/hook_data/merge_request_builder_spec.rb2
-rw-r--r--spec/lib/gitlab/import/merge_request_helpers_spec.rb4
-rw-r--r--spec/lib/gitlab/import_export/after_export_strategies/base_after_export_strategy_spec.rb6
-rw-r--r--spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb6
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml14
-rw-r--r--spec/lib/gitlab/import_export/base/object_builder_spec.rb53
-rw-r--r--spec/lib/gitlab/import_export/base/relation_factory_spec.rb152
-rw-r--r--spec/lib/gitlab/import_export/base_object_builder_spec.rb53
-rw-r--r--spec/lib/gitlab/import_export/base_relation_factory_spec.rb145
-rw-r--r--spec/lib/gitlab/import_export/error_spec.rb31
-rw-r--r--spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb10
-rw-r--r--spec/lib/gitlab/import_export/fork_spec.rb4
-rw-r--r--spec/lib/gitlab/import_export/group/object_builder_spec.rb66
-rw-r--r--spec/lib/gitlab/import_export/group/relation_factory_spec.rb120
-rw-r--r--spec/lib/gitlab/import_export/group/tree_restorer_spec.rb153
-rw-r--r--spec/lib/gitlab/import_export/group/tree_saver_spec.rb202
-rw-r--r--spec/lib/gitlab/import_export/group_object_builder_spec.rb66
-rw-r--r--spec/lib/gitlab/import_export/group_project_object_builder_spec.rb153
-rw-r--r--spec/lib/gitlab/import_export/group_relation_factory_spec.rb120
-rw-r--r--spec/lib/gitlab/import_export/group_tree_restorer_spec.rb153
-rw-r--r--spec/lib/gitlab/import_export/group_tree_saver_spec.rb202
-rw-r--r--spec/lib/gitlab/import_export/import_export_spec.rb8
-rw-r--r--spec/lib/gitlab/import_export/import_test_coverage_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/importer_spec.rb5
-rw-r--r--spec/lib/gitlab/import_export/json/legacy_reader_spec.rb149
-rw-r--r--spec/lib/gitlab/import_export/json/legacy_writer_spec.rb79
-rw-r--r--spec/lib/gitlab/import_export/legacy_relation_tree_saver_spec.rb42
-rw-r--r--spec/lib/gitlab/import_export/members_mapper_spec.rb294
-rw-r--r--spec/lib/gitlab/import_export/merge_request_parser_spec.rb4
-rw-r--r--spec/lib/gitlab/import_export/project/export_task_spec.rb69
-rw-r--r--spec/lib/gitlab/import_export/project/import_task_spec.rb96
-rw-r--r--spec/lib/gitlab/import_export/project/legacy_tree_saver_spec.rb397
-rw-r--r--spec/lib/gitlab/import_export/project/object_builder_spec.rb153
-rw-r--r--spec/lib/gitlab/import_export/project/relation_factory_spec.rb326
-rw-r--r--spec/lib/gitlab/import_export/project/tree_restorer_spec.rb896
-rw-r--r--spec/lib/gitlab/import_export/project/tree_saver_spec.rb346
-rw-r--r--spec/lib/gitlab/import_export/project_relation_factory_spec.rb328
-rw-r--r--spec/lib/gitlab/import_export/project_tree_loader_spec.rb49
-rw-r--r--spec/lib/gitlab/import_export/project_tree_restorer_spec.rb844
-rw-r--r--spec/lib/gitlab/import_export/project_tree_saver_spec.rb397
-rw-r--r--spec/lib/gitlab/import_export/relation_rename_service_spec.rb122
-rw-r--r--spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb27
-rw-r--r--spec/lib/gitlab/import_export/relation_tree_saver_spec.rb42
-rw-r--r--spec/lib/gitlab/import_export/repo_saver_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/safe_model_attributes.yml10
-rw-r--r--spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb70
-rw-r--r--spec/lib/gitlab/import_export/snippet_repo_saver_spec.rb48
-rw-r--r--spec/lib/gitlab/import_export/snippets_repo_restorer_spec.rb55
-rw-r--r--spec/lib/gitlab/import_export/snippets_repo_saver_spec.rb56
-rw-r--r--spec/lib/gitlab/import_export/wiki_repo_saver_spec.rb4
-rw-r--r--spec/lib/gitlab/incoming_email_spec.rb11
-rw-r--r--spec/lib/gitlab/job_waiter_spec.rb35
-rw-r--r--spec/lib/gitlab/json_cache_spec.rb3
-rw-r--r--spec/lib/gitlab/kubernetes/helm/api_spec.rb2
-rw-r--r--spec/lib/gitlab/kubernetes/helm/pod_spec.rb2
-rw-r--r--spec/lib/gitlab/kubernetes/namespace_spec.rb8
-rw-r--r--spec/lib/gitlab/language_detection_spec.rb6
-rw-r--r--spec/lib/gitlab/legacy_github_import/importer_spec.rb8
-rw-r--r--spec/lib/gitlab/lograge/custom_options_spec.rb50
-rw-r--r--spec/lib/gitlab/metrics/dashboard/finder_spec.rb6
-rw-r--r--spec/lib/gitlab/metrics/dashboard/stages/grafana_formatter_spec.rb82
-rw-r--r--spec/lib/gitlab/middleware/go_spec.rb11
-rw-r--r--spec/lib/gitlab/omniauth_logging/json_formatter_spec.rb12
-rw-r--r--spec/lib/gitlab/path_regex_spec.rb33
-rw-r--r--spec/lib/gitlab/phabricator_import/cache/map_spec.rb2
-rw-r--r--spec/lib/gitlab/phabricator_import/issues/task_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/profiler_spec.rb22
-rw-r--r--spec/lib/gitlab/project_authorizations_spec.rb72
-rw-r--r--spec/lib/gitlab/project_template_spec.rb3
-rw-r--r--spec/lib/gitlab/prometheus/query_variables_spec.rb4
-rw-r--r--spec/lib/gitlab/quick_actions/extractor_spec.rb43
-rw-r--r--spec/lib/gitlab/quick_actions/substitution_definition_spec.rb2
-rw-r--r--spec/lib/gitlab/rate_limit_helpers_spec.rb50
-rw-r--r--spec/lib/gitlab/reactive_cache_set_cache_spec.rb74
-rw-r--r--spec/lib/gitlab/reference_counter_spec.rb62
-rw-r--r--spec/lib/gitlab/reference_extractor_spec.rb18
-rw-r--r--spec/lib/gitlab/repo_path_spec.rb93
-rw-r--r--spec/lib/gitlab/repository_cache_adapter_spec.rb3
-rw-r--r--spec/lib/gitlab/repository_cache_spec.rb43
-rw-r--r--spec/lib/gitlab/repository_set_cache_spec.rb91
-rw-r--r--spec/lib/gitlab/sanitizers/exif_spec.rb6
-rw-r--r--spec/lib/gitlab/serverless/domain_spec.rb22
-rw-r--r--spec/lib/gitlab/serverless/function_uri_spec.rb81
-rw-r--r--spec/lib/gitlab/serverless/service_spec.rb10
-rw-r--r--spec/lib/gitlab/shell_spec.rb314
-rw-r--r--spec/lib/gitlab/sidekiq_cluster/cli_spec.rb282
-rw-r--r--spec/lib/gitlab/sidekiq_cluster_spec.rb196
-rw-r--r--spec/lib/gitlab/sidekiq_config/cli_methods_spec.rb48
-rw-r--r--spec/lib/gitlab/sidekiq_config/worker_spec.rb17
-rw-r--r--spec/lib/gitlab/sidekiq_logging/deduplication_logger_spec.rb33
-rw-r--r--spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb19
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb29
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/client_spec.rb34
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb157
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/server_spec.rb52
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing_spec.rb72
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies_spec.rb15
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb33
-rw-r--r--spec/lib/gitlab/sidekiq_middleware_spec.rb26
-rw-r--r--spec/lib/gitlab/sidekiq_queue_spec.rb87
-rw-r--r--spec/lib/gitlab/slash_commands/issue_move_spec.rb10
-rw-r--r--spec/lib/gitlab/slash_commands/presenters/issue_move_spec.rb10
-rw-r--r--spec/lib/gitlab/template/finders/global_template_finder_spec.rb84
-rw-r--r--spec/lib/gitlab/template/finders/repo_template_finders_spec.rb3
-rw-r--r--spec/lib/gitlab/tracing_spec.rb69
-rw-r--r--spec/lib/gitlab/url_blocker_spec.rb64
-rw-r--r--spec/lib/gitlab/url_blockers/domain_whitelist_entry_spec.rb58
-rw-r--r--spec/lib/gitlab/url_blockers/ip_whitelist_entry_spec.rb75
-rw-r--r--spec/lib/gitlab/url_blockers/url_whitelist_spec.rb106
-rw-r--r--spec/lib/gitlab/url_builder_spec.rb18
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb43
-rw-r--r--spec/lib/gitlab/user_access_snippet_spec.rb95
-rw-r--r--spec/lib/gitlab/user_access_spec.rb47
-rw-r--r--spec/lib/gitlab/utils/json_size_estimator_spec.rb39
-rw-r--r--spec/lib/gitlab/utils/log_limited_array_spec.rb22
-rw-r--r--spec/lib/gitlab/utils_spec.rb8
-rw-r--r--spec/lib/gitlab/workhorse_spec.rb2
-rw-r--r--spec/lib/gitlab/x509/commit_spec.rb16
-rw-r--r--spec/lib/gitlab_danger_spec.rb2
-rw-r--r--spec/lib/grafana/time_window_spec.rb115
-rw-r--r--spec/lib/grafana/validator_spec.rb119
-rw-r--r--spec/lib/omni_auth/strategies/saml_spec.rb26
-rw-r--r--spec/lib/quality/kubernetes_client_spec.rb6
-rw-r--r--spec/lib/quality/test_level_spec.rb46
-rw-r--r--spec/lib/sentry/client/issue_spec.rb34
-rw-r--r--spec/mailers/emails/pages_domains_spec.rb4
-rw-r--r--spec/mailers/emails/pipelines_spec.rb15
-rw-r--r--spec/mailers/notify_spec.rb22
-rw-r--r--spec/migrations/20190924152703_migrate_issue_trackers_data_spec.rb2
-rw-r--r--spec/migrations/20191015154408_drop_merge_requests_require_code_owner_approval_from_projects_spec.rb2
-rw-r--r--spec/migrations/20191125114345_add_admin_mode_protected_path_spec.rb2
-rw-r--r--spec/migrations/20191204114127_delete_legacy_triggers_spec.rb2
-rw-r--r--spec/migrations/20200107172020_add_timestamp_softwarelicensespolicy_spec.rb2
-rw-r--r--spec/migrations/20200122123016_backfill_project_settings_spec.rb2
-rw-r--r--spec/migrations/20200123155929_remove_invalid_jira_data_spec.rb2
-rw-r--r--spec/migrations/20200127090233_remove_invalid_issue_tracker_data_spec.rb2
-rw-r--r--spec/migrations/20200130145430_reschedule_migrate_issue_trackers_data_spec.rb2
-rw-r--r--spec/migrations/active_record/schedule_set_confidential_note_events_on_services_spec.rb2
-rw-r--r--spec/migrations/active_record/schema_spec.rb2
-rw-r--r--spec/migrations/add_default_and_free_plans_spec.rb2
-rw-r--r--spec/migrations/add_deploy_token_type_to_deploy_tokens_spec.rb2
-rw-r--r--spec/migrations/add_foreign_key_from_notification_settings_to_users_spec.rb2
-rw-r--r--spec/migrations/add_foreign_keys_to_todos_spec.rb2
-rw-r--r--spec/migrations/add_not_null_constraint_to_project_mirror_data_foreign_key_spec.rb2
-rw-r--r--spec/migrations/add_pages_access_level_to_project_feature_spec.rb2
-rw-r--r--spec/migrations/add_pipeline_build_foreign_key_spec.rb2
-rw-r--r--spec/migrations/add_temporary_partial_index_on_project_id_to_services_spec.rb2
-rw-r--r--spec/migrations/add_unique_constraint_to_approvals_user_id_and_merge_request_id_spec.rb2
-rw-r--r--spec/migrations/add_unique_constraint_to_project_features_project_id_spec.rb2
-rw-r--r--spec/migrations/assure_commits_count_for_merge_request_diff_spec.rb2
-rw-r--r--spec/migrations/backfill_and_add_not_null_constraint_to_released_at_column_on_releases_table_spec.rb2
-rw-r--r--spec/migrations/backfill_operations_feature_flags_active_spec.rb2
-rw-r--r--spec/migrations/backfill_operations_feature_flags_iid_spec.rb2
-rw-r--r--spec/migrations/backfill_releases_name_with_tag_name_spec.rb2
-rw-r--r--spec/migrations/backfill_releases_table_updated_at_and_add_not_null_constraints_to_timestamps_spec.rb2
-rw-r--r--spec/migrations/backfill_store_project_full_path_in_repo_spec.rb2
-rw-r--r--spec/migrations/backport_enterprise_schema_spec.rb2
-rw-r--r--spec/migrations/change_default_value_for_dsa_key_restriction_spec.rb2
-rw-r--r--spec/migrations/change_outbound_local_requests_whitelist_default_spec.rb2
-rw-r--r--spec/migrations/change_packages_size_defaults_in_project_statistics_spec.rb2
-rw-r--r--spec/migrations/clean_grafana_url_spec.rb2
-rw-r--r--spec/migrations/clean_up_noteable_id_for_notes_on_commits_spec.rb2
-rw-r--r--spec/migrations/cleanup_build_stage_migration_spec.rb2
-rw-r--r--spec/migrations/cleanup_empty_commit_user_mentions_spec.rb36
-rw-r--r--spec/migrations/cleanup_environments_external_url_spec.rb2
-rw-r--r--spec/migrations/cleanup_legacy_artifact_migration_spec.rb2
-rw-r--r--spec/migrations/cleanup_optimistic_locking_nulls_spec.rb53
-rw-r--r--spec/migrations/cleanup_stages_position_migration_spec.rb2
-rw-r--r--spec/migrations/create_environment_for_self_monitoring_project_spec.rb68
-rw-r--r--spec/migrations/create_missing_namespace_for_internal_users_spec.rb2
-rw-r--r--spec/migrations/delete_internal_ids_where_feature_flags_usage_spec.rb2
-rw-r--r--spec/migrations/delete_template_project_services_spec.rb21
-rw-r--r--spec/migrations/delete_template_services_duplicated_by_type_spec.rb24
-rw-r--r--spec/migrations/drop_activate_prometheus_services_background_jobs_spec.rb89
-rw-r--r--spec/migrations/drop_background_migration_jobs_spec.rb2
-rw-r--r--spec/migrations/drop_duplicate_protected_tags_spec.rb2
-rw-r--r--spec/migrations/drop_project_ci_cd_settings_merge_trains_enabled_spec.rb2
-rw-r--r--spec/migrations/encrypt_deploy_tokens_tokens_spec.rb2
-rw-r--r--spec/migrations/encrypt_feature_flags_clients_tokens_spec.rb2
-rw-r--r--spec/migrations/encrypt_plaintext_attributes_on_application_settings_spec.rb2
-rw-r--r--spec/migrations/enqueue_reset_merge_status_second_run_spec.rb2
-rw-r--r--spec/migrations/enqueue_reset_merge_status_spec.rb2
-rw-r--r--spec/migrations/enqueue_verify_pages_domain_workers_spec.rb2
-rw-r--r--spec/migrations/fill_empty_finished_at_in_deployments_spec.rb2
-rw-r--r--spec/migrations/fill_file_store_spec.rb2
-rw-r--r--spec/migrations/fill_productivity_analytics_start_date_spec.rb2
-rw-r--r--spec/migrations/fix_max_pages_size_spec.rb2
-rw-r--r--spec/migrations/fix_null_type_labels_spec.rb2
-rw-r--r--spec/migrations/fix_pool_repository_source_project_id_spec.rb2
-rw-r--r--spec/migrations/fix_projects_without_project_feature_spec.rb2
-rw-r--r--spec/migrations/fix_wrong_pages_access_level_spec.rb2
-rw-r--r--spec/migrations/generate_lets_encrypt_private_key_spec.rb2
-rw-r--r--spec/migrations/generate_missing_routes_spec.rb2
-rw-r--r--spec/migrations/import_common_metrics_spec.rb2
-rw-r--r--spec/migrations/insert_project_hooks_plan_limits_spec.rb2
-rw-r--r--spec/migrations/migrate_auto_dev_ops_domain_to_cluster_domain_spec.rb2
-rw-r--r--spec/migrations/migrate_code_owner_approval_status_to_protected_branches_in_batches_spec.rb2
-rw-r--r--spec/migrations/migrate_commit_notes_mentions_to_db_spec.rb37
-rw-r--r--spec/migrations/migrate_discussion_id_on_promoted_epics_spec.rb2
-rw-r--r--spec/migrations/migrate_forbidden_redirect_uris_spec.rb2
-rw-r--r--spec/migrations/migrate_k8s_service_integration_spec.rb2
-rw-r--r--spec/migrations/migrate_legacy_artifacts_to_job_artifacts_spec.rb2
-rw-r--r--spec/migrations/migrate_legacy_managed_clusters_to_unmanaged_spec.rb2
-rw-r--r--spec/migrations/migrate_managed_clusters_with_no_token_to_unmanaged_spec.rb2
-rw-r--r--spec/migrations/migrate_merge_request_mentions_to_db_spec.rb31
-rw-r--r--spec/migrations/migrate_null_wiki_access_levels_spec.rb2
-rw-r--r--spec/migrations/migrate_ops_feature_flags_scopes_target_user_ids_spec.rb2
-rw-r--r--spec/migrations/move_limits_from_plans_spec.rb2
-rw-r--r--spec/migrations/nullify_users_role_spec.rb2
-rw-r--r--spec/migrations/populate_project_statistics_packages_size_spec.rb2
-rw-r--r--spec/migrations/populate_rule_type_on_approval_merge_request_rules_spec.rb2
-rw-r--r--spec/migrations/remove_empty_extern_uid_auth0_identities_spec.rb2
-rw-r--r--spec/migrations/remove_empty_github_service_templates_spec.rb2
-rw-r--r--spec/migrations/remove_packages_deprecated_dependencies_spec.rb2
-rw-r--r--spec/migrations/remove_redundant_pipeline_stages_spec.rb2
-rw-r--r--spec/migrations/remove_security_dashboard_feature_flag_spec.rb53
-rw-r--r--spec/migrations/rename_security_dashboard_feature_flag_to_instance_security_dashboard_spec.rb53
-rw-r--r--spec/migrations/reschedule_builds_stages_migration_spec.rb2
-rw-r--r--spec/migrations/reschedule_commits_count_for_merge_request_diff_spec.rb2
-rw-r--r--spec/migrations/save_instance_administrators_group_id_spec.rb2
-rw-r--r--spec/migrations/schedule_digest_personal_access_tokens_spec.rb2
-rw-r--r--spec/migrations/schedule_fill_valid_time_for_pages_domain_certificates_spec.rb2
-rw-r--r--spec/migrations/schedule_link_lfs_objects_projects_spec.rb76
-rw-r--r--spec/migrations/schedule_migrate_security_scans_spec.rb68
-rw-r--r--spec/migrations/schedule_pages_metadata_migration_spec.rb2
-rw-r--r--spec/migrations/schedule_populate_merge_request_assignees_table_spec.rb2
-rw-r--r--spec/migrations/schedule_recalculate_project_authorizations_second_run_spec.rb2
-rw-r--r--spec/migrations/schedule_recalculate_project_authorizations_spec.rb2
-rw-r--r--spec/migrations/schedule_runners_token_encryption_spec.rb2
-rw-r--r--spec/migrations/schedule_set_confidential_note_events_on_webhooks_spec.rb2
-rw-r--r--spec/migrations/schedule_stages_index_migration_spec.rb2
-rw-r--r--spec/migrations/schedule_sync_issuables_state_id_spec.rb2
-rw-r--r--spec/migrations/schedule_sync_issuables_state_id_where_nil_spec.rb2
-rw-r--r--spec/migrations/schedule_to_archive_legacy_traces_spec.rb2
-rw-r--r--spec/migrations/schedule_update_existing_subgroup_to_match_visibility_level_of_parent_spec.rb2
-rw-r--r--spec/migrations/services_remove_temporary_index_on_project_id_spec.rb2
-rw-r--r--spec/migrations/set_issue_id_for_all_versions_spec.rb2
-rw-r--r--spec/migrations/steal_fill_store_upload_spec.rb2
-rw-r--r--spec/migrations/sync_issuables_state_id_spec.rb2
-rw-r--r--spec/migrations/truncate_user_fullname_spec.rb2
-rw-r--r--spec/migrations/update_application_setting_npm_package_requests_forwarding_default_spec.rb38
-rw-r--r--spec/migrations/update_fingerprint_sha256_within_keys_spec.rb2
-rw-r--r--spec/migrations/update_minimum_password_length_spec.rb2
-rw-r--r--spec/migrations/update_project_import_visibility_level_spec.rb2
-rw-r--r--spec/migrations/update_timestamp_softwarelicensespolicy_spec.rb2
-rw-r--r--spec/models/analytics/cycle_analytics/project_stage_spec.rb6
-rw-r--r--spec/models/application_setting_spec.rb57
-rw-r--r--spec/models/ci/build_spec.rb154
-rw-r--r--spec/models/ci/job_artifact_spec.rb31
-rw-r--r--spec/models/ci/pipeline_spec.rb148
-rw-r--r--spec/models/ci/processable_spec.rb24
-rw-r--r--spec/models/ci/ref_spec.rb11
-rw-r--r--spec/models/clusters/applications/cert_manager_spec.rb6
-rw-r--r--spec/models/clusters/applications/elastic_stack_spec.rb4
-rw-r--r--spec/models/clusters/applications/ingress_spec.rb6
-rw-r--r--spec/models/clusters/applications/knative_spec.rb57
-rw-r--r--spec/models/clusters/applications/prometheus_spec.rb77
-rw-r--r--spec/models/clusters/cluster_spec.rb104
-rw-r--r--spec/models/commit_range_spec.rb25
-rw-r--r--spec/models/commit_spec.rb106
-rw-r--r--spec/models/concerns/avatarable_spec.rb2
-rw-r--r--spec/models/concerns/blob_language_from_git_attributes_spec.rb13
-rw-r--r--spec/models/concerns/bulk_insert_safe_spec.rb144
-rw-r--r--spec/models/concerns/bulk_insertable_associations_spec.rb233
-rw-r--r--spec/models/concerns/cache_markdown_field_spec.rb20
-rw-r--r--spec/models/concerns/ci/has_ref_spec.rb79
-rw-r--r--spec/models/concerns/has_ref_spec.rb79
-rw-r--r--spec/models/concerns/issuable_spec.rb2
-rw-r--r--spec/models/concerns/milestoneish_spec.rb47
-rw-r--r--spec/models/concerns/reactive_caching_spec.rb37
-rw-r--r--spec/models/concerns/spammable_spec.rb40
-rw-r--r--spec/models/concerns/usage_statistics_spec.rb52
-rw-r--r--spec/models/deployment_spec.rb15
-rw-r--r--spec/models/environment_spec.rb63
-rw-r--r--spec/models/environment_status_spec.rb16
-rw-r--r--spec/models/error_tracking/project_error_tracking_setting_spec.rb21
-rw-r--r--spec/models/group_spec.rb10
-rw-r--r--spec/models/issue_spec.rb103
-rw-r--r--spec/models/key_spec.rb49
-rw-r--r--spec/models/label_link_spec.rb5
-rw-r--r--spec/models/lfs_object_spec.rb9
-rw-r--r--spec/models/merge_request/pipelines_spec.rb160
-rw-r--r--spec/models/merge_request_diff_commit_spec.rb5
-rw-r--r--spec/models/merge_request_diff_file_spec.rb5
-rw-r--r--spec/models/merge_request_spec.rb91
-rw-r--r--spec/models/milestone_note_spec.rb18
-rw-r--r--spec/models/milestone_spec.rb28
-rw-r--r--spec/models/namespace_spec.rb73
-rw-r--r--spec/models/note_spec.rb174
-rw-r--r--spec/models/notification_recipient_spec.rb42
-rw-r--r--spec/models/notification_setting_spec.rb3
-rw-r--r--spec/models/pages_domain_spec.rb9
-rw-r--r--spec/models/project_export_job_spec.rb19
-rw-r--r--spec/models/project_import_state_spec.rb3
-rw-r--r--spec/models/project_services/chat_message/push_message_spec.rb34
-rw-r--r--spec/models/project_services/chat_notification_service_spec.rb23
-rw-r--r--spec/models/project_services/gitlab_issue_tracker_service_spec.rb12
-rw-r--r--spec/models/project_services/jira_service_spec.rb30
-rw-r--r--spec/models/project_services/prometheus_service_spec.rb9
-rw-r--r--spec/models/project_services/youtrack_service_spec.rb4
-rw-r--r--spec/models/project_spec.rb301
-rw-r--r--spec/models/project_wiki_spec.rb20
-rw-r--r--spec/models/protected_branch_spec.rb48
-rw-r--r--spec/models/release_spec.rb1
-rw-r--r--spec/models/releases/link_spec.rb45
-rw-r--r--spec/models/repository_spec.rb41
-rw-r--r--spec/models/resource_label_event_spec.rb4
-rw-r--r--spec/models/resource_milestone_event_spec.rb55
-rw-r--r--spec/models/resource_weight_event_spec.rb7
-rw-r--r--spec/models/serverless/domain_cluster_spec.rb8
-rw-r--r--spec/models/serverless/domain_spec.rb97
-rw-r--r--spec/models/service_spec.rb56
-rw-r--r--spec/models/snippet_repository_spec.rb158
-rw-r--r--spec/models/snippet_spec.rb98
-rw-r--r--spec/models/upload_spec.rb30
-rw-r--r--spec/models/user_detail_spec.rb14
-rw-r--r--spec/models/user_highest_role_spec.rb13
-rw-r--r--spec/models/user_spec.rb141
-rw-r--r--spec/models/wiki_page_spec.rb77
-rw-r--r--spec/models/x509_certificate_spec.rb22
-rw-r--r--spec/policies/application_setting/term_policy_spec.rb2
-rw-r--r--spec/policies/ci/pipeline_schedule_policy_spec.rb6
-rw-r--r--spec/policies/note_policy_spec.rb107
-rw-r--r--spec/policies/project_policy_spec.rb14
-rw-r--r--spec/policies/project_snippet_policy_spec.rb48
-rw-r--r--spec/policies/resource_label_event_policy_spec.rb8
-rw-r--r--spec/presenters/issue_presenter_spec.rb4
-rw-r--r--spec/presenters/merge_request_presenter_spec.rb10
-rw-r--r--spec/presenters/project_hook_presenter_spec.rb (renamed from spec/presenters/hooks/project_hook_presenter_spec.rb)0
-rw-r--r--spec/presenters/projects/import_export/project_export_presenter_spec.rb92
-rw-r--r--spec/presenters/projects/prometheus/alert_presenter_spec.rb53
-rw-r--r--spec/presenters/service_hook_presenter_spec.rb (renamed from spec/presenters/hooks/service_hook_presenter_spec.rb)0
-rw-r--r--spec/presenters/snippet_blob_presenter_spec.rb91
-rw-r--r--spec/presenters/snippet_presenter_spec.rb20
-rw-r--r--spec/requests/api/access_requests_spec.rb28
-rw-r--r--spec/requests/api/admin/sidekiq_spec.rb65
-rw-r--r--spec/requests/api/api_guard/admin_mode_middleware_spec.rb35
-rw-r--r--spec/requests/api/appearance_spec.rb20
-rw-r--r--spec/requests/api/applications_spec.rb30
-rw-r--r--spec/requests/api/award_emoji_spec.rb50
-rw-r--r--spec/requests/api/badges_spec.rb38
-rw-r--r--spec/requests/api/boards_spec.rb6
-rw-r--r--spec/requests/api/branches_spec.rb46
-rw-r--r--spec/requests/api/broadcast_messages_spec.rb66
-rw-r--r--spec/requests/api/commit_statuses_spec.rb48
-rw-r--r--spec/requests/api/commits_spec.rb110
-rw-r--r--spec/requests/api/deploy_keys_spec.rb50
-rw-r--r--spec/requests/api/deploy_tokens_spec.rb312
-rw-r--r--spec/requests/api/deployments_spec.rb39
-rw-r--r--spec/requests/api/discussions_spec.rb2
-rw-r--r--spec/requests/api/doorkeeper_access_spec.rb8
-rw-r--r--spec/requests/api/environments_spec.rb46
-rw-r--r--spec/requests/api/error_tracking_spec.rb2
-rw-r--r--spec/requests/api/events_spec.rb24
-rw-r--r--spec/requests/api/features_spec.rb50
-rw-r--r--spec/requests/api/files_spec.rb134
-rw-r--r--spec/requests/api/graphql/boards/boards_query_spec.rb25
-rw-r--r--spec/requests/api/graphql/gitlab_schema_spec.rb10
-rw-r--r--spec/requests/api/graphql/group_query_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/admin/sidekiq_queues/delete_jobs_spec.rb71
-rw-r--r--spec/requests/api/graphql/mutations/snippets/create_spec.rb4
-rw-r--r--spec/requests/api/graphql/mutations/snippets/mark_as_spam_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/snippets/update_spec.rb8
-rw-r--r--spec/requests/api/graphql_spec.rb52
-rw-r--r--spec/requests/api/group_boards_spec.rb2
-rw-r--r--spec/requests/api/group_clusters_spec.rb40
-rw-r--r--spec/requests/api/group_export_spec.rb14
-rw-r--r--spec/requests/api/group_import_spec.rb18
-rw-r--r--spec/requests/api/group_labels_spec.rb66
-rw-r--r--spec/requests/api/group_variables_spec.rb47
-rw-r--r--spec/requests/api/groups_spec.rb238
-rw-r--r--spec/requests/api/helpers_spec.rb2
-rw-r--r--spec/requests/api/import_github_spec.rb4
-rw-r--r--spec/requests/api/internal/base_spec.rb324
-rw-r--r--spec/requests/api/internal/pages_spec.rb96
-rw-r--r--spec/requests/api/issues/put_projects_issues_spec.rb51
-rw-r--r--spec/requests/api/jobs_spec.rb112
-rw-r--r--spec/requests/api/keys_spec.rb25
-rw-r--r--spec/requests/api/labels_spec.rb88
-rw-r--r--spec/requests/api/lint_spec.rb8
-rw-r--r--spec/requests/api/lsif_data_spec.rb75
-rw-r--r--spec/requests/api/markdown_spec.rb14
-rw-r--r--spec/requests/api/members_spec.rb60
-rw-r--r--spec/requests/api/merge_request_diffs_spec.rb10
-rw-r--r--spec/requests/api/merge_requests_spec.rb286
-rw-r--r--spec/requests/api/namespaces_spec.rb22
-rw-r--r--spec/requests/api/notes_spec.rb18
-rw-r--r--spec/requests/api/notification_settings_spec.rb16
-rw-r--r--spec/requests/api/oauth_tokens_spec.rb6
-rw-r--r--spec/requests/api/pages_domains_spec.rb48
-rw-r--r--spec/requests/api/pipeline_schedules_spec.rb2
-rw-r--r--spec/requests/api/pipelines_spec.rb64
-rw-r--r--spec/requests/api/project_clusters_spec.rb40
-rw-r--r--spec/requests/api/project_container_repositories_spec.rb57
-rw-r--r--spec/requests/api/project_events_spec.rb18
-rw-r--r--spec/requests/api/project_export_spec.rb67
-rw-r--r--spec/requests/api/project_hooks_spec.rb38
-rw-r--r--spec/requests/api/project_import_spec.rb259
-rw-r--r--spec/requests/api/project_milestones_spec.rb18
-rw-r--r--spec/requests/api/project_snapshots_spec.rb8
-rw-r--r--spec/requests/api/project_snippets_spec.rb176
-rw-r--r--spec/requests/api/project_statistics_spec.rb8
-rw-r--r--spec/requests/api/project_templates_spec.rb38
-rw-r--r--spec/requests/api/projects_spec.rb550
-rw-r--r--spec/requests/api/protected_branches_spec.rb34
-rw-r--r--spec/requests/api/protected_tags_spec.rb24
-rw-r--r--spec/requests/api/release/links_spec.rb33
-rw-r--r--spec/requests/api/releases_spec.rb80
-rw-r--r--spec/requests/api/remote_mirrors_spec.rb73
-rw-r--r--spec/requests/api/repositories_spec.rb50
-rw-r--r--spec/requests/api/runner_spec.rb252
-rw-r--r--spec/requests/api/runners_spec.rb172
-rw-r--r--spec/requests/api/search_spec.rb42
-rw-r--r--spec/requests/api/services_spec.rb34
-rw-r--r--spec/requests/api/settings_spec.rb44
-rw-r--r--spec/requests/api/sidekiq_metrics_spec.rb8
-rw-r--r--spec/requests/api/snippets_spec.rb118
-rw-r--r--spec/requests/api/statistics_spec.rb6
-rw-r--r--spec/requests/api/submodules_spec.rb14
-rw-r--r--spec/requests/api/suggestions_spec.rb6
-rw-r--r--spec/requests/api/system_hooks_spec.rb22
-rw-r--r--spec/requests/api/tags_spec.rb30
-rw-r--r--spec/requests/api/task_completion_status_spec.rb2
-rw-r--r--spec/requests/api/templates_spec.rb16
-rw-r--r--spec/requests/api/todos_spec.rb12
-rw-r--r--spec/requests/api/triggers_spec.rb54
-rw-r--r--spec/requests/api/users_spec.rb482
-rw-r--r--spec/requests/api/variables_spec.rb44
-rw-r--r--spec/requests/api/version_spec.rb48
-rw-r--r--spec/requests/api/wikis_spec.rb32
-rw-r--r--spec/requests/git_http_spec.rb8
-rw-r--r--spec/requests/groups/milestones_controller_spec.rb46
-rw-r--r--spec/requests/groups/registry/repositories_controller_spec.rb2
-rw-r--r--spec/requests/lfs_http_spec.rb32
-rw-r--r--spec/requests/sessions_spec.rb17
-rw-r--r--spec/routing/project_routing_spec.rb6
-rw-r--r--spec/rubocop/cop/ban_catch_throw_spec.rb30
-rw-r--r--spec/rubocop/cop/migration/add_column_with_default_spec.rb62
-rw-r--r--spec/rubocop/cop/migration/schedule_async_spec.rb152
-rw-r--r--spec/rubocop/cop/migration/with_lock_retries_with_change_spec.rb43
-rw-r--r--spec/rubocop/cop/scalability/idempotent_worker_spec.rb38
-rw-r--r--spec/rubocop/migration_helpers_spec.rb56
-rw-r--r--spec/serializers/build_details_entity_spec.rb2
-rw-r--r--spec/serializers/cluster_application_entity_spec.rb18
-rw-r--r--spec/serializers/container_repository_entity_spec.rb12
-rw-r--r--spec/serializers/container_tag_entity_spec.rb12
-rw-r--r--spec/serializers/deployment_serializer_spec.rb5
-rw-r--r--spec/serializers/environment_entity_spec.rb20
-rw-r--r--spec/serializers/group_variable_entity_spec.rb2
-rw-r--r--spec/serializers/merge_request_serializer_spec.rb4
-rw-r--r--spec/serializers/merge_request_widget_entity_spec.rb32
-rw-r--r--spec/serializers/namespace_basic_entity_spec.rb2
-rw-r--r--spec/serializers/pipeline_details_entity_spec.rb2
-rw-r--r--spec/serializers/pipeline_entity_spec.rb6
-rw-r--r--spec/serializers/pipeline_serializer_spec.rb4
-rw-r--r--spec/serializers/project_import_entity_spec.rb2
-rw-r--r--spec/serializers/project_serializer_spec.rb2
-rw-r--r--spec/serializers/serverless/domain_entity_spec.rb19
-rw-r--r--spec/services/boards/issues/list_service_spec.rb20
-rw-r--r--spec/services/ci/create_cross_project_pipeline_service_spec.rb119
-rw-r--r--spec/services/ci/create_job_artifacts_service_spec.rb82
-rw-r--r--spec/services/ci/create_pipeline_service/custom_config_content_spec.rb57
-rw-r--r--spec/services/ci/create_pipeline_service_spec.rb1
-rw-r--r--spec/services/ci/find_exposed_artifacts_service_spec.rb42
-rw-r--r--spec/services/ci/parse_dotenv_artifact_service_spec.rb260
-rw-r--r--spec/services/ci/pipeline_bridge_status_service_spec.rb18
-rw-r--r--spec/services/ci/register_job_service_spec.rb26
-rw-r--r--spec/services/ci/retry_build_service_spec.rb30
-rw-r--r--spec/services/ci/update_ci_ref_status_service_spec.rb169
-rw-r--r--spec/services/clusters/applications/check_installation_progress_service_spec.rb2
-rw-r--r--spec/services/clusters/applications/check_uninstall_progress_service_spec.rb2
-rw-r--r--spec/services/clusters/applications/create_service_spec.rb20
-rw-r--r--spec/services/clusters/applications/install_service_spec.rb2
-rw-r--r--spec/services/clusters/applications/patch_service_spec.rb2
-rw-r--r--spec/services/clusters/applications/uninstall_service_spec.rb2
-rw-r--r--spec/services/clusters/applications/update_service_spec.rb21
-rw-r--r--spec/services/clusters/applications/upgrade_service_spec.rb2
-rw-r--r--spec/services/clusters/kubernetes/configure_istio_ingress_service_spec.rb32
-rw-r--r--spec/services/clusters/update_service_spec.rb2
-rw-r--r--spec/services/commits/cherry_pick_service_spec.rb12
-rw-r--r--spec/services/deploy_tokens/create_service_spec.rb63
-rw-r--r--spec/services/deployments/after_create_service_spec.rb22
-rw-r--r--spec/services/deployments/link_merge_requests_service_spec.rb59
-rw-r--r--spec/services/error_tracking/issue_details_service_spec.rb2
-rw-r--r--spec/services/error_tracking/issue_update_service_spec.rb6
-rw-r--r--spec/services/git/branch_push_service_spec.rb23
-rw-r--r--spec/services/git/process_ref_changes_service_spec.rb36
-rw-r--r--spec/services/groups/deploy_tokens/create_service_spec.rb10
-rw-r--r--spec/services/groups/import_export/export_service_spec.rb74
-rw-r--r--spec/services/groups/import_export/import_service_spec.rb75
-rw-r--r--spec/services/issuable/clone/attributes_rewriter_spec.rb42
-rw-r--r--spec/services/issuable/common_system_notes_service_spec.rb44
-rw-r--r--spec/services/issues/close_service_spec.rb101
-rw-r--r--spec/services/issues/create_service_spec.rb8
-rw-r--r--spec/services/issues/import_csv_service_spec.rb50
-rw-r--r--spec/services/issues/reopen_service_spec.rb10
-rw-r--r--spec/services/issues/update_service_spec.rb62
-rw-r--r--spec/services/labels/transfer_service_spec.rb104
-rw-r--r--spec/services/merge_requests/after_create_service_spec.rb71
-rw-r--r--spec/services/merge_requests/create_service_spec.rb51
-rw-r--r--spec/services/merge_requests/merge_service_spec.rb4
-rw-r--r--spec/services/merge_requests/merge_to_ref_service_spec.rb11
-rw-r--r--spec/services/merge_requests/refresh_service_spec.rb56
-rw-r--r--spec/services/merge_requests/update_service_spec.rb8
-rw-r--r--spec/services/metrics/dashboard/clone_dashboard_service_spec.rb2
-rw-r--r--spec/services/metrics/dashboard/custom_metric_embed_service_spec.rb8
-rw-r--r--spec/services/metrics/dashboard/default_embed_service_spec.rb2
-rw-r--r--spec/services/metrics/dashboard/dynamic_embed_service_spec.rb8
-rw-r--r--spec/services/metrics/dashboard/grafana_metric_embed_service_spec.rb8
-rw-r--r--spec/services/metrics/dashboard/update_dashboard_service_spec.rb141
-rw-r--r--spec/services/milestones/closed_issues_count_service_spec.rb24
-rw-r--r--spec/services/milestones/issues_count_service_spec.rb24
-rw-r--r--spec/services/milestones/transfer_service_spec.rb19
-rw-r--r--spec/services/notification_recipient_service_spec.rb61
-rw-r--r--spec/services/notification_recipients/build_service_spec.rb61
-rw-r--r--spec/services/notification_recipients/builder/default_spec.rb44
-rw-r--r--spec/services/notification_service_spec.rb114
-rw-r--r--spec/services/pod_logs/base_service_spec.rb232
-rw-r--r--spec/services/pod_logs/elasticsearch_service_spec.rb222
-rw-r--r--spec/services/pod_logs/kubernetes_service_spec.rb166
-rw-r--r--spec/services/post_receive_service_spec.rb256
-rw-r--r--spec/services/projects/alerting/notify_service_spec.rb71
-rw-r--r--spec/services/projects/container_repository/cleanup_tags_service_spec.rb84
-rw-r--r--spec/services/projects/create_service_spec.rb80
-rw-r--r--spec/services/projects/deploy_tokens/create_service_spec.rb10
-rw-r--r--spec/services/projects/destroy_service_spec.rb35
-rw-r--r--spec/services/projects/fork_service_spec.rb88
-rw-r--r--spec/services/projects/import_export/export_service_spec.rb57
-rw-r--r--spec/services/projects/import_service_spec.rb10
-rw-r--r--spec/services/projects/lfs_pointers/lfs_download_link_list_service_spec.rb137
-rw-r--r--spec/services/projects/lfs_pointers/lfs_link_service_spec.rb4
-rw-r--r--spec/services/projects/lsif_data_service_spec.rb15
-rw-r--r--spec/services/projects/operations/update_service_spec.rb43
-rw-r--r--spec/services/projects/protect_default_branch_service_spec.rb14
-rw-r--r--spec/services/projects/transfer_service_spec.rb12
-rw-r--r--spec/services/projects/update_pages_service_spec.rb3
-rw-r--r--spec/services/projects/update_repository_storage_service_spec.rb127
-rw-r--r--spec/services/projects/update_service_spec.rb27
-rw-r--r--spec/services/releases/update_service_spec.rb6
-rw-r--r--spec/services/resource_events/change_milestone_service_spec.rb13
-rw-r--r--spec/services/search_service_spec.rb70
-rw-r--r--spec/services/serverless/associate_domain_service_spec.rb74
-rw-r--r--spec/services/snippets/bulk_destroy_service_spec.rb161
-rw-r--r--spec/services/snippets/create_service_spec.rb81
-rw-r--r--spec/services/snippets/destroy_service_spec.rb81
-rw-r--r--spec/services/snippets/update_service_spec.rb127
-rw-r--r--spec/services/spam/mark_as_spam_service_spec.rb2
-rw-r--r--spec/services/system_note_service_spec.rb10
-rw-r--r--spec/services/system_notes/issuables_service_spec.rb43
-rw-r--r--spec/services/system_notes/merge_requests_service_spec.rb2
-rw-r--r--spec/services/users/destroy_service_spec.rb54
-rw-r--r--spec/services/users/update_service_spec.rb7
-rw-r--r--spec/services/x509_certificate_revoke_service_spec.rb43
-rw-r--r--spec/spec_helper.rb25
-rw-r--r--spec/support/caching.rb15
-rw-r--r--spec/support/capybara.rb40
-rw-r--r--spec/support/helpers/filtered_search_helpers.rb4
-rw-r--r--spec/support/helpers/graphql_helpers.rb4
-rw-r--r--spec/support/helpers/idempotent_worker_helper.rb15
-rw-r--r--spec/support/helpers/kubernetes_helpers.rb6
-rw-r--r--spec/support/helpers/ldap_helpers.rb24
-rw-r--r--spec/support/helpers/login_helpers.rb13
-rw-r--r--spec/support/helpers/metrics_dashboard_url_helpers.rb16
-rw-r--r--spec/support/helpers/notification_helpers.rb2
-rw-r--r--spec/support/helpers/project_forks_helper.rb2
-rw-r--r--spec/support/helpers/test_env.rb60
-rw-r--r--spec/support/helpers/wait_for_requests.rb4
-rw-r--r--spec/support/helpers/wiki_helpers.rb5
-rw-r--r--spec/support/helpers/workhorse_helpers.rb4
-rw-r--r--spec/support/import_export/common_util.rb4
-rw-r--r--spec/support/import_export/configuration_helper.rb4
-rw-r--r--spec/support/matchers/exceed_query_limit.rb10
-rw-r--r--spec/support/services/deploy_token_shared_examples.rb60
-rw-r--r--spec/support/shared_contexts/controllers/ldap_omniauth_callbacks_controller_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/requests/api/graphql/group_and_project_boards_query_shared_context.rb50
-rw-r--r--spec/support/shared_contexts/upload_type_check_shared_context.rb30
-rw-r--r--spec/support/shared_examples/banzai/filters/inline_embeds_shared_examples.rb48
-rw-r--r--spec/support/shared_examples/banzai/filters/inline_metrics_redactor_shared_examples.rb30
-rw-r--r--spec/support/shared_examples/controllers/deploy_token_shared_examples.rb22
-rw-r--r--spec/support/shared_examples/csp.rb79
-rw-r--r--spec/support/shared_examples/features/deploy_token_shared_examples.rb27
-rw-r--r--spec/support/shared_examples/features/error_tracking_shared_example.rb22
-rw-r--r--spec/support/shared_examples/features/issuables_user_dropdown_behaviors_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/wiki_file_attachments_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/lib/gitlab/background_migration/mentions_migration_shared_examples.rb22
-rw-r--r--spec/support/shared_examples/lib/gitlab/config/inheritable_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/lib/gitlab/import_export/project_tree_restorer_shared_examples.rb13
-rw-r--r--spec/support/shared_examples/models/application_setting_shared_examples.rb47
-rw-r--r--spec/support/shared_examples/models/cluster_application_helm_cert_shared_examples.rb42
-rw-r--r--spec/support/shared_examples/models/cluster_application_status_shared_examples.rb84
-rw-r--r--spec/support/shared_examples/models/cluster_application_version_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/models/concerns/blob_replicator_strategy_shared_examples.rb77
-rw-r--r--spec/support/shared_examples/models/concerns/bulk_insert_safe_shared_examples.rb40
-rw-r--r--spec/support/shared_examples/models/issue_tracker_service_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/models/note_access_check_shared_examples.rb19
-rw-r--r--spec/support/shared_examples/models/slack_mattermost_notifications_shared_examples.rb50
-rw-r--r--spec/support/shared_examples/quick_actions/issue/move_quick_action_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/requests/api/discussions_shared_examples.rb56
-rw-r--r--spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb112
-rw-r--r--spec/support/shared_examples/requests/api/time_tracking_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/requests/snippet_shared_examples.rb43
-rw-r--r--spec/support/shared_examples/resource_events.rb117
-rw-r--r--spec/support/shared_examples/serializers/diff_file_entity_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/services/boards/issues_list_service_shared_examples.rb14
-rw-r--r--spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb22
-rw-r--r--spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb145
-rw-r--r--spec/support/shared_examples/services/resource_events/change_milestone_service_shared_examples.rb46
-rw-r--r--spec/support/shared_examples/tasks/gitlab/import_export/import_measurement_shared_examples.rb31
-rw-r--r--spec/support/shared_examples/tasks/gitlab/import_export/measurable_shared_examples.rb31
-rw-r--r--spec/support/shared_examples/tasks/gitlab/uploads/migration_shared_examples.rb31
-rw-r--r--spec/support/shared_examples/uploaders/upload_type_shared_examples.rb69
-rw-r--r--spec/support/shared_examples/uploaders/workers/object_storage/migrate_uploads_shared_examples.rb120
-rw-r--r--spec/support/shared_examples/views/pipeline_status_changes_email.rb78
-rw-r--r--spec/support/shared_examples/workers/idempotency_shared_examples.rb33
-rw-r--r--spec/support_specs/matchers/exceed_query_limit_helpers_spec.rb34
-rw-r--r--spec/tasks/gitlab/backup_rake_spec.rb67
-rw-r--r--spec/tasks/gitlab/check_rake_spec.rb8
-rw-r--r--spec/tasks/gitlab/import_export/import_rake_spec.rb91
-rw-r--r--spec/tasks/gitlab/uploads/migrate_rake_spec.rb42
-rw-r--r--spec/tasks/gitlab/web_hook_rake_spec.rb9
-rw-r--r--spec/uploaders/avatar_uploader_spec.rb30
-rw-r--r--spec/uploaders/content_type_whitelist_spec.rb34
-rw-r--r--spec/uploaders/favicon_uploader_spec.rb38
-rw-r--r--spec/uploaders/object_storage_spec.rb2
-rw-r--r--spec/uploaders/upload_type_check_spec.rb124
-rw-r--r--spec/uploaders/workers/object_storage/migrate_uploads_worker_spec.rb109
-rw-r--r--spec/views/admin/sessions/new.html.haml_spec.rb2
-rw-r--r--spec/views/admin/sessions/two_factor.html.haml_spec.rb41
-rw-r--r--spec/views/import/gitlab_projects/new.html.haml_spec.rb7
-rw-r--r--spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb5
-rw-r--r--spec/views/notify/pipeline_failed_email.html.haml_spec.rb70
-rw-r--r--spec/views/notify/pipeline_fixed_email.html.haml_spec.rb10
-rw-r--r--spec/views/notify/pipeline_fixed_email.text.erb_spec.rb10
-rw-r--r--spec/views/notify/pipeline_success_email.html.haml_spec.rb54
-rw-r--r--spec/views/notify/pipeline_success_email.text.erb_spec.rb22
-rw-r--r--spec/views/profiles/preferences/show.html.haml_spec.rb2
-rw-r--r--spec/views/projects/settings/operations/show.html.haml_spec.rb2
-rw-r--r--spec/views/shared/milestones/_issuable.html.haml_spec.rb33
-rw-r--r--spec/views/shared/milestones/_top.html.haml_spec.rb2
-rw-r--r--spec/workers/authorized_keys_worker_spec.rb55
-rw-r--r--spec/workers/authorized_projects_worker_spec.rb17
-rw-r--r--spec/workers/background_migration_worker_spec.rb10
-rw-r--r--spec/workers/concerns/application_worker_spec.rb41
-rw-r--r--spec/workers/concerns/gitlab/github_import/notify_upon_death_spec.rb51
-rw-r--r--spec/workers/concerns/gitlab/notify_upon_death_spec.rb51
-rw-r--r--spec/workers/concerns/project_export_options_spec.rb41
-rw-r--r--spec/workers/detect_repository_languages_worker_spec.rb2
-rw-r--r--spec/workers/every_sidekiq_worker_spec.rb30
-rw-r--r--spec/workers/expire_build_instance_artifacts_worker_spec.rb2
-rw-r--r--spec/workers/expire_job_cache_worker_spec.rb32
-rw-r--r--spec/workers/gitlab_shell_worker_spec.rb33
-rw-r--r--spec/workers/mail_scheduler/notification_service_worker_spec.rb2
-rw-r--r--spec/workers/namespaces/root_statistics_worker_spec.rb2
-rw-r--r--spec/workers/namespaces/schedule_aggregation_worker_spec.rb2
-rw-r--r--spec/workers/pipeline_notification_worker_spec.rb9
-rw-r--r--spec/workers/pipeline_schedule_worker_spec.rb4
-rw-r--r--spec/workers/pipeline_update_ci_ref_status_worker_service_spec.rb18
-rw-r--r--spec/workers/post_receive_spec.rb111
-rw-r--r--spec/workers/project_export_worker_spec.rb46
-rw-r--r--spec/workers/project_update_repository_storage_worker_spec.rb19
-rw-r--r--spec/workers/remote_mirror_notification_worker_spec.rb4
-rw-r--r--spec/workers/repository_fork_worker_spec.rb34
-rw-r--r--spec/workers/run_pipeline_schedule_worker_spec.rb6
-rw-r--r--spec/workers/stuck_export_jobs_worker_spec.rb75
-rw-r--r--spec/workers/update_external_pull_requests_worker_spec.rb4
-rw-r--r--spec/workers/update_merge_requests_worker_spec.rb12
-rw-r--r--spec/workers/x509_certificate_revoke_worker_spec.rb41
-rw-r--r--[-rwxr-xr-x]vendor/gitignore/C++.gitignore0
-rw-r--r--[-rwxr-xr-x]vendor/gitignore/Java.gitignore0
-rw-r--r--vendor/ingress/values.yaml5
-rw-r--r--vendor/project_templates/gatsby.tar.gzbin0 -> 78324 bytes
-rw-r--r--yarn.lock1733
4333 files changed, 113197 insertions, 51586 deletions
diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 00000000000..d704f20c726
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,15 @@
+# top-most EditorConfig file
+root = true
+
+# Unix-style newlines with a newline ending every file
+[*]
+end_of_line = lf
+trim_trailing_whitespace = true
+insert_final_newline = true
+
+[*.{js,json,vue,scss,rb,haml,yml}]
+indent_size = 2
+
+[*.{js,json,vue,scss,rb,haml,yml,md}]
+indent_style = space
+charset = utf-8
diff --git a/.gitignore b/.gitignore
index b3e6cbae96b..4bebf3fd047 100644
--- a/.gitignore
+++ b/.gitignore
@@ -30,6 +30,7 @@ eslint-report.html
/app/assets/javascripts/locale/**/app.js
/backups/*
/config/aws.yml
+/config/cable.yml
/config/database*.yml
/config/gitlab.yml
/config/gitlab_ci.yml
@@ -42,6 +43,7 @@ eslint-report.html
/config/redis.shared_state.yml
/config/unicorn.rb
/config/puma.rb
+/config/puma_actioncable.rb
/config/secrets.yml
/config/sidekiq.yml
/config/registry.key
@@ -54,7 +56,7 @@ eslint-report.html
/dump.rdb
/jsconfig.json
/log/*.log*
-/node_modules/
+/node_modules
/nohup.out
/public/assets/
/public/uploads.*
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 19dca0b2fcd..be1818391ca 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -3,6 +3,7 @@ image: "registry.gitlab.com/gitlab-org/gitlab-build-images:ruby-2.6.5-golang-1.1
stages:
- sync
- prepare
+ - fixtures
- test
- post-test
- review-prepare
@@ -11,6 +12,29 @@ stages:
- post-qa
- pages
+# always use `gitlab-org` runners
+default:
+ tags:
+ - gitlab-org
+
+workflow:
+ rules:
+ # If `$FORCE_GITLAB_CI` is set, create a pipeline.
+ - if: '$FORCE_GITLAB_CI'
+ # For merge requests, create a pipeline.
+ - if: '$CI_MERGE_REQUEST_IID'
+ # For `master` branch, create a pipeline (this includes on schedules, pushes, merges, etc.).
+ - if: '$CI_COMMIT_BRANCH == "master"'
+ # For tags, create a pipeline.
+ - if: '$CI_COMMIT_TAG'
+ # If `$GITLAB_INTERNAL` isn't set, don't create a pipeline.
+ - if: '$GITLAB_INTERNAL == null'
+ when: never
+ # For stable, auto-deploy, and security branches, create a pipeline.
+ - if: '$CI_COMMIT_BRANCH =~ /^[\d-]+-stable(-ee)?$/'
+ - if: '$CI_COMMIT_BRANCH =~ /^\d+-\d+-auto-deploy-\d+$/'
+ - if: '$CI_COMMIT_BRANCH =~ /^security\//'
+
variables:
RAILS_ENV: "test"
NODE_ENV: "test"
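
A minimal sketch (not part of this commit; branch and variable names are placeholders) of how the `workflow:rules` block added above is evaluated: rules are matched top to bottom and the first match decides whether a pipeline is created, so the `when: never` guard only takes effect when none of the earlier rules matched.

  # First match wins: a merge request or a push to master creates a pipeline
  # before the GITLAB_INTERNAL guard is reached; a push from a fork without
  # that variable falls through to `when: never` and no pipeline is created.
  workflow:
    rules:
      - if: '$CI_MERGE_REQUEST_IID'
      - if: '$CI_COMMIT_BRANCH == "master"'
      - if: '$GITLAB_INTERNAL == null'
        when: never
      - if: '$CI_COMMIT_BRANCH =~ /^security\//'
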
diff --git a/.gitlab/CODEOWNERS b/.gitlab/CODEOWNERS
index 0b817fe14b5..28ad2b41921 100644
--- a/.gitlab/CODEOWNERS
+++ b/.gitlab/CODEOWNERS
@@ -8,19 +8,29 @@
# Technical writing team are the default reviewers for all markdown docs
*.md @gl-docsteam
+/doc/ @gl-docsteam
# Frontend maintainers should see everything in `app/assets/`
-app/assets/ @gitlab-org/maintainers/frontend
*.scss @annabeldunstone @gitlab-org/maintainers/frontend
-/scripts/frontend/ @gitlab-org/maintainers/frontend
+*.js @gitlab-org/maintainers/frontend
+/app/assets/ @gitlab-org/maintainers/frontend
+/ee/app/assets/ @gitlab-org/maintainers/frontend
+/spec/javascripts/ @gitlab-org/maintainers/frontend
+/ee/spec/javascripts/ @gitlab-org/maintainers/frontend
+/spec/frontend/ @gitlab-org/maintainers/frontend
+/ee/spec/frontend/ @gitlab-org/maintainers/frontend
# Database maintainers should review changes in `db/`
-db/ @gitlab-org/maintainers/database
-lib/gitlab/background_migration/ @gitlab-org/maintainers/database
-lib/gitlab/database/ @gitlab-org/maintainers/database
-lib/gitlab/sql/ @gitlab-org/maintainers/database
-lib/gitlab/github_import/ @gitlab-org/maintainers/database
+/db/ @gitlab-org/maintainers/database
/ee/db/ @gitlab-org/maintainers/database
+/lib/gitlab/background_migration/ @gitlab-org/maintainers/database
+/ee/lib/ee/gitlab/background_migration/ @gitlab-org/maintainers/database
+/lib/gitlab/database/ @gitlab-org/maintainers/database
+/ee/lib/gitlab/database/ @gitlab-org/maintainers/database
+/lib/gitlab/sql/ @gitlab-org/maintainers/database
+/lib/gitlab/github_import/ @gitlab-org/maintainers/database
+/app/finders/ @gitlab-org/maintainers/database
+/ee/app/finders/ @gitlab-org/maintainers/database
# Feature specific owners
/ee/lib/gitlab/code_owners/ @reprazent
@@ -38,11 +48,20 @@ lib/gitlab/github_import/ @gitlab-org/maintainers/database
# Engineering Productivity owned files
/.gitlab-ci.yml @gl-quality/eng-prod
/.gitlab/ci/ @gl-quality/eng-prod
+/.gitlab/ci/docs.gitlab-ci.yml @gl-quality/eng-prod @gl-docsteam
+/.gitlab/ci/releases.gitlab-ci.yml @gl-quality/eng-prod @gitlab-org/delivery
/.gitlab/CODEOWNERS @gl-quality/eng-prod
Dangerfile @gl-quality/eng-prod
/danger/ @gl-quality/eng-prod
/lib/gitlab/danger/ @gl-quality/eng-prod
/scripts/ @gl-quality/eng-prod
+/scripts/frontend/ @gl-quality/eng-prod @gitlab-org/maintainers/frontend
+.editorconfig @gl-quality/eng-prod
-# Delivery owner files
-/.gitlab/ci/releases.gitlab-ci.yml @gitlab-org/delivery
+# Telemetry owner files
+/ee/lib/gitlab/usage_data_counters/ @gitlab-org/growth/telemetry
+/ee/lib/ee/gitlab/usage_data.rb @gitlab-org/growth/telemetry
+/lib/gitlab/grafana_embed_usage_data.rb @gitlab-org/growth/telemetry
+/lib/gitlab/usage_data.rb @gitlab-org/growth/telemetry
+/lib/gitlab/cycle_analytics/usage_data.rb @gitlab-org/growth/telemetry
+/lib/gitlab/usage_data_counters/ @gitlab-org/growth/telemetry
diff --git a/.gitlab/ci/dev-fixtures.gitlab-ci.yml b/.gitlab/ci/dev-fixtures.gitlab-ci.yml
index e08f7e969cb..e77a75d2822 100644
--- a/.gitlab/ci/dev-fixtures.gitlab-ci.yml
+++ b/.gitlab/ci/dev-fixtures.gitlab-ci.yml
@@ -1,13 +1,11 @@
.run-dev-fixtures:
extends:
- - .default-tags
- .default-retry
- .default-cache
- .default-before_script
- .use-pg9
stage: test
needs: ["setup-test-env"]
- dependencies: ["setup-test-env"]
variables:
FIXTURE_PATH: "db/fixtures/development"
SEED_CYCLE_ANALYTICS: "true"
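
A rough sketch (not taken from the commit; the script line is purely illustrative) of why the `dependencies:` line can be dropped here: when a job lists another job under `needs`, it is scheduled immediately after that job and downloads its artifacts by default, so repeating the same job under `dependencies` adds nothing.

  run-dev-fixtures:
    stage: test
    needs: ["setup-test-env"]   # also fetches setup-test-env's artifacts
    script:
      - echo "illustration only"
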
diff --git a/.gitlab/ci/docs.gitlab-ci.yml b/.gitlab/ci/docs.gitlab-ci.yml
index 59f1773da2e..946db0c4be8 100644
--- a/.gitlab/ci/docs.gitlab-ci.yml
+++ b/.gitlab/ci/docs.gitlab-ci.yml
@@ -1,6 +1,5 @@
.review-docs:
extends:
- - .default-tags
- .default-retry
- .docs:rules:review-docs
allow_failure: true
@@ -16,6 +15,7 @@
name: review-docs/$DOCS_GITLAB_REPO_SUFFIX-$CI_MERGE_REQUEST_IID
# DOCS_REVIEW_APPS_DOMAIN and DOCS_GITLAB_REPO_SUFFIX are CI variables
# Discussion: https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/14236/diffs#note_40140693
+ auto_stop_in: 2 weeks
url: http://docs-preview-$DOCS_GITLAB_REPO_SUFFIX-$CI_MERGE_REQUEST_IID.$DOCS_REVIEW_APPS_DOMAIN/$DOCS_GITLAB_REPO_SUFFIX
on_stop: review-docs-cleanup
before_script:
@@ -41,18 +41,17 @@ review-docs-cleanup:
docs lint:
extends:
- - .default-tags
- .default-retry
- .docs:rules:docs-lint
- image: "registry.gitlab.com/gitlab-org/gitlab-docs:docs-lint"
+ image: "registry.gitlab.com/gitlab-org/gitlab-docs:lint"
stage: test
- dependencies: []
+ needs: []
script:
- scripts/lint-doc.sh
- # Lint Markdown
- - markdownlint --config .markdownlint.json 'doc/**/*.md'
# Prepare docs for build
- - mv doc/ /tmp/gitlab-docs/content/$DOCS_GITLAB_REPO_SUFFIX
+ # The path must be 'ee/' because we have hardcoded links relying on it
+ # https://gitlab.com/gitlab-org/gitlab-docs/-/blob/887850752fc0e72856da6632db132f005ba77f16/content/index.erb#L44-63
+ - mv doc/ /tmp/gitlab-docs/content/ee
- cd /tmp/gitlab-docs
# Build HTML from Markdown
- bundle exec nanoc
@@ -63,7 +62,6 @@ docs lint:
graphql-reference-verify:
extends:
- - .default-tags
- .default-retry
- .default-cache
- .default-before_script
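
A hedged sketch (job name, environment name, and URL are placeholders, not copied from the commit) of what the new `auto_stop_in` key does: the review-docs environment is now stopped automatically two weeks after its last deployment, triggering the `on_stop` job instead of leaving stale review apps running.

  review-docs-deploy:
    environment:
      name: review-docs/example-branch
      url: http://example-preview.docs.example.com
      on_stop: review-docs-cleanup
      auto_stop_in: 2 weeks
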
diff --git a/.gitlab/ci/frontend.gitlab-ci.yml b/.gitlab/ci/frontend.gitlab-ci.yml
index 3a72c941b89..8685ccc5432 100644
--- a/.gitlab/ci/frontend.gitlab-ci.yml
+++ b/.gitlab/ci/frontend.gitlab-ci.yml
@@ -6,10 +6,10 @@
- tmp/cache/assets/sprockets
- tmp/cache/babel-loader
- tmp/cache/vue-loader
+ - tmp/cache/webpack-dlls
.gitlab:assets:compile-metadata:
extends:
- - .default-tags
- .default-retry
- .default-before_script
- .assets-compile-cache
@@ -63,7 +63,6 @@ gitlab:assets:compile pull-cache:
.compile-assets-metadata:
extends:
- - .default-tags
- .default-retry
- .default-before_script
- .assets-compile-cache
@@ -78,6 +77,7 @@ gitlab:assets:compile pull-cache:
SETUP_DB: "false"
# we override the max_old_space_size to prevent OOM errors
NODE_OPTIONS: --max_old_space_size=3584
+ WEBPACK_VENDOR_DLL: "true"
cache:
key: "assets-compile:v9"
artifacts:
@@ -118,15 +118,52 @@ compile-assets pull-cache as-if-foss:
policy: pull
key: "assets-compile:v9:foss"
-.frontend-job-base:
+.frontend-fixtures-base:
extends:
- - .default-tags
- .default-retry
- .default-cache
- .default-before_script
- .use-pg9
+ stage: fixtures
+ needs:
+ - job: "setup-test-env"
+ artifacts: true
+ - job: "compile-assets pull-cache"
+ artifacts: true
+ script:
+ - date
+ - scripts/gitaly-test-spawn
+ - date
+ - bundle exec rake frontend:fixtures
+ artifacts:
+ name: frontend-fixtures
+ expire_in: 31d
+ when: always
+ paths:
+ - node_modules
+ - public/assets
+ - tmp/tests/frontend/
+
+frontend-fixtures:
+ extends:
+ - .frontend-fixtures-base
+ - .frontend:rules:default-frontend-jobs
+
+frontend-fixtures-as-if-foss:
+ extends:
+ - .frontend-fixtures-base
+ - .frontend:rules:default-frontend-jobs-as-if-foss
+ - .as-if-foss
+
+.frontend-job-base:
+ extends:
+ - .default-retry
+ - .default-cache
+ - .default-before_script
+ variables:
+ USE_BUNDLE_INSTALL: "false"
+ SETUP_DB: "false"
stage: test
- needs: ["setup-test-env", "compile-assets pull-cache"]
.karma-base:
extends: .frontend-job-base
@@ -136,14 +173,13 @@ compile-assets pull-cache as-if-foss:
script:
- export BABEL_ENV=coverage CHROME_LOG_FILE=chrome_debug.log
- date
- - scripts/gitaly-test-spawn
- - date
- - bundle exec rake karma
+ - yarn karma
karma:
extends:
- .karma-base
- .frontend:rules:default-frontend-jobs
+ needs: ["frontend-fixtures"]
coverage: '/^Statements *: (\d+\.\d+%)/'
artifacts:
name: coverage-javascript
@@ -161,25 +197,24 @@ karma-as-if-foss:
- .karma-base
- .frontend:rules:default-frontend-jobs-as-if-foss
- .as-if-foss
+ needs: ["frontend-fixtures-as-if-foss"]
.jest-base:
extends: .frontend-job-base
script:
- - scripts/gitaly-test-spawn
- date
- - bundle exec rake frontend:fixtures
- - date
- - yarn jest --ci --coverage
+ - yarn jest --ci --coverage --testSequencer ./scripts/frontend/parallel_ci_sequencer.js
cache:
key: jest
paths:
- - tmp/jest/jest/
+ - tmp/cache/jest/
policy: pull-push
jest:
extends:
- .jest-base
- .frontend:rules:default-frontend-jobs
+ needs: ["frontend-fixtures"]
artifacts:
name: coverage-frontend
expire_in: 31d
@@ -190,18 +225,38 @@ jest:
- tmp/tests/frontend/
reports:
junit: junit_jest.xml
+ parallel: 2
jest-as-if-foss:
extends:
- .jest-base
- .frontend:rules:default-frontend-jobs-as-if-foss
- .as-if-foss
+ needs: ["frontend-fixtures-as-if-foss"]
cache:
policy: pull
+coverage-frontend:
+ extends:
+ - .default-retry
+ - .frontend:rules:default-frontend-jobs
+ needs: ["jest"]
+ stage: post-test
+ before_script:
+ - yarn install --frozen-lockfile --cache-folder .yarn-cache --prefer-offline
+ script:
+ - yarn node scripts/frontend/merge_coverage_frontend.js
+ artifacts:
+ name: coverage-frontend
+ expire_in: 31d
+ paths:
+ - coverage-frontend/
+ cache:
+ paths:
+ - .yarn-cache/
+
.qa-frontend-node:
extends:
- - .default-tags
- .default-retry
- .default-cache
- .frontend:rules:qa-frontend-node
@@ -218,10 +273,6 @@ jest-as-if-foss:
- date
- yarn run webpack-prod
-qa-frontend-node:8:
- extends: .qa-frontend-node
- image: node:carbon
-
qa-frontend-node:10:
extends: .qa-frontend-node
image: node:dubnium
@@ -233,7 +284,6 @@ qa-frontend-node:latest:
webpack-dev-server:
extends:
- - .default-tags
- .default-retry
- .default-cache
- .frontend:rules:default-frontend-jobs
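
For context on the `needs:` wiring introduced above: fixture generation now runs once in the new `fixtures` stage, and the Karma/Jest jobs consume its artifacts directly instead of waiting for the whole preceding stage. A minimal sketch of the resulting pattern, condensed from the hunk above; the scripts are shortened and illustrative:

    frontend-fixtures:                    # new job in the `fixtures` stage
      extends: .frontend-fixtures-base    # runs `rake frontend:fixtures` and publishes tmp/tests/frontend/

    jest:
      extends: .jest-base
      needs: ["frontend-fixtures"]        # starts as soon as the fixtures artifacts are available
      script:
        - yarn jest --ci --coverage       # shortened; the real job also passes a custom test sequencer
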
diff --git a/.gitlab/ci/global.gitlab-ci.yml b/.gitlab/ci/global.gitlab-ci.yml
index ffdc115cff7..d5c89eacbb2 100644
--- a/.gitlab/ci/global.gitlab-ci.yml
+++ b/.gitlab/ci/global.gitlab-ci.yml
@@ -1,7 +1,3 @@
-.default-tags:
- tags:
- - gitlab-org
-
.default-retry:
retry:
max: 2 # This is confusing but this means "3 runs at max".
@@ -50,6 +46,15 @@
variables:
POSTGRES_HOST_AUTH_METHOD: trust
+.use-pg11:
+ image: "registry.gitlab.com/gitlab-org/gitlab-build-images:ruby-2.6.5-golang-1.12-git-2.24-lfs-2.9-chrome-73.0-node-12.x-yarn-1.21-postgresql-11-graphicsmagick-1.3.34"
+ services:
+ - name: postgres:11.6
+ command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"]
+ - name: redis:alpine
+ variables:
+ POSTGRES_HOST_AUTH_METHOD: trust
+
.use-pg9-ee:
services:
- name: postgres:9.6.17
@@ -69,6 +74,16 @@
variables:
POSTGRES_HOST_AUTH_METHOD: trust
+.use-pg11-ee:
+ image: "registry.gitlab.com/gitlab-org/gitlab-build-images:ruby-2.6.5-golang-1.12-git-2.24-lfs-2.9-chrome-73.0-node-12.x-yarn-1.21-postgresql-11-graphicsmagick-1.3.34"
+ services:
+ - name: postgres:11.6
+ command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"]
+ - name: redis:alpine
+ - name: elasticsearch:6.4.2
+ variables:
+ POSTGRES_HOST_AUTH_METHOD: trust
+
.as-if-foss:
variables:
FOSS_ONLY: '1'
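
For context: the new `.use-pg11` / `.use-pg11-ee` templates above are consumed through `extends`, exactly like the existing `.use-pg9` / `.use-pg10` templates. A minimal sketch of a job opting into PostgreSQL 11; the job name and script below are illustrative only, not part of this diff:

    hypothetical-pg11-job:
      extends:
        - .default-retry
        - .use-pg11        # brings in the PG11 build image plus the postgres:11.6 and redis services
      stage: test
      script:
        - bundle exec rspec spec/models    # illustrative command only
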
diff --git a/.gitlab/ci/memory.gitlab-ci.yml b/.gitlab/ci/memory.gitlab-ci.yml
index a8e2d90a4ee..b2267b03c5f 100644
--- a/.gitlab/ci/memory.gitlab-ci.yml
+++ b/.gitlab/ci/memory.gitlab-ci.yml
@@ -1,6 +1,5 @@
.only-code-memory-job-base:
extends:
- - .default-tags
- .default-retry
- .default-cache
- .default-before_script
@@ -8,6 +7,10 @@
memory-static:
extends: .only-code-memory-job-base
+ stage: test
+ needs:
+ - job: setup-test-env
+ artifacts: true
variables:
SETUP_DB: "false"
script:
@@ -36,6 +39,12 @@ memory-on-boot:
extends:
- .only-code-memory-job-base
- .use-pg10
+ stage: test
+ needs:
+ - job: setup-test-env
+ artifacts: true
+ - job: compile-assets pull-cache
+ artifacts: true
variables:
NODE_ENV: "production"
RAILS_ENV: "production"
@@ -44,8 +53,7 @@ memory-on-boot:
# we override the max_old_space_size to prevent OOM errors
NODE_OPTIONS: --max_old_space_size=3584
script:
- # Both bootsnap and derailed monkey-patch Kernel#require, which leads to circular dependency
- - ENABLE_BOOTSNAP=false PATH_TO_HIT="/users/sign_in" CUT_OFF=0.3 bundle exec derailed exec perf:mem >> 'tmp/memory_on_boot.txt'
+ - PATH_TO_HIT="/users/sign_in" CUT_OFF=0.3 bundle exec derailed exec perf:mem >> 'tmp/memory_on_boot.txt'
- scripts/generate-memory-metrics-on-boot tmp/memory_on_boot.txt >> 'tmp/memory_on_boot_metrics.txt'
artifacts:
paths:
diff --git a/.gitlab/ci/pages.gitlab-ci.yml b/.gitlab/ci/pages.gitlab-ci.yml
index 993ed21e39d..983f675d1d6 100644
--- a/.gitlab/ci/pages.gitlab-ci.yml
+++ b/.gitlab/ci/pages.gitlab-ci.yml
@@ -1,11 +1,10 @@
pages:
extends:
- - .default-tags
- .default-retry
- .default-cache
- .pages:rules
stage: pages
- dependencies: ["coverage", "karma", "gitlab:assets:compile pull-cache"]
+ dependencies: ["rspec:coverage", "karma", "gitlab:assets:compile pull-cache"]
script:
- mv public/ .public/
- mkdir public/
diff --git a/.gitlab/ci/qa.gitlab-ci.yml b/.gitlab/ci/qa.gitlab-ci.yml
index 0e9d7abb3ac..b0713c0944a 100644
--- a/.gitlab/ci/qa.gitlab-ci.yml
+++ b/.gitlab/ci/qa.gitlab-ci.yml
@@ -1,9 +1,8 @@
.qa-job-base:
extends:
- - .default-tags
- .default-retry
stage: test
- dependencies: []
+ needs: []
cache:
key: "qa-framework-jobs:v1"
paths:
diff --git a/.gitlab/ci/rails.gitlab-ci.yml b/.gitlab/ci/rails.gitlab-ci.yml
index 58c4ef0e02a..f9074adeaba 100644
--- a/.gitlab/ci/rails.gitlab-ci.yml
+++ b/.gitlab/ci/rails.gitlab-ci.yml
@@ -7,7 +7,6 @@
.rails-job-base:
extends:
- - .default-tags
- .default-retry
- .default-cache
- .default-before_script
@@ -202,7 +201,7 @@ gitlab:setup:
paths:
- log/development.log
-coverage:
+rspec:coverage:
extends:
- .rails-job-base
- .rails:rules:ee-and-foss
@@ -238,6 +237,12 @@ rspec quarantine pg9:
- .rails:rules:master-refs-code-backstage
- .use-pg10
+rspec migration pg10:
+ extends:
+ - .rspec-base-pg10
+ - .rspec-base-migration
+ parallel: 2
+
rspec unit pg10:
extends: .rspec-base-pg10
parallel: 20
@@ -252,6 +257,34 @@ rspec system pg10:
# master-only jobs #
####################
+############################
+# nightly master-only jobs #
+.rspec-base-pg11:
+ extends:
+ - .rspec-base
+ - .rails:rules:nightly-master-refs-code-backstage
+ - .use-pg11
+
+rspec migration pg11:
+ extends:
+ - .rspec-base-pg11
+ - .rspec-base-migration
+ parallel: 2
+
+rspec unit pg11:
+ extends: .rspec-base-pg11
+ parallel: 20
+
+rspec integration pg11:
+ extends: .rspec-base-pg11
+ parallel: 8
+
+rspec system pg11:
+ extends: .rspec-base-pg11
+ parallel: 24
+# nightly master-only jobs #
+############################
+
#########################
# ee + master-only jobs #
rspec-ee quarantine pg9:
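
A note on the `parallel:` keyword used by the new pg11 jobs above: GitLab CI starts that many identical copies of a job and exposes `CI_NODE_INDEX` / `CI_NODE_TOTAL` to each copy so the test runner can pick its slice of the spec suite. An annotated sketch of one of the jobs added in this hunk (the comments are editorial, not part of the diff):

    rspec unit pg11:
      extends: .rspec-base-pg11   # image, services, and script come from the base job
      parallel: 20                # 20 copies run, each with CI_NODE_INDEX set to 1..20
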
diff --git a/.gitlab/ci/reports.gitlab-ci.yml b/.gitlab/ci/reports.gitlab-ci.yml
index e7ee47fbe0a..f381c423f5d 100644
--- a/.gitlab/ci/reports.gitlab-ci.yml
+++ b/.gitlab/ci/reports.gitlab-ci.yml
@@ -12,6 +12,7 @@ code_quality:
- .default-retry
- .reports:rules:code_quality
stage: test
+ needs: []
image: docker:stable
allow_failure: true
services:
@@ -19,7 +20,7 @@ code_quality:
variables:
DOCKER_DRIVER: overlay2
DOCKER_TLS_CERTDIR: ""
- CODE_QUALITY_IMAGE: "registry.gitlab.com/gitlab-org/security-products/codequality:0.85.6"
+ CODE_QUALITY_IMAGE: "registry.gitlab.com/gitlab-org/ci-cd/codequality:0.85.9"
script:
- |
if ! docker info &>/dev/null; then
@@ -39,7 +40,6 @@ code_quality:
paths:
- gl-code-quality-report.json # GitLab-specific
expire_in: 1 week # GitLab-specific
- dependencies: []
# We need to duplicate this job's definition because it seems it's impossible to
# override an included `only.refs`.
@@ -52,7 +52,7 @@ sast:
- .reports:rules:sast
stage: test
allow_failure: true
- dependencies: [] # GitLab-specific
+ needs: []
artifacts:
paths:
- gl-sast-report.json # GitLab-specific
@@ -90,6 +90,7 @@ dependency_scanning:
- .default-retry
- .reports:rules:dependency_scanning
stage: test
+ needs: []
image: docker:stable
variables:
DOCKER_DRIVER: overlay2
@@ -148,7 +149,6 @@ dependency_scanning:
reports:
dependency_scanning: gl-dependency-scanning-report.json
expire_in: 1 week # GitLab-specific
- dependencies: []
# We need to duplicate this job's definition because it seems it's impossible to
# override an included `only.refs`.
@@ -168,6 +168,7 @@ dast:
# DAST_USERNAME: "root"
# DAST_USERNAME_FIELD: "user[login]"
# DAST_PASSWORD_FIELD: "user[password]"
+ DAST_VERSION: 1
allow_failure: true
script:
- 'export DAST_WEBSITE="${DAST_WEBSITE:-$(cat environment_url.txt)}"'
diff --git a/.gitlab/ci/review.gitlab-ci.yml b/.gitlab/ci/review.gitlab-ci.yml
index e23708337a4..14b1561ec1a 100644
--- a/.gitlab/ci/review.gitlab-ci.yml
+++ b/.gitlab/ci/review.gitlab-ci.yml
@@ -1,6 +1,5 @@
.review-docker:
extends:
- - .default-tags
- .default-retry
image: registry.gitlab.com/gitlab-org/gitlab-build-images:gitlab-qa-alpine-ruby-2.6
services:
@@ -29,7 +28,6 @@ build-qa-image:
review-cleanup:
extends:
- - .default-tags
- .default-retry
- .review:rules:review-cleanup
stage: prepare
@@ -46,7 +44,6 @@ review-cleanup:
review-build-cng:
extends:
- - .default-tags
- .default-retry
- .review:rules:mr-and-schedule
image: ruby:2.6-alpine
@@ -63,18 +60,18 @@ review-build-cng:
.review-workflow-base:
extends:
- - .default-tags
- .default-retry
image: registry.gitlab.com/gitlab-org/gitlab-build-images:gitlab-charts-build-base
variables:
HOST_SUFFIX: "${CI_ENVIRONMENT_SLUG}"
DOMAIN: "-${CI_ENVIRONMENT_SLUG}.${REVIEW_APPS_DOMAIN}"
- GITLAB_HELM_CHART_REF: "v2.5.1"
+ GITLAB_HELM_CHART_REF: "v2.6.8"
GITLAB_EDITION: "ce"
environment:
name: review/${CI_COMMIT_REF_NAME}
url: https://gitlab-${CI_ENVIRONMENT_SLUG}.${REVIEW_APPS_DOMAIN}
on_stop: review-stop
+ auto_stop_in: 48 hours
review-deploy:
extends:
@@ -84,6 +81,7 @@ review-deploy:
needs:
- job: review-build-cng
artifacts: false
+ resource_group: "review/${CI_COMMIT_REF_NAME}"
allow_failure: true
before_script:
- '[[ ! -d "ee/" ]] || export GITLAB_EDITION="ee"'
@@ -215,7 +213,6 @@ review-performance:
parallel-spec-reports:
extends:
- - .default-tags
- .review:rules:mr-only-manual
image: ruby:2.6-alpine
stage: post-qa
@@ -242,15 +239,14 @@ parallel-spec-reports:
danger-review:
extends:
- - .default-tags
- .default-retry
- .default-cache
- .review:rules:danger
image: registry.gitlab.com/gitlab-org/gitlab-build-images:danger
stage: test
- dependencies: []
+ needs: []
script:
- git version
- node --version
- yarn install --frozen-lockfile --cache-folder .yarn-cache --prefer-offline
- - danger --fail-on-errors=true
+ - danger --fail-on-errors=true --verbose
diff --git a/.gitlab/ci/rules.gitlab-ci.yml b/.gitlab/ci/rules.gitlab-ci.yml
index 8cf652d51e0..3ef28e02e9d 100644
--- a/.gitlab/ci/rules.gitlab-ci.yml
+++ b/.gitlab/ci/rules.gitlab-ci.yml
@@ -22,6 +22,9 @@
.if-merge-request: &if-merge-request
if: '$CI_MERGE_REQUEST_IID'
+.if-nightly-master-schedule: &if-nightly-master-schedule
+ if: '$NIGHTLY && $CI_COMMIT_REF_NAME == "master" && $CI_PIPELINE_SOURCE == "schedule"'
+
.if-dot-com-gitlab-org-schedule: &if-dot-com-gitlab-org-schedule
if: '$CI_SERVER_HOST == "gitlab.com" && $CI_PROJECT_NAMESPACE == "gitlab-org" && $CI_PIPELINE_SOURCE == "schedule"'
@@ -77,6 +80,9 @@
- "{,ee/}{app,bin,config,db,haml_lint,lib,locale,public,scripts,symbol,vendor}/**/*"
- "doc/api/graphql/reference/*" # Files in this folder are auto-generated
+.frontend-dependency-patterns: &frontend-dependency-patterns
+ - "{package.json,yarn.lock}"
+
.qa-patterns: &qa-patterns
- ".dockerignore"
- "qa/**/*"
@@ -267,6 +273,9 @@
rules:
- <<: *if-master-refs
when: on_success
+ - <<: *if-merge-request
+ changes: *frontend-dependency-patterns
+ when: on_success
################
# Memory rules #
@@ -343,6 +352,12 @@
changes: *code-backstage-patterns
when: on_success
+.rails:rules:nightly-master-refs-code-backstage:
+ rules:
+ - <<: *if-nightly-master-schedule
+ changes: *code-backstage-patterns
+ when: on_success
+
.rails:rules:ee-only:
rules:
- <<: *if-not-ee
@@ -484,13 +499,6 @@
changes: *code-backstage-patterns
when: on_success
-.test-metadata:rules:flaky-examples-check:
- rules:
- - <<: *if-merge-request
- changes: *code-backstage-patterns
- when: on_success
-
-
##############
# YAML rules #
##############
diff --git a/.gitlab/ci/setup.gitlab-ci.yml b/.gitlab/ci/setup.gitlab-ci.yml
index fb203db1478..b1918961f3e 100644
--- a/.gitlab/ci/setup.gitlab-ci.yml
+++ b/.gitlab/ci/setup.gitlab-ci.yml
@@ -2,7 +2,6 @@
# rubygems.org in the future.
cache gems:
extends:
- - .default-tags
- .default-retry
- .default-cache
- .default-before_script
@@ -21,14 +20,14 @@ cache gems:
.minimal-job:
extends:
- - .default-tags
- .default-retry
- dependencies: []
+ needs: []
gitlab_git_test:
extends:
- .minimal-job
- .setup:rules:gitlab_git_test
+ stage: test
script:
- spec/support/prepare-gitlab-git-test-for-commit --check-for-changes
@@ -36,5 +35,6 @@ no_ee_check:
extends:
- .minimal-job
- .setup:rules:no_ee_check
+ stage: test
script:
- scripts/no-ee-check
diff --git a/.gitlab/ci/test-metadata.gitlab-ci.yml b/.gitlab/ci/test-metadata.gitlab-ci.yml
index 719e4e821c9..cda6d996bdb 100644
--- a/.gitlab/ci/test-metadata.gitlab-ci.yml
+++ b/.gitlab/ci/test-metadata.gitlab-ci.yml
@@ -37,22 +37,3 @@ update-tests-metadata:
- retry gem install fog-aws mime-types activesupport rspec_profiling postgres-copy --no-document
- source scripts/rspec_helpers.sh
- update_tests_metadata
-
-flaky-examples-check:
- extends:
- - .default-tags
- - .default-retry
- - .test-metadata:rules:flaky-examples-check
- image: ruby:2.6-alpine
- stage: post-test
- variables:
- NEW_FLAKY_SPECS_REPORT: rspec_flaky/report-new.json
- allow_failure: true
- artifacts:
- expire_in: 30d
- paths:
- - rspec_flaky/
- script:
- - '[[ -f $NEW_FLAKY_SPECS_REPORT ]] || echo "{}" > ${NEW_FLAKY_SPECS_REPORT}'
- - scripts/merge-reports ${NEW_FLAKY_SPECS_REPORT} rspec_flaky/new_*_*.json
- - scripts/flaky_examples/detect-new-flaky-examples $NEW_FLAKY_SPECS_REPORT
diff --git a/.gitlab/ci/yaml.gitlab-ci.yml b/.gitlab/ci/yaml.gitlab-ci.yml
index cdc3aa6e577..ab31dd59299 100644
--- a/.gitlab/ci/yaml.gitlab-ci.yml
+++ b/.gitlab/ci/yaml.gitlab-ci.yml
@@ -2,11 +2,11 @@
# This uses rules from project root `.yamllint`.
lint-ci-gitlab:
extends:
- - .default-tags
- .default-retry
- .yaml:rules
image: sdesbure/yamllint:latest
- dependencies: []
+ stage: test
+ needs: []
variables:
LINT_PATHS: .gitlab-ci.yml .gitlab/ci lib/gitlab/ci/templates changelogs
script:
diff --git a/.gitlab/issue_templates/Feature proposal.md b/.gitlab/issue_templates/Feature proposal.md
index fa989e45281..2bbef723b21 100644
--- a/.gitlab/issue_templates/Feature proposal.md
+++ b/.gitlab/issue_templates/Feature proposal.md
@@ -15,6 +15,8 @@
* [Sidney (Systems Administrator)](https://about.gitlab.com/handbook/marketing/product-marketing/roles-personas/#sidney-systems-administrator)
* [Sam (Security Analyst)](https://about.gitlab.com/handbook/marketing/product-marketing/roles-personas/#sam-security-analyst)
* [Dana (Data Analyst)](https://about.gitlab.com/handbook/marketing/product-marketing/roles-personas/#dana-data-analyst)
+* [Simone (Software Engineer in Test)](https://about.gitlab.com/handbook/marketing/product-marketing/roles-personas/#simone-software-engineer-in-test)
+* [Allison (Application Ops)](https://about.gitlab.com/handbook/marketing/product-marketing/roles-personas/#allison-application-ops)
Personas are described at https://about.gitlab.com/handbook/marketing/product-marketing/roles-personas/ -->
@@ -57,6 +59,10 @@ See the test engineering planning process and reach out to your counterpart Soft
<!-- Which leads to: in which enterprise tier should this feature go? See https://about.gitlab.com/handbook/product/pricing/#four-tiers -->
+### Is this a cross-stage feature?
+
+<!-- Communicate if this change will affect multiple Stage Groups or product areas. We recommend always starting with the assumption that a feature request will have an impact on another Group. Loop in the most relevant PM and Product Designer from that Group to provide strategic support to help align the Group's broader plan and vision, as well as to avoid UX and technical debt. https://about.gitlab.com/handbook/product/#cross-stage-features -->
+
### Links / references
/label ~feature
diff --git a/.gitlab/issue_templates/Problem_Validation.md b/.gitlab/issue_templates/Problem_Validation.md
index bc1fd3374df..f7515c07218 100644
--- a/.gitlab/issue_templates/Problem_Validation.md
+++ b/.gitlab/issue_templates/Problem_Validation.md
@@ -1,6 +1,6 @@
## Problem Statement
-<!-- What is the problem we hope to validate and solve? -->
+<!-- What is the problem we hope to validate? Reference how to write a real customer problem statement at https://productcoalition.com/how-to-write-a-good-customer-problem-statement-a815f80189ba for guidance. -->
## Reach
diff --git a/.gitlab/issue_templates/QA failure.md b/.gitlab/issue_templates/QA failure.md
index 13b5d7bf92c..e1b3eec5d29 100644
--- a/.gitlab/issue_templates/QA failure.md
+++ b/.gitlab/issue_templates/QA failure.md
@@ -40,7 +40,10 @@ Attach the screenshot and HTML snapshot of the page from the job's artifacts:
/due in 2 weeks
<!-- Base labels. -->
-/label ~Quality ~QA ~bug ~S1
+/label ~Quality ~QA ~test
+
+<!-- Test failure type label, please use just one. -->
+/label ~"failure::broken-test" ~"failure::flaky-test" ~"failure::stale-test" ~"failure::test-environment" ~"failure::investigating"
<!--
Choose the stage that appears in the test path, e.g. ~"devops::create" for
diff --git a/.gitlab/issue_templates/Security Release.md b/.gitlab/issue_templates/Security Release.md
index e5b26bc6fc6..b06f31f0e9a 100644
--- a/.gitlab/issue_templates/Security Release.md
+++ b/.gitlab/issue_templates/Security Release.md
@@ -4,6 +4,10 @@
Set the title to: `Security Release: 12.2.X, 12.1.X, and 12.0.X`
-->
+:warning: **Only Release Managers and members of the AppSec team can edit the description of this issue**
+
+-------
+
## Releases tasks
- https://gitlab.com/gitlab-org/release/docs/blob/master/general/security/release-manager.md
@@ -12,31 +16,17 @@ Set the title to: `Security Release: 12.2.X, 12.1.X, and 12.0.X`
## Version issues:
-* 12.2.X: {release task link}
-* 12.1.X: {release task link}
-* 12.0.X: {release task link}
+12.2.X, 12.1.X, 12.0.X: {release task link}
## Issues in GitLab Security
-* {https://gitlab.com/gitlab-org/security/gitlab/issues/ link}
-
-| Version | MR |
-|---------|----|
-| 12.2 | {https://gitlab.com/gitlab-org/security/gitlab/-/merge_requests link} |
-| 12.1 | {https://gitlab.com/gitlab-org/security/gitlab/-/merge_requests link} |
-| 12.0 | {https://gitlab.com/gitlab-org/security/gitlab/-/merge_requests link} |
-| master | {https://gitlab.com/gitlab-org/security/gitlab/-/merge_requests link} |
-
-## Issues in Omnibus-GitLab
-
-* {https://gitlab.com/gitlab-org/security/gitlab/issues/ link}
+To include your issue and merge requests in this Security Release, please mark
+your security issues as related to this release tracking issue. You can do this
+in the "Linked issues" section below this issue description.
-| Version | MR |
-|---------|----|
-| 12.2 | {https://dev.gitlab.org/gitlab/omnibus-gitlab/merge_requests/ link} |
-| 12.1 | {https://dev.gitlab.org/gitlab/omnibus-gitlab/merge_requests/ link} |
-| 12.0 | {https://dev.gitlab.org/gitlab/omnibus-gitlab/merge_requests/ link} |
-| master | {https://dev.gitlab.org/gitlab/omnibus-gitlab/merge_requests/ link} |
+:warning: If your security issues are not marked as related to this release
+tracking issue, their merge requests may not be included in the security
+release.
## QA
{QA issue link}
@@ -49,5 +39,5 @@ GitLab.com: {https://gitlab.com/gitlab-com/www-gitlab-com/merge_requests/ link}
## Email notification
{https://gitlab.com/gitlab-com/marketing/general/issues/ link}
-/label ~security
+/label ~security ~"upcoming security release"
/confidential
diff --git a/.gitlab/issue_templates/Technical Evaluation.md b/.gitlab/issue_templates/Technical Evaluation.md
index f703f727113..f603d88a764 100644
--- a/.gitlab/issue_templates/Technical Evaluation.md
+++ b/.gitlab/issue_templates/Technical Evaluation.md
@@ -9,9 +9,12 @@
<!-- Outline the tasks with issues that you need to evaluate as part of the implementation issue -->
-- [ ] Add task
-- [ ] Add task
-- [ ] Add task
+- [ ] Determine feasibility of the feature
+- [ ] Create issue for implementation or update existing implementation issue description with implementation proposal
+- [ ] Set weight on implementation issue
+- [ ] If weight is greater than 5, break issue into smaller issues
+- [ ] Add task
+- [ ] Add task
### Risks and Implementation Considerations
diff --git a/.gitlab/merge_request_templates/Documentation.md b/.gitlab/merge_request_templates/Documentation.md
index 2a7da2a436f..901228ee77e 100644
--- a/.gitlab/merge_request_templates/Documentation.md
+++ b/.gitlab/merge_request_templates/Documentation.md
@@ -1,5 +1,6 @@
<!-- Follow the documentation workflow https://docs.gitlab.com/ee/development/documentation/workflow.html -->
<!-- Additional information is located at https://docs.gitlab.com/ee/development/documentation/ -->
+<!-- To find the designated Tech Writer for the stage/group, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers -->
<!-- Mention "documentation" or "docs" in the MR title -->
<!-- For changing documentation location use the "Change documentation location" template -->
diff --git a/.gitlab/merge_request_templates/Security Release.md b/.gitlab/merge_request_templates/Security Release.md
index 02cb4c59fd1..24fe44200d6 100644
--- a/.gitlab/merge_request_templates/Security Release.md
+++ b/.gitlab/merge_request_templates/Security Release.md
@@ -12,7 +12,7 @@ See [the general developer security release guidelines](https://gitlab.com/gitla
## Developer checklist
-- [ ] **Make sure this merge request mentions the [GitLab Security] issue it belongs to (i.e. `Related to <issue_id>`).**
+- [ ] **On "Related issues" section, write down the [GitLab Security] issue it belongs to (i.e. `Related to <issue_id>`).**
- [ ] Merge request targets `master`, or `X-Y-stable` for backports.
- [ ] Milestone is set for the version this merge request applies to. A closed milestone can be assigned via [quick actions].
- [ ] Title of this merge request is the same as for all backports.
diff --git a/.haml-lint_todo.yml b/.haml-lint_todo.yml
index 2e1b1770f99..dd94d0b494d 100644
--- a/.haml-lint_todo.yml
+++ b/.haml-lint_todo.yml
@@ -1,13 +1,13 @@
# This configuration was generated by
# `haml-lint --auto-gen-config`
-# on 2019-05-07 19:04:08 +0100 using Haml-Lint version 0.30.0.
+# on 2020-03-04 13:16:29 +0100 using Haml-Lint version 0.34.0.
# The point is for the user to remove these configuration records
# one by one as the lints are removed from the code base.
# Note that changes in the inspected code, or installation of new
# versions of Haml-Lint, may require this file to be generated again.
linters:
- # Offense count: 2075
+ # Offense count: 1646
NoPlainNodes:
enabled: true
exclude:
@@ -19,13 +19,11 @@ linters:
- 'app/views/admin/application_settings/_gitaly.html.haml'
- 'app/views/admin/application_settings/_influx.html.haml'
- 'app/views/admin/application_settings/_ip_limits.html.haml'
- - 'app/views/admin/application_settings/_logging.html.haml'
- 'app/views/admin/application_settings/_performance.html.haml'
- 'app/views/admin/application_settings/_plantuml.html.haml'
- 'app/views/admin/application_settings/_prometheus.html.haml'
- 'app/views/admin/application_settings/_realtime.html.haml'
- 'app/views/admin/application_settings/_repository_check.html.haml'
- - 'app/views/admin/application_settings/_repository_storage.html.haml'
- 'app/views/admin/application_settings/_signin.html.haml'
- 'app/views/admin/application_settings/_signup.html.haml'
- 'app/views/admin/application_settings/_spam.html.haml'
@@ -42,13 +40,11 @@ linters:
- 'app/views/admin/broadcast_messages/index.html.haml'
- 'app/views/admin/dashboard/index.html.haml'
- 'app/views/admin/deploy_keys/new.html.haml'
- - 'app/views/admin/groups/show.html.haml'
- 'app/views/admin/health_check/show.html.haml'
- 'app/views/admin/hook_logs/_index.html.haml'
- 'app/views/admin/hook_logs/show.html.haml'
- 'app/views/admin/hooks/_form.html.haml'
- 'app/views/admin/hooks/edit.html.haml'
- - 'app/views/admin/hooks/index.html.haml'
- 'app/views/admin/labels/_form.html.haml'
- 'app/views/admin/logs/show.html.haml'
- 'app/views/admin/projects/_projects.html.haml'
@@ -78,8 +74,6 @@ linters:
- 'app/views/dashboard/milestones/index.html.haml'
- 'app/views/dashboard/projects/_blank_state_admin_welcome.html.haml'
- 'app/views/dashboard/projects/_blank_state_welcome.html.haml'
- - 'app/views/dashboard/projects/_zero_authorized_projects.html.haml'
- - 'app/views/dashboard/snippets/index.html.haml'
- 'app/views/dashboard/todos/_todo.html.haml'
- 'app/views/dashboard/todos/index.html.haml'
- 'app/views/devise/confirmations/almost_there.haml'
@@ -97,12 +91,10 @@ linters:
- 'app/views/devise/sessions/two_factor.html.haml'
- 'app/views/devise/shared/_omniauth_box.html.haml'
- 'app/views/devise/shared/_sign_in_link.html.haml'
- - 'app/views/devise/shared/_signup_box.html.haml'
- 'app/views/devise/shared/_tabs_normal.html.haml'
- 'app/views/discussions/_discussion.html.haml'
- 'app/views/discussions/_headline.html.haml'
- 'app/views/discussions/_notes.html.haml'
- - 'app/views/discussions/_resolve_all.html.haml'
- 'app/views/doorkeeper/applications/_delete_form.html.haml'
- 'app/views/doorkeeper/authorized_applications/_delete_form.html.haml'
- 'app/views/errors/encoding.html.haml'
@@ -114,8 +106,6 @@ linters:
- 'app/views/events/event/_push.html.haml'
- 'app/views/groups/_create_chat_team.html.haml'
- 'app/views/groups/_group_admin_settings.html.haml'
- - 'app/views/groups/group_members/_new_group_member.html.haml'
- - 'app/views/groups/group_members/index.html.haml'
- 'app/views/groups/labels/edit.html.haml'
- 'app/views/groups/labels/new.html.haml'
- 'app/views/groups/milestones/edit.html.haml'
@@ -130,29 +120,25 @@ linters:
- 'app/views/help/instance_configuration.html.haml'
- 'app/views/help/instance_configuration/_gitlab_ci.html.haml'
- 'app/views/help/instance_configuration/_gitlab_pages.html.haml'
- - 'app/views/help/instance_configuration/_ssh_info.html.haml'
- 'app/views/help/ui.html.haml'
- 'app/views/import/bitbucket/status.html.haml'
- 'app/views/import/bitbucket_server/status.html.haml'
- - 'app/views/instance_statistics/cohorts/_cohorts_table.html.haml'
- - 'app/views/instance_statistics/cohorts/_usage_ping.html.haml'
- 'app/views/invites/show.html.haml'
- 'app/views/layouts/_mailer.html.haml'
- 'app/views/layouts/header/_default.html.haml'
- 'app/views/layouts/header/_new_dropdown.haml'
- 'app/views/layouts/mailer/devise.html.haml'
- - 'app/views/layouts/nav/sidebar/_profile.html.haml'
- 'app/views/layouts/notify.html.haml'
- 'app/views/notify/_failed_builds.html.haml'
- 'app/views/notify/_reassigned_issuable_email.html.haml'
- 'app/views/notify/_removal_notification.html.haml'
+ - 'app/views/notify/_successful_pipeline.html.haml'
- 'app/views/notify/autodevops_disabled_email.html.haml'
- 'app/views/notify/changed_milestone_email.html.haml'
- 'app/views/notify/import_issues_csv_email.html.haml'
- 'app/views/notify/issue_moved_email.html.haml'
- 'app/views/notify/member_access_denied_email.html.haml'
- 'app/views/notify/member_invite_accepted_email.html.haml'
- - 'app/views/notify/member_invite_declined_email.html.haml'
- 'app/views/notify/member_invited_email.html.haml'
- 'app/views/notify/new_gpg_key_email.html.haml'
- 'app/views/notify/new_mention_in_issue_email.html.haml'
@@ -163,7 +149,6 @@ linters:
- 'app/views/notify/pages_domain_verification_failed_email.html.haml'
- 'app/views/notify/pages_domain_verification_succeeded_email.html.haml'
- 'app/views/notify/pipeline_failed_email.html.haml'
- - 'app/views/notify/pipeline_success_email.html.haml'
- 'app/views/notify/project_was_exported_email.html.haml'
- 'app/views/notify/project_was_moved_email.html.haml'
- 'app/views/notify/project_was_not_exported_email.html.haml'
@@ -172,31 +157,11 @@ linters:
- 'app/views/notify/removed_milestone_issue_email.html.haml'
- 'app/views/notify/removed_milestone_merge_request_email.html.haml'
- 'app/views/notify/repository_push_email.html.haml'
- - 'app/views/peek/views/_gc.html.haml'
- - 'app/views/peek/views/_redis.html.haml'
- - 'app/views/peek/views/_sidekiq.html.haml'
- - 'app/views/profiles/_event_table.html.haml'
- - 'app/views/profiles/active_sessions/_active_session.html.haml'
- - 'app/views/profiles/active_sessions/index.html.haml'
- - 'app/views/profiles/audit_log.html.haml'
- 'app/views/profiles/chat_names/_chat_name.html.haml'
- 'app/views/profiles/chat_names/index.html.haml'
- 'app/views/profiles/chat_names/new.html.haml'
- - 'app/views/profiles/emails/index.html.haml'
- - 'app/views/profiles/gpg_keys/_key.html.haml'
- - 'app/views/profiles/gpg_keys/index.html.haml'
- 'app/views/profiles/keys/_key.html.haml'
- - 'app/views/profiles/keys/_key_details.html.haml'
- - 'app/views/profiles/keys/index.html.haml'
- - 'app/views/profiles/notifications/show.html.haml'
- - 'app/views/profiles/passwords/edit.html.haml'
- - 'app/views/profiles/personal_access_tokens/index.html.haml'
- - 'app/views/profiles/preferences/show.html.haml'
- 'app/views/profiles/show.html.haml'
- - 'app/views/profiles/two_factor_auths/_codes.html.haml'
- - 'app/views/profiles/two_factor_auths/codes.html.haml'
- - 'app/views/profiles/two_factor_auths/create.html.haml'
- - 'app/views/profiles/two_factor_auths/show.html.haml'
- 'app/views/projects/_bitbucket_import_modal.html.haml'
- 'app/views/projects/_customize_workflow.html.haml'
- 'app/views/projects/_deletion_failed.html.haml'
@@ -206,10 +171,8 @@ linters:
- 'app/views/projects/_import_project_pane.html.haml'
- 'app/views/projects/_issuable_by_email.html.haml'
- 'app/views/projects/_md_preview.html.haml'
- - 'app/views/projects/_new_project_fields.html.haml'
- 'app/views/projects/_readme.html.haml'
- 'app/views/projects/artifacts/_artifact.html.haml'
- - 'app/views/projects/artifacts/_search_bar.html.haml'
- 'app/views/projects/artifacts/_tree_file.html.haml'
- 'app/views/projects/artifacts/browse.html.haml'
- 'app/views/projects/blame/_age_map_legend.html.haml'
@@ -219,7 +182,6 @@ linters:
- 'app/views/projects/blob/_new_dir.html.haml'
- 'app/views/projects/blob/_remove.html.haml'
- 'app/views/projects/blob/_render_error.html.haml'
- - 'app/views/projects/blob/_template_selectors.html.haml'
- 'app/views/projects/blob/_upload.html.haml'
- 'app/views/projects/blob/edit.html.haml'
- 'app/views/projects/blob/new.html.haml'
@@ -244,8 +206,6 @@ linters:
- 'app/views/projects/deploy_keys/_form.html.haml'
- 'app/views/projects/deploy_keys/_index.html.haml'
- 'app/views/projects/deploy_keys/edit.html.haml'
- - 'app/views/projects/deploy_tokens/_revoke_modal.html.haml'
- - 'app/views/projects/deploy_tokens/_table.html.haml'
- 'app/views/projects/deployments/_deployment.html.haml'
- 'app/views/projects/diffs/_file_header.html.haml'
- 'app/views/projects/diffs/_replaced_image_diff.html.haml'
@@ -254,10 +214,8 @@ linters:
- 'app/views/projects/environments/show.html.haml'
- 'app/views/projects/forks/error.html.haml'
- 'app/views/projects/generic_commit_statuses/_generic_commit_status.html.haml'
- - 'app/views/projects/graphs/charts.html.haml'
- 'app/views/projects/hook_logs/_index.html.haml'
- 'app/views/projects/hook_logs/show.html.haml'
- - 'app/views/projects/hooks/_index.html.haml'
- 'app/views/projects/hooks/edit.html.haml'
- 'app/views/projects/imports/new.html.haml'
- 'app/views/projects/imports/show.html.haml'
@@ -294,20 +252,9 @@ linters:
- 'app/views/projects/merge_requests/widget/open/_error.html.haml'
- 'app/views/projects/mirrors/_regenerate_public_ssh_key_confirm_modal.html.haml'
- 'app/views/projects/mirrors/_ssh_host_keys.html.haml'
- - 'app/views/projects/new.html.haml'
- 'app/views/projects/no_repo.html.haml'
- - 'app/views/projects/pages/_access.html.haml'
- - 'app/views/projects/pages/_destroy.haml'
- - 'app/views/projects/pages/_https_only.html.haml'
- - 'app/views/projects/pages/_list.html.haml'
- - 'app/views/projects/pages/_no_domains.html.haml'
- - 'app/views/projects/pages/_use.html.haml'
- - 'app/views/projects/pages/show.html.haml'
- 'app/views/projects/pipeline_schedules/_pipeline_schedule.html.haml'
- 'app/views/projects/pipelines/_info.html.haml'
- - 'app/views/projects/pipelines/charts/_pipelines.haml'
- - 'app/views/projects/protected_branches/shared/_branches_list.html.haml'
- - 'app/views/projects/protected_branches/shared/_create_protected_branch.html.haml'
- 'app/views/projects/protected_branches/shared/_dropdown.html.haml'
- 'app/views/projects/protected_branches/shared/_index.html.haml'
- 'app/views/projects/protected_branches/shared/_matching_branch.html.haml'
@@ -325,7 +272,6 @@ linters:
- 'app/views/projects/runners/_shared_runners.html.haml'
- 'app/views/projects/runners/edit.html.haml'
- 'app/views/projects/services/_form.html.haml'
- - 'app/views/projects/services/_index.html.haml'
- 'app/views/projects/services/mattermost_slash_commands/_detailed_help.html.haml'
- 'app/views/projects/services/mattermost_slash_commands/_help.html.haml'
- 'app/views/projects/services/prometheus/_metrics.html.haml'
@@ -338,43 +284,35 @@ linters:
- 'app/views/projects/tags/releases/edit.html.haml'
- 'app/views/projects/tree/_tree_row.html.haml'
- 'app/views/projects/tree/_truncated_notice_tree_row.html.haml'
- - 'app/views/projects/triggers/_content.html.haml'
- 'app/views/projects/triggers/_form.html.haml'
- 'app/views/projects/triggers/_index.html.haml'
- 'app/views/projects/triggers/_trigger.html.haml'
- 'app/views/projects/triggers/edit.html.haml'
- - 'app/views/projects/wikis/_new.html.haml'
- 'app/views/projects/wikis/_pages_wiki_page.html.haml'
- 'app/views/projects/wikis/edit.html.haml'
- 'app/views/projects/wikis/history.html.haml'
- - 'app/views/repository_check_mailer/notify.html.haml'
- - 'app/views/search/_form.html.haml'
- 'app/views/search/results/_issue.html.haml'
- 'app/views/search/results/_note.html.haml'
- 'app/views/search/results/_snippet_blob.html.haml'
- 'app/views/search/results/_snippet_title.html.haml'
- 'app/views/shared/_auto_devops_implicitly_enabled_banner.html.haml'
- 'app/views/shared/_commit_message_container.html.haml'
- - 'app/views/shared/_confirm_modal.html.haml'
- 'app/views/shared/_confirm_fork_modal.html.haml'
+ - 'app/views/shared/_confirm_modal.html.haml'
- 'app/views/shared/_delete_label_modal.html.haml'
- 'app/views/shared/_group_form.html.haml'
- 'app/views/shared/_group_tips.html.haml'
- 'app/views/shared/_milestone_expired.html.haml'
- 'app/views/shared/_no_password.html.haml'
- 'app/views/shared/_no_ssh.html.haml'
- - 'app/views/shared/_outdated_browser.html.haml'
- - 'app/views/shared/_personal_access_tokens_created_container.html.haml'
- - 'app/views/shared/_personal_access_tokens_table.html.haml'
- 'app/views/shared/_ping_consent.html.haml'
- 'app/views/shared/_project_limit.html.haml'
- - 'app/views/shared/_service_settings.html.haml'
- 'app/views/shared/boards/components/_board.html.haml'
- 'app/views/shared/boards/components/_sidebar.html.haml'
- 'app/views/shared/boards/components/sidebar/_due_date.html.haml'
- 'app/views/shared/boards/components/sidebar/_labels.html.haml'
- 'app/views/shared/boards/components/sidebar/_milestone.html.haml'
- - 'app/views/shared/empty_states/_priority_labels.html.haml'
+ - 'app/views/shared/deploy_tokens/_revoke_modal.html.haml'
- 'app/views/shared/hook_logs/_content.html.haml'
- 'app/views/shared/issuable/_assignees.html.haml'
- 'app/views/shared/issuable/_board_create_list_dropdown.html.haml'
@@ -399,57 +337,42 @@ linters:
- 'app/views/shared/notifications/_button.html.haml'
- 'app/views/shared/notifications/_custom_notifications.html.haml'
- 'app/views/shared/notifications/_new_button.html.haml'
- - 'app/views/shared/notifications/_notification_dropdown.html.haml'
- - 'app/views/shared/plugins/_index.html.haml'
- - 'app/views/shared/projects/_dropdown.html.haml'
- - 'app/views/shared/projects/_list.html.haml'
- - 'app/views/shared/projects/_project.html.haml'
- 'app/views/shared/runners/_runner_description.html.haml'
- 'app/views/shared/runners/show.html.haml'
- - 'app/views/shared/snippets/_embed.html.haml'
- 'app/views/shared/snippets/_header.html.haml'
- 'app/views/shared/snippets/_snippet.html.haml'
- - 'app/views/shared/tokens/_scopes_list.html.haml'
- 'app/views/shared/web_hooks/_form.html.haml'
- 'app/views/shared/web_hooks/_hook.html.haml'
- - 'app/views/shared/web_hooks/_test_button.html.haml'
- 'app/views/u2f/_authenticate.html.haml'
- 'app/views/u2f/_register.html.haml'
- 'app/views/users/_deletion_guidance.html.haml'
- 'ee/app/views/admin/_namespace_plan_info.html.haml'
- 'ee/app/views/admin/application_settings/_templates.html.haml'
- 'ee/app/views/admin/audit_logs/index.html.haml'
- - 'ee/app/views/admin/dashboard/stats.html.haml'
- 'ee/app/views/admin/emails/show.html.haml'
- 'ee/app/views/admin/geo/nodes/edit.html.haml'
- 'ee/app/views/admin/geo/nodes/new.html.haml'
- 'ee/app/views/admin/geo/projects/_registry_failed.html.haml'
- 'ee/app/views/admin/geo/projects/_registry_never.html.haml'
- - 'ee/app/views/admin/licenses/_breakdown.html.haml'
- 'ee/app/views/admin/licenses/_upload_trial_license.html.haml'
- - 'ee/app/views/admin/licenses/missing.html.haml'
- 'ee/app/views/admin/licenses/new.html.haml'
- 'ee/app/views/admin/licenses/show.html.haml'
- 'ee/app/views/admin/monitoring/ee/_nav.html.haml'
- 'ee/app/views/admin/projects/_shared_runner_status.html.haml'
- - 'ee/app/views/admin/push_rules/show.html.haml'
+ - 'ee/app/views/admin/users/_auditor_access_level_radio.html.haml'
+ - 'ee/app/views/admin/users/_auditor_user_badge.html.haml'
- 'ee/app/views/admin/users/_limits.html.haml'
- 'ee/app/views/admin/users/_user_detail_note.html.haml'
- 'ee/app/views/dashboard/projects/_blank_state_ee_trial.html.haml'
- 'ee/app/views/errors/kerberos_denied.html.haml'
- - 'ee/app/views/groups/analytics/show.html.haml'
- - 'ee/app/views/groups/audit_events/index.html.haml'
- 'ee/app/views/groups/ee/_settings_nav.html.haml'
- 'ee/app/views/groups/epics/_epic.html.haml'
- 'ee/app/views/groups/group_members/_ldap_sync.html.haml'
- 'ee/app/views/groups/group_members/_sync_button.html.haml'
- 'ee/app/views/groups/hooks/edit.html.haml'
- - 'ee/app/views/groups/hooks/index.html.haml'
- 'ee/app/views/groups/ldap_group_links/index.html.haml'
- - 'ee/app/views/groups/pipeline_quota/index.html.haml'
- 'ee/app/views/jira_connect/subscriptions/index.html.haml'
- 'ee/app/views/layouts/jira_connect.html.haml'
- - 'ee/app/views/layouts/nav/ee/_epic_link.html.haml'
- 'ee/app/views/layouts/nav/ee/admin/_new_monitoring_sidebar.html.haml'
- 'ee/app/views/layouts/service_desk.html.haml'
- 'ee/app/views/ldap_group_links/_form.html.haml'
@@ -465,26 +388,22 @@ linters:
- 'ee/app/views/notify/epic_status_changed_email.html.haml'
- 'ee/app/views/notify/issues_csv_email.html.haml'
- 'ee/app/views/notify/new_review_email.html.haml'
- - 'ee/app/views/notify/prometheus_alert_fired_email.html.haml'
- 'ee/app/views/notify/send_admin_notification.html.haml'
- 'ee/app/views/notify/send_unsubscribed_notification.html.haml'
- 'ee/app/views/notify/unapproved_merge_request_email.html.haml'
- 'ee/app/views/oauth/geo_auth/error.html.haml'
- 'ee/app/views/profiles/pipeline_quota/index.haml'
- - 'ee/app/views/projects/audit_events/index.html.haml'
- 'ee/app/views/projects/blob/_owners.html.haml'
- 'ee/app/views/projects/commits/_mirror_status.html.haml'
- - 'ee/app/views/projects/feature_flags/_configure_feature_flags_modal.html.haml'
- 'ee/app/views/projects/issues/_issue_weight.html.haml'
- - 'ee/app/views/projects/issues/_related_issues.html.haml'
- 'ee/app/views/projects/issues/export_csv/_modal.html.haml'
- 'ee/app/views/projects/jobs/_shared_runner_limit_warning.html.haml'
- 'ee/app/views/projects/merge_requests/_approvals_count.html.haml'
- 'ee/app/views/projects/merge_requests/widget/open/_geo.html.haml'
- 'ee/app/views/projects/mirrors/_mirrored_repositories_count.html.haml'
+ - 'ee/app/views/projects/protected_branches/_update_protected_branch.html.haml'
- 'ee/app/views/projects/protected_branches/ee/_create_protected_branch.html.haml'
- 'ee/app/views/projects/protected_branches/ee/_dropdown.html.haml'
- - 'ee/app/views/projects/protected_branches/ee/_fallback_update_protected_branch.html.haml'
- 'ee/app/views/projects/protected_tags/_protected_tag_extra_create_access_levels.haml'
- 'ee/app/views/projects/protected_tags/ee/_create_protected_tag.html.haml'
- 'ee/app/views/projects/push_rules/_index.html.haml'
@@ -494,22 +413,16 @@ linters:
- 'ee/app/views/projects/settings/slacks/edit.html.haml'
- 'ee/app/views/shared/_additional_email_text.html.haml'
- 'ee/app/views/shared/_mirror_update_button.html.haml'
- - 'ee/app/views/shared/_shared_runners_minutes_limit.html.haml'
- - 'ee/app/views/shared/audit_events/_event_table.html.haml'
- 'ee/app/views/shared/boards/components/_list_weight.html.haml'
- - 'ee/app/views/shared/boards/components/sidebar/_epic.html.haml'
- - 'ee/app/views/shared/ee/_import_form.html.haml'
- 'ee/app/views/shared/epic/_search_bar.html.haml'
- 'ee/app/views/shared/issuable/_approvals.html.haml'
- 'ee/app/views/shared/issuable/_board_create_list_dropdown.html.haml'
- 'ee/app/views/shared/issuable/_filter_weight.html.haml'
- - 'ee/app/views/shared/issuable/_sidebar_item_epic.haml'
- 'ee/app/views/shared/members/ee/_ldap_tag.html.haml'
- 'ee/app/views/shared/members/ee/_override_member_buttons.html.haml'
- 'ee/app/views/shared/members/ee/_sso_badge.html.haml'
- 'ee/app/views/shared/milestones/_burndown.html.haml'
- 'ee/app/views/shared/milestones/_weight.html.haml'
- - 'ee/app/views/shared/promotions/_promote_audit_events.html.haml'
- 'ee/app/views/shared/promotions/_promote_burndown_charts.html.haml'
- 'ee/app/views/shared/promotions/_promote_csv_export.html.haml'
- 'ee/app/views/shared/promotions/_promote_issue_weights.html.haml'
@@ -517,7 +430,3 @@ linters:
- 'ee/app/views/shared/promotions/_promote_servicedesk.html.haml'
- 'ee/app/views/shared/push_rules/_form.html.haml'
- 'ee/app/views/unsubscribes/show.html.haml'
- - 'ee/app/views/admin/users/_auditor_access_level_radio.html.haml'
- - 'ee/app/views/admin/users/_auditor_user_badge.html.haml'
- - 'ee/app/views/projects/protected_branches/_update_protected_branch.html.haml'
- - 'ee/app/views/analytics/cycle_analytics/show.html.haml'
diff --git a/.markdownlint.json b/.markdownlint.json
index fe3790f47e6..4e2f74c6104 100644
--- a/.markdownlint.json
+++ b/.markdownlint.json
@@ -123,5 +123,6 @@
"YouTrack"
],
"code_blocks": false
- }
+ },
+ "code-fence-style": false
}
diff --git a/.rubocop.yml b/.rubocop.yml
index 514e4271272..ebc27c4cc9e 100644
--- a/.rubocop.yml
+++ b/.rubocop.yml
@@ -331,25 +331,19 @@ RSpec/MissingExampleGroupArgument:
RSpec/UnspecifiedException:
Enabled: false
-# Work in progress. See https://gitlab.com/gitlab-org/gitlab/issues/196163
RSpec/HaveGitlabHttpStatus:
Enabled: true
Exclude:
- 'spec/support/matchers/have_gitlab_http_status.rb'
Include:
- - 'spec/support/**/*'
- - 'ee/spec/support/**/*'
- - 'spec/features/**/*'
- - 'ee/spec/features/**/*'
- - 'spec/controllers/**/*'
- - 'ee/spec/controllers/**/*'
- - 'spec/requests/*.rb'
- - 'ee/spec/requests/*.rb'
- - 'spec/requests/api/*/**/*.rb'
- - 'ee/spec/requests/api/*/**/*.rb'
+ - 'spec/**/*'
+ - 'ee/spec/**/*'
Style/MultilineWhenThen:
Enabled: false
Style/FloatDivision:
Enabled: false
+
+Cop/BanCatchThrow:
+ Enabled: true
diff --git a/.vale.ini b/.vale.ini
index 89a669ec7ff..13b198b9148 100644
--- a/.vale.ini
+++ b/.vale.ini
@@ -1,40 +1,9 @@
-# Vale configuration file, taken from https://errata-ai.github.io/vale/config/
+# Vale configuration file.
+#
+# For more information, see https://errata-ai.gitbook.io/vale/getting-started/configuration.
-# The relative path to the folder containing linting rules (styles)
-# -----------------------------------------------------------------
-StylesPath = doc/.linting/vale/styles
-
-# Minimum alert level
-# -------------------
-# The minimum alert level to display (suggestion, warning, or error).
-# If integrated into CI, builds fail by default on error-level alerts,
-# unless you execute Vale with the --no-exit flag
+StylesPath = doc/.vale
MinAlertLevel = suggestion
-# Should Vale parse any file formats other than .md files as Markdown?
-# --------------------------------------------------------------------
-[formats]
-mdx = md
-
-# What file types should Vale test?
-# ----------------------------------
[*.md]
-
-# Styles to load
-# --------------
-# What styles, located in the StylesPath folder, should Vale load?
-# Vale also currently includes write-good, proselint, joblint, and vale
BasedOnStyles = gitlab
-
-# Enabling or disabling specific rules in a style
-# -----------------------------------------------
-# To disable a rule in an enabled style, use the following format:
-# {style}.{filename} = NO
-# To enable a single rule in a disabled style, use the following format:
-# vale.Editorializing = YES
-
-# Altering the severity of a rule in a style
-# ------------------------------------------
-# To change the reporting level (suggestion, warning, error) of a rule,
-# use the following format: {style}.{filename} = {level}
-# vale.Hedging = error
diff --git a/CHANGELOG-EE.md b/CHANGELOG-EE.md
index a2f417a16c6..fe44d526d51 100644
--- a/CHANGELOG-EE.md
+++ b/CHANGELOG-EE.md
@@ -17,11 +17,11 @@ Please view this file on the master branch, on stable branches it's out of date.
## 12.8.4
-- Unreleased due to tagging failure.
+- No changes.
## 12.8.3
-- Unreleased due to tagging failure.
+- No changes.
## 12.8.2
diff --git a/CHANGELOG.md b/CHANGELOG.md
index cc6df650d28..a75434d1ed7 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -41,11 +41,49 @@ entry.
## 12.8.4
-- Unreleased due to tagging failure.
+### Fixed (8 changes)
+
+- Fix Group Import API file upload when object storage is disabled. !25715
+- Fix Web IDE fork modal showing no text. !25842
+- Fixed regression when URL was encoded in a loop. !25849
+- Fixed repository browsing for folders with non-ascii characters. !25877
+- Fix search for Sentry error list. !26129
+- Send credentials with GraphQL fetch requests. !26386
+- Show CI status in project dashboards. !26403
+- Rescue invalid URLs during badge retrieval in asset proxy. !26524
+
+### Performance (2 changes)
+
+- Disable Marginalia line backtrace in production. !26199
+- Remove unnecessary Redis deletes for broadcast messages. !26541
+
+### Other (1 change, 1 of them is from the community)
+
+- Fix fixtures for Error Tracking Web UI. !26233 (Takuya Noguchi)
+
## 12.8.3
-- Unreleased due to tagging failure.
+### Fixed (8 changes)
+
+- Fix Group Import API file upload when object storage is disabled. !25715
+- Fix Web IDE fork modal showing no text. !25842
+- Fixed regression when URL was encoded in a loop. !25849
+- Fixed repository browsing for folders with non-ascii characters. !25877
+- Fix search for Sentry error list. !26129
+- Send credentials with GraphQL fetch requests. !26386
+- Show CI status in project dashboards. !26403
+- Rescue invalid URLs during badge retrieval in asset proxy. !26524
+
+### Performance (2 changes)
+
+- Disable Marginalia line backtrace in production. !26199
+- Remove unnecessary Redis deletes for broadcast messages. !26541
+
+### Other (1 change, 1 of them is from the community)
+
+- Fix fixtures for Error Tracking Web UI. !26233 (Takuya Noguchi)
+
## 12.8.2
diff --git a/GITALY_SERVER_VERSION b/GITALY_SERVER_VERSION
index c9bb023a76e..2e492831929 100644
--- a/GITALY_SERVER_VERSION
+++ b/GITALY_SERVER_VERSION
@@ -1 +1 @@
-12.8.7
+1b7629e1c76556e0e216784deebb989b43169d62
diff --git a/GITLAB_PAGES_VERSION b/GITLAB_PAGES_VERSION
index 15b989e398f..092afa15df4 100644
--- a/GITLAB_PAGES_VERSION
+++ b/GITLAB_PAGES_VERSION
@@ -1 +1 @@
-1.16.0
+1.17.0
diff --git a/GITLAB_SHELL_VERSION b/GITLAB_SHELL_VERSION
index 275283a18f9..4044f90867d 100644
--- a/GITLAB_SHELL_VERSION
+++ b/GITLAB_SHELL_VERSION
@@ -1 +1 @@
-11.0.0
+12.0.0
diff --git a/GITLAB_WORKHORSE_VERSION b/GITLAB_WORKHORSE_VERSION
index 72963fb08c2..f062572ef7b 100644
--- a/GITLAB_WORKHORSE_VERSION
+++ b/GITLAB_WORKHORSE_VERSION
@@ -1 +1 @@
-8.21.0
+8.25.0
diff --git a/Gemfile b/Gemfile
index 20c713e8c39..51350401807 100644
--- a/Gemfile
+++ b/Gemfile
@@ -2,7 +2,7 @@ source 'https://rubygems.org'
gem 'rails', '6.0.2'
-gem 'bootsnap', '~> 1.4'
+gem 'bootsnap', '~> 1.4.6'
# Improves copy-on-write performance for MRI
gem 'nakayoshi_fork', '~> 0.0.4'
@@ -58,7 +58,7 @@ gem 'akismet', '~> 3.0'
gem 'invisible_captcha', '~> 0.12.1'
# Two-factor authentication
-gem 'devise-two-factor', '~> 3.0.0'
+gem 'devise-two-factor', '~> 3.1.0'
gem 'rqrcode-rails3', '~> 0.1.7'
gem 'attr_encrypted', '~> 3.1.0'
gem 'u2f', '~> 0.2.1'
@@ -87,7 +87,7 @@ gem 'grape-entity', '~> 0.7.1'
gem 'rack-cors', '~> 1.0.6', require: 'rack/cors'
# GraphQL API
-gem 'graphql', '~> 1.9.12'
+gem 'graphql', '~> 1.9.19'
# NOTE: graphiql-rails v1.5+ doesn't work: https://gitlab.com/gitlab-org/gitlab/issues/31771
# TODO: remove app/views/graphiql/rails/editors/show.html.erb when https://github.com/rmosolgo/graphiql-rails/pull/71 is released:
# https://gitlab.com/gitlab-org/gitlab/issues/31747
@@ -149,7 +149,7 @@ gem 'wikicloth', '0.8.1'
gem 'asciidoctor', '~> 2.0.10'
gem 'asciidoctor-include-ext', '~> 0.3.1', require: false
gem 'asciidoctor-plantuml', '0.0.10'
-gem 'rouge', '~> 3.15.0'
+gem 'rouge', '~> 3.17.0'
gem 'truncato', '~> 0.7.11'
gem 'bootstrap_form', '~> 4.2.0'
gem 'nokogiri', '~> 1.10.5'
@@ -159,7 +159,7 @@ gem 'escape_utils', '~> 1.1'
gem 'icalendar'
# Diffs
-gem 'diffy', '~> 3.1.0'
+gem 'diffy', '~> 3.3'
gem 'diff_match_patch', '~> 0.1.0'
# Application server
@@ -171,7 +171,7 @@ group :unicorn do
end
group :puma do
- gem 'gitlab-puma', '~> 4.3.1.gitlab.2', require: false
+ gem 'gitlab-puma', '~> 4.3.3.gitlab.2', require: false
gem 'gitlab-puma_worker_killer', '~> 0.1.1.gitlab.1', require: false
gem 'rack-timeout', require: false
end
@@ -237,7 +237,7 @@ gem 'atlassian-jwt', '~> 0.2.0'
gem 'flowdock', '~> 0.7'
# Slack integration
-gem 'slack-notifier', '~> 1.5.1'
+gem 'slack-messenger', '~> 2.3.3'
# Hangouts Chat integration
gem 'hangouts-chat', '~> 0.0.5'
@@ -301,7 +301,7 @@ gem 'sentry-raven', '~> 2.9'
gem 'premailer-rails', '~> 1.10.3'
# LabKit: Tracing and Correlation
-gem 'gitlab-labkit', '0.9.1'
+gem 'gitlab-labkit', '0.11.0'
# I18n
gem 'ruby_parser', '~> 3.8', require: false
@@ -319,7 +319,7 @@ gem 'peek', '~> 1.1'
gem 'snowplow-tracker', '~> 0.6.1'
# Memory benchmarks
-gem 'derailed_benchmarks', require: false
+gem 'gitlab-derailed_benchmarks', require: false
# Metrics
group :metrics do
@@ -355,7 +355,7 @@ group :development, :test do
gem 'database_cleaner', '~> 1.7.0'
gem 'factory_bot_rails', '~> 5.1.0'
- gem 'rspec-rails', '~> 4.0.0.beta3'
+ gem 'rspec-rails', '~> 4.0.0.beta4'
# Prevent occasions where minitest is not bundled in packaged versions of ruby (see #3826)
gem 'minitest', '~> 5.11.0'
@@ -374,8 +374,8 @@ group :development, :test do
gem 'scss_lint', '~> 0.56.0', require: false
gem 'haml_lint', '~> 0.34.0', require: false
- gem 'simplecov', '~> 0.16.1', require: false
- gem 'bundler-audit', '~> 0.5.0', require: false
+ gem 'simplecov', '~> 0.18.5', require: false
+ gem 'bundler-audit', '~> 0.6.1', require: false
gem 'benchmark-ips', '~> 2.3.0', require: false
@@ -383,7 +383,7 @@ group :development, :test do
gem 'simple_po_parser', '~> 1.1.2', require: false
- gem 'timecop', '~> 0.8.0'
+ gem 'timecop', '~> 0.9.1'
gem 'png_quantizator', '~> 0.2.1', require: false
@@ -419,7 +419,8 @@ end
gem 'octokit', '~> 4.15'
-gem 'mail_room', '~> 0.10.0'
+# https://gitlab.com/gitlab-org/gitlab/issues/207207
+gem 'gitlab-mail_room', '~> 0.0.3', require: 'mail_room'
gem 'email_reply_trimmer', '~> 0.1'
gem 'html2text'
@@ -455,7 +456,7 @@ group :ed25519 do
end
# Gitaly GRPC protocol definitions
-gem 'gitaly', '~> 1.86.0'
+gem 'gitaly', '~> 12.9.0.pre.rc4'
gem 'grpc', '~> 1.24.0'
diff --git a/Gemfile.lock b/Gemfile.lock
index aa33bd4cd68..8006e45259a 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -123,7 +123,7 @@ GEM
binding_ninja (0.2.3)
binding_of_caller (0.8.0)
debug_inspector (>= 0.0.1)
- bootsnap (1.4.5)
+ bootsnap (1.4.6)
msgpack (~> 1.0)
bootstrap_form (4.2.0)
actionpack (>= 5.0)
@@ -134,8 +134,8 @@ GEM
bullet (6.0.2)
activesupport (>= 3.0.0)
uniform_notifier (~> 1.11)
- bundler-audit (0.5.0)
- bundler (~> 1.2)
+ bundler-audit (0.6.1)
+ bundler (>= 1.2.0, < 3)
thor (~> 0.18)
byebug (9.1.0)
capybara (3.22.0)
@@ -211,15 +211,6 @@ GEM
declarative-option (0.1.0)
default_value_for (3.3.0)
activerecord (>= 3.2.0, < 6.1)
- derailed_benchmarks (1.4.2)
- benchmark-ips (~> 2)
- get_process_mem (~> 0)
- heapy (~> 0)
- memory_profiler (~> 0)
- rack (>= 1)
- rake (> 10, < 14)
- ruby-statistics (>= 2.1)
- thor (~> 0.19)
descendants_tracker (0.0.4)
thread_safe (~> 0.3, >= 0.3.1)
device_detector (1.0.0)
@@ -229,18 +220,18 @@ GEM
railties (>= 4.1.0)
responders
warden (~> 1.2.3)
- devise-two-factor (3.0.0)
- activesupport
+ devise-two-factor (3.1.0)
+ activesupport (< 6.1)
attr_encrypted (>= 1.3, < 4, != 2)
devise (~> 4.0)
- railties
+ railties (< 6.1)
rotp (~> 2.0)
diff-lcs (1.3)
diff_match_patch (0.1.0)
- diffy (3.1.0)
+ diffy (3.3.0)
discordrb-webhooks-blackst0ne (3.3.0)
rest-client (~> 2.0)
- docile (1.3.1)
+ docile (1.3.2)
domain_name (0.5.20180417)
unf (>= 0.0.5, < 1.0.0)
doorkeeper (5.0.2)
@@ -375,12 +366,21 @@ GEM
po_to_json (>= 1.0.0)
rails (>= 3.2.0)
git (1.5.0)
- gitaly (1.86.0)
+ gitaly (12.9.0.pre.rc4)
grpc (~> 1.0)
github-markup (1.7.0)
gitlab-chronic (0.10.5)
numerizer (~> 0.2)
- gitlab-labkit (0.9.1)
+ gitlab-derailed_benchmarks (1.6.1)
+ benchmark-ips (~> 2)
+ get_process_mem (~> 0)
+ heapy (~> 0)
+ memory_profiler (~> 0)
+ rack (>= 1)
+ rake (> 10, < 14)
+ ruby-statistics (>= 2.1)
+ thor (>= 0.19, < 2)
+ gitlab-labkit (0.11.0)
actionpack (>= 5.0.0, < 6.1.0)
activesupport (>= 5.0.0, < 6.1.0)
grpc (~> 1.19)
@@ -388,9 +388,10 @@ GEM
opentracing (~> 0.4)
redis (> 3.0.0, < 5.0.0)
gitlab-license (1.0.0)
+ gitlab-mail_room (0.0.3)
gitlab-markup (1.7.0)
gitlab-net-dns (0.9.1)
- gitlab-puma (4.3.1.gitlab.2)
+ gitlab-puma (4.3.3.gitlab.2)
nio4r (~> 2.0)
gitlab-puma_worker_killer (0.1.1.gitlab.1)
get_process_mem (~> 0.2)
@@ -433,7 +434,7 @@ GEM
multi_json (~> 1.11)
os (>= 0.9, < 2.0)
signet (~> 0.7)
- gpgme (2.0.19)
+ gpgme (2.0.20)
mini_portile2 (~> 2.3)
grape (1.1.0)
activesupport
@@ -455,7 +456,7 @@ GEM
graphiql-rails (1.4.10)
railties
sprockets-rails
- graphql (1.9.12)
+ graphql (1.9.19)
graphql-docs (1.6.0)
commonmarker (~> 0.16)
escape_utils (~> 1.2)
@@ -616,7 +617,6 @@ GEM
lumberjack (1.0.13)
mail (2.7.1)
mini_mime (>= 0.1.1)
- mail_room (0.10.0)
marcel (0.3.3)
mimemagic (~> 0.3.2)
marginalia (1.8.0)
@@ -889,41 +889,41 @@ GEM
retriable (3.1.2)
rinku (2.0.0)
rotp (2.1.2)
- rouge (3.15.0)
+ rouge (3.17.0)
rqrcode (0.7.0)
chunky_png
rqrcode-rails3 (0.1.7)
rqrcode (>= 0.4.2)
- rspec (3.8.0)
- rspec-core (~> 3.8.0)
- rspec-expectations (~> 3.8.0)
- rspec-mocks (~> 3.8.0)
- rspec-core (3.8.2)
- rspec-support (~> 3.8.0)
- rspec-expectations (3.8.4)
+ rspec (3.9.0)
+ rspec-core (~> 3.9.0)
+ rspec-expectations (~> 3.9.0)
+ rspec-mocks (~> 3.9.0)
+ rspec-core (3.9.1)
+ rspec-support (~> 3.9.1)
+ rspec-expectations (3.9.0)
diff-lcs (>= 1.2.0, < 2.0)
- rspec-support (~> 3.8.0)
- rspec-mocks (3.8.1)
+ rspec-support (~> 3.9.0)
+ rspec-mocks (3.9.1)
diff-lcs (>= 1.2.0, < 2.0)
- rspec-support (~> 3.8.0)
+ rspec-support (~> 3.9.0)
rspec-parameterized (0.4.2)
binding_ninja (>= 0.2.3)
parser
proc_to_ast
rspec (>= 2.13, < 4)
unparser
- rspec-rails (4.0.0.beta3)
+ rspec-rails (4.0.0.beta4)
actionpack (>= 4.2)
activesupport (>= 4.2)
railties (>= 4.2)
- rspec-core (~> 3.8)
- rspec-expectations (~> 3.8)
- rspec-mocks (~> 3.8)
- rspec-support (~> 3.8)
+ rspec-core (~> 3.9)
+ rspec-expectations (~> 3.9)
+ rspec-mocks (~> 3.9)
+ rspec-support (~> 3.9)
rspec-retry (0.6.1)
rspec-core (> 3.3)
rspec-set (0.1.3)
- rspec-support (3.8.2)
+ rspec-support (3.9.2)
rspec_junit_formatter (0.4.1)
rspec-core (>= 2, < 4, != 2.12.0)
rspec_profiling (0.0.5)
@@ -955,7 +955,7 @@ GEM
ruby-progressbar (1.10.1)
ruby-saml (1.7.2)
nokogiri (>= 1.5.10)
- ruby-statistics (2.1.1)
+ ruby-statistics (2.1.2)
ruby_dep (1.5.0)
ruby_parser (3.13.1)
sexp_processor (~> 4.9)
@@ -1015,13 +1015,12 @@ GEM
jwt (>= 1.5, < 3.0)
multi_json (~> 1.10)
simple_po_parser (1.1.2)
- simplecov (0.16.1)
+ simplecov (0.18.5)
docile (~> 1.1)
- json (>= 1.8, < 3)
- simplecov-html (~> 0.10.0)
- simplecov-html (0.10.2)
+ simplecov-html (~> 0.11)
+ simplecov-html (0.12.2)
sixarm_ruby_unaccent (1.2.0)
- slack-notifier (1.5.1)
+ slack-messenger (2.3.3)
snowplow-tracker (0.6.1)
contracts (~> 0.7, <= 0.11)
spring (2.0.2)
@@ -1065,7 +1064,7 @@ GEM
thread_safe (0.3.6)
thrift (0.11.0.0)
tilt (2.0.10)
- timecop (0.8.1)
+ timecop (0.9.1)
timfel-krb5-auth (0.8.3)
toml (0.2.0)
parslet (~> 1.8.0)
@@ -1171,12 +1170,12 @@ DEPENDENCIES
benchmark-memory (~> 0.1)
better_errors (~> 2.5.0)
binding_of_caller (~> 0.8.0)
- bootsnap (~> 1.4)
+ bootsnap (~> 1.4.6)
bootstrap_form (~> 4.2.0)
brakeman (~> 4.2)
browser (~> 2.5)
bullet (~> 6.0.2)
- bundler-audit (~> 0.5.0)
+ bundler-audit (~> 0.6.1)
capybara (~> 3.22.0)
capybara-screenshot (~> 1.0.22)
carrierwave (~> 1.3)
@@ -1190,12 +1189,11 @@ DEPENDENCIES
database_cleaner (~> 1.7.0)
deckar01-task_list (= 2.3.1)
default_value_for (~> 3.3.0)
- derailed_benchmarks
device_detector
devise (~> 4.6)
- devise-two-factor (~> 3.0.0)
+ devise-two-factor (~> 3.1.0)
diff_match_patch (~> 0.1.0)
- diffy (~> 3.1.0)
+ diffy (~> 3.3)
discordrb-webhooks-blackst0ne (~> 3.3)
doorkeeper (~> 5.0.2)
doorkeeper-openid_connect (~> 1.6.3)
@@ -1230,14 +1228,16 @@ DEPENDENCIES
gettext (~> 3.2.2)
gettext_i18n_rails (~> 1.8.0)
gettext_i18n_rails_js (~> 1.3)
- gitaly (~> 1.86.0)
+ gitaly (~> 12.9.0.pre.rc4)
github-markup (~> 1.7.0)
gitlab-chronic (~> 0.10.5)
- gitlab-labkit (= 0.9.1)
+ gitlab-derailed_benchmarks
+ gitlab-labkit (= 0.11.0)
gitlab-license (~> 1.0)
+ gitlab-mail_room (~> 0.0.3)
gitlab-markup (~> 1.7.0)
gitlab-net-dns (~> 0.9.1)
- gitlab-puma (~> 4.3.1.gitlab.2)
+ gitlab-puma (~> 4.3.3.gitlab.2)
gitlab-puma_worker_killer (~> 0.1.1.gitlab.1)
gitlab-sidekiq-fetcher (= 0.5.2)
gitlab-styles (~> 3.1.0)
@@ -1252,7 +1252,7 @@ DEPENDENCIES
grape-path-helpers (~> 1.2)
grape_logging (~> 1.7)
graphiql-rails (~> 1.4.10)
- graphql (~> 1.9.12)
+ graphql (~> 1.9.19)
graphql-docs (~> 1.6.0)
grpc (~> 1.24.0)
gssapi
@@ -1284,7 +1284,6 @@ DEPENDENCIES
loofah (~> 2.2)
lru_redux
mail (= 2.7.1)
- mail_room (~> 0.10.0)
marginalia (~> 1.8.0)
memory_profiler (~> 0.9)
method_source (~> 0.8)
@@ -1347,10 +1346,10 @@ DEPENDENCIES
request_store (~> 1.3)
responders (~> 3.0)
retriable (~> 3.1.2)
- rouge (~> 3.15.0)
+ rouge (~> 3.17.0)
rqrcode-rails3 (~> 0.1.7)
rspec-parameterized
- rspec-rails (~> 4.0.0.beta3)
+ rspec-rails (~> 4.0.0.beta4)
rspec-retry (~> 0.6.1)
rspec-set (~> 0.1.3)
rspec_junit_formatter
@@ -1375,8 +1374,8 @@ DEPENDENCIES
sidekiq (~> 5.2.7)
sidekiq-cron (~> 1.0)
simple_po_parser (~> 1.1.2)
- simplecov (~> 0.16.1)
- slack-notifier (~> 1.5.1)
+ simplecov (~> 0.18.5)
+ slack-messenger (~> 2.3.3)
snowplow-tracker (~> 0.6.1)
spring (~> 2.0.0)
spring-commands-rspec (~> 1.0.4)
@@ -1387,7 +1386,7 @@ DEPENDENCIES
sys-filesystem (~> 1.1.6)
test-prof (~> 0.10.0)
thin (~> 1.7.0)
- timecop (~> 0.8.0)
+ timecop (~> 0.9.1)
toml-rb (~> 1.0.0)
truncato (~> 0.7.11)
u2f (~> 0.2.1)
diff --git a/app/assets/images/cluster_app_logos/modsecurity.png b/app/assets/images/cluster_app_logos/modsecurity.png
new file mode 100644
index 00000000000..fd58275e1d7
--- /dev/null
+++ b/app/assets/images/cluster_app_logos/modsecurity.png
Binary files differ
diff --git a/app/assets/javascripts/api.js b/app/assets/javascripts/api.js
index 4dc4ce543e9..022d79ecf49 100644
--- a/app/assets/javascripts/api.js
+++ b/app/assets/javascripts/api.js
@@ -1,5 +1,3 @@
-import $ from 'jquery';
-import _ from 'underscore';
import axios from './lib/utils/axios_utils';
import { joinPaths } from './lib/utils/url_utility';
import flash from '~/flash';
@@ -47,6 +45,7 @@ const Api = {
adminStatisticsPath: '/api/:version/application/statistics',
pipelineSinglePath: '/api/:version/projects/:id/pipelines/:pipeline_id',
lsifPath: '/api/:version/projects/:id/commits/:commit_id/lsif/info',
+ environmentsPath: '/api/:version/projects/:id/environments',
group(groupId, callback) {
const url = Api.buildUrl(Api.groupPath).replace(':id', groupId);
@@ -69,7 +68,7 @@ const Api = {
},
// Return groups list. Filtered by query
- groups(query, options, callback = $.noop) {
+ groups(query, options, callback = () => {}) {
const url = Api.buildUrl(Api.groupsPath);
return axios
.get(url, {
@@ -107,7 +106,7 @@ const Api = {
},
// Return projects list. Filtered by query
- projects(query, options, callback = _.noop) {
+ projects(query, options, callback = () => {}) {
const url = Api.buildUrl(Api.projectsPath);
const defaults = {
search: query,
@@ -475,12 +474,17 @@ const Api = {
return axios.get(url);
},
- lsifData(projectPath, commitId, path) {
+ lsifData(projectPath, commitId, paths) {
const url = Api.buildUrl(this.lsifPath)
.replace(':id', encodeURIComponent(projectPath))
.replace(':commit_id', commitId);
- return axios.get(url, { params: { path } });
+ return axios.get(url, { params: { paths } });
+ },
+
+ environments(id) {
+ const url = Api.buildUrl(this.environmentsPath).replace(':id', encodeURIComponent(id));
+ return axios.get(url);
},
buildUrl(url) {
diff --git a/app/assets/javascripts/badges/components/badge_form.vue b/app/assets/javascripts/badges/components/badge_form.vue
index 19668d7e232..dccc0b024ba 100644
--- a/app/assets/javascripts/badges/components/badge_form.vue
+++ b/app/assets/javascripts/badges/components/badge_form.vue
@@ -1,5 +1,5 @@
<script>
-import _ from 'underscore';
+import { escape, debounce } from 'lodash';
import { mapActions, mapState } from 'vuex';
import { GlLoadingIcon, GlFormInput, GlFormGroup } from '@gitlab/ui';
import createFlash from '~/flash';
@@ -54,7 +54,7 @@ export default {
s__('Badges|The %{docsLinkStart}variables%{docsLinkEnd} GitLab supports: %{placeholders}'),
{
docsLinkEnd: '</a>',
- docsLinkStart: `<a href="${_.escape(this.docsUrl)}">`,
+ docsLinkStart: `<a href="${escape(this.docsUrl)}">`,
placeholders,
},
false,
@@ -118,7 +118,7 @@ export default {
},
methods: {
...mapActions(['addBadge', 'renderBadge', 'saveBadge', 'stopEditing', 'updateBadgeInForm']),
- debouncedPreview: _.debounce(function preview() {
+ debouncedPreview: debounce(function preview() {
this.renderBadge();
}, badgePreviewDelayInMilliseconds),
onCancel() {
diff --git a/app/assets/javascripts/behaviors/markdown/render_mermaid.js b/app/assets/javascripts/behaviors/markdown/render_mermaid.js
index 3856832de90..fe63ebd470d 100644
--- a/app/assets/javascripts/behaviors/markdown/render_mermaid.js
+++ b/app/assets/javascripts/behaviors/markdown/render_mermaid.js
@@ -1,6 +1,7 @@
import flash from '~/flash';
import $ from 'jquery';
-import { sprintf, __ } from '../../locale';
+import { __, sprintf } from '~/locale';
+import { once } from 'lodash';
// Renders diagrams and flowcharts from text using Mermaid in any element with the
// `js-render-mermaid` class.
@@ -18,14 +19,10 @@ import { sprintf, __ } from '../../locale';
// This is an arbitrary number; Can be iterated upon when suitable.
const MAX_CHAR_LIMIT = 5000;
+let mermaidModule = {};
-function renderMermaids($els) {
- if (!$els.length) return;
-
- // A diagram may have been truncated in search results which will cause errors, so abort the render.
- if (document.querySelector('body').dataset.page === 'search:show') return;
-
- import(/* webpackChunkName: 'mermaid' */ 'mermaid')
+function importMermaidModule() {
+ return import(/* webpackChunkName: 'mermaid' */ 'mermaid')
.then(mermaid => {
mermaid.initialize({
// mermaid core options
@@ -35,68 +32,133 @@ function renderMermaids($els) {
// mermaidAPI options
theme: 'neutral',
flowchart: {
+ useMaxWidth: true,
htmlLabels: false,
},
securityLevel: 'strict',
});
+ mermaidModule = mermaid;
+
+ return mermaid;
+ })
+ .catch(err => {
+ flash(sprintf(__("Can't load mermaid module: %{err}"), { err }));
+ // eslint-disable-next-line no-console
+ console.error(err);
+ });
+}
+
+function fixElementSource(el) {
+ // Mermaid doesn't like `<br />` tags, so collapse all like tags into `<br>`, which is parsed correctly.
+ const source = el.textContent.replace(/<br\s*\/>/g, '<br>');
+
+ // Remove any extra spans added by the backend syntax highlighting.
+ Object.assign(el, { textContent: source });
+
+ return { source };
+}
+
+function renderMermaidEl(el) {
+ mermaidModule.init(undefined, el, id => {
+ const source = el.textContent;
+ const svg = document.getElementById(id);
+
+ // As of https://github.com/knsv/mermaid/commit/57b780a0d,
+ // Mermaid will make two init callbacks: one to initialize the
+ // flow charts, and another to initialize the Gantt charts.
+ // Guard against an error caused by double initialization.
+ if (svg.classList.contains('mermaid')) {
+ return;
+ }
+
+ svg.classList.add('mermaid');
+
+ // pre > code > svg
+ svg.closest('pre').replaceWith(svg);
+
+ // We need to add the original source into the DOM to allow Copy-as-GFM
+ // to access it.
+ const sourceEl = document.createElement('text');
+ sourceEl.classList.add('source');
+ sourceEl.setAttribute('display', 'none');
+ sourceEl.textContent = source;
+
+ svg.appendChild(sourceEl);
+ });
+}
+
+function renderMermaids($els) {
+ if (!$els.length) return;
+
+ // A diagram may have been truncated in search results which will cause errors, so abort the render.
+ if (document.querySelector('body').dataset.page === 'search:show') return;
+
+ importMermaidModule()
+ .then(() => {
let renderedChars = 0;
$els.each((i, el) => {
- // Mermaid doesn't like `<br />` tags, so collapse all like tags into `<br>`, which is parsed correctly.
- const source = el.textContent.replace(/<br\s*\/>/g, '<br>');
-
+ const { source } = fixElementSource(el);
/**
* Restrict the rendering to a certain number of characters to
* prevent mermaidjs from hanging up the entire thread and
* causing a DoS.
*/
if ((source && source.length > MAX_CHAR_LIMIT) || renderedChars > MAX_CHAR_LIMIT) {
- el.textContent = sprintf(
- __(
- 'Cannot render the image. Maximum character count (%{charLimit}) has been exceeded.',
- ),
- { charLimit: MAX_CHAR_LIMIT },
- );
+ const html = `
+ <div class="alert gl-alert gl-alert-warning alert-dismissible lazy-render-mermaid-container js-lazy-render-mermaid-container fade show" role="alert">
+ <div>
+ <div class="display-flex">
+ <div>${__(
+ 'Warning: Displaying this diagram might cause performance issues on this page.',
+ )}</div>
+ <div class="gl-alert-actions">
+ <button class="js-lazy-render-mermaid btn gl-alert-action btn-warning btn-md new-gl-button">Display</button>
+ </div>
+ </div>
+ <button type="button" class="close" data-dismiss="alert" aria-label="Close">
+ <span aria-hidden="true">&times;</span>
+ </button>
+ </div>
+ </div>
+ `;
+
+ const $parent = $(el).parent();
+
+ if (!$parent.hasClass('lazy-alert-shown')) {
+ $parent.after(html);
+ $parent.addClass('lazy-alert-shown');
+ }
+
return;
}
renderedChars += source.length;
- // Remove any extra spans added by the backend syntax highlighting.
- Object.assign(el, { textContent: source });
-
- mermaid.init(undefined, el, id => {
- const svg = document.getElementById(id);
-
- // As of https://github.com/knsv/mermaid/commit/57b780a0d,
- // Mermaid will make two init callbacks: one to initialize the
- // flow charts, and another to initialize the Gantt charts.
- // Guard against an error caused by double initialization.
- if (svg.classList.contains('mermaid')) {
- return;
- }
-
- svg.classList.add('mermaid');
-
- // pre > code > svg
- svg.closest('pre').replaceWith(svg);
- // We need to add the original source into the DOM to allow Copy-as-GFM
- // to access it.
- const sourceEl = document.createElement('text');
- sourceEl.classList.add('source');
- sourceEl.setAttribute('display', 'none');
- sourceEl.textContent = source;
-
- svg.appendChild(sourceEl);
- });
+ renderMermaidEl(el);
});
})
.catch(err => {
- flash(`Can't load mermaid module: ${err}`);
+ flash(sprintf(__('Encountered an error while rendering: %{err}'), { err }));
+ // eslint-disable-next-line no-console
+ console.error(err);
});
}
+const hookLazyRenderMermaidEvent = once(() => {
+ $(document.body).on('click', '.js-lazy-render-mermaid', function eventHandler() {
+ const parent = $(this).closest('.js-lazy-render-mermaid-container');
+ const pre = parent.prev();
+
+ const el = pre.find('.js-render-mermaid');
+
+ parent.remove();
+
+ renderMermaidEl(el);
+ });
+});
+
export default function renderMermaid($els) {
if (!$els.length) return;
@@ -111,4 +173,6 @@ export default function renderMermaid($els) {
renderMermaids($(this).find('.js-render-mermaid'));
}
});
+
+ hookLazyRenderMermaidEvent();
}
diff --git a/app/assets/javascripts/blob/balsamiq/balsamiq_viewer.js b/app/assets/javascripts/blob/balsamiq/balsamiq_viewer.js
index 87c8568802e..5b781947d55 100644
--- a/app/assets/javascripts/blob/balsamiq/balsamiq_viewer.js
+++ b/app/assets/javascripts/blob/balsamiq/balsamiq_viewer.js
@@ -1,5 +1,5 @@
import sqljs from 'sql.js';
-import { template as _template } from 'underscore';
+import { template as _template } from 'lodash';
import axios from '~/lib/utils/axios_utils';
import { successCodes } from '~/lib/utils/http_status';
diff --git a/app/assets/javascripts/blob/components/blob_content.vue b/app/assets/javascripts/blob/components/blob_content.vue
index 2639a099093..7d5d48cfc31 100644
--- a/app/assets/javascripts/blob/components/blob_content.vue
+++ b/app/assets/javascripts/blob/components/blob_content.vue
@@ -45,7 +45,13 @@ export default {
<template v-else>
<blob-content-error v-if="viewerError" :viewer-error="viewerError" />
- <component :is="viewer" v-else ref="contentViewer" :content="content" />
+ <component
+ :is="viewer"
+ v-else
+ ref="contentViewer"
+ :content="content"
+ :type="activeViewer.fileType"
+ />
</template>
</div>
</template>
diff --git a/app/assets/javascripts/blob/components/blob_edit_content.vue b/app/assets/javascripts/blob/components/blob_edit_content.vue
new file mode 100644
index 00000000000..83303a373f3
--- /dev/null
+++ b/app/assets/javascripts/blob/components/blob_edit_content.vue
@@ -0,0 +1,49 @@
+<script>
+import { initEditorLite } from '~/blob/utils';
+
+export default {
+ props: {
+ value: {
+ type: String,
+ required: true,
+ },
+ fileName: {
+ type: String,
+ required: false,
+ default: '',
+ },
+ },
+ data() {
+ return {
+ content: this.value,
+ editor: null,
+ };
+ },
+ watch: {
+ fileName(newVal) {
+ this.editor.updateModelLanguage(newVal);
+ },
+ },
+ mounted() {
+ this.editor = initEditorLite({
+ el: this.$refs.editor,
+ blobPath: this.fileName,
+ blobContent: this.content,
+ });
+ },
+ methods: {
+ triggerFileChange() {
+ const val = this.editor.getValue();
+ this.content = val;
+ this.$emit('input', val);
+ },
+ },
+};
+</script>
+<template>
+ <div class="file-content code">
+ <pre id="editor" ref="editor" data-editor-loading @focusout="triggerFileChange">{{
+ content
+ }}</pre>
+ </div>
+</template>
diff --git a/app/assets/javascripts/blob/components/blob_edit_header.vue b/app/assets/javascripts/blob/components/blob_edit_header.vue
new file mode 100644
index 00000000000..e9b5ceda479
--- /dev/null
+++ b/app/assets/javascripts/blob/components/blob_edit_header.vue
@@ -0,0 +1,35 @@
+<script>
+import { GlFormInput } from '@gitlab/ui';
+
+export default {
+ components: {
+ GlFormInput,
+ },
+ props: {
+ value: {
+ type: String,
+ required: true,
+ },
+ },
+ data() {
+ return {
+ name: this.value,
+ };
+ },
+};
+</script>
+<template>
+ <div class="js-file-title file-title-flex-parent">
+ <gl-form-input
+ id="snippet_file_name"
+ v-model="name"
+ :placeholder="
+ s__('Snippets|Give your file a name to add code highlighting, e.g. example.rb for Ruby')
+ "
+ name="snippet_file_name"
+ class="form-control js-snippet-file-name qa-snippet-file-name"
+ type="text"
+ @change="$emit('input', name)"
+ />
+ </div>
+</template>
diff --git a/app/assets/javascripts/blob/file_template_mediator.js b/app/assets/javascripts/blob/file_template_mediator.js
index 0fb02ca5965..d2c0ef330e4 100644
--- a/app/assets/javascripts/blob/file_template_mediator.js
+++ b/app/assets/javascripts/blob/file_template_mediator.js
@@ -9,6 +9,7 @@ import GitignoreSelector from './template_selectors/gitignore_selector';
import LicenseSelector from './template_selectors/license_selector';
import toast from '~/vue_shared/plugins/global_toast';
import { __ } from '~/locale';
+import initPopover from '~/blob/suggest_gitlab_ci_yml';
export default class FileTemplateMediator {
constructor({ editor, currentAction, projectId }) {
@@ -128,6 +129,7 @@ export default class FileTemplateMediator {
selectTemplateFile(selector, query, data) {
const self = this;
const { name } = selector.config;
+ const suggestCommitChanges = document.querySelector('.js-suggest-gitlab-ci-yml-commit-changes');
selector.renderLoading();
@@ -146,6 +148,10 @@ export default class FileTemplateMediator {
},
},
});
+
+ if (suggestCommitChanges) {
+ initPopover(suggestCommitChanges);
+ }
})
.catch(err => new Flash(`An error occurred while fetching the template: ${err}`));
}
diff --git a/app/assets/javascripts/blob/notebook/index.js b/app/assets/javascripts/blob/notebook/index.js
index 35634d63e4a..a8c94b6263e 100644
--- a/app/assets/javascripts/blob/notebook/index.js
+++ b/app/assets/javascripts/blob/notebook/index.js
@@ -1,87 +1,17 @@
-/* eslint-disable no-new */
import Vue from 'vue';
-import axios from '../../lib/utils/axios_utils';
-import notebookLab from '../../notebook/index.vue';
+import NotebookViewer from './notebook_viewer.vue';
export default () => {
const el = document.getElementById('js-notebook-viewer');
- new Vue({
+ return new Vue({
el,
- components: {
- notebookLab,
+ render(createElement) {
+ return createElement(NotebookViewer, {
+ props: {
+ endpoint: el.dataset.endpoint,
+ },
+ });
},
- data() {
- return {
- error: false,
- loadError: false,
- loading: true,
- json: {},
- };
- },
- mounted() {
- if (gon.katex_css_url) {
- const katexStyles = document.createElement('link');
- katexStyles.setAttribute('rel', 'stylesheet');
- katexStyles.setAttribute('href', gon.katex_css_url);
- document.head.appendChild(katexStyles);
- }
-
- if (gon.katex_js_url) {
- const katexScript = document.createElement('script');
- katexScript.addEventListener('load', () => {
- this.loadFile();
- });
- katexScript.setAttribute('src', gon.katex_js_url);
- document.head.appendChild(katexScript);
- } else {
- this.loadFile();
- }
- },
- methods: {
- loadFile() {
- axios
- .get(el.dataset.endpoint)
- .then(res => res.data)
- .then(data => {
- this.json = data;
- this.loading = false;
- })
- .catch(e => {
- if (e.status !== 200) {
- this.loadError = true;
- }
-
- this.error = true;
- });
- },
- },
- template: `
- <div class="container-fluid md prepend-top-default append-bottom-default">
- <div
- class="text-center loading"
- v-if="loading && !error">
- <i
- class="fa fa-spinner fa-spin"
- aria-hidden="true"
- aria-label="iPython notebook loading">
- </i>
- </div>
- <notebook-lab
- v-if="!loading && !error"
- :notebook="json"
- code-css-class="code white" />
- <p
- class="text-center"
- v-if="error">
- <span v-if="loadError">
- An error occurred while loading the file. Please try again later.
- </span>
- <span v-else>
- An error occurred while parsing the file.
- </span>
- </p>
- </div>
- `,
});
};
diff --git a/app/assets/javascripts/blob/notebook/notebook_viewer.vue b/app/assets/javascripts/blob/notebook/notebook_viewer.vue
new file mode 100644
index 00000000000..401fe9beb62
--- /dev/null
+++ b/app/assets/javascripts/blob/notebook/notebook_viewer.vue
@@ -0,0 +1,81 @@
+<script>
+import axios from '~/lib/utils/axios_utils';
+import notebookLab from '~/notebook/index.vue';
+import { GlLoadingIcon } from '@gitlab/ui';
+
+export default {
+ components: {
+ notebookLab,
+ GlLoadingIcon,
+ },
+ props: {
+ endpoint: {
+ type: String,
+ required: true,
+ },
+ },
+ data() {
+ return {
+ error: false,
+ loadError: false,
+ loading: true,
+ json: {},
+ };
+ },
+ mounted() {
+ if (gon.katex_css_url) {
+ const katexStyles = document.createElement('link');
+ katexStyles.setAttribute('rel', 'stylesheet');
+ katexStyles.setAttribute('href', gon.katex_css_url);
+ document.head.appendChild(katexStyles);
+ }
+
+ if (gon.katex_js_url) {
+ const katexScript = document.createElement('script');
+ katexScript.addEventListener('load', () => {
+ this.loadFile();
+ });
+ katexScript.setAttribute('src', gon.katex_js_url);
+ document.head.appendChild(katexScript);
+ } else {
+ this.loadFile();
+ }
+ },
+ methods: {
+ loadFile() {
+ axios
+ .get(this.endpoint)
+ .then(res => res.data)
+ .then(data => {
+ this.json = data;
+ this.loading = false;
+ })
+ .catch(e => {
+ if (e.status !== 200) {
+ this.loadError = true;
+ }
+ this.error = true;
+ });
+ },
+ },
+};
+</script>
+
+<template>
+ <div
+ class="js-notebook-viewer-mounted container-fluid md prepend-top-default append-bottom-default"
+ >
+ <div v-if="loading && !error" class="text-center loading">
+ <gl-loading-icon class="mt-5" size="lg" />
+ </div>
+ <notebook-lab v-if="!loading && !error" :notebook="json" code-css-class="code white" />
+ <p v-if="error" class="text-center">
+ <span v-if="loadError" ref="loadErrorMessage">{{
+ __('An error occurred while loading the file. Please try again later.')
+ }}</span>
+ <span v-else ref="parsingErrorMessage">{{
+ __('An error occurred while parsing the file.')
+ }}</span>
+ </p>
+ </div>
+</template>
diff --git a/app/assets/javascripts/blob/pdf/index.js b/app/assets/javascripts/blob/pdf/index.js
index 19778d07983..218987585b4 100644
--- a/app/assets/javascripts/blob/pdf/index.js
+++ b/app/assets/javascripts/blob/pdf/index.js
@@ -1,57 +1,17 @@
import Vue from 'vue';
-import pdfLab from '../../pdf/index.vue';
-import { GlLoadingIcon } from '@gitlab/ui';
+import PdfViewer from './pdf_viewer.vue';
export default () => {
const el = document.getElementById('js-pdf-viewer');
return new Vue({
el,
- components: {
- pdfLab,
- GlLoadingIcon,
+ render(createElement) {
+ return createElement(PdfViewer, {
+ props: {
+ pdf: el.dataset.endpoint,
+ },
+ });
},
- data() {
- return {
- error: false,
- loadError: false,
- loading: true,
- pdf: el.dataset.endpoint,
- };
- },
- methods: {
- onLoad() {
- this.loading = false;
- },
- onError(error) {
- this.loading = false;
- this.loadError = true;
- this.error = error;
- },
- },
- template: `
- <div class="js-pdf-viewer container-fluid md prepend-top-default append-bottom-default">
- <div
- class="text-center loading"
- v-if="loading && !error">
- <gl-loading-icon class="mt-5" size="lg"/>
- </div>
- <pdf-lab
- v-if="!loadError"
- :pdf="pdf"
- @pdflabload="onLoad"
- @pdflaberror="onError" />
- <p
- class="text-center"
- v-if="error">
- <span v-if="loadError">
- An error occurred while loading the file. Please try again later.
- </span>
- <span v-else>
- An error occurred while decoding the file.
- </span>
- </p>
- </div>
- `,
});
};
diff --git a/app/assets/javascripts/blob/pdf/pdf_viewer.vue b/app/assets/javascripts/blob/pdf/pdf_viewer.vue
new file mode 100644
index 00000000000..5eaddfc099a
--- /dev/null
+++ b/app/assets/javascripts/blob/pdf/pdf_viewer.vue
@@ -0,0 +1,49 @@
+<script>
+import PdfLab from '../../pdf/index.vue';
+import { GlLoadingIcon } from '@gitlab/ui';
+
+export default {
+ components: {
+ PdfLab,
+ GlLoadingIcon,
+ },
+ props: {
+ pdf: {
+ type: String,
+ required: true,
+ },
+ },
+ data() {
+ return {
+ error: false,
+ loadError: false,
+ loading: true,
+ };
+ },
+ methods: {
+ onLoad() {
+ this.loading = false;
+ },
+ onError(error) {
+ this.loading = false;
+ this.loadError = true;
+ this.error = error;
+ },
+ },
+};
+</script>
+
+<template>
+ <div class="js-pdf-viewer container-fluid md prepend-top-default append-bottom-default">
+ <div v-if="loading && !error" class="text-center loading">
+ <gl-loading-icon class="mt-5" size="lg" />
+ </div>
+ <pdf-lab v-if="!loadError" :pdf="pdf" @pdflabload="onLoad" @pdflaberror="onError" />
+ <p v-if="error" class="text-center">
+ <span v-if="loadError" ref="loadError">
+ {{ __('An error occurred while loading the file. Please try again later.') }}
+ </span>
+ <span v-else>{{ __('An error occurred while decoding the file.') }}</span>
+ </p>
+ </div>
+</template>
diff --git a/app/assets/javascripts/blob/pipeline_tour_success_modal.vue b/app/assets/javascripts/blob/pipeline_tour_success_modal.vue
new file mode 100644
index 00000000000..0739b4d5e39
--- /dev/null
+++ b/app/assets/javascripts/blob/pipeline_tour_success_modal.vue
@@ -0,0 +1,78 @@
+<script>
+import { GlModal, GlSprintf, GlLink } from '@gitlab/ui';
+import { sprintf, s__, __ } from '~/locale';
+import Cookies from 'js-cookie';
+import { glEmojiTag } from '~/emoji';
+
+export default {
+ beginnerLink:
+ 'https://about.gitlab.com/blog/2018/01/22/a-beginners-guide-to-continuous-integration/',
+ exampleLink: 'https://docs.gitlab.com/ee/ci/examples/',
+ bodyMessage: s__(
+ 'MR widget|The pipeline will now run automatically every time you commit code. Pipelines are useful for deploying static web pages, detecting vulnerabilities in dependencies, static or dynamic application security testing (SAST and DAST), and so much more!',
+ ),
+ modalTitle: sprintf(
+ __("That's it, well done!%{celebrate}"),
+ {
+ celebrate: glEmojiTag('tada'),
+ },
+ false,
+ ),
+ components: {
+ GlModal,
+ GlSprintf,
+ GlLink,
+ },
+ props: {
+ goToPipelinesPath: {
+ type: String,
+ required: true,
+ },
+ commitCookie: {
+ type: String,
+ required: true,
+ },
+ },
+ mounted() {
+ this.disableModalFromRenderingAgain();
+ },
+ methods: {
+ disableModalFromRenderingAgain() {
+ Cookies.remove(this.commitCookie);
+ },
+ },
+};
+</script>
+<template>
+ <gl-modal
+ visible
+ size="sm"
+ :title="$options.modalTitle"
+ modal-id="success-pipeline-modal-id-not-used"
+ >
+ <p>
+ {{ $options.bodyMessage }}
+ </p>
+ <gl-sprintf
+ :message="
+ s__(`MR widget|Take a look at our %{beginnerLinkStart}Beginner's Guide to Continuous Integration%{beginnerLinkEnd}
+ and our %{exampleLinkStart}examples of GitLab CI/CD%{exampleLinkEnd}
+ to see all the cool stuff you can do with it.`)
+ "
+ >
+ <template #beginnerLink="{content}">
+ <gl-link :href="$options.beginnerLink" target="_blank">
+ {{ content }}
+ </gl-link>
+ </template>
+ <template #exampleLink="{content}">
+ <gl-link :href="$options.exampleLink" target="_blank">
+ {{ content }}
+ </gl-link>
+ </template>
+ </gl-sprintf>
+ <template #modal-footer>
+ <a :href="goToPipelinesPath" class="btn btn-success">{{ __('Go to Pipelines') }}</a>
+ </template>
+ </gl-modal>
+</template>
diff --git a/app/assets/javascripts/blob/suggest_gitlab_ci_yml/components/popover.vue b/app/assets/javascripts/blob/suggest_gitlab_ci_yml/components/popover.vue
new file mode 100644
index 00000000000..7f0c232eea8
--- /dev/null
+++ b/app/assets/javascripts/blob/suggest_gitlab_ci_yml/components/popover.vue
@@ -0,0 +1,121 @@
+<script>
+import { GlPopover, GlSprintf, GlButton, GlIcon } from '@gitlab/ui';
+import Cookies from 'js-cookie';
+import { parseBoolean, scrollToElement } from '~/lib/utils/common_utils';
+import { s__ } from '~/locale';
+import { glEmojiTag } from '~/emoji';
+import Tracking from '~/tracking';
+
+const trackingMixin = Tracking.mixin();
+
+const popoverStates = {
+ suggest_gitlab_ci_yml: {
+ title: s__(`suggestPipeline|1/2: Choose a template`),
+ content: s__(
+ `suggestPipeline|We recommend the %{boldStart}Code Quality%{boldEnd} template, which will add a report widget to your Merge Requests. This way you’ll learn about code quality degradations much sooner. %{footerStart} Goodbye technical debt! %{footerEnd}`,
+ ),
+ emoji: glEmojiTag('wave'),
+ },
+ suggest_commit_first_project_gitlab_ci_yml: {
+ title: s__(`suggestPipeline|2/2: Commit your changes`),
+ content: s__(
+ `suggestPipeline|Commit the changes and your pipeline will automatically run for the first time.`,
+ ),
+ },
+};
+export default {
+ components: {
+ GlPopover,
+ GlSprintf,
+ GlIcon,
+ GlButton,
+ },
+ mixins: [trackingMixin],
+ props: {
+ target: {
+ type: String,
+ required: true,
+ },
+ trackLabel: {
+ type: String,
+ required: true,
+ },
+ dismissKey: {
+ type: String,
+ required: true,
+ },
+ humanAccess: {
+ type: String,
+ required: true,
+ },
+ },
+ data() {
+ return {
+ popoverDismissed: parseBoolean(Cookies.get(this.dismissKey)),
+ tracking: {
+ label: this.trackLabel,
+ property: this.humanAccess,
+ },
+ };
+ },
+ computed: {
+ suggestTitle() {
+ return popoverStates[this.trackLabel].title || '';
+ },
+ suggestContent() {
+ return popoverStates[this.trackLabel].content || '';
+ },
+ emoji() {
+ return popoverStates[this.trackLabel].emoji || '';
+ },
+ },
+ mounted() {
+ if (this.trackLabel === 'suggest_commit_first_project_gitlab_ci_yml' && !this.popoverDismissed)
+ scrollToElement(document.querySelector(this.target));
+
+ this.trackOnShow();
+ },
+ methods: {
+ onDismiss() {
+ this.popoverDismissed = true;
+ Cookies.set(this.dismissKey, this.popoverDismissed, { expires: 365 });
+ },
+ trackOnShow() {
+ if (!this.popoverDismissed) this.track();
+ },
+ },
+};
+</script>
+
+<template>
+ <gl-popover
+ v-if="!popoverDismissed"
+ show
+ :target="target"
+ placement="rightbottom"
+ trigger="manual"
+ container="viewport"
+ :css-classes="['suggest-gitlab-ci-yml', 'ml-4']"
+ >
+ <template #title>
+ <span v-html="suggestTitle"></span>
+ <span class="ml-auto">
+ <gl-button :aria-label="__('Close')" class="btn-blank" @click="onDismiss">
+ <gl-icon name="close" aria-hidden="true" />
+ </gl-button>
+ </span>
+ </template>
+
+ <gl-sprintf :message="suggestContent">
+ <template #bold="{content}">
+ <strong> {{ content }} </strong>
+ </template>
+ <template #footer="{content}">
+ <div class="mt-3">
+ {{ content }}
+ <span v-html="emoji"></span>
+ </div>
+ </template>
+ </gl-sprintf>
+ </gl-popover>
+</template>
diff --git a/app/assets/javascripts/blob/suggest_gitlab_ci_yml/index.js b/app/assets/javascripts/blob/suggest_gitlab_ci_yml/index.js
new file mode 100644
index 00000000000..3b67b3dd259
--- /dev/null
+++ b/app/assets/javascripts/blob/suggest_gitlab_ci_yml/index.js
@@ -0,0 +1,17 @@
+import Vue from 'vue';
+import Popover from './components/popover.vue';
+
+export default el =>
+ new Vue({
+ el,
+ render(createElement) {
+ return createElement(Popover, {
+ props: {
+ target: el.dataset.target,
+ trackLabel: el.dataset.trackLabel,
+ dismissKey: el.dataset.dismissKey,
+ humanAccess: el.dataset.humanAccess,
+ },
+ });
+ },
+ });
diff --git a/app/assets/javascripts/blob/template_selector.js b/app/assets/javascripts/blob/template_selector.js
index b0de4dc8628..2427e25a17d 100644
--- a/app/assets/javascripts/blob/template_selector.js
+++ b/app/assets/javascripts/blob/template_selector.js
@@ -92,10 +92,10 @@ export default class TemplateSelector {
}
startLoadingSpinner() {
- this.$dropdownIcon.addClass('fa-spinner fa-spin').removeClass('fa-chevron-down');
+ this.$dropdownIcon.addClass('spinner').removeClass('fa-chevron-down');
}
stopLoadingSpinner() {
- this.$dropdownIcon.addClass('fa-chevron-down').removeClass('fa-spinner fa-spin');
+ this.$dropdownIcon.addClass('fa-chevron-down').removeClass('spinner');
}
}
diff --git a/app/assets/javascripts/blob/utils.js b/app/assets/javascripts/blob/utils.js
new file mode 100644
index 00000000000..dc2ec642e59
--- /dev/null
+++ b/app/assets/javascripts/blob/utils.js
@@ -0,0 +1,24 @@
+/* global ace */
+import Editor from '~/editor/editor_lite';
+
+export function initEditorLite({ el, blobPath, blobContent }) {
+ if (!el) {
+ throw new Error(`"el" parameter is required to initialize Editor`);
+ }
+ let editor;
+
+ if (window?.gon?.features?.monacoSnippets) {
+ editor = new Editor();
+ editor.createInstance({
+ el,
+ blobPath,
+ blobContent,
+ });
+ } else {
+ editor = ace.edit(el);
+ }
+
+ return editor;
+}
+
+export default () => ({});
diff --git a/app/assets/javascripts/blob/viewer/index.js b/app/assets/javascripts/blob/viewer/index.js
index 742404da46c..3ac419557eb 100644
--- a/app/assets/javascripts/blob/viewer/index.js
+++ b/app/assets/javascripts/blob/viewer/index.js
@@ -5,10 +5,43 @@ import { handleLocationHash } from '../../lib/utils/common_utils';
import axios from '../../lib/utils/axios_utils';
import { __ } from '~/locale';
+const loadRichBlobViewer = type => {
+ switch (type) {
+ case 'balsamiq':
+ return import(/* webpackChunkName: 'balsamiq_viewer' */ '../balsamiq_viewer');
+ case 'notebook':
+ return import(/* webpackChunkName: 'notebook_viewer' */ '../notebook_viewer');
+ case 'openapi':
+ return import(/* webpackChunkName: 'openapi_viewer' */ '../openapi_viewer');
+ case 'pdf':
+ return import(/* webpackChunkName: 'pdf_viewer' */ '../pdf_viewer');
+ case 'sketch':
+ return import(/* webpackChunkName: 'sketch_viewer' */ '../sketch_viewer');
+ case 'stl':
+ return import(/* webpackChunkName: 'stl_viewer' */ '../stl_viewer');
+ default:
+ return Promise.resolve();
+ }
+};
+
+export const handleBlobRichViewer = (viewer, type) => {
+ if (!viewer || !type) return;
+
+ loadRichBlobViewer(type)
+ .then(module => module?.default(viewer))
+ .catch(error => {
+ Flash(__('Error loading file viewer.'));
+ throw error;
+ });
+};
+
export default class BlobViewer {
constructor() {
+ const viewer = document.querySelector('.blob-viewer[data-type="rich"]');
+ const type = viewer?.dataset?.richType;
BlobViewer.initAuxiliaryViewer();
- BlobViewer.initRichViewer();
+
+ handleBlobRichViewer(viewer, type);
this.initMainViewers();
}
@@ -20,42 +53,6 @@ export default class BlobViewer {
BlobViewer.loadViewer(auxiliaryViewer);
}
- static initRichViewer() {
- const viewer = document.querySelector('.blob-viewer[data-type="rich"]');
- if (!viewer || !viewer.dataset.richType) return;
-
- const initViewer = promise =>
- promise
- .then(module => module.default(viewer))
- .catch(error => {
- Flash(__('Error loading file viewer.'));
- throw error;
- });
-
- switch (viewer.dataset.richType) {
- case 'balsamiq':
- initViewer(import(/* webpackChunkName: 'balsamiq_viewer' */ '../balsamiq_viewer'));
- break;
- case 'notebook':
- initViewer(import(/* webpackChunkName: 'notebook_viewer' */ '../notebook_viewer'));
- break;
- case 'openapi':
- initViewer(import(/* webpackChunkName: 'openapi_viewer' */ '../openapi_viewer'));
- break;
- case 'pdf':
- initViewer(import(/* webpackChunkName: 'pdf_viewer' */ '../pdf_viewer'));
- break;
- case 'sketch':
- initViewer(import(/* webpackChunkName: 'sketch_viewer' */ '../sketch_viewer'));
- break;
- case 'stl':
- initViewer(import(/* webpackChunkName: 'stl_viewer' */ '../stl_viewer'));
- break;
- default:
- break;
- }
- }
-
initMainViewers() {
this.$fileHolder = $('.file-holder');
if (!this.$fileHolder.length) return;
diff --git a/app/assets/javascripts/blob_edit/blob_bundle.js b/app/assets/javascripts/blob_edit/blob_bundle.js
index 6aaf5bf7296..f4ce98037c8 100644
--- a/app/assets/javascripts/blob_edit/blob_bundle.js
+++ b/app/assets/javascripts/blob_edit/blob_bundle.js
@@ -4,11 +4,13 @@ import $ from 'jquery';
import NewCommitForm from '../new_commit_form';
import EditBlob from './edit_blob';
import BlobFileDropzone from '../blob/blob_file_dropzone';
+import initPopover from '~/blob/suggest_gitlab_ci_yml';
export default () => {
const editBlobForm = $('.js-edit-blob-form');
const uploadBlobForm = $('.js-upload-blob-form');
const deleteBlobForm = $('.js-delete-blob-form');
+ const suggestEl = document.querySelector('.js-suggest-gitlab-ci-yml');
if (editBlobForm.length) {
const urlRoot = editBlobForm.data('relativeUrlRoot');
@@ -56,4 +58,8 @@ export default () => {
if (deleteBlobForm.length) {
new NewCommitForm(deleteBlobForm);
}
+
+ if (suggestEl) {
+ initPopover(suggestEl);
+ }
};
diff --git a/app/assets/javascripts/boards/components/board.js b/app/assets/javascripts/boards/components/board.js
index a6deb656b37..67046715e9b 100644
--- a/app/assets/javascripts/boards/components/board.js
+++ b/app/assets/javascripts/boards/components/board.js
@@ -1,7 +1,7 @@
import $ from 'jquery';
import Sortable from 'sortablejs';
import Vue from 'vue';
-import { GlButtonGroup, GlButton, GlTooltip } from '@gitlab/ui';
+import { GlButtonGroup, GlButton, GlLabel, GlTooltip } from '@gitlab/ui';
import isWipLimitsOn from 'ee_else_ce/boards/mixins/is_wip_limits';
import { s__, __, sprintf } from '~/locale';
import Icon from '~/vue_shared/components/icon.vue';
@@ -14,6 +14,7 @@ import IssueCount from './issue_count.vue';
import boardsStore from '../stores/boards_store';
import { getBoardSortableDefaultOptions, sortableEnd } from '../mixins/sortable_default_options';
import { ListType } from '../constants';
+import { isScopedLabel } from '~/lib/utils/common_utils';
export default Vue.extend({
components: {
@@ -24,6 +25,7 @@ export default Vue.extend({
GlButtonGroup,
IssueCount,
GlButton,
+ GlLabel,
GlTooltip,
},
directives: {
@@ -95,6 +97,9 @@ export default Vue.extend({
// eslint-disable-next-line @gitlab/i18n/no-non-i18n-strings
return `boards.${this.boardId}.${this.list.type}.${this.list.id}`;
},
+ helpLink() {
+ return boardsStore.scopedLabels.helpLink;
+ },
},
watch: {
filter: {
@@ -145,6 +150,10 @@ export default Vue.extend({
}
},
methods: {
+ showScopedLabels(label) {
+ return boardsStore.scopedLabels.enabled && isScopedLabel(label);
+ },
+
showNewIssueForm() {
this.$refs['board-list'].showIssueForm = !this.$refs['board-list'].showIssueForm;
},
diff --git a/app/assets/javascripts/boards/components/board_card.vue b/app/assets/javascripts/boards/components/board_card.vue
index 12d68256598..0fc60528eb6 100644
--- a/app/assets/javascripts/boards/components/board_card.vue
+++ b/app/assets/javascripts/boards/components/board_card.vue
@@ -65,11 +65,12 @@ export default {
},
showIssue(e) {
if (e.target.classList.contains('js-no-trigger')) return;
- if (this.showDetail) {
- this.showDetail = false;
- // If CMD or CTRL is clicked
- const isMultiSelect = this.canMultiSelect && (e.ctrlKey || e.metaKey);
+ // If CMD or CTRL is clicked
+ const isMultiSelect = this.canMultiSelect && (e.ctrlKey || e.metaKey);
+
+ if (this.showDetail || isMultiSelect) {
+ this.showDetail = false;
if (boardsStore.detail.issue && boardsStore.detail.issue.id === this.issue.id) {
eventHub.$emit('clearDetailIssue', isMultiSelect);
diff --git a/app/assets/javascripts/boards/components/board_list.vue b/app/assets/javascripts/boards/components/board_list.vue
index 4a64d9e04f2..c4e2c398d45 100644
--- a/app/assets/javascripts/boards/components/board_list.vue
+++ b/app/assets/javascripts/boards/components/board_list.vue
@@ -1,7 +1,6 @@
<script>
import { Sortable, MultiDrag } from 'sortablejs';
import { GlLoadingIcon } from '@gitlab/ui';
-import _ from 'underscore';
import boardNewIssue from './board_new_issue.vue';
import boardCard from './board_card.vue';
import eventHub from '../eventhub';
@@ -266,11 +265,12 @@ export default {
* same list or the other list. Don't remove items if it's the same list.
*/
const isSameList = toList && toList.id === this.list.id;
-
if (toList && !isSameList && boardsStore.shouldRemoveIssue(this.list, toList)) {
const issues = items.map(item => this.list.findIssue(Number(item.dataset.issueId)));
-
- if (_.compact(issues).length && !boardsStore.issuesAreContiguous(this.list, issues)) {
+ if (
+ issues.filter(Boolean).length &&
+ !boardsStore.issuesAreContiguous(this.list, issues)
+ ) {
const indexes = [];
const ids = this.list.issues.map(i => i.id);
issues.forEach(issue => {
diff --git a/app/assets/javascripts/boards/components/board_sidebar.js b/app/assets/javascripts/boards/components/board_sidebar.js
index ba1fe9202fc..9b67126bee2 100644
--- a/app/assets/javascripts/boards/components/board_sidebar.js
+++ b/app/assets/javascripts/boards/components/board_sidebar.js
@@ -2,6 +2,7 @@
import $ from 'jquery';
import Vue from 'vue';
+import { GlLabel } from '@gitlab/ui';
import Flash from '~/flash';
import { sprintf, __ } from '~/locale';
import Sidebar from '~/right_sidebar';
@@ -22,6 +23,7 @@ export default Vue.extend({
components: {
AssigneeTitle,
Assignees,
+ GlLabel,
SidebarEpicsSelect: () =>
import('ee_component/sidebar/components/sidebar_item_epics_select.vue'),
RemoveBtn,
@@ -67,6 +69,9 @@ export default Vue.extend({
selectedLabels() {
return this.hasLabels ? this.issue.labels.map(l => l.title).join(',') : '';
},
+ helpLink() {
+ return boardsStore.scopedLabels.helpLink;
+ },
},
watch: {
detail: {
@@ -147,8 +152,5 @@ export default Vue.extend({
showScopedLabels(label) {
return boardsStore.scopedLabels.enabled && isScopedLabel(label);
},
- helpLink() {
- return boardsStore.scopedLabels.helpLink;
- },
},
});
diff --git a/app/assets/javascripts/boards/components/boards_selector.vue b/app/assets/javascripts/boards/components/boards_selector.vue
index eeb0fbec1ed..fbf487357a5 100644
--- a/app/assets/javascripts/boards/components/boards_selector.vue
+++ b/app/assets/javascripts/boards/components/boards_selector.vue
@@ -1,5 +1,5 @@
<script>
-import { throttle } from 'underscore';
+import { throttle } from 'lodash';
import {
GlLoadingIcon,
GlSearchBoxByType,
@@ -10,6 +10,11 @@ import {
} from '@gitlab/ui';
import httpStatusCodes from '~/lib/utils/http_status';
+
+import { getIdFromGraphQLId } from '~/graphql_shared/utils';
+import projectQuery from '../queries/project_boards.query.graphql';
+import groupQuery from '../queries/group_boards.query.graphql';
+
import boardsStore from '../stores/boards_store';
import BoardForm from './board_form.vue';
@@ -88,8 +93,9 @@ export default {
},
data() {
return {
- loading: true,
hasScrollFade: false,
+ loadingBoards: 0,
+ loadingRecentBoards: false,
scrollFadeInitialized: false,
boards: [],
recentBoards: [],
@@ -102,6 +108,12 @@ export default {
};
},
computed: {
+ parentType() {
+ return this.groupId ? 'group' : 'project';
+ },
+ loading() {
+ return this.loadingRecentBoards && this.loadingBoards;
+ },
currentPage() {
return this.state.currentPage;
},
@@ -110,14 +122,6 @@ export default {
board.name.toLowerCase().includes(this.filterTerm.toLowerCase()),
);
},
- reload: {
- get() {
- return this.state.reload;
- },
- set(newValue) {
- this.state.reload = newValue;
- },
- },
board() {
return this.state.currentBoard;
},
@@ -142,16 +146,6 @@ export default {
this.scrollFadeInitialized = false;
this.$nextTick(this.setScrollFade);
},
- reload() {
- if (this.reload) {
- this.boards = [];
- this.recentBoards = [];
- this.loading = true;
- this.reload = false;
-
- this.loadBoards(false);
- }
- },
},
created() {
boardsStore.setCurrentBoard(this.currentBoard);
@@ -165,49 +159,71 @@ export default {
return;
}
- const recentBoardsPromise = new Promise((resolve, reject) =>
- boardsStore
- .recentBoards()
- .then(resolve)
- .catch(err => {
- /**
- * If user is unauthorized we'd still want to resolve the
- * request to display all boards.
- */
- if (err.response.status === httpStatusCodes.UNAUTHORIZED) {
- resolve({ data: [] }); // recent boards are empty
- return;
- }
- reject(err);
- }),
- );
+ this.$apollo.addSmartQuery('boards', {
+ variables() {
+ return { fullPath: this.state.endpoints.fullPath };
+ },
+ query() {
+ return this.groupId ? groupQuery : projectQuery;
+ },
+ loadingKey: 'loadingBoards',
+ update(data) {
+ if (!data?.[this.parentType]) {
+ return [];
+ }
+ return data[this.parentType].boards.edges.map(({ node }) => ({
+ id: getIdFromGraphQLId(node.id),
+ name: node.name,
+ }));
+ },
+ });
- Promise.all([boardsStore.allBoards(), recentBoardsPromise])
- .then(([allBoards, recentBoards]) => [allBoards.data, recentBoards.data])
- .then(([allBoardsJson, recentBoardsJson]) => {
- this.loading = false;
- this.boards = allBoardsJson;
- this.recentBoards = recentBoardsJson;
+ this.loadingRecentBoards = true;
+ boardsStore
+ .recentBoards()
+ .then(res => {
+ this.recentBoards = res.data;
+ })
+ .catch(err => {
+ /**
+ * If user is unauthorized we'd still want to resolve the
+ * request to display all boards.
+ */
+ if (err?.response?.status === httpStatusCodes.UNAUTHORIZED) {
+ this.recentBoards = []; // recent boards are empty
+ return;
+ }
+ throw err;
})
.then(() => this.$nextTick()) // Wait for boards list in DOM
.then(() => {
this.setScrollFade();
})
- .catch(() => {
- this.loading = false;
+ .catch(() => {})
+ .finally(() => {
+ this.loadingRecentBoards = false;
});
},
isScrolledUp() {
const { content } = this.$refs;
+
+ if (!content) {
+ return false;
+ }
+
const currentPosition = this.contentClientHeight + content.scrollTop;
- return content && currentPosition < this.maxPosition;
+ return currentPosition < this.maxPosition;
},
initScrollFade() {
- this.scrollFadeInitialized = true;
-
const { content } = this.$refs;
+ if (!content) {
+ return;
+ }
+
+ this.scrollFadeInitialized = true;
+
this.contentClientHeight = content.clientHeight;
this.maxPosition = content.scrollHeight;
},
diff --git a/app/assets/javascripts/boards/components/issue_card_inner.vue b/app/assets/javascripts/boards/components/issue_card_inner.vue
index bdaed17fd09..daaa12c096b 100644
--- a/app/assets/javascripts/boards/components/issue_card_inner.vue
+++ b/app/assets/javascripts/boards/components/issue_card_inner.vue
@@ -1,7 +1,7 @@
<script>
-import _ from 'underscore';
+import { sortBy } from 'lodash';
import { mapState } from 'vuex';
-import { GlTooltipDirective } from '@gitlab/ui';
+import { GlLabel, GlTooltipDirective } from '@gitlab/ui';
import issueCardInner from 'ee_else_ce/boards/mixins/issue_card_inner';
import { sprintf, __ } from '~/locale';
import Icon from '~/vue_shared/components/icon.vue';
@@ -10,18 +10,17 @@ import UserAvatarLink from '../../vue_shared/components/user_avatar/user_avatar_
import IssueDueDate from './issue_due_date.vue';
import IssueTimeEstimate from './issue_time_estimate.vue';
import boardsStore from '../stores/boards_store';
-import IssueCardInnerScopedLabel from './issue_card_inner_scoped_label.vue';
import { isScopedLabel } from '~/lib/utils/common_utils';
export default {
components: {
+ GlLabel,
Icon,
UserAvatarLink,
TooltipOnTruncate,
IssueDueDate,
IssueTimeEstimate,
IssueCardWeight: () => import('ee_component/boards/components/issue_card_weight.vue'),
- IssueCardInnerScopedLabel,
},
directives: {
GlTooltip: GlTooltipDirective,
@@ -101,10 +100,7 @@ export default {
return !groupId ? referencePath.split('#')[0] : null;
},
orderedLabels() {
- return _.chain(this.issue.labels)
- .filter(this.isNonListLabel)
- .sortBy('title')
- .value();
+ return sortBy(this.issue.labels.filter(this.isNonListLabel), 'title');
},
helpLink() {
return boardsStore.scopedLabels.helpLink;
@@ -145,12 +141,6 @@ export default {
boardsStore.toggleFilter(filter);
},
- labelStyle(label) {
- return {
- backgroundColor: label.color,
- color: label.textColor,
- };
- },
showScopedLabel(label) {
return boardsStore.scopedLabels.enabled && isScopedLabel(label);
},
@@ -177,34 +167,23 @@ export default {
class="confidential-icon append-right-4"
:aria-label="__('Confidential')"
/>
- <a :href="issue.path" :title="issue.title" class="js-no-trigger" @mousemove.stop>
- {{ issue.title }}
- </a>
+ <a :href="issue.path" :title="issue.title" class="js-no-trigger" @mousemove.stop>{{
+ issue.title
+ }}</a>
</h4>
</div>
<div v-if="showLabelFooter" class="board-card-labels prepend-top-4 d-flex flex-wrap">
<template v-for="label in orderedLabels">
- <issue-card-inner-scoped-label
- v-if="showScopedLabel(label)"
+ <gl-label
:key="label.id"
- :label="label"
- :label-style="labelStyle(label)"
+ :background-color="label.color"
+ :title="label.title"
+ :description="label.description"
+ size="sm"
+ :scoped="showScopedLabel(label)"
:scoped-labels-documentation-link="helpLink"
- @scoped-label-click="filterByLabel($event)"
- />
-
- <button
- v-else
- :key="label.id"
- v-gl-tooltip
- :style="labelStyle(label)"
- :title="label.description"
- class="badge color-label append-right-4 prepend-top-4"
- type="button"
@click="filterByLabel(label)"
- >
- {{ label.title }}
- </button>
+ />
</template>
</div>
<div class="board-card-footer d-flex justify-content-between align-items-end">
@@ -225,7 +204,7 @@ export default {
#{{ issue.iid }}
</span>
<span class="board-info-items prepend-top-8 d-inline-block">
- <issue-due-date v-if="issue.dueDate" :date="issue.dueDate" />
+ <issue-due-date v-if="issue.dueDate" :date="issue.dueDate" :closed="issue.closed" />
<issue-time-estimate v-if="issue.timeEstimate" :estimate="issue.timeEstimate" />
<issue-card-weight
v-if="validIssueWeight"
diff --git a/app/assets/javascripts/boards/components/issue_card_inner_scoped_label.vue b/app/assets/javascripts/boards/components/issue_card_inner_scoped_label.vue
deleted file mode 100644
index fa4c68964cb..00000000000
--- a/app/assets/javascripts/boards/components/issue_card_inner_scoped_label.vue
+++ /dev/null
@@ -1,45 +0,0 @@
-<script>
-import { GlLink, GlTooltip } from '@gitlab/ui';
-
-export default {
- components: {
- GlTooltip,
- GlLink,
- },
- props: {
- label: {
- type: Object,
- required: true,
- },
- labelStyle: {
- type: Object,
- required: true,
- },
- scopedLabelsDocumentationLink: {
- type: String,
- required: true,
- },
- },
-};
-</script>
-
-<template>
- <span
- class="d-inline-block position-relative scoped-label-wrapper append-right-4 prepend-top-4 board-label"
- >
- <a @click="$emit('scoped-label-click', label)">
- <span :ref="'labelTitleRef'" :style="labelStyle" class="badge label color-label">
- {{ label.title }}
- </span>
- <gl-tooltip :target="() => $refs.labelTitleRef" placement="top" boundary="viewport">
- <span class="font-weight-bold scoped-label-tooltip-title">{{ __('Scoped label') }}</span
- ><br />
- {{ label.description }}
- </gl-tooltip>
- </a>
-
- <gl-link :href="scopedLabelsDocumentationLink" target="_blank" class="label scoped-label"
- ><i class="fa fa-question-circle" :style="labelStyle"></i
- ></gl-link>
- </span>
-</template>
diff --git a/app/assets/javascripts/boards/components/issue_due_date.vue b/app/assets/javascripts/boards/components/issue_due_date.vue
index a32ebdab5e1..1d70c635c18 100644
--- a/app/assets/javascripts/boards/components/issue_due_date.vue
+++ b/app/assets/javascripts/boards/components/issue_due_date.vue
@@ -16,6 +16,11 @@ export default {
GlTooltip,
},
props: {
+ closed: {
+ type: Boolean,
+ required: false,
+ default: false,
+ },
date: {
type: String,
required: true,
@@ -66,7 +71,7 @@ export default {
return getDayDifference(today, this.issueDueDate);
},
isPastDue() {
- if (this.timeDifference >= 0) return false;
+ if (this.timeDifference >= 0 || this.closed) return false;
return true;
},
standardDateFormat() {
@@ -92,7 +97,8 @@ export default {
}}</time>
</span>
<gl-tooltip :target="() => $refs.issueDueDate" :placement="tooltipPlacement">
- <span class="bold">{{ __('Due date') }}</span> <br />
+ <span class="bold">{{ __('Due date') }}</span>
+ <br />
<span :class="{ 'text-danger-muted': isPastDue }">{{ title }}</span>
</gl-tooltip>
</span>
diff --git a/app/assets/javascripts/boards/components/project_select.vue b/app/assets/javascripts/boards/components/project_select.vue
index 4a50b1e2efc..30f1e843e7b 100644
--- a/app/assets/javascripts/boards/components/project_select.vue
+++ b/app/assets/javascripts/boards/components/project_select.vue
@@ -1,6 +1,6 @@
<script>
import $ from 'jquery';
-import _ from 'underscore';
+import { escape } from 'lodash';
import { GlLoadingIcon } from '@gitlab/ui';
import Icon from '~/vue_shared/components/icon.vue';
import { __ } from '~/locale';
@@ -83,7 +83,7 @@ export default {
}" data-project-name="${project.name}" data-project-name-with-namespace="${
project.name_with_namespace
}">
- ${_.escape(project.name_with_namespace)}
+ ${escape(project.name_with_namespace)}
</a>
</li>
`;
diff --git a/app/assets/javascripts/boards/index.js b/app/assets/javascripts/boards/index.js
index f1b481fc386..781cb0c1cc9 100644
--- a/app/assets/javascripts/boards/index.js
+++ b/app/assets/javascripts/boards/index.js
@@ -84,7 +84,6 @@ export default () => {
rootPath: $boardApp.dataset.rootPath,
bulkUpdatePath: $boardApp.dataset.bulkUpdatePath,
detailIssue: boardsStore.detail,
- defaultAvatar: $boardApp.dataset.defaultAvatar,
},
computed: {
detailIssueVisible() {
@@ -98,6 +97,7 @@ export default () => {
listsEndpoint: this.listsEndpoint,
bulkUpdatePath: this.bulkUpdatePath,
boardId: this.boardId,
+ fullPath: $boardApp.dataset.fullPath,
});
boardsStore.rootPath = this.boardsEndpoint;
@@ -129,13 +129,10 @@ export default () => {
position = -1;
}
- boardsStore.addList(
- {
- ...listObj,
- position,
- },
- this.defaultAvatar,
- );
+ boardsStore.addList({
+ ...listObj,
+ position,
+ });
});
boardsStore.addBlankState();
diff --git a/app/assets/javascripts/boards/mixins/sortable_default_options.js b/app/assets/javascripts/boards/mixins/sortable_default_options.js
index f77f131c71a..68ea28e68d9 100644
--- a/app/assets/javascripts/boards/mixins/sortable_default_options.js
+++ b/app/assets/javascripts/boards/mixins/sortable_default_options.js
@@ -26,7 +26,6 @@ export function getBoardSortableDefaultOptions(obj) {
scrollSpeed: 20,
onStart: sortableStart,
onEnd: sortableEnd,
- fallbackTolerance: 1,
});
Object.keys(obj).forEach(key => {
diff --git a/app/assets/javascripts/boards/models/assignee.js b/app/assets/javascripts/boards/models/assignee.js
index 4a29b0d0581..5f5758583bb 100644
--- a/app/assets/javascripts/boards/models/assignee.js
+++ b/app/assets/javascripts/boards/models/assignee.js
@@ -1,9 +1,9 @@
export default class ListAssignee {
- constructor(obj, defaultAvatar) {
+ constructor(obj) {
this.id = obj.id;
this.name = obj.name;
this.username = obj.username;
- this.avatar = obj.avatar_url || obj.avatar || defaultAvatar;
+ this.avatar = obj.avatar_url || obj.avatar || gon.default_avatar_url;
this.path = obj.path;
this.state = obj.state;
this.webUrl = obj.web_url || obj.webUrl;
diff --git a/app/assets/javascripts/boards/models/issue.js b/app/assets/javascripts/boards/models/issue.js
index 044d96a9aec..d099c4b930c 100644
--- a/app/assets/javascripts/boards/models/issue.js
+++ b/app/assets/javascripts/boards/models/issue.js
@@ -10,7 +10,7 @@ import IssueProject from './project';
import boardsStore from '../stores/boards_store';
class ListIssue {
- constructor(obj, defaultAvatar) {
+ constructor(obj) {
this.subscribed = obj.subscribed;
this.labels = [];
this.assignees = [];
@@ -19,42 +19,14 @@ class ListIssue {
this.isFetching = {
subscriptions: true,
};
+ this.closed = obj.closed;
this.isLoading = {};
- this.refreshData(obj, defaultAvatar);
- }
-
- refreshData(obj, defaultAvatar) {
- this.id = obj.id;
- this.iid = obj.iid;
- this.title = obj.title;
- this.confidential = obj.confidential;
- this.dueDate = obj.due_date;
- this.sidebarInfoEndpoint = obj.issue_sidebar_endpoint;
- this.referencePath = obj.reference_path;
- this.path = obj.real_path;
- this.toggleSubscriptionEndpoint = obj.toggle_subscription_endpoint;
- this.project_id = obj.project_id;
- this.timeEstimate = obj.time_estimate;
- this.assignableLabelsEndpoint = obj.assignable_labels_endpoint;
- this.blocked = obj.blocked;
-
- if (obj.project) {
- this.project = new IssueProject(obj.project);
- }
-
- if (obj.milestone) {
- this.milestone = new ListMilestone(obj.milestone);
- this.milestone_id = obj.milestone.id;
- }
-
- if (obj.labels) {
- this.labels = obj.labels.map(label => new ListLabel(label));
- }
+ this.refreshData(obj);
+ }
- if (obj.assignees) {
- this.assignees = obj.assignees.map(a => new ListAssignee(a, defaultAvatar));
- }
+ refreshData(obj) {
+ boardsStore.refreshIssueData(this, obj);
}
addLabel(label) {
diff --git a/app/assets/javascripts/boards/models/list.js b/app/assets/javascripts/boards/models/list.js
index ff50b8ed7d1..990b648190a 100644
--- a/app/assets/javascripts/boards/models/list.js
+++ b/app/assets/javascripts/boards/models/list.js
@@ -36,7 +36,7 @@ const TYPES = {
};
class List {
- constructor(obj, defaultAvatar) {
+ constructor(obj) {
this.id = obj.id;
this._uid = this.guid();
this.position = obj.position;
@@ -55,7 +55,6 @@ class List {
this.maxIssueCount = Object.hasOwnProperty.call(obj, 'max_issue_count')
? obj.max_issue_count
: 0;
- this.defaultAvatar = defaultAvatar;
if (obj.label) {
this.label = new ListLabel(obj.label);
@@ -156,7 +155,7 @@ class List {
createIssues(data) {
data.forEach(issueObj => {
- this.addIssue(new ListIssue(issueObj, this.defaultAvatar));
+ this.addIssue(new ListIssue(issueObj));
});
}
diff --git a/app/assets/javascripts/boards/mount_multiple_boards_switcher.js b/app/assets/javascripts/boards/mount_multiple_boards_switcher.js
index 8d22f009784..73d37459bfe 100644
--- a/app/assets/javascripts/boards/mount_multiple_boards_switcher.js
+++ b/app/assets/javascripts/boards/mount_multiple_boards_switcher.js
@@ -1,7 +1,15 @@
import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import createDefaultClient from '~/lib/graphql';
import { parseBoolean } from '~/lib/utils/common_utils';
import BoardsSelector from '~/boards/components/boards_selector.vue';
+Vue.use(VueApollo);
+
+const apolloProvider = new VueApollo({
+ defaultClient: createDefaultClient(),
+});
+
export default () => {
const boardsSwitcherElement = document.getElementById('js-multiple-boards-switcher');
return new Vue({
@@ -9,6 +17,7 @@ export default () => {
components: {
BoardsSelector,
},
+ apolloProvider,
data() {
const { dataset } = boardsSwitcherElement;
diff --git a/app/assets/javascripts/boards/queries/board.fragment.graphql b/app/assets/javascripts/boards/queries/board.fragment.graphql
new file mode 100644
index 00000000000..48f55e899bf
--- /dev/null
+++ b/app/assets/javascripts/boards/queries/board.fragment.graphql
@@ -0,0 +1,4 @@
+fragment BoardFragment on Board {
+ id,
+ name
+}
diff --git a/app/assets/javascripts/boards/queries/group_boards.query.graphql b/app/assets/javascripts/boards/queries/group_boards.query.graphql
new file mode 100644
index 00000000000..74c224add7d
--- /dev/null
+++ b/app/assets/javascripts/boards/queries/group_boards.query.graphql
@@ -0,0 +1,13 @@
+#import "ee_else_ce/boards/queries/board.fragment.graphql"
+
+query group_boards($fullPath: ID!) {
+ group(fullPath: $fullPath) {
+ boards {
+ edges {
+ node {
+ ...BoardFragment
+ }
+ }
+ }
+ }
+}
diff --git a/app/assets/javascripts/boards/queries/project_boards.query.graphql b/app/assets/javascripts/boards/queries/project_boards.query.graphql
new file mode 100644
index 00000000000..a1326bd5eff
--- /dev/null
+++ b/app/assets/javascripts/boards/queries/project_boards.query.graphql
@@ -0,0 +1,13 @@
+#import "ee_else_ce/boards/queries/board.fragment.graphql"
+
+query project_boards($fullPath: ID!) {
+ project(fullPath: $fullPath) {
+ boards {
+ edges {
+ node {
+ ...BoardFragment
+ }
+ }
+ }
+ }
+}
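
Usage sketch only: a component mounted under the apolloProvider added above could consume these boards queries through a vue-apollo smart query. The component shape, prop names, and the unwrapping of the Relay-style edges below are illustrative assumptions, not part of this change.

import groupBoardsQuery from '~/boards/queries/group_boards.query.graphql';

// illustrative component — names and props are assumptions
export default {
  props: {
    groupFullPath: { type: String, required: true },
  },
  apollo: {
    boards: {
      query: groupBoardsQuery,
      variables() {
        return { fullPath: this.groupFullPath };
      },
      // the query returns edges/node pairs, so unwrap them into a flat array
      update: data => data.group.boards.edges.map(({ node }) => node),
    },
  },
};
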
diff --git a/app/assets/javascripts/boards/stores/boards_store.js b/app/assets/javascripts/boards/stores/boards_store.js
index e5ce8b70a4f..e5447080e37 100644
--- a/app/assets/javascripts/boards/stores/boards_store.js
+++ b/app/assets/javascripts/boards/stores/boards_store.js
@@ -2,7 +2,7 @@
/* global List */
import $ from 'jquery';
-import _ from 'underscore';
+import { sortBy } from 'lodash';
import Vue from 'vue';
import Cookies from 'js-cookie';
import BoardsStoreEE from 'ee_else_ce/boards/stores/boards_store_ee';
@@ -12,6 +12,10 @@ import axios from '~/lib/utils/axios_utils';
import { mergeUrlParams } from '~/lib/utils/url_utility';
import eventHub from '../eventhub';
import { ListType } from '../constants';
+import IssueProject from '../models/project';
+import ListLabel from '../models/label';
+import ListAssignee from '../models/assignee';
+import ListMilestone from '../models/milestone';
const boardsStore = {
disabled: false,
@@ -30,7 +34,6 @@ const boardsStore = {
labels: [],
},
currentPage: '',
- reload: false,
endpoints: {},
},
detail: {
@@ -42,7 +45,14 @@ const boardsStore = {
},
multiSelect: { list: [] },
- setEndpoints({ boardsEndpoint, listsEndpoint, bulkUpdatePath, boardId, recentBoardsEndpoint }) {
+ setEndpoints({
+ boardsEndpoint,
+ listsEndpoint,
+ bulkUpdatePath,
+ boardId,
+ recentBoardsEndpoint,
+ fullPath,
+ }) {
const listsEndpointGenerate = `${listsEndpoint}/generate.json`;
this.state.endpoints = {
boardsEndpoint,
@@ -50,6 +60,7 @@ const boardsStore = {
listsEndpoint,
listsEndpointGenerate,
bulkUpdatePath,
+ fullPath,
recentBoardsEndpoint: `${recentBoardsEndpoint}.json`,
};
},
@@ -61,13 +72,11 @@ const boardsStore = {
};
},
showPage(page) {
- this.state.reload = false;
this.state.currentPage = page;
},
- addList(listObj, defaultAvatar) {
- const list = new List(listObj, defaultAvatar);
- this.state.lists = _.sortBy([...this.state.lists, list], 'position');
-
+ addList(listObj) {
+ const list = new List(listObj);
+ this.state.lists = sortBy([...this.state.lists, list], 'position');
return list;
},
new(listObj) {
@@ -80,7 +89,7 @@ const boardsStore = {
// Remove any new issues from the backlog
// as they will be visible in the new list
list.issues.forEach(backlogList.removeIssue.bind(backlogList));
- this.state.lists = _.sortBy(this.state.lists, 'position');
+ this.state.lists = sortBy(this.state.lists, 'position');
})
.catch(() => {
// https://gitlab.com/gitlab-org/gitlab-foss/issues/30821
@@ -184,10 +193,9 @@ const boardsStore = {
moveMultipleIssuesToList({ listFrom, listTo, issues, newIndex }) {
const issueTo = issues.map(issue => listTo.findIssue(issue.id));
- const issueLists = _.flatten(issues.map(issue => issue.getLists()));
+ const issueLists = issues.map(issue => issue.getLists()).flat();
const listLabels = issueLists.map(list => list.label);
-
- const hasMoveableIssues = _.compact(issueTo).length > 0;
+ const hasMoveableIssues = issueTo.filter(Boolean).length > 0;
if (!hasMoveableIssues) {
// Check if target list assignee is already present in this issue
@@ -335,7 +343,8 @@ const boardsStore = {
return (
(listTo.type !== 'label' && listFrom.type === 'assignee') ||
(listTo.type !== 'assignee' && listFrom.type === 'label') ||
- listFrom.type === 'backlog'
+ listFrom.type === 'backlog' ||
+ listFrom.type === 'closed'
);
},
moveIssueInList(list, issue, oldIndex, newIndex, idArray) {
@@ -539,10 +548,6 @@ const boardsStore = {
return axios.post(endpoint);
},
- allBoards() {
- return axios.get(this.generateBoardsPath());
- },
-
recentBoards() {
return axios.get(this.state.endpoints.recentBoardsEndpoint);
},
@@ -595,6 +600,38 @@ const boardsStore = {
clearMultiSelect() {
this.multiSelect.list = [];
},
+ refreshIssueData(issue, obj) {
+ issue.id = obj.id;
+ issue.iid = obj.iid;
+ issue.title = obj.title;
+ issue.confidential = obj.confidential;
+ issue.dueDate = obj.due_date;
+ issue.sidebarInfoEndpoint = obj.issue_sidebar_endpoint;
+ issue.referencePath = obj.reference_path;
+ issue.path = obj.real_path;
+ issue.toggleSubscriptionEndpoint = obj.toggle_subscription_endpoint;
+ issue.project_id = obj.project_id;
+ issue.timeEstimate = obj.time_estimate;
+ issue.assignableLabelsEndpoint = obj.assignable_labels_endpoint;
+ issue.blocked = obj.blocked;
+
+ if (obj.project) {
+ issue.project = new IssueProject(obj.project);
+ }
+
+ if (obj.milestone) {
+ issue.milestone = new ListMilestone(obj.milestone);
+ issue.milestone_id = obj.milestone.id;
+ }
+
+ if (obj.labels) {
+ issue.labels = obj.labels.map(label => new ListLabel(label));
+ }
+
+ if (obj.assignees) {
+ issue.assignees = obj.assignees.map(a => new ListAssignee(a));
+ }
+ },
};
BoardsStoreEE.initEESpecific(boardsStore);
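
A brief sketch of the reworked hydration path: ListIssue no longer takes a defaultAvatar argument, refreshData delegates to boardsStore.refreshIssueData, and assignee avatars fall back to gon.default_avatar_url. The sample payload below is made up for illustration.

// sample payload is illustrative; field names mirror the ones handled above
const issue = new ListIssue({
  id: 1,
  iid: 42,
  title: 'Example issue',
  labels: [{ id: 5, title: 'bug', color: '#cc0000', text_color: '#ffffff' }],
  assignees: [{ id: 7, name: 'Jane', username: 'jane', avatar_url: null }],
});

// with avatar_url and avatar both missing, the assignee model falls back to the global default:
// issue.assignees[0].avatar === gon.default_avatar_url
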
diff --git a/app/assets/javascripts/broadcast_notification.js b/app/assets/javascripts/broadcast_notification.js
index b124502506a..dc5401199dc 100644
--- a/app/assets/javascripts/broadcast_notification.js
+++ b/app/assets/javascripts/broadcast_notification.js
@@ -6,16 +6,14 @@ const handleOnDismiss = ({ currentTarget }) => {
dataset: { id },
} = currentTarget;
- Cookies.set(`hide_broadcast_notification_message_${id}`, true);
+ Cookies.set(`hide_broadcast_message_${id}`, true);
const notification = document.querySelector(`.js-broadcast-notification-${id}`);
notification.parentNode.removeChild(notification);
};
export default () => {
- const dismissButton = document.querySelector('.js-dismiss-current-broadcast-notification');
-
- if (dismissButton) {
- dismissButton.addEventListener('click', handleOnDismiss);
- }
+ document
+ .querySelectorAll('.js-dismiss-current-broadcast-notification')
+ .forEach(dismissButton => dismissButton.addEventListener('click', handleOnDismiss));
};
diff --git a/app/assets/javascripts/ci_variable_list/components/ci_variable_modal.vue b/app/assets/javascripts/ci_variable_list/components/ci_variable_modal.vue
new file mode 100644
index 00000000000..0ccc58ec2da
--- /dev/null
+++ b/app/assets/javascripts/ci_variable_list/components/ci_variable_modal.vue
@@ -0,0 +1,224 @@
+<script>
+import { __ } from '~/locale';
+import { mapActions, mapState } from 'vuex';
+import { ADD_CI_VARIABLE_MODAL_ID } from '../constants';
+import {
+ GlButton,
+ GlModal,
+ GlFormSelect,
+ GlFormGroup,
+ GlFormInput,
+ GlFormTextarea,
+ GlFormCheckbox,
+ GlLink,
+ GlIcon,
+} from '@gitlab/ui';
+
+export default {
+ modalId: ADD_CI_VARIABLE_MODAL_ID,
+ components: {
+ GlButton,
+ GlModal,
+ GlFormSelect,
+ GlFormGroup,
+ GlFormInput,
+ GlFormTextarea,
+ GlFormCheckbox,
+ GlLink,
+ GlIcon,
+ },
+ computed: {
+ ...mapState([
+ 'projectId',
+ 'environments',
+ 'typeOptions',
+ 'variable',
+ 'variableBeingEdited',
+ 'isGroup',
+ 'maskableRegex',
+ ]),
+ canSubmit() {
+ if (this.variableData.masked && this.maskedState === false) {
+ return false;
+ }
+ return this.variableData.key !== '' && this.variableData.secret_value !== '';
+ },
+ canMask() {
+ const regex = RegExp(this.maskableRegex);
+ return regex.test(this.variableData.secret_value);
+ },
+ displayMaskedError() {
+ return !this.canMask && this.variableData.masked && this.variableData.secret_value !== '';
+ },
+ maskedState() {
+ if (this.displayMaskedError) {
+ return false;
+ }
+ return null;
+ },
+ variableData() {
+ return this.variableBeingEdited || this.variable;
+ },
+ modalActionText() {
+ return this.variableBeingEdited ? __('Update variable') : __('Add variable');
+ },
+ primaryAction() {
+ return {
+ text: this.modalActionText,
+ attributes: { variant: 'success', disabled: !this.canSubmit },
+ };
+ },
+ maskedFeedback() {
+ return __('This variable can not be masked');
+ },
+ },
+ methods: {
+ ...mapActions([
+ 'addVariable',
+ 'updateVariable',
+ 'resetEditing',
+ 'displayInputValue',
+ 'clearModal',
+ 'deleteVariable',
+ ]),
+ updateOrAddVariable() {
+ if (this.variableBeingEdited) {
+ this.updateVariable(this.variableBeingEdited);
+ } else {
+ this.addVariable();
+ }
+ this.hideModal();
+ },
+ resetModalHandler() {
+ if (this.variableBeingEdited) {
+ this.resetEditing();
+ } else {
+ this.clearModal();
+ }
+ },
+ hideModal() {
+ this.$refs.modal.hide();
+ },
+ deleteVarAndClose() {
+ this.deleteVariable(this.variableBeingEdited);
+ this.hideModal();
+ },
+ },
+};
+</script>
+
+<template>
+ <gl-modal
+ ref="modal"
+ :modal-id="$options.modalId"
+ :title="modalActionText"
+ @hidden="resetModalHandler"
+ >
+ <form>
+ <gl-form-group :label="__('Key')" label-for="ci-variable-key">
+ <gl-form-input
+ id="ci-variable-key"
+ v-model="variableData.key"
+ data-qa-selector="variable_key"
+ />
+ </gl-form-group>
+
+ <gl-form-group
+ :label="__('Value')"
+ label-for="ci-variable-value"
+ :state="maskedState"
+ :invalid-feedback="maskedFeedback"
+ >
+ <gl-form-textarea
+ id="ci-variable-value"
+ v-model="variableData.secret_value"
+ rows="3"
+ max-rows="6"
+ data-qa-selector="variable_value"
+ />
+ </gl-form-group>
+
+ <div class="d-flex">
+ <gl-form-group
+ :label="__('Type')"
+ label-for="ci-variable-type"
+ class="w-50 append-right-15"
+ :class="{ 'w-100': isGroup }"
+ >
+ <gl-form-select
+ id="ci-variable-type"
+ v-model="variableData.variable_type"
+ :options="typeOptions"
+ />
+ </gl-form-group>
+
+ <gl-form-group
+ v-if="!isGroup"
+ :label="__('Environment scope')"
+ label-for="ci-variable-env"
+ class="w-50"
+ >
+ <gl-form-select
+ id="ci-variable-env"
+ v-model="variableData.environment_scope"
+ :options="environments"
+ />
+ </gl-form-group>
+ </div>
+
+ <gl-form-group :label="__('Flags')" label-for="ci-variable-flags">
+ <gl-form-checkbox v-model="variableData.protected" class="mb-0">
+ {{ __('Protect variable') }}
+ <gl-link href="/help/ci/variables/README#protected-environment-variables">
+ <gl-icon name="question" :size="12" />
+ </gl-link>
+ <p class="prepend-top-4 text-secondary">
+ {{ __('Export variable to pipelines running on protected branches and tags only.') }}
+ </p>
+ </gl-form-checkbox>
+
+ <gl-form-checkbox
+ ref="masked-ci-variable"
+ v-model="variableData.masked"
+ data-qa-selector="variable_masked"
+ >
+ {{ __('Mask variable') }}
+ <gl-link href="/help/ci/variables/README#masked-variables">
+ <gl-icon name="question" :size="12" />
+ </gl-link>
+ <p class="prepend-top-4 append-bottom-0 text-secondary">
+ {{ __('Variable will be masked in job logs.') }}
+ <span
+ :class="{
+ 'bold text-plain': displayMaskedError,
+ }"
+ >
+ {{ __('Requires values to meet regular expression requirements.') }}</span
+ >
+ <gl-link href="/help/ci/variables/README#masked-variables">{{
+ __('More information')
+ }}</gl-link>
+ </p>
+ </gl-form-checkbox>
+ </gl-form-group>
+ </form>
+ <template #modal-footer>
+ <gl-button @click="hideModal">{{ __('Cancel') }}</gl-button>
+ <gl-button
+ v-if="variableBeingEdited"
+ ref="deleteCiVariable"
+ category="secondary"
+ variant="danger"
+ @click="deleteVarAndClose"
+ >{{ __('Delete variable') }}</gl-button
+ >
+ <gl-button
+ ref="updateOrAddVariable"
+ :disabled="!canSubmit"
+ variant="success"
+ @click="updateOrAddVariable"
+ >{{ modalActionText }}
+ </gl-button>
+ </template>
+ </gl-modal>
+</template>
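
For illustration, the canMask/maskedState logic above reduces to a RegExp test of the secret value against the server-provided maskableRegex. A toy check, with an assumed pattern (the real one is supplied by the backend):

// the pattern below is only an assumption for the sketch
const maskableRegex = '^[a-zA-Z0-9_+=/@:-]{8,}$';
const canMask = secretValue => RegExp(maskableRegex).test(secretValue);

canMask('short');                // false — masking would be rejected
canMask('a-long-enough-secret'); // true — the Mask variable toggle is allowed
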
diff --git a/app/assets/javascripts/ci_variable_list/components/ci_variable_popover.vue b/app/assets/javascripts/ci_variable_list/components/ci_variable_popover.vue
new file mode 100644
index 00000000000..c4b1bc18f5a
--- /dev/null
+++ b/app/assets/javascripts/ci_variable_list/components/ci_variable_popover.vue
@@ -0,0 +1,55 @@
+<script>
+import { GlPopover, GlIcon, GlButton, GlTooltipDirective } from '@gitlab/ui';
+
+export default {
+ maxTextLength: 95,
+ components: {
+ GlPopover,
+ GlIcon,
+ GlButton,
+ },
+ directives: {
+ GlTooltip: GlTooltipDirective,
+ },
+ props: {
+ target: {
+ type: String,
+ required: true,
+ },
+ value: {
+ type: String,
+ required: true,
+ },
+ tooltipText: {
+ type: String,
+ required: true,
+ },
+ },
+ computed: {
+ displayValue() {
+ if (this.value.length > this.$options.maxTextLength) {
+ return `${this.value.substring(0, this.$options.maxTextLength)}...`;
+ }
+ return this.value;
+ },
+ },
+};
+</script>
+
+<template>
+ <div id="popover-container">
+ <gl-popover :target="target" triggers="hover" placement="top" container="popover-container">
+ <div class="d-flex justify-content-between position-relative">
+ <div class="pr-5 w-100 ci-popover-value">{{ displayValue }}</div>
+ <gl-button
+ v-gl-tooltip
+ class="btn-transparent btn-clipboard position-absolute position-top-0 position-right-0"
+ :title="tooltipText"
+ :data-clipboard-text="value"
+ >
+ <gl-icon name="copy-to-clipboard" />
+ </gl-button>
+ </div>
+ </gl-popover>
+ </div>
+</template>
diff --git a/app/assets/javascripts/ci_variable_list/components/ci_variable_settings.vue b/app/assets/javascripts/ci_variable_list/components/ci_variable_settings.vue
new file mode 100644
index 00000000000..ed1240c247f
--- /dev/null
+++ b/app/assets/javascripts/ci_variable_list/components/ci_variable_settings.vue
@@ -0,0 +1,32 @@
+<script>
+import CiVariableModal from './ci_variable_modal.vue';
+import CiVariableTable from './ci_variable_table.vue';
+import { mapState, mapActions } from 'vuex';
+
+export default {
+ components: {
+ CiVariableModal,
+ CiVariableTable,
+ },
+ computed: {
+ ...mapState(['isGroup']),
+ },
+ mounted() {
+ if (!this.isGroup) {
+ this.fetchEnvironments();
+ }
+ },
+ methods: {
+ ...mapActions(['fetchEnvironments']),
+ },
+};
+</script>
+
+<template>
+ <div class="row">
+ <div class="col-lg-12">
+ <ci-variable-table />
+ <ci-variable-modal />
+ </div>
+ </div>
+</template>
diff --git a/app/assets/javascripts/ci_variable_list/components/ci_variable_table.vue b/app/assets/javascripts/ci_variable_list/components/ci_variable_table.vue
new file mode 100644
index 00000000000..3f2f89ada6f
--- /dev/null
+++ b/app/assets/javascripts/ci_variable_list/components/ci_variable_table.vue
@@ -0,0 +1,184 @@
+<script>
+import { GlTable, GlButton, GlModalDirective, GlIcon } from '@gitlab/ui';
+import { s__, __ } from '~/locale';
+import { mapState, mapActions } from 'vuex';
+import { ADD_CI_VARIABLE_MODAL_ID } from '../constants';
+import CiVariablePopover from './ci_variable_popover.vue';
+
+export default {
+ modalId: ADD_CI_VARIABLE_MODAL_ID,
+ trueIcon: 'mobile-issue-close',
+ falseIcon: 'close',
+ iconSize: 16,
+ fields: [
+ {
+ key: 'variable_type',
+ label: s__('CiVariables|Type'),
+ customStyle: { width: '70px' },
+ },
+ {
+ key: 'key',
+ label: s__('CiVariables|Key'),
+ tdClass: 'text-plain',
+ sortable: true,
+ customStyle: { width: '40%' },
+ },
+ {
+ key: 'value',
+ label: s__('CiVariables|Value'),
+ tdClass: 'qa-ci-variable-input-value',
+ customStyle: { width: '40%' },
+ },
+ {
+ key: 'protected',
+ label: s__('CiVariables|Protected'),
+ customStyle: { width: '100px' },
+ },
+ {
+ key: 'masked',
+ label: s__('CiVariables|Masked'),
+ customStyle: { width: '100px' },
+ },
+ {
+ key: 'environment_scope',
+ label: s__('CiVariables|Environments'),
+ customStyle: { width: '20%' },
+ },
+ {
+ key: 'actions',
+ label: '',
+ customStyle: { width: '35px' },
+ },
+ ],
+ components: {
+ GlTable,
+ GlButton,
+ GlIcon,
+ CiVariablePopover,
+ },
+ directives: {
+ GlModalDirective,
+ },
+ computed: {
+ ...mapState(['variables', 'valuesHidden', 'isGroup', 'isLoading', 'isDeleting']),
+ valuesButtonText() {
+ return this.valuesHidden ? __('Reveal values') : __('Hide values');
+ },
+ tableIsNotEmpty() {
+ return this.variables && this.variables.length > 0;
+ },
+ fields() {
+ if (this.isGroup) {
+ return this.$options.fields.filter(field => field.key !== 'environment_scope');
+ }
+ return this.$options.fields;
+ },
+ },
+ mounted() {
+ this.fetchVariables();
+ },
+ methods: {
+ ...mapActions(['fetchVariables', 'toggleValues', 'editVariable']),
+ },
+};
+</script>
+
+<template>
+ <div class="ci-variable-table">
+ <gl-table
+ :fields="fields"
+ :items="variables"
+ tbody-tr-class="js-ci-variable-row"
+ sort-by="key"
+ sort-direction="asc"
+ stacked="lg"
+ fixed
+ show-empty
+ sort-icon-left
+ no-sort-reset
+ >
+ <template #table-colgroup="scope">
+ <col v-for="field in scope.fields" :key="field.key" :style="field.customStyle" />
+ </template>
+ <template #cell(key)="{ item }">
+ <div class="d-flex truncated-container">
+ <span :id="`ci-variable-key-${item.id}`" class="d-inline-block mw-100 text-truncate">{{
+ item.key
+ }}</span>
+ <ci-variable-popover
+ :target="`ci-variable-key-${item.id}`"
+ :value="item.key"
+ :tooltip-text="__('Copy key')"
+ />
+ </div>
+ </template>
+ <template #cell(value)="{ item }">
+ <span v-if="valuesHidden">*********************</span>
+ <div v-else class="d-flex truncated-container">
+ <span :id="`ci-variable-value-${item.id}`" class="d-inline-block mw-100 text-truncate">{{
+ item.value
+ }}</span>
+ <ci-variable-popover
+ :target="`ci-variable-value-${item.id}`"
+ :value="item.value"
+ :tooltip-text="__('Copy value')"
+ />
+ </div>
+ </template>
+ <template #cell(protected)="{ item }">
+ <gl-icon v-if="item.protected" :size="$options.iconSize" :name="$options.trueIcon" />
+ <gl-icon v-else :size="$options.iconSize" :name="$options.falseIcon" />
+ </template>
+ <template #cell(masked)="{ item }">
+ <gl-icon v-if="item.masked" :size="$options.iconSize" :name="$options.trueIcon" />
+ <gl-icon v-else :size="$options.iconSize" :name="$options.falseIcon" />
+ </template>
+ <template #cell(environment_scope)="{ item }">
+ <div class="d-flex truncated-container">
+ <span :id="`ci-variable-env-${item.id}`" class="d-inline-block mw-100 text-truncate">{{
+ item.environment_scope
+ }}</span>
+ <ci-variable-popover
+ :target="`ci-variable-env-${item.id}`"
+ :value="item.environment_scope"
+ :tooltip-text="__('Copy environment')"
+ />
+ </div>
+ </template>
+ <template #cell(actions)="{ item }">
+ <gl-button
+ ref="edit-ci-variable"
+ v-gl-modal-directive="$options.modalId"
+ @click="editVariable(item)"
+ >
+ <gl-icon :size="$options.iconSize" name="pencil" />
+ </gl-button>
+ </template>
+ <template #empty>
+ <p ref="empty-variables" class="text-center empty-variables text-plain">
+ {{ __('There are no variables yet.') }}
+ </p>
+ </template>
+ </gl-table>
+ <div
+ class="ci-variable-actions d-flex justify-content-end"
+ :class="{ 'justify-content-center': !tableIsNotEmpty }"
+ >
+ <gl-button
+ v-if="tableIsNotEmpty"
+ ref="secret-value-reveal-button"
+ data-qa-selector="reveal_ci_variable_value"
+ class="append-right-8"
+ @click="toggleValues(!valuesHidden)"
+ >{{ valuesButtonText }}</gl-button
+ >
+ <gl-button
+ ref="add-ci-variable"
+ v-gl-modal-directive="$options.modalId"
+ data-qa-selector="add_ci_variable"
+ variant="success"
+ >{{ __('Add Variable') }}</gl-button
+ >
+ </div>
+ </div>
+</template>
diff --git a/app/assets/javascripts/ci_variable_list/constants.js b/app/assets/javascripts/ci_variable_list/constants.js
new file mode 100644
index 00000000000..b2fa980c546
--- /dev/null
+++ b/app/assets/javascripts/ci_variable_list/constants.js
@@ -0,0 +1,16 @@
+import { __ } from '~/locale';
+
+// eslint-disable-next-line import/prefer-default-export
+export const ADD_CI_VARIABLE_MODAL_ID = 'add-ci-variable';
+
+export const displayText = {
+ variableText: __('Var'),
+ fileText: __('File'),
+ allEnvironmentsText: __('All'),
+};
+
+export const types = {
+ variableType: 'env_var',
+ fileType: 'file',
+ allEnvironmentsType: '*',
+};
diff --git a/app/assets/javascripts/ci_variable_list/index.js b/app/assets/javascripts/ci_variable_list/index.js
new file mode 100644
index 00000000000..58501b216c1
--- /dev/null
+++ b/app/assets/javascripts/ci_variable_list/index.js
@@ -0,0 +1,25 @@
+import Vue from 'vue';
+import CiVariableSettings from './components/ci_variable_settings.vue';
+import createStore from './store';
+import { parseBoolean } from '~/lib/utils/common_utils';
+
+export default () => {
+ const el = document.getElementById('js-ci-project-variables');
+ const { endpoint, projectId, group, maskableRegex } = el.dataset;
+ const isGroup = parseBoolean(group);
+
+ const store = createStore({
+ endpoint,
+ projectId,
+ isGroup,
+ maskableRegex,
+ });
+
+ return new Vue({
+ el,
+ store,
+ render(createElement) {
+ return createElement(CiVariableSettings);
+ },
+ });
+};
diff --git a/app/assets/javascripts/ci_variable_list/store/actions.js b/app/assets/javascripts/ci_variable_list/store/actions.js
new file mode 100644
index 00000000000..f3a629b84ee
--- /dev/null
+++ b/app/assets/javascripts/ci_variable_list/store/actions.js
@@ -0,0 +1,155 @@
+import * as types from './mutation_types';
+import axios from '~/lib/utils/axios_utils';
+import Api from '~/api';
+import createFlash from '~/flash';
+import { __ } from '~/locale';
+import { prepareDataForApi, prepareDataForDisplay, prepareEnvironments } from './utils';
+
+export const toggleValues = ({ commit }, valueState) => {
+ commit(types.TOGGLE_VALUES, valueState);
+};
+
+export const clearModal = ({ commit }) => {
+ commit(types.CLEAR_MODAL);
+};
+
+export const resetEditing = ({ commit, dispatch }) => {
+ // fetch variables again if modal is being edited and then hidden
+ // without saving changes, to cover use case of reactivity in the table
+ dispatch('fetchVariables');
+ commit(types.RESET_EDITING);
+};
+
+export const requestAddVariable = ({ commit }) => {
+ commit(types.REQUEST_ADD_VARIABLE);
+};
+
+export const receiveAddVariableSuccess = ({ commit }) => {
+ commit(types.RECEIVE_ADD_VARIABLE_SUCCESS);
+};
+
+export const receiveAddVariableError = ({ commit }, error) => {
+ commit(types.RECEIVE_ADD_VARIABLE_ERROR, error);
+};
+
+export const addVariable = ({ state, dispatch }) => {
+ dispatch('requestAddVariable');
+
+ return axios
+ .patch(state.endpoint, {
+ variables_attributes: [prepareDataForApi(state.variable)],
+ })
+ .then(() => {
+ dispatch('receiveAddVariableSuccess');
+ dispatch('fetchVariables');
+ })
+ .catch(error => {
+ createFlash(error.response.data[0]);
+ dispatch('receiveAddVariableError', error);
+ });
+};
+
+export const requestUpdateVariable = ({ commit }) => {
+ commit(types.REQUEST_UPDATE_VARIABLE);
+};
+
+export const receiveUpdateVariableSuccess = ({ commit }) => {
+ commit(types.RECEIVE_UPDATE_VARIABLE_SUCCESS);
+};
+
+export const receiveUpdateVariableError = ({ commit }, error) => {
+ commit(types.RECEIVE_UPDATE_VARIABLE_ERROR, error);
+};
+
+export const updateVariable = ({ state, dispatch }, variable) => {
+ dispatch('requestUpdateVariable');
+
+ const updatedVariable = prepareDataForApi(variable);
+ updatedVariable.secret_value = variable.secret_value;
+
+ return axios
+ .patch(state.endpoint, { variables_attributes: [updatedVariable] })
+ .then(() => {
+ dispatch('receiveUpdateVariableSuccess');
+ dispatch('fetchVariables');
+ })
+ .catch(error => {
+ createFlash(error.response.data[0]);
+ dispatch('receiveUpdateVariableError', error);
+ });
+};
+
+export const editVariable = ({ commit }, variable) => {
+ const variableToEdit = variable;
+ variableToEdit.secret_value = variableToEdit.value;
+ commit(types.VARIABLE_BEING_EDITED, variableToEdit);
+};
+
+export const requestVariables = ({ commit }) => {
+ commit(types.REQUEST_VARIABLES);
+};
+export const receiveVariablesSuccess = ({ commit }, variables) => {
+ commit(types.RECEIVE_VARIABLES_SUCCESS, variables);
+};
+
+export const fetchVariables = ({ dispatch, state }) => {
+ dispatch('requestVariables');
+
+ return axios
+ .get(state.endpoint)
+ .then(({ data }) => {
+ dispatch('receiveVariablesSuccess', prepareDataForDisplay(data.variables));
+ })
+ .catch(() => {
+ createFlash(__('There was an error fetching the variables.'));
+ });
+};
+
+export const requestDeleteVariable = ({ commit }) => {
+ commit(types.REQUEST_DELETE_VARIABLE);
+};
+
+export const receiveDeleteVariableSuccess = ({ commit }) => {
+ commit(types.RECEIVE_DELETE_VARIABLE_SUCCESS);
+};
+
+export const receiveDeleteVariableError = ({ commit }, error) => {
+ commit(types.RECEIVE_DELETE_VARIABLE_ERROR, error);
+};
+
+export const deleteVariable = ({ dispatch, state }, variable) => {
+ dispatch('requestDeleteVariable');
+
+ const destroy = true;
+
+ return axios
+ .patch(state.endpoint, { variables_attributes: [prepareDataForApi(variable, destroy)] })
+ .then(() => {
+ dispatch('receiveDeleteVariableSuccess');
+ dispatch('fetchVariables');
+ })
+ .catch(error => {
+ createFlash(error.response.data[0]);
+ dispatch('receiveDeleteVariableError', error);
+ });
+};
+
+export const requestEnvironments = ({ commit }) => {
+ commit(types.REQUEST_ENVIRONMENTS);
+};
+
+export const receiveEnvironmentsSuccess = ({ commit }, environments) => {
+ commit(types.RECEIVE_ENVIRONMENTS_SUCCESS, environments);
+};
+
+export const fetchEnvironments = ({ dispatch, state }) => {
+ dispatch('requestEnvironments');
+
+ return Api.environments(state.projectId)
+ .then(res => {
+ dispatch('receiveEnvironmentsSuccess', prepareEnvironments(res.data));
+ })
+ .catch(() => {
+ createFlash(__('There was an error fetching the environments information.'));
+ });
+};
diff --git a/app/assets/javascripts/ci_variable_list/store/index.js b/app/assets/javascripts/ci_variable_list/store/index.js
new file mode 100644
index 00000000000..db4ba95b3c2
--- /dev/null
+++ b/app/assets/javascripts/ci_variable_list/store/index.js
@@ -0,0 +1,17 @@
+import Vue from 'vue';
+import Vuex from 'vuex';
+import * as actions from './actions';
+import mutations from './mutations';
+import state from './state';
+
+Vue.use(Vuex);
+
+export default (initialState = {}) =>
+ new Vuex.Store({
+ actions,
+ mutations,
+ state: {
+ ...state(),
+ ...initialState,
+ },
+ });
diff --git a/app/assets/javascripts/ci_variable_list/store/mutation_types.js b/app/assets/javascripts/ci_variable_list/store/mutation_types.js
new file mode 100644
index 00000000000..240066d0f22
--- /dev/null
+++ b/app/assets/javascripts/ci_variable_list/store/mutation_types.js
@@ -0,0 +1,22 @@
+export const TOGGLE_VALUES = 'TOGGLE_VALUES';
+export const VARIABLE_BEING_EDITED = 'VARIABLE_BEING_EDITED';
+export const RESET_EDITING = 'RESET_EDITING';
+export const CLEAR_MODAL = 'CLEAR_MODAL';
+
+export const REQUEST_VARIABLES = 'REQUEST_VARIABLES';
+export const RECEIVE_VARIABLES_SUCCESS = 'RECEIVE_VARIABLES_SUCCESS';
+
+export const REQUEST_DELETE_VARIABLE = 'REQUEST_DELETE_VARIABLE';
+export const RECEIVE_DELETE_VARIABLE_SUCCESS = 'RECEIVE_DELETE_VARIABLE_SUCCESS';
+export const RECEIVE_DELETE_VARIABLE_ERROR = 'RECEIVE_DELETE_VARIABLE_ERROR';
+
+export const REQUEST_ADD_VARIABLE = 'REQUEST_ADD_VARIABLE';
+export const RECEIVE_ADD_VARIABLE_SUCCESS = 'RECEIVE_ADD_VARIABLE_SUCCESS';
+export const RECEIVE_ADD_VARIABLE_ERROR = 'RECEIVE_ADD_VARIABLE_ERROR';
+
+export const REQUEST_UPDATE_VARIABLE = 'REQUEST_UPDATE_VARIABLE';
+export const RECEIVE_UPDATE_VARIABLE_SUCCESS = 'RECEIVE_UPDATE_VARIABLE_SUCCESS';
+export const RECEIVE_UPDATE_VARIABLE_ERROR = 'RECEIVE_UPDATE_VARIABLE_ERROR';
+
+export const REQUEST_ENVIRONMENTS = 'REQUEST_ENVIRONMENTS';
+export const RECEIVE_ENVIRONMENTS_SUCCESS = 'RECEIVE_ENVIRONMENTS_SUCCESS';
diff --git a/app/assets/javascripts/ci_variable_list/store/mutations.js b/app/assets/javascripts/ci_variable_list/store/mutations.js
new file mode 100644
index 00000000000..c75eb4a91fd
--- /dev/null
+++ b/app/assets/javascripts/ci_variable_list/store/mutations.js
@@ -0,0 +1,86 @@
+import * as types from './mutation_types';
+import { displayText } from '../constants';
+
+export default {
+ [types.REQUEST_VARIABLES](state) {
+ state.isLoading = true;
+ },
+
+ [types.RECEIVE_VARIABLES_SUCCESS](state, variables) {
+ state.isLoading = false;
+ state.variables = variables;
+ },
+
+ [types.REQUEST_DELETE_VARIABLE](state) {
+ state.isDeleting = true;
+ },
+
+ [types.RECEIVE_DELETE_VARIABLE_SUCCESS](state) {
+ state.isDeleting = false;
+ },
+
+ [types.RECEIVE_DELETE_VARIABLE_ERROR](state, error) {
+ state.isDeleting = false;
+ state.error = error;
+ },
+
+ [types.REQUEST_ADD_VARIABLE](state) {
+ state.isLoading = true;
+ },
+
+ [types.RECEIVE_ADD_VARIABLE_SUCCESS](state) {
+ state.isLoading = false;
+ },
+
+ [types.RECEIVE_ADD_VARIABLE_ERROR](state, error) {
+ state.isLoading = false;
+ state.error = error;
+ },
+
+ [types.REQUEST_UPDATE_VARIABLE](state) {
+ state.isLoading = true;
+ },
+
+ [types.RECEIVE_UPDATE_VARIABLE_SUCCESS](state) {
+ state.isLoading = false;
+ },
+
+ [types.RECEIVE_UPDATE_VARIABLE_ERROR](state, error) {
+ state.isLoading = false;
+ state.error = error;
+ },
+
+ [types.TOGGLE_VALUES](state, valueState) {
+ state.valuesHidden = valueState;
+ },
+
+ [types.REQUEST_ENVIRONMENTS](state) {
+ state.isLoading = true;
+ },
+
+ [types.RECEIVE_ENVIRONMENTS_SUCCESS](state, environments) {
+ state.isLoading = false;
+ state.environments = environments;
+ state.environments.unshift(displayText.allEnvironmentsText);
+ },
+
+ [types.VARIABLE_BEING_EDITED](state, variable) {
+ state.variableBeingEdited = variable;
+ },
+
+ [types.CLEAR_MODAL](state) {
+ state.variable = {
+ variable_type: displayText.variableText,
+ key: '',
+ secret_value: '',
+ protected: false,
+ masked: false,
+ environment_scope: displayText.allEnvironmentsText,
+ };
+ },
+
+ [types.RESET_EDITING](state) {
+ state.variableBeingEdited = null;
+ state.showInputValue = false;
+ },
+};
diff --git a/app/assets/javascripts/ci_variable_list/store/state.js b/app/assets/javascripts/ci_variable_list/store/state.js
new file mode 100644
index 00000000000..5166321d6a7
--- /dev/null
+++ b/app/assets/javascripts/ci_variable_list/store/state.js
@@ -0,0 +1,24 @@
+import { displayText } from '../constants';
+
+export default () => ({
+ endpoint: null,
+ projectId: null,
+ isGroup: null,
+ maskableRegex: null,
+ isLoading: false,
+ isDeleting: false,
+ variable: {
+ variable_type: displayText.variableText,
+ key: '',
+ secret_value: '',
+ protected: false,
+ masked: false,
+ environment_scope: displayText.allEnvironmentsText,
+ },
+ variables: null,
+ valuesHidden: true,
+ error: null,
+ environments: [],
+ typeOptions: [displayText.variableText, displayText.fileText],
+ variableBeingEdited: null,
+});
diff --git a/app/assets/javascripts/ci_variable_list/store/utils.js b/app/assets/javascripts/ci_variable_list/store/utils.js
new file mode 100644
index 00000000000..3cd8c85024b
--- /dev/null
+++ b/app/assets/javascripts/ci_variable_list/store/utils.js
@@ -0,0 +1,43 @@
+import { cloneDeep } from 'lodash';
+import { displayText, types } from '../constants';
+
+const variableTypeHandler = type =>
+ type === displayText.variableText ? types.variableType : types.fileType;
+
+export const prepareDataForDisplay = variables => {
+ const variablesToDisplay = [];
+ variables.forEach(variable => {
+ const variableCopy = variable;
+ if (variableCopy.variable_type === types.variableType) {
+ variableCopy.variable_type = displayText.variableText;
+ } else {
+ variableCopy.variable_type = displayText.fileText;
+ }
+ variableCopy.secret_value = variableCopy.value;
+
+ if (variableCopy.environment_scope === types.allEnvironmentsType) {
+ variableCopy.environment_scope = displayText.allEnvironmentsText;
+ }
+ variablesToDisplay.push(variableCopy);
+ });
+ return variablesToDisplay;
+};
+
+export const prepareDataForApi = (variable, destroy = false) => {
+ const variableCopy = cloneDeep(variable);
+ variableCopy.protected = variableCopy.protected.toString();
+ variableCopy.masked = variableCopy.masked.toString();
+ variableCopy.variable_type = variableTypeHandler(variableCopy.variable_type);
+ if (variableCopy.environment_scope === displayText.allEnvironmentsText) {
+ variableCopy.environment_scope = types.allEnvironmentsType;
+ }
+
+ if (destroy) {
+ // eslint-disable-next-line
+ variableCopy._destroy = destroy;
+ }
+
+ return variableCopy;
+};
+
+export const prepareEnvironments = environments => environments.map(e => e.name);
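
A quick round-trip through the two helpers above, using a made-up variable, to show how API values map to display text and back:

// illustrative variable; the shape mirrors what the endpoint returns
const [displayed] = prepareDataForDisplay([
  {
    key: 'DB_URL',
    value: 'postgres://example',
    variable_type: 'env_var',
    protected: false,
    masked: true,
    environment_scope: '*',
  },
]);
// displayed.variable_type === 'Var', displayed.environment_scope === 'All',
// and displayed.secret_value mirrors displayed.value

const payload = prepareDataForApi(displayed);
// payload.variable_type === 'env_var', payload.environment_scope === '*',
// and protected/masked are serialized to the strings 'false'/'true'
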
diff --git a/app/assets/javascripts/clusters/clusters_bundle.js b/app/assets/javascripts/clusters/clusters_bundle.js
index b764348eb3c..e20c87ed8a0 100644
--- a/app/assets/javascripts/clusters/clusters_bundle.js
+++ b/app/assets/javascripts/clusters/clusters_bundle.js
@@ -255,6 +255,8 @@ export default class Clusters {
eventHub.$on('setKnativeHostname', data => this.setKnativeHostname(data));
eventHub.$on('uninstallApplication', data => this.uninstallApplication(data));
eventHub.$on('setCrossplaneProviderStack', data => this.setCrossplaneProviderStack(data));
+ eventHub.$on('setIngressModSecurityEnabled', data => this.setIngressModSecurityEnabled(data));
+ eventHub.$on('resetIngressModSecurityEnabled', id => this.resetIngressModSecurityEnabled(id));
// Add event listener to all the banner close buttons
this.addBannerCloseHandler(this.unreachableContainer, 'unreachable');
this.addBannerCloseHandler(this.authenticationFailureContainer, 'authentication_failure');
@@ -268,6 +270,8 @@ export default class Clusters {
eventHub.$off('setKnativeHostname');
eventHub.$off('setCrossplaneProviderStack');
eventHub.$off('uninstallApplication');
+ eventHub.$off('setIngressModSecurityEnabled');
+ eventHub.$off('resetIngressModSecurityEnabled');
}
initPolling(method, successCallback, errorCallback) {
@@ -313,10 +317,13 @@ export default class Clusters {
this.checkForNewInstalls(prevApplicationMap, this.store.state.applications);
this.updateContainer(prevStatus, this.store.state.status, this.store.state.statusReason);
- this.toggleIngressDomainHelpText(
- prevApplicationMap[INGRESS],
- this.store.state.applications[INGRESS],
- );
+
+ if (this.ingressDomainHelpText) {
+ this.toggleIngressDomainHelpText(
+ prevApplicationMap[INGRESS],
+ this.store.state.applications[INGRESS],
+ );
+ }
}
showToken() {
@@ -513,6 +520,15 @@ export default class Clusters {
this.store.updateAppProperty(appId, 'validationError', null);
}
+ setIngressModSecurityEnabled({ id, modSecurityEnabled }) {
+ this.store.updateAppProperty(id, 'isEditingModSecurityEnabled', true);
+ this.store.updateAppProperty(id, 'modsecurity_enabled', modSecurityEnabled);
+ }
+
+ resetIngressModSecurityEnabled(id) {
+ this.store.updateAppProperty(id, 'isEditingModSecurityEnabled', false);
+ }
+
destroy() {
this.destroyed = true;
diff --git a/app/assets/javascripts/clusters/components/applications.vue b/app/assets/javascripts/clusters/components/applications.vue
index fe2ad562ad5..87d190e51c4 100644
--- a/app/assets/javascripts/clusters/components/applications.vue
+++ b/app/assets/javascripts/clusters/components/applications.vue
@@ -21,6 +21,7 @@ import KnativeDomainEditor from './knative_domain_editor.vue';
import { CLUSTER_TYPE, PROVIDER_TYPE, APPLICATION_STATUS, INGRESS } from '../constants';
import eventHub from '~/clusters/event_hub';
import CrossplaneProviderStack from './crossplane_provider_stack.vue';
+import IngressModsecuritySettings from './ingress_modsecurity_settings.vue';
export default {
components: {
@@ -29,6 +30,7 @@ export default {
GlLoadingIcon,
KnativeDomainEditor,
CrossplaneProviderStack,
+ IngressModsecuritySettings,
},
props: {
type: {
@@ -117,9 +119,6 @@ export default {
ingressInstalled() {
return this.applications.ingress.status === APPLICATION_STATUS.INSTALLED;
},
- ingressEnableModsecurity() {
- return this.applications.ingress.modsecurity_enabled;
- },
ingressExternalEndpoint() {
return this.applications.ingress.externalIp || this.applications.ingress.externalHostname;
},
@@ -129,18 +128,6 @@ export default {
crossplaneInstalled() {
return this.applications.crossplane.status === APPLICATION_STATUS.INSTALLED;
},
- ingressModSecurityDescription() {
- const escapedUrl = _.escape(this.ingressModSecurityHelpPath);
-
- return sprintf(
- s__('ClusterIntegration|Learn more about %{startLink}ModSecurity%{endLink}'),
- {
- startLink: `<a href="${escapedUrl}" target="_blank" rel="noopener noreferrer">`,
- endLink: '</a>',
- },
- false,
- );
- },
ingressDescription() {
return sprintf(
_.escape(
@@ -241,6 +228,9 @@ Crossplane runs inside your Kubernetes cluster and supports secure connectivity
}
return null;
},
+ ingress() {
+ return this.applications.ingress;
+ },
},
created() {
this.helmInstallIllustration = helmInstallIllustration;
@@ -270,7 +260,6 @@ Crossplane runs inside your Kubernetes cluster and supports secure connectivity
<template>
<section id="cluster-applications">
- <h4>{{ s__('ClusterIntegration|Applications') }}</h4>
<p class="append-bottom-0">
{{
s__(`ClusterIntegration|Choose which applications to install on your Kubernetes cluster.
@@ -329,6 +318,7 @@ Crossplane runs inside your Kubernetes cluster and supports secure connectivity
:uninstall-successful="applications.ingress.uninstallSuccessful"
:uninstall-failed="applications.ingress.uninstallFailed"
:disabled="!helmInstalled"
+ :updateable="false"
title-link="https://kubernetes.io/docs/concepts/services-networking/ingress/"
>
<div slot="description">
@@ -340,25 +330,10 @@ Crossplane runs inside your Kubernetes cluster and supports secure connectivity
}}
</p>
- <template>
- <div class="form-group">
- <div class="form-check form-check-inline">
- <input
- v-model="applications.ingress.modsecurity_enabled"
- :disabled="ingressInstalled"
- type="checkbox"
- autocomplete="off"
- class="form-check-input"
- />
- <label class="form-check-label label-bold" for="ingress-enable-modsecurity">
- {{ s__('ClusterIntegration|Enable Web Application Firewall') }}
- </label>
- </div>
- <p class="form-text text-muted">
- <strong v-html="ingressModSecurityDescription"></strong>
- </p>
- </div>
- </template>
+ <ingress-modsecurity-settings
+ :ingress="ingress"
+ :ingress-mod-security-help-path="ingressModSecurityHelpPath"
+ />
<template v-if="ingressInstalled">
<div class="form-group">
diff --git a/app/assets/javascripts/clusters/components/ingress_modsecurity_settings.vue b/app/assets/javascripts/clusters/components/ingress_modsecurity_settings.vue
new file mode 100644
index 00000000000..98a783aab6e
--- /dev/null
+++ b/app/assets/javascripts/clusters/components/ingress_modsecurity_settings.vue
@@ -0,0 +1,164 @@
+<script>
+import _ from 'lodash';
+import { __ } from '../../locale';
+import { APPLICATION_STATUS, INGRESS } from '~/clusters/constants';
+import { GlAlert, GlSprintf, GlLink, GlToggle, GlButton } from '@gitlab/ui';
+import eventHub from '~/clusters/event_hub';
+import modSecurityLogo from 'images/cluster_app_logos/modsecurity.png';
+
+const { UPDATING, UNINSTALLING, INSTALLING, INSTALLED, UPDATED } = APPLICATION_STATUS;
+
+export default {
+ title: 'ModSecurity Web Application Firewall',
+ modsecurityUrl: 'https://modsecurity.org/about.html',
+ components: {
+ GlAlert,
+ GlSprintf,
+ GlLink,
+ GlToggle,
+ GlButton,
+ },
+ props: {
+ ingress: {
+ type: Object,
+ required: true,
+ },
+ ingressModSecurityHelpPath: {
+ type: String,
+ required: false,
+ default: '',
+ },
+ },
+ data: () => ({
+ modSecurityLogo,
+ hasValueChanged: false,
+ }),
+ computed: {
+ modSecurityEnabled: {
+ get() {
+ return this.ingress.modsecurity_enabled;
+ },
+ set(isEnabled) {
+ eventHub.$emit('setIngressModSecurityEnabled', {
+ id: INGRESS,
+ modSecurityEnabled: isEnabled,
+ });
+ if (this.hasValueChanged) {
+ this.resetStatus();
+ } else {
+ this.hasValueChanged = true;
+ }
+ },
+ },
+ ingressModSecurityDescription() {
+ return _.escape(this.ingressModSecurityHelpPath);
+ },
+ saving() {
+ return [UPDATING].includes(this.ingress.status);
+ },
+ saveButtonDisabled() {
+ return [UNINSTALLING, UPDATING, INSTALLING].includes(this.ingress.status);
+ },
+ saveButtonLabel() {
+ return this.saving ? __('Saving') : __('Save changes');
+ },
+ /**
+ * Returns true either when:
+ * - The application is getting updated.
+ * - The user has changed some of the settings for an application which is
+ * neither getting installed nor updated.
+ */
+ showButtons() {
+ return (
+ this.saving || (this.hasValueChanged && [INSTALLED, UPDATED].includes(this.ingress.status))
+ );
+ },
+ },
+ methods: {
+ updateApplication() {
+ eventHub.$emit('updateApplication', {
+ id: INGRESS,
+ params: { modsecurity_enabled: this.ingress.modsecurity_enabled },
+ });
+ this.resetStatus();
+ },
+ resetStatus() {
+ eventHub.$emit('resetIngressModSecurityEnabled', INGRESS);
+ this.hasValueChanged = false;
+ },
+ },
+};
+</script>
+
+<template>
+ <div>
+ <gl-alert
+ v-if="ingress.updateFailed"
+ class="mb-3"
+ variant="danger"
+ :dismissible="false"
+ @dismiss="alert = null"
+ >
+ {{
+ s__(
+ 'ClusterIntegration|Something went wrong while trying to save your settings. Please try again.',
+ )
+ }}
+ </gl-alert>
+ <div class="gl-responsive-table-row-layout" role="row">
+ <div class="table-section append-right-8 section-align-top" role="gridcell">
+ <img
+ :src="modSecurityLogo"
+ :alt="`${$options.title} logo`"
+ class="cluster-application-logo avatar s40"
+ />
+ </div>
+ <div class="table-section section-wrap" role="gridcell">
+ <strong>
+ <gl-link :href="$options.modsecurityUrl" target="_blank">{{ $options.title }} </gl-link>
+ </strong>
+ <div class="form-group">
+ <p class="form-text text-muted">
+ <strong>
+ <gl-sprintf
+ :message="
+ s__(
+ 'ClusterIntegration|Real-time web application monitoring, logging and access control. %{linkStart}More information%{linkEnd}',
+ )
+ "
+ >
+ <template #link="{ content }">
+ <gl-link :href="ingressModSecurityDescription" target="_blank"
+ >{{ content }}
+ </gl-link>
+ </template>
+ </gl-sprintf>
+ </strong>
+ </p>
+ <div class="form-check form-check-inline mt-3">
+ <gl-toggle
+ v-model="modSecurityEnabled"
+ :label-on="__('Enabled')"
+ :label-off="__('Disabled')"
+ :disabled="saveButtonDisabled"
+ label-position="right"
+ />
+ </div>
+ <div v-if="showButtons">
+ <gl-button
+ class="btn-success inline mr-1"
+ :loading="saving"
+ :disabled="saveButtonDisabled"
+ @click="updateApplication"
+ >
+ {{ saveButtonLabel }}
+ </gl-button>
+ <gl-button :disabled="saveButtonDisabled" @click="resetStatus">
+ {{ __('Cancel') }}
+ </gl-button>
+ </div>
+ </div>
+ </div>
+ </div>
+ </div>
+</template>
diff --git a/app/assets/javascripts/clusters/stores/clusters_store.js b/app/assets/javascripts/clusters/stores/clusters_store.js
index 939c396e1b9..00ed939e3b4 100644
--- a/app/assets/javascripts/clusters/stores/clusters_store.js
+++ b/app/assets/javascripts/clusters/stores/clusters_store.js
@@ -12,6 +12,7 @@ import {
INSTALL_EVENT,
UPDATE_EVENT,
UNINSTALL_EVENT,
+ ELASTIC_STACK,
} from '../constants';
import transitionApplicationState from '../services/application_state_machine';
@@ -54,6 +55,8 @@ export default class ClusterStore {
modsecurity_enabled: false,
externalIp: null,
externalHostname: null,
+ isEditingModSecurityEnabled: false,
+ updateFailed: false,
},
cert_manager: {
...applicationInitialState,
@@ -208,8 +211,9 @@ export default class ClusterStore {
if (appId === INGRESS) {
this.state.applications.ingress.externalIp = serverAppEntry.external_ip;
this.state.applications.ingress.externalHostname = serverAppEntry.external_hostname;
- this.state.applications.ingress.modsecurity_enabled =
- serverAppEntry.modsecurity_enabled || this.state.applications.ingress.modsecurity_enabled;
+ if (!this.state.applications.ingress.isEditingModSecurityEnabled) {
+ this.state.applications.ingress.modsecurity_enabled = serverAppEntry.modsecurity_enabled;
+ }
} else if (appId === CERT_MANAGER) {
this.state.applications.cert_manager.email =
this.state.applications.cert_manager.email || serverAppEntry.email;
@@ -234,6 +238,9 @@ export default class ClusterStore {
} else if (appId === RUNNER) {
this.state.applications.runner.version = version;
this.state.applications.runner.updateAvailable = updateAvailable;
+ } else if (appId === ELASTIC_STACK) {
+ this.state.applications.elastic_stack.version = version;
+ this.state.applications.elastic_stack.updateAvailable = updateAvailable;
}
});
}
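
In effect, the new isEditingModSecurityEnabled guard keeps a poll response from overwriting a toggle the user is still editing. A condensed illustration of the guarded update, using the state shape above:

// illustrative: a poll update arrives while the toggle is being edited
const ingress = { modsecurity_enabled: false, isEditingModSecurityEnabled: true };
const serverAppEntry = { modsecurity_enabled: true };

if (!ingress.isEditingModSecurityEnabled) {
  ingress.modsecurity_enabled = serverAppEntry.modsecurity_enabled;
}
// modsecurity_enabled stays false until the edit is saved or cancelled
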
diff --git a/app/assets/javascripts/clusters_list/components/clusters.vue b/app/assets/javascripts/clusters_list/components/clusters.vue
new file mode 100644
index 00000000000..46dacf30f39
--- /dev/null
+++ b/app/assets/javascripts/clusters_list/components/clusters.vue
@@ -0,0 +1,99 @@
+<script>
+import { mapState, mapActions } from 'vuex';
+import { GlTable, GlLoadingIcon, GlBadge } from '@gitlab/ui';
+import tooltip from '~/vue_shared/directives/tooltip';
+import { CLUSTER_TYPES, STATUSES } from '../constants';
+import { __, sprintf } from '~/locale';
+
+export default {
+ components: {
+ GlTable,
+ GlLoadingIcon,
+ GlBadge,
+ },
+ directives: {
+ tooltip,
+ },
+ fields: [
+ {
+ key: 'name',
+ label: __('Kubernetes cluster'),
+ },
+ {
+ key: 'environmentScope',
+ label: __('Environment scope'),
+ },
+ {
+ key: 'size',
+ label: __('Size'),
+ },
+ {
+ key: 'cpu',
+ label: __('Total cores (vCPUs)'),
+ },
+ {
+ key: 'memory',
+ label: __('Total memory (GB)'),
+ },
+ {
+ key: 'clusterType',
+ label: __('Cluster level'),
+ formatter: value => CLUSTER_TYPES[value],
+ },
+ ],
+ computed: {
+ ...mapState(['clusters', 'loading']),
+ },
+ mounted() {
+ // TODO - uncomment this once integrated with BE
+ // this.fetchClusters();
+ },
+ methods: {
+ ...mapActions(['fetchClusters']),
+ statusClass(status) {
+ return STATUSES[status].className;
+ },
+ statusTitle(status) {
+ const { title } = STATUSES[status];
+ return sprintf(__('Status: %{title}'), { title }, false);
+ },
+ },
+};
+</script>
+
+<template>
+ <gl-loading-icon v-if="loading" size="md" class="mt-3" />
+ <gl-table
+ v-else
+ :items="clusters"
+ :fields="$options.fields"
+ stacked="md"
+ variant="light"
+ class="qa-clusters-table"
+ >
+ <template #cell(name)="{ item }">
+ <div class="d-flex flex-row-reverse flex-md-row js-status">
+ {{ item.name }}
+ <gl-loading-icon
+ v-if="item.status === 'deleting'"
+ v-tooltip
+ :title="statusTitle(item.status)"
+ size="sm"
+ class="mr-2 ml-md-2"
+ />
+ <div
+ v-else
+ v-tooltip
+ class="cluster-status-indicator rounded-circle align-self-center gl-w-8 gl-h-8 mr-2 ml-md-2"
+ :class="statusClass(item.status)"
+ :title="statusTitle(item.status)"
+ ></div>
+ </div>
+ </template>
+ <template #cell(clusterType)="{value}">
+ <gl-badge variant="light">
+ {{ value }}
+ </gl-badge>
+ </template>
+ </gl-table>
+</template>
diff --git a/app/assets/javascripts/clusters_list/constants.js b/app/assets/javascripts/clusters_list/constants.js
new file mode 100644
index 00000000000..9428f08176c
--- /dev/null
+++ b/app/assets/javascripts/clusters_list/constants.js
@@ -0,0 +1,15 @@
+import { __ } from '~/locale';
+
+export const CLUSTER_TYPES = {
+ project_type: __('Project'),
+ group_type: __('Group'),
+ instance_type: __('Instance'),
+};
+
+export const STATUSES = {
+ disabled: { className: 'disabled', title: __('Disabled') },
+ connected: { className: 'bg-success', title: __('Connected') },
+ unreachable: { className: 'bg-danger', title: __('Unreachable') },
+ authentication_failure: { className: 'bg-warning', title: __('Authentication Failure') },
+ deleting: { title: __('Deleting') },
+};
diff --git a/app/assets/javascripts/clusters_list/index.js b/app/assets/javascripts/clusters_list/index.js
new file mode 100644
index 00000000000..67d0a33030b
--- /dev/null
+++ b/app/assets/javascripts/clusters_list/index.js
@@ -0,0 +1,22 @@
+import Vue from 'vue';
+import Clusters from './components/clusters.vue';
+import { createStore } from './store';
+
+export default () => {
+ const entryPoint = document.querySelector('#js-clusters-list-app');
+
+ if (!entryPoint) {
+ return;
+ }
+
+ const { endpoint } = entryPoint.dataset;
+
+ // eslint-disable-next-line no-new
+ new Vue({
+ el: '#js-clusters-list-app',
+ store: createStore({ endpoint }),
+ render(createElement) {
+ return createElement(Clusters);
+ },
+ });
+};
diff --git a/app/assets/javascripts/clusters_list/store/actions.js b/app/assets/javascripts/clusters_list/store/actions.js
new file mode 100644
index 00000000000..79bc9932438
--- /dev/null
+++ b/app/assets/javascripts/clusters_list/store/actions.js
@@ -0,0 +1,37 @@
+import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+import Poll from '~/lib/utils/poll';
+import axios from '~/lib/utils/axios_utils';
+import Visibility from 'visibilityjs';
+import flash from '~/flash';
+import { __ } from '~/locale';
+import * as types from './mutation_types';
+
+export const fetchClusters = ({ state, commit }) => {
+ const poll = new Poll({
+ resource: {
+ fetchClusters: endpoint => axios.get(endpoint),
+ },
+ data: state.endpoint,
+ method: 'fetchClusters',
+ successCallback: ({ data }) => {
+ commit(types.SET_CLUSTERS_DATA, convertObjectPropsToCamelCase(data, { deep: true }));
+ commit(types.SET_LOADING_STATE, false);
+ },
+ errorCallback: () => flash(__('An error occurred while loading clusters')),
+ });
+
+ if (!Visibility.hidden()) {
+ poll.makeRequest();
+ }
+
+ Visibility.change(() => {
+ if (!Visibility.hidden()) {
+ poll.restart();
+ } else {
+ poll.stop();
+ }
+ });
+};
+
+// prevent babel-plugin-rewire from generating an invalid default during karma tests
+export default () => {};
diff --git a/app/assets/javascripts/clusters_list/store/index.js b/app/assets/javascripts/clusters_list/store/index.js
new file mode 100644
index 00000000000..c472d2f354c
--- /dev/null
+++ b/app/assets/javascripts/clusters_list/store/index.js
@@ -0,0 +1,16 @@
+import Vue from 'vue';
+import Vuex from 'vuex';
+import state from './state';
+import mutations from './mutations';
+import * as actions from './actions';
+
+Vue.use(Vuex);
+
+export const createStore = initialState =>
+ new Vuex.Store({
+ actions,
+ mutations,
+ state: state(initialState),
+ });
+
+export default createStore;
diff --git a/app/assets/javascripts/clusters_list/store/mutation_types.js b/app/assets/javascripts/clusters_list/store/mutation_types.js
new file mode 100644
index 00000000000..f056f3ab7d9
--- /dev/null
+++ b/app/assets/javascripts/clusters_list/store/mutation_types.js
@@ -0,0 +1,2 @@
+export const SET_CLUSTERS_DATA = 'SET_CLUSTERS_DATA';
+export const SET_LOADING_STATE = 'SET_LOADING_STATE';
diff --git a/app/assets/javascripts/clusters_list/store/mutations.js b/app/assets/javascripts/clusters_list/store/mutations.js
new file mode 100644
index 00000000000..ffd3c4601bf
--- /dev/null
+++ b/app/assets/javascripts/clusters_list/store/mutations.js
@@ -0,0 +1,12 @@
+import * as types from './mutation_types';
+
+export default {
+ [types.SET_LOADING_STATE](state, value) {
+ state.loading = value;
+ },
+ [types.SET_CLUSTERS_DATA](state, clusters) {
+ Object.assign(state, {
+ clusters,
+ });
+ },
+};
diff --git a/app/assets/javascripts/clusters_list/store/state.js b/app/assets/javascripts/clusters_list/store/state.js
new file mode 100644
index 00000000000..ed032ed8435
--- /dev/null
+++ b/app/assets/javascripts/clusters_list/store/state.js
@@ -0,0 +1,5 @@
+export default (initialState = {}) => ({
+ endpoint: initialState.endpoint,
+ loading: false, // TODO - set this to true once integrated with BE
+ clusters: [],
+});
diff --git a/app/assets/javascripts/code_navigation/store/actions.js b/app/assets/javascripts/code_navigation/store/actions.js
index 2c52074e362..5220b1215b8 100644
--- a/app/assets/javascripts/code_navigation/store/actions.js
+++ b/app/assets/javascripts/code_navigation/store/actions.js
@@ -13,9 +13,10 @@ export default {
commit(types.REQUEST_DATA);
api
- .lsifData(state.projectPath, state.commitId, state.blobPath)
+ .lsifData(state.projectPath, state.commitId, [state.blobPath])
.then(({ data }) => {
- const normalizedData = data.reduce((acc, d) => {
+ const dataForPath = data[state.blobPath];
+ const normalizedData = dataForPath.reduce((acc, d) => {
if (d.hover) {
acc[`${d.start_line}:${d.start_char}`] = d;
addInteractionClass(d);
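
The call change above means lsifData is now requested with an array of blob paths and the response is keyed by path; a sketch of the resulting handling, with a hypothetical payload:

// hypothetical response for api.lsifData(projectPath, commitId, ['app/models/user.rb'])
const data = {
  'app/models/user.rb': [
    { start_line: 3, start_char: 8, hover: { language: 'ruby', value: 'def full_name' } },
  ],
};

const dataForPath = data['app/models/user.rb'];
const normalizedData = dataForPath.reduce((acc, d) => {
  if (d.hover) acc[`${d.start_line}:${d.start_char}`] = d;
  return acc;
}, {});
// normalizedData['3:8'].hover.value === 'def full_name'
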
diff --git a/app/assets/javascripts/commons/polyfills.js b/app/assets/javascripts/commons/polyfills.js
index 5e04b0573d2..fdeb64a7644 100644
--- a/app/assets/javascripts/commons/polyfills.js
+++ b/app/assets/javascripts/commons/polyfills.js
@@ -1,5 +1,24 @@
// Browser polyfills
+
+/**
+ * Polyfill: fetch
+ * @what https://fetch.spec.whatwg.org/
+ * @why Because Apollo GraphQL client relies on fetch
+ * @browsers Internet Explorer 11
+ * @see https://caniuse.com/#feat=fetch
+ */
+import 'unfetch/polyfill/index';
+
+/**
+ * Polyfill: FormData APIs
+ * @what delete(), get(), getAll(), has(), set(), entries(), keys(), values(),
+ * and support for for...of
+ * @why Because Apollo GraphQL client relies on fetch
+ * @browsers Internet Explorer 11, Edge < 18
+ * @see https://caniuse.com/#feat=mdn-api_formdata and subfeatures
+ */
import 'formdata-polyfill';
+
import './polyfills/custom_event';
import './polyfills/element';
import './polyfills/event';
diff --git a/app/assets/javascripts/commons/polyfills/custom_event.js b/app/assets/javascripts/commons/polyfills/custom_event.js
index db51ade61ae..6b14eff6f05 100644
--- a/app/assets/javascripts/commons/polyfills/custom_event.js
+++ b/app/assets/javascripts/commons/polyfills/custom_event.js
@@ -1,3 +1,10 @@
+/**
+ * Polyfill: CustomEvent constructor
+ * @what new CustomEvent()
+ * @why Certain features, e.g. notes utilize this
+ * @browsers Internet Explorer 11
+ * @see https://caniuse.com/#feat=customevent
+ */
if (typeof window.CustomEvent !== 'function') {
window.CustomEvent = function CustomEvent(event, params) {
const evt = document.createEvent('CustomEvent');
diff --git a/app/assets/javascripts/commons/polyfills/element.js b/app/assets/javascripts/commons/polyfills/element.js
index dde5e8f54f9..b13ceccf511 100644
--- a/app/assets/javascripts/commons/polyfills/element.js
+++ b/app/assets/javascripts/commons/polyfills/element.js
@@ -1,6 +1,19 @@
-// polyfill Element.classList and DOMTokenList with classList.js
+/**
+ * Polyfill
+ * @what Element.classList
+ * @why In order to align browser features
+ * @browsers Internet Explorer 11
+ * @see https://caniuse.com/#feat=classlist
+ */
import 'classlist-polyfill';
+/**
+ * Polyfill
+ * @what Element.closest
+ * @why In order to align browser features
+ * @browsers Internet Explorer 11
+ * @see https://caniuse.com/#feat=element-closest
+ */
Element.prototype.closest =
Element.prototype.closest ||
function closest(selector, selectedElement = this) {
@@ -10,6 +23,13 @@ Element.prototype.closest =
: Element.prototype.closest(selector, selectedElement.parentElement);
};
+/**
+ * Polyfill
+ * @what Element.matches
+ * @why In order to align browser features
+ * @browsers Internet Explorer 11
+ * @see https://caniuse.com/#feat=mdn-api_element_matches
+ */
Element.prototype.matches =
Element.prototype.matches ||
Element.prototype.matchesSelector ||
@@ -26,7 +46,15 @@ Element.prototype.matches =
return i > -1;
};
-// From the polyfill on MDN, https://developer.mozilla.org/en-US/docs/Web/API/ChildNode/remove#Polyfill
+/**
+ * Polyfill
+ * @what ChildNode.remove, Element.remove, CharacterData.remove, DocumentType.remove
+ * @why In order to align browser features
+ * @browsers Internet Explorer 11
+ * @see https://caniuse.com/#feat=childnode-remove
+ *
+ * From the polyfill on MDN, https://developer.mozilla.org/en-US/docs/Web/API/ChildNode/remove#Polyfill
+ */
(arr => {
arr.forEach(item => {
if (Object.prototype.hasOwnProperty.call(item, 'remove')) {
diff --git a/app/assets/javascripts/commons/polyfills/event.js b/app/assets/javascripts/commons/polyfills/event.js
index ff5b9a1982f..543dd5f9a93 100644
--- a/app/assets/javascripts/commons/polyfills/event.js
+++ b/app/assets/javascripts/commons/polyfills/event.js
@@ -1,6 +1,10 @@
/**
- * Polyfill for IE11 support.
- * new Event() is not supported by IE11.
+ * Polyfill: Event constructor
+ * @what new Event()
+ * @why To align browser support
+ * @browsers Internet Explorer 11
+ * @see https://caniuse.com/#feat=mdn-api_event_event
+ *
* Although `initEvent` is deprecated for modern browsers it is the one supported by IE
*/
if (typeof window.Event !== 'function') {
diff --git a/app/assets/javascripts/commons/polyfills/nodelist.js b/app/assets/javascripts/commons/polyfills/nodelist.js
index 3772c94b900..3a9111e64f8 100644
--- a/app/assets/javascripts/commons/polyfills/nodelist.js
+++ b/app/assets/javascripts/commons/polyfills/nodelist.js
@@ -1,3 +1,10 @@
+/**
+ * Polyfill
+ * @what NodeList.forEach
+ * @why To align browser support
+ * @browsers Internet Explorer 11
+ * @see https://caniuse.com/#feat=mdn-api_nodelist_foreach
+ */
if (window.NodeList && !NodeList.prototype.forEach) {
NodeList.prototype.forEach = function forEach(callback, thisArg = window) {
for (let i = 0; i < this.length; i += 1) {
diff --git a/app/assets/javascripts/commons/polyfills/request_idle_callback.js b/app/assets/javascripts/commons/polyfills/request_idle_callback.js
index 2356569d06e..51dc82e593a 100644
--- a/app/assets/javascripts/commons/polyfills/request_idle_callback.js
+++ b/app/assets/javascripts/commons/polyfills/request_idle_callback.js
@@ -1,3 +1,10 @@
+/**
+ * Polyfill
+ * @what requestIdleCallback
+ * @why To align browser features
+ * @browsers Safari (all versions), Internet Explorer 11
+ * @see https://caniuse.com/#feat=requestidlecallback
+ */
window.requestIdleCallback =
window.requestIdleCallback ||
function requestShim(cb) {
diff --git a/app/assets/javascripts/commons/polyfills/svg.js b/app/assets/javascripts/commons/polyfills/svg.js
index 8648a568f6f..92a8b03fbb4 100644
--- a/app/assets/javascripts/commons/polyfills/svg.js
+++ b/app/assets/javascripts/commons/polyfills/svg.js
@@ -1,5 +1,11 @@
+/**
+ * Polyfill
+ * @what polyfill support for external SVG file references via <use xlink:href>
+ * @why This is used in our GitLab SVG icon library
+ * @browsers Internet Explorer 11
+ * @see https://caniuse.com/#feat=mdn-svg_elements_use_external_uri
+ * @see https://css-tricks.com/svg-use-external-source/
+ */
import svg4everybody from 'svg4everybody';
-// polyfill support for external SVG file references via <use xlink:href>
-// @see https://css-tricks.com/svg-use-external-source/
svg4everybody();
diff --git a/app/assets/javascripts/confirm_modal.js b/app/assets/javascripts/confirm_modal.js
new file mode 100644
index 00000000000..4b4fdf03873
--- /dev/null
+++ b/app/assets/javascripts/confirm_modal.js
@@ -0,0 +1,14 @@
+import Vue from 'vue';
+import ConfirmModal from '~/vue_shared/components/confirm_modal.vue';
+
+const mountConfirmModal = () => {
+ return new Vue({
+ render(h) {
+ return h(ConfirmModal, {
+ props: { selector: '.js-confirm-modal-button' },
+ });
+ },
+ }).$mount();
+};
+
+export default () => mountConfirmModal();
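A minimal usage sketch (not part of the patch) of the new entry point, assuming a page bundle imports it and the page renders buttons with the '.js-confirm-modal-button' class carrying the data attributes the modal reads.

// Hypothetical page bundle code; the import path uses the standard '~/' webpack alias.
import initConfirmModal from '~/confirm_modal';

document.addEventListener('DOMContentLoaded', () => {
  // Mounts ConfirmModal once; the component listens for clicks on the selector above.
  initConfirmModal();
});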
diff --git a/app/assets/javascripts/contributors/components/contributors.vue b/app/assets/javascripts/contributors/components/contributors.vue
index 8dbf0a68c43..19516a13d15 100644
--- a/app/assets/javascripts/contributors/components/contributors.vue
+++ b/app/assets/javascripts/contributors/components/contributors.vue
@@ -66,12 +66,12 @@ export default {
individualChartsData() {
const maxNumberOfIndividualContributorsCharts = 100;
- return Object.keys(this.parsedData.byAuthor)
- .map(name => {
- const author = this.parsedData.byAuthor[name];
+ return Object.keys(this.parsedData.byAuthorEmail)
+ .map(email => {
+ const author = this.parsedData.byAuthorEmail[email];
return {
- name,
- email: author.email,
+ name: author.name,
+ email,
commits: author.commits,
dates: [
{
diff --git a/app/assets/javascripts/contributors/stores/getters.js b/app/assets/javascripts/contributors/stores/getters.js
index 9e02e3ed9e7..9b0def9b3ca 100644
--- a/app/assets/javascripts/contributors/stores/getters.js
+++ b/app/assets/javascripts/contributors/stores/getters.js
@@ -1,17 +1,17 @@
export const showChart = state => Boolean(!state.loading && state.chartData);
export const parsedData = state => {
- const byAuthor = {};
+ const byAuthorEmail = {};
const total = {};
state.chartData.forEach(({ date, author_name, author_email }) => {
total[date] = total[date] ? total[date] + 1 : 1;
- const authorData = byAuthor[author_name];
+ const authorData = byAuthorEmail[author_email];
if (!authorData) {
- byAuthor[author_name] = {
- email: author_email.toLowerCase(),
+ byAuthorEmail[author_email] = {
+ name: author_name,
commits: 1,
dates: {
[date]: 1,
@@ -25,7 +25,7 @@ export const parsedData = state => {
return {
total,
- byAuthor,
+ byAuthorEmail,
};
};
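To illustrate the effect of keying by email, a small sketch (not part of the patch) with made-up chart data: two commits that share a display name but use different emails now stay separate instead of being merged under one name.

const state = {
  loading: false,
  chartData: [
    { date: '2020-03-01', author_name: 'Alice', author_email: 'alice@example.com' },
    { date: '2020-03-02', author_name: 'Alice', author_email: 'alice@work.example' },
  ],
};

// parsedData(state) would produce roughly:
// {
//   total: { '2020-03-01': 1, '2020-03-02': 1 },
//   byAuthorEmail: {
//     'alice@example.com':  { name: 'Alice', commits: 1, dates: { '2020-03-01': 1 } },
//     'alice@work.example': { name: 'Alice', commits: 1, dates: { '2020-03-02': 1 } },
//   },
// }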
diff --git a/app/assets/javascripts/create_cluster/components/cluster_form_dropdown.vue b/app/assets/javascripts/create_cluster/components/cluster_form_dropdown.vue
index 2f7fcfcb755..e9d484bdd94 100644
--- a/app/assets/javascripts/create_cluster/components/cluster_form_dropdown.vue
+++ b/app/assets/javascripts/create_cluster/components/cluster_form_dropdown.vue
@@ -1,11 +1,12 @@
<script>
+import { isNil } from 'lodash';
import $ from 'jquery';
import { GlIcon } from '@gitlab/ui';
import DropdownSearchInput from '~/vue_shared/components/dropdown/dropdown_search_input.vue';
import DropdownHiddenInput from '~/vue_shared/components/dropdown/dropdown_hidden_input.vue';
import DropdownButton from '~/vue_shared/components/dropdown/dropdown_button.vue';
-const toArray = value => [].concat(value);
+const toArray = value => (isNil(value) ? [] : [].concat(value));
const itemsProp = (items, prop) => items.map(item => item[prop]);
const defaultSearchFn = (searchQuery, labelProp) => item =>
item[labelProp].toLowerCase().indexOf(searchQuery) > -1;
diff --git a/app/assets/javascripts/create_cluster/eks_cluster/components/eks_cluster_configuration_form.vue b/app/assets/javascripts/create_cluster/eks_cluster/components/eks_cluster_configuration_form.vue
index 59c5586edcd..74b5a62f754 100644
--- a/app/assets/javascripts/create_cluster/eks_cluster/components/eks_cluster_configuration_form.vue
+++ b/app/assets/javascripts/create_cluster/eks_cluster/components/eks_cluster_configuration_form.vue
@@ -1,6 +1,6 @@
<script>
-import { createNamespacedHelpers, mapState, mapActions } from 'vuex';
-import _ from 'underscore';
+import { createNamespacedHelpers, mapState, mapActions, mapGetters } from 'vuex';
+import { escape as esc } from 'lodash';
import { GlFormInput, GlFormCheckbox } from '@gitlab/ui';
import { sprintf, s__ } from '~/locale';
import ClusterFormDropdown from '~/create_cluster/components/cluster_form_dropdown.vue';
@@ -61,6 +61,7 @@ export default {
'gitlabManagedCluster',
'isCreatingCluster',
]),
+ ...mapGetters(['subnetValid']),
...mapRolesState({
roles: 'items',
isLoadingRoles: 'isLoadingItems',
@@ -119,7 +120,7 @@ export default {
!this.selectedRegion ||
!this.selectedKeyPair ||
!this.selectedVpc ||
- !this.selectedSubnet ||
+ !this.subnetValid ||
!this.selectedRole ||
!this.selectedSecurityGroup ||
!this.selectedInstanceType ||
@@ -127,13 +128,16 @@ export default {
this.isCreatingCluster
);
},
+ displaySubnetError() {
+ return Boolean(this.loadingSubnetsError) || this.selectedSubnet?.length === 1;
+ },
createClusterButtonLabel() {
return this.isCreatingCluster
? s__('ClusterIntegration|Creating Kubernetes cluster')
: s__('ClusterIntegration|Create Kubernetes cluster');
},
kubernetesIntegrationHelpText() {
- const escapedUrl = _.escape(this.kubernetesIntegrationHelpPath);
+ const escapedUrl = esc(this.kubernetesIntegrationHelpPath);
return sprintf(
s__(
@@ -216,6 +220,13 @@ export default {
false,
);
},
+ subnetValidationErrorText() {
+ if (this.loadingSubnetsError) {
+ return s__('ClusterIntegration|Could not load subnets for the selected VPC');
+ }
+
+ return s__('ClusterIntegration|You should select at least two subnets');
+ },
securityGroupDropdownHelpText() {
return sprintf(
s__(
@@ -245,7 +256,7 @@ export default {
);
},
gitlabManagedHelpText() {
- const escapedUrl = _.escape(this.gitlabManagedClusterHelpPath);
+ const escapedUrl = esc(this.gitlabManagedClusterHelpPath);
return sprintf(
s__(
@@ -289,14 +300,14 @@ export default {
this.setRegion({ region });
this.setVpc({ vpc: null });
this.setKeyPair({ keyPair: null });
- this.setSubnet({ subnet: null });
+ this.setSubnet({ subnet: [] });
this.setSecurityGroup({ securityGroup: null });
this.fetchVpcs({ region });
this.fetchKeyPairs({ region });
},
setVpcAndFetchSubnets(vpc) {
this.setVpc({ vpc });
- this.setSubnet({ subnet: null });
+ this.setSubnet({ subnet: [] });
this.setSecurityGroup({ securityGroup: null });
this.fetchSubnets({ vpc, region: this.selectedRegion });
this.fetchSecurityGroups({ vpc, region: this.selectedRegion });
@@ -436,8 +447,8 @@ export default {
:placeholder="s__('ClusterIntergation|Select a subnet')"
:search-field-placeholder="s__('ClusterIntegration|Search subnets')"
:empty-text="s__('ClusterIntegration|No subnet found')"
- :has-errors="Boolean(loadingSubnetsError)"
- :error-message="s__('ClusterIntegration|Could not load subnets for the selected VPC')"
+ :has-errors="displaySubnetError"
+ :error-message="subnetValidationErrorText"
@input="setSubnet({ subnet: $event })"
/>
<p class="form-text text-muted" v-html="subnetDropdownHelpText"></p>
diff --git a/app/assets/javascripts/create_cluster/eks_cluster/components/service_credentials_form.vue b/app/assets/javascripts/create_cluster/eks_cluster/components/service_credentials_form.vue
index 0cfe47dafaf..47cc4e4ce67 100644
--- a/app/assets/javascripts/create_cluster/eks_cluster/components/service_credentials_form.vue
+++ b/app/assets/javascripts/create_cluster/eks_cluster/components/service_credentials_form.vue
@@ -1,6 +1,6 @@
<script>
import { GlFormInput } from '@gitlab/ui';
-import _ from 'underscore';
+import { escape as esc } from 'lodash';
import { mapState, mapActions } from 'vuex';
import { sprintf, s__, __ } from '~/locale';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
@@ -42,7 +42,7 @@ export default {
: s__('ClusterIntegration|Authenticate with AWS');
},
accountAndExternalIdsHelpText() {
- const escapedUrl = _.escape(this.accountAndExternalIdsHelpPath);
+ const escapedUrl = esc(this.accountAndExternalIdsHelpPath);
return sprintf(
s__(
@@ -59,7 +59,7 @@ export default {
);
},
provisionRoleArnHelpText() {
- const escapedUrl = _.escape(this.createRoleArnHelpPath);
+ const escapedUrl = esc(this.createRoleArnHelpPath);
return sprintf(
s__(
diff --git a/app/assets/javascripts/create_cluster/eks_cluster/store/getters.js b/app/assets/javascripts/create_cluster/eks_cluster/store/getters.js
index e69de29bb2d..bbe4930c191 100644
--- a/app/assets/javascripts/create_cluster/eks_cluster/store/getters.js
+++ b/app/assets/javascripts/create_cluster/eks_cluster/store/getters.js
@@ -0,0 +1,3 @@
+// eslint-disable-next-line import/prefer-default-export
+export const subnetValid = ({ selectedSubnet }) =>
+ Array.isArray(selectedSubnet) && selectedSubnet.length >= 2;
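A short behavioral sketch (not part of the patch) of the new getter, which the form above uses to require a multi-subnet selection before the cluster can be created; the subnet IDs are made up.

const subnetValid = ({ selectedSubnet }) =>
  Array.isArray(selectedSubnet) && selectedSubnet.length >= 2;

console.log(subnetValid({ selectedSubnet: [] }));                       // false
console.log(subnetValid({ selectedSubnet: ['subnet-a'] }));             // false
console.log(subnetValid({ selectedSubnet: ['subnet-a', 'subnet-b'] })); // true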
diff --git a/app/assets/javascripts/create_cluster/eks_cluster/store/state.js b/app/assets/javascripts/create_cluster/eks_cluster/store/state.js
index 20434dcce98..d1337e7ea4a 100644
--- a/app/assets/javascripts/create_cluster/eks_cluster/store/state.js
+++ b/app/assets/javascripts/create_cluster/eks_cluster/store/state.js
@@ -21,7 +21,7 @@ export default () => ({
selectedRole: '',
selectedKeyPair: '',
selectedVpc: '',
- selectedSubnet: '',
+ selectedSubnet: [],
selectedSecurityGroup: '',
selectedInstanceType: 'm5.large',
nodeCount: '3',
diff --git a/app/assets/javascripts/create_cluster/gke_cluster/components/gke_dropdown_mixin.js b/app/assets/javascripts/create_cluster/gke_cluster/components/gke_dropdown_mixin.js
index 43fd0cac3be..3b91ce63744 100644
--- a/app/assets/javascripts/create_cluster/gke_cluster/components/gke_dropdown_mixin.js
+++ b/app/assets/javascripts/create_cluster/gke_cluster/components/gke_dropdown_mixin.js
@@ -1,4 +1,3 @@
-import _ from 'underscore';
import { GlLoadingIcon } from '@gitlab/ui';
import DropdownSearchInput from '~/vue_shared/components/dropdown/dropdown_search_input.vue';
import DropdownHiddenInput from '~/vue_shared/components/dropdown/dropdown_hidden_input.vue';
@@ -49,7 +48,7 @@ export default {
methods: {
fetchSuccessHandler() {
if (this.defaultValue) {
- const itemToSelect = _.find(this.items, item => item.name === this.defaultValue);
+ const itemToSelect = this.items.find(item => item.name === this.defaultValue);
if (itemToSelect) {
this.setItem(itemToSelect.name);
diff --git a/app/assets/javascripts/create_cluster/gke_cluster/components/gke_project_id_dropdown.vue b/app/assets/javascripts/create_cluster/gke_cluster/components/gke_project_id_dropdown.vue
index 6815d3629e3..6d8e6bbac11 100644
--- a/app/assets/javascripts/create_cluster/gke_cluster/components/gke_project_id_dropdown.vue
+++ b/app/assets/javascripts/create_cluster/gke_cluster/components/gke_project_id_dropdown.vue
@@ -1,5 +1,5 @@
<script>
-import _ from 'underscore';
+import { escape as esc } from 'lodash';
import { mapState, mapGetters, mapActions } from 'vuex';
import { s__, sprintf } from '~/locale';
@@ -65,7 +65,7 @@ export default {
s__(message),
{
docsLinkEnd: '&nbsp;<i class="fa fa-external-link" aria-hidden="true"></i></a>',
- docsLinkStart: `<a href="${_.escape(
+ docsLinkStart: `<a href="${esc(
this.docsUrl,
)}" target="_blank" rel="noopener noreferrer">`,
},
@@ -119,7 +119,7 @@ export default {
...mapActions({ setItem: 'setProject' }),
fetchSuccessHandler() {
if (this.defaultValue) {
- const projectToSelect = _.find(this.items, item => item.projectId === this.defaultValue);
+ const projectToSelect = this.items.find(item => item.projectId === this.defaultValue);
if (projectToSelect) {
this.setItem(projectToSelect);
diff --git a/app/assets/javascripts/create_merge_request_dropdown.js b/app/assets/javascripts/create_merge_request_dropdown.js
index d9805e5e76a..229612f5e9d 100644
--- a/app/assets/javascripts/create_merge_request_dropdown.js
+++ b/app/assets/javascripts/create_merge_request_dropdown.js
@@ -42,7 +42,7 @@ export default class CreateMergeRequestDropdown {
this.refInput = this.wrapperEl.querySelector('.js-ref');
this.refMessage = this.wrapperEl.querySelector('.js-ref-message');
this.unavailableButton = this.wrapperEl.querySelector('.unavailable');
- this.unavailableButtonArrow = this.unavailableButton.querySelector('.fa');
+ this.unavailableButtonSpinner = this.unavailableButton.querySelector('.spinner');
this.unavailableButtonText = this.unavailableButton.querySelector('.text');
this.branchCreated = false;
@@ -417,14 +417,10 @@ export default class CreateMergeRequestDropdown {
setUnavailableButtonState(isLoading = true) {
if (isLoading) {
- this.unavailableButtonArrow.classList.add('fa-spin');
- this.unavailableButtonArrow.classList.add('fa-spinner');
- this.unavailableButtonArrow.classList.remove('fa-exclamation-triangle');
+ this.unavailableButtonSpinner.classList.remove('hide');
this.unavailableButtonText.textContent = __('Checking branch availability...');
} else {
- this.unavailableButtonArrow.classList.remove('fa-spin');
- this.unavailableButtonArrow.classList.remove('fa-spinner');
- this.unavailableButtonArrow.classList.add('fa-exclamation-triangle');
+ this.unavailableButtonSpinner.classList.add('hide');
this.unavailableButtonText.textContent = __('New branch unavailable');
}
}
diff --git a/app/assets/javascripts/cycle_analytics/components/stage_nav_item.vue b/app/assets/javascripts/cycle_analytics/components/stage_nav_item.vue
index 1b09fe1b370..3c18608eb75 100644
--- a/app/assets/javascripts/cycle_analytics/components/stage_nav_item.vue
+++ b/app/assets/javascripts/cycle_analytics/components/stage_nav_item.vue
@@ -45,10 +45,13 @@ export default {
:class="{ active: isActive }"
class="stage-nav-item d-flex pl-4 pr-4 m-0 mb-1 ml-2 rounded border-color-default border-style-solid border-width-1px"
>
- <div class="stage-nav-item-cell stage-name p-0" :class="{ 'font-weight-bold': isActive }">
+ <div
+ class="stage-nav-item-cell stage-name w-50 pr-2"
+ :class="{ 'font-weight-bold': isActive }"
+ >
{{ title }}
</div>
- <div class="stage-nav-item-cell stage-median mr-4">
+ <div class="stage-nav-item-cell stage-median w-50">
<template v-if="isUserAllowed">
<span v-if="hasValue">{{ value }}</span>
<span v-else class="stage-empty">{{ __('Not enough data') }}</span>
diff --git a/app/assets/javascripts/deploy_keys/components/key.vue b/app/assets/javascripts/deploy_keys/components/key.vue
index c856e380c41..585b091bc51 100644
--- a/app/assets/javascripts/deploy_keys/components/key.vue
+++ b/app/assets/javascripts/deploy_keys/components/key.vue
@@ -1,5 +1,5 @@
<script>
-import _ from 'underscore';
+import { head, tail } from 'lodash';
import { s__, sprintf } from '~/locale';
import icon from '~/vue_shared/components/icon.vue';
import tooltip from '~/vue_shared/directives/tooltip';
@@ -48,8 +48,7 @@ export default {
const projects = [...this.deployKey.deploy_keys_projects];
if (this.projectId !== null) {
- const indexOfCurrentProject = _.findIndex(
- projects,
+ const indexOfCurrentProject = projects.findIndex(
project =>
project &&
project.project &&
@@ -66,10 +65,10 @@ export default {
return projects;
},
firstProject() {
- return _.head(this.projects);
+ return head(this.projects);
},
restProjects() {
- return _.tail(this.projects);
+ return tail(this.projects);
},
restProjectsTooltip() {
return sprintf(s__('DeployKeys|Expand %{count} other projects'), {
diff --git a/app/assets/javascripts/diff_notes/components/resolve_discussion_btn.js b/app/assets/javascripts/diff_notes/components/resolve_discussion_btn.js
deleted file mode 100644
index 5f2a17da630..00000000000
--- a/app/assets/javascripts/diff_notes/components/resolve_discussion_btn.js
+++ /dev/null
@@ -1,70 +0,0 @@
-/* eslint-disable no-else-return */
-/* global CommentsStore */
-/* global ResolveService */
-
-import Vue from 'vue';
-import { __ } from '~/locale';
-
-const ResolveDiscussionBtn = Vue.extend({
- props: {
- discussionId: {
- type: String,
- required: true,
- },
- mergeRequestId: {
- type: Number,
- required: true,
- },
- canResolve: {
- type: Boolean,
- required: true,
- },
- },
- data() {
- return {
- discussion: {},
- };
- },
- computed: {
- showButton() {
- if (this.discussion) {
- return this.discussion.isResolvable();
- } else {
- return false;
- }
- },
- isDiscussionResolved() {
- if (this.discussion) {
- return this.discussion.isResolved();
- } else {
- return false;
- }
- },
- buttonText() {
- if (this.isDiscussionResolved) {
- return __('Unresolve discussion');
- } else {
- return __('Resolve discussion');
- }
- },
- loading() {
- if (this.discussion) {
- return this.discussion.loading;
- } else {
- return false;
- }
- },
- },
- created() {
- CommentsStore.createDiscussion(this.discussionId, this.canResolve);
-
- this.discussion = CommentsStore.state[this.discussionId];
- },
- methods: {
- resolve() {
- ResolveService.toggleResolveForDiscussion(this.mergeRequestId, this.discussionId);
- },
- },
-});
-
-Vue.component('resolve-discussion-btn', ResolveDiscussionBtn);
diff --git a/app/assets/javascripts/diff_notes/diff_notes_bundle.js b/app/assets/javascripts/diff_notes/diff_notes_bundle.js
index 7dcf3594471..92862d4c933 100644
--- a/app/assets/javascripts/diff_notes/diff_notes_bundle.js
+++ b/app/assets/javascripts/diff_notes/diff_notes_bundle.js
@@ -11,7 +11,6 @@ import './components/comment_resolve_btn';
import './components/jump_to_discussion';
import './components/resolve_btn';
import './components/resolve_count';
-import './components/resolve_discussion_btn';
import './components/diff_note_avatars';
import './components/new_issue_for_discussion';
@@ -20,7 +19,7 @@ export default () => {
document.querySelector('.merge-request') || document.querySelector('.commit-box');
const { projectPath } = projectPathHolder.dataset;
const COMPONENT_SELECTOR =
- 'resolve-btn, resolve-discussion-btn, jump-to-discussion, comment-and-resolve-btn, new-issue-for-discussion-btn';
+ 'resolve-btn, jump-to-discussion, comment-and-resolve-btn, new-issue-for-discussion-btn';
window.gl = window.gl || {};
window.gl.diffNoteApps = {};
diff --git a/app/assets/javascripts/diffs/components/app.vue b/app/assets/javascripts/diffs/components/app.vue
index f9d3d31e152..3ea2a2fbaee 100644
--- a/app/assets/javascripts/diffs/components/app.vue
+++ b/app/assets/javascripts/diffs/components/app.vue
@@ -50,6 +50,11 @@ export default {
type: String,
required: true,
},
+ endpointCoverage: {
+ type: String,
+ required: false,
+ default: '',
+ },
projectPath: {
type: String,
required: true,
@@ -169,6 +174,7 @@ export default {
endpoint: this.endpoint,
endpointMetadata: this.endpointMetadata,
endpointBatch: this.endpointBatch,
+ endpointCoverage: this.endpointCoverage,
projectPath: this.projectPath,
dismissEndpoint: this.dismissEndpoint,
showSuggestPopover: this.showSuggestPopover,
@@ -218,6 +224,7 @@ export default {
'fetchDiffFiles',
'fetchDiffFilesMeta',
'fetchDiffFilesBatch',
+ 'fetchCoverageFiles',
'startRenderDiffsQueue',
'assignDiscussionsToDiff',
'setHighlightedRow',
@@ -292,6 +299,10 @@ export default {
});
}
+ if (this.endpointCoverage) {
+ this.fetchCoverageFiles();
+ }
+
if (!this.isNotesFetched) {
eventHub.$emit('fetchNotesData');
}
@@ -398,7 +409,7 @@ export default {
}"
>
<commit-widget v-if="commit" :commit="commit" />
- <div v-if="isBatchLoading" class="loading"><gl-loading-icon /></div>
+ <div v-if="isBatchLoading" class="loading"><gl-loading-icon size="lg" /></div>
<template v-else-if="renderDiffFiles">
<diff-file
v-for="file in diffFiles"
diff --git a/app/assets/javascripts/diffs/components/compare_versions_dropdown.vue b/app/assets/javascripts/diffs/components/compare_versions_dropdown.vue
index 1dcdb65d5c7..cc4b2dacab3 100644
--- a/app/assets/javascripts/diffs/components/compare_versions_dropdown.vue
+++ b/app/assets/javascripts/diffs/components/compare_versions_dropdown.vue
@@ -1,6 +1,7 @@
<script>
import Icon from '~/vue_shared/components/icon.vue';
import { n__, __, sprintf } from '~/locale';
+import { getParameterByName, parseBoolean } from '~/lib/utils/common_utils';
import TimeAgo from '~/vue_shared/components/time_ago_tooltip.vue';
export default {
@@ -94,6 +95,9 @@ export default {
}
return version.versionIndex === -1;
},
+ isHead() {
+ return parseBoolean(getParameterByName('diff_head'));
+ },
isLatest(version) {
return (
this.mergeRequestVersion && version.version_index === this.targetVersions[0].version_index
@@ -121,7 +125,8 @@ export default {
<div>
<strong>
{{ versionName(version) }}
- <template v-if="isBase(version)">{{
+ <template v-if="isHead()">{{ s__('DiffsCompareBaseBranch|(HEAD)') }}</template>
+ <template v-else-if="isBase(version)">{{
s__('DiffsCompareBaseBranch|(base)')
}}</template>
</strong>
diff --git a/app/assets/javascripts/diffs/components/diff_expansion_cell.vue b/app/assets/javascripts/diffs/components/diff_expansion_cell.vue
index 23fbfc2b74b..46ed76450c4 100644
--- a/app/assets/javascripts/diffs/components/diff_expansion_cell.vue
+++ b/app/assets/javascripts/diffs/components/diff_expansion_cell.vue
@@ -3,7 +3,7 @@ import { mapState, mapActions } from 'vuex';
import createFlash from '~/flash';
import { s__ } from '~/locale';
import Icon from '~/vue_shared/components/icon.vue';
-import { UNFOLD_COUNT } from '../constants';
+import { UNFOLD_COUNT, INLINE_DIFF_VIEW_TYPE, PARALLEL_DIFF_VIEW_TYPE } from '../constants';
import * as utils from '../store/utils';
import tooltip from '../../vue_shared/directives/tooltip';
@@ -11,6 +11,16 @@ const EXPAND_ALL = 0;
const EXPAND_UP = 1;
const EXPAND_DOWN = 2;
+const lineNumberByViewType = (viewType, diffLine) => {
+ const numberGetters = {
+ [INLINE_DIFF_VIEW_TYPE]: line => line?.new_line,
+ [PARALLEL_DIFF_VIEW_TYPE]: line => (line?.right || line?.left)?.new_line,
+ };
+ const numberGetter = numberGetters[viewType];
+
+ return numberGetter && numberGetter(diffLine);
+};
+
export default {
directives: {
tooltip,
@@ -44,7 +54,7 @@ export default {
colspan: {
type: Number,
required: false,
- default: 3,
+ default: 4,
},
},
computed: {
@@ -67,12 +77,16 @@ export default {
...mapActions('diffs', ['loadMoreLines']),
getPrevLineNumber(oldLineNumber, newLineNumber) {
const diffFile = utils.findDiffFile(this.diffFiles, this.fileHash);
- const indexForInline = utils.findIndexInInlineLines(diffFile.highlighted_diff_lines, {
+ const lines = {
+ [INLINE_DIFF_VIEW_TYPE]: diffFile.highlighted_diff_lines,
+ [PARALLEL_DIFF_VIEW_TYPE]: diffFile.parallel_diff_lines,
+ };
+ const index = utils.getPreviousLineIndex(this.diffViewType, diffFile, {
oldLineNumber,
newLineNumber,
});
- const prevLine = diffFile.highlighted_diff_lines[indexForInline - 2];
- return (prevLine && prevLine.new_line) || 0;
+
+ return lineNumberByViewType(this.diffViewType, lines[this.diffViewType][index - 2]) || 0;
},
callLoadMoreLines(
endpoint,
@@ -114,7 +128,7 @@ export default {
this.handleExpandAllLines(expandOptions);
}
},
- handleExpandUpLines(expandOptions = EXPAND_ALL) {
+ handleExpandUpLines(expandOptions) {
const { endpoint, fileHash, view, oldLineNumber, newLineNumber, offset } = expandOptions;
const bottom = this.isBottom;
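For orientation, a self-contained sketch (not part of the patch) of what the new lineNumberByViewType helper resolves per view type; the constant values 'inline' and 'parallel' and the line objects are assumptions for illustration.

const INLINE_DIFF_VIEW_TYPE = 'inline';     // assumed value of the imported constant
const PARALLEL_DIFF_VIEW_TYPE = 'parallel'; // assumed value of the imported constant

const lineNumberByViewType = (viewType, diffLine) => {
  const numberGetters = {
    [INLINE_DIFF_VIEW_TYPE]: line => line?.new_line,
    [PARALLEL_DIFF_VIEW_TYPE]: line => (line?.right || line?.left)?.new_line,
  };
  const numberGetter = numberGetters[viewType];
  return numberGetter && numberGetter(diffLine);
};

// An inline line carries new_line directly; a parallel line nests it under right/left.
console.log(lineNumberByViewType('inline', { new_line: 42 }));                          // 42
console.log(lineNumberByViewType('parallel', { left: null, right: { new_line: 42 } })); // 42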
diff --git a/app/assets/javascripts/diffs/components/diff_file.vue b/app/assets/javascripts/diffs/components/diff_file.vue
index f5051748f10..8babc05f1ce 100644
--- a/app/assets/javascripts/diffs/components/diff_file.vue
+++ b/app/assets/javascripts/diffs/components/diff_file.vue
@@ -1,6 +1,6 @@
<script>
import { mapActions, mapGetters, mapState } from 'vuex';
-import _ from 'underscore';
+import { escape as esc } from 'lodash';
import { GlLoadingIcon } from '@gitlab/ui';
import { __, sprintf } from '~/locale';
import createFlash from '~/flash';
@@ -46,7 +46,7 @@ export default {
return sprintf(
__('You can %{linkStart}view the blob%{linkEnd} instead.'),
{
- linkStart: `<a href="${_.escape(this.file.view_path)}">`,
+ linkStart: `<a href="${esc(this.file.view_path)}">`,
linkEnd: '</a>',
},
false,
@@ -179,18 +179,19 @@ export default {
<div v-if="errorMessage" class="diff-viewer">
<div class="nothing-here-block" v-html="errorMessage"></div>
</div>
- <div v-else-if="isCollapsed" class="nothing-here-block diff-collapsed">
- {{ __('This diff is collapsed.') }}
- <a class="click-to-expand js-click-to-expand" href="#" @click.prevent="handleToggle">{{
- __('Click to expand it.')
- }}</a>
- </div>
- <diff-content
- v-else
- :class="{ hidden: isCollapsed || isFileTooLarge }"
- :diff-file="file"
- :help-page-path="helpPagePath"
- />
+ <template v-else>
+ <div v-show="isCollapsed" class="nothing-here-block diff-collapsed">
+ {{ __('This diff is collapsed.') }}
+ <a class="click-to-expand js-click-to-expand" href="#" @click.prevent="handleToggle">{{
+ __('Click to expand it.')
+ }}</a>
+ </div>
+ <diff-content
+ v-show="!isCollapsed && !isFileTooLarge"
+ :diff-file="file"
+ :help-page-path="helpPagePath"
+ />
+ </template>
</div>
</template>
</div>
diff --git a/app/assets/javascripts/diffs/components/diff_file_header.vue b/app/assets/javascripts/diffs/components/diff_file_header.vue
index 731c53a7339..d4270960f57 100644
--- a/app/assets/javascripts/diffs/components/diff_file_header.vue
+++ b/app/assets/javascripts/diffs/components/diff_file_header.vue
@@ -1,5 +1,5 @@
<script>
-import _ from 'underscore';
+import { escape as esc } from 'lodash';
import { mapActions, mapGetters } from 'vuex';
import { GlButton, GlTooltipDirective, GlLoadingIcon } from '@gitlab/ui';
import { polyfillSticky } from '~/lib/utils/sticky';
@@ -91,7 +91,7 @@ export default {
return this.expanded ? 'chevron-down' : 'chevron-right';
},
viewFileButtonText() {
- const truncatedContentSha = _.escape(truncateSha(this.diffFile.content_sha));
+ const truncatedContentSha = esc(truncateSha(this.diffFile.content_sha));
return sprintf(
s__('MergeRequests|View file @ %{commitId}'),
{ commitId: truncatedContentSha },
@@ -99,7 +99,7 @@ export default {
);
},
viewReplacedFileButtonText() {
- const truncatedBaseSha = _.escape(truncateSha(this.diffFile.diff_refs.base_sha));
+ const truncatedBaseSha = esc(truncateSha(this.diffFile.diff_refs.base_sha));
return sprintf(
s__('MergeRequests|View replaced file @ %{commitId}'),
{
diff --git a/app/assets/javascripts/diffs/components/diff_file_row.vue b/app/assets/javascripts/diffs/components/diff_file_row.vue
index 15e63a1c9ca..c8ba8d6040e 100644
--- a/app/assets/javascripts/diffs/components/diff_file_row.vue
+++ b/app/assets/javascripts/diffs/components/diff_file_row.vue
@@ -35,6 +35,6 @@ export default {
<template>
<file-row :file="file" v-bind="$attrs" v-on="$listeners">
<file-row-stats v-if="showFileRowStats" :file="file" class="mr-1" />
- <changed-file-icon :file="file" :size="16" />
+ <changed-file-icon :file="file" :size="16" :show-tooltip="true" />
</file-row>
</template>
diff --git a/app/assets/javascripts/diffs/components/diff_stats.vue b/app/assets/javascripts/diffs/components/diff_stats.vue
index 9d362ceb429..0234fc4f40e 100644
--- a/app/assets/javascripts/diffs/components/diff_stats.vue
+++ b/app/assets/javascripts/diffs/components/diff_stats.vue
@@ -1,7 +1,7 @@
<script>
import Icon from '~/vue_shared/components/icon.vue';
import { n__ } from '~/locale';
-import { isNumber } from 'underscore';
+import { isNumber } from 'lodash';
export default {
components: { Icon },
diff --git a/app/assets/javascripts/diffs/components/image_diff_overlay.vue b/app/assets/javascripts/diffs/components/image_diff_overlay.vue
index 703a281308e..be7e6789216 100644
--- a/app/assets/javascripts/diffs/components/image_diff_overlay.vue
+++ b/app/assets/javascripts/diffs/components/image_diff_overlay.vue
@@ -1,6 +1,6 @@
<script>
import { mapActions, mapGetters } from 'vuex';
-import _ from 'underscore';
+import { isArray } from 'lodash';
import imageDiffMixin from 'ee_else_ce/diffs/mixins/image_diff';
import Icon from '~/vue_shared/components/icon.vue';
@@ -46,7 +46,7 @@ export default {
return this.getCommentFormForDiffFile(this.fileHash);
},
allDiscussions() {
- return _.isArray(this.discussions) ? this.discussions : [this.discussions];
+ return isArray(this.discussions) ? this.discussions : [this.discussions];
},
},
methods: {
diff --git a/app/assets/javascripts/diffs/components/inline_diff_comment_row.vue b/app/assets/javascripts/diffs/components/inline_diff_comment_row.vue
index a06dbd70ac5..87f0396cf72 100644
--- a/app/assets/javascripts/diffs/components/inline_diff_comment_row.vue
+++ b/app/assets/javascripts/diffs/components/inline_diff_comment_row.vue
@@ -51,7 +51,7 @@ export default {
<template>
<tr v-if="shouldRender" :class="className" class="notes_holder">
- <td class="notes-content" colspan="3">
+ <td class="notes-content" colspan="4">
<div class="content">
<diff-discussions
v-if="line.discussions.length"
diff --git a/app/assets/javascripts/diffs/components/inline_diff_table_row.vue b/app/assets/javascripts/diffs/components/inline_diff_table_row.vue
index 55a8df43c62..bd99fcb71b8 100644
--- a/app/assets/javascripts/diffs/components/inline_diff_table_row.vue
+++ b/app/assets/javascripts/diffs/components/inline_diff_table_row.vue
@@ -1,5 +1,6 @@
<script>
-import { mapActions, mapState } from 'vuex';
+import { mapActions, mapGetters, mapState } from 'vuex';
+import { GlTooltipDirective } from '@gitlab/ui';
import DiffTableCell from './diff_table_cell.vue';
import {
MATCH_LINE_TYPE,
@@ -15,11 +16,18 @@ export default {
components: {
DiffTableCell,
},
+ directives: {
+ GlTooltip: GlTooltipDirective,
+ },
props: {
fileHash: {
type: String,
required: true,
},
+ filePath: {
+ type: String,
+ required: true,
+ },
contextLinesPath: {
type: String,
required: true,
@@ -40,6 +48,7 @@ export default {
};
},
computed: {
+ ...mapGetters('diffs', ['fileLineCoverage']),
...mapState({
isHighlighted(state) {
return this.line.line_code !== null && this.line.line_code === state.diffs.highlightedRow;
@@ -62,6 +71,9 @@ export default {
isMatchLine() {
return this.line.type === MATCH_LINE_TYPE;
},
+ coverageState() {
+ return this.fileLineCoverage(this.filePath, this.line.new_line);
+ },
},
created() {
this.newLineType = NEW_LINE_TYPE;
@@ -114,13 +126,19 @@ export default {
class="diff-line-num new_line qa-new-diff-line"
/>
<td
+ v-gl-tooltip.hover
+ :title="coverageState.text"
+ :class="[line.type, coverageState.class, { hll: isHighlighted }]"
+ class="line-coverage"
+ ></td>
+ <td
:class="[
line.type,
{
hll: isHighlighted,
},
]"
- class="line_content"
+ class="line_content with-coverage"
v-html="line.rich_text"
></td>
</tr>
diff --git a/app/assets/javascripts/diffs/components/inline_diff_view.vue b/app/assets/javascripts/diffs/components/inline_diff_view.vue
index 1eb17588376..8b25cdc2887 100644
--- a/app/assets/javascripts/diffs/components/inline_diff_view.vue
+++ b/app/assets/javascripts/diffs/components/inline_diff_view.vue
@@ -48,6 +48,7 @@ export default {
<colgroup>
<col style="width: 50px;" />
<col style="width: 50px;" />
+ <col style="width: 8px;" />
<col />
</colgroup>
<tbody>
@@ -63,6 +64,7 @@ export default {
<inline-diff-table-row
:key="`${line.line_code || index}`"
:file-hash="diffFile.file_hash"
+ :file-path="diffFile.file_path"
:context-lines-path="diffFile.context_lines_path"
:line="line"
:is-bottom="index + 1 === diffLinesLength"
diff --git a/app/assets/javascripts/diffs/components/no_changes.vue b/app/assets/javascripts/diffs/components/no_changes.vue
index 47e9627a957..09cb542c3dc 100644
--- a/app/assets/javascripts/diffs/components/no_changes.vue
+++ b/app/assets/javascripts/diffs/components/no_changes.vue
@@ -1,6 +1,6 @@
<script>
import { mapGetters } from 'vuex';
-import _ from 'underscore';
+import { escape as esc } from 'lodash';
import { GlButton } from '@gitlab/ui';
import { __, sprintf } from '~/locale';
@@ -24,8 +24,8 @@ export default {
{
ref_start: '<span class="ref-name">',
ref_end: '</span>',
- source_branch: _.escape(this.getNoteableData.source_branch),
- target_branch: _.escape(this.getNoteableData.target_branch),
+ source_branch: esc(this.getNoteableData.source_branch),
+ target_branch: esc(this.getNoteableData.target_branch),
},
false,
);
diff --git a/app/assets/javascripts/diffs/components/parallel_diff_comment_row.vue b/app/assets/javascripts/diffs/components/parallel_diff_comment_row.vue
index 65b41b0e456..b525490f7cc 100644
--- a/app/assets/javascripts/diffs/components/parallel_diff_comment_row.vue
+++ b/app/assets/javascripts/diffs/components/parallel_diff_comment_row.vue
@@ -122,7 +122,7 @@ export default {
<template>
<tr v-if="shouldRender" :class="className" class="notes_holder">
- <td class="notes-content parallel old" colspan="2">
+ <td class="notes-content parallel old" colspan="3">
<div v-if="shouldRenderDiscussionsOnLeft" class="content">
<diff-discussions
:discussions="line.left.discussions"
@@ -147,7 +147,7 @@ export default {
</template>
</diff-discussion-reply>
</td>
- <td class="notes-content parallel new" colspan="2">
+ <td class="notes-content parallel new" colspan="3">
<div v-if="shouldRenderDiscussionsOnRight" class="content">
<diff-discussions
:discussions="line.right.discussions"
diff --git a/app/assets/javascripts/diffs/components/parallel_diff_expansion_row.vue b/app/assets/javascripts/diffs/components/parallel_diff_expansion_row.vue
index c1b30eab199..0a80107ced4 100644
--- a/app/assets/javascripts/diffs/components/parallel_diff_expansion_row.vue
+++ b/app/assets/javascripts/diffs/components/parallel_diff_expansion_row.vue
@@ -49,7 +49,7 @@ export default {
:line="line.left"
:is-top="isTop"
:is-bottom="isBottom"
- :colspan="4"
+ :colspan="6"
/>
</template>
</tr>
diff --git a/app/assets/javascripts/diffs/components/parallel_diff_table_row.vue b/app/assets/javascripts/diffs/components/parallel_diff_table_row.vue
index 4c95d618b0f..83d803f42b1 100644
--- a/app/assets/javascripts/diffs/components/parallel_diff_table_row.vue
+++ b/app/assets/javascripts/diffs/components/parallel_diff_table_row.vue
@@ -1,6 +1,7 @@
<script>
-import { mapActions, mapState } from 'vuex';
+import { mapActions, mapGetters, mapState } from 'vuex';
import $ from 'jquery';
+import { GlTooltipDirective } from '@gitlab/ui';
import DiffTableCell from './diff_table_cell.vue';
import {
MATCH_LINE_TYPE,
@@ -18,11 +19,18 @@ export default {
components: {
DiffTableCell,
},
+ directives: {
+ GlTooltip: GlTooltipDirective,
+ },
props: {
fileHash: {
type: String,
required: true,
},
+ filePath: {
+ type: String,
+ required: true,
+ },
contextLinesPath: {
type: String,
required: true,
@@ -44,6 +52,7 @@ export default {
};
},
computed: {
+ ...mapGetters('diffs', ['fileLineCoverage']),
...mapState({
isHighlighted(state) {
const lineCode =
@@ -82,6 +91,9 @@ export default {
isMatchLineRight() {
return this.line.right && this.line.right.type === MATCH_LINE_TYPE;
},
+ coverageState() {
+ return this.fileLineCoverage(this.filePath, this.line.right.new_line);
+ },
},
created() {
this.newLineType = NEW_LINE_TYPE;
@@ -99,7 +111,7 @@ export default {
const allCellsInHoveringRow = Array.from(e.currentTarget.children);
const hoverIndex = allCellsInHoveringRow.indexOf(hoveringCell);
- if (hoverIndex >= 2) {
+ if (hoverIndex >= 3) {
this.isRightHover = isHover;
} else {
this.isLeftHover = isHover;
@@ -143,17 +155,19 @@ export default {
line-position="left"
class="diff-line-num old_line"
/>
+ <td :class="parallelViewLeftLineType" class="line-coverage left-side"></td>
<td
:id="line.left.line_code"
:class="parallelViewLeftLineType"
- class="line_content parallel left-side"
+ class="line_content with-coverage parallel left-side"
@mousedown="handleParallelLineMouseDown"
v-html="line.left.rich_text"
></td>
</template>
<template v-else>
<td class="diff-line-num old_line empty-cell"></td>
- <td class="line_content parallel left-side empty-cell"></td>
+ <td class="line-coverage left-side empty-cell"></td>
+ <td class="line_content with-coverage parallel left-side empty-cell"></td>
</template>
<template v-if="line.right && !isMatchLineRight">
<diff-table-cell
@@ -170,6 +184,12 @@ export default {
class="diff-line-num new_line"
/>
<td
+ v-gl-tooltip.hover
+ :title="coverageState.text"
+ :class="[line.right.type, coverageState.class, { hll: isHighlighted }]"
+ class="line-coverage right-side"
+ ></td>
+ <td
:id="line.right.line_code"
:class="[
line.right.type,
@@ -177,14 +197,15 @@ export default {
hll: isHighlighted,
},
]"
- class="line_content parallel right-side"
+ class="line_content with-coverage parallel right-side"
@mousedown="handleParallelLineMouseDown"
v-html="line.right.rich_text"
></td>
</template>
<template v-else>
<td class="diff-line-num old_line empty-cell"></td>
- <td class="line_content parallel right-side empty-cell"></td>
+ <td class="line-coverage right-side empty-cell"></td>
+ <td class="line_content with-coverage parallel right-side empty-cell"></td>
</template>
</tr>
</template>
diff --git a/app/assets/javascripts/diffs/components/parallel_diff_view.vue b/app/assets/javascripts/diffs/components/parallel_diff_view.vue
index 88baac092a1..d796aad9d06 100644
--- a/app/assets/javascripts/diffs/components/parallel_diff_view.vue
+++ b/app/assets/javascripts/diffs/components/parallel_diff_view.vue
@@ -47,8 +47,10 @@ export default {
>
<colgroup>
<col style="width: 50px;" />
+ <col style="width: 8px;" />
<col />
<col style="width: 50px;" />
+ <col style="width: 8px;" />
<col />
</colgroup>
<tbody>
@@ -64,6 +66,7 @@ export default {
<parallel-diff-table-row
:key="line.line_code"
:file-hash="diffFile.file_hash"
+ :file-path="diffFile.file_path"
:context-lines-path="diffFile.context_lines_path"
:line="line"
:is-bottom="index + 1 === diffLinesLength"
diff --git a/app/assets/javascripts/diffs/index.js b/app/assets/javascripts/diffs/index.js
index 375ac80021f..ce48e36bfd7 100644
--- a/app/assets/javascripts/diffs/index.js
+++ b/app/assets/javascripts/diffs/index.js
@@ -69,6 +69,7 @@ export default function initDiffsApp(store) {
endpoint: dataset.endpoint,
endpointMetadata: dataset.endpointMetadata || '',
endpointBatch: dataset.endpointBatch || '',
+ endpointCoverage: dataset.endpointCoverage || '',
projectPath: dataset.projectPath,
helpPagePath: dataset.helpPagePath,
currentUser: JSON.parse(dataset.currentUserData) || {},
@@ -104,6 +105,7 @@ export default function initDiffsApp(store) {
endpoint: this.endpoint,
endpointMetadata: this.endpointMetadata,
endpointBatch: this.endpointBatch,
+ endpointCoverage: this.endpointCoverage,
currentUser: this.currentUser,
projectPath: this.projectPath,
helpPagePath: this.helpPagePath,
diff --git a/app/assets/javascripts/diffs/store/actions.js b/app/assets/javascripts/diffs/store/actions.js
index bd85105ccb4..18bbdf402ee 100644
--- a/app/assets/javascripts/diffs/store/actions.js
+++ b/app/assets/javascripts/diffs/store/actions.js
@@ -1,8 +1,10 @@
import Vue from 'vue';
import Cookies from 'js-cookie';
+import Poll from '~/lib/utils/poll';
import axios from '~/lib/utils/axios_utils';
+import httpStatusCodes from '~/lib/utils/http_status';
import createFlash from '~/flash';
-import { s__ } from '~/locale';
+import { __, s__ } from '~/locale';
import { handleLocationHash, historyPushState, scrollToElement } from '~/lib/utils/common_utils';
import { mergeUrlParams, getLocationHash } from '~/lib/utils/url_utility';
import TreeWorker from '../workers/tree_worker';
@@ -43,6 +45,7 @@ export const setBaseConfig = ({ commit }, options) => {
endpoint,
endpointMetadata,
endpointBatch,
+ endpointCoverage,
projectPath,
dismissEndpoint,
showSuggestPopover,
@@ -52,6 +55,7 @@ export const setBaseConfig = ({ commit }, options) => {
endpoint,
endpointMetadata,
endpointBatch,
+ endpointCoverage,
projectPath,
dismissEndpoint,
showSuggestPopover,
@@ -170,6 +174,26 @@ export const fetchDiffFilesMeta = ({ commit, state }) => {
.catch(() => worker.terminate());
};
+export const fetchCoverageFiles = ({ commit, state }) => {
+ const coveragePoll = new Poll({
+ resource: {
+ getCoverageReports: endpoint => axios.get(endpoint),
+ },
+ data: state.endpointCoverage,
+ method: 'getCoverageReports',
+ successCallback: ({ status, data }) => {
+ if (status === httpStatusCodes.OK) {
+ commit(types.SET_COVERAGE_DATA, data);
+
+ coveragePoll.stop();
+ }
+ },
+ errorCallback: () => createFlash(__('Something went wrong on our end. Please try again!')),
+ });
+
+ coveragePoll.makeRequest();
+};
+
export const setHighlightedRow = ({ commit }, lineCode) => {
const fileHash = lineCode.split('_')[0];
commit(types.SET_HIGHLIGHTED_ROW, lineCode);
diff --git a/app/assets/javascripts/diffs/store/getters.js b/app/assets/javascripts/diffs/store/getters.js
index c4737090a70..3898974638f 100644
--- a/app/assets/javascripts/diffs/store/getters.js
+++ b/app/assets/javascripts/diffs/store/getters.js
@@ -1,3 +1,4 @@
+import { __, n__ } from '~/locale';
import { PARALLEL_DIFF_VIEW_TYPE, INLINE_DIFF_VIEW_TYPE } from '../constants';
export const isParallelView = state => state.diffViewType === PARALLEL_DIFF_VIEW_TYPE;
@@ -99,6 +100,29 @@ export const getCommentFormForDiffFile = state => fileHash =>
state.commentForms.find(form => form.fileHash === fileHash);
/**
+ * Returns the test coverage data for a specific line of a given file
+ * @param {string} file
+ * @param {number} line
+ * @returns {Object} tooltip text and CSS class for the line's coverage, or an empty object
+ */
+export const fileLineCoverage = state => (file, line) => {
+ if (!state.coverageFiles.files) return {};
+ const fileCoverage = state.coverageFiles.files[file];
+ if (!fileCoverage) return {};
+ const lineCoverage = fileCoverage[String(line)];
+
+ if (lineCoverage === 0) {
+ return { text: __('No test coverage'), class: 'no-coverage' };
+ } else if (lineCoverage >= 0) {
+ return {
+ text: n__('Test coverage: %d hit', 'Test coverage: %d hits', lineCoverage),
+ class: 'coverage',
+ };
+ }
+ return {};
+};
+
+/**
* Returns index of a currently selected diff in diffFiles
* @returns {number}
*/
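To make the new fileLineCoverage getter concrete, a small sketch (not part of the patch) of the coverage state shape it expects and what it returns; the file path and hit counts are made up.

const state = {
  coverageFiles: {
    files: {
      'app/models/user.rb': { '1': 3, '2': 0 },
    },
  },
};

// fileLineCoverage(state)('app/models/user.rb', 1)  => { text: 'Test coverage: 3 hits', class: 'coverage' }
// fileLineCoverage(state)('app/models/user.rb', 2)  => { text: 'No test coverage', class: 'no-coverage' }
// fileLineCoverage(state)('app/models/user.rb', 99) => {} (no data recorded for that line)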
diff --git a/app/assets/javascripts/diffs/store/modules/diff_state.js b/app/assets/javascripts/diffs/store/modules/diff_state.js
index 011cd24500a..81f1506260c 100644
--- a/app/assets/javascripts/diffs/store/modules/diff_state.js
+++ b/app/assets/javascripts/diffs/store/modules/diff_state.js
@@ -17,6 +17,7 @@ export default () => ({
commit: null,
startVersion: null,
diffFiles: [],
+ coverageFiles: {},
mergeRequestDiffs: [],
mergeRequestDiff: null,
diffViewType: viewTypeFromQueryString || viewTypeFromCookie || defaultViewType,
diff --git a/app/assets/javascripts/diffs/store/mutation_types.js b/app/assets/javascripts/diffs/store/mutation_types.js
index 2097c8d3655..4436935c1ec 100644
--- a/app/assets/javascripts/diffs/store/mutation_types.js
+++ b/app/assets/javascripts/diffs/store/mutation_types.js
@@ -5,6 +5,7 @@ export const SET_RETRIEVING_BATCHES = 'SET_RETRIEVING_BATCHES';
export const SET_DIFF_DATA = 'SET_DIFF_DATA';
export const SET_DIFF_DATA_BATCH = 'SET_DIFF_DATA_BATCH';
export const SET_DIFF_VIEW_TYPE = 'SET_DIFF_VIEW_TYPE';
+export const SET_COVERAGE_DATA = 'SET_COVERAGE_DATA';
export const SET_MERGE_REQUEST_DIFFS = 'SET_MERGE_REQUEST_DIFFS';
export const TOGGLE_LINE_HAS_FORM = 'TOGGLE_LINE_HAS_FORM';
export const ADD_CONTEXT_LINES = 'ADD_CONTEXT_LINES';
diff --git a/app/assets/javascripts/diffs/store/mutations.js b/app/assets/javascripts/diffs/store/mutations.js
index c26411af5d7..bb4c80b5759 100644
--- a/app/assets/javascripts/diffs/store/mutations.js
+++ b/app/assets/javascripts/diffs/store/mutations.js
@@ -16,6 +16,7 @@ export default {
endpoint,
endpointMetadata,
endpointBatch,
+ endpointCoverage,
projectPath,
dismissEndpoint,
showSuggestPopover,
@@ -25,6 +26,7 @@ export default {
endpoint,
endpointMetadata,
endpointBatch,
+ endpointCoverage,
projectPath,
dismissEndpoint,
showSuggestPopover,
@@ -69,6 +71,10 @@ export default {
});
},
+ [types.SET_COVERAGE_DATA](state, coverageFiles) {
+ Object.assign(state, { coverageFiles });
+ },
+
[types.RENDER_FILE](state, file) {
Object.assign(file, {
renderIt: true,
@@ -140,6 +146,7 @@ export default {
addContextLines({
inlineLines: diffFile.highlighted_diff_lines,
parallelLines: diffFile.parallel_diff_lines,
+ diffViewType: state.diffViewType,
contextLines: lines,
bottom,
lineNumbers,
diff --git a/app/assets/javascripts/diffs/store/utils.js b/app/assets/javascripts/diffs/store/utils.js
index 80972d2aeb8..9c788e283b9 100644
--- a/app/assets/javascripts/diffs/store/utils.js
+++ b/app/assets/javascripts/diffs/store/utils.js
@@ -1,4 +1,4 @@
-import _ from 'underscore';
+import { property, isEqual } from 'lodash';
import { truncatePathMiddleToLength } from '~/lib/utils/text_utility';
import { diffModes, diffViewerModes } from '~/ide/constants';
import {
@@ -13,6 +13,8 @@ import {
LINES_TO_BE_RENDERED_DIRECTLY,
MAX_LINES_TO_BE_RENDERED,
TREE_TYPE,
+ INLINE_DIFF_VIEW_TYPE,
+ PARALLEL_DIFF_VIEW_TYPE,
} from '../constants';
export function findDiffFile(files, match, matchKey = 'file_hash') {
@@ -93,8 +95,7 @@ export function getNoteFormData(params) {
export const findIndexInInlineLines = (lines, lineNumbers) => {
const { oldLineNumber, newLineNumber } = lineNumbers;
- return _.findIndex(
- lines,
+ return lines.findIndex(
line => line.old_line === oldLineNumber && line.new_line === newLineNumber,
);
};
@@ -102,8 +103,7 @@ export const findIndexInInlineLines = (lines, lineNumbers) => {
export const findIndexInParallelLines = (lines, lineNumbers) => {
const { oldLineNumber, newLineNumber } = lineNumbers;
- return _.findIndex(
- lines,
+ return lines.findIndex(
line =>
line.left &&
line.right &&
@@ -112,13 +112,32 @@ export const findIndexInParallelLines = (lines, lineNumbers) => {
);
};
+const indexGettersByViewType = {
+ [INLINE_DIFF_VIEW_TYPE]: findIndexInInlineLines,
+ [PARALLEL_DIFF_VIEW_TYPE]: findIndexInParallelLines,
+};
+
+export const getPreviousLineIndex = (diffViewType, file, lineNumbers) => {
+ const findIndex = indexGettersByViewType[diffViewType];
+ const lines = {
+ [INLINE_DIFF_VIEW_TYPE]: file.highlighted_diff_lines,
+ [PARALLEL_DIFF_VIEW_TYPE]: file.parallel_diff_lines,
+ };
+
+ return findIndex && findIndex(lines[diffViewType], lineNumbers);
+};
+
export function removeMatchLine(diffFile, lineNumbers, bottom) {
const indexForInline = findIndexInInlineLines(diffFile.highlighted_diff_lines, lineNumbers);
const indexForParallel = findIndexInParallelLines(diffFile.parallel_diff_lines, lineNumbers);
const factor = bottom ? 1 : -1;
- diffFile.highlighted_diff_lines.splice(indexForInline + factor, 1);
- diffFile.parallel_diff_lines.splice(indexForParallel + factor, 1);
+ if (indexForInline > -1) {
+ diffFile.highlighted_diff_lines.splice(indexForInline + factor, 1);
+ }
+ if (indexForParallel > -1) {
+ diffFile.parallel_diff_lines.splice(indexForParallel + factor, 1);
+ }
}
export function addLineReferences(lines, lineNumbers, bottom, isExpandDown, nextLineNumbers) {
@@ -160,8 +179,8 @@ export function addLineReferences(lines, lineNumbers, bottom, isExpandDown, next
return linesWithNumbers;
}
-export function addContextLines(options) {
- const { inlineLines, parallelLines, contextLines, lineNumbers, isExpandDown } = options;
+function addParallelContextLines(options) {
+ const { parallelLines, contextLines, lineNumbers, isExpandDown } = options;
const normalizedParallelLines = contextLines.map(line => ({
left: line,
right: line,
@@ -170,17 +189,40 @@ export function addContextLines(options) {
const factor = isExpandDown ? 1 : 0;
if (!isExpandDown && options.bottom) {
- inlineLines.push(...contextLines);
parallelLines.push(...normalizedParallelLines);
} else {
- const inlineIndex = findIndexInInlineLines(inlineLines, lineNumbers);
const parallelIndex = findIndexInParallelLines(parallelLines, lineNumbers);
- inlineLines.splice(inlineIndex + factor, 0, ...contextLines);
parallelLines.splice(parallelIndex + factor, 0, ...normalizedParallelLines);
}
}
+function addInlineContextLines(options) {
+ const { inlineLines, contextLines, lineNumbers, isExpandDown } = options;
+ const factor = isExpandDown ? 1 : 0;
+
+ if (!isExpandDown && options.bottom) {
+ inlineLines.push(...contextLines);
+ } else {
+ const inlineIndex = findIndexInInlineLines(inlineLines, lineNumbers);
+
+ inlineLines.splice(inlineIndex + factor, 0, ...contextLines);
+ }
+}
+
+export function addContextLines(options) {
+ const { diffViewType } = options;
+ const contextLineHandlers = {
+ [INLINE_DIFF_VIEW_TYPE]: addInlineContextLines,
+ [PARALLEL_DIFF_VIEW_TYPE]: addParallelContextLines,
+ };
+ const contextLineHandler = contextLineHandlers[diffViewType];
+
+ if (contextLineHandler) {
+ contextLineHandler(options);
+ }
+}
+
/**
* Trims the first char of the `richText` property when it's either a space or a diff symbol.
* @param {Object} line
@@ -400,7 +442,7 @@ export function isDiscussionApplicableToLine({ discussion, diffPosition, latestD
const originalRefs = discussion.original_position;
const refs = discussion.position;
- return _.isEqual(refs, diffPositionCopy) || _.isEqual(originalRefs, diffPositionCopy);
+ return isEqual(refs, diffPositionCopy) || isEqual(originalRefs, diffPositionCopy);
}
// eslint-disable-next-line
@@ -536,10 +578,10 @@ export const convertExpandLines = ({
for (let i = 0, diffLinesLength = diffLines.length; i < diffLinesLength; i += 1) {
const line = diffLines[i];
- if (_.property(typeKey)(line) === 'match') {
+ if (property(typeKey)(line) === 'match') {
const beforeLine = diffLines[i - 1];
const afterLine = diffLines[i + 1];
- const newLineProperty = _.property(newLineKey);
+ const newLineProperty = property(newLineKey);
const beforeLineIndex = newLineProperty(beforeLine) || 0;
const afterLineIndex = newLineProperty(afterLine) - 1 || dataLength;
@@ -547,7 +589,7 @@ export const convertExpandLines = ({
...data.slice(beforeLineIndex, afterLineIndex).map((l, index) =>
mapLine({
line: Object.assign(l, { hasForm: false, discussions: [] }),
- oldLine: (_.property(oldLineKey)(beforeLine) || 0) + index + 1,
+ oldLine: (property(oldLineKey)(beforeLine) || 0) + index + 1,
newLine: (newLineProperty(beforeLine) || 0) + index + 1,
}),
),
diff --git a/app/assets/javascripts/dropzone_input.js b/app/assets/javascripts/dropzone_input.js
index 86590865892..0e2dd59092a 100644
--- a/app/assets/javascripts/dropzone_input.js
+++ b/app/assets/javascripts/dropzone_input.js
@@ -259,8 +259,15 @@ export default function dropzoneInput(form) {
const insertToTextArea = (filename, url) => {
const $child = $(child);
- $child.val((index, val) => val.replace(`{{${filename}}}`, url));
-
+ const textarea = $child.get(0);
+ const caretStart = textarea.selectionStart;
+ const caretEnd = textarea.selectionEnd;
+ const formattedText = `{{${filename}}}`;
+ $child.val((index, val) => val.replace(formattedText, url));
+ textarea.setSelectionRange(
+ caretStart - formattedText.length + url.length,
+ caretEnd - formattedText.length + url.length,
+ );
$child.trigger('change');
};
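A runnable walk-through (not part of the patch) of the caret arithmetic above, with made-up values; it shows why shifting both selection offsets by url.length - formattedText.length keeps the caret just after the inserted URL.

const formattedText = '{{shot.png}}';            // 12 characters
const url = '/uploads/abc/shot.png';             // 21 characters
const before = `See ${formattedText} for details`;
const caretStart = before.indexOf(formattedText) + formattedText.length; // 16, caret right after the placeholder

const after = before.replace(formattedText, url);
const newCaret = caretStart - formattedText.length + url.length;         // 25

console.log(after.slice(0, newCaret)); // "See /uploads/abc/shot.png", so the caret still follows the URL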
diff --git a/app/assets/javascripts/due_date_select.js b/app/assets/javascripts/due_date_select.js
index 218bf41cd58..6ebbeecae1d 100644
--- a/app/assets/javascripts/due_date_select.js
+++ b/app/assets/javascripts/due_date_select.js
@@ -44,6 +44,7 @@ class DueDateSelect {
this.$selectbox.hide();
this.$value.css('display', '');
},
+ shouldPropagate: false,
});
}
diff --git a/app/assets/javascripts/editor/editor_lite.js b/app/assets/javascripts/editor/editor_lite.js
index 8711f6e65af..663d14bcfcb 100644
--- a/app/assets/javascripts/editor/editor_lite.js
+++ b/app/assets/javascripts/editor/editor_lite.js
@@ -1,5 +1,5 @@
import { editor as monacoEditor, languages as monacoLanguages, Uri } from 'monaco-editor';
-import whiteTheme from '~/ide/lib/themes/white';
+import { DEFAULT_THEME, themes } from '~/ide/lib/themes';
import { defaultEditorOptions } from '~/ide/lib/editor_options';
import { clearDomElement } from './utils';
@@ -11,6 +11,7 @@ export default class Editor {
this.instance = null;
this.model = null;
this.options = {
+ extraEditorClassName: 'gl-editor-lite',
...defaultEditorOptions,
...options,
};
@@ -19,8 +20,10 @@ export default class Editor {
}
static setupMonacoTheme() {
- monacoEditor.defineTheme('white', whiteTheme);
- monacoEditor.setTheme('white');
+ const themeName = window.gon?.user_color_scheme || DEFAULT_THEME;
+ const theme = themes.find(t => t.name === themeName);
+ if (theme) monacoEditor.defineTheme(themeName, theme.data);
+ monacoEditor.setTheme(theme ? themeName : DEFAULT_THEME);
}
createInstance({ el = undefined, blobPath = '', blobContent = '' } = {}) {
diff --git a/app/assets/javascripts/emoji/index.js b/app/assets/javascripts/emoji/index.js
index cd8dff40b88..27dff8cf9aa 100644
--- a/app/assets/javascripts/emoji/index.js
+++ b/app/assets/javascripts/emoji/index.js
@@ -1,4 +1,4 @@
-import _ from 'underscore';
+import { uniq } from 'lodash';
import emojiMap from 'emojis/digests.json';
import emojiAliases from 'emojis/aliases.json';
@@ -18,7 +18,7 @@ export function filterEmojiNames(filter) {
}
export function filterEmojiNamesByAlias(filter) {
- return _.uniq(filterEmojiNames(filter).map(name => normalizeEmojiName(name)));
+ return uniq(filterEmojiNames(filter).map(name => normalizeEmojiName(name)));
}
let emojiCategoryMap;
diff --git a/app/assets/javascripts/environments/components/enable_review_app_button.vue b/app/assets/javascripts/environments/components/enable_review_app_button.vue
index 2f9e9cb628f..8fbbc5189bf 100644
--- a/app/assets/javascripts/environments/components/enable_review_app_button.vue
+++ b/app/assets/javascripts/environments/components/enable_review_app_button.vue
@@ -26,15 +26,17 @@ export default {
modalInfo: {
closeText: s__('EnableReviewApp|Close'),
copyToClipboardText: s__('EnableReviewApp|Copy snippet text'),
- copyString: `deploy_review
+ copyString: `deploy_review:
stage: deploy
script:
- echo "Deploy a review app"
environment:
name: review/$CI_COMMIT_REF_NAME
url: https://$CI_ENVIRONMENT_SLUG.example.com
- only: branches
- except: master`,
+ only:
+ - branches
+ except:
+ - master`,
id: 'enable-review-app-info',
title: s__('ReviewApp|Enable Review App'),
},
diff --git a/app/assets/javascripts/error_tracking/components/error_details.vue b/app/assets/javascripts/error_tracking/components/error_details.vue
index 047a3f99706..6c4f59eb49f 100644
--- a/app/assets/javascripts/error_tracking/components/error_details.vue
+++ b/app/assets/javascripts/error_tracking/components/error_details.vue
@@ -10,9 +10,11 @@ import {
GlBadge,
GlAlert,
GlSprintf,
+ GlDropdown,
+ GlDropdownItem,
+ GlDropdownDivider,
} from '@gitlab/ui';
import { __, sprintf, n__ } from '~/locale';
-import LoadingButton from '~/vue_shared/components/loading_button.vue';
import Icon from '~/vue_shared/components/icon.vue';
import TooltipOnTruncate from '~/vue_shared/components/tooltip_on_truncate.vue';
import Stacktrace from './stacktrace.vue';
@@ -23,9 +25,10 @@ import { severityLevel, severityLevelVariant, errorStatus } from './constants';
import query from '../queries/details.query.graphql';
+const SENTRY_TIMEOUT = 10000;
+
export default {
components: {
- LoadingButton,
GlButton,
GlFormInput,
GlLink,
@@ -36,6 +39,9 @@ export default {
GlBadge,
GlAlert,
GlSprintf,
+ GlDropdown,
+ GlDropdownItem,
+ GlDropdownDivider,
},
directives: {
TrackEvent: TrackEventDirective,
@@ -83,6 +89,8 @@ export default {
if (res.data.project?.sentryErrors?.detailedError) {
this.$apollo.queries.error.stopPolling();
this.setStatus(this.error.status);
+ } else {
+ this.onNoApolloResult();
}
},
},
@@ -90,6 +98,8 @@ export default {
data() {
return {
error: null,
+ errorLoading: true,
+ errorPollTimeout: 0,
issueCreationInProgress: false,
isAlertVisible: false,
closedIssueId: null,
@@ -134,6 +144,11 @@ export default {
false,
);
},
+ issueUpdateInProgress() {
+ return (
+ this.updatingIgnoreStatus || this.updatingResolveStatus || this.issueCreationInProgress
+ );
+ },
errorLevel() {
return sprintf(__('level: %{level}'), { level: this.error.tags.level });
},
@@ -149,8 +164,19 @@ export default {
return this.errorStatus !== errorStatus.RESOLVED ? __('Resolve') : __('Unresolve');
},
},
+ watch: {
+ error(val) {
+ if (val) {
+ this.errorLoading = false;
+ }
+ },
+ },
mounted() {
this.startPollingStacktrace(this.issueStackTracePath);
+ this.errorPollTimeout = Date.now() + SENTRY_TIMEOUT;
+ this.$apollo.queries.error.setOptions({
+ fetchPolicy: 'cache-and-network',
+ });
},
methods: {
...mapActions('details', [
@@ -182,6 +208,13 @@ export default {
}
});
},
+ onNoApolloResult() {
+ if (Date.now() > this.errorPollTimeout) {
+ this.$apollo.queries.error.stopPolling();
+ this.errorLoading = false;
+ createFlash(__('Could not connect to Sentry. Refresh the page to try again.'), 'warning');
+ }
+ },
formatDate(date) {
return `${this.timeFormatted(date)} (${dateFormat(date, 'UTC:yyyy-mm-dd h:MM:ssTT Z')})`;
},
@@ -191,7 +224,7 @@ export default {
<template>
<div>
- <div v-if="$apollo.queries.error.loading" class="py-3">
+ <div v-if="errorLoading" class="py-3">
<gl-loading-icon :size="3" />
</div>
<div v-else-if="error" class="error-details">
@@ -207,64 +240,104 @@ export default {
</gl-sprintf>
</gl-alert>
- <div class="top-area align-items-center justify-content-between py-3">
- <div v-if="!loadingStacktrace && stacktrace" data-qa-selector="reported_text">
+ <div class="error-details-header d-flex py-2 justify-content-between">
+ <div
+ v-if="!loadingStacktrace && stacktrace"
+ class="error-details-meta my-auto"
+ data-qa-selector="reported_text"
+ >
<gl-sprintf :message="__('Reported %{timeAgo} by %{reportedBy}')">
<template #reportedBy>
- <strong>{{ error.culprit }}</strong>
+ <strong class="error-details-meta-culprit">{{ error.culprit }}</strong>
</template>
<template #timeAgo>
{{ timeFormatted(stacktraceData.date_received) }}
</template>
</gl-sprintf>
</div>
-
- <div class="d-inline-flex ml-lg-auto">
- <loading-button
- :label="ignoreBtnLabel"
- :loading="updatingIgnoreStatus"
- data-qa-selector="update_ignore_status_button"
- @click="onIgnoreStatusUpdate"
- />
- <loading-button
- class="btn-outline-info ml-2"
- :label="resolveBtnLabel"
- :loading="updatingResolveStatus"
- data-qa-selector="update_resolve_status_button"
- @click="onResolveStatusUpdate"
- />
- <gl-button
- v-if="error.gitlabIssuePath"
- class="ml-2"
- data-qa-selector="view_issue_button"
- :href="error.gitlabIssuePath"
- variant="success"
- >
- {{ __('View issue') }}
- </gl-button>
- <form
- ref="sentryIssueForm"
- :action="projectIssuesPath"
- method="POST"
- class="d-inline-block ml-2"
+ <div class="error-details-actions">
+ <div class="d-inline-flex bv-d-sm-down-none">
+ <gl-button
+ :loading="updatingIgnoreStatus"
+ data-qa-selector="update_ignore_status_button"
+ @click="onIgnoreStatusUpdate"
+ >
+ {{ ignoreBtnLabel }}
+ </gl-button>
+ <gl-button
+ class="btn-outline-info ml-2"
+ :loading="updatingResolveStatus"
+ data-qa-selector="update_resolve_status_button"
+ @click="onResolveStatusUpdate"
+ >
+ {{ resolveBtnLabel }}
+ </gl-button>
+ <gl-button
+ v-if="error.gitlabIssuePath"
+ class="ml-2"
+ data-qa-selector="view_issue_button"
+ :href="error.gitlabIssuePath"
+ variant="success"
+ >
+ {{ __('View issue') }}
+ </gl-button>
+ <form
+ ref="sentryIssueForm"
+ :action="projectIssuesPath"
+ method="POST"
+ class="d-inline-block ml-2"
+ >
+ <gl-form-input class="hidden" name="issue[title]" :value="issueTitle" />
+ <input name="issue[description]" :value="issueDescription" type="hidden" />
+ <gl-form-input
+ :value="error.sentryId"
+ class="hidden"
+ name="issue[sentry_issue_attributes][sentry_issue_identifier]"
+ />
+ <gl-form-input :value="csrfToken" class="hidden" name="authenticity_token" />
+ <gl-button
+ v-if="!error.gitlabIssuePath"
+ class="btn-success"
+ :loading="issueCreationInProgress"
+ data-qa-selector="create_issue_button"
+ @click="createIssue"
+ >
+ {{ __('Create issue') }}
+ </gl-button>
+ </form>
+ </div>
+ <gl-dropdown
+ text="Options"
+ class="error-details-options d-md-none"
+ right
+ :disabled="issueUpdateInProgress"
>
- <gl-form-input class="hidden" name="issue[title]" :value="issueTitle" />
- <input name="issue[description]" :value="issueDescription" type="hidden" />
- <gl-form-input
- :value="error.sentryId"
- class="hidden"
- name="issue[sentry_issue_attributes][sentry_issue_identifier]"
- />
- <gl-form-input :value="csrfToken" class="hidden" name="authenticity_token" />
- <loading-button
+ <gl-dropdown-item
+ data-qa-selector="update_ignore_status_button"
+ @click="onIgnoreStatusUpdate"
+ >{{ ignoreBtnLabel }}</gl-dropdown-item
+ >
+ <gl-dropdown-item
+ data-qa-selector="update_resolve_status_button"
+ @click="onResolveStatusUpdate"
+ >{{ resolveBtnLabel }}</gl-dropdown-item
+ >
+ <gl-dropdown-divider />
+ <gl-dropdown-item
+ v-if="error.gitlabIssuePath"
+ data-qa-selector="view_issue_button"
+ :href="error.gitlabIssuePath"
+ variant="success"
+ >{{ __('View issue') }}</gl-dropdown-item
+ >
+ <gl-dropdown-item
v-if="!error.gitlabIssuePath"
- class="btn-success"
- :label="__('Create issue')"
:loading="issueCreationInProgress"
data-qa-selector="create_issue_button"
@click="createIssue"
- />
- </form>
+ >{{ __('Create issue') }}</gl-dropdown-item
+ >
+ </gl-dropdown>
</div>
</div>
<div>
@@ -300,7 +373,6 @@ export default {
<strong class="bold">{{ __('Sentry event') }}:</strong>
<gl-link
v-track-event="trackClickErrorLinkToSentryOptions(error.externalUrl)"
- class="d-inline-flex align-items-center"
:href="error.externalUrl"
target="_blank"
>
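The error-details changes above add a client-side deadline to the Apollo polling loop: a timestamp is recorded on mount, and when a poll comes back without a detailed error after that deadline, polling stops and a warning flash is shown. Stripped of the Vue/Apollo wiring, the logic is roughly this sketch (the `stopPolling`/`showWarning` hooks stand in for the Apollo smart query and `createFlash`):

// Sketch of the poll-with-deadline logic used by error_details.vue (framework-free).
const SENTRY_TIMEOUT = 10000; // ms

function createPollGuard({ stopPolling, showWarning }) {
  const deadline = Date.now() + SENTRY_TIMEOUT;

  return function onResult(detailedError) {
    if (detailedError) {
      stopPolling(); // got data: stop polling and render the details
      return true;
    }
    if (Date.now() > deadline) {
      stopPolling(); // give up after the timeout and surface a warning
      showWarning('Could not connect to Sentry. Refresh the page to try again.');
    }
    return false;
  };
}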
diff --git a/app/assets/javascripts/error_tracking/components/error_tracking_list.vue b/app/assets/javascripts/error_tracking/components/error_tracking_list.vue
index 552e8cac3a7..0e160e8d568 100644
--- a/app/assets/javascripts/error_tracking/components/error_tracking_list.vue
+++ b/app/assets/javascripts/error_tracking/components/error_tracking_list.vue
@@ -16,12 +16,11 @@ import {
GlButtonGroup,
} from '@gitlab/ui';
import AccessorUtils from '~/lib/utils/accessor';
-import Icon from '~/vue_shared/components/icon.vue';
import TimeAgo from '~/vue_shared/components/time_ago_tooltip.vue';
import { __ } from '~/locale';
import { isEmpty } from 'lodash';
-export const tableDataClass = 'table-col d-flex d-sm-table-cell align-items-center';
+export const tableDataClass = 'table-col d-flex d-md-table-cell align-items-center';
export default {
FIRST_PAGE: 1,
@@ -36,7 +35,7 @@ export default {
key: 'error',
label: __('Error'),
thClass: 'w-60p',
- tdClass: `${tableDataClass} px-3`,
+ tdClass: `${tableDataClass} px-3 rounded-top`,
},
{
key: 'events',
@@ -59,14 +58,19 @@ export default {
{
key: 'status',
label: '',
- tdClass: `${tableDataClass} text-right`,
+ tdClass: `table-col d-none d-md-table-cell align-items-center pl-md-0`,
},
{
key: 'details',
- tdClass: 'table-col d-sm-none d-flex align-items-center',
+ tdClass: 'table-col d-md-none d-flex align-items-center rounded-bottom bg-secondary',
thClass: 'invisible w-0',
},
],
+ statusFilters: {
+ unresolved: __('Unresolved'),
+ ignored: __('Ignored'),
+ resolved: __('Resolved'),
+ },
sortFields: {
last_seen: __('Last Seen'),
first_seen: __('First Seen'),
@@ -83,7 +87,6 @@ export default {
GlLoadingIcon,
GlTable,
GlFormInput,
- Icon,
GlPagination,
TimeAgo,
GlButtonGroup,
@@ -136,6 +139,7 @@ export default {
'sortField',
'recentSearches',
'pagination',
+ 'statusFilter',
'cursor',
]),
paginationRequired() {
@@ -169,6 +173,7 @@ export default {
'fetchPaginatedResults',
'updateStatus',
'removeIgnoredResolvedErrors',
+ 'filterByStatus',
]),
setSearchText(text) {
this.errorSearchQuery = text;
@@ -191,9 +196,16 @@ export default {
isCurrentSortField(field) {
return field === this.sortField;
},
+ isCurrentStatusFilter(filter) {
+ return filter === this.statusFilter;
+ },
getIssueUpdatePath(errorId) {
return `/${this.projectPath}/-/error_tracking/${errorId}.json`;
},
+ filterErrors(status, label) {
+ this.filterValue = label;
+ return this.filterByStatus(status);
+ },
updateIssueStatus(errorId, status) {
this.updateStatus({
endpoint: this.getIssueUpdatePath(errorId),
@@ -209,7 +221,7 @@ export default {
<div class="error-list">
<div v-if="errorTrackingEnabled">
<div class="row flex-column flex-sm-row align-items-sm-center row-top m-0 mt-sm-2 p-0 p-sm-3">
- <div class="search-box flex-fill mr-sm-2 my-3 m-sm-0 p-3 p-sm-0">
+ <div class="search-box flex-fill mr-sm-2 my-3 m-sm-0 p-3 p-sm-0 bg-secondary">
<div class="filtered-search-box mb-0">
<gl-dropdown
:text="__('Recent searches')"
@@ -260,11 +272,32 @@ export default {
</div>
<gl-dropdown
- class="sort-control"
+ :text="$options.statusFilters[statusFilter]"
+ class="status-dropdown mr-2"
+ menu-class="dropdown"
+ :disabled="loading"
+ >
+ <gl-dropdown-item
+ v-for="(label, status) in $options.statusFilters"
+ :key="status"
+ @click="filterErrors(status, label)"
+ >
+ <span class="d-flex">
+ <gl-icon
+ class="flex-shrink-0 append-right-4"
+ :class="{ invisible: !isCurrentStatusFilter(status) }"
+ name="mobile-issue-close"
+ />
+ {{ label }}
+ </span>
+ </gl-dropdown-item>
+ </gl-dropdown>
+
+ <gl-dropdown
:text="$options.sortFields[sortField]"
left
:disabled="loading"
- menu-class="sort-dropdown"
+ menu-class="dropdown"
>
<gl-dropdown-item
v-for="(label, field) in $options.sortFields"
@@ -272,7 +305,7 @@ export default {
@click="sortByField(field)"
>
<span class="d-flex">
- <icon
+ <gl-icon
class="flex-shrink-0 append-right-4"
:class="{ invisible: !isCurrentSortField(field) }"
name="mobile-issue-close"
@@ -288,25 +321,25 @@ export default {
</div>
<template v-else>
- <h4 class="d-block d-sm-none my-3">{{ __('Open errors') }}</h4>
+ <h4 class="d-block d-md-none my-3">{{ __('Open errors') }}</h4>
<gl-table
- class="mt-3"
+ class="error-list-table mt-3"
:items="errors"
:fields="$options.fields"
:show-empty="true"
fixed
- stacked="sm"
+ stacked="md"
tbody-tr-class="table-row mb-4"
>
<template #head(error)>
- <div class="d-none d-sm-block">{{ __('Open errors') }}</div>
+ <div class="d-none d-md-block">{{ __('Open errors') }}</div>
</template>
<template #head(events)="data">
- <div class="text-sm-right">{{ data.label }}</div>
+ <div class="text-md-right">{{ data.label }}</div>
</template>
<template #head(users)="data">
- <div class="text-sm-right">{{ data.label }}</div>
+ <div class="text-md-right">{{ data.label }}</div>
</template>
<template #cell(error)="errors">
@@ -328,7 +361,7 @@ export default {
</template>
<template #cell(lastSeen)="errors">
- <div class="text-md-left text-right">
+ <div class="text-lg-left text-right">
<time-ago :time="errors.item.lastSeen" class="text-secondary" />
</div>
</template>
@@ -348,9 +381,28 @@ export default {
</template>
<template #cell(details)="errors">
<gl-button
+ category="primary"
+ variant="info"
+ block
+ class="mb-1 mt-2"
+ @click="updateIssueStatus(errors.item.id, 'resolved')"
+ >
+ {{ __('Resolve') }}
+ </gl-button>
+ <gl-button
+ category="secondary"
+ variant="default"
+ block
+ class="mb-2"
+ @click="updateIssueStatus(errors.item.id, 'ignored')"
+ >
+ {{ __('Ignore') }}
+ </gl-button>
+ <gl-button
:href="getDetailsLink(errors.item.id)"
- variant="outline-info"
- class="d-block"
+ category="secondary"
+ variant="info"
+ class="d-block mb-2"
>
{{ __('More details') }}
</gl-button>
diff --git a/app/assets/javascripts/error_tracking/store/list/actions.js b/app/assets/javascripts/error_tracking/store/list/actions.js
index 6f8573c0f4d..4170c1bf759 100644
--- a/app/assets/javascripts/error_tracking/store/list/actions.js
+++ b/app/assets/javascripts/error_tracking/store/list/actions.js
@@ -18,6 +18,7 @@ export function startPolling({ state, commit, dispatch }) {
search_term: state.searchQuery,
sort: state.sortField,
cursor: state.cursor,
+ issue_status: state.statusFilter,
},
},
successCallback: ({ data }) => {
@@ -83,6 +84,12 @@ export const searchByQuery = ({ commit, dispatch }, query) => {
dispatch('startPolling');
};
+export const filterByStatus = ({ commit, dispatch }, status) => {
+ commit(types.SET_STATUS_FILTER, status);
+ dispatch('stopPolling');
+ dispatch('startPolling');
+};
+
export const sortByField = ({ commit, dispatch }, field) => {
commit(types.SET_CURSOR, null);
commit(types.SET_SORT_FIELD, field);
diff --git a/app/assets/javascripts/error_tracking/store/list/mutation_types.js b/app/assets/javascripts/error_tracking/store/list/mutation_types.js
index 23495cbf01d..872ac8ea8fc 100644
--- a/app/assets/javascripts/error_tracking/store/list/mutation_types.js
+++ b/app/assets/javascripts/error_tracking/store/list/mutation_types.js
@@ -10,3 +10,4 @@ export const SET_SORT_FIELD = 'SET_SORT_FIELD';
export const SET_SEARCH_QUERY = 'SET_SEARCH_QUERY';
export const SET_CURSOR = 'SET_CURSOR';
export const REMOVE_IGNORED_RESOLVED_ERRORS = 'REMOVE_IGNORED_RESOLVED_ERRORS';
+export const SET_STATUS_FILTER = 'SET_STATUS_FILTER';
diff --git a/app/assets/javascripts/error_tracking/store/list/mutations.js b/app/assets/javascripts/error_tracking/store/list/mutations.js
index 38d156263fb..be0cd4de78d 100644
--- a/app/assets/javascripts/error_tracking/store/list/mutations.js
+++ b/app/assets/javascripts/error_tracking/store/list/mutations.js
@@ -62,4 +62,7 @@ export default {
[types.REMOVE_IGNORED_RESOLVED_ERRORS](state, error) {
state.errors = state.errors.filter(err => err.id !== error);
},
+ [types.SET_STATUS_FILTER](state, query) {
+ state.statusFilter = query;
+ },
};
diff --git a/app/assets/javascripts/error_tracking/store/list/state.js b/app/assets/javascripts/error_tracking/store/list/state.js
index 225a805e709..eb983fde9e0 100644
--- a/app/assets/javascripts/error_tracking/store/list/state.js
+++ b/app/assets/javascripts/error_tracking/store/list/state.js
@@ -3,6 +3,7 @@ export default () => ({
loading: true,
endpoint: null,
sortField: 'last_seen',
+ statusFilter: 'unresolved',
searchQuery: null,
indexPath: '',
recentSearches: [],
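Taken together, the list component and store changes above wire a status filter from the dropdown down to the API request: the component dispatches `filterByStatus`, the action commits `SET_STATUS_FILTER` and restarts polling, and `startPolling` forwards the value as the `issue_status` query parameter (defaulting to `unresolved`). A condensed sketch of that wiring:

// Condensed Vuex-style sketch of the status-filter flow shown in the hunks above.
const state = () => ({ statusFilter: 'unresolved' });

const mutations = {
  SET_STATUS_FILTER(state, status) {
    state.statusFilter = status;
  },
};

const actions = {
  filterByStatus({ commit, dispatch }, status) {
    commit('SET_STATUS_FILTER', status);
    dispatch('stopPolling');
    dispatch('startPolling'); // next request sends issue_status: state.statusFilter
  },
};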
diff --git a/app/assets/javascripts/filtered_search/available_dropdown_mappings.js b/app/assets/javascripts/filtered_search/available_dropdown_mappings.js
index 5450abf4cbd..692b41da965 100644
--- a/app/assets/javascripts/filtered_search/available_dropdown_mappings.js
+++ b/app/assets/javascripts/filtered_search/available_dropdown_mappings.js
@@ -9,7 +9,7 @@ import DropdownUtils from './dropdown_utils';
import { mergeUrlParams } from '../lib/utils/url_utility';
export default class AvailableDropdownMappings {
- constructor(
+ constructor({
container,
runnerTagsEndpoint,
labelsEndpoint,
@@ -18,7 +18,7 @@ export default class AvailableDropdownMappings {
groupsOnly,
includeAncestorGroups,
includeDescendantGroups,
- ) {
+ }) {
this.container = container;
this.runnerTagsEndpoint = runnerTagsEndpoint;
this.labelsEndpoint = labelsEndpoint;
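The constructor refactor above moves `AvailableDropdownMappings` from a long positional argument list to a single destructured options object; the call site in `filtered_search_dropdown_manager.js`, further down in this diff, can then spread the manager's own properties instead of listing them in order. A self-contained sketch of the pattern (only a subset of the real parameters shown):

// Sketch of the positional-to-options-object refactor.
class AvailableDropdownMappings {
  constructor({ container, labelsEndpoint, milestonesEndpoint, groupsOnly = false }) {
    this.container = container;
    this.labelsEndpoint = labelsEndpoint;
    this.milestonesEndpoint = milestonesEndpoint;
    this.groupsOnly = groupsOnly;
  }
}

// The caller no longer depends on argument order; properties with matching
// names can be spread straight from the owning instance:
const manager = { container: document.body, labelsEndpoint: '/labels', milestonesEndpoint: '/milestones' };
const availableMappings = new AvailableDropdownMappings({ ...manager });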
diff --git a/app/assets/javascripts/filtered_search/dropdown_utils.js b/app/assets/javascripts/filtered_search/dropdown_utils.js
index 274c08e6955..43de86b09ee 100644
--- a/app/assets/javascripts/filtered_search/dropdown_utils.js
+++ b/app/assets/javascripts/filtered_search/dropdown_utils.js
@@ -1,4 +1,4 @@
-import _ from 'underscore';
+import { last } from 'lodash';
import FilteredSearchContainer from './container';
import FilteredSearchTokenizer from './filtered_search_tokenizer';
import FilteredSearchDropdownManager from './filtered_search_dropdown_manager';
@@ -70,11 +70,11 @@ export default class DropdownUtils {
if (!allowMultiple && itemInExistingTokens) {
updatedItem.droplab_hidden = true;
- } else if (!isSearchItem && (!lastKey || _.last(searchInput.split('')) === ' ')) {
+ } else if (!isSearchItem && (!lastKey || last(searchInput.split('')) === ' ')) {
updatedItem.droplab_hidden = false;
} else if (lastKey) {
const split = lastKey.split(':');
- const tokenName = _.last(split[0].split(' '));
+ const tokenName = last(split[0].split(' '));
const match = isSearchItem
? allowedKeys.some(key => key.startsWith(tokenName.toLowerCase()))
@@ -129,7 +129,7 @@ export default class DropdownUtils {
const values = [];
if (untilInput) {
- const inputIndex = _.findIndex(tokens, t => t.classList.contains('input-token'));
+ const inputIndex = tokens.findIndex(t => t.classList.contains('input-token'));
// Add one to include input-token to the tokens array
tokens.splice(inputIndex + 1);
}
diff --git a/app/assets/javascripts/filtered_search/filtered_search_dropdown_manager.js b/app/assets/javascripts/filtered_search/filtered_search_dropdown_manager.js
index 566fb295588..d051b60814e 100644
--- a/app/assets/javascripts/filtered_search/filtered_search_dropdown_manager.js
+++ b/app/assets/javascripts/filtered_search/filtered_search_dropdown_manager.js
@@ -1,5 +1,5 @@
+import { last } from 'lodash';
import AvailableDropdownMappings from 'ee_else_ce/filtered_search/available_dropdown_mappings';
-import _ from 'underscore';
import DropLab from '~/droplab/drop_lab';
import FilteredSearchContainer from './container';
import FilteredSearchTokenKeys from './filtered_search_token_keys';
@@ -13,6 +13,7 @@ export default class FilteredSearchDropdownManager {
labelsEndpoint = '',
milestonesEndpoint = '',
releasesEndpoint = '',
+ epicsEndpoint = '',
tokenizer,
page,
isGroup,
@@ -27,6 +28,7 @@ export default class FilteredSearchDropdownManager {
this.labelsEndpoint = removeTrailingSlash(labelsEndpoint);
this.milestonesEndpoint = removeTrailingSlash(milestonesEndpoint);
this.releasesEndpoint = removeTrailingSlash(releasesEndpoint);
+ this.epicsEndpoint = removeTrailingSlash(epicsEndpoint);
this.tokenizer = tokenizer;
this.filteredSearchTokenKeys = filteredSearchTokenKeys || FilteredSearchTokenKeys;
this.filteredSearchInput = this.container.querySelector('.filtered-search');
@@ -54,16 +56,8 @@ export default class FilteredSearchDropdownManager {
setupMapping() {
const supportedTokens = this.filteredSearchTokenKeys.getKeys();
- const availableMappings = new AvailableDropdownMappings(
- this.container,
- this.runnerTagsEndpoint,
- this.labelsEndpoint,
- this.milestonesEndpoint,
- this.releasesEndpoint,
- this.groupsOnly,
- this.includeAncestorGroups,
- this.includeDescendantGroups,
- );
+
+ const availableMappings = new AvailableDropdownMappings({ ...this });
this.mapping = availableMappings.getAllowedMappings(supportedTokens);
}
@@ -190,8 +184,8 @@ export default class FilteredSearchDropdownManager {
// Eg. token = 'label:'
const split = lastToken.split(':');
- const dropdownName = _.last(split[0].split(' '));
- const possibleOperatorToken = _.last(split[1]);
+ const dropdownName = last(split[0].split(' '));
+ const possibleOperatorToken = last(split[1]);
const hasOperator = FilteredSearchVisualTokens.permissibleOperatorValues.includes(
possibleOperatorToken && possibleOperatorToken.trim(),
diff --git a/app/assets/javascripts/filtered_search/filtered_search_manager.js b/app/assets/javascripts/filtered_search/filtered_search_manager.js
index 0b4f9457c54..7ea7313f648 100644
--- a/app/assets/javascripts/filtered_search/filtered_search_manager.js
+++ b/app/assets/javascripts/filtered_search/filtered_search_manager.js
@@ -1,4 +1,4 @@
-import _ from 'underscore';
+import { last } from 'lodash';
import recentSearchesStorageKeys from 'ee_else_ce/filtered_search/recent_searches_storage_keys';
import { getParameterByName, getUrlParamsArray } from '~/lib/utils/common_utils';
import IssuableFilteredSearchTokenKeys from '~/filtered_search/issuable_filtered_search_token_keys';
@@ -45,6 +45,11 @@ export default class FilteredSearchManager {
this.filteredSearchTokenKeys.enableMultipleAssignees();
}
+ const { epicsEndpoint } = this.filteredSearchInput.dataset;
+ if (!epicsEndpoint && this.filteredSearchTokenKeys.removeEpicToken) {
+ this.filteredSearchTokenKeys.removeEpicToken();
+ }
+
this.recentSearchesStore = new RecentSearchesStore({
isLocalStorageAvailable: RecentSearchesService.isAvailable(),
allowedKeys: this.filteredSearchTokenKeys.getKeys(),
@@ -88,12 +93,20 @@ export default class FilteredSearchManager {
if (this.filteredSearchInput) {
this.tokenizer = FilteredSearchTokenizer;
+ const {
+ runnerTagsEndpoint = '',
+ labelsEndpoint = '',
+ milestonesEndpoint = '',
+ releasesEndpoint = '',
+ epicsEndpoint = '',
+ } = this.filteredSearchInput.dataset;
+
this.dropdownManager = new FilteredSearchDropdownManager({
- runnerTagsEndpoint:
- this.filteredSearchInput.getAttribute('data-runner-tags-endpoint') || '',
- labelsEndpoint: this.filteredSearchInput.getAttribute('data-labels-endpoint') || '',
- milestonesEndpoint: this.filteredSearchInput.getAttribute('data-milestones-endpoint') || '',
- releasesEndpoint: this.filteredSearchInput.getAttribute('data-releases-endpoint') || '',
+ runnerTagsEndpoint,
+ labelsEndpoint,
+ milestonesEndpoint,
+ releasesEndpoint,
+ epicsEndpoint,
tokenizer: this.tokenizer,
page: this.page,
isGroup: this.isGroup,
@@ -443,7 +456,7 @@ export default class FilteredSearchManager {
if (fragments.length > 1) {
const inputValues = fragments[0].split(' ');
- const tokenKey = _.last(inputValues);
+ const tokenKey = last(inputValues);
if (inputValues.length > 1) {
inputValues.pop();
@@ -459,33 +472,6 @@ export default class FilteredSearchManager {
});
input.value = input.value.replace(`${tokenKey}:`, '');
}
-
- const splitSearchToken = searchToken && searchToken.split(' ');
- let lastSearchToken = _.last(splitSearchToken);
- lastSearchToken = lastSearchToken?.toLowerCase();
-
- /**
- * If user writes "milestone", a known token, in the input, we should not
- * wait for leading colon to flush it as a filter token.
- */
- if (this.filteredSearchTokenKeys.getKeys().includes(lastSearchToken)) {
- if (splitSearchToken.length > 1) {
- splitSearchToken.pop();
- const searchVisualTokens = splitSearchToken.join(' ');
-
- input.value = input.value.replace(searchVisualTokens, '');
- FilteredSearchVisualTokens.addSearchVisualToken(searchVisualTokens);
- }
- FilteredSearchVisualTokens.addFilterVisualToken(lastSearchToken, null, null, {
- uppercaseTokenName: this.filteredSearchTokenKeys.shouldUppercaseTokenName(
- lastSearchToken,
- ),
- capitalizeTokenValue: this.filteredSearchTokenKeys.shouldCapitalizeTokenValue(
- lastSearchToken,
- ),
- });
- input.value = input.value.replace(lastSearchToken, '');
- }
} else if (!isLastVisualTokenValid && !FilteredSearchVisualTokens.getLastTokenOperator()) {
const tokenKey = FilteredSearchVisualTokens.getLastTokenPartial();
const tokenOperator = searchToken && searchToken.trim();
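The filtered-search manager changes above also replace repeated `getAttribute('data-…')` calls with a single destructuring of `element.dataset`, adding the new `epicsEndpoint` and using empty-string defaults for absent attributes. The pattern in isolation:

// Sketch: read several data-* attributes in one destructuring, with defaults for missing ones.
// (`input` stands in for this.filteredSearchInput; a real caller would null-check it.)
const input = document.querySelector('.filtered-search');

const {
  labelsEndpoint = '',
  milestonesEndpoint = '',
  releasesEndpoint = '',
  epicsEndpoint = '',
} = input.dataset; // data-labels-endpoint -> labelsEndpoint, and so on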
diff --git a/app/assets/javascripts/filtered_search/issuable_filtered_search_token_keys.js b/app/assets/javascripts/filtered_search/issuable_filtered_search_token_keys.js
index 8722fc64b62..9bea7aa7b04 100644
--- a/app/assets/javascripts/filtered_search/issuable_filtered_search_token_keys.js
+++ b/app/assets/javascripts/filtered_search/issuable_filtered_search_token_keys.js
@@ -1,4 +1,4 @@
-import { flatten } from 'underscore';
+import { flattenDeep } from 'lodash';
import FilteredSearchTokenKeys from './filtered_search_token_keys';
import { __ } from '~/locale';
@@ -73,7 +73,7 @@ export const alternativeTokenKeys = [
},
];
-export const conditions = flatten(
+export const conditions = flattenDeep(
[
{
url: 'assignee_id=None',
diff --git a/app/assets/javascripts/filtered_search/stores/recent_searches_store.js b/app/assets/javascripts/filtered_search/stores/recent_searches_store.js
index 76d40bfdaf8..b3eb0475d6f 100644
--- a/app/assets/javascripts/filtered_search/stores/recent_searches_store.js
+++ b/app/assets/javascripts/filtered_search/stores/recent_searches_store.js
@@ -1,4 +1,4 @@
-import _ from 'underscore';
+import { uniq } from 'lodash';
class RecentSearchesStore {
constructor(initialState = {}, allowedKeys) {
@@ -20,7 +20,7 @@ class RecentSearchesStore {
setRecentSearches(searches = []) {
const trimmedSearches = searches.map(search => search.trim());
- this.state.recentSearches = _.uniq(trimmedSearches).slice(0, 5);
+ this.state.recentSearches = uniq(trimmedSearches).slice(0, 5);
return this.state.recentSearches;
}
}
diff --git a/app/assets/javascripts/filtered_search/visual_token_value.js b/app/assets/javascripts/filtered_search/visual_token_value.js
index 9f3cf881af4..b8f4cd8a1e1 100644
--- a/app/assets/javascripts/filtered_search/visual_token_value.js
+++ b/app/assets/javascripts/filtered_search/visual_token_value.js
@@ -1,4 +1,4 @@
-import _ from 'underscore';
+import { escape as esc } from 'lodash';
import { USER_TOKEN_TYPES } from 'ee_else_ce/filtered_search/constants';
import FilteredSearchContainer from '~/filtered_search/container';
import FilteredSearchVisualTokens from '~/filtered_search/filtered_search_visual_tokens';
@@ -28,6 +28,8 @@ export default class VisualTokenValue {
this.updateUserTokenAppearance(tokenValueContainer, tokenValueElement);
} else if (tokenType === 'my-reaction') {
this.updateEmojiTokenAppearance(tokenValueContainer, tokenValueElement);
+ } else if (tokenType === 'epic') {
+ this.updateEpicLabel(tokenValueContainer, tokenValueElement);
}
}
@@ -46,7 +48,7 @@ export default class VisualTokenValue {
tokenValueContainer.dataset.originalValue = tokenValue;
tokenValueElement.innerHTML = `
<img class="avatar s20" src="${user.avatar_url}" alt="">
- ${_.escape(user.name)}
+ ${esc(user.name)}
`;
/* eslint-enable no-param-reassign */
})
@@ -83,6 +85,39 @@ export default class VisualTokenValue {
.catch(() => new Flash(__('An error occurred while fetching label colors.')));
}
+ updateEpicLabel(tokenValueContainer) {
+ const tokenValue = this.tokenValue.replace(/^&/, '');
+ const filteredSearchInput = FilteredSearchContainer.container.querySelector('.filtered-search');
+ const { epicsEndpoint } = filteredSearchInput.dataset;
+ const epicsEndpointWithParams = FilteredSearchVisualTokens.getEndpointWithQueryParams(
+ `${epicsEndpoint}.json`,
+ filteredSearchInput.dataset.endpointQueryParams,
+ );
+
+ return AjaxCache.retrieve(epicsEndpointWithParams)
+ .then(epics => {
+ const matchingEpic = (epics || []).find(epic => epic.id === Number(tokenValue));
+
+ if (!matchingEpic) {
+ return;
+ }
+
+ VisualTokenValue.replaceEpicTitle(tokenValueContainer, matchingEpic.title, matchingEpic.id);
+ })
+ .catch(() => new Flash(__('An error occurred while adding formatted title for epic')));
+ }
+
+ static replaceEpicTitle(tokenValueContainer, epicTitle, epicId) {
+ const tokenContainer = tokenValueContainer;
+
+ const valueContainer = tokenContainer.querySelector('.value');
+
+ if (valueContainer) {
+ tokenContainer.dataset.originalValue = valueContainer.innerText;
+ valueContainer.innerText = `"${epicTitle}"::&${epicId}`;
+ }
+ }
+
static setTokenStyle(tokenValueContainer, backgroundColor, textColor) {
const token = tokenValueContainer;
diff --git a/app/assets/javascripts/frequent_items/components/frequent_items_list.vue b/app/assets/javascripts/frequent_items/components/frequent_items_list.vue
index 0ece64692ae..9d898d1a1a1 100644
--- a/app/assets/javascripts/frequent_items/components/frequent_items_list.vue
+++ b/app/assets/javascripts/frequent_items/components/frequent_items_list.vue
@@ -58,7 +58,7 @@ export default {
<template>
<div class="frequent-items-list-container">
- <ul class="list-unstyled">
+ <ul ref="frequentItemsList" class="list-unstyled">
<li v-if="isListEmpty" :class="{ 'section-failure': isFetchFailed }" class="section-empty">
{{ listEmptyMessage }}
</li>
diff --git a/app/assets/javascripts/frequent_items/components/frequent_items_list_item.vue b/app/assets/javascripts/frequent_items/components/frequent_items_list_item.vue
index 6188d41ae96..c0dadedbc51 100644
--- a/app/assets/javascripts/frequent_items/components/frequent_items_list_item.vue
+++ b/app/assets/javascripts/frequent_items/components/frequent_items_list_item.vue
@@ -1,6 +1,5 @@
<script>
/* eslint-disable vue/require-default-prop */
-import _ from 'underscore';
import Identicon from '~/vue_shared/components/identicon.vue';
import highlight from '~/lib/utils/highlight';
import { truncateNamespace } from '~/lib/utils/text_utility';
@@ -38,9 +37,6 @@ export default {
},
},
computed: {
- hasAvatar() {
- return _.isString(this.avatarUrl) && !_.isEmpty(this.avatarUrl);
- },
truncatedNamespace() {
return truncateNamespace(this.namespace);
},
@@ -54,8 +50,11 @@ export default {
<template>
<li class="frequent-items-list-item-container">
<a :href="webUrl" class="clearfix">
- <div class="frequent-items-item-avatar-container avatar-container rect-avatar s32">
- <img v-if="hasAvatar" :src="avatarUrl" class="avatar s32" />
+ <div
+ ref="frequentItemsItemAvatarContainer"
+ class="frequent-items-item-avatar-container avatar-container rect-avatar s32"
+ >
+ <img v-if="avatarUrl" ref="frequentItemsItemAvatar" :src="avatarUrl" class="avatar s32" />
<identicon
v-else
:entity-id="itemId"
@@ -64,16 +63,18 @@ export default {
class="rect-avatar"
/>
</div>
- <div class="frequent-items-item-metadata-container">
+ <div ref="frequentItemsItemMetadataContainer" class="frequent-items-item-metadata-container">
<div
+ ref="frequentItemsItemTitle"
:title="itemName"
- class="frequent-items-item-title js-frequent-items-item-title"
+ class="frequent-items-item-title"
v-html="highlightedItemName"
></div>
<div
v-if="namespace"
+ ref="frequentItemsItemNamespace"
:title="namespace"
- class="frequent-items-item-namespace js-frequent-items-item-namespace"
+ class="frequent-items-item-namespace"
>
{{ truncatedNamespace }}
</div>
diff --git a/app/assets/javascripts/frequent_items/components/frequent_items_search_input.vue b/app/assets/javascripts/frequent_items/components/frequent_items_search_input.vue
index c69e1b792dc..40add09f25d 100644
--- a/app/assets/javascripts/frequent_items/components/frequent_items_search_input.vue
+++ b/app/assets/javascripts/frequent_items/components/frequent_items_search_input.vue
@@ -1,5 +1,5 @@
<script>
-import _ from 'underscore';
+import { debounce } from 'lodash';
import { mapActions } from 'vuex';
import Icon from '~/vue_shared/components/icon.vue';
import eventHub from '../event_hub';
@@ -21,7 +21,7 @@ export default {
},
},
watch: {
- searchQuery: _.debounce(function debounceSearchQuery() {
+ searchQuery: debounce(function debounceSearchQuery() {
this.setSearchQuery(this.searchQuery);
}, 500),
},
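One detail worth noting in the watcher above: `debounce` wraps a regular `function` rather than an arrow function so that Vue can bind `this` to the component instance when the debounced handler eventually fires. A minimal sketch of the same pattern:

// Sketch: debounced Vue watcher. The regular function keeps `this` bound to the component.
import { debounce } from 'lodash';

export default {
  data() {
    return { searchQuery: '' };
  },
  watch: {
    searchQuery: debounce(function debouncedSearch() {
      this.runSearch(this.searchQuery); // fires 500ms after the user stops typing
    }, 500),
  },
  methods: {
    runSearch(query) {
      console.log('searching for', query);
    },
  },
};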
diff --git a/app/assets/javascripts/frequent_items/utils.js b/app/assets/javascripts/frequent_items/utils.js
index 5188d6118ac..a992480c22b 100644
--- a/app/assets/javascripts/frequent_items/utils.js
+++ b/app/assets/javascripts/frequent_items/utils.js
@@ -1,4 +1,4 @@
-import _ from 'underscore';
+import { take } from 'lodash';
import { GlBreakpointInstance as bp } from '@gitlab/ui/dist/utils';
import sanitize from 'sanitize-html';
import { FREQUENT_ITEMS, HOUR_IN_MS } from './constants';
@@ -31,7 +31,7 @@ export const getTopFrequentItems = items => {
return 0;
});
- return _.first(frequentItems, frequentItemsCount);
+ return take(frequentItems, frequentItemsCount);
};
export const updateExistingFrequentItem = (frequentItem, item) => {
diff --git a/app/assets/javascripts/grafana_integration/components/grafana_integration.vue b/app/assets/javascripts/grafana_integration/components/grafana_integration.vue
index 41d83e45c52..22cafd50bf3 100644
--- a/app/assets/javascripts/grafana_integration/components/grafana_integration.vue
+++ b/app/assets/javascripts/grafana_integration/components/grafana_integration.vue
@@ -55,9 +55,9 @@ export default {
<template>
<section id="grafana" class="settings no-animate js-grafana-integration">
<div class="settings-header">
- <h4 class="js-section-header">
+ <h3 class="js-section-header h4">
{{ s__('GrafanaIntegration|Grafana Authentication') }}
- </h4>
+ </h3>
<gl-button class="js-settings-toggle">{{ __('Expand') }}</gl-button>
<p class="js-section-sub-header">
{{ s__('GrafanaIntegration|Embed Grafana charts in GitLab issues.') }}
diff --git a/app/assets/javascripts/groups/components/group_item.vue b/app/assets/javascripts/groups/components/group_item.vue
index b192fb78631..1b8c75202fb 100644
--- a/app/assets/javascripts/groups/components/group_item.vue
+++ b/app/assets/javascripts/groups/components/group_item.vue
@@ -95,7 +95,7 @@ export default {
<li :id="groupDomId" :class="rowClass" class="group-row" @click.stop="onClickRowGroup">
<div
:class="{ 'project-row-contents': !isGroup }"
- class="group-row-contents d-flex align-items-center py-2"
+ class="group-row-contents d-flex align-items-center py-2 pr-3"
>
<div class="folder-toggle-wrap append-right-4 d-flex align-items-center">
<item-caret :is-group-open="group.isOpen" />
@@ -103,8 +103,8 @@ export default {
</div>
<gl-loading-icon
v-if="group.isChildrenLoading"
- size="md"
- class="d-none d-sm-inline-flex flex-shrink-0 append-right-10"
+ size="lg"
+ class="d-none d-sm-inline-flex flex-shrink-0 append-right-8"
/>
<div
:class="{ 'd-sm-flex': !group.isChildrenLoading }"
diff --git a/app/assets/javascripts/groups/components/item_actions.vue b/app/assets/javascripts/groups/components/item_actions.vue
index 4b569970204..5454480e61a 100644
--- a/app/assets/javascripts/groups/components/item_actions.vue
+++ b/app/assets/javascripts/groups/components/item_actions.vue
@@ -68,7 +68,7 @@ export default {
data-placement="bottom"
class="edit-group btn btn-xs no-expand"
>
- <icon name="settings" class="position-top-0" />
+ <icon name="settings" class="position-top-0 align-middle" />
</a>
</div>
</template>
diff --git a/app/assets/javascripts/groups/constants.js b/app/assets/javascripts/groups/constants.js
index 9c246cf3ba6..e27265b7b4a 100644
--- a/app/assets/javascripts/groups/constants.js
+++ b/app/assets/javascripts/groups/constants.js
@@ -39,7 +39,9 @@ export const GROUP_VISIBILITY_TYPE = {
export const PROJECT_VISIBILITY_TYPE = {
public: __('Public - The project can be accessed without any authentication.'),
internal: __('Internal - The project can be accessed by any logged in user.'),
- private: __('Private - Project access must be granted explicitly to each user.'),
+ private: __(
+ 'Private - Project access must be granted explicitly to each user. If this project is part of a group, access will be granted to members of the group.',
+ ),
};
export const VISIBILITY_TYPE_ICON = {
diff --git a/app/assets/javascripts/ide/components/commit_sidebar/editor_header.vue b/app/assets/javascripts/ide/components/commit_sidebar/editor_header.vue
index 3398cd091ba..e618fb3daae 100644
--- a/app/assets/javascripts/ide/components/commit_sidebar/editor_header.vue
+++ b/app/assets/javascripts/ide/components/commit_sidebar/editor_header.vue
@@ -24,25 +24,19 @@ export default {
discardModalTitle() {
return sprintf(__('Discard changes to %{path}?'), { path: this.activeFile.path });
},
- actionButtonText() {
- return this.activeFile.staged ? __('Unstage') : __('Stage');
- },
isStaged() {
return !this.activeFile.changed && this.activeFile.staged;
},
},
methods: {
...mapActions(['stageChange', 'unstageChange', 'discardFileChanges']),
- actionButtonClicked() {
- if (this.activeFile.staged) {
- this.unstageChange(this.activeFile.path);
- } else {
- this.stageChange(this.activeFile.path);
- }
- },
showDiscardModal() {
this.$refs.discardModal.show();
},
+ discardChanges(path) {
+ this.unstageChange(path);
+ this.discardFileChanges(path);
+ },
},
};
</script>
@@ -65,19 +59,7 @@ export default {
class="btn btn-remove btn-inverted append-right-8"
@click="showDiscardModal"
>
- {{ __('Discard') }}
- </button>
- <button
- ref="actionButton"
- :class="{
- 'btn-success': !isStaged,
- 'btn-warning': isStaged,
- }"
- type="button"
- class="btn btn-inverted"
- @click="actionButtonClicked"
- >
- {{ actionButtonText }}
+ {{ __('Discard changes') }}
</button>
</div>
<gl-modal
@@ -87,7 +69,7 @@ export default {
:ok-title="__('Discard changes')"
:modal-id="discardModalId"
:title="discardModalTitle"
- @ok="discardFileChanges(activeFile.path)"
+ @ok="discardChanges(activeFile.path)"
>
{{ __("You will lose all changes you've made to this file. This action cannot be undone.") }}
</gl-modal>
diff --git a/app/assets/javascripts/ide/components/commit_sidebar/form.vue b/app/assets/javascripts/ide/components/commit_sidebar/form.vue
index 5ec3fc4041b..f6ca728defc 100644
--- a/app/assets/javascripts/ide/components/commit_sidebar/form.vue
+++ b/app/assets/javascripts/ide/components/commit_sidebar/form.vue
@@ -1,6 +1,6 @@
<script>
import { mapState, mapActions, mapGetters } from 'vuex';
-import { sprintf, __ } from '~/locale';
+import { n__, __ } from '~/locale';
import LoadingButton from '~/vue_shared/components/loading_button.vue';
import CommitMessageField from './message_field.vue';
import Actions from './actions.vue';
@@ -26,15 +26,7 @@ export default {
...mapGetters(['hasChanges']),
...mapGetters('commit', ['discardDraftButtonDisabled', 'preBuiltCommitMessage']),
overviewText() {
- return sprintf(
- __(
- '<strong>%{stagedFilesLength} staged</strong> and <strong>%{changedFilesLength} unstaged</strong> changes',
- ),
- {
- stagedFilesLength: this.stagedFiles.length,
- changedFilesLength: this.changedFiles.length,
- },
- );
+ return n__('%d changed file', '%d changed files', this.stagedFiles.length);
},
commitButtonText() {
return this.stagedFiles.length ? __('Commit') : __('Stage & Commit');
@@ -125,7 +117,7 @@ export default {
>
{{ __('Commit…') }}
</button>
- <p class="text-center" v-html="overviewText"></p>
+ <p class="text-center bold">{{ overviewText }}</p>
</div>
<form v-if="!isCompact" ref="formEl" @submit.prevent.stop="commitChanges">
<transition name="fade"> <success-message v-show="lastCommitMsg" /> </transition>
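The overview text above now goes through `n__`, so the count is pluralized by the translation layer and the string no longer needs `sprintf` with embedded markup rendered via `v-html`. Usage, for reference, with the singular/plural/count signature used throughout this diff:

// Sketch of n__ pluralization (import path as elsewhere in this changeset).
import { n__ } from '~/locale';

const one = n__('%d changed file', '%d changed files', 1);  // "1 changed file"
const many = n__('%d changed file', '%d changed files', 3); // "3 changed files"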
diff --git a/app/assets/javascripts/ide/components/commit_sidebar/list.vue b/app/assets/javascripts/ide/components/commit_sidebar/list.vue
index d9a385a9d31..a15e22d4742 100644
--- a/app/assets/javascripts/ide/components/commit_sidebar/list.vue
+++ b/app/assets/javascripts/ide/components/commit_sidebar/list.vue
@@ -17,10 +17,6 @@ export default {
tooltip,
},
props: {
- title: {
- type: String,
- required: true,
- },
fileList: {
type: Array,
required: true,
@@ -29,18 +25,6 @@ export default {
type: String,
required: true,
},
- action: {
- type: String,
- required: true,
- },
- actionBtnText: {
- type: String,
- required: true,
- },
- actionBtnIcon: {
- type: String,
- required: true,
- },
stagedList: {
type: Boolean,
required: false,
@@ -63,9 +47,9 @@ export default {
},
computed: {
titleText() {
- return sprintf(__('%{title} changes'), {
- title: this.title,
- });
+ if (!this.title) return __('Changes');
+
+ return sprintf(__('%{title} changes'), { title: this.title });
},
filesLength() {
return this.fileList.length;
@@ -73,17 +57,16 @@ export default {
},
methods: {
...mapActions(['stageAllChanges', 'unstageAllChanges', 'discardAllChanges']),
- actionBtnClicked() {
- this[this.action]();
-
- $(this.$refs.actionBtn).tooltip('hide');
- },
openDiscardModal() {
$('#discard-all-changes').modal('show');
},
+ unstageAndDiscardAllChanges() {
+ this.unstageAllChanges();
+ this.discardAllChanges();
+ },
},
discardModalText: __(
- "You will lose all the unstaged changes you've made in this project. This action cannot be undone.",
+ "You will lose all uncommitted changes you've made in this project. This action cannot be undone.",
),
};
</script>
@@ -96,24 +79,6 @@ export default {
<strong> {{ titleText }} </strong>
<div class="d-flex ml-auto">
<button
- ref="actionBtn"
- v-tooltip
- :title="actionBtnText"
- :aria-label="actionBtnText"
- :disabled="!filesLength"
- :class="{
- 'disabled-content': !filesLength,
- }"
- type="button"
- class="d-flex ide-staged-action-btn p-0 border-0 align-items-center"
- data-placement="bottom"
- data-container="body"
- data-boundary="viewport"
- @click="actionBtnClicked"
- >
- <icon :name="actionBtnIcon" :size="16" class="ml-auto mr-auto" />
- </button>
- <button
v-if="!stagedList"
v-tooltip
:title="__('Discard all changes')"
@@ -129,7 +94,7 @@ export default {
data-boundary="viewport"
@click="openDiscardModal"
>
- <icon :size="16" name="remove-all" class="ml-auto mr-auto" />
+ <icon :size="16" name="remove-all" class="ml-auto mr-auto position-top-0" />
</button>
</div>
</div>
@@ -151,9 +116,9 @@ export default {
v-if="!stagedList"
id="discard-all-changes"
:footer-primary-button-text="__('Discard all changes')"
- :header-title-text="__('Discard all unstaged changes?')"
+ :header-title-text="__('Discard all changes?')"
footer-primary-button-variant="danger"
- @submit="discardAllChanges"
+ @submit="unstageAndDiscardAllChanges"
>
{{ $options.discardModalText }}
</gl-modal>
diff --git a/app/assets/javascripts/ide/components/commit_sidebar/list_item.vue b/app/assets/javascripts/ide/components/commit_sidebar/list_item.vue
index 726e2b7e1fc..e49d96efe50 100644
--- a/app/assets/javascripts/ide/components/commit_sidebar/list_item.vue
+++ b/app/assets/javascripts/ide/components/commit_sidebar/list_item.vue
@@ -57,13 +57,7 @@ export default {
},
},
methods: {
- ...mapActions([
- 'discardFileChanges',
- 'updateViewer',
- 'openPendingTab',
- 'unstageChange',
- 'stageChange',
- ]),
+ ...mapActions(['discardFileChanges', 'updateViewer', 'openPendingTab']),
openFileInEditor() {
if (this.file.type === 'tree') return null;
@@ -76,13 +70,6 @@ export default {
}
});
},
- fileAction() {
- if (this.file.staged) {
- this.unstageChange(this.file.path);
- } else {
- this.stageChange(this.file.path);
- }
- },
},
};
</script>
@@ -97,7 +84,6 @@ export default {
}"
class="multi-file-commit-list-path w-100 border-0 ml-0 mr-0"
role="button"
- @dblclick="fileAction"
@click="openFileInEditor"
>
<span class="multi-file-commit-list-file-path d-flex align-items-center">
diff --git a/app/assets/javascripts/ide/components/file_row_extra.vue b/app/assets/javascripts/ide/components/file_row_extra.vue
index 3ef7d863bd5..32822a75772 100644
--- a/app/assets/javascripts/ide/components/file_row_extra.vue
+++ b/app/assets/javascripts/ide/components/file_row_extra.vue
@@ -1,6 +1,6 @@
<script>
import { mapGetters } from 'vuex';
-import { n__, __, sprintf } from '~/locale';
+import { n__ } from '~/locale';
import tooltip from '~/vue_shared/directives/tooltip';
import Icon from '~/vue_shared/components/icon.vue';
import ChangedFileIcon from '~/vue_shared/components/changed_file_icon.vue';
@@ -49,16 +49,7 @@ export default {
folderChangesTooltip() {
if (this.changesCount === 0) return undefined;
- if (this.folderUnstagedCount > 0 && this.folderStagedCount === 0) {
- return n__('%d unstaged change', '%d unstaged changes', this.folderUnstagedCount);
- } else if (this.folderUnstagedCount === 0 && this.folderStagedCount > 0) {
- return n__('%d staged change', '%d staged changes', this.folderStagedCount);
- }
-
- return sprintf(__('%{staged} staged and %{unstaged} unstaged changes'), {
- unstaged: this.folderUnstagedCount,
- staged: this.folderStagedCount,
- });
+ return n__('%d changed file', '%d changed files', this.changesCount);
},
showTreeChangesCount() {
return this.isTree && this.changesCount > 0 && !this.file.opened;
diff --git a/app/assets/javascripts/ide/components/nav_dropdown_button.vue b/app/assets/javascripts/ide/components/nav_dropdown_button.vue
index 4cd320d5d66..8dc22620eca 100644
--- a/app/assets/javascripts/ide/components/nav_dropdown_button.vue
+++ b/app/assets/javascripts/ide/components/nav_dropdown_button.vue
@@ -31,8 +31,8 @@ export default {
<template>
<dropdown-button>
- <span class="row">
- <span class="col-auto text-truncate" :class="{ 'col-7': showMergeRequests }">
+ <span class="row flex-nowrap">
+ <span class="col-auto flex-fill text-truncate">
<icon :size="16" :aria-label="__('Current Branch')" name="branch" /> {{ branchLabel }}
</span>
<span v-if="showMergeRequests" class="col-5 pl-0 text-truncate">
diff --git a/app/assets/javascripts/ide/components/new_dropdown/upload.vue b/app/assets/javascripts/ide/components/new_dropdown/upload.vue
index 0efb0012246..7261e0590c8 100644
--- a/app/assets/javascripts/ide/components/new_dropdown/upload.vue
+++ b/app/assets/javascripts/ide/components/new_dropdown/upload.vue
@@ -1,5 +1,6 @@
<script>
import ItemButton from './button.vue';
+import { isTextFile } from '~/ide/utils';
export default {
components: {
@@ -23,29 +24,11 @@ export default {
},
},
methods: {
- isText(content, fileType) {
- const knownBinaryFileTypes = ['image/'];
- const knownTextFileTypes = ['text/'];
- const isKnownBinaryFileType = knownBinaryFileTypes.find(type => fileType.includes(type));
- const isKnownTextFileType = knownTextFileTypes.find(type => fileType.includes(type));
- const asciiRegex = /^[ -~\t\n\r]+$/; // tests whether a string contains ascii characters only (ranges from space to tilde, tabs and new lines)
-
- if (isKnownBinaryFileType) {
- return false;
- }
-
- if (isKnownTextFileType) {
- return true;
- }
-
- // if it's not a known file type, determine the type by evaluating the file contents
- return asciiRegex.test(content);
- },
createFile(target, file) {
const { name } = file;
const encodedContent = target.result.split('base64,')[1];
const rawContent = encodedContent ? atob(encodedContent) : '';
- const isText = this.isText(rawContent, file.type);
+ const isText = isTextFile(rawContent, file.type, name);
const emitCreateEvent = content =>
this.$emit('create', {
diff --git a/app/assets/javascripts/ide/components/pipelines/list.vue b/app/assets/javascripts/ide/components/pipelines/list.vue
index b61d0a47795..3a63fc32639 100644
--- a/app/assets/javascripts/ide/components/pipelines/list.vue
+++ b/app/assets/javascripts/ide/components/pipelines/list.vue
@@ -59,7 +59,7 @@ export default {
<gl-loading-icon v-if="showLoadingIcon" :size="2" class="prepend-top-default" />
<template v-else-if="hasLoadedPipeline">
<header v-if="latestPipeline" class="ide-tree-header ide-pipeline-header">
- <ci-icon :status="latestPipeline.details.status" :size="24" />
+ <ci-icon :status="latestPipeline.details.status" :size="24" class="d-flex" />
<span class="prepend-left-8">
<strong> {{ __('Pipeline') }} </strong>
<a
@@ -76,6 +76,7 @@ export default {
:help-page-path="links.ciHelpPagePath"
:empty-state-svg-path="pipelinesEmptyStateSvgPath"
:can-set-ci="true"
+ class="mb-auto mt-auto"
/>
<div v-else-if="latestPipeline.yamlError" class="bs-callout bs-callout-danger">
<p class="append-bottom-0">{{ __('Found errors in your .gitlab-ci.yml:') }}</p>
diff --git a/app/assets/javascripts/ide/components/preview/clientside.vue b/app/assets/javascripts/ide/components/preview/clientside.vue
index beb179d0411..aa8d932da6e 100644
--- a/app/assets/javascripts/ide/components/preview/clientside.vue
+++ b/app/assets/javascripts/ide/components/preview/clientside.vue
@@ -21,7 +21,7 @@ export default {
};
},
computed: {
- ...mapState(['entries', 'promotionSvgPath', 'links']),
+ ...mapState(['entries', 'promotionSvgPath', 'links', 'codesandboxBundlerUrl']),
...mapGetters(['packageJson', 'currentProject']),
normalizedEntries() {
return Object.keys(this.entries).reduce((acc, path) => {
@@ -106,12 +106,7 @@ export default {
return this.loadFileContent(this.mainEntry)
.then(() => this.$nextTick())
.then(() => {
- this.initManager('#ide-preview', this.sandboxOpts, {
- fileResolver: {
- isFile: p => Promise.resolve(Boolean(this.entries[createPathWithExt(p)])),
- readFile: p => this.loadFileContent(createPathWithExt(p)).then(content => content),
- },
- });
+ this.initManager();
this.listener = listen(e => {
switch (e.type) {
@@ -139,8 +134,18 @@ export default {
this.manager.updatePreview(this.sandboxOpts);
}, 250);
},
- initManager(el, opts, resolver) {
- this.manager = new Manager(el, opts, resolver);
+ initManager() {
+ const { codesandboxBundlerUrl: bundlerURL } = this;
+
+ const settings = {
+ fileResolver: {
+ isFile: p => Promise.resolve(Boolean(this.entries[createPathWithExt(p)])),
+ readFile: p => this.loadFileContent(createPathWithExt(p)).then(content => content),
+ },
+ ...(bundlerURL ? { bundlerURL } : {}),
+ };
+
+ this.manager = new Manager('#ide-preview', this.sandboxOpts, settings);
},
},
};
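The `initManager` rewrite above only passes `bundlerURL` to the CodeSandbox `Manager` when a custom bundler URL is configured (it reaches the store from the `data-codesandbox-bundler-url` attribute, as the `ide/index.js` hunk below shows). The conditional-spread idiom used to build that settings object:

// Sketch: include a key in an options object only when a value is present.
function buildSandboxSettings({ fileResolver, bundlerURL }) {
  return {
    fileResolver,
    ...(bundlerURL ? { bundlerURL } : {}), // omit the key entirely when unset
  };
}

buildSandboxSettings({ fileResolver: {}, bundlerURL: '' });
// => { fileResolver: {} }
buildSandboxSettings({ fileResolver: {}, bundlerURL: 'https://sandbox.example.com' });
// => { fileResolver: {}, bundlerURL: 'https://sandbox.example.com' }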
diff --git a/app/assets/javascripts/ide/components/repo_commit_section.vue b/app/assets/javascripts/ide/components/repo_commit_section.vue
index 62fb0b03975..b8dca2709c8 100644
--- a/app/assets/javascripts/ide/components/repo_commit_section.vue
+++ b/app/assets/javascripts/ide/components/repo_commit_section.vue
@@ -86,28 +86,12 @@ export default {
</deprecated-modal>
<template v-if="showStageUnstageArea">
<commit-files-list
- :title="__('Unstaged')"
- :key-prefix="$options.stageKeys.unstaged"
- :file-list="changedFiles"
- :action-btn-text="__('Stage all changes')"
- :active-file-key="activeFileKey"
- :empty-state-text="__('There are no unstaged changes')"
- action="stageAllChanges"
- action-btn-icon="stage-all"
- class="is-first"
- icon-name="unstaged"
- />
- <commit-files-list
- :title="__('Staged')"
:key-prefix="$options.stageKeys.staged"
:file-list="stagedFiles"
- :action-btn-text="__('Unstage all changes')"
- :staged-list="true"
:active-file-key="activeFileKey"
- :empty-state-text="__('There are no staged changes')"
- action="unstageAllChanges"
- action-btn-icon="unstage-all"
- icon-name="staged"
+ :empty-state-text="__('There are no changes')"
+ class="is-first"
+ icon-name="unstaged"
/>
</template>
<empty-state v-if="unusedSeal" />
diff --git a/app/assets/javascripts/ide/components/repo_editor.vue b/app/assets/javascripts/ide/components/repo_editor.vue
index bfb760f3579..c93a95e490a 100644
--- a/app/assets/javascripts/ide/components/repo_editor.vue
+++ b/app/assets/javascripts/ide/components/repo_editor.vue
@@ -308,6 +308,7 @@ export default {
'is-added': file.tempFile,
}"
class="multi-file-editor-holder"
+ data-qa-selector="editor_container"
@focusout="triggerFilesChange"
></div>
<content-viewer
diff --git a/app/assets/javascripts/ide/index.js b/app/assets/javascripts/ide/index.js
index a3450522697..9e9d9df8f82 100644
--- a/app/assets/javascripts/ide/index.js
+++ b/app/assets/javascripts/ide/index.js
@@ -53,6 +53,7 @@ export function initIde(el, options = {}) {
clientsidePreviewEnabled: parseBoolean(el.dataset.clientsidePreviewEnabled),
renderWhitespaceInCode: parseBoolean(el.dataset.renderWhitespaceInCode),
editorTheme: window.gon?.user_color_scheme || DEFAULT_THEME,
+ codesandboxBundlerUrl: el.dataset.codesandboxBundlerUrl,
});
},
methods: {
diff --git a/app/assets/javascripts/ide/lib/themes/white.js b/app/assets/javascripts/ide/lib/themes/white.js
index 273bc783fc6..f06458d8a16 100644
--- a/app/assets/javascripts/ide/lib/themes/white.js
+++ b/app/assets/javascripts/ide/lib/themes/white.js
@@ -1,12 +1,146 @@
+const BOLD = 'bold';
+const ITALIC = 'italic';
+const BOLD_ITALIC = 'bold italic';
+const UNDERLINE = 'underline';
+
export default {
base: 'vs',
- inherit: true,
- rules: [],
+ inherit: false,
+ rules: [
+ // Standard theme defaults and overrides based on VS theme
+ // https://github.com/Microsoft/vscode/blob/master/src/vs/editor/standalone/common/themes.ts
+ // License: MIT (https://github.com/microsoft/vscode/blob/master/LICENSE.txt)
+ { token: '', foreground: '2e2e2e', background: 'ffffff' },
+ { token: 'keyword.css', fontStyle: BOLD, foreground: '999999' },
+ { token: 'keyword.less', fontStyle: BOLD, foreground: '999999' },
+ { token: 'keyword.scss', fontStyle: BOLD, foreground: '999999' },
+ { token: 'keyword.md', fontStyle: BOLD, foreground: '800080' },
+
+ { token: 'variable', foreground: '008080' },
+ { token: 'variable.md', foreground: 'dd1144' },
+ { token: 'variable.predefined', foreground: '008080' },
+ { token: 'number', foreground: '009999' },
+ { token: 'number.hex', foreground: '3030c0' },
+
+ { token: 'type.identifier.ts', foreground: '445588', fontStyle: BOLD },
+ { token: 'type.identifier.swift', foreground: '445588', fontStyle: BOLD },
+ { token: 'type.identifier.kt', foreground: '445588', fontStyle: BOLD },
+ { token: 'type.identifier.perl', foreground: '2e2e2e', fontStyle: BOLD },
+
+ { token: 'tag', foreground: '000080' },
+ { token: 'tag.class', foreground: '445588', fontStyle: BOLD },
+ { token: 'tag.css', foreground: '445588', fontStyle: BOLD },
+ { token: 'tag.less', foreground: '445588', fontStyle: BOLD },
+ { token: 'tag.scss', foreground: '445588', fontStyle: BOLD },
+ { token: 'tag.id.jade', foreground: '445588' },
+ { token: 'tag.class.jade', foreground: '445588' },
+ { token: 'meta.scss', foreground: '800000' },
+ { token: 'metatag', foreground: 'e00000' },
+ { token: 'metatag.content.html', foreground: 'e00000' },
+ { token: 'metatag.html', foreground: '808080' },
+ { token: 'metatag.xml', foreground: '808080' },
+ { token: 'metatag.php', fontStyle: BOLD },
+
+ { token: 'key', foreground: '863b00' },
+ { token: 'key.ini', foreground: '008080' },
+ { token: 'string.key.json', foreground: '2e2e2e' },
+ { token: 'string.value.json', foreground: 'dd1144' },
+ { token: 'string.link.md', foreground: 'aa0000' },
+
+ { token: 'attribute.name', foreground: '008080' },
+ { token: 'attribute.name.css', foreground: '2e2e2e' },
+ { token: 'attribute.name.json', foreground: '2e2e2e' },
+ { token: 'attribute.name.scss', foreground: '2e2e2e' },
+ { token: 'attribute.name.less', foreground: '2e2e2e' },
+ { token: 'attribute.value', foreground: 'dd1144' },
+ { token: 'attribute.value.css', foreground: '0086b3' },
+ { token: 'attribute.value.hex', foreground: '0086b3' },
+ { token: 'attribute.value.number', foreground: '009999' },
+ { token: 'attribute.value.unit', foreground: '009999' },
+ { token: 'attribute.value.xml', foreground: 'dd1144' },
+ { token: 'attribute.value.html', foreground: 'dd1144' },
+ { token: 'attribute.value.md', foreground: 'aa0000' },
+
+ { token: 'string', foreground: 'dd1144' },
+ { token: 'string.target', foreground: 'aa0000' },
+ { token: 'string.sql', foreground: 'dd1144' },
+
+ { token: 'keyword.flow', foreground: '2e2e2e', fontStyle: BOLD },
+ { token: 'keyword.st', foreground: '445588', fontStyle: BOLD },
+ { token: 'variable.st', foreground: '445588', fontStyle: BOLD },
+ { token: 'type.st', foreground: '445588', fontStyle: BOLD },
+
+ { token: 'operator.scss', foreground: '666666' },
+ { token: 'operator.sql', foreground: '2e2e2e', fontStyle: BOLD },
+ { token: 'operator.swift', foreground: '666666' },
+ { token: 'predefined.sql', foreground: '2e2e2e', fontStyle: BOLD },
+
+ // GitHub theme based on https://github.com/brijeshb42/monaco-themes/blob/master/themes/GitHub.json
+ // Customized for Web IDE
+ // License: MIT (https://github.com/brijeshb42/monaco-themes/blob/master/LICENSE)
+ { token: 'comment', foreground: '999988', fontStyle: ITALIC },
+ { token: 'comment.block.preprocessor', foreground: '999999', fontStyle: BOLD },
+ { token: 'comment.documentation', foreground: '999999', fontStyle: BOLD_ITALIC },
+ { token: 'comment.block.documentation', foreground: '999999', fontStyle: BOLD_ITALIC },
+ { token: 'invalid.illegal', foreground: 'aa0000', background: 'e3d2d2' },
+ { token: 'keyword', fontStyle: BOLD, foreground: '2e2e2e' },
+ { token: 'storage', fontStyle: BOLD },
+ { token: 'keyword.operator', fontStyle: BOLD },
+ { token: 'constant.language', fontStyle: BOLD },
+ { token: 'support.constant', fontStyle: BOLD },
+ { token: 'storage.type', foreground: '445588', fontStyle: BOLD },
+ { token: 'support.type', foreground: '445588', fontStyle: BOLD },
+ { token: 'entity.other.attribute-name', foreground: '008080' },
+ { token: 'variable.other', foreground: '0086b3' },
+ { token: 'variable.language', foreground: '999999' },
+ { token: 'entity.name.type', foreground: '445588', fontStyle: BOLD },
+ { token: 'entity.other.inherited-class', foreground: '445588', fontStyle: BOLD },
+ { token: 'support.class', foreground: '445588', fontStyle: BOLD },
+ { token: 'variable.other.constant', foreground: '008080' },
+ { token: 'constant.character.entity', foreground: '800080' },
+ { token: 'entity.name.exception', foreground: 'aa0000' },
+ { token: 'entity.name.function', foreground: 'aa0000' },
+ { token: 'support.function', foreground: 'aa0000' },
+ { token: 'keyword.other.name-of-parameter', foreground: 'aa0000' },
+ { token: 'entity.name.section', foreground: '666666' },
+ { token: 'entity.name.tag', foreground: '000080' },
+ { token: 'variable.parameter', foreground: '008080' },
+ { token: 'support.variable', foreground: '008080' },
+ { token: 'constant.numeric', foreground: '009999' },
+ { token: 'constant.other', foreground: '009999' },
+ { token: 'constant.character', foreground: 'dd1144' },
+ { token: 'string.regexp', foreground: '009926' },
+ { token: 'constant.other.symbol', foreground: '990073' },
+ { token: 'punctuation', fontStyle: BOLD },
+ { token: 'markup.deleted', foreground: '000000', background: 'ffdddd' },
+ { token: 'markup.italic', fontStyle: ITALIC },
+ { token: 'markup.error', foreground: 'aa0000' },
+ { token: 'markup.heading.1', foreground: '999999' },
+ { token: 'markup.inserted', foreground: '000000', background: 'ddffdd' },
+ { token: 'markup.output', foreground: '808080' },
+ { token: 'markup.raw', foreground: '808080' },
+ { token: 'markup.prompt', foreground: '666666' },
+ { token: 'markup.bold', fontStyle: BOLD },
+ { token: 'markup.heading', foreground: '999999' },
+ { token: 'markup.traceback', foreground: 'aa0000' },
+ { token: 'markup.underline', fontStyle: UNDERLINE },
+ { token: 'meta.diff.range', foreground: '999999', background: 'eaf2f5' },
+ { token: 'meta.diff.index', foreground: '999999', background: 'eaf2f5' },
+ { token: 'meta.separator', foreground: '999999', background: 'eaf2f5' },
+ { token: 'meta.diff.header.from-file', foreground: '999999', background: 'ffdddd' },
+ { token: 'meta.diff.header.to-file', foreground: '999999', background: 'ddffdd' },
+ { token: 'meta.link', foreground: '4183c4' },
+ ],
colors: {
- 'editorLineNumber.foreground': '#CCCCCC',
- 'diffEditor.insertedTextBackground': '#A0F5B420',
- 'diffEditor.removedTextBackground': '#f9d7dc20',
+ 'editor.foreground': '#2e2e2e',
'editor.selectionBackground': '#aad6f8',
+ 'editor.lineHighlightBackground': '#fffeeb',
+ 'editorCursor.foreground': '#666666',
+ 'editorWhitespace.foreground': '#bbbbbb',
+
+ 'editorLineNumber.foreground': '#cccccc',
+ 'diffEditor.insertedTextBackground': '#a0f5b420',
+ 'diffEditor.removedTextBackground': '#f9d7dc20',
'editorIndentGuide.activeBackground': '#cccccc',
},
};
diff --git a/app/assets/javascripts/ide/stores/state.js b/app/assets/javascripts/ide/stores/state.js
index a714562c5b2..0fd6a448283 100644
--- a/app/assets/javascripts/ide/stores/state.js
+++ b/app/assets/javascripts/ide/stores/state.js
@@ -34,4 +34,5 @@ export default () => ({
clientsidePreviewEnabled: false,
renderWhitespaceInCode: false,
editorTheme: DEFAULT_THEME,
+ codesandboxBundlerUrl: null,
});
diff --git a/app/assets/javascripts/ide/utils.js b/app/assets/javascripts/ide/utils.js
index ae579fef25f..64ac539a4ff 100644
--- a/app/assets/javascripts/ide/utils.js
+++ b/app/assets/javascripts/ide/utils.js
@@ -1,4 +1,57 @@
import { commitItemIconMap } from './constants';
+import { languages } from 'monaco-editor';
+import { flatten } from 'lodash';
+
+const toLowerCase = x => x.toLowerCase();
+
+const monacoLanguages = languages.getLanguages();
+const monacoExtensions = new Set(
+ flatten(monacoLanguages.map(lang => lang.extensions?.map(toLowerCase) || [])),
+);
+const monacoMimetypes = new Set(
+ flatten(monacoLanguages.map(lang => lang.mimetypes?.map(toLowerCase) || [])),
+);
+const monacoFilenames = new Set(
+ flatten(monacoLanguages.map(lang => lang.filenames?.map(toLowerCase) || [])),
+);
+
+const KNOWN_TYPES = [
+ {
+ isText: false,
+ isMatch(mimeType) {
+ return mimeType.toLowerCase().includes('image/');
+ },
+ },
+ {
+ isText: true,
+ isMatch(mimeType) {
+ return mimeType.toLowerCase().includes('text/');
+ },
+ },
+ {
+ isText: true,
+ isMatch(mimeType, fileName) {
+ const fileExtension = fileName.includes('.') ? `.${fileName.split('.').pop()}` : '';
+
+ return (
+ monacoExtensions.has(fileExtension.toLowerCase()) ||
+ monacoMimetypes.has(mimeType.toLowerCase()) ||
+ monacoFilenames.has(fileName.toLowerCase())
+ );
+ },
+ },
+];
+
+export function isTextFile(content, mimeType, fileName) {
+ const knownType = KNOWN_TYPES.find(type => type.isMatch(mimeType, fileName));
+
+ if (knownType) return knownType.isText;
+
+ // Does the string contain only ASCII characters (space to tilde, tabs, and newlines)?
+ const asciiRegex = /^[ -~\t\n\r]+$/;
+ // for unknown types, determine the type by evaluating the file contents
+ return asciiRegex.test(content);
+}
export const getCommitIconMap = file => {
if (file.deleted) {
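A rough usage sketch of the `isTextFile` helper added above (not part of the patch); the classifications assume Monaco registers `.js` among its language extensions:

  // A known Monaco extension wins even with a generic MIME type
  isTextFile('console.log("hi");', 'application/octet-stream', 'index.js'); // => true
  // Any text/* MIME type is treated as text
  isTextFile('plain notes', 'text/plain', 'notes.txt'); // => true
  // Unknown type with non-printable content falls through to the ASCII heuristic
  isTextFile('\u0000\u0001\u0002', 'application/octet-stream', 'data.bin'); // => false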
diff --git a/app/assets/javascripts/import_projects/components/import_projects_table.vue b/app/assets/javascripts/import_projects/components/import_projects_table.vue
index e5ac3cbafe5..849bda28d03 100644
--- a/app/assets/javascripts/import_projects/components/import_projects_table.vue
+++ b/app/assets/javascripts/import_projects/components/import_projects_table.vue
@@ -1,5 +1,5 @@
<script>
-import _ from 'underscore';
+import { throttle } from 'lodash';
import { mapActions, mapState, mapGetters } from 'vuex';
import { GlLoadingIcon } from '@gitlab/ui';
import LoadingButton from '~/vue_shared/components/loading_button.vue';
@@ -67,7 +67,7 @@ export default {
this.setFilter(target.value);
},
- throttledFetchRepos: _.throttle(function fetch() {
+ throttledFetchRepos: throttle(function fetch() {
eventHub.$off('importAll');
this.fetchRepos();
}, reposFetchThrottleDelay),
diff --git a/app/assets/javascripts/issuable_suggestions/components/app.vue b/app/assets/javascripts/issuable_suggestions/components/app.vue
index d435460e38f..67d10b797fb 100644
--- a/app/assets/javascripts/issuable_suggestions/components/app.vue
+++ b/app/assets/javascripts/issuable_suggestions/components/app.vue
@@ -1,5 +1,4 @@
<script>
-import _ from 'underscore';
import { GlTooltipDirective } from '@gitlab/ui';
import { __ } from '~/locale';
import Icon from '~/vue_shared/components/icon.vue';
@@ -48,7 +47,7 @@ export default {
},
computed: {
isSearchEmpty() {
- return _.isEmpty(this.search);
+ return !this.search.length;
},
showSuggestions() {
return !this.isSearchEmpty && this.issues.length && !this.loading;
diff --git a/app/assets/javascripts/issuable_suggestions/components/item.vue b/app/assets/javascripts/issuable_suggestions/components/item.vue
index 66a4cc44d51..9f3508fb937 100644
--- a/app/assets/javascripts/issuable_suggestions/components/item.vue
+++ b/app/assets/javascripts/issuable_suggestions/components/item.vue
@@ -1,6 +1,6 @@
<script>
/* eslint-disable @gitlab/vue-i18n/no-bare-strings */
-import _ from 'underscore';
+import { uniqueId } from 'lodash';
import { GlLink, GlTooltip, GlTooltipDirective } from '@gitlab/ui';
import { __ } from '~/locale';
import Icon from '~/vue_shared/components/icon.vue';
@@ -36,13 +36,13 @@ export default {
counts() {
return [
{
- id: _.uniqueId(),
+ id: uniqueId(),
icon: 'thumb-up',
tooltipTitle: __('Upvotes'),
count: this.suggestion.upvotes,
},
{
- id: _.uniqueId(),
+ id: uniqueId(),
icon: 'comment',
tooltipTitle: __('Comments'),
count: this.suggestion.userNotesCount,
diff --git a/app/assets/javascripts/issuables_list/components/issuables_list_app.vue b/app/assets/javascripts/issuables_list/components/issuables_list_app.vue
index 6b6a8bd4068..640827fe564 100644
--- a/app/assets/javascripts/issuables_list/components/issuables_list_app.vue
+++ b/app/assets/javascripts/issuables_list/components/issuables_list_app.vue
@@ -1,9 +1,14 @@
<script>
-import { omit } from 'underscore';
+import { toNumber, omit } from 'lodash';
import { GlEmptyState, GlPagination, GlSkeletonLoading } from '@gitlab/ui';
import flash from '~/flash';
import axios from '~/lib/utils/axios_utils';
-import { scrollToElement, urlParamsToObject } from '~/lib/utils/common_utils';
+import {
+ scrollToElement,
+ urlParamsToObject,
+ historyPushState,
+ getParameterByName,
+} from '~/lib/utils/common_utils';
import { __ } from '~/locale';
import initManualOrdering from '~/manual_ordering';
import Issuable from './issuable.vue';
@@ -14,6 +19,7 @@ import {
PAGE_SIZE_MANUAL,
LOADING_LIST_ITEMS_LENGTH,
} from '../constants';
+import { setUrlParams } from '~/lib/utils/url_utility';
import issueableEventHub from '../eventhub';
export default {
@@ -56,7 +62,10 @@ export default {
isBulkEditing: false,
issuables: [],
loading: false,
- page: 1,
+ page:
+ getParameterByName('page', window.location.href) !== null
+ ? toNumber(getParameterByName('page'))
+ : 1,
selection: {},
totalItems: 0,
};
@@ -189,6 +198,12 @@ export default {
if (newPage === this.page) return;
scrollToElement('#content-body');
+
+ // NOTE: This allows for the params to be updated on pagination
+ historyPushState(
+ setUrlParams({ ...this.filters, page: newPage }, window.location.href, true),
+ );
+
this.fetchIssuables(newPage);
},
onSelectAll() {
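A rough sketch of the pagination URL update above (not part of the patch), assuming `setUrlParams` replaces the existing query string when its third argument is `true`:

  // With filters { state: 'opened' } and newPage = 3, the handler pushes roughly:
  // setUrlParams({ state: 'opened', page: 3 }, window.location.href, true)
  // => '/group/project/-/issues?state=opened&page=3'
  // and `page` is read back on load via getParameterByName('page', window.location.href).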
diff --git a/app/assets/javascripts/issue_show/stores/index.js b/app/assets/javascripts/issue_show/stores/index.js
index 688ba7b268d..0cd094243b9 100644
--- a/app/assets/javascripts/issue_show/stores/index.js
+++ b/app/assets/javascripts/issue_show/stores/index.js
@@ -1,4 +1,3 @@
-import _ from 'underscore';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import updateDescription from '../utils/update_description';
@@ -26,7 +25,7 @@ export default class Store {
'.detail-page-description.content-block',
);
const details =
- !_.isNull(descriptionSection) && descriptionSection.getElementsByTagName('details');
+ descriptionSection != null && descriptionSection.getElementsByTagName('details');
this.state.descriptionHtml = updateDescription(data.description, details);
this.state.titleHtml = data.title;
diff --git a/app/assets/javascripts/issue_show/utils/update_description.js b/app/assets/javascripts/issue_show/utils/update_description.js
index 315f6c23b02..c5811290e61 100644
--- a/app/assets/javascripts/issue_show/utils/update_description.js
+++ b/app/assets/javascripts/issue_show/utils/update_description.js
@@ -1,5 +1,3 @@
-import _ from 'underscore';
-
/**
* Function that replaces the open attribute for the <details> element.
*
@@ -10,7 +8,7 @@ import _ from 'underscore';
const updateDescription = (descriptionHtml = '', details) => {
let detailNodes = details;
- if (_.isEmpty(details)) {
+ if (!details.length) {
detailNodes = [];
}
diff --git a/app/assets/javascripts/labels_select.js b/app/assets/javascripts/labels_select.js
index f57febbda37..7107c970457 100644
--- a/app/assets/javascripts/labels_select.js
+++ b/app/assets/javascripts/labels_select.js
@@ -479,21 +479,47 @@ export default class LabelsSelect {
// concatenation
// see: http://2ality.com/2016/05/template-literal-whitespace.html#joining-arrays
+ const linkOpenTag =
+ '<a href="<%- issueUpdateURL.slice(0, issueUpdateURL.lastIndexOf("/")) %>?label_name[]=<%- encodeURIComponent(label.title) %>" class="gl-link gl-label-link has-tooltip" <%= linkAttrs %> title="<%= tooltipTitleTemplate({ label, isScopedLabel, enableScopedLabels, escapeStr }) %>">';
+ const spanOpenTag =
+ '<span class="gl-label-text" style="background-color: <%= escapeStr(label.color) %>; color: <%= escapeStr(label.text_color) %>;">';
const labelTemplate = _.template(
[
- '<a href="<%- issueUpdateURL.slice(0, issueUpdateURL.lastIndexOf("/")) %>?label_name[]=<%- encodeURIComponent(label.title) %>">',
- '<span class="badge label has-tooltip color-label" <%= linkAttrs %> title="<%= tooltipTitleTemplate({ label, isScopedLabel, enableScopedLabels, escapeStr }) %>" style="background-color: <%= escapeStr(label.color) %>; color: <%= escapeStr(label.text_color) %>;">',
+ '<span class="gl-label">',
+ linkOpenTag,
+ spanOpenTag,
'<%- label.title %>',
'</span>',
'</a>',
+ '</span>',
].join(''),
);
+ const rightLabelTextColor = ({ label, escapeStr }) => {
+ return escapeStr(label.text_color === '#FFFFFF' ? label.color : label.text_color);
+ };
+
const infoIconTemplate = _.template(
[
- '<a href="<%= scopedLabelsDocumentationLink %>" class="label scoped-label" target="_blank" rel="noopener">',
- '<i class="fa fa-question-circle" style="background-color: <%= escapeStr(label.color) %>; color: <%= escapeStr(label.text_color) %>;"></i>',
+ '<a href="<%= scopedLabelsDocumentationLink %>" class="gl-link gl-label-icon" target="_blank" rel="noopener">',
+ '<i class="fa fa-question-circle"></i>',
+ '</a>',
+ ].join(''),
+ );
+
+ const scopedLabelTemplate = _.template(
+ [
+ '<span class="gl-label gl-label-scoped" style="color: <%= escapeStr(label.color) %>;">',
+ linkOpenTag,
+ spanOpenTag,
+ '<%- label.title.slice(0, label.title.lastIndexOf("::")) %>',
+ '</span>',
+ '<span class="gl-label-text" style="color: <%= rightLabelTextColor({ label, escapeStr }) %>;">',
+ '<%- label.title.slice(label.title.lastIndexOf("::") + 2) %>',
+ '</span>',
'</a>',
+ '<%= infoIconTemplate({ label, scopedLabelsDocumentationLink, escapeStr }) %>',
+ '</span>',
].join(''),
);
@@ -514,8 +540,7 @@ export default class LabelsSelect {
'<% _.each(labels, function(label){ %>',
'<% if (isScopedLabel(label) && enableScopedLabels) { %>',
'<span class="d-inline-block position-relative scoped-label-wrapper">',
- '<%= labelTemplate({ label, issueUpdateURL, isScopedLabel, enableScopedLabels, tooltipTitleTemplate, escapeStr, linkAttrs: \'data-html="true"\' }) %>',
- '<%= infoIconTemplate({ label, scopedLabelsDocumentationLink, escapeStr }) %>',
+ '<%= scopedLabelTemplate({ label, issueUpdateURL, isScopedLabel, enableScopedLabels, rightLabelTextColor, infoIconTemplate, scopedLabelsDocumentationLink, tooltipTitleTemplate, escapeStr, linkAttrs: \'data-html="true"\' }) %>',
'</span>',
'<% } else { %>',
'<%= labelTemplate({ label, issueUpdateURL, isScopedLabel, enableScopedLabels, tooltipTitleTemplate, escapeStr, linkAttrs: "" }) %>',
@@ -527,7 +552,9 @@ export default class LabelsSelect {
return tpl({
...tplData,
labelTemplate,
+ rightLabelTextColor,
infoIconTemplate,
+ scopedLabelTemplate,
tooltipTitleTemplate,
isScopedLabel,
escapeStr: _.escape,
diff --git a/app/assets/javascripts/lib/utils/common_utils.js b/app/assets/javascripts/lib/utils/common_utils.js
index dd5a52fe1ce..abecfba5718 100644
--- a/app/assets/javascripts/lib/utils/common_utils.js
+++ b/app/assets/javascripts/lib/utils/common_utils.js
@@ -8,6 +8,7 @@ import axios from './axios_utils';
import { getLocationHash } from './url_utility';
import { convertToCamelCase, convertToSnakeCase } from './text_utility';
import { isObject } from './type_utility';
+import { isFunction } from 'lodash';
export const getPagePath = (index = 0) => {
const page = $('body').attr('data-page') || '';
@@ -667,30 +668,34 @@ export const spriteIcon = (icon, className = '') => {
};
/**
- * This method takes in object with snake_case property names
- * and returns a new object with camelCase property names
- *
- * Reasoning for this method is to ensure consistent property
- * naming conventions across JS code.
+ * @callback ConversionFunction
+ * @param {string} prop
+ */
+
+/**
+ * This function takes a conversion function as the first parameter
+ * and applies this function to each prop in the provided object.
*
* This method also supports additional params in `options` object
*
+ * @param {ConversionFunction} conversionFunction - Function to apply to each prop of the object.
* @param {Object} obj - Object to be converted.
* @param {Object} options - Object containing additional options.
+ * @param {boolean} options.deep - Flag to allow deep object conversion
- * @param {Array[]} dropKeys - List of properties to discard while building new object
- * @param {Array[]} ignoreKeyNames - List of properties to leave intact (as snake_case) while building new object
+ * @param {Array[]} options.dropKeys - List of properties to discard while building new object
+ * @param {Array[]} options.ignoreKeyNames - List of properties to leave intact (as snake_case) while building new object
*/
-export const convertObjectPropsToCamelCase = (obj = {}, options = {}) => {
- if (obj === null) {
+export const convertObjectProps = (conversionFunction, obj = {}, options = {}) => {
+ if (!isFunction(conversionFunction) || obj === null) {
return {};
}
- const initial = Array.isArray(obj) ? [] : {};
const { deep = false, dropKeys = [], ignoreKeyNames = [] } = options;
+ const isObjParameterArray = Array.isArray(obj);
+ const initialValue = isObjParameterArray ? [] : {};
+
return Object.keys(obj).reduce((acc, prop) => {
- const result = acc;
const val = obj[prop];
// Drop properties from new object if
@@ -702,34 +707,54 @@ export const convertObjectPropsToCamelCase = (obj = {}, options = {}) => {
// Skip converting properties in new object
// if there are any mentioned in options
if (ignoreKeyNames.indexOf(prop) > -1) {
- result[prop] = obj[prop];
+ acc[prop] = val;
return acc;
}
if (deep && (isObject(val) || Array.isArray(val))) {
- result[convertToCamelCase(prop)] = convertObjectPropsToCamelCase(val, options);
+ if (isObjParameterArray) {
+ acc[prop] = convertObjectProps(conversionFunction, val, options);
+ } else {
+ acc[conversionFunction(prop)] = convertObjectProps(conversionFunction, val, options);
+ }
} else {
- result[convertToCamelCase(prop)] = obj[prop];
+ acc[conversionFunction(prop)] = val;
}
return acc;
- }, initial);
+ }, initialValue);
};
/**
+ * This method takes in object with snake_case property names
+ * and returns a new object with camelCase property names
+ *
+ * Reasoning for this method is to ensure consistent property
+ * naming conventions across JS code.
+ *
+ * This method also supports additional params in `options` object
+ *
+ * @param {Object} obj - Object to be converted.
+ * @param {Object} options - Object containing additional options.
+ * @param {boolean} options.deep - Flag to allow deep object conversion
+ * @param {Array[]} options.dropKeys - List of properties to discard while building new object
+ * @param {Array[]} options.ignoreKeyNames - List of properties to leave intact (as snake_case) while building new object
+ */
+export const convertObjectPropsToCamelCase = (obj = {}, options = {}) =>
+ convertObjectProps(convertToCamelCase, obj, options);
+
+/**
* Converts all the object keys to snake case
*
- * @param {Object} obj Object to transform
- * @returns {Object}
+ * This method also supports additional params in `options` object
+ *
+ * @param {Object} obj - Object to be converted.
+ * @param {Object} options - Object containing additional options.
+ * @param {boolean} options.deep - Flag to allow deep object conversion
+ * @param {Array[]} options.dropKeys - List of properties to discard while building new object
+ * @param {Array[]} options.ignoreKeyNames - List of properties to leave intact (as snake_case) while building new object
*/
-// Follow up to add additional options param:
-// https://gitlab.com/gitlab-org/gitlab/issues/39173
-export const convertObjectPropsToSnakeCase = (obj = {}) =>
- obj
- ? Object.entries(obj).reduce(
- (acc, [key, value]) => ({ ...acc, [convertToSnakeCase(key)]: value }),
- {},
- )
- : {};
+export const convertObjectPropsToSnakeCase = (obj = {}, options = {}) =>
+ convertObjectProps(convertToSnakeCase, obj, options);
export const imagePath = imgUrl =>
`${gon.asset_host || ''}${gon.relative_url_root || ''}/assets/${imgUrl}`;
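A rough sketch of the generalized conversion helpers above (not part of the patch):

  convertObjectPropsToCamelCase({ merge_requests: [{ target_branch: 'master' }] }, { deep: true });
  // => { mergeRequests: [{ targetBranch: 'master' }] }
  convertObjectPropsToSnakeCase({ targetBranch: 'master', webUrl: '/x' }, { ignoreKeyNames: ['webUrl'] });
  // => { target_branch: 'master', webUrl: '/x' }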
diff --git a/app/assets/javascripts/lib/utils/datetime_utility.js b/app/assets/javascripts/lib/utils/datetime_utility.js
index fd9a13be18b..f6077673ad5 100644
--- a/app/assets/javascripts/lib/utils/datetime_utility.js
+++ b/app/assets/javascripts/lib/utils/datetime_utility.js
@@ -1,5 +1,5 @@
import $ from 'jquery';
-import _ from 'underscore';
+import { isString, mapValues, isNumber, reduce } from 'lodash';
import * as timeago from 'timeago.js';
import dateFormat from 'dateformat';
import { languageCode, s__, __, n__ } from '../../locale';
@@ -79,7 +79,7 @@ export const getDayName = date =>
* @returns {String}
*/
export const formatDate = (datetime, format = 'mmm d, yyyy h:MMtt Z') => {
- if (_.isString(datetime) && datetime.match(/\d+-\d+\d+ /)) {
+ if (isString(datetime) && datetime.match(/\d+-\d+\d+ /)) {
throw new Error(__('Invalid date'));
}
return dateFormat(datetime, format);
@@ -175,6 +175,7 @@ export const localTimeAgo = ($timeagoEls, setTimeago = true) => {
function addTimeAgoTooltip() {
$timeagoEls.each((i, el) => {
// Recreate with custom template
+ el.setAttribute('title', formatDate(el.dateTime));
$(el).tooltip({
template:
'<div class="tooltip local-timeago" role="tooltip"><div class="arrow"></div><div class="tooltip-inner"></div></div>',
@@ -496,7 +497,7 @@ export const parseSeconds = (
let unorderedMinutes = Math.abs(seconds / SECONDS_PER_MINUTE);
- return _.mapObject(timePeriodConstraints, minutesPerPeriod => {
+ return mapValues(timePeriodConstraints, minutesPerPeriod => {
if (minutesPerPeriod === 0) {
return 0;
}
@@ -515,7 +516,7 @@ export const parseSeconds = (
* If the 'fullNameFormat' param is passed it returns a non condensed string eg '1 week 3 days'
*/
export const stringifyTime = (timeObject, fullNameFormat = false) => {
- const reducedTime = _.reduce(
+ const reducedTime = reduce(
timeObject,
(memo, unitValue, unitName) => {
const isNonZero = Boolean(unitValue);
@@ -565,6 +566,14 @@ export const getDateInPast = (date, daysInPast) =>
export const getDateInFuture = (date, daysInFuture) =>
new Date(newDate(date).setDate(date.getDate() + daysInFuture));
+/**
+ * Checks if a given date-instance was created with a valid date
+ *
+ * @param {Date} date
+ * @returns boolean
+ */
+export const isValidDate = date => date instanceof Date && !Number.isNaN(date.getTime());
+
/*
* Appending T00:00:00 makes JS assume local time and prevents it from shifting the date
* to match the user's time zone. We want to display the date in server time for now, to
@@ -641,7 +650,7 @@ export const dayAfter = date => new Date(newDate(date).setDate(date.getDate() +
* @return {String} approximated time
*/
export const approximateDuration = (seconds = 0) => {
- if (!_.isNumber(seconds) || seconds < 0) {
+ if (!isNumber(seconds) || seconds < 0) {
return '';
}
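A rough sketch of the new `isValidDate` helper above (not part of the patch):

  isValidDate(new Date('2020-03-19')); // => true
  isValidDate(new Date('not a date')); // => false, the Date instance holds NaN
  isValidDate('2020-03-19');           // => false, not a Date instance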
diff --git a/app/assets/javascripts/lib/utils/highlight.js b/app/assets/javascripts/lib/utils/highlight.js
index 8f0afa3467d..b1dd562f63a 100644
--- a/app/assets/javascripts/lib/utils/highlight.js
+++ b/app/assets/javascripts/lib/utils/highlight.js
@@ -1,5 +1,4 @@
import fuzzaldrinPlus from 'fuzzaldrin-plus';
-import _ from 'underscore';
import sanitize from 'sanitize-html';
/**
@@ -17,11 +16,11 @@ import sanitize from 'sanitize-html';
* @param {String} matchSuffix The string to insert at the end of a match
*/
export default function highlight(string, match = '', matchPrefix = '<b>', matchSuffix = '</b>') {
- if (_.isUndefined(string) || _.isNull(string)) {
+ if (!string) {
return '';
}
- if (_.isUndefined(match) || _.isNull(match) || match === '') {
+ if (!match) {
return string;
}
@@ -34,7 +33,7 @@ export default function highlight(string, match = '', matchPrefix = '<b>', match
return sanitizedValue
.split('')
.map((character, i) => {
- if (_.contains(occurrences, i)) {
+ if (occurrences.includes(i)) {
return `${matchPrefix}${character}${matchSuffix}`;
}
diff --git a/app/assets/javascripts/lib/utils/icon_utils.js b/app/assets/javascripts/lib/utils/icon_utils.js
index 7b8dd9bbef7..97ee773358d 100644
--- a/app/assets/javascripts/lib/utils/icon_utils.js
+++ b/app/assets/javascripts/lib/utils/icon_utils.js
@@ -1,18 +1,40 @@
-/* eslint-disable import/prefer-default-export */
-
+import { memoize } from 'lodash';
import axios from '~/lib/utils/axios_utils';
/**
- * Retrieve SVG icon path content from gitlab/svg sprite icons
- * @param {String} name
+ * Resolves to a DOM document that contains the GitLab SVG
+ * sprite icons. Memoized to avoid duplicate requests.
*/
-export const getSvgIconPathContent = name =>
+const getSvgDom = memoize(() =>
axios
.get(gon.sprite_icons)
- .then(({ data: svgs }) =>
- new DOMParser()
- .parseFromString(svgs, 'text/xml')
- .querySelector(`#${name} path`)
- .getAttribute('d'),
- )
+ .then(({ data: svgs }) => new DOMParser().parseFromString(svgs, 'text/xml'))
+ .catch(() => {
+ getSvgDom.cache.clear();
+ }),
+);
+
+/**
+ * Clears the memoized SVG content.
+ *
+ * You probably don't need to invoke this function unless
+ * sprite_icons are updated.
+ */
+export const clearSvgIconPathContentCache = () => {
+ getSvgDom.cache.clear();
+};
+
+/**
+ * Retrieve SVG icon path content from gitlab/svg sprite icons.
+ *
+ * Content loaded is cached.
+ *
+ * @param {String} name - Icon name
+ * @returns A promise that resolves to the svg path
+ */
+export const getSvgIconPathContent = name =>
+ getSvgDom()
+ .then(doc => {
+ return doc.querySelector(`#${name} path`).getAttribute('d');
+ })
.catch(() => null);
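A rough usage sketch of the memoized icon helper above (not part of the patch), assuming `gon.sprite_icons` points at the SVG sprite URL:

  getSvgIconPathContent('retry').then(path => {
    // `path` is the icon's `d` attribute, or null if the sprite could not be fetched
  });
  // A second call reuses the memoized sprite DOM instead of issuing another request.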
diff --git a/app/assets/javascripts/lib/utils/text_utility.js b/app/assets/javascripts/lib/utils/text_utility.js
index a03fedcd7e7..f857e618d89 100644
--- a/app/assets/javascripts/lib/utils/text_utility.js
+++ b/app/assets/javascripts/lib/utils/text_utility.js
@@ -1,4 +1,4 @@
-import _ from 'underscore';
+import { isString } from 'lodash';
/**
* Adds a , to a string composed by numbers, at every 3 chars.
@@ -142,11 +142,25 @@ export const stripHtml = (string, replace = '') => {
};
/**
- * Converts snake_case string to camelCase
+ * Converts a snake_cased string to camelCase.
+ * Leading and trailing underscores are ignored.
*
- * @param {*} string
+ * @param {String} string The snake_cased string to convert
+ * @returns {String} A camelCased version of the string
+ *
+ * @example
+ *
+ * // returns "aSnakeCasedString"
+ * convertToCamelCase('a_snake_cased_string')
+ *
+ * // returns "_leadingUnderscore"
+ * convertToCamelCase('_leading_underscore')
+ *
+ * // returns "trailingUnderscore_"
+ * convertToCamelCase('trailing_underscore_')
*/
-export const convertToCamelCase = string => string.replace(/(_\w)/g, s => s[1].toUpperCase());
+export const convertToCamelCase = string =>
+ string.replace(/([a-z0-9])_([a-z0-9])/gi, (match, p1, p2) => `${p1}${p2.toUpperCase()}`);
/**
* Converts camelCase string to snake_case
@@ -199,7 +213,7 @@ export const splitCamelCase = string =>
* i.e. "My Group / My Subgroup / My Project"
*/
export const truncateNamespace = (string = '') => {
- if (_.isNull(string) || !_.isString(string)) {
+ if (string === null || !isString(string)) {
return '';
}
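A rough sketch of the behaviour change in `convertToCamelCase` above (not part of the patch):

  convertToCamelCase('diff_head_sha'); // => 'diffHeadSha' (unchanged behaviour)
  convertToCamelCase('_deep_prop_');   // => '_deepProp_' (edge underscores are now kept as-is)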
diff --git a/app/assets/javascripts/lib/utils/unit_format/formatter_factory.js b/app/assets/javascripts/lib/utils/unit_format/formatter_factory.js
new file mode 100644
index 00000000000..98bcb8348e2
--- /dev/null
+++ b/app/assets/javascripts/lib/utils/unit_format/formatter_factory.js
@@ -0,0 +1,139 @@
+/**
+ * Formats a number as string using `toLocaleString`.
+ *
+ * @param {Number} value - Number to be converted
+ * @param {Object} params - Formatting parameters
+ * @param {Number} params.fractionDigits - Number of decimal digits
+ * to display, defaults to using `toLocaleString` defaults.
+ * @param {Number} params.maxLength - Max output length in characters; if the
+ * output is longer than this, the formatter switches to exponential notation
+ * at the expense of precision.
+ * @param {Number} params.valueFactor - Value is multiplied by this factor,
+ * useful for value normalization.
+ * @returns {String} Formatted value
+ */
+function formatNumber(
+ value,
+ { fractionDigits = undefined, valueFactor = 1, style = undefined, maxLength = undefined },
+) {
+ if (value === null) {
+ return '';
+ }
+
+ const num = value * valueFactor;
+ const formatted = num.toLocaleString(undefined, {
+ minimumFractionDigits: fractionDigits,
+ maximumFractionDigits: fractionDigits,
+ style,
+ });
+
+ if (maxLength !== undefined && formatted.length > maxLength) {
+ // e.g. 123456 becomes 1.23e+5
+ return num.toExponential(2);
+ }
+ return formatted;
+}
+
+/**
+ * Formats a number as a string scaling it up according to units.
+ *
+ * While the number is scaled down, the units are scaled up.
+ *
+ * @param {Array} units - List of units of the scale
+ * @param {Number} unitFactor - Factor between consecutive units of the
+ * scale; once a value reaches this factor, the next unit is used.
+ */
+const scaledFormatter = (units, unitFactor = 1000) => {
+ if (unitFactor === 0) {
+ throw new RangeError(`unitFactor cannot have the value 0.`);
+ }
+
+ return (value, fractionDigits) => {
+ if (value === null) {
+ return '';
+ }
+ if (
+ value === Number.NEGATIVE_INFINITY ||
+ value === Number.POSITIVE_INFINITY ||
+ Number.isNaN(value)
+ ) {
+ return value.toLocaleString(undefined);
+ }
+
+ let num = value;
+ let scale = 0;
+ const limit = units.length;
+
+ while (Math.abs(num) >= unitFactor) {
+ scale += 1;
+ num /= unitFactor;
+
+ if (scale >= limit) {
+ return 'NA';
+ }
+ }
+
+ const unit = units[scale];
+
+ return `${formatNumber(num, { fractionDigits })}${unit}`;
+ };
+};
+
+/**
+ * Returns a function that formats a number as a string.
+ */
+export const numberFormatter = (style = 'decimal', valueFactor = 1) => {
+ return (value, fractionDigits, maxLength) => {
+ return `${formatNumber(value, { fractionDigits, maxLength, valueFactor, style })}`;
+ };
+};
+
+/**
+ * Returns a function that formats a number as a string with a suffix.
+ */
+export const suffixFormatter = (unit = '', valueFactor = 1) => {
+ return (value, fractionDigits, maxLength) => {
+ const length = maxLength !== undefined ? maxLength - unit.length : undefined;
+ return `${formatNumber(value, { fractionDigits, maxLength: length, valueFactor })}${unit}`;
+ };
+};
+
+/**
+ * Returns a function that formats a number scaled using SI units notation.
+ */
+export const scaledSIFormatter = (unit = '', prefixOffset = 0) => {
+ const fractional = ['y', 'z', 'a', 'f', 'p', 'n', 'µ', 'm'];
+ const multiplicative = ['k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y'];
+ const symbols = [...fractional, '', ...multiplicative];
+
+ const units = symbols.slice(fractional.length + prefixOffset).map(prefix => {
+ return `${prefix}${unit}`;
+ });
+
+ if (!units.length) {
+ // eslint-disable-next-line @gitlab/i18n/no-non-i18n-strings
+ throw new RangeError('The unit cannot be converted, please try a different scale');
+ }
+
+ return scaledFormatter(units);
+};
+
+/**
+ * Returns a function that formats a number scaled using binary (IEC) units notation.
+ */
+export const scaledBinaryFormatter = (unit = '', prefixOffset = 0) => {
+ // eslint-disable-next-line @gitlab/i18n/no-non-i18n-strings
+ const multiplicative = ['Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi'];
+ const symbols = ['', ...multiplicative];
+
+ const units = symbols.slice(prefixOffset).map(prefix => {
+ return `${prefix}${unit}`;
+ });
+
+ if (!units.length) {
+ // eslint-disable-next-line @gitlab/i18n/no-non-i18n-strings
+ throw new RangeError('The unit cannot be converted, please try a different scale');
+ }
+
+ return scaledFormatter(units, 1024);
+};
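A rough sketch of the scaled formatters defined above (not part of the patch); decimal separators depend on the locale:

  const formatBytes = scaledSIFormatter('B');
  formatBytes(12345, 1);  // => '12.3kB'
  const formatMiB = scaledBinaryFormatter('B', 2);
  formatMiB(3, 1);        // => '3.0MiB'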
diff --git a/app/assets/javascripts/lib/utils/unit_format/index.js b/app/assets/javascripts/lib/utils/unit_format/index.js
new file mode 100644
index 00000000000..d3aea37e677
--- /dev/null
+++ b/app/assets/javascripts/lib/utils/unit_format/index.js
@@ -0,0 +1,257 @@
+import { s__ } from '~/locale';
+
+import {
+ suffixFormatter,
+ scaledSIFormatter,
+ scaledBinaryFormatter,
+ numberFormatter,
+} from './formatter_factory';
+
+/**
+ * Supported formats
+ *
+ * Based on:
+ *
+ * https://tc39.es/proposal-unified-intl-numberformat/section6/locales-currencies-tz_proposed_out.html#sec-issanctionedsimpleunitidentifier
+ */
+export const SUPPORTED_FORMATS = {
+ // Number
+ number: 'number',
+ percent: 'percent',
+ percentHundred: 'percentHundred',
+
+ // Duration
+ seconds: 'seconds',
+ milliseconds: 'milliseconds',
+
+ // Digital (Metric)
+ decimalBytes: 'decimalBytes',
+ kilobytes: 'kilobytes',
+ megabytes: 'megabytes',
+ gigabytes: 'gigabytes',
+ terabytes: 'terabytes',
+ petabytes: 'petabytes',
+
+ // Digital (IEC)
+ bytes: 'bytes',
+ kibibytes: 'kibibytes',
+ mebibytes: 'mebibytes',
+ gibibytes: 'gibibytes',
+ tebibytes: 'tebibytes',
+ pebibytes: 'pebibytes',
+};
+
+/**
+ * Returns a function that formats a number according to the given format
+ * @param {String} format - Format to use, must be one of the SUPPORTED_FORMATS. Defaults to number.
+ *
+ *
+ */
+export const getFormatter = (format = SUPPORTED_FORMATS.number) => {
+ // Number
+
+ if (format === SUPPORTED_FORMATS.number) {
+ /**
+ * Formats a number
+ *
+ * @function
+ * @param {Number} value - Number to format
+ * @param {Number} fractionDigits - precision decimals
+ * @param {Number} maxLength - Max length of formatted number;
+ * if length is exceeded, exponential format is used.
+ */
+ return numberFormatter();
+ }
+ if (format === SUPPORTED_FORMATS.percent) {
+ /**
+ * Formats a percentage (0 to 1)
+ *
+ * @function
+ * @param {Number} value - Number to format, `1` is rendered as `100%`
+ * @param {Number} fractionDigits - number of precision decimals
+ * @param {Number} maxLength - Max length of formatted number;
+ * if length is exceeded, exponential format is used.
+ */
+ return numberFormatter('percent');
+ }
+ if (format === SUPPORTED_FORMATS.percentHundred) {
+ /**
+ * Formats a percentage (0 to 100)
+ *
+ * @function
+ * @param {Number} value - Number to format, `100` is rendered as `100%`
+ * @param {Number} fractionDigits - number of precision decimals
+ * @param {Number} maxLength - Max length of formatted number;
+ * if length is exceeded, exponential format is used.
+ */
+ return numberFormatter('percent', 1 / 100);
+ }
+
+ // Durations
+
+ if (format === SUPPORTED_FORMATS.seconds) {
+ /**
+ * Formats a number of seconds
+ *
+ * @function
+ * @param {Number} value - Number to format, `1` is rendered as `1s`
+ * @param {Number} fractionDigits - number of precision decimals
+ * @param {Number} maxLength - Max length of formatted number;
+ * if length is exceeded, exponential format is used.
+ */
+ return suffixFormatter(s__('Units|s'));
+ }
+ if (format === SUPPORTED_FORMATS.milliseconds) {
+ /**
+ * Formats a number of milliseconds with ms as units
+ *
+ * @function
+ * @param {Number} value - Number to format, `1` is formatted as `1ms`
+ * @param {Number} fractionDigits - number of precision decimals
+ * @param {Number} maxLength - Max length of formatted number;
+ * if length is exceeded, exponential format is used.
+ */
+ return suffixFormatter(s__('Units|ms'));
+ }
+
+ // Digital (Metric)
+
+ if (format === SUPPORTED_FORMATS.decimalBytes) {
+ /**
+ * Formats a number of bytes scaled up to larger digital
+ * units for larger numbers.
+ *
+ * @function
+ * @param {Number} value - Number to format, `1` is formatted as `1B`
+ * @param {Number} fractionDigits - number of precision decimals
+ */
+ return scaledSIFormatter('B');
+ }
+ if (format === SUPPORTED_FORMATS.kilobytes) {
+ /**
+ * Formats a number of kilobytes scaled up to larger digital
+ * units for larger numbers.
+ *
+ * @function
+ * @param {Number} value - Number to format, `1` is formatted as `1kB`
+ * @param {Number} fractionDigits - number of precision decimals
+ */
+ return scaledSIFormatter('B', 1);
+ }
+ if (format === SUPPORTED_FORMATS.megabytes) {
+ /**
+ * Formats a number of megabytes scaled up to larger digital
+ * units for larger numbers.
+ *
+ * @function
+ * @param {Number} value - Number to format, `1` is formatted as `1MB`
+ * @param {Number} fractionDigits - number of precision decimals
+ */
+ return scaledSIFormatter('B', 2);
+ }
+ if (format === SUPPORTED_FORMATS.gigabytes) {
+ /**
+ * Formats a number of gigabytes scaled up to larger digital
+ * units for larger numbers.
+ *
+ * @function
+ * @param {Number} value - Number to format, `1` is formatted as `1GB`
+ * @param {Number} fractionDigits - number of precision decimals
+ */
+ return scaledSIFormatter('B', 3);
+ }
+ if (format === SUPPORTED_FORMATS.terabytes) {
+ /**
+ * Formats a number of terabytes scaled up to larger digital
+ * units for larger numbers.
+ *
+ * @function
+ * @param {Number} value - Number to format, `1` is formatted as `1TB`
+ * @param {Number} fractionDigits - number of precision decimals
+ */
+ return scaledSIFormatter('B', 4);
+ }
+ if (format === SUPPORTED_FORMATS.petabytes) {
+ /**
+ * Formats a number of petabytes scaled up to larger digital
+ * units for larger numbers.
+ *
+ * @function
+ * @param {Number} value - Number to format, `1` is formatted as `1PB`
+ * @param {Number} fractionDigits - number of precision decimals
+ */
+ return scaledSIFormatter('B', 5);
+ }
+
+ // Digital (IEC)
+
+ if (format === SUPPORTED_FORMATS.bytes) {
+ /**
+ * Formats a number of bytes scaled up to larger digital
+ * units for larger numbers.
+ *
+ * @function
+ * @param {Number} value - Number to format, `1` is formatted as `1B`
+ * @param {Number} fractionDigits - number of precision decimals
+ */
+ return scaledBinaryFormatter('B');
+ }
+ if (format === SUPPORTED_FORMATS.kibibytes) {
+ /**
+ * Formats a number of kibibytes scaled up to larger digital
+ * units for larger numbers.
+ *
+ * @function
+ * @param {Number} value - Number to format, `1` is formatted as `1KiB`
+ * @param {Number} fractionDigits - number of precision decimals
+ */
+ return scaledBinaryFormatter('B', 1);
+ }
+ if (format === SUPPORTED_FORMATS.mebibytes) {
+ /**
+ * Formats a number of mebibytes scaled up to larger digital
+ * units for larger numbers.
+ *
+ * @function
+ * @param {Number} value - Number to format, `1` is formatted as `1MiB`
+ * @param {Number} fractionDigits - number of precision decimals
+ */
+ return scaledBinaryFormatter('B', 2);
+ }
+ if (format === SUPPORTED_FORMATS.gibibytes) {
+ /**
+ * Formats a number of gibibytes scaled up to larger digital
+ * units for larger numbers.
+ *
+ * @function
+ * @param {Number} value - Number to format, `1` is formatted as `1GiB`
+ * @param {Number} fractionDigits - number of precision decimals
+ */
+ return scaledBinaryFormatter('B', 3);
+ }
+ if (format === SUPPORTED_FORMATS.tebibytes) {
+ /**
+ * Formats a number of tebibytes scaled up to larger digital
+ * units for larger numbers.
+ *
+ * @function
+ * @param {Number} value - Number to format, `1` is formatted as `1TiB`
+ * @param {Number} fractionDigits - number of precision decimals
+ */
+ return scaledBinaryFormatter('B', 4);
+ }
+ if (format === SUPPORTED_FORMATS.pebibytes) {
+ /**
+ * Formats a number of pebibytes scaled up to larger digital
+ * units for larger numbers.
+ *
+ * @function
+ * @param {Number} value - Number to format, `1` is formatted as `1PiB`
+ * @param {Number} fractionDigits - number of precision decimals
+ */
+ return scaledBinaryFormatter('B', 5);
+ }
+
+ // Fail loudly so the calling code can address the invalid format
+ throw TypeError(`${format} is not a valid number format`);
+};
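A rough usage sketch of `getFormatter` above (not part of the patch); exact grouping and decimal separators depend on the locale:

  const formatMs = getFormatter(SUPPORTED_FORMATS.milliseconds);
  formatMs(1250, 2);     // => '1,250.00ms' in an en locale
  const formatPct = getFormatter(SUPPORTED_FORMATS.percentHundred);
  formatPct(85);         // => '85%'
  getFormatter('bogus'); // throws TypeError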
diff --git a/app/assets/javascripts/locale/sprintf.js b/app/assets/javascripts/locale/sprintf.js
index a3557fbf3fb..7ab4e725d99 100644
--- a/app/assets/javascripts/locale/sprintf.js
+++ b/app/assets/javascripts/locale/sprintf.js
@@ -1,4 +1,4 @@
-import _ from 'underscore';
+import { escape } from 'lodash';
/**
Very limited implementation of sprintf supporting only named parameters.
@@ -17,7 +17,7 @@ export default (input, parameters, escapeParameters = true) => {
if (parameters) {
Object.keys(parameters).forEach(parameterName => {
const parameterValue = parameters[parameterName];
- const escapedParameterValue = escapeParameters ? _.escape(parameterValue) : parameterValue;
+ const escapedParameterValue = escapeParameters ? escape(parameterValue) : parameterValue;
output = output.replace(new RegExp(`%{${parameterName}}`, 'g'), escapedParameterValue);
});
}
diff --git a/app/assets/javascripts/logs/components/environment_logs.vue b/app/assets/javascripts/logs/components/environment_logs.vue
new file mode 100644
index 00000000000..68ec2bc2f16
--- /dev/null
+++ b/app/assets/javascripts/logs/components/environment_logs.vue
@@ -0,0 +1,278 @@
+<script>
+import { throttle } from 'lodash';
+import { mapActions, mapState, mapGetters } from 'vuex';
+import {
+ GlSprintf,
+ GlAlert,
+ GlDropdown,
+ GlDropdownItem,
+ GlFormGroup,
+ GlSearchBoxByClick,
+ GlInfiniteScroll,
+} from '@gitlab/ui';
+import DateTimePicker from '~/vue_shared/components/date_time_picker/date_time_picker.vue';
+import LogControlButtons from './log_control_buttons.vue';
+
+import { timeRanges, defaultTimeRange } from '~/vue_shared/constants';
+import { timeRangeFromUrl } from '~/monitoring/utils';
+import { formatDate } from '../utils';
+
+export default {
+ components: {
+ GlSprintf,
+ GlAlert,
+ GlDropdown,
+ GlDropdownItem,
+ GlFormGroup,
+ GlSearchBoxByClick,
+ GlInfiniteScroll,
+ DateTimePicker,
+ LogControlButtons,
+ },
+ filters: {
+ formatDate,
+ },
+ props: {
+ environmentName: {
+ type: String,
+ required: false,
+ default: '',
+ },
+ currentPodName: {
+ type: [String, null],
+ required: false,
+ default: null,
+ },
+ environmentsPath: {
+ type: String,
+ required: false,
+ default: '',
+ },
+ clusterApplicationsDocumentationPath: {
+ type: String,
+ required: true,
+ },
+ },
+ traceHeight: 600,
+ data() {
+ return {
+ searchQuery: '',
+ timeRanges,
+ isElasticStackCalloutDismissed: false,
+ scrollDownButtonDisabled: true,
+ };
+ },
+ computed: {
+ ...mapState('environmentLogs', ['environments', 'timeRange', 'logs', 'pods']),
+ ...mapGetters('environmentLogs', ['trace']),
+
+ timeRangeModel: {
+ get() {
+ return this.timeRange.selected;
+ },
+ set(val) {
+ this.setTimeRange(val);
+ },
+ },
+
+ showLoader() {
+ return this.logs.isLoading;
+ },
+ advancedFeaturesEnabled() {
+ const environment = this.environments.options.find(
+ ({ name }) => name === this.environments.current,
+ );
+ return environment && environment.enable_advanced_logs_querying;
+ },
+ disableAdvancedControls() {
+ return this.environments.isLoading || !this.advancedFeaturesEnabled;
+ },
+ shouldShowElasticStackCallout() {
+ return !this.isElasticStackCalloutDismissed && this.disableAdvancedControls;
+ },
+ },
+ mounted() {
+ this.setInitData({
+ timeRange: timeRangeFromUrl() || defaultTimeRange,
+ environmentName: this.environmentName,
+ podName: this.currentPodName,
+ });
+
+ this.fetchEnvironments(this.environmentsPath);
+ },
+ methods: {
+ ...mapActions('environmentLogs', [
+ 'setInitData',
+ 'setSearch',
+ 'setTimeRange',
+ 'showPodLogs',
+ 'showEnvironment',
+ 'fetchEnvironments',
+ 'fetchMoreLogsPrepend',
+ ]),
+
+ topReached() {
+ if (!this.logs.isLoading) {
+ this.fetchMoreLogsPrepend();
+ }
+ },
+ scrollDown() {
+ this.$refs.infiniteScroll.scrollDown();
+ },
+ scroll: throttle(function scrollThrottled({ target = {} }) {
+ const { scrollTop = 0, clientHeight = 0, scrollHeight = 0 } = target;
+ this.scrollDownButtonDisabled = scrollTop + clientHeight === scrollHeight;
+ }, 200),
+ },
+};
+</script>
+<template>
+ <div class="environment-logs-viewer mt-3">
+ <gl-alert
+ v-if="shouldShowElasticStackCallout"
+ class="mb-3 js-elasticsearch-alert"
+ @dismiss="isElasticStackCalloutDismissed = true"
+ >
+ {{
+ s__(
+ 'Environments|Install Elastic Stack on your cluster to enable advanced querying capabilities such as full text search.',
+ )
+ }}
+ <a :href="clusterApplicationsDocumentationPath">
+ <strong>
+ {{ s__('View Documentation') }}
+ </strong>
+ </a>
+ </gl-alert>
+ <div class="top-bar js-top-bar d-flex">
+ <div class="row mx-n1">
+ <gl-form-group
+ id="environments-dropdown-fg"
+ :label="s__('Environments|Environment')"
+ label-size="sm"
+ label-for="environments-dropdown"
+ class="col-3 px-1"
+ >
+ <gl-dropdown
+ id="environments-dropdown"
+ :text="environments.current"
+ :disabled="environments.isLoading"
+ class="d-flex gl-h-32 js-environments-dropdown"
+ toggle-class="dropdown-menu-toggle"
+ >
+ <gl-dropdown-item
+ v-for="env in environments.options"
+ :key="env.id"
+ @click="showEnvironment(env.name)"
+ >
+ {{ env.name }}
+ </gl-dropdown-item>
+ </gl-dropdown>
+ </gl-form-group>
+ <gl-form-group
+ id="pods-dropdown-fg"
+ :label="s__('Environments|Logs from')"
+ label-size="sm"
+ label-for="pods-dropdown"
+ class="col-3 px-1"
+ >
+ <gl-dropdown
+ id="pods-dropdown"
+ :text="pods.current || s__('Environments|No pods to display')"
+ :disabled="environments.isLoading"
+ class="d-flex gl-h-32 js-pods-dropdown"
+ toggle-class="dropdown-menu-toggle"
+ >
+ <gl-dropdown-item
+ v-for="podName in pods.options"
+ :key="podName"
+ @click="showPodLogs(podName)"
+ >
+ {{ podName }}
+ </gl-dropdown-item>
+ </gl-dropdown>
+ </gl-form-group>
+ <gl-form-group
+ id="dates-fg"
+ :label="s__('Environments|Show last')"
+ label-size="sm"
+ label-for="time-window-dropdown"
+ class="col-3 px-1"
+ >
+ <date-time-picker
+ ref="dateTimePicker"
+ v-model="timeRangeModel"
+ class="w-100 gl-h-32"
+ :disabled="disableAdvancedControls"
+ :options="timeRanges"
+ />
+ </gl-form-group>
+ <gl-form-group
+ id="search-fg"
+ :label="s__('Environments|Search')"
+ label-size="sm"
+ label-for="search"
+ class="col-3 px-1"
+ >
+ <gl-search-box-by-click
+ v-model.trim="searchQuery"
+ :disabled="disableAdvancedControls"
+ :placeholder="s__('Environments|Search')"
+ class="js-logs-search"
+ type="search"
+ autofocus
+ @submit="setSearch(searchQuery)"
+ />
+ </gl-form-group>
+ </div>
+
+ <log-control-buttons
+ ref="scrollButtons"
+ class="controllers align-self-end mb-1"
+ :scroll-down-button-disabled="scrollDownButtonDisabled"
+ @refresh="showPodLogs(pods.current)"
+ @scrollDown="scrollDown"
+ />
+ </div>
+
+ <gl-infinite-scroll
+ ref="infiniteScroll"
+ class="log-lines"
+ :style="{ height: `${$options.traceHeight}px` }"
+ :max-list-height="$options.traceHeight"
+ :fetched-items="logs.lines.length"
+ @topReached="topReached"
+ @scroll="scroll"
+ >
+ <template #items>
+ <pre
+ class="build-trace js-log-trace"
+ ><code class="bash js-build-output"><div v-if="showLoader" class="build-loader-animation js-build-loader-animation">
+ <div class="dot"></div>
+ <div class="dot"></div>
+ <div class="dot"></div>
+ </div>{{trace}}
+ </code></pre>
+ </template>
+ <template #default
+ ><div></div
+ ></template>
+ </gl-infinite-scroll>
+
+ <div ref="logFooter" class="log-footer py-2 px-3">
+ <gl-sprintf :message="s__('Environments|Logs from %{start} to %{end}.')">
+ <template #start>{{ timeRange.current.start | formatDate }}</template>
+ <template #end>{{ timeRange.current.end | formatDate }}</template>
+ </gl-sprintf>
+ <gl-sprintf
+ v-if="!logs.isComplete"
+ :message="s__('Environments|Currently showing %{fetched} results.')"
+ >
+ <template #fetched>{{ logs.lines.length }}</template>
+ </gl-sprintf>
+ <template v-else>
+ {{ s__('Environments|Currently showing all results.') }}</template
+ >
+ </div>
+ </div>
+</template>
diff --git a/app/assets/javascripts/logs/components/log_control_buttons.vue b/app/assets/javascripts/logs/components/log_control_buttons.vue
new file mode 100644
index 00000000000..170d0474447
--- /dev/null
+++ b/app/assets/javascripts/logs/components/log_control_buttons.vue
@@ -0,0 +1,92 @@
+<script>
+import { GlButton, GlTooltipDirective } from '@gitlab/ui';
+import Icon from '~/vue_shared/components/icon.vue';
+
+export default {
+ components: {
+ Icon,
+ GlButton,
+ },
+ directives: {
+ GlTooltip: GlTooltipDirective,
+ },
+ props: {
+ scrollUpButtonDisabled: {
+ type: Boolean,
+ required: false,
+ default: false,
+ },
+ scrollDownButtonDisabled: {
+ type: Boolean,
+ required: false,
+ default: false,
+ },
+ },
+ data() {
+ return {
+ scrollUpAvailable: Boolean(this.$listeners.scrollUp),
+ scrollDownAvailable: Boolean(this.$listeners.scrollDown),
+ };
+ },
+ methods: {
+ handleRefreshClick() {
+ this.$emit('refresh');
+ },
+ handleScrollUp() {
+ this.$emit('scrollUp');
+ },
+ handleScrollDown() {
+ this.$emit('scrollDown');
+ },
+ },
+};
+</script>
+
+<template>
+ <div>
+ <div
+ v-if="scrollUpAvailable"
+ v-gl-tooltip
+ class="controllers-buttons"
+ :title="__('Scroll to top')"
+ aria-labelledby="scroll-to-top"
+ >
+ <gl-button
+ id="scroll-to-top"
+ class="btn-blank js-scroll-to-top"
+ :aria-label="__('Scroll to top')"
+ :disabled="scrollUpButtonDisabled"
+ @click="handleScrollUp()"
+ ><icon name="scroll_up"
+ /></gl-button>
+ </div>
+ <div
+ v-if="scrollDownAvailable"
+ v-gl-tooltip
+ :disabled="scrollUpButtonDisabled"
+ class="controllers-buttons"
+ :title="__('Scroll to bottom')"
+ aria-labelledby="scroll-to-bottom"
+ >
+ <gl-button
+ id="scroll-to-bottom"
+ class="btn-blank js-scroll-to-bottom"
+ :aria-label="__('Scroll to bottom')"
+ :v-if="scrollDownAvailable"
+ :disabled="scrollDownButtonDisabled"
+ @click="handleScrollDown()"
+ ><icon name="scroll_down"
+ /></gl-button>
+ </div>
+ <gl-button
+ id="refresh-log"
+ v-gl-tooltip
+ class="ml-1 px-2 js-refresh-log"
+ :title="__('Refresh')"
+ :aria-label="__('Refresh')"
+ @click="handleRefreshClick"
+ >
+ <icon name="retry" />
+ </gl-button>
+ </div>
+</template>
diff --git a/app/assets/javascripts/logs/index.js b/app/assets/javascripts/logs/index.js
new file mode 100644
index 00000000000..70dbffdc3dd
--- /dev/null
+++ b/app/assets/javascripts/logs/index.js
@@ -0,0 +1,24 @@
+import Vue from 'vue';
+import { getParameterValues } from '~/lib/utils/url_utility';
+import LogViewer from './components/environment_logs.vue';
+import store from './stores';
+
+export default (props = {}) => {
+ const el = document.getElementById('environment-logs');
+ const [currentPodName] = getParameterValues('pod_name');
+
+ // eslint-disable-next-line no-new
+ new Vue({
+ el,
+ store,
+ render(createElement) {
+ return createElement(LogViewer, {
+ props: {
+ ...el.dataset,
+ currentPodName,
+ ...props,
+ },
+ });
+ },
+ });
+};
diff --git a/app/assets/javascripts/logs/stores/actions.js b/app/assets/javascripts/logs/stores/actions.js
new file mode 100644
index 00000000000..4544ebdfec1
--- /dev/null
+++ b/app/assets/javascripts/logs/stores/actions.js
@@ -0,0 +1,148 @@
+import { backOff } from '~/lib/utils/common_utils';
+import httpStatusCodes from '~/lib/utils/http_status';
+import axios from '~/lib/utils/axios_utils';
+import flash from '~/flash';
+import { s__ } from '~/locale';
+import { convertToFixedRange } from '~/lib/utils/datetime_range';
+
+import * as types from './mutation_types';
+
+const flashTimeRangeWarning = () => {
+ flash(s__('Metrics|Invalid time range, please verify.'), 'warning');
+};
+
+const flashLogsError = () => {
+ flash(s__('Metrics|There was an error fetching the logs, please try again'));
+};
+
+const requestUntilData = (url, params) =>
+ backOff((next, stop) => {
+ axios
+ .get(url, { params })
+ .then(res => {
+ if (res.status === httpStatusCodes.ACCEPTED) {
+ next();
+ return;
+ }
+ stop(res);
+ })
+ .catch(err => {
+ stop(err);
+ });
+ });
+
+const requestLogsUntilData = state => {
+ const params = {};
+ const { logs_api_path } = state.environments.options.find(
+ ({ name }) => name === state.environments.current,
+ );
+
+ if (state.pods.current) {
+ params.pod_name = state.pods.current;
+ }
+ if (state.search) {
+ params.search = state.search;
+ }
+ if (state.timeRange.current) {
+ try {
+ const { start, end } = convertToFixedRange(state.timeRange.current);
+ params.start = start;
+ params.end = end;
+ } catch {
+ flashTimeRangeWarning();
+ }
+ }
+ if (state.logs.cursor) {
+ params.cursor = state.logs.cursor;
+ }
+
+ return requestUntilData(logs_api_path, params);
+};
+
+export const setInitData = ({ commit }, { timeRange, environmentName, podName }) => {
+ commit(types.SET_TIME_RANGE, timeRange);
+ commit(types.SET_PROJECT_ENVIRONMENT, environmentName);
+ commit(types.SET_CURRENT_POD_NAME, podName);
+};
+
+export const showPodLogs = ({ dispatch, commit }, podName) => {
+ commit(types.SET_CURRENT_POD_NAME, podName);
+ dispatch('fetchLogs');
+};
+
+export const setSearch = ({ dispatch, commit }, searchQuery) => {
+ commit(types.SET_SEARCH, searchQuery);
+ dispatch('fetchLogs');
+};
+
+export const setTimeRange = ({ dispatch, commit }, timeRange) => {
+ commit(types.SET_TIME_RANGE, timeRange);
+ dispatch('fetchLogs');
+};
+
+export const showEnvironment = ({ dispatch, commit }, environmentName) => {
+ commit(types.SET_PROJECT_ENVIRONMENT, environmentName);
+ commit(types.SET_CURRENT_POD_NAME, null);
+ dispatch('fetchLogs');
+};
+
+/**
+ * Fetch environments data and initial logs
+ * @param {Object} store
+ * @param {String} environmentsPath
+ */
+export const fetchEnvironments = ({ commit, dispatch }, environmentsPath) => {
+ commit(types.REQUEST_ENVIRONMENTS_DATA);
+
+ return axios
+ .get(environmentsPath)
+ .then(({ data }) => {
+ commit(types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS, data.environments);
+ dispatch('fetchLogs');
+ })
+ .catch(() => {
+ commit(types.RECEIVE_ENVIRONMENTS_DATA_ERROR);
+ flash(s__('Metrics|There was an error fetching the environments data, please try again'));
+ });
+};
+
+export const fetchLogs = ({ commit, state }) => {
+ commit(types.REQUEST_PODS_DATA);
+ commit(types.REQUEST_LOGS_DATA);
+
+ return requestLogsUntilData(state)
+ .then(({ data }) => {
+ const { pod_name, pods, logs, cursor } = data;
+ commit(types.SET_CURRENT_POD_NAME, pod_name);
+
+ commit(types.RECEIVE_PODS_DATA_SUCCESS, pods);
+ commit(types.RECEIVE_LOGS_DATA_SUCCESS, { logs, cursor });
+ })
+ .catch(() => {
+ commit(types.RECEIVE_PODS_DATA_ERROR);
+ commit(types.RECEIVE_LOGS_DATA_ERROR);
+ flashLogsError();
+ });
+};
+
+export const fetchMoreLogsPrepend = ({ commit, state }) => {
+ if (state.logs.isComplete) {
+ // return when all logs are loaded
+ return Promise.resolve();
+ }
+
+ commit(types.REQUEST_LOGS_DATA_PREPEND);
+
+ return requestLogsUntilData(state)
+ .then(({ data }) => {
+ const { logs, cursor } = data;
+ commit(types.RECEIVE_LOGS_DATA_PREPEND_SUCCESS, { logs, cursor });
+ })
+ .catch(() => {
+ commit(types.RECEIVE_LOGS_DATA_PREPEND_ERROR);
+ flashLogsError();
+ });
+};
+
+// prevent babel-plugin-rewire from generating an invalid default during karma tests
+export default () => {};
diff --git a/app/assets/javascripts/logs/stores/getters.js b/app/assets/javascripts/logs/stores/getters.js
new file mode 100644
index 00000000000..58f2dbf4835
--- /dev/null
+++ b/app/assets/javascripts/logs/stores/getters.js
@@ -0,0 +1,9 @@
+import { formatDate } from '../utils';
+
+const mapTrace = ({ timestamp = null, message = '' }) =>
+ [timestamp ? formatDate(timestamp) : '', message].join(' | ');
+
+export const trace = state => state.logs.lines.map(mapTrace).join('\n');
+
+// prevent babel-plugin-rewire from generating an invalid default during karma tests
+export default () => {};
diff --git a/app/assets/javascripts/logs/stores/index.js b/app/assets/javascripts/logs/stores/index.js
new file mode 100644
index 00000000000..d16941ddf93
--- /dev/null
+++ b/app/assets/javascripts/logs/stores/index.js
@@ -0,0 +1,23 @@
+import Vue from 'vue';
+import Vuex from 'vuex';
+import * as actions from './actions';
+import * as getters from './getters';
+import mutations from './mutations';
+import state from './state';
+
+Vue.use(Vuex);
+
+export const createStore = () =>
+ new Vuex.Store({
+ modules: {
+ environmentLogs: {
+ namespaced: true,
+ actions,
+ mutations,
+ state: state(),
+ getters,
+ },
+ },
+ });
+
+export default createStore;
diff --git a/app/assets/javascripts/logs/stores/mutation_types.js b/app/assets/javascripts/logs/stores/mutation_types.js
new file mode 100644
index 00000000000..5ff49135e41
--- /dev/null
+++ b/app/assets/javascripts/logs/stores/mutation_types.js
@@ -0,0 +1,19 @@
+export const SET_PROJECT_ENVIRONMENT = 'SET_PROJECT_ENVIRONMENT';
+export const SET_SEARCH = 'SET_SEARCH';
+export const SET_TIME_RANGE = 'SET_TIME_RANGE';
+export const SET_CURRENT_POD_NAME = 'SET_CURRENT_POD_NAME';
+
+export const REQUEST_ENVIRONMENTS_DATA = 'REQUEST_ENVIRONMENTS_DATA';
+export const RECEIVE_ENVIRONMENTS_DATA_SUCCESS = 'RECEIVE_ENVIRONMENTS_DATA_SUCCESS';
+export const RECEIVE_ENVIRONMENTS_DATA_ERROR = 'RECEIVE_ENVIRONMENTS_DATA_ERROR';
+
+export const REQUEST_LOGS_DATA = 'REQUEST_LOGS_DATA';
+export const RECEIVE_LOGS_DATA_SUCCESS = 'RECEIVE_LOGS_DATA_SUCCESS';
+export const RECEIVE_LOGS_DATA_ERROR = 'RECEIVE_LOGS_DATA_ERROR';
+export const REQUEST_LOGS_DATA_PREPEND = 'REQUEST_LOGS_DATA_PREPEND';
+export const RECEIVE_LOGS_DATA_PREPEND_SUCCESS = 'RECEIVE_LOGS_DATA_PREPEND_SUCCESS';
+export const RECEIVE_LOGS_DATA_PREPEND_ERROR = 'RECEIVE_LOGS_DATA_PREPEND_ERROR';
+
+export const REQUEST_PODS_DATA = 'REQUEST_PODS_DATA';
+export const RECEIVE_PODS_DATA_SUCCESS = 'RECEIVE_PODS_DATA_SUCCESS';
+export const RECEIVE_PODS_DATA_ERROR = 'RECEIVE_PODS_DATA_ERROR';
diff --git a/app/assets/javascripts/logs/stores/mutations.js b/app/assets/javascripts/logs/stores/mutations.js
new file mode 100644
index 00000000000..d94d71cd25a
--- /dev/null
+++ b/app/assets/javascripts/logs/stores/mutations.js
@@ -0,0 +1,93 @@
+import * as types from './mutation_types';
+import { convertToFixedRange } from '~/lib/utils/datetime_range';
+
+const mapLine = ({ timestamp, message }) => ({
+ timestamp,
+ message,
+});
+
+export default {
+ // Search Data
+ [types.SET_SEARCH](state, searchQuery) {
+ state.search = searchQuery;
+ },
+
+ // Time Range Data
+ [types.SET_TIME_RANGE](state, timeRange) {
+ state.timeRange.selected = timeRange;
+ state.timeRange.current = convertToFixedRange(timeRange);
+ },
+
+ // Environments Data
+ [types.SET_PROJECT_ENVIRONMENT](state, environmentName) {
+ state.environments.current = environmentName;
+ },
+ [types.REQUEST_ENVIRONMENTS_DATA](state) {
+ state.environments.options = [];
+ state.environments.isLoading = true;
+ },
+ [types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS](state, environmentOptions) {
+ state.environments.options = environmentOptions;
+ state.environments.isLoading = false;
+ },
+ [types.RECEIVE_ENVIRONMENTS_DATA_ERROR](state) {
+ state.environments.options = [];
+ state.environments.isLoading = false;
+ },
+
+ // Logs data
+ [types.REQUEST_LOGS_DATA](state) {
+ state.timeRange.current = convertToFixedRange(state.timeRange.selected);
+
+ state.logs.lines = [];
+ state.logs.isLoading = true;
+
+ // start pagination from the beginning
+ state.logs.cursor = null;
+ state.logs.isComplete = false;
+ },
+ [types.RECEIVE_LOGS_DATA_SUCCESS](state, { logs = [], cursor }) {
+ state.logs.lines = logs.map(mapLine);
+ state.logs.isLoading = false;
+ state.logs.cursor = cursor;
+
+ if (!cursor) {
+ state.logs.isComplete = true;
+ }
+ },
+ [types.RECEIVE_LOGS_DATA_ERROR](state) {
+ state.logs.lines = [];
+ state.logs.isLoading = false;
+ },
+
+ [types.REQUEST_LOGS_DATA_PREPEND](state) {
+ state.logs.isLoading = true;
+ },
+ [types.RECEIVE_LOGS_DATA_PREPEND_SUCCESS](state, { logs = [], cursor }) {
+ const lines = logs.map(mapLine);
+ state.logs.lines = lines.concat(state.logs.lines);
+ state.logs.isLoading = false;
+ state.logs.cursor = cursor;
+
+ if (!cursor) {
+ state.logs.isComplete = true;
+ }
+ },
+ [types.RECEIVE_LOGS_DATA_PREPEND_ERROR](state) {
+ state.logs.isLoading = false;
+ },
+
+ // Pods data
+ [types.SET_CURRENT_POD_NAME](state, podName) {
+ state.pods.current = podName;
+ },
+ [types.REQUEST_PODS_DATA](state) {
+ state.pods.options = [];
+ },
+ [types.RECEIVE_PODS_DATA_SUCCESS](state, podOptions) {
+ state.pods.options = podOptions;
+ },
+ [types.RECEIVE_PODS_DATA_ERROR](state) {
+ state.pods.options = [];
+ },
+};
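
The mutations above implement cursor-based pagination: a full reload replaces `lines`, a prepend keeps the existing lines and adds older entries in front, and `isComplete` flips once the backend stops returning a cursor. A minimal sketch of that sequence against a store built from this module; the payloads are invented:

import { createStore } from '~/logs/stores';
import * as types from '~/logs/stores/mutation_types';

const store = createStore();
// The module is registered under the `environmentLogs` namespace in index.js
const commit = (type, payload) => store.commit(`environmentLogs/${type}`, payload);

// Initial page of logs, with a cursor pointing at older entries
commit(types.RECEIVE_LOGS_DATA_SUCCESS, {
  logs: [{ timestamp: '2020-03-19T09:00:00Z', message: 'newest line' }],
  cursor: 'opaque-cursor-1',
});

// A prepend request keeps the existing lines while loading
commit(types.REQUEST_LOGS_DATA_PREPEND);

// Older entries are added in front; an empty cursor marks the end
commit(types.RECEIVE_LOGS_DATA_PREPEND_SUCCESS, {
  logs: [{ timestamp: '2020-03-19T08:59:00Z', message: 'older line' }],
  cursor: null,
});

// store.state.environmentLogs.logs.lines      => older line first, then newest line
// store.state.environmentLogs.logs.isComplete => true, because the cursor came back empty
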
diff --git a/app/assets/javascripts/logs/stores/state.js b/app/assets/javascripts/logs/stores/state.js
new file mode 100644
index 00000000000..2c8f47294cc
--- /dev/null
+++ b/app/assets/javascripts/logs/stores/state.js
@@ -0,0 +1,51 @@
+import { timeRanges, defaultTimeRange } from '~/vue_shared/constants';
+import { convertToFixedRange } from '~/lib/utils/datetime_range';
+
+export default () => ({
+ /**
+ * Full text search
+ */
+ search: '',
+
+ /**
+ * Time range (Show last)
+ */
+ timeRange: {
+ options: timeRanges,
+ // Selected time range, can be fixed or relative
+ selected: defaultTimeRange,
+ // Current time range, must be fixed
+ current: convertToFixedRange(defaultTimeRange),
+ },
+
+ /**
+ * Environments list information
+ */
+ environments: {
+ options: [],
+ isLoading: false,
+ current: null,
+ },
+
+ /**
+ * Logs including trace
+ */
+ logs: {
+ lines: [],
+ isLoading: false,
+ /**
+ * The `cursor` represents the current pagination position and
+ * should be sent with the request for the next batch (page) of logs
+ */
+ cursor: null,
+ isComplete: false,
+ },
+
+ /**
+ * Pods list information
+ */
+ pods: {
+ options: [],
+ current: null,
+ },
+});
diff --git a/app/assets/javascripts/logs/utils.js b/app/assets/javascripts/logs/utils.js
new file mode 100644
index 00000000000..30213dbc130
--- /dev/null
+++ b/app/assets/javascripts/logs/utils.js
@@ -0,0 +1,28 @@
+import { secondsToMilliseconds } from '~/lib/utils/datetime_utility';
+import dateFormat from 'dateformat';
+
+const dateFormatMask = 'UTC:mmm dd HH:MM:ss.l"Z"';
+
+/**
+ * Returns a time range (`start`, `end`) where `start` is the
+ * current time minus a given number of seconds and `end`
+ * is the current time (`now()`).
+ *
+ * @param {Number} seconds Seconds duration, defaults to 0.
+ * @returns {Object} range Time range
+ * @returns {String} range.start ISO String of current time minus given seconds
+ * @returns {String} range.end ISO String of current time
+ */
+export const getTimeRange = (seconds = 0) => {
+ const end = Math.floor(Date.now() / 1000); // convert milliseconds to seconds
+ const start = end - seconds;
+
+ return {
+ start: new Date(secondsToMilliseconds(start)).toISOString(),
+ end: new Date(secondsToMilliseconds(end)).toISOString(),
+ };
+};
+
+export const formatDate = timestamp => dateFormat(timestamp, dateFormatMask);
+
+export default {};
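
`getTimeRange` turns a relative duration in seconds into a fixed ISO 8601 `start`/`end` pair anchored at the current time. A short usage sketch; the printed values are illustrative and depend on when the call happens:

import { getTimeRange, formatDate } from '~/logs/utils';

// Fixed range covering the last 8 hours
const range = getTimeRange(60 * 60 * 8);
// e.g. { start: '2020-03-19T01:00:00.000Z', end: '2020-03-19T09:00:00.000Z' }

// formatDate applies the UTC display mask used by the log trace
formatDate(range.end); // e.g. 'Mar 19 09:00:00.000Z'
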
diff --git a/app/assets/javascripts/main.js b/app/assets/javascripts/main.js
index 5b645b032ed..4d3a61a7627 100644
--- a/app/assets/javascripts/main.js
+++ b/app/assets/javascripts/main.js
@@ -69,7 +69,7 @@ if (gon && gon.disable_animations) {
// inject test utilities if necessary
if (process.env.NODE_ENV !== 'production' && gon && gon.test_env) {
disableJQueryAnimations();
- import(/* webpackMode: "eager" */ './test_utils/');
+ import(/* webpackMode: "eager" */ './test_utils/'); // eslint-disable-line no-unused-expressions
}
document.addEventListener('beforeunload', () => {
@@ -200,7 +200,8 @@ document.addEventListener('DOMContentLoaded', () => {
}
});
- if (bootstrapBreakpoint === 'sm' || bootstrapBreakpoint === 'xs') {
+ const isBoardsPage = /(projects|groups):boards:show/.test(document.body.dataset.page);
+ if (!isBoardsPage && (bootstrapBreakpoint === 'sm' || bootstrapBreakpoint === 'xs')) {
const $rightSidebar = $('aside.right-sidebar');
const $layoutPage = $('.layout-page');
diff --git a/app/assets/javascripts/monitoring/components/charts/column.vue b/app/assets/javascripts/monitoring/components/charts/column.vue
index 0acdfe7675c..0ed801e6e57 100644
--- a/app/assets/javascripts/monitoring/components/charts/column.vue
+++ b/app/assets/javascripts/monitoring/components/charts/column.vue
@@ -5,6 +5,7 @@ import { getSvgIconPathContent } from '~/lib/utils/icon_utils';
import { chartHeight } from '../../constants';
import { makeDataSeries } from '~/helpers/monitor_helper';
import { graphDataValidatorForValues } from '../../utils';
+import { getYAxisOptions, getChartGrid } from './options';
export default {
components: {
@@ -41,15 +42,25 @@ export default {
values: queryData[0].data,
};
},
+ chartOptions() {
+ const yAxis = {
+ ...getYAxisOptions(this.graphData.yAxis),
+ scale: false,
+ };
+
+ return {
+ grid: getChartGrid(),
+ yAxis,
+ dataZoom: this.dataZoomConfig,
+ };
+ },
xAxisTitle() {
return this.graphData.metrics[0].result[0].x_label !== undefined
? this.graphData.metrics[0].result[0].x_label
: '';
},
yAxisTitle() {
- return this.graphData.metrics[0].result[0].y_label !== undefined
- ? this.graphData.metrics[0].result[0].y_label
- : '';
+ return this.chartOptions.yAxis.name;
},
xAxisType() {
return this.graphData.x_type !== undefined ? this.graphData.x_type : 'category';
@@ -59,11 +70,6 @@ export default {
return handleIcon ? { handleIcon } : {};
},
- chartOptions() {
- return {
- dataZoom: this.dataZoomConfig,
- };
- },
},
created() {
this.setSvg('scroll-handle');
@@ -90,11 +96,7 @@ export default {
};
</script>
<template>
- <div v-gl-resize-observer-directive="onResize" class="prometheus-graph">
- <div class="prometheus-graph-header">
- <h5 ref="graphTitle" class="prometheus-graph-title">{{ graphData.title }}</h5>
- <div ref="graphWidgets" class="prometheus-graph-widgets"><slot></slot></div>
- </div>
+ <div v-gl-resize-observer-directive="onResize">
<gl-column-chart
ref="columnChart"
v-bind="$attrs"
diff --git a/app/assets/javascripts/monitoring/components/charts/empty_chart.vue b/app/assets/javascripts/monitoring/components/charts/empty_chart.vue
index eedc5162e0c..5588d9ac060 100644
--- a/app/assets/javascripts/monitoring/components/charts/empty_chart.vue
+++ b/app/assets/javascripts/monitoring/components/charts/empty_chart.vue
@@ -27,10 +27,7 @@ export default {
};
</script>
<template>
- <div class="prometheus-graph d-flex flex-column justify-content-center">
- <div class="prometheus-graph-header">
- <h5 ref="graphTitle" class="prometheus-graph-title">{{ graphTitle }}</h5>
- </div>
+ <div class="d-flex flex-column justify-content-center">
<div
class="prepend-top-8 svg-w-100 d-flex align-items-center"
:style="svgContainerStyle"
diff --git a/app/assets/javascripts/monitoring/components/charts/heatmap.vue b/app/assets/javascripts/monitoring/components/charts/heatmap.vue
index 881904cbd0c..0a0165a113e 100644
--- a/app/assets/javascripts/monitoring/components/charts/heatmap.vue
+++ b/app/assets/javascripts/monitoring/components/charts/heatmap.vue
@@ -2,13 +2,11 @@
import { GlResizeObserverDirective } from '@gitlab/ui';
import { GlHeatmap } from '@gitlab/ui/dist/charts';
import dateformat from 'dateformat';
-import PrometheusHeader from '../shared/prometheus_header.vue';
import { graphDataValidatorForValues } from '../../utils';
export default {
components: {
GlHeatmap,
- PrometheusHeader,
},
directives: {
GlResizeObserverDirective,
@@ -65,8 +63,7 @@ export default {
};
</script>
<template>
- <div v-gl-resize-observer-directive="onResize" class="prometheus-graph col-12 col-lg-6">
- <prometheus-header :graph-title="graphData.title" />
+ <div v-gl-resize-observer-directive="onResize" class="col-12 col-lg-6">
<gl-heatmap
ref="heatmapChart"
v-bind="$attrs"
diff --git a/app/assets/javascripts/monitoring/components/charts/options.js b/app/assets/javascripts/monitoring/components/charts/options.js
new file mode 100644
index 00000000000..d9f49bd81f5
--- /dev/null
+++ b/app/assets/javascripts/monitoring/components/charts/options.js
@@ -0,0 +1,78 @@
+import { SUPPORTED_FORMATS, getFormatter } from '~/lib/utils/unit_format';
+import { s__ } from '~/locale';
+
+const yAxisBoundaryGap = [0.1, 0.1];
+/**
+ * Max string length of formatted axis tick
+ */
+const maxDataAxisTickLength = 8;
+
+// Defaults
+const defaultFormat = SUPPORTED_FORMATS.number;
+
+const defaultYAxisFormat = defaultFormat;
+const defaultYAxisPrecision = 2;
+
+const defaultTooltipFormat = defaultFormat;
+const defaultTooltipPrecision = 3;
+
+// Give enough space for y-axis with units and name.
+const chartGridLeft = 75;
+
+// Axis options
+
+/**
+ * Converts .yml parameters to echarts axis options for the data axis
+ * @param {Object} param - Dashboard .yml definition options
+ */
+const getDataAxisOptions = ({ format, precision, name }) => {
+ const formatter = getFormatter(format);
+
+ return {
+ name,
+ nameLocation: 'center', // same as gitlab-ui's default
+ scale: true,
+ axisLabel: {
+ formatter: val => formatter(val, precision, maxDataAxisTickLength),
+ },
+ };
+};
+
+/**
+ * Converts .yml parameters to echarts y-axis options
+ * @param {Object} param - Dashboard .yml definition options
+ */
+export const getYAxisOptions = ({
+ name = s__('Metrics|Values'),
+ format = defaultYAxisFormat,
+ precision = defaultYAxisPrecision,
+} = {}) => {
+ return {
+ nameGap: 63, // larger gap than gitlab-ui's default to fit with formatted numbers
+ scale: true,
+ boundaryGap: yAxisBoundaryGap,
+
+ ...getDataAxisOptions({
+ name,
+ format,
+ precision,
+ }),
+ };
+};
+
+// Chart grid
+
+/**
+ * Grid with enough room to display the chart.
+ */
+export const getChartGrid = ({ left = chartGridLeft } = {}) => ({ left });
+
+// Tooltip options
+
+export const getTooltipFormatter = ({
+ format = defaultTooltipFormat,
+ precision = defaultTooltipPrecision,
+} = {}) => {
+ const formatter = getFormatter(format);
+ return num => formatter(num, precision);
+};
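
The helpers in options.js centralize the echarts options that the column and time-series charts previously built inline. A minimal sketch of how a chart's `chartOptions` computed property can combine them; the `yAxisDefinition` is an assumed example of a dashboard .yml axis, not taken from the diff:

import { SUPPORTED_FORMATS } from '~/lib/utils/unit_format';
import { getYAxisOptions, getChartGrid, getTooltipFormatter } from './options';

// Assumed axis definition, as it might come from a dashboard .yml panel
const yAxisDefinition = { name: 'Memory', format: SUPPORTED_FORMATS.number, precision: 2 };

const chartOptions = {
  grid: getChartGrid(), // { left: 75 } by default
  yAxis: getYAxisOptions(yAxisDefinition),
};

// Tooltip values use the same format as the axis, with 3-digit precision by default
const formatTooltipValue = getTooltipFormatter({ format: yAxisDefinition.format });
formatTooltipValue(0.123456);
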
diff --git a/app/assets/javascripts/monitoring/components/charts/single_stat.vue b/app/assets/javascripts/monitoring/components/charts/single_stat.vue
index 3368be4df75..225fcfda165 100644
--- a/app/assets/javascripts/monitoring/components/charts/single_stat.vue
+++ b/app/assets/javascripts/monitoring/components/charts/single_stat.vue
@@ -42,10 +42,7 @@ export default {
};
</script>
<template>
- <div class="prometheus-graph">
- <div class="prometheus-graph-header">
- <h5 ref="graphTitle" class="prometheus-graph-title">{{ graphTitle }}</h5>
- </div>
+ <div>
<gl-single-stat :value="statValue" :title="graphTitle" variant="success" />
</div>
</template>
diff --git a/app/assets/javascripts/monitoring/components/charts/stacked_column.vue b/app/assets/javascripts/monitoring/components/charts/stacked_column.vue
index 55ae4a3bdb2..56a747b9d1f 100644
--- a/app/assets/javascripts/monitoring/components/charts/stacked_column.vue
+++ b/app/assets/javascripts/monitoring/components/charts/stacked_column.vue
@@ -81,11 +81,7 @@ export default {
};
</script>
<template>
- <div v-gl-resize-observer-directive="onResize" class="prometheus-graph">
- <div class="prometheus-graph-header">
- <h5 ref="graphTitle" class="prometheus-graph-title">{{ graphData.title }}</h5>
- <div ref="graphWidgets" class="prometheus-graph-widgets"><slot></slot></div>
- </div>
+ <div v-gl-resize-observer-directive="onResize">
<gl-stacked-column-chart
ref="chart"
v-bind="$attrs"
diff --git a/app/assets/javascripts/monitoring/components/charts/time_series.vue b/app/assets/javascripts/monitoring/components/charts/time_series.vue
index d2b1e4da3fd..cba0a6da6a9 100644
--- a/app/assets/javascripts/monitoring/components/charts/time_series.vue
+++ b/app/assets/javascripts/monitoring/components/charts/time_series.vue
@@ -4,7 +4,6 @@ import { GlLink, GlButton, GlTooltip, GlResizeObserverDirective } from '@gitlab/
import { GlAreaChart, GlLineChart, GlChartSeriesLabel } from '@gitlab/ui/dist/charts';
import dateFormat from 'dateformat';
import { s__, __ } from '~/locale';
-import { roundOffFloat } from '~/lib/utils/common_utils';
import { getSvgIconPathContent } from '~/lib/utils/icon_utils';
import Icon from '~/vue_shared/components/icon.vue';
import {
@@ -16,6 +15,7 @@ import {
dateFormats,
chartColorValues,
} from '../../constants';
+import { getYAxisOptions, getChartGrid, getTooltipFormatter } from './options';
import { makeDataSeries } from '~/helpers/monitor_helper';
import { graphDataValidatorForValues } from '../../utils';
@@ -30,7 +30,7 @@ const deploymentYAxisCoords = {
max: 100,
};
-const THROTTLED_DATAZOOM_WAIT = 1000; // miliseconds
+const THROTTLED_DATAZOOM_WAIT = 1000; // milliseconds
const timestampToISODate = timestamp => new Date(timestamp).toISOString();
const events = {
@@ -112,7 +112,6 @@ export default {
isDeployment: false,
sha: '',
},
- showTitleTooltip: false,
width: 0,
height: chartHeight,
svgs: {},
@@ -166,14 +165,7 @@ export default {
const option = omit(this.option, ['series', 'yAxis', 'xAxis']);
const dataYAxis = {
- name: this.yAxisLabel,
- nameGap: 50, // same as gitlab-ui's default
- nameLocation: 'center', // same as gitlab-ui's default
- boundaryGap: [0.1, 0.1],
- scale: true,
- axisLabel: {
- formatter: num => roundOffFloat(num, 3).toString(),
- },
+ ...getYAxisOptions(this.graphData.yAxis),
...yAxis,
};
@@ -203,6 +195,7 @@ export default {
series: this.chartOptionSeries,
xAxis: timeXAxis,
yAxis: [dataYAxis, deploymentsYAxis],
+ grid: getChartGrid(),
dataZoom: [this.dataZoomConfig],
...option,
};
@@ -281,16 +274,11 @@ export default {
},
};
},
- yAxisLabel() {
- return `${this.graphData.y_label}`;
+ tooltipYFormatter() {
+ // Use same format as y-axis
+ return getTooltipFormatter({ format: this.graphData.yAxis?.format });
},
},
- mounted() {
- const graphTitleEl = this.$refs.graphTitle;
- if (graphTitleEl && graphTitleEl.scrollWidth > graphTitleEl.offsetWidth) {
- this.showTitleTooltip = true;
- }
- },
created() {
this.setSvg('rocket');
this.setSvg('scroll-handle');
@@ -320,11 +308,11 @@ export default {
this.tooltip.commitUrl = deploy.commitUrl;
} else {
const { seriesName, color, dataIndex } = dataPoint;
- const value = yVal.toFixed(3);
+
this.tooltip.content.push({
name: seriesName,
dataIndex,
- value,
+ value: this.tooltipYFormatter(yVal),
color,
});
}
@@ -387,24 +375,7 @@ export default {
</script>
<template>
- <div v-gl-resize-observer-directive="onResize" class="prometheus-graph">
- <div class="prometheus-graph-header">
- <h5
- ref="graphTitle"
- class="prometheus-graph-title js-graph-title text-truncate append-right-8"
- >
- {{ graphData.title }}
- </h5>
- <gl-tooltip :target="() => $refs.graphTitle" :disabled="!showTitleTooltip">
- {{ graphData.title }}
- </gl-tooltip>
- <div
- class="prometheus-graph-widgets js-graph-widgets flex-fill"
- data-qa-selector="prometheus_graph_widgets"
- >
- <slot></slot>
- </div>
- </div>
+ <div v-gl-resize-observer-directive="onResize">
<component
:is="glChartComponent"
ref="chart"
diff --git a/app/assets/javascripts/monitoring/components/dashboard.vue b/app/assets/javascripts/monitoring/components/dashboard.vue
index 79f32b357fc..dbfb3e97c20 100644
--- a/app/assets/javascripts/monitoring/components/dashboard.vue
+++ b/app/assets/javascripts/monitoring/components/dashboard.vue
@@ -19,7 +19,7 @@ import PanelType from 'ee_else_ce/monitoring/components/panel_type.vue';
import { s__ } from '~/locale';
import createFlash from '~/flash';
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
-import { mergeUrlParams, redirectTo } from '~/lib/utils/url_utility';
+import { mergeUrlParams, redirectTo, refreshCurrentPage } from '~/lib/utils/url_utility';
import invalidUrl from '~/lib/utils/invalid_url';
import Icon from '~/vue_shared/components/icon.vue';
import DateTimePicker from '~/vue_shared/components/date_time_picker/date_time_picker.vue';
@@ -31,7 +31,8 @@ import DashboardsDropdown from './dashboards_dropdown.vue';
import TrackEventDirective from '~/vue_shared/directives/track_event';
import { getAddMetricTrackingOptions, timeRangeToUrl, timeRangeFromUrl } from '../utils';
-import { defaultTimeRange, timeRanges, metricStates } from '../constants';
+import { metricStates } from '../constants';
+import { defaultTimeRange, timeRanges } from '~/vue_shared/constants';
export default {
components: {
@@ -351,6 +352,10 @@ export default {
};
redirectTo(mergeUrlParams(params, window.location.href));
},
+
+ refreshDashboard() {
+ refreshCurrentPage();
+ },
},
addMetric: {
title: s__('Metrics|Add metric'),
@@ -438,7 +443,7 @@ export default {
:label="s__('Metrics|Show last')"
label-size="sm"
label-for="monitor-time-window-dropdown"
- class="col-sm-6 col-md-6 col-lg-4"
+ class="col-sm-auto col-md-auto col-lg-auto"
>
<date-time-picker
ref="dateTimePicker"
@@ -449,6 +454,18 @@ export default {
/>
</gl-form-group>
+ <gl-form-group class="col-sm-2 col-md-2 col-lg-1 refresh-dashboard-button">
+ <gl-button
+ ref="refreshDashboardBtn"
+ v-gl-tooltip
+ variant="default"
+ :title="s__('Metrics|Reload this page')"
+ @click="refreshDashboard"
+ >
+ <icon name="repeat" />
+ </gl-button>
+ </gl-form-group>
+
<gl-form-group
v-if="hasHeaderButtons"
label-for="prometheus-graphs-dropdown-buttons"
@@ -468,6 +485,7 @@ export default {
ref="addMetricBtn"
v-gl-modal="$options.addMetric.modalId"
variant="outline-success"
+ data-qa-selector="add_metric_button"
class="mr-2 mt-1"
>{{ $options.addMetric.title }}</gl-button
>
@@ -522,7 +540,7 @@ export default {
<div v-if="!showEmptyState">
<graph-group
- v-for="(groupData, index) in dashboard.panel_groups"
+ v-for="(groupData, index) in dashboard.panelGroups"
:key="`${groupData.group}.${groupData.priority}`"
:name="groupData.group"
:show-panels="showPanels"
diff --git a/app/assets/javascripts/monitoring/components/embed.vue b/app/assets/javascripts/monitoring/components/embed.vue
index 49188a7af8f..6182b570e76 100644
--- a/app/assets/javascripts/monitoring/components/embed.vue
+++ b/app/assets/javascripts/monitoring/components/embed.vue
@@ -3,7 +3,8 @@ import { mapActions, mapState, mapGetters } from 'vuex';
import PanelType from 'ee_else_ce/monitoring/components/panel_type.vue';
import { convertToFixedRange } from '~/lib/utils/datetime_range';
import { timeRangeFromUrl, removeTimeRangeParams } from '../utils';
-import { sidebarAnimationDuration, defaultTimeRange } from '../constants';
+import { sidebarAnimationDuration } from '../constants';
+import { defaultTimeRange } from '~/vue_shared/constants';
let sidebarMutationObserver;
@@ -28,10 +29,10 @@ export default {
...mapState('monitoringDashboard', ['dashboard']),
...mapGetters('monitoringDashboard', ['metricsWithData']),
charts() {
- if (!this.dashboard || !this.dashboard.panel_groups) {
+ if (!this.dashboard || !this.dashboard.panelGroups) {
return [];
}
- const groupWithMetrics = this.dashboard.panel_groups.find(group =>
+ const groupWithMetrics = this.dashboard.panelGroups.find(group =>
group.panels.find(chart => this.chartHasData(chart)),
) || { panels: [] };
@@ -67,7 +68,7 @@ export default {
'setShowErrorBanner',
]),
chartHasData(chart) {
- return chart.metrics.some(metric => this.metricsWithData().includes(metric.metric_id));
+ return chart.metrics.some(metric => this.metricsWithData().includes(metric.metricId));
},
onSidebarMutation() {
setTimeout(() => {
diff --git a/app/assets/javascripts/monitoring/components/panel_type.vue b/app/assets/javascripts/monitoring/components/panel_type.vue
index 22fab1b03f2..4573ec58ab8 100644
--- a/app/assets/javascripts/monitoring/components/panel_type.vue
+++ b/app/assets/javascripts/monitoring/components/panel_type.vue
@@ -3,13 +3,15 @@ import { mapState } from 'vuex';
import { pickBy } from 'lodash';
import invalidUrl from '~/lib/utils/invalid_url';
import {
+ GlResizeObserverDirective,
GlDropdown,
GlDropdownItem,
GlModal,
GlModalDirective,
+ GlTooltip,
GlTooltipDirective,
} from '@gitlab/ui';
-import { __ } from '~/locale';
+import { __, n__ } from '~/locale';
import Icon from '~/vue_shared/components/icon.vue';
import MonitorTimeSeriesChart from './charts/time_series.vue';
import MonitorAnomalyChart from './charts/anomaly.vue';
@@ -29,11 +31,13 @@ export default {
MonitorStackedColumnChart,
MonitorEmptyChart,
Icon,
+ GlTooltip,
GlDropdown,
GlDropdownItem,
GlModal,
},
directives: {
+ GlResizeObserver: GlResizeObserverDirective,
GlModal: GlModalDirective,
GlTooltip: GlTooltipDirective,
TrackEvent: TrackEventDirective,
@@ -61,11 +65,15 @@ export default {
},
data() {
return {
+ showTitleTooltip: false,
zoomedTimeRange: null,
};
},
computed: {
...mapState('monitoringDashboard', ['deploymentData', 'projectPath', 'logsPath', 'timeRange']),
+ title() {
+ return this.graphData.title || '';
+ },
alertWidgetAvailable() {
return IS_EE && this.prometheusAlertsAvailable && this.alertsEndpoint && this.graphData;
},
@@ -97,12 +105,30 @@ export default {
const data = new Blob([this.csvText], { type: 'text/plain' });
return window.URL.createObjectURL(data);
},
- monitorChartComponent() {
+ timeChartComponent() {
if (this.isPanelType('anomaly-chart')) {
return MonitorAnomalyChart;
}
return MonitorTimeSeriesChart;
},
+ isContextualMenuShown() {
+ return (
+ this.graphDataHasMetrics &&
+ !this.isPanelType('single-stat') &&
+ !this.isPanelType('heatmap') &&
+ !this.isPanelType('column') &&
+ !this.isPanelType('stacked-column')
+ );
+ },
+ editCustomMetricLink() {
+ return this.graphData?.metrics[0].edit_path;
+ },
+ editCustomMetricLinkText() {
+ return n__('Metrics|Edit metric', 'Metrics|Edit metrics', this.graphData.metrics.length);
+ },
+ },
+ mounted() {
+ this.refreshTitleTooltip();
},
methods: {
getGraphAlerts(queries) {
@@ -119,9 +145,18 @@ export default {
showToast() {
this.$toast.show(__('Link copied'));
},
+ refreshTitleTooltip() {
+ const { graphTitle } = this.$refs;
+ this.showTitleTooltip =
+ Boolean(graphTitle) && graphTitle.scrollWidth > graphTitle.offsetWidth;
+ },
+
downloadCSVOptions,
generateLinkToChartOptions,
+ onResize() {
+ this.refreshTitleTooltip();
+ },
onDatazoom({ start, end }) {
this.zoomedTimeRange = { start, end };
},
@@ -129,88 +164,115 @@ export default {
};
</script>
<template>
- <monitor-single-stat-chart
- v-if="isPanelType('single-stat') && graphDataHasMetrics"
- :graph-data="graphData"
- />
- <monitor-heatmap-chart
- v-else-if="isPanelType('heatmap') && graphDataHasMetrics"
- :graph-data="graphData"
- />
- <monitor-column-chart
- v-else-if="isPanelType('column') && graphDataHasMetrics"
- :graph-data="graphData"
- />
- <monitor-stacked-column-chart
- v-else-if="isPanelType('stacked-column') && graphDataHasMetrics"
- :graph-data="graphData"
- />
- <component
- :is="monitorChartComponent"
- v-else-if="graphDataHasMetrics"
- ref="timeChart"
- :graph-data="graphData"
- :deployment-data="deploymentData"
- :project-path="projectPath"
- :thresholds="getGraphAlertValues(graphData.metrics)"
- :group-id="groupId"
- @datazoom="onDatazoom"
- >
- <div class="d-flex align-items-center">
- <alert-widget
- v-if="alertWidgetAvailable && graphData"
- :modal-id="`alert-modal-${index}`"
- :alerts-endpoint="alertsEndpoint"
- :relevant-queries="graphData.metrics"
- :alerts-to-manage="getGraphAlerts(graphData.metrics)"
- @setAlerts="setAlerts"
- />
- <gl-dropdown
- v-gl-tooltip
- class="ml-auto mx-3"
- toggle-class="btn btn-transparent border-0"
- data-qa-selector="prometheus_widgets_dropdown"
- :right="true"
- :no-caret="true"
- :title="__('More actions')"
+ <div v-gl-resize-observer="onResize" class="prometheus-graph">
+ <div class="prometheus-graph-header">
+ <h5
+ ref="graphTitle"
+ class="prometheus-graph-title gl-font-size-large font-weight-bold text-truncate append-right-8"
>
- <template slot="button-content">
- <icon name="ellipsis_v" class="text-secondary" />
- </template>
-
- <gl-dropdown-item
- v-if="logsPathWithTimeRange"
- ref="viewLogsLink"
- :href="logsPathWithTimeRange"
- >
- {{ s__('Metrics|View logs') }}
- </gl-dropdown-item>
+ {{ title }}
+ </h5>
+ <gl-tooltip :target="() => $refs.graphTitle" :disabled="!showTitleTooltip">
+ {{ title }}
+ </gl-tooltip>
+ <div
+ v-if="isContextualMenuShown"
+ class="prometheus-graph-widgets js-graph-widgets flex-fill"
+ data-qa-selector="prometheus_graph_widgets"
+ >
+ <div class="d-flex align-items-center">
+ <alert-widget
+ v-if="alertWidgetAvailable && graphData"
+ :modal-id="`alert-modal-${index}`"
+ :alerts-endpoint="alertsEndpoint"
+ :relevant-queries="graphData.metrics"
+ :alerts-to-manage="getGraphAlerts(graphData.metrics)"
+ @setAlerts="setAlerts"
+ />
+ <gl-dropdown
+ v-gl-tooltip
+ class="ml-auto mx-3"
+ toggle-class="btn btn-transparent border-0"
+ data-qa-selector="prometheus_widgets_dropdown"
+ right
+ no-caret
+ :title="__('More actions')"
+ >
+ <template slot="button-content">
+ <icon name="ellipsis_v" class="text-secondary" />
+ </template>
+ <gl-dropdown-item
+ v-if="editCustomMetricLink"
+ ref="editMetricLink"
+ :href="editCustomMetricLink"
+ >
+ {{ editCustomMetricLinkText }}
+ </gl-dropdown-item>
+ <gl-dropdown-item
+ v-if="logsPathWithTimeRange"
+ ref="viewLogsLink"
+ :href="logsPathWithTimeRange"
+ >
+ {{ s__('Metrics|View logs') }}
+ </gl-dropdown-item>
- <gl-dropdown-item
- v-track-event="downloadCSVOptions(graphData.title)"
- :href="downloadCsv"
- download="chart_metrics.csv"
- >
- {{ __('Download CSV') }}
- </gl-dropdown-item>
- <gl-dropdown-item
- v-if="clipboardText"
- ref="copyChartLink"
- v-track-event="generateLinkToChartOptions(clipboardText)"
- :data-clipboard-text="clipboardText"
- @click="showToast(clipboardText)"
- >
- {{ __('Generate link to chart') }}
- </gl-dropdown-item>
- <gl-dropdown-item
- v-if="alertWidgetAvailable"
- v-gl-modal="`alert-modal-${index}`"
- data-qa-selector="alert_widget_menu_item"
- >
- {{ __('Alerts') }}
- </gl-dropdown-item>
- </gl-dropdown>
+ <gl-dropdown-item
+ v-if="csvText"
+ ref="downloadCsvLink"
+ v-track-event="downloadCSVOptions(title)"
+ :href="downloadCsv"
+ download="chart_metrics.csv"
+ >
+ {{ __('Download CSV') }}
+ </gl-dropdown-item>
+ <gl-dropdown-item
+ v-if="clipboardText"
+ ref="copyChartLink"
+ v-track-event="generateLinkToChartOptions(clipboardText)"
+ :data-clipboard-text="clipboardText"
+ @click="showToast(clipboardText)"
+ >
+ {{ __('Generate link to chart') }}
+ </gl-dropdown-item>
+ <gl-dropdown-item
+ v-if="alertWidgetAvailable"
+ v-gl-modal="`alert-modal-${index}`"
+ data-qa-selector="alert_widget_menu_item"
+ >
+ {{ __('Alerts') }}
+ </gl-dropdown-item>
+ </gl-dropdown>
+ </div>
+ </div>
</div>
- </component>
- <monitor-empty-chart v-else :graph-title="graphData.title" />
+
+ <monitor-single-stat-chart
+ v-if="isPanelType('single-stat') && graphDataHasMetrics"
+ :graph-data="graphData"
+ />
+ <monitor-heatmap-chart
+ v-else-if="isPanelType('heatmap') && graphDataHasMetrics"
+ :graph-data="graphData"
+ />
+ <monitor-column-chart
+ v-else-if="isPanelType('column') && graphDataHasMetrics"
+ :graph-data="graphData"
+ />
+ <monitor-stacked-column-chart
+ v-else-if="isPanelType('stacked-column') && graphDataHasMetrics"
+ :graph-data="graphData"
+ />
+ <component
+ :is="timeChartComponent"
+ v-else-if="graphDataHasMetrics"
+ ref="timeChart"
+ :graph-data="graphData"
+ :deployment-data="deploymentData"
+ :project-path="projectPath"
+ :thresholds="getGraphAlertValues(graphData.metrics)"
+ :group-id="groupId"
+ @datazoom="onDatazoom"
+ />
+ <monitor-empty-chart v-else :graph-title="title" v-bind="$attrs" v-on="$listeners" />
+ </div>
</template>
diff --git a/app/assets/javascripts/monitoring/components/shared/prometheus_header.vue b/app/assets/javascripts/monitoring/components/shared/prometheus_header.vue
deleted file mode 100644
index ceeec51ee65..00000000000
--- a/app/assets/javascripts/monitoring/components/shared/prometheus_header.vue
+++ /dev/null
@@ -1,15 +0,0 @@
-<script>
-export default {
- props: {
- graphTitle: {
- type: String,
- required: true,
- },
- },
-};
-</script>
-<template>
- <div class="prometheus-graph-header">
- <h5 ref="title" class="prometheus-graph-title">{{ graphTitle }}</h5>
- </div>
-</template>
diff --git a/app/assets/javascripts/monitoring/constants.js b/app/assets/javascripts/monitoring/constants.js
index ddf6c9878df..cc7f5af2259 100644
--- a/app/assets/javascripts/monitoring/constants.js
+++ b/app/assets/javascripts/monitoring/constants.js
@@ -1,5 +1,3 @@
-import { __ } from '~/locale';
-
export const PROMETHEUS_TIMEOUT = 120000; // TWO_MINUTES
/**
@@ -89,37 +87,3 @@ export const dateFormats = {
timeOfDay: 'h:MM TT',
default: 'dd mmm yyyy, h:MMTT',
};
-
-export const timeRanges = [
- {
- label: __('30 minutes'),
- duration: { seconds: 60 * 30 },
- },
- {
- label: __('3 hours'),
- duration: { seconds: 60 * 60 * 3 },
- },
- {
- label: __('8 hours'),
- duration: { seconds: 60 * 60 * 8 },
- default: true,
- },
- {
- label: __('1 day'),
- duration: { seconds: 60 * 60 * 24 * 1 },
- },
- {
- label: __('3 days'),
- duration: { seconds: 60 * 60 * 24 * 3 },
- },
- {
- label: __('1 week'),
- duration: { seconds: 60 * 60 * 24 * 7 * 1 },
- },
- {
- label: __('1 month'),
- duration: { seconds: 60 * 60 * 24 * 30 },
- },
-];
-
-export const defaultTimeRange = timeRanges.find(tr => tr.default);
diff --git a/app/assets/javascripts/monitoring/stores/actions.js b/app/assets/javascripts/monitoring/stores/actions.js
index 8bb5047ef04..7d0d37c1a20 100644
--- a/app/assets/javascripts/monitoring/stores/actions.js
+++ b/app/assets/javascripts/monitoring/stores/actions.js
@@ -1,3 +1,4 @@
+import * as Sentry from '@sentry/browser';
import * as types from './mutation_types';
import axios from '~/lib/utils/axios_utils';
import createFlash from '~/flash';
@@ -50,9 +51,11 @@ export const requestMetricsDashboard = ({ commit }) => {
commit(types.REQUEST_METRICS_DATA);
};
export const receiveMetricsDashboardSuccess = ({ commit, dispatch }, { response, params }) => {
- commit(types.SET_ALL_DASHBOARDS, response.all_dashboards);
- commit(types.RECEIVE_METRICS_DATA_SUCCESS, response.dashboard);
- commit(types.SET_ENDPOINTS, convertObjectPropsToCamelCase(response.metrics_data));
+ const { all_dashboards, dashboard, metrics_data } = response;
+
+ commit(types.SET_ALL_DASHBOARDS, all_dashboards);
+ commit(types.RECEIVE_METRICS_DATA_SUCCESS, dashboard);
+ commit(types.SET_ENDPOINTS, convertObjectPropsToCamelCase(metrics_data));
return dispatch('fetchPrometheusMetrics', params);
};
@@ -76,7 +79,7 @@ export const fetchData = ({ dispatch }) => {
dispatch('fetchEnvironmentsData');
};
-export const fetchDashboard = ({ state, dispatch }) => {
+export const fetchDashboard = ({ state, commit, dispatch }) => {
dispatch('requestMetricsDashboard');
const params = {};
@@ -94,11 +97,15 @@ export const fetchDashboard = ({ state, dispatch }) => {
return backOffRequest(() => axios.get(state.dashboardEndpoint, { params }))
.then(resp => resp.data)
.then(response => dispatch('receiveMetricsDashboardSuccess', { response, params }))
- .catch(e => {
- dispatch('receiveMetricsDashboardFailure', e);
+ .catch(error => {
+ Sentry.captureException(error);
+
+ commit(types.SET_ALL_DASHBOARDS, error.response?.data?.all_dashboards ?? []);
+ dispatch('receiveMetricsDashboardFailure', error);
+
if (state.showErrorBanner) {
- if (e.response.data && e.response.data.message) {
- const { message } = e.response.data;
+ if (error.response.data && error.response.data.message) {
+ const { message } = error.response.data;
createFlash(
sprintf(
s__('Metrics|There was an error while retrieving metrics. %{message}'),
@@ -145,14 +152,16 @@ export const fetchPrometheusMetric = ({ commit }, { metric, params }) => {
step,
};
- commit(types.REQUEST_METRIC_RESULT, { metricId: metric.metric_id });
+ commit(types.REQUEST_METRIC_RESULT, { metricId: metric.metricId });
- return fetchPrometheusResult(metric.prometheus_endpoint_path, queryParams)
+ return fetchPrometheusResult(metric.prometheusEndpointPath, queryParams)
.then(result => {
- commit(types.RECEIVE_METRIC_RESULT_SUCCESS, { metricId: metric.metric_id, result });
+ commit(types.RECEIVE_METRIC_RESULT_SUCCESS, { metricId: metric.metricId, result });
})
.catch(error => {
- commit(types.RECEIVE_METRIC_RESULT_FAILURE, { metricId: metric.metric_id, error });
+ Sentry.captureException(error);
+
+ commit(types.RECEIVE_METRIC_RESULT_FAILURE, { metricId: metric.metricId, error });
// Continue to throw error so the dashboard can notify using createFlash
throw error;
});
@@ -162,7 +171,7 @@ export const fetchPrometheusMetrics = ({ state, commit, dispatch, getters }, par
commit(types.REQUEST_METRICS_DATA);
const promises = [];
- state.dashboard.panel_groups.forEach(group => {
+ state.dashboard.panelGroups.forEach(group => {
group.panels.forEach(panel => {
panel.metrics.forEach(metric => {
promises.push(dispatch('fetchPrometheusMetric', { metric, params }));
@@ -197,7 +206,8 @@ export const fetchDeploymentsData = ({ state, dispatch }) => {
dispatch('receiveDeploymentsDataSuccess', response.deployments);
})
- .catch(() => {
+ .catch(error => {
+ Sentry.captureException(error);
dispatch('receiveDeploymentsDataFailure');
createFlash(s__('Metrics|There was an error getting deployment information.'));
});
@@ -225,7 +235,8 @@ export const fetchEnvironmentsData = ({ state, dispatch }) => {
dispatch('receiveEnvironmentsDataSuccess', environments);
})
- .catch(() => {
+ .catch(err => {
+ Sentry.captureException(err);
dispatch('receiveEnvironmentsDataFailure');
createFlash(s__('Metrics|There was an error getting environments information.'));
});
@@ -254,7 +265,10 @@ export const duplicateSystemDashboard = ({ state }, payload) => {
.then(response => response.data)
.then(data => data.dashboard)
.catch(error => {
+ Sentry.captureException(error);
+
const { response } = error;
+
if (response && response.data && response.data.error) {
throw sprintf(s__('Metrics|There was an error creating the dashboard. %{error}'), {
error: response.data.error,
diff --git a/app/assets/javascripts/monitoring/stores/getters.js b/app/assets/javascripts/monitoring/stores/getters.js
index 3801149e49d..1affc6f0a76 100644
--- a/app/assets/javascripts/monitoring/stores/getters.js
+++ b/app/assets/javascripts/monitoring/stores/getters.js
@@ -11,7 +11,7 @@ const metricsIdsInPanel = panel =>
* states in all the metric in the dashboard or group.
*/
export const getMetricStates = state => groupKey => {
- let groups = state.dashboard.panel_groups;
+ let groups = state.dashboard.panelGroups;
if (groupKey) {
groups = groups.filter(group => group.key === groupKey);
}
@@ -43,7 +43,7 @@ export const getMetricStates = state => groupKey => {
* filtered by group key.
*/
export const metricsWithData = state => groupKey => {
- let groups = state.dashboard.panel_groups;
+ let groups = state.dashboard.panelGroups;
if (groupKey) {
groups = groups.filter(group => group.key === groupKey);
}
diff --git a/app/assets/javascripts/monitoring/stores/mutations.js b/app/assets/javascripts/monitoring/stores/mutations.js
index 8bd53a24b61..7aac98821c9 100644
--- a/app/assets/javascripts/monitoring/stores/mutations.js
+++ b/app/assets/javascripts/monitoring/stores/mutations.js
@@ -1,18 +1,11 @@
import Vue from 'vue';
import pick from 'lodash/pick';
-import { slugify } from '~/lib/utils/text_utility';
import * as types from './mutation_types';
-import { normalizeMetric, normalizeQueryResult } from './utils';
+import { mapToDashboardViewModel, normalizeQueryResult } from './utils';
import { BACKOFF_TIMEOUT } from '../../lib/utils/common_utils';
import { metricStates } from '../constants';
import httpStatusCodes from '~/lib/utils/http_status';
-const normalizePanelMetrics = (metrics, defaultLabel) =>
- metrics.map(metric => ({
- ...normalizeMetric(metric),
- label: metric.label || defaultLabel,
- }));
-
/**
* Locate and return a metric in the dashboard by its id
* as generated by `uniqMetricsId()`.
@@ -21,10 +14,10 @@ const normalizePanelMetrics = (metrics, defaultLabel) =>
*/
const findMetricInDashboard = (metricId, dashboard) => {
let res = null;
- dashboard.panel_groups.forEach(group => {
+ dashboard.panelGroups.forEach(group => {
group.panels.forEach(panel => {
panel.metrics.forEach(metric => {
- if (metric.metric_id === metricId) {
+ if (metric.metricId === metricId) {
res = metric;
}
});
@@ -86,27 +79,9 @@ export default {
state.showEmptyState = true;
},
[types.RECEIVE_METRICS_DATA_SUCCESS](state, dashboard) {
- state.dashboard = {
- ...dashboard,
- panel_groups: dashboard.panel_groups.map((group, i) => {
- const key = `${slugify(group.group || 'default')}-${i}`;
- let { panels = [] } = group;
-
- // each panel has metric information that needs to be normalized
- panels = panels.map(panel => ({
- ...panel,
- metrics: normalizePanelMetrics(panel.metrics, panel.y_label),
- }));
-
- return {
- ...group,
- panels,
- key,
- };
- }),
- };
+ state.dashboard = mapToDashboardViewModel(dashboard);
- if (!state.dashboard.panel_groups.length) {
+ if (!state.dashboard.panelGroups.length) {
state.emptyState = 'noData';
}
},
@@ -206,7 +181,7 @@ export default {
state.showErrorBanner = enabled;
},
[types.SET_PANEL_GROUP_METRICS](state, payload) {
- const panelGroup = state.dashboard.panel_groups.find(pg => payload.key === pg.key);
+ const panelGroup = state.dashboard.panelGroups.find(pg => payload.key === pg.key);
panelGroup.panels = payload.panels;
},
[types.SET_ENVIRONMENTS_FILTER](state, searchTerm) {
diff --git a/app/assets/javascripts/monitoring/stores/state.js b/app/assets/javascripts/monitoring/stores/state.js
index a2050f8e893..2b1907e8df7 100644
--- a/app/assets/javascripts/monitoring/stores/state.js
+++ b/app/assets/javascripts/monitoring/stores/state.js
@@ -15,7 +15,7 @@ export default () => ({
showEmptyState: true,
showErrorBanner: true,
dashboard: {
- panel_groups: [],
+ panelGroups: [],
},
allDashboards: [],
diff --git a/app/assets/javascripts/monitoring/stores/utils.js b/app/assets/javascripts/monitoring/stores/utils.js
index cd586c6af3e..066d0bf7676 100644
--- a/app/assets/javascripts/monitoring/stores/utils.js
+++ b/app/assets/javascripts/monitoring/stores/utils.js
@@ -1,5 +1,6 @@
-import { omit } from 'lodash';
+import { slugify } from '~/lib/utils/text_utility';
import createGqClient, { fetchPolicies } from '~/lib/graphql';
+import { SUPPORTED_FORMATS } from '~/lib/utils/unit_format';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
export const gqClient = createGqClient(
@@ -9,7 +10,15 @@ export const gqClient = createGqClient(
},
);
-export const uniqMetricsId = metric => `${metric.metric_id}_${metric.id}`;
+/**
+ * Metrics loaded from project-defined dashboards do not have a metric_id.
+ * This method creates a unique ID combining metric_id and id, if either is present.
+ * This is hopefully a temporary solution until the BE processes metrics before passing them to the FE.
+ * @param {Object} metric - metric object with `metric_id` and/or `id`
+ * @returns {String} - unique metric id combining `metric_id` and `id`
+ */
+// eslint-disable-next-line babel/camelcase
+export const uniqMetricsId = ({ metric_id, id }) => `${metric_id}_${id}`;
/**
* Project path has a leading slash that doesn't work well
@@ -41,22 +50,91 @@ export const parseEnvironmentsResponse = (response = [], projectPath) =>
});
/**
- * Metrics loaded from project-defined dashboards do not have a metric_id.
- * This method creates a unique ID combining metric_id and id, if either is present.
- * This is hopefully a temporary solution until BE processes metrics before passing to fE
- * @param {Object} metric - metric
- * @returns {Object} - normalized metric with a uniqueID
+ * Maps metrics to their view model
+ *
+ * This function differs from the others in that it maps all
+ * remaining, unlisted properties as-is onto the object. This is not
+ * advisable, as it could lead to unexpected side effects.
+ *
+ * Related issue:
+ * https://gitlab.com/gitlab-org/gitlab/issues/207198
+ *
+ * @param {Array} metrics - Array of prometheus metrics
+ * @param {String} defaultLabel - Default label for metrics
+ * @returns {Array}
*/
+const mapToMetricsViewModel = (metrics, defaultLabel) =>
+ metrics.map(({ label, id, metric_id, query_range, prometheus_endpoint_path, ...metric }) => ({
+ label: label || defaultLabel,
+ queryRange: query_range,
+ prometheusEndpointPath: prometheus_endpoint_path,
+ metricId: uniqMetricsId({ metric_id, id }),
+ ...metric,
+ }));
-export const normalizeMetric = (metric = {}) =>
- omit(
- {
- ...metric,
- metric_id: uniqMetricsId(metric),
- metricId: uniqMetricsId(metric),
- },
- 'id',
- );
+/**
+ * Maps an axis definition to its view model
+ *
+ * Defaults to 2-digit precision and the `number` format. It only allows
+ * formats defined in SUPPORTED_FORMATS.
+ *
+ * @param {Object} axis
+ */
+const mapToAxisViewModel = ({ name = '', format = SUPPORTED_FORMATS.number, precision = 2 }) => {
+ return {
+ name,
+ format: SUPPORTED_FORMATS[format] || SUPPORTED_FORMATS.number,
+ precision,
+ };
+};
+
+/**
+ * Maps a metrics panel to its view model
+ *
+ * @param {Object} panel - Metrics panel
+ * @returns {Object}
+ */
+const mapToPanelViewModel = ({ title = '', type, y_label, y_axis = {}, metrics = [] }) => {
+ // Both `y_axis.name` and `y_label` are supported for now
+ // https://gitlab.com/gitlab-org/gitlab/issues/208385
+ const yAxis = mapToAxisViewModel({ name: y_label, ...y_axis }); // eslint-disable-line babel/camelcase
+ return {
+ title,
+ type,
+ y_label: yAxis.name, // Changing y_label to yLabel is pending https://gitlab.com/gitlab-org/gitlab/issues/207198
+ yAxis,
+ metrics: mapToMetricsViewModel(metrics, yAxis.name),
+ };
+};
+
+/**
+ * Maps a metrics panel group to its view model
+ *
+ * @param {Object} panelGroup - Panel Group
+ * @returns {Object}
+ */
+const mapToPanelGroupViewModel = ({ group = '', panels = [] }, i) => {
+ return {
+ key: `${slugify(group || 'default')}-${i}`,
+ group,
+ panels: panels.map(mapToPanelViewModel),
+ };
+};
+
+/**
+ * Maps a dashboard JSON object to its view model
+ *
+ * @param {Object} dashboard - Dashboard object
+ * @param {String} dashboard.dashboard - Dashboard name
+ * @param {Array} dashboard.panel_groups - Panel groups array
+ * @returns {Object}
+ */
+export const mapToDashboardViewModel = ({ dashboard = '', panel_groups = [] }) => {
+ return {
+ dashboard,
+ panelGroups: panel_groups.map(mapToPanelGroupViewModel),
+ };
+};
export const normalizeQueryResult = timeSeries => {
let normalizedResult = {};
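
`mapToDashboardViewModel` is where the snake_case dashboard payload is converted into the camelCase view model that the rest of this change reads (`panelGroups`, `metricId`, `queryRange`, `prometheusEndpointPath`). A hedged sketch of input and output; the payload fields are invented for illustration:

import { mapToDashboardViewModel } from '~/monitoring/stores/utils';

// Assumed, simplified response from the dashboard endpoint
const payload = {
  dashboard: 'Environment metrics',
  panel_groups: [
    {
      group: 'System metrics (Kubernetes)',
      panels: [
        {
          title: 'Memory Usage',
          type: 'area-chart',
          y_label: 'Memory (GiB)',
          metrics: [{ id: 'metric_a1', metric_id: 17, query_range: 'avg(container_memory_usage_bytes)' }],
        },
      ],
    },
  ],
};

const viewModel = mapToDashboardViewModel(payload);
// viewModel.panelGroups[0].key                              — slugified group name plus its index
// viewModel.panelGroups[0].panels[0].yAxis.name             === 'Memory (GiB)'
// viewModel.panelGroups[0].panels[0].metrics[0].metricId    === '17_metric_a1'
// viewModel.panelGroups[0].panels[0].metrics[0].queryRange  === 'avg(container_memory_usage_bytes)'
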
diff --git a/app/assets/javascripts/monitoring/utils.js b/app/assets/javascripts/monitoring/utils.js
index b2fa44835e6..6694ae2f157 100644
--- a/app/assets/javascripts/monitoring/utils.js
+++ b/app/assets/javascripts/monitoring/utils.js
@@ -7,7 +7,7 @@ import {
/**
* This method is used to validate if the graph data format for a chart component
- * that needs a time series as a response from a prometheus query (query_range) is
+ * that needs a time series as a response from a prometheus query (queryRange) is
* of a valid format or not.
* @param {Object} graphData the graph data response from a prometheus request
* @returns {boolean} whether the graphData format is correct
diff --git a/app/assets/javascripts/notes.js b/app/assets/javascripts/notes.js
index b3b189c1114..9e2231922b7 100644
--- a/app/assets/javascripts/notes.js
+++ b/app/assets/javascripts/notes.js
@@ -1808,11 +1808,7 @@ export default class Notes {
$editingNote.removeClass('is-editing fade-in-full').addClass('being-posted fade-in-half');
$editingNote
.find('.note-headline-meta a')
- .html(
- `<i class="fa fa-spinner fa-spin" aria-label="${__(
- 'Comment is being updated',
- )}" aria-hidden="true"></i>`,
- );
+ .html('<span class="spinner align-text-bottom"></span>');
// Make request to update comment on server
axios
@@ -1825,7 +1821,7 @@ export default class Notes {
// Submission failed, revert back to original note
$noteBodyText.html(escape(cachedNoteBodyText));
$editingNote.removeClass('being-posted fade-in');
- $editingNote.find('.fa.fa-spinner').remove();
+ $editingNote.find('.spinner').remove();
// Show Flash message about failure
this.updateNoteError();
diff --git a/app/assets/javascripts/notes/components/discussion_actions.vue b/app/assets/javascripts/notes/components/discussion_actions.vue
index 8ab31ef3448..251199f1778 100644
--- a/app/assets/javascripts/notes/components/discussion_actions.vue
+++ b/app/assets/javascripts/notes/components/discussion_actions.vue
@@ -73,7 +73,7 @@ export default {
v-if="discussion.resolvable && shouldShowJumpToNextDiscussion"
class="btn-group discussion-actions ml-sm-2"
>
- <jump-to-next-discussion-button />
+ <jump-to-next-discussion-button :from-discussion-id="discussion.id" />
</div>
</div>
</template>
diff --git a/app/assets/javascripts/notes/components/discussion_counter.vue b/app/assets/javascripts/notes/components/discussion_counter.vue
index 70e22db364b..577612de06a 100644
--- a/app/assets/javascripts/notes/components/discussion_counter.vue
+++ b/app/assets/javascripts/notes/components/discussion_counter.vue
@@ -39,7 +39,11 @@ export default {
</script>
<template>
- <div v-if="resolvableDiscussionsCount > 0" class="line-resolve-all-container full-width-mobile">
+ <div
+ v-if="resolvableDiscussionsCount > 0"
+ ref="discussionCounter"
+ class="line-resolve-all-container full-width-mobile"
+ >
<div class="full-width-mobile d-flex d-sm-block">
<div :class="{ 'has-next-btn': hasNextButton }" class="line-resolve-all">
<span
@@ -73,6 +77,9 @@ export default {
v-gl-tooltip
title="Jump to next unresolved thread"
class="btn btn-default discussion-next-btn"
+ data-track-event="click_button"
+ data-track-label="mr_next_unresolved_thread"
+ data-track-property="click_next_unresolved_thread_top"
@click="jumpToNextDiscussion"
>
<icon name="comment-next" />
diff --git a/app/assets/javascripts/notes/components/discussion_jump_to_next_button.vue b/app/assets/javascripts/notes/components/discussion_jump_to_next_button.vue
index 630d4fd89b1..b71ce1b6a0a 100644
--- a/app/assets/javascripts/notes/components/discussion_jump_to_next_button.vue
+++ b/app/assets/javascripts/notes/components/discussion_jump_to_next_button.vue
@@ -12,6 +12,12 @@ export default {
GlTooltip: GlTooltipDirective,
},
mixins: [discussionNavigation],
+ props: {
+ fromDiscussionId: {
+ type: String,
+ required: true,
+ },
+ },
};
</script>
@@ -22,7 +28,10 @@ export default {
v-gl-tooltip
class="btn btn-default discussion-next-btn"
:title="s__('MergeRequests|Jump to next unresolved thread')"
- @click="jumpToNextDiscussion"
+ data-track-event="click_button"
+ data-track-label="mr_next_unresolved_thread"
+ data-track-property="click_next_unresolved_thread"
+ @click="jumpToNextRelativeDiscussion(fromDiscussionId)"
>
<icon name="comment-next" />
</button>
diff --git a/app/assets/javascripts/notes/components/discussion_resolve_button.vue b/app/assets/javascripts/notes/components/discussion_resolve_button.vue
index 2b29d710236..77f6f1e51c5 100644
--- a/app/assets/javascripts/notes/components/discussion_resolve_button.vue
+++ b/app/assets/javascripts/notes/components/discussion_resolve_button.vue
@@ -1,6 +1,11 @@
<script>
+import { GlLoadingIcon } from '@gitlab/ui';
+
export default {
name: 'ResolveDiscussionButton',
+ components: {
+ GlLoadingIcon,
+ },
props: {
isResolving: {
type: Boolean,
@@ -17,12 +22,7 @@ export default {
<template>
<button ref="button" type="button" class="btn btn-default ml-sm-2" @click="$emit('onClick')">
- <i
- v-if="isResolving"
- ref="isResolvingIcon"
- aria-hidden="true"
- class="fa fa-spinner fa-spin"
- ></i>
+ <gl-loading-icon v-if="isResolving" ref="isResolvingIcon" inline />
{{ buttonTitle }}
</button>
</template>
diff --git a/app/assets/javascripts/notes/mixins/discussion_navigation.js b/app/assets/javascripts/notes/mixins/discussion_navigation.js
index e5066695403..08c7efd69a6 100644
--- a/app/assets/javascripts/notes/mixins/discussion_navigation.js
+++ b/app/assets/javascripts/notes/mixins/discussion_navigation.js
@@ -2,6 +2,86 @@ import { mapGetters, mapActions, mapState } from 'vuex';
import { scrollToElement } from '~/lib/utils/common_utils';
import eventHub from '../../notes/event_hub';
+/**
+ * @param {string} selector
+ * @returns {boolean}
+ */
+function scrollTo(selector) {
+ const el = document.querySelector(selector);
+
+ if (el) {
+ scrollToElement(el);
+ return true;
+ }
+
+ return false;
+}
+
+/**
+ * @param {object} self Component instance with mixin applied
+ * @param {string} id Discussion id we are jumping to
+ */
+function diffsJump({ expandDiscussion }, id) {
+ const selector = `ul.notes[data-discussion-id="${id}"]`;
+ eventHub.$once('scrollToDiscussion', () => scrollTo(selector));
+ expandDiscussion({ discussionId: id });
+}
+
+/**
+ * @param {object} self Component instance with mixin applied
+ * @param {string} id Discussion id we are jumping to
+ * @returns {boolean}
+ */
+function discussionJump({ expandDiscussion }, id) {
+ const selector = `div.discussion[data-discussion-id="${id}"]`;
+ expandDiscussion({ discussionId: id });
+ return scrollTo(selector);
+}
+
+/**
+ * @param {object} self Component instance with mixin applied
+ * @param {string} id Discussion id we are jumping to
+ */
+function switchToDiscussionsTabAndJumpTo(self, id) {
+ window.mrTabs.eventHub.$once('MergeRequestTabChange', () => {
+ setTimeout(() => discussionJump(self, id), 0);
+ });
+
+ window.mrTabs.tabShown('show');
+}
+
+/**
+ * @param {object} self Component instance with mixin applied
+ * @param {object} discussion Discussion we are jumping to
+ */
+function jumpToDiscussion(self, discussion) {
+ const { id, diff_discussion: isDiffDiscussion } = discussion;
+ if (id) {
+ const activeTab = window.mrTabs.currentAction;
+
+ if (activeTab === 'diffs' && isDiffDiscussion) {
+ diffsJump(self, id);
+ } else if (activeTab === 'show') {
+ discussionJump(self, id);
+ } else {
+ switchToDiscussionsTabAndJumpTo(self, id);
+ }
+ }
+}
+
+/**
+ * @param {object} self Component instance with mixin applied
+ * @param {function} fn Function used to get the target discussion's id
+ * @param {string} [discussionId=self.currentDiscussionId] Current discussion id; null if discussions have not been traversed yet
+ */
+function handleDiscussionJump(self, fn, discussionId = self.currentDiscussionId) {
+ const isDiffView = window.mrTabs.currentAction === 'diffs';
+ const targetId = fn(discussionId, isDiffView);
+ const discussion = self.getDiscussion(targetId);
+ jumpToDiscussion(self, discussion);
+ self.setCurrentDiscussionId(targetId);
+}
+
export default {
computed: {
...mapGetters([
@@ -16,76 +96,20 @@ export default {
methods: {
...mapActions(['expandDiscussion', 'setCurrentDiscussionId']),
- diffsJump(id) {
- const selector = `ul.notes[data-discussion-id="${id}"]`;
-
- eventHub.$once('scrollToDiscussion', () => {
- const el = document.querySelector(selector);
-
- if (el) {
- scrollToElement(el);
-
- return true;
- }
-
- return false;
- });
-
- this.expandDiscussion({ discussionId: id });
- },
- discussionJump(id) {
- const selector = `div.discussion[data-discussion-id="${id}"]`;
-
- const el = document.querySelector(selector);
-
- this.expandDiscussion({ discussionId: id });
-
- if (el) {
- scrollToElement(el);
-
- return true;
- }
-
- return false;
- },
-
- switchToDiscussionsTabAndJumpTo(id) {
- window.mrTabs.eventHub.$once('MergeRequestTabChange', () => {
- setTimeout(() => this.discussionJump(id), 0);
- });
-
- window.mrTabs.tabShown('show');
- },
-
- jumpToDiscussion(discussion) {
- const { id, diff_discussion: isDiffDiscussion } = discussion;
- if (id) {
- const activeTab = window.mrTabs.currentAction;
-
- if (activeTab === 'diffs' && isDiffDiscussion) {
- this.diffsJump(id);
- } else if (activeTab === 'show') {
- this.discussionJump(id);
- } else {
- this.switchToDiscussionsTabAndJumpTo(id);
- }
- }
- },
-
jumpToNextDiscussion() {
- this.handleDiscussionJump(this.nextUnresolvedDiscussionId);
+ handleDiscussionJump(this, this.nextUnresolvedDiscussionId);
},
jumpToPreviousDiscussion() {
- this.handleDiscussionJump(this.previousUnresolvedDiscussionId);
+ handleDiscussionJump(this, this.previousUnresolvedDiscussionId);
},
- handleDiscussionJump(fn) {
- const isDiffView = window.mrTabs.currentAction === 'diffs';
- const targetId = fn(this.currentDiscussionId, isDiffView);
- const discussion = this.getDiscussion(targetId);
- this.jumpToDiscussion(discussion);
- this.setCurrentDiscussionId(targetId);
+ /**
+ * Go to the next discussion from the given discussionId
+ * @param {String} discussionId The id we are jumping from
+ */
+ jumpToNextRelativeDiscussion(discussionId) {
+ handleDiscussionJump(this, this.nextUnresolvedDiscussionId, discussionId);
},
},
};
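
After this refactor the jump logic lives in module-level helpers and the mixin only exposes thin wrappers. A sketch of the difference between the two public entry points, assuming a component with this mixin applied; the discussion id is illustrative:

// Inside a component that uses the discussionNavigation mixin:

// Jump relative to the discussion the user last navigated to
// (currentDiscussionId, which starts out null).
this.jumpToNextDiscussion();

// Jump relative to an explicit discussion, as the per-discussion
// "jump to next unresolved thread" button now does (id is made up).
this.jumpToNextRelativeDiscussion('a1b2c3d4');
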
diff --git a/app/assets/javascripts/notes/services/notes_service.js b/app/assets/javascripts/notes/services/notes_service.js
deleted file mode 100644
index 4d3dbec435f..00000000000
--- a/app/assets/javascripts/notes/services/notes_service.js
+++ /dev/null
@@ -1,41 +0,0 @@
-import axios from '~/lib/utils/axios_utils';
-import * as constants from '../constants';
-
-export default {
- fetchDiscussions(endpoint, filter, persistFilter = true) {
- const config =
- filter !== undefined
- ? { params: { notes_filter: filter, persist_filter: persistFilter } }
- : null;
- return axios.get(endpoint, config);
- },
- replyToDiscussion(endpoint, data) {
- return axios.post(endpoint, data);
- },
- updateNote(endpoint, data) {
- return axios.put(endpoint, data);
- },
- createNewNote(endpoint, data) {
- return axios.post(endpoint, data);
- },
- toggleResolveNote(endpoint, isResolved) {
- const { RESOLVE_NOTE_METHOD_NAME, UNRESOLVE_NOTE_METHOD_NAME } = constants;
- const method = isResolved ? UNRESOLVE_NOTE_METHOD_NAME : RESOLVE_NOTE_METHOD_NAME;
-
- return axios[method](endpoint);
- },
- poll(data = {}) {
- const endpoint = data.notesData.notesPath;
- const { lastFetchedAt } = data;
- const options = {
- headers: {
- 'X-Last-Fetched-At': lastFetchedAt ? `${lastFetchedAt}` : undefined,
- },
- };
-
- return axios.get(endpoint, options);
- },
- toggleIssueState(endpoint, data) {
- return axios.put(endpoint, data);
- },
-};
diff --git a/app/assets/javascripts/notes/stores/actions.js b/app/assets/javascripts/notes/stores/actions.js
index 594e3a14d56..2e6719bb4fb 100644
--- a/app/assets/javascripts/notes/stores/actions.js
+++ b/app/assets/javascripts/notes/stores/actions.js
@@ -8,7 +8,6 @@ import Poll from '../../lib/utils/poll';
import * as types from './mutation_types';
import * as utils from './utils';
import * as constants from '../constants';
-import service from '../services/notes_service';
import loadAwardsHandler from '../../awards_handler';
import sidebarTimeTrackingEventHub from '../../sidebar/event_hub';
import { isInViewport, scrollToElement, isInMRPage } from '../../lib/utils/common_utils';
@@ -47,11 +46,17 @@ export const setNotesFetchedState = ({ commit }, state) =>
export const toggleDiscussion = ({ commit }, data) => commit(types.TOGGLE_DISCUSSION, data);
-export const fetchDiscussions = ({ commit, dispatch }, { path, filter, persistFilter }) =>
- service.fetchDiscussions(path, filter, persistFilter).then(({ data }) => {
+export const fetchDiscussions = ({ commit, dispatch }, { path, filter, persistFilter }) => {
+ const config =
+ filter !== undefined
+ ? { params: { notes_filter: filter, persist_filter: persistFilter } }
+ : null;
+
+ return axios.get(path, config).then(({ data }) => {
commit(types.SET_INITIAL_DISCUSSIONS, data);
dispatch('updateResolvableDiscussionsCounts');
});
+};
export const updateDiscussion = ({ commit, state }, discussion) => {
commit(types.UPDATE_DISCUSSION, discussion);
@@ -78,7 +83,7 @@ export const deleteNote = ({ dispatch }, note) =>
});
export const updateNote = ({ commit, dispatch }, { endpoint, note }) =>
- service.updateNote(endpoint, note).then(({ data }) => {
+ axios.put(endpoint, note).then(({ data }) => {
commit(types.UPDATE_NOTE, data);
dispatch('startTaskList');
});
@@ -109,7 +114,7 @@ export const replyToDiscussion = (
{ commit, state, getters, dispatch },
{ endpoint, data: reply },
) =>
- service.replyToDiscussion(endpoint, reply).then(({ data }) => {
+ axios.post(endpoint, reply).then(({ data }) => {
if (data.discussion) {
commit(types.UPDATE_DISCUSSION, data.discussion);
@@ -126,7 +131,7 @@ export const replyToDiscussion = (
});
export const createNewNote = ({ commit, dispatch }, { endpoint, data: reply }) =>
- service.createNewNote(endpoint, reply).then(({ data }) => {
+ axios.post(endpoint, reply).then(({ data }) => {
if (!data.errors) {
commit(types.ADD_NEW_NOTE, data);
@@ -156,20 +161,24 @@ export const resolveDiscussion = ({ state, dispatch, getters }, { discussionId }
});
};
-export const toggleResolveNote = ({ commit, dispatch }, { endpoint, isResolved, discussion }) =>
- service.toggleResolveNote(endpoint, isResolved).then(({ data }) => {
- const mutationType = discussion ? types.UPDATE_DISCUSSION : types.UPDATE_NOTE;
+export const toggleResolveNote = ({ commit, dispatch }, { endpoint, isResolved, discussion }) => {
+ const method = isResolved
+ ? constants.UNRESOLVE_NOTE_METHOD_NAME
+ : constants.RESOLVE_NOTE_METHOD_NAME;
+ const mutationType = discussion ? types.UPDATE_DISCUSSION : types.UPDATE_NOTE;
+ return axios[method](endpoint).then(({ data }) => {
commit(mutationType, data);
dispatch('updateResolvableDiscussionsCounts');
dispatch('updateMergeRequestWidget');
});
+};
export const closeIssue = ({ commit, dispatch, state }) => {
dispatch('toggleStateButtonLoading', true);
- return service.toggleIssueState(state.notesData.closePath).then(({ data }) => {
+ return axios.put(state.notesData.closePath).then(({ data }) => {
commit(types.CLOSE_ISSUE);
dispatch('emitStateChangedEvent', data);
dispatch('toggleStateButtonLoading', false);
@@ -178,7 +187,7 @@ export const closeIssue = ({ commit, dispatch, state }) => {
export const reopenIssue = ({ commit, dispatch, state }) => {
dispatch('toggleStateButtonLoading', true);
- return service.toggleIssueState(state.notesData.reopenPath).then(({ data }) => {
+ return axios.put(state.notesData.reopenPath).then(({ data }) => {
commit(types.REOPEN_ISSUE);
dispatch('emitStateChangedEvent', data);
dispatch('toggleStateButtonLoading', false);
@@ -355,11 +364,35 @@ const pollSuccessCallBack = (resp, commit, state, getters, dispatch) => {
return resp;
};
+const getFetchDataParams = state => {
+ const endpoint = state.notesData.notesPath;
+ const options = {
+ headers: {
+ 'X-Last-Fetched-At': state.lastFetchedAt ? `${state.lastFetchedAt}` : undefined,
+ },
+ };
+
+ return { endpoint, options };
+};
+
+export const fetchData = ({ commit, state, getters }) => {
+ const { endpoint, options } = getFetchDataParams(state);
+
+ axios
+ .get(endpoint, options)
+ .then(({ data }) => pollSuccessCallBack(data, commit, state, getters))
+ .catch(() => Flash(__('Something went wrong while fetching latest comments.')));
+};
+
export const poll = ({ commit, state, getters, dispatch }) => {
eTagPoll = new Poll({
- resource: service,
+ resource: {
+ poll: () => {
+ const { endpoint, options } = getFetchDataParams(state);
+ return axios.get(endpoint, options);
+ },
+ },
method: 'poll',
- data: state,
successCallback: ({ data }) => pollSuccessCallBack(data, commit, state, getters, dispatch),
errorCallback: () => Flash(__('Something went wrong while fetching latest comments.')),
});
@@ -367,7 +400,7 @@ export const poll = ({ commit, state, getters, dispatch }) => {
if (!Visibility.hidden()) {
eTagPoll.makeRequest();
} else {
- service.poll(state);
+ fetchData({ commit, state, getters });
}
Visibility.change(() => {
@@ -387,18 +420,6 @@ export const restartPolling = () => {
if (eTagPoll) eTagPoll.restart();
};
-export const fetchData = ({ commit, state, getters }) => {
- const requestData = {
- endpoint: state.notesData.notesPath,
- lastFetchedAt: state.lastFetchedAt,
- };
-
- service
- .poll(requestData)
- .then(({ data }) => pollSuccessCallBack(data, commit, state, getters))
- .catch(() => Flash(__('Something went wrong while fetching latest comments.')));
-};
-
export const toggleAward = ({ commit, getters }, { awardName, noteId }) => {
commit(types.TOGGLE_AWARD, { awardName, note: getters.notesById[noteId] });
};
@@ -494,7 +515,7 @@ export const removeConvertedDiscussion = ({ commit }, noteId) =>
export const setCurrentDiscussionId = ({ commit }, discussionId) =>
commit(types.SET_CURRENT_DISCUSSION_ID, discussionId);
-export const fetchDescriptionVersion = ({ dispatch }, { endpoint, startingVersion }) => {
+export const fetchDescriptionVersion = ({ dispatch }, { endpoint, startingVersion, versionId }) => {
let requestUrl = endpoint;
if (startingVersion) {
@@ -505,7 +526,7 @@ export const fetchDescriptionVersion = ({ dispatch }, { endpoint, startingVersio
return axios
.get(requestUrl)
.then(res => {
- dispatch('receiveDescriptionVersion', res.data);
+ dispatch('receiveDescriptionVersion', { descriptionVersion: res.data, versionId });
})
.catch(error => {
dispatch('receiveDescriptionVersionError', error);
@@ -523,7 +544,10 @@ export const receiveDescriptionVersionError = ({ commit }, error) => {
commit(types.RECEIVE_DESCRIPTION_VERSION_ERROR, error);
};
-export const softDeleteDescriptionVersion = ({ dispatch }, { endpoint, startingVersion }) => {
+export const softDeleteDescriptionVersion = (
+ { dispatch },
+ { endpoint, startingVersion, versionId },
+) => {
let requestUrl = endpoint;
if (startingVersion) {
@@ -534,7 +558,7 @@ export const softDeleteDescriptionVersion = ({ dispatch }, { endpoint, startingV
return axios
.delete(requestUrl)
.then(() => {
- dispatch('receiveDeleteDescriptionVersion');
+ dispatch('receiveDeleteDescriptionVersion', versionId);
})
.catch(error => {
dispatch('receiveDeleteDescriptionVersionError', error);
@@ -545,8 +569,8 @@ export const softDeleteDescriptionVersion = ({ dispatch }, { endpoint, startingV
export const requestDeleteDescriptionVersion = ({ commit }) => {
commit(types.REQUEST_DELETE_DESCRIPTION_VERSION);
};
-export const receiveDeleteDescriptionVersion = ({ commit }) => {
- commit(types.RECEIVE_DELETE_DESCRIPTION_VERSION, __('Deleted'));
+export const receiveDeleteDescriptionVersion = ({ commit }, versionId) => {
+ commit(types.RECEIVE_DELETE_DESCRIPTION_VERSION, { [versionId]: __('Deleted') });
};
export const receiveDeleteDescriptionVersionError = ({ commit }, error) => {
commit(types.RECEIVE_DELETE_DESCRIPTION_VERSION_ERROR, error);
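
Note: the commit deletes the notes_service wrapper and inlines its axios calls into the Vuex actions. A consolidated view of the pattern, assembled from the hunks above (paths, constants, and mutation names are taken from the diff, not a full file):

import axios from '~/lib/utils/axios_utils';
import * as constants from '../constants';
import * as types from './mutation_types';

export const toggleResolveNote = ({ commit, dispatch }, { endpoint, isResolved, discussion }) => {
  // The constants hold the axios method to call, so resolving and unresolving
  // dispatch to different HTTP verbs through the same code path.
  const method = isResolved
    ? constants.UNRESOLVE_NOTE_METHOD_NAME
    : constants.RESOLVE_NOTE_METHOD_NAME;
  const mutationType = discussion ? types.UPDATE_DISCUSSION : types.UPDATE_NOTE;

  return axios[method](endpoint).then(({ data }) => {
    commit(mutationType, data);
    dispatch('updateResolvableDiscussionsCounts');
    dispatch('updateMergeRequestWidget');
  });
};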
diff --git a/app/assets/javascripts/notes/stores/getters.js b/app/assets/javascripts/notes/stores/getters.js
index 4f8ff8240b2..28cc9cdd7e9 100644
--- a/app/assets/javascripts/notes/stores/getters.js
+++ b/app/assets/javascripts/notes/stores/getters.js
@@ -28,6 +28,8 @@ export const getUserData = state => state.userData || {};
export const getUserDataByProp = state => prop => state.userData && state.userData[prop];
+export const descriptionVersions = state => state.descriptionVersions;
+
export const notesById = state =>
state.discussions.reduce((acc, note) => {
note.notes.every(n => Object.assign(acc, { [n.id]: n }));
diff --git a/app/assets/javascripts/notes/stores/modules/index.js b/app/assets/javascripts/notes/stores/modules/index.js
index 0e991f2f4f0..2d317dcd7da 100644
--- a/app/assets/javascripts/notes/stores/modules/index.js
+++ b/app/assets/javascripts/notes/stores/modules/index.js
@@ -28,7 +28,7 @@ export default () => ({
commentsDisabled: false,
resolvableDiscussionsCount: 0,
unresolvedDiscussionsCount: 0,
- descriptionVersion: null,
+ descriptionVersions: {},
},
actions,
getters,
diff --git a/app/assets/javascripts/notes/stores/mutations.js b/app/assets/javascripts/notes/stores/mutations.js
index d32a88e4c71..c23ef93c056 100644
--- a/app/assets/javascripts/notes/stores/mutations.js
+++ b/app/assets/javascripts/notes/stores/mutations.js
@@ -288,9 +288,9 @@ export default {
[types.REQUEST_DESCRIPTION_VERSION](state) {
state.isLoadingDescriptionVersion = true;
},
- [types.RECEIVE_DESCRIPTION_VERSION](state, descriptionVersion) {
- state.isLoadingDescriptionVersion = false;
- state.descriptionVersion = descriptionVersion;
+ [types.RECEIVE_DESCRIPTION_VERSION](state, { descriptionVersion, versionId }) {
+ const descriptionVersions = { ...state.descriptionVersions, [versionId]: descriptionVersion };
+ Object.assign(state, { descriptionVersions, isLoadingDescriptionVersion: false });
},
[types.RECEIVE_DESCRIPTION_VERSION_ERROR](state) {
state.isLoadingDescriptionVersion = false;
@@ -300,7 +300,7 @@ export default {
},
[types.RECEIVE_DELETE_DESCRIPTION_VERSION](state, descriptionVersion) {
state.isLoadingDescriptionVersion = false;
- state.descriptionVersion = descriptionVersion;
+ Object.assign(state.descriptionVersions, descriptionVersion);
},
[types.RECEIVE_DELETE_DESCRIPTION_VERSION_ERROR](state) {
state.isLoadingDescriptionVersion = false;
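
Note: the single `descriptionVersion` field becomes a `descriptionVersions` map keyed by version id, so fetching or soft-deleting one description diff no longer clobbers another. A minimal sketch of the keyed-map state and mutations, mirroring the hunks above:

const state = {
  descriptionVersions: {},
  isLoadingDescriptionVersion: false,
};

const mutations = {
  RECEIVE_DESCRIPTION_VERSION(state, { descriptionVersion, versionId }) {
    // Replace the map so the new entry is picked up reactively.
    state.descriptionVersions = { ...state.descriptionVersions, [versionId]: descriptionVersion };
    state.isLoadingDescriptionVersion = false;
  },
  RECEIVE_DELETE_DESCRIPTION_VERSION(state, deletedVersion) {
    // deletedVersion is `{ [versionId]: 'Deleted' }`, merged over the existing entry.
    Object.assign(state.descriptionVersions, deletedVersion);
    state.isLoadingDescriptionVersion = false;
  },
};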
diff --git a/app/assets/javascripts/notifications_dropdown.js b/app/assets/javascripts/notifications_dropdown.js
index ab87b0d973c..07e69fa297a 100644
--- a/app/assets/javascripts/notifications_dropdown.js
+++ b/app/assets/javascripts/notifications_dropdown.js
@@ -15,7 +15,7 @@ export default function notificationsDropdown() {
.parents('.notification-form')
.first();
- form.find('.js-notification-loading').toggleClass('fa-bell fa-spin fa-spinner');
+ form.find('.js-notification-loading').toggleClass('spinner');
if (form.hasClass('no-label')) {
form.find('.js-notification-loading').toggleClass('hidden');
form.find('.js-notifications-icon').toggleClass('hidden');
diff --git a/app/assets/javascripts/operation_settings/components/external_dashboard.vue b/app/assets/javascripts/operation_settings/components/external_dashboard.vue
index e90e27a402a..8b6467bc0f6 100644
--- a/app/assets/javascripts/operation_settings/components/external_dashboard.vue
+++ b/app/assets/javascripts/operation_settings/components/external_dashboard.vue
@@ -33,9 +33,9 @@ export default {
<template>
<section class="settings no-animate">
<div class="settings-header">
- <h4 class="js-section-header">
+ <h3 class="js-section-header h4">
{{ s__('ExternalMetrics|External Dashboard') }}
- </h4>
+ </h3>
<gl-button class="js-settings-toggle">{{ __('Expand') }}</gl-button>
<p class="js-section-sub-header">
{{
diff --git a/app/assets/javascripts/pages/admin/broadcast_messages/broadcast_message.js b/app/assets/javascripts/pages/admin/broadcast_messages/broadcast_message.js
index 7c2008d9edc..34a024b1b33 100644
--- a/app/assets/javascripts/pages/admin/broadcast_messages/broadcast_message.js
+++ b/app/assets/javascripts/pages/admin/broadcast_messages/broadcast_message.js
@@ -1,5 +1,5 @@
import $ from 'jquery';
-import _ from 'underscore';
+import { debounce } from 'lodash';
import axios from '~/lib/utils/axios_utils';
import flash from '~/flash';
import { __ } from '~/locale';
@@ -25,16 +25,18 @@ export default () => {
$broadcastMessageType.on('change', () => {
const $broadcastMessageColorFormGroup = $('.js-broadcast-message-background-color-form-group');
+ const $broadcastMessageDismissableFormGroup = $('.js-broadcast-message-dismissable-form-group');
const $broadcastNotificationMessagePreview = $('.js-broadcast-notification-message-preview');
$broadcastMessageColorFormGroup.toggleClass('hidden');
+ $broadcastMessageDismissableFormGroup.toggleClass('hidden');
$broadcastBannerMessagePreview.toggleClass('hidden');
$broadcastNotificationMessagePreview.toggleClass('hidden');
});
$broadcastMessage.on(
'input',
- _.debounce(function onMessageInput() {
+ debounce(function onMessageInput() {
const message = $(this).val();
if (message === '') {
$jsBroadcastMessagePreview.text(__('Your message here'));
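
Note: this file is one of several in the commit that swap whole-library underscore imports for named lodash imports (`debounce`, `escape as esc`, `isString`, `last`), which keeps the functions tree-shakeable. A minimal sketch of the replacement pattern; the DOM hooks below are hypothetical, only the import style is the point:

import { debounce, escape as esc } from 'lodash';

const input = document.querySelector('.js-message-input');     // hypothetical hook
const preview = document.querySelector('.js-message-preview'); // hypothetical hook

if (input && preview) {
  input.addEventListener(
    'input',
    debounce(e => {
      preview.innerHTML = esc(e.target.value); // escape user input before injecting markup
    }, 250),
  );
}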
diff --git a/app/assets/javascripts/pages/admin/clusters/index/index.js b/app/assets/javascripts/pages/admin/clusters/index/index.js
index 30d519d0e37..744be65bfbe 100644
--- a/app/assets/javascripts/pages/admin/clusters/index/index.js
+++ b/app/assets/javascripts/pages/admin/clusters/index/index.js
@@ -1,6 +1,8 @@
import PersistentUserCallout from '~/persistent_user_callout';
+import initClustersListApp from '~/clusters_list';
document.addEventListener('DOMContentLoaded', () => {
const callout = document.querySelector('.gcp-signup-offer');
PersistentUserCallout.factory(callout);
+ initClustersListApp();
});
diff --git a/app/assets/javascripts/pages/admin/index.js b/app/assets/javascripts/pages/admin/index.js
index 74f2eead755..3f4e658fc8d 100644
--- a/app/assets/javascripts/pages/admin/index.js
+++ b/app/assets/javascripts/pages/admin/index.js
@@ -1,5 +1,8 @@
import initAdmin from './admin';
import initAdminStatisticsPanel from '../../admin/statistics_panel/index';
+import initVueAlerts from '../../vue_alerts';
+
+document.addEventListener('DOMContentLoaded', initVueAlerts);
document.addEventListener('DOMContentLoaded', () => {
const statisticsPanelContainer = document.getElementById('js-admin-statistics-container');
diff --git a/app/assets/javascripts/pages/admin/integrations/edit/index.js b/app/assets/javascripts/pages/admin/integrations/edit/index.js
new file mode 100644
index 00000000000..2d77f2686f7
--- /dev/null
+++ b/app/assets/javascripts/pages/admin/integrations/edit/index.js
@@ -0,0 +1,16 @@
+import IntegrationSettingsForm from '~/integrations/integration_settings_form';
+import PrometheusMetrics from '~/prometheus_metrics/prometheus_metrics';
+import initAlertsSettings from '~/alerts_service_settings';
+
+document.addEventListener('DOMContentLoaded', () => {
+ const prometheusSettingsWrapper = document.querySelector('.js-prometheus-metrics-monitoring');
+ const integrationSettingsForm = new IntegrationSettingsForm('.js-integration-settings-form');
+ integrationSettingsForm.init();
+
+ if (prometheusSettingsWrapper) {
+ const prometheusMetrics = new PrometheusMetrics('.js-prometheus-metrics-monitoring');
+ prometheusMetrics.loadActiveMetrics();
+ }
+
+ initAlertsSettings(document.querySelector('.js-alerts-service-settings'));
+});
diff --git a/app/assets/javascripts/pages/admin/projects/index/components/delete_project_modal.vue b/app/assets/javascripts/pages/admin/projects/index/components/delete_project_modal.vue
index 527c16860c0..a99fde54981 100644
--- a/app/assets/javascripts/pages/admin/projects/index/components/delete_project_modal.vue
+++ b/app/assets/javascripts/pages/admin/projects/index/components/delete_project_modal.vue
@@ -1,5 +1,5 @@
<script>
-import _ from 'underscore';
+import { escape as esc } from 'lodash';
import DeprecatedModal from '~/vue_shared/components/deprecated_modal.vue';
import { s__, sprintf } from '~/locale';
@@ -34,7 +34,7 @@ export default {
return sprintf(
s__('AdminProjects|Delete Project %{projectName}?'),
{
- projectName: `'${_.escape(this.projectName)}'`,
+ projectName: `'${esc(this.projectName)}'`,
},
false,
);
@@ -46,7 +46,7 @@ export default {
and all related resources including issues, merge requests, etc.. Once you confirm and press
%{strong_start}Delete project%{strong_end}, it cannot be undone or recovered.`),
{
- projectName: `<strong>${_.escape(this.projectName)}</strong>`,
+ projectName: `<strong>${esc(this.projectName)}</strong>`,
strong_start: '<strong>',
strong_end: '</strong>',
},
@@ -57,7 +57,7 @@ export default {
return sprintf(
s__('AdminUsers|To confirm, type %{projectName}'),
{
- projectName: `<code>${_.escape(this.projectName)}</code>`,
+ projectName: `<code>${esc(this.projectName)}</code>`,
},
false,
);
diff --git a/app/assets/javascripts/pages/admin/sessions/index.js b/app/assets/javascripts/pages/admin/sessions/index.js
new file mode 100644
index 00000000000..680ebd19a9f
--- /dev/null
+++ b/app/assets/javascripts/pages/admin/sessions/index.js
@@ -0,0 +1 @@
+import '~/pages/sessions/index';
diff --git a/app/assets/javascripts/pages/admin/users/components/delete_user_modal.vue b/app/assets/javascripts/pages/admin/users/components/delete_user_modal.vue
index b43d6ba17d7..831a3ca1658 100644
--- a/app/assets/javascripts/pages/admin/users/components/delete_user_modal.vue
+++ b/app/assets/javascripts/pages/admin/users/components/delete_user_modal.vue
@@ -1,5 +1,5 @@
<script>
-import _ from 'underscore';
+import { escape as esc } from 'lodash';
import { GlModal, GlButton, GlFormInput } from '@gitlab/ui';
import { s__, sprintf } from '~/locale';
@@ -56,7 +56,7 @@ export default {
return sprintf(
this.content,
{
- username: `<strong>${_.escape(this.username)}</strong>`,
+ username: `<strong>${esc(this.username)}</strong>`,
strong_start: '<strong>',
strong_end: '</strong>',
},
@@ -67,7 +67,7 @@ export default {
return sprintf(
s__('AdminUsers|To confirm, type %{username}'),
{
- username: `<code>${_.escape(this.username)}</code>`,
+ username: `<code>${esc(this.username)}</code>`,
},
false,
);
diff --git a/app/assets/javascripts/pages/groups/clusters/index/index.js b/app/assets/javascripts/pages/groups/clusters/index/index.js
index 30d519d0e37..744be65bfbe 100644
--- a/app/assets/javascripts/pages/groups/clusters/index/index.js
+++ b/app/assets/javascripts/pages/groups/clusters/index/index.js
@@ -1,6 +1,8 @@
import PersistentUserCallout from '~/persistent_user_callout';
+import initClustersListApp from '~/clusters_list';
document.addEventListener('DOMContentLoaded', () => {
const callout = document.querySelector('.gcp-signup-offer');
PersistentUserCallout.factory(callout);
+ initClustersListApp();
});
diff --git a/app/assets/javascripts/pages/groups/new/group_path_validator.js b/app/assets/javascripts/pages/groups/new/group_path_validator.js
index f1e7ff87e5a..eeaa6527431 100644
--- a/app/assets/javascripts/pages/groups/new/group_path_validator.js
+++ b/app/assets/javascripts/pages/groups/new/group_path_validator.js
@@ -1,4 +1,4 @@
-import _ from 'underscore';
+import { debounce } from 'lodash';
import InputValidator from '~/validators/input_validator';
import fetchGroupPathAvailability from './fetch_group_path_availability';
@@ -20,7 +20,7 @@ export default class GroupPathValidator extends InputValidator {
const container = opts.container || '';
const validateElements = document.querySelectorAll(`${container} .js-validate-group-path`);
- this.debounceValidateInput = _.debounce(inputDomElement => {
+ this.debounceValidateInput = debounce(inputDomElement => {
GroupPathValidator.validateGroupPathInput(inputDomElement);
}, debounceTimeoutDuration);
diff --git a/app/assets/javascripts/pages/groups/settings/ci_cd/show/index.js b/app/assets/javascripts/pages/groups/settings/ci_cd/show/index.js
index 8a5300c9266..1ef18b356f2 100644
--- a/app/assets/javascripts/pages/groups/settings/ci_cd/show/index.js
+++ b/app/assets/javascripts/pages/groups/settings/ci_cd/show/index.js
@@ -1,17 +1,25 @@
import initSettingsPanels from '~/settings_panels';
import AjaxVariableList from '~/ci_variable_list/ajax_variable_list';
+import initVariableList from '~/ci_variable_list';
+import DueDateSelectors from '~/due_date_select';
document.addEventListener('DOMContentLoaded', () => {
// Initialize expandable settings panels
initSettingsPanels();
-
- const variableListEl = document.querySelector('.js-ci-variable-list-section');
// eslint-disable-next-line no-new
- new AjaxVariableList({
- container: variableListEl,
- saveButton: variableListEl.querySelector('.js-ci-variables-save-button'),
- errorBox: variableListEl.querySelector('.js-ci-variable-error-box'),
- saveEndpoint: variableListEl.dataset.saveEndpoint,
- maskableRegex: variableListEl.dataset.maskableRegex,
- });
+ new DueDateSelectors();
+
+ if (gon.features.newVariablesUi) {
+ initVariableList();
+ } else {
+ const variableListEl = document.querySelector('.js-ci-variable-list-section');
+ // eslint-disable-next-line no-new
+ new AjaxVariableList({
+ container: variableListEl,
+ saveButton: variableListEl.querySelector('.js-ci-variables-save-button'),
+ errorBox: variableListEl.querySelector('.js-ci-variable-error-box'),
+ saveEndpoint: variableListEl.dataset.saveEndpoint,
+ maskableRegex: variableListEl.dataset.maskableRegex,
+ });
+ }
});
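
Note: the new variables UI is rolled out behind the `gon.features.newVariablesUi` flag here and in the project-level CI/CD settings page further down. A sketch of the guard shape, assuming `gon.features` is populated server-side as in the diff; `initLegacyVariableList` is a hypothetical stand-in for the AjaxVariableList wiring shown above:

import initVariableList from '~/ci_variable_list';

document.addEventListener('DOMContentLoaded', () => {
  if (gon.features.newVariablesUi) {
    initVariableList(); // new Vue-based variables UI
  } else {
    initLegacyVariableList(); // falls back to the AjaxVariableList setup in the hunk above
  }
});

// Hypothetical helper for this sketch only; the real fallback binds
// AjaxVariableList to `.js-ci-variable-list-section` exactly as in the diff.
function initLegacyVariableList() {}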
diff --git a/app/assets/javascripts/pages/projects/blob/new/index.js b/app/assets/javascripts/pages/projects/blob/new/index.js
index 189053f3ed7..720cb249052 100644
--- a/app/assets/javascripts/pages/projects/blob/new/index.js
+++ b/app/assets/javascripts/pages/projects/blob/new/index.js
@@ -1,3 +1,12 @@
import initBlobBundle from '~/blob_edit/blob_bundle';
+import initPopover from '~/blob/suggest_gitlab_ci_yml';
-document.addEventListener('DOMContentLoaded', initBlobBundle);
+document.addEventListener('DOMContentLoaded', () => {
+ initBlobBundle();
+
+ const suggestEl = document.querySelector('.js-suggest-gitlab-ci-yml');
+
+ if (suggestEl) {
+ initPopover(suggestEl);
+ }
+});
diff --git a/app/assets/javascripts/pages/projects/blob/show/index.js b/app/assets/javascripts/pages/projects/blob/show/index.js
index caf9a8c0b64..4d308d6b07a 100644
--- a/app/assets/javascripts/pages/projects/blob/show/index.js
+++ b/app/assets/javascripts/pages/projects/blob/show/index.js
@@ -4,6 +4,7 @@ import BlobViewer from '~/blob/viewer/index';
import initBlob from '~/pages/projects/init_blob';
import GpgBadges from '~/gpg_badges';
import '~/sourcegraph/load';
+import PipelineTourSuccessModal from '~/blob/pipeline_tour_success_modal.vue';
document.addEventListener('DOMContentLoaded', () => {
new BlobViewer(); // eslint-disable-line no-new
@@ -35,4 +36,25 @@ document.addEventListener('DOMContentLoaded', () => {
// eslint-disable-next-line promise/catch-or-return
import('~/code_navigation').then(m => m.default());
}
+
+ if (gon.features?.suggestPipeline) {
+ const successPipelineEl = document.querySelector('.js-success-pipeline-modal');
+
+ if (successPipelineEl) {
+ // eslint-disable-next-line no-new
+ new Vue({
+ el: successPipelineEl,
+ render(createElement) {
+ const { commitCookie, pipelinesPath: goToPipelinesPath } = this.$el.dataset;
+
+ return createElement(PipelineTourSuccessModal, {
+ props: {
+ goToPipelinesPath,
+ commitCookie,
+ },
+ });
+ },
+ });
+ }
+ }
});
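
Note: the pipeline-tour success modal mounts only when the `suggestPipeline` flag is on and the placeholder element exists, with its props read from the element's `data-` attributes. A self-contained sketch of that mount pattern, using the selector and dataset keys from the hunk above (the `Vue` import is assumed to exist at the top of the real file):

import Vue from 'vue';
import PipelineTourSuccessModal from '~/blob/pipeline_tour_success_modal.vue';

const el = document.querySelector('.js-success-pipeline-modal');

if (gon.features?.suggestPipeline && el) {
  // eslint-disable-next-line no-new
  new Vue({
    el,
    render(createElement) {
      // Props come from the HAML-rendered data attributes on the placeholder.
      const { commitCookie, pipelinesPath: goToPipelinesPath } = el.dataset;
      return createElement(PipelineTourSuccessModal, {
        props: { commitCookie, goToPipelinesPath },
      });
    },
  });
}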
diff --git a/app/assets/javascripts/pages/projects/clusters/index/index.js b/app/assets/javascripts/pages/projects/clusters/index/index.js
index 30d519d0e37..744be65bfbe 100644
--- a/app/assets/javascripts/pages/projects/clusters/index/index.js
+++ b/app/assets/javascripts/pages/projects/clusters/index/index.js
@@ -1,6 +1,8 @@
import PersistentUserCallout from '~/persistent_user_callout';
+import initClustersListApp from '~/clusters_list';
document.addEventListener('DOMContentLoaded', () => {
const callout = document.querySelector('.gcp-signup-offer');
PersistentUserCallout.factory(callout);
+ initClustersListApp();
});
diff --git a/app/assets/javascripts/pages/projects/labels/components/promote_label_modal.vue b/app/assets/javascripts/pages/projects/labels/components/promote_label_modal.vue
index bb95f33c838..dadb20e511b 100644
--- a/app/assets/javascripts/pages/projects/labels/components/promote_label_modal.vue
+++ b/app/assets/javascripts/pages/projects/labels/components/promote_label_modal.vue
@@ -1,5 +1,5 @@
<script>
-import _ from 'underscore';
+import { escape as esc } from 'lodash';
import axios from '~/lib/utils/axios_utils';
import createFlash from '~/flash';
import DeprecatedModal2 from '~/vue_shared/components/deprecated_modal_2.vue';
@@ -48,7 +48,7 @@ export default {
const label = `<span
class="label color-label"
style="background-color: ${this.labelColor}; color: ${this.labelTextColor};"
- >${_.escape(this.labelTitle)}</span>`;
+ >${esc(this.labelTitle)}</span>`;
return sprintf(
s__('Labels|<span>Promote label</span> %{labelTitle} <span>to Group Label?</span>'),
diff --git a/app/assets/javascripts/pages/projects/logs/index.js b/app/assets/javascripts/pages/projects/logs/index.js
new file mode 100644
index 00000000000..36747069ebb
--- /dev/null
+++ b/app/assets/javascripts/pages/projects/logs/index.js
@@ -0,0 +1,3 @@
+import logsBundle from '~/logs';
+
+document.addEventListener('DOMContentLoaded', logsBundle);
diff --git a/app/assets/javascripts/pages/projects/pipeline_schedules/shared/components/interval_pattern_input.vue b/app/assets/javascripts/pages/projects/pipeline_schedules/shared/components/interval_pattern_input.vue
index bb490919a9a..3a0d9c17228 100644
--- a/app/assets/javascripts/pages/projects/pipeline_schedules/shared/components/interval_pattern_input.vue
+++ b/app/assets/javascripts/pages/projects/pipeline_schedules/shared/components/interval_pattern_input.vue
@@ -1,6 +1,4 @@
<script>
-import _ from 'underscore';
-
export default {
props: {
initialCronInterval: {
@@ -24,7 +22,7 @@ export default {
},
computed: {
intervalIsPreset() {
- return _.contains(this.cronIntervalPresets, this.cronInterval);
+ return Object.values(this.cronIntervalPresets).includes(this.cronInterval);
},
// The text input is editable when there's a custom interval, or when it's
// a preset interval and the user clicks the 'custom' radio button
diff --git a/app/assets/javascripts/pages/projects/pipelines/init_pipelines.js b/app/assets/javascripts/pages/projects/pipelines/init_pipelines.js
index ade6908c4a5..5fd3fce88aa 100644
--- a/app/assets/javascripts/pages/projects/pipelines/init_pipelines.js
+++ b/app/assets/javascripts/pages/projects/pipelines/init_pipelines.js
@@ -13,19 +13,21 @@ export default () => {
});
}
+ const pipelineTabLink = document.querySelector('.js-pipeline-tab-link a');
const { controllerAction } = document.querySelector('.js-pipeline-container').dataset;
- const pipelineStatusUrl = `${document
- .querySelector('.js-pipeline-tab-link a')
- .getAttribute('href')}/status.json`;
- // eslint-disable-next-line no-new
- new Pipelines({
- initTabs: true,
- pipelineStatusUrl,
- tabsOptions: {
- action: controllerAction,
- defaultAction: 'pipelines',
- parentEl: '.pipelines-tabs',
- },
- });
+ if (pipelineTabLink) {
+ const pipelineStatusUrl = `${pipelineTabLink.getAttribute('href')}/status.json`;
+
+ // eslint-disable-next-line no-new
+ new Pipelines({
+ initTabs: true,
+ pipelineStatusUrl,
+ tabsOptions: {
+ action: controllerAction,
+ defaultAction: 'pipelines',
+ parentEl: '.pipelines-tabs',
+ },
+ });
+ }
};
diff --git a/app/assets/javascripts/pages/projects/registry/repositories/index.js b/app/assets/javascripts/pages/projects/registry/repositories/index.js
index 47fea2be189..73469e287ed 100644
--- a/app/assets/javascripts/pages/projects/registry/repositories/index.js
+++ b/app/assets/javascripts/pages/projects/registry/repositories/index.js
@@ -3,7 +3,11 @@ import registryExplorer from '~/registry/explorer/index';
document.addEventListener('DOMContentLoaded', () => {
initRegistryImages();
- const { attachMainComponent, attachBreadcrumb } = registryExplorer();
- attachBreadcrumb();
- attachMainComponent();
+
+ const explorer = registryExplorer();
+
+ if (explorer) {
+ explorer.attachBreadcrumb();
+ explorer.attachMainComponent();
+ }
});
diff --git a/app/assets/javascripts/pages/projects/releases/show/index.js b/app/assets/javascripts/pages/projects/releases/show/index.js
new file mode 100644
index 00000000000..4e17e6ff311
--- /dev/null
+++ b/app/assets/javascripts/pages/projects/releases/show/index.js
@@ -0,0 +1,3 @@
+import initShowRelease from '~/releases/mount_show';
+
+document.addEventListener('DOMContentLoaded', initShowRelease);
diff --git a/app/assets/javascripts/pages/projects/settings/ci_cd/show/index.js b/app/assets/javascripts/pages/projects/settings/ci_cd/show/index.js
index b4aac8eea2b..7f865f4cfb6 100644
--- a/app/assets/javascripts/pages/projects/settings/ci_cd/show/index.js
+++ b/app/assets/javascripts/pages/projects/settings/ci_cd/show/index.js
@@ -2,6 +2,9 @@ import initSettingsPanels from '~/settings_panels';
import SecretValues from '~/behaviors/secret_values';
import AjaxVariableList from '~/ci_variable_list/ajax_variable_list';
import registrySettingsApp from '~/registry/settings/registry_settings_bundle';
+import initVariableList from '~/ci_variable_list';
+import DueDateSelectors from '~/due_date_select';
+import initDeployKeys from '~/deploy_keys';
document.addEventListener('DOMContentLoaded', () => {
// Initialize expandable settings panels
@@ -15,15 +18,19 @@ document.addEventListener('DOMContentLoaded', () => {
runnerTokenSecretValue.init();
}
- const variableListEl = document.querySelector('.js-ci-variable-list-section');
- // eslint-disable-next-line no-new
- new AjaxVariableList({
- container: variableListEl,
- saveButton: variableListEl.querySelector('.js-ci-variables-save-button'),
- errorBox: variableListEl.querySelector('.js-ci-variable-error-box'),
- saveEndpoint: variableListEl.dataset.saveEndpoint,
- maskableRegex: variableListEl.dataset.maskableRegex,
- });
+ if (gon.features.newVariablesUi) {
+ initVariableList();
+ } else {
+ const variableListEl = document.querySelector('.js-ci-variable-list-section');
+ // eslint-disable-next-line no-new
+ new AjaxVariableList({
+ container: variableListEl,
+ saveButton: variableListEl.querySelector('.js-ci-variables-save-button'),
+ errorBox: variableListEl.querySelector('.js-ci-variable-error-box'),
+ saveEndpoint: variableListEl.dataset.saveEndpoint,
+ maskableRegex: variableListEl.dataset.maskableRegex,
+ });
+ }
// hide extra auto devops settings based checkbox state
const autoDevOpsExtraSettings = document.querySelector('.js-extra-settings');
@@ -34,5 +41,9 @@ document.addEventListener('DOMContentLoaded', () => {
autoDevOpsExtraSettings.classList.toggle('hidden', !target.checked);
});
+ // eslint-disable-next-line no-new
+ new DueDateSelectors();
+
registrySettingsApp();
+ initDeployKeys();
});
diff --git a/app/assets/javascripts/pages/projects/settings/integrations/show/index.js b/app/assets/javascripts/pages/projects/settings/integrations/show/index.js
new file mode 100644
index 00000000000..f2cf2eb9b28
--- /dev/null
+++ b/app/assets/javascripts/pages/projects/settings/integrations/show/index.js
@@ -0,0 +1,6 @@
+import PersistentUserCallout from '~/persistent_user_callout';
+
+document.addEventListener('DOMContentLoaded', () => {
+ const callout = document.querySelector('.js-webhooks-moved-alert');
+ PersistentUserCallout.factory(callout);
+});
diff --git a/app/assets/javascripts/pages/projects/settings/operations/show/index.js b/app/assets/javascripts/pages/projects/settings/operations/show/index.js
index a32c188909c..721d4a31fe4 100644
--- a/app/assets/javascripts/pages/projects/settings/operations/show/index.js
+++ b/app/assets/javascripts/pages/projects/settings/operations/show/index.js
@@ -7,5 +7,7 @@ document.addEventListener('DOMContentLoaded', () => {
mountErrorTrackingForm();
mountOperationSettings();
mountGrafanaIntegration();
- initSettingsPanels();
+ if (!IS_EE) {
+ initSettingsPanels();
+ }
});
diff --git a/app/assets/javascripts/pages/projects/settings/repository/form.js b/app/assets/javascripts/pages/projects/settings/repository/form.js
index 3e02893f24c..fa6d17f0729 100644
--- a/app/assets/javascripts/pages/projects/settings/repository/form.js
+++ b/app/assets/javascripts/pages/projects/settings/repository/form.js
@@ -3,7 +3,6 @@
import ProtectedTagCreate from '~/protected_tags/protected_tag_create';
import ProtectedTagEditList from '~/protected_tags/protected_tag_edit_list';
import initSettingsPanels from '~/settings_panels';
-import initDeployKeys from '~/deploy_keys';
import ProtectedBranchCreate from '~/protected_branches/protected_branch_create';
import ProtectedBranchEditList from '~/protected_branches/protected_branch_edit_list';
import DueDateSelectors from '~/due_date_select';
@@ -12,7 +11,6 @@ import fileUpload from '~/lib/utils/file_upload';
export default () => {
new ProtectedTagCreate();
new ProtectedTagEditList();
- initDeployKeys();
initSettingsPanels();
new ProtectedBranchCreate();
new ProtectedBranchEditList();
diff --git a/app/assets/javascripts/pages/projects/shared/permissions/components/settings_panel.vue b/app/assets/javascripts/pages/projects/shared/permissions/components/settings_panel.vue
index 6994f83bce0..faaa65b1a16 100644
--- a/app/assets/javascripts/pages/projects/shared/permissions/components/settings_panel.vue
+++ b/app/assets/javascripts/pages/projects/shared/permissions/components/settings_panel.vue
@@ -165,6 +165,16 @@ export default {
showContainerRegistryPublicNote() {
return this.visibilityLevel === visibilityOptions.PUBLIC;
},
+
+ repositoryHelpText() {
+ if (this.visibilityLevel === visibilityOptions.PRIVATE) {
+ return s__('ProjectSettings|View and edit files in this project');
+ }
+
+ return s__(
+ 'ProjectSettings|View and edit files in this project. Non-project members will only have read access',
+ );
+ },
},
watch: {
@@ -225,6 +235,7 @@ export default {
<div>
<div class="project-visibility-setting">
<project-setting-row
+ ref="project-visibility-settings"
:help-path="visibilityHelpPath"
:label="s__('ProjectSettings|Project visibility')"
>
@@ -270,6 +281,7 @@ export default {
</div>
<div :class="{ 'highlight-changes': highlightChangesClass }" class="project-feature-settings">
<project-setting-row
+ ref="issues-settings"
:label="s__('ProjectSettings|Issues')"
:help-text="s__('ProjectSettings|Lightweight issue tracking system for this project')"
>
@@ -280,8 +292,9 @@ export default {
/>
</project-setting-row>
<project-setting-row
+ ref="repository-settings"
:label="s__('ProjectSettings|Repository')"
- :help-text="s__('ProjectSettings|View and edit files in this project')"
+ :help-text="repositoryHelpText"
>
<project-feature-setting
v-model="repositoryAccessLevel"
@@ -291,6 +304,7 @@ export default {
</project-setting-row>
<div class="project-feature-setting-group">
<project-setting-row
+ ref="merge-request-settings"
:label="s__('ProjectSettings|Merge requests')"
:help-text="s__('ProjectSettings|Submit changes to be merged upstream')"
>
@@ -302,6 +316,7 @@ export default {
/>
</project-setting-row>
<project-setting-row
+ ref="fork-settings"
:label="s__('ProjectSettings|Forks')"
:help-text="
s__('ProjectSettings|Allow users to make copies of your repository to a new project')
@@ -315,6 +330,7 @@ export default {
/>
</project-setting-row>
<project-setting-row
+ ref="pipeline-settings"
:label="s__('ProjectSettings|Pipelines')"
:help-text="s__('ProjectSettings|Build, test, and deploy your changes')"
>
@@ -327,6 +343,7 @@ export default {
</project-setting-row>
<project-setting-row
v-if="registryAvailable"
+ ref="container-registry-settings"
:help-path="registryHelpPath"
:label="s__('ProjectSettings|Container registry')"
:help-text="
@@ -348,6 +365,7 @@ export default {
</project-setting-row>
<project-setting-row
v-if="lfsAvailable"
+ ref="git-lfs-settings"
:help-path="lfsHelpPath"
:label="s__('ProjectSettings|Git Large File Storage')"
:help-text="
@@ -362,6 +380,7 @@ export default {
</project-setting-row>
<project-setting-row
v-if="packagesAvailable"
+ ref="package-settings"
:help-path="packagesHelpPath"
:label="s__('ProjectSettings|Packages')"
:help-text="
@@ -376,6 +395,7 @@ export default {
</project-setting-row>
</div>
<project-setting-row
+ ref="wiki-settings"
:label="s__('ProjectSettings|Wiki')"
:help-text="s__('ProjectSettings|Pages for project documentation')"
>
@@ -386,6 +406,7 @@ export default {
/>
</project-setting-row>
<project-setting-row
+ ref="snippet-settings"
:label="s__('ProjectSettings|Snippets')"
:help-text="s__('ProjectSettings|Share code pastes with others out of Git repository')"
>
@@ -397,6 +418,7 @@ export default {
</project-setting-row>
<project-setting-row
v-if="pagesAvailable && pagesAccessControlEnabled"
+ ref="pages-settings"
:help-path="pagesHelpPath"
:label="s__('ProjectSettings|Pages')"
:help-text="
@@ -410,7 +432,7 @@ export default {
/>
</project-setting-row>
</div>
- <project-setting-row v-if="canDisableEmails" class="mb-3">
+ <project-setting-row v-if="canDisableEmails" ref="email-settings" class="mb-3">
<label class="js-emails-disabled">
<input :value="emailsDisabled" type="hidden" name="project[emails_disabled]" />
<input v-model="emailsDisabled" type="checkbox" />
diff --git a/app/assets/javascripts/pages/projects/snippets/show/index.js b/app/assets/javascripts/pages/projects/snippets/show/index.js
index 738bf08f1bf..d8fbb851ffb 100644
--- a/app/assets/javascripts/pages/projects/snippets/show/index.js
+++ b/app/assets/javascripts/pages/projects/snippets/show/index.js
@@ -14,5 +14,6 @@ document.addEventListener('DOMContentLoaded', () => {
snippetEmbed();
} else {
initSnippetsApp();
+ initNotes();
}
});
diff --git a/app/assets/javascripts/pages/projects/wikis/components/delete_wiki_modal.vue b/app/assets/javascripts/pages/projects/wikis/components/delete_wiki_modal.vue
index 2176309ac84..6af346ace67 100644
--- a/app/assets/javascripts/pages/projects/wikis/components/delete_wiki_modal.vue
+++ b/app/assets/javascripts/pages/projects/wikis/components/delete_wiki_modal.vue
@@ -1,5 +1,5 @@
<script>
-import _ from 'underscore';
+import { escape as esc } from 'lodash';
import { GlModal, GlModalDirective } from '@gitlab/ui';
import { s__, sprintf } from '~/locale';
@@ -38,7 +38,7 @@ export default {
return sprintf(
s__('WikiPageConfirmDelete|Delete page %{pageTitle}?'),
{
- pageTitle: _.escape(this.pageTitle),
+ pageTitle: esc(this.pageTitle),
},
false,
);
diff --git a/app/assets/javascripts/pages/projects/wikis/wikis.js b/app/assets/javascripts/pages/projects/wikis/wikis.js
index 6b02a074abf..93afdc54ce1 100644
--- a/app/assets/javascripts/pages/projects/wikis/wikis.js
+++ b/app/assets/javascripts/pages/projects/wikis/wikis.js
@@ -1,6 +1,13 @@
import { GlBreakpointInstance as bp } from '@gitlab/ui/dist/utils';
import { s__, sprintf } from '~/locale';
+const MARKDOWN_LINK_TEXT = {
+ markdown: '[Link Title](page-slug)',
+ rdoc: '{Link title}[link:page-slug]',
+ asciidoc: 'link:page-slug[Link title]',
+ org: '[[page-slug]]',
+};
+
export default class Wikis {
constructor() {
this.sidebarEl = document.querySelector('.js-wiki-sidebar');
@@ -28,6 +35,15 @@ export default class Wikis {
window.addEventListener('resize', () => this.renderSidebar());
this.renderSidebar();
+
+ const changeFormatSelect = document.querySelector('#wiki_format');
+ const linkExample = document.querySelector('.js-markup-link-example');
+
+ if (changeFormatSelect) {
+ changeFormatSelect.addEventListener('change', e => {
+ linkExample.innerHTML = MARKDOWN_LINK_TEXT[e.target.value];
+ });
+ }
}
handleWikiTitleChange(e) {
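
Note: the wiki edit form now swaps the link-syntax hint when the markup format changes, using a lookup table keyed by the `#wiki_format` select value. A minimal sketch of that wiring, taken from the hunk above with an extra null check on the example element:

const MARKDOWN_LINK_TEXT = {
  markdown: '[Link Title](page-slug)',
  rdoc: '{Link title}[link:page-slug]',
  asciidoc: 'link:page-slug[Link title]',
  org: '[[page-slug]]',
};

const changeFormatSelect = document.querySelector('#wiki_format');
const linkExample = document.querySelector('.js-markup-link-example');

if (changeFormatSelect && linkExample) {
  changeFormatSelect.addEventListener('change', e => {
    linkExample.innerHTML = MARKDOWN_LINK_TEXT[e.target.value];
  });
}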
diff --git a/app/assets/javascripts/pages/registrations/new/index.js b/app/assets/javascripts/pages/registrations/new/index.js
index a33d11f3613..4b4b0555bb2 100644
--- a/app/assets/javascripts/pages/registrations/new/index.js
+++ b/app/assets/javascripts/pages/registrations/new/index.js
@@ -1,9 +1,20 @@
import LengthValidator from '~/pages/sessions/new/length_validator';
import UsernameValidator from '~/pages/sessions/new/username_validator';
import NoEmojiValidator from '~/emoji/no_emoji_validator';
+import Tracking from '~/tracking';
document.addEventListener('DOMContentLoaded', () => {
new UsernameValidator(); // eslint-disable-line no-new
new LengthValidator(); // eslint-disable-line no-new
new NoEmojiValidator(); // eslint-disable-line no-new
});
+
+document.addEventListener('SnowplowInitialized', () => {
+ if (gon.tracking_data) {
+ const { category, action } = gon.tracking_data;
+
+ if (category && action) {
+ Tracking.event(category, action);
+ }
+ }
+});
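
Note: the sign-up tracking call is deferred until a `SnowplowInitialized` custom event fires, so the event is only sent once the tracker has loaded; `gon.tracking_data` is provided by the server. A sketch of the pattern, mirroring the hunk above:

import Tracking from '~/tracking';

document.addEventListener('SnowplowInitialized', () => {
  const { category, action } = gon.tracking_data || {};

  if (category && action) {
    Tracking.event(category, action);
  }
});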
diff --git a/app/assets/javascripts/pages/sessions/new/username_validator.js b/app/assets/javascripts/pages/sessions/new/username_validator.js
index 25be71d9ed4..1048e3b4548 100644
--- a/app/assets/javascripts/pages/sessions/new/username_validator.js
+++ b/app/assets/javascripts/pages/sessions/new/username_validator.js
@@ -1,4 +1,4 @@
-import _ from 'underscore';
+import { debounce } from 'lodash';
import InputValidator from '~/validators/input_validator';
import axios from '~/lib/utils/axios_utils';
@@ -20,7 +20,7 @@ export default class UsernameValidator extends InputValidator {
const container = opts.container || '';
const validateLengthElements = document.querySelectorAll(`${container} .js-validate-username`);
- this.debounceValidateInput = _.debounce(inputDomElement => {
+ this.debounceValidateInput = debounce(inputDomElement => {
UsernameValidator.validateUsernameInput(inputDomElement);
}, debounceTimeoutDuration);
diff --git a/app/assets/javascripts/pages/snippets/show/index.js b/app/assets/javascripts/pages/snippets/show/index.js
index 6e00c14f43e..3bc9d4f957f 100644
--- a/app/assets/javascripts/pages/snippets/show/index.js
+++ b/app/assets/javascripts/pages/snippets/show/index.js
@@ -14,5 +14,6 @@ document.addEventListener('DOMContentLoaded', () => {
snippetEmbed();
} else {
initSnippetsApp();
+ initNotes();
}
});
diff --git a/app/assets/javascripts/pages/users/activity_calendar.js b/app/assets/javascripts/pages/users/activity_calendar.js
index 4f645e511f9..70e9333456d 100644
--- a/app/assets/javascripts/pages/users/activity_calendar.js
+++ b/app/assets/javascripts/pages/users/activity_calendar.js
@@ -1,5 +1,5 @@
import $ from 'jquery';
-import _ from 'underscore';
+import { last } from 'lodash';
import { scaleLinear, scaleThreshold } from 'd3-scale';
import { select } from 'd3-selection';
import dateFormat from 'dateformat';
@@ -164,11 +164,11 @@ export default class ActivityCalendar {
.enter()
.append('g')
.attr('transform', (group, i) => {
- _.each(group, (stamp, a) => {
+ group.forEach((stamp, a) => {
if (a === 0 && stamp.day === this.firstDayOfWeek) {
const month = stamp.date.getMonth();
const x = this.daySizeWithSpace * i + 1 + this.daySizeWithSpace;
- const lastMonth = _.last(this.months);
+ const lastMonth = last(this.months);
if (
lastMonth == null ||
(month !== lastMonth.month && x - this.daySizeWithSpace !== lastMonth.x)
diff --git a/app/assets/javascripts/pipelines/pipeline_details_bundle.js b/app/assets/javascripts/pipelines/pipeline_details_bundle.js
index d9192d3d76b..c901971be50 100644
--- a/app/assets/javascripts/pipelines/pipeline_details_bundle.js
+++ b/app/assets/javascripts/pipelines/pipeline_details_bundle.js
@@ -95,14 +95,14 @@ export default () => {
},
});
+ const tabsElement = document.querySelector('.pipelines-tabs');
const testReportsEnabled =
window.gon && window.gon.features && window.gon.features.junitPipelineView;
- if (testReportsEnabled) {
+ if (tabsElement && testReportsEnabled) {
const fetchReportsAction = 'fetchReports';
testReportsStore.dispatch('setEndpoint', dataset.testReportEndpoint);
- const tabsElmement = document.querySelector('.pipelines-tabs');
const isTestTabActive = Boolean(
document.querySelector('.pipelines-tabs > li > a.test-tab.active'),
);
@@ -113,11 +113,11 @@ export default () => {
const tabClickHandler = e => {
if (e.target.className === 'test-tab') {
testReportsStore.dispatch(fetchReportsAction);
- tabsElmement.removeEventListener('click', tabClickHandler);
+ tabsElement.removeEventListener('click', tabClickHandler);
}
};
- tabsElmement.addEventListener('click', tabClickHandler);
+ tabsElement.addEventListener('click', tabClickHandler);
}
// eslint-disable-next-line no-new
@@ -134,6 +134,10 @@ export default () => {
axios
.get(dataset.testReportsCountEndpoint)
.then(({ data }) => {
+ if (!data.total_count) {
+ return;
+ }
+
document.querySelector('.js-test-report-badge-counter').innerHTML = data.total_count;
})
.catch(() => {});
diff --git a/app/assets/javascripts/profile/account/components/update_username.vue b/app/assets/javascripts/profile/account/components/update_username.vue
index 72867ecd709..fa09e063552 100644
--- a/app/assets/javascripts/profile/account/components/update_username.vue
+++ b/app/assets/javascripts/profile/account/components/update_username.vue
@@ -1,5 +1,5 @@
<script>
-import _ from 'underscore';
+import { escape as esc } from 'lodash';
import axios from '~/lib/utils/axios_utils';
import DeprecatedModal2 from '~/vue_shared/components/deprecated_modal_2.vue';
import { s__, sprintf } from '~/locale';
@@ -43,10 +43,10 @@ You are going to change the username %{currentUsernameBold} to %{newUsernameBold
Profile and projects will be redirected to the %{newUsername} namespace but this redirect will expire once the %{currentUsername} namespace is registered by another user or group.
Please update your Git repository remotes as soon as possible.`),
{
- currentUsernameBold: `<strong>${_.escape(this.username)}</strong>`,
- newUsernameBold: `<strong>${_.escape(this.newUsername)}</strong>`,
- currentUsername: _.escape(this.username),
- newUsername: _.escape(this.newUsername),
+ currentUsernameBold: `<strong>${esc(this.username)}</strong>`,
+ newUsernameBold: `<strong>${esc(this.newUsername)}</strong>`,
+ currentUsername: esc(this.username),
+ newUsername: esc(this.newUsername),
},
false,
);
diff --git a/app/assets/javascripts/profile/gl_crop.js b/app/assets/javascripts/profile/gl_crop.js
index 880e1a88975..55bc9fb8955 100644
--- a/app/assets/javascripts/profile/gl_crop.js
+++ b/app/assets/javascripts/profile/gl_crop.js
@@ -2,7 +2,7 @@
import $ from 'jquery';
import 'cropper';
-import _ from 'underscore';
+import { isString } from 'lodash';
(() => {
// Matches everything but the file name
@@ -29,7 +29,7 @@ import _ from 'underscore';
this.onModalShow = this.onModalShow.bind(this);
this.onPickImageClick = this.onPickImageClick.bind(this);
this.fileInput = $(input);
- this.modalCropImg = _.isString(this.modalCropImg) ? $(this.modalCropImg) : this.modalCropImg;
+ this.modalCropImg = isString(this.modalCropImg) ? $(this.modalCropImg) : this.modalCropImg;
this.fileInput
.attr('name', `${this.fileInput.attr('name')}-trigger`)
.attr('id', `${this.fileInput.attr('id')}-trigger`);
@@ -47,9 +47,9 @@ import _ from 'underscore';
this.filename = this.getElement(filename);
this.previewImage = this.getElement(previewImage);
this.pickImageEl = this.getElement(pickImageEl);
- this.modalCrop = _.isString(modalCrop) ? $(modalCrop) : modalCrop;
- this.uploadImageBtn = _.isString(uploadImageBtn) ? $(uploadImageBtn) : uploadImageBtn;
- this.modalCropImg = _.isString(modalCropImg) ? $(modalCropImg) : modalCropImg;
+ this.modalCrop = isString(modalCrop) ? $(modalCrop) : modalCrop;
+ this.uploadImageBtn = isString(uploadImageBtn) ? $(uploadImageBtn) : uploadImageBtn;
+ this.modalCropImg = isString(modalCropImg) ? $(modalCropImg) : modalCropImg;
this.cropActionsBtn = this.modalCrop.find('[data-method]');
this.bindEvents();
}
diff --git a/app/assets/javascripts/projects/project_new.js b/app/assets/javascripts/projects/project_new.js
index 2aa5f6ec626..9cbda324aff 100644
--- a/app/assets/javascripts/projects/project_new.js
+++ b/app/assets/javascripts/projects/project_new.js
@@ -169,6 +169,10 @@ const bindEvents = () => {
text: s__('ProjectTemplates|Go Micro'),
icon: '.template-option .icon-gomicro',
},
+ gatsby: {
+ text: s__('ProjectTemplates|Pages/Gatsby'),
+ icon: '.template-option .icon-gatsby',
+ },
hugo: {
text: s__('ProjectTemplates|Pages/Hugo'),
icon: '.template-option .icon-hugo',
diff --git a/app/assets/javascripts/prometheus_metrics/prometheus_metrics.js b/app/assets/javascripts/prometheus_metrics/prometheus_metrics.js
index 8380cfb6c59..8d779e04673 100644
--- a/app/assets/javascripts/prometheus_metrics/prometheus_metrics.js
+++ b/app/assets/javascripts/prometheus_metrics/prometheus_metrics.js
@@ -1,5 +1,5 @@
import $ from 'jquery';
-import _ from 'underscore';
+import { escape } from 'lodash';
import { s__, n__, sprintf } from '~/locale';
import axios from '../lib/utils/axios_utils';
import PANEL_STATE from './constants';
@@ -69,13 +69,13 @@ export default class PrometheusMetrics {
if (metric.active_metrics > 0) {
totalExporters += 1;
this.$monitoredMetricsList.append(
- `<li>${_.escape(metric.group)}<span class="badge">${_.escape(
+ `<li>${escape(metric.group)}<span class="badge">${escape(
metric.active_metrics,
)}</span></li>`,
);
totalMonitoredMetrics += metric.active_metrics;
if (metric.metrics_missing_requirements > 0) {
- this.$missingEnvVarMetricsList.append(`<li>${_.escape(metric.group)}</li>`);
+ this.$missingEnvVarMetricsList.append(`<li>${escape(metric.group)}</li>`);
totalMissingEnvVarMetrics += 1;
}
}
diff --git a/app/assets/javascripts/registry/explorer/components/project_policy_alert.vue b/app/assets/javascripts/registry/explorer/components/project_policy_alert.vue
new file mode 100644
index 00000000000..6acf366e531
--- /dev/null
+++ b/app/assets/javascripts/registry/explorer/components/project_policy_alert.vue
@@ -0,0 +1,69 @@
+<script>
+import { GlAlert, GlSprintf, GlLink } from '@gitlab/ui';
+import { mapState } from 'vuex';
+import { approximateDuration, calculateRemainingMilliseconds } from '~/lib/utils/datetime_utility';
+import {
+ EXPIRATION_POLICY_ALERT_TITLE,
+ EXPIRATION_POLICY_ALERT_PRIMARY_BUTTON,
+ EXPIRATION_POLICY_ALERT_FULL_MESSAGE,
+ EXPIRATION_POLICY_ALERT_SHORT_MESSAGE,
+} from '../constants';
+
+export default {
+ components: {
+ GlAlert,
+ GlSprintf,
+ GlLink,
+ },
+
+ computed: {
+ ...mapState(['config', 'images', 'isLoading']),
+ isEmpty() {
+ return !this.images || this.images.length === 0;
+ },
+ showAlert() {
+ return this.config.expirationPolicy?.enabled;
+ },
+ timeTillRun() {
+ const difference = calculateRemainingMilliseconds(this.config.expirationPolicy?.next_run_at);
+ return approximateDuration(difference / 1000);
+ },
+ alertConfiguration() {
+ if (this.isEmpty || this.isLoading) {
+ return {
+ title: null,
+ primaryButton: null,
+ message: EXPIRATION_POLICY_ALERT_SHORT_MESSAGE,
+ };
+ }
+ return {
+ title: EXPIRATION_POLICY_ALERT_TITLE,
+ primaryButton: EXPIRATION_POLICY_ALERT_PRIMARY_BUTTON,
+ message: EXPIRATION_POLICY_ALERT_FULL_MESSAGE,
+ };
+ },
+ },
+};
+</script>
+
+<template>
+ <gl-alert
+ v-if="showAlert"
+ :dismissible="false"
+ :primary-button-text="alertConfiguration.primaryButton"
+ :primary-button-link="config.settingsPath"
+ :title="alertConfiguration.title"
+ class="my-2"
+ >
+ <gl-sprintf :message="alertConfiguration.message">
+ <template #days>
+ <strong>{{ timeTillRun }}</strong>
+ </template>
+ <template #link="{content}">
+ <gl-link :href="config.expirationPolicyHelpPagePath" target="_blank">
+ {{ content }}
+ </gl-link>
+ </template>
+ </gl-sprintf>
+ </gl-alert>
+</template>
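
Note: the new alert turns the expiration policy's `next_run_at` timestamp into a human-readable countdown via `calculateRemainingMilliseconds` and `approximateDuration`, both imported from the datetime utilities as in the component above. A small sketch of that computation; the timestamp is illustrative:

import { approximateDuration, calculateRemainingMilliseconds } from '~/lib/utils/datetime_utility';

const nextRunAt = '2020-03-20T12:00:00Z'; // hypothetical expiration_policy.next_run_at value
const msRemaining = calculateRemainingMilliseconds(nextRunAt);
const timeTillRun = approximateDuration(msRemaining / 1000); // helper expects seconds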
diff --git a/app/assets/javascripts/registry/explorer/constants.js b/app/assets/javascripts/registry/explorer/constants.js
index bb311157627..ef72c085972 100644
--- a/app/assets/javascripts/registry/explorer/constants.js
+++ b/app/assets/javascripts/registry/explorer/constants.js
@@ -1,18 +1,24 @@
-import { __ } from '~/locale';
+import { s__ } from '~/locale';
-export const FETCH_IMAGES_LIST_ERROR_MESSAGE = __(
- 'Something went wrong while fetching the packages list.',
+export const FETCH_IMAGES_LIST_ERROR_MESSAGE = s__(
+ 'ContainerRegistry|Something went wrong while fetching the packages list.',
);
-export const FETCH_TAGS_LIST_ERROR_MESSAGE = __(
- 'Something went wrong while fetching the tags list.',
+export const FETCH_TAGS_LIST_ERROR_MESSAGE = s__(
+ 'ContainerRegistry|Something went wrong while fetching the tags list.',
);
-export const DELETE_IMAGE_ERROR_MESSAGE = __('Something went wrong while deleting the image.');
-export const DELETE_IMAGE_SUCCESS_MESSAGE = __('Image deleted successfully');
-export const DELETE_TAG_ERROR_MESSAGE = __('Something went wrong while deleting the tag.');
-export const DELETE_TAG_SUCCESS_MESSAGE = __('Tag deleted successfully');
-export const DELETE_TAGS_ERROR_MESSAGE = __('Something went wrong while deleting the tags.');
-export const DELETE_TAGS_SUCCESS_MESSAGE = __('Tags deleted successfully');
+export const DELETE_IMAGE_ERROR_MESSAGE = s__(
+ 'ContainerRegistry|Something went wrong while deleting the image.',
+);
+export const DELETE_IMAGE_SUCCESS_MESSAGE = s__('ContainerRegistry|Image deleted successfully');
+export const DELETE_TAG_ERROR_MESSAGE = s__(
+ 'ContainerRegistry|Something went wrong while deleting the tag.',
+);
+export const DELETE_TAG_SUCCESS_MESSAGE = s__('ContainerRegistry|Tag deleted successfully');
+export const DELETE_TAGS_ERROR_MESSAGE = s__(
+ 'ContainerRegistry|Something went wrong while deleting the tags.',
+);
+export const DELETE_TAGS_SUCCESS_MESSAGE = s__('ContainerRegistry|Tags deleted successfully');
export const DEFAULT_PAGE = 1;
export const DEFAULT_PAGE_SIZE = 10;
@@ -26,7 +32,18 @@ export const LIST_KEY_LAST_UPDATED = 'created_at';
export const LIST_KEY_ACTIONS = 'actions';
export const LIST_KEY_CHECKBOX = 'checkbox';
-export const LIST_LABEL_TAG = __('Tag');
-export const LIST_LABEL_IMAGE_ID = __('Image ID');
-export const LIST_LABEL_SIZE = __('Size');
-export const LIST_LABEL_LAST_UPDATED = __('Last Updated');
+export const LIST_LABEL_TAG = s__('ContainerRegistry|Tag');
+export const LIST_LABEL_IMAGE_ID = s__('ContainerRegistry|Image ID');
+export const LIST_LABEL_SIZE = s__('ContainerRegistry|Size');
+export const LIST_LABEL_LAST_UPDATED = s__('ContainerRegistry|Last Updated');
+
+export const EXPIRATION_POLICY_ALERT_TITLE = s__(
+ 'ContainerRegistry|Retention policy has been Enabled',
+);
+export const EXPIRATION_POLICY_ALERT_PRIMARY_BUTTON = s__('ContainerRegistry|Edit Settings');
+export const EXPIRATION_POLICY_ALERT_FULL_MESSAGE = s__(
+ 'ContainerRegistry|The retention and expiration policy for this Container Registry has been enabled and will run in %{days}. For more information visit the %{linkStart}documentation%{linkEnd}',
+);
+export const EXPIRATION_POLICY_ALERT_SHORT_MESSAGE = s__(
+ 'ContainerRegistry|The retention and expiration policy for this Container Registry has been enabled. For more information visit the %{linkStart}documentation%{linkEnd}',
+);
diff --git a/app/assets/javascripts/registry/explorer/pages/details.vue b/app/assets/javascripts/registry/explorer/pages/details.vue
index bc613db8672..88e437b16d9 100644
--- a/app/assets/javascripts/registry/explorer/pages/details.vue
+++ b/app/assets/javascripts/registry/explorer/pages/details.vue
@@ -1,5 +1,5 @@
<script>
-import { mapState, mapActions } from 'vuex';
+import { mapState, mapActions, mapGetters } from 'vuex';
import {
GlTable,
GlFormCheckbox,
@@ -8,10 +8,10 @@ import {
GlTooltipDirective,
GlPagination,
GlModal,
- GlLoadingIcon,
GlSprintf,
GlEmptyState,
GlResizeObserverDirective,
+ GlSkeletonLoader,
} from '@gitlab/ui';
import { GlBreakpointInstance } from '@gitlab/ui/dist/utils';
import { n__, s__ } from '~/locale';
@@ -42,7 +42,7 @@ export default {
ClipboardButton,
GlPagination,
GlModal,
- GlLoadingIcon,
+ GlSkeletonLoader,
GlSprintf,
GlEmptyState,
},
@@ -51,6 +51,11 @@ export default {
GlResizeObserver: GlResizeObserverDirective,
},
mixins: [timeagoMixin, Tracking.mixin()],
+ loader: {
+ repeat: 10,
+ width: 1000,
+ height: 40,
+ },
data() {
return {
selectedItems: [],
@@ -61,15 +66,16 @@ export default {
};
},
computed: {
- ...mapState(['tags', 'tagsPagination', 'isLoading', 'config']),
+ ...mapGetters(['tags']),
+ ...mapState(['tagsPagination', 'isLoading', 'config']),
imageName() {
const { name } = decodeAndParse(this.$route.params.id);
return name;
},
fields() {
return [
- { key: LIST_KEY_CHECKBOX, label: '' },
- { key: LIST_KEY_TAG, label: LIST_LABEL_TAG },
+ { key: LIST_KEY_CHECKBOX, label: '', class: 'gl-w-16' },
+ { key: LIST_KEY_TAG, label: LIST_LABEL_TAG, class: 'w-25' },
{ key: LIST_KEY_IMAGE_ID, label: LIST_LABEL_IMAGE_ID },
{ key: LIST_KEY_SIZE, label: LIST_LABEL_SIZE },
{ key: LIST_KEY_LAST_UPDATED, label: LIST_LABEL_LAST_UPDATED },
@@ -96,7 +102,7 @@ export default {
return this.tagsPagination.page;
},
set(page) {
- this.requestTagsList({ pagination: { page }, id: this.$route.params.id });
+ this.requestTagsList({ pagination: { page }, params: this.$route.params.id });
},
},
},
@@ -199,10 +205,7 @@ export default {
</script>
<template>
- <div
- v-gl-resize-observer="handleResize"
- class="my-3 position-absolute w-100 slide-enter-to-element"
- >
+ <div v-gl-resize-observer="handleResize" class="my-3 w-100 slide-enter-to-element">
<div class="d-flex my-3 align-items-center">
<h4>
<gl-sprintf :message="s__('ContainerRegistry|%{imageName} tags')">
@@ -212,122 +215,142 @@ export default {
</gl-sprintf>
</h4>
</div>
- <gl-loading-icon v-if="isLoading" />
- <template v-else-if="tags.length > 0">
- <gl-table :items="tags" :fields="fields" :stacked="!isDesktop">
- <template v-if="isDesktop" #head(checkbox)>
- <gl-form-checkbox
- ref="mainCheckbox"
- :checked="selectAllChecked"
- @change="onSelectAllChange"
- />
- </template>
- <template #head(actions)>
- <gl-button
- ref="bulkDeleteButton"
- v-gl-tooltip
- :disabled="!selectedItems || selectedItems.length === 0"
- class="float-right"
- variant="danger"
- :title="s__('ContainerRegistry|Remove selected tags')"
- :aria-label="s__('ContainerRegistry|Remove selected tags')"
- @click="deleteMultipleItems()"
- >
- <gl-icon name="remove" />
- </gl-button>
- </template>
- <template #cell(checkbox)="{index}">
- <gl-form-checkbox
- ref="rowCheckbox"
- class="js-row-checkbox"
- :checked="selectedItems.includes(index)"
- @change="updateSelectedItems(index)"
- />
- </template>
- <template #cell(name)="{item}">
- <span ref="rowName">
- {{ item.name }}
- </span>
- <clipboard-button
- v-if="item.location"
- ref="rowClipboardButton"
- :title="item.location"
- :text="item.location"
- css-class="btn-default btn-transparent btn-clipboard"
- />
- </template>
- <template #cell(short_revision)="{value}">
- <span ref="rowShortRevision">
- {{ value }}
- </span>
- </template>
- <template #cell(total_size)="{item}">
- <span ref="rowSize">
- {{ formatSize(item.total_size) }}
- <template v-if="item.total_size && item.layers">
- &middot;
- </template>
- {{ layers(item.layers) }}
- </span>
- </template>
- <template #cell(created_at)="{value}">
- <span ref="rowTime">
- {{ timeFormatted(value) }}
- </span>
- </template>
- <template #cell(actions)="{index, item}">
- <gl-button
- ref="singleDeleteButton"
- :title="s__('ContainerRegistry|Remove tag')"
- :aria-label="s__('ContainerRegistry|Remove tag')"
- :disabled="!item.destroy_path"
- variant="danger"
- :class="['js-delete-registry float-right btn-inverted btn-border-color btn-icon']"
- @click="deleteSingleItem(index)"
+ <gl-table :items="tags" :fields="fields" :stacked="!isDesktop" show-empty>
+ <template v-if="isDesktop" #head(checkbox)>
+ <gl-form-checkbox
+ ref="mainCheckbox"
+ :checked="selectAllChecked"
+ @change="onSelectAllChange"
+ />
+ </template>
+ <template #head(actions)>
+ <gl-button
+ ref="bulkDeleteButton"
+ v-gl-tooltip
+ :disabled="!selectedItems || selectedItems.length === 0"
+ class="float-right"
+ variant="danger"
+ :title="s__('ContainerRegistry|Remove selected tags')"
+ :aria-label="s__('ContainerRegistry|Remove selected tags')"
+ @click="deleteMultipleItems()"
+ >
+ <gl-icon name="remove" />
+ </gl-button>
+ </template>
+
+ <template #cell(checkbox)="{index}">
+ <gl-form-checkbox
+ ref="rowCheckbox"
+ class="js-row-checkbox"
+ :checked="selectedItems.includes(index)"
+ @change="updateSelectedItems(index)"
+ />
+ </template>
+ <template #cell(name)="{item}">
+ <span ref="rowName">
+ {{ item.name }}
+ </span>
+ <clipboard-button
+ v-if="item.location"
+ ref="rowClipboardButton"
+ :title="item.location"
+ :text="item.location"
+ css-class="btn-default btn-transparent btn-clipboard"
+ />
+ </template>
+ <template #cell(short_revision)="{value}">
+ <span ref="rowShortRevision">
+ {{ value }}
+ </span>
+ </template>
+ <template #cell(total_size)="{item}">
+ <span ref="rowSize">
+ {{ formatSize(item.total_size) }}
+ <template v-if="item.total_size && item.layers">
+ &middot;
+ </template>
+ {{ layers(item.layers) }}
+ </span>
+ </template>
+ <template #cell(created_at)="{value}">
+ <span ref="rowTime">
+ {{ timeFormatted(value) }}
+ </span>
+ </template>
+ <template #cell(actions)="{index, item}">
+ <gl-button
+ ref="singleDeleteButton"
+ :title="s__('ContainerRegistry|Remove tag')"
+ :aria-label="s__('ContainerRegistry|Remove tag')"
+ :disabled="!item.destroy_path"
+ variant="danger"
+ class="js-delete-registry float-right btn-inverted btn-border-color btn-icon"
+ @click="deleteSingleItem(index)"
+ >
+ <gl-icon name="remove" />
+ </gl-button>
+ </template>
+
+ <template #empty>
+ <template v-if="isLoading">
+ <gl-skeleton-loader
+ v-for="index in $options.loader.repeat"
+ :key="index"
+ :width="$options.loader.width"
+ :height="$options.loader.height"
+ preserve-aspect-ratio="xMinYMax meet"
>
- <gl-icon name="remove" />
- </gl-button>
+ <rect width="15" x="0" y="12.5" height="15" rx="4" />
+ <rect width="250" x="25" y="10" height="20" rx="4" />
+ <circle cx="290" cy="20" r="10" />
+ <rect width="100" x="315" y="10" height="20" rx="4" />
+ <rect width="100" x="500" y="10" height="20" rx="4" />
+ <rect width="100" x="630" y="10" height="20" rx="4" />
+ <rect x="960" y="0" width="40" height="40" rx="4" />
+ </gl-skeleton-loader>
</template>
- </gl-table>
- <gl-pagination
- ref="pagination"
- v-model="currentPage"
- :per-page="tagsPagination.perPage"
- :total-items="tagsPagination.total"
- align="center"
- class="w-100"
- />
- <gl-modal
- ref="deleteModal"
- modal-id="delete-tag-modal"
- ok-variant="danger"
- @ok="onDeletionConfirmed"
- @cancel="track('cancel_delete')"
- >
- <template #modal-title>{{ modalAction }}</template>
- <template #modal-ok>{{ modalAction }}</template>
- <p v-if="modalDescription">
- <gl-sprintf :message="modalDescription.message">
- <template #item>
- <b>{{ modalDescription.item }}</b>
- </template>
- </gl-sprintf>
- </p>
- </gl-modal>
- </template>
- <gl-empty-state
- v-else
- :title="s__('ContainerRegistry|This image has no active tags')"
- :svg-path="config.noContainersImage"
- :description="
- s__(
- `ContainerRegistry|The last tag related to this image was recently removed.
+ <gl-empty-state
+ v-else
+ :title="s__('ContainerRegistry|This image has no active tags')"
+ :svg-path="config.noContainersImage"
+ :description="
+ s__(
+ `ContainerRegistry|The last tag related to this image was recently removed.
This empty image and any associated data will be automatically removed as part of the regular Garbage Collection process.
If you have any questions, contact your administrator.`,
- )
- "
- class="mx-auto my-0"
+ )
+ "
+ class="mx-auto my-0"
+ />
+ </template>
+ </gl-table>
+
+ <gl-pagination
+ ref="pagination"
+ v-model="currentPage"
+ :per-page="tagsPagination.perPage"
+ :total-items="tagsPagination.total"
+ align="center"
+ class="w-100"
/>
+
+ <gl-modal
+ ref="deleteModal"
+ modal-id="delete-tag-modal"
+ ok-variant="danger"
+ @ok="onDeletionConfirmed"
+ @cancel="track('cancel_delete')"
+ >
+ <template #modal-title>{{ modalAction }}</template>
+ <template #modal-ok>{{ modalAction }}</template>
+ <p v-if="modalDescription">
+ <gl-sprintf :message="modalDescription.message">
+ <template #item>
+ <b>{{ modalDescription.item }}</b>
+ </template>
+ </gl-sprintf>
+ </p>
+ </gl-modal>
</div>
</template>
diff --git a/app/assets/javascripts/registry/explorer/pages/index.vue b/app/assets/javascripts/registry/explorer/pages/index.vue
index deefbfc40e0..19ae3bee640 100644
--- a/app/assets/javascripts/registry/explorer/pages/index.vue
+++ b/app/assets/javascripts/registry/explorer/pages/index.vue
@@ -3,7 +3,7 @@ export default {};
</script>
<template>
- <div class="position-relative">
+ <div>
<transition name="slide">
<router-view />
</transition>
diff --git a/app/assets/javascripts/registry/explorer/pages/list.vue b/app/assets/javascripts/registry/explorer/pages/list.vue
index 1dbc7cc2242..c6ba06cd68c 100644
--- a/app/assets/javascripts/registry/explorer/pages/list.vue
+++ b/app/assets/javascripts/registry/explorer/pages/list.vue
@@ -1,7 +1,6 @@
<script>
import { mapState, mapActions } from 'vuex';
import {
- GlLoadingIcon,
GlEmptyState,
GlPagination,
GlTooltipDirective,
@@ -10,31 +9,39 @@ import {
GlModal,
GlSprintf,
GlLink,
+ GlSkeletonLoader,
} from '@gitlab/ui';
import Tracking from '~/tracking';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import ProjectEmptyState from '../components/project_empty_state.vue';
import GroupEmptyState from '../components/group_empty_state.vue';
+import ProjectPolicyAlert from '../components/project_policy_alert.vue';
export default {
name: 'RegistryListApp',
components: {
GlEmptyState,
- GlLoadingIcon,
GlPagination,
ProjectEmptyState,
GroupEmptyState,
+ ProjectPolicyAlert,
ClipboardButton,
GlButton,
GlIcon,
GlModal,
GlSprintf,
GlLink,
+ GlSkeletonLoader,
},
directives: {
GlTooltip: GlTooltipDirective,
},
mixins: [Tracking.mixin()],
+ loader: {
+ repeat: 10,
+ width: 1000,
+ height: 40,
+ },
data() {
return {
itemToDelete: {},
@@ -78,7 +85,9 @@ export default {
</script>
<template>
- <div class="position-absolute w-100 slide-enter-from-element">
+ <div class="w-100 slide-enter-from-element">
+ <project-policy-alert v-if="!config.isGroupPage" />
+
<gl-empty-state
v-if="config.characterError"
:title="s__('ContainerRegistry|Docker connection error')"
@@ -104,74 +113,81 @@ export default {
</gl-empty-state>
<template v-else>
- <gl-loading-icon v-if="isLoading" size="md" class="prepend-top-16" />
-
- <template v-else>
- <div v-if="images.length" ref="imagesList">
- <h4>{{ s__('ContainerRegistry|Container Registry') }}</h4>
- <p>
- <gl-sprintf
- :message="
- s__(`ContainerRegistry|With the Docker Container Registry integrated into GitLab, every
+ <div>
+ <h4>{{ s__('ContainerRegistry|Container Registry') }}</h4>
+ <p>
+ <gl-sprintf
+ :message="
+ s__(`ContainerRegistry|With the Docker Container Registry integrated into GitLab, every
project can have its own space to store its Docker images.
%{docLinkStart}More Information%{docLinkEnd}`)
- "
- >
- <template #docLink="{content}">
- <gl-link :href="config.helpPagePath" target="_blank">
- {{ content }}
- </gl-link>
- </template>
- </gl-sprintf>
- </p>
+ "
+ >
+ <template #docLink="{content}">
+ <gl-link :href="config.helpPagePath" target="_blank">
+ {{ content }}
+ </gl-link>
+ </template>
+ </gl-sprintf>
+ </p>
+ </div>
- <div class="d-flex flex-column">
+ <div v-if="isLoading" class="mt-2">
+ <gl-skeleton-loader
+ v-for="index in $options.loader.repeat"
+ :key="index"
+ :width="$options.loader.width"
+ :height="$options.loader.height"
+ preserve-aspect-ratio="xMinYMax meet"
+ >
+ <rect width="500" x="10" y="10" height="20" rx="4" />
+ <circle cx="525" cy="20" r="10" />
+ <rect x="960" y="0" width="40" height="40" rx="4" />
+ </gl-skeleton-loader>
+ </div>
+ <template v-else>
+ <div v-if="images.length" ref="imagesList" class="d-flex flex-column">
+ <div
+ v-for="(listItem, index) in images"
+ :key="index"
+ ref="rowItem"
+ :class="{ 'border-top': index === 0 }"
+ class="d-flex justify-content-between align-items-center py-2 border-bottom"
+ >
+ <div>
+ <router-link
+ ref="detailsLink"
+ :to="{ name: 'details', params: { id: encodeListItem(listItem) } }"
+ >
+ {{ listItem.path }}
+ </router-link>
+ <clipboard-button
+ v-if="listItem.location"
+ ref="clipboardButton"
+ :text="listItem.location"
+ :title="listItem.location"
+ css-class="btn-default btn-transparent btn-clipboard"
+ />
+ </div>
<div
- v-for="(listItem, index) in images"
- :key="index"
- ref="rowItem"
- :class="[
- 'd-flex justify-content-between align-items-center py-2 border-bottom',
- { 'border-top': index === 0 },
- ]"
+ v-gl-tooltip="{ disabled: listItem.destroy_path }"
+ class="d-none d-sm-block"
+ :title="
+ s__('ContainerRegistry|Missing or insufficient permission, delete button disabled')
+ "
>
- <div>
- <router-link
- ref="detailsLink"
- :to="{ name: 'details', params: { id: encodeListItem(listItem) } }"
- >
- {{ listItem.path }}
- </router-link>
- <clipboard-button
- v-if="listItem.location"
- ref="clipboardButton"
- :text="listItem.location"
- :title="listItem.location"
- css-class="btn-default btn-transparent btn-clipboard"
- />
- </div>
- <div
- v-gl-tooltip="{ disabled: listItem.destroy_path }"
- class="d-none d-sm-block"
- :title="
- s__(
- 'ContainerRegistry|Missing or insufficient permission, delete button disabled',
- )
- "
+ <gl-button
+ ref="deleteImageButton"
+ v-gl-tooltip
+ :disabled="!listItem.destroy_path"
+ :title="s__('ContainerRegistry|Remove repository')"
+ :aria-label="s__('ContainerRegistry|Remove repository')"
+ class="btn-inverted"
+ variant="danger"
+ @click="deleteImage(listItem)"
>
- <gl-button
- ref="deleteImageButton"
- v-gl-tooltip
- :disabled="!listItem.destroy_path"
- :title="s__('ContainerRegistry|Remove repository')"
- :aria-label="s__('ContainerRegistry|Remove repository')"
- class="btn-inverted"
- variant="danger"
- @click="deleteImage(listItem)"
- >
- <gl-icon name="remove" />
- </gl-button>
- </div>
+ <gl-icon name="remove" />
+ </gl-button>
</div>
</div>
<gl-pagination
@@ -182,6 +198,7 @@ export default {
class="w-100 mt-2"
/>
</div>
+
<template v-else>
<project-empty-state v-if="!config.isGroupPage" />
<group-empty-state v-else />
diff --git a/app/assets/javascripts/registry/explorer/stores/actions.js b/app/assets/javascripts/registry/explorer/stores/actions.js
index 86d00d4fca9..7d8201949f4 100644
--- a/app/assets/javascripts/registry/explorer/stores/actions.js
+++ b/app/assets/javascripts/registry/explorer/stores/actions.js
@@ -68,31 +68,28 @@ export const requestDeleteTag = ({ commit, dispatch, state }, { tag, params }) =
.delete(tag.destroy_path)
.then(() => {
createFlash(DELETE_TAG_SUCCESS_MESSAGE, 'success');
- dispatch('requestTagsList', { pagination: state.tagsPagination, params });
+ return dispatch('requestTagsList', { pagination: state.tagsPagination, params });
})
.catch(() => {
createFlash(DELETE_TAG_ERROR_MESSAGE);
- })
- .finally(() => {
commit(types.SET_MAIN_LOADING, false);
});
};
export const requestDeleteTags = ({ commit, dispatch, state }, { ids, params }) => {
commit(types.SET_MAIN_LOADING, true);
- const { id } = decodeAndParse(params);
- const url = `/${state.config.projectPath}/registry/repository/${id}/tags/bulk_destroy`;
+ const { tags_path } = decodeAndParse(params);
+
+ const url = tags_path.replace('?format=json', '/bulk_destroy');
return axios
.delete(url, { params: { ids } })
.then(() => {
createFlash(DELETE_TAGS_SUCCESS_MESSAGE, 'success');
- dispatch('requestTagsList', { pagination: state.tagsPagination, params });
+ return dispatch('requestTagsList', { pagination: state.tagsPagination, params });
})
.catch(() => {
createFlash(DELETE_TAGS_ERROR_MESSAGE);
- })
- .finally(() => {
commit(types.SET_MAIN_LOADING, false);
});
};
diff --git a/app/assets/javascripts/registry/explorer/stores/getters.js b/app/assets/javascripts/registry/explorer/stores/getters.js
new file mode 100644
index 00000000000..5619b73d495
--- /dev/null
+++ b/app/assets/javascripts/registry/explorer/stores/getters.js
@@ -0,0 +1,6 @@
+// eslint-disable-next-line import/prefer-default-export
+export const tags = state => {
+ // To show the loader inside the table we need to pass an empty array to gl-table whenever the table is loading.
+ // This takes into account isLoading = true and state.tags = [1, 2, 3] during pagination and delete.
+ return state.isLoading ? [] : state.tags;
+};
diff --git a/app/assets/javascripts/registry/explorer/stores/index.js b/app/assets/javascripts/registry/explorer/stores/index.js
index 91a35aac149..b3ff2e6e002 100644
--- a/app/assets/javascripts/registry/explorer/stores/index.js
+++ b/app/assets/javascripts/registry/explorer/stores/index.js
@@ -1,6 +1,7 @@
import Vue from 'vue';
import Vuex from 'vuex';
import * as actions from './actions';
+import * as getters from './getters';
import mutations from './mutations';
import state from './state';
@@ -9,6 +10,7 @@ Vue.use(Vuex);
export const createStore = () =>
new Vuex.Store({
state,
+ getters,
actions,
mutations,
});
diff --git a/app/assets/javascripts/registry/explorer/stores/mutations.js b/app/assets/javascripts/registry/explorer/stores/mutations.js
index a2c6a11de20..6055efcbd46 100644
--- a/app/assets/javascripts/registry/explorer/stores/mutations.js
+++ b/app/assets/javascripts/registry/explorer/stores/mutations.js
@@ -5,6 +5,7 @@ export default {
[types.SET_INITIAL_STATE](state, config) {
state.config = {
...config,
+ expirationPolicy: config.expirationPolicy ? JSON.parse(config.expirationPolicy) : undefined,
isGroupPage: config.isGroupPage !== undefined,
};
},
diff --git a/app/assets/javascripts/registry/settings/components/registry_settings_app.vue b/app/assets/javascripts/registry/settings/components/registry_settings_app.vue
index 87e65d354bb..4d767f1a578 100644
--- a/app/assets/javascripts/registry/settings/components/registry_settings_app.vue
+++ b/app/assets/javascripts/registry/settings/components/registry_settings_app.vue
@@ -18,14 +18,23 @@ export default {
unavailableFeatureText: s__(
'ContainerRegistry|Currently, the Container Registry tag expiration feature is not available for projects created before GitLab version 12.8. For updates and more information, visit Issue %{linkStart}#196124%{linkEnd}',
),
+ fetchSettingsErrorText: FETCH_SETTINGS_ERROR_MESSAGE,
+ },
+ data() {
+ return {
+ fetchSettingsError: false,
+ };
},
computed: {
...mapState(['isDisabled']),
+ showSettingForm() {
+ return !this.isDisabled && !this.fetchSettingsError;
+ },
},
mounted() {
- this.fetchSettings().catch(() =>
- this.$toast.show(FETCH_SETTINGS_ERROR_MESSAGE, { type: 'error' }),
- );
+ this.fetchSettings().catch(() => {
+ this.fetchSettingsError = true;
+ });
},
methods: {
...mapActions(['fetchSettings']),
@@ -48,17 +57,22 @@ export default {
}}
</li>
</ul>
- <settings-form v-if="!isDisabled" />
- <gl-alert v-else :dismissible="false">
- <p>
- <gl-sprintf :message="$options.i18n.unavailableFeatureText">
- <template #link="{content}">
- <gl-link href="https://gitlab.com/gitlab-org/gitlab/issues/196124" target="_blank">
- {{ content }}
- </gl-link>
- </template>
- </gl-sprintf>
- </p>
- </gl-alert>
+ <settings-form v-if="showSettingForm" />
+ <template v-else>
+ <gl-alert v-if="isDisabled" :dismissible="false">
+ <p>
+ <gl-sprintf :message="$options.i18n.unavailableFeatureText">
+ <template #link="{content}">
+ <gl-link href="https://gitlab.com/gitlab-org/gitlab/issues/196124" target="_blank">
+ {{ content }}
+ </gl-link>
+ </template>
+ </gl-sprintf>
+ </p>
+ </gl-alert>
+ <gl-alert v-else-if="fetchSettingsError" variant="warning" :dismissible="false">
+ <gl-sprintf :message="$options.i18n.fetchSettingsErrorText" />
+ </gl-alert>
+ </template>
</div>
</template>
diff --git a/app/assets/javascripts/related_merge_requests/components/related_merge_requests.vue b/app/assets/javascripts/related_merge_requests/components/related_merge_requests.vue
index dc7c9d9f174..05803ba09ab 100644
--- a/app/assets/javascripts/related_merge_requests/components/related_merge_requests.vue
+++ b/app/assets/javascripts/related_merge_requests/components/related_merge_requests.vue
@@ -1,6 +1,6 @@
<script>
import { mapState, mapActions } from 'vuex';
-import { GlLoadingIcon } from '@gitlab/ui';
+import { GlLink, GlLoadingIcon } from '@gitlab/ui';
import { sprintf, n__, s__ } from '~/locale';
import Icon from '~/vue_shared/components/icon.vue';
import RelatedIssuableItem from '~/vue_shared/components/issue/related_issuable_item.vue';
@@ -10,6 +10,7 @@ export default {
name: 'RelatedMergeRequests',
components: {
Icon,
+ GlLink,
GlLoadingIcon,
RelatedIssuableItem,
},
@@ -64,10 +65,19 @@ export default {
</script>
<template>
- <div v-if="isFetchingMergeRequests || (!isFetchingMergeRequests && totalCount)">
+ <div
+ v-if="isFetchingMergeRequests || (!isFetchingMergeRequests && totalCount)"
+ id="related-merge-requests"
+ >
<div id="merge-requests" class="card card-slim mt-3">
<div class="card-header">
- <div class="card-title mt-0 mb-0 h5 merge-requests-title">
+ <div class="card-title mt-0 mb-0 h5 merge-requests-title position-relative">
+ <gl-link
+ id="user-content-related-merge-requests"
+ class="anchor position-absolute text-decoration-none"
+ href="#related-merge-requests"
+ aria-hidden="true"
+ />
<span class="mr-1">
{{ __('Related merge requests') }}
</span>
diff --git a/app/assets/javascripts/releases/components/app_edit.vue b/app/assets/javascripts/releases/components/app_edit.vue
index bdc2b3abb8c..6f4baaa5d74 100644
--- a/app/assets/javascripts/releases/components/app_edit.vue
+++ b/app/assets/javascripts/releases/components/app_edit.vue
@@ -1,10 +1,12 @@
<script>
import { mapState, mapActions } from 'vuex';
-import { GlButton, GlFormInput, GlFormGroup } from '@gitlab/ui';
-import _ from 'underscore';
+import { GlButton, GlLink, GlFormInput, GlFormGroup } from '@gitlab/ui';
+import { escape as esc } from 'lodash';
import { __, sprintf } from '~/locale';
import MarkdownField from '~/vue_shared/components/markdown/field.vue';
import autofocusonshow from '~/vue_shared/directives/autofocusonshow';
+import { BACK_URL_PARAM } from '~/releases/constants';
+import { getParameterByName } from '~/lib/utils/common_utils';
export default {
name: 'ReleaseEditApp',
@@ -12,6 +14,7 @@ export default {
GlFormInput,
GlFormGroup,
GlButton,
+ GlLink,
MarkdownField,
},
directives: {
@@ -50,7 +53,7 @@ export default {
'Changing a Release tag is only supported via Releases API. %{linkStart}More information%{linkEnd}',
),
{
- linkStart: `<a href="${_.escape(
+ linkStart: `<a href="${esc(
this.updateReleaseApiDocsPath,
)}" target="_blank" rel="noopener noreferrer">`,
linkEnd: '</a>',
@@ -74,6 +77,9 @@ export default {
this.updateReleaseNotes(notes);
},
},
+ cancelPath() {
+ return getParameterByName(BACK_URL_PARAM) || this.releasesPagePath;
+ },
},
created() {
this.fetchRelease();
@@ -84,7 +90,6 @@ export default {
'updateRelease',
'updateReleaseTitle',
'updateReleaseNotes',
- 'navigateToReleasesPage',
]),
},
};
@@ -157,15 +162,9 @@ export default {
>
{{ __('Save changes') }}
</gl-button>
- <gl-button
- class="js-cancel-button"
- variant="default"
- type="button"
- :aria-label="__('Cancel')"
- @click="navigateToReleasesPage()"
- >
+ <gl-link :href="cancelPath" class="js-cancel-button btn btn-default">
{{ __('Cancel') }}
- </gl-button>
+ </gl-link>
</div>
</form>
</div>
diff --git a/app/assets/javascripts/releases/components/app_index.vue b/app/assets/javascripts/releases/components/app_index.vue
index f602c9fdda2..511b3cda9c8 100644
--- a/app/assets/javascripts/releases/components/app_index.vue
+++ b/app/assets/javascripts/releases/components/app_index.vue
@@ -1,11 +1,12 @@
<script>
import { mapState, mapActions } from 'vuex';
-import { GlSkeletonLoading, GlEmptyState } from '@gitlab/ui';
+import { GlSkeletonLoading, GlEmptyState, GlLink } from '@gitlab/ui';
import {
getParameterByName,
historyPushState,
buildUrlWithCurrentLocation,
} from '~/lib/utils/common_utils';
+import { __ } from '~/locale';
import TablePagination from '~/vue_shared/components/pagination/table_pagination.vue';
import ReleaseBlock from './release_block.vue';
@@ -16,13 +17,14 @@ export default {
GlEmptyState,
ReleaseBlock,
TablePagination,
+ GlLink,
},
props: {
projectId: {
type: String,
required: true,
},
- documentationLink: {
+ documentationPath: {
type: String,
required: true,
},
@@ -30,6 +32,11 @@ export default {
type: String,
required: true,
},
+ newReleasePath: {
+ type: String,
+ required: false,
+ default: '',
+ },
},
computed: {
...mapState('list', ['isLoading', 'releases', 'hasError', 'pageInfo']),
@@ -39,6 +46,11 @@ export default {
shouldRenderSuccessState() {
return this.releases.length && !this.isLoading && !this.hasError;
},
+ emptyStateText() {
+ return __(
+ "Releases are based on Git tags and mark specific points in a project's development history. They can contain information about the type of changes and can also deliver binaries, like compiled versions of your software.",
+ );
+ },
},
created() {
this.fetchReleases({
@@ -56,7 +68,16 @@ export default {
};
</script>
<template>
- <div class="prepend-top-default">
+ <div class="flex flex-column mt-2">
+ <gl-link
+ v-if="newReleasePath"
+ :href="newReleasePath"
+ :aria-describedby="shouldRenderEmptyState && 'releases-description'"
+ class="btn btn-success align-self-end mb-2 js-new-release-btn"
+ >
+ {{ __('New release') }}
+ </gl-link>
+
<gl-skeleton-loading v-if="isLoading" class="js-loading" />
<gl-empty-state
@@ -64,19 +85,25 @@ export default {
class="js-empty-state"
:title="__('Getting started with releases')"
:svg-path="illustrationPath"
- :description="
- __(
- 'Releases are based on Git tags and mark specific points in a project\'s development history. They can contain information about the type of changes and can also deliver binaries, like compiled versions of your software.',
- )
- "
- :primary-button-link="documentationLink"
- :primary-button-text="__('Open Documentation')"
- />
+ >
+ <template #description>
+ <span id="releases-description">
+ {{ emptyStateText }}
+ <gl-link
+ :href="documentationPath"
+ :aria-label="__('Releases documentation')"
+ target="_blank"
+ >
+ {{ __('More information') }}
+ </gl-link>
+ </span>
+ </template>
+ </gl-empty-state>
<div v-else-if="shouldRenderSuccessState" class="js-success-state">
<release-block
v-for="(release, index) in releases"
- :key="release.tag_name"
+ :key="release.tagName"
:release="release"
:class="{ 'linked-card': releases.length > 1 && index !== releases.length - 1 }"
/>
diff --git a/app/assets/javascripts/releases/components/app_show.vue b/app/assets/javascripts/releases/components/app_show.vue
new file mode 100644
index 00000000000..d521edcc361
--- /dev/null
+++ b/app/assets/javascripts/releases/components/app_show.vue
@@ -0,0 +1,29 @@
+<script>
+import { mapState, mapActions } from 'vuex';
+import { GlSkeletonLoading } from '@gitlab/ui';
+import ReleaseBlock from './release_block.vue';
+
+export default {
+ name: 'ReleaseShowApp',
+ components: {
+ GlSkeletonLoading,
+ ReleaseBlock,
+ },
+ computed: {
+ ...mapState('detail', ['isFetchingRelease', 'fetchError', 'release']),
+ },
+ created() {
+ this.fetchRelease();
+ },
+ methods: {
+ ...mapActions('detail', ['fetchRelease']),
+ },
+};
+</script>
+<template>
+ <div class="prepend-top-default">
+ <gl-skeleton-loading v-if="isFetchingRelease" />
+
+ <release-block v-else-if="!fetchError" :release="release" />
+ </div>
+</template>
diff --git a/app/assets/javascripts/releases/components/evidence_block.vue b/app/assets/javascripts/releases/components/evidence_block.vue
index d9abd195fee..0c51fffc96c 100644
--- a/app/assets/javascripts/releases/components/evidence_block.vue
+++ b/app/assets/javascripts/releases/components/evidence_block.vue
@@ -25,16 +25,16 @@ export default {
},
computed: {
evidenceTitle() {
- return sprintf(__('%{tag}-evidence.json'), { tag: this.release.tag_name });
+ return sprintf(__('%{tag}-evidence.json'), { tag: this.release.tagName });
},
evidenceUrl() {
- return this.release.assets && this.release.assets.evidence_file_path;
+ return this.release.assets && this.release.assets.evidenceFilePath;
},
shortSha() {
return truncateSha(this.sha);
},
sha() {
- return this.release.evidence_sha;
+ return this.release.evidenceSha;
},
},
};
@@ -67,7 +67,7 @@ export default {
</template>
</expand-button>
<clipboard-button
- :title="__('Copy commit SHA')"
+ :title="__('Copy evidence SHA')"
:text="sha"
css-class="btn-default btn-transparent btn-clipboard"
/>
diff --git a/app/assets/javascripts/releases/components/release_block.vue b/app/assets/javascripts/releases/components/release_block.vue
index e6bb5325120..61cd22dc161 100644
--- a/app/assets/javascripts/releases/components/release_block.vue
+++ b/app/assets/javascripts/releases/components/release_block.vue
@@ -1,5 +1,5 @@
<script>
-import _ from 'underscore';
+import { isEmpty } from 'lodash';
import $ from 'jquery';
import { slugify } from '~/lib/utils/text_utility';
import { getLocationHash } from '~/lib/utils/url_utility';
@@ -38,13 +38,13 @@ export default {
},
computed: {
id() {
- return slugify(this.release.tag_name);
+ return slugify(this.release.tagName);
},
assets() {
return this.release.assets || {};
},
hasEvidence() {
- return Boolean(this.release.evidence_sha);
+ return Boolean(this.release.evidenceSha);
},
milestones() {
return this.release.milestones || [];
@@ -64,7 +64,7 @@ export default {
return !this.glFeatures.releaseIssueSummary;
},
shouldRenderMilestoneInfo() {
- return Boolean(this.glFeatures.releaseIssueSummary && !_.isEmpty(this.release.milestones));
+ return Boolean(this.glFeatures.releaseIssueSummary && !isEmpty(this.release.milestones));
},
},
@@ -93,7 +93,10 @@ export default {
<release-block-header :release="release" />
<div class="card-body">
<div v-if="shouldRenderMilestoneInfo">
- <release-block-milestone-info :milestones="milestones" />
+ <release-block-milestone-info
+ :milestones="milestones"
+ :open-issues-path="release._links.issuesUrl"
+ />
<hr class="mb-3 mt-0" />
</div>
@@ -102,7 +105,7 @@ export default {
<evidence-block v-if="hasEvidence && shouldShowEvidence" :release="release" />
<div ref="gfm-content" class="card-text prepend-top-default">
- <div v-html="release.description_html"></div>
+ <div v-html="release.descriptionHtml"></div>
</div>
</div>
@@ -110,11 +113,11 @@ export default {
v-if="shouldShowFooter"
class="card-footer"
:commit="release.commit"
- :commit-path="release.commit_path"
- :tag-name="release.tag_name"
- :tag-path="release.tag_path"
+ :commit-path="release.commitPath"
+ :tag-name="release.tagName"
+ :tag-path="release.tagPath"
:author="release.author"
- :released-at="release.released_at"
+ :released-at="release.releasedAt"
/>
</div>
</template>
diff --git a/app/assets/javascripts/releases/components/release_block_assets.vue b/app/assets/javascripts/releases/components/release_block_assets.vue
index 06b7f97a8de..f4b92416e47 100644
--- a/app/assets/javascripts/releases/components/release_block_assets.vue
+++ b/app/assets/javascripts/releases/components/release_block_assets.vue
@@ -34,7 +34,7 @@ export default {
<ul v-if="assets.links.length" class="pl-0 mb-0 prepend-top-8 list-unstyled js-assets-list">
<li v-for="link in assets.links" :key="link.name" class="append-bottom-8">
- <gl-link v-gl-tooltip.bottom :title="__('Download asset')" :href="link.url">
+ <gl-link v-gl-tooltip.bottom :title="__('Download asset')" :href="link.directAssetUrl">
<icon name="package" class="align-middle append-right-4 align-text-bottom" />
{{ link.name }}
<span v-if="link.external">{{ __('(external source)') }}</span>
diff --git a/app/assets/javascripts/releases/components/release_block_author.vue b/app/assets/javascripts/releases/components/release_block_author.vue
index e7075d4d67a..0432d45b2dc 100644
--- a/app/assets/javascripts/releases/components/release_block_author.vue
+++ b/app/assets/javascripts/releases/components/release_block_author.vue
@@ -31,8 +31,8 @@ export default {
<template #user>
<user-avatar-link
class="prepend-left-4"
- :link-href="author.web_url"
- :img-src="author.avatar_url"
+ :link-href="author.webUrl"
+ :img-src="author.avatarUrl"
:img-alt="userImageAltDescription"
:tooltip-text="author.username"
/>
diff --git a/app/assets/javascripts/releases/components/release_block_footer.vue b/app/assets/javascripts/releases/components/release_block_footer.vue
index 8533fc17ffd..a95fbc0b373 100644
--- a/app/assets/javascripts/releases/components/release_block_footer.vue
+++ b/app/assets/javascripts/releases/components/release_block_footer.vue
@@ -66,9 +66,9 @@ export default {
<icon ref="commitIcon" name="commit" class="mr-1" />
<div v-gl-tooltip.bottom :title="commit.title">
<gl-link v-if="commitPath" :href="commitPath">
- {{ commit.short_id }}
+ {{ commit.shortId }}
</gl-link>
- <span v-else>{{ commit.short_id }}</span>
+ <span v-else>{{ commit.shortId }}</span>
</div>
</div>
@@ -100,8 +100,8 @@ export default {
<div v-if="author" class="d-flex">
<span class="text-secondary">{{ __('by') }}&nbsp;</span>
<user-avatar-link
- :link-href="author.web_url"
- :img-src="author.avatar_url"
+ :link-href="author.webUrl"
+ :img-src="author.avatarUrl"
:img-alt="userImageAltDescription"
:tooltip-text="author.username"
tooltip-placement="bottom"
diff --git a/app/assets/javascripts/releases/components/release_block_header.vue b/app/assets/javascripts/releases/components/release_block_header.vue
index b459418aef2..6f7e1dcfe2f 100644
--- a/app/assets/javascripts/releases/components/release_block_header.vue
+++ b/app/assets/javascripts/releases/components/release_block_header.vue
@@ -1,6 +1,8 @@
<script>
import { GlTooltipDirective, GlLink, GlBadge } from '@gitlab/ui';
import Icon from '~/vue_shared/components/icon.vue';
+import { BACK_URL_PARAM } from '~/releases/constants';
+import { setUrlParams } from '~/lib/utils/url_utility';
export default {
name: 'ReleaseBlockHeader',
@@ -20,7 +22,15 @@ export default {
},
computed: {
editLink() {
- return this.release._links?.edit_url;
+ if (this.release._links?.editUrl) {
+ const queryParams = {
+ [BACK_URL_PARAM]: window.location.href,
+ };
+
+ return setUrlParams(queryParams, this.release._links.editUrl);
+ }
+
+ return undefined;
},
selfLink() {
return this.release._links?.self;
@@ -36,7 +46,7 @@ export default {
{{ release.name }}
</gl-link>
<template v-else>{{ release.name }}</template>
- <gl-badge v-if="release.upcoming_release" variant="warning" class="align-middle">{{
+ <gl-badge v-if="release.upcomingRelease" variant="warning" class="align-middle">{{
__('Upcoming Release')
}}</gl-badge>
</h2>
diff --git a/app/assets/javascripts/releases/components/release_block_metadata.vue b/app/assets/javascripts/releases/components/release_block_metadata.vue
index f0aad594062..052e4088a5f 100644
--- a/app/assets/javascripts/releases/components/release_block_metadata.vue
+++ b/app/assets/javascripts/releases/components/release_block_metadata.vue
@@ -32,21 +32,21 @@ export default {
return this.release.commit || {};
},
commitUrl() {
- return this.release.commit_path;
+ return this.release.commitPath;
},
hasAuthor() {
return Boolean(this.author);
},
releasedTimeAgo() {
return sprintf(__('released %{time}'), {
- time: this.timeFormatted(this.release.released_at),
+ time: this.timeFormatted(this.release.releasedAt),
});
},
shouldRenderMilestones() {
return Boolean(this.release.milestones?.length);
},
tagUrl() {
- return this.release.tag_path;
+ return this.release.tagPath;
},
},
};
@@ -57,24 +57,24 @@ export default {
<div class="append-right-8">
<icon name="commit" class="align-middle" />
<gl-link v-if="commitUrl" v-gl-tooltip.bottom :title="commit.title" :href="commitUrl">
- {{ commit.short_id }}
+ {{ commit.shortId }}
</gl-link>
- <span v-else v-gl-tooltip.bottom :title="commit.title">{{ commit.short_id }}</span>
+ <span v-else v-gl-tooltip.bottom :title="commit.title">{{ commit.shortId }}</span>
</div>
<div class="append-right-8">
<icon name="tag" class="align-middle" />
<gl-link v-if="tagUrl" v-gl-tooltip.bottom :title="__('Tag')" :href="tagUrl">
- {{ release.tag_name }}
+ {{ release.tagName }}
</gl-link>
- <span v-else v-gl-tooltip.bottom :title="__('Tag')">{{ release.tag_name }}</span>
+ <span v-else v-gl-tooltip.bottom :title="__('Tag')">{{ release.tagName }}</span>
</div>
<release-block-milestones v-if="shouldRenderMilestones" :milestones="release.milestones" />
<div class="append-right-4">
&bull;
- <span v-gl-tooltip.bottom :title="tooltipTitle(release.released_at)">
+ <span v-gl-tooltip.bottom :title="tooltipTitle(release.releasedAt)">
{{ releasedTimeAgo }}
</span>
</div>
diff --git a/app/assets/javascripts/releases/components/release_block_milestone_info.vue b/app/assets/javascripts/releases/components/release_block_milestone_info.vue
index d3e354d6157..d9fbd2884b7 100644
--- a/app/assets/javascripts/releases/components/release_block_milestone_info.vue
+++ b/app/assets/javascripts/releases/components/release_block_milestone_info.vue
@@ -1,10 +1,15 @@
<script>
-import { GlProgressBar, GlLink, GlBadge, GlButton, GlTooltipDirective } from '@gitlab/ui';
+import {
+ GlProgressBar,
+ GlLink,
+ GlBadge,
+ GlButton,
+ GlTooltipDirective,
+ GlSprintf,
+} from '@gitlab/ui';
import { __, n__, sprintf } from '~/locale';
import { MAX_MILESTONES_TO_DISPLAY } from '../constants';
-
-/** Sums the values of an array. For use with Array.reduce. */
-const sumReducer = (acc, curr) => acc + curr;
+import { sum } from 'lodash';
export default {
name: 'ReleaseBlockMilestoneInfo',
@@ -13,6 +18,7 @@ export default {
GlLink,
GlBadge,
GlButton,
+ GlSprintf,
},
directives: {
GlTooltip: GlTooltipDirective,
@@ -22,6 +28,16 @@ export default {
type: Array,
required: true,
},
+ openIssuesPath: {
+ type: String,
+ required: false,
+ default: '',
+ },
+ closedIssuesPath: {
+ type: String,
+ required: false,
+ default: '',
+ },
},
data() {
return {
@@ -40,16 +56,16 @@ export default {
return Number.isNaN(percent) ? 0 : percent;
},
allIssueStats() {
- return this.milestones.map(m => m.issue_stats || {});
+ return this.milestones.map(m => m.issueStats || {});
},
- openIssuesCount() {
- return this.allIssueStats.map(stats => stats.opened || 0).reduce(sumReducer);
+ totalIssuesCount() {
+ return sum(this.allIssueStats.map(stats => stats.total || 0));
},
closedIssuesCount() {
- return this.allIssueStats.map(stats => stats.closed || 0).reduce(sumReducer);
+ return sum(this.allIssueStats.map(stats => stats.closed || 0));
},
- totalIssuesCount() {
- return this.openIssuesCount + this.closedIssuesCount;
+ openIssuesCount() {
+ return this.totalIssuesCount - this.closedIssuesCount;
},
milestoneLabelText() {
return n__('Milestone', 'Milestones', this.milestones.length);
@@ -109,7 +125,7 @@ export default {
:key="milestone.id"
v-gl-tooltip
:title="milestone.description"
- :href="milestone.web_url"
+ :href="milestone.webUrl"
class="append-right-4"
>
{{ milestone.title }}
@@ -130,7 +146,27 @@ export default {
{{ __('Issues') }}
<gl-badge pill variant="light" class="font-weight-bold">{{ totalIssuesCount }}</gl-badge>
</span>
- {{ issueCountsText }}
+ <div class="d-flex">
+ <gl-link v-if="openIssuesPath" ref="openIssuesLink" :href="openIssuesPath">
+ <gl-sprintf :message="__('Open: %{openIssuesCount}')">
+ <template #openIssuesCount>{{ openIssuesCount }}</template>
+ </gl-sprintf>
+ </gl-link>
+ <span v-else ref="openIssuesText">
+ {{ sprintf(__('Open: %{openIssuesCount}'), { openIssuesCount }) }}
+ </span>
+
+ <span class="mx-1">&bull;</span>
+
+ <gl-link v-if="closedIssuesPath" ref="closedIssuesLink" :href="closedIssuesPath">
+ <gl-sprintf :message="__('Closed: %{closedIssuesCount}')">
+ <template #closedIssuesCount>{{ closedIssuesCount }}</template>
+ </gl-sprintf>
+ </gl-link>
+ <span v-else ref="closedIssuesText">
+ {{ sprintf(__('Closed: %{closedIssuesCount}'), { closedIssuesCount }) }}
+ </span>
+ </div>
</div>
</div>
</template>
diff --git a/app/assets/javascripts/releases/components/release_block_milestones.vue b/app/assets/javascripts/releases/components/release_block_milestones.vue
index a3dff75b828..9abd3345b22 100644
--- a/app/assets/javascripts/releases/components/release_block_milestones.vue
+++ b/app/assets/javascripts/releases/components/release_block_milestones.vue
@@ -38,7 +38,7 @@ export default {
:key="milestone.id"
v-gl-tooltip
:title="milestone.description"
- :href="milestone.web_url"
+ :href="milestone.webUrl"
class="mx-1 js-milestone-link"
>
{{ milestone.title }}
diff --git a/app/assets/javascripts/releases/constants.js b/app/assets/javascripts/releases/constants.js
index defcd917465..1db93323a87 100644
--- a/app/assets/javascripts/releases/constants.js
+++ b/app/assets/javascripts/releases/constants.js
@@ -1,7 +1,3 @@
-/* eslint-disable import/prefer-default-export */
-// This eslint-disable ^^^ can be removed when at least
-// one more constant is added to this file. Currently
-// constants.js files with only a single constant
-// are flagged by this rule.
-
export const MAX_MILESTONES_TO_DISPLAY = 5;
+
+export const BACK_URL_PARAM = 'back_url';
diff --git a/app/assets/javascripts/releases/mount_edit.js b/app/assets/javascripts/releases/mount_edit.js
index 2bc2728312a..102c4367aac 100644
--- a/app/assets/javascripts/releases/mount_edit.js
+++ b/app/assets/javascripts/releases/mount_edit.js
@@ -6,7 +6,15 @@ import detailModule from './stores/modules/detail';
export default () => {
const el = document.getElementById('js-edit-release-page');
- const store = createStore({ detail: detailModule });
+ const store = createStore({
+ modules: {
+ detail: detailModule,
+ },
+ featureFlags: {
+ releaseShowPage: Boolean(gon.features?.releaseShowPage),
+ },
+ });
+
store.dispatch('detail/setInitialState', el.dataset);
return new Vue({
diff --git a/app/assets/javascripts/releases/mount_index.js b/app/assets/javascripts/releases/mount_index.js
index 6fcb6d802e4..5f0bf3b6459 100644
--- a/app/assets/javascripts/releases/mount_index.js
+++ b/app/assets/javascripts/releases/mount_index.js
@@ -8,14 +8,14 @@ export default () => {
return new Vue({
el,
- store: createStore({ list: listModule }),
+ store: createStore({
+ modules: {
+ list: listModule,
+ },
+ }),
render: h =>
h(ReleaseListApp, {
- props: {
- projectId: el.dataset.projectId,
- documentationLink: el.dataset.documentationPath,
- illustrationPath: el.dataset.illustrationPath,
- },
+ props: el.dataset,
}),
});
};
diff --git a/app/assets/javascripts/releases/mount_show.js b/app/assets/javascripts/releases/mount_show.js
new file mode 100644
index 00000000000..73e34869b21
--- /dev/null
+++ b/app/assets/javascripts/releases/mount_show.js
@@ -0,0 +1,21 @@
+import Vue from 'vue';
+import ReleaseShowApp from './components/app_show.vue';
+import createStore from './stores';
+import detailModule from './stores/modules/detail';
+
+export default () => {
+ const el = document.getElementById('js-show-release-page');
+
+ const store = createStore({
+ modules: {
+ detail: detailModule,
+ },
+ });
+ store.dispatch('detail/setInitialState', el.dataset);
+
+ return new Vue({
+ el,
+ store,
+ render: h => h(ReleaseShowApp),
+ });
+};
diff --git a/app/assets/javascripts/releases/stores/index.js b/app/assets/javascripts/releases/stores/index.js
index aa607906a0e..7f211145ccf 100644
--- a/app/assets/javascripts/releases/stores/index.js
+++ b/app/assets/javascripts/releases/stores/index.js
@@ -3,4 +3,8 @@ import Vuex from 'vuex';
Vue.use(Vuex);
-export default modules => new Vuex.Store({ modules });
+export default ({ modules, featureFlags }) =>
+ new Vuex.Store({
+ modules,
+ state: { featureFlags },
+ });
diff --git a/app/assets/javascripts/releases/stores/modules/detail/actions.js b/app/assets/javascripts/releases/stores/modules/detail/actions.js
index c9749582f5c..35901a654b0 100644
--- a/app/assets/javascripts/releases/stores/modules/detail/actions.js
+++ b/app/assets/javascripts/releases/stores/modules/detail/actions.js
@@ -22,8 +22,7 @@ export const fetchRelease = ({ dispatch, state }) => {
return api
.release(state.projectId, state.tagName)
.then(({ data: release }) => {
- const camelCasedRelease = convertObjectPropsToCamelCase(release, { deep: true });
- dispatch('receiveReleaseSuccess', camelCasedRelease);
+ dispatch('receiveReleaseSuccess', convertObjectPropsToCamelCase(release, { deep: true }));
})
.catch(error => {
dispatch('receiveReleaseError', error);
@@ -34,9 +33,11 @@ export const updateReleaseTitle = ({ commit }, title) => commit(types.UPDATE_REL
export const updateReleaseNotes = ({ commit }, notes) => commit(types.UPDATE_RELEASE_NOTES, notes);
export const requestUpdateRelease = ({ commit }) => commit(types.REQUEST_UPDATE_RELEASE);
-export const receiveUpdateReleaseSuccess = ({ commit, dispatch }) => {
+export const receiveUpdateReleaseSuccess = ({ commit, state, rootState }) => {
commit(types.RECEIVE_UPDATE_RELEASE_SUCCESS);
- dispatch('navigateToReleasesPage');
+ redirectTo(
+ rootState.featureFlags.releaseShowPage ? state.release._links.self : state.releasesPagePath,
+ );
};
export const receiveUpdateReleaseError = ({ commit }, error) => {
commit(types.RECEIVE_UPDATE_RELEASE_ERROR, error);
diff --git a/app/assets/javascripts/releases/stores/modules/list/actions.js b/app/assets/javascripts/releases/stores/modules/list/actions.js
index b15fb69226f..06d13890a9d 100644
--- a/app/assets/javascripts/releases/stores/modules/list/actions.js
+++ b/app/assets/javascripts/releases/stores/modules/list/actions.js
@@ -2,7 +2,11 @@ import * as types from './mutation_types';
import createFlash from '~/flash';
import { __ } from '~/locale';
import api from '~/api';
-import { normalizeHeaders, parseIntPagination } from '~/lib/utils/common_utils';
+import {
+ normalizeHeaders,
+ parseIntPagination,
+ convertObjectPropsToCamelCase,
+} from '~/lib/utils/common_utils';
/**
* Commits a mutation to update the state while the main endpoint is being requested.
@@ -28,7 +32,11 @@ export const fetchReleases = ({ dispatch }, { page = '1', projectId }) => {
export const receiveReleasesSuccess = ({ commit }, { data, headers }) => {
const pageInfo = parseIntPagination(normalizeHeaders(headers));
- commit(types.RECEIVE_RELEASES_SUCCESS, { data, pageInfo });
+ const camelCasedReleases = convertObjectPropsToCamelCase(data, { deep: true });
+ commit(types.RECEIVE_RELEASES_SUCCESS, {
+ data: camelCasedReleases,
+ pageInfo,
+ });
};
export const receiveReleasesError = ({ commit }) => {
diff --git a/app/assets/javascripts/reports/store/utils.js b/app/assets/javascripts/reports/store/utils.js
index ce3ffaae703..5d3d9ddda3b 100644
--- a/app/assets/javascripts/reports/store/utils.js
+++ b/app/assets/javascripts/reports/store/utils.js
@@ -11,9 +11,10 @@ const textBuilder = results => {
const { failed, errored, resolved, total } = results;
const failedOrErrored = (failed || 0) + (errored || 0);
- const failedString = failedOrErrored
- ? n__('%d failed/error test result', '%d failed/error test results', failedOrErrored)
- : null;
+ const failedString = failed ? n__('%d failed', '%d failed', failed) : null;
+ const erroredString = errored ? n__('%d error', '%d errors', errored) : null;
+ const combinedString =
+ failed && errored ? `${failedString}, ${erroredString}` : failedString || erroredString;
const resolvedString = resolved
? n__('%d fixed test result', '%d fixed test results', resolved)
: null;
@@ -23,12 +24,12 @@ const textBuilder = results => {
if (failedOrErrored) {
if (resolved) {
- resultsString = sprintf(s__('Reports|%{failedString} and %{resolvedString}'), {
- failedString,
+ resultsString = sprintf(s__('Reports|%{combinedString} and %{resolvedString}'), {
+ combinedString,
resolvedString,
});
} else {
- resultsString = failedString;
+ resultsString = combinedString;
}
} else if (resolved) {
resultsString = resolvedString;
diff --git a/app/assets/javascripts/repository/components/last_commit.vue b/app/assets/javascripts/repository/components/last_commit.vue
index 64003630271..71c3e15ba49 100644
--- a/app/assets/javascripts/repository/components/last_commit.vue
+++ b/app/assets/javascripts/repository/components/last_commit.vue
@@ -71,8 +71,8 @@ export default {
},
computed: {
statusTitle() {
- return sprintf(s__('Commits|Commit: %{commitText}'), {
- commitText: this.commit.pipeline.detailedStatus.text,
+ return sprintf(s__('PipelineStatusTooltip|Pipeline: %{ciStatus}'), {
+ ciStatus: this.commit.pipeline.detailedStatus.text,
});
},
isLoading() {
diff --git a/app/assets/javascripts/right_sidebar.js b/app/assets/javascripts/right_sidebar.js
index fa5649679d7..550ec3cb0d1 100644
--- a/app/assets/javascripts/right_sidebar.js
+++ b/app/assets/javascripts/right_sidebar.js
@@ -1,4 +1,4 @@
-/* eslint-disable func-names, consistent-return, no-else-return, no-param-reassign */
+/* eslint-disable func-names, consistent-return, no-param-reassign */
import $ from 'jquery';
import _ from 'underscore';
@@ -34,8 +34,6 @@ Sidebar.prototype.addEventListeners = function() {
this.sidebar.on('click', '.sidebar-collapsed-icon', this, this.sidebarCollapseClicked);
this.sidebar.on('hidden.gl.dropdown', this, this.onSidebarDropdownHidden);
- $('.dropdown').on('loading.gl.dropdown', this.sidebarDropdownLoading);
- $('.dropdown').on('loaded.gl.dropdown', this.sidebarDropdownLoaded);
$document.on('click', '.js-sidebar-toggle', this.sidebarToggleClicked);
return $(document)
@@ -133,36 +131,6 @@ Sidebar.prototype.todoUpdateDone = function(data) {
});
};
-Sidebar.prototype.sidebarDropdownLoading = function() {
- const $sidebarCollapsedIcon = $(this)
- .closest('.block')
- .find('.sidebar-collapsed-icon');
- const img = $sidebarCollapsedIcon.find('img');
- const i = $sidebarCollapsedIcon.find('i');
- const $loading = $('<i class="fa fa-spinner fa-spin"></i>');
- if (img.length) {
- img.before($loading);
- return img.hide();
- } else if (i.length) {
- i.before($loading);
- return i.hide();
- }
-};
-
-Sidebar.prototype.sidebarDropdownLoaded = function() {
- const $sidebarCollapsedIcon = $(this)
- .closest('.block')
- .find('.sidebar-collapsed-icon');
- const img = $sidebarCollapsedIcon.find('img');
- $sidebarCollapsedIcon.find('i.fa-spin').remove();
- const i = $sidebarCollapsedIcon.find('i');
- if (img.length) {
- return img.show();
- } else {
- return i.show();
- }
-};
-
Sidebar.prototype.sidebarCollapseClicked = function(e) {
if ($(e.currentTarget).hasClass('dont-change-state')) {
return;
diff --git a/app/assets/javascripts/self_monitor/components/self_monitor_form.vue b/app/assets/javascripts/self_monitor/components/self_monitor_form.vue
index 6b19a72317c..1e203d332db 100644
--- a/app/assets/javascripts/self_monitor/components/self_monitor_form.vue
+++ b/app/assets/javascripts/self_monitor/components/self_monitor_form.vue
@@ -129,7 +129,7 @@ export default {
</div>
<div class="settings-content">
<form name="self-monitoring-form">
- <p v-html="selfMonitoringFormText"></p>
+ <p ref="selfMonitoringFormText" v-html="selfMonitoringFormText"></p>
<gl-form-group :label="$options.formLabels.createProject" label-for="self-monitor-toggle">
<gl-toggle
v-model="selfMonitorEnabled"
diff --git a/app/assets/javascripts/self_monitor/store/state.js b/app/assets/javascripts/self_monitor/store/state.js
index a0ce88ff58c..582ce5576f1 100644
--- a/app/assets/javascripts/self_monitor/store/state.js
+++ b/app/assets/javascripts/self_monitor/store/state.js
@@ -9,7 +9,7 @@ export default (initialState = {}) => ({
deleteProjectStatusEndpoint: initialState.statusDeleteSelfMonitoringProjectPath || '',
selfMonitorProjectPath: initialState.selfMonitoringProjectFullPath || '',
showAlert: false,
- projectPath: '',
+ projectPath: initialState.selfMonitoringProjectFullPath || '',
loading: false,
alertContent: {},
});
diff --git a/app/assets/javascripts/shared/popover.js b/app/assets/javascripts/shared/popover.js
index 3fc03553bdd..435ee8fb968 100644
--- a/app/assets/javascripts/shared/popover.js
+++ b/app/assets/javascripts/shared/popover.js
@@ -1,5 +1,5 @@
import $ from 'jquery';
-import _ from 'underscore';
+import { debounce } from 'lodash';
export function togglePopover(show) {
const isAlreadyShown = this.hasClass('js-popover-show');
@@ -29,5 +29,5 @@ export function mouseenter() {
}
export function debouncedMouseleave(debounceTimeout = 300) {
- return _.debounce(mouseleave, debounceTimeout);
+ return debounce(mouseleave, debounceTimeout);
}
diff --git a/app/assets/javascripts/sidebar/components/assignees/assignee_title.vue b/app/assets/javascripts/sidebar/components/assignees/assignee_title.vue
index f4dac38b9e1..5c67e429383 100644
--- a/app/assets/javascripts/sidebar/components/assignees/assignee_title.vue
+++ b/app/assets/javascripts/sidebar/components/assignees/assignee_title.vue
@@ -1,8 +1,12 @@
<script>
+import { GlLoadingIcon } from '@gitlab/ui';
import { n__ } from '~/locale';
export default {
name: 'AssigneeTitle',
+ components: {
+ GlLoadingIcon,
+ },
props: {
loading: {
type: Boolean,
@@ -34,7 +38,7 @@ export default {
<template>
<div class="title hide-collapsed">
{{ assigneeTitle }}
- <i v-if="loading" aria-hidden="true" class="fa fa-spinner fa-spin block-loading"></i>
+ <gl-loading-icon v-if="loading" inline class="align-bottom" />
<a
v-if="editable"
class="js-sidebar-dropdown-toggle edit-link float-right"
diff --git a/app/assets/javascripts/sidebar/components/lock/lock_issue_sidebar.vue b/app/assets/javascripts/sidebar/components/lock/lock_issue_sidebar.vue
index c7c5e0e20f1..012a4f4ad89 100644
--- a/app/assets/javascripts/sidebar/components/lock/lock_issue_sidebar.vue
+++ b/app/assets/javascripts/sidebar/components/lock/lock_issue_sidebar.vue
@@ -63,7 +63,9 @@ export default {
methods: {
toggleForm() {
- this.mediator.store.isLockDialogOpen = !this.mediator.store.isLockDialogOpen;
+ if (this.isEditable) {
+ this.mediator.store.isLockDialogOpen = !this.mediator.store.isLockDialogOpen;
+ }
},
updateLockedAttribute(locked) {
this.mediator.service
diff --git a/app/assets/javascripts/sidebar/components/time_tracking/sidebar_time_tracking.vue b/app/assets/javascripts/sidebar/components/time_tracking/sidebar_time_tracking.vue
index 018b30d2a67..05ad7b4ea3e 100644
--- a/app/assets/javascripts/sidebar/components/time_tracking/sidebar_time_tracking.vue
+++ b/app/assets/javascripts/sidebar/components/time_tracking/sidebar_time_tracking.vue
@@ -1,6 +1,6 @@
<script>
import $ from 'jquery';
-import _ from 'underscore';
+import { intersection } from 'lodash';
import '~/smart_interval';
@@ -38,7 +38,7 @@ export default {
} else {
changedCommands = [];
}
- if (changedCommands && _.intersection(subscribedCommands, changedCommands).length) {
+ if (changedCommands && intersection(subscribedCommands, changedCommands).length) {
this.mediator.fetch();
}
},
diff --git a/app/assets/javascripts/sidebar/lib/sidebar_move_issue.js b/app/assets/javascripts/sidebar/lib/sidebar_move_issue.js
index 66d1fed7d31..2a61f7b5c05 100644
--- a/app/assets/javascripts/sidebar/lib/sidebar_move_issue.js
+++ b/app/assets/javascripts/sidebar/lib/sidebar_move_issue.js
@@ -1,6 +1,6 @@
import $ from 'jquery';
import '~/gl_dropdown';
-import _ from 'underscore';
+import { escape as esc } from 'lodash';
import { __ } from '~/locale';
function isValidProjectId(id) {
@@ -49,7 +49,7 @@ class SidebarMoveIssue {
renderRow: project => `
<li>
<a href="#" class="js-move-issue-dropdown-item">
- ${_.escape(project.name_with_namespace)}
+ ${esc(project.name_with_namespace)}
</a>
</li>
`,
diff --git a/app/assets/javascripts/sidebar/queries/sidebarDetails.query.graphql b/app/assets/javascripts/sidebar/queries/sidebarDetails.query.graphql
new file mode 100644
index 00000000000..8cc68f6ea9a
--- /dev/null
+++ b/app/assets/javascripts/sidebar/queries/sidebarDetails.query.graphql
@@ -0,0 +1,7 @@
+query ($fullPath: ID!, $iid: String!) {
+ project (fullPath: $fullPath) {
+ issue (iid: $iid) {
+ iid
+ }
+ }
+}
diff --git a/app/assets/javascripts/sidebar/queries/sidebarDetailsForHealthStatusFeatureFlag.query.graphql b/app/assets/javascripts/sidebar/queries/sidebarDetailsForHealthStatusFeatureFlag.query.graphql
new file mode 100644
index 00000000000..8cc68f6ea9a
--- /dev/null
+++ b/app/assets/javascripts/sidebar/queries/sidebarDetailsForHealthStatusFeatureFlag.query.graphql
@@ -0,0 +1,7 @@
+query ($fullPath: ID!, $iid: String!) {
+ project (fullPath: $fullPath) {
+ issue (iid: $iid) {
+ iid
+ }
+ }
+}
diff --git a/app/assets/javascripts/sidebar/queries/updateStatus.mutation.graphql b/app/assets/javascripts/sidebar/queries/updateStatus.mutation.graphql
new file mode 100644
index 00000000000..27a5cff12c7
--- /dev/null
+++ b/app/assets/javascripts/sidebar/queries/updateStatus.mutation.graphql
@@ -0,0 +1,7 @@
+mutation ($projectPath: ID!, $iid: String!, $healthStatus: HealthStatus) {
+  updateIssue(input: { projectPath: $projectPath, iid: $iid, healthStatus: $healthStatus }) {
+ issue {
+ healthStatus
+ }
+ }
+}
diff --git a/app/assets/javascripts/sidebar/services/sidebar_service.js b/app/assets/javascripts/sidebar/services/sidebar_service.js
index feb08e3acaf..3b8903b4a4c 100644
--- a/app/assets/javascripts/sidebar/services/sidebar_service.js
+++ b/app/assets/javascripts/sidebar/services/sidebar_service.js
@@ -1,4 +1,14 @@
import axios from '~/lib/utils/axios_utils';
+import createGqClient, { fetchPolicies } from '~/lib/graphql';
+import sidebarDetailsQuery from 'ee_else_ce/sidebar/queries/sidebarDetails.query.graphql';
+import sidebarDetailsForHealthStatusFeatureFlagQuery from 'ee_else_ce/sidebar/queries/sidebarDetailsForHealthStatusFeatureFlag.query.graphql';
+
+export const gqClient = createGqClient(
+ {},
+ {
+ fetchPolicy: fetchPolicies.NO_CACHE,
+ },
+);
export default class SidebarService {
constructor(endpointMap) {
@@ -7,6 +17,8 @@ export default class SidebarService {
this.toggleSubscriptionEndpoint = endpointMap.toggleSubscriptionEndpoint;
this.moveIssueEndpoint = endpointMap.moveIssueEndpoint;
this.projectsAutocompleteEndpoint = endpointMap.projectsAutocompleteEndpoint;
+ this.fullPath = endpointMap.fullPath;
+ this.iid = endpointMap.iid;
SidebarService.singleton = this;
}
@@ -15,13 +27,37 @@ export default class SidebarService {
}
get() {
- return axios.get(this.endpoint);
+ const hasHealthStatusFeatureFlag = gon.features && gon.features.saveIssuableHealthStatus;
+
+ return Promise.all([
+ axios.get(this.endpoint),
+ gqClient.query({
+ query: hasHealthStatusFeatureFlag
+ ? sidebarDetailsForHealthStatusFeatureFlagQuery
+ : sidebarDetailsQuery,
+ variables: {
+ fullPath: this.fullPath,
+ iid: this.iid.toString(),
+ },
+ }),
+ ]);
}
update(key, data) {
return axios.put(this.endpoint, { [key]: data });
}
+ updateWithGraphQl(mutation, variables) {
+ return gqClient.mutate({
+ mutation,
+ variables: {
+ ...variables,
+ projectPath: this.fullPath,
+ iid: this.iid.toString(),
+ },
+ });
+ }
+
getProjectsAutocomplete(searchTerm) {
return axios.get(this.projectsAutocompleteEndpoint, {
params: {
diff --git a/app/assets/javascripts/sidebar/sidebar_mediator.js b/app/assets/javascripts/sidebar/sidebar_mediator.js
index ce869a625bf..34621fc1036 100644
--- a/app/assets/javascripts/sidebar/sidebar_mediator.js
+++ b/app/assets/javascripts/sidebar/sidebar_mediator.js
@@ -19,6 +19,8 @@ export default class SidebarMediator {
toggleSubscriptionEndpoint: options.toggleSubscriptionEndpoint,
moveIssueEndpoint: options.moveIssueEndpoint,
projectsAutocompleteEndpoint: options.projectsAutocompleteEndpoint,
+ fullPath: options.fullPath,
+ iid: options.iid,
});
SidebarMediator.singleton = this;
}
@@ -45,8 +47,8 @@ export default class SidebarMediator {
fetch() {
return this.service
.get()
- .then(({ data }) => {
- this.processFetchedData(data);
+ .then(([restResponse, graphQlResponse]) => {
+ this.processFetchedData(restResponse.data, graphQlResponse.data);
})
.catch(() => new Flash(__('Error occurred when fetching sidebar data')));
}
diff --git a/app/assets/javascripts/snippet/snippet_bundle.js b/app/assets/javascripts/snippet/snippet_bundle.js
index 652531a1289..a3ed8d9c632 100644
--- a/app/assets/javascripts/snippet/snippet_bundle.js
+++ b/app/assets/javascripts/snippet/snippet_bundle.js
@@ -1,14 +1,50 @@
-/* global ace */
-
-import $ from 'jquery';
+import { initEditorLite } from '~/blob/utils';
import setupCollapsibleInputs from './collapsible_input';
-export default () => {
- const editor = ace.edit('editor');
+let editor;
+
+const initAce = () => {
+ const editorEl = document.getElementById('editor');
+ const form = document.querySelector('.snippet-form-holder form');
+ const content = document.querySelector('.snippet-file-content');
+
+ editor = initEditorLite({ el: editorEl });
+
+ form.addEventListener('submit', () => {
+ content.value = editor.getValue();
+ });
+};
+
+const initMonaco = () => {
+ const editorEl = document.getElementById('editor');
+ const contentEl = document.querySelector('.snippet-file-content');
+ const fileNameEl = document.querySelector('.js-snippet-file-name');
+ const form = document.querySelector('.snippet-form-holder form');
- $('.snippet-form-holder form').on('submit', () => {
- $('.snippet-file-content').val(editor.getValue());
+ editor = initEditorLite({
+ el: editorEl,
+ blobPath: fileNameEl.value,
+ blobContent: contentEl.value,
});
+ fileNameEl.addEventListener('change', () => {
+ editor.updateModelLanguage(fileNameEl.value);
+ });
+
+ form.addEventListener('submit', () => {
+ contentEl.value = editor.getValue();
+ });
+};
+
+export const initEditor = () => {
+ if (window?.gon?.features?.monacoSnippets) {
+ initMonaco();
+ } else {
+ initAce();
+ }
setupCollapsibleInputs();
};
+
+export default () => {
+ initEditor();
+};
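
A minimal sketch of how a page entry point might consume the new default export; the bundle path and the DOMContentLoaded wiring are assumptions:

```js
import initSnippet from '~/snippet/snippet_bundle';

document.addEventListener('DOMContentLoaded', () => {
  // Uses the Monaco-backed Editor Lite when the monacoSnippets feature flag
  // is enabled, otherwise falls back to the Ace-based setup.
  initSnippet();
});
```
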
diff --git a/app/assets/javascripts/snippets/components/snippet_blob_edit.vue b/app/assets/javascripts/snippets/components/snippet_blob_edit.vue
new file mode 100644
index 00000000000..af1574f98d9
--- /dev/null
+++ b/app/assets/javascripts/snippets/components/snippet_blob_edit.vue
@@ -0,0 +1,36 @@
+<script>
+import BlobHeaderEdit from '~/blob/components/blob_edit_header.vue';
+import BlobContentEdit from '~/blob/components/blob_edit_content.vue';
+
+export default {
+ components: {
+ BlobHeaderEdit,
+ BlobContentEdit,
+ },
+ props: {
+ content: {
+ type: String,
+ required: true,
+ },
+ fileName: {
+ type: String,
+ required: true,
+ },
+ },
+ data() {
+ return {
+ name: this.fileName,
+ blobContent: this.content,
+ };
+ },
+};
+</script>
+<template>
+ <div class="form-group file-editor">
+ <label>{{ s__('Snippets|File') }}</label>
+ <div class="file-holder snippet">
+ <blob-header-edit v-model="name" />
+ <blob-content-edit v-model="blobContent" :file-name="name" />
+ </div>
+ </div>
+</template>
diff --git a/app/assets/javascripts/snippets/components/snippet_description_edit.vue b/app/assets/javascripts/snippets/components/snippet_description_edit.vue
new file mode 100644
index 00000000000..5b70ac5b715
--- /dev/null
+++ b/app/assets/javascripts/snippets/components/snippet_description_edit.vue
@@ -0,0 +1,72 @@
+<script>
+import { GlFormInput } from '@gitlab/ui';
+import MarkdownField from '~/vue_shared/components/markdown/field.vue';
+import setupCollapsibleInputs from '~/snippet/collapsible_input';
+
+export default {
+ components: {
+ GlFormInput,
+ MarkdownField,
+ },
+ props: {
+ description: {
+ type: String,
+ default: '',
+ required: false,
+ },
+ markdownPreviewPath: {
+ type: String,
+ required: true,
+ },
+ markdownDocsPath: {
+ type: String,
+ required: true,
+ },
+ },
+ data() {
+ return {
+ text: this.description,
+ };
+ },
+ mounted() {
+ setupCollapsibleInputs();
+ },
+};
+</script>
+<template>
+ <div class="form-group js-description-input">
+ <label>{{ s__('Snippets|Description (optional)') }}</label>
+ <div class="js-collapsible-input">
+ <div class="js-collapsed" :class="{ 'd-none': text }">
+ <gl-form-input
+ class="form-control"
+ :placeholder="
+ s__(
+ 'Snippets|Optionally add a description about what your snippet does or how to use it…',
+ )
+ "
+ data-qa-selector="description_placeholder"
+ />
+ </div>
+ <markdown-field
+ class="js-expanded"
+ :class="{ 'd-none': !text }"
+ :markdown-preview-path="markdownPreviewPath"
+ :markdown-docs-path="markdownDocsPath"
+ >
+ <textarea
+ id="snippet-description"
+ slot="textarea"
+ v-model="text"
+ class="note-textarea js-gfm-input js-autosize markdown-area
+ qa-description-textarea"
+ dir="auto"
+ data-supports-quick-actions="false"
+ :aria-label="__('Description')"
+ :placeholder="__('Write a comment or drag your files here…')"
+ >
+ </textarea>
+ </markdown-field>
+ </div>
+ </div>
+</template>
diff --git a/app/assets/javascripts/snippets/components/snippet_title.vue b/app/assets/javascripts/snippets/components/snippet_title.vue
index 6646e70f5db..1fc0423a06c 100644
--- a/app/assets/javascripts/snippets/components/snippet_title.vue
+++ b/app/assets/javascripts/snippets/components/snippet_title.vue
@@ -21,7 +21,7 @@ export default {
{{ snippet.title }}
</h2>
<div v-if="snippet.description" class="description" data-qa-selector="snippet_description">
- <div class="md">{{ snippet.description }}</div>
+ <div class="md js-snippet-description" v-html="snippet.descriptionHtml"></div>
</div>
<small v-if="snippet.updatedAt !== snippet.createdAt" class="edited-text">
diff --git a/app/assets/javascripts/snippets/components/snippet_visibility_edit.vue b/app/assets/javascripts/snippets/components/snippet_visibility_edit.vue
new file mode 100644
index 00000000000..93cd2b58c11
--- /dev/null
+++ b/app/assets/javascripts/snippets/components/snippet_visibility_edit.vue
@@ -0,0 +1,95 @@
+<script>
+import { GlIcon, GlFormGroup, GlFormRadio, GlFormRadioGroup, GlLink } from '@gitlab/ui';
+import { SNIPPET_VISIBILITY } from '~/snippets/constants';
+
+export default {
+ components: {
+ GlIcon,
+ GlFormGroup,
+ GlFormRadio,
+ GlFormRadioGroup,
+ GlLink,
+ },
+ props: {
+ helpLink: {
+ type: String,
+ default: '',
+ required: false,
+ },
+ isProjectSnippet: {
+ type: Boolean,
+ required: false,
+ default: false,
+ },
+ visibilityLevel: {
+ type: String,
+ default: '0',
+ required: false,
+ },
+ },
+ data() {
+ return {
+ selected: this.visibilityLevel,
+ };
+ },
+ computed: {
+ visibilityOptions() {
+ return [
+ {
+ value: '0',
+ icon: 'lock',
+ text: SNIPPET_VISIBILITY.private.label,
+ description: this.isProjectSnippet
+ ? SNIPPET_VISIBILITY.private.description_project
+ : SNIPPET_VISIBILITY.private.description,
+ },
+ {
+ value: '1',
+ icon: 'shield',
+ text: SNIPPET_VISIBILITY.internal.label,
+ description: SNIPPET_VISIBILITY.internal.description,
+ },
+ {
+ value: '2',
+ icon: 'earth',
+ text: SNIPPET_VISIBILITY.public.label,
+ description: SNIPPET_VISIBILITY.public.description,
+ },
+ ];
+ },
+ },
+ methods: {
+ updateSelectedOption(newVal) {
+ if (newVal !== this.selected) {
+ this.selected = newVal;
+ }
+ },
+ },
+};
+</script>
+<template>
+ <div class="form-group">
+ <label>
+ {{ __('Visibility level') }}
+ <gl-link v-if="helpLink" :href="helpLink" target="_blank"
+ ><gl-icon :size="12" name="question"
+ /></gl-link>
+ </label>
+ <gl-form-group id="visibility-level-setting">
+ <gl-form-radio-group :checked="selected" stacked @change="updateSelectedOption">
+ <gl-form-radio
+ v-for="option in visibilityOptions"
+ :key="option.icon"
+ :value="option.value"
+ class="mb-3"
+ >
+ <div class="d-flex align-items-center">
+ <gl-icon :size="16" :name="option.icon" />
+ <span class="font-weight-bold ml-1">{{ option.text }}</span>
+ </div>
+ <template #help>{{ option.description }}</template>
+ </gl-form-radio>
+ </gl-form-radio-group>
+ </gl-form-group>
+ </div>
+</template>
diff --git a/app/assets/javascripts/snippets/constants.js b/app/assets/javascripts/snippets/constants.js
index 87e3fe360a3..ed2f1156292 100644
--- a/app/assets/javascripts/snippets/constants.js
+++ b/app/assets/javascripts/snippets/constants.js
@@ -1,3 +1,21 @@
+import { __ } from '~/locale';
+
export const SNIPPET_VISIBILITY_PRIVATE = 'private';
export const SNIPPET_VISIBILITY_INTERNAL = 'internal';
export const SNIPPET_VISIBILITY_PUBLIC = 'public';
+
+export const SNIPPET_VISIBILITY = {
+ private: {
+ label: __('Private'),
+ description: __('The snippet is visible only to me.'),
+ description_project: __('The snippet is visible only to project members.'),
+ },
+ internal: {
+ label: __('Internal'),
+ description: __('The snippet is visible to any logged in user.'),
+ },
+ public: {
+ label: __('Public'),
+ description: __('The snippet can be accessed without any authentication.'),
+ },
+};
diff --git a/app/assets/javascripts/snippets/fragments/snippetBase.fragment.graphql b/app/assets/javascripts/snippets/fragments/snippetBase.fragment.graphql
index 57348a422ec..e0cc6cc2dda 100644
--- a/app/assets/javascripts/snippets/fragments/snippetBase.fragment.graphql
+++ b/app/assets/javascripts/snippets/fragments/snippetBase.fragment.graphql
@@ -2,6 +2,7 @@ fragment SnippetBase on Snippet {
id
title
description
+ descriptionHtml
createdAt
updatedAt
visibilityLevel
diff --git a/app/assets/javascripts/terminal/terminal.js b/app/assets/javascripts/terminal/terminal.js
index 9c7c10d9864..f4e546e4d4e 100644
--- a/app/assets/javascripts/terminal/terminal.js
+++ b/app/assets/javascripts/terminal/terminal.js
@@ -1,4 +1,4 @@
-import _ from 'underscore';
+import { throttle } from 'lodash';
import $ from 'jquery';
import { Terminal } from 'xterm';
import * as fit from 'xterm/lib/addons/fit/fit';
@@ -85,7 +85,7 @@ export default class GLTerminal {
addScrollListener(onScrollLimit) {
const viewport = this.container.querySelector('.xterm-viewport');
- const listener = _.throttle(() => {
+ const listener = throttle(() => {
onScrollLimit({
canScrollUp: canScrollUp(viewport, SCROLL_MARGIN),
canScrollDown: canScrollDown(viewport, SCROLL_MARGIN),
diff --git a/app/assets/javascripts/tracking.js b/app/assets/javascripts/tracking.js
index a17b8a047c0..ab5acd83b01 100644
--- a/app/assets/javascripts/tracking.js
+++ b/app/assets/javascripts/tracking.js
@@ -111,4 +111,6 @@ export function initUserTracking() {
if (opts.linkClickTracking) window.snowplow('enableLinkClickTracking');
Tracking.bindDocument();
+
+ document.dispatchEvent(new Event('SnowplowInitialized'));
}
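
The new `SnowplowInitialized` event lets other scripts defer tracking work until the tracker is bound; a hedged sketch of a consumer (the category and action names are placeholders):

```js
import Tracking from '~/tracking';

document.addEventListener('SnowplowInitialized', () => {
  // Safe to emit custom events once initUserTracking has finished.
  Tracking.event('projects:merge_requests:show', 'snowplow_ready');
});
```
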
diff --git a/app/assets/javascripts/u2f/authenticate.js b/app/assets/javascripts/u2f/authenticate.js
index abfc81e681e..6244df1180e 100644
--- a/app/assets/javascripts/u2f/authenticate.js
+++ b/app/assets/javascripts/u2f/authenticate.js
@@ -1,5 +1,5 @@
import $ from 'jquery';
-import _ from 'underscore';
+import { template as lodashTemplate, omit } from 'lodash';
import importU2FLibrary from './util';
import U2FError from './error';
@@ -37,7 +37,7 @@ export default class U2FAuthenticate {
// Note: The server library fixes this behaviour in (unreleased) version 1.0.0.
// This can be removed once we upgrade.
// https://github.com/castle/ruby-u2f/commit/103f428071a81cd3d5f80c2e77d522d5029946a4
- this.signRequests = u2fParams.sign_requests.map(request => _(request).omit('challenge'));
+ this.signRequests = u2fParams.sign_requests.map(request => omit(request, 'challenge'));
this.templates = {
setup: '#js-authenticate-u2f-setup',
@@ -74,7 +74,7 @@ export default class U2FAuthenticate {
renderTemplate(name, params) {
const templateString = $(this.templates[name]).html();
- const template = _.template(templateString);
+ const template = lodashTemplate(templateString);
return this.container.html(template(params));
}
diff --git a/app/assets/javascripts/u2f/register.js b/app/assets/javascripts/u2f/register.js
index 43c814c8070..f5a422727ad 100644
--- a/app/assets/javascripts/u2f/register.js
+++ b/app/assets/javascripts/u2f/register.js
@@ -1,5 +1,5 @@
import $ from 'jquery';
-import _ from 'underscore';
+import { template as lodashTemplate } from 'lodash';
import importU2FLibrary from './util';
import U2FError from './error';
@@ -59,7 +59,7 @@ export default class U2FRegister {
renderTemplate(name, params) {
const templateString = $(this.templates[name]).html();
- const template = _.template(templateString);
+ const template = lodashTemplate(templateString);
return this.container.html(template(params));
}
diff --git a/app/assets/javascripts/user_popovers.js b/app/assets/javascripts/user_popovers.js
index 5cc22f62262..f8c1c3634c2 100644
--- a/app/assets/javascripts/user_popovers.js
+++ b/app/assets/javascripts/user_popovers.js
@@ -39,6 +39,7 @@ const populateUserInfo = user => {
location: userData.location,
bio: userData.bio,
organization: userData.organization,
+ jobTitle: userData.job_title,
loaded: true,
});
}
diff --git a/app/assets/javascripts/vue_merge_request_widget/components/deployment/constants.js b/app/assets/javascripts/vue_merge_request_widget/components/deployment/constants.js
index 90741e3aa44..a7ab11290eb 100644
--- a/app/assets/javascripts/vue_merge_request_widget/components/deployment/constants.js
+++ b/app/assets/javascripts/vue_merge_request_widget/components/deployment/constants.js
@@ -6,3 +6,8 @@ export const RUNNING = 'running';
export const SUCCESS = 'success';
export const FAILED = 'failed';
export const CANCELED = 'canceled';
+
+// ACTION STATUSES
+export const STOPPING = 'stopping';
+export const DEPLOYING = 'deploying';
+export const REDEPLOYING = 'redeploying';
diff --git a/app/assets/javascripts/vue_merge_request_widget/components/deployment/deployment.vue b/app/assets/javascripts/vue_merge_request_widget/components/deployment/deployment.vue
index 34866cdfa6f..9c476d5b2e0 100644
--- a/app/assets/javascripts/vue_merge_request_widget/components/deployment/deployment.vue
+++ b/app/assets/javascripts/vue_merge_request_widget/components/deployment/deployment.vue
@@ -1,18 +1,15 @@
<script>
-import { __, s__ } from '~/locale';
+import DeploymentActions from './deployment_actions.vue';
import DeploymentInfo from './deployment_info.vue';
-import DeploymentViewButton from './deployment_view_button.vue';
-import DeploymentStopButton from './deployment_stop_button.vue';
-import { MANUAL_DEPLOY, WILL_DEPLOY, CREATED, RUNNING, SUCCESS } from './constants';
+import { MANUAL_DEPLOY, WILL_DEPLOY, CREATED } from './constants';
export default {
// name: 'Deployment' is a false positive: https://gitlab.com/gitlab-org/frontend/eslint-plugin-i18n/issues/26#possible-false-positives
// eslint-disable-next-line @gitlab/i18n/no-non-i18n-strings
name: 'Deployment',
components: {
+ DeploymentActions,
DeploymentInfo,
- DeploymentStopButton,
- DeploymentViewButton,
},
props: {
deployment: {
@@ -40,38 +37,14 @@ export default {
},
},
computed: {
- appButtonText() {
- return {
- text: this.isCurrent ? s__('Review App|View app') : s__('Review App|View latest app'),
- tooltip: this.isCurrent
- ? ''
- : __('View the latest successful deployment to this environment'),
- };
- },
- canBeManuallyDeployed() {
- return this.computedDeploymentStatus === MANUAL_DEPLOY;
- },
computedDeploymentStatus() {
if (this.deployment.status === CREATED) {
return this.isManual ? MANUAL_DEPLOY : WILL_DEPLOY;
}
return this.deployment.status;
},
- hasExternalUrls() {
- return Boolean(this.deployment.external_url && this.deployment.external_url_formatted);
- },
- isCurrent() {
- return this.computedDeploymentStatus === SUCCESS;
- },
isManual() {
- return Boolean(
- this.deployment.details &&
- this.deployment.details.playable_build &&
- this.deployment.details.playable_build.play_path,
- );
- },
- isDeployInProgress() {
- return this.deployment.status === RUNNING;
+ return Boolean(this.deployment.details?.playable_build?.play_path);
},
},
};
@@ -87,22 +60,12 @@ export default {
:deployment="deployment"
:show-metrics="showMetrics"
/>
- <div>
- <!-- show appropriate version of review app button -->
- <deployment-view-button
- v-if="hasExternalUrls"
- :app-button-text="appButtonText"
- :deployment="deployment"
- :show-visual-review-app="showVisualReviewApp"
- :visual-review-app-metadata="visualReviewAppMeta"
- />
- <!-- if it is stoppable, show stop -->
- <deployment-stop-button
- v-if="deployment.stop_url"
- :is-deploy-in-progress="isDeployInProgress"
- :stop-url="deployment.stop_url"
- />
- </div>
+ <deployment-actions
+ :deployment="deployment"
+ :computed-deployment-status="computedDeploymentStatus"
+ :show-visual-review-app="showVisualReviewApp"
+ :visual-review-app-meta="visualReviewAppMeta"
+ />
</div>
</div>
</div>
diff --git a/app/assets/javascripts/vue_merge_request_widget/components/deployment/deployment_action_button.vue b/app/assets/javascripts/vue_merge_request_widget/components/deployment/deployment_action_button.vue
new file mode 100644
index 00000000000..45798fbc9dc
--- /dev/null
+++ b/app/assets/javascripts/vue_merge_request_widget/components/deployment/deployment_action_button.vue
@@ -0,0 +1,75 @@
+<script>
+import { GlTooltipDirective, GlButton } from '@gitlab/ui';
+import { __ } from '~/locale';
+import { RUNNING } from './constants';
+
+export default {
+ name: 'DeploymentActionButton',
+ components: {
+ GlButton,
+ },
+ directives: {
+ GlTooltip: GlTooltipDirective,
+ },
+ props: {
+ actionsConfiguration: {
+ type: Object,
+ required: true,
+ },
+ actionInProgress: {
+ type: String,
+ required: false,
+ default: null,
+ },
+ buttonTitle: {
+ type: String,
+ required: false,
+ default: '',
+ },
+ computedDeploymentStatus: {
+ type: String,
+ required: true,
+ },
+ containerClasses: {
+ type: String,
+ required: false,
+ default: '',
+ },
+ },
+ computed: {
+ isActionInProgress() {
+ return Boolean(this.computedDeploymentStatus === RUNNING || this.actionInProgress);
+ },
+ actionInProgressTooltip() {
+ switch (this.actionInProgress) {
+ case this.actionsConfiguration.actionName:
+ return this.actionsConfiguration.busyText;
+ case null:
+ return '';
+ default:
+ return __('Another action is currently in progress');
+ }
+ },
+ isLoading() {
+ return this.actionInProgress === this.actionsConfiguration.actionName;
+ },
+ },
+};
+</script>
+
+<template>
+ <span v-gl-tooltip :title="actionInProgressTooltip" class="d-inline-block" tabindex="0">
+ <gl-button
+ v-gl-tooltip
+ :title="buttonTitle"
+ :loading="isLoading"
+ :disabled="isActionInProgress"
+ :class="`btn btn-default btn-sm inline prepend-left-4 ${containerClasses}`"
+ @click="$emit('click')"
+ >
+ <span class="d-inline-flex align-items-baseline">
+ <slot> </slot>
+ </span>
+ </gl-button>
+ </span>
+</template>
diff --git a/app/assets/javascripts/vue_merge_request_widget/components/deployment/deployment_actions.vue b/app/assets/javascripts/vue_merge_request_widget/components/deployment/deployment_actions.vue
new file mode 100644
index 00000000000..573fc388cca
--- /dev/null
+++ b/app/assets/javascripts/vue_merge_request_widget/components/deployment/deployment_actions.vue
@@ -0,0 +1,190 @@
+<script>
+import { GlIcon } from '@gitlab/ui';
+import { __, s__ } from '~/locale';
+import createFlash from '~/flash';
+import { visitUrl } from '~/lib/utils/url_utility';
+import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
+import MRWidgetService from '../../services/mr_widget_service';
+import DeploymentActionButton from './deployment_action_button.vue';
+import DeploymentViewButton from './deployment_view_button.vue';
+import { MANUAL_DEPLOY, FAILED, SUCCESS, STOPPING, DEPLOYING, REDEPLOYING } from './constants';
+
+export default {
+ name: 'DeploymentActions',
+ components: {
+ DeploymentActionButton,
+ DeploymentViewButton,
+ GlIcon,
+ },
+ mixins: [glFeatureFlagsMixin()],
+ props: {
+ computedDeploymentStatus: {
+ type: String,
+ required: true,
+ },
+ deployment: {
+ type: Object,
+ required: true,
+ },
+ showVisualReviewApp: {
+ type: Boolean,
+ required: false,
+ default: false,
+ },
+ visualReviewAppMeta: {
+ type: Object,
+ required: false,
+ default: () => ({
+ sourceProjectId: '',
+ sourceProjectPath: '',
+ mergeRequestId: '',
+ appUrl: '',
+ }),
+ },
+ },
+ data() {
+ return {
+ actionInProgress: null,
+ constants: {
+ STOPPING,
+ DEPLOYING,
+ REDEPLOYING,
+ },
+ };
+ },
+ computed: {
+ appButtonText() {
+ return {
+ text: this.isCurrent ? s__('Review App|View app') : s__('Review App|View latest app'),
+ tooltip: this.isCurrent
+ ? ''
+ : __('View the latest successful deployment to this environment'),
+ };
+ },
+ canBeManuallyDeployed() {
+ return this.computedDeploymentStatus === MANUAL_DEPLOY && Boolean(this.playPath);
+ },
+ canBeManuallyRedeployed() {
+ return this.computedDeploymentStatus === FAILED && Boolean(this.redeployPath);
+ },
+ shouldShowManualButtons() {
+ return this.glFeatures.deployFromFooter;
+ },
+ hasExternalUrls() {
+ return Boolean(this.deployment.external_url && this.deployment.external_url_formatted);
+ },
+ isCurrent() {
+ return this.computedDeploymentStatus === SUCCESS;
+ },
+ playPath() {
+ return this.deployment.details?.playable_build?.play_path;
+ },
+ redeployPath() {
+ return this.deployment.details?.playable_build?.retry_path;
+ },
+ stopUrl() {
+ return this.deployment.stop_url;
+ },
+ },
+ actionsConfiguration: {
+ [STOPPING]: {
+ actionName: STOPPING,
+ buttonText: s__('MrDeploymentActions|Stop environment'),
+ busyText: __('This environment is being deployed'),
+ confirmMessage: __('Are you sure you want to stop this environment?'),
+ errorMessage: __('Something went wrong while stopping this environment. Please try again.'),
+ },
+ [DEPLOYING]: {
+ actionName: DEPLOYING,
+ buttonText: s__('MrDeploymentActions|Deploy'),
+ busyText: __('This environment is being deployed'),
+ confirmMessage: __('Are you sure you want to deploy this environment?'),
+ errorMessage: __('Something went wrong while deploying this environment. Please try again.'),
+ },
+ [REDEPLOYING]: {
+ actionName: REDEPLOYING,
+ buttonText: s__('MrDeploymentActions|Re-deploy'),
+ busyText: __('This environment is being re-deployed'),
+ confirmMessage: __('Are you sure you want to re-deploy this environment?'),
+ errorMessage: __('Something went wrong while deploying this environment. Please try again.'),
+ },
+ },
+ methods: {
+ executeAction(endpoint, { actionName, confirmMessage, errorMessage }) {
+ const isConfirmed = confirm(confirmMessage); //eslint-disable-line
+
+ if (isConfirmed) {
+ this.actionInProgress = actionName;
+
+ MRWidgetService.executeInlineAction(endpoint)
+ .then(resp => {
+ const redirectUrl = resp?.data?.redirect_url;
+ if (redirectUrl) {
+ visitUrl(redirectUrl);
+ }
+ })
+ .catch(() => {
+ createFlash(errorMessage);
+ })
+ .finally(() => {
+ this.actionInProgress = null;
+ });
+ }
+ },
+ stopEnvironment() {
+ this.executeAction(this.stopUrl, this.$options.actionsConfiguration[STOPPING]);
+ },
+ deployManually() {
+ this.executeAction(this.playPath, this.$options.actionsConfiguration[DEPLOYING]);
+ },
+ redeploy() {
+ this.executeAction(this.redeployPath, this.$options.actionsConfiguration[REDEPLOYING]);
+ },
+ },
+};
+</script>
+
+<template>
+ <div>
+ <deployment-action-button
+ v-if="shouldShowManualButtons && canBeManuallyDeployed"
+ :action-in-progress="actionInProgress"
+ :actions-configuration="$options.actionsConfiguration[constants.DEPLOYING]"
+ :computed-deployment-status="computedDeploymentStatus"
+ container-classes="js-manual-deploy-action"
+ @click="deployManually"
+ >
+ <gl-icon name="play" />
+ <span>{{ $options.actionsConfiguration[constants.DEPLOYING].buttonText }}</span>
+ </deployment-action-button>
+ <deployment-action-button
+ v-if="shouldShowManualButtons && canBeManuallyRedeployed"
+ :action-in-progress="actionInProgress"
+ :actions-configuration="$options.actionsConfiguration[constants.REDEPLOYING]"
+ :computed-deployment-status="computedDeploymentStatus"
+ container-classes="js-manual-redeploy-action"
+ @click="redeploy"
+ >
+ <gl-icon name="repeat" />
+ <span>{{ $options.actionsConfiguration[constants.REDEPLOYING].buttonText }}</span>
+ </deployment-action-button>
+ <deployment-view-button
+ v-if="hasExternalUrls"
+ :app-button-text="appButtonText"
+ :deployment="deployment"
+ :show-visual-review-app="showVisualReviewApp"
+ :visual-review-app-meta="visualReviewAppMeta"
+ />
+ <deployment-action-button
+ v-if="stopUrl"
+ :action-in-progress="actionInProgress"
+ :computed-deployment-status="computedDeploymentStatus"
+ :actions-configuration="$options.actionsConfiguration[constants.STOPPING]"
+ :button-title="$options.actionsConfiguration[constants.STOPPING].buttonText"
+ container-classes="js-stop-env"
+ @click="stopEnvironment"
+ >
+ <gl-icon name="stop" />
+ </deployment-action-button>
+ </div>
+</template>
diff --git a/app/assets/javascripts/vue_merge_request_widget/components/deployment/deployment_stop_button.vue b/app/assets/javascripts/vue_merge_request_widget/components/deployment/deployment_stop_button.vue
deleted file mode 100644
index e20296c41a2..00000000000
--- a/app/assets/javascripts/vue_merge_request_widget/components/deployment/deployment_stop_button.vue
+++ /dev/null
@@ -1,83 +0,0 @@
-<script>
-import { GlTooltipDirective } from '@gitlab/ui';
-import { __ } from '~/locale';
-import Icon from '~/vue_shared/components/icon.vue';
-import LoadingButton from '~/vue_shared/components/loading_button.vue';
-import { visitUrl } from '~/lib/utils/url_utility';
-import createFlash from '~/flash';
-import MRWidgetService from '../../services/mr_widget_service';
-
-export default {
- name: 'DeploymentStopButton',
- components: {
- LoadingButton,
- Icon,
- },
- directives: {
- GlTooltip: GlTooltipDirective,
- },
- props: {
- isDeployInProgress: {
- type: Boolean,
- required: true,
- },
- stopUrl: {
- type: String,
- required: true,
- },
- },
- data() {
- return {
- isStopping: false,
- };
- },
- computed: {
- deployInProgressTooltip() {
- return this.isDeployInProgress
- ? __('Stopping this environment is currently not possible as a deployment is in progress')
- : '';
- },
- },
- methods: {
- stopEnvironment() {
- const msg = __('Are you sure you want to stop this environment?');
- const isConfirmed = confirm(msg); // eslint-disable-line
-
- if (isConfirmed) {
- this.isStopping = true;
-
- MRWidgetService.stopEnvironment(this.stopUrl)
- .then(res => res.data)
- .then(data => {
- if (data.redirect_url) {
- visitUrl(data.redirect_url);
- }
-
- this.isStopping = false;
- })
- .catch(() => {
- createFlash(
- __('Something went wrong while stopping this environment. Please try again.'),
- );
- this.isStopping = false;
- });
- }
- },
- },
-};
-</script>
-
-<template>
- <span v-gl-tooltip :title="deployInProgressTooltip" class="d-inline-block" tabindex="0">
- <loading-button
- v-gl-tooltip
- :loading="isStopping"
- :disabled="isDeployInProgress"
- :title="__('Stop environment')"
- container-class="js-stop-env btn btn-default btn-sm inline prepend-left-4"
- @click="stopEnvironment"
- >
- <icon name="stop" />
- </loading-button>
- </span>
-</template>
diff --git a/app/assets/javascripts/vue_merge_request_widget/components/deployment/deployment_view_button.vue b/app/assets/javascripts/vue_merge_request_widget/components/deployment/deployment_view_button.vue
index 18d4073ecd4..5dabd9fe5fe 100644
--- a/app/assets/javascripts/vue_merge_request_widget/components/deployment/deployment_view_button.vue
+++ b/app/assets/javascripts/vue_merge_request_widget/components/deployment/deployment_view_button.vue
@@ -1,4 +1,5 @@
<script>
+import { GlLink } from '@gitlab/ui';
import FilteredSearchDropdown from '~/vue_shared/components/filtered_search_dropdown.vue';
import ReviewAppLink from '../review_app_link.vue';
@@ -6,6 +7,7 @@ export default {
name: 'DeploymentViewButton',
components: {
FilteredSearchDropdown,
+ GlLink,
ReviewAppLink,
VisualReviewAppLink: () =>
import('ee_component/vue_merge_request_widget/components/visual_review_app_link.vue'),
@@ -67,7 +69,7 @@ export default {
</template>
<template slot="result" slot-scope="slotProps">
- <a
+ <gl-link
:href="slotProps.result.external_url"
target="_blank"
rel="noopener noreferrer nofollow"
@@ -80,20 +82,21 @@ export default {
<p class="text-secondary str-truncated-100 append-bottom-0 d-block">
{{ slotProps.result.external_url }}
</p>
- </a>
+ </gl-link>
</template>
</filtered-search-dropdown>
- <template v-else>
- <review-app-link
- :display="appButtonText"
- :link="deploymentExternalUrl"
- css-class="js-deploy-url deploy-link btn btn-default btn-sm inline"
- />
- </template>
+ <review-app-link
+ v-else
+ :display="appButtonText"
+ :link="deploymentExternalUrl"
+ css-class="js-deploy-url deploy-link btn btn-default btn-sm inline"
+ />
<visual-review-app-link
v-if="showVisualReviewApp"
+ :view-app-display="appButtonText"
:link="deploymentExternalUrl"
:app-metadata="visualReviewAppMeta"
+ :changes="deployment.changes"
/>
</span>
</template>
diff --git a/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_header.vue b/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_header.vue
index 7c71463c949..c38272ab239 100644
--- a/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_header.vue
+++ b/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_header.vue
@@ -1,5 +1,5 @@
<script>
-import _ from 'underscore';
+import { escape as esc } from 'lodash';
import { n__, s__, sprintf } from '~/locale';
import { mergeUrlParams, webIDEUrl } from '~/lib/utils/url_utility';
import Icon from '~/vue_shared/components/icon.vue';
@@ -35,7 +35,7 @@ export default {
'mrWidget|The source branch is %{commitsBehindLinkStart}%{commitsBehind}%{commitsBehindLinkEnd} the target branch',
),
{
- commitsBehindLinkStart: `<a href="${_.escape(this.mr.targetBranchPath)}">`,
+ commitsBehindLinkStart: `<a href="${esc(this.mr.targetBranchPath)}">`,
commitsBehind: n__('%d commit behind', '%d commits behind', this.mr.divergedCommitsCount),
commitsBehindLinkEnd: '</a>',
},
@@ -121,6 +121,7 @@ export default {
data-placement="bottom"
tabindex="0"
role="button"
+ data-qa-selector="open_in_web_ide_button"
>
{{ s__('mrWidget|Open in Web IDE') }}
</a>
diff --git a/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_pipeline_container.vue b/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_pipeline_container.vue
index 90fb254ecca..d81e99d3c09 100644
--- a/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_pipeline_container.vue
+++ b/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_pipeline_container.vue
@@ -1,5 +1,5 @@
<script>
-import _ from 'underscore';
+import { isNumber } from 'lodash';
import ArtifactsApp from './artifacts_list_app.vue';
import Deployment from './deployment/deployment.vue';
import MrWidgetContainer from './mr_widget_container.vue';
@@ -67,7 +67,7 @@ export default {
return this.mr.visualReviewAppAvailable && this.glFeatures.anonymousVisualReviewFeedback;
},
showMergeTrainPositionIndicator() {
- return _.isNumber(this.mr.mergeTrainIndex);
+ return isNumber(this.mr.mergeTrainIndex);
},
},
};
diff --git a/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_suggest_pipeline.vue b/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_suggest_pipeline.vue
index f08bfb3a90f..9942861d9e4 100644
--- a/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_suggest_pipeline.vue
+++ b/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_suggest_pipeline.vue
@@ -1,45 +1,75 @@
<script>
import { GlLink, GlSprintf } from '@gitlab/ui';
import MrWidgetIcon from './mr_widget_icon.vue';
+import PipelineTourState from './states/mr_widget_pipeline_tour.vue';
export default {
name: 'MRWidgetSuggestPipeline',
iconName: 'status_notfound',
+ popoverTarget: 'suggest-popover',
+ popoverContainer: 'suggest-pipeline',
+ trackLabel: 'no_pipeline_noticed',
+ linkTrackValue: 30,
+ linkTrackEvent: 'click_link',
components: {
GlLink,
GlSprintf,
MrWidgetIcon,
+ PipelineTourState,
},
props: {
pipelinePath: {
type: String,
required: true,
},
+ pipelineSvgPath: {
+ type: String,
+ required: true,
+ },
+ humanAccess: {
+ type: String,
+ required: true,
+ },
},
};
</script>
<template>
- <div class="d-flex mr-pipeline-suggest append-bottom-default">
+ <div :id="$options.popoverContainer" class="d-flex mr-pipeline-suggest append-bottom-default">
<mr-widget-icon :name="$options.iconName" />
- <gl-sprintf
- class="js-no-pipeline-message"
- :message="
- s__(`mrWidget|%{prefixToLinkStart}No pipeline%{prefixToLinkEnd}
+ <div :id="$options.popoverTarget">
+ <gl-sprintf
+ :message="
+ s__(`mrWidget|%{prefixToLinkStart}No pipeline%{prefixToLinkEnd}
%{addPipelineLinkStart}Add the .gitlab-ci.yml file%{addPipelineLinkEnd}
to create one.`)
- "
- >
- <template #prefixToLink="{content}">
- <strong>
- {{ content }}
- </strong>
- </template>
- <template #addPipelineLink="{content}">
- <gl-link :href="pipelinePath" class="ml-2">
- {{ content }}
- </gl-link>
- &nbsp;
- </template>
- </gl-sprintf>
+ "
+ >
+ <template #prefixToLink="{content}">
+ <strong>
+ {{ content }}
+ </strong>
+ </template>
+ <template #addPipelineLink="{content}">
+ <gl-link
+ :href="pipelinePath"
+ class="ml-2 js-add-pipeline-path"
+ :data-track-property="humanAccess"
+ :data-track-value="$options.linkTrackValue"
+ :data-track-event="$options.linkTrackEvent"
+ :data-track-label="$options.trackLabel"
+ >
+ {{ content }}
+ </gl-link>
+ </template>
+ </gl-sprintf>
+ <pipeline-tour-state
+ :pipeline-path="pipelinePath"
+ :pipeline-svg-path="pipelineSvgPath"
+ :human-access="humanAccess"
+ :popover-target="$options.popoverTarget"
+ :popover-container="$options.popoverContainer"
+ :track-label="$options.trackLabel"
+ />
+ </div>
</div>
</template>
diff --git a/app/assets/javascripts/vue_merge_request_widget/components/states/commits_header.vue b/app/assets/javascripts/vue_merge_request_widget/components/states/commits_header.vue
index 01524f4b650..266c07ead25 100644
--- a/app/assets/javascripts/vue_merge_request_widget/components/states/commits_header.vue
+++ b/app/assets/javascripts/vue_merge_request_widget/components/states/commits_header.vue
@@ -1,6 +1,6 @@
<script>
import { GlButton } from '@gitlab/ui';
-import _ from 'underscore';
+import { escape as esc } from 'lodash';
import { __, n__, sprintf, s__ } from '~/locale';
import Icon from '~/vue_shared/components/icon.vue';
@@ -60,7 +60,7 @@ export default {
{
commitCount: `<strong class="commits-count-message">${this.commitsCountMessage}</strong>`,
mergeCommitCount: `<strong>${s__('mrWidgetCommitsAdded|1 merge commit')}</strong>`,
- targetBranch: `<span class="label-branch">${_.escape(this.targetBranch)}</span>`,
+ targetBranch: `<span class="label-branch">${esc(this.targetBranch)}</span>`,
},
false,
);
diff --git a/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_auto_merge_enabled.vue b/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_auto_merge_enabled.vue
index 8e8e67228ed..a368e29d086 100644
--- a/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_auto_merge_enabled.vue
+++ b/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_auto_merge_enabled.vue
@@ -1,5 +1,4 @@
<script>
-import _ from 'underscore';
import autoMergeMixin from 'ee_else_ce/vue_merge_request_widget/mixins/auto_merge';
import Flash from '../../../flash';
import statusIcon from '../mr_widget_status_icon.vue';
@@ -53,6 +52,7 @@ export default {
.then(res => res.data)
.then(data => {
eventHub.$emit('UpdateWidgetData', data);
+ eventHub.$emit('MRWidgetUpdateRequested');
})
.catch(() => {
this.isCancellingAutoMerge = false;
@@ -71,7 +71,7 @@ export default {
.merge(options)
.then(res => res.data)
.then(data => {
- if (_.includes(AUTO_MERGE_STRATEGIES, data.status)) {
+ if (AUTO_MERGE_STRATEGIES.includes(data.status)) {
eventHub.$emit('MRWidgetUpdateRequested');
}
})
diff --git a/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_conflicts.vue b/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_conflicts.vue
index 3df4a777aca..139cbe17e35 100644
--- a/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_conflicts.vue
+++ b/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_conflicts.vue
@@ -1,6 +1,6 @@
<script>
import $ from 'jquery';
-import _ from 'underscore';
+import { escape as esc } from 'lodash';
import { s__, sprintf } from '~/locale';
import { mouseenter, debouncedMouseleave, togglePopover } from '~/shared/popover';
import StatusIcon from '../mr_widget_status_icon.vue';
@@ -50,7 +50,7 @@ export default {
content: sprintf(
s__('mrWidget|%{link_start}Learn more about resolving conflicts%{link_end}'),
{
- link_start: `<a href="${_.escape(
+ link_start: `<a href="${esc(
this.mr.conflictsDocsPath,
)}" target="_blank" rel="noopener noreferrer">`,
link_end: '</a>',
diff --git a/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_pipeline_tour.vue b/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_pipeline_tour.vue
new file mode 100644
index 00000000000..f2d7e86a85e
--- /dev/null
+++ b/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_pipeline_tour.vue
@@ -0,0 +1,143 @@
+<script>
+import { s__, sprintf } from '~/locale';
+import { GlPopover, GlButton } from '@gitlab/ui';
+import Icon from '~/vue_shared/components/icon.vue';
+import Cookies from 'js-cookie';
+import { parseBoolean } from '~/lib/utils/common_utils';
+import Tracking from '~/tracking';
+
+const trackingMixin = Tracking.mixin();
+
+const cookieKey = 'suggest_pipeline_dismissed';
+
+export default {
+ name: 'MRWidgetPipelineTour',
+ dismissTrackValue: 20,
+ showTrackValue: 10,
+ trackEvent: 'click_button',
+ popoverContent: sprintf(
+ '%{messageText1}%{lineBreak}%{messageText2}%{lineBreak}%{messageText3}%{lineBreak}%{messageText4}%{lineBreak}%{messageText5}',
+ {
+ messageText1: s__('mrWidget|Detect issues before deployment with a CI pipeline'),
+ messageText2: s__('mrWidget|that continuously tests your code. We created'),
+ messageText3: s__("mrWidget|a quick guide that'll show you how to create"),
+ messageText4: s__('mrWidget|one. Make your code more secure and more'),
+ messageText5: s__('mrWidget|robust in just a minute.'),
+ lineBreak: '<br/>',
+ },
+ false,
+ ),
+ components: {
+ GlPopover,
+ GlButton,
+ Icon,
+ },
+ mixins: [trackingMixin],
+ props: {
+ pipelinePath: {
+ type: String,
+ required: true,
+ },
+ pipelineSvgPath: {
+ type: String,
+ required: true,
+ },
+ humanAccess: {
+ type: String,
+ required: true,
+ },
+ popoverTarget: {
+ type: String,
+ required: true,
+ },
+ popoverContainer: {
+ type: String,
+ required: true,
+ },
+ trackLabel: {
+ type: String,
+ required: true,
+ },
+ },
+ data() {
+ return {
+ popoverDismissed: parseBoolean(Cookies.get(cookieKey)),
+ tracking: {
+ label: this.trackLabel,
+ property: this.humanAccess,
+ },
+ };
+ },
+ mounted() {
+ this.trackOnShow();
+ },
+ methods: {
+ trackOnShow() {
+ if (!this.popoverDismissed) {
+ this.track();
+ }
+ },
+ dismissPopover() {
+ this.popoverDismissed = true;
+ Cookies.set(cookieKey, this.popoverDismissed, { expires: 365 });
+ },
+ },
+};
+</script>
+<template>
+ <gl-popover
+ v-if="!popoverDismissed"
+ show
+ :target="popoverTarget"
+ :container="popoverContainer"
+ placement="rightbottom"
+ >
+ <template #title>
+ <button
+ class="btn-blank float-right mt-1"
+ type="button"
+ :aria-label="__('Close')"
+ :data-track-property="humanAccess"
+ :data-track-value="$options.dismissTrackValue"
+ :data-track-event="$options.trackEvent"
+ :data-track-label="trackLabel"
+ @click="dismissPopover"
+ >
+ <icon name="close" aria-hidden="true" />
+ </button>
+ {{ s__('mrWidget|Are you adding technical debt or code vulnerabilities?') }}
+ </template>
+ <div class="svg-content svg-150 pt-1">
+ <img :src="pipelineSvgPath" />
+ </div>
+ <p v-html="$options.popoverContent"></p>
+ <gl-button
+ ref="ok"
+ category="primary"
+ class="mt-2 mb-0"
+ variant="info"
+ block
+ :href="pipelinePath"
+ :data-track-property="humanAccess"
+ :data-track-value="$options.showTrackValue"
+ :data-track-event="$options.trackEvent"
+ :data-track-label="trackLabel"
+ >
+ {{ __('Show me how') }}
+ </gl-button>
+ <gl-button
+ ref="no-thanks"
+ category="secondary"
+ class="mt-2 mb-0"
+ variant="info"
+ block
+ :data-track-property="humanAccess"
+ :data-track-value="$options.dismissTrackValue"
+ :data-track-event="$options.trackEvent"
+ :data-track-label="trackLabel"
+ @click="dismissPopover"
+ >
+ {{ __("No thanks, don't show this again") }}
+ </gl-button>
+ </gl-popover>
+</template>
diff --git a/app/assets/javascripts/vue_merge_request_widget/components/states/ready_to_merge.vue b/app/assets/javascripts/vue_merge_request_widget/components/states/ready_to_merge.vue
index 66167a0d748..e34060c3393 100644
--- a/app/assets/javascripts/vue_merge_request_widget/components/states/ready_to_merge.vue
+++ b/app/assets/javascripts/vue_merge_request_widget/components/states/ready_to_merge.vue
@@ -1,6 +1,6 @@
<script>
-import _ from 'underscore';
-import { GlIcon } from '@gitlab/ui';
+import { isEmpty } from 'lodash';
+import { GlIcon, GlButton } from '@gitlab/ui';
import successSvg from 'icons/_icon_status_success.svg';
import warningSvg from 'icons/_icon_status_warning.svg';
import readyToMergeMixin from 'ee_else_ce/vue_merge_request_widget/mixins/ready_to_merge';
@@ -26,6 +26,7 @@ export default {
CommitEdit,
CommitMessageDropdown,
GlIcon,
+ GlButton,
MergeImmediatelyConfirmationDialog: () =>
import(
'ee_component/vue_merge_request_widget/components/merge_immediately_confirmation_dialog.vue'
@@ -50,7 +51,7 @@ export default {
},
computed: {
isAutoMergeAvailable() {
- return !_.isEmpty(this.mr.availableAutoMergeStrategies);
+ return !isEmpty(this.mr.availableAutoMergeStrategies);
},
status() {
const { pipeline, isPipelineFailed, hasCI, ciStatus } = this.mr;
@@ -67,18 +68,13 @@ export default {
return 'success';
},
- mergeButtonClass() {
- const defaultClass = 'btn btn-sm btn-success accept-merge-request';
- const failedClass = `${defaultClass} btn-danger`;
- const inActionClass = `${defaultClass} btn-info`;
-
+ mergeButtonVariant() {
if (this.status === 'failed') {
- return failedClass;
+ return 'danger';
} else if (this.status === 'pending') {
- return inActionClass;
+ return 'info';
}
-
- return defaultClass;
+ return 'success';
},
iconClass() {
if (
@@ -162,7 +158,7 @@ export default {
.then(data => {
const hasError = data.status === 'failed' || data.status === 'hook_validation_error';
- if (_.includes(AUTO_MERGE_STRATEGIES, data.status)) {
+ if (AUTO_MERGE_STRATEGIES.includes(data.status)) {
eventHub.$emit('MRWidgetUpdateRequested');
} else if (data.status === 'success') {
this.initiateMergePolling();
@@ -267,16 +263,16 @@ export default {
<div class="media-body">
<div class="mr-widget-body-controls media space-children">
<span class="btn-group">
- <button
+ <gl-button
+ size="sm"
+ class="qa-merge-button accept-merge-request"
+ :variant="mergeButtonVariant"
:disabled="isMergeButtonDisabled"
- :class="mergeButtonClass"
- type="button"
- class="qa-merge-button"
+ :loading="isMakingRequest"
@click="handleMergeButtonClick(isAutoMergeAvailable)"
>
- <i v-if="isMakingRequest" class="fa fa-spinner fa-spin" aria-hidden="true"></i>
{{ mergeButtonText }}
- </button>
+ </gl-button>
<button
v-if="shouldShowMergeImmediatelyDropdown"
:disabled="isMergeButtonDisabled"
diff --git a/app/assets/javascripts/vue_merge_request_widget/components/states/work_in_progress.vue b/app/assets/javascripts/vue_merge_request_widget/components/states/work_in_progress.vue
index 8132b1a944b..e52ad9156d5 100644
--- a/app/assets/javascripts/vue_merge_request_widget/components/states/work_in_progress.vue
+++ b/app/assets/javascripts/vue_merge_request_widget/components/states/work_in_progress.vue
@@ -1,15 +1,17 @@
<script>
import $ from 'jquery';
-import { __ } from '~/locale';
+import { GlButton } from '@gitlab/ui';
+import { __, s__ } from '~/locale';
import createFlash from '~/flash';
-import statusIcon from '../mr_widget_status_icon.vue';
+import StatusIcon from '../mr_widget_status_icon.vue';
import tooltip from '../../../vue_shared/directives/tooltip';
import eventHub from '../../event_hub';
export default {
name: 'WorkInProgress',
components: {
- statusIcon,
+ StatusIcon,
+ GlButton,
},
directives: {
tooltip,
@@ -23,8 +25,15 @@ export default {
isMakingRequest: false,
};
},
+ computed: {
+ wipInfoTooltip() {
+ return s__(
+ 'mrWidget|When this merge request is ready, remove the WIP: prefix from the title to allow it to be merged',
+ );
+ },
+ },
methods: {
- removeWIP() {
+ handleRemoveWIP() {
this.isMakingRequest = true;
this.service
.removeWIP()
@@ -52,29 +61,22 @@ export default {
<i
v-tooltip
class="fa fa-question-circle"
- :title="
- s__(
- 'mrWidget|When this merge request is ready, remove the WIP: prefix from the title to allow it to be merged',
- )
- "
- :aria-label="
- s__(
- 'mrWidget|When this merge request is ready, remove the WIP: prefix from the title to allow it to be merged',
- )
- "
+ :title="wipInfoTooltip"
+ :aria-label="wipInfoTooltip"
>
</i>
</span>
- <button
+ <gl-button
v-if="mr.removeWIPPath"
+ size="sm"
+ variant="default"
:disabled="isMakingRequest"
- type="button"
- class="btn btn-default btn-sm js-remove-wip"
- @click="removeWIP"
+ :loading="isMakingRequest"
+ class="js-remove-wip"
+ @click="handleRemoveWIP"
>
- <i v-if="isMakingRequest" class="fa fa-spinner fa-spin" aria-hidden="true"> </i>
{{ s__('mrWidget|Resolve WIP status') }}
- </button>
+ </gl-button>
</div>
</div>
</template>
diff --git a/app/assets/javascripts/vue_merge_request_widget/mr_widget_options.vue b/app/assets/javascripts/vue_merge_request_widget/mr_widget_options.vue
index 27f13ace779..8b12e8ffb73 100644
--- a/app/assets/javascripts/vue_merge_request_widget/mr_widget_options.vue
+++ b/app/assets/javascripts/vue_merge_request_widget/mr_widget_options.vue
@@ -1,5 +1,5 @@
<script>
-import _ from 'underscore';
+import { isEmpty } from 'lodash';
import MRWidgetStore from 'ee_else_ce/vue_merge_request_widget/stores/mr_widget_store';
import MRWidgetService from 'ee_else_ce/vue_merge_request_widget/services/mr_widget_service';
import stateMaps from 'ee_else_ce/vue_merge_request_widget/stores/state_maps';
@@ -118,7 +118,7 @@ export default {
return this.mr.allowCollaboration && this.mr.isOpen;
},
shouldRenderMergedPipeline() {
- return this.mr.state === 'merged' && !_.isEmpty(this.mr.mergePipeline);
+ return this.mr.state === 'merged' && !isEmpty(this.mr.mergePipeline);
},
showMergePipelineForkWarning() {
return Boolean(
@@ -212,6 +212,8 @@ export default {
return new MRWidgetService(this.getServiceEndpoints(store));
},
checkStatus(cb, isRebased) {
+ if (document.visibilityState !== 'visible') return Promise.resolve();
+
return this.service
.checkStatus()
.then(({ data }) => {
@@ -362,6 +364,8 @@ export default {
v-if="shouldSuggestPipelines"
class="mr-widget-workflow"
:pipeline-path="mr.mergeRequestAddCiConfigPath"
+ :pipeline-svg-path="mr.pipelinesEmptySvgPath"
+ :human-access="mr.humanAccess.toLowerCase()"
/>
<mr-widget-pipeline-container
v-if="shouldRenderPipelines"
diff --git a/app/assets/javascripts/vue_merge_request_widget/services/mr_widget_service.js b/app/assets/javascripts/vue_merge_request_widget/services/mr_widget_service.js
index d22cb4ced80..c620023a6d6 100644
--- a/app/assets/javascripts/vue_merge_request_widget/services/mr_widget_service.js
+++ b/app/assets/javascripts/vue_merge_request_widget/services/mr_widget_service.js
@@ -54,7 +54,7 @@ export default class MRWidgetService {
return axios.post(this.endpoints.rebasePath);
}
- static stopEnvironment(url) {
+ static executeInlineAction(url) {
return axios.post(url);
}
diff --git a/app/assets/javascripts/vue_merge_request_widget/stores/mr_widget_store.js b/app/assets/javascripts/vue_merge_request_widget/stores/mr_widget_store.js
index 73a0b3cb673..321b9270dde 100644
--- a/app/assets/javascripts/vue_merge_request_widget/stores/mr_widget_store.js
+++ b/app/assets/javascripts/vue_merge_request_widget/stores/mr_widget_store.js
@@ -1,5 +1,4 @@
import { format } from 'timeago.js';
-import _ from 'underscore';
import getStateKey from 'ee_else_ce/vue_merge_request_widget/stores/get_state_key';
import { stateKey } from './state_maps';
import { formatDate } from '../../lib/utils/datetime_utility';
@@ -123,13 +122,15 @@ export default class MergeRequestStore {
const currentUser = data.current_user;
- this.cherryPickInForkPath = currentUser.cherry_pick_in_fork_path;
- this.revertInForkPath = currentUser.revert_in_fork_path;
+ if (currentUser) {
+ this.cherryPickInForkPath = currentUser.cherry_pick_in_fork_path;
+ this.revertInForkPath = currentUser.revert_in_fork_path;
- this.canRemoveSourceBranch = currentUser.can_remove_source_branch || false;
- this.canCreateIssue = currentUser.can_create_issue || false;
- this.canCherryPickInCurrentMR = currentUser.can_cherry_pick_on_current_merge_request || false;
- this.canRevertInCurrentMR = currentUser.can_revert_on_current_merge_request || false;
+ this.canRemoveSourceBranch = currentUser.can_remove_source_branch || false;
+ this.canCreateIssue = currentUser.can_create_issue || false;
+ this.canCherryPickInCurrentMR = currentUser.can_cherry_pick_on_current_merge_request || false;
+ this.canRevertInCurrentMR = currentUser.can_revert_on_current_merge_request || false;
+ }
this.setState(data);
}
@@ -176,7 +177,9 @@ export default class MergeRequestStore {
this.eligibleApproversDocsPath = data.eligible_approvers_docs_path;
this.mergeImmediatelyDocsPath = data.merge_immediately_docs_path;
this.mergeRequestAddCiConfigPath = data.merge_request_add_ci_config_path;
+ this.pipelinesEmptySvgPath = data.pipelines_empty_svg_path;
this.humanAccess = data.human_access;
+ this.newPipelinePath = data.new_project_pipeline_path;
}
get isNothingToMergeState() {
@@ -224,11 +227,13 @@ export default class MergeRequestStore {
}
static getPreferredAutoMergeStrategy(availableAutoMergeStrategies) {
- if (_.includes(availableAutoMergeStrategies, MTWPS_MERGE_STRATEGY)) {
+ if (availableAutoMergeStrategies === undefined) return undefined;
+
+ if (availableAutoMergeStrategies.includes(MTWPS_MERGE_STRATEGY)) {
return MTWPS_MERGE_STRATEGY;
- } else if (_.includes(availableAutoMergeStrategies, MT_MERGE_STRATEGY)) {
+ } else if (availableAutoMergeStrategies.includes(MT_MERGE_STRATEGY)) {
return MT_MERGE_STRATEGY;
- } else if (_.includes(availableAutoMergeStrategies, MWPS_MERGE_STRATEGY)) {
+ } else if (availableAutoMergeStrategies.includes(MWPS_MERGE_STRATEGY)) {
return MWPS_MERGE_STRATEGY;
}
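
The guard added to `getPreferredAutoMergeStrategy` means an undefined strategy list no longer throws; a small usage sketch, with the strategy string values assumed rather than taken from this patch:

```js
import MergeRequestStore from '~/vue_merge_request_widget/stores/mr_widget_store';

// With the new guard, an undefined list short-circuits instead of throwing.
MergeRequestStore.getPreferredAutoMergeStrategy(undefined); // => undefined

// Precedence is unchanged: MTWPS, then MT, then MWPS (values assumed here).
MergeRequestStore.getPreferredAutoMergeStrategy([
  'merge_when_pipeline_succeeds',
  'add_to_merge_train_when_pipeline_succeeds',
]); // => 'add_to_merge_train_when_pipeline_succeeds'
```
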
diff --git a/app/assets/javascripts/vue_shared/components/blob_viewers/mixins.js b/app/assets/javascripts/vue_shared/components/blob_viewers/mixins.js
index 582213ee8d3..27f1a4f75d5 100644
--- a/app/assets/javascripts/vue_shared/components/blob_viewers/mixins.js
+++ b/app/assets/javascripts/vue_shared/components/blob_viewers/mixins.js
@@ -4,5 +4,9 @@ export default {
type: String,
required: true,
},
+ type: {
+ type: String,
+ required: true,
+ },
},
};
diff --git a/app/assets/javascripts/vue_shared/components/blob_viewers/rich_viewer.vue b/app/assets/javascripts/vue_shared/components/blob_viewers/rich_viewer.vue
index b3a1df8f303..afbfb1e0ee2 100644
--- a/app/assets/javascripts/vue_shared/components/blob_viewers/rich_viewer.vue
+++ b/app/assets/javascripts/vue_shared/components/blob_viewers/rich_viewer.vue
@@ -1,10 +1,14 @@
<script>
import ViewerMixin from './mixins';
+import { handleBlobRichViewer } from '~/blob/viewer';
export default {
mixins: [ViewerMixin],
+ mounted() {
+ handleBlobRichViewer(this.$refs.content, this.type);
+ },
};
</script>
<template>
- <div v-html="content"></div>
+ <div ref="content" v-html="content"></div>
</template>
diff --git a/app/assets/javascripts/vue_shared/components/changed_file_icon.vue b/app/assets/javascripts/vue_shared/components/changed_file_icon.vue
index 9ec99ac93d7..1bd320d81e8 100644
--- a/app/assets/javascripts/vue_shared/components/changed_file_icon.vue
+++ b/app/assets/javascripts/vue_shared/components/changed_file_icon.vue
@@ -1,8 +1,8 @@
<script>
import { GlTooltipDirective } from '@gitlab/ui';
import Icon from '~/vue_shared/components/icon.vue';
-import { __, sprintf } from '~/locale';
import { getCommitIconMap } from '~/ide/utils';
+import { __ } from '~/locale';
export default {
components: {
@@ -49,19 +49,17 @@ export default {
return `${this.changedIcon} float-left d-block`;
},
tooltipTitle() {
- if (!this.showTooltip || !this.file.changed) return undefined;
-
- const type = this.file.tempFile ? 'addition' : 'modification';
-
- if (this.file.staged) {
- return sprintf(__('Staged %{type}'), {
- type,
- });
+ if (!this.showTooltip) {
+ return undefined;
+ } else if (this.file.deleted) {
+ return __('Deleted');
+ } else if (this.file.tempFile) {
+ return __('Added');
+ } else if (this.file.changed) {
+ return __('Modified');
}
- return sprintf(__('Unstaged %{type}'), {
- type,
- });
+ return undefined;
},
showIcon() {
return (
diff --git a/app/assets/javascripts/vue_shared/components/confirm_modal.vue b/app/assets/javascripts/vue_shared/components/confirm_modal.vue
new file mode 100644
index 00000000000..52ff906ccec
--- /dev/null
+++ b/app/assets/javascripts/vue_shared/components/confirm_modal.vue
@@ -0,0 +1,68 @@
+<script>
+import { GlModal } from '@gitlab/ui';
+import csrf from '~/lib/utils/csrf';
+import { uniqueId } from 'lodash';
+
+export default {
+ components: {
+ GlModal,
+ },
+ props: {
+ selector: {
+ type: String,
+ required: true,
+ },
+ },
+ data() {
+ return {
+ modalId: uniqueId('confirm-modal-'),
+ path: '',
+ method: '',
+ modalAttributes: {},
+ };
+ },
+ mounted() {
+ document.querySelectorAll(this.selector).forEach(button => {
+ button.addEventListener('click', e => {
+ e.preventDefault();
+
+ this.path = button.dataset.path;
+ this.method = button.dataset.method;
+ this.modalAttributes = JSON.parse(button.dataset.modalAttributes);
+ this.openModal();
+ });
+ });
+ },
+ methods: {
+ openModal() {
+ this.$refs.modal.show();
+ },
+ closeModal() {
+ this.$refs.modal.hide();
+ },
+ submitModal() {
+ this.$refs.form.submit();
+ },
+ },
+ csrf,
+};
+</script>
+
+<template>
+ <gl-modal
+ ref="modal"
+ :modal-id="modalId"
+ v-bind="modalAttributes"
+ @primary="submitModal"
+ @cancel="closeModal"
+ >
+ <form ref="form" :action="path" method="post">
+ <!-- Rails workaround for <form method="delete" />
+ https://github.com/rails/rails/blob/master/actionview/app/assets/javascripts/rails-ujs/features/method.coffee
+ -->
+ <input type="hidden" name="_method" :value="method" />
+ <input type="hidden" name="authenticity_token" :value="$options.csrf.token" />
+ <div>{{ modalAttributes.message }}</div>
+ </form>
+ </gl-modal>
+</template>
diff --git a/app/assets/javascripts/vue_shared/components/date_time_picker/date_time_picker.vue b/app/assets/javascripts/vue_shared/components/date_time_picker/date_time_picker.vue
index 9ac687f5e2c..7b09337eb15 100644
--- a/app/assets/javascripts/vue_shared/components/date_time_picker/date_time_picker.vue
+++ b/app/assets/javascripts/vue_shared/components/date_time_picker/date_time_picker.vue
@@ -43,6 +43,11 @@ export default {
required: false,
default: () => defaultTimeRanges,
},
+ customEnabled: {
+ type: Boolean,
+ required: false,
+ default: true,
+ },
},
data() {
return {
@@ -166,6 +171,7 @@ export default {
>
<div class="d-flex justify-content-between gl-p-2">
<gl-form-group
+ v-if="customEnabled"
:label="__('Custom range')"
label-for="custom-from-time"
label-class="gl-pb-1"
diff --git a/app/assets/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer.vue b/app/assets/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer.vue
index e30871b66fc..6f5a133b225 100644
--- a/app/assets/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer.vue
+++ b/app/assets/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer.vue
@@ -112,6 +112,7 @@ export default {
<div class="image">
<image-viewer
:path="imagePath"
+ :file-size="isNew ? newSize : oldSize"
:inner-css-classes="[
'frame',
{
diff --git a/app/assets/javascripts/vue_shared/components/gl_mentions.vue b/app/assets/javascripts/vue_shared/components/gl_mentions.vue
new file mode 100644
index 00000000000..bbf293664a6
--- /dev/null
+++ b/app/assets/javascripts/vue_shared/components/gl_mentions.vue
@@ -0,0 +1,101 @@
+<script>
+import escape from 'lodash/escape';
+import sanitize from 'sanitize-html';
+import Tribute from 'tributejs';
+import axios from '~/lib/utils/axios_utils';
+import { spriteIcon } from '~/lib/utils/common_utils';
+
+/**
+ * Creates the HTML template for each row of the mentions dropdown.
+ *
+ * @param original An object from the array returned from the `autocomplete_sources/members` API
+ * @returns {string} An HTML template
+ */
+function createMenuItemTemplate({ original }) {
+ const rectAvatarClass = original.type === 'Group' ? 'rect-avatar' : '';
+
+ const avatarClasses = `avatar avatar-inline center s26 ${rectAvatarClass}
+ align-items-center d-inline-flex justify-content-center`;
+
+ const avatarTag = original.avatar_url
+ ? `<img
+ src="${original.avatar_url}"
+ alt="${original.username}'s avatar"
+ class="${avatarClasses}"/>`
+ : `<div class="${avatarClasses}">${original.username.charAt(0).toUpperCase()}</div>`;
+
+ const name = escape(sanitize(original.name));
+
+ const count = original.count && !original.mentionsDisabled ? ` (${original.count})` : '';
+
+ const icon = original.mentionsDisabled
+ ? spriteIcon('notifications-off', 's16 vertical-align-middle prepend-left-5')
+ : '';
+
+ return `${avatarTag}
+ ${original.username}
+ <small class="small font-weight-normal gl-color-inherit">${name}${count}</small>
+ ${icon}`;
+}
+
+/**
+ * Creates the list of users to show in the mentions dropdown.
+ *
+ * @param inputText The text entered by the user in the mentions input field
+ * @param processValues Callback function to set the list of users to show in the mentions dropdown
+ */
+function getMembers(inputText, processValues) {
+ if (this.members) {
+ processValues(this.members);
+ } else if (this.dataSources.members) {
+ axios
+ .get(this.dataSources.members)
+ .then(response => {
+ this.members = response.data;
+ processValues(response.data);
+ })
+ .catch(() => {});
+ } else {
+ processValues([]);
+ }
+}
+
+export default {
+ name: 'GlMentions',
+ props: {
+ dataSources: {
+ type: Object,
+ required: false,
+ default: () => gl.GfmAutoComplete?.dataSources || {},
+ },
+ },
+ data() {
+ return {
+ members: undefined,
+ options: {
+ trigger: '@',
+ fillAttr: 'username',
+ lookup(value) {
+ return value.name + value.username;
+ },
+ menuItemTemplate: createMenuItemTemplate.bind(this),
+ values: getMembers.bind(this),
+ },
+ };
+ },
+ mounted() {
+ const input = this.$slots.default[0].elm;
+ this.tribute = new Tribute(this.options);
+ this.tribute.attach(input);
+ },
+ beforeDestroy() {
+ const input = this.$slots.default[0].elm;
+ if (this.tribute) {
+ this.tribute.detach(input);
+ }
+ },
+ render(h) {
+ return h('div', this.$slots.default);
+ },
+};
+</script>
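
A hedged usage sketch for gl_mentions.vue: the component attaches Tribute to the first element in its default slot, so wrapping a plain textarea is enough to get "@" member autocompletion. The string template below assumes a Vue build that includes the template compiler; when the dataSources prop is omitted, the component falls back to gl.GfmAutoComplete.dataSources.

import GlMentions from '~/vue_shared/components/gl_mentions.vue';

export default {
  components: { GlMentions },
  // GlMentions renders a wrapper div around its slot and wires Tribute
  // to the textarea in mounted(), detaching it again in beforeDestroy().
  template: `
    <gl-mentions>
      <textarea></textarea>
    </gl-mentions>
  `,
};
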
diff --git a/app/assets/javascripts/vue_shared/components/loading_button.vue b/app/assets/javascripts/vue_shared/components/loading_button.vue
index 216f6c62e69..c9286e22f82 100644
--- a/app/assets/javascripts/vue_shared/components/loading_button.vue
+++ b/app/assets/javascripts/vue_shared/components/loading_button.vue
@@ -1,22 +1,11 @@
<script>
import { GlLoadingIcon } from '@gitlab/ui';
/* eslint-disable vue/require-default-prop */
-/* This is a re-usable vue component for rendering a button
- that will probably be sending off ajax requests and need
- to show the loading status by setting the `loading` option.
- This can also be used for initial page load when you don't
- know the action of the button yet by setting
- `loading: true, label: undefined`.
-
- Sample configuration:
-
- <loading-button
- :loading="true"
- :label="Hello"
- @click="..."
- />
-
- */
+/*
+This component will be deprecated in favor of gl-button.
+https://gitlab-org.gitlab.io/gitlab-ui/?path=/story/base-button--loading-button
+https://gitlab.com/gitlab-org/gitlab/issues/207412
+*/
export default {
components: {
diff --git a/app/assets/javascripts/vue_shared/components/markdown/toolbar.vue b/app/assets/javascripts/vue_shared/components/markdown/toolbar.vue
index 5140184eb8e..4da99e00165 100644
--- a/app/assets/javascripts/vue_shared/components/markdown/toolbar.vue
+++ b/app/assets/javascripts/vue_shared/components/markdown/toolbar.vue
@@ -1,10 +1,11 @@
<script>
/* eslint-disable @gitlab/vue-i18n/no-bare-strings */
-import { GlLink } from '@gitlab/ui';
+import { GlLink, GlLoadingIcon } from '@gitlab/ui';
export default {
components: {
GlLink,
+ GlLoadingIcon,
},
props: {
markdownDocsPath: {
@@ -54,9 +55,7 @@ export default {
<i class="fa fa-file-image-o toolbar-button-icon" aria-hidden="true"></i>
<span class="attaching-file-message"></span>
<span class="uploading-progress">0%</span>
- <span class="uploading-spinner">
- <i class="fa fa-spinner fa-spin toolbar-button-icon" aria-hidden="true"></i>
- </span>
+ <gl-loading-icon inline class="align-text-bottom" />
</span>
<span class="uploading-error-container hide">
<span class="uploading-error-icon">
diff --git a/app/assets/javascripts/vue_shared/components/notes/system_note.vue b/app/assets/javascripts/vue_shared/components/notes/system_note.vue
index 0c4d75fb0ad..4ad382ed888 100644
--- a/app/assets/javascripts/vue_shared/components/notes/system_note.vue
+++ b/app/assets/javascripts/vue_shared/components/notes/system_note.vue
@@ -54,8 +54,8 @@ export default {
};
},
computed: {
- ...mapGetters(['targetNoteHash']),
- ...mapState(['descriptionVersion', 'isLoadingDescriptionVersion']),
+ ...mapGetters(['targetNoteHash', 'descriptionVersions']),
+ ...mapState(['isLoadingDescriptionVersion']),
noteAnchorId() {
return `note_${this.note.id}`;
},
@@ -81,6 +81,9 @@ export default {
.children().length > MAX_VISIBLE_COMMIT_LIST_COUNT
);
},
+ descriptionVersion() {
+ return this.descriptionVersions[this.note.description_version_id];
+ },
},
mounted() {
initMRPopovers(this.$el.querySelectorAll('.gfm-merge_request'));
diff --git a/app/assets/javascripts/vue_shared/components/pagination/constants.js b/app/assets/javascripts/vue_shared/components/pagination/constants.js
index 229132c0e33..748ad178c70 100644
--- a/app/assets/javascripts/vue_shared/components/pagination/constants.js
+++ b/app/assets/javascripts/vue_shared/components/pagination/constants.js
@@ -3,8 +3,8 @@ import { s__ } from '~/locale';
export const PAGINATION_UI_BUTTON_LIMIT = 4;
export const UI_LIMIT = 6;
export const SPREAD = '...';
-export const PREV = s__('Pagination|‹ Prev');
-export const NEXT = s__('Pagination|Next ›');
+export const PREV = s__('Pagination|Prev');
+export const NEXT = s__('Pagination|Next');
export const FIRST = s__('Pagination|« First');
export const LAST = s__('Pagination|Last »');
export const LABEL_FIRST_PAGE = s__('Pagination|Go to first page');
diff --git a/app/assets/javascripts/vue_shared/components/sidebar/labels_select/dropdown_title.vue b/app/assets/javascripts/vue_shared/components/sidebar/labels_select/dropdown_title.vue
index 574b63cf8a6..69fb2bb4524 100644
--- a/app/assets/javascripts/vue_shared/components/sidebar/labels_select/dropdown_title.vue
+++ b/app/assets/javascripts/vue_shared/components/sidebar/labels_select/dropdown_title.vue
@@ -1,5 +1,10 @@
<script>
+import { GlLoadingIcon } from '@gitlab/ui';
+
export default {
+ components: {
+ GlLoadingIcon,
+ },
props: {
canEdit: {
type: Boolean,
@@ -13,7 +18,7 @@ export default {
<div class="title hide-collapsed append-bottom-10">
{{ __('Labels') }}
<template v-if="canEdit">
- <i aria-hidden="true" class="fa fa-spinner fa-spin block-loading" data-hidden="true"> </i>
+ <gl-loading-icon inline class="align-text-top block-loading" />
<button
type="button"
class="edit-link btn btn-blank float-right js-sidebar-dropdown-toggle"
diff --git a/app/assets/javascripts/vue_shared/components/sidebar/labels_select/dropdown_value.vue b/app/assets/javascripts/vue_shared/components/sidebar/labels_select/dropdown_value.vue
index 4abf7c478ee..fe43f77b1ee 100644
--- a/app/assets/javascripts/vue_shared/components/sidebar/labels_select/dropdown_value.vue
+++ b/app/assets/javascripts/vue_shared/components/sidebar/labels_select/dropdown_value.vue
@@ -1,12 +1,10 @@
<script>
-import DropdownValueScopedLabel from './dropdown_value_scoped_label.vue';
-import DropdownValueRegularLabel from './dropdown_value_regular_label.vue';
+import { GlLabel } from '@gitlab/ui';
import { isScopedLabel } from '~/lib/utils/common_utils';
export default {
components: {
- DropdownValueScopedLabel,
- DropdownValueRegularLabel,
+ GlLabel,
},
props: {
labels: {
@@ -37,12 +35,6 @@ export default {
labelFilterUrl(label) {
return `${this.labelFilterBasePath}?label_name[]=${encodeURIComponent(label.title)}`;
},
- labelStyle(label) {
- return {
- color: label.textColor,
- backgroundColor: label.color,
- };
- },
scopedLabelsDescription({ description = '' }) {
return `<span class="font-weight-bold scoped-label-tooltip-title">Scoped label</span><br />${description}`;
},
@@ -65,22 +57,15 @@ export default {
</span>
<template v-for="label in labels" v-else>
- <dropdown-value-scoped-label
- v-if="showScopedLabels(label)"
+ <gl-label
:key="label.id"
- :label="label"
- :label-filter-url="labelFilterUrl(label)"
- :label-style="labelStyle(label)"
+ :target="labelFilterUrl(label)"
+ :background-color="label.color"
+ :title="label.title"
+ :description="label.description"
+ :scoped="showScopedLabels(label)"
:scoped-labels-documentation-link="scopedLabelsDocumentationLink"
/>
-
- <dropdown-value-regular-label
- v-else
- :key="label.id"
- :label="label"
- :label-filter-url="labelFilterUrl(label)"
- :label-style="labelStyle(label)"
- />
</template>
</div>
</template>
diff --git a/app/assets/javascripts/vue_shared/components/sidebar/labels_select/dropdown_value_regular_label.vue b/app/assets/javascripts/vue_shared/components/sidebar/labels_select/dropdown_value_regular_label.vue
deleted file mode 100644
index f519f90445e..00000000000
--- a/app/assets/javascripts/vue_shared/components/sidebar/labels_select/dropdown_value_regular_label.vue
+++ /dev/null
@@ -1,34 +0,0 @@
-<script>
-import { GlTooltip } from '@gitlab/ui';
-
-export default {
- components: {
- GlTooltip,
- },
- props: {
- label: {
- type: Object,
- required: true,
- },
- labelStyle: {
- type: Object,
- required: true,
- },
- labelFilterUrl: {
- type: String,
- required: true,
- },
- },
-};
-</script>
-
-<template>
- <a ref="regularLabelRef" :href="labelFilterUrl">
- <span :style="labelStyle" class="badge color-label">
- {{ label.title }}
- </span>
- <gl-tooltip :target="() => $refs.regularLabelRef" placement="top" boundary="viewport">
- {{ label.description }}
- </gl-tooltip>
- </a>
-</template>
diff --git a/app/assets/javascripts/vue_shared/components/sidebar/labels_select/dropdown_value_scoped_label.vue b/app/assets/javascripts/vue_shared/components/sidebar/labels_select/dropdown_value_scoped_label.vue
deleted file mode 100644
index ad5a86de166..00000000000
--- a/app/assets/javascripts/vue_shared/components/sidebar/labels_select/dropdown_value_scoped_label.vue
+++ /dev/null
@@ -1,47 +0,0 @@
-<script>
-import { GlLink, GlTooltip } from '@gitlab/ui';
-
-export default {
- components: {
- GlTooltip,
- GlLink,
- },
- props: {
- label: {
- type: Object,
- required: true,
- },
- labelStyle: {
- type: Object,
- required: true,
- },
- scopedLabelsDocumentationLink: {
- type: String,
- required: true,
- },
- labelFilterUrl: {
- type: String,
- required: true,
- },
- },
-};
-</script>
-
-<template>
- <span class="d-inline-block position-relative scoped-label-wrapper">
- <a :href="labelFilterUrl">
- <span :ref="`labelTitleRef`" :style="labelStyle" class="badge color-label label">
- {{ label.title }}
- </span>
- <gl-tooltip :target="() => $refs.labelTitleRef" placement="top" boundary="viewport">
- <span class="font-weight-bold scoped-label-tooltip-title">{{ __('Scoped label') }}</span
- ><br />
- {{ label.description }}
- </gl-tooltip>
- </a>
-
- <gl-link :href="scopedLabelsDocumentationLink" target="_blank" class="label scoped-label"
- ><i class="fa fa-question-circle" :style="labelStyle"></i
- ></gl-link>
- </span>
-</template>
diff --git a/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/dropdown_button.vue b/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/dropdown_button.vue
new file mode 100644
index 00000000000..b9c611d2764
--- /dev/null
+++ b/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/dropdown_button.vue
@@ -0,0 +1,21 @@
+<script>
+import { mapGetters } from 'vuex';
+import { GlButton, GlIcon } from '@gitlab/ui';
+
+export default {
+ components: {
+ GlButton,
+ GlIcon,
+ },
+ computed: {
+ ...mapGetters(['dropdownButtonText']),
+ },
+};
+</script>
+
+<template>
+ <gl-button class="labels-select-dropdown-button w-100 text-left">
+ <span class="dropdown-toggle-text">{{ dropdownButtonText }}</span>
+ <gl-icon name="chevron-down" class="pull-right" />
+ </gl-button>
+</template>
diff --git a/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/dropdown_contents.vue b/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/dropdown_contents.vue
new file mode 100644
index 00000000000..ef8218b5135
--- /dev/null
+++ b/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/dropdown_contents.vue
@@ -0,0 +1,30 @@
+<script>
+import { mapState } from 'vuex';
+
+import DropdownContentsLabelsView from './dropdown_contents_labels_view.vue';
+import DropdownContentsCreateView from './dropdown_contents_create_view.vue';
+
+export default {
+ components: {
+ DropdownContentsLabelsView,
+ DropdownContentsCreateView,
+ },
+ computed: {
+ ...mapState(['showDropdownContentsCreateView']),
+ dropdownContentsView() {
+ if (this.showDropdownContentsCreateView) {
+ return 'dropdown-contents-create-view';
+ }
+ return 'dropdown-contents-labels-view';
+ },
+ },
+};
+</script>
+
+<template>
+ <div
+ class="labels-select-dropdown-contents w-100 mt-1 mb-3 py-2 rounded-top rounded-bottom position-absolute"
+ >
+ <component :is="dropdownContentsView" />
+ </div>
+</template>
diff --git a/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_create_view.vue b/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_create_view.vue
new file mode 100644
index 00000000000..285a0fe9ffb
--- /dev/null
+++ b/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_create_view.vue
@@ -0,0 +1,124 @@
+<script>
+import { mapState, mapActions } from 'vuex';
+import {
+ GlTooltipDirective,
+ GlButton,
+ GlIcon,
+ GlFormInput,
+ GlLink,
+ GlLoadingIcon,
+} from '@gitlab/ui';
+
+export default {
+ components: {
+ GlButton,
+ GlIcon,
+ GlFormInput,
+ GlLink,
+ GlLoadingIcon,
+ },
+ directives: {
+ GlTooltip: GlTooltipDirective,
+ },
+ data() {
+ return {
+ labelTitle: '',
+ selectedColor: '',
+ };
+ },
+ computed: {
+ ...mapState(['labelsCreateTitle', 'labelCreateInProgress']),
+ disableCreate() {
+ return !this.labelTitle.length || !this.selectedColor.length || this.labelCreateInProgress;
+ },
+ suggestedColors() {
+ const colorsMap = gon.suggested_label_colors;
+ return Object.keys(colorsMap).map(color => ({ [color]: colorsMap[color] }));
+ },
+ },
+ methods: {
+ ...mapActions(['toggleDropdownContents', 'toggleDropdownContentsCreateView', 'createLabel']),
+ getColorCode(color) {
+ return Object.keys(color).pop();
+ },
+ getColorName(color) {
+ return Object.values(color).pop();
+ },
+ handleColorClick(color) {
+ this.selectedColor = this.getColorCode(color);
+ },
+ handleCreateClick() {
+ this.createLabel({
+ title: this.labelTitle,
+ color: this.selectedColor,
+ });
+ },
+ },
+};
+</script>
+
+<template>
+ <div class="labels-select-contents-create">
+ <div class="dropdown-title d-flex align-items-center pt-0 pb-2">
+ <gl-button
+ :aria-label="__('Go back')"
+ variant="link"
+ size="sm"
+ class="dropdown-header-button p-0"
+ @click="toggleDropdownContentsCreateView"
+ >
+ <gl-icon name="arrow-left" />
+ </gl-button>
+ <span class="flex-grow-1">{{ labelsCreateTitle }}</span>
+ <gl-button
+ :aria-label="__('Close')"
+ variant="link"
+ size="sm"
+ class="dropdown-header-button p-0"
+ @click="toggleDropdownContents"
+ >
+ <gl-icon name="close" />
+ </gl-button>
+ </div>
+ <div class="dropdown-input">
+ <gl-form-input
+ v-model.trim="labelTitle"
+ :placeholder="__('Name new label')"
+ :autofocus="true"
+ />
+ </div>
+ <div class="dropdown-content px-2">
+ <div class="suggest-colors suggest-colors-dropdown mt-0 mb-2">
+ <gl-link
+ v-for="(color, index) in suggestedColors"
+ :key="index"
+ v-gl-tooltip:tooltipcontainer
+ :style="{ backgroundColor: getColorCode(color) }"
+ :title="getColorName(color)"
+ @click.prevent="handleColorClick(color)"
+ />
+ </div>
+ <div class="color-input-container d-flex">
+ <span
+ class="dropdown-label-color-preview position-relative position-relative d-inline-block"
+ :style="{ backgroundColor: selectedColor }"
+ ></span>
+ <gl-form-input v-model.trim="selectedColor" :placeholder="__('Use custom color #FF0000')" />
+ </div>
+ </div>
+ <div class="dropdown-actions clearfix pt-2 px-2">
+ <gl-button
+ :disabled="disableCreate"
+ variant="primary"
+ class="pull-left d-flex align-items-center"
+ @click="handleCreateClick"
+ >
+ <gl-loading-icon v-show="labelCreateInProgress" :inline="true" class="mr-1" />
+ {{ __('Create') }}
+ </gl-button>
+ <gl-button class="pull-right" @click="toggleDropdownContentsCreateView">
+ {{ __('Cancel') }}
+ </gl-button>
+ </div>
+ </div>
+</template>
diff --git a/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view.vue b/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view.vue
new file mode 100644
index 00000000000..7ec420fa908
--- /dev/null
+++ b/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view.vue
@@ -0,0 +1,178 @@
+<script>
+import { mapState, mapGetters, mapActions } from 'vuex';
+import { GlLoadingIcon, GlButton, GlIcon, GlSearchBoxByType, GlLink } from '@gitlab/ui';
+
+import { UP_KEY_CODE, DOWN_KEY_CODE, ENTER_KEY_CODE, ESC_KEY_CODE } from '~/lib/utils/keycodes';
+
+export default {
+ components: {
+ GlLoadingIcon,
+ GlButton,
+ GlIcon,
+ GlSearchBoxByType,
+ GlLink,
+ },
+ data() {
+ return {
+ searchKey: '',
+ currentHighlightItem: -1,
+ };
+ },
+ computed: {
+ ...mapState([
+ 'labelsManagePath',
+ 'labels',
+ 'labelsFetchInProgress',
+ 'labelsListTitle',
+ 'footerCreateLabelTitle',
+ 'footerManageLabelTitle',
+ ]),
+ ...mapGetters(['selectedLabelsList']),
+ visibleLabels() {
+ if (this.searchKey) {
+ return this.labels.filter(label =>
+ label.title.toLowerCase().includes(this.searchKey.toLowerCase()),
+ );
+ }
+ return this.labels;
+ },
+ },
+ watch: {
+ searchKey(value) {
+      // When a search string is present
+      // and there are matching results,
+      // highlight the first item by default.
+ if (value && this.visibleLabels.length) {
+ this.currentHighlightItem = 0;
+ }
+ },
+ },
+ mounted() {
+ this.fetchLabels();
+ },
+ methods: {
+ ...mapActions([
+ 'toggleDropdownContents',
+ 'toggleDropdownContentsCreateView',
+ 'fetchLabels',
+ 'updateSelectedLabels',
+ ]),
+ getDropdownLabelBoxStyle(label) {
+ return {
+ backgroundColor: label.color,
+ };
+ },
+ isLabelSelected(label) {
+ return this.selectedLabelsList.includes(label.id);
+ },
+ /**
+     * This method scrolls the highlighted dropdown item into
+     * view if it is outside the visible area of the
+     * container.
+ */
+ scrollIntoViewIfNeeded() {
+ const highlightedLabel = this.$refs.labelsListContainer.querySelector('.is-focused');
+
+ if (highlightedLabel) {
+ const rect = highlightedLabel.getBoundingClientRect();
+ if (rect.bottom > this.$refs.labelsListContainer.clientHeight) {
+ highlightedLabel.scrollIntoView(false);
+ }
+ if (rect.top < 0) {
+ highlightedLabel.scrollIntoView();
+ }
+ }
+ },
+ /**
+ * This method enables keyboard navigation support for
+ * the dropdown.
+ */
+ handleKeyDown(e) {
+ if (e.keyCode === UP_KEY_CODE && this.currentHighlightItem > 0) {
+ this.currentHighlightItem -= 1;
+ } else if (
+ e.keyCode === DOWN_KEY_CODE &&
+ this.currentHighlightItem < this.visibleLabels.length - 1
+ ) {
+ this.currentHighlightItem += 1;
+ } else if (e.keyCode === ENTER_KEY_CODE && this.currentHighlightItem > -1) {
+ this.updateSelectedLabels([this.visibleLabels[this.currentHighlightItem]]);
+ } else if (e.keyCode === ESC_KEY_CODE) {
+ this.toggleDropdownContents();
+ }
+
+ if (e.keyCode !== ESC_KEY_CODE) {
+ // Scroll the list only after highlighting
+ // styles are rendered completely.
+ this.$nextTick(() => {
+ this.scrollIntoViewIfNeeded();
+ });
+ }
+ },
+ handleLabelClick(label) {
+ this.updateSelectedLabels([label]);
+ },
+ },
+};
+</script>
+
+<template>
+ <div class="labels-select-contents-list" @keydown="handleKeyDown">
+ <gl-loading-icon
+ v-if="labelsFetchInProgress"
+ class="labels-fetch-loading position-absolute d-flex align-items-center w-100 h-100"
+ size="md"
+ />
+ <div class="dropdown-title d-flex align-items-center pt-0 pb-2">
+ <span class="flex-grow-1">{{ labelsListTitle }}</span>
+ <gl-button
+ :aria-label="__('Close')"
+ variant="link"
+ size="sm"
+ class="dropdown-header-button p-0"
+ @click="toggleDropdownContents"
+ >
+ <gl-icon name="close" />
+ </gl-button>
+ </div>
+ <div class="dropdown-input">
+ <gl-search-box-by-type v-model="searchKey" :autofocus="true" />
+ </div>
+ <div v-if="!labelsFetchInProgress" ref="labelsListContainer" class="dropdown-content">
+ <ul class="list-unstyled mb-0">
+ <li v-for="(label, index) in visibleLabels" :key="label.id" class="d-block text-left">
+ <gl-link
+ class="d-flex align-items-baseline text-break-word label-item"
+ :class="{ 'is-focused': index === currentHighlightItem }"
+ @click="handleLabelClick(label)"
+ >
+ <gl-icon v-show="label.set" name="mobile-issue-close" class="mr-2 align-self-center" />
+ <span v-show="!label.set" class="mr-3 pr-2"></span>
+ <span class="dropdown-label-box" :style="getDropdownLabelBoxStyle(label)"></span>
+ <span>{{ label.title }}</span>
+ </gl-link>
+ </li>
+ <li v-if="!visibleLabels.length" class="p-2 text-center">
+ {{ __('No matching results') }}
+ </li>
+ </ul>
+ </div>
+ <div class="dropdown-footer">
+ <ul class="list-unstyled">
+ <li>
+ <gl-button
+ variant="link"
+ class="d-flex w-100 flex-row text-break-word label-item"
+ @click="toggleDropdownContentsCreateView"
+ >{{ footerCreateLabelTitle }}</gl-button
+ >
+ </li>
+ <li>
+ <gl-link :href="labelsManagePath" class="d-flex flex-row text-break-word label-item">
+ {{ footerManageLabelTitle }}
+ </gl-link>
+ </li>
+ </ul>
+ </div>
+ </div>
+</template>
diff --git a/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/dropdown_title.vue b/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/dropdown_title.vue
new file mode 100644
index 00000000000..57f7962dfe1
--- /dev/null
+++ b/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/dropdown_title.vue
@@ -0,0 +1,39 @@
+<script>
+import { mapState, mapActions } from 'vuex';
+import { GlButton, GlLoadingIcon } from '@gitlab/ui';
+
+export default {
+ components: {
+ GlButton,
+ GlLoadingIcon,
+ },
+ props: {
+ labelsSelectInProgress: {
+ type: Boolean,
+ required: true,
+ },
+ },
+ computed: {
+ ...mapState(['allowLabelEdit', 'labelsFetchInProgress']),
+ },
+ methods: {
+ ...mapActions(['toggleDropdownContents']),
+ },
+};
+</script>
+
+<template>
+ <div class="title hide-collapsed append-bottom-10">
+ {{ __('Labels') }}
+ <template v-if="allowLabelEdit">
+ <gl-loading-icon v-show="labelsSelectInProgress" inline />
+ <gl-button
+ variant="link"
+ class="pull-right js-sidebar-dropdown-toggle"
+ data-qa-selector="labels_edit_button"
+ @click="toggleDropdownContents"
+ >{{ __('Edit') }}</gl-button
+ >
+ </template>
+ </div>
+</template>
diff --git a/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/dropdown_value.vue b/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/dropdown_value.vue
new file mode 100644
index 00000000000..695af775750
--- /dev/null
+++ b/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/dropdown_value.vue
@@ -0,0 +1,53 @@
+<script>
+import { mapState } from 'vuex';
+import { GlLabel } from '@gitlab/ui';
+
+import { isScopedLabel } from '~/lib/utils/common_utils';
+
+export default {
+ components: {
+ GlLabel,
+ },
+ computed: {
+ ...mapState([
+ 'selectedLabels',
+ 'allowScopedLabels',
+ 'labelsFilterBasePath',
+ 'scopedLabelsDocumentationPath',
+ ]),
+ },
+ methods: {
+ labelFilterUrl(label) {
+ return `${this.labelsFilterBasePath}?label_name[]=${encodeURIComponent(label.title)}`;
+ },
+ scopedLabel(label) {
+ return this.allowScopedLabels && isScopedLabel(label);
+ },
+ },
+};
+</script>
+
+<template>
+ <div
+ :class="{
+ 'has-labels': selectedLabels.length,
+ }"
+ class="hide-collapsed value issuable-show-labels js-value"
+ >
+ <span v-if="!selectedLabels.length" class="text-secondary">
+ <slot></slot>
+ </span>
+ <template v-for="label in selectedLabels" v-else>
+ <gl-label
+ :key="label.id"
+ :title="label.title"
+ :description="label.description"
+ :background-color="label.color"
+ :target="labelFilterUrl(label)"
+ :scoped="scopedLabel(label)"
+ :scoped-labels-documentation-link="scopedLabelsDocumentationPath"
+ tooltip-placement="top"
+ />
+ </template>
+ </div>
+</template>
diff --git a/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/labels_select_root.vue b/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/labels_select_root.vue
new file mode 100644
index 00000000000..5e41a155ef6
--- /dev/null
+++ b/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/labels_select_root.vue
@@ -0,0 +1,195 @@
+<script>
+import Vue from 'vue';
+import Vuex, { mapState, mapActions } from 'vuex';
+import { __ } from '~/locale';
+
+import DropdownValueCollapsed from '~/vue_shared/components/sidebar/labels_select/dropdown_value_collapsed.vue';
+
+import labelsSelectModule from './store';
+
+import DropdownTitle from './dropdown_title.vue';
+import DropdownValue from './dropdown_value.vue';
+import DropdownButton from './dropdown_button.vue';
+import DropdownContents from './dropdown_contents.vue';
+
+Vue.use(Vuex);
+
+export default {
+ store: new Vuex.Store(labelsSelectModule()),
+ components: {
+ DropdownTitle,
+ DropdownValue,
+ DropdownButton,
+ DropdownContents,
+ DropdownValueCollapsed,
+ },
+ props: {
+ allowLabelEdit: {
+ type: Boolean,
+ required: true,
+ },
+ allowLabelCreate: {
+ type: Boolean,
+ required: true,
+ },
+ allowScopedLabels: {
+ type: Boolean,
+ required: true,
+ },
+ dropdownOnly: {
+ type: Boolean,
+ required: false,
+ default: false,
+ },
+ selectedLabels: {
+ type: Array,
+ required: false,
+ default: () => [],
+ },
+ labelsSelectInProgress: {
+ type: Boolean,
+ required: false,
+ default: false,
+ },
+ labelsFetchPath: {
+ type: String,
+ required: false,
+ default: '',
+ },
+ labelsManagePath: {
+ type: String,
+ required: false,
+ default: '',
+ },
+ labelsFilterBasePath: {
+ type: String,
+ required: false,
+ default: '',
+ },
+ scopedLabelsDocumentationPath: {
+ type: String,
+ required: false,
+ default: '',
+ },
+ labelsListTitle: {
+ type: String,
+ required: false,
+ default: __('Assign labels'),
+ },
+ labelsCreateTitle: {
+ type: String,
+ required: false,
+ default: __('Create group label'),
+ },
+ footerCreateLabelTitle: {
+ type: String,
+ required: false,
+ default: __('Create group label'),
+ },
+ footerManageLabelTitle: {
+ type: String,
+ required: false,
+ default: __('Manage group labels'),
+ },
+ },
+ computed: {
+ ...mapState(['showDropdownButton', 'showDropdownContents']),
+ },
+ watch: {
+ selectedLabels(selectedLabels) {
+ this.setInitialState({
+ selectedLabels,
+ });
+ },
+ },
+ mounted() {
+ this.setInitialState({
+ dropdownOnly: this.dropdownOnly,
+ allowLabelEdit: this.allowLabelEdit,
+ allowLabelCreate: this.allowLabelCreate,
+ allowScopedLabels: this.allowScopedLabels,
+ selectedLabels: this.selectedLabels,
+ labelsFetchPath: this.labelsFetchPath,
+ labelsManagePath: this.labelsManagePath,
+ labelsFilterBasePath: this.labelsFilterBasePath,
+ scopedLabelsDocumentationPath: this.scopedLabelsDocumentationPath,
+ labelsListTitle: this.labelsListTitle,
+ labelsCreateTitle: this.labelsCreateTitle,
+ footerCreateLabelTitle: this.footerCreateLabelTitle,
+ footerManageLabelTitle: this.footerManageLabelTitle,
+ });
+
+ this.$store.subscribeAction({
+ after: this.handleVuexActionDispatch,
+ });
+
+ document.addEventListener('click', this.handleDocumentClick);
+ },
+ beforeDestroy() {
+ document.removeEventListener('click', this.handleDocumentClick);
+ },
+ methods: {
+ ...mapActions(['setInitialState', 'toggleDropdownContents']),
+ /**
+     * This method differentiates between
+     * dispatched actions and calls the necessary method.
+ */
+ handleVuexActionDispatch(action, state) {
+ if (
+ action.type === 'toggleDropdownContents' &&
+ !state.showDropdownButton &&
+ !state.showDropdownContents
+ ) {
+ this.handleDropdownClose(state.labels.filter(label => label.touched));
+ }
+ },
+ /**
+     * This method listens for a document-wide click event
+     * and toggles the dropdown if the user clicks anywhere
+     * outside the dropdown while it is visible.
+ */
+ handleDocumentClick({ target }) {
+ if (
+ this.showDropdownButton &&
+ this.showDropdownContents &&
+ !target?.classList.contains('js-sidebar-dropdown-toggle') &&
+ !this.$refs.dropdownButtonCollapsed?.$el.contains(target) &&
+ !this.$refs.dropdownContents?.$el.contains(target)
+ ) {
+ this.toggleDropdownContents();
+ }
+ },
+ handleDropdownClose(labels) {
+      // Only emit label updates if there are any labels to update
+      // in the UI.
+ if (labels.length) this.$emit('updateSelectedLabels', labels);
+ this.$emit('onDropdownClose');
+ },
+ handleCollapsedValueClick() {
+ this.$emit('toggleCollapse');
+ },
+ },
+};
+</script>
+
+<template>
+ <div class="labels-select-wrapper position-relative">
+ <div v-if="!dropdownOnly">
+ <dropdown-value-collapsed
+ v-if="allowLabelCreate"
+ ref="dropdownButtonCollapsed"
+ :labels="selectedLabels"
+ @onValueClick="handleCollapsedValueClick"
+ />
+ <dropdown-title
+ :allow-label-edit="allowLabelEdit"
+ :labels-select-in-progress="labelsSelectInProgress"
+ />
+ <dropdown-value v-show="!showDropdownButton">
+ <slot></slot>
+ </dropdown-value>
+ <dropdown-button v-show="showDropdownButton" />
+ <dropdown-contents v-if="showDropdownButton && showDropdownContents" ref="dropdownContents" />
+ </div>
+ </div>
+</template>
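
A sketch of how a sidebar view might consume the new labels_select_root.vue component; the paths and flags below are placeholders, with real values coming from the backing Rails view. The updateSelectedLabels event fires on dropdown close with only the labels the user actually toggled.

import LabelsSelectRoot from '~/vue_shared/components/sidebar/labels_select_vue/labels_select_root.vue';

export default {
  components: { LabelsSelectRoot },
  methods: {
    // Placeholder handler; a real consumer would persist the toggled labels here.
    onLabelsUpdated(labels) {
      console.log(labels);
    },
  },
  template: `
    <labels-select-root
      :allow-label-edit="true"
      :allow-label-create="true"
      :allow-scoped-labels="false"
      :selected-labels="[]"
      labels-fetch-path="/example/project/-/labels.json"
      labels-manage-path="/example/project/-/labels"
      labels-filter-base-path="/example/project/-/issues"
      @updateSelectedLabels="onLabelsUpdated"
    >
      None
    </labels-select-root>
  `,
};
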
diff --git a/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/store/actions.js b/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/store/actions.js
new file mode 100644
index 00000000000..145ec7dc566
--- /dev/null
+++ b/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/store/actions.js
@@ -0,0 +1,61 @@
+import flash from '~/flash';
+import { __ } from '~/locale';
+import axios from '~/lib/utils/axios_utils';
+import * as types from './mutation_types';
+
+export const setInitialState = ({ commit }, props) => commit(types.SET_INITIAL_STATE, props);
+
+export const toggleDropdownButton = ({ commit }) => commit(types.TOGGLE_DROPDOWN_BUTTON);
+export const toggleDropdownContents = ({ commit }) => commit(types.TOGGLE_DROPDOWN_CONTENTS);
+
+export const toggleDropdownContentsCreateView = ({ commit }) =>
+ commit(types.TOGGLE_DROPDOWN_CONTENTS_CREATE_VIEW);
+
+export const requestLabels = ({ commit }) => commit(types.REQUEST_LABELS);
+export const receiveLabelsSuccess = ({ commit }, labels) =>
+ commit(types.RECEIVE_SET_LABELS_SUCCESS, labels);
+export const receiveLabelsFailure = ({ commit }) => {
+ commit(types.RECEIVE_SET_LABELS_FAILURE);
+ flash(__('Error fetching labels.'));
+};
+export const fetchLabels = ({ state, dispatch }) => {
+ dispatch('requestLabels');
+ axios
+ .get(state.labelsFetchPath)
+ .then(({ data }) => {
+ dispatch('receiveLabelsSuccess', data);
+ })
+ .catch(() => dispatch('receiveLabelsFailure'));
+};
+
+export const requestCreateLabel = ({ commit }) => commit(types.REQUEST_CREATE_LABEL);
+export const receiveCreateLabelSuccess = ({ commit }) => commit(types.RECEIVE_CREATE_LABEL_SUCCESS);
+export const receiveCreateLabelFailure = ({ commit }) => {
+ commit(types.RECEIVE_CREATE_LABEL_FAILURE);
+ flash(__('Error creating label.'));
+};
+export const createLabel = ({ state, dispatch }, label) => {
+ dispatch('requestCreateLabel');
+ axios
+ .post(state.labelsManagePath, {
+ label,
+ })
+ .then(({ data }) => {
+ if (data.id) {
+ dispatch('receiveCreateLabelSuccess');
+ dispatch('toggleDropdownContentsCreateView');
+ } else {
+ // eslint-disable-next-line @gitlab/i18n/no-non-i18n-strings
+ throw new Error('Error Creating Label');
+ }
+ })
+ .catch(() => {
+ dispatch('receiveCreateLabelFailure');
+ });
+};
+
+export const updateSelectedLabels = ({ commit }, labels) =>
+ commit(types.UPDATE_SELECTED_LABELS, { labels });
+
+// prevent babel-plugin-rewire from generating an invalid default during karma tests
+export default () => {};
diff --git a/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/store/getters.js b/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/store/getters.js
new file mode 100644
index 00000000000..c08a8a8ea58
--- /dev/null
+++ b/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/store/getters.js
@@ -0,0 +1,30 @@
+import { __, s__, sprintf } from '~/locale';
+
+/**
+ * Returns a string representing the current label
+ * selection shown on the dropdown button.
+ *
+ * @param {object} state
+ */
+export const dropdownButtonText = state => {
+ const selectedLabels = state.labels.filter(label => label.set);
+ if (!selectedLabels.length) {
+ return __('Label');
+ } else if (selectedLabels.length > 1) {
+ return sprintf(s__('LabelSelect|%{firstLabelName} +%{remainingLabelCount} more'), {
+ firstLabelName: selectedLabels[0].title,
+ remainingLabelCount: selectedLabels.length - 1,
+ });
+ }
+ return selectedLabels[0].title;
+};
+
+/**
+ * Returns an array containing only the label IDs from
+ * the selectedLabels array.
+ * @param {object} state
+ */
+export const selectedLabelsList = state => state.selectedLabels.map(label => label.id);
+
+// prevent babel-plugin-rewire from generating an invalid default during karma tests
+export default () => {};
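
A short sketch of the dropdownButtonText getter's behaviour, assuming the default English locale (the getter goes through __ and sprintf):

import { dropdownButtonText } from '~/vue_shared/components/sidebar/labels_select_vue/store/getters';

// No labels set: falls back to the generic 'Label' text.
dropdownButtonText({ labels: [] }); // 'Label'

// Exactly one label set: the button shows that label's title.
dropdownButtonText({ labels: [{ title: 'bug', set: true }] }); // 'bug'

// Several labels set: first title plus a count of the rest.
dropdownButtonText({
  labels: [
    { title: 'bug', set: true },
    { title: 'backend', set: true },
  ],
}); // 'bug +1 more'
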
diff --git a/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/store/index.js b/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/store/index.js
new file mode 100644
index 00000000000..5f61cb732c8
--- /dev/null
+++ b/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/store/index.js
@@ -0,0 +1,12 @@
+import * as actions from './actions';
+import * as getters from './getters';
+import mutations from './mutations';
+import state from './state';
+
+export default () => ({
+ namespaced: true,
+ state: state(),
+ actions,
+ getters,
+ mutations,
+});
diff --git a/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/store/mutation_types.js b/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/store/mutation_types.js
new file mode 100644
index 00000000000..2e044dc3b3c
--- /dev/null
+++ b/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/store/mutation_types.js
@@ -0,0 +1,20 @@
+export const SET_INITIAL_STATE = 'SET_INITIAL_STATE';
+
+export const REQUEST_LABELS = 'REQUEST_LABELS';
+export const RECEIVE_LABELS_SUCCESS = 'RECEIVE_LABELS_SUCCESS';
+export const RECEIVE_LABELS_FAILURE = 'RECEIVE_LABELS_FAILURE';
+
+export const REQUEST_SET_LABELS = 'REQUEST_SET_LABELS';
+export const RECEIVE_SET_LABELS_SUCCESS = 'RECEIVE_SET_LABELS_SUCCESS';
+export const RECEIVE_SET_LABELS_FAILURE = 'RECEIVE_SET_LABELS_FAILURE';
+
+export const REQUEST_CREATE_LABEL = 'REQUEST_CREATE_LABEL';
+export const RECEIVE_CREATE_LABEL_SUCCESS = 'RECEIVE_CREATE_LABEL_SUCCESS';
+export const RECEIVE_CREATE_LABEL_FAILURE = 'RECEIVE_CREATE_LABEL_FAILURE';
+
+export const TOGGLE_DROPDOWN_BUTTON = 'TOGGLE_DROPDOWN_VISIBILITY';
+export const TOGGLE_DROPDOWN_CONTENTS = 'TOGGLE_DROPDOWN_CONTENTS';
+
+export const UPDATE_SELECTED_LABELS = 'UPDATE_SELECTED_LABELS';
+
+export const TOGGLE_DROPDOWN_CONTENTS_CREATE_VIEW = 'TOGGLE_DROPDOWN_CONTENTS_CREATE_VIEW';
diff --git a/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/store/mutations.js b/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/store/mutations.js
new file mode 100644
index 00000000000..32a78507e88
--- /dev/null
+++ b/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/store/mutations.js
@@ -0,0 +1,76 @@
+import * as types from './mutation_types';
+
+export default {
+ [types.SET_INITIAL_STATE](state, props) {
+ Object.assign(state, { ...props });
+ },
+
+ [types.TOGGLE_DROPDOWN_BUTTON](state) {
+ state.showDropdownButton = !state.showDropdownButton;
+ },
+
+ [types.TOGGLE_DROPDOWN_CONTENTS](state) {
+ if (!state.dropdownOnly) {
+ state.showDropdownButton = !state.showDropdownButton;
+ }
+ state.showDropdownContents = !state.showDropdownContents;
+ // Ensure that Create View is hidden by default
+ // when dropdown contents are revealed.
+ if (state.showDropdownContents) {
+ state.showDropdownContentsCreateView = false;
+ }
+ },
+
+ [types.TOGGLE_DROPDOWN_CONTENTS_CREATE_VIEW](state) {
+ state.showDropdownContentsCreateView = !state.showDropdownContentsCreateView;
+ },
+
+ [types.REQUEST_LABELS](state) {
+ state.labelsFetchInProgress = true;
+ },
+ [types.RECEIVE_SET_LABELS_SUCCESS](state, labels) {
+    // Iterate over every label and add a `set` prop
+    // to indicate whether it is already part of the
+    // selectedLabels array.
+ const selectedLabelIds = state.selectedLabels.map(label => label.id);
+ state.labelsFetchInProgress = false;
+ state.labels = labels.reduce((allLabels, label) => {
+ allLabels.push({
+ ...label,
+ set: selectedLabelIds.includes(label.id),
+ });
+ return allLabels;
+ }, []);
+ },
+ [types.RECEIVE_SET_LABELS_FAILURE](state) {
+ state.labelsFetchInProgress = false;
+ },
+
+ [types.REQUEST_CREATE_LABEL](state) {
+ state.labelCreateInProgress = true;
+ },
+ [types.RECEIVE_CREATE_LABEL_SUCCESS](state) {
+ state.labelCreateInProgress = false;
+ },
+ [types.RECEIVE_CREATE_LABEL_FAILURE](state) {
+ state.labelCreateInProgress = false;
+ },
+
+ [types.UPDATE_SELECTED_LABELS](state, { labels }) {
+    // Iterate over all the labels and update
+    // the `set` prop value to represent their current state.
+ const labelIds = labels.map(label => label.id);
+ state.labels = state.labels.reduce((allLabels, label) => {
+ if (labelIds.includes(label.id)) {
+ allLabels.push({
+ ...label,
+ touched: true,
+ set: !label.set,
+ });
+ } else {
+ allLabels.push(label);
+ }
+ return allLabels;
+ }, []);
+ },
+};
diff --git a/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/store/state.js b/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/store/state.js
new file mode 100644
index 00000000000..ceabc696693
--- /dev/null
+++ b/app/assets/javascripts/vue_shared/components/sidebar/labels_select_vue/store/state.js
@@ -0,0 +1,27 @@
+export default () => ({
+ // Initial Data
+ labels: [],
+ selectedLabels: [],
+ labelsListTitle: '',
+ labelsCreateTitle: '',
+ footerCreateLabelTitle: '',
+ footerManageLabelTitle: '',
+
+ // Paths
+ namespace: '',
+ labelsFetchPath: '',
+ labelsFilterBasePath: '',
+ scopedLabelsDocumentationPath: '#',
+
+ // UI Flags
+ allowLabelCreate: false,
+ allowLabelEdit: false,
+ allowScopedLabels: false,
+ dropdownOnly: false,
+ showDropdownButton: false,
+ showDropdownContents: false,
+ showDropdownContentsCreateView: false,
+ labelsFetchInProgress: false,
+ labelCreateInProgress: false,
+ selectedLabelsUpdated: false,
+});
diff --git a/app/assets/javascripts/vue_shared/components/user_popover/user_popover.vue b/app/assets/javascripts/vue_shared/components/user_popover/user_popover.vue
index ca25d9ee738..602d4ab89e1 100644
--- a/app/assets/javascripts/vue_shared/components/user_popover/user_popover.vue
+++ b/app/assets/javascripts/vue_shared/components/user_popover/user_popover.vue
@@ -1,8 +1,10 @@
<script>
-import { GlPopover, GlSkeletonLoading } from '@gitlab/ui';
+import { GlPopover, GlSkeletonLoading, GlSprintf } from '@gitlab/ui';
import Icon from '~/vue_shared/components/icon.vue';
import UserAvatarImage from '../user_avatar/user_avatar_image.vue';
import { glEmojiTag } from '../../../emoji';
+import { s__ } from '~/locale';
+import { isString } from 'lodash';
export default {
name: 'UserPopover',
@@ -10,6 +12,7 @@ export default {
Icon,
GlPopover,
GlSkeletonLoading,
+ GlSprintf,
UserAvatarImage,
},
props: {
@@ -45,8 +48,27 @@ export default {
nameIsLoading() {
return !this.user.name;
},
- jobInfoIsLoading() {
- return !this.user.loaded && this.user.organization === null;
+ workInformationIsLoading() {
+ return !this.user.loaded && this.workInformation === null;
+ },
+ workInformation() {
+ const { jobTitle, organization } = this.user;
+
+ if (organization && jobTitle) {
+ return {
+ message: s__('Profile|%{job_title} at %{organization}'),
+ placeholders: { job_title: jobTitle, organization },
+ };
+ } else if (organization) {
+ return organization;
+ } else if (jobTitle) {
+ return jobTitle;
+ }
+
+ return null;
+ },
+ workInformationShouldUseSprintf() {
+ return !isString(this.workInformation);
},
locationIsLoading() {
return !this.user.loaded && this.user.location === null;
@@ -72,16 +94,30 @@ export default {
<gl-skeleton-loading v-else :lines="1" class="animation-container-small mb-1" />
</div>
<div class="text-secondary">
- <div v-if="user.bio" class="js-bio d-flex mb-1">
+ <div v-if="user.bio" class="d-flex mb-1">
<icon name="profile" class="category-icon flex-shrink-0" />
- <span class="ml-1">{{ user.bio }}</span>
+ <span ref="bio" class="ml-1">{{ user.bio }}</span>
</div>
- <div v-if="user.organization" class="js-organization d-flex mb-1">
- <icon v-show="!jobInfoIsLoading" name="work" class="category-icon flex-shrink-0" />
- <span class="ml-1">{{ user.organization }}</span>
+ <div v-if="workInformation" class="d-flex mb-1">
+ <icon
+ v-show="!workInformationIsLoading"
+ name="work"
+ class="category-icon flex-shrink-0"
+ />
+ <span ref="workInformation" class="ml-1">
+ <gl-sprintf v-if="workInformationShouldUseSprintf" :message="workInformation.message">
+ <template
+ v-for="(placeholder, slotName) in workInformation.placeholders"
+ v-slot:[slotName]
+ >
+ <span :key="slotName">{{ placeholder }}</span>
+ </template>
+ </gl-sprintf>
+ <span v-else>{{ workInformation }}</span>
+ </span>
</div>
<gl-skeleton-loading
- v-if="jobInfoIsLoading"
+ v-if="workInformationIsLoading"
:lines="1"
class="animation-container-small mb-1"
/>
diff --git a/app/assets/javascripts/vue_shared/constants.js b/app/assets/javascripts/vue_shared/constants.js
new file mode 100644
index 00000000000..63ce4212717
--- /dev/null
+++ b/app/assets/javascripts/vue_shared/constants.js
@@ -0,0 +1,56 @@
+import { __ } from '~/locale';
+
+const INTERVALS = {
+ minute: 'minute',
+ hour: 'hour',
+ day: 'day',
+};
+
+export const timeRanges = [
+ {
+ label: __('30 minutes'),
+ duration: { seconds: 60 * 30 },
+ name: 'thirtyMinutes',
+ interval: INTERVALS.minute,
+ },
+ {
+ label: __('3 hours'),
+ duration: { seconds: 60 * 60 * 3 },
+ name: 'threeHours',
+ interval: INTERVALS.hour,
+ },
+ {
+ label: __('8 hours'),
+ duration: { seconds: 60 * 60 * 8 },
+ name: 'eightHours',
+ default: true,
+ interval: INTERVALS.hour,
+ },
+ {
+ label: __('1 day'),
+ duration: { seconds: 60 * 60 * 24 * 1 },
+ name: 'oneDay',
+ interval: INTERVALS.hour,
+ },
+ {
+ label: __('3 days'),
+ duration: { seconds: 60 * 60 * 24 * 3 },
+ name: 'threeDays',
+ interval: INTERVALS.hour,
+ },
+ {
+ label: __('1 week'),
+ duration: { seconds: 60 * 60 * 24 * 7 * 1 },
+ name: 'oneWeek',
+ interval: INTERVALS.day,
+ },
+ {
+ label: __('1 month'),
+ duration: { seconds: 60 * 60 * 24 * 30 },
+ name: 'oneMonth',
+ interval: INTERVALS.day,
+ },
+];
+
+export const defaultTimeRange = timeRanges.find(tr => tr.default);
+export const getTimeWindow = timeWindowName => timeRanges.find(tr => tr.name === timeWindowName);
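
A small sketch of how the new shared time-range constants can be consumed (labels assume the English locale; range names come from the array above):

import { timeRanges, defaultTimeRange, getTimeWindow } from '~/vue_shared/constants';

// The default range is the entry flagged with `default: true` (8 hours).
defaultTimeRange.name; // 'eightHours'

// Look up a range by name; unknown names return undefined.
getTimeWindow('oneWeek').duration.seconds; // 604800
getTimeWindow('fourHours'); // undefined

// All seven predefined ranges are available for pickers to render.
timeRanges.map(range => range.label);
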
diff --git a/app/assets/stylesheets/bootstrap_migration.scss b/app/assets/stylesheets/bootstrap_migration.scss
index 885e9ac6667..f3293510e9a 100644
--- a/app/assets/stylesheets/bootstrap_migration.scss
+++ b/app/assets/stylesheets/bootstrap_migration.scss
@@ -120,8 +120,8 @@ kbd {
code {
padding: 2px 4px;
- color: $red-600;
- background-color: $red-100;
+ color: $code-color;
+ background-color: $gray-100;
border-radius: $border-radius-default;
.code > & {
diff --git a/app/assets/stylesheets/components/popover.scss b/app/assets/stylesheets/components/popover.scss
index 6654553aaa2..198500d07a7 100644
--- a/app/assets/stylesheets/components/popover.scss
+++ b/app/assets/stylesheets/components/popover.scss
@@ -138,3 +138,13 @@
max-width: 40%;
}
}
+
+.suggest-gitlab-ci-yml {
+ margin-top: -1em;
+
+ .popover-header {
+ padding: $gl-padding;
+ display: flex;
+ align-items: center;
+ }
+}
diff --git a/app/assets/stylesheets/framework.scss b/app/assets/stylesheets/framework.scss
index 9032dd28b80..338a8c5497c 100644
--- a/app/assets/stylesheets/framework.scss
+++ b/app/assets/stylesheets/framework.scss
@@ -70,3 +70,4 @@
@import 'framework/system_messages';
@import "framework/spinner";
@import 'framework/card';
+@import 'framework/editor-lite';
diff --git a/app/assets/stylesheets/framework/blocks.scss b/app/assets/stylesheets/framework/blocks.scss
index 0e4080ce201..f922d8bcaab 100644
--- a/app/assets/stylesheets/framework/blocks.scss
+++ b/app/assets/stylesheets/framework/blocks.scss
@@ -161,13 +161,17 @@
}
.cover-controls {
- position: absolute;
- top: 10px;
- right: 10px;
+ @include media-breakpoint-up(sm) {
+ position: absolute;
+ top: 1rem;
+ right: 1.25rem;
+ }
&.left {
- left: 10px;
- right: auto;
+ @include media-breakpoint-up(sm) {
+ left: 1.25rem;
+ right: auto;
+ }
}
}
diff --git a/app/assets/stylesheets/framework/broadcast_messages.scss b/app/assets/stylesheets/framework/broadcast_messages.scss
index 95ea3d90a0e..359f4681938 100644
--- a/app/assets/stylesheets/framework/broadcast_messages.scss
+++ b/app/assets/stylesheets/framework/broadcast_messages.scss
@@ -17,6 +17,10 @@
@extend .broadcast-message;
@extend .alert-warning;
text-align: center;
+
+ .broadcast-message-dismiss {
+ color: inherit;
+ }
}
.broadcast-notification-message {
@@ -36,6 +40,11 @@
&.preview {
position: static;
}
+
+ .broadcast-message-dismiss {
+ height: 100%;
+ color: $gray-800;
+ }
}
.toggle-colors {
diff --git a/app/assets/stylesheets/framework/buttons.scss b/app/assets/stylesheets/framework/buttons.scss
index 1b549c0a4f0..ecbc5fa9351 100644
--- a/app/assets/stylesheets/framework/buttons.scss
+++ b/app/assets/stylesheets/framework/buttons.scss
@@ -371,8 +371,11 @@
}
.btn-loading {
- &:not(.disabled) .fa {
- display: none;
+ &:not(.disabled) {
+ .fa,
+ .spinner {
+ display: none;
+ }
}
.fa {
diff --git a/app/assets/stylesheets/framework/common.scss b/app/assets/stylesheets/framework/common.scss
index 408ca249be2..4d8ae8a5652 100644
--- a/app/assets/stylesheets/framework/common.scss
+++ b/app/assets/stylesheets/framework/common.scss
@@ -413,6 +413,7 @@ img.emoji {
.prepend-left-20 { margin-left: 20px; }
.prepend-left-32 { margin-left: 32px; }
.prepend-left-64 { margin-left: 64px; }
+.append-right-2 { margin-right: 2px; }
.append-right-4 { margin-right: 4px; }
.append-right-5 { margin-right: 5px; }
.append-right-8 { margin-right: 8px; }
@@ -424,6 +425,7 @@ img.emoji {
.append-right-48 { margin-right: 48px; }
.prepend-right-32 { margin-right: 32px; }
.append-bottom-0 { margin-bottom: 0; }
+.append-bottom-2 { margin-bottom: 2px; }
.append-bottom-4 { margin-bottom: $gl-padding-4; }
.append-bottom-5 { margin-bottom: 5px; }
.append-bottom-8 { margin-bottom: $grid-size; }
@@ -522,6 +524,8 @@ img.emoji {
cursor: pointer;
}
+.cursor-not-allowed { cursor: not-allowed; }
+
// this needs to use "!important" due to some very specific styles
// around buttons
.cursor-default {
diff --git a/app/assets/stylesheets/framework/dropdowns.scss b/app/assets/stylesheets/framework/dropdowns.scss
index 41f3603506f..1804f70b37c 100644
--- a/app/assets/stylesheets/framework/dropdowns.scss
+++ b/app/assets/stylesheets/framework/dropdowns.scss
@@ -150,6 +150,12 @@
right: 8px;
}
+ .spinner {
+ position: absolute;
+ top: 9px;
+ right: 8px;
+ }
+
.ic-chevron-down {
position: absolute;
top: $gl-padding-8;
@@ -158,24 +164,11 @@
}
}
-// Temporary hack until `gitlab-ui` issue is fixed.
-// https://gitlab.com/gitlab-org/gitlab-ui/issues/164
.gl-dropdown .dropdown-menu-toggle {
- .gl-dropdown-caret {
- position: absolute;
- right: $gl-padding-8;
- top: $gl-padding-8;
- }
+ padding-right: $gl-padding-8;
- // Add some child to the button so that the default height kicks in
- // when there's no text (since the caret is now aboslute)
- &::after {
- border: 0;
- content: ' ';
- display: inline-block;
- margin: 0;
- padding: 0;
- position: relative;
+ .gl-dropdown-toggle-text {
+ min-height: $gl-line-height-20;
}
}
@@ -404,6 +397,15 @@
}
}
+ > button.dropdown-epic-button {
+ flex-direction: column;
+
+ .reference {
+ color: $gl-gray-400;
+ margin-top: $gl-padding-4;
+ }
+ }
+
&.droplab-item-selected i {
visibility: visible;
}
@@ -1017,3 +1019,54 @@ header.header-content .dropdown-menu.frequent-items-dropdown-menu {
opacity: 0;
}
}
+
+.labels-select-wrapper {
+ .labels-select-dropdown-contents {
+ min-height: $dropdown-min-height;
+ max-height: 330px;
+ background-color: $white-light;
+ border: 1px solid $border-color;
+ box-shadow: 0 2px 4px $dropdown-shadow-color;
+ z-index: 2;
+
+ .dropdown-content {
+ height: 135px;
+ }
+ }
+
+ .labels-fetch-loading {
+ top: 0;
+ left: 0;
+ opacity: 0.5;
+ background-color: $white-light;
+ z-index: 1;
+ }
+
+ .dropdown-header-button {
+ .gl-icon {
+ color: $dropdown-title-btn-color;
+
+ &:hover {
+ color: $gl-gray-400;
+ }
+ }
+ }
+
+ .label-item {
+ padding: 8px 20px;
+
+ &:hover,
+ &.is-focused {
+ @include dropdown-item-hover;
+
+ text-decoration: none;
+ }
+ }
+
+ .color-input-container {
+ .dropdown-label-color-preview {
+ border: 1px solid $gray-200;
+ border-right: 0;
+ }
+ }
+}
diff --git a/app/assets/stylesheets/framework/editor-lite.scss b/app/assets/stylesheets/framework/editor-lite.scss
new file mode 100644
index 00000000000..75d511d7f66
--- /dev/null
+++ b/app/assets/stylesheets/framework/editor-lite.scss
@@ -0,0 +1,5 @@
+.monaco-editor.gl-editor-lite {
+ .line-numbers {
+ @include gl-pt-0;
+ }
+}
diff --git a/app/assets/stylesheets/framework/filters.scss b/app/assets/stylesheets/framework/filters.scss
index 4b45a169a31..e151fff7eb3 100644
--- a/app/assets/stylesheets/framework/filters.scss
+++ b/app/assets/stylesheets/framework/filters.scss
@@ -56,6 +56,7 @@
padding-left: 12px;
position: relative;
margin-bottom: 0;
+ width: 1px;
}
.input-token {
diff --git a/app/assets/stylesheets/framework/images.scss b/app/assets/stylesheets/framework/images.scss
index d78c707192f..2c9397d363c 100644
--- a/app/assets/stylesheets/framework/images.scss
+++ b/app/assets/stylesheets/framework/images.scss
@@ -20,7 +20,7 @@
width: 100%;
}
- $image-widths: 80 130 250 306 394 430;
+ $image-widths: 80 130 150 250 306 394 430;
@each $width in $image-widths {
&.svg-#{$width} {
img,
diff --git a/app/assets/stylesheets/framework/mixins.scss b/app/assets/stylesheets/framework/mixins.scss
index d54648cc34b..621a4eddc34 100644
--- a/app/assets/stylesheets/framework/mixins.scss
+++ b/app/assets/stylesheets/framework/mixins.scss
@@ -257,7 +257,6 @@
width: 15px;
height: 15px;
display: $svg-display;
- fill: $gl-text-color;
top: $svg-top;
}
@@ -402,3 +401,21 @@
line-height: 16px;
text-align: center;
}
+
+@mixin middle-dot-divider {
+ &::after {
+ // Duplicate `content` property used as a fallback
+ // scss-lint:disable DuplicateProperty
+ content: '\00B7'; // middle dot fallback if browser does not support alternative content
+ content: '\00B7' / ''; // tell screen readers to ignore the content https://www.w3.org/TR/css-content-3/#accessibility
+ padding: 0 0.375rem;
+ font-weight: $gl-font-weight-bold;
+ }
+
+ &:last-child {
+ &::after {
+ content: '';
+ padding: 0;
+ }
+ }
+}
diff --git a/app/assets/stylesheets/framework/modal.scss b/app/assets/stylesheets/framework/modal.scss
index ac8437c23ca..f8c46a4495e 100644
--- a/app/assets/stylesheets/framework/modal.scss
+++ b/app/assets/stylesheets/framework/modal.scss
@@ -13,16 +13,14 @@
.page-title,
.modal-title {
+ max-width: 100%;
+ overflow: hidden;
+ text-overflow: ellipsis;
+
.modal-title-with-label span {
vertical-align: middle;
display: inline-block;
}
-
- .color-label {
- font-size: $gl-font-size;
- padding: $gl-vert-padding $label-padding-modal;
- vertical-align: middle;
- }
}
.modal-title {
diff --git a/app/assets/stylesheets/framework/spinner.scss b/app/assets/stylesheets/framework/spinner.scss
index 5e05311041c..b7a99d421c9 100644
--- a/app/assets/stylesheets/framework/spinner.scss
+++ b/app/assets/stylesheets/framework/spinner.scss
@@ -51,7 +51,8 @@
}
.btn {
- .spinner {
+ .spinner,
+ .gl-spinner {
vertical-align: text-bottom;
}
}
diff --git a/app/assets/stylesheets/framework/typography.scss b/app/assets/stylesheets/framework/typography.scss
index a1bfa03a5ac..d61a32d2d95 100644
--- a/app/assets/stylesheets/framework/typography.scss
+++ b/app/assets/stylesheets/framework/typography.scss
@@ -59,7 +59,7 @@
max-width: 100%;
}
- &:not(.md-file) img:not(.emoji) {
+ &:not(.md) img:not(.emoji) {
border: 1px solid $white-normal;
padding: 5px;
margin: 5px 0;
@@ -592,7 +592,7 @@ pre {
word-wrap: break-word;
color: $gl-text-color;
background-color: $gray-light;
- border: 1px solid $border-color;
+ border: 1px solid $gray-200;
border-radius: $border-radius-small;
}
diff --git a/app/assets/stylesheets/framework/variables.scss b/app/assets/stylesheets/framework/variables.scss
index e4853ca7bf5..4d858f88921 100644
--- a/app/assets/stylesheets/framework/variables.scss
+++ b/app/assets/stylesheets/framework/variables.scss
@@ -81,6 +81,7 @@ $gl-gray-400: #999;
$gl-gray-500: #777;
$gl-gray-600: #666;
$gl-gray-700: #555;
+$gl-gray-800: #333;
$green-50: #f1fdf6;
$green-100: #dcf5e7;
@@ -355,6 +356,7 @@ $list-text-height: 42px;
*/
$code-font-size: 90%;
$code-line-height: 1.6;
+$code-color: $gray-950;
/*
* Tooltips
diff --git a/app/assets/stylesheets/framework/vue_transitions.scss b/app/assets/stylesheets/framework/vue_transitions.scss
index a082cd25abe..1a536b97142 100644
--- a/app/assets/stylesheets/framework/vue_transitions.scss
+++ b/app/assets/stylesheets/framework/vue_transitions.scss
@@ -15,6 +15,7 @@
.slide-enter-from-element {
&.slide-enter,
&.slide-leave-to {
+ position: absolute;
transform: translateX(-150%);
}
}
@@ -22,6 +23,7 @@
.slide-enter-to-element {
&.slide-enter,
&.slide-leave-to {
+ position: absolute;
transform: translateX(150%);
}
}
diff --git a/app/assets/stylesheets/highlight/common.scss b/app/assets/stylesheets/highlight/common.scss
index bdeac7e97c0..31075b09b83 100644
--- a/app/assets/stylesheets/highlight/common.scss
+++ b/app/assets/stylesheets/highlight/common.scss
@@ -29,3 +29,15 @@
color: $link;
}
}
+
+@mixin line-coverage-border-color($coverage, $no-coverage) {
+ transition: border-left 0.1s ease-out;
+
+ &.coverage {
+ border-left: 3px solid $coverage;
+ }
+
+ &.no-coverage {
+ border-left: 3px solid $no-coverage;
+ }
+}
diff --git a/app/assets/stylesheets/highlight/themes/dark.scss b/app/assets/stylesheets/highlight/themes/dark.scss
index cbce0ba3f1e..5ab762a5104 100644
--- a/app/assets/stylesheets/highlight/themes/dark.scss
+++ b/app/assets/stylesheets/highlight/themes/dark.scss
@@ -24,6 +24,8 @@ $dark-pre-hll-bg: #373b41;
$dark-hll-bg: #373b41;
$dark-over-bg: #9f9ab5;
$dark-expanded-bg: #3e3e3e;
+$dark-coverage: #b5bd68;
+$dark-no-coverage: #de935f;
$dark-c: #969896;
$dark-err: #c66;
$dark-k: #b294bb;
@@ -124,12 +126,18 @@ $dark-il: #de935f;
}
td.diff-line-num.hll:not(.empty-cell),
+ td.line-coverage.hll:not(.empty-cell),
td.line_content.hll:not(.empty-cell) {
background-color: $dark-diff-not-empty-bg;
border-color: darken($dark-diff-not-empty-bg, 15%);
}
+ .line-coverage {
+ @include line-coverage-border-color($dark-coverage, $dark-no-coverage);
+ }
+
.diff-line-num.new,
+ .line-coverage.new,
.line_content.new {
@include diff-background($dark-new-bg, $dark-new-idiff, $dark-border);
@@ -140,6 +148,7 @@ $dark-il: #de935f;
}
.diff-line-num.old,
+ .line-coverage.old,
.line_content.old {
@include diff-background($dark-old-bg, $dark-old-idiff, $dark-border);
@@ -168,6 +177,7 @@ $dark-il: #de935f;
&:not(.diff-expanded) + .diff-expanded,
&.diff-expanded + .line_holder:not(.diff-expanded) {
> .diff-line-num,
+ > .line-coverage,
> .line_content {
border-top: 1px solid $black;
}
@@ -175,6 +185,7 @@ $dark-il: #de935f;
&.diff-expanded {
> .diff-line-num,
+ > .line-coverage,
> .line_content {
background: $dark-expanded-bg;
border-color: $dark-expanded-bg;
diff --git a/app/assets/stylesheets/highlight/themes/monokai.scss b/app/assets/stylesheets/highlight/themes/monokai.scss
index 1b61ffa37e3..348ef69cc4f 100644
--- a/app/assets/stylesheets/highlight/themes/monokai.scss
+++ b/app/assets/stylesheets/highlight/themes/monokai.scss
@@ -17,6 +17,8 @@ $monokai-diff-border: #808080;
$monokai-highlight-bg: #ffe792;
$monokai-over-bg: #9f9ab5;
$monokai-expanded-bg: #3e3e3e;
+$monokai-coverage: #a6e22e;
+$monokai-no-coverage: #fd971f;
$monokai-new-bg: rgba(166, 226, 46, 0.1);
$monokai-new-idiff: rgba(166, 226, 46, 0.15);
@@ -124,12 +126,18 @@ $monokai-gi: #a6e22e;
}
td.diff-line-num.hll:not(.empty-cell),
+ td.line-coverage.hll:not(.empty-cell),
td.line_content.hll:not(.empty-cell) {
background-color: $monokai-line-empty-bg;
border-color: $monokai-line-empty-border;
}
+ .line-coverage {
+ @include line-coverage-border-color($monokai-coverage, $monokai-no-coverage);
+ }
+
.diff-line-num.new,
+ .line-coverage.new,
.line_content.new {
@include diff-background($monokai-new-bg, $monokai-new-idiff, $monokai-diff-border);
@@ -140,6 +148,7 @@ $monokai-gi: #a6e22e;
}
.diff-line-num.old,
+ .line-coverage.old,
.line_content.old {
@include diff-background($monokai-old-bg, $monokai-old-idiff, $monokai-diff-border);
@@ -168,6 +177,7 @@ $monokai-gi: #a6e22e;
&:not(.diff-expanded) + .diff-expanded,
&.diff-expanded + .line_holder:not(.diff-expanded) {
> .diff-line-num,
+ > .line-coverage,
> .line_content {
border-top: 1px solid $black;
}
@@ -175,6 +185,7 @@ $monokai-gi: #a6e22e;
&.diff-expanded {
> .diff-line-num,
+ > .line-coverage,
> .line_content {
background: $monokai-expanded-bg;
border-color: $monokai-expanded-bg;
diff --git a/app/assets/stylesheets/highlight/themes/none.scss b/app/assets/stylesheets/highlight/themes/none.scss
index a7ede266fb5..c8ac3aa8305 100644
--- a/app/assets/stylesheets/highlight/themes/none.scss
+++ b/app/assets/stylesheets/highlight/themes/none.scss
@@ -51,6 +51,15 @@
@include match-line;
}
+ .line-coverage {
+ @include line-coverage-border-color($green-500, $orange-500);
+
+ &.old,
+ &.new {
+ background-color: $white-normal;
+ }
+ }
+
.diff-line-num {
&.old {
a {
@@ -83,6 +92,7 @@
&:not(.diff-expanded) + .diff-expanded,
&.diff-expanded + .line_holder:not(.diff-expanded) {
> .diff-line-num,
+ > .line-coverage,
> .line_content {
border-top: 1px solid $none-expanded-border;
}
@@ -90,6 +100,7 @@
&.diff-expanded {
> .diff-line-num,
+ > .line-coverage,
> .line_content {
background: $none-expanded-bg;
border-color: $none-expanded-bg;
diff --git a/app/assets/stylesheets/highlight/themes/solarized-dark.scss b/app/assets/stylesheets/highlight/themes/solarized-dark.scss
index 6569f3abc8b..f5b36480f18 100644
--- a/app/assets/stylesheets/highlight/themes/solarized-dark.scss
+++ b/app/assets/stylesheets/highlight/themes/solarized-dark.scss
@@ -21,6 +21,8 @@ $solarized-dark-highlight: #094554;
$solarized-dark-hll-bg: #174652;
$solarized-dark-over-bg: #9f9ab5;
$solarized-dark-expanded-bg: #010d10;
+$solarized-dark-coverage: #859900;
+$solarized-dark-no-coverage: #cb4b16;
$solarized-dark-c: #586e75;
$solarized-dark-err: #93a1a1;
$solarized-dark-g: #93a1a1;
@@ -128,12 +130,18 @@ $solarized-dark-il: #2aa198;
}
td.diff-line-num.hll:not(.empty-cell),
+ td.line-coverage.hll:not(.empty-cell),
td.line_content.hll:not(.empty-cell) {
background-color: $solarized-dark-hll-bg;
border-color: darken($solarized-dark-hll-bg, 15%);
}
+ .line-coverage {
+ @include line-coverage-border-color($solarized-dark-coverage, $solarized-dark-no-coverage);
+ }
+
.diff-line-num.new,
+ .line-coverage.new,
.line_content.new {
@include diff-background($solarized-dark-new-bg, $solarized-dark-new-idiff, $solarized-dark-border);
@@ -144,6 +152,7 @@ $solarized-dark-il: #2aa198;
}
.diff-line-num.old,
+ .line-coverage.old,
.line_content.old {
@include diff-background($solarized-dark-old-bg, $solarized-dark-old-idiff, $solarized-dark-border);
@@ -172,6 +181,7 @@ $solarized-dark-il: #2aa198;
&:not(.diff-expanded) + .diff-expanded,
&.diff-expanded + .line_holder:not(.diff-expanded) {
> .diff-line-num,
+ > .line-coverage,
> .line_content {
border-top: 1px solid $black;
}
@@ -179,6 +189,7 @@ $solarized-dark-il: #2aa198;
&.diff-expanded {
> .diff-line-num,
+ > .line-coverage,
> .line_content {
background: $solarized-dark-expanded-bg;
border-color: $solarized-dark-expanded-bg;
diff --git a/app/assets/stylesheets/highlight/themes/solarized-light.scss b/app/assets/stylesheets/highlight/themes/solarized-light.scss
index 4e74a9ea50a..993370642c3 100644
--- a/app/assets/stylesheets/highlight/themes/solarized-light.scss
+++ b/app/assets/stylesheets/highlight/themes/solarized-light.scss
@@ -23,6 +23,8 @@ $solarized-light-hll-bg: #ddd8c5;
$solarized-light-over-bg: #ded7fc;
$solarized-light-expanded-border: #d2cdbd;
$solarized-light-expanded-bg: #ece6d4;
+$solarized-light-coverage: #859900;
+$solarized-light-no-coverage: #cb4b16;
$solarized-light-c: #93a1a1;
$solarized-light-err: #586e75;
$solarized-light-g: #586e75;
@@ -135,12 +137,18 @@ $solarized-light-il: #2aa198;
}
td.diff-line-num.hll:not(.empty-cell),
+ td.line-coverage.hll:not(.empty-cell),
td.line_content.hll:not(.empty-cell) {
background-color: $solarized-light-hll-bg;
border-color: darken($solarized-light-hll-bg, 15%);
}
+ .line-coverage {
+ @include line-coverage-border-color($solarized-light-coverage, $solarized-light-no-coverage);
+ }
+
.diff-line-num.new,
+ .line-coverage.new,
.line_content.new {
@include diff-background($solarized-light-new-bg,
$solarized-light-new-idiff, $solarized-light-border);
@@ -152,6 +160,7 @@ $solarized-light-il: #2aa198;
}
.diff-line-num.old,
+ .line-coverage.old,
.line_content.old {
@include diff-background($solarized-light-old-bg, $solarized-light-old-idiff, $solarized-light-border);
@@ -180,6 +189,7 @@ $solarized-light-il: #2aa198;
&:not(.diff-expanded) + .diff-expanded,
&.diff-expanded + .line_holder:not(.diff-expanded) {
> .diff-line-num,
+ > .line-coverage,
> .line_content {
border-top: 1px solid $solarized-light-expanded-border;
}
@@ -187,6 +197,7 @@ $solarized-light-il: #2aa198;
&.diff-expanded {
> .diff-line-num,
+ > .line-coverage,
> .line_content {
background: $solarized-light-expanded-bg;
border-color: $solarized-light-expanded-bg;
diff --git a/app/assets/stylesheets/highlight/white_base.scss b/app/assets/stylesheets/highlight/white_base.scss
index 973f94c63aa..d82a0794d29 100644
--- a/app/assets/stylesheets/highlight/white_base.scss
+++ b/app/assets/stylesheets/highlight/white_base.scss
@@ -151,6 +151,7 @@ pre.code,
&:not(.diff-expanded) + .diff-expanded,
&.diff-expanded + .line_holder:not(.diff-expanded) {
> .diff-line-num,
+ > .line-coverage,
> .line_content {
border-top: 1px solid $white-expanded-border;
}
@@ -158,6 +159,7 @@ pre.code,
&.diff-expanded {
> .diff-line-num,
+ > .line-coverage,
> .line_content {
background: $white-expanded-bg;
border-color: $white-expanded-bg;
@@ -197,6 +199,22 @@ pre.code,
background-color: $line-select-yellow;
}
}
+
+ .line-coverage {
+ @include line-coverage-border-color($green-500, $orange-500);
+
+ &.old {
+ background-color: $line-removed;
+ }
+
+ &.new {
+ background-color: $line-added;
+ }
+
+ &.hll:not(.empty-cell) {
+ background-color: $line-select-yellow;
+ }
+ }
}
// highlight line via anchor
diff --git a/app/assets/stylesheets/notify.scss b/app/assets/stylesheets/notify.scss
index d77b7dfad68..ea82ba3e879 100644
--- a/app/assets/stylesheets/notify.scss
+++ b/app/assets/stylesheets/notify.scss
@@ -18,3 +18,19 @@ p.details {
pre.commit-message {
white-space: pre-wrap;
}
+
+.gl-label-scoped {
+ box-shadow: 0 0 0 2px currentColor inset;
+}
+
+.gl-label-text {
+ padding: 0 5px;
+}
+
+.gl-label-text-light {
+ color: $white-light;
+}
+
+.gl-label-text-dark {
+ color: $gl-gray-800;
+}
diff --git a/app/assets/stylesheets/page_bundles/_ide_monaco_overrides.scss b/app/assets/stylesheets/page_bundles/_ide_monaco_overrides.scss
new file mode 100644
index 00000000000..c47901dc177
--- /dev/null
+++ b/app/assets/stylesheets/page_bundles/_ide_monaco_overrides.scss
@@ -0,0 +1,146 @@
+
+// stylelint-disable selector-class-pattern
+// stylelint-disable selector-max-compound-selectors
+// stylelint-disable stylelint-gitlab/duplicate-selectors
+// stylelint-disable stylelint-gitlab/utility-classes
+
+.blob-editor-container {
+ flex: 1;
+ height: 0;
+ display: flex;
+ flex-direction: column;
+ justify-content: center;
+
+ .vertical-center {
+ min-height: auto;
+ }
+
+ .monaco-editor .lines-content .cigr {
+ display: none;
+ }
+
+ .monaco-editor .selected-text {
+ z-index: 1;
+ }
+
+ .monaco-editor .view-lines {
+ z-index: 2;
+ }
+
+ .is-readonly,
+ .editor.original {
+ .view-lines {
+ cursor: default;
+ }
+
+ .cursors-layer {
+ display: none;
+ }
+ }
+
+ .is-deleted {
+ .editor.modified {
+ .margin-view-overlays,
+ .lines-content,
+ .decorationsOverviewRuler {
+ // !important to override monaco inline styles
+ display: none !important;
+ }
+ }
+
+ .diffOverviewRuler.modified {
+ // !important to override monaco inline styles
+ display: none !important;
+ }
+ }
+
+ .is-added {
+ .editor.original {
+ .margin-view-overlays,
+ .lines-content,
+ .decorationsOverviewRuler {
+ // !important to override monaco inline styles
+ display: none !important;
+ }
+ }
+
+ .diffOverviewRuler.original {
+ // !important to override monaco inline styles
+ display: none !important;
+ }
+ }
+
+ .monaco-diff-editor.vs {
+ .editor.modified {
+ box-shadow: none;
+ }
+
+ .diagonal-fill {
+ display: none !important;
+ }
+
+ .diffOverview {
+ background-color: $white-light;
+ border-left: 1px solid $white-dark;
+ cursor: ns-resize;
+ }
+
+ .diffViewport {
+ display: none;
+ }
+
+ .char-insert {
+ background-color: $line-added-dark;
+ }
+
+ .char-delete {
+ background-color: $line-removed-dark;
+ }
+
+ .line-numbers {
+ color: $black-transparent;
+ }
+
+ .view-overlays {
+ .line-insert {
+ background-color: $line-added;
+ }
+
+ .line-delete {
+ background-color: $line-removed;
+ }
+ }
+
+ .margin {
+ background-color: $white-light;
+ border-right: 1px solid $gray-100;
+
+ .line-insert {
+ border-right: 1px solid $line-added-dark;
+ }
+
+ .line-delete {
+ border-right: 1px solid $line-removed-dark;
+ }
+ }
+
+ .margin-view-overlays .insert-sign,
+ .margin-view-overlays .delete-sign {
+ opacity: 0.4;
+ }
+ }
+}
+
+.multi-file-editor-holder {
+ height: 100%;
+ min-height: 0; // firefox fix
+
+ &.is-readonly .vs,
+ .vs .editor.original {
+ .monaco-editor,
+ .monaco-editor-background,
+ .monaco-editor .inputarea.ime-input {
+ background-color: $gray-50;
+ }
+ }
+}
diff --git a/app/assets/stylesheets/page_bundles/ide.scss b/app/assets/stylesheets/page_bundles/ide.scss
index 9c64714e5dd..c37f75d1533 100644
--- a/app/assets/stylesheets/page_bundles/ide.scss
+++ b/app/assets/stylesheets/page_bundles/ide.scss
@@ -1,6 +1,7 @@
@import 'framework/variables';
@import 'framework/mixins';
@import './ide_mixins';
+@import './ide_monaco_overrides';
$search-list-icon-width: 18px;
$ide-activity-bar-width: 60px;
@@ -16,11 +17,6 @@ $ide-commit-header-height: 48px;
display: inline-block;
}
-.fade-enter,
-.fade-leave-to {
- opacity: 0;
-}
-
.commit-message {
@include str-truncated(250px);
}
@@ -49,10 +45,6 @@ $ide-commit-header-height: 48px;
flex-direction: column;
flex: 1;
min-height: 0; // firefox fix
-
- a {
- color: $gl-text-color;
- }
}
.multi-file-loading-container {
@@ -75,7 +67,7 @@ $ide-commit-header-height: 48px;
.multi-file-tabs {
display: flex;
- background-color: $white-normal;
+ background-color: $gray-light;
box-shadow: inset 0 -1px $white-dark;
> ul {
@@ -160,157 +152,6 @@ $ide-commit-header-height: 48px;
height: 0;
}
-// stylelint-disable selector-class-pattern
-// stylelint-disable selector-max-compound-selectors
-// stylelint-disable stylelint-gitlab/duplicate-selectors
-// stylelint-disable stylelint-gitlab/utility-classes
-
-.blob-editor-container {
- flex: 1;
- height: 0;
- display: flex;
- flex-direction: column;
- justify-content: center;
-
- .vertical-center {
- min-height: auto;
- }
-
- .monaco-editor .lines-content .cigr {
- display: none;
- }
-
- .monaco-editor .selected-text {
- z-index: 1;
- }
-
- .monaco-editor .view-lines {
- z-index: 2;
- }
-
- .is-readonly,
- .editor.original {
- .view-lines {
- cursor: default;
- }
-
- .cursors-layer {
- display: none;
- }
- }
-
- .is-deleted {
- .editor.modified {
- .margin-view-overlays,
- .lines-content,
- .decorationsOverviewRuler {
- // !important to override monaco inline styles
- display: none !important;
- }
- }
-
- .diffOverviewRuler.modified {
- // !important to override monaco inline styles
- display: none !important;
- }
- }
-
- .is-added {
- .editor.original {
- .margin-view-overlays,
- .lines-content,
- .decorationsOverviewRuler {
- // !important to override monaco inline styles
- display: none !important;
- }
- }
-
- .diffOverviewRuler.original {
- // !important to override monaco inline styles
- display: none !important;
- }
- }
-
- .monaco-diff-editor.vs {
- .editor.modified {
- box-shadow: none;
- }
-
- .diagonal-fill {
- display: none !important;
- }
-
- .diffOverview {
- background-color: $white-light;
- border-left: 1px solid $white-dark;
- cursor: ns-resize;
- }
-
- .diffViewport {
- display: none;
- }
-
- .char-insert {
- background-color: $line-added-dark;
- }
-
- .char-delete {
- background-color: $line-removed-dark;
- }
-
- .line-numbers {
- color: $black-transparent;
- }
-
- .view-overlays {
- .line-insert {
- background-color: $line-added;
- }
-
- .line-delete {
- background-color: $line-removed;
- }
- }
-
- .margin {
- background-color: $white-light;
- border-right: 1px solid $gray-100;
-
- .line-insert {
- border-right: 1px solid $line-added-dark;
- }
-
- .line-delete {
- border-right: 1px solid $line-removed-dark;
- }
- }
-
- .margin-view-overlays .insert-sign,
- .margin-view-overlays .delete-sign {
- opacity: 0.4;
- }
- }
-}
-
-.multi-file-editor-holder {
- height: 100%;
- min-height: 0; // firefox fix
-
- &.is-readonly .vs,
- .vs .editor.original {
- .monaco-editor,
- .monaco-editor-background,
- .monaco-editor .inputarea.ime-input {
- background-color: $gray-50;
- }
- }
-}
-
-// stylelint-enable selector-class-pattern
-// stylelint-enable selector-max-compound-selectors
-// stylelint-enable stylelint-gitlab/duplicate-selectors
-// stylelint-enable stylelint-gitlab/utility-classes
-
.preview-container {
flex-grow: 1;
position: relative;
@@ -671,10 +512,6 @@ $ide-commit-header-height: 48px;
width: $ide-commit-row-height;
height: $ide-commit-row-height;
color: inherit;
-
- > svg {
- top: 0;
- }
}
.ide-commit-file-count {
@@ -864,39 +701,39 @@ $ide-commit-header-height: 48px;
margin-left: auto;
}
- .ide-nav-dropdown {
- width: 100%;
- margin-bottom: 12px;
+ button {
+ color: $gl-text-color;
+ }
+}
- .dropdown-menu {
- width: 385px;
- max-height: initial;
- }
+.ide-nav-dropdown {
+ width: 100%;
+ margin-bottom: 12px;
- .dropdown-menu-toggle {
- svg {
- vertical-align: middle;
- color: $gray-700;
+ .dropdown-menu {
+ width: 385px;
+ max-height: initial;
+ }
- &:hover {
- color: $gray-700;
- }
- }
+ .dropdown-menu-toggle {
+ svg {
+ vertical-align: middle;
+ color: $gray-700;
&:hover {
- background-color: $white-normal;
+ color: $gray-700;
}
}
- &.show {
- .dropdown-menu-toggle {
- background-color: $white-dark;
- }
+ &:hover {
+ background-color: $white-normal;
}
}
- button {
- color: $gl-text-color;
+ &.show {
+ .dropdown-menu-toggle {
+ background-color: $white-dark;
+ }
}
}
@@ -945,6 +782,8 @@ $ide-commit-header-height: 48px;
transform: translateY(0);
}
+.fade-enter,
+.fade-leave-to,
.commit-form-slide-up-enter,
.commit-form-slide-up-leave-to {
opacity: 0;
@@ -994,11 +833,6 @@ $ide-commit-header-height: 48px;
}
.ide-context-header {
- .ide-merge-requests-dropdown.dropdown-menu {
- width: 385px;
- max-height: initial;
- }
-
.avatar-container {
flex: 0 0 auto;
margin-right: 0;
@@ -1068,9 +902,6 @@ $ide-commit-header-height: 48px;
@include ide-trace-view();
.empty-state {
- margin-top: auto;
- margin-bottom: auto;
-
p {
margin: $grid-size 0;
text-align: center;
@@ -1097,10 +928,6 @@ $ide-commit-header-height: 48px;
min-height: 55px;
padding-left: $gl-padding;
padding-right: $gl-padding;
-
- .ci-status-icon {
- display: flex;
- }
}
.ide-job-item {
@@ -1140,7 +967,7 @@ $ide-commit-header-height: 48px;
}
.ide-nav-form {
- .nav-links li {
+ li {
width: 50%;
padding-left: 0;
padding-right: 0;
@@ -1227,10 +1054,6 @@ $ide-commit-header-height: 48px;
background-color: $blue-500;
outline: 0;
}
-
- svg {
- fill: currentColor;
- }
}
.ide-new-btn {
diff --git a/app/assets/stylesheets/pages/boards.scss b/app/assets/stylesheets/pages/boards.scss
index 42d7b0d08f7..a9079f036ab 100644
--- a/app/assets/stylesheets/pages/boards.scss
+++ b/app/assets/stylesheets/pages/boards.scss
@@ -266,20 +266,9 @@
background-color: $blue-50;
}
- .badge {
- border: 0;
- outline: 0;
-
- &:hover {
- text-decoration: underline;
- }
-
- @include media-breakpoint-down(lg) {
- font-size: $gl-font-size-xs;
- padding-left: $gl-padding-4;
- padding-right: $gl-padding-4;
- font-weight: $gl-font-weight-bold;
- }
+ .gl-label {
+ margin-top: 4px;
+ margin-right: 4px;
}
.confidential-icon {
diff --git a/app/assets/stylesheets/pages/builds.scss b/app/assets/stylesheets/pages/builds.scss
index 0db90fc88fc..c829695621c 100644
--- a/app/assets/stylesheets/pages/builds.scss
+++ b/app/assets/stylesheets/pages/builds.scss
@@ -357,3 +357,60 @@
}
}
}
+
+.environment-logs-viewer {
+ .build-trace-container {
+ position: relative;
+ }
+
+ .log-lines,
+ .gl-infinite-scroll-container {
+ // makes scrollbar visible by creating contrast
+ background: $black;
+ }
+
+ .gl-infinite-scroll-legend {
+ margin: 0;
+ }
+
+ .build-trace {
+ @include build-trace();
+ margin: 0;
+ }
+
+ .top-bar {
+ @include build-trace-top-bar($gl-line-height * 5);
+ position: relative;
+ top: 0;
+
+ .dropdown-menu-toggle {
+ width: 200px;
+
+ @include media-breakpoint-up(sm) {
+ width: 300px;
+ }
+ }
+
+ .controllers {
+ @include build-controllers(16px, flex-end, true, 2);
+ }
+
+ .refresh-control {
+ @include build-controllers(16px, flex-end, true, 0);
+ margin-left: 2px;
+ }
+ }
+
+ .btn-refresh svg {
+ top: 0;
+ }
+
+ .build-loader-animation {
+ @include build-loader-animation;
+ }
+
+ .log-footer {
+ color: $white-normal;
+ background-color: $gray-900;
+ }
+}
diff --git a/app/assets/stylesheets/pages/clusters.scss b/app/assets/stylesheets/pages/clusters.scss
index 88d6b0d3746..b9d415ae237 100644
--- a/app/assets/stylesheets/pages/clusters.scss
+++ b/app/assets/stylesheets/pages/clusters.scss
@@ -163,3 +163,9 @@
color: $black;
font-weight: $gl-font-weight-bold;
}
+
+.cluster-status-indicator {
+ &.disabled {
+ background-color: $gray-600;
+ }
+}
diff --git a/app/assets/stylesheets/pages/cycle_analytics.scss b/app/assets/stylesheets/pages/cycle_analytics.scss
index 89b673397a2..b1d79a41ba7 100644
--- a/app/assets/stylesheets/pages/cycle_analytics.scss
+++ b/app/assets/stylesheets/pages/cycle_analytics.scss
@@ -51,11 +51,11 @@
}
.stage-header {
- width: 18.5%;
+ width: 20.5%;
}
.median-header {
- width: 21.5%;
+ width: 19.5%;
}
.event-header {
diff --git a/app/assets/stylesheets/pages/diff.scss b/app/assets/stylesheets/pages/diff.scss
index 24c6fec064a..0c043e4f3fb 100644
--- a/app/assets/stylesheets/pages/diff.scss
+++ b/app/assets/stylesheets/pages/diff.scss
@@ -514,6 +514,10 @@ table.code {
position: absolute;
left: 0.5em;
}
+
+ &.with-coverage::before {
+ left: 0;
+ }
}
&.new {
@@ -522,6 +526,10 @@ table.code {
position: absolute;
left: 0.5em;
}
+
+ &.with-coverage::before {
+ left: 0;
+ }
}
}
}
diff --git a/app/assets/stylesheets/pages/environments.scss b/app/assets/stylesheets/pages/environments.scss
index 3892d9dbd07..1c9bfe962f6 100644
--- a/app/assets/stylesheets/pages/environments.scss
+++ b/app/assets/stylesheets/pages/environments.scss
@@ -98,6 +98,14 @@
}
}
+.refresh-dashboard-button {
+ margin-top: 22px;
+
+ @media(max-width: map-get($grid-breakpoints, sm)) {
+ margin-top: 0;
+ }
+}
+
.metric-area {
opacity: 0.25;
}
diff --git a/app/assets/stylesheets/pages/error_details.scss b/app/assets/stylesheets/pages/error_details.scss
index 61e2df7ea26..78cac12d6be 100644
--- a/app/assets/stylesheets/pages/error_details.scss
+++ b/app/assets/stylesheets/pages/error_details.scss
@@ -7,6 +7,26 @@
color: $blue-500;
border-color: $blue-500;
}
+
+ .error-details-header {
+ border-bottom: 1px solid $border-color;
+
+ @include media-breakpoint-down(xs) {
+ flex-flow: column;
+
+ .error-details-meta-culprit {
+ display: flex;
+ }
+
+ .error-details-options {
+ width: 100%;
+
+ .dropdown-toggle {
+ text-align: center;
+ }
+ }
+ }
+ }
}
.stacktrace {
diff --git a/app/assets/stylesheets/pages/error_list.scss b/app/assets/stylesheets/pages/error_list.scss
index f97953ce824..88fdcc47492 100644
--- a/app/assets/stylesheets/pages/error_list.scss
+++ b/app/assets/stylesheets/pages/error_list.scss
@@ -20,47 +20,19 @@ $gray-border: 1px solid $border-color;
}
}
- @include media-breakpoint-down(xs) {
- .table-row {
- border: $gray-border;
- border-radius: 4px;
- }
-
- .search-box {
- border-top: $gray-border;
- border-bottom: $gray-border;
- background-color: $gray-50;
- }
-
- .table-col {
- min-height: 68px;
-
- &::before {
- text-align: left !important;
- }
-
- &:first-child {
- div {
- padding: 0 !important;
- align-items: flex-end;
- }
- }
-
- &:last-child {
- height: 64px;
- background-color: $gray-normal;
-
- &::before {
- content: none !important;
- }
-
- div {
- width: 100% !important;
- padding: 0 !important;
+ @include media-breakpoint-down(md) {
+ .error-list-table {
+ .table-col {
+ min-height: 68px;
+
+ &:last-child {
+ &::before {
+ content: none !important;
+ }
- a {
- color: $blue-500;
- border-color: $blue-500;
+ div {
+ width: 100% !important;
+ padding: 0 !important;
}
}
}
diff --git a/app/assets/stylesheets/pages/error_tracking_list.scss b/app/assets/stylesheets/pages/error_tracking_list.scss
index cd1adb9a754..cc391ca6c97 100644
--- a/app/assets/stylesheets/pages/error_tracking_list.scss
+++ b/app/assets/stylesheets/pages/error_tracking_list.scss
@@ -1,5 +1,5 @@
.error-list {
- .sort-dropdown {
+ .dropdown {
min-width: auto;
}
}
diff --git a/app/assets/stylesheets/pages/groups.scss b/app/assets/stylesheets/pages/groups.scss
index 3085f5e89b5..305956e1baf 100644
--- a/app/assets/stylesheets/pages/groups.scss
+++ b/app/assets/stylesheets/pages/groups.scss
@@ -420,7 +420,7 @@ table.pipeline-project-metrics tr td {
p {
@include str-truncated;
- max-width: none;
+ max-width: 100%;
}
}
diff --git a/app/assets/stylesheets/pages/issuable.scss b/app/assets/stylesheets/pages/issuable.scss
index 43636f65eb8..fd56f655c0a 100644
--- a/app/assets/stylesheets/pages/issuable.scss
+++ b/app/assets/stylesheets/pages/issuable.scss
@@ -86,14 +86,19 @@
}
.issuable-show-labels {
- a {
+ .gl-label {
margin-bottom: 5px;
margin-right: 5px;
+ }
+
+ a {
display: inline-block;
.color-label {
padding: 4px $grid-size;
border-radius: $label-border-radius;
+ margin-right: 4px;
+ margin-bottom: 4px;
}
&:hover .color-label {
@@ -159,9 +164,25 @@
.avatar {
border-color: rgba($gray-normal, 0.2);
}
+ }
+ }
+ a.gl-label-icon {
+ color: $gray-500;
+ }
+
+ .gl-label .gl-label-link:hover {
+ text-decoration: none;
+ color: inherit;
+
+ .gl-label-text:last-of-type {
+ text-decoration: underline;
}
+ }
+ .gl-label .gl-label-icon:hover {
+ text-decoration: none;
+ color: $gray-500;
}
.btn-link {
@@ -800,11 +821,23 @@
a {
color: $gl-text-color;
+ }
- .fa {
- color: $gl-text-color-secondary;
+ .gl-label-link {
+ color: inherit;
+
+ &:hover {
+ text-decoration: none;
+
+ .gl-label-text:last-of-type {
+ text-decoration: underline;
+ }
}
}
+
+ .gl-label-icon {
+ color: $gray-500;
+ }
}
@media(max-width: map-get($grid-breakpoints, lg)-1) {
diff --git a/app/assets/stylesheets/pages/issues.scss b/app/assets/stylesheets/pages/issues.scss
index b03ad5c6b75..d77b30ce259 100644
--- a/app/assets/stylesheets/pages/issues.scss
+++ b/app/assets/stylesheets/pages/issues.scss
@@ -284,3 +284,22 @@ ul.related-merge-requests > li {
text-align: right;
}
}
+
+.issue-details {
+ .card-title {
+ a.anchor {
+ left: -16px;
+ top: 4px;
+ outline: none;
+
+ &::after {
+ content: image-url('icon_anchor.svg');
+ @include invisible(hidden);
+ }
+ }
+
+ &:hover > a.anchor::after {
+ @include invisible(visible);
+ }
+ }
+}
diff --git a/app/assets/stylesheets/pages/labels.scss b/app/assets/stylesheets/pages/labels.scss
index 7d5e185834b..095e881c50a 100644
--- a/app/assets/stylesheets/pages/labels.scss
+++ b/app/assets/stylesheets/pages/labels.scss
@@ -127,6 +127,11 @@
.color-label {
padding: $gl-padding-4 $grid-size;
}
+
+ .prepend-description-left {
+ vertical-align: top;
+ line-height: 24px;
+ }
}
.prioritized-labels {
@@ -305,10 +310,9 @@
width: 150px;
flex-shrink: 0;
- .badge {
- overflow: hidden;
- text-overflow: ellipsis;
- max-width: 100%;
+ .scoped-label-wrapper,
+ .gl-label {
+ line-height: $gl-line-height;
}
}
@@ -445,10 +449,19 @@
}
}
+.gl-label-scoped {
+ box-shadow: 0 0 0 2px currentColor inset;
+
+ &.gl-label-sm {
+ box-shadow: 0 0 0 1px inset;
+ }
+}
+
// Label inside title of Delete Label Modal
.modal-header .page-title {
.scoped-label-wrapper {
- .scoped-label {
+ .scoped-label,
+ .gl-label-icon {
line-height: 20px;
}
diff --git a/app/assets/stylesheets/pages/merge_requests.scss b/app/assets/stylesheets/pages/merge_requests.scss
index 5ca75c28ac3..ad8b251d3e4 100644
--- a/app/assets/stylesheets/pages/merge_requests.scss
+++ b/app/assets/stylesheets/pages/merge_requests.scss
@@ -614,6 +614,10 @@ $mr-widget-min-height: 69px;
.circle-icon-container {
color: $gl-text-color-quaternary;
}
+
+ .popover {
+ z-index: 240;
+ }
}
.card-new-merge-request {
diff --git a/app/assets/stylesheets/pages/milestone.scss b/app/assets/stylesheets/pages/milestone.scss
index b399662997c..cd1154b88a5 100644
--- a/app/assets/stylesheets/pages/milestone.scss
+++ b/app/assets/stylesheets/pages/milestone.scss
@@ -59,9 +59,19 @@ $status-box-line-height: 26px;
}
.issuable-row {
- span a {
- color: $gl-text-color;
- word-wrap: break-word;
+ span {
+ a {
+ color: $gl-text-color;
+ word-wrap: break-word;
+ }
+
+ .gl-label-link {
+ color: inherit;
+ }
+
+ .gl-label-icon {
+ color: $gray-500;
+ }
}
}
diff --git a/app/assets/stylesheets/pages/notes.scss b/app/assets/stylesheets/pages/notes.scss
index 1a06ae1ed41..aaecbd6ff00 100644
--- a/app/assets/stylesheets/pages/notes.scss
+++ b/app/assets/stylesheets/pages/notes.scss
@@ -283,7 +283,7 @@ $note-form-margin-left: 72px;
text-transform: lowercase;
}
- a {
+ a:not(.gl-link) {
color: $blue-600;
}
@@ -316,7 +316,7 @@ $note-form-margin-left: 72px;
.btn.delete-description-history {
position: absolute;
top: 18px;
- right: 0;
+ right: 10px;
}
pre {
@@ -671,6 +671,16 @@ $note-form-margin-left: 72px;
a:hover {
text-decoration: underline;
}
+
+ .gl-label-link:hover,
+ .gl-label-icon:hover {
+ text-decoration: none;
+ color: inherit;
+
+ .gl-label-text:last-of-type {
+ text-decoration: underline;
+ }
+ }
}
/**
diff --git a/app/assets/stylesheets/pages/pipelines.scss b/app/assets/stylesheets/pages/pipelines.scss
index 82bef91230e..8b0dd73c565 100644
--- a/app/assets/stylesheets/pages/pipelines.scss
+++ b/app/assets/stylesheets/pages/pipelines.scss
@@ -1090,6 +1090,20 @@ button.mini-pipeline-graph-dropdown-toggle {
}
}
+.codequality-report {
+ .media {
+ padding: $gl-padding;
+ }
+
+ .media-body {
+ flex-direction: row;
+ }
+
+ .report-block-container {
+ height: auto !important;
+ }
+}
+
.progress-bar.bg-primary {
background-color: $blue-500 !important;
}
diff --git a/app/assets/stylesheets/pages/profile.scss b/app/assets/stylesheets/pages/profile.scss
index 08796742f08..82b3698287c 100644
--- a/app/assets/stylesheets/pages/profile.scss
+++ b/app/assets/stylesheets/pages/profile.scss
@@ -74,17 +74,12 @@
// Middle dot divider between each element in a list of items.
.middle-dot-divider {
- &::after {
- content: '\00B7'; // Middle Dot
- padding: 0 6px;
- font-weight: $gl-font-weight-bold;
- }
+ @include middle-dot-divider;
+}
- &:last-child {
- &::after {
- content: '';
- padding: 0;
- }
+.middle-dot-divider-sm {
+ @include media-breakpoint-up(sm) {
+ @include middle-dot-divider;
}
}
@@ -118,6 +113,14 @@
}
}
+.ssh-keys-list {
+ .last-used-at,
+ .expires,
+ .key-created-at {
+ line-height: 32px;
+ }
+}
+
.key-created-at {
line-height: 42px;
}
@@ -194,10 +197,6 @@
}
.user-profile {
- .cover-controls a {
- margin-left: 5px;
- }
-
.profile-header {
margin: 0 $gl-padding;
diff --git a/app/assets/stylesheets/pages/prometheus.scss b/app/assets/stylesheets/pages/prometheus.scss
index 8133a167687..30ef047bf04 100644
--- a/app/assets/stylesheets/pages/prometheus.scss
+++ b/app/assets/stylesheets/pages/prometheus.scss
@@ -91,10 +91,6 @@
margin-bottom: $gl-padding-8;
}
-.prometheus-graph-title {
- font-size: $gl-font-size-large;
-}
-
.alert-current-setting {
max-width: 240px;
}
diff --git a/app/assets/stylesheets/pages/settings.scss b/app/assets/stylesheets/pages/settings.scss
index 416537ef763..c90b92a5b49 100644
--- a/app/assets/stylesheets/pages/settings.scss
+++ b/app/assets/stylesheets/pages/settings.scss
@@ -130,6 +130,10 @@
border-radius: $border-radius-base;
}
+.empty-variables {
+ padding: 20px 0;
+}
+
.warning-title {
color: $orange-500;
}
@@ -370,3 +374,31 @@
.push-pull-table {
margin-top: 1em;
}
+
+.ci-variable-table {
+ table {
+ thead {
+ border-bottom: 1px solid $white-normal;
+ }
+
+ tr {
+ td,
+ th {
+ padding-left: 0;
+ }
+
+ th {
+ background-color: transparent;
+ font-weight: $gl-font-weight-bold;
+ border: 0;
+ }
+ }
+ }
+
+ @media(max-width: map-get($grid-breakpoints, lg)-1) {
+ .truncated-container {
+ justify-content: flex-end;
+ }
+ }
+}
+
diff --git a/app/assets/stylesheets/utilities.scss b/app/assets/stylesheets/utilities.scss
index e27ec571531..1eff21401a2 100644
--- a/app/assets/stylesheets/utilities.scss
+++ b/app/assets/stylesheets/utilities.scss
@@ -54,8 +54,10 @@
.mh-50vh { max-height: 50vh; }
.font-size-inherit { font-size: inherit; }
-
+.gl-w-8 { width: px-to-rem($grid-size); }
+.gl-w-16 { width: px-to-rem($grid-size * 2); }
.gl-w-64 { width: px-to-rem($grid-size * 8); }
+.gl-h-8 { height: px-to-rem($grid-size); }
.gl-h-32 { height: px-to-rem($grid-size * 4); }
.gl-h-64 { height: px-to-rem($grid-size * 8); }
@@ -65,6 +67,7 @@
// Classes using mixins coming from @gitlab-ui
// can be removed once https://gitlab.com/gitlab-org/gitlab/merge_requests/19021 has been merged
+.gl-bg-blue-50 { @include gl-bg-blue-50; }
.gl-bg-red-100 { @include gl-bg-red-100; }
.gl-bg-orange-100 { @include gl-bg-orange-100; }
.gl-bg-gray-100 { @include gl-bg-gray-100; }
@@ -76,3 +79,5 @@
.gl-text-red-700 { @include gl-text-red-700; }
.gl-text-orange-700 { @include gl-text-orange-700; }
.gl-text-green-700 { @include gl-text-green-700; }
+
+.gl-align-items-center { @include gl-align-items-center; }
diff --git a/app/assets/stylesheets/vendors/tribute.scss b/app/assets/stylesheets/vendors/tribute.scss
new file mode 100644
index 00000000000..95b1d80a586
--- /dev/null
+++ b/app/assets/stylesheets/vendors/tribute.scss
@@ -0,0 +1,41 @@
+.tribute-container {
+ background: $white-light;
+ border: 1px solid $gl-gray-100;
+ border-radius: $border-radius-base;
+ box-shadow: 0 0 5px $issue-boards-card-shadow;
+ color: $black;
+ margin-top: $gl-padding-12;
+ max-height: 200px;
+ min-width: 120px;
+ overflow-y: auto;
+ z-index: 11110 !important;
+
+ ul {
+ list-style: none;
+ margin-bottom: 0;
+ padding: $gl-padding-8 1px;
+ }
+
+ li {
+ cursor: pointer;
+ padding: $gl-padding-8 $gl-padding;
+ white-space: nowrap;
+
+ small {
+ color: $gl-gray-500;
+ }
+
+ &.highlight {
+ background-color: $gray-darker;
+
+ .avatar {
+ @include disable-all-animation;
+ border: 1px solid $white-light;
+ }
+
+ small {
+ color: inherit;
+ }
+ }
+ }
+}
diff --git a/app/controllers/admin/application_settings_controller.rb b/app/controllers/admin/application_settings_controller.rb
index 54c9bde067d..2192bcc96ee 100644
--- a/app/controllers/admin/application_settings_controller.rb
+++ b/app/controllers/admin/application_settings_controller.rb
@@ -244,6 +244,8 @@ class Admin::ApplicationSettingsController < Admin::ApplicationController
def render_update_error
action = valid_setting_panels.include?(action_name) ? action_name : :general
+ flash[:alert] = _('Application settings update failed')
+
render action
end
diff --git a/app/controllers/admin/broadcast_messages_controller.rb b/app/controllers/admin/broadcast_messages_controller.rb
index 06ba916fc55..3233c765941 100644
--- a/app/controllers/admin/broadcast_messages_controller.rb
+++ b/app/controllers/admin/broadcast_messages_controller.rb
@@ -62,6 +62,7 @@ class Admin::BroadcastMessagesController < Admin::ApplicationController
starts_at
target_path
broadcast_type
+ dismissable
))
end
end
diff --git a/app/controllers/admin/concerns/authenticates_2fa_for_admin_mode.rb b/app/controllers/admin/concerns/authenticates_2fa_for_admin_mode.rb
new file mode 100644
index 00000000000..c6fd1d55e51
--- /dev/null
+++ b/app/controllers/admin/concerns/authenticates_2fa_for_admin_mode.rb
@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+module Authenticates2FAForAdminMode
+ extend ActiveSupport::Concern
+
+ included do
+ include AuthenticatesWithTwoFactor
+ end
+
+ def admin_mode_prompt_for_two_factor(user)
+ return handle_locked_user(user) unless user.can?(:log_in)
+
+ session[:otp_user_id] = user.id
+ setup_u2f_authentication(user)
+
+ render 'admin/sessions/two_factor', layout: 'application'
+ end
+
+ def admin_mode_authenticate_with_two_factor
+ user = current_user
+
+ return handle_locked_user(user) unless user.can?(:log_in)
+
+ if user_params[:otp_attempt].present? && session[:otp_user_id]
+ admin_mode_authenticate_with_two_factor_via_otp(user)
+ elsif user_params[:device_response].present? && session[:otp_user_id]
+ admin_mode_authenticate_with_two_factor_via_u2f(user)
+ elsif user && user.valid_password?(user_params[:password])
+ admin_mode_prompt_for_two_factor(user)
+ else
+ invalid_login_redirect
+ end
+ end
+
+ def admin_mode_authenticate_with_two_factor_via_otp(user)
+ if valid_otp_attempt?(user)
+ # Remove any lingering user data from login
+ session.delete(:otp_user_id)
+
+ user.save!
+
+ # The admin user has successfully passed 2fa, enable admin mode ignoring password
+ enable_admin_mode
+ else
+ user.increment_failed_attempts!
+ Gitlab::AppLogger.info("Failed Admin Mode Login: user=#{user.username} ip=#{request.remote_ip} method=OTP")
+ flash.now[:alert] = _('Invalid two-factor code.')
+
+ admin_mode_prompt_for_two_factor(user)
+ end
+ end
+
+ def admin_mode_authenticate_with_two_factor_via_u2f(user)
+ if U2fRegistration.authenticate(user, u2f_app_id, user_params[:device_response], session[:challenge])
+ # Remove any lingering user data from login
+ session.delete(:otp_user_id)
+ session.delete(:challenge)
+
+ # The admin user has successfully passed 2fa, enable admin mode ignoring password
+ enable_admin_mode
+ else
+ user.increment_failed_attempts!
+ Gitlab::AppLogger.info("Failed Admin Mode Login: user=#{user.username} ip=#{request.remote_ip} method=U2F")
+ flash.now[:alert] = _('Authentication via U2F device failed.')
+
+ admin_mode_prompt_for_two_factor(user)
+ end
+ end
+
+ private
+
+ def enable_admin_mode
+ if current_user_mode.enable_admin_mode!(skip_password_validation: true)
+ redirect_to redirect_path, notice: _('Admin mode enabled')
+ else
+ invalid_login_redirect
+ end
+ end
+
+ def invalid_login_redirect
+ flash.now[:alert] = _('Invalid login or password')
+ render :new
+ end
+end
diff --git a/app/controllers/admin/dashboard_controller.rb b/app/controllers/admin/dashboard_controller.rb
index f24ce9b5d03..ae94edac734 100644
--- a/app/controllers/admin/dashboard_controller.rb
+++ b/app/controllers/admin/dashboard_controller.rb
@@ -12,6 +12,7 @@ class Admin::DashboardController < Admin::ApplicationController
@projects = Project.order_id_desc.without_deleted.with_route.limit(10)
@users = User.order_id_desc.limit(10)
@groups = Group.order_id_desc.with_route.limit(10)
+ @notices = Gitlab::ConfigChecker::PumaRuggedChecker.check
end
# rubocop: enable CodeReuse/ActiveRecord
diff --git a/app/controllers/admin/integrations_controller.rb b/app/controllers/admin/integrations_controller.rb
new file mode 100644
index 00000000000..715aa882bda
--- /dev/null
+++ b/app/controllers/admin/integrations_controller.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+class Admin::IntegrationsController < Admin::ApplicationController
+ include ServiceParams
+
+ before_action :not_found, unless: :instance_level_integrations_enabled?
+ before_action :service, only: [:edit, :update, :test]
+
+ def edit
+ end
+
+ def update
+ @service.attributes = service_params[:service]
+
+ if @service.save(context: :manual_change)
+ redirect_to edit_admin_application_settings_integration_path(@service), notice: success_message
+ else
+ render :edit
+ end
+ end
+
+ def test
+ if @service.can_test?
+ render json: service_test_response, status: :ok
+ else
+ render json: {}, status: :not_found
+ end
+ end
+
+ private
+
+ def instance_level_integrations_enabled?
+ Feature.enabled?(:instance_level_integrations)
+ end
+
+ def project
+ # TODO: Change to something more meaningful
+ Project.first
+ end
+
+ def service
+ @service ||= project.find_or_initialize_service(params[:id])
+ end
+
+ def success_message
+ message = @service.active? ? _('activated') : _('settings saved, but not activated')
+
+ _('%{service_title} %{message}.') % { service_title: @service.title, message: message }
+ end
+
+ def service_test_response
+ unless @service.update(service_params[:service])
+ return { error: true, message: _('Validations failed.'), service_response: @service.errors.full_messages.join(','), test_failed: false }
+ end
+
+ data = @service.test_data(project, current_user)
+ outcome = @service.test(data)
+
+ unless outcome[:success]
+ return { error: true, message: _('Test failed.'), service_response: outcome[:result].to_s, test_failed: true }
+ end
+
+ {}
+ rescue Gitlab::HTTP::BlockedUrlError => e
+ { error: true, message: _('Test failed.'), service_response: e.message, test_failed: true }
+ end
+end
diff --git a/app/controllers/admin/jobs_controller.rb b/app/controllers/admin/jobs_controller.rb
index 892f6dc657c..a3a18a115e9 100644
--- a/app/controllers/admin/jobs_controller.rb
+++ b/app/controllers/admin/jobs_controller.rb
@@ -3,10 +3,10 @@
class Admin::JobsController < Admin::ApplicationController
def index
# We need all builds for tabs counters
- @all_builds = JobsFinder.new(current_user: current_user).execute
+ @all_builds = Ci::JobsFinder.new(current_user: current_user).execute
@scope = params[:scope]
- @builds = JobsFinder.new(current_user: current_user, params: params).execute
+ @builds = Ci::JobsFinder.new(current_user: current_user, params: params).execute
@builds = @builds.eager_load_everything
@builds = @builds.page(params[:page]).per(30)
end
diff --git a/app/controllers/admin/serverless/domains_controller.rb b/app/controllers/admin/serverless/domains_controller.rb
index c37aec13105..9741a0716f2 100644
--- a/app/controllers/admin/serverless/domains_controller.rb
+++ b/app/controllers/admin/serverless/domains_controller.rb
@@ -2,7 +2,7 @@
class Admin::Serverless::DomainsController < Admin::ApplicationController
before_action :check_feature_flag
- before_action :domain, only: [:update, :verify]
+ before_action :domain, only: [:update, :verify, :destroy]
def index
@domain = PagesDomain.instance_serverless.first_or_initialize
@@ -30,6 +30,20 @@ class Admin::Serverless::DomainsController < Admin::ApplicationController
end
end
+ def destroy
+ if domain.serverless_domain_clusters.count > 0
+ return redirect_to admin_serverless_domains_path,
+ status: :conflict,
+ notice: _('Domain cannot be deleted while associated to one or more clusters.')
+ end
+
+ domain.destroy!
+
+ redirect_to admin_serverless_domains_path,
+ status: :found,
+ notice: _('Domain was successfully deleted.')
+ end
+
def verify
result = VerifyPagesDomainService.new(domain).execute
diff --git a/app/controllers/admin/sessions_controller.rb b/app/controllers/admin/sessions_controller.rb
index f9587655a8d..841ad46b47e 100644
--- a/app/controllers/admin/sessions_controller.rb
+++ b/app/controllers/admin/sessions_controller.rb
@@ -1,6 +1,7 @@
# frozen_string_literal: true
class Admin::SessionsController < ApplicationController
+ include Authenticates2FAForAdminMode
include InternalRedirect
before_action :user_is_admin!
@@ -15,7 +16,9 @@ class Admin::SessionsController < ApplicationController
end
def create
- if current_user_mode.enable_admin_mode!(password: params[:password])
+ if two_factor_enabled_for_user?
+ admin_mode_authenticate_with_two_factor
+ elsif current_user_mode.enable_admin_mode!(password: user_params[:password])
redirect_to redirect_path, notice: _('Admin mode enabled')
else
flash.now[:alert] = _('Invalid login or password')
@@ -37,6 +40,10 @@ class Admin::SessionsController < ApplicationController
render_404 unless current_user&.admin?
end
+ def two_factor_enabled_for_user?
+ current_user&.two_factor_enabled?
+ end
+
def redirect_path
redirect_to_path = safe_redirect_path(stored_location_for(:redirect)) || safe_redirect_path_for_url(request.referer)
@@ -51,4 +58,13 @@ class Admin::SessionsController < ApplicationController
def excluded_redirect_paths
[new_admin_session_path, admin_session_path]
end
+
+ def user_params
+ params.fetch(:user, {}).permit(:password, :otp_attempt, :device_response)
+ end
+
+ def valid_otp_attempt?(user)
+ user.validate_and_consume_otp!(user_params[:otp_attempt]) ||
+ user.invalidate_otp_backup_code!(user_params[:otp_attempt])
+ end
end
diff --git a/app/controllers/application_controller.rb b/app/controllers/application_controller.rb
index 7cb629dee21..c5c586ea489 100644
--- a/app/controllers/application_controller.rb
+++ b/app/controllers/application_controller.rb
@@ -26,6 +26,7 @@ class ApplicationController < ActionController::Base
before_action :ldap_security_check
around_action :sentry_context
before_action :default_headers
+ before_action :default_cache_headers
before_action :add_gon_variables, if: :html_request?
before_action :configure_permitted_parameters, if: :devise_controller?
before_action :require_email, unless: :devise_controller?
@@ -34,7 +35,9 @@ class ApplicationController < ActionController::Base
before_action :check_impersonation_availability
before_action :required_signup_info
- around_action :set_current_context
+ prepend_around_action :set_current_context
+
+ around_action :sessionless_bypass_admin_mode!, if: :sessionless_user?
around_action :set_locale
around_action :set_session_storage
around_action :set_current_admin
@@ -147,10 +150,6 @@ class ApplicationController < ActionController::Base
payload[:username] = logged_user.try(:username)
end
- if response.status == 422 && response.body.present? && response.content_type == 'application/json'
- payload[:response] = response.body
- end
-
payload[:queue_duration] = request.env[::Gitlab::Middleware::RailsQueueDuration::GITLAB_RAILS_QUEUE_DURATION_KEY]
end
@@ -258,7 +257,9 @@ class ApplicationController < ActionController::Base
headers['X-XSS-Protection'] = '1; mode=block'
headers['X-UA-Compatible'] = 'IE=edge'
headers['X-Content-Type-Options'] = 'nosniff'
+ end
+ def default_cache_headers
if current_user
headers['Cache-Control'] = default_cache_control
headers['Pragma'] = 'no-cache' # HTTP 1.0 compatibility
@@ -307,7 +308,7 @@ class ApplicationController < ActionController::Base
if current_user && current_user.requires_ldap_check?
return unless current_user.try_obtain_ldap_lease
- unless Gitlab::Auth::LDAP::Access.allowed?(current_user)
+ unless Gitlab::Auth::Ldap::Access.allowed?(current_user)
sign_out current_user
flash[:alert] = _("Access denied for your LDAP account.")
redirect_to new_user_session_path
diff --git a/app/controllers/concerns/authenticates_with_two_factor.rb b/app/controllers/concerns/authenticates_with_two_factor.rb
index 6f0c7abac16..b885e55f902 100644
--- a/app/controllers/concerns/authenticates_with_two_factor.rb
+++ b/app/controllers/concerns/authenticates_with_two_factor.rb
@@ -3,8 +3,6 @@
# == AuthenticatesWithTwoFactor
#
# Controller concern to handle two-factor authentication
-#
-# Upon inclusion, skips `require_no_authentication` on `:create`.
module AuthenticatesWithTwoFactor
extend ActiveSupport::Concern
diff --git a/app/controllers/concerns/clientside_preview_csp.rb b/app/controllers/concerns/clientside_preview_csp.rb
new file mode 100644
index 00000000000..6892c441b67
--- /dev/null
+++ b/app/controllers/concerns/clientside_preview_csp.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module ClientsidePreviewCSP
+ extend ActiveSupport::Concern
+
+ included do
+ content_security_policy do |p|
+ next if p.directives.blank?
+ next unless Gitlab::CurrentSettings.web_ide_clientside_preview_enabled?
+
+ default_frame_src = p.directives['frame-src'] || p.directives['default-src']
+ frame_src_values = Array.wrap(default_frame_src) | [Gitlab::CurrentSettings.web_ide_clientside_preview_bundler_url].compact
+
+ p.frame_src(*frame_src_values)
+ end
+ end
+end
diff --git a/app/controllers/concerns/cycle_analytics_params.rb b/app/controllers/concerns/cycle_analytics_params.rb
index 3e67f1f54cb..50e340dc9b1 100644
--- a/app/controllers/concerns/cycle_analytics_params.rb
+++ b/app/controllers/concerns/cycle_analytics_params.rb
@@ -38,7 +38,7 @@ module CycleAnalyticsParams
end
def to_utc_time(field)
- date = field.is_a?(Date) ? field : Date.parse(field)
+ date = field.is_a?(Date) || field.is_a?(Time) ? field : Date.parse(field)
date.to_time.utc
end
end
diff --git a/app/controllers/concerns/invisible_captcha.rb b/app/controllers/concerns/invisible_captcha.rb
deleted file mode 100644
index 45c0a5c58ef..00000000000
--- a/app/controllers/concerns/invisible_captcha.rb
+++ /dev/null
@@ -1,51 +0,0 @@
-# frozen_string_literal: true
-
-module InvisibleCaptcha
- extend ActiveSupport::Concern
-
- included do
- invisible_captcha only: :create, on_spam: :on_honeypot_spam_callback, on_timestamp_spam: :on_timestamp_spam_callback
- end
-
- def on_honeypot_spam_callback
- return unless Feature.enabled?(:invisible_captcha)
-
- invisible_captcha_honeypot_counter.increment
- log_request('Invisible_Captcha_Honeypot_Request')
-
- head(200)
- end
-
- def on_timestamp_spam_callback
- return unless Feature.enabled?(:invisible_captcha)
-
- invisible_captcha_timestamp_counter.increment
- log_request('Invisible_Captcha_Timestamp_Request')
-
- redirect_to new_user_session_path, alert: InvisibleCaptcha.timestamp_error_message
- end
-
- def invisible_captcha_honeypot_counter
- @invisible_captcha_honeypot_counter ||=
- Gitlab::Metrics.counter(:bot_blocked_by_invisible_captcha_honeypot,
- 'Counter of blocked sign up attempts with filled honeypot')
- end
-
- def invisible_captcha_timestamp_counter
- @invisible_captcha_timestamp_counter ||=
- Gitlab::Metrics.counter(:bot_blocked_by_invisible_captcha_timestamp,
- 'Counter of blocked sign up attempts with invalid timestamp')
- end
-
- def log_request(message)
- request_information = {
- message: message,
- env: :invisible_captcha_signup_bot_detected,
- remote_ip: request.ip,
- request_method: request.request_method,
- path: request.fullpath
- }
-
- Gitlab::AuthLogger.error(request_information)
- end
-end
diff --git a/app/controllers/concerns/invisible_captcha_on_signup.rb b/app/controllers/concerns/invisible_captcha_on_signup.rb
new file mode 100644
index 00000000000..9bea6145ff3
--- /dev/null
+++ b/app/controllers/concerns/invisible_captcha_on_signup.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+module InvisibleCaptchaOnSignup
+ extend ActiveSupport::Concern
+
+ included do
+ invisible_captcha only: :create, on_spam: :on_honeypot_spam_callback, on_timestamp_spam: :on_timestamp_spam_callback
+ end
+
+ def on_honeypot_spam_callback
+ return unless Feature.enabled?(:invisible_captcha)
+
+ invisible_captcha_honeypot_counter.increment
+ log_request('Invisible_Captcha_Honeypot_Request')
+
+ head(200)
+ end
+
+ def on_timestamp_spam_callback
+ return unless Feature.enabled?(:invisible_captcha)
+
+ invisible_captcha_timestamp_counter.increment
+ log_request('Invisible_Captcha_Timestamp_Request')
+
+ redirect_to new_user_session_path, alert: InvisibleCaptcha.timestamp_error_message
+ end
+
+ def invisible_captcha_honeypot_counter
+ @invisible_captcha_honeypot_counter ||=
+ Gitlab::Metrics.counter(:bot_blocked_by_invisible_captcha_honeypot,
+ 'Counter of blocked sign up attempts with filled honeypot')
+ end
+
+ def invisible_captcha_timestamp_counter
+ @invisible_captcha_timestamp_counter ||=
+ Gitlab::Metrics.counter(:bot_blocked_by_invisible_captcha_timestamp,
+ 'Counter of blocked sign up attempts with invalid timestamp')
+ end
+
+ def log_request(message)
+ request_information = {
+ message: message,
+ env: :invisible_captcha_signup_bot_detected,
+ remote_ip: request.ip,
+ request_method: request.request_method,
+ path: request.fullpath
+ }
+
+ Gitlab::AuthLogger.error(request_information)
+ end
+end
diff --git a/app/controllers/concerns/issuable_actions.rb b/app/controllers/concerns/issuable_actions.rb
index c4abaacd573..ca43bf42580 100644
--- a/app/controllers/concerns/issuable_actions.rb
+++ b/app/controllers/concerns/issuable_actions.rb
@@ -137,7 +137,7 @@ module IssuableActions
end
notes = prepare_notes_for_rendering(notes)
- notes = notes.select { |n| n.visible_for?(current_user) }
+ notes = notes.select { |n| n.readable_by?(current_user) }
discussions = Discussion.build_collection(notes, issuable)
diff --git a/app/controllers/concerns/lfs_request.rb b/app/controllers/concerns/lfs_request.rb
index 3152d959ae4..2844acea271 100644
--- a/app/controllers/concerns/lfs_request.rb
+++ b/app/controllers/concerns/lfs_request.rb
@@ -116,6 +116,10 @@ module LfsRequest
@objects ||= (params[:objects] || []).to_a
end
+ def objects_oids
+ objects.map { |o| o['oid'].to_s }
+ end
+
def has_authentication_ability?(capability)
(authentication_abilities || []).include?(capability)
end
diff --git a/app/controllers/concerns/notes_actions.rb b/app/controllers/concerns/notes_actions.rb
index 3d599d9e7f9..7dd2f6e5706 100644
--- a/app/controllers/concerns/notes_actions.rb
+++ b/app/controllers/concerns/notes_actions.rb
@@ -29,7 +29,7 @@ module NotesActions
end
notes = prepare_notes_for_rendering(notes)
- notes = notes.select { |n| n.visible_for?(current_user) }
+ notes = notes.select { |n| n.readable_by?(current_user) }
notes_json[:notes] =
if use_note_serializer?
diff --git a/app/controllers/concerns/sessionless_authentication.rb b/app/controllers/concerns/sessionless_authentication.rb
index d5c26fca957..a9ef33bf3b9 100644
--- a/app/controllers/concerns/sessionless_authentication.rb
+++ b/app/controllers/concerns/sessionless_authentication.rb
@@ -5,12 +5,6 @@
# Controller concern to handle PAT, RSS, and static objects token authentication methods
#
module SessionlessAuthentication
- extend ActiveSupport::Concern
-
- included do
- before_action :enable_admin_mode!, if: :sessionless_user?
- end
-
# This filter handles personal access tokens, atom requests with rss tokens, and static object tokens
def authenticate_sessionless_user!(request_format)
user = Gitlab::Auth::RequestAuthenticator.new(request).find_sessionless_user(request_format)
@@ -32,9 +26,9 @@ module SessionlessAuthentication
end
end
- def enable_admin_mode!
- return unless Feature.enabled?(:user_mode_in_session)
+ def sessionless_bypass_admin_mode!(&block)
+ return yield unless Feature.enabled?(:user_mode_in_session)
- current_user_mode.enable_sessionless_admin_mode!
+ Gitlab::Auth::CurrentUserMode.bypass_session!(current_user.id, &block)
end
end
diff --git a/app/controllers/concerns/snippets_actions.rb b/app/controllers/concerns/snippets_actions.rb
index 014232a7d05..a18cdd58abb 100644
--- a/app/controllers/concerns/snippets_actions.rb
+++ b/app/controllers/concerns/snippets_actions.rb
@@ -6,20 +6,18 @@ module SnippetsActions
def edit
end
- # rubocop:disable Gitlab/ModuleWithInstanceVariables
def raw
disposition = params[:inline] == 'false' ? 'attachment' : 'inline'
workhorse_set_content_type!
send_data(
- convert_line_endings(@snippet.content),
+ convert_line_endings(blob.data),
type: 'text/plain; charset=utf-8',
disposition: disposition,
- filename: @snippet.sanitized_file_name
+ filename: Snippet.sanitized_file_name(blob.name)
)
end
- # rubocop:enable Gitlab/ModuleWithInstanceVariables
def js_request?
request.format.js?
@@ -30,4 +28,11 @@ module SnippetsActions
def convert_line_endings(content)
params[:line_ending] == 'raw' ? content : content.gsub(/\r\n/, "\n")
end
+
+ def check_repository_error
+ repository_errors = Array(snippet.errors.delete(:repository))
+
+ flash.now[:alert] = repository_errors.first if repository_errors.present?
+ recaptcha_check_with_fallback(repository_errors.empty?) { render :edit }
+ end
end
diff --git a/app/controllers/concerns/spammable_actions.rb b/app/controllers/concerns/spammable_actions.rb
index 9ec8f930a78..46ba270f328 100644
--- a/app/controllers/concerns/spammable_actions.rb
+++ b/app/controllers/concerns/spammable_actions.rb
@@ -11,7 +11,7 @@ module SpammableActions
end
def mark_as_spam
- if Spam::MarkAsSpamService.new(spammable: spammable).execute
+ if Spam::MarkAsSpamService.new(target: spammable).execute
redirect_to spammable_path, notice: _("%{spammable_titlecase} was submitted to Akismet successfully.") % { spammable_titlecase: spammable.spammable_entity_type.titlecase }
else
redirect_to spammable_path, alert: _('Error with Akismet. Please check the logs for more info.')
diff --git a/app/controllers/concerns/static_object_external_storage_csp.rb b/app/controllers/concerns/static_object_external_storage_csp.rb
new file mode 100644
index 00000000000..0be83e31d8b
--- /dev/null
+++ b/app/controllers/concerns/static_object_external_storage_csp.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+module StaticObjectExternalStorageCSP
+ extend ActiveSupport::Concern
+
+ included do
+ content_security_policy do |p|
+ next if p.directives.blank?
+ next unless Gitlab::CurrentSettings.static_objects_external_storage_enabled?
+
+ default_connect_src = p.directives['connect-src'] || p.directives['default-src']
+ connect_src_values = Array.wrap(default_connect_src) | [Gitlab::CurrentSettings.static_objects_external_storage_url]
+ p.connect_src(*connect_src_values)
+ end
+ end
+end
diff --git a/app/controllers/concerns/uploads_actions.rb b/app/controllers/concerns/uploads_actions.rb
index 549a443b1a8..f489de42864 100644
--- a/app/controllers/concerns/uploads_actions.rb
+++ b/app/controllers/concerns/uploads_actions.rb
@@ -9,6 +9,7 @@ module UploadsActions
included do
prepend_before_action :set_request_format_from_path_extension
+ skip_before_action :default_cache_headers, only: :show
rescue_from FileUploader::InvalidSecret, with: :render_404
end
@@ -35,10 +36,6 @@ module UploadsActions
def show
return render_404 unless uploader&.exists?
- # We need to reset caching from the applications controller to get rid of the no-store value
- headers['Cache-Control'] = ''
- headers['Pragma'] = ''
-
ttl, directives = *cache_settings
ttl ||= 0
directives ||= { private: true, must_revalidate: true }
diff --git a/app/controllers/explore/snippets_controller.rb b/app/controllers/explore/snippets_controller.rb
index 61068df77d1..3a56a48e578 100644
--- a/app/controllers/explore/snippets_controller.rb
+++ b/app/controllers/explore/snippets_controller.rb
@@ -1,17 +1,15 @@
# frozen_string_literal: true
class Explore::SnippetsController < Explore::ApplicationController
- include PaginatedCollection
include Gitlab::NoteableMetadata
def index
@snippets = SnippetsFinder.new(current_user, explore: true)
.execute
.page(params[:page])
+ .without_count
.inc_author
- return if redirect_out_of_range(@snippets)
-
@noteable_meta_data = noteable_meta_data(@snippets, 'Snippet')
end
end
diff --git a/app/controllers/graphql_controller.rb b/app/controllers/graphql_controller.rb
index d7ff2ded5ae..522d171b5bf 100644
--- a/app/controllers/graphql_controller.rb
+++ b/app/controllers/graphql_controller.rb
@@ -15,6 +15,11 @@ class GraphqlController < ApplicationController
before_action :authorize_access_api!
before_action(only: [:execute]) { authenticate_sessionless_user!(:api) }
+ # Since we deactivate authentication from the main ApplicationController and
+ # defer it to :authorize_access_api!, we need to override the bypass session
+ # callback execution order here
+ around_action :sessionless_bypass_admin_mode!, if: :sessionless_user?
+
def execute
result = multiplex? ? execute_multiplex : execute_query
diff --git a/app/controllers/groups/deploy_tokens_controller.rb b/app/controllers/groups/deploy_tokens_controller.rb
new file mode 100644
index 00000000000..a765922fc54
--- /dev/null
+++ b/app/controllers/groups/deploy_tokens_controller.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+class Groups::DeployTokensController < Groups::ApplicationController
+ before_action :authorize_admin_group!
+
+ def revoke
+ @token = @group.deploy_tokens.find(params[:id])
+ @token.revoke!
+
+ redirect_to group_settings_ci_cd_path(@group, anchor: 'js-deploy-tokens')
+ end
+end
diff --git a/app/controllers/groups/registry/repositories_controller.rb b/app/controllers/groups/registry/repositories_controller.rb
index 84c25cfb180..ac4ca197d72 100644
--- a/app/controllers/groups/registry/repositories_controller.rb
+++ b/app/controllers/groups/registry/repositories_controller.rb
@@ -17,7 +17,7 @@ module Groups
serializer = ContainerRepositoriesSerializer
.new(current_user: current_user)
- if Feature.enabled?(:vue_container_registry_explorer)
+ if Feature.enabled?(:vue_container_registry_explorer, group)
render json: serializer.with_pagination(request, response)
.represent_read_only(@images)
else
diff --git a/app/controllers/groups/settings/ci_cd_controller.rb b/app/controllers/groups/settings/ci_cd_controller.rb
index 0e83d057484..3d347429398 100644
--- a/app/controllers/groups/settings/ci_cd_controller.rb
+++ b/app/controllers/groups/settings/ci_cd_controller.rb
@@ -6,9 +6,12 @@ module Groups
skip_cross_project_access_check :show
before_action :authorize_admin_group!
before_action :authorize_update_max_artifacts_size!, only: [:update]
+ before_action do
+ push_frontend_feature_flag(:new_variables_ui, @group, default_enabled: true)
+ end
+ before_action :define_variables, only: [:show, :create_deploy_token]
def show
- define_ci_variables
end
def update
@@ -38,8 +41,23 @@ module Groups
redirect_to group_settings_ci_cd_path
end
+ def create_deploy_token
+ @new_deploy_token = Groups::DeployTokens::CreateService.new(@group, current_user, deploy_token_params).execute
+
+ if @new_deploy_token.persisted?
+ flash.now[:notice] = s_('DeployTokens|Your new group deploy token has been created.')
+ end
+
+ render 'show'
+ end
+
private
+ def define_variables
+ define_ci_variables
+ define_deploy_token_variables
+ end
+
def define_ci_variables
@variable = Ci::GroupVariable.new(group: group)
.present(current_user: current_user)
@@ -47,6 +65,12 @@ module Groups
.map { |variable| variable.present(current_user: current_user) }
end
+ def define_deploy_token_variables
+ @deploy_tokens = @group.deploy_tokens.active
+
+ @new_deploy_token = DeployToken.new
+ end
+
def authorize_admin_group!
return render_404 unless can?(current_user, :admin_group, group)
end
@@ -70,6 +94,10 @@ module Groups
def update_group_params
params.require(:group).permit(:max_artifacts_size)
end
+
+ def deploy_token_params
+ params.require(:deploy_token).permit(:name, :expires_at, :read_repository, :read_registry, :username)
+ end
end
end
end
diff --git a/app/controllers/groups_controller.rb b/app/controllers/groups_controller.rb
index 958dc27984f..7175eefcde7 100644
--- a/app/controllers/groups_controller.rb
+++ b/app/controllers/groups_controller.rb
@@ -97,7 +97,7 @@ class GroupsController < Groups::ApplicationController
end
def edit
- @badge_api_endpoint = expose_url(api_v4_groups_badges_path(id: @group.id))
+ @badge_api_endpoint = expose_path(api_v4_groups_badges_path(id: @group.id))
end
def projects
@@ -195,7 +195,8 @@ class GroupsController < Groups::ApplicationController
:require_two_factor_authentication,
:two_factor_grace_period,
:project_creation_level,
- :subgroup_creation_level
+ :subgroup_creation_level,
+ :default_branch_protection
]
end
diff --git a/app/controllers/ide_controller.rb b/app/controllers/ide_controller.rb
index 4c9aac9a327..8a838db04f9 100644
--- a/app/controllers/ide_controller.rb
+++ b/app/controllers/ide_controller.rb
@@ -3,6 +3,9 @@
class IdeController < ApplicationController
layout 'fullscreen'
+ include ClientsidePreviewCSP
+ include StaticObjectExternalStorageCSP
+
def index
Gitlab::UsageDataCounters::WebIdeCounter.increment_views_count
end
diff --git a/app/controllers/import/gitea_controller.rb b/app/controllers/import/gitea_controller.rb
index a23b2f8139e..f0888e08622 100644
--- a/app/controllers/import/gitea_controller.rb
+++ b/app/controllers/import/gitea_controller.rb
@@ -16,7 +16,13 @@ class Import::GiteaController < Import::GithubController
# Must be defined or it will 404
def status
- super
+ if blocked_url?
+ session[access_token_key] = nil
+
+ redirect_to new_import_url, alert: _('Specified URL cannot be used.')
+ else
+ super
+ end
end
private
@@ -54,4 +60,19 @@ class Import::GiteaController < Import::GithubController
def client_options
{ host: provider_url, api_version: 'v1' }
end
+
+ def blocked_url?
+ Gitlab::UrlBlocker.blocked_url?(
+ provider_url,
+ {
+ allow_localhost: allow_local_requests?,
+ allow_local_network: allow_local_requests?,
+ schemes: %w(http https)
+ }
+ )
+ end
+
+ def allow_local_requests?
+ Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services?
+ end
end
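
The blocked_url? guard added above delegates to Gitlab::UrlBlocker with a scheme whitelist and optional local-network allowances. A rough stdlib-only approximation of the two checks it configures; this is not the real UrlBlocker (which also resolves DNS and more), and the hosts are invented examples:

    require 'uri'

    # Returns true when the URL should be rejected: bad scheme, or a local
    # target while local requests are disallowed.
    def blocked?(url, allow_localhost: false)
      uri = URI.parse(url)
      return true unless %w(http https).include?(uri.scheme)
      return true if !allow_localhost && %w(localhost 127.0.0.1).include?(uri.host)

      false
    end

    puts blocked?('ftp://gitea.example.com')                            # => true  (scheme not allowed)
    puts blocked?('http://127.0.0.1:3000')                              # => true  (local target rejected)
    puts blocked?('https://gitea.example.com', allow_localhost: true)   # => false
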
diff --git a/app/controllers/import/gitlab_projects_controller.rb b/app/controllers/import/gitlab_projects_controller.rb
index c9c487cec26..6b8436d766c 100644
--- a/app/controllers/import/gitlab_projects_controller.rb
+++ b/app/controllers/import/gitlab_projects_controller.rb
@@ -1,9 +1,14 @@
# frozen_string_literal: true
class Import::GitlabProjectsController < Import::BaseController
+ include WorkhorseRequest
+
before_action :whitelist_query_limiting, only: [:create]
before_action :verify_gitlab_project_import_enabled
+ skip_before_action :verify_authenticity_token, only: [:authorize]
+ before_action :verify_workhorse_api!, only: [:authorize]
+
def new
@namespace = Namespace.find(project_params[:namespace_id])
return render_404 unless current_user.can?(:create_projects, @namespace)
@@ -28,10 +33,29 @@ class Import::GitlabProjectsController < Import::BaseController
end
end
+ def authorize
+ set_workhorse_internal_api_content_type
+
+ authorized = ImportExportUploader.workhorse_authorize(
+ has_length: false,
+ maximum_size: Gitlab::CurrentSettings.max_attachment_size.megabytes.to_i)
+
+ render json: authorized
+ rescue SocketError
+ render json: _("Error uploading file"), status: :internal_server_error
+ end
+
private
def file_is_valid?
- return false unless project_params[:file] && project_params[:file].respond_to?(:read)
+ # TODO: remove the condition and the private method after the WH version including
+ # https://gitlab.com/gitlab-org/gitlab-workhorse/-/merge_requests/470
+ # is released and GITLAB_WORKHORSE_VERSION is updated accordingly.
+ if with_workhorse_upload_acceleration?
+ return false unless project_params[:file].is_a?(::UploadedFile)
+ else
+ return false unless project_params[:file] && project_params[:file].respond_to?(:read)
+ end
filename = project_params[:file].original_filename
@@ -51,4 +75,8 @@ class Import::GitlabProjectsController < Import::BaseController
def whitelist_query_limiting
Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-foss/issues/42437')
end
+
+ def with_workhorse_upload_acceleration?
+ request.headers[Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER].present?
+ end
end
diff --git a/app/controllers/ldap/omniauth_callbacks_controller.rb b/app/controllers/ldap/omniauth_callbacks_controller.rb
index 71a88bf3395..8e4d8f3d21b 100644
--- a/app/controllers/ldap/omniauth_callbacks_controller.rb
+++ b/app/controllers/ldap/omniauth_callbacks_controller.rb
@@ -4,9 +4,9 @@ class Ldap::OmniauthCallbacksController < OmniauthCallbacksController
extend ::Gitlab::Utils::Override
def self.define_providers!
- return unless Gitlab::Auth::LDAP::Config.sign_in_enabled?
+ return unless Gitlab::Auth::Ldap::Config.sign_in_enabled?
- Gitlab::Auth::LDAP::Config.available_servers.each do |server|
+ Gitlab::Auth::Ldap::Config.available_servers.each do |server|
alias_method server['provider_name'], :ldap
end
end
@@ -14,9 +14,9 @@ class Ldap::OmniauthCallbacksController < OmniauthCallbacksController
# We only find ourselves here
# if the authentication to LDAP was successful.
def ldap
- return unless Gitlab::Auth::LDAP::Config.sign_in_enabled?
+ return unless Gitlab::Auth::Ldap::Config.sign_in_enabled?
- sign_in_user_flow(Gitlab::Auth::LDAP::User)
+ sign_in_user_flow(Gitlab::Auth::Ldap::User)
end
define_providers!
diff --git a/app/controllers/omniauth_callbacks_controller.rb b/app/controllers/omniauth_callbacks_controller.rb
index bc3308fd6c6..d82a46e57ea 100644
--- a/app/controllers/omniauth_callbacks_controller.rb
+++ b/app/controllers/omniauth_callbacks_controller.rb
@@ -2,6 +2,7 @@
class OmniauthCallbacksController < Devise::OmniauthCallbacksController
include AuthenticatesWithTwoFactor
+ include Authenticates2FAForAdminMode
include Devise::Controllers::Rememberable
include AuthHelper
include InitializesCurrentUserMode
@@ -97,7 +98,7 @@ class OmniauthCallbacksController < Devise::OmniauthCallbacksController
log_audit_event(current_user, with: oauth['provider'])
if Feature.enabled?(:user_mode_in_session)
- return admin_mode_flow if current_user_mode.admin_mode_requested?
+ return admin_mode_flow(auth_module::User) if current_user_mode.admin_mode_requested?
end
identity_linker ||= auth_module::IdentityLinker.new(current_user, oauth, session)
@@ -245,13 +246,19 @@ class OmniauthCallbacksController < Devise::OmniauthCallbacksController
end
end
- def admin_mode_flow
- if omniauth_identity_matches_current_user?
+ def admin_mode_flow(auth_user_class)
+ auth_user = build_auth_user(auth_user_class)
+
+ return fail_admin_mode_invalid_credentials unless omniauth_identity_matches_current_user?
+
+ if current_user.two_factor_enabled? && !auth_user.bypass_two_factor?
+ admin_mode_prompt_for_two_factor(current_user)
+ else
+ # Can only reach here if the omniauth identity matches current user
+ # and current_user is an admin that requested admin mode
current_user_mode.enable_admin_mode!(skip_password_validation: true)
redirect_to stored_location_for(:redirect) || admin_root_path, notice: _('Admin mode enabled')
- else
- fail_admin_mode_invalid_credentials
end
end
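
The reworked admin_mode_flow above has three outcomes: bail out when the OmniAuth identity does not match the current user, prompt for 2FA when it is enabled and not bypassed by the provider, otherwise enable admin mode. Reduced to plain Ruby so the branching is easy to scan; the keyword arguments are stand-ins for the real checks:

    def admin_mode_outcome(identity_matches:, two_factor_enabled:, bypass_two_factor:)
      return :fail_invalid_credentials unless identity_matches

      if two_factor_enabled && !bypass_two_factor
        :prompt_for_two_factor
      else
        :enable_admin_mode
      end
    end

    puts admin_mode_outcome(identity_matches: false, two_factor_enabled: true,  bypass_two_factor: false) # => fail_invalid_credentials
    puts admin_mode_outcome(identity_matches: true,  two_factor_enabled: true,  bypass_two_factor: false) # => prompt_for_two_factor
    puts admin_mode_outcome(identity_matches: true,  two_factor_enabled: false, bypass_two_factor: false) # => enable_admin_mode
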
diff --git a/app/controllers/profiles/keys_controller.rb b/app/controllers/profiles/keys_controller.rb
index 055d900eece..b9cb71ae89a 100644
--- a/app/controllers/profiles/keys_controller.rb
+++ b/app/controllers/profiles/keys_controller.rb
@@ -55,6 +55,6 @@ class Profiles::KeysController < Profiles::ApplicationController
private
def key_params
- params.require(:key).permit(:title, :key)
+ params.require(:key).permit(:title, :key, :expires_at)
end
end
diff --git a/app/controllers/profiles_controller.rb b/app/controllers/profiles_controller.rb
index 2b7571e42b7..c9f46eb72c5 100644
--- a/app/controllers/profiles_controller.rb
+++ b/app/controllers/profiles_controller.rb
@@ -117,6 +117,7 @@ class ProfilesController < Profiles::ApplicationController
:private_profile,
:include_private_contributions,
:timezone,
+ :job_title,
status: [:emoji, :message]
)
end
diff --git a/app/controllers/projects/blob_controller.rb b/app/controllers/projects/blob_controller.rb
index 01e5103198b..5788fc17a9b 100644
--- a/app/controllers/projects/blob_controller.rb
+++ b/app/controllers/projects/blob_controller.rb
@@ -31,6 +31,7 @@ class Projects::BlobController < Projects::ApplicationController
before_action only: :show do
push_frontend_feature_flag(:code_navigation, @project)
+ push_frontend_feature_flag(:suggest_pipeline) if experiment_enabled?(:suggest_pipeline)
end
def new
diff --git a/app/controllers/projects/commits_controller.rb b/app/controllers/projects/commits_controller.rb
index 15bb35dd0be..b161e44660e 100644
--- a/app/controllers/projects/commits_controller.rb
+++ b/app/controllers/projects/commits_controller.rb
@@ -64,10 +64,13 @@ class Projects::CommitsController < Projects::ApplicationController
render_404 unless @path.empty? || request.format == :atom || @repository.blob_at(@commit.id, @path) || @repository.tree(@commit.id, @path).entries.present?
@limit, @offset = (params[:limit] || 40).to_i, (params[:offset] || 0).to_i
search = params[:search]
+ author = params[:author]
@commits =
if search.present?
@repository.find_commits_by_message(search, @ref, @path, @limit, @offset)
+ elsif author.present?
+ @repository.commits(@ref, author: author, path: @path, limit: @limit, offset: @offset)
else
@repository.commits(@ref, path: @path, limit: @limit, offset: @offset)
end
diff --git a/app/controllers/projects/deploy_keys_controller.rb b/app/controllers/projects/deploy_keys_controller.rb
index f13fb4d0b3d..f43e9f2bd19 100644
--- a/app/controllers/projects/deploy_keys_controller.rb
+++ b/app/controllers/projects/deploy_keys_controller.rb
@@ -1,7 +1,6 @@
# frozen_string_literal: true
class Projects::DeployKeysController < Projects::ApplicationController
- include RepositorySettingsRedirect
respond_to :html
# Authorize
@@ -12,7 +11,7 @@ class Projects::DeployKeysController < Projects::ApplicationController
def index
respond_to do |format|
- format.html { redirect_to_repository_settings(@project, anchor: 'js-deploy-keys-settings') }
+ format.html { redirect_to_ci_cd_settings }
format.json do
render json: Projects::Settings::DeployKeysPresenter.new(@project, current_user: current_user).as_json
end
@@ -20,7 +19,7 @@ class Projects::DeployKeysController < Projects::ApplicationController
end
def new
- redirect_to_repository_settings(@project, anchor: 'js-deploy-keys-settings')
+ redirect_to_ci_cd_settings
end
def create
@@ -30,7 +29,7 @@ class Projects::DeployKeysController < Projects::ApplicationController
flash[:alert] = @key.errors.full_messages.join(', ').html_safe
end
- redirect_to_repository_settings(@project, anchor: 'js-deploy-keys-settings')
+ redirect_to_ci_cd_settings
end
def edit
@@ -39,7 +38,7 @@ class Projects::DeployKeysController < Projects::ApplicationController
def update
if deploy_key.update(update_params)
flash[:notice] = _('Deploy key was successfully updated.')
- redirect_to_repository_settings(@project, anchor: 'js-deploy-keys-settings')
+ redirect_to_ci_cd_settings
else
render 'edit'
end
@@ -51,7 +50,7 @@ class Projects::DeployKeysController < Projects::ApplicationController
return render_404 unless key
respond_to do |format|
- format.html { redirect_to_repository_settings(@project, anchor: 'js-deploy-keys-settings') }
+ format.html { redirect_to_ci_cd_settings }
format.json { head :ok }
end
end
@@ -62,7 +61,7 @@ class Projects::DeployKeysController < Projects::ApplicationController
return render_404 unless deploy_key_project
respond_to do |format|
- format.html { redirect_to_repository_settings(@project, anchor: 'js-deploy-keys-settings') }
+ format.html { redirect_to_ci_cd_settings }
format.json { head :ok }
end
end
@@ -97,4 +96,8 @@ class Projects::DeployKeysController < Projects::ApplicationController
access_denied!
end
end
+
+ def redirect_to_ci_cd_settings
+ redirect_to project_settings_ci_cd_path(@project, anchor: 'js-deploy-keys-settings')
+ end
end
diff --git a/app/controllers/projects/deploy_tokens_controller.rb b/app/controllers/projects/deploy_tokens_controller.rb
index 830b1f4fe4a..4a70424ec01 100644
--- a/app/controllers/projects/deploy_tokens_controller.rb
+++ b/app/controllers/projects/deploy_tokens_controller.rb
@@ -7,6 +7,6 @@ class Projects::DeployTokensController < Projects::ApplicationController
@token = @project.deploy_tokens.find(params[:id])
@token.revoke!
- redirect_to project_settings_repository_path(project, anchor: 'js-deploy-tokens')
+ redirect_to project_settings_ci_cd_path(project, anchor: 'js-deploy-tokens')
end
end
diff --git a/app/controllers/projects/forks_controller.rb b/app/controllers/projects/forks_controller.rb
index 9806b91c7e8..248b75d16ed 100644
--- a/app/controllers/projects/forks_controller.rb
+++ b/app/controllers/projects/forks_controller.rb
@@ -3,6 +3,7 @@
class Projects::ForksController < Projects::ApplicationController
include ContinueParams
include RendersMemberAccess
+ include Gitlab::Utils::StrongMemoize
# Authorize
before_action :whitelist_query_limiting, only: [:create]
@@ -10,6 +11,7 @@ class Projects::ForksController < Projects::ApplicationController
before_action :authorize_download_code!
before_action :authenticate_user!, only: [:new, :create]
before_action :authorize_fork_project!, only: [:new, :create]
+ before_action :authorize_fork_namespace!, only: [:create]
# rubocop: disable CodeReuse/ActiveRecord
def index
@@ -37,18 +39,15 @@ class Projects::ForksController < Projects::ApplicationController
# rubocop: enable CodeReuse/ActiveRecord
def new
- @namespaces = current_user.manageable_namespaces
- @namespaces.delete(@project.namespace)
+ @namespaces = fork_service.valid_fork_targets - [project.namespace]
end
# rubocop: disable CodeReuse/ActiveRecord
def create
- namespace = Namespace.find(params[:namespace_key])
-
- @forked_project = namespace.projects.find_by(path: project.path)
+ @forked_project = fork_namespace.projects.find_by(path: project.path)
@forked_project = nil unless @forked_project && @forked_project.forked_from_project == project
- @forked_project ||= ::Projects::ForkService.new(project, current_user, namespace: namespace).execute
+ @forked_project ||= fork_service.execute
if !@forked_project.saved? || !@forked_project.forked?
render :error
@@ -64,6 +63,22 @@ class Projects::ForksController < Projects::ApplicationController
private
+ def fork_service
+ strong_memoize(:fork_service) do
+ ::Projects::ForkService.new(project, current_user, namespace: fork_namespace)
+ end
+ end
+
+ def fork_namespace
+ strong_memoize(:fork_namespace) do
+ Namespace.find(params[:namespace_key]) if params[:namespace_key].present?
+ end
+ end
+
+ def authorize_fork_namespace!
+ access_denied! unless fork_namespace && fork_service.valid_fork_target?
+ end
+
def whitelist_query_limiting
Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-foss/issues/42335')
end
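
Worth noting why the forks controller reaches for strong_memoize rather than plain ||=: fork_namespace can legitimately be nil (no :namespace_key param), and @value ||= would re-run the lookup on every call in that case. A minimal stand-in for the pattern, not the real Gitlab::Utils::StrongMemoize:

    class Lookup
      def initialize
        @calls = 0
      end

      attr_reader :calls

      def result
        # Cache on "has the ivar been assigned?", not on truthiness,
        # so a nil result is still memoized.
        return @result if defined?(@result)

        @calls += 1
        @result = nil # imagine an expensive find that returned nothing
      end
    end

    lookup = Lookup.new
    3.times { lookup.result }
    puts lookup.calls  # => 1 (computed once, even though the value is nil)
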
diff --git a/app/controllers/projects/graphs_controller.rb b/app/controllers/projects/graphs_controller.rb
index 67d3f49af18..889dcefb65a 100644
--- a/app/controllers/projects/graphs_controller.rb
+++ b/app/controllers/projects/graphs_controller.rb
@@ -37,7 +37,8 @@ class Projects::GraphsController < Projects::ApplicationController
private
def get_commits
- @commits = @project.repository.commits(@ref, limit: 2000, skip_merges: true)
+ @commits_limit = 2000
+ @commits = @project.repository.commits(@ref, limit: @commits_limit, skip_merges: true)
@commits_graph = Gitlab::Graphs::Commits.new(@commits)
@commits_per_week_days = @commits_graph.commits_per_week_days
@commits_per_time = @commits_graph.commits_per_time
diff --git a/app/controllers/projects/hooks_controller.rb b/app/controllers/projects/hooks_controller.rb
index 5fa0339f44d..097a357889f 100644
--- a/app/controllers/projects/hooks_controller.rb
+++ b/app/controllers/projects/hooks_controller.rb
@@ -12,7 +12,8 @@ class Projects::HooksController < Projects::ApplicationController
layout "project_settings"
def index
- redirect_to project_settings_integrations_path(@project)
+ @hooks = @project.hooks
+ @hook = ProjectHook.new
end
def create
@@ -24,7 +25,7 @@ class Projects::HooksController < Projects::ApplicationController
flash[:alert] = @hook.errors.full_messages.join.html_safe
end
- redirect_to project_settings_integrations_path(@project)
+ redirect_to action: :index
end
def edit
@@ -33,7 +34,7 @@ class Projects::HooksController < Projects::ApplicationController
def update
if hook.update(hook_params)
flash[:notice] = _('Hook was successfully updated.')
- redirect_to project_settings_integrations_path(@project)
+ redirect_to action: :index
else
render 'edit'
end
@@ -44,13 +45,13 @@ class Projects::HooksController < Projects::ApplicationController
set_hook_execution_notice(result)
- redirect_back_or_default(default: { action: 'index' })
+ redirect_back_or_default(default: { action: :index })
end
def destroy
hook.destroy
- redirect_to project_settings_integrations_path(@project), status: :found
+ redirect_to action: :index, status: :found
end
private
diff --git a/app/controllers/projects/import/jira_controller.rb b/app/controllers/projects/import/jira_controller.rb
new file mode 100644
index 00000000000..c74c180fa20
--- /dev/null
+++ b/app/controllers/projects/import/jira_controller.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+module Projects
+ module Import
+ class JiraController < Projects::ApplicationController
+ before_action :jira_import_enabled?
+ before_action :jira_integration_configured?
+
+ def show
+ unless @project.import_state&.in_progress?
+ jira_client = @project.jira_service.client
+ @jira_projects = jira_client.Project.all.map { |p| ["#{p.name} (#{p.key})", p.key] }
+ end
+
+ flash[:notice] = _("Import %{status}") % { status: @project.import_state.status } if @project.import_state.present? && !@project.import_state.none?
+ end
+
+ def import
+ import_state = @project.import_state || @project.create_import_state
+
+ schedule_import(jira_import_params) unless import_state.in_progress?
+
+ redirect_to project_import_jira_path(@project)
+ end
+
+ private
+
+ def jira_import_enabled?
+ return if Feature.enabled?(:jira_issue_import, @project)
+
+ redirect_to project_issues_path(@project)
+ end
+
+ def jira_integration_configured?
+ return if @project.jira_service
+
+ flash[:notice] = _("Configure the Jira integration first on your project's %{strong_start} Settings > Integrations > Jira%{strong_end} page." %
+ { strong_start: '<strong>'.html_safe, strong_end: '</strong>'.html_safe })
+ redirect_to project_issues_path(@project)
+ end
+
+ def schedule_import(params)
+ import_data = @project.create_or_update_import_data(data: {}).becomes(JiraImportData)
+
+ import_data << JiraImportData::JiraProjectDetails.new(
+ params[:jira_project_key],
+ Time.now.strftime('%Y-%m-%d %H:%M:%S'),
+ { user_id: current_user.id, name: current_user.name }
+ )
+
+ @project.import_type = 'jira'
+ @project.import_state.schedule if @project.save
+ end
+
+ def jira_import_params
+ params.permit(:jira_project_key)
+ end
+ end
+ end
+end
diff --git a/app/controllers/projects/issues_controller.rb b/app/controllers/projects/issues_controller.rb
index b14a1179d46..5ddc60707d5 100644
--- a/app/controllers/projects/issues_controller.rb
+++ b/app/controllers/projects/issues_controller.rb
@@ -44,6 +44,7 @@ class Projects::IssuesController < Projects::ApplicationController
before_action do
push_frontend_feature_flag(:vue_issuable_sidebar, project.group)
+ push_frontend_feature_flag(:save_issuable_health_status, project.group)
end
around_action :allow_gitaly_ref_name_caching, only: [:discussions]
diff --git a/app/controllers/projects/jobs_controller.rb b/app/controllers/projects/jobs_controller.rb
index cb473d6ee96..e0457925b34 100644
--- a/app/controllers/projects/jobs_controller.rb
+++ b/app/controllers/projects/jobs_controller.rb
@@ -19,10 +19,10 @@ class Projects::JobsController < Projects::ApplicationController
def index
# We need all builds for tabs counters
- @all_builds = JobsFinder.new(current_user: current_user, project: @project).execute
+ @all_builds = Ci::JobsFinder.new(current_user: current_user, project: @project).execute
@scope = params[:scope]
- @builds = JobsFinder.new(current_user: current_user, project: @project, params: params).execute
+ @builds = Ci::JobsFinder.new(current_user: current_user, project: @project, params: params).execute
@builds = @builds.eager_load_everything
@builds = @builds.page(params[:page]).per(30).without_count
end
diff --git a/app/controllers/projects/logs_controller.rb b/app/controllers/projects/logs_controller.rb
new file mode 100644
index 00000000000..d598171e92c
--- /dev/null
+++ b/app/controllers/projects/logs_controller.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+module Projects
+ class LogsController < Projects::ApplicationController
+ before_action :authorize_read_pod_logs!
+ before_action :environment
+ before_action :ensure_deployments, only: %i(k8s elasticsearch)
+
+ def index
+ if environment.nil?
+ render :empty_logs
+ else
+ render :index
+ end
+ end
+
+ def k8s
+ render_logs(::PodLogs::KubernetesService, k8s_params)
+ end
+
+ def elasticsearch
+ render_logs(::PodLogs::ElasticsearchService, elasticsearch_params)
+ end
+
+ private
+
+ def render_logs(service, permitted_params)
+ ::Gitlab::UsageCounters::PodLogs.increment(project.id)
+ ::Gitlab::PollingInterval.set_header(response, interval: 3_000)
+
+ result = service.new(cluster, namespace, params: permitted_params).execute
+
+ if result.nil?
+ head :accepted
+ elsif result[:status] == :success
+ render json: result
+ else
+ render status: :bad_request, json: result
+ end
+ end
+
+ def index_params
+ params.permit(:environment_name)
+ end
+
+ def k8s_params
+ params.permit(:container_name, :pod_name)
+ end
+
+ def elasticsearch_params
+ params.permit(:container_name, :pod_name, :search, :start, :end, :cursor)
+ end
+
+ def environment
+ @environment ||= if index_params.key?(:environment_name)
+ EnvironmentsFinder.new(project, current_user, name: index_params[:environment_name]).find.first
+ else
+ project.default_environment
+ end
+ end
+
+ def cluster
+ environment.deployment_platform&.cluster
+ end
+
+ def namespace
+ environment.deployment_namespace
+ end
+
+ def ensure_deployments
+ return if cluster && namespace.present?
+
+ render status: :bad_request, json: {
+ status: :error,
+ message: _('Environment does not have deployments')
+ }
+ end
+ end
+end
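
The response mapping inside render_logs above, condensed: a nil service result means work is still in progress (202 Accepted), a :success status returns the payload, and anything else is treated as a client error. Plain-Ruby illustration with fabricated result hashes, returning [status_code, body] pairs:

    def response_for(result)
      return [202, nil] if result.nil?

      if result[:status] == :success
        [200, result]
      else
        [400, result]
      end
    end

    puts response_for(nil).first                                       # => 202
    puts response_for({ status: :success, logs: [] }).first            # => 200
    puts response_for({ status: :error, message: 'bad pod' }).first    # => 400
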
diff --git a/app/controllers/projects/merge_requests_controller.rb b/app/controllers/projects/merge_requests_controller.rb
index c5f017efe8d..e87f1728cbb 100644
--- a/app/controllers/projects/merge_requests_controller.rb
+++ b/app/controllers/projects/merge_requests_controller.rb
@@ -14,13 +14,14 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
skip_before_action :merge_request, only: [:index, :bulk_update]
before_action :whitelist_query_limiting, only: [:assign_related_issues, :update]
before_action :authorize_update_issuable!, only: [:close, :edit, :update, :remove_wip, :sort]
- before_action :authorize_read_actual_head_pipeline!, only: [:test_reports, :exposed_artifacts]
+ before_action :authorize_read_actual_head_pipeline!, only: [:test_reports, :exposed_artifacts, :coverage_reports]
before_action :set_issuables_index, only: [:index]
before_action :authenticate_user!, only: [:assign_related_issues]
before_action :check_user_can_push_to_source_branch!, only: [:rebase]
before_action only: [:show] do
push_frontend_feature_flag(:diffs_batch_load, @project, default_enabled: true)
- push_frontend_feature_flag(:single_mr_diff_view, @project)
+ push_frontend_feature_flag(:deploy_from_footer, @project, default_enabled: true)
+ push_frontend_feature_flag(:single_mr_diff_view, @project, default_enabled: true)
push_frontend_feature_flag(:suggest_pipeline) if experiment_enabled?(:suggest_pipeline)
end
@@ -62,6 +63,7 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
@issuable_sidebar = serializer.represent(@merge_request, serializer: 'sidebar')
@current_user_data = UserSerializer.new(project: @project).represent(current_user, {}, MergeRequestUserEntity).to_json
@show_whitespace_default = current_user.nil? || current_user.show_whitespace_in_diffs
+ @coverage_path = coverage_reports_project_merge_request_path(@project, @merge_request, format: :json) if @merge_request.has_coverage_reports?
set_pipeline_variables
@@ -130,6 +132,14 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
reports_response(@merge_request.compare_test_reports)
end
+ def coverage_reports
+ if @merge_request.has_coverage_reports?
+ reports_response(@merge_request.find_coverage_reports)
+ else
+ head :no_content
+ end
+ end
+
def exposed_artifacts
if @merge_request.has_exposed_artifacts?
reports_response(@merge_request.find_exposed_artifacts)
diff --git a/app/controllers/projects/milestones_controller.rb b/app/controllers/projects/milestones_controller.rb
index f6f61b6e5fb..d301a5be391 100644
--- a/app/controllers/projects/milestones_controller.rb
+++ b/app/controllers/projects/milestones_controller.rb
@@ -24,7 +24,6 @@ class Projects::MilestonesController < Projects::ApplicationController
respond_to do |format|
format.html do
- @project_namespace = @project.namespace.becomes(Namespace)
# We need to show group milestones in the JSON response
# so that people can filter by and assign group milestones,
# but we don't need to show them on the project milestones page itself.
@@ -47,8 +46,6 @@ class Projects::MilestonesController < Projects::ApplicationController
end
def show
- @project_namespace = @project.namespace.becomes(Namespace)
-
respond_to do |format|
format.html
end
diff --git a/app/controllers/projects/performance_monitoring/dashboards_controller.rb b/app/controllers/projects/performance_monitoring/dashboards_controller.rb
index 2d872b78096..ec5a33f5dd6 100644
--- a/app/controllers/projects/performance_monitoring/dashboards_controller.rb
+++ b/app/controllers/projects/performance_monitoring/dashboards_controller.rb
@@ -22,6 +22,16 @@ module Projects
end
end
+ def update
+ result = ::Metrics::Dashboard::UpdateDashboardService.new(project, current_user, dashboard_params.merge(file_content_params)).execute
+
+ if result[:status] == :success
+ respond_update_success(result)
+ else
+ respond_error(result)
+ end
+ end
+
private
def respond_success(result)
@@ -43,6 +53,19 @@ module Projects
flash[:notice] = message.html_safe
end
+ def respond_update_success(result)
+ set_web_ide_link_update_notice(result.dig(:dashboard, :path))
+ respond_to do |format|
+ format.json { render status: result.delete(:http_status), json: result }
+ end
+ end
+
+ def set_web_ide_link_update_notice(new_dashboard_path)
+ web_ide_link_start = "<a href=\"#{ide_edit_path(project, redirect_safe_branch_name, new_dashboard_path)}\">"
+ message = _("Your dashboard has been updated. You can %{web_ide_link_start}edit it here%{web_ide_link_end}.") % { web_ide_link_start: web_ide_link_start, web_ide_link_end: "</a>" }
+ flash[:notice] = message.html_safe
+ end
+
def validate_required_params!
params.require(%i(branch file_name dashboard commit_message))
end
@@ -54,6 +77,31 @@ module Projects
def dashboard_params
params.permit(%i(branch file_name dashboard commit_message)).to_h
end
+
+ def file_content_params
+ params.permit(
+ file_content: [
+ :dashboard,
+ panel_groups: [
+ :group,
+ :priority,
+ panels: [
+ :type,
+ :title,
+ :y_label,
+ :weight,
+ metrics: [
+ :id,
+ :unit,
+ :label,
+ :query,
+ :query_range
+ ]
+ ]
+ ]
+ ]
+ )
+ end
end
end
end
diff --git a/app/controllers/projects/pipeline_schedules_controller.rb b/app/controllers/projects/pipeline_schedules_controller.rb
index ead839e8441..e7e8a900060 100644
--- a/app/controllers/projects/pipeline_schedules_controller.rb
+++ b/app/controllers/projects/pipeline_schedules_controller.rb
@@ -13,8 +13,8 @@ class Projects::PipelineSchedulesController < Projects::ApplicationController
# rubocop: disable CodeReuse/ActiveRecord
def index
@scope = params[:scope]
- @all_schedules = PipelineSchedulesFinder.new(@project).execute
- @schedules = PipelineSchedulesFinder.new(@project).execute(scope: params[:scope])
+ @all_schedules = Ci::PipelineSchedulesFinder.new(@project).execute
+ @schedules = Ci::PipelineSchedulesFinder.new(@project).execute(scope: params[:scope])
.includes(:last_pipeline)
end
# rubocop: enable CodeReuse/ActiveRecord
diff --git a/app/controllers/projects/pipelines_controller.rb b/app/controllers/projects/pipelines_controller.rb
index 6d902e099d9..ee102248cb7 100644
--- a/app/controllers/projects/pipelines_controller.rb
+++ b/app/controllers/projects/pipelines_controller.rb
@@ -22,7 +22,7 @@ class Projects::PipelinesController < Projects::ApplicationController
def index
@scope = params[:scope]
- @pipelines = PipelinesFinder
+ @pipelines = Ci::PipelinesFinder
.new(project, current_user, scope: @scope)
.execute
.page(params[:page])
@@ -60,10 +60,10 @@ class Projects::PipelinesController < Projects::ApplicationController
.new(project, current_user, create_params)
.execute(:web, ignore_skip_ci: true, save_on_errors: false)
- if @pipeline.persisted?
+ if @pipeline.created_successfully?
redirect_to project_pipeline_path(project, @pipeline)
else
- render 'new'
+ render 'new', status: :bad_request
end
end
@@ -251,7 +251,7 @@ class Projects::PipelinesController < Projects::ApplicationController
end
def limited_pipelines_count(project, scope = nil)
- finder = PipelinesFinder.new(project, current_user, scope: scope)
+ finder = Ci::PipelinesFinder.new(project, current_user, scope: scope)
view_context.limited_counter_with_delimiter(finder.execute)
end
diff --git a/app/controllers/projects/registry/repositories_controller.rb b/app/controllers/projects/registry/repositories_controller.rb
index e524d1c29a2..d6d993f427d 100644
--- a/app/controllers/projects/registry/repositories_controller.rb
+++ b/app/controllers/projects/registry/repositories_controller.rb
@@ -17,7 +17,7 @@ module Projects
serializer = ContainerRepositoriesSerializer
.new(project: project, current_user: current_user)
- if Feature.enabled?(:vue_container_registry_explorer)
+ if Feature.enabled?(:vue_container_registry_explorer, project.group)
render json: serializer.with_pagination(request, response).represent(@images)
else
render json: serializer.represent(@images)
diff --git a/app/controllers/projects/releases_controller.rb b/app/controllers/projects/releases_controller.rb
index 7ad841d645d..7d6b38dd243 100644
--- a/app/controllers/projects/releases_controller.rb
+++ b/app/controllers/projects/releases_controller.rb
@@ -3,12 +3,12 @@
class Projects::ReleasesController < Projects::ApplicationController
# Authorize
before_action :require_non_empty_project, except: [:index]
- before_action :release, only: %i[edit show update]
+ before_action :release, only: %i[edit show update downloads]
before_action :authorize_read_release!
before_action do
- push_frontend_feature_flag(:release_issue_summary, project)
+ push_frontend_feature_flag(:release_issue_summary, project, default_enabled: true)
push_frontend_feature_flag(:release_evidence_collection, project, default_enabled: true)
- push_frontend_feature_flag(:release_show_page, project)
+ push_frontend_feature_flag(:release_show_page, project, default_enabled: true)
end
before_action :authorize_update_release!, only: %i[edit update]
before_action :authorize_read_release_evidence!, only: [:evidence]
@@ -31,7 +31,7 @@ class Projects::ReleasesController < Projects::ApplicationController
end
def show
- return render_404 unless Feature.enabled?(:release_show_page, project)
+ return render_404 unless Feature.enabled?(:release_show_page, project, default_enabled: true)
respond_to do |format|
format.html do
@@ -40,6 +40,10 @@ class Projects::ReleasesController < Projects::ApplicationController
end
end
+ def downloads
+ redirect_to link.url
+ end
+
protected
def releases
@@ -69,6 +73,14 @@ class Projects::ReleasesController < Projects::ApplicationController
@release ||= project.releases.find_by_tag!(sanitized_tag_name)
end
+ def link
+ release.links.find_by_filepath!(sanitized_filepath)
+ end
+
+ def sanitized_filepath
+ CGI.unescape(params[:filepath])
+ end
+
def sanitized_tag_name
CGI.unescape(params[:tag])
end
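
The new downloads action looks a release link up by filepath after CGI-unescaping the routing parameter, mirroring what sanitized_tag_name already does for tags. Stdlib behaviour, for reference (the filepath below is an invented example):

    require 'cgi'

    puts CGI.unescape('%2Fbinaries%2Fawesome-app-v1.0.dmg')  # => "/binaries/awesome-app-v1.0.dmg"
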
diff --git a/app/controllers/projects/repositories_controller.rb b/app/controllers/projects/repositories_controller.rb
index d0fb814948f..1cb9e1d2c9b 100644
--- a/app/controllers/projects/repositories_controller.rb
+++ b/app/controllers/projects/repositories_controller.rb
@@ -3,11 +3,13 @@
class Projects::RepositoriesController < Projects::ApplicationController
include ExtractsPath
include StaticObjectExternalStorage
+ include Gitlab::RateLimitHelpers
prepend_before_action(only: [:archive]) { authenticate_sessionless_user!(:archive) }
# Authorize
before_action :require_non_empty_project, except: :create
+ before_action :archive_rate_limit!, only: :archive
before_action :assign_archive_vars, only: :archive
before_action :assign_append_sha, only: :archive
before_action :authorize_download_code!
@@ -34,6 +36,12 @@ class Projects::RepositoriesController < Projects::ApplicationController
private
+ def archive_rate_limit!
+ if archive_rate_limit_reached?(current_user, @project)
+ render plain: ::Gitlab::RateLimitHelpers::ARCHIVE_RATE_LIMIT_REACHED_MESSAGE, status: :too_many_requests
+ end
+ end
+
def repo_params
@repo_params ||= { ref: @ref, path: params[:path], format: params[:format], append_sha: @append_sha }
end
diff --git a/app/controllers/projects/services_controller.rb b/app/controllers/projects/services_controller.rb
index c916140211e..92c6ce324f7 100644
--- a/app/controllers/projects/services_controller.rb
+++ b/app/controllers/projects/services_controller.rb
@@ -52,28 +52,26 @@ class Projects::ServicesController < Projects::ApplicationController
private
def service_test_response
- if @service.update(service_params[:service])
- data = @service.test_data(project, current_user)
- outcome = @service.test(data)
-
- if outcome[:success]
- {}
- else
- { error: true, message: _('Test failed.'), service_response: outcome[:result].to_s, test_failed: true }
- end
- else
- { error: true, message: _('Validations failed.'), service_response: @service.errors.full_messages.join(','), test_failed: false }
+ unless @service.update(service_params[:service])
+ return { error: true, message: _('Validations failed.'), service_response: @service.errors.full_messages.join(','), test_failed: false }
+ end
+
+ data = @service.test_data(project, current_user)
+ outcome = @service.test(data)
+
+ unless outcome[:success]
+ return { error: true, message: _('Test failed.'), service_response: outcome[:result].to_s, test_failed: true }
end
+
+ {}
rescue Gitlab::HTTP::BlockedUrlError => e
{ error: true, message: _('Test failed.'), service_response: e.message, test_failed: true }
end
def success_message
- if @service.active?
- _("%{service_title} activated.") % { service_title: @service.title }
- else
- _("%{service_title} settings saved, but not activated.") % { service_title: @service.title }
- end
+ message = @service.active? ? _('activated') : _('settings saved, but not activated')
+
+ _('%{service_title} %{message}.') % { service_title: @service.title, message: message }
end
def service
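
The service_test_response rewrite above swaps nested if/else for guard clauses: each failure returns its error hash immediately and the happy path falls through to an empty hash. The shape of that flow, with booleans standing in for the update and test outcomes:

    def test_response(update_ok:, test_ok:)
      return { error: true, message: 'Validations failed.' } unless update_ok
      return { error: true, message: 'Test failed.' }        unless test_ok

      {}
    end

    puts test_response(update_ok: false, test_ok: true).inspect   # validation error hash
    puts test_response(update_ok: true,  test_ok: false).inspect  # test failure hash
    puts test_response(update_ok: true,  test_ok: true).inspect   # => {}
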
diff --git a/app/controllers/projects/settings/ci_cd_controller.rb b/app/controllers/projects/settings/ci_cd_controller.rb
index bf0c2d885f8..43c798bfc6e 100644
--- a/app/controllers/projects/settings/ci_cd_controller.rb
+++ b/app/controllers/projects/settings/ci_cd_controller.rb
@@ -5,6 +5,9 @@ module Projects
class CiCdController < Projects::ApplicationController
before_action :authorize_admin_pipeline!
before_action :define_variables
+ before_action do
+ push_frontend_feature_flag(:new_variables_ui, @project, default_enabled: true)
+ end
def show
end
@@ -43,6 +46,16 @@ module Projects
redirect_to namespace_project_settings_ci_cd_path
end
+ def create_deploy_token
+ @new_deploy_token = Projects::DeployTokens::CreateService.new(@project, current_user, deploy_token_params).execute
+
+ if @new_deploy_token.persisted?
+ flash.now[:notice] = s_('DeployTokens|Your new project deploy token has been created.')
+ end
+
+ render 'show'
+ end
+
private
def update_params
@@ -53,7 +66,7 @@ module Projects
[
:runners_token, :builds_enabled, :build_allow_git_fetch,
:build_timeout_human_readable, :build_coverage_regex, :public_builds,
- :auto_cancel_pending_pipelines, :ci_config_path,
+ :auto_cancel_pending_pipelines, :forward_deployment_enabled, :ci_config_path,
auto_devops_attributes: [:id, :domain, :enabled, :deploy_strategy],
ci_cd_settings_attributes: [:default_git_depth]
].tap do |list|
@@ -61,6 +74,10 @@ module Projects
end
end
+ def deploy_token_params
+ params.require(:deploy_token).permit(:name, :expires_at, :read_repository, :read_registry, :username)
+ end
+
def run_autodevops_pipeline(service)
return unless service.run_auto_devops_pipeline?
@@ -80,9 +97,11 @@ module Projects
def define_variables
define_runners_variables
define_ci_variables
+ define_deploy_token_variables
define_triggers_variables
define_badges_variables
define_auto_devops_variables
+ define_deploy_keys
end
def define_runners_variables
@@ -129,6 +148,16 @@ module Projects
def define_auto_devops_variables
@auto_devops = @project.auto_devops || ProjectAutoDevops.new
end
+
+ def define_deploy_token_variables
+ @deploy_tokens = @project.deploy_tokens.active
+
+ @new_deploy_token = DeployToken.new
+ end
+
+ def define_deploy_keys
+ @deploy_keys = DeployKeysPresenter.new(@project, current_user: current_user)
+ end
end
end
end
diff --git a/app/controllers/projects/settings/integrations_controller.rb b/app/controllers/projects/settings/integrations_controller.rb
index 0c5cf01d912..a4a53676ec7 100644
--- a/app/controllers/projects/settings/integrations_controller.rb
+++ b/app/controllers/projects/settings/integrations_controller.rb
@@ -9,10 +9,6 @@ module Projects
layout "project_settings"
def show
- @hooks = @project.hooks
- @hook = ProjectHook.new
-
- # Services
@services = @project.find_or_initialize_services(exceptions: service_exceptions)
end
diff --git a/app/controllers/projects/settings/operations_controller.rb b/app/controllers/projects/settings/operations_controller.rb
index 12b4f9ac56c..164cd5b9384 100644
--- a/app/controllers/projects/settings/operations_controller.rb
+++ b/app/controllers/projects/settings/operations_controller.rb
@@ -8,6 +8,7 @@ module Projects
helper_method :error_tracking_setting
def show
+ render locals: { prometheus_service: prometheus_service }
end
def update
@@ -28,6 +29,10 @@ module Projects
private
+ def prometheus_service
+ project.find_or_initialize_service(::PrometheusService.to_param)
+ end
+
def render_update_response(result)
respond_to do |format|
format.html do
diff --git a/app/controllers/projects/settings/repository_controller.rb b/app/controllers/projects/settings/repository_controller.rb
index 63f5d5073a7..a1f88c73649 100644
--- a/app/controllers/projects/settings/repository_controller.rb
+++ b/app/controllers/projects/settings/repository_controller.rb
@@ -10,16 +10,6 @@ module Projects
render_show
end
- def create_deploy_token
- @new_deploy_token = DeployTokens::CreateService.new(@project, current_user, deploy_token_params).execute
-
- if @new_deploy_token.persisted?
- flash.now[:notice] = s_('DeployTokens|Your new project deploy token has been created.')
- end
-
- render_show
- end
-
def cleanup
cleanup_params = params.require(:project).permit(:bfg_object_map)
result = Projects::UpdateService.new(project, current_user, cleanup_params).execute
@@ -37,10 +27,6 @@ module Projects
private
def render_show
- @deploy_keys = DeployKeysPresenter.new(@project, current_user: current_user)
- @deploy_tokens = @project.deploy_tokens.active
-
- define_deploy_token
define_protected_refs
remote_mirror
@@ -93,14 +79,6 @@ module Projects
gon.push(protectable_branches_for_dropdown)
gon.push(access_levels_options)
end
-
- def define_deploy_token
- @new_deploy_token ||= DeployToken.new
- end
-
- def deploy_token_params
- params.require(:deploy_token).permit(:name, :expires_at, :read_repository, :read_registry, :username)
- end
end
end
end
diff --git a/app/controllers/projects/snippets_controller.rb b/app/controllers/projects/snippets_controller.rb
index b9c7468890b..48cd42347fc 100644
--- a/app/controllers/projects/snippets_controller.rb
+++ b/app/controllers/projects/snippets_controller.rb
@@ -52,8 +52,15 @@ class Projects::SnippetsController < Projects::ApplicationController
create_params = snippet_params.merge(spammable_params)
service_response = Snippets::CreateService.new(project, current_user, create_params).execute
@snippet = service_response.payload[:snippet]
+ repository_operation_error = service_response.error? && !@snippet.persisted? && @snippet.valid?
- recaptcha_check_with_fallback { render :new }
+ if repository_operation_error
+ flash.now[:alert] = service_response.message
+
+ render :new
+ else
+ recaptcha_check_with_fallback { render :new }
+ end
end
def update
@@ -62,11 +69,10 @@ class Projects::SnippetsController < Projects::ApplicationController
service_response = Snippets::UpdateService.new(project, current_user, update_params).execute(@snippet)
@snippet = service_response.payload[:snippet]
- recaptcha_check_with_fallback { render :edit }
+ check_repository_error
end
def show
- blob = @snippet.blob
conditionally_expand_blob(blob)
respond_to do |format|
@@ -115,6 +121,16 @@ class Projects::SnippetsController < Projects::ApplicationController
alias_method :awardable, :snippet
alias_method :spammable, :snippet
+ def blob
+ return unless snippet
+
+ @blob ||= if Feature.enabled?(:version_snippets, current_user) && !snippet.repository.empty?
+ snippet.blobs.first
+ else
+ snippet.blob
+ end
+ end
+
def spammable_path
project_snippet_path(@project, @snippet)
end
diff --git a/app/controllers/projects/tags/releases_controller.rb b/app/controllers/projects/tags/releases_controller.rb
index 5e4c601a693..c1f4cbce054 100644
--- a/app/controllers/projects/tags/releases_controller.rb
+++ b/app/controllers/projects/tags/releases_controller.rb
@@ -12,11 +12,7 @@ class Projects::Tags::ReleasesController < Projects::ApplicationController
end
def update
- if release_params[:description].present?
- release.update(release_params)
- else
- release.destroy
- end
+ release.update(release_params) if release.persisted? || release_params[:description].present?
redirect_to project_tag_path(@project, tag.name)
end
diff --git a/app/controllers/projects_controller.rb b/app/controllers/projects_controller.rb
index 31b86946ca2..045aa38230c 100644
--- a/app/controllers/projects_controller.rb
+++ b/app/controllers/projects_controller.rb
@@ -50,7 +50,7 @@ class ProjectsController < Projects::ApplicationController
# rubocop: enable CodeReuse/ActiveRecord
def edit
- @badge_api_endpoint = expose_url(api_v4_projects_badges_path(id: @project.id))
+ @badge_api_endpoint = expose_path(api_v4_projects_badges_path(id: @project.id))
render_edit
end
diff --git a/app/controllers/registrations_controller.rb b/app/controllers/registrations_controller.rb
index 06751dfbec8..a6c5a6d8526 100644
--- a/app/controllers/registrations_controller.rb
+++ b/app/controllers/registrations_controller.rb
@@ -4,7 +4,7 @@ class RegistrationsController < Devise::RegistrationsController
include Recaptcha::Verify
include AcceptsPendingInvitations
include RecaptchaExperimentHelper
- include InvisibleCaptcha
+ include InvisibleCaptchaOnSignup
layout :choose_layout
diff --git a/app/controllers/repositories/git_http_client_controller.rb b/app/controllers/repositories/git_http_client_controller.rb
index 76eb7c67205..d03daa406cf 100644
--- a/app/controllers/repositories/git_http_client_controller.rb
+++ b/app/controllers/repositories/git_http_client_controller.rb
@@ -6,7 +6,7 @@ module Repositories
include KerberosSpnegoHelper
include Gitlab::Utils::StrongMemoize
- attr_reader :authentication_result, :redirected_path
+ attr_reader :authentication_result, :redirected_path, :container
delegate :actor, :authentication_abilities, to: :authentication_result, allow_nil: true
delegate :type, to: :authentication_result, allow_nil: true, prefix: :auth_result
@@ -81,7 +81,7 @@ module Repositories
end
def parse_repo_path
- @project, @repo_type, @redirected_path = Gitlab::RepoPath.parse("#{params[:namespace_id]}/#{params[:repository_id]}")
+ @container, @project, @repo_type, @redirected_path = Gitlab::RepoPath.parse("#{params[:namespace_id]}/#{params[:repository_id]}")
end
def render_missing_personal_access_token
@@ -93,7 +93,7 @@ module Repositories
def repository
strong_memoize(:repository) do
- repo_type.repository_for(project)
+ repo_type.repository_for(container)
end
end
@@ -117,7 +117,8 @@ module Repositories
def http_download_allowed?
Gitlab::ProtocolAccess.allowed?('http') &&
download_request? &&
- project && Guest.can?(:download_code, project)
+ container &&
+ Guest.can?(repo_type.guest_read_ability, container)
end
end
end
diff --git a/app/controllers/repositories/git_http_controller.rb b/app/controllers/repositories/git_http_controller.rb
index 75c79881264..35ea77183b8 100644
--- a/app/controllers/repositories/git_http_controller.rb
+++ b/app/controllers/repositories/git_http_controller.rb
@@ -7,11 +7,13 @@ module Repositories
before_action :access_check
prepend_before_action :deny_head_requests, only: [:info_refs]
- rescue_from Gitlab::GitAccess::UnauthorizedError, with: :render_403_with_exception
+ rescue_from Gitlab::GitAccess::ForbiddenError, with: :render_403_with_exception
rescue_from Gitlab::GitAccess::NotFoundError, with: :render_404_with_exception
rescue_from Gitlab::GitAccess::ProjectCreationError, with: :render_422_with_exception
rescue_from Gitlab::GitAccess::TimeoutError, with: :render_503_with_exception
+ before_action :snippet_request_allowed?
+
# GET /foo/bar.git/info/refs?service=git-upload-pack (git pull)
# GET /foo/bar.git/info/refs?service=git-receive-pack (git push)
def info_refs
@@ -84,10 +86,10 @@ module Repositories
end
def access
- @access ||= access_klass.new(access_actor, project, 'http',
+ @access ||= access_klass.new(access_actor, container, 'http',
authentication_abilities: authentication_abilities,
namespace_path: params[:namespace_id],
- project_path: project_path,
+ repository_path: repository_path,
redirected_path: redirected_path,
auth_result_type: auth_result_type)
end
@@ -99,20 +101,29 @@ module Repositories
def access_check
access.check(git_command, Gitlab::GitAccess::ANY)
- @project ||= access.project
+
+ if repo_type.project? && !container
+ @project = @container = access.project
+ end
end
def access_klass
@access_klass ||= repo_type.access_checker_class
end
- def project_path
- @project_path ||= params[:repository_id].sub(/\.git$/, '')
+ def repository_path
+ @repository_path ||= params[:repository_id].sub(/\.git$/, '')
end
def log_user_activity
Users::ActivityService.new(user).execute
end
+
+ def snippet_request_allowed?
+ if repo_type.snippet? && Feature.disabled?(:version_snippets, user)
+ render plain: 'The project you were looking for could not be found.', status: :not_found
+ end
+ end
end
end
diff --git a/app/controllers/repositories/lfs_api_controller.rb b/app/controllers/repositories/lfs_api_controller.rb
index b1e0d1848d7..f93038f455e 100644
--- a/app/controllers/repositories/lfs_api_controller.rb
+++ b/app/controllers/repositories/lfs_api_controller.rb
@@ -45,15 +45,9 @@ module Repositories
params[:operation] == 'upload'
end
- # rubocop: disable CodeReuse/ActiveRecord
- def existing_oids
- @existing_oids ||= begin
- project.all_lfs_objects.where(oid: objects.map { |o| o['oid'].to_s }).pluck(:oid)
- end
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
def download_objects!
+ existing_oids = project.all_lfs_objects_oids(oids: objects_oids)
+
objects.each do |object|
if existing_oids.include?(object[:oid])
object[:actions] = download_actions(object)
@@ -68,13 +62,17 @@ module Repositories
}
end
end
+
objects
end
def upload_objects!
+ existing_oids = project.lfs_objects_oids(oids: objects_oids)
+
objects.each do |object|
object[:actions] = upload_actions(object) unless existing_oids.include?(object[:oid])
end
+
objects
end
diff --git a/app/controllers/search_controller.rb b/app/controllers/search_controller.rb
index 04d2b3068da..d1e15a72350 100644
--- a/app/controllers/search_controller.rb
+++ b/app/controllers/search_controller.rb
@@ -5,6 +5,7 @@ class SearchController < ApplicationController
include SearchHelper
include RendersCommits
+ before_action :override_snippet_scope, only: :show
around_action :allow_gitaly_ref_name_caching
skip_before_action :authenticate_user!
@@ -103,4 +104,14 @@ class SearchController < ApplicationController
Gitlab::UsageDataCounters::SearchCounter.increment_navbar_searches_count
end
+
+ # Disallow web snippet_blobs search as we migrate snippet
+ # from database-backed storage to git repository-based,
+ # and searching across multiple git repositories is not feasible.
+ #
+ # TODO: after 13.0 refactor this into Search::SnippetService
+ # See https://gitlab.com/gitlab-org/gitlab/issues/208882
+ def override_snippet_scope
+ params[:scope] = 'snippet_titles' if params[:snippets] == 'true'
+ end
end
diff --git a/app/controllers/sessions_controller.rb b/app/controllers/sessions_controller.rb
index c29e9d3843b..2c87c3c890f 100644
--- a/app/controllers/sessions_controller.rb
+++ b/app/controllers/sessions_controller.rb
@@ -39,7 +39,7 @@ class SessionsController < Devise::SessionsController
# would cause the CSRF token to be cleared and then
# RequestForgeryProtection#verify_authenticity_token would fail because of
# token mismatch.
- protect_from_forgery with: :exception, prepend: true
+ protect_from_forgery with: :exception, prepend: true, except: :destroy
CAPTCHA_HEADER = 'X-GitLab-Show-Login-Captcha'
MAX_FAILED_LOGIN_ATTEMPTS = 5
@@ -271,8 +271,8 @@ class SessionsController < Devise::SessionsController
def ldap_servers
@ldap_servers ||= begin
- if Gitlab::Auth::LDAP::Config.sign_in_enabled?
- Gitlab::Auth::LDAP::Config.available_servers
+ if Gitlab::Auth::Ldap::Config.sign_in_enabled?
+ Gitlab::Auth::Ldap::Config.available_servers
else
[]
end
diff --git a/app/controllers/snippets_controller.rb b/app/controllers/snippets_controller.rb
index b6ad5fd02b0..070391c4b51 100644
--- a/app/controllers/snippets_controller.rb
+++ b/app/controllers/snippets_controller.rb
@@ -52,10 +52,17 @@ class SnippetsController < ApplicationController
create_params = snippet_params.merge(spammable_params)
service_response = Snippets::CreateService.new(nil, current_user, create_params).execute
@snippet = service_response.payload[:snippet]
+ repository_operation_error = service_response.error? && !@snippet.persisted? && @snippet.valid?
- move_temporary_files if @snippet.valid? && params[:files]
+ if repository_operation_error
+ flash.now[:alert] = service_response.message
- recaptcha_check_with_fallback { render :new }
+ render :new
+ else
+ move_temporary_files if @snippet.valid? && params[:files]
+
+ recaptcha_check_with_fallback { render :new }
+ end
end
def update
@@ -64,21 +71,19 @@ class SnippetsController < ApplicationController
service_response = Snippets::UpdateService.new(nil, current_user, update_params).execute(@snippet)
@snippet = service_response.payload[:snippet]
- recaptcha_check_with_fallback { render :edit }
+ check_repository_error
end
def show
- blob = @snippet.blob
conditionally_expand_blob(blob)
- @note = Note.new(noteable: @snippet)
- @noteable = @snippet
-
- @discussions = @snippet.discussions
- @notes = prepare_notes_for_rendering(@discussions.flat_map(&:notes), @noteable)
-
respond_to do |format|
format.html do
+ @note = Note.new(noteable: @snippet)
+ @noteable = @snippet
+
+ @discussions = @snippet.discussions
+ @notes = prepare_notes_for_rendering(@discussions.flat_map(&:notes), @noteable)
render 'show'
end
@@ -121,6 +126,16 @@ class SnippetsController < ApplicationController
alias_method :awardable, :snippet
alias_method :spammable, :snippet
+ def blob
+ return unless snippet
+
+ @blob ||= if Feature.enabled?(:version_snippets, current_user) && !snippet.repository.empty?
+ snippet.blobs.first
+ else
+ snippet.blob
+ end
+ end
+
def spammable_path
snippet_path(@snippet)
end
diff --git a/app/controllers/users/terms_controller.rb b/app/controllers/users/terms_controller.rb
index 3c16d934b4d..231e449f733 100644
--- a/app/controllers/users/terms_controller.rb
+++ b/app/controllers/users/terms_controller.rb
@@ -4,7 +4,7 @@ module Users
class TermsController < ApplicationController
include InternalRedirect
- skip_before_action :authenticate_user!
+ skip_before_action :authenticate_user!, only: [:index]
skip_before_action :enforce_terms!
skip_before_action :check_password_expiration
skip_before_action :check_two_factor_requirement
diff --git a/app/finders/award_emojis_finder.rb b/app/finders/award_emojis_finder.rb
index 7320e035409..7882beb64bf 100644
--- a/app/finders/award_emojis_finder.rb
+++ b/app/finders/award_emojis_finder.rb
@@ -41,7 +41,7 @@ class AwardEmojisFinder
def validate_name_param
return unless params[:name]
- raise ArgumentError, 'Invalid name param' unless params[:name].in?(Gitlab::Emoji.emojis_names)
+ raise ArgumentError, 'Invalid name param' unless params[:name].to_s.in?(Gitlab::Emoji.emojis_names)
end
def validate_awarded_by_param
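
The one-character fix above (.to_s) matters because the emoji name list holds strings, so a symbol passed by an internal caller would never match without the conversion. Plain Ruby with include? standing in for ActiveSupport's in?, and a tiny stand-in list:

    emoji_names = %w(thumbsup thumbsdown)

    puts emoji_names.include?(:thumbsup)       # => false (symbol never matches)
    puts emoji_names.include?(:thumbsup.to_s)  # => true
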
diff --git a/app/finders/ci/jobs_finder.rb b/app/finders/ci/jobs_finder.rb
new file mode 100644
index 00000000000..2169bf8c53e
--- /dev/null
+++ b/app/finders/ci/jobs_finder.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+module Ci
+ class JobsFinder
+ include Gitlab::Allowable
+
+ def initialize(current_user:, project: nil, params: {})
+ @current_user = current_user
+ @project = project
+ @params = params
+ end
+
+ def execute
+ builds = init_collection.order_id_desc
+ filter_by_scope(builds)
+ rescue Gitlab::Access::AccessDeniedError
+ Ci::Build.none
+ end
+
+ private
+
+ attr_reader :current_user, :project, :params
+
+ def init_collection
+ project ? project_builds : all_builds
+ end
+
+ def all_builds
+ raise Gitlab::Access::AccessDeniedError unless current_user&.admin?
+
+ Ci::Build.all
+ end
+
+ def project_builds
+ raise Gitlab::Access::AccessDeniedError unless can?(current_user, :read_build, project)
+
+ project.builds.relevant
+ end
+
+ def filter_by_scope(builds)
+ case params[:scope]
+ when 'pending'
+ builds.pending.reverse_order
+ when 'running'
+ builds.running.reverse_order
+ when 'finished'
+ builds.finished
+ else
+ builds
+ end
+ end
+ end
+end
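
A minimal usage sketch for the namespaced finder above (not part of the patch; current_user and project are assumed to be in scope, e.g. in a controller or API endpoint):

    # Instance-wide listing; non-admins fall through the rescue to Ci::Build.none.
    Ci::JobsFinder.new(current_user: current_user).execute

    # Project-scoped listing, narrowed by the optional :scope parameter.
    Ci::JobsFinder
      .new(current_user: current_user, project: project, params: { scope: 'pending' })
      .execute
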
diff --git a/app/finders/ci/pipeline_schedules_finder.rb b/app/finders/ci/pipeline_schedules_finder.rb
new file mode 100644
index 00000000000..2544c8c3254
--- /dev/null
+++ b/app/finders/ci/pipeline_schedules_finder.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+module Ci
+ class PipelineSchedulesFinder
+ attr_reader :project, :pipeline_schedules
+
+ def initialize(project)
+ @project = project
+ @pipeline_schedules = project.pipeline_schedules
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def execute(scope: nil)
+ scoped_schedules =
+ case scope
+ when 'active'
+ pipeline_schedules.active
+ when 'inactive'
+ pipeline_schedules.inactive
+ else
+ pipeline_schedules
+ end
+
+ scoped_schedules.order(id: :desc)
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+ end
+end
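
The schedules finder keeps its scope keyword through the move into the Ci namespace; an illustrative call, assuming a project is in scope:

    Ci::PipelineSchedulesFinder.new(project).execute(scope: 'active')
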
diff --git a/app/finders/ci/pipelines_finder.rb b/app/finders/ci/pipelines_finder.rb
new file mode 100644
index 00000000000..9e71e92b456
--- /dev/null
+++ b/app/finders/ci/pipelines_finder.rb
@@ -0,0 +1,158 @@
+# frozen_string_literal: true
+
+module Ci
+ class PipelinesFinder
+ attr_reader :project, :pipelines, :params, :current_user
+
+ ALLOWED_INDEXED_COLUMNS = %w[id status ref updated_at user_id].freeze
+
+ def initialize(project, current_user, params = {})
+ @project = project
+ @current_user = current_user
+ @pipelines = project.all_pipelines
+ @params = params
+ end
+
+ def execute
+ unless Ability.allowed?(current_user, :read_pipeline, project)
+ return Ci::Pipeline.none
+ end
+
+ items = pipelines.no_child
+ items = by_scope(items)
+ items = by_status(items)
+ items = by_ref(items)
+ items = by_sha(items)
+ items = by_name(items)
+ items = by_username(items)
+ items = by_yaml_errors(items)
+ items = by_updated_at(items)
+ sort_items(items)
+ end
+
+ private
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def ids_for_ref(refs)
+ pipelines.where(ref: refs).group(:ref).select('max(id)')
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def from_ids(ids)
+ pipelines.unscoped.where(project_id: project.id, id: ids)
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def branches
+ project.repository.branch_names
+ end
+
+ def tags
+ project.repository.tag_names
+ end
+
+ def by_scope(items)
+ case params[:scope]
+ when 'running'
+ items.running
+ when 'pending'
+ items.pending
+ when 'finished'
+ items.finished
+ when 'branches'
+ from_ids(ids_for_ref(branches))
+ when 'tags'
+ from_ids(ids_for_ref(tags))
+ else
+ items
+ end
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def by_status(items)
+ return items unless HasStatus::AVAILABLE_STATUSES.include?(params[:status])
+
+ items.where(status: params[:status])
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def by_ref(items)
+ if params[:ref].present?
+ items.where(ref: params[:ref])
+ else
+ items
+ end
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def by_sha(items)
+ if params[:sha].present?
+ items.where(sha: params[:sha])
+ else
+ items
+ end
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def by_name(items)
+ if params[:name].present?
+ items.joins(:user).where(users: { name: params[:name] })
+ else
+ items
+ end
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def by_username(items)
+ if params[:username].present?
+ items.joins(:user).where(users: { username: params[:username] })
+ else
+ items
+ end
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def by_yaml_errors(items)
+ case Gitlab::Utils.to_boolean(params[:yaml_errors])
+ when true
+ items.where("yaml_errors IS NOT NULL")
+ when false
+ items.where("yaml_errors IS NULL")
+ else
+ items
+ end
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def by_updated_at(items)
+ items = items.updated_before(params[:updated_before]) if params[:updated_before].present?
+ items = items.updated_after(params[:updated_after]) if params[:updated_after].present?
+
+ items
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def sort_items(items)
+ order_by = if ALLOWED_INDEXED_COLUMNS.include?(params[:order_by])
+ params[:order_by]
+ else
+ :id
+ end
+
+ sort = if params[:sort] =~ /\A(ASC|DESC)\z/i
+ params[:sort]
+ else
+ :desc
+ end
+
+ items.order(order_by => sort)
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+ end
+end
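
A hedged example of driving the relocated pipelines finder; the parameter names mirror the filter methods above, and the values shown are illustrative only:

    params = { status: 'success', ref: 'master', order_by: 'updated_at', sort: 'asc' }
    # Returns Ci::Pipeline.none when current_user lacks :read_pipeline on the project.
    Ci::PipelinesFinder.new(project, current_user, params).execute
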
diff --git a/app/finders/ci/pipelines_for_merge_request_finder.rb b/app/finders/ci/pipelines_for_merge_request_finder.rb
new file mode 100644
index 00000000000..c01a68d6749
--- /dev/null
+++ b/app/finders/ci/pipelines_for_merge_request_finder.rb
@@ -0,0 +1,90 @@
+# frozen_string_literal: true
+
+module Ci
+ # A state object to centralize logic related to merge request pipelines
+ class PipelinesForMergeRequestFinder
+ include Gitlab::Utils::StrongMemoize
+
+ EVENT = 'merge_request_event'
+
+ def initialize(merge_request)
+ @merge_request = merge_request
+ end
+
+ attr_reader :merge_request
+
+ delegate :commit_shas, :source_project, :source_branch, to: :merge_request
+
+ def all
+ strong_memoize(:all_pipelines) do
+ next Ci::Pipeline.none unless source_project
+
+ pipelines =
+ if merge_request.persisted?
+ pipelines_using_cte
+ else
+ triggered_for_branch.for_sha(commit_shas)
+ end
+
+ sort(pipelines)
+ end
+ end
+
+ private
+
+ def pipelines_using_cte
+ cte = Gitlab::SQL::CTE.new(:shas, merge_request.all_commits.select(:sha))
+
+ source_pipelines_join = cte.table[:sha].eq(Ci::Pipeline.arel_table[:source_sha])
+ source_pipelines = filter_by(triggered_by_merge_request, cte, source_pipelines_join)
+ detached_pipelines = filter_by_sha(triggered_by_merge_request, cte)
+ pipelines_for_branch = filter_by_sha(triggered_for_branch, cte)
+
+ Ci::Pipeline.with(cte.to_arel) # rubocop: disable CodeReuse/ActiveRecord
+ .from_union([source_pipelines, detached_pipelines, pipelines_for_branch])
+ end
+
+ def filter_by_sha(pipelines, cte)
+ hex = Arel::Nodes::SqlLiteral.new("'hex'")
+ string_sha = Arel::Nodes::NamedFunction.new('encode', [cte.table[:sha], hex])
+ join_condition = string_sha.eq(Ci::Pipeline.arel_table[:sha])
+
+ filter_by(pipelines, cte, join_condition)
+ end
+
+ def filter_by(pipelines, cte, join_condition)
+ shas_table =
+ Ci::Pipeline.arel_table
+ .join(cte.table, Arel::Nodes::InnerJoin)
+ .on(join_condition)
+ .join_sources
+
+ pipelines.joins(shas_table) # rubocop: disable CodeReuse/ActiveRecord
+ end
+
+ # NOTE: this method returns only parent merge request pipelines.
+ # Child merge request pipelines have a different source.
+ def triggered_by_merge_request
+ source_project.ci_pipelines
+ .where(source: :merge_request_event, merge_request: merge_request) # rubocop: disable CodeReuse/ActiveRecord
+ end
+
+ def triggered_for_branch
+ source_project.ci_pipelines
+ .where(source: branch_pipeline_sources, ref: source_branch, tag: false) # rubocop: disable CodeReuse/ActiveRecord
+ end
+
+ def branch_pipeline_sources
+ strong_memoize(:branch_pipeline_sources) do
+ Ci::Pipeline.sources.reject { |source| source == EVENT }.values
+ end
+ end
+
+ def sort(pipelines)
+ sql = 'CASE ci_pipelines.source WHEN (?) THEN 0 ELSE 1 END, ci_pipelines.id DESC'
+ query = ApplicationRecord.send(:sanitize_sql_array, [sql, Ci::Pipeline.sources[:merge_request_event]]) # rubocop:disable GitlabSecurity/PublicSend
+
+ pipelines.order(Arel.sql(query)) # rubocop: disable CodeReuse/ActiveRecord
+ end
+ end
+end
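
A short sketch of the intended call site for the new merge request pipelines finder (hypothetical; merge_request is assumed to be a persisted MergeRequest):

    # Pipelines triggered by the merge request (matched on sha or source_sha) plus
    # branch pipelines for the same commits, with merge request pipelines sorted first.
    Ci::PipelinesForMergeRequestFinder.new(merge_request).all
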
diff --git a/app/finders/ci/runner_jobs_finder.rb b/app/finders/ci/runner_jobs_finder.rb
new file mode 100644
index 00000000000..ffcdb407e7e
--- /dev/null
+++ b/app/finders/ci/runner_jobs_finder.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+module Ci
+ class RunnerJobsFinder
+ attr_reader :runner, :params
+
+ ALLOWED_INDEXED_COLUMNS = %w[id].freeze
+
+ def initialize(runner, params = {})
+ @runner = runner
+ @params = params
+ end
+
+ def execute
+ items = @runner.builds
+ items = by_status(items)
+ sort_items(items)
+ end
+
+ private
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def by_status(items)
+ return items unless HasStatus::AVAILABLE_STATUSES.include?(params[:status])
+
+ items.where(status: params[:status])
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def sort_items(items)
+ return items unless ALLOWED_INDEXED_COLUMNS.include?(params[:order_by])
+
+ order_by = params[:order_by]
+ sort = if /\A(ASC|DESC)\z/i.match?(params[:sort])
+ params[:sort]
+ else
+ :desc
+ end
+
+ items.order(order_by => sort)
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+ end
+end
diff --git a/app/finders/fork_targets_finder.rb b/app/finders/fork_targets_finder.rb
new file mode 100644
index 00000000000..7a08273fa0d
--- /dev/null
+++ b/app/finders/fork_targets_finder.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+class ForkTargetsFinder
+ def initialize(project, user)
+ @project = project
+ @user = user
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def execute
+ ::Namespace.where(id: user.manageable_namespaces).sort_by_type
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ private
+
+ attr_reader :project, :user
+end
+
+ForkTargetsFinder.prepend_if_ee('EE::ForkTargetsFinder')
diff --git a/app/finders/jobs_finder.rb b/app/finders/jobs_finder.rb
deleted file mode 100644
index bac18e69618..00000000000
--- a/app/finders/jobs_finder.rb
+++ /dev/null
@@ -1,51 +0,0 @@
-# frozen_string_literal: true
-
-class JobsFinder
- include Gitlab::Allowable
-
- def initialize(current_user:, project: nil, params: {})
- @current_user = current_user
- @project = project
- @params = params
- end
-
- def execute
- builds = init_collection.order_id_desc
- filter_by_scope(builds)
- rescue Gitlab::Access::AccessDeniedError
- Ci::Build.none
- end
-
- private
-
- attr_reader :current_user, :project, :params
-
- def init_collection
- project ? project_builds : all_builds
- end
-
- def all_builds
- raise Gitlab::Access::AccessDeniedError unless current_user&.admin?
-
- Ci::Build.all
- end
-
- def project_builds
- raise Gitlab::Access::AccessDeniedError unless can?(current_user, :read_build, project)
-
- project.builds.relevant
- end
-
- def filter_by_scope(builds)
- case params[:scope]
- when 'pending'
- builds.pending.reverse_order
- when 'running'
- builds.running.reverse_order
- when 'finished'
- builds.finished
- else
- builds
- end
- end
-end
diff --git a/app/finders/pipeline_schedules_finder.rb b/app/finders/pipeline_schedules_finder.rb
deleted file mode 100644
index 3beee608268..00000000000
--- a/app/finders/pipeline_schedules_finder.rb
+++ /dev/null
@@ -1,26 +0,0 @@
-# frozen_string_literal: true
-
-class PipelineSchedulesFinder
- attr_reader :project, :pipeline_schedules
-
- def initialize(project)
- @project = project
- @pipeline_schedules = project.pipeline_schedules
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def execute(scope: nil)
- scoped_schedules =
- case scope
- when 'active'
- pipeline_schedules.active
- when 'inactive'
- pipeline_schedules.inactive
- else
- pipeline_schedules
- end
-
- scoped_schedules.order(id: :desc)
- end
- # rubocop: enable CodeReuse/ActiveRecord
-end
diff --git a/app/finders/pipelines_finder.rb b/app/finders/pipelines_finder.rb
deleted file mode 100644
index 0599daab564..00000000000
--- a/app/finders/pipelines_finder.rb
+++ /dev/null
@@ -1,156 +0,0 @@
-# frozen_string_literal: true
-
-class PipelinesFinder
- attr_reader :project, :pipelines, :params, :current_user
-
- ALLOWED_INDEXED_COLUMNS = %w[id status ref updated_at user_id].freeze
-
- def initialize(project, current_user, params = {})
- @project = project
- @current_user = current_user
- @pipelines = project.all_pipelines
- @params = params
- end
-
- def execute
- unless Ability.allowed?(current_user, :read_pipeline, project)
- return Ci::Pipeline.none
- end
-
- items = pipelines.no_child
- items = by_scope(items)
- items = by_status(items)
- items = by_ref(items)
- items = by_sha(items)
- items = by_name(items)
- items = by_username(items)
- items = by_yaml_errors(items)
- items = by_updated_at(items)
- sort_items(items)
- end
-
- private
-
- # rubocop: disable CodeReuse/ActiveRecord
- def ids_for_ref(refs)
- pipelines.where(ref: refs).group(:ref).select('max(id)')
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- # rubocop: disable CodeReuse/ActiveRecord
- def from_ids(ids)
- pipelines.unscoped.where(project_id: project.id, id: ids)
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- def branches
- project.repository.branch_names
- end
-
- def tags
- project.repository.tag_names
- end
-
- def by_scope(items)
- case params[:scope]
- when 'running'
- items.running
- when 'pending'
- items.pending
- when 'finished'
- items.finished
- when 'branches'
- from_ids(ids_for_ref(branches))
- when 'tags'
- from_ids(ids_for_ref(tags))
- else
- items
- end
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def by_status(items)
- return items unless HasStatus::AVAILABLE_STATUSES.include?(params[:status])
-
- items.where(status: params[:status])
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- # rubocop: disable CodeReuse/ActiveRecord
- def by_ref(items)
- if params[:ref].present?
- items.where(ref: params[:ref])
- else
- items
- end
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- # rubocop: disable CodeReuse/ActiveRecord
- def by_sha(items)
- if params[:sha].present?
- items.where(sha: params[:sha])
- else
- items
- end
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- # rubocop: disable CodeReuse/ActiveRecord
- def by_name(items)
- if params[:name].present?
- items.joins(:user).where(users: { name: params[:name] })
- else
- items
- end
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- # rubocop: disable CodeReuse/ActiveRecord
- def by_username(items)
- if params[:username].present?
- items.joins(:user).where(users: { username: params[:username] })
- else
- items
- end
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- # rubocop: disable CodeReuse/ActiveRecord
- def by_yaml_errors(items)
- case Gitlab::Utils.to_boolean(params[:yaml_errors])
- when true
- items.where("yaml_errors IS NOT NULL")
- when false
- items.where("yaml_errors IS NULL")
- else
- items
- end
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- def by_updated_at(items)
- items = items.updated_before(params[:updated_before]) if params[:updated_before].present?
- items = items.updated_after(params[:updated_after]) if params[:updated_after].present?
-
- items
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def sort_items(items)
- order_by = if ALLOWED_INDEXED_COLUMNS.include?(params[:order_by])
- params[:order_by]
- else
- :id
- end
-
- sort = if params[:sort] =~ /\A(ASC|DESC)\z/i
- params[:sort]
- else
- :desc
- end
-
- items.order(order_by => sort)
- end
- # rubocop: enable CodeReuse/ActiveRecord
-end
diff --git a/app/finders/projects/export_job_finder.rb b/app/finders/projects/export_job_finder.rb
new file mode 100644
index 00000000000..c26a7a3f1a6
--- /dev/null
+++ b/app/finders/projects/export_job_finder.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+module Projects
+ class ExportJobFinder
+ InvalidExportJobStatusError = Class.new(StandardError)
+ attr_reader :project, :params
+
+ def initialize(project, params = {})
+ @project = project
+ @params = params
+ end
+
+ def execute
+ export_jobs = project.export_jobs
+ export_jobs = by_status(export_jobs)
+
+ export_jobs
+ end
+
+ private
+
+ def by_status(export_jobs)
+ return export_jobs unless params[:status]
+ raise InvalidExportJobStatusError, 'Invalid export job status' unless ProjectExportJob.state_machines[:status].states.map(&:name).include?(params[:status])
+
+ export_jobs.with_status(params[:status])
+ end
+ end
+end
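
An illustrative call for the export job finder; :started is assumed here to be one of ProjectExportJob's state machine states:

    Projects::ExportJobFinder.new(project, { status: :started }).execute
    # An unrecognised status raises InvalidExportJobStatusError instead of
    # silently returning the unfiltered collection.
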
diff --git a/app/finders/runner_jobs_finder.rb b/app/finders/runner_jobs_finder.rb
deleted file mode 100644
index ef90817416a..00000000000
--- a/app/finders/runner_jobs_finder.rb
+++ /dev/null
@@ -1,43 +0,0 @@
-# frozen_string_literal: true
-
-class RunnerJobsFinder
- attr_reader :runner, :params
-
- ALLOWED_INDEXED_COLUMNS = %w[id].freeze
-
- def initialize(runner, params = {})
- @runner = runner
- @params = params
- end
-
- def execute
- items = @runner.builds
- items = by_status(items)
- sort_items(items)
- end
-
- private
-
- # rubocop: disable CodeReuse/ActiveRecord
- def by_status(items)
- return items unless HasStatus::AVAILABLE_STATUSES.include?(params[:status])
-
- items.where(status: params[:status])
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- # rubocop: disable CodeReuse/ActiveRecord
- def sort_items(items)
- return items unless ALLOWED_INDEXED_COLUMNS.include?(params[:order_by])
-
- order_by = params[:order_by]
- sort = if /\A(ASC|DESC)\z/i.match?(params[:sort])
- params[:sort]
- else
- :desc
- end
-
- items.order(order_by => sort)
- end
- # rubocop: enable CodeReuse/ActiveRecord
-end
diff --git a/app/finders/serverless_domain_finder.rb b/app/finders/serverless_domain_finder.rb
new file mode 100644
index 00000000000..661cd0ca363
--- /dev/null
+++ b/app/finders/serverless_domain_finder.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+class ServerlessDomainFinder
+ attr_reader :match, :serverless_domain_cluster, :environment
+
+ def initialize(uri)
+ @match = ::Serverless::Domain::REGEXP.match(uri)
+ end
+
+ def execute
+ return unless serverless?
+
+ @serverless_domain_cluster = ::Serverless::DomainCluster.for_uuid(serverless_domain_cluster_uuid)
+ return unless serverless_domain_cluster&.knative&.external_ip
+
+ @environment = ::Environment.for_id_and_slug(match[:environment_id].to_i(16), match[:environment_slug])
+ return unless environment
+
+ ::Serverless::Domain.new(
+ function_name: match[:function_name],
+ serverless_domain_cluster: serverless_domain_cluster,
+ environment: environment
+ )
+ end
+
+ def serverless_domain_cluster_uuid
+ return unless serverless?
+
+ match[:cluster_left] + match[:cluster_middle] + match[:cluster_right]
+ end
+
+ def serverless?
+ !!match
+ end
+end
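
A sketch of the serverless finder in use; uri stands in for a request host matching ::Serverless::Domain::REGEXP (the exact format is defined elsewhere):

    finder = ServerlessDomainFinder.new(uri)
    serverless_domain = finder.execute if finder.serverless?
    # => a ::Serverless::Domain, or nil when the cluster or environment lookup fails
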
diff --git a/app/finders/snippets_finder.rb b/app/finders/snippets_finder.rb
index 5819f279eaa..4f63810423b 100644
--- a/app/finders/snippets_finder.rb
+++ b/app/finders/snippets_finder.rb
@@ -61,9 +61,11 @@ class SnippetsFinder < UnionFinder
def execute
# The snippet query can be expensive, therefore if the
# author or project params have been passed and they don't
- # exist, it's better to return
+ # exist, or if a Project has been passed and has snippets
+ # disabled, it's better to return
return Snippet.none if author.nil? && params[:author].present?
return Snippet.none if project.nil? && params[:project].present?
+ return Snippet.none if project && !project.feature_available?(:snippets, current_user)
items = init_collection
items = by_ids(items)
diff --git a/app/graphql/gitlab_schema.rb b/app/graphql/gitlab_schema.rb
index ea5776534d5..b81996cf378 100644
--- a/app/graphql/gitlab_schema.rb
+++ b/app/graphql/gitlab_schema.rb
@@ -16,6 +16,7 @@ class GitlabSchema < GraphQL::Schema
use Gitlab::Graphql::CallsGitaly
use Gitlab::Graphql::Connections
use Gitlab::Graphql::GenericTracing
+ use Gitlab::Graphql::Timeout, max_seconds: Gitlab.config.gitlab.graphql_timeout
query_analyzer Gitlab::Graphql::QueryAnalyzers::LoggerAnalyzer.new
query_analyzer Gitlab::Graphql::QueryAnalyzers::RecursionAnalyzer.new
@@ -141,3 +142,5 @@ class GitlabSchema < GraphQL::Schema
end
end
end
+
+GitlabSchema.prepend_if_ee('EE::GitlabSchema')
diff --git a/app/graphql/mutations/admin/sidekiq_queues/delete_jobs.rb b/app/graphql/mutations/admin/sidekiq_queues/delete_jobs.rb
new file mode 100644
index 00000000000..a3a421f8938
--- /dev/null
+++ b/app/graphql/mutations/admin/sidekiq_queues/delete_jobs.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+module Mutations
+ module Admin
+ module SidekiqQueues
+ class DeleteJobs < BaseMutation
+ graphql_name 'AdminSidekiqQueuesDeleteJobs'
+
+ ADMIN_MESSAGE = 'You must be an admin to use this mutation'
+
+ Labkit::Context::KNOWN_KEYS.each do |key|
+ argument key,
+ GraphQL::STRING_TYPE,
+ required: false,
+ description: "Delete jobs matching #{key} in the context metadata"
+ end
+
+ argument :queue_name,
+ GraphQL::STRING_TYPE,
+ required: true,
+ description: 'The name of the queue to delete jobs from'
+
+ field :result,
+ Types::Admin::SidekiqQueues::DeleteJobsResponseType,
+ null: true,
+ description: 'Information about the status of the deletion request'
+
+ def ready?(**args)
+ unless current_user&.admin?
+ raise Gitlab::Graphql::Errors::ResourceNotAvailable, ADMIN_MESSAGE
+ end
+
+ super
+ end
+
+ def resolve(args)
+ {
+ result: Gitlab::SidekiqQueue.new(args[:queue_name]).drop_jobs!(args, timeout: 30),
+ errors: []
+ }
+ rescue Gitlab::SidekiqQueue::NoMetadataError
+ {
+ result: nil,
+ errors: ['No metadata provided']
+ }
+ rescue Gitlab::SidekiqQueue::InvalidQueueError
+ raise Gitlab::Graphql::Errors::ResourceNotAvailable, "Queue #{args[:queue_name]} not found"
+ end
+ end
+ end
+ end
+end
diff --git a/app/graphql/mutations/concerns/mutations/resolves_group.rb b/app/graphql/mutations/concerns/mutations/resolves_group.rb
index 4306ce512f1..11d7b34217d 100644
--- a/app/graphql/mutations/concerns/mutations/resolves_group.rb
+++ b/app/graphql/mutations/concerns/mutations/resolves_group.rb
@@ -5,11 +5,11 @@ module Mutations
extend ActiveSupport::Concern
def resolve_group(full_path:)
- resolver.resolve(full_path: full_path)
+ group_resolver.resolve(full_path: full_path)
end
- def resolver
- Resolvers::GroupResolver.new(object: nil, context: context)
+ def group_resolver
+ Resolvers::GroupResolver.new(object: nil, context: context, field: nil)
end
end
end
diff --git a/app/graphql/mutations/concerns/mutations/resolves_issuable.rb b/app/graphql/mutations/concerns/mutations/resolves_issuable.rb
new file mode 100644
index 00000000000..3a4db5ae18d
--- /dev/null
+++ b/app/graphql/mutations/concerns/mutations/resolves_issuable.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+module Mutations
+ module ResolvesIssuable
+ extend ActiveSupport::Concern
+ include Mutations::ResolvesProject
+
+ def resolve_issuable(type:, parent_path:, iid:)
+ parent = resolve_issuable_parent(parent_path)
+
+ issuable_resolver(type, parent, context).resolve(iid: iid.to_s)
+ end
+
+ def issuable_resolver(type, parent, context)
+ resolver_class = "Resolvers::#{type.to_s.classify.pluralize}Resolver".constantize
+
+ resolver_class.single.new(object: parent, context: context, field: nil)
+ end
+
+ def resolve_issuable_parent(parent_path)
+ resolve_project(full_path: parent_path)
+ end
+ end
+end
diff --git a/app/graphql/mutations/concerns/mutations/resolves_project.rb b/app/graphql/mutations/concerns/mutations/resolves_project.rb
index da9814e88b0..e223e3edd94 100644
--- a/app/graphql/mutations/concerns/mutations/resolves_project.rb
+++ b/app/graphql/mutations/concerns/mutations/resolves_project.rb
@@ -5,11 +5,11 @@ module Mutations
extend ActiveSupport::Concern
def resolve_project(full_path:)
- resolver.resolve(full_path: full_path)
+ project_resolver.resolve(full_path: full_path)
end
- def resolver
- Resolvers::ProjectResolver.new(object: nil, context: context)
+ def project_resolver
+ Resolvers::ProjectResolver.new(object: nil, context: context, field: nil)
end
end
end
diff --git a/app/graphql/mutations/issues/base.rb b/app/graphql/mutations/issues/base.rb
index b7fa234a50b..7c545c3eb00 100644
--- a/app/graphql/mutations/issues/base.rb
+++ b/app/graphql/mutations/issues/base.rb
@@ -3,7 +3,7 @@
module Mutations
module Issues
class Base < BaseMutation
- include Mutations::ResolvesProject
+ include Mutations::ResolvesIssuable
argument :project_path, GraphQL::ID_TYPE,
required: true,
@@ -23,11 +23,7 @@ module Mutations
private
def find_object(project_path:, iid:)
- project = resolve_project(full_path: project_path)
- resolver = Resolvers::IssuesResolver
- .single.new(object: project, context: context)
-
- resolver.resolve(iid: iid)
+ resolve_issuable(type: :issue, parent_path: project_path, iid: iid)
end
end
end
diff --git a/app/graphql/mutations/issues/update.rb b/app/graphql/mutations/issues/update.rb
index 119bc51e4a4..3710144fff5 100644
--- a/app/graphql/mutations/issues/update.rb
+++ b/app/graphql/mutations/issues/update.rb
@@ -5,7 +5,25 @@ module Mutations
class Update < Base
graphql_name 'UpdateIssue'
- # Add arguments here instead of creating separate mutations
+ argument :title,
+ GraphQL::STRING_TYPE,
+ required: false,
+ description: copy_field_description(Types::IssueType, :title)
+
+ argument :description,
+ GraphQL::STRING_TYPE,
+ required: false,
+ description: copy_field_description(Types::IssueType, :description)
+
+ argument :due_date,
+ Types::TimeType,
+ required: false,
+ description: copy_field_description(Types::IssueType, :due_date)
+
+ argument :confidential,
+ GraphQL::BOOLEAN_TYPE,
+ required: false,
+ description: copy_field_description(Types::IssueType, :confidential)
def resolve(project_path:, iid:, **args)
issue = authorized_find!(project_path: project_path, iid: iid)
diff --git a/app/graphql/mutations/merge_requests/base.rb b/app/graphql/mutations/merge_requests/base.rb
index 28e0cdc8cc7..96228855ace 100644
--- a/app/graphql/mutations/merge_requests/base.rb
+++ b/app/graphql/mutations/merge_requests/base.rb
@@ -3,7 +3,7 @@
module Mutations
module MergeRequests
class Base < BaseMutation
- include Mutations::ResolvesProject
+ include Mutations::ResolvesIssuable
argument :project_path, GraphQL::ID_TYPE,
required: true,
@@ -23,11 +23,7 @@ module Mutations
private
def find_object(project_path:, iid:)
- project = resolve_project(full_path: project_path)
- resolver = Resolvers::MergeRequestsResolver
- .single.new(object: project, context: context)
-
- resolver.resolve(iid: iid)
+ resolve_issuable(type: :merge_request, parent_path: project_path, iid: iid)
end
end
end
diff --git a/app/graphql/resolvers/base_resolver.rb b/app/graphql/resolvers/base_resolver.rb
index 66cb224f157..cf0642930ad 100644
--- a/app/graphql/resolvers/base_resolver.rb
+++ b/app/graphql/resolvers/base_resolver.rb
@@ -28,6 +28,10 @@ module Resolvers
end
end
+ def self.complexity
+ 0
+ end
+
def self.resolver_complexity(args, child_complexity:)
complexity = 1
complexity += 1 if args[:sort]
diff --git a/app/graphql/resolvers/boards_resolver.rb b/app/graphql/resolvers/boards_resolver.rb
index 45c03bf0bef..eceb5b38031 100644
--- a/app/graphql/resolvers/boards_resolver.rb
+++ b/app/graphql/resolvers/boards_resolver.rb
@@ -4,7 +4,11 @@ module Resolvers
class BoardsResolver < BaseResolver
type Types::BoardType, null: true
- def resolve(**args)
+ argument :id, GraphQL::ID_TYPE,
+ required: false,
+ description: 'Find a board by its ID'
+
+ def resolve(id: nil)
# The project or group could have been loaded in batch by `BatchLoader`.
# At this point we need the `id` of the project/group to query for boards, so
# make sure it's loaded and not `nil` before continuing.
@@ -12,7 +16,17 @@ module Resolvers
return Board.none unless parent
- Boards::ListService.new(parent, context[:current_user]).execute(create_default_board: false)
+ Boards::ListService.new(parent, context[:current_user], board_id: extract_board_id(id)).execute(create_default_board: false)
+ rescue ActiveRecord::RecordNotFound
+ Board.none
+ end
+
+ private
+
+ def extract_board_id(gid)
+ return unless gid.present?
+
+ GitlabSchema.parse_gid(gid, expected_type: ::Board).model_id
end
end
end
diff --git a/app/graphql/resolvers/concerns/resolves_pipelines.rb b/app/graphql/resolvers/concerns/resolves_pipelines.rb
index a6f82cc8505..46d9e174deb 100644
--- a/app/graphql/resolvers/concerns/resolves_pipelines.rb
+++ b/app/graphql/resolvers/concerns/resolves_pipelines.rb
@@ -30,6 +30,6 @@ module ResolvesPipelines
end
def resolve_pipelines(project, params = {})
- PipelinesFinder.new(project, context[:current_user], params).execute
+ Ci::PipelinesFinder.new(project, context[:current_user], params).execute
end
end
diff --git a/app/graphql/resolvers/issues_resolver.rb b/app/graphql/resolvers/issues_resolver.rb
index 664e0955535..ae77af32b5b 100644
--- a/app/graphql/resolvers/issues_resolver.rb
+++ b/app/graphql/resolvers/issues_resolver.rb
@@ -15,6 +15,15 @@ module Resolvers
argument :label_name, GraphQL::STRING_TYPE.to_list_type,
required: false,
description: 'Labels applied to this issue'
+ argument :milestone_title, GraphQL::STRING_TYPE.to_list_type,
+ required: false,
+ description: 'Milestones applied to this issue'
+ argument :assignee_username, GraphQL::STRING_TYPE,
+ required: false,
+ description: 'Username of a user assigned to the issues'
+ argument :assignee_id, GraphQL::STRING_TYPE,
+ required: false,
+ description: 'ID of a user assigned to the issues, "none" and "any" values supported'
argument :created_before, Types::TimeType,
required: false,
description: 'Issues created before this date'
diff --git a/app/graphql/resolvers/projects/snippets_resolver.rb b/app/graphql/resolvers/projects/snippets_resolver.rb
index bf9aa45349f..22895a24054 100644
--- a/app/graphql/resolvers/projects/snippets_resolver.rb
+++ b/app/graphql/resolvers/projects/snippets_resolver.rb
@@ -10,6 +10,11 @@ module Resolvers
def resolve(**args)
return Snippet.none if project.nil?
+ unless project.feature_available?(:snippets, current_user)
+ raise Gitlab::Graphql::Errors::ResourceNotAvailable,
+ 'Snippets are not enabled for this Project'
+ end
+
super
end
diff --git a/app/graphql/types/admin/sidekiq_queues/delete_jobs_response_type.rb b/app/graphql/types/admin/sidekiq_queues/delete_jobs_response_type.rb
new file mode 100644
index 00000000000..69af9d463bb
--- /dev/null
+++ b/app/graphql/types/admin/sidekiq_queues/delete_jobs_response_type.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+module Types
+ module Admin
+ module SidekiqQueues
+ # We can't authorize against the value passed to this because it's
+ # a plain hash.
+ class DeleteJobsResponseType < BaseObject # rubocop:disable Graphql/AuthorizeTypes
+ graphql_name 'DeleteJobsResponse'
+ description 'The response from the AdminSidekiqQueuesDeleteJobs mutation.'
+
+ field :completed,
+ GraphQL::BOOLEAN_TYPE,
+ null: true,
+ description: 'Whether or not the entire queue was processed in time; if not, retrying the same request is safe'
+
+ field :deleted_jobs,
+ GraphQL::INT_TYPE,
+ null: true,
+ description: 'The number of matching jobs deleted'
+
+ field :queue_size,
+ GraphQL::INT_TYPE,
+ null: true,
+ description: 'The queue size after processing'
+ end
+ end
+ end
+end
diff --git a/app/graphql/types/base_field.rb b/app/graphql/types/base_field.rb
index 3ade1300c2d..1b296f8d52b 100644
--- a/app/graphql/types/base_field.rb
+++ b/app/graphql/types/base_field.rb
@@ -9,7 +9,7 @@ module Types
def initialize(*args, **kwargs, &block)
@calls_gitaly = !!kwargs.delete(:calls_gitaly)
@constant_complexity = !!kwargs[:complexity]
- kwargs[:complexity] ||= field_complexity(kwargs[:resolver_class])
+ kwargs[:complexity] = field_complexity(kwargs[:resolver_class], kwargs[:complexity])
@feature_flag = kwargs[:feature_flag]
kwargs = check_feature_flag(kwargs)
@@ -41,7 +41,7 @@ module Types
attr_reader :feature_flag
def feature_documentation_message(key, description)
- "#{description}. Available only when feature flag #{key} is enabled."
+ "#{description}. Available only when feature flag `#{key}` is enabled."
end
def check_feature_flag(args)
@@ -51,7 +51,9 @@ module Types
args
end
- def field_complexity(resolver_class)
+ def field_complexity(resolver_class, current)
+ return current if current.present? && current > 0
+
if resolver_class
field_resolver_complexity
else
@@ -66,22 +68,30 @@ module Types
# proc because we set complexity depending on arguments and number of
# items which can be loaded.
proc do |ctx, args, child_complexity|
+ next base_complexity unless resolver_complexity_enabled?(ctx)
+
# Resolvers may add extra complexity depending on used arguments
complexity = child_complexity + self.resolver&.try(:resolver_complexity, args, child_complexity: child_complexity).to_i
complexity += 1 if calls_gitaly?
-
- field_defn = to_graphql
-
- if field_defn.connection?
- # Resolvers may add extra complexity depending on number of items being loaded.
- page_size = field_defn.connection_max_page_size || ctx.schema.default_max_page_size
- limit_value = [args[:first], args[:last], page_size].compact.min
- multiplier = self.resolver&.try(:complexity_multiplier, args).to_f
- complexity += complexity * limit_value * multiplier
- end
+ complexity += complexity * connection_complexity_multiplier(ctx, args)
complexity.to_i
end
end
+
+ def resolver_complexity_enabled?(ctx)
+ ctx.fetch(:graphql_resolver_complexity_flag) { |key| ctx[key] = Feature.enabled?(:graphql_resolver_complexity) }
+ end
+
+ def connection_complexity_multiplier(ctx, args)
+ # Resolvers may add extra complexity depending on number of items being loaded.
+ field_defn = to_graphql
+ return 0 unless field_defn.connection?
+
+ page_size = field_defn.connection_max_page_size || ctx.schema.default_max_page_size
+ limit_value = [args[:first], args[:last], page_size].compact.min
+ multiplier = self.resolver&.try(:complexity_multiplier, args).to_f
+ limit_value * multiplier
+ end
end
end
diff --git a/app/graphql/types/group_type.rb b/app/graphql/types/group_type.rb
index 718770ebfbc..bd9efef94f8 100644
--- a/app/graphql/types/group_type.rb
+++ b/app/graphql/types/group_type.rb
@@ -46,6 +46,19 @@ module Types
field :milestones, Types::MilestoneType.connection_type, null: true,
description: 'Find milestones',
resolver: Resolvers::MilestoneResolver
+
+ field :boards,
+ Types::BoardType.connection_type,
+ null: true,
+ description: 'Boards of the group',
+ max_page_size: 2000,
+ resolver: Resolvers::BoardsResolver
+
+ field :board,
+ Types::BoardType,
+ null: true,
+ description: 'A single board of the group',
+ resolver: Resolvers::BoardsResolver.single
end
end
diff --git a/app/graphql/types/mutation_type.rb b/app/graphql/types/mutation_type.rb
index 90e9e1ec0b9..d3c0d9732d2 100644
--- a/app/graphql/types/mutation_type.rb
+++ b/app/graphql/types/mutation_type.rb
@@ -6,6 +6,7 @@ module Types
graphql_name 'Mutation'
+ mount_mutation Mutations::Admin::SidekiqQueues::DeleteJobs
mount_mutation Mutations::AwardEmojis::Add
mount_mutation Mutations::AwardEmojis::Remove
mount_mutation Mutations::AwardEmojis::Toggle
diff --git a/app/graphql/types/project_type.rb b/app/graphql/types/project_type.rb
index b44baa50955..5c0b9182ac5 100644
--- a/app/graphql/types/project_type.rb
+++ b/app/graphql/types/project_type.rb
@@ -179,6 +179,19 @@ module Types
null: true,
description: 'Paginated collection of Sentry errors on the project',
resolver: Resolvers::ErrorTracking::SentryErrorCollectionResolver
+
+ field :boards,
+ Types::BoardType.connection_type,
+ null: true,
+ description: 'Boards of the project',
+ max_page_size: 2000,
+ resolver: Resolvers::BoardsResolver
+
+ field :board,
+ Types::BoardType,
+ null: true,
+ description: 'A single board of the project',
+ resolver: Resolvers::BoardsResolver.single
end
end
diff --git a/app/graphql/types/snippet_type.rb b/app/graphql/types/snippet_type.rb
index c4d65174990..cb0bd5205b0 100644
--- a/app/graphql/types/snippet_type.rb
+++ b/app/graphql/types/snippet_type.rb
@@ -62,6 +62,7 @@ module Types
field :blob, type: Types::Snippets::BlobType,
description: 'Snippet blob',
+ calls_gitaly: true,
null: false
markdown_field :description_html, null: true, method: :description
diff --git a/app/helpers/analytics_navbar_helper.rb b/app/helpers/analytics_navbar_helper.rb
index 021b9bb10cd..f94119c4eef 100644
--- a/app/helpers/analytics_navbar_helper.rb
+++ b/app/helpers/analytics_navbar_helper.rb
@@ -31,11 +31,10 @@ module AnalyticsNavbarHelper
end
def cycle_analytics_navbar_link(project, current_user)
- return unless Feature.enabled?(:analytics_pages_under_project_analytics_sidebar, project, default_enabled: true)
return unless project_nav_tab?(:cycle_analytics)
navbar_sub_item(
- title: _('Value Stream Analytics'),
+ title: _('Value Stream'),
path: 'cycle_analytics#show',
link: project_cycle_analytics_path(project),
link_to_options: { class: 'shortcuts-project-cycle-analytics' }
@@ -43,11 +42,10 @@ module AnalyticsNavbarHelper
end
def repository_analytics_navbar_link(project, current_user)
- return if Feature.disabled?(:analytics_pages_under_project_analytics_sidebar, project, default_enabled: true)
return if project.empty_repo?
navbar_sub_item(
- title: _('Repository Analytics'),
+ title: _('Repository'),
path: 'graphs#charts',
link: charts_project_graph_path(project, current_ref),
link_to_options: { class: 'shortcuts-repository-charts' }
@@ -55,12 +53,11 @@ module AnalyticsNavbarHelper
end
def ci_cd_analytics_navbar_link(project, current_user)
- return unless Feature.enabled?(:analytics_pages_under_project_analytics_sidebar, project, default_enabled: true)
return unless project_nav_tab?(:pipelines)
return unless project.feature_available?(:builds, current_user) || !project.empty_repo?
navbar_sub_item(
- title: _('CI / CD Analytics'),
+ title: _('CI / CD'),
path: 'pipelines#charts',
link: charts_project_pipelines_path(project)
)
diff --git a/app/helpers/application_helper.rb b/app/helpers/application_helper.rb
index 8833b36c42d..83ecc7753b6 100644
--- a/app/helpers/application_helper.rb
+++ b/app/helpers/application_helper.rb
@@ -227,7 +227,7 @@ module ApplicationHelper
end
def outdated_browser?
- browser.ie? && browser.version.to_i < 10
+ browser.ie?
end
def path_to_key(key, admin = false)
diff --git a/app/helpers/application_settings_helper.rb b/app/helpers/application_settings_helper.rb
index f96c26b428c..222a6898726 100644
--- a/app/helpers/application_settings_helper.rb
+++ b/app/helpers/application_settings_helper.rb
@@ -315,7 +315,9 @@ module ApplicationSettingsHelper
:push_event_hooks_limit,
:push_event_activities_limit,
:custom_http_clone_url_root,
- :snippet_size_limit
+ :snippet_size_limit,
+ :email_restrictions_enabled,
+ :email_restrictions
]
end
diff --git a/app/helpers/auth_helper.rb b/app/helpers/auth_helper.rb
index e8d3d5f62cb..26d73007e65 100644
--- a/app/helpers/auth_helper.rb
+++ b/app/helpers/auth_helper.rb
@@ -5,21 +5,29 @@ module AuthHelper
LDAP_PROVIDER = /\Aldap/.freeze
def ldap_enabled?
- Gitlab::Auth::LDAP::Config.enabled?
+ Gitlab::Auth::Ldap::Config.enabled?
end
def ldap_sign_in_enabled?
- Gitlab::Auth::LDAP::Config.sign_in_enabled?
+ Gitlab::Auth::Ldap::Config.sign_in_enabled?
end
def omniauth_enabled?
Gitlab::Auth.omniauth_enabled?
end
- def provider_has_icon?(name)
+ def provider_has_custom_icon?(name)
+ icon_for_provider(name.to_s)
+ end
+
+ def provider_has_builtin_icon?(name)
PROVIDERS_WITH_ICONS.include?(name.to_s)
end
+ def provider_has_icon?(name)
+ provider_has_builtin_icon?(name) || provider_has_custom_icon?(name)
+ end
+
def qa_class_for_provider(provider)
{
saml: 'qa-saml-login-button',
@@ -35,6 +43,10 @@ module AuthHelper
Gitlab::Auth::OAuth::Provider.label_for(name)
end
+ def icon_for_provider(name)
+ Gitlab::Auth::OAuth::Provider.icon_for(name)
+ end
+
def form_based_provider_priority
['crowd', /^ldap/, 'kerberos']
end
@@ -109,7 +121,9 @@ module AuthHelper
def provider_image_tag(provider, size = 64)
label = label_for_provider(provider)
- if provider_has_icon?(provider)
+ if provider_has_custom_icon?(provider)
+ image_tag(icon_for_provider(provider), alt: label, title: "Sign in with #{label}")
+ elsif provider_has_builtin_icon?(provider)
file_name = "#{provider.to_s.split('_').first}_#{size}.png"
image_tag("auth_buttons/#{file_name}", alt: label, title: "Sign in with #{label}")
diff --git a/app/helpers/blob_helper.rb b/app/helpers/blob_helper.rb
index 77a320f8925..4debf66db64 100644
--- a/app/helpers/blob_helper.rb
+++ b/app/helpers/blob_helper.rb
@@ -341,4 +341,20 @@ module BlobHelper
edit_fork_button_tag(common_classes, project, text, edit_blob_fork_params(edit_path))
end
end
+
+ def show_suggest_pipeline_creation_celebration?
+ experiment_enabled?(:suggest_pipeline) &&
+ @blob.path == Gitlab::FileDetector::PATTERNS[:gitlab_ci] &&
+ @blob.auxiliary_viewer.valid?(project: @project, sha: @commit.sha, user: current_user) &&
+ @project.uses_default_ci_config? &&
+ cookies[suggest_pipeline_commit_cookie_name].present?
+ end
+
+ def suggest_pipeline_commit_cookie_name
+ "suggest_gitlab_ci_yml_commit_#{@project.id}"
+ end
+
+ def human_access
+ @project.team.human_max_access(current_user&.id).try(:downcase)
+ end
end
diff --git a/app/helpers/boards_helper.rb b/app/helpers/boards_helper.rb
index d3950219f3f..c14bc454bb9 100644
--- a/app/helpers/boards_helper.rb
+++ b/app/helpers/boards_helper.rb
@@ -13,13 +13,21 @@ module BoardsHelper
disabled: (!can?(current_user, :create_non_backlog_issues, board)).to_s,
issue_link_base: build_issue_link_base,
root_path: root_path,
+ full_path: full_path,
bulk_update_path: @bulk_issues_path,
- default_avatar: image_path(default_avatar),
time_tracking_limit_to_hours: Gitlab::CurrentSettings.time_tracking_limit_to_hours.to_s,
recent_boards_endpoint: recent_boards_path
}
end
+ def full_path
+ if board.group_board?
+ @group.full_path
+ else
+ @project.full_path
+ end
+ end
+
def build_issue_link_base
if board.group_board?
"#{group_path(@board.group)}/:project_path/issues"
diff --git a/app/helpers/broadcast_messages_helper.rb b/app/helpers/broadcast_messages_helper.rb
index 34e65c322c6..73c68dd9e18 100644
--- a/app/helpers/broadcast_messages_helper.rb
+++ b/app/helpers/broadcast_messages_helper.rb
@@ -2,12 +2,14 @@
module BroadcastMessagesHelper
def current_broadcast_banner_messages
- BroadcastMessage.current_banner_messages(request.path)
+ BroadcastMessage.current_banner_messages(request.path).select do |message|
+ cookies["hide_broadcast_message_#{message.id}"].blank?
+ end
end
def current_broadcast_notification_message
not_hidden_messages = BroadcastMessage.current_notification_messages(request.path).select do |message|
- cookies["hide_broadcast_notification_message_#{message.id}"].blank?
+ cookies["hide_broadcast_message_#{message.id}"].blank?
end
not_hidden_messages.last
end
@@ -45,7 +47,15 @@ module BroadcastMessagesHelper
end
def render_broadcast_message(broadcast_message)
- Banzai.render_field(broadcast_message, :message).html_safe
+ if Feature.enabled?(:broadcast_message_placeholders)
+ Banzai.render_and_post_process(broadcast_message.message, {
+ current_user: current_user,
+ skip_project_check: true,
+ broadcast_message_placeholders: true
+ }).html_safe
+ else
+ Banzai.render_field(broadcast_message, :message).html_safe
+ end
end
def broadcast_type_options
diff --git a/app/helpers/ci_variables_helper.rb b/app/helpers/ci_variables_helper.rb
index fc51f00d052..3f4c04070b5 100644
--- a/app/helpers/ci_variables_helper.rb
+++ b/app/helpers/ci_variables_helper.rb
@@ -5,6 +5,22 @@ module CiVariablesHelper
Gitlab::CurrentSettings.current_application_settings.protected_ci_variables
end
+ def create_deploy_token_path(entity, opts = {})
+ if entity.is_a?(Group)
+ create_deploy_token_group_settings_ci_cd_path(entity, opts)
+ else
+ create_deploy_token_project_settings_repository_path(entity, opts)
+ end
+ end
+
+ def revoke_deploy_token_path(entity, token)
+ if entity.is_a?(Group)
+ revoke_group_deploy_token_path(entity, token)
+ else
+ revoke_project_deploy_token_path(entity, token)
+ end
+ end
+
def ci_variable_protected?(variable, only_key_value)
if variable && !only_key_value
variable.protected
diff --git a/app/helpers/clusters_helper.rb b/app/helpers/clusters_helper.rb
index 80bf765f3a4..21bd2ff9e32 100644
--- a/app/helpers/clusters_helper.rb
+++ b/app/helpers/clusters_helper.rb
@@ -26,11 +26,38 @@ module ClustersHelper
end
end
+ def render_cluster_info_tab_content(tab, expanded)
+ case tab
+ when 'environments'
+ render_if_exists 'clusters/clusters/environments'
+ when 'health'
+ render_if_exists 'clusters/clusters/health'
+ when 'apps'
+ render 'applications'
+ when 'settings'
+ render 'advanced_settings_container'
+ else
+ render('details', expanded: expanded)
+ end
+ end
+
def has_rbac_enabled?(cluster)
return cluster.platform_kubernetes_rbac? if cluster.platform_kubernetes
cluster.provider.has_rbac_enabled?
end
+
+ def project_cluster?(cluster)
+ cluster.cluster_type.in?('project_type')
+ end
+
+ def cluster_created?(cluster)
+ !cluster.status_name.in?(%i/scheduled creating/)
+ end
+
+ def can_admin_cluster?(user, cluster)
+ can?(user, :admin_cluster, cluster)
+ end
end
ClustersHelper.prepend_if_ee('EE::ClustersHelper')
diff --git a/app/helpers/environments_helper.rb b/app/helpers/environments_helper.rb
index fd330d4efd9..6bf920448a5 100644
--- a/app/helpers/environments_helper.rb
+++ b/app/helpers/environments_helper.rb
@@ -41,4 +41,13 @@ module EnvironmentsHelper
"external-dashboard-url" => project.metrics_setting_external_dashboard_url
}
end
+
+ def environment_logs_data(project, environment)
+ {
+ "environment-name": environment.name,
+ "environments-path": project_environments_path(project, format: :json),
+ "environment-id": environment.id,
+ "cluster-applications-documentation-path" => help_page_path('user/clusters/applications.md', anchor: 'elastic-stack')
+ }
+ end
end
diff --git a/app/helpers/form_helper.rb b/app/helpers/form_helper.rb
index bdb0a881b08..b611f700d21 100644
--- a/app/helpers/form_helper.rb
+++ b/app/helpers/form_helper.rb
@@ -3,18 +3,23 @@
module FormHelper
prepend_if_ee('::EE::FormHelper') # rubocop: disable Cop/InjectEnterpriseEditionModule
- def form_errors(model, type: 'form')
+ def form_errors(model, type: 'form', truncate: [])
return unless model.errors.any?
headline = n_('The %{type} contains the following error:', 'The %{type} contains the following errors:', model.errors.count) % { type: type }
+ truncate = Array.wrap(truncate)
content_tag(:div, class: 'alert alert-danger', id: 'error_explanation') do
content_tag(:h4, headline) <<
content_tag(:ul) do
- model.errors.full_messages
- .map { |msg| content_tag(:li, msg) }
- .join
- .html_safe
+ messages = model.errors.map do |attribute, message|
+ message = model.errors.full_message(attribute, message)
+ message = content_tag(:span, message, class: 'str-truncated-100') if truncate.include?(attribute)
+
+ content_tag(:li, message)
+ end
+
+ messages.join.html_safe
end
end
end
diff --git a/app/helpers/ide_helper.rb b/app/helpers/ide_helper.rb
index e4d0e605254..d6145493ba6 100644
--- a/app/helpers/ide_helper.rb
+++ b/app/helpers/ide_helper.rb
@@ -10,8 +10,9 @@ module IdeHelper
"promotion-svg-path": image_path('illustrations/web-ide_promotion.svg'),
"ci-help-page-path" => help_page_path('ci/quick_start/README'),
"web-ide-help-page-path" => help_page_path('user/project/web_ide/index.html'),
- "clientside-preview-enabled": Gitlab::CurrentSettings.current_application_settings.web_ide_clientside_preview_enabled.to_s,
- "render-whitespace-in-code": current_user.render_whitespace_in_code.to_s
+ "clientside-preview-enabled": Gitlab::CurrentSettings.web_ide_clientside_preview_enabled?.to_s,
+ "render-whitespace-in-code": current_user.render_whitespace_in_code.to_s,
+ "codesandbox-bundler-url": Gitlab::CurrentSettings.web_ide_clientside_preview_bundler_url
}
end
end
diff --git a/app/helpers/issuables_helper.rb b/app/helpers/issuables_helper.rb
index 8c75a4a13e8..acfd972bb83 100644
--- a/app/helpers/issuables_helper.rb
+++ b/app/helpers/issuables_helper.rb
@@ -463,6 +463,7 @@ module IssuablesHelper
currentUser: issuable[:current_user],
rootPath: root_path,
fullPath: issuable[:project_full_path],
+ iid: issuable[:iid],
timeTrackingLimitToHours: Gitlab::CurrentSettings.time_tracking_limit_to_hours
}
end
diff --git a/app/helpers/labels_helper.rb b/app/helpers/labels_helper.rb
index 0d3cf4d73fb..3142d7d7782 100644
--- a/app/helpers/labels_helper.rb
+++ b/app/helpers/labels_helper.rb
@@ -2,7 +2,6 @@
module LabelsHelper
extend self
- include ActionView::Helpers::TagHelper
def show_label_issuables_link?(label, issuables_type, current_user: nil)
return true unless label.project_label?
@@ -37,37 +36,42 @@ module LabelsHelper
# link_to_label(label) { "My Custom Label Text" }
#
# Returns a String
- def link_to_label(label, type: :issue, tooltip: true, css_class: nil, &block)
+ def link_to_label(label, type: :issue, tooltip: true, small: false, &block)
link = label.filter_path(type: type)
if block_given?
- link_to link, class: css_class, &block
+ link_to link, &block
else
- render_label(label, tooltip: tooltip, link: link, css: css_class)
+ render_label(label, link: link, tooltip: tooltip, small: small)
end
end
- def render_label(label, tooltip: true, link: nil, css: nil, dataset: nil)
- # if scoped label is used then EE wraps label tag with scoped label
- # doc link
- html = render_colored_label(label, tooltip: tooltip)
- html = link_to(html, link, class: css, data: dataset) if link
+ def render_label(label, link: nil, tooltip: true, dataset: nil, small: false)
+ html = render_colored_label(label)
- html
+ if link
+ title = label_tooltip_title(label) if tooltip
+ html = render_label_link(html, link: link, title: title, dataset: dataset)
+ end
+
+ wrap_label_html(html, small: small, label: label)
end
- def render_colored_label(label, label_suffix: '', tooltip: true, title: nil)
- text_color = text_color_for_bg(label.color)
- title ||= tooltip ? label_tooltip_title(label) : label.name
+ def render_colored_label(label, suffix: '')
+ render_label_text(
+ label.name,
+ suffix: suffix,
+ css_class: text_color_class_for_bg(label.color),
+ bg_color: label.color
+ )
+ end
- # Intentionally not using content_tag here so that this method can be called
- # by LabelReferenceFilter
- span = %(<span class="badge color-label #{"has-tooltip" if tooltip}" ) +
- %(data-html="true" style="background-color: #{label.color}; color: #{text_color}" ) +
- %(title="#{escape_once(title)}" data-container="body">) +
- %(#{escape_once(label.name)}#{label_suffix}</span>)
+ # We need the `label` argument here for EE
+ def wrap_label_html(label_html, small:, label:)
+ wrapper_classes = %w(gl-label)
+ wrapper_classes << 'gl-label-sm' if small
- span.html_safe
+ %(<span class="#{wrapper_classes.join(' ')}">#{label_html}</span>).html_safe
end
def label_tooltip_title(label)
@@ -110,20 +114,32 @@ module LabelsHelper
end
end
- def text_color_for_bg(bg_color)
- if bg_color.length == 4
- r, g, b = bg_color[1, 4].scan(/./).map { |v| (v * 2).hex }
+ def text_color_class_for_bg(bg_color)
+ if light_color?(bg_color)
+ 'gl-label-text-dark'
else
- r, g, b = bg_color[1, 7].scan(/.{2}/).map(&:hex)
+ 'gl-label-text-light'
end
+ end
- if (r + g + b) > 500
+ def text_color_for_bg(bg_color)
+ if light_color?(bg_color)
'#333333'
else
'#FFFFFF'
end
end
+ def light_color?(color)
+ if color.length == 4
+ r, g, b = color[1, 4].scan(/./).map { |v| (v * 2).hex }
+ else
+ r, g, b = color[1, 7].scan(/.{2}/).map(&:hex)
+ end
+
+ (r + g + b) > 500
+ end
+
def labels_filter_path_with_defaults(only_group_labels: false, include_ancestor_groups: true, include_descendant_groups: false)
options = {}
options[:include_ancestor_groups] = include_ancestor_groups if include_ancestor_groups
@@ -248,8 +264,30 @@ module LabelsHelper
['issues', 'merge requests']
end
- # Required for Banzai::Filter::LabelReferenceFilter
- module_function :render_colored_label, :text_color_for_bg, :escape_once, :label_tooltip_title
+ private
+
+ def render_label_link(label_html, link:, title:, dataset:)
+ classes = %w(gl-link gl-label-link)
+ dataset ||= {}
+
+ if title.present?
+ classes << 'has-tooltip'
+ dataset.merge!(html: true, title: title)
+ end
+
+ link_to(label_html, link, class: classes.join(' '), data: dataset)
+ end
+
+ def render_label_text(name, suffix: '', css_class: nil, bg_color: nil)
+ <<~HTML.chomp.html_safe
+ <span
+ class="gl-label-text #{css_class}"
+ data-container="body"
+ data-html="true"
+ #{"style=\"background-color: #{bg_color}\"" if bg_color}
+ >#{ERB::Util.html_escape_once(name)}#{suffix}</span>
+ HTML
+ end
end
LabelsHelper.prepend_if_ee('EE::LabelsHelper')
diff --git a/app/helpers/markup_helper.rb b/app/helpers/markup_helper.rb
index a0228c6bd94..4f66356c27e 100644
--- a/app/helpers/markup_helper.rb
+++ b/app/helpers/markup_helper.rb
@@ -79,7 +79,7 @@ module MarkupHelper
md = markdown_field(object, attribute, options.merge(post_process: false))
return unless md.present?
- tags = %w(a gl-emoji b pre code p span)
+ tags = %w(a gl-emoji b strong i em pre code p span)
tags << 'img' if options[:allow_images]
text = truncate_visible(md, max_chars || md.length)
@@ -88,7 +88,7 @@ module MarkupHelper
text,
tags: tags,
attributes: Rails::Html::WhiteListSanitizer.allowed_attributes +
- %w(style data-src data-name data-unicode-version data-iid data-project-path data-mr-title)
+ %w(style data-src data-name data-unicode-version data-iid data-project-path data-mr-title data-html)
)
# since <img> tags are stripped, this can leave empty <a> tags hanging around
@@ -233,7 +233,7 @@ module MarkupHelper
def strip_empty_link_tags(text)
scrubber = Loofah::Scrubber.new do |node|
- node.remove if node.name == 'a' && node.content.blank?
+ node.remove if node.name == 'a' && node.children.empty?
end
sanitize text, scrubber: scrubber
diff --git a/app/helpers/milestones_helper.rb b/app/helpers/milestones_helper.rb
index b12b39073ef..da6a0e38c44 100644
--- a/app/helpers/milestones_helper.rb
+++ b/app/helpers/milestones_helper.rb
@@ -92,12 +92,12 @@ module MilestonesHelper
end
def milestone_progress_tooltip_text(milestone)
- has_issues = milestone.total_issues_count(current_user) > 0
+ has_issues = milestone.total_issues_count > 0
if has_issues
[
_('Progress'),
- _("%{percent}%% complete") % { percent: milestone.percent_complete(current_user) }
+ _("%{percent}%% complete") % { percent: milestone.percent_complete }
].join('<br />')
else
_('Progress')
@@ -107,7 +107,7 @@ module MilestonesHelper
def milestone_progress_bar(milestone)
options = {
class: 'progress-bar bg-success',
- style: "width: #{milestone.percent_complete(current_user)}%;"
+ style: "width: #{milestone.percent_complete}%;"
}
content_tag :div, class: 'progress' do
@@ -151,18 +151,20 @@ module MilestonesHelper
end
def milestone_issues_tooltip_text(milestone)
- issues = milestone.count_issues_by_state(current_user)
+ total = milestone.total_issues_count
+ opened = milestone.opened_issues_count
+ closed = milestone.closed_issues_count
- return _("Issues") if issues.empty?
+ return _("Issues") if total.zero?
content = []
- if issues["opened"]
- content << n_("1 open issue", "%{issues} open issues", issues["opened"]) % { issues: issues["opened"] }
+ if opened > 0
+ content << n_("1 open issue", "%{issues} open issues", opened) % { issues: opened }
end
- if issues["closed"]
- content << n_("1 closed issue", "%{issues} closed issues", issues["closed"]) % { issues: issues["closed"] }
+ if closed > 0
+ content << n_("1 closed issue", "%{issues} closed issues", closed) % { issues: closed }
end
content.join('<br />').html_safe
diff --git a/app/helpers/projects_helper.rb b/app/helpers/projects_helper.rb
index 023790f7d87..cf9f3b9e924 100644
--- a/app/helpers/projects_helper.rb
+++ b/app/helpers/projects_helper.rb
@@ -381,6 +381,14 @@ module ProjectsHelper
@project.grafana_integration&.enabled?
end
+ def project_license_name(project)
+ project.repository.license&.name
+ rescue GRPC::Unavailable, GRPC::DeadlineExceeded, Gitlab::Git::CommandError => e
+ Gitlab::ErrorTracking.track_exception(e)
+
+ nil
+ end
+
private
def get_project_nav_tabs(project, current_user)
@@ -661,6 +669,9 @@ module ProjectsHelper
project_members#index
integrations#show
services#edit
+ hooks#index
+ hooks#edit
+ hook_logs#show
repository#show
ci_cd#show
operations#show
diff --git a/app/helpers/releases_helper.rb b/app/helpers/releases_helper.rb
index c4fe40a0875..6fbef800faa 100644
--- a/app/helpers/releases_helper.rb
+++ b/app/helpers/releases_helper.rb
@@ -17,7 +17,9 @@ module ReleasesHelper
project_id: @project.id,
illustration_path: illustration,
documentation_path: help_page
- }
+ }.tap do |data|
+ data[:new_release_path] = new_project_tag_path(@project) if can?(current_user, :create_release, @project)
+ end
end
def data_for_edit_release_page
diff --git a/app/helpers/submodule_helper.rb b/app/helpers/submodule_helper.rb
index 32c613ab4ad..e9554300075 100644
--- a/app/helpers/submodule_helper.rb
+++ b/app/helpers/submodule_helper.rb
@@ -38,6 +38,8 @@ module SubmoduleHelper
url_helpers.namespace_project_tree_path(namespace, project, submodule_item_id)]
elsif relative_self_url?(url)
relative_self_links(url, submodule_item_id, repository.project)
+ elsif gist_github_dot_com_url?(url)
+ gist_github_com_tree_links(namespace, project, submodule_item_id)
elsif github_dot_com_url?(url)
github_com_tree_links(namespace, project, submodule_item_id)
elsif gitlab_dot_com_url?(url)
@@ -52,6 +54,10 @@ module SubmoduleHelper
protected
+ def gist_github_dot_com_url?(url)
+ url =~ %r{gist\.github\.com[/:][^/]+/[^/]+\Z}
+ end
+
def github_dot_com_url?(url)
url =~ %r{github\.com[/:][^/]+/[^/]+\Z}
end
@@ -66,7 +72,7 @@ module SubmoduleHelper
project].join('')
url_with_dotgit = url_no_dotgit + '.git'
- url_with_dotgit == Gitlab::Shell.new.url_to_repo([namespace, '/', project].join(''))
+ url_with_dotgit == Gitlab::Shell.url_to_repo([namespace, '/', project].join(''))
end
def relative_self_url?(url)
@@ -78,6 +84,11 @@ module SubmoduleHelper
[base, [base, '/-/tree/', commit].join('')]
end
+ def gist_github_com_tree_links(namespace, project, commit)
+ base = ['https://gist.github.com/', namespace, '/', project].join('')
+ [base, [base, commit].join('/')]
+ end
+
def github_com_tree_links(namespace, project, commit)
base = ['https://github.com/', namespace, '/', project].join('')
[base, [base, '/tree/', commit].join('')]
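Gist URLs also contain `github.com`, so the gist check has to run before the general github.com branch in the `elsif` chain above. An illustrative match and the links it produces (submodule URL and commit are made up):

url = 'git@gist.github.com:gitlab-org/2dd4595d59f74a678e9e'

url =~ %r{gist\.github\.com[/:][^/]+/[^/]+\Z} # => matches, so the gist branch wins
url =~ %r{github\.com[/:][^/]+/[^/]+\Z}       # => would also match, hence the ordering

# gist_github_com_tree_links('gitlab-org', '2dd4595d59f74a678e9e', 'abc123')
# => ["https://gist.github.com/gitlab-org/2dd4595d59f74a678e9e",
#     "https://gist.github.com/gitlab-org/2dd4595d59f74a678e9e/abc123"]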
diff --git a/app/helpers/suggest_pipeline_helper.rb b/app/helpers/suggest_pipeline_helper.rb
new file mode 100644
index 00000000000..aa67f0ea770
--- /dev/null
+++ b/app/helpers/suggest_pipeline_helper.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+module SuggestPipelineHelper
+ def should_suggest_gitlab_ci_yml?
+ Feature.enabled?(:suggest_pipeline) &&
+ current_user &&
+ params[:suggest_gitlab_ci_yml] == 'true'
+ end
+end
diff --git a/app/helpers/system_note_helper.rb b/app/helpers/system_note_helper.rb
index 05d698a6d99..6702a805cb7 100644
--- a/app/helpers/system_note_helper.rb
+++ b/app/helpers/system_note_helper.rb
@@ -2,7 +2,7 @@
module SystemNoteHelper
ICON_NAMES_BY_ACTION = {
- 'cherry_pick' => 'link',
+ 'cherry_pick' => 'cherry-pick-commit',
'commit' => 'commit',
'description' => 'pencil-square',
'merge' => 'git-merge',
@@ -23,7 +23,7 @@ module SystemNoteHelper
'moved' => 'arrow-right',
'outdated' => 'pencil-square',
'pinned_embed' => 'thumbtack',
- 'duplicate' => 'issue-duplicate',
+ 'duplicate' => 'duplicate',
'locked' => 'lock',
'unlocked' => 'lock-open',
'due_date' => 'calendar'
diff --git a/app/helpers/user_callouts_helper.rb b/app/helpers/user_callouts_helper.rb
index ab691916706..841599abe81 100644
--- a/app/helpers/user_callouts_helper.rb
+++ b/app/helpers/user_callouts_helper.rb
@@ -5,6 +5,7 @@ module UserCalloutsHelper
GCP_SIGNUP_OFFER = 'gcp_signup_offer'
SUGGEST_POPOVER_DISMISSED = 'suggest_popover_dismissed'
TABS_POSITION_HIGHLIGHT = 'tabs_position_highlight'
+ WEBHOOKS_MOVED = 'webhooks_moved'
def show_gke_cluster_integration_callout?(project)
can?(current_user, :create_cluster, project) &&
@@ -33,6 +34,10 @@ module UserCalloutsHelper
current_user && !user_dismissed?(TABS_POSITION_HIGHLIGHT) && !Rails.env.test?
end
+ def show_webhooks_moved_alert?
+ !user_dismissed?(WEBHOOKS_MOVED)
+ end
+
private
def user_dismissed?(feature_name, ignore_dismissal_earlier_than = nil)
diff --git a/app/helpers/users_helper.rb b/app/helpers/users_helper.rb
index e87bb27cf62..c1bca6b4c41 100644
--- a/app/helpers/users_helper.rb
+++ b/app/helpers/users_helper.rb
@@ -91,6 +91,21 @@ module UsersHelper
end
end
+ def work_information(user)
+ return unless user
+
+ organization = user.organization
+ job_title = user.job_title
+
+ if organization.present? && job_title.present?
+ s_('Profile|%{job_title} at %{organization}') % { job_title: job_title, organization: organization }
+ elsif job_title.present?
+ job_title
+ elsif organization.present?
+ organization
+ end
+ end
+
private
def get_profile_tabs
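The helper picks the most specific string it can build from the two profile fields. Illustrative outputs, ignoring i18n and using a stand-in struct instead of a real User:

FakeUser = Struct.new(:organization, :job_title) # stand-in for illustration only

work_information(FakeUser.new('Acme Corp', 'Backend Engineer')) # => "Backend Engineer at Acme Corp"
work_information(FakeUser.new(nil, 'Backend Engineer'))         # => "Backend Engineer"
work_information(FakeUser.new('Acme Corp', nil))                # => "Acme Corp"
work_information(nil)                                           # => nil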
diff --git a/app/helpers/visibility_level_helper.rb b/app/helpers/visibility_level_helper.rb
index a36de5dc548..d62839cf037 100644
--- a/app/helpers/visibility_level_helper.rb
+++ b/app/helpers/visibility_level_helper.rb
@@ -31,7 +31,7 @@ module VisibilityLevelHelper
def project_visibility_level_description(level)
case level
when Gitlab::VisibilityLevel::PRIVATE
- _("Project access must be granted explicitly to each user.")
+ _("Project access must be granted explicitly to each user. If this project is part of a group, access will be granted to members of the group.")
when Gitlab::VisibilityLevel::INTERNAL
_("The project can be accessed by any logged in user.")
when Gitlab::VisibilityLevel::PUBLIC
diff --git a/app/mailers/emails/pipelines.rb b/app/mailers/emails/pipelines.rb
index 773b9fead3a..f2538d28a1a 100644
--- a/app/mailers/emails/pipelines.rb
+++ b/app/mailers/emails/pipelines.rb
@@ -10,6 +10,10 @@ module Emails
pipeline_mail(pipeline, recipients, 'failed')
end
+ def pipeline_fixed_email(pipeline, recipients)
+ pipeline_mail(pipeline, recipients, 'been fixed')
+ end
+
private
def pipeline_mail(pipeline, recipients, status)
diff --git a/app/mailers/previews/notify_preview.rb b/app/mailers/previews/notify_preview.rb
index 381a4f54d9e..114737eb232 100644
--- a/app/mailers/previews/notify_preview.rb
+++ b/app/mailers/previews/notify_preview.rb
@@ -145,6 +145,10 @@ class NotifyPreview < ActionMailer::Preview
Notify.pipeline_failed_email(pipeline, pipeline.user.try(:email))
end
+ def pipeline_fixed_email
+ Notify.pipeline_fixed_email(pipeline, pipeline.user.try(:email))
+ end
+
def autodevops_disabled_email
Notify.autodevops_disabled_email(pipeline, user.email).message
end
diff --git a/app/models/ability.rb b/app/models/ability.rb
index 671a92632d5..514e923c380 100644
--- a/app/models/ability.rb
+++ b/app/models/ability.rb
@@ -44,7 +44,7 @@ class Ability
# Returns an Array of MergeRequests that can be read by the given user.
#
- # merge_requests - MRs out of which to collect mr's readable by the user.
+ # merge_requests - MRs out of which to collect MRs readable by the user.
# user - The User for which to check the merge_requests
# filters - A hash of abilities and filters to apply if the user lacks this
# ability
diff --git a/app/models/appearance.rb b/app/models/appearance.rb
index 1104b676bc4..9da4dfd43b5 100644
--- a/app/models/appearance.rb
+++ b/app/models/appearance.rb
@@ -38,7 +38,7 @@ class Appearance < ApplicationRecord
def single_appearance_row
if self.class.any?
- errors.add(:single_appearance_row, 'Only 1 appearances row can exist')
+ errors.add(:base, _('Only 1 appearances row can exist'))
end
end
diff --git a/app/models/application_setting.rb b/app/models/application_setting.rb
index 2f8f6f6b420..481e1807a78 100644
--- a/app/models/application_setting.rb
+++ b/app/models/application_setting.rb
@@ -183,6 +183,7 @@ class ApplicationSetting < ApplicationRecord
validates :gitaly_timeout_default,
presence: true,
+ if: :gitaly_timeout_default_changed?,
numericality: {
only_integer: true,
greater_than_or_equal_to: 0,
@@ -191,6 +192,7 @@ class ApplicationSetting < ApplicationRecord
validates :gitaly_timeout_medium,
presence: true,
+ if: :gitaly_timeout_medium_changed?,
numericality: { only_integer: true, greater_than_or_equal_to: 0 }
validates :gitaly_timeout_medium,
numericality: { less_than_or_equal_to: :gitaly_timeout_default },
@@ -201,6 +203,7 @@ class ApplicationSetting < ApplicationRecord
validates :gitaly_timeout_fast,
presence: true,
+ if: :gitaly_timeout_fast_changed?,
numericality: { only_integer: true, greater_than_or_equal_to: 0 }
validates :gitaly_timeout_fast,
numericality: { less_than_or_equal_to: :gitaly_timeout_default },
@@ -254,6 +257,8 @@ class ApplicationSetting < ApplicationRecord
validates :snippet_size_limit, numericality: { only_integer: true, greater_than: 0 }
+ validate :email_restrictions_regex_valid?
+
SUPPORTED_KEY_TYPES.each do |type|
validates :"#{type}_key_restriction", presence: true, key_restriction: { type: type }
end
@@ -406,6 +411,14 @@ class ApplicationSetting < ApplicationRecord
recaptcha_enabled || login_recaptcha_protection_enabled
end
+ def email_restrictions_regex_valid?
+ return if email_restrictions.blank?
+
+ Gitlab::UntrustedRegexp.new(email_restrictions)
+ rescue RegexpError
+ errors.add(:email_restrictions, _('is not a valid regular expression'))
+ end
+
private
def parsed_grafana_url
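The new validation only checks that the restriction pattern compiles as an untrusted (RE2-backed) regexp; blank values are skipped. A rough sketch, ignoring the other validations on this model:

setting = ApplicationSetting.new(email_restrictions: '+invalid(')

setting.valid?
setting.errors[:email_restrictions]
# => ["is not a valid regular expression"]   (RegexpError rescued above)

setting.email_restrictions = 'gmail\.com|hotmail\.com'
# a compilable pattern adds no error for this attribute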
diff --git a/app/models/application_setting_implementation.rb b/app/models/application_setting_implementation.rb
index 98d8bb43b93..5ad382d8670 100644
--- a/app/models/application_setting_implementation.rb
+++ b/app/models/application_setting_implementation.rb
@@ -62,6 +62,8 @@ module ApplicationSettingImplementation
eks_account_id: nil,
eks_access_key_id: nil,
eks_secret_access_key: nil,
+ email_restrictions_enabled: false,
+ email_restrictions: nil,
first_day_of_week: 0,
gitaly_timeout_default: 55,
gitaly_timeout_fast: 10,
@@ -217,22 +219,15 @@ module ApplicationSettingImplementation
self.outbound_local_requests_whitelist.uniq!
end
+ # This method separates out the strings stored in the
+ # application_setting.outbound_local_requests_whitelist array into 2 arrays;
+ # an array of IPAddr objects (`[IPAddr.new('127.0.0.1')]`), and an array of
+ # domain strings (`['www.example.com']`).
def outbound_local_requests_whitelist_arrays
strong_memoize(:outbound_local_requests_whitelist_arrays) do
next [[], []] unless self.outbound_local_requests_whitelist
- ip_whitelist = []
- domain_whitelist = []
-
- self.outbound_local_requests_whitelist.each do |str|
- ip_obj = Gitlab::Utils.string_to_ip_object(str)
-
- if ip_obj
- ip_whitelist << ip_obj
- else
- domain_whitelist << str
- end
- end
+ ip_whitelist, domain_whitelist = separate_whitelists(self.outbound_local_requests_whitelist)
[ip_whitelist, domain_whitelist]
end
@@ -356,8 +351,43 @@ module ApplicationSettingImplementation
static_objects_external_storage_url.present?
end
+ # This will eventually be configurable
+ # https://gitlab.com/gitlab-org/gitlab/issues/208161
+ def web_ide_clientside_preview_bundler_url
+ 'https://sandbox-prod.gitlab-static.net'
+ end
+
private
+ def separate_whitelists(string_array)
+ string_array.reduce([[], []]) do |(ip_whitelist, domain_whitelist), string|
+ address, port = parse_addr_and_port(string)
+
+ ip_obj = Gitlab::Utils.string_to_ip_object(address)
+
+ if ip_obj
+ ip_whitelist << Gitlab::UrlBlockers::IpWhitelistEntry.new(ip_obj, port: port)
+ else
+ domain_whitelist << Gitlab::UrlBlockers::DomainWhitelistEntry.new(address, port: port)
+ end
+
+ [ip_whitelist, domain_whitelist]
+ end
+ end
+
+ def parse_addr_and_port(str)
+ case str
+ when /\A\[(?<address> .* )\]:(?<port> \d+ )\z/x # string like "[::1]:80"
+ address, port = $~[:address], $~[:port]
+ when /\A(?<address> [^:]+ ):(?<port> \d+ )\z/x # string like "127.0.0.1:80"
+ address, port = $~[:address], $~[:port]
+ else # string with no port number
+ address, port = str, nil
+ end
+
+ [address, port&.to_i]
+ end
+
def array_to_string(arr)
arr&.join("\n")
end
@@ -387,7 +417,7 @@ module ApplicationSettingImplementation
def terms_exist
return unless enforce_terms?
- errors.add(:terms, "You need to set terms to be enforced") unless terms.present?
+ errors.add(:base, _('You need to set terms to be enforced')) unless terms.present?
end
def expire_performance_bar_allowed_user_ids_cache
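`parse_addr_and_port` lets whitelist entries carry an optional port, including bracketed IPv6 literals. Illustrative results of the private helper:

parse_addr_and_port('[::1]:80')     # => ["::1", 80]
parse_addr_and_port('127.0.0.1:80') # => ["127.0.0.1", 80]
parse_addr_and_port('::1')          # => ["::1", nil]          (leading ':' defeats the host:port form)
parse_addr_and_port('example.com')  # => ["example.com", nil]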
diff --git a/app/models/ci/bridge.rb b/app/models/ci/bridge.rb
index 26997d17816..fa0619f35b0 100644
--- a/app/models/ci/bridge.rb
+++ b/app/models/ci/bridge.rb
@@ -7,9 +7,10 @@ module Ci
include Ci::Metadatable
include Importable
include AfterCommitQueue
- include HasRef
+ include Ci::HasRef
InvalidBridgeTypeError = Class.new(StandardError)
+ InvalidTransitionError = Class.new(StandardError)
belongs_to :project
belongs_to :trigger_request
@@ -62,6 +63,10 @@ module Ci
end
end
+ def has_downstream_pipeline?
+ sourced_pipelines.exists?
+ end
+
def downstream_pipeline_params
return child_params if triggers_child_pipeline?
return cross_project_params if downstream_project.present?
@@ -85,6 +90,10 @@ module Ci
end
end
+ def parent_pipeline
+ pipeline if triggers_child_pipeline?
+ end
+
def triggers_child_pipeline?
yaml_for_downstream.present?
end
@@ -167,7 +176,10 @@ module Ci
target_revision: {
ref: target_ref || downstream_project.default_branch
},
- execute_params: { ignore_skip_ci: true }
+ execute_params: {
+ ignore_skip_ci: true,
+ bridge: self
+ }
}
end
diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb
index e95e2c538c5..74a1985ca50 100644
--- a/app/models/ci/build.rb
+++ b/app/models/ci/build.rb
@@ -10,7 +10,7 @@ module Ci
include ObjectStorage::BackgroundMove
include Presentable
include Importable
- include HasRef
+ include Ci::HasRef
include IgnorableColumns
BuildArchivedError = Class.new(StandardError)
@@ -59,15 +59,11 @@ module Ci
##
# Since Gitlab 11.5, deployments records started being created right after
# `ci_builds` creation. We can look up a relevant `environment` through
- # `deployment` relation today. This is much more efficient than expanding
- # environment name with variables.
+ # `deployment` relation today.
# (See more https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/22380)
#
- # However, we have to still expand environment name if it's a stop action,
- # because `deployment` persists information for start action only.
- #
- # We will follow up this by persisting expanded name in build metadata or
- # persisting stop action in database.
+ # Since Gitlab 12.9, we started persisting the expanded environment name to
+ # avoid repeated variables expansion in `action: stop` builds as well.
def persisted_environment
return unless has_environment?
@@ -173,8 +169,10 @@ module Ci
scope :queued_before, ->(time) { where(arel_table[:queued_at].lt(time)) }
scope :order_id_desc, -> { order('ci_builds.id DESC') }
- PROJECT_ROUTE_AND_NAMESPACE_ROUTE = { project: [:project_feature, :route, { namespace: :route }] }.freeze
- scope :preload_project_and_pipeline_project, -> { preload(PROJECT_ROUTE_AND_NAMESPACE_ROUTE, pipeline: PROJECT_ROUTE_AND_NAMESPACE_ROUTE) }
+ scope :preload_project_and_pipeline_project, -> do
+ preload(Ci::Pipeline::PROJECT_ROUTE_AND_NAMESPACE_ROUTE,
+ pipeline: Ci::Pipeline::PROJECT_ROUTE_AND_NAMESPACE_ROUTE)
+ end
acts_as_taggable
@@ -463,7 +461,14 @@ module Ci
return unless has_environment?
strong_memoize(:expanded_environment_name) do
- ExpandVariables.expand(environment, -> { simple_variables })
+ # We're using a persisted expanded environment name in order to avoid
+ # variable expansion per request.
+ if Feature.enabled?(:ci_persisted_expanded_environment_name, project, default_enabled: true) &&
+ metadata&.expanded_environment_name.present?
+ metadata.expanded_environment_name
+ else
+ ExpandVariables.expand(environment, -> { simple_variables })
+ end
end
end
@@ -529,6 +534,7 @@ module Ci
.concat(persisted_variables)
.concat(scoped_variables)
.concat(job_variables)
+ .concat(environment_changed_page_variables)
.concat(persisted_environment_variables)
.to_runner_variables
end
@@ -567,6 +573,15 @@ module Ci
end
end
+ def environment_changed_page_variables
+ Gitlab::Ci::Variables::Collection.new.tap do |variables|
+ break variables unless environment_status
+
+ variables.append(key: 'CI_MERGE_REQUEST_CHANGED_PAGE_PATHS', value: environment_status.changed_paths.join(','))
+ variables.append(key: 'CI_MERGE_REQUEST_CHANGED_PAGE_URLS', value: environment_status.changed_urls.join(','))
+ end
+ end
+
def deploy_token_variables
Gitlab::Ci::Variables::Collection.new.tap do |variables|
break variables unless gitlab_deploy_token
@@ -901,6 +916,14 @@ module Ci
end
end
+ def collect_coverage_reports!(coverage_report)
+ each_report(Ci::JobArtifact::COVERAGE_REPORT_FILE_TYPES) do |file_type, blob|
+ Gitlab::Ci::Parsers.fabricate!(file_type).parse!(blob, coverage_report)
+ end
+
+ coverage_report
+ end
+
def report_artifacts
job_artifacts.with_reports
end
@@ -968,6 +991,14 @@ module Ci
options&.dig(:environment, :url) || persisted_environment&.external_url
end
+ def environment_status
+ strong_memoize(:environment_status) do
+ if has_environment? && merge_request
+ EnvironmentStatus.new(project, persisted_environment, merge_request, pipeline.sha)
+ end
+ end
+ end
+
# The format of the retry option changed in GitLab 11.5: Before it was
# integer only, after it is a hash. New builds are created with the new
# format, but builds created before GitLab 11.5 and saved in database still
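The two merge-request page variables are plain comma-joined lists taken from the environment status. A sketch of what ends up in the runner payload for a made-up review app (the exact hash shape returned by `to_runner_variables` is approximate):

changed_paths = ['index.html', 'docs/readme.html']
changed_urls  = ['https://review-app.example.com/index.html',
                 'https://review-app.example.com/docs/readme.html']

variables = Gitlab::Ci::Variables::Collection.new
variables.append(key: 'CI_MERGE_REQUEST_CHANGED_PAGE_PATHS', value: changed_paths.join(','))
variables.append(key: 'CI_MERGE_REQUEST_CHANGED_PAGE_URLS',  value: changed_urls.join(','))

variables.to_runner_variables.first
# => roughly { key: 'CI_MERGE_REQUEST_CHANGED_PAGE_PATHS',
#              value: 'index.html,docs/readme.html', public: true, masked: false }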
diff --git a/app/models/ci/job_artifact.rb b/app/models/ci/job_artifact.rb
index 564853fc8a1..ae57da9c546 100644
--- a/app/models/ci/job_artifact.rb
+++ b/app/models/ci/job_artifact.rb
@@ -11,6 +11,7 @@ module Ci
NotSupportedAdapterError = Class.new(StandardError)
TEST_REPORT_FILE_TYPES = %w[junit].freeze
+ COVERAGE_REPORT_FILE_TYPES = %w[cobertura].freeze
NON_ERASABLE_FILE_TYPES = %w[trace].freeze
DEFAULT_FILE_NAMES = {
archive: nil,
@@ -28,7 +29,9 @@ module Ci
license_scanning: 'gl-license-scanning-report.json',
performance: 'performance.json',
metrics: 'metrics.txt',
- lsif: 'lsif.json'
+ lsif: 'lsif.json',
+ dotenv: '.env',
+ cobertura: 'cobertura-coverage.xml'
}.freeze
INTERNAL_TYPES = {
@@ -42,6 +45,9 @@ module Ci
metrics: :gzip,
metrics_referee: :gzip,
network_referee: :gzip,
+ lsif: :gzip,
+ dotenv: :gzip,
+ cobertura: :gzip,
# All these file formats use `raw` as we need to store them uncompressed
# for Frontend to fetch the files and do analysis
@@ -53,8 +59,7 @@ module Ci
dast: :raw,
license_management: :raw,
license_scanning: :raw,
- performance: :raw,
- lsif: :raw
+ performance: :raw
}.freeze
TYPE_AND_FORMAT_PAIRS = INTERNAL_TYPES.merge(REPORT_TYPES).freeze
@@ -74,7 +79,7 @@ module Ci
scope :with_files_stored_locally, -> { where(file_store: [nil, ::JobArtifactUploader::Store::LOCAL]) }
scope :with_files_stored_remotely, -> { where(file_store: ::JobArtifactUploader::Store::REMOTE) }
- scope :for_sha, ->(sha) { joins(job: :pipeline).where(ci_pipelines: { sha: sha }) }
+ scope :for_sha, ->(sha, project_id) { joins(job: :pipeline).where(ci_pipelines: { sha: sha, project_id: project_id }) }
scope :with_file_types, -> (file_types) do
types = self.file_types.select { |file_type| file_types.include?(file_type) }.values
@@ -90,6 +95,10 @@ module Ci
with_file_types(TEST_REPORT_FILE_TYPES)
end
+ scope :coverage_reports, -> do
+ with_file_types(COVERAGE_REPORT_FILE_TYPES)
+ end
+
scope :erasable, -> do
types = self.file_types.reject { |file_type| NON_ERASABLE_FILE_TYPES.include?(file_type) }.values
@@ -118,7 +127,9 @@ module Ci
metrics: 12, ## EE-specific
metrics_referee: 13, ## runner referees
network_referee: 14, ## runner referees
- lsif: 15 # LSIF data for code navigation
+ lsif: 15, # LSIF data for code navigation
+ dotenv: 16,
+ cobertura: 17
}
enum file_format: {
@@ -148,7 +159,7 @@ module Ci
def valid_file_format?
unless TYPE_AND_FORMAT_PAIRS[self.file_type&.to_sym] == self.file_format&.to_sym
- errors.add(:file_format, 'Invalid file format with specified file type')
+ errors.add(:base, _('Invalid file format with specified file type'))
end
end
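`for_sha` now scopes by project as well, so a sha can no longer pull in artifacts from an unrelated project. Combined with the new scope, coverage artifacts for a commit could be located roughly like this (illustrative, `pipeline` and `project` assumed in scope):

Ci::JobArtifact.for_sha(pipeline.sha, project.id).coverage_reports
# joins job -> pipeline, filters on { sha: pipeline.sha, project_id: project.id },
# then keeps only the cobertura file type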
diff --git a/app/models/ci/job_variable.rb b/app/models/ci/job_variable.rb
index 862a0bc1299..f2968c037c7 100644
--- a/app/models/ci/job_variable.rb
+++ b/app/models/ci/job_variable.rb
@@ -4,11 +4,14 @@ module Ci
class JobVariable < ApplicationRecord
extend Gitlab::Ci::Model
include NewHasVariable
+ include BulkInsertSafe
belongs_to :job, class_name: "Ci::Build", foreign_key: :job_id
alias_attribute :secret_value, :value
- validates :key, uniqueness: { scope: :job_id }
+ validates :key, uniqueness: { scope: :job_id }, unless: :dotenv_source?
+
+ enum source: { internal: 0, dotenv: 1 }, _suffix: true
end
end
diff --git a/app/models/ci/pipeline.rb b/app/models/ci/pipeline.rb
index 3209e077a08..61b28a3e712 100644
--- a/app/models/ci/pipeline.rb
+++ b/app/models/ci/pipeline.rb
@@ -11,11 +11,15 @@ module Ci
include Gitlab::Utils::StrongMemoize
include AtomicInternalId
include EnumWithNil
- include HasRef
+ include Ci::HasRef
include ShaAttribute
include FromUnion
include UpdatedAtFilterable
+ PROJECT_ROUTE_AND_NAMESPACE_ROUTE = {
+ project: [:project_feature, :route, { namespace: :route }]
+ }.freeze
+
BridgeStatusError = Class.new(StandardError)
sha_attribute :source_sha
@@ -59,6 +63,14 @@ module Ci
has_many :sourced_pipelines, class_name: 'Ci::Sources::Pipeline', foreign_key: :source_pipeline_id
has_one :source_pipeline, class_name: 'Ci::Sources::Pipeline', inverse_of: :pipeline
+
+ has_one :ref_status, ->(pipeline) {
+ # We use .read_attribute to save 1 extra unneeded query to load the :project.
+ unscope(:where)
+ .where(project_id: pipeline.read_attribute(:project_id), ref: pipeline.ref, tag: pipeline.tag)
+ # Sadly :inverse_of is not supported (yet) by Rails for composite PKs.
+ }, class_name: 'Ci::Ref', inverse_of: :pipelines
+
has_one :chat_data, class_name: 'Ci::PipelineChatData'
has_many :triggered_pipelines, through: :sourced_pipelines, source: :pipeline
@@ -215,6 +227,7 @@ module Ci
end
after_transition created: :pending do |pipeline|
+ next if Feature.enabled?(:ci_drop_bridge_on_downstream_errors, pipeline.project, default_enabled: true)
next unless pipeline.bridge_triggered?
next if pipeline.bridge_waiting?
@@ -223,7 +236,11 @@ module Ci
after_transition any => [:success, :failed] do |pipeline|
pipeline.run_after_commit do
- PipelineNotificationWorker.perform_async(pipeline.id)
+ if Feature.enabled?(:ci_pipeline_fixed_notifications)
+ PipelineUpdateCiRefStatusWorker.perform_async(pipeline.id)
+ else
+ PipelineNotificationWorker.perform_async(pipeline.id)
+ end
end
end
@@ -595,7 +612,7 @@ module Ci
# Manually set the notes for a Ci::Pipeline
# There is no ActiveRecord relation between Ci::Pipeline and notes
# as they are related to a commit sha. This method helps importing
- # them using the +Gitlab::ImportExport::ProjectRelationFactory+ class.
+ # them using the +Gitlab::ImportExport::Project::RelationFactory+ class.
def notes=(notes)
notes.each do |note|
note[:id] = nil
@@ -744,6 +761,8 @@ module Ci
raise BridgeStatusError unless source_bridge.active?
source_bridge.success!
+ rescue => e
+ Gitlab::ErrorTracking.track_exception(e, pipeline_id: id)
end
def bridge_triggered?
@@ -762,12 +781,20 @@ module Ci
child_pipelines.exists?
end
+ def created_successfully?
+ persisted? && failure_reason.blank?
+ end
+
def detailed_status(current_user)
Gitlab::Ci::Status::Pipeline::Factory
.new(self, current_user)
.fabricate!
end
+ def find_job_with_archive_artifacts(name)
+ builds.latest.with_artifacts_archive.find_by_name(name)
+ end
+
def latest_builds_with_artifacts
# We purposely cast the builds to an Array here. Because we always use the
# rows if there are more than 0 this prevents us from having to run two
@@ -793,6 +820,14 @@ module Ci
end
end
+ def coverage_reports
+ Gitlab::Ci::Reports::CoverageReports.new.tap do |coverage_reports|
+ builds.latest.with_reports(Ci::JobArtifact.coverage_reports).each do |build|
+ build.collect_coverage_reports!(coverage_reports)
+ end
+ end
+ end
+
def has_exposed_artifacts?
complete? && builds.latest.with_exposed_artifacts.exists?
end
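Two small lookup helpers round this out; roughly how callers are expected to use them (the job name is made up, and the shape of the report object beyond its constructor is not shown in this diff):

pipeline.find_job_with_archive_artifacts('build-assets')
# => the latest 'build-assets' build with an archive artifact, or nil

pipeline.coverage_reports
# => a Gitlab::Ci::Reports::CoverageReports populated from every latest build
#    that uploaded a cobertura report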
diff --git a/app/models/ci/processable.rb b/app/models/ci/processable.rb
index 6c080582cae..55518f32316 100644
--- a/app/models/ci/processable.rb
+++ b/app/models/ci/processable.rb
@@ -90,6 +90,12 @@ module Ci
end
end
+ def needs_attributes
+ strong_memoize(:needs_attributes) do
+ needs.map { |need| need.attributes.except('id', 'build_id') }
+ end
+ end
+
private
def validate_scheduling_type?
diff --git a/app/models/ci/ref.rb b/app/models/ci/ref.rb
new file mode 100644
index 00000000000..a0782bc0444
--- /dev/null
+++ b/app/models/ci/ref.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module Ci
+ class Ref < ApplicationRecord
+ extend Gitlab::Ci::Model
+
+ STATUSES = %w[success failed fixed].freeze
+
+ belongs_to :project
+ belongs_to :last_updated_by_pipeline, foreign_key: :last_updated_by_pipeline_id, class_name: 'Ci::Pipeline'
+ # ActiveRecord doesn't support composite FKs for this reason we have to do the 'unscope(:where)'
+ # hack.
+ has_many :pipelines, ->(ref) {
+ # We use .read_attribute to save 1 extra unneeded query to load the :project.
+ unscope(:where)
+ .where(ref: ref.ref, project_id: ref.read_attribute(:project_id), tag: ref.tag)
+ # Sadly :inverse_of is not supported (yet) by Rails for composite PKs.
+ }, inverse_of: :ref_status
+
+ validates :status, inclusion: { in: STATUSES }
+ validates :last_updated_by_pipeline, presence: true
+ end
+end
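Since (project_id, ref, tag) behaves like a composite key that Rails associations cannot express directly, both sides drop the inferred condition with `unscope(:where)` and rebuild it by hand. The resulting query is roughly:

Ci::Ref.first.pipelines.to_sql
# approximately:
#   SELECT "ci_pipelines".* FROM "ci_pipelines"
#   WHERE "ci_pipelines"."ref" = '<ref>'
#     AND "ci_pipelines"."project_id" = <project_id>
#     AND "ci_pipelines"."tag" = <tag>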
diff --git a/app/models/clusters/applications/cert_manager.rb b/app/models/clusters/applications/cert_manager.rb
index 7ba04d1a2de..1efa44c39c5 100644
--- a/app/models/clusters/applications/cert_manager.rb
+++ b/app/models/clusters/applications/cert_manager.rb
@@ -3,8 +3,8 @@
module Clusters
module Applications
class CertManager < ApplicationRecord
- VERSION = 'v0.9.1'
- CRD_VERSION = '0.9'
+ VERSION = 'v0.10.1'
+ CRD_VERSION = '0.10'
self.table_name = 'clusters_applications_cert_managers'
diff --git a/app/models/clusters/applications/crossplane.rb b/app/models/clusters/applications/crossplane.rb
index 36246b26066..420e56c1742 100644
--- a/app/models/clusters/applications/crossplane.rb
+++ b/app/models/clusters/applications/crossplane.rb
@@ -49,8 +49,7 @@ module Clusters
{
"clusterStacks" => {
self.stack => {
- "deploy" => true,
- "version" => "alpha"
+ "deploy" => true
}
}
}
diff --git a/app/models/clusters/applications/elastic_stack.rb b/app/models/clusters/applications/elastic_stack.rb
index ce42bc65579..afdc1c91c69 100644
--- a/app/models/clusters/applications/elastic_stack.rb
+++ b/app/models/clusters/applications/elastic_stack.rb
@@ -3,7 +3,7 @@
module Clusters
module Applications
class ElasticStack < ApplicationRecord
- VERSION = '1.8.0'
+ VERSION = '1.9.0'
ELASTICSEARCH_PORT = 9200
@@ -15,9 +15,6 @@ module Clusters
include ::Clusters::Concerns::ApplicationData
include ::Gitlab::Utils::StrongMemoize
- include IgnorableColumns
- ignore_column :kibana_hostname, remove_with: '12.9', remove_after: '2020-02-22'
-
default_value_for :version, VERSION
def chart
diff --git a/app/models/clusters/applications/ingress.rb b/app/models/clusters/applications/ingress.rb
index bdd7ad90fba..64659208315 100644
--- a/app/models/clusters/applications/ingress.rb
+++ b/app/models/clusters/applications/ingress.rb
@@ -3,7 +3,8 @@
module Clusters
module Applications
class Ingress < ApplicationRecord
- VERSION = '1.29.3'
+ VERSION = '1.29.7'
+ INGRESS_CONTAINER_NAME = 'nginx-ingress-controller'
MODSECURITY_LOG_CONTAINER_NAME = 'modsecurity-log'
self.table_name = 'clusters_applications_ingress'
@@ -15,7 +16,7 @@ module Clusters
include AfterCommitQueue
default_value_for :ingress_type, :nginx
- default_value_for :modsecurity_enabled, false
+ default_value_for :modsecurity_enabled, true
default_value_for :version, VERSION
enum ingress_type: {
@@ -69,7 +70,7 @@ module Clusters
end
def ingress_service
- cluster.kubeclient.get_service('ingress-nginx-ingress-controller', Gitlab::Kubernetes::Helm::NAMESPACE)
+ cluster.kubeclient.get_service("ingress-#{INGRESS_CONTAINER_NAME}", Gitlab::Kubernetes::Helm::NAMESPACE)
end
private
@@ -123,7 +124,7 @@ module Clusters
{
"name" => "modsecurity-template-volume",
"configMap" => {
- "name" => "ingress-nginx-ingress-controller",
+ "name" => "ingress-#{INGRESS_CONTAINER_NAME}",
"items" => [
{
"key" => "modsecurity.conf",
diff --git a/app/models/clusters/applications/knative.rb b/app/models/clusters/applications/knative.rb
index eebcbcba2d3..1f90318f845 100644
--- a/app/models/clusters/applications/knative.rb
+++ b/app/models/clusters/applications/knative.rb
@@ -33,6 +33,12 @@ module Clusters
FETCH_IP_ADDRESS_DELAY, application.name, application.id)
end
end
+
+ after_transition any => [:installed, :updated] do |application|
+ application.run_after_commit do
+ ClusterConfigureIstioWorker.perform_async(application.cluster_id)
+ end
+ end
end
default_value_for :version, VERSION
@@ -41,6 +47,8 @@ module Clusters
scope :for_cluster, -> (cluster) { where(cluster: cluster) }
+ has_one :pages_domain, through: :serverless_domain_cluster
+
def chart
'knative/knative'
end
@@ -49,6 +57,14 @@ module Clusters
{ "domain" => hostname }.to_yaml
end
+ def available_domains
+ PagesDomain.instance_serverless
+ end
+
+ def find_available_domain(pages_domain_id)
+ available_domains.find_by(id: pages_domain_id)
+ end
+
def allowed_to_uninstall?
!pre_installed?
end
diff --git a/app/models/clusters/applications/prometheus.rb b/app/models/clusters/applications/prometheus.rb
index adce55cb61b..8297f653ea7 100644
--- a/app/models/clusters/applications/prometheus.rb
+++ b/app/models/clusters/applications/prometheus.rb
@@ -17,6 +17,11 @@ module Clusters
default_value_for :version, VERSION
+ attr_encrypted :alert_manager_token,
+ mode: :per_attribute_iv,
+ key: Settings.attr_encrypted_db_key_base_truncated,
+ algorithm: 'aes-256-gcm'
+
after_destroy do
run_after_commit do
disable_prometheus_integration
@@ -103,8 +108,18 @@ module Clusters
false
end
+ def generate_alert_manager_token!
+ unless alert_manager_token.present?
+ update!(alert_manager_token: generate_token)
+ end
+ end
+
private
+ def generate_token
+ SecureRandom.hex
+ end
+
def disable_prometheus_integration
::Clusters::Applications::DeactivateServiceWorker
.perform_async(cluster_id, ::PrometheusService.to_param) # rubocop:disable CodeReuse/ServiceClass
diff --git a/app/models/clusters/applications/runner.rb b/app/models/clusters/applications/runner.rb
index 6a9cd77d356..4886f2debb1 100644
--- a/app/models/clusters/applications/runner.rb
+++ b/app/models/clusters/applications/runner.rb
@@ -3,7 +3,7 @@
module Clusters
module Applications
class Runner < ApplicationRecord
- VERSION = '0.13.1'
+ VERSION = '0.14.0'
self.table_name = 'clusters_applications_runners'
diff --git a/app/models/clusters/cluster.rb b/app/models/clusters/cluster.rb
index 7e76d324bdc..78efe2b4337 100644
--- a/app/models/clusters/cluster.rb
+++ b/app/models/clusters/cluster.rb
@@ -11,15 +11,15 @@ module Clusters
self.table_name = 'clusters'
APPLICATIONS = {
- Applications::Helm.application_name => Applications::Helm,
- Applications::Ingress.application_name => Applications::Ingress,
- Applications::CertManager.application_name => Applications::CertManager,
- Applications::Crossplane.application_name => Applications::Crossplane,
- Applications::Prometheus.application_name => Applications::Prometheus,
- Applications::Runner.application_name => Applications::Runner,
- Applications::Jupyter.application_name => Applications::Jupyter,
- Applications::Knative.application_name => Applications::Knative,
- Applications::ElasticStack.application_name => Applications::ElasticStack
+ Clusters::Applications::Helm.application_name => Clusters::Applications::Helm,
+ Clusters::Applications::Ingress.application_name => Clusters::Applications::Ingress,
+ Clusters::Applications::CertManager.application_name => Clusters::Applications::CertManager,
+ Clusters::Applications::Crossplane.application_name => Clusters::Applications::Crossplane,
+ Clusters::Applications::Prometheus.application_name => Clusters::Applications::Prometheus,
+ Clusters::Applications::Runner.application_name => Clusters::Applications::Runner,
+ Clusters::Applications::Jupyter.application_name => Clusters::Applications::Jupyter,
+ Clusters::Applications::Knative.application_name => Clusters::Applications::Knative,
+ Clusters::Applications::ElasticStack.application_name => Clusters::Applications::ElasticStack
}.freeze
DEFAULT_ENVIRONMENT = '*'
KUBE_INGRESS_BASE_DOMAIN = 'KUBE_INGRESS_BASE_DOMAIN'
@@ -249,9 +249,13 @@ module Clusters
platform_kubernetes.kubeclient if kubernetes?
end
- def kubernetes_namespace_for(environment)
+ def kubernetes_namespace_for(environment, deployable: environment.last_deployable)
+ if deployable && environment.project_id != deployable.project_id
+ raise ArgumentError, 'environment.project_id must match deployable.project_id'
+ end
+
managed_namespace(environment) ||
- ci_configured_namespace(environment) ||
+ ci_configured_namespace(deployable) ||
default_namespace(environment)
end
@@ -306,7 +310,7 @@ module Clusters
.where.not(id: id)
if duplicate_management_clusters.any?
- errors.add(:environment_scope, "cannot add duplicated environment scope")
+ errors.add(:environment_scope, 'cannot add duplicated environment scope')
end
end
@@ -318,8 +322,11 @@ module Clusters
).execute&.namespace
end
- def ci_configured_namespace(environment)
- environment.last_deployable&.expanded_kubernetes_namespace
+ def ci_configured_namespace(deployable)
+ # YAML configuration of namespaces not supported for managed clusters
+ return if managed?
+
+ deployable&.expanded_kubernetes_namespace
end
def default_namespace(environment)
@@ -380,7 +387,7 @@ module Clusters
def restrict_modification
if provider&.on_creation?
- errors.add(:base, "cannot modify during creation")
+ errors.add(:base, _('Cannot modify provider during creation'))
return false
end
diff --git a/app/models/clusters/concerns/application_core.rb b/app/models/clusters/concerns/application_core.rb
index b94f2b15846..297d00aa281 100644
--- a/app/models/clusters/concerns/application_core.rb
+++ b/app/models/clusters/concerns/application_core.rb
@@ -15,7 +15,7 @@ module Clusters
def set_initial_status
return unless not_installable?
- self.status = status_states[:installable] if cluster&.application_helm_available? || Feature.enabled?(:managed_apps_local_tiller)
+ self.status = status_states[:installable] if cluster&.application_helm_available? || ::Gitlab::Kubernetes::Helm.local_tiller_enabled?
end
def can_uninstall?
diff --git a/app/models/clusters/concerns/application_data.rb b/app/models/clusters/concerns/application_data.rb
index 3479fea415e..77c606553d2 100644
--- a/app/models/clusters/concerns/application_data.rb
+++ b/app/models/clusters/concerns/application_data.rb
@@ -23,7 +23,7 @@ module Clusters
@files ||= begin
files = { 'values.yaml': values }
- files.merge!(certificate_files) if cluster.application_helm.has_ssl?
+ files.merge!(certificate_files) if use_tiller_ssl?
files
end
@@ -31,6 +31,12 @@ module Clusters
private
+ def use_tiller_ssl?
+ return false if ::Gitlab::Kubernetes::Helm.local_tiller_enabled?
+
+ cluster.application_helm.has_ssl?
+ end
+
def certificate_files
{
'ca.pem': ca_cert,
diff --git a/app/models/clusters/concerns/application_status.rb b/app/models/clusters/concerns/application_status.rb
index b63a596dfee..14237439a8d 100644
--- a/app/models/clusters/concerns/application_status.rb
+++ b/app/models/clusters/concerns/application_status.rb
@@ -92,7 +92,10 @@ module Clusters
# When installing any application we are also performing an update
# of tiller (see Gitlab::Kubernetes::Helm::ClientCommand) so
# therefore we need to reflect that in the database.
- application.cluster.application_helm.update!(version: Gitlab::Kubernetes::Helm::HELM_VERSION)
+
+ unless ::Gitlab::Kubernetes::Helm.local_tiller_enabled?
+ application.cluster.application_helm.update!(version: Gitlab::Kubernetes::Helm::HELM_VERSION)
+ end
end
after_transition any => [:uninstalling], :use_transactions => false do |application, _|
diff --git a/app/models/commit.rb b/app/models/commit.rb
index d8a3bbfeeb2..681fe727456 100644
--- a/app/models/commit.rb
+++ b/app/models/commit.rb
@@ -226,6 +226,7 @@ class Commit
data = {
id: id,
message: safe_message,
+ title: title,
timestamp: committed_date.xmlschema,
url: Gitlab::UrlBuilder.build(self),
author: {
@@ -241,14 +242,6 @@ class Commit
data
end
- # Discover issues should be closed when this commit is pushed to a project's
- # default branch.
- def closes_issues(current_user = self.committer)
- return unless repository.repo_type.project?
-
- Gitlab::ClosingIssueExtractor.new(project, current_user).closed_by_message(safe_message)
- end
-
def lazy_author
BatchLoader.for(author_email.downcase).batch do |emails, loader|
users = User.by_any_email(emails, confirmed: true).includes(:emails)
@@ -298,14 +291,6 @@ class Commit
notes.includes(:author, :award_emoji)
end
- def merge_requests
- strong_memoize(:merge_requests) do
- next MergeRequest.none unless repository.repo_type.project? && project
-
- project.merge_requests.by_commit_sha(sha)
- end
- end
-
def method_missing(method, *args, &block)
@raw.__send__(method, *args, &block) # rubocop:disable GitlabSecurity/PublicSend
end
@@ -414,7 +399,7 @@ class Commit
end
def has_been_reverted?(current_user, notes_association = nil)
- ext = all_references(current_user)
+ ext = Gitlab::ReferenceExtractor.new(project, current_user)
notes_association ||= notes_with_associations
notes_association.system.each do |note|
diff --git a/app/models/concerns/blob_language_from_git_attributes.rb b/app/models/concerns/blob_language_from_git_attributes.rb
index 56e1276a220..01878e067b9 100644
--- a/app/models/concerns/blob_language_from_git_attributes.rb
+++ b/app/models/concerns/blob_language_from_git_attributes.rb
@@ -1,13 +1,11 @@
# frozen_string_literal: true
-# Applicable for blob classes with project attribute
module BlobLanguageFromGitAttributes
extend ActiveSupport::Concern
def language_from_gitattributes
- return unless project
+ return unless repository&.exists?
- repository = project.repository
repository.gitattribute(path, 'gitlab-language')
end
end
diff --git a/app/models/concerns/bulk_insert_safe.rb b/app/models/concerns/bulk_insert_safe.rb
index 6d75906b21f..a61db2dc148 100644
--- a/app/models/concerns/bulk_insert_safe.rb
+++ b/app/models/concerns/bulk_insert_safe.rb
@@ -1,5 +1,37 @@
# frozen_string_literal: true
+##
+# A mixin for ActiveRecord models that enables callers to insert instances of the
+# target class into the database en-bloc via the [bulk_insert] method.
+#
+# Upon inclusion in the target class, the mixin will perform a number of checks to
+# ensure that the target is eligible for bulk insertions. For instance, it must not
+# use ActiveRecord callbacks that fire between [save]s, since these would not run
+# properly when instances are inserted in bulk.
+#
+# The mixin uses ActiveRecord 6's [InsertAll] type internally for bulk insertions.
+# Unlike [InsertAll], however, it requires you to pass instances of the target type
+# rather than row hashes, since it will run validations prior to insertion.
+#
+# @example
+#
+# class MyRecord < ApplicationRecord
+# include BulkInsertSafe # must be included _last_ i.e. after any other concerns
+# end
+#
+# # simple
+# MyRecord.bulk_insert!(items)
+#
+# # with custom batch size
+# MyRecord.bulk_insert!(items, batch_size: 100)
+#
+# # without validations
+# MyRecord.bulk_insert!(items, validate: false)
+#
+# # with attribute hash modification
+# MyRecord.bulk_insert!(items) { |item_attrs| item_attrs['col'] = 42 }
+#
+#
module BulkInsertSafe
extend ActiveSupport::Concern
@@ -13,7 +45,10 @@ module BulkInsertSafe
:destroy
].freeze
+ DEFAULT_BATCH_SIZE = 500
+
MethodNotAllowedError = Class.new(StandardError)
+ PrimaryKeySetError = Class.new(StandardError)
class_methods do
def set_callback(name, *args)
@@ -26,8 +61,108 @@ module BulkInsertSafe
super
end
+ # Inserts the given ActiveRecord [items] to the table mapped to this class.
+ # Items will be inserted in batches of a given size, where insertion semantics are
+ # "atomic across all batches".
+ #
+ # @param [Boolean] validate Whether validations should run on [items]
+ # @param [Integer] batch_size How many items should at most be inserted at once
+ # @param [Boolean] skip_duplicates Marks duplicates as allowed, and skips inserting them
+ # @param [Proc] handle_attributes Block that will receive each item attribute hash
+ # prior to insertion for further processing
+ #
+ # Note that this method will throw on the following occasions:
+ # - [PrimaryKeySetError] when primary keys are set on entities prior to insertion
+ # - [ActiveRecord::RecordInvalid] on entity validation failures
+ # - [ActiveRecord::RecordNotUnique] on duplicate key errors
+ #
+ # @return true if operation succeeded, throws otherwise.
+ #
+ def bulk_insert!(items, validate: true, skip_duplicates: false, batch_size: DEFAULT_BATCH_SIZE, &handle_attributes)
+ _bulk_insert_all!(items,
+ validate: validate,
+ on_duplicate: skip_duplicates ? :skip : :raise,
+ unique_by: nil,
+ batch_size: batch_size,
+ &handle_attributes)
+ end
+
+ # Upserts the given ActiveRecord [items] to the table mapped to this class.
+ # Items will be inserted or updated in batches of a given size,
+ # where insertion semantics are "atomic across all batches".
+ #
+ # @param [Boolean] validate Whether validations should run on [items]
+ # @param [Integer] batch_size How many items should at most be inserted at once
+ # @param [Symbol/Array] unique_by Defines index or columns to use to consider item duplicate
+ # @param [Proc] handle_attributes Block that will receive each item attribute hash
+ # prior to insertion for further processing
+ #
+ # Unique indexes can be identified by columns or name:
+ # - unique_by: :isbn
+ # - unique_by: %i[ author_id name ]
+ # - unique_by: :index_books_on_isbn
+ #
+ # Note that this method will throw on the following occasions:
+ # - [PrimaryKeySetError] when primary keys are set on entities prior to insertion
+ # - [ActiveRecord::RecordInvalid] on entity validation failures
+ # - [ActiveRecord::RecordNotUnique] on duplicate key errors
+ #
+ # @return true if operation succeeded, throws otherwise.
+ #
+ def bulk_upsert!(items, unique_by:, validate: true, batch_size: DEFAULT_BATCH_SIZE, &handle_attributes)
+ _bulk_insert_all!(items,
+ validate: validate,
+ on_duplicate: :update,
+ unique_by: unique_by,
+ batch_size: batch_size,
+ &handle_attributes)
+ end
+
private
+ def _bulk_insert_all!(items, on_duplicate:, unique_by:, validate:, batch_size:, &handle_attributes)
+ return true if items.empty?
+
+ transaction do
+ items.each_slice(batch_size) do |item_batch|
+ attributes = _bulk_insert_item_attributes(
+ item_batch, validate, &handle_attributes)
+
+ ActiveRecord::InsertAll
+ .new(self, attributes, on_duplicate: on_duplicate, unique_by: unique_by)
+ .execute
+ end
+ end
+
+ true
+ end
+
+ def _bulk_insert_item_attributes(items, validate_items)
+ items.map do |item|
+ item.validate! if validate_items
+
+ attributes = {}
+ column_names.each do |name|
+ value = item.read_attribute(name)
+ value = item.type_for_attribute(name).serialize(value) # rubocop:disable Cop/ActiveRecordSerialize
+ attributes[name] = value
+ end
+
+ _bulk_insert_reject_primary_key!(attributes, item.class.primary_key)
+
+ yield attributes if block_given?
+
+ attributes
+ end
+ end
+
+ def _bulk_insert_reject_primary_key!(attributes, primary_key)
+ if existing_pk = attributes.delete(primary_key)
+ raise PrimaryKeySetError, "Primary key set: #{primary_key}:#{existing_pk}\n" \
+ "Bulk-inserts are only supported for rows that don't already have PK set"
+ end
+ end
+
def _bulk_insert_callback_allowed?(name, args)
_bulk_insert_whitelisted?(name) || _bulk_insert_saved_from_belongs_to?(name, args)
end
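`bulk_upsert!` mirrors the `bulk_insert!` examples in the class docs, with `unique_by:` required to resolve conflicts. A sketch, assuming the hypothetical `MyRecord` from those docs has a unique index on `isbn`:

MyRecord.bulk_upsert!(items, unique_by: :isbn)

# with per-row attribute tweaks and a smaller batch size
MyRecord.bulk_upsert!(items, unique_by: %i[author_id name], batch_size: 100) do |attrs|
  attrs['updated_at'] = Time.current
end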
diff --git a/app/models/concerns/bulk_insertable_associations.rb b/app/models/concerns/bulk_insertable_associations.rb
new file mode 100644
index 00000000000..5ee2e8356bd
--- /dev/null
+++ b/app/models/concerns/bulk_insertable_associations.rb
@@ -0,0 +1,115 @@
+# frozen_string_literal: true
+
+##
+# ActiveRecord model classes can mix in this concern if they own associations
+# who declare themselves to be eligible for bulk-insertion via [BulkInsertSafe].
+# This allows the caller to write items from [has_many] associations en-bloc
+# when the owner is first created.
+#
+# This implementation currently has a few limitations:
+# - only works for [has_many] relations
+# - does not support the [:through] option
+# - it cannot bulk-insert items that had previously been saved, nor can the
+# owner of the association have previously been saved; if you attempt to
+# so, an error will be raised
+#
+# @example
+#
+# class MergeRequestDiff < ApplicationRecord
+# include BulkInsertableAssociations
+#
+# # target association class must `include BulkInsertSafe`
+# has_many :merge_request_diff_commits
+# end
+#
+# diff = MergeRequestDiff.new(...)
+# diff.diff_commits << MergeRequestDiffCommit.build(...)
+# BulkInsertableAssociations.with_bulk_insert do
+# diff.save! # this will also write all `diff_commits` in bulk
+# end
+#
+# Note that just like [BulkInsertSafe.bulk_insert!], validations will run for
+# all items that are scheduled for bulk-insertions.
+#
+module BulkInsertableAssociations
+ extend ActiveSupport::Concern
+
+ class << self
+ def bulk_inserts_enabled?
+ Thread.current['bulk_inserts_enabled']
+ end
+
+ # All associations that are [BulkInsertSafe] and that as a result of calls to
+ # [save] or [save!] would be written to the database, will be inserted using
+ # [bulk_insert!] instead.
+ #
+ # Note that this will only work for entities that have not been persisted yet.
+ #
+ # @param [Boolean] enabled When [true], bulk-inserts will be attempted within
+ # the given block. If [false], bulk-inserts will be
+ # disabled. This behavior can be nested.
+ def with_bulk_insert(enabled: true)
+ previous = bulk_inserts_enabled?
+ Thread.current['bulk_inserts_enabled'] = enabled
+ yield
+ ensure
+ Thread.current['bulk_inserts_enabled'] = previous
+ end
+ end
+
+ def bulk_insert_associations!
+ self.class.reflections.each do |_, reflection|
+ _bulk_insert_association!(reflection)
+ end
+ end
+
+ private
+
+ def _bulk_insert_association!(reflection)
+ return unless _association_supports_bulk_inserts?(reflection)
+
+ association = self.association(reflection.name)
+ association_items = association.target
+ return if association_items.empty?
+
+ if association_items.any?(&:persisted?)
+ raise 'Bulk-insertion of already persisted association items is not currently supported'
+ end
+
+ _bulk_insert_configure_foreign_key(reflection, association_items)
+ association.klass.bulk_insert!(association_items, validate: false)
+
+ # reset relation:
+ # 1. we successfully inserted all items
+ # 2. when accessed we force to reload the relation
+ association.reset
+ end
+
+ def _association_supports_bulk_inserts?(reflection)
+ reflection.macro == :has_many &&
+ reflection.klass < BulkInsertSafe &&
+ !reflection.through_reflection? &&
+ association_cached?(reflection.name)
+ end
+
+ def _bulk_insert_configure_foreign_key(reflection, items)
+ primary_key_column = reflection.active_record_primary_key
+ raise "Classes including `BulkInsertableAssociations` must define a `primary_key`" unless primary_key_column
+
+ primary_key_value = self[primary_key_column]
+ raise "No value found for primary key `#{primary_key_column}`" unless primary_key_value
+
+ items.each do |item|
+ item[reflection.foreign_key] = primary_key_value
+
+ if reflection.type
+ item[reflection.type] = self.class.polymorphic_name
+ end
+ end
+ end
+
+ included do
+ delegate :bulk_inserts_enabled?, to: BulkInsertableAssociations
+ after_create :bulk_insert_associations!, if: :bulk_inserts_enabled?, prepend: true
+ end
+end
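The switch is thread-local and nestable, so bulk inserts can be turned off again inside an outer enabled block. A sketch building on the example in the comments above (attribute values are illustrative):

BulkInsertableAssociations.with_bulk_insert do
  diff = MergeRequestDiff.new(merge_request_id: merge_request.id)
  diff.merge_request_diff_commits.build(sha: commit_sha) # unsaved items only

  BulkInsertableAssociations.with_bulk_insert(enabled: false) do
    # anything saved in this inner block falls back to row-by-row INSERTs
  end

  diff.save! # merge_request_diff_commits are written via bulk_insert!
end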
diff --git a/app/models/concerns/cache_markdown_field.rb b/app/models/concerns/cache_markdown_field.rb
index 9713e79f525..cc13f279c4d 100644
--- a/app/models/concerns/cache_markdown_field.rb
+++ b/app/models/concerns/cache_markdown_field.rb
@@ -20,6 +20,10 @@ module CacheMarkdownField
false
end
+ def can_cache_field?(field)
+ true
+ end
+
# Returns the default Banzai render context for the cached markdown field.
def banzai_render_context(field)
raise ArgumentError.new("Unknown field: #{field.inspect}") unless
@@ -38,17 +42,23 @@ module CacheMarkdownField
context
end
- # Update every column in a row if any one is invalidated, as we only store
- # one version per row
- def refresh_markdown_cache
+ def rendered_field_content(markdown_field)
+ return unless can_cache_field?(markdown_field)
+
options = { skip_project_check: skip_project_check? }
+ Banzai::Renderer.cacheless_render_field(self, markdown_field, options)
+ end
+ # Update every applicable column in a row if any one is invalidated, as we only store
+ # one version per row
+ def refresh_markdown_cache
updates = cached_markdown_fields.markdown_fields.map do |markdown_field|
[
cached_markdown_fields.html_field(markdown_field),
- Banzai::Renderer.cacheless_render_field(self, markdown_field, options)
+ rendered_field_content(markdown_field)
]
end.to_h
+
updates['cached_markdown_version'] = latest_cached_markdown_version
updates.each { |field, data| write_markdown_field(field, data) }
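`can_cache_field?` defaults to true, so existing models behave as before; a model can override it to skip rendering for particular fields, in which case the cached column is written as nil. A hypothetical override, assuming the concern's usual `cache_markdown_field` macro:

class ExampleSnippet < ApplicationRecord
  include CacheMarkdownField

  cache_markdown_field :content

  # Hypothetical rule: only cache reasonably small content.
  def can_cache_field?(field)
    field != :content || content.to_s.bytesize < 1.megabyte
  end
end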
diff --git a/app/models/concerns/ci/has_ref.rb b/app/models/concerns/ci/has_ref.rb
new file mode 100644
index 00000000000..cf57ff47743
--- /dev/null
+++ b/app/models/concerns/ci/has_ref.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+##
+# We will disable `ref` and `sha` attributes in `Ci::Build` in the future
+# and remove this module in favor of Ci::PipelineDelegator.
+module Ci
+ module HasRef
+ extend ActiveSupport::Concern
+
+ def branch?
+ !tag? && !merge_request?
+ end
+
+ def git_ref
+ if branch?
+ Gitlab::Git::BRANCH_REF_PREFIX + ref.to_s
+ elsif tag?
+ Gitlab::Git::TAG_REF_PREFIX + ref.to_s
+ end
+ end
+
+ # A slugified version of the build ref, suitable for inclusion in URLs and
+ # domain names. Rules:
+ #
+ # * Lowercased
+ # * Anything not matching [a-z0-9-] is replaced with a -
+ # * Maximum length is 63 bytes
+ # * First/Last Character is not a hyphen
+ def ref_slug
+ Gitlab::Utils.slugify(ref.to_s)
+ end
+ end
+end
diff --git a/app/models/concerns/ci/metadatable.rb b/app/models/concerns/ci/metadatable.rb
index 9bfe76728e4..bd40af28bc9 100644
--- a/app/models/concerns/ci/metadatable.rb
+++ b/app/models/concerns/ci/metadatable.rb
@@ -14,6 +14,8 @@ module Ci
inverse_of: :build,
autosave: true
+ accepts_nested_attributes_for :metadata
+
delegate :timeout, to: :metadata, prefix: true, allow_nil: true
delegate :interruptible, to: :metadata, prefix: false, allow_nil: true
delegate :has_exposed_artifacts?, to: :metadata, prefix: false, allow_nil: true
diff --git a/app/models/concerns/has_ref.rb b/app/models/concerns/has_ref.rb
deleted file mode 100644
index 22e5955984d..00000000000
--- a/app/models/concerns/has_ref.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-# frozen_string_literal: true
-
-##
-# We will disable `ref` and `sha` attributes in `Ci::Build` in the future
-# and remove this module in favor of Ci::PipelineDelegator.
-module HasRef
- extend ActiveSupport::Concern
-
- def branch?
- !tag? && !merge_request?
- end
-
- def git_ref
- if branch?
- Gitlab::Git::BRANCH_REF_PREFIX + ref.to_s
- elsif tag?
- Gitlab::Git::TAG_REF_PREFIX + ref.to_s
- end
- end
-
- # A slugified version of the build ref, suitable for inclusion in URLs and
- # domain names. Rules:
- #
- # * Lowercased
- # * Anything not matching [a-z0-9-] is replaced with a -
- # * Maximum length is 63 bytes
- # * First/Last Character is not a hyphen
- def ref_slug
- Gitlab::Utils.slugify(ref.to_s)
- end
-end
diff --git a/app/models/concerns/has_repository.rb b/app/models/concerns/has_repository.rb
index d04a6408a21..cc792eab2e0 100644
--- a/app/models/concerns/has_repository.rb
+++ b/app/models/concerns/has_repository.rb
@@ -9,7 +9,6 @@
# needs any special behavior.
module HasRepository
extend ActiveSupport::Concern
- include Gitlab::ShellAdapter
include AfterCommitQueue
include Referable
include Gitlab::Utils::StrongMemoize
@@ -19,7 +18,7 @@ module HasRepository
def valid_repo?
repository.exists?
rescue
- errors.add(:path, _('Invalid repository path'))
+ errors.add(:base, _('Invalid repository path'))
false
end
@@ -78,7 +77,7 @@ module HasRepository
end
def url_to_repo
- gitlab_shell.url_to_repo(full_path)
+ Gitlab::Shell.url_to_repo(full_path)
end
def ssh_url_to_repo
diff --git a/app/models/concerns/issuable.rb b/app/models/concerns/issuable.rb
index 78d815e5858..7300283f086 100644
--- a/app/models/concerns/issuable.rb
+++ b/app/models/concerns/issuable.rb
@@ -260,12 +260,14 @@ module Issuable
highest_priority = highest_label_priority(params).to_sql
- select_columns = [
- "#{table_name}.*",
- "(#{highest_priority}) AS highest_priority"
- ] + extra_select_columns
+ # When using CTE make sure to select the same columns that are on the group_by clause.
+ # This prevents errors when ignored columns are present in the database.
+ issuable_columns = with_cte ? issue_grouping_columns(use_cte: with_cte) : "#{table_name}.*"
- select(select_columns.join(', '))
+ extra_select_columns = extra_select_columns.unshift("(#{highest_priority}) AS highest_priority")
+
+ select(issuable_columns)
+ .select(extra_select_columns)
.group(issue_grouping_columns(use_cte: with_cte))
.reorder(Gitlab::Database.nulls_last_order('highest_priority', direction))
end
@@ -301,7 +303,7 @@ module Issuable
# Returns an array of arel columns
def issue_grouping_columns(use_cte: false)
if use_cte
- [arel_table[:state]] + attribute_names.map { |attr| arel_table[attr.to_sym] }
+ attribute_names.map { |attr| arel_table[attr.to_sym] }
else
arel_table[:id]
end
diff --git a/app/models/concerns/milestone_eventable.rb b/app/models/concerns/milestone_eventable.rb
new file mode 100644
index 00000000000..17a02c9d3e4
--- /dev/null
+++ b/app/models/concerns/milestone_eventable.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+module MilestoneEventable
+ extend ActiveSupport::Concern
+
+ included do
+ has_many :resource_milestone_events
+ end
+end
diff --git a/app/models/concerns/milestoneable.rb b/app/models/concerns/milestoneable.rb
index 7df6981a129..3ffb32f94fc 100644
--- a/app/models/concerns/milestoneable.rb
+++ b/app/models/concerns/milestoneable.rb
@@ -37,7 +37,7 @@ module Milestoneable
private
def milestone_is_valid
- errors.add(:milestone_id, message: "is invalid") if respond_to?(:milestone_id) && milestone_id.present? && !milestone_available?
+ errors.add(:milestone_id, 'is invalid') if respond_to?(:milestone_id) && milestone_id.present? && !milestone_available?
end
end
diff --git a/app/models/concerns/milestoneish.rb b/app/models/concerns/milestoneish.rb
index 88e752e51e7..6dbb9649b9f 100644
--- a/app/models/concerns/milestoneish.rb
+++ b/app/models/concerns/milestoneish.rb
@@ -1,22 +1,24 @@
# frozen_string_literal: true
module Milestoneish
- def total_issues_count(user)
- count_issues_by_state(user).values.sum
+ def total_issues_count
+ @total_issues_count ||= Milestones::IssuesCountService.new(self).count
end
- def closed_issues_count(user)
- closed_state_id = Issue.available_states[:closed]
+ def closed_issues_count
+ @close_issues_count ||= Milestones::ClosedIssuesCountService.new(self).count
+ end
- count_issues_by_state(user)[closed_state_id].to_i
+ def opened_issues_count
+ total_issues_count - closed_issues_count
end
- def complete?(user)
- total_issues_count(user) > 0 && total_issues_count(user) == closed_issues_count(user)
+ def complete?
+ total_issues_count > 0 && total_issues_count == closed_issues_count
end
- def percent_complete(user)
- closed_issues_count(user) * 100 / total_issues_count(user)
+ def percent_complete
+ closed_issues_count * 100 / total_issues_count
rescue ZeroDivisionError
0
end
@@ -121,12 +123,6 @@ module Milestoneish
Gitlab::TimeTrackingFormatter.output(total_issue_time_estimate)
end
- def count_issues_by_state(user)
- memoize_per_user(user, :count_issues_by_state) do
- issues_visible_to_user(user).reorder(nil).group(:state_id).count
- end
- end
-
private
def memoize_per_user(user, method_name)
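The user argument disappears from the counters because the counts now come from cached count services rather than a per-user visibility query. Roughly, for callers:

milestone.total_issues_count  # Milestones::IssuesCountService.new(milestone).count
milestone.closed_issues_count # Milestones::ClosedIssuesCountService.new(milestone).count
milestone.opened_issues_count # total - closed
milestone.percent_complete    # closed * 100 / total, 0 when there are no issues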
diff --git a/app/models/concerns/reactive_caching.rb b/app/models/concerns/reactive_caching.rb
index 010e0018414..4b472cfdf45 100644
--- a/app/models/concerns/reactive_caching.rb
+++ b/app/models/concerns/reactive_caching.rb
@@ -52,6 +52,13 @@ module ReactiveCaching
end
end
+ def with_reactive_cache_set(resource, opts, &blk)
+ data = with_reactive_cache(resource, opts, &blk)
+ save_keys_in_set(resource, opts) if data
+
+ data
+ end
+
# This method is used for debugging purposes and should not be used otherwise.
def without_reactive_cache(*args, &blk)
return with_reactive_cache(*args, &blk) unless Rails.env.development?
@@ -65,6 +72,12 @@ module ReactiveCaching
Rails.cache.delete(alive_reactive_cache_key(*args))
end
+ def clear_reactive_cache_set!(*args)
+ cache_key = full_reactive_cache_key(args)
+
+ reactive_set_cache.clear_cache!(cache_key)
+ end
+
def exclusively_update_reactive_cache!(*args)
locking_reactive_cache(*args) do
key = full_reactive_cache_key(*args)
@@ -86,6 +99,16 @@ module ReactiveCaching
private
+ def save_keys_in_set(resource, opts)
+ cache_key = full_reactive_cache_key(resource)
+
+ reactive_set_cache.write(cache_key, "#{cache_key}:#{opts}")
+ end
+
+ def reactive_set_cache
+ Gitlab::ReactiveCacheSetCache.new(expires_in: reactive_cache_lifetime)
+ end
+
def refresh_reactive_cache!(*args)
clear_reactive_cache!(*args)
keep_alive_reactive_cache!(*args)
diff --git a/app/models/concerns/spammable.rb b/app/models/concerns/spammable.rb
index 10bbeecc2f7..7c12fe29ade 100644
--- a/app/models/concerns/spammable.rb
+++ b/app/models/concerns/spammable.rb
@@ -16,7 +16,7 @@ module Spammable
attr_accessor :spam_log
alias_method :spam?, :spam
- after_validation :check_for_spam, on: [:create, :update]
+ after_validation :invalidate_if_spam, on: [:create, :update]
cattr_accessor :spammable_attrs, instance_accessor: false do
[]
@@ -37,7 +37,7 @@ module Spammable
end
end
- def check_for_spam
+ def invalidate_if_spam
error_msg = if Gitlab::Recaptcha.enabled?
"Your #{spammable_entity_type} has been recognized as spam. "\
"Please, change the content or solve the reCAPTCHA to proceed."
diff --git a/app/models/concerns/time_trackable.rb b/app/models/concerns/time_trackable.rb
index f61a0bbc65b..dddf96837b7 100644
--- a/app/models/concerns/time_trackable.rb
+++ b/app/models/concerns/time_trackable.rb
@@ -77,7 +77,7 @@ module TimeTrackable
return if time_spent.nil? || time_spent == :reset
if time_spent < 0 && (time_spent.abs > original_total_time_spent)
- errors.add(:time_spent, 'Time to subtract exceeds the total time spent')
+ errors.add(:base, _('Time to subtract exceeds the total time spent'))
end
end
diff --git a/app/models/concerns/usage_statistics.rb b/app/models/concerns/usage_statistics.rb
new file mode 100644
index 00000000000..d6cafcb22b1
--- /dev/null
+++ b/app/models/concerns/usage_statistics.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module UsageStatistics
+ extend ActiveSupport::Concern
+
+ class_methods do
+ def distinct_count_by(column = nil, fallback = -1)
+ distinct.count(column)
+ rescue ActiveRecord::StatementInvalid
+ fallback
+ end
+ end
+end
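Editor's note: a short usage sketch for the new concern; SomeCountableModel stands in for any ActiveRecord model, and the -1 fallback keeps usage-counter queries from failing hard on statement timeouts:

class SomeCountableModel < ApplicationRecord
  include UsageStatistics
end

SomeCountableModel.distinct_count_by(:user_id)
# => number of distinct user_id values, or -1 if the query raises
#    ActiveRecord::StatementInvalid (for example on a statement timeout)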
diff --git a/app/models/concerns/versioned_description.rb b/app/models/concerns/versioned_description.rb
index 63a24aadc8a..ee8d2d45357 100644
--- a/app/models/concerns/versioned_description.rb
+++ b/app/models/concerns/versioned_description.rb
@@ -16,7 +16,7 @@ module VersionedDescription
def save_description_version
self.saved_description_version = nil
- return unless Feature.enabled?(:save_description_versions, issuing_parent)
+ return unless Feature.enabled?(:save_description_versions, issuing_parent, default_enabled: true)
return unless saved_change_to_description?
unless description_versions.exists?
diff --git a/app/models/concerns/with_uploads.rb b/app/models/concerns/with_uploads.rb
index 6c6febd186c..d90f32d8b1c 100644
--- a/app/models/concerns/with_uploads.rb
+++ b/app/models/concerns/with_uploads.rb
@@ -18,7 +18,6 @@
module WithUploads
extend ActiveSupport::Concern
include FastDestroyAll::Helpers
- include FeatureGate
# Currently there is no simple way how to select only not-mounted
# uploads, it should be all FileUploaders so we select them by
diff --git a/app/models/deploy_token.rb b/app/models/deploy_token.rb
index 31c813edb67..a9844f627b7 100644
--- a/app/models/deploy_token.rb
+++ b/app/models/deploy_token.rb
@@ -105,7 +105,7 @@ class DeployToken < ApplicationRecord
end
def ensure_at_least_one_scope
- errors.add(:base, "Scopes can't be blank") unless read_repository || read_registry
+ errors.add(:base, _("Scopes can't be blank")) unless read_repository || read_registry
end
def default_username
diff --git a/app/models/deployment.rb b/app/models/deployment.rb
index fe42fb93633..707c4e8157d 100644
--- a/app/models/deployment.rb
+++ b/app/models/deployment.rb
@@ -135,7 +135,7 @@ class Deployment < ApplicationRecord
end
def create_ref
- project.repository.create_ref(ref, ref_path)
+ project.repository.create_ref(sha, ref_path)
end
def invalidate_cache
@@ -229,7 +229,14 @@ class Deployment < ApplicationRecord
end
def link_merge_requests(relation)
- select = relation.select(['merge_requests.id', id]).to_sql
+ # NOTE: relation.select will perform column deduplication,
+ # when id == environment_id it will output 2 columns instead of 3
+ # i.e.:
+ # MergeRequest.select(1, 2).to_sql #=> SELECT 1, 2 FROM "merge_requests"
+ # MergeRequest.select(1, 1).to_sql #=> SELECT 1 FROM "merge_requests"
+ select = relation.select('merge_requests.id',
+ "#{id} as deployment_id",
+ "#{environment_id} as environment_id").to_sql
# We don't use `Gitlab::Database.bulk_insert` here so that we don't need to
# first pluck lots of IDs into memory.
@@ -238,7 +245,7 @@ class Deployment < ApplicationRecord
# for the same deployment, only inserting any missing merge requests.
DeploymentMergeRequest.connection.execute(<<~SQL)
INSERT INTO #{DeploymentMergeRequest.table_name}
- (merge_request_id, deployment_id)
+ (merge_request_id, deployment_id, environment_id)
#{select}
ON CONFLICT DO NOTHING
SQL
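Editor's note: to make the NOTE above concrete, this is the deduplication behaviour being worked around and how the explicit aliases avoid it (values are placeholders):

MergeRequest.select(1, 2).to_sql # => SELECT 1, 2 FROM "merge_requests"
MergeRequest.select(1, 1).to_sql # => SELECT 1 FROM "merge_requests"

# Aliasing each constant keeps all three columns even when the deployment id
# happens to equal the environment id:
MergeRequest.select('merge_requests.id', '5 as deployment_id', '5 as environment_id').to_sql
# => SELECT merge_requests.id, 5 as deployment_id, 5 as environment_id FROM "merge_requests"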
diff --git a/app/models/description_version.rb b/app/models/description_version.rb
index 05362a2f90b..f69564f4893 100644
--- a/app/models/description_version.rb
+++ b/app/models/description_version.rb
@@ -19,7 +19,13 @@ class DescriptionVersion < ApplicationRecord
def exactly_one_issuable
issuable_count = self.class.issuable_attrs.count { |attr| self["#{attr}_id"] }
- errors.add(:base, "Exactly one of #{self.class.issuable_attrs.join(', ')} is required") if issuable_count != 1
+ if issuable_count != 1
+ errors.add(
+ :base,
+ _("Exactly one of %{attributes} is required") %
+ { attributes: self.class.issuable_attrs.join(', ') }
+ )
+ end
end
end
diff --git a/app/models/discussion.rb b/app/models/discussion.rb
index d0a7db39a30..c07078c03dd 100644
--- a/app/models/discussion.rb
+++ b/app/models/discussion.rb
@@ -14,12 +14,14 @@ class Discussion
:author,
:noteable,
:commit_id,
+ :confidential?,
:for_commit?,
:for_merge_request?,
:noteable_ability_name,
:to_ability_name,
:editable?,
- :visible_for?,
+ :system_note_with_references_visible_for?,
+ :resource_parent,
to: :first_note
diff --git a/app/models/environment.rb b/app/models/environment.rb
index bb41c4a066e..3f9247b1544 100644
--- a/app/models/environment.rb
+++ b/app/models/environment.rb
@@ -95,6 +95,10 @@ class Environment < ApplicationRecord
end
end
+ def self.for_id_and_slug(id, slug)
+ find_by(id: id, slug: slug)
+ end
+
def self.max_deployment_id_sql
Deployment.select(Deployment.arel_table[:id].maximum)
.where(Deployment.arel_table[:environment_id].eq(arel_table[:id]))
@@ -189,15 +193,6 @@ class Environment < ApplicationRecord
folder_name == "production"
end
- def first_deployment_for(commit_sha)
- ref = project.repository.ref_name_for_sha(ref_path, commit_sha)
-
- return unless ref
-
- deployment_iid = ref.split('/').last
- deployments.find_by(iid: deployment_iid)
- end
-
def ref_path
"refs/#{Repository::REF_ENVIRONMENTS}/#{slug}"
end
@@ -335,6 +330,10 @@ class Environment < ApplicationRecord
self.auto_stop_at = parsed_result.seconds.from_now
end
+ def elastic_stack_available?
+ !!deployment_platform&.cluster&.application_elastic_stack&.available?
+ end
+
private
def has_metrics_and_can_query?
diff --git a/app/models/environment_status.rb b/app/models/environment_status.rb
index 5fdb5af2d9b..46e41c22139 100644
--- a/app/models/environment_status.rb
+++ b/app/models/environment_status.rb
@@ -62,9 +62,9 @@ class EnvironmentStatus
end
def changes
- return [] unless has_route_map?
-
- changed_files.map { |file| build_change(file) }.compact
+ strong_memoize(:changes) do
+ has_route_map? ? changed_files.map { |file| build_change(file) }.compact : []
+ end
end
def changed_files
@@ -72,6 +72,14 @@ class EnvironmentStatus
.merge_request_diff_files.where(deleted_file: false)
end
+ def changed_paths
+ changes.map { |change| change[:path] }
+ end
+
+ def changed_urls
+ changes.map { |change| change[:external_url] }
+ end
+
def has_route_map?
project.route_map_for(sha).present?
end
diff --git a/app/models/epic.rb b/app/models/epic.rb
index ea4a231931d..04e19c17e18 100644
--- a/app/models/epic.rb
+++ b/app/models/epic.rb
@@ -5,6 +5,8 @@
class Epic < ApplicationRecord
include IgnorableColumns
+ ignore_column :health_status, remove_with: '13.0', remove_after: '2019-05-22'
+
def self.link_reference_pattern
nil
end
diff --git a/app/models/error_tracking/project_error_tracking_setting.rb b/app/models/error_tracking/project_error_tracking_setting.rb
index d328a609439..133850b6ab6 100644
--- a/app/models/error_tracking/project_error_tracking_setting.rb
+++ b/app/models/error_tracking/project_error_tracking_setting.rb
@@ -85,7 +85,7 @@ module ErrorTracking
end
def list_sentry_issues(opts = {})
- with_reactive_cache('list_issues', opts.stringify_keys) do |result|
+ with_reactive_cache_set('list_issues', opts.stringify_keys) do |result|
result
end
end
@@ -130,6 +130,10 @@ module ErrorTracking
end
end
+ def expire_issues_cache
+ clear_reactive_cache_set!('list_issues')
+ end
+
# http://HOST/api/0/projects/ORG/PROJECT
# ->
# http://HOST/ORG/PROJECT
diff --git a/app/models/event.rb b/app/models/event.rb
index 606c4d8302f..18682bd694c 100644
--- a/app/models/event.rb
+++ b/app/models/event.rb
@@ -135,29 +135,11 @@ class Event < ApplicationRecord
super(presenter_class: ::EventPresenter)
end
- # rubocop:disable Metrics/CyclomaticComplexity
- # rubocop:disable Metrics/PerceivedComplexity
def visible_to_user?(user = nil)
- if push_action? || commit_note?
- Ability.allowed?(user, :download_code, project)
- elsif membership_changed?
- Ability.allowed?(user, :read_project, project)
- elsif created_project_action?
- Ability.allowed?(user, :read_project, project)
- elsif issue? || issue_note?
- Ability.allowed?(user, :read_issue, note? ? note_target : target)
- elsif merge_request? || merge_request_note?
- Ability.allowed?(user, :read_merge_request, note? ? note_target : target)
- elsif personal_snippet_note? || project_snippet_note?
- Ability.allowed?(user, :read_snippet, note_target)
- elsif milestone?
- Ability.allowed?(user, :read_milestone, project)
- else
- false # No other event types are visible
- end
+ return false unless capability.present?
+
+ Ability.allowed?(user, capability, permission_object)
end
- # rubocop:enable Metrics/PerceivedComplexity
- # rubocop:enable Metrics/CyclomaticComplexity
def resource_parent
project || group
@@ -364,8 +346,38 @@ class Event < ApplicationRecord
Event._to_partial_path
end
+ protected
+
+ def capability
+ @capability ||= begin
+ if push_action? || commit_note?
+ :download_code
+ elsif membership_changed? || created_project_action?
+ :read_project
+ elsif issue? || issue_note?
+ :read_issue
+ elsif merge_request? || merge_request_note?
+ :read_merge_request
+ elsif personal_snippet_note? || project_snippet_note?
+ :read_snippet
+ elsif milestone?
+ :read_milestone
+ end
+ end
+ end
+
private
+ def permission_object
+ if note?
+ note_target
+ elsif target_id.present?
+ target
+ else
+ project
+ end
+ end
+
def push_action_name
if new_ref?
"pushed new"
diff --git a/app/models/external_pull_request.rb b/app/models/external_pull_request.rb
index 65ae8d95500..9c6d05f773a 100644
--- a/app/models/external_pull_request.rb
+++ b/app/models/external_pull_request.rb
@@ -78,7 +78,7 @@ class ExternalPullRequest < ApplicationRecord
def not_from_fork
if from_fork?
- errors.add(:base, 'Pull requests from fork are not supported')
+ errors.add(:base, _('Pull requests from fork are not supported'))
end
end
diff --git a/app/models/group.rb b/app/models/group.rb
index bf771bd0409..e9b3e3c3369 100644
--- a/app/models/group.rb
+++ b/app/models/group.rb
@@ -59,6 +59,9 @@ class Group < Namespace
has_many :import_failures, inverse_of: :group
+ has_many :group_deploy_tokens
+ has_many :deploy_tokens, through: :group_deploy_tokens
+
accepts_nested_attributes_for :variables, allow_destroy: true
validate :visibility_level_allowed_by_projects
@@ -403,11 +406,15 @@ class Group < Namespace
end
def ci_variables_for(ref, project)
- list_of_ids = [self] + ancestors
- variables = Ci::GroupVariable.where(group: list_of_ids)
- variables = variables.unprotected unless project.protected_for?(ref)
- variables = variables.group_by(&:group_id)
- list_of_ids.reverse.flat_map { |group| variables[group.id] }.compact
+ cache_key = "ci_variables_for:group:#{self&.id}:project:#{project&.id}:ref:#{ref}"
+
+ ::Gitlab::SafeRequestStore.fetch(cache_key) do
+ list_of_ids = [self] + ancestors
+ variables = Ci::GroupVariable.where(group: list_of_ids)
+ variables = variables.unprotected unless project.protected_for?(ref)
+ variables = variables.group_by(&:group_id)
+ list_of_ids.reverse.flat_map { |group| variables[group.id] }.compact
+ end
end
def group_member(user)
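Editor's note: the wrapper added above memoizes the group-variable lookup for the current request. The same pattern in isolation, with an illustrative key and block body:

Gitlab::SafeRequestStore.fetch("ci_variables_for:group:42:project:7:ref:master") do
  # runs once per request for this key; later calls return the stored value
  expensive_group_variable_lookup
end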
diff --git a/app/models/group_deploy_token.rb b/app/models/group_deploy_token.rb
index 221a7d768ae..d4ad29ddabb 100644
--- a/app/models/group_deploy_token.rb
+++ b/app/models/group_deploy_token.rb
@@ -9,7 +9,7 @@ class GroupDeployToken < ApplicationRecord
validates :deploy_token_id, uniqueness: { scope: [:group_id] }
def has_access_to?(requested_project)
- return false unless Feature.enabled?(:allow_group_deploy_token, default: true)
+ return false unless Feature.enabled?(:allow_group_deploy_token, default_enabled: true)
requested_project_group = requested_project&.group
return false unless requested_project_group
diff --git a/app/models/hooks/project_hook.rb b/app/models/hooks/project_hook.rb
index a5f68831f34..bc480b14e67 100644
--- a/app/models/hooks/project_hook.rb
+++ b/app/models/hooks/project_hook.rb
@@ -21,7 +21,7 @@ class ProjectHook < WebHook
validates :project, presence: true
def pluralized_name
- _('Project Hooks')
+ _('Webhooks')
end
end
diff --git a/app/models/identity.rb b/app/models/identity.rb
index cb7fd553255..40d9f856abf 100644
--- a/app/models/identity.rb
+++ b/app/models/identity.rb
@@ -25,7 +25,7 @@ class Identity < ApplicationRecord
def self.normalize_uid(provider, uid)
if Gitlab::Auth::OAuth::Provider.ldap_provider?(provider)
- Gitlab::Auth::LDAP::Person.normalize_dn(uid)
+ Gitlab::Auth::Ldap::Person.normalize_dn(uid)
else
uid.to_s
end
diff --git a/app/models/internal_id.rb b/app/models/internal_id.rb
index 3e8d0c6a778..b6882701e23 100644
--- a/app/models/internal_id.rb
+++ b/app/models/internal_id.rb
@@ -21,7 +21,7 @@ class InternalId < ApplicationRecord
belongs_to :project
belongs_to :namespace
- enum usage: { issues: 0, merge_requests: 1, deployments: 2, milestones: 3, epics: 4, ci_pipelines: 5, operations_feature_flags: 6 }
+ enum usage: ::InternalIdEnums.usage_resources
validates :usage, presence: true
diff --git a/app/models/internal_id_enums.rb b/app/models/internal_id_enums.rb
new file mode 100644
index 00000000000..2f7d7aeff2f
--- /dev/null
+++ b/app/models/internal_id_enums.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+module InternalIdEnums
+ def self.usage_resources
+ # When adding a new resource, make sure it doesn't conflict with EE usage_resources
+ { issues: 0, merge_requests: 1, deployments: 2, milestones: 3, epics: 4, ci_pipelines: 5, operations_feature_flags: 6 }
+ end
+end
+
+InternalIdEnums.prepend_if_ee('EE::InternalIdEnums')
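Editor's note: a sketch of how the extracted hash is consumed. InternalId's `enum usage:` maps each symbol to a stable integer, and Rails exposes the mapping as a class method (values taken from the hash above):

InternalIdEnums.usage_resources[:ci_pipelines] # => 5
InternalId.usages['milestones']                # => 3 (enum mapping generated by Rails)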
diff --git a/app/models/issue.rb b/app/models/issue.rb
index be702134ced..145807457a1 100644
--- a/app/models/issue.rb
+++ b/app/models/issue.rb
@@ -15,6 +15,7 @@ class Issue < ApplicationRecord
include ThrottledTouch
include LabelEventable
include IgnorableColumns
+ include MilestoneEventable
DueDateStruct = Struct.new(:title, :name).freeze
NoDueDate = DueDateStruct.new('No Due Date', '0').freeze
@@ -43,6 +44,8 @@ class Issue < ApplicationRecord
has_many :assignees, class_name: "User", through: :issue_assignees
has_many :zoom_meetings
has_many :user_mentions, class_name: "IssueUserMention", dependent: :delete_all # rubocop:disable Cop/ActiveRecordDependent
+ has_many :sent_notifications, as: :noteable
+
has_one :sentry_issue
accepts_nested_attributes_for :sentry_issue
@@ -64,6 +67,7 @@ class Issue < ApplicationRecord
scope :order_due_date_desc, -> { reorder(::Gitlab::Database.nulls_last_order('due_date', 'DESC')) }
scope :order_closest_future_date, -> { reorder(Arel.sql('CASE WHEN issues.due_date >= CURRENT_DATE THEN 0 ELSE 1 END ASC, ABS(CURRENT_DATE - issues.due_date) ASC')) }
scope :order_relative_position_asc, -> { reorder(::Gitlab::Database.nulls_last_order('relative_position', 'ASC')) }
+ scope :order_closed_date_desc, -> { reorder(closed_at: :desc) }
scope :preload_associated_models, -> { preload(:labels, project: :namespace) }
scope :with_api_entity_associations, -> { preload(:timelogs, :assignees, :author, :notes, :labels, project: [:route, { namespace: :route }] ) }
@@ -73,7 +77,7 @@ class Issue < ApplicationRecord
scope :counts_by_state, -> { reorder(nil).group(:state_id).count }
- ignore_column :state, remove_with: '12.7', remove_after: '2019-12-22'
+ ignore_column :state, remove_with: '12.10', remove_after: '2020-03-22'
after_commit :expire_etag_cache, unless: :importing?
after_save :ensure_metrics, unless: :importing?
@@ -128,12 +132,12 @@ class Issue < ApplicationRecord
def self.reference_pattern
@reference_pattern ||= %r{
(#{Project.reference_pattern})?
- #{Regexp.escape(reference_prefix)}(?<issue>\d+)
+ #{Regexp.escape(reference_prefix)}#{Gitlab::Regex.issue}
}x
end
def self.link_reference_pattern
- @link_reference_pattern ||= super("issues", /(?<issue>\d+)/)
+ @link_reference_pattern ||= super("issues", Gitlab::Regex.issue)
end
def self.reference_valid?(reference)
@@ -302,6 +306,10 @@ class Issue < ApplicationRecord
labels.map(&:hook_attrs)
end
+ def previous_updated_at
+ previous_changes['updated_at']&.first || updated_at
+ end
+
private
def ensure_metrics
diff --git a/app/models/jira_import_data.rb b/app/models/jira_import_data.rb
new file mode 100644
index 00000000000..3f882deb24d
--- /dev/null
+++ b/app/models/jira_import_data.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class JiraImportData < ProjectImportData
+ JiraProjectDetails = Struct.new(:key, :scheduled_at, :scheduled_by)
+
+ def projects
+ return [] unless data
+
+ projects = data.dig('jira', 'projects').map do |p|
+ JiraProjectDetails.new(p['key'], p['scheduled_at'], p['scheduled_by'])
+ end
+ projects.sort_by { |jp| jp.scheduled_at }
+ end
+
+ def <<(project)
+ self.data ||= { jira: { projects: [] } }
+ self.data['jira']['projects'] << project.to_h.deep_stringify_keys!
+ end
+end
diff --git a/app/models/key.rb b/app/models/key.rb
index e729ef67346..18fa8aaaa16 100644
--- a/app/models/key.rb
+++ b/app/models/key.rb
@@ -6,6 +6,7 @@ class Key < ApplicationRecord
include AfterCommitQueue
include Sortable
include Sha256Attribute
+ include Expirable
sha256_attribute :fingerprint_sha256
@@ -30,10 +31,10 @@ class Key < ApplicationRecord
delegate :name, :email, to: :user, prefix: true
- after_commit :add_to_shell, on: :create
+ after_commit :add_to_authorized_keys, on: :create
after_create :post_create_hook
after_create :refresh_user_cache
- after_commit :remove_from_shell, on: :destroy
+ after_commit :remove_from_authorized_keys, on: :destroy
after_destroy :post_destroy_hook
after_destroy :refresh_user_cache
@@ -79,12 +80,10 @@ class Key < ApplicationRecord
end
# rubocop: enable CodeReuse/ServiceClass
- def add_to_shell
- GitlabShellWorker.perform_async(
- :add_key,
- shell_id,
- key
- )
+ def add_to_authorized_keys
+ return unless Gitlab::CurrentSettings.authorized_keys_enabled?
+
+ AuthorizedKeysWorker.perform_async(:add_key, shell_id, key)
end
# rubocop: disable CodeReuse/ServiceClass
@@ -93,11 +92,10 @@ class Key < ApplicationRecord
end
# rubocop: enable CodeReuse/ServiceClass
- def remove_from_shell
- GitlabShellWorker.perform_async(
- :remove_key,
- shell_id
- )
+ def remove_from_authorized_keys
+ return unless Gitlab::CurrentSettings.authorized_keys_enabled?
+
+ AuthorizedKeysWorker.perform_async(:remove_key, shell_id)
end
# rubocop: disable CodeReuse/ServiceClass
diff --git a/app/models/label.rb b/app/models/label.rb
index 632207701d8..106d26685d4 100644
--- a/app/models/label.rb
+++ b/app/models/label.rb
@@ -202,10 +202,6 @@ class Label < ApplicationRecord
priorities.present?
end
- def template?
- template
- end
-
def color
super || DEFAULT_COLOR
end
diff --git a/app/models/lfs_object.rb b/app/models/lfs_object.rb
index 48c971194c6..6a86aebae39 100644
--- a/app/models/lfs_object.rb
+++ b/app/models/lfs_object.rb
@@ -11,6 +11,7 @@ class LfsObject < ApplicationRecord
scope :with_files_stored_locally, -> { where(file_store: LfsObjectUploader::Store::LOCAL) }
scope :with_files_stored_remotely, -> { where(file_store: LfsObjectUploader::Store::REMOTE) }
+ scope :for_oids, -> (oids) { where(oid: oids) }
validates :oid, presence: true, uniqueness: true
diff --git a/app/models/list.rb b/app/models/list.rb
index b2ba796e3dc..64247fdb983 100644
--- a/app/models/list.rb
+++ b/app/models/list.rb
@@ -97,6 +97,6 @@ class List < ApplicationRecord
private
def can_be_destroyed
- throw(:abort) unless destroyable?
+ throw(:abort) unless destroyable? # rubocop:disable Cop/BanCatchThrow
end
end
diff --git a/app/models/member.rb b/app/models/member.rb
index a26a0615a6e..99dee67346e 100644
--- a/app/models/member.rb
+++ b/app/models/member.rb
@@ -374,7 +374,7 @@ class Member < ApplicationRecord
# always notify when there isn't a user yet
return true if user.blank?
- NotificationRecipientService.notifiable?(user, type, notifiable_options.merge(opts))
+ NotificationRecipients::BuildService.notifiable?(user, type, notifiable_options.merge(opts))
end
# rubocop: enable CodeReuse/ServiceClass
diff --git a/app/models/merge_request.rb b/app/models/merge_request.rb
index 6c32bdadfa8..412d0fa4ec8 100644
--- a/app/models/merge_request.rb
+++ b/app/models/merge_request.rb
@@ -18,6 +18,7 @@ class MergeRequest < ApplicationRecord
include DeprecatedAssignee
include ShaAttribute
include IgnorableColumns
+ include MilestoneEventable
sha_attribute :squash_commit_sha
@@ -48,6 +49,7 @@ class MergeRequest < ApplicationRecord
# 1. There are arguments - in which case we might be trying to force-reload.
# 2. This association is already loaded.
# 3. The latest diff does not exist.
+ # 4. It has no merge_request_diffs - in which case an empty, unpersisted MergeRequestDiff is returned
#
# The second one in particular is important - MergeRequestDiff#merge_request
# is the inverse of MergeRequest#merge_request_diff, which means it may not be
@@ -56,7 +58,7 @@ class MergeRequest < ApplicationRecord
def merge_request_diff
fallback = latest_merge_request_diff unless association(:merge_request_diff).loaded?
- fallback || super
+ fallback || super || MergeRequestDiff.new(merge_request_id: id)
end
belongs_to :head_pipeline, foreign_key: "head_pipeline_id", class_name: "Ci::Pipeline"
@@ -233,12 +235,17 @@ class MergeRequest < ApplicationRecord
end
scope :join_project, -> { joins(:target_project) }
scope :references_project, -> { references(:target_project) }
+
+ PROJECT_ROUTE_AND_NAMESPACE_ROUTE = [
+ target_project: [:route, { namespace: :route }],
+ source_project: [:route, { namespace: :route }]
+ ].freeze
+
scope :with_api_entity_associations, -> {
preload(:assignees, :author, :unresolved_notes, :labels, :milestone,
:timelogs, :latest_merge_request_diff,
- metrics: [:latest_closed_by, :merged_by],
- target_project: [:route, { namespace: :route }],
- source_project: [:route, { namespace: :route }])
+ *PROJECT_ROUTE_AND_NAMESPACE_ROUTE,
+ metrics: [:latest_closed_by, :merged_by])
}
scope :by_target_branch_wildcard, ->(wildcard_branch_name) do
where("target_branch LIKE ?", ApplicationRecord.sanitize_sql_like(wildcard_branch_name).tr('*', '%'))
@@ -250,7 +257,11 @@ class MergeRequest < ApplicationRecord
with_state(:opened).where(auto_merge_enabled: true)
end
- ignore_column :state, remove_with: '12.7', remove_after: '2019-12-22'
+ scope :including_metrics, -> do
+ includes(:metrics)
+ end
+
+ ignore_column :state, remove_with: '12.10', remove_after: '2020-03-22'
after_save :keep_around_commit, unless: :importing?
@@ -404,7 +415,7 @@ class MergeRequest < ApplicationRecord
end
def commits(limit: nil)
- return merge_request_diff.commits(limit: limit) if persisted?
+ return merge_request_diff.commits(limit: limit) if merge_request_diff.persisted?
commits_arr = if compare_commits
reversed_commits = compare_commits.reverse
@@ -421,7 +432,7 @@ class MergeRequest < ApplicationRecord
end
def commits_count
- if persisted?
+ if merge_request_diff.persisted?
merge_request_diff.commits_count
elsif compare_commits
compare_commits.size
@@ -431,7 +442,7 @@ class MergeRequest < ApplicationRecord
end
def commit_shas(limit: nil)
- return merge_request_diff.commit_shas(limit: limit) if persisted?
+ return merge_request_diff.commit_shas(limit: limit) if merge_request_diff.persisted?
shas =
if compare_commits
@@ -492,11 +503,11 @@ class MergeRequest < ApplicationRecord
end
def first_commit
- merge_request_diff ? merge_request_diff.first_commit : compare_commits.first
+ compare_commits.present? ? compare_commits.first : merge_request_diff.first_commit
end
def raw_diffs(*args)
- merge_request_diff ? merge_request_diff.raw_diffs(*args) : compare.raw_diffs(*args)
+ compare.present? ? compare.raw_diffs(*args) : merge_request_diff.raw_diffs(*args)
end
def diffs(diff_options = {})
@@ -556,8 +567,12 @@ class MergeRequest < ApplicationRecord
diffs.modified_paths
end
+ def new_paths
+ diffs.diff_files.map(&:new_path)
+ end
+
def diff_base_commit
- if persisted?
+ if merge_request_diff.persisted?
merge_request_diff.base_commit
else
branch_merge_base_commit
@@ -565,7 +580,7 @@ class MergeRequest < ApplicationRecord
end
def diff_start_commit
- if persisted?
+ if merge_request_diff.persisted?
merge_request_diff.start_commit
else
target_branch_head
@@ -573,7 +588,7 @@ class MergeRequest < ApplicationRecord
end
def diff_head_commit
- if persisted?
+ if merge_request_diff.persisted?
merge_request_diff.head_commit
else
source_branch_head
@@ -581,7 +596,7 @@ class MergeRequest < ApplicationRecord
end
def diff_start_sha
- if persisted?
+ if merge_request_diff.persisted?
merge_request_diff.start_commit_sha
else
target_branch_head.try(:sha)
@@ -589,7 +604,7 @@ class MergeRequest < ApplicationRecord
end
def diff_base_sha
- if persisted?
+ if merge_request_diff.persisted?
merge_request_diff.base_commit_sha
else
branch_merge_base_commit.try(:sha)
@@ -597,7 +612,7 @@ class MergeRequest < ApplicationRecord
end
def diff_head_sha
- if persisted?
+ if merge_request_diff.persisted?
merge_request_diff.head_commit_sha
else
source_branch_head.try(:sha)
@@ -758,7 +773,7 @@ class MergeRequest < ApplicationRecord
end
def ensure_merge_request_diff
- merge_request_diff || create_merge_request_diff
+ merge_request_diff.persisted? || create_merge_request_diff
end
def create_merge_request_diff
@@ -1005,7 +1020,7 @@ class MergeRequest < ApplicationRecord
def closes_issues(current_user = self.author)
if target_branch == project.default_branch
messages = [title, description]
- messages.concat(commits.map(&:safe_message)) if merge_request_diff
+ messages.concat(commits.map(&:safe_message)) if merge_request_diff.persisted?
Gitlab::ClosingIssueExtractor.new(project, current_user)
.closed_by_message(messages.join("\n"))
@@ -1244,7 +1259,7 @@ class MergeRequest < ApplicationRecord
def all_pipelines
strong_memoize(:all_pipelines) do
- MergeRequest::Pipelines.new(self).all
+ Ci::PipelinesForMergeRequestFinder.new(self).all
end
end
@@ -1284,6 +1299,24 @@ class MergeRequest < ApplicationRecord
compare_reports(Ci::CompareTestReportsService)
end
+ def has_coverage_reports?
+ return false unless Feature.enabled?(:coverage_report_view, project)
+
+ actual_head_pipeline&.has_reports?(Ci::JobArtifact.coverage_reports)
+ end
+
+ # TODO: this method and compare_test_reports use the same
+ # result type, which is handled by the controller's #reports_response.
+ # We should minimize mistakes by isolating the common parts.
+ # issue: https://gitlab.com/gitlab-org/gitlab/issues/34224
+ def find_coverage_reports
+ unless has_coverage_reports?
+ return { status: :error, status_reason: 'This merge request does not have coverage reports' }
+ end
+
+ compare_reports(Ci::GenerateCoverageReportsService)
+ end
+
def has_exposed_artifacts?
return false unless Feature.enabled?(:ci_expose_arbitrary_artifacts_in_mr, default_enabled: true)
@@ -1307,7 +1340,7 @@ class MergeRequest < ApplicationRecord
# issue: https://gitlab.com/gitlab-org/gitlab/issues/34224
def compare_reports(service_class, current_user = nil)
with_reactive_cache(service_class.name, current_user&.id) do |data|
- unless service_class.new(project, current_user)
+ unless service_class.new(project, current_user, id: id)
.latest?(base_pipeline, actual_head_pipeline, data)
raise InvalidateReactiveCache
end
@@ -1324,7 +1357,7 @@ class MergeRequest < ApplicationRecord
raise NameError, service_class unless service_class < Ci::CompareReportsBaseService
current_user = User.find_by(id: current_user_id)
- service_class.new(project, current_user).execute(base_pipeline, actual_head_pipeline)
+ service_class.new(project, current_user, id: id).execute(base_pipeline, actual_head_pipeline)
end
def all_commits
@@ -1421,7 +1454,7 @@ class MergeRequest < ApplicationRecord
end
def has_commits?
- merge_request_diff && commits_count.to_i > 0
+ merge_request_diff.persisted? && commits_count.to_i > 0
end
def has_no_commits?
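Editor's note: with the fallback added to merge_request_diff, the association never returns nil: an unsaved merge request (or one without diffs) gets a blank, unpersisted MergeRequestDiff, which is why the persisted? checks above moved from the merge request to the diff. A hedged illustration (the attributes passed to new are placeholders):

mr = MergeRequest.new(source_project: project, target_project: project)
mr.merge_request_diff            # => a new, unsaved MergeRequestDiff
mr.merge_request_diff.persisted? # => false, so commits/shas fall back to compare_commits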
diff --git a/app/models/merge_request/metrics.rb b/app/models/merge_request/metrics.rb
index 05f8e18a2c1..ba363019c72 100644
--- a/app/models/merge_request/metrics.rb
+++ b/app/models/merge_request/metrics.rb
@@ -6,3 +6,5 @@ class MergeRequest::Metrics < ApplicationRecord
belongs_to :latest_closed_by, class_name: 'User'
belongs_to :merged_by, class_name: 'User'
end
+
+MergeRequest::Metrics.prepend_if_ee('EE::MergeRequest::Metrics')
diff --git a/app/models/merge_request/pipelines.rb b/app/models/merge_request/pipelines.rb
deleted file mode 100644
index 72756e8e9d0..00000000000
--- a/app/models/merge_request/pipelines.rb
+++ /dev/null
@@ -1,88 +0,0 @@
-# frozen_string_literal: true
-
-# A state object to centralize logic related to merge request pipelines
-class MergeRequest::Pipelines
- include Gitlab::Utils::StrongMemoize
-
- EVENT = 'merge_request_event'
-
- def initialize(merge_request)
- @merge_request = merge_request
- end
-
- attr_reader :merge_request
-
- delegate :commit_shas, :source_project, :source_branch, to: :merge_request
-
- def all
- strong_memoize(:all_pipelines) do
- next Ci::Pipeline.none unless source_project
-
- pipelines =
- if merge_request.persisted?
- pipelines_using_cte
- else
- triggered_for_branch.for_sha(commit_shas)
- end
-
- sort(pipelines)
- end
- end
-
- private
-
- def pipelines_using_cte
- cte = Gitlab::SQL::CTE.new(:shas, merge_request.all_commits.select(:sha))
-
- source_pipelines_join = cte.table[:sha].eq(Ci::Pipeline.arel_table[:source_sha])
- source_pipelines = filter_by(triggered_by_merge_request, cte, source_pipelines_join)
- detached_pipelines = filter_by_sha(triggered_by_merge_request, cte)
- pipelines_for_branch = filter_by_sha(triggered_for_branch, cte)
-
- Ci::Pipeline.with(cte.to_arel)
- .from_union([source_pipelines, detached_pipelines, pipelines_for_branch])
- end
-
- def filter_by_sha(pipelines, cte)
- hex = Arel::Nodes::SqlLiteral.new("'hex'")
- string_sha = Arel::Nodes::NamedFunction.new('encode', [cte.table[:sha], hex])
- join_condition = string_sha.eq(Ci::Pipeline.arel_table[:sha])
-
- filter_by(pipelines, cte, join_condition)
- end
-
- def filter_by(pipelines, cte, join_condition)
- shas_table =
- Ci::Pipeline.arel_table
- .join(cte.table, Arel::Nodes::InnerJoin)
- .on(join_condition)
- .join_sources
-
- pipelines.joins(shas_table)
- end
-
- # NOTE: this method returns only parent merge request pipelines.
- # Child merge request pipelines have a different source.
- def triggered_by_merge_request
- source_project.ci_pipelines
- .where(source: :merge_request_event, merge_request: merge_request)
- end
-
- def triggered_for_branch
- source_project.ci_pipelines
- .where(source: branch_pipeline_sources, ref: source_branch, tag: false)
- end
-
- def branch_pipeline_sources
- strong_memoize(:branch_pipeline_sources) do
- Ci::Pipeline.sources.reject { |source| source == EVENT }.values
- end
- end
-
- def sort(pipelines)
- sql = 'CASE ci_pipelines.source WHEN (?) THEN 0 ELSE 1 END, ci_pipelines.id DESC'
- query = ApplicationRecord.send(:sanitize_sql_array, [sql, Ci::Pipeline.sources[:merge_request_event]]) # rubocop:disable GitlabSecurity/PublicSend
-
- pipelines.order(Arel.sql(query))
- end
-end
diff --git a/app/models/merge_request_diff.rb b/app/models/merge_request_diff.rb
index ffe95e8f034..fe769573e29 100644
--- a/app/models/merge_request_diff.rb
+++ b/app/models/merge_request_diff.rb
@@ -7,6 +7,7 @@ class MergeRequestDiff < ApplicationRecord
include EachBatch
include Gitlab::Utils::StrongMemoize
include ObjectStorage::BackgroundMove
+ include BulkInsertableAssociations
# Don't display more than 100 commits at once
COMMITS_SAFE_SIZE = 100
diff --git a/app/models/merge_request_diff_commit.rb b/app/models/merge_request_diff_commit.rb
index 460b394f067..2819ea7ce1e 100644
--- a/app/models/merge_request_diff_commit.rb
+++ b/app/models/merge_request_diff_commit.rb
@@ -10,6 +10,8 @@ class MergeRequestDiffCommit < ApplicationRecord
sha_attribute :sha
alias_attribute :id, :sha
+ # Deprecated; use `bulk_insert!` from `BulkInsertSafe` mixin instead.
+ # cf. https://gitlab.com/gitlab-org/gitlab/issues/207989 for progress
def self.create_bulk(merge_request_diff_id, commits)
rows = commits.map.with_index do |commit, index|
# See #parent_ids.
diff --git a/app/models/milestone.rb b/app/models/milestone.rb
index 29c621c54d0..4ccfe314526 100644
--- a/app/models/milestone.rb
+++ b/app/models/milestone.rb
@@ -3,7 +3,13 @@
class Milestone < ApplicationRecord
# Represents a "No Milestone" state used for filtering Issues and Merge
# Requests that have no milestone assigned.
- MilestoneStruct = Struct.new(:title, :name, :id)
+ MilestoneStruct = Struct.new(:title, :name, :id) do
+ # Ensure these models match the interface required for exporting
+ def serializable_hash(_opts = {})
+ { title: title, name: name, id: id }
+ end
+ end
+
None = MilestoneStruct.new('No Milestone', 'No Milestone', 0)
Any = MilestoneStruct.new('Any Milestone', '', -1)
Upcoming = MilestoneStruct.new('Upcoming', '#upcoming', -2)
@@ -128,11 +134,12 @@ class Milestone < ApplicationRecord
reorder(nil).group(:state).count
end
+ def predefined_id?(id)
+ [Any.id, None.id, Upcoming.id, Started.id].include?(id)
+ end
+
def predefined?(milestone)
- milestone == Any ||
- milestone == None ||
- milestone == Upcoming ||
- milestone == Started
+ predefined_id?(milestone&.id)
end
end
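Editor's note: an illustration of the simplified predefined check, assuming these are the class-level helpers their callers treat them as (ids come from the structs defined above):

Milestone.predefined?(Milestone::Upcoming)   # => true
Milestone.predefined_id?(-2)                 # => true, -2 is Upcoming's id
Milestone.predefined?(Milestone.new(id: 42)) # => false for an ordinary milestone id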
diff --git a/app/models/milestone_note.rb b/app/models/milestone_note.rb
new file mode 100644
index 00000000000..4b027b0782c
--- /dev/null
+++ b/app/models/milestone_note.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+class MilestoneNote < ::Note
+ attr_accessor :resource_parent, :event, :milestone
+
+ def self.from_event(event, resource: nil, resource_parent: nil)
+ resource ||= event.resource
+
+ attrs = {
+ system: true,
+ author: event.user,
+ created_at: event.created_at,
+ noteable: resource,
+ milestone: event.milestone,
+ discussion_id: event.discussion_id,
+ event: event,
+ system_note_metadata: ::SystemNoteMetadata.new(action: 'milestone'),
+ resource_parent: resource_parent
+ }
+
+ if resource_parent.is_a?(Project)
+ attrs[:project_id] = resource_parent.id
+ end
+
+ MilestoneNote.new(attrs)
+ end
+
+ def note
+ @note ||= note_text
+ end
+
+ def note_html
+ @note_html ||= Banzai::Renderer.cacheless_render_field(self, :note, { group: group, project: project })
+ end
+
+ def project
+ resource_parent if resource_parent.is_a?(Project)
+ end
+
+ def group
+ resource_parent if resource_parent.is_a?(Group)
+ end
+
+ private
+
+ def note_text(html: false)
+ format = milestone&.group_milestone? ? :name : :iid
+ milestone.nil? ? 'removed milestone' : "changed milestone to #{milestone.to_reference(project, format: format)}"
+ end
+end
diff --git a/app/models/milestone_release.rb b/app/models/milestone_release.rb
index 713c8ef7b94..0a6165c8254 100644
--- a/app/models/milestone_release.rb
+++ b/app/models/milestone_release.rb
@@ -11,6 +11,6 @@ class MilestoneRelease < ApplicationRecord
def same_project_between_milestone_and_release
return if milestone&.project_id == release&.project_id
- errors.add(:base, 'does not have the same project as the milestone')
+ errors.add(:base, _('Release does not have the same project as the milestone'))
end
end
diff --git a/app/models/namespace.rb b/app/models/namespace.rb
index efe14a3e614..aae45144f1a 100644
--- a/app/models/namespace.rb
+++ b/app/models/namespace.rb
@@ -68,6 +68,7 @@ class Namespace < ApplicationRecord
after_destroy :rm_dir
scope :for_user, -> { where('type IS NULL') }
+ scope :sort_by_type, -> { order(Gitlab::Database.nulls_first_order(:type)) }
scope :with_statistics, -> do
joins('LEFT JOIN project_statistics ps ON ps.namespace_id = namespaces.id')
@@ -129,7 +130,7 @@ class Namespace < ApplicationRecord
return unless host.ends_with?(gitlab_host)
name = host.delete_suffix(gitlab_host)
- Namespace.find_by_full_path(name)
+ Namespace.where(parent_id: nil).find_by_path(name)
end
# overridden in ee
@@ -138,6 +139,10 @@ class Namespace < ApplicationRecord
end
end
+ def default_branch_protection
+ super || Gitlab::CurrentSettings.default_branch_protection
+ end
+
def visibility_level_field
:visibility_level
end
@@ -326,7 +331,10 @@ class Namespace < ApplicationRecord
end
def pages_virtual_domain
- Pages::VirtualDomain.new(all_projects_with_pages, trim_prefix: full_path)
+ Pages::VirtualDomain.new(
+ all_projects_with_pages.includes(:route, :project_feature),
+ trim_prefix: full_path
+ )
end
def closest_setting(name)
@@ -368,7 +376,7 @@ class Namespace < ApplicationRecord
def nesting_level_allowed
if ancestors.count > Group::NUMBER_OF_ANCESTORS_ALLOWED
- errors.add(:parent_id, "has too deep level of nesting")
+ errors.add(:parent_id, 'has too deep level of nesting')
end
end
diff --git a/app/models/note.rb b/app/models/note.rb
index 97e84bb79f6..251a75e6025 100644
--- a/app/models/note.rb
+++ b/app/models/note.rb
@@ -223,7 +223,7 @@ class Note < ApplicationRecord
end
# rubocop: disable CodeReuse/ServiceClass
- def cross_reference?
+ def system_note_with_references?
return unless system?
if force_cross_reference_regex_check?
@@ -290,6 +290,19 @@ class Note < ApplicationRecord
@commit ||= project.commit(commit_id) if commit_id.present?
end
+ # Notes on merge requests and commits can be traced back to one or several
+ # MRs. This method returns a relation if the note is for one of these types,
+ # or nil if it is a note on some other object.
+ def merge_requests
+ if for_commit?
+ project.merge_requests.by_commit_sha(commit_id)
+ elsif for_merge_request?
+ MergeRequest.id_in(noteable_id)
+ else
+ nil
+ end
+ end
+
# override to return commits, which are not active record
def noteable
return commit if for_commit?
@@ -307,6 +320,13 @@ class Note < ApplicationRecord
super(noteable_type.to_s.classify.constantize.base_class.to_s)
end
+ def noteable_assignee_or_author?(user)
+ return false unless user
+ return noteable.assignee_or_author?(user) if [MergeRequest, Issue].include?(noteable.class)
+
+ noteable.author_id == user.id
+ end
+
def special_role=(role)
raise "Role is undefined, #{role} not found in #{SpecialRole.values}" unless SpecialRole.value?(role)
@@ -324,7 +344,7 @@ class Note < ApplicationRecord
end
def confidential?
- noteable.try(:confidential?)
+ confidential || noteable.try(:confidential?)
end
def editable?
@@ -339,12 +359,10 @@ class Note < ApplicationRecord
super
end
- def cross_reference_not_visible_for?(user)
- cross_reference? && !all_referenced_mentionables_allowed?(user)
- end
-
- def visible_for?(user)
- !cross_reference_not_visible_for?(user) && system_note_viewable_by?(user)
+ # This method is to be used for checking read permissions on a note instead of `system_note_with_references_visible_for?`
+ def readable_by?(user)
+ # note_policy accounts for #system_note_with_references_visible_for?(user) check when granting read access
+ Ability.allowed?(user, :read_note, self)
end
def award_emoji?
@@ -504,6 +522,10 @@ class Note < ApplicationRecord
noteable.user_mentions.where(note: self)
end
+ def system_note_with_references_visible_for?(user)
+ (!system_note_with_references? || all_referenced_mentionables_allowed?(user)) && system_note_viewable_by?(user)
+ end
+
private
# Using this method followed by a call to `save` may result in ActiveRecord::RecordNotUnique exception
diff --git a/app/models/notification_recipient.rb b/app/models/notification_recipient.rb
index 8e44e3d8e17..107d00d055a 100644
--- a/app/models/notification_recipient.rb
+++ b/app/models/notification_recipient.rb
@@ -52,7 +52,8 @@ class NotificationRecipient
when :mention
@type == :mention
when :participating
- @custom_action == :failed_pipeline || %i[participating mention].include?(@type)
+ %i[failed_pipeline fixed_pipeline].include?(@custom_action) ||
+ %i[participating mention].include?(@type)
when :custom
custom_enabled? || %i[participating mention].include?(@type)
when :watch
@@ -63,7 +64,13 @@ class NotificationRecipient
end
def custom_enabled?
- @custom_action && notification_setting&.event_enabled?(@custom_action)
+ return false unless @custom_action
+ return false unless notification_setting
+
+ notification_setting.event_enabled?(@custom_action) ||
+ # fixed_pipeline is a subset of success_pipeline event
+ (@custom_action == :fixed_pipeline &&
+ notification_setting.event_enabled?(:success_pipeline))
end
def unsubscribed?
diff --git a/app/models/notification_setting.rb b/app/models/notification_setting.rb
index e2c362538eb..38bd95e6a20 100644
--- a/app/models/notification_setting.rb
+++ b/app/models/notification_setting.rb
@@ -44,6 +44,7 @@ class NotificationSetting < ApplicationRecord
:reassign_merge_request,
:merge_merge_request,
:failed_pipeline,
+ :fixed_pipeline,
:success_pipeline
].freeze
@@ -76,9 +77,9 @@ class NotificationSetting < ApplicationRecord
setting
end
- # Allow people to receive failed pipeline notifications if they already have
- # custom notifications enabled, as these are more like mentions than the other
- # custom settings.
+ # Allow people to receive both failed pipeline/fixed pipeline notifications
+ # if they already have custom notifications enabled,
+ # as these are more like mentions than the other custom settings.
def failed_pipeline
bool = super
@@ -86,6 +87,13 @@ class NotificationSetting < ApplicationRecord
end
alias_method :failed_pipeline?, :failed_pipeline
+ def fixed_pipeline
+ bool = super
+
+ bool.nil? || bool
+ end
+ alias_method :fixed_pipeline?, :fixed_pipeline
+
def event_enabled?(event)
respond_to?(event) && !!public_send(event) # rubocop:disable GitlabSecurity/PublicSend
end
diff --git a/app/models/pages_domain.rb b/app/models/pages_domain.rb
index 05cf427184c..37d45c5934d 100644
--- a/app/models/pages_domain.rb
+++ b/app/models/pages_domain.rb
@@ -11,6 +11,7 @@ class PagesDomain < ApplicationRecord
belongs_to :project
has_many :acme_orders, class_name: "PagesDomainAcmeOrder"
+ has_many :serverless_domain_clusters, class_name: 'Serverless::DomainCluster', inverse_of: :pages_domain
validates :domain, hostname: { allow_numeric_hostname: true }
validates :domain, uniqueness: { case_sensitive: false }
@@ -67,6 +68,10 @@ class PagesDomain < ApplicationRecord
scope :instance_serverless, -> { where(wildcard: true, scope: :instance, usage: :serverless) }
+ def self.find_by_domain_case_insensitive(domain)
+ find_by("LOWER(domain) = LOWER(?)", domain)
+ end
+
def verified?
!!verified_at
end
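Editor's note: a quick sketch of the new case-insensitive finder; the domain value is a placeholder:

PagesDomain.find_by_domain_case_insensitive('Example.GitLab.io')
# Generates roughly: SELECT ... WHERE LOWER(domain) = LOWER('Example.GitLab.io') LIMIT 1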
diff --git a/app/models/project.rb b/app/models/project.rb
index e16bd568153..8578cd0e44a 100644
--- a/app/models/project.rb
+++ b/app/models/project.rb
@@ -66,7 +66,13 @@ class Project < ApplicationRecord
default_value_for :archived, false
default_value_for :resolve_outdated_diff_discussions, false
default_value_for :container_registry_enabled, gitlab_config_features.container_registry
- default_value_for(:repository_storage) { Gitlab::CurrentSettings.pick_repository_storage }
+ default_value_for(:repository_storage) do
+ # We need to ensure application settings are fresh when we pick
+ # a repository storage to use.
+ Gitlab::CurrentSettings.expire_current_application_settings
+ Gitlab::CurrentSettings.pick_repository_storage
+ end
+
default_value_for(:shared_runners_enabled) { Gitlab::CurrentSettings.shared_runners_enabled }
default_value_for :issues_enabled, gitlab_config_features.issues
default_value_for :merge_requests_enabled, gitlab_config_features.merge_requests
@@ -186,6 +192,7 @@ class Project < ApplicationRecord
has_one :import_state, autosave: true, class_name: 'ProjectImportState', inverse_of: :project
has_one :import_export_upload, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
+ has_many :export_jobs, class_name: 'ProjectExportJob'
has_one :project_repository, inverse_of: :project
has_one :incident_management_setting, inverse_of: :project, class_name: 'IncidentManagement::ProjectIncidentManagementSetting'
has_one :error_tracking_setting, inverse_of: :project, class_name: 'ErrorTracking::ProjectErrorTrackingSetting'
@@ -267,6 +274,7 @@ class Project < ApplicationRecord
class_name: 'Ci::Pipeline',
inverse_of: :project
has_many :stages, class_name: 'Ci::Stage', inverse_of: :project
+ has_many :ci_refs, class_name: 'Ci::Ref'
# Ci::Build objects store data on the file system such as artifact files and
# build traces. Currently there's no efficient way of removing this data in
@@ -780,7 +788,7 @@ class Project < ApplicationRecord
end
def repository
- @repository ||= Repository.new(full_path, self, disk_path: disk_path)
+ @repository ||= Repository.new(full_path, self, shard: repository_storage, disk_path: disk_path)
end
def cleanup
@@ -1188,6 +1196,10 @@ class Project < ApplicationRecord
update_column(:has_external_issue_tracker, services.external_issue_trackers.any?) if Gitlab::Database.read_write?
end
+ def external_references_supported?
+ external_issue_tracker&.support_cross_reference?
+ end
+
def has_wiki?
wiki_enabled? || has_external_wiki?
end
@@ -1374,7 +1386,7 @@ class Project < ApplicationRecord
@lfs_storage_project ||= begin
result = self
- # TODO: Make this go to the fork_network root immeadiatly
+ # TODO: Make this go to the fork_network root immediately
# dependant on the discussion in: https://gitlab.com/gitlab-org/gitlab-foss/issues/39769
result = result.fork_source while result&.forked?
@@ -1397,12 +1409,17 @@ class Project < ApplicationRecord
.where(lfs_objects_projects: { project_id: [self, lfs_storage_project] })
end
- # TODO: Call `#lfs_objects` instead once all LfsObjectsProject records are
- # backfilled. At that point, projects can look at their own `lfs_objects`.
+ # TODO: Remove this method once all LfsObjectsProject records are backfilled
+ # for forks. At that point, projects can look at their own `lfs_objects` so
+ # `lfs_objects_oids` can be used instead.
#
# See https://gitlab.com/gitlab-org/gitlab/issues/122002 for more info.
- def lfs_objects_oids
- all_lfs_objects.pluck(:oid)
+ def all_lfs_objects_oids(oids: [])
+ oids(all_lfs_objects, oids: oids)
+ end
+
+ def lfs_objects_oids(oids: [])
+ oids(lfs_objects, oids: oids)
end
def personal?
@@ -1411,8 +1428,8 @@ class Project < ApplicationRecord
# Expires various caches before a project is renamed.
def expire_caches_before_rename(old_path)
- repo = Repository.new(old_path, self)
- wiki = Repository.new("#{old_path}.wiki", self)
+ repo = Repository.new(old_path, self, shard: repository_storage)
+ wiki = Repository.new("#{old_path}.wiki", self, shard: repository_storage, repo_type: Gitlab::GlRepository::WIKI)
if repo.exists?
repo.before_delete
@@ -1449,13 +1466,14 @@ class Project < ApplicationRecord
# Forked import is handled asynchronously
return if forked? && !force
- if gitlab_shell.create_project_repository(self)
- repository.after_create
- true
- else
- errors.add(:base, _('Failed to create repository via gitlab-shell'))
- false
- end
+ repository.create_repository
+ repository.after_create
+
+ true
+ rescue => err
+ Gitlab::ErrorTracking.track_exception(err, project: { id: id, full_path: full_path, disk_path: disk_path })
+ errors.add(:base, _('Failed to create repository'))
+ false
end
def hook_attrs(backward: true)
@@ -1781,8 +1799,10 @@ class Project < ApplicationRecord
# rubocop:enable Gitlab/RailsLogger
def after_import
- repository.after_import
- wiki.repository.after_import
+ repository.expire_content_cache
+ wiki.repository.expire_content_cache
+
+ DetectRepositoryLanguagesWorker.perform_async(id)
# The import assigns iid values on its own, e.g. by re-using GitHub ids.
# Flush existing InternalId records for this project for consistency reasons.
@@ -1842,10 +1862,12 @@ class Project < ApplicationRecord
end
def export_status
- if export_in_progress?
+ if regeneration_in_progress?
+ :regeneration_in_progress
+ elsif export_enqueued?
+ :queued
+ elsif export_in_progress?
:started
- elsif after_export_in_progress?
- :after_export_action
elsif export_file_exists?
:finished
else
@@ -1854,11 +1876,19 @@ class Project < ApplicationRecord
end
def export_in_progress?
- import_export_shared.active_export_count > 0
+ strong_memoize(:export_in_progress) do
+ ::Projects::ExportJobFinder.new(self, { status: :started }).execute.present?
+ end
+ end
+
+ def export_enqueued?
+ strong_memoize(:export_enqueued) do
+ ::Projects::ExportJobFinder.new(self, { status: :queued }).execute.present?
+ end
end
- def after_export_in_progress?
- import_export_shared.after_export_in_progress?
+ def regeneration_in_progress?
+ (export_enqueued? || export_in_progress?) && export_file_exists?
end
def remove_exports
@@ -1962,6 +1992,14 @@ class Project < ApplicationRecord
end
def ci_variables_for(ref:, environment: nil)
+ cache_key = "ci_variables_for:project:#{self&.id}:ref:#{ref}:environment:#{environment}"
+
+ ::Gitlab::SafeRequestStore.fetch(cache_key) do
+ uncached_ci_variables_for(ref: ref, environment: environment)
+ end
+ end
+
+ def uncached_ci_variables_for(ref:, environment: nil)
result = if protected_for?(ref)
variables
else
@@ -2030,6 +2068,16 @@ class Project < ApplicationRecord
end
end
+ def change_repository_storage(new_repository_storage_key)
+ return if repository_read_only?
+ return if repository_storage == new_repository_storage_key
+
+ raise ArgumentError unless ::Gitlab.config.repositories.storages.key?(new_repository_storage_key)
+
+ run_after_commit { ProjectUpdateRepositoryStorageWorker.perform_async(id, new_repository_storage_key) }
+ self.repository_read_only = true
+ end
+
def pushes_since_gc
Gitlab::Redis::SharedState.with { |redis| redis.get(pushes_since_gc_redis_shared_state_key).to_i }
end
@@ -2342,6 +2390,20 @@ class Project < ApplicationRecord
Gitlab::CurrentSettings.self_monitoring_project_id == id
end
+ def deploy_token_create_url(opts = {})
+ Gitlab::Routing.url_helpers.create_deploy_token_project_settings_ci_cd_path(self, opts)
+ end
+
+ def deploy_token_revoke_url_for(token)
+ Gitlab::Routing.url_helpers.revoke_project_deploy_token_path(self, token)
+ end
+
+ def default_branch_protected?
+ branch_protection = Gitlab::Access::BranchProtection.new(self.namespace.default_branch_protection)
+
+ branch_protection.fully_protected? || branch_protection.developer_can_merge?
+ end
+
private
def closest_namespace_setting(name)
@@ -2392,7 +2454,7 @@ class Project < ApplicationRecord
if repository_storage.blank? || repository_with_same_path_already_exists?
errors.add(:base, _('There is already a repository with that name on disk'))
- throw :abort
+ throw :abort # rubocop:disable Cop/BanCatchThrow
end
end
@@ -2482,6 +2544,12 @@ class Project < ApplicationRecord
reset
retry
end
+
+ def oids(objects, oids: [])
+ collection = oids.any? ? objects.where(oid: oids) : objects
+
+ collection.pluck(:oid)
+ end
end
Project.prepend_if_ee('EE::Project')
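Editor's note: a sketch of the repository-storage move flow introduced above; the project id and storage key are placeholders, and the worker name comes from the diff:

project = Project.find(some_id)
project.change_repository_storage('nfs-file02') # must be a key in Gitlab.config.repositories.storages
project.save!
# run_after_commit then enqueues ProjectUpdateRepositoryStorageWorker, and the
# project stays repository_read_only until the move finishes.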
diff --git a/app/models/project_ci_cd_setting.rb b/app/models/project_ci_cd_setting.rb
index b26a3025b61..39e177e8bd8 100644
--- a/app/models/project_ci_cd_setting.rb
+++ b/app/models/project_ci_cd_setting.rb
@@ -31,7 +31,7 @@ class ProjectCiCdSetting < ApplicationRecord
end
def forward_deployment_enabled?
- super && ::Feature.enabled?(:forward_deployment_enabled, project)
+ super && ::Feature.enabled?(:forward_deployment_enabled, project, default_enabled: true)
end
private
diff --git a/app/models/project_export_job.rb b/app/models/project_export_job.rb
new file mode 100644
index 00000000000..c7fe3d7bc10
--- /dev/null
+++ b/app/models/project_export_job.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+class ProjectExportJob < ApplicationRecord
+ belongs_to :project
+
+ validates :project, :jid, :status, presence: true
+
+ state_machine :status, initial: :queued do
+ event :start do
+ transition [:queued] => :started
+ end
+
+ event :finish do
+ transition [:started] => :finished
+ end
+
+ event :fail_op do
+ transition [:queued, :started] => :failed
+ end
+
+ state :queued, value: 0
+ state :started, value: 1
+ state :finished, value: 2
+ state :failed, value: 3
+ end
+end
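Editor's note: a hedged walk through the state machine defined above (predicate and event methods are the ones the state_machines gem generates; the project and jid are placeholders):

job = ProjectExportJob.create!(project: project, jid: SecureRandom.hex)
job.queued?  # => true, stored as 0
job.start    # queued  -> started
job.finish   # started -> finished
job.fail_op  # => false here: only queued/started jobs may transition to failed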
diff --git a/app/models/project_import_state.rb b/app/models/project_import_state.rb
index b79e3554926..cdb034e58fa 100644
--- a/app/models/project_import_state.rb
+++ b/app/models/project_import_state.rb
@@ -98,6 +98,10 @@ class ProjectImportState < ApplicationRecord
Gitlab::SidekiqStatus.set(jid, StuckImportJobsWorker::IMPORT_JOBS_EXPIRATION)
end
+
+ def self.jid_by(project_id:, status:)
+ select(:jid).with_status(status).find_by(project_id: project_id)
+ end
end
ProjectImportState.prepend_if_ee('EE::ProjectImportState')
diff --git a/app/models/project_services/chat_message/base_message.rb b/app/models/project_services/chat_message/base_message.rb
index 5c39a80b32d..bdd77a919e3 100644
--- a/app/models/project_services/chat_message/base_message.rb
+++ b/app/models/project_services/chat_message/base_message.rb
@@ -1,7 +1,5 @@
# frozen_string_literal: true
-require 'slack-notifier'
-
module ChatMessage
class BaseMessage
RELATIVE_LINK_REGEX = /!\[[^\]]*\]\((\/uploads\/[^\)]*)\)/.freeze
@@ -12,7 +10,6 @@ module ChatMessage
attr_reader :user_avatar
attr_reader :project_name
attr_reader :project_url
- attr_reader :commit_message_html
def initialize(params)
@markdown = params[:markdown] || false
@@ -21,7 +18,6 @@ module ChatMessage
@user_full_name = params.dig(:user, :name) || params[:user_full_name]
@user_name = params.dig(:user, :username) || params[:user_name]
@user_avatar = params.dig(:user, :avatar_url) || params[:user_avatar]
- @commit_message_html = params[:commit_message_html] || false
end
def user_combined_name
@@ -61,7 +57,7 @@ module ChatMessage
end
def format(string)
- Slack::Notifier::LinkFormatter.format(format_relative_links(string))
+ Slack::Messenger::Util::LinkFormatter.format(format_relative_links(string))
end
def format_relative_links(string)
diff --git a/app/models/project_services/chat_message/pipeline_message.rb b/app/models/project_services/chat_message/pipeline_message.rb
index 46fe894cfc3..52a26f6211a 100644
--- a/app/models/project_services/chat_message/pipeline_message.rb
+++ b/app/models/project_services/chat_message/pipeline_message.rb
@@ -1,5 +1,4 @@
# frozen_string_literal: true
-require 'slack-notifier'
module ChatMessage
class PipelineMessage < BaseMessage
@@ -98,7 +97,7 @@ module ChatMessage
def failed_stages_field
{
title: s_("ChatMessage|Failed stage").pluralize(failed_stages.length),
- value: Slack::Notifier::LinkFormatter.format(failed_stages_links),
+ value: Slack::Messenger::Util::LinkFormatter.format(failed_stages_links),
short: true
}
end
@@ -106,7 +105,7 @@ module ChatMessage
def failed_jobs_field
{
title: s_("ChatMessage|Failed job").pluralize(failed_jobs.length),
- value: Slack::Notifier::LinkFormatter.format(failed_jobs_links),
+ value: Slack::Messenger::Util::LinkFormatter.format(failed_jobs_links),
short: true
}
end
@@ -123,12 +122,12 @@ module ChatMessage
fields = [
{
title: ref_type == "tag" ? s_("ChatMessage|Tag") : s_("ChatMessage|Branch"),
- value: Slack::Notifier::LinkFormatter.format(ref_link),
+ value: Slack::Messenger::Util::LinkFormatter.format(ref_link),
short: true
},
{
title: s_("ChatMessage|Commit"),
- value: Slack::Notifier::LinkFormatter.format(commit_link),
+ value: Slack::Messenger::Util::LinkFormatter.format(commit_link),
short: true
}
]
diff --git a/app/models/project_services/chat_message/push_message.rb b/app/models/project_services/chat_message/push_message.rb
index 07622f570c2..c8e70a69c88 100644
--- a/app/models/project_services/chat_message/push_message.rb
+++ b/app/models/project_services/chat_message/push_message.rb
@@ -48,12 +48,11 @@ module ChatMessage
end
def format(string)
- Slack::Notifier::LinkFormatter.format(string)
+ Slack::Messenger::Util::LinkFormatter.format(string)
end
def commit_messages
- linebreak_chars = commit_message_html ? "<br/>\n<br/>\n" : "\n\n"
- commits.map { |commit| compose_commit_message(commit) }.join(linebreak_chars)
+ commits.map { |commit| compose_commit_message(commit) }.join("\n\n")
end
def commit_message_attachments
@@ -63,15 +62,11 @@ module ChatMessage
def compose_commit_message(commit)
author = commit[:author][:name]
id = Commit.truncate_sha(commit[:id])
- message = commit[:message]
-
- if commit_message_html
- message = message.gsub(Gitlab::Regex.breakline_regex, "<br/>\n")
- end
+ title = commit[:title]
url = commit[:url]
- "[#{id}](#{url}): #{message} - #{author}"
+ "[#{id}](#{url}): #{title} - #{author}"
end
def new_branch?
diff --git a/app/models/project_services/chat_notification_service.rb b/app/models/project_services/chat_notification_service.rb
index 46c8260ab48..7bd011101dd 100644
--- a/app/models/project_services/chat_notification_service.rb
+++ b/app/models/project_services/chat_notification_service.rb
@@ -84,10 +84,10 @@ class ChatNotificationService < Service
event_type = data[:event_type] || object_kind
- channel_name = get_channel_field(event_type).presence || channel
+ channel_names = get_channel_field(event_type).presence || channel
opts = {}
- opts[:channel] = channel_name if channel_name
+ opts[:channel] = channel_names.split(',').map(&:strip) if channel_names
opts[:username] = username if username
return false unless notify(message, opts)
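
A minimal sketch of the change above (sample values only): the channel field may now hold a comma-separated list, which is split and stripped before being passed to the notifier.

    channel_names = 'general, development, ops'
    opts = {}
    opts[:channel] = channel_names.split(',').map(&:strip) if channel_names
    opts  # => { channel: ["general", "development", "ops"] }
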
diff --git a/app/models/project_services/issue_tracker_service.rb b/app/models/project_services/issue_tracker_service.rb
index 9e1393196ff..3f7e8a720aa 100644
--- a/app/models/project_services/issue_tracker_service.rb
+++ b/app/models/project_services/issue_tracker_service.rb
@@ -19,9 +19,9 @@ class IssueTrackerService < Service
# overridden patterns. See ReferenceRegexes.external_pattern
def self.reference_pattern(only_long: false)
if only_long
- /(\b[A-Z][A-Z0-9_]*-)(?<issue>\d+)/
+ /(\b[A-Z][A-Z0-9_]*-)#{Gitlab::Regex.issue}/
else
- /(\b[A-Z][A-Z0-9_]*-|#{Issue.reference_prefix})(?<issue>\d+)/
+ /(\b[A-Z][A-Z0-9_]*-|#{Issue.reference_prefix})#{Gitlab::Regex.issue}/
end
end
@@ -151,6 +151,14 @@ class IssueTrackerService < Service
result
end
+ def support_close_issue?
+ false
+ end
+
+ def support_cross_reference?
+ false
+ end
+
private
def enabled_in_gitlab_config
@@ -168,7 +176,7 @@ class IssueTrackerService < Service
return if project.blank?
if project.services.external_issue_trackers.where.not(id: id).any?
- errors.add(:base, 'Another issue tracker is already in use. Only one issue tracker service can be active at a time')
+ errors.add(:base, _('Another issue tracker is already in use. Only one issue tracker service can be active at a time'))
end
end
end
diff --git a/app/models/project_services/jira_service.rb b/app/models/project_services/jira_service.rb
index 9875e0b9b88..2766ba11c97 100644
--- a/app/models/project_services/jira_service.rb
+++ b/app/models/project_services/jira_service.rb
@@ -1,6 +1,7 @@
# frozen_string_literal: true
class JiraService < IssueTrackerService
+ extend ::Gitlab::Utils::Override
include Gitlab::Routing
include ApplicationHelper
include ActionView::Helpers::AssetUrlHelper
@@ -205,6 +206,16 @@ class JiraService < IssueTrackerService
nil
end
+ override :support_close_issue?
+ def support_close_issue?
+ true
+ end
+
+ override :support_cross_reference?
+ def support_cross_reference?
+ true
+ end
+
private
def test_settings
@@ -229,7 +240,15 @@ class JiraService < IssueTrackerService
jira_issue_transition_id.scan(Gitlab::Regex.jira_transition_id_regex).each do |transition_id|
issue.transitions.build.save!(transition: { id: transition_id })
rescue => error
- log_error("Issue transition failed", error: error.message, client_url: client_url)
+ log_error(
+ "Issue transition failed",
+ error: {
+ exception_class: error.class.name,
+ exception_message: error.message,
+ exception_backtrace: Gitlab::BacktraceCleaner.clean_backtrace(error.backtrace)
+ },
+ client_url: client_url
+ )
return false
end
end
@@ -272,21 +291,17 @@ class JiraService < IssueTrackerService
return unless client_url.present?
jira_request do
- create_issue_link(issue, remote_link_props)
- create_issue_comment(issue, message)
+ remote_link = find_remote_link(issue, remote_link_props[:object][:url])
+
+ create_issue_comment(issue, message) unless remote_link
+ remote_link ||= issue.remotelink.build
+ remote_link.save!(remote_link_props)
log_info("Successfully posted", client_url: client_url)
"SUCCESS: Successfully posted to #{client_url}."
end
end
- def create_issue_link(issue, remote_link_props)
- remote_link = find_remote_link(issue, remote_link_props[:object][:url])
- remote_link ||= issue.remotelink.build
-
- remote_link.save!(remote_link_props)
- end
-
def create_issue_comment(issue, message)
return unless comment_on_event_enabled
@@ -354,7 +369,7 @@ class JiraService < IssueTrackerService
error: {
exception_class: error.class.name,
exception_message: error.message,
- exception_backtrace: error.backtrace.join("\n")
+ exception_backtrace: Gitlab::BacktraceCleaner.clean_backtrace(error.backtrace)
}
)
nil
diff --git a/app/models/project_services/microsoft_teams_service.rb b/app/models/project_services/microsoft_teams_service.rb
index 5cabce1376b..111d010d672 100644
--- a/app/models/project_services/microsoft_teams_service.rb
+++ b/app/models/project_services/microsoft_teams_service.rb
@@ -58,6 +58,6 @@ class MicrosoftTeamsService < ChatNotificationService
end
def custom_data(data)
- super(data).merge(markdown: true, commit_message_html: true)
+ super(data).merge(markdown: true)
end
end
diff --git a/app/models/project_services/pipelines_email_service.rb b/app/models/project_services/pipelines_email_service.rb
index 65bf8535d2a..c3ed958242b 100644
--- a/app/models/project_services/pipelines_email_service.rb
+++ b/app/models/project_services/pipelines_email_service.rb
@@ -49,7 +49,7 @@ class PipelinesEmailService < Service
return unless all_recipients.any?
pipeline_id = data[:object_attributes][:id]
- PipelineNotificationWorker.new.perform(pipeline_id, all_recipients)
+ PipelineNotificationWorker.new.perform(pipeline_id, recipients: all_recipients)
end
def can_test?
diff --git a/app/models/project_services/prometheus_service.rb b/app/models/project_services/prometheus_service.rb
index 00b06ae2595..75dfad4f3df 100644
--- a/app/models/project_services/prometheus_service.rb
+++ b/app/models/project_services/prometheus_service.rb
@@ -88,7 +88,9 @@ class PrometheusService < MonitoringService
return false if template?
return false unless project
- project.all_clusters.enabled.any? { |cluster| cluster.application_prometheus_available? }
+ project.all_clusters.enabled.eager_load(:application_prometheus).any? do |cluster|
+ cluster.application_prometheus&.available?
+ end
end
def allow_local_api_url?
diff --git a/app/models/project_services/slack_service.rb b/app/models/project_services/slack_service.rb
index 7290964f442..6d567bb1383 100644
--- a/app/models/project_services/slack_service.rb
+++ b/app/models/project_services/slack_service.rb
@@ -13,18 +13,8 @@ class SlackService < ChatNotificationService
'slack'
end
- def help
- 'This service sends notifications about projects events to Slack channels.<br />
- To set up this service:
- <ol>
- <li><a href="https://slack.com/apps/A0F7XDUAZ-incoming-webhooks">Add an incoming webhook</a> in your Slack team. The default channel can be overridden for each event.</li>
- <li>Paste the <strong>Webhook URL</strong> into the field below.</li>
- <li>Select events below to enable notifications. The <strong>Channel name</strong> and <strong>Username</strong> fields are optional.</li>
- </ol>'
- end
-
def default_channel_placeholder
- "Channel name (e.g. general)"
+ _('Slack channels (e.g. general, development)')
end
def webhook_placeholder
@@ -35,8 +25,8 @@ class SlackService < ChatNotificationService
private
def notify(message, opts)
- # See https://github.com/stevenosloan/slack-notifier#custom-http-client
- notifier = Slack::Notifier.new(webhook, opts.merge(http_client: HTTPClient))
+ # See https://gitlab.com/gitlab-org/slack-notifier/#custom-http-client
+ notifier = Slack::Messenger.new(webhook, opts.merge(http_client: HTTPClient))
notifier.ping(
message.pretext,
diff --git a/app/models/project_services/slack_slash_commands_service.rb b/app/models/project_services/slack_slash_commands_service.rb
index 6a454070fe2..01ded0495a7 100644
--- a/app/models/project_services/slack_slash_commands_service.rb
+++ b/app/models/project_services/slack_slash_commands_service.rb
@@ -29,6 +29,6 @@ class SlackSlashCommandsService < SlashCommandsService
private
def format(text)
- Slack::Notifier::LinkFormatter.format(text) if text
+ Slack::Messenger::Util::LinkFormatter.format(text) if text
end
end
diff --git a/app/models/project_services/youtrack_service.rb b/app/models/project_services/youtrack_service.rb
index 02d06eeb405..0815e27850d 100644
--- a/app/models/project_services/youtrack_service.rb
+++ b/app/models/project_services/youtrack_service.rb
@@ -8,7 +8,7 @@ class YoutrackService < IssueTrackerService
if only_long
/(?<issue>\b[A-Za-z][A-Za-z0-9_]*-\d+\b)/
else
- /(?<issue>\b[A-Za-z][A-Za-z0-9_]*-\d+\b)|(#{Issue.reference_prefix}(?<issue>\d+))/
+ /(?<issue>\b[A-Za-z][A-Za-z0-9_]*-\d+\b)|(#{Issue.reference_prefix}#{Gitlab::Regex.issue})/
end
end
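
A minimal sketch of the long-form YouTrack pattern shown above, applied to sample text; Gitlab::Regex.issue, used by the short form, is not reproduced here.

    pattern = /(?<issue>\b[A-Za-z][A-Za-z0-9_]*-\d+\b)/
    'Fixes PROJ-123 and ABC_1-7'.scan(pattern)  # => [["PROJ-123"], ["ABC_1-7"]]
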
diff --git a/app/models/project_wiki.rb b/app/models/project_wiki.rb
index 1abde5196de..f8528a41634 100644
--- a/app/models/project_wiki.rb
+++ b/app/models/project_wiki.rb
@@ -1,8 +1,8 @@
# frozen_string_literal: true
class ProjectWiki
- include Gitlab::ShellAdapter
include Storage::LegacyProjectWiki
+ include Gitlab::Utils::StrongMemoize
MARKUPS = {
'Markdown' => :markdown,
@@ -47,7 +47,7 @@ class ProjectWiki
end
def url_to_repo
- gitlab_shell.url_to_repo(full_path)
+ Gitlab::Shell.url_to_repo(full_path)
end
def ssh_url_to_repo
@@ -64,14 +64,15 @@ class ProjectWiki
# Returns the Gitlab::Git::Wiki object.
def wiki
- @wiki ||= begin
- gl_repository = Gitlab::GlRepository::WIKI.identifier_for_container(project)
- raw_repository = Gitlab::Git::Repository.new(project.repository_storage, disk_path + '.git', gl_repository, full_path)
+ strong_memoize(:wiki) do
+ repository.create_if_not_exists
+ raise CouldNotCreateWikiError unless repository_exists?
- create_repo!(raw_repository) unless raw_repository.exists?
-
- Gitlab::Git::Wiki.new(raw_repository)
+ Gitlab::Git::Wiki.new(repository.raw)
end
+ rescue => err
+ Gitlab::ErrorTracking.track_exception(err, project_wiki: { project_id: project.id, full_path: full_path, disk_path: disk_path })
+ raise CouldNotCreateWikiError
end
def repository_exists?
@@ -107,7 +108,7 @@ class ProjectWiki
direction_desc: direction == DIRECTION_DESC,
load_content: load_content
).map do |page|
- WikiPage.new(self, page, true)
+ WikiPage.new(self, page)
end
end
@@ -122,7 +123,7 @@ class ProjectWiki
page_title, page_dir = page_title_and_dir(title)
if page = wiki.page(title: page_title, version: version, dir: page_dir)
- WikiPage.new(self, page, true)
+ WikiPage.new(self, page)
end
end
@@ -170,7 +171,7 @@ class ProjectWiki
end
def repository
- @repository ||= Repository.new(full_path, @project, disk_path: disk_path, repo_type: Gitlab::GlRepository::WIKI)
+ @repository ||= Repository.new(full_path, @project, shard: repository_storage, disk_path: disk_path, repo_type: Gitlab::GlRepository::WIKI)
end
def default_branch
@@ -193,14 +194,6 @@ class ProjectWiki
private
- def create_repo!(raw_repository)
- gitlab_shell.create_wiki_repository(project)
-
- raise CouldNotCreateWikiError unless raw_repository.exists?
-
- repository.after_create
- end
-
def commit_details(action, message = nil, title = nil)
commit_message = message.presence || default_message(action, title)
git_user = Gitlab::Git::User.from_gitlab(@user)
diff --git a/app/models/prometheus_alert.rb b/app/models/prometheus_alert.rb
index 1014231102f..1dc7dc73e31 100644
--- a/app/models/prometheus_alert.rb
+++ b/app/models/prometheus_alert.rb
@@ -69,13 +69,13 @@ class PrometheusAlert < ApplicationRecord
def require_valid_environment_project!
return if project == environment&.project
- errors.add(:environment, "invalid project")
+ errors.add(:environment, 'invalid project')
end
def require_valid_metric_project!
return if prometheus_metric&.common?
return if project == prometheus_metric&.project
- errors.add(:prometheus_metric, "invalid project")
+ errors.add(:prometheus_metric, 'invalid project')
end
end
diff --git a/app/models/protected_branch.rb b/app/models/protected_branch.rb
index 94c3b83564f..594c822c18f 100644
--- a/app/models/protected_branch.rb
+++ b/app/models/protected_branch.rb
@@ -11,7 +11,8 @@ class ProtectedBranch < ApplicationRecord
def self.protected_ref_accessible_to?(ref, user, project:, action:, protected_refs: nil)
# Maintainers, owners and admins are allowed to create the default branch
- if default_branch_protected? && project.empty_repo?
+
+ if project.empty_repo? && project.default_branch_protected?
return true if user.admin? || project.team.max_member_access(user.id) > Gitlab::Access::DEVELOPER
end
@@ -20,7 +21,7 @@ class ProtectedBranch < ApplicationRecord
# Check if branch name is marked as protected in the system
def self.protected?(project, ref_name)
- return true if project.empty_repo? && default_branch_protected?
+ return true if project.empty_repo? && project.default_branch_protected?
self.matching(ref_name, protected_refs: protected_refs(project)).present?
end
@@ -33,11 +34,6 @@ class ProtectedBranch < ApplicationRecord
end
end
- def self.default_branch_protected?
- Gitlab::CurrentSettings.default_branch_protection == Gitlab::Access::PROTECTION_FULL ||
- Gitlab::CurrentSettings.default_branch_protection == Gitlab::Access::PROTECTION_DEV_CAN_MERGE
- end
-
def self.protected_refs(project)
project.protected_branches
end
diff --git a/app/models/release.rb b/app/models/release.rb
index 2543717895f..45c2a56d764 100644
--- a/app/models/release.rb
+++ b/app/models/release.rb
@@ -24,7 +24,7 @@ class Release < ApplicationRecord
accepts_nested_attributes_for :links, allow_destroy: true
- validates :description, :project, :tag, presence: true
+ validates :project, :tag, presence: true
validates_associated :milestone_releases, message: -> (_, obj) { obj[:value].map(&:errors).map(&:full_messages).join(",") }
scope :sorted, -> { order(released_at: :desc) }
diff --git a/app/models/releases/link.rb b/app/models/releases/link.rb
index 58c2b98e524..65be2a22958 100644
--- a/app/models/releases/link.rb
+++ b/app/models/releases/link.rb
@@ -6,8 +6,11 @@ module Releases
belongs_to :release
+ FILEPATH_REGEX = /\A\/([\-\.\w]+\/?)*[\da-zA-Z]+\z/.freeze
+
validates :url, presence: true, addressable_url: { schemes: %w(http https ftp) }, uniqueness: { scope: :release }
validates :name, presence: true, uniqueness: { scope: :release }
+ validates :filepath, uniqueness: { scope: :release }, format: { with: FILEPATH_REGEX }, allow_blank: true, length: { maximum: 128 }
scope :sorted, -> { order(created_at: :desc) }
diff --git a/app/models/repository.rb b/app/models/repository.rb
index cddffa9bb1d..a9ef0504a3d 100644
--- a/app/models/repository.rb
+++ b/app/models/repository.rb
@@ -22,7 +22,7 @@ class Repository
include Gitlab::RepositoryCacheAdapter
- attr_accessor :full_path, :disk_path, :container, :repo_type
+ attr_accessor :full_path, :shard, :disk_path, :container, :repo_type
delegate :ref_name_for_sha, to: :raw_repository
delegate :bundle_to_disk, to: :raw_repository
@@ -65,8 +65,9 @@ class Repository
xcode_config: :xcode_project?
}.freeze
- def initialize(full_path, container, disk_path: nil, repo_type: Gitlab::GlRepository::PROJECT)
+ def initialize(full_path, container, shard:, disk_path: nil, repo_type: Gitlab::GlRepository::PROJECT)
@full_path = full_path
+ @shard = shard
@disk_path = disk_path || full_path
@container = container
@commit_cache = {}
@@ -95,7 +96,7 @@ class Repository
def path_to_repo
@path_to_repo ||=
begin
- storage = Gitlab.config.repositories.storages[container.repository_storage]
+ storage = Gitlab.config.repositories.storages[shard]
File.expand_path(
File.join(storage.legacy_disk_path, disk_path + '.git')
@@ -139,6 +140,7 @@ class Repository
repo: raw_repository,
ref: ref,
path: opts[:path],
+ author: opts[:author],
follow: Array(opts[:path]).length == 1,
limit: opts[:limit],
offset: opts[:offset],
@@ -435,15 +437,6 @@ class Repository
expire_all_method_caches
end
- # Runs code after a repository has been forked/imported.
- def after_import
- expire_content_cache
-
- return unless repo_type.project?
-
- DetectRepositoryLanguagesWorker.perform_async(project.id)
- end
-
# Runs code after a new commit has been pushed.
def after_push_commit(branch_name)
expire_statistics_caches
@@ -909,10 +902,8 @@ class Repository
def merged_branch_names(branch_names = [])
# Currently we should skip caching if requesting all branch names
# This is only used in a few places, notably app/services/branches/delete_merged_service.rb,
- # and it could potentially result in a very large cache/performance issues with the current
- # implementation.
- skip_cache = branch_names.empty? || Feature.disabled?(:merged_branch_names_redis_caching, default_enabled: true)
- return raw_repository.merged_branch_names(branch_names) if skip_cache
+ # and it could potentially result in a very large cache.
+ return raw_repository.merged_branch_names(branch_names) if branch_names.empty?
cache = redis_hash_cache
@@ -1070,8 +1061,7 @@ class Repository
end
def squash(user, merge_request, message)
- raw.squash(user, merge_request.id, branch: merge_request.target_branch,
- start_sha: merge_request.diff_start_sha,
+ raw.squash(user, merge_request.id, start_sha: merge_request.diff_start_sha,
end_sha: merge_request.diff_head_sha,
author: merge_request.author,
message: message)
@@ -1180,7 +1170,7 @@ class Repository
end
def initialize_raw_repository
- Gitlab::Git::Repository.new(container.repository_storage,
+ Gitlab::Git::Repository.new(shard,
disk_path + '.git',
repo_type.identifier_for_container(container),
container.full_path)
diff --git a/app/models/resource_event.rb b/app/models/resource_event.rb
new file mode 100644
index 00000000000..86e11c2d568
--- /dev/null
+++ b/app/models/resource_event.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+class ResourceEvent < ApplicationRecord
+ include Gitlab::Utils::StrongMemoize
+ include Importable
+
+ self.abstract_class = true
+
+ validates :user, presence: { unless: :importing? }, on: :create
+
+ belongs_to :user
+
+ scope :created_after, ->(time) { where('created_at > ?', time) }
+
+ def discussion_id
+ strong_memoize(:discussion_id) do
+ Digest::SHA1.hexdigest(discussion_id_key.join("-"))
+ end
+ end
+
+ private
+
+ def discussion_id_key
+ [self.class.name, id, user_id]
+ end
+
+ def exactly_one_issuable
+ issuable_count = self.class.issuable_attrs.count { |attr| self["#{attr}_id"] }
+
+ return true if issuable_count == 1
+
+ # if none of issuable IDs is set, check explicitly if nested issuable
+ # object is set, this is used during project import
+ if issuable_count == 0 && importing?
+ issuable_count = self.class.issuable_attrs.count { |attr| self.public_send(attr) } # rubocop:disable GitlabSecurity/PublicSend
+
+ return true if issuable_count == 1
+ end
+
+ errors.add(
+ :base, _("Exactly one of %{attributes} is required") %
+ { attributes: self.class.issuable_attrs.join(', ') }
+ )
+ end
+end
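
Editorial sketch, not part of the patch: the new abstract ResourceEvent centralises the user validation, the created_after scope and discussion_id, which the label and weight event hunks below stop duplicating. The values here are made up; note the key now uses the record id, where ResourceWeightEvent previously keyed on created_at.

    require 'digest'

    # discussion_id is a SHA1 over "<class name>-<id>-<user_id>"
    Digest::SHA1.hexdigest(['ResourceWeightEvent', 7, 42].join('-'))
    # => a stable 40-character hex id used to group the event into a discussion
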
diff --git a/app/models/resource_label_event.rb b/app/models/resource_label_event.rb
index 98fc9e7bae8..8e66310f0c5 100644
--- a/app/models/resource_label_event.rb
+++ b/app/models/resource_label_event.rb
@@ -1,21 +1,18 @@
# frozen_string_literal: true
-class ResourceLabelEvent < ApplicationRecord
- include Importable
- include Gitlab::Utils::StrongMemoize
+class ResourceLabelEvent < ResourceEvent
include CacheMarkdownField
cache_markdown_field :reference
- belongs_to :user
belongs_to :issue
belongs_to :merge_request
belongs_to :label
- scope :created_after, ->(time) { where('created_at > ?', time) }
scope :inc_relations, -> { includes(:label, :user) }
+ scope :by_issue, ->(issue) { where(issue_id: issue.id) }
+ scope :by_merge_request, ->(merge_request) { where(merge_request_id: merge_request.id) }
- validates :user, presence: { unless: :importing? }, on: :create
validates :label, presence: { unless: :importing? }, on: :create
validate :exactly_one_issuable
@@ -44,12 +41,6 @@ class ResourceLabelEvent < ApplicationRecord
issue || merge_request
end
- def discussion_id(resource = nil)
- strong_memoize(:discussion_id) do
- Digest::SHA1.hexdigest(discussion_id_key.join("-"))
- end
- end
-
def project
issuable.project
end
@@ -94,22 +85,6 @@ class ResourceLabelEvent < ApplicationRecord
end
end
- def exactly_one_issuable
- issuable_count = self.class.issuable_attrs.count { |attr| self["#{attr}_id"] }
-
- return true if issuable_count == 1
-
- # if none of issuable IDs is set, check explicitly if nested issuable
- # object is set, this is used during project import
- if issuable_count == 0 && importing?
- issuable_count = self.class.issuable_attrs.count { |attr| self.public_send(attr) } # rubocop:disable GitlabSecurity/PublicSend
-
- return true if issuable_count == 1
- end
-
- errors.add(:base, "Exactly one of #{self.class.issuable_attrs.join(', ')} is required")
- end
-
def expire_etag_cache
issuable.expire_note_etag_cache
end
diff --git a/app/models/resource_milestone_event.rb b/app/models/resource_milestone_event.rb
new file mode 100644
index 00000000000..d362ebc307a
--- /dev/null
+++ b/app/models/resource_milestone_event.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+class ResourceMilestoneEvent < ResourceEvent
+ belongs_to :issue
+ belongs_to :merge_request
+ belongs_to :milestone
+
+ scope :by_issue, ->(issue) { where(issue_id: issue.id) }
+ scope :by_merge_request, ->(merge_request) { where(merge_request_id: merge_request.id) }
+
+ validate :exactly_one_issuable
+
+ enum action: {
+ add: 1,
+ remove: 2
+ }
+
+ # state is used for issue and merge request states.
+ enum state: Issue.available_states.merge(MergeRequest.available_states)
+
+ def self.issuable_attrs
+ %i(issue merge_request).freeze
+ end
+end
diff --git a/app/models/resource_weight_event.rb b/app/models/resource_weight_event.rb
index ab288798aed..e0cc0c87a83 100644
--- a/app/models/resource_weight_event.rb
+++ b/app/models/resource_weight_event.rb
@@ -1,26 +1,9 @@
# frozen_string_literal: true
-class ResourceWeightEvent < ApplicationRecord
- include Gitlab::Utils::StrongMemoize
-
- validates :user, presence: true
+class ResourceWeightEvent < ResourceEvent
validates :issue, presence: true
- belongs_to :user
belongs_to :issue
scope :by_issue, ->(issue) { where(issue_id: issue.id) }
- scope :created_after, ->(time) { where('created_at > ?', time) }
-
- def discussion_id(resource = nil)
- strong_memoize(:discussion_id) do
- Digest::SHA1.hexdigest(discussion_id_key.join("-"))
- end
- end
-
- private
-
- def discussion_id_key
- [self.class.name, created_at, user_id]
- end
end
diff --git a/app/models/sent_notification.rb b/app/models/sent_notification.rb
index 0427d5b9ca7..f3a9293376f 100644
--- a/app/models/sent_notification.rb
+++ b/app/models/sent_notification.rb
@@ -111,7 +111,10 @@ class SentNotification < ApplicationRecord
note = create_reply('Test', dryrun: true)
unless note.valid?
- self.errors.add(:base, "Note parameters are invalid: #{note.errors.full_messages.to_sentence}")
+ self.errors.add(
+ :base, _("Note parameters are invalid: %{errors}") %
+ { errors: note.errors.full_messages.to_sentence }
+ )
end
end
diff --git a/app/models/serverless/domain.rb b/app/models/serverless/domain.rb
new file mode 100644
index 00000000000..2fef3b66b08
--- /dev/null
+++ b/app/models/serverless/domain.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+module Serverless
+ class Domain
+ include ActiveModel::Model
+
+ REGEXP = %r{^(?<scheme>https?://)?(?<function_name>[^.]+)-(?<cluster_left>\h{2})a1(?<cluster_middle>\h{10})f2(?<cluster_right>\h{2})(?<environment_id>\h+)-(?<environment_slug>[^.]+)\.(?<pages_domain_name>.+)}.freeze
+ UUID_LENGTH = 14
+
+ attr_accessor :function_name, :serverless_domain_cluster, :environment
+
+ validates :function_name, presence: true, allow_blank: false
+ validates :serverless_domain_cluster, presence: true
+ validates :environment, presence: true
+
+ def self.generate_uuid
+ SecureRandom.hex(UUID_LENGTH / 2)
+ end
+
+ def uri
+ URI("https://#{function_name}-#{serverless_domain_cluster_uuid}#{"%x" % environment.id}-#{environment.slug}.#{serverless_domain_cluster.domain}")
+ end
+
+ def knative_uri
+ URI("http://#{function_name}.#{namespace}.#{serverless_domain_cluster.knative.hostname}")
+ end
+
+ private
+
+ def namespace
+ serverless_domain_cluster.cluster.kubernetes_namespace_for(environment)
+ end
+
+ def serverless_domain_cluster_uuid
+ [
+ serverless_domain_cluster.uuid[0..1],
+ 'a1',
+ serverless_domain_cluster.uuid[2..-3],
+ 'f2',
+ serverless_domain_cluster.uuid[-2..-1]
+ ].join
+ end
+ end
+end
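
Editorial sketch, not part of the patch: the 14-character cluster uuid is split and rejoined around the fixed 'a1'/'f2' markers, so the REGEXP above can recover its pieces from the generated hostname. The uuid here is a sample.

    require 'securerandom'

    uuid = SecureRandom.hex(14 / 2)   # UUID_LENGTH = 14 hex characters
    encoded = [
      uuid[0..1],    # <cluster_left>,   2 hex chars
      'a1',          # fixed marker
      uuid[2..-3],   # <cluster_middle>, 10 hex chars
      'f2',          # fixed marker
      uuid[-2..-1]   # <cluster_right>,  2 hex chars
    ].join
    encoded.length   # => 18, embedded between the function name and environment id
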
diff --git a/app/models/serverless/domain_cluster.rb b/app/models/serverless/domain_cluster.rb
index 94d90d3e305..9f914d5c3f8 100644
--- a/app/models/serverless/domain_cluster.rb
+++ b/app/models/serverless/domain_cluster.rb
@@ -16,11 +16,18 @@ module Serverless
algorithm: 'aes-256-gcm'
validates :pages_domain, :knative, presence: true
- validates :uuid, presence: true, uniqueness: true, length: { is: Gitlab::Serverless::Domain::UUID_LENGTH },
+ validates :uuid, presence: true, uniqueness: true, length: { is: ::Serverless::Domain::UUID_LENGTH },
format: { with: HEX_REGEXP, message: 'only allows hex characters' }
- default_value_for(:uuid, allows_nil: false) { Gitlab::Serverless::Domain.generate_uuid }
+ default_value_for(:uuid, allows_nil: false) { ::Serverless::Domain.generate_uuid }
delegate :domain, to: :pages_domain
+ delegate :cluster, to: :knative
+
+ def self.for_uuid(uuid)
+ joins(:pages_domain, :knative)
+ .includes(:pages_domain, :knative)
+ .find_by(uuid: uuid)
+ end
end
end
diff --git a/app/models/serverless/lookup_path.rb b/app/models/serverless/lookup_path.rb
new file mode 100644
index 00000000000..c09b3718651
--- /dev/null
+++ b/app/models/serverless/lookup_path.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module Serverless
+ class LookupPath
+ attr_reader :serverless_domain
+
+ delegate :serverless_domain_cluster, to: :serverless_domain
+ delegate :knative, to: :serverless_domain_cluster
+ delegate :certificate, to: :serverless_domain_cluster
+ delegate :key, to: :serverless_domain_cluster
+
+ def initialize(serverless_domain)
+ @serverless_domain = serverless_domain
+ end
+
+ def source
+ {
+ type: 'serverless',
+ service: serverless_domain.knative_uri.host,
+ cluster: {
+ hostname: knative.hostname,
+ address: knative.external_ip,
+ port: 443,
+ cert: certificate,
+ key: key
+ }
+ }
+ end
+ end
+end
diff --git a/app/models/serverless/virtual_domain.rb b/app/models/serverless/virtual_domain.rb
new file mode 100644
index 00000000000..d6a23a4c0ce
--- /dev/null
+++ b/app/models/serverless/virtual_domain.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+module Serverless
+ class VirtualDomain
+ attr_reader :serverless_domain
+
+ delegate :serverless_domain_cluster, to: :serverless_domain
+ delegate :pages_domain, to: :serverless_domain_cluster
+ delegate :certificate, to: :pages_domain
+ delegate :key, to: :pages_domain
+
+ def initialize(serverless_domain)
+ @serverless_domain = serverless_domain
+ end
+
+ def lookup_paths
+ [
+ ::Serverless::LookupPath.new(serverless_domain)
+ ]
+ end
+ end
+end
diff --git a/app/models/service.rb b/app/models/service.rb
index e60dda59176..d866eeb3531 100644
--- a/app/models/service.rb
+++ b/app/models/service.rb
@@ -32,8 +32,12 @@ class Service < ApplicationRecord
belongs_to :project, inverse_of: :services
has_one :service_hook
- validates :project_id, presence: true, unless: proc { |service| service.template? }
+ validates :project_id, presence: true, unless: -> { template? || instance? }
+ validates :project_id, absence: true, if: -> { template? || instance? }
validates :type, presence: true
+ validates :template, uniqueness: { scope: :type }, if: -> { template? }
+ validates :instance, uniqueness: { scope: :type }, if: -> { instance? }
+ validate :validate_is_instance_or_template
scope :visible, -> { where.not(type: 'GitlabIssueTrackerService') }
scope :issue_trackers, -> { where(category: 'issue_tracker') }
@@ -70,10 +74,6 @@ class Service < ApplicationRecord
true
end
- def template?
- template
- end
-
def category
read_attribute(:category).to_sym
end
@@ -322,10 +322,6 @@ class Service < ApplicationRecord
nil
end
- def self.find_by_template
- find_by(template: true)
- end
-
# override if needed
def supports_data_fields?
false
@@ -333,6 +329,10 @@ class Service < ApplicationRecord
private
+ def validate_is_instance_or_template
+ errors.add(:template, 'The service should be a service template or instance-level integration') if template? && instance?
+ end
+
def cache_project_has_external_issue_tracker
if project && !project.destroyed?
project.cache_has_external_issue_tracker
diff --git a/app/models/snippet.rb b/app/models/snippet.rb
index 4ba8e6a94e6..3000ab7cad0 100644
--- a/app/models/snippet.rb
+++ b/app/models/snippet.rb
@@ -17,8 +17,9 @@ class Snippet < ApplicationRecord
include HasRepository
extend ::Gitlab::Utils::Override
- ignore_column :storage_version, remove_with: '12.9', remove_after: '2020-03-22'
- ignore_column :repository_storage, remove_with: '12.10', remove_after: '2020-04-22'
+ MAX_FILE_COUNT = 1
+
+ ignore_column :repository_storage, remove_with: '12.10', remove_after: '2020-03-22'
cache_markdown_field :title, pipeline: :single_line
cache_markdown_field :description
@@ -161,6 +162,10 @@ class Snippet < ApplicationRecord
@link_reference_pattern ||= super("snippets", /(?<snippet>\d+)/)
end
+ def self.find_by_id_and_project(id:, project:)
+ Snippet.find_by(id: id, project: project)
+ end
+
def initialize(attributes = {})
# We can't use default_value_for because the database has a default
# value of 0 for visibility_level. If someone attempts to create a
@@ -189,16 +194,14 @@ class Snippet < ApplicationRecord
end
end
- def self.content_types
- [
- ".rb", ".py", ".pl", ".scala", ".c", ".cpp", ".java",
- ".haml", ".html", ".sass", ".scss", ".xml", ".php", ".erb",
- ".js", ".sh", ".coffee", ".yml", ".md"
- ]
+ def blob
+ @blob ||= Blob.decorate(SnippetBlob.new(self), self)
end
- def blob
- @blob ||= Blob.decorate(SnippetBlob.new(self), nil)
+ def blobs
+ return [] unless repository_exists?
+
+ repository.ls_files(repository.root_ref).map { |file| Blob.lazy(self, repository.root_ref, file) }
end
def hook_attrs
@@ -209,7 +212,7 @@ class Snippet < ApplicationRecord
super.to_s
end
- def sanitized_file_name
+ def self.sanitized_file_name(file_name)
file_name.gsub(/[^a-zA-Z0-9_\-\.]+/, '')
end
@@ -258,7 +261,7 @@ class Snippet < ApplicationRecord
end
def repository
- @repository ||= Repository.new(full_path, self, disk_path: disk_path, repo_type: Gitlab::GlRepository::SNIPPET)
+ @repository ||= Repository.new(full_path, self, shard: repository_storage, disk_path: disk_path, repo_type: Gitlab::GlRepository::SNIPPET)
end
def storage
@@ -280,17 +283,20 @@ class Snippet < ApplicationRecord
end
end
+ def url_to_repo
+ Gitlab::Shell.url_to_repo(full_path.delete('@'))
+ end
+
def repository_storage
snippet_repository&.shard_name ||
Gitlab::CurrentSettings.pick_repository_storage
end
def create_repository
- return if repository_exists?
+ return if repository_exists? && snippet_repository
repository.create_if_not_exists
-
- track_snippet_repository if repository_exists?
+ track_snippet_repository
end
def track_snippet_repository
@@ -298,8 +304,16 @@ class Snippet < ApplicationRecord
repository.update!(shard_name: repository_storage, disk_path: disk_path)
end
+ def can_cache_field?(field)
+ field != :content || MarkupHelper.gitlab_markdown?(file_name)
+ end
+
+ def hexdigest
+ Digest::SHA256.hexdigest("#{title}#{description}#{created_at}#{updated_at}")
+ end
+
class << self
- # Searches for snippets with a matching title or file name.
+ # Searches for snippets with a matching title, description or file name.
#
# This method uses ILIKE on PostgreSQL and LIKE on MySQL.
#
@@ -307,7 +321,7 @@ class Snippet < ApplicationRecord
#
# Returns an ActiveRecord::Relation.
def search(query)
- fuzzy_search(query, [:title, :file_name])
+ fuzzy_search(query, [:title, :description, :file_name])
end
# Searches for snippets with matching content.
diff --git a/app/models/snippet_repository.rb b/app/models/snippet_repository.rb
index ba2a061a5f4..f879f58b5a3 100644
--- a/app/models/snippet_repository.rb
+++ b/app/models/snippet_repository.rb
@@ -3,11 +3,74 @@
class SnippetRepository < ApplicationRecord
include Shardable
+ DEFAULT_EMPTY_FILE_NAME = 'snippetfile'
+ EMPTY_FILE_PATTERN = /^#{DEFAULT_EMPTY_FILE_NAME}(\d+)\.txt$/.freeze
+
+ CommitError = Class.new(StandardError)
+
belongs_to :snippet, inverse_of: :snippet_repository
+ delegate :repository, to: :snippet
+
class << self
def find_snippet(disk_path)
find_by(disk_path: disk_path)&.snippet
end
end
+
+ def multi_files_action(user, files = [], **options)
+ return if files.nil? || files.empty?
+
+ lease_key = "multi_files_action:#{snippet_id}"
+
+ lease = Gitlab::ExclusiveLease.new(lease_key, timeout: 120)
+ raise CommitError, 'Snippet is already being updated' unless uuid = lease.try_obtain
+
+ options[:actions] = transform_file_entries(files)
+
+ capture_git_error { repository.multi_action(user, **options) }
+ ensure
+ Gitlab::ExclusiveLease.cancel(lease_key, uuid)
+ end
+
+ private
+
+ def capture_git_error(&block)
+ yield block
+ rescue Gitlab::Git::Index::IndexError,
+ Gitlab::Git::CommitError,
+ Gitlab::Git::PreReceiveError,
+ Gitlab::Git::CommandError => e
+ raise CommitError, e.message
+ end
+
+ def transform_file_entries(files)
+ next_index = get_last_empty_file_index + 1
+
+ files.each do |file_entry|
+ file_entry[:action] = infer_action(file_entry) unless file_entry[:action]
+
+ if file_entry[:file_path].blank?
+ file_entry[:file_path] = build_empty_file_name(next_index)
+ next_index += 1
+ end
+ end
+ end
+
+ def infer_action(file_entry)
+ return :create if file_entry[:previous_path].blank?
+
+ file_entry[:previous_path] != file_entry[:file_path] ? :move : :update
+ end
+
+ def get_last_empty_file_index
+ repository.ls_files(nil).inject(0) do |max, file|
+ idx = file[EMPTY_FILE_PATTERN, 1].to_i
+ [idx, max].max
+ end
+ end
+
+ def build_empty_file_name(index)
+ "#{DEFAULT_EMPTY_FILE_NAME}#{index}.txt"
+ end
end
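
A minimal sketch of the action inference used by transform_file_entries above, restated without Rails' blank? and applied to sample entries.

    def infer_action(entry)
      previous = entry[:previous_path].to_s
      return :create if previous.empty?

      previous != entry[:file_path] ? :move : :update
    end

    infer_action(previous_path: nil,    file_path: 'new.rb')  # => :create
    infer_action(previous_path: 'a.rb', file_path: 'b.rb')    # => :move
    infer_action(previous_path: 'a.rb', file_path: 'a.rb')    # => :update
    # Entries with a blank file_path are named snippetfile<N>.txt, where N is
    # one past the highest existing snippetfile index (1 for an empty repository).
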
diff --git a/app/models/timelog.rb b/app/models/timelog.rb
index 4ddaf6bcb86..f52dd74d4c9 100644
--- a/app/models/timelog.rb
+++ b/app/models/timelog.rb
@@ -28,9 +28,9 @@ class Timelog < ApplicationRecord
def issuable_id_is_present
if issue_id && merge_request_id
- errors.add(:base, 'Only Issue ID or Merge Request ID is required')
+ errors.add(:base, _('Only Issue ID or Merge Request ID is required'))
elsif issuable.nil?
- errors.add(:base, 'Issue or Merge Request ID is required')
+ errors.add(:base, _('Issue or Merge Request ID is required'))
end
end
diff --git a/app/models/user.rb b/app/models/user.rb
index 3051df822d2..0c7dfac5776 100644
--- a/app/models/user.rb
+++ b/app/models/user.rb
@@ -60,6 +60,7 @@ class User < ApplicationRecord
MINIMUM_INACTIVE_DAYS = 180
enum bot_type: ::UserBotTypeEnums.bots
+ enum user_type: ::UserTypeEnums.types
# Override Devise::Models::Trackable#update_tracked_fields!
# to limit database writes to at most once every hour
@@ -162,6 +163,8 @@ class User < ApplicationRecord
has_one :status, class_name: 'UserStatus'
has_one :user_preference
+ has_one :user_detail
+ has_one :user_highest_role
#
# Validations
@@ -189,6 +192,7 @@ class User < ApplicationRecord
validate :owns_public_email, if: :public_email_changed?
validate :owns_commit_email, if: :commit_email_changed?
validate :signup_domain_valid?, on: :create, if: ->(user) { !user.created_by_id }
+ validate :check_email_restrictions, on: :create, if: ->(user) { !user.created_by_id }
validates :theme_id, allow_nil: true, inclusion: { in: Gitlab::Themes.valid_ids,
message: _("%{placeholder} is not a valid theme") % { placeholder: '%{value}' } }
@@ -258,8 +262,10 @@ class User < ApplicationRecord
delegate :sourcegraph_enabled, :sourcegraph_enabled=, to: :user_preference
delegate :setup_for_company, :setup_for_company=, to: :user_preference
delegate :render_whitespace_in_code, :render_whitespace_in_code=, to: :user_preference
+ delegate :job_title, :job_title=, to: :user_detail, allow_nil: true
accepts_nested_attributes_for :user_preference, update_only: true
+ accepts_nested_attributes_for :user_detail, update_only: true
state_machine :state, initial: :active do
event :block do
@@ -332,7 +338,7 @@ class User < ApplicationRecord
scope :with_dashboard, -> (dashboard) { where(dashboard: dashboard) }
scope :with_public_profile, -> { where(private_profile: false) }
scope :bots, -> { where.not(bot_type: nil) }
- scope :humans, -> { where(bot_type: nil) }
+ scope :humans, -> { where(user_type: nil, bot_type: nil) }
scope :with_expiring_and_not_notified_personal_access_tokens, ->(at) do
where('EXISTS (?)',
@@ -1186,14 +1192,18 @@ class User < ApplicationRecord
Member.where(invite_email: verified_emails).invite
end
- def all_emails
+ def all_emails(include_private_email: true)
all_emails = []
all_emails << email unless temp_oauth_email?
- all_emails << private_commit_email
+ all_emails << private_commit_email if include_private_email
all_emails.concat(emails.map(&:email))
all_emails
end
+ def all_public_emails
+ all_emails(include_private_email: false)
+ end
+
def verified_emails
verified_emails = []
verified_emails << email if primary_email_verified?
@@ -1553,7 +1563,7 @@ class User < ApplicationRecord
def read_only_attribute?(attribute)
if Feature.enabled?(:ldap_readonly_attributes, default_enabled: true)
- enabled = Gitlab::Auth::LDAP::Config.enabled?
+ enabled = Gitlab::Auth::Ldap::Config.enabled?
read_only = attribute.to_sym.in?(UserSyncedAttributesMetadata::SYNCABLE_ATTRIBUTES)
return true if enabled && read_only
@@ -1614,6 +1624,10 @@ class User < ApplicationRecord
super.presence || build_user_preference
end
+ def user_detail
+ super.presence || build_user_detail
+ end
+
def todos_limited_to(ids)
todos.where(id: ids)
end
@@ -1658,6 +1672,16 @@ class User < ApplicationRecord
callouts.any?
end
+ def gitlab_employee?
+ strong_memoize(:gitlab_employee) do
+ if Gitlab.com?
+ Mail::Address.new(email).domain == "gitlab.com"
+ else
+ false
+ end
+ end
+ end
+
# @deprecated
alias_method :owned_or_masters_groups, :owned_or_maintainers_groups
@@ -1754,6 +1778,18 @@ class User < ApplicationRecord
end
end
+ def check_email_restrictions
+ return unless Feature.enabled?(:email_restrictions)
+ return unless Gitlab::CurrentSettings.email_restrictions_enabled?
+
+ restrictions = Gitlab::CurrentSettings.email_restrictions
+ return if restrictions.blank?
+
+ if Gitlab::UntrustedRegexp.new(restrictions).match?(email)
+ errors.add(:email, _('is not allowed for sign-up'))
+ end
+ end
+
def self.unique_internal(scope, username, email_pattern, &block)
scope.first || create_unique_internal(scope, username, email_pattern, &block)
end
diff --git a/app/models/user_bot_type_enums.rb b/app/models/user_bot_type_enums.rb
index b6b08ce650b..1a9c02a3998 100644
--- a/app/models/user_bot_type_enums.rb
+++ b/app/models/user_bot_type_enums.rb
@@ -2,7 +2,6 @@
module UserBotTypeEnums
def self.bots
- # When adding a new key, please ensure you are not conflicting with EE-only keys in app/models/user_bot_types_enums.rb
{
alert_bot: 2
}
diff --git a/app/models/user_callout_enums.rb b/app/models/user_callout_enums.rb
index ef0b2407e23..625cbb4fe5f 100644
--- a/app/models/user_callout_enums.rb
+++ b/app/models/user_callout_enums.rb
@@ -15,7 +15,8 @@ module UserCalloutEnums
gcp_signup_offer: 2,
cluster_security_warning: 3,
suggest_popover_dismissed: 9,
- tabs_position_highlight: 10
+ tabs_position_highlight: 10,
+ webhooks_moved: 13
}
end
end
diff --git a/app/models/user_detail.rb b/app/models/user_detail.rb
new file mode 100644
index 00000000000..5dc74421705
--- /dev/null
+++ b/app/models/user_detail.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+class UserDetail < ApplicationRecord
+ belongs_to :user
+
+ validates :job_title, length: { maximum: 200 }
+end
diff --git a/app/models/user_highest_role.rb b/app/models/user_highest_role.rb
new file mode 100644
index 00000000000..dc166273787
--- /dev/null
+++ b/app/models/user_highest_role.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+class UserHighestRole < ApplicationRecord
+ belongs_to :user, optional: false
+
+ validates :highest_access_level, allow_nil: true, inclusion: { in: Gitlab::Access.all_values }
+end
diff --git a/app/models/user_type_enums.rb b/app/models/user_type_enums.rb
new file mode 100644
index 00000000000..4e9dd70aee8
--- /dev/null
+++ b/app/models/user_type_enums.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module UserTypeEnums
+ def self.types
+ bots
+ end
+
+ def self.bots
+ {
+ AlertBot: 2
+ }
+ end
+end
+
+UserTypeEnums.prepend_if_ee('EE::UserTypeEnums')
diff --git a/app/models/users_statistics.rb b/app/models/users_statistics.rb
new file mode 100644
index 00000000000..5c5f62d5d87
--- /dev/null
+++ b/app/models/users_statistics.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+class UsersStatistics < ApplicationRecord
+ STATISTICS_NAMES = [
+ :without_groups_and_projects,
+ :with_highest_role_guest,
+ :with_highest_role_reporter,
+ :with_highest_role_developer,
+ :with_highest_role_maintainer,
+ :with_highest_role_owner,
+ :bots,
+ :blocked
+ ].freeze
+end
diff --git a/app/models/wiki_page.rb b/app/models/wiki_page.rb
index 26beb77a025..5436f034657 100644
--- a/app/models/wiki_page.rb
+++ b/app/models/wiki_page.rb
@@ -70,10 +70,9 @@ class WikiPage
Gitlab::HookData::WikiPageBuilder.new(self).build
end
- def initialize(wiki, page = nil, persisted = false)
+ def initialize(wiki, page = nil)
@wiki = wiki
@page = page
- @persisted = persisted
@attributes = {}.with_indifferent_access
set_attributes if persisted?
@@ -94,11 +93,7 @@ class WikiPage
# The formatted title of this page.
def title
- if @attributes[:title]
- CGI.unescape_html(self.class.unhyphenize(@attributes[:title]))
- else
- ""
- end
+ @attributes[:title] || ''
end
# Sets the title of this page.
@@ -176,7 +171,7 @@ class WikiPage
# Returns boolean True or False if this instance
# has been fully created on disk or not.
def persisted?
- @persisted == true
+ @page.present?
end
# Creates a new Wiki Page.
@@ -196,7 +191,7 @@ class WikiPage
def create(attrs = {})
update_attributes(attrs)
- save(page_details: title) do
+ save do
wiki.create_page(title, content, format, attrs[:message])
end
end
@@ -222,18 +217,12 @@ class WikiPage
update_attributes(attrs)
- if title_changed?
- page_details = title
-
- if wiki.find_page(page_details).present?
- @attributes[:title] = @page.url_path
- raise PageRenameError
- end
- else
- page_details = @page.url_path
+ if title.present? && title_changed? && wiki.find_page(title).present?
+ @attributes[:title] = @page.title
+ raise PageRenameError
end
- save(page_details: page_details) do
+ save do
wiki.update_page(
@page,
content: content,
@@ -266,7 +255,14 @@ class WikiPage
end
def title_changed?
- title.present? && (@page.nil? || self.class.unhyphenize(@page.url_path) != title)
+ if persisted?
+ old_title, old_dir = wiki.page_title_and_dir(self.class.unhyphenize(@page.url_path))
+ new_title, new_dir = wiki.page_title_and_dir(self.class.unhyphenize(title))
+
+ new_title != old_title || (title.include?('/') && new_dir != old_dir)
+ else
+ title.present?
+ end
end
# Updates the current @attributes hash by merging a hash of params
@@ -313,31 +309,33 @@ class WikiPage
attributes[:format] = @page.format
end
- def save(page_details:)
- return unless valid?
+ def save
+ return false unless valid?
unless yield
errors.add(:base, wiki.error_message)
return false
end
- page_title, page_dir = wiki.page_title_and_dir(page_details)
- gitlab_git_wiki = wiki.wiki
- @page = gitlab_git_wiki.page(title: page_title, dir: page_dir)
-
+ @page = wiki.find_page(title).page
set_attributes
- @persisted = errors.blank?
+
+ true
end
def validate_path_limits
*dirnames, title = @attributes[:title].split('/')
- if title.bytesize > MAX_TITLE_BYTES
- errors.add(:title, _("exceeds the limit of %{bytes} bytes for page titles") % { bytes: MAX_TITLE_BYTES })
+ if title && title.bytesize > MAX_TITLE_BYTES
+ errors.add(:title, _("exceeds the limit of %{bytes} bytes") % { bytes: MAX_TITLE_BYTES })
end
- if dirnames.any? { |d| d.bytesize > MAX_DIRECTORY_BYTES }
- errors.add(:title, _("exceeds the limit of %{bytes} bytes for directory names") % { bytes: MAX_DIRECTORY_BYTES })
+ invalid_dirnames = dirnames.select { |d| d.bytesize > MAX_DIRECTORY_BYTES }
+ invalid_dirnames.each do |dirname|
+ errors.add(:title, _('exceeds the limit of %{bytes} bytes for directory name "%{dirname}"') % {
+ bytes: MAX_DIRECTORY_BYTES,
+ dirname: dirname
+ })
end
end
end
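
A minimal sketch (sample title only) of the splat destructuring used in validate_path_limits above, which separates directory names from the page title before the byte-size checks; each over-long directory name now produces its own error message.

    *dirnames, title = 'docs/guides/my-page'.split('/')
    dirnames  # => ["docs", "guides"]
    title     # => "my-page"
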
diff --git a/app/models/x509_certificate.rb b/app/models/x509_certificate.rb
index 43927e65db1..75b711eab5b 100644
--- a/app/models/x509_certificate.rb
+++ b/app/models/x509_certificate.rb
@@ -2,6 +2,7 @@
class X509Certificate < ApplicationRecord
include X509SerialNumberAttribute
+ include AfterCommitQueue
x509_serial_number_attribute :serial_number
@@ -25,8 +26,14 @@ class X509Certificate < ApplicationRecord
validates :x509_issuer_id, presence: true
+ after_commit :mark_commit_signatures_unverified
+
def self.safe_create!(attributes)
create_with(attributes)
.safe_find_or_create_by!(subject_key_identifier: attributes[:subject_key_identifier])
end
+
+ def mark_commit_signatures_unverified
+ X509CertificateRevokeWorker.perform_async(self.id) if revoked?
+ end
end
diff --git a/app/models/zoom_meeting.rb b/app/models/zoom_meeting.rb
index a7ecd1e6a2c..f83aa93b69a 100644
--- a/app/models/zoom_meeting.rb
+++ b/app/models/zoom_meeting.rb
@@ -1,6 +1,8 @@
# frozen_string_literal: true
class ZoomMeeting < ApplicationRecord
+ include UsageStatistics
+
belongs_to :project, optional: false
belongs_to :issue, optional: false
diff --git a/app/policies/group_policy.rb b/app/policies/group_policy.rb
index 3bb7ab05be2..abd63753908 100644
--- a/app/policies/group_policy.rb
+++ b/app/policies/group_policy.rb
@@ -93,6 +93,9 @@ class GroupPolicy < BasePolicy
enable :create_cluster
enable :update_cluster
enable :admin_cluster
+ enable :destroy_deploy_token
+ enable :read_deploy_token
+ enable :create_deploy_token
end
rule { owner }.policy do
diff --git a/app/policies/note_policy.rb b/app/policies/note_policy.rb
index dcde8cefa0d..82ebf6ba6a5 100644
--- a/app/policies/note_policy.rb
+++ b/app/policies/note_policy.rb
@@ -1,7 +1,9 @@
# frozen_string_literal: true
class NotePolicy < BasePolicy
- delegate { @subject.project }
+ include Gitlab::Utils::StrongMemoize
+
+ delegate { @subject.resource_parent }
delegate { @subject.noteable if DeclarativePolicy.has_policy?(@subject.noteable) }
condition(:is_author) { @user && @subject.author == @user }
@@ -10,19 +12,30 @@ class NotePolicy < BasePolicy
condition(:editable, scope: :subject) { @subject.editable? }
condition(:can_read_noteable) { can?(:"read_#{@subject.noteable_ability_name}") }
+ condition(:commit_is_deleted) { @subject.for_commit? && @subject.noteable.blank? }
+
+ condition(:is_visible) { @subject.system_note_with_references_visible_for?(@user) }
+
+ condition(:confidential, scope: :subject) { @subject.confidential? }
- condition(:is_visible) { @subject.visible_for?(@user) }
+ condition(:can_read_confidential) do
+ access_level >= Gitlab::Access::REPORTER || @subject.noteable_assignee_or_author?(@user)
+ end
rule { ~editable }.prevent :admin_note
# If user can't read the issue/MR/etc then they should not be allowed to do anything to their own notes
rule { ~can_read_noteable }.policy do
- prevent :read_note
prevent :admin_note
prevent :resolve_note
prevent :award_emoji
end
+ # Special rule for deleted commits
+ rule { ~(can_read_noteable | commit_is_deleted) }.policy do
+ prevent :read_note
+ end
+
rule { is_author }.policy do
enable :read_note
enable :admin_note
@@ -39,4 +52,37 @@ class NotePolicy < BasePolicy
rule { is_noteable_author }.policy do
enable :resolve_note
end
+
+ rule { confidential & ~can_read_confidential }.policy do
+ prevent :read_note
+ prevent :admin_note
+ prevent :resolve_note
+ prevent :award_emoji
+ end
+
+ def parent_namespace
+ strong_memoize(:parent_namespace) do
+ next if @subject.is_a?(PersonalSnippet)
+ next @subject.noteable.group if @subject.noteable&.is_a?(Epic)
+
+ @subject.project
+ end
+ end
+
+ def access_level
+ return -1 if @user.nil?
+ return -1 unless parent_namespace
+
+ lookup_access_level!
+ end
+
+ def lookup_access_level!
+ return ::Gitlab::Access::REPORTER if alert_bot?
+
+ if parent_namespace.is_a?(Project)
+ parent_namespace.team.max_member_access(@user.id)
+ else
+ parent_namespace.max_member_access_for_user(@user)
+ end
+ end
end
diff --git a/app/policies/personal_snippet_policy.rb b/app/policies/personal_snippet_policy.rb
index bc60913563c..205dad6ea5f 100644
--- a/app/policies/personal_snippet_policy.rb
+++ b/app/policies/personal_snippet_policy.rb
@@ -7,6 +7,7 @@ class PersonalSnippetPolicy < BasePolicy
rule { public_snippet }.policy do
enable :read_snippet
+ enable :read_note
enable :create_note
end
@@ -14,11 +15,13 @@ class PersonalSnippetPolicy < BasePolicy
enable :read_snippet
enable :update_snippet
enable :admin_snippet
+ enable :read_note
enable :create_note
end
rule { internal_snippet & ~external_user }.policy do
enable :read_snippet
+ enable :read_note
enable :create_note
end
diff --git a/app/policies/project_policy.rb b/app/policies/project_policy.rb
index 507e227c952..aecefcc89ab 100644
--- a/app/policies/project_policy.rb
+++ b/app/policies/project_policy.rb
@@ -312,6 +312,10 @@ class ProjectPolicy < BasePolicy
enable :destroy_artifacts
enable :daily_statistics
enable :admin_operations
+ enable :read_deploy_token
+ enable :create_deploy_token
+ enable :read_pod_logs
+ enable :destroy_deploy_token
end
rule { (mirror_available & can?(:admin_project)) | admin }.enable :admin_remote_mirror
@@ -468,6 +472,8 @@ class ProjectPolicy < BasePolicy
prevent :create_pipeline
end
+ rule { admin }.enable :change_repository_storage
+
private
def team_member?
diff --git a/app/policies/project_snippet_policy.rb b/app/policies/project_snippet_policy.rb
index a38d9154102..869f4716298 100644
--- a/app/policies/project_snippet_policy.rb
+++ b/app/policies/project_snippet_policy.rb
@@ -31,7 +31,7 @@ class ProjectSnippetPolicy < BasePolicy
~can?(:read_all_resources))
end.prevent :read_snippet
- rule { internal_snippet & ~is_author & ~admin }.policy do
+ rule { internal_snippet & ~is_author & ~admin & ~project.maintainer }.policy do
prevent :update_snippet
prevent :admin_snippet
end
@@ -42,7 +42,7 @@ class ProjectSnippetPolicy < BasePolicy
prevent :admin_snippet
end
- rule { is_author | admin }.policy do
+ rule { is_author | admin | project.maintainer }.policy do
enable :read_snippet
enable :update_snippet
enable :admin_snippet
diff --git a/app/policies/snippet_policy.rb b/app/policies/snippet_policy.rb
new file mode 100644
index 00000000000..64c56e8091d
--- /dev/null
+++ b/app/policies/snippet_policy.rb
@@ -0,0 +1,4 @@
+# frozen_string_literal: true
+
+class SnippetPolicy < PersonalSnippetPolicy
+end
diff --git a/app/presenters/hooks/project_hook_presenter.rb b/app/presenters/project_hook_presenter.rb
index a65c7221b5a..a65c7221b5a 100644
--- a/app/presenters/hooks/project_hook_presenter.rb
+++ b/app/presenters/project_hook_presenter.rb
diff --git a/app/presenters/projects/import_export/project_export_presenter.rb b/app/presenters/projects/import_export/project_export_presenter.rb
new file mode 100644
index 00000000000..8f3fc53af10
--- /dev/null
+++ b/app/presenters/projects/import_export/project_export_presenter.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+module Projects
+ module ImportExport
+ class ProjectExportPresenter < Gitlab::View::Presenter::Delegated
+ include ActiveModel::Serializers::JSON
+
+ presents :project
+
+ def project_members
+ super + converted_group_members
+ end
+
+ def description
+ self.respond_to?(:override_description) ? override_description : super
+ end
+
+ private
+
+ def converted_group_members
+ group_members.each do |group_member|
+ group_member.source_type = 'Project' # Make group members project members of the future import
+ end
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def group_members
+ return [] unless current_user.can?(:admin_group, project.group)
+
+ # We need `.where.not(user_id: nil)` here otherwise when a group has an
+ # invitee, it would make the following query return 0 rows since a NULL
+ # user_id would be present in the subquery
+ # See http://stackoverflow.com/questions/129077/not-in-clause-and-null-values
+ non_null_user_ids = project.project_members.where.not(user_id: nil).select(:user_id)
+ GroupMembersFinder.new(project.group).execute.where.not(user_id: non_null_user_ids)
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+ end
+ end
+end
diff --git a/app/presenters/projects/prometheus/alert_presenter.rb b/app/presenters/projects/prometheus/alert_presenter.rb
index 8988c567c5c..02e22f8f46a 100644
--- a/app/presenters/projects/prometheus/alert_presenter.rb
+++ b/app/presenters/projects/prometheus/alert_presenter.rb
@@ -6,6 +6,7 @@ module Projects
RESERVED_ANNOTATIONS = %w(gitlab_incident_markdown title).freeze
GENERIC_ALERT_SUMMARY_ANNOTATIONS = %w(monitoring_tool service hosts).freeze
MARKDOWN_LINE_BREAK = " \n".freeze
+ INCIDENT_LABEL_NAME = IncidentManagement::CreateIssueService::INCIDENT_LABEL[:title].freeze
def full_title
[environment_name, alert_title].compact.join(': ')
@@ -31,6 +32,18 @@ module Projects
end
end
+ def show_performance_dashboard_link?
+ gitlab_alert.present?
+ end
+
+ def show_incident_issues_link?
+ project.incident_management_setting&.create_issue?
+ end
+
+ def incident_issues_link
+ project_issues_url(project, label_name: INCIDENT_LABEL_NAME)
+ end
+
def starts_at
super&.rfc3339
end
@@ -40,7 +53,7 @@ module Projects
#### Summary
#{metadata_list}
- #{alert_details}
+ #{alert_details}#{metric_embed_for_alert}
MARKDOWN
end
@@ -105,6 +118,10 @@ module Projects
def host_links
Array(hosts.value).join(' ')
end
+
+ def metric_embed_for_alert; end
end
end
end
+
+Projects::Prometheus::AlertPresenter.prepend_if_ee('EE::Projects::Prometheus::AlertPresenter')
diff --git a/app/presenters/release_presenter.rb b/app/presenters/release_presenter.rb
index 2f91495c34c..8cf7446ce64 100644
--- a/app/presenters/release_presenter.rb
+++ b/app/presenters/release_presenter.rb
@@ -20,7 +20,7 @@ class ReleasePresenter < Gitlab::View::Presenter::Delegated
end
def self_url
- return unless ::Feature.enabled?(:release_show_page, project)
+ return unless ::Feature.enabled?(:release_show_page, project, default_enabled: true)
project_release_url(project, release)
end
diff --git a/app/presenters/hooks/service_hook_presenter.rb b/app/presenters/service_hook_presenter.rb
index bc20d5b1a3b..bc20d5b1a3b 100644
--- a/app/presenters/hooks/service_hook_presenter.rb
+++ b/app/presenters/service_hook_presenter.rb
diff --git a/app/presenters/snippet_blob_presenter.rb b/app/presenters/snippet_blob_presenter.rb
index 70a373619d6..ed9c28bbc2c 100644
--- a/app/presenters/snippet_blob_presenter.rb
+++ b/app/presenters/snippet_blob_presenter.rb
@@ -3,18 +3,15 @@
class SnippetBlobPresenter < BlobPresenter
def rich_data
return if blob.binary?
+ return unless blob.rich_viewer
- if markup?
- blob.rendered_markup
- else
- highlight(plain: false)
- end
+ render_rich_partial
end
def plain_data
return if blob.binary?
- highlight(plain: !markup?)
+ highlight(plain: false)
end
def raw_path
@@ -27,15 +24,25 @@ class SnippetBlobPresenter < BlobPresenter
private
- def markup?
- blob.rich_viewer&.partial_name == 'markup'
- end
-
def snippet
- blob.snippet
+ blob.container
end
def language
nil
end
+
+ def render_rich_partial
+ renderer.render("projects/blob/viewers/_#{blob.rich_viewer.partial_name}",
+ locals: { viewer: blob.rich_viewer, blob: blob, blob_raw_path: raw_path },
+ layout: false)
+ end
+
+ def renderer
+ proxy = Warden::Proxy.new({}, Warden::Manager.new({})).tap do |proxy_instance|
+ proxy_instance.set_user(current_user, scope: :user)
+ end
+
+ ApplicationController.renderer.new('warden' => proxy)
+ end
end
diff --git a/app/presenters/snippet_presenter.rb b/app/presenters/snippet_presenter.rb
index a453be18b95..ba9d566932a 100644
--- a/app/presenters/snippet_presenter.rb
+++ b/app/presenters/snippet_presenter.rb
@@ -27,6 +27,14 @@ class SnippetPresenter < Gitlab::View::Presenter::Delegated
snippet.submittable_as_spam_by?(current_user)
end
+ def blob
+ if Feature.enabled?(:version_snippets, current_user) && !snippet.repository.empty?
+ snippet.blobs.first
+ else
+ snippet.blob
+ end
+ end
+
private
def can_access_resource?(ability_prefix)
diff --git a/app/serializers/cluster_application_entity.rb b/app/serializers/cluster_application_entity.rb
index 632718df780..ac59a9df9e5 100644
--- a/app/serializers/cluster_application_entity.rb
+++ b/app/serializers/cluster_application_entity.rb
@@ -13,4 +13,6 @@ class ClusterApplicationEntity < Grape::Entity
expose :modsecurity_enabled, if: -> (e, _) { e.respond_to?(:modsecurity_enabled) }
expose :update_available?, as: :update_available, if: -> (e, _) { e.respond_to?(:update_available?) }
expose :can_uninstall?, as: :can_uninstall
+ expose :available_domains, using: Serverless::DomainEntity, if: -> (e, _) { e.respond_to?(:available_domains) }
+ expose :pages_domain, using: Serverless::DomainEntity, if: -> (e, _) { e.respond_to?(:pages_domain) }
end
diff --git a/app/serializers/diff_file_entity.rb b/app/serializers/diff_file_entity.rb
index af7d1172f17..45c16aabe9e 100644
--- a/app/serializers/diff_file_entity.rb
+++ b/app/serializers/diff_file_entity.rb
@@ -53,7 +53,7 @@ class DiffFileEntity < DiffFileBaseEntity
end
# Used for inline diffs
- expose :highlighted_diff_lines, using: DiffLineEntity, if: -> (diff_file, options) { inline_diff_view?(options) && diff_file.text? } do |diff_file|
+ expose :highlighted_diff_lines, using: DiffLineEntity, if: -> (diff_file, options) { inline_diff_view?(options, diff_file) && diff_file.text? } do |diff_file|
diff_file.diff_lines_for_serializer
end
@@ -62,19 +62,19 @@ class DiffFileEntity < DiffFileBaseEntity
end
# Used for parallel diffs
- expose :parallel_diff_lines, using: DiffLineParallelEntity, if: -> (diff_file, options) { parallel_diff_view?(options) && diff_file.text? }
+ expose :parallel_diff_lines, using: DiffLineParallelEntity, if: -> (diff_file, options) { parallel_diff_view?(options, diff_file) && diff_file.text? }
private
- def parallel_diff_view?(options)
- return true unless Feature.enabled?(:single_mr_diff_view)
+ def parallel_diff_view?(options, diff_file)
+ return true unless Feature.enabled?(:single_mr_diff_view, diff_file.repository.project, default_enabled: true)
# If we're not rendering inline, we must be rendering parallel
- !inline_diff_view?(options)
+ !inline_diff_view?(options, diff_file)
end
- def inline_diff_view?(options)
- return true unless Feature.enabled?(:single_mr_diff_view)
+ def inline_diff_view?(options, diff_file)
+ return true unless Feature.enabled?(:single_mr_diff_view, diff_file.repository.project, default_enabled: true)
# If nothing is present, inline will be the default.
options.fetch(:diff_view, :inline).to_sym == :inline
diff --git a/app/serializers/environment_entity.rb b/app/serializers/environment_entity.rb
index 74d6806e83f..d9af7af8a8b 100644
--- a/app/serializers/environment_entity.rb
+++ b/app/serializers/environment_entity.rb
@@ -47,6 +47,22 @@ class EnvironmentEntity < Grape::Entity
environment.available? && can?(current_user, :stop_environment, environment)
end
+ expose :logs_path, if: -> (*) { can_read_pod_logs? } do |environment|
+ project_logs_path(environment.project, environment_name: environment.name)
+ end
+
+ expose :logs_api_path, if: -> (*) { can_read_pod_logs? } do |environment|
+ if environment.elastic_stack_available?
+ elasticsearch_project_logs_path(environment.project, environment_name: environment.name, format: :json)
+ else
+ k8s_project_logs_path(environment.project, environment_name: environment.name, format: :json)
+ end
+ end
+
+ expose :enable_advanced_logs_querying, if: -> (*) { can_read_pod_logs? } do |environment|
+ environment.elastic_stack_available?
+ end
+
private
alias_method :environment, :object
@@ -63,6 +79,10 @@ class EnvironmentEntity < Grape::Entity
can?(current_user, :update_environment, environment)
end
+ def can_read_pod_logs?
+ can?(current_user, :read_pod_logs, environment.project)
+ end
+
def cluster_platform_kubernetes?
deployment_platform && deployment_platform.is_a?(Clusters::Platforms::Kubernetes)
end
diff --git a/app/serializers/group_variable_entity.rb b/app/serializers/group_variable_entity.rb
index 19c5fa26f34..622106458c3 100644
--- a/app/serializers/group_variable_entity.rb
+++ b/app/serializers/group_variable_entity.rb
@@ -4,6 +4,7 @@ class GroupVariableEntity < Grape::Entity
expose :id
expose :key
expose :value
+ expose :variable_type
expose :protected?, as: :protected
expose :masked?, as: :masked
diff --git a/app/serializers/issue_board_entity.rb b/app/serializers/issue_board_entity.rb
index 13897279815..ea629d9d774 100644
--- a/app/serializers/issue_board_entity.rb
+++ b/app/serializers/issue_board_entity.rb
@@ -12,6 +12,9 @@ class IssueBoardEntity < Grape::Entity
expose :project_id
expose :relative_position
expose :time_estimate
+ expose :closed do |issue|
+ issue.closed?
+ end
expose :project do |issue|
API::Entities::Project.represent issue.project, only: [:id, :path]
diff --git a/app/serializers/merge_request_widget_entity.rb b/app/serializers/merge_request_widget_entity.rb
index 7d67a35c94c..7ba15dd9acf 100644
--- a/app/serializers/merge_request_widget_entity.rb
+++ b/app/serializers/merge_request_widget_entity.rb
@@ -55,7 +55,8 @@ class MergeRequestWidgetEntity < Grape::Entity
merge_request.source_project,
merge_request.source_branch,
file_name: '.gitlab-ci.yml',
- commit_message: s_("CommitMessage|Add %{file_name}") % { file_name: Gitlab::FileDetector::PATTERNS[:gitlab_ci] }
+ commit_message: s_("CommitMessage|Add %{file_name}") % { file_name: Gitlab::FileDetector::PATTERNS[:gitlab_ci] },
+ suggest_gitlab_ci_yml: true
)
end
@@ -63,6 +64,10 @@ class MergeRequestWidgetEntity < Grape::Entity
merge_request.project.team.human_max_access(current_user&.id)
end
+ expose :new_project_pipeline_path do |merge_request|
+ new_project_pipeline_path(merge_request.project)
+ end
+
# Rendering and redacting Markdown can be expensive. These links are
# just nice to have in the merge request widget, so only
# include them if they are explicitly requested on first load.
@@ -93,7 +98,8 @@ class MergeRequestWidgetEntity < Grape::Entity
merge_request.source_project&.uses_default_ci_config? &&
merge_request.all_pipelines.none? &&
merge_request.commits_count.positive? &&
- can?(current_user, :push_code, merge_request.source_project)
+ can?(current_user, :read_build, merge_request.source_project) &&
+ can?(current_user, :create_pipeline, merge_request.source_project)
end
end
diff --git a/app/serializers/pipeline_serializer.rb b/app/serializers/pipeline_serializer.rb
index 3ad9f2bc0bf..b2c0ceb640b 100644
--- a/app/serializers/pipeline_serializer.rb
+++ b/app/serializers/pipeline_serializer.rb
@@ -56,9 +56,14 @@ class PipelineSerializer < BaseSerializer
:manual_actions,
:scheduled_actions,
:artifacts,
- :merge_request,
:user,
{
+ merge_request: {
+ source_project: [:route, { namespace: :route }],
+ target_project: [:route, { namespace: :route }]
+ }
+ },
+ {
pending_builds: :project,
project: [:route, { namespace: :route }],
artifacts: {
diff --git a/app/serializers/serverless/domain_entity.rb b/app/serializers/serverless/domain_entity.rb
new file mode 100644
index 00000000000..556e3c99eee
--- /dev/null
+++ b/app/serializers/serverless/domain_entity.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+module Serverless
+ class DomainEntity < Grape::Entity
+ expose :id
+ expose :domain
+ end
+end
diff --git a/app/services/audit_event_service.rb b/app/services/audit_event_service.rb
index 9fd892ead82..42ed5f17d8d 100644
--- a/app/services/audit_event_service.rb
+++ b/app/services/audit_event_service.rb
@@ -1,10 +1,28 @@
# frozen_string_literal: true
class AuditEventService
+ # Instantiates a new service
+ #
+ # @param [User] author the user who authors the change
+ # @param [User, Project, Group] entity the scope which the audit event belongs to
+ # This param is also used to determine the visibility of the audit event.
+ # - Project: events are visible at Project and Instance level
+ # - Group: events are visible at Group and Instance level
+ # - User: events are visible at Instance level
+ # @param [Hash] details extra data of the audit event
+ #
+ # @return [AuditEventService]
def initialize(author, entity, details = {})
- @author, @entity, @details = author, entity, details
+ @author = author
+ @entity = entity
+ @details = details
end
+ # Builds the @details attribute for authentication
+ #
+ # This uses the @author as the target object being audited
+ #
+ # @return [AuditEventService]
def for_authentication
@details = {
with: @details[:with],
@@ -16,11 +34,15 @@ class AuditEventService
self
end
+ # Writes the event to a file and creates an event record in the DB
+ #
+ # @return [SecurityEvent] persisted if the save succeeds, non-persisted if it fails
def security_event
log_security_event_to_file
log_security_event_to_database
end
+ # Writes the event to a file
def log_security_event_to_file
file_logger.info(base_payload.merge(formatted_details))
end
diff --git a/app/services/boards/issues/list_service.rb b/app/services/boards/issues/list_service.rb
index 699fa17cb65..337710b60e0 100644
--- a/app/services/boards/issues/list_service.rb
+++ b/app/services/boards/issues/list_service.rb
@@ -10,6 +10,8 @@ module Boards
end
def execute
+ return fetch_issues.order_closed_date_desc if list&.closed?
+
fetch_issues.order_by_position_and_priority(with_cte: can_attempt_search_optimization?)
end
diff --git a/app/services/ci/create_cross_project_pipeline_service.rb b/app/services/ci/create_cross_project_pipeline_service.rb
index 8de72ace261..3a2cc3f9d32 100644
--- a/app/services/ci/create_cross_project_pipeline_service.rb
+++ b/app/services/ci/create_cross_project_pipeline_service.rb
@@ -5,9 +5,19 @@ module Ci
class CreateCrossProjectPipelineService < ::BaseService
include Gitlab::Utils::StrongMemoize
+ DuplicateDownstreamPipelineError = Class.new(StandardError)
+
def execute(bridge)
@bridge = bridge
+ if bridge.has_downstream_pipeline?
+ Gitlab::ErrorTracking.track_exception(
+ DuplicateDownstreamPipelineError.new,
+ bridge_id: @bridge.id, project_id: @bridge.project_id
+ )
+ return
+ end
+
pipeline_params = @bridge.downstream_pipeline_params
target_ref = pipeline_params.dig(:target_revision, :ref)
@@ -18,20 +28,37 @@ module Ci
current_user,
pipeline_params.fetch(:target_revision))
- service.execute(
+ downstream_pipeline = service.execute(
pipeline_params.fetch(:source), pipeline_params[:execute_params]) do |pipeline|
- @bridge.sourced_pipelines.build(
- source_pipeline: @bridge.pipeline,
- source_project: @bridge.project,
- project: @bridge.downstream_project,
- pipeline: pipeline)
-
pipeline.variables.build(@bridge.downstream_variables)
end
+
+ downstream_pipeline.tap do |pipeline|
+ next if Feature.disabled?(:ci_drop_bridge_on_downstream_errors, project, default_enabled: true)
+
+ update_bridge_status!(@bridge, pipeline)
+ end
end
private
+ def update_bridge_status!(bridge, pipeline)
+ Gitlab::OptimisticLocking.retry_lock(bridge) do |subject|
+ if pipeline.created_successfully?
+ # If the bridge uses `strategy:depend`, we leave it running
+ # and update the status when the downstream pipeline completes.
+ subject.success! unless subject.dependent?
+ else
+ subject.drop!(:downstream_pipeline_creation_failed)
+ end
+ end
+ rescue StateMachines::InvalidTransition => e
+ Gitlab::ErrorTracking.track_exception(
+ Ci::Bridge::InvalidTransitionError.new(e.message),
+ bridge_id: bridge.id,
+ downstream_pipeline_id: pipeline.id)
+ end
+
def ensure_preconditions!(target_ref)
unless downstream_project_accessible?
@bridge.drop!(:downstream_bridge_project_not_found)
diff --git a/app/services/ci/create_job_artifacts_service.rb b/app/services/ci/create_job_artifacts_service.rb
index e633dc7f633..d207c215618 100644
--- a/app/services/ci/create_job_artifacts_service.rb
+++ b/app/services/ci/create_job_artifacts_service.rb
@@ -1,14 +1,33 @@
# frozen_string_literal: true
module Ci
- class CreateJobArtifactsService
+ class CreateJobArtifactsService < ::BaseService
ArtifactsExistError = Class.new(StandardError)
+ OBJECT_STORAGE_ERRORS = [
+ Errno::EIO,
+ Google::Apis::ServerError,
+ Signet::RemoteServerError
+ ].freeze
def execute(job, artifacts_file, params, metadata_file: nil)
+ return success if sha256_matches_existing_artifact?(job, params['artifact_type'], artifacts_file)
+
+ artifact, artifact_metadata = build_artifact(job, artifacts_file, params, metadata_file)
+ result = parse_artifact(job, artifact)
+
+ return result unless result[:status] == :success
+
+ persist_artifact(job, artifact, artifact_metadata)
+ end
+
+ private
+
+ def build_artifact(job, artifacts_file, params, metadata_file)
expire_in = params['expire_in'] ||
Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
- job.job_artifacts.build(
+ artifact = Ci::JobArtifact.new(
+ job_id: job.id,
project: job.project,
file: artifacts_file,
file_type: params['artifact_type'],
@@ -16,31 +35,50 @@ module Ci
file_sha256: artifacts_file.sha256,
expire_in: expire_in)
- if metadata_file
- job.job_artifacts.build(
- project: job.project,
- file: metadata_file,
- file_type: :metadata,
- file_format: :gzip,
- file_sha256: metadata_file.sha256,
- expire_in: expire_in)
- end
+ artifact_metadata = if metadata_file
+ Ci::JobArtifact.new(
+ job_id: job.id,
+ project: job.project,
+ file: metadata_file,
+ file_type: :metadata,
+ file_format: :gzip,
+ file_sha256: metadata_file.sha256,
+ expire_in: expire_in)
+ end
- job.update(artifacts_expire_in: expire_in)
- rescue ActiveRecord::RecordNotUnique => error
- return true if sha256_matches_existing_artifact?(job, params['artifact_type'], artifacts_file)
+ [artifact, artifact_metadata]
+ end
- Gitlab::ErrorTracking.track_exception(error,
- job_id: job.id,
- project_id: job.project_id,
- uploading_type: params['artifact_type']
- )
+ def parse_artifact(job, artifact)
+ unless Feature.enabled?(:ci_synchronous_artifact_parsing, job.project, default_enabled: true)
+ return success
+ end
- job.errors.add(:base, 'another artifact of the same type already exists')
- false
+ case artifact.file_type
+ when 'dotenv' then parse_dotenv_artifact(job, artifact)
+ else success
+ end
end
- private
+ def persist_artifact(job, artifact, artifact_metadata)
+ Ci::JobArtifact.transaction do
+ artifact.save!
+ artifact_metadata&.save!
+
+ # NOTE: The `artifacts_expire_at` column is already deprecated and to be removed in the near future.
+ job.update_column(:artifacts_expire_at, artifact.expire_at)
+ end
+
+ success
+ rescue ActiveRecord::RecordNotUnique => error
+ track_exception(error, job, params)
+ error('another artifact of the same type already exists', :bad_request)
+ rescue *OBJECT_STORAGE_ERRORS => error
+ track_exception(error, job, params)
+ error(error.message, :service_unavailable)
+ rescue => error
+ error(error.message, :bad_request)
+ end
def sha256_matches_existing_artifact?(job, artifact_type, artifacts_file)
existing_artifact = job.job_artifacts.find_by_file_type(artifact_type)
@@ -48,5 +86,17 @@ module Ci
existing_artifact.file_sha256 == artifacts_file.sha256
end
+
+ def track_exception(error, job, params)
+ Gitlab::ErrorTracking.track_exception(error,
+ job_id: job.id,
+ project_id: job.project_id,
+ uploading_type: params['artifact_type']
+ )
+ end
+
+ def parse_dotenv_artifact(job, artifact)
+ Ci::ParseDotenvArtifactService.new(job.project, current_user).execute(artifact)
+ end
end
end
diff --git a/app/services/ci/create_pipeline_service.rb b/app/services/ci/create_pipeline_service.rb
index 52977034b70..347630f865f 100644
--- a/app/services/ci/create_pipeline_service.rb
+++ b/app/services/ci/create_pipeline_service.rb
@@ -7,6 +7,7 @@ module Ci
CreateError = Class.new(StandardError)
SEQUENCE = [Gitlab::Ci::Pipeline::Chain::Build,
+ Gitlab::Ci::Pipeline::Chain::Build::Associations,
Gitlab::Ci::Pipeline::Chain::Validate::Abilities,
Gitlab::Ci::Pipeline::Chain::Validate::Repository,
Gitlab::Ci::Pipeline::Chain::Config::Content,
diff --git a/app/services/ci/find_exposed_artifacts_service.rb b/app/services/ci/find_exposed_artifacts_service.rb
index d268252577f..abbeb101be2 100644
--- a/app/services/ci/find_exposed_artifacts_service.rb
+++ b/app/services/ci/find_exposed_artifacts_service.rb
@@ -35,7 +35,7 @@ module Ci
{
text: job.artifacts_expose_as,
url: path_for_entries(metadata_entries, job),
- job_path: project_job_path(project, job),
+ job_path: project_job_path(job.project, job),
job_name: job.name
}
end
@@ -59,9 +59,9 @@ module Ci
return if entries.empty?
if single_artifact?(entries)
- file_project_job_artifacts_path(project, job, entries.first.path)
+ file_project_job_artifacts_path(job.project, job, entries.first.path)
else
- browse_project_job_artifacts_path(project, job)
+ browse_project_job_artifacts_path(job.project, job)
end
end
diff --git a/app/services/ci/generate_coverage_reports_service.rb b/app/services/ci/generate_coverage_reports_service.rb
new file mode 100644
index 00000000000..ebd1eaf0bad
--- /dev/null
+++ b/app/services/ci/generate_coverage_reports_service.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module Ci
+ # TODO: a couple of points with this approach:
+ # + reuses existing architecture and reactive caching
+ # - it's not a report comparison and some comparing features must be turned off.
+ # see CompareReportsBaseService for more notes.
+ # issue: https://gitlab.com/gitlab-org/gitlab/issues/34224
+ class GenerateCoverageReportsService < CompareReportsBaseService
+ def execute(base_pipeline, head_pipeline)
+ merge_request = MergeRequest.find_by_id(params[:id])
+ {
+ status: :parsed,
+ key: key(base_pipeline, head_pipeline),
+ data: head_pipeline.coverage_reports.pick(merge_request.new_paths)
+ }
+ rescue => e
+ Gitlab::ErrorTracking.track_exception(e, project_id: project.id)
+ {
+ status: :error,
+ key: key(base_pipeline, head_pipeline),
+ status_reason: _('An error occurred while fetching coverage reports.')
+ }
+ end
+
+ def latest?(base_pipeline, head_pipeline, data)
+ data&.fetch(:key, nil) == key(base_pipeline, head_pipeline)
+ end
+ end
+end
diff --git a/app/services/ci/parse_dotenv_artifact_service.rb b/app/services/ci/parse_dotenv_artifact_service.rb
new file mode 100644
index 00000000000..fcbdc94c097
--- /dev/null
+++ b/app/services/ci/parse_dotenv_artifact_service.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+module Ci
+ class ParseDotenvArtifactService < ::BaseService
+ MAX_ACCEPTABLE_DOTENV_SIZE = 5.kilobytes
+ MAX_ACCEPTABLE_VARIABLES_COUNT = 10
+
+ SizeLimitError = Class.new(StandardError)
+ ParserError = Class.new(StandardError)
+
+ def execute(artifact)
+ validate!(artifact)
+
+ variables = parse!(artifact)
+ Ci::JobVariable.bulk_insert!(variables)
+
+ success
+ rescue SizeLimitError, ParserError, ActiveRecord::RecordInvalid => error
+ Gitlab::ErrorTracking.track_exception(error, job_id: artifact.job_id)
+ error(error.message, :bad_request)
+ end
+
+ private
+
+ def validate!(artifact)
+ unless artifact&.dotenv?
+ raise ArgumentError, 'Artifact is not dotenv file type'
+ end
+
+ unless artifact.file.size < MAX_ACCEPTABLE_DOTENV_SIZE
+ raise SizeLimitError,
+ "Dotenv Artifact Too Big. Maximum Allowable Size: #{MAX_ACCEPTABLE_DOTENV_SIZE}"
+ end
+ end
+
+ def parse!(artifact)
+ variables = []
+
+ artifact.each_blob do |blob|
+ blob.each_line do |line|
+ key, value = scan_line!(line)
+
+ variables << Ci::JobVariable.new(job_id: artifact.job_id,
+ source: :dotenv, key: key, value: value)
+ end
+ end
+
+ if variables.size > MAX_ACCEPTABLE_VARIABLES_COUNT
+ raise SizeLimitError,
+ "Dotenv files cannot have more than #{MAX_ACCEPTABLE_VARIABLES_COUNT} variables"
+ end
+
+ variables
+ end
+
+ def scan_line!(line)
+ result = line.scan(/^(.*)=(.*)$/).last
+
+ raise ParserError, 'Invalid Format' if result.nil?
+
+ result.each(&:strip!)
+ end
+ end
+end
diff --git a/app/services/ci/pipeline_bridge_status_service.rb b/app/services/ci/pipeline_bridge_status_service.rb
index 19ed5026a3a..e2e5dd386f2 100644
--- a/app/services/ci/pipeline_bridge_status_service.rb
+++ b/app/services/ci/pipeline_bridge_status_service.rb
@@ -5,7 +5,14 @@ module Ci
def execute(pipeline)
return unless pipeline.bridge_triggered?
- pipeline.source_bridge.inherit_status_from_downstream!(pipeline)
+ begin
+ pipeline.source_bridge.inherit_status_from_downstream!(pipeline)
+ rescue StateMachines::InvalidTransition => e
+ Gitlab::ErrorTracking.track_exception(
+ Ci::Bridge::InvalidTransitionError.new(e.message),
+ bridge_id: pipeline.source_bridge.id,
+ downstream_pipeline_id: pipeline.id)
+ end
end
end
end
diff --git a/app/services/ci/register_job_service.rb b/app/services/ci/register_job_service.rb
index 57c0cdd0602..fb59797a8df 100644
--- a/app/services/ci/register_job_service.rb
+++ b/app/services/ci/register_job_service.rb
@@ -8,6 +8,8 @@ module Ci
JOB_QUEUE_DURATION_SECONDS_BUCKETS = [1, 3, 10, 30, 60, 300, 900, 1800, 3600].freeze
JOBS_RUNNING_FOR_PROJECT_MAX_BUCKET = 5.freeze
+ METRICS_SHARD_TAG_PREFIX = 'metrics_shard::'.freeze
+ DEFAULT_METRICS_SHARD = 'default'.freeze
Result = Struct.new(:build, :valid?)
@@ -193,7 +195,13 @@ module Ci
def register_success(job)
labels = { shared_runner: runner.instance_type?,
- jobs_running_for_project: jobs_running_for_project(job) }
+ jobs_running_for_project: jobs_running_for_project(job),
+ shard: DEFAULT_METRICS_SHARD }
+
+ if runner.instance_type?
+ shard = runner.tag_list.sort.find { |name| name.starts_with?(METRICS_SHARD_TAG_PREFIX) }
+ labels[:shard] = shard.gsub(METRICS_SHARD_TAG_PREFIX, '') if shard
+ end
job_queue_duration_seconds.observe(labels, Time.now - job.queued_at) unless job.queued_at.nil?
attempt_counter.increment
diff --git a/app/services/ci/retry_build_service.rb b/app/services/ci/retry_build_service.rb
index 838ed789155..a65fe2ecb3a 100644
--- a/app/services/ci/retry_build_service.rb
+++ b/app/services/ci/retry_build_service.rb
@@ -5,7 +5,8 @@ module Ci
CLONE_ACCESSORS = %i[pipeline project ref tag options name
allow_failure stage stage_id stage_idx trigger_request
yaml_variables when environment coverage_regex
- description tag_list protected needs resource_group scheduling_type].freeze
+ description tag_list protected needs_attributes
+ resource_group scheduling_type].freeze
def execute(build)
reprocess!(build).tap do |new_build|
@@ -51,7 +52,7 @@ module Ci
def create_build!(attributes)
build = project.builds.new(attributes)
- build.deployment = ::Gitlab::Ci::Pipeline::Seed::Deployment.new(build).to_resource
+ build.assign_attributes(::Gitlab::Ci::Pipeline::Seed::Build.environment_attributes_for(build))
build.retried = false
build.save!
build
diff --git a/app/services/ci/update_ci_ref_status_service.rb b/app/services/ci/update_ci_ref_status_service.rb
new file mode 100644
index 00000000000..4f7ac4d11b0
--- /dev/null
+++ b/app/services/ci/update_ci_ref_status_service.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+module Ci
+ class UpdateCiRefStatusService
+ include Gitlab::OptimisticLocking
+
+ attr_reader :pipeline
+
+ def initialize(pipeline)
+ @pipeline = pipeline
+ end
+
+ def call
+ save.tap { |success| after_save if success }
+ end
+
+ private
+
+ def save
+ might_insert = ref.new_record?
+
+ begin
+ retry_optimistic_lock(ref) do
+ next false if ref.persisted? &&
+ (ref.last_updated_by_pipeline_id || 0) > pipeline.id
+
+ ref.update(status: next_status(ref.status, pipeline.status),
+ last_updated_by_pipeline: pipeline)
+ end
+ rescue ActiveRecord::RecordNotUnique
+ if might_insert
+ @ref = pipeline.reset.ref_status
+ might_insert = false
+ retry
+ else
+ raise
+ end
+ end
+ end
+
+ def next_status(ref_status, pipeline_status)
+ if ref_status == 'failed' && pipeline_status == 'success'
+ 'fixed'
+ else
+ pipeline_status
+ end
+ end
+
+ def after_save
+ enqueue_pipeline_notification
+ end
+
+ def enqueue_pipeline_notification
+ PipelineNotificationWorker.perform_async(pipeline.id, ref_status: ref.status)
+ end
+
+ def ref
+ @ref ||= pipeline.ref_status || build_ref
+ end
+
+ def build_ref
+ Ci::Ref.new(ref: pipeline.ref, project: pipeline.project, tag: pipeline.tag)
+ end
+ end
+end
diff --git a/app/services/clusters/applications/base_helm_service.rb b/app/services/clusters/applications/base_helm_service.rb
index 57bc8bc0d9b..0c9b41be8d2 100644
--- a/app/services/clusters/applications/base_helm_service.rb
+++ b/app/services/clusters/applications/base_helm_service.rb
@@ -50,7 +50,7 @@ module Clusters
end
def helm_api
- @helm_api ||= Gitlab::Kubernetes::Helm::Api.new(kubeclient)
+ @helm_api ||= Gitlab::Kubernetes::Helm::API.new(kubeclient)
end
def install_command
diff --git a/app/services/clusters/applications/base_service.rb b/app/services/clusters/applications/base_service.rb
index 2585d815e07..a3b39f0994d 100644
--- a/app/services/clusters/applications/base_service.rb
+++ b/app/services/clusters/applications/base_service.rb
@@ -35,6 +35,12 @@ module Clusters
application.oauth_application = create_oauth_application(application, request)
end
+ if application.instance_of?(Knative)
+ Serverless::AssociateDomainService
+ .new(application, pages_domain_id: params[:pages_domain_id], creator: current_user)
+ .execute
+ end
+
worker = worker_class(application)
application.make_scheduled!
diff --git a/app/services/clusters/kubernetes/configure_istio_ingress_service.rb b/app/services/clusters/kubernetes/configure_istio_ingress_service.rb
index fe577beaa8a..a81014d99ff 100644
--- a/app/services/clusters/kubernetes/configure_istio_ingress_service.rb
+++ b/app/services/clusters/kubernetes/configure_istio_ingress_service.rb
@@ -27,6 +27,10 @@ module Clusters
return configure_certificates if serverless_domain_cluster
configure_passthrough
+ rescue Kubeclient::HttpError => e
+ knative.make_errored!(_('Kubernetes error: %{error_code}') % { error_code: e.error_code })
+ rescue StandardError
+ knative.make_errored!(_('Failed to update.'))
end
private
diff --git a/app/services/commits/cherry_pick_service.rb b/app/services/commits/cherry_pick_service.rb
index 91a18909e22..7e982bf7686 100644
--- a/app/services/commits/cherry_pick_service.rb
+++ b/app/services/commits/cherry_pick_service.rb
@@ -11,8 +11,6 @@ module Commits
private
def track_mr_picking(pick_sha)
- return unless Feature.enabled?(:track_mr_picking, project)
-
merge_request = project.merge_requests.by_merge_commit_sha(@commit.sha).first
return unless merge_request
diff --git a/app/services/concerns/akismet_methods.rb b/app/services/concerns/akismet_methods.rb
index 105b79785bd..4e554ddac4c 100644
--- a/app/services/concerns/akismet_methods.rb
+++ b/app/services/concerns/akismet_methods.rb
@@ -1,15 +1,15 @@
# frozen_string_literal: true
module AkismetMethods
- def spammable_owner
- @user ||= User.find(spammable.author_id)
+ def target_owner
+ @user ||= User.find(target.author_id)
end
def akismet
@akismet ||= Spam::AkismetService.new(
- spammable_owner.name,
- spammable_owner.email,
- spammable.try(:spammable_text) || spammable&.text,
+ target_owner.name,
+ target_owner.email,
+ target.try(:spammable_text) || target&.text,
options
)
end
diff --git a/app/services/concerns/deploy_token_methods.rb b/app/services/concerns/deploy_token_methods.rb
new file mode 100644
index 00000000000..c0208b16623
--- /dev/null
+++ b/app/services/concerns/deploy_token_methods.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module DeployTokenMethods
+ def create_deploy_token_for(entity, params)
+ params[:deploy_token_type] = DeployToken.deploy_token_types["#{entity.class.name.downcase}_type".to_sym]
+
+ entity.deploy_tokens.create(params) do |deploy_token|
+ deploy_token.username = params[:username].presence
+ end
+ end
+end
diff --git a/app/services/concerns/incident_management/settings.rb b/app/services/concerns/incident_management/settings.rb
new file mode 100644
index 00000000000..5f56d6e7f53
--- /dev/null
+++ b/app/services/concerns/incident_management/settings.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+module IncidentManagement
+ module Settings
+ def incident_management_setting
+ strong_memoize(:incident_management_setting) do
+ project.incident_management_setting ||
+ project.build_incident_management_setting
+ end
+ end
+
+ def process_issues?
+ incident_management_setting.create_issue?
+ end
+ end
+end
diff --git a/app/services/deploy_tokens/create_service.rb b/app/services/deploy_tokens/create_service.rb
deleted file mode 100644
index 327a1dbf408..00000000000
--- a/app/services/deploy_tokens/create_service.rb
+++ /dev/null
@@ -1,11 +0,0 @@
-# frozen_string_literal: true
-
-module DeployTokens
- class CreateService < BaseService
- def execute
- @project.deploy_tokens.create(params) do |deploy_token|
- deploy_token.username = params[:username].presence
- end
- end
- end
-end
diff --git a/app/services/deployments/link_merge_requests_service.rb b/app/services/deployments/link_merge_requests_service.rb
index 67a2230350d..eba5082e6c3 100644
--- a/app/services/deployments/link_merge_requests_service.rb
+++ b/app/services/deployments/link_merge_requests_service.rb
@@ -38,8 +38,6 @@ module Deployments
.commits_between(from, to)
.map(&:id)
- track_mr_picking = Feature.enabled?(:track_mr_picking, project)
-
# For some projects the list of commits to deploy may be very large. To
# ensure we do not end up running SQL queries with thousands of WHERE IN
# values, we run one query per a certain number of commits.
@@ -53,8 +51,6 @@ module Deployments
deployment.link_merge_requests(merge_requests)
- next unless track_mr_picking
-
picked_merge_requests =
project.merge_requests.by_cherry_pick_sha(slice)
diff --git a/app/services/error_tracking/issue_update_service.rb b/app/services/error_tracking/issue_update_service.rb
index e516ac95138..b8235678d1d 100644
--- a/app/services/error_tracking/issue_update_service.rb
+++ b/app/services/error_tracking/issue_update_service.rb
@@ -11,6 +11,7 @@ module ErrorTracking
)
compose_response(response) do
+ project_error_tracking_setting.expire_issues_cache
response[:closed_issue_iid] = update_related_issue&.iid
end
end
diff --git a/app/services/git/process_ref_changes_service.rb b/app/services/git/process_ref_changes_service.rb
index 3052bed51bc..387cd29d69d 100644
--- a/app/services/git/process_ref_changes_service.rb
+++ b/app/services/git/process_ref_changes_service.rb
@@ -35,7 +35,7 @@ module Git
end
def execute_project_hooks?(changes)
- (changes.size <= Gitlab::CurrentSettings.push_event_hooks_limit) || Feature.enabled?(:git_push_execute_all_project_hooks, project)
+ changes.size <= Gitlab::CurrentSettings.push_event_hooks_limit
end
def process_changes(ref_type, action, changes, execute_project_hooks:)
diff --git a/app/services/groups/deploy_tokens/create_service.rb b/app/services/groups/deploy_tokens/create_service.rb
new file mode 100644
index 00000000000..8c42b56ebb0
--- /dev/null
+++ b/app/services/groups/deploy_tokens/create_service.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module Groups
+ module DeployTokens
+ class CreateService < BaseService
+ include DeployTokenMethods
+
+ def execute
+ create_deploy_token_for(@group, params)
+ end
+ end
+ end
+end
diff --git a/app/services/groups/import_export/export_service.rb b/app/services/groups/import_export/export_service.rb
index 2c3975961a8..0bf54844430 100644
--- a/app/services/groups/import_export/export_service.rb
+++ b/app/services/groups/import_export/export_service.rb
@@ -11,24 +11,30 @@ module Groups
end
def execute
- unless @current_user.can?(:admin_group, @group)
- raise ::Gitlab::ImportExport::Error.new(
- "User with ID: %s does not have permission to Group %s with ID: %s." %
- [@current_user.id, @group.name, @group.id])
- end
+ validate_user_permissions
save!
+ ensure
+ cleanup
end
private
attr_accessor :shared
+ def validate_user_permissions
+ unless @current_user.can?(:admin_group, @group)
+ @shared.error(::Gitlab::ImportExport::Error.permission_error(@current_user, @group))
+
+ notify_error!
+ end
+ end
+
def save!
if savers.all?(&:save)
notify_success
else
- cleanup_and_notify_error!
+ notify_error!
end
end
@@ -37,21 +43,19 @@ module Groups
end
def tree_exporter
- Gitlab::ImportExport::GroupTreeSaver.new(group: @group, current_user: @current_user, shared: @shared, params: @params)
+ Gitlab::ImportExport::Group::TreeSaver.new(group: @group, current_user: @current_user, shared: @shared, params: @params)
end
def file_saver
Gitlab::ImportExport::Saver.new(exportable: @group, shared: @shared)
end
- def cleanup_and_notify_error
- FileUtils.rm_rf(shared.export_path)
-
- notify_error
+ def cleanup
+ FileUtils.rm_rf(shared.archive_path) if shared&.archive_path
end
- def cleanup_and_notify_error!
- cleanup_and_notify_error
+ def notify_error!
+ notify_error
raise Gitlab::ImportExport::Error.new(shared.errors.to_sentence)
end
diff --git a/app/services/groups/import_export/import_service.rb b/app/services/groups/import_export/import_service.rb
index 628c8f5bac0..548a4a98dc1 100644
--- a/app/services/groups/import_export/import_service.rb
+++ b/app/services/groups/import_export/import_service.rb
@@ -12,15 +12,14 @@ module Groups
end
def execute
- validate_user_permissions
+ if valid_user_permissions? && import_file && restorer.restore
+ notify_success
- if import_file && restorer.restore
@group
else
- raise StandardError.new(@shared.errors.to_sentence)
+ notify_error!
end
- rescue => e
- raise StandardError.new(e.message)
+
ensure
remove_import_file
end
@@ -34,7 +33,7 @@ module Groups
end
def restorer
- @restorer ||= Gitlab::ImportExport::GroupTreeRestorer.new(user: @current_user,
+ @restorer ||= Gitlab::ImportExport::Group::TreeRestorer.new(user: @current_user,
shared: @shared,
group: @group,
group_hash: nil)
@@ -49,13 +48,37 @@ module Groups
upload.save!
end
- def validate_user_permissions
- unless current_user.can?(:admin_group, group)
- raise ::Gitlab::ImportExport::Error.new(
- "User with ID: %s does not have permission to Group %s with ID: %s." %
- [current_user.id, group.name, group.id])
+ def valid_user_permissions?
+ if current_user.can?(:admin_group, group)
+ true
+ else
+ @shared.error(::Gitlab::ImportExport::Error.permission_error(current_user, group))
+
+ false
end
end
+
+ def notify_success
+ @shared.logger.info(
+ group_id: @group.id,
+ group_name: @group.name,
+ message: 'Group Import/Export: Import succeeded'
+ )
+ end
+
+ def notify_error
+ @shared.logger.error(
+ group_id: @group.id,
+ group_name: @group.name,
+ message: "Group Import/Export: Errors occurred, see '#{Gitlab::ErrorTracking::Logger.file_name}' for details"
+ )
+ end
+
+ def notify_error!
+ notify_error
+
+ raise Gitlab::ImportExport::Error.new(@shared.errors.to_sentence)
+ end
end
end
end
diff --git a/app/services/issuable/clone/attributes_rewriter.rb b/app/services/issuable/clone/attributes_rewriter.rb
index 2b436f6322c..135ab011d69 100644
--- a/app/services/issuable/clone/attributes_rewriter.rb
+++ b/app/services/issuable/clone/attributes_rewriter.rb
@@ -19,6 +19,7 @@ module Issuable
copy_resource_label_events
copy_resource_weight_events
+ copy_resource_milestone_events
end
private
@@ -65,6 +66,23 @@ module Issuable
end
end
+ def copy_resource_milestone_events
+ entity_key = new_entity.class.name.underscore.foreign_key
+
+ copy_events(ResourceMilestoneEvent.table_name, original_entity.resource_milestone_events) do |event|
+ matching_destination_milestone = matching_milestone(event.milestone.title)
+
+ if matching_destination_milestone.present?
+ event.attributes
+ .except('id', 'reference', 'reference_html')
+ .merge(entity_key => new_entity.id,
+ 'milestone_id' => matching_destination_milestone.id,
+ 'action' => ResourceMilestoneEvent.actions[event.action],
+ 'state' => ResourceMilestoneEvent.states[event.state])
+ end
+ end
+ end
+
def copy_events(table_name, events_to_copy)
events_to_copy.find_in_batches do |batch|
events = batch.map do |event|
diff --git a/app/services/issuable/common_system_notes_service.rb b/app/services/issuable/common_system_notes_service.rb
index 846b881e819..67cf212691f 100644
--- a/app/services/issuable/common_system_notes_service.rb
+++ b/app/services/issuable/common_system_notes_service.rb
@@ -22,13 +22,17 @@ module Issuable
end
create_due_date_note if issuable.previous_changes.include?('due_date')
- create_milestone_note if issuable.previous_changes.include?('milestone_id')
+ create_milestone_note if has_milestone_changes?
create_labels_note(old_labels) if old_labels && issuable.labels != old_labels
end
end
private
+ def has_milestone_changes?
+ issuable.previous_changes.include?('milestone_id')
+ end
+
def handle_time_tracking_note
if issuable.previous_changes.include?('time_estimate')
create_time_estimate_note
@@ -95,7 +99,16 @@ module Issuable
end
def create_milestone_note
- SystemNoteService.change_milestone(issuable, issuable.project, current_user, issuable.milestone)
+ if milestone_changes_tracking_enabled?
+ # Creates a synthetic note
+ ResourceEvents::ChangeMilestoneService.new(issuable, current_user).execute
+ else
+ SystemNoteService.change_milestone(issuable, issuable.project, current_user, issuable.milestone)
+ end
+ end
+
+ def milestone_changes_tracking_enabled?
+ ::Feature.enabled?(:track_resource_milestone_change_events, issuable.project)
end
def create_due_date_note
diff --git a/app/services/issues/base_service.rb b/app/services/issues/base_service.rb
index 974f7e598ca..9e72f6dad8d 100644
--- a/app/services/issues/base_service.rb
+++ b/app/services/issues/base_service.rb
@@ -34,6 +34,18 @@ module Issues
def update_project_counter_caches?(issue)
super || issue.confidential_changed?
end
+
+ def delete_milestone_closed_issue_counter_cache(milestone)
+ return unless milestone
+
+ Milestones::ClosedIssuesCountService.new(milestone).delete_cache
+ end
+
+ def delete_milestone_total_issue_counter_cache(milestone)
+ return unless milestone
+
+ Milestones::IssuesCountService.new(milestone).delete_cache
+ end
end
end
diff --git a/app/services/issues/close_service.rb b/app/services/issues/close_service.rb
index 14585c2850b..2cf4bbcd590 100644
--- a/app/services/issues/close_service.rb
+++ b/app/services/issues/close_service.rb
@@ -18,9 +18,9 @@ module Issues
# The code calling this method is responsible for ensuring that a user is
# allowed to close the given issue.
def close_issue(issue, closed_via: nil, notifications: true, system_note: true)
- if project.jira_tracker_active? && issue.is_a?(ExternalIssue)
- project.jira_service.close_issue(closed_via, issue)
- todo_service.close_issue(issue, current_user)
+ if issue.is_a?(ExternalIssue)
+ close_external_issue(issue, closed_via)
+
return issue
end
@@ -36,6 +36,10 @@ module Issues
execute_hooks(issue, 'close')
invalidate_cache_counts(issue, users: issue.assignees)
issue.update_project_counter_caches
+
+ store_first_mentioned_in_commit_at(issue, closed_via) if closed_via.is_a?(MergeRequest)
+
+ delete_milestone_closed_issue_counter_cache(issue.milestone)
end
issue
@@ -43,8 +47,27 @@ module Issues
private
+ def close_external_issue(issue, closed_via)
+ return unless project.external_issue_tracker&.support_close_issue?
+
+ project.external_issue_tracker.close_issue(closed_via, issue)
+ todo_service.close_issue(issue, current_user)
+ end
+
def create_note(issue, current_commit)
SystemNoteService.change_status(issue, issue.project, current_user, issue.state, current_commit)
end
+
+ def store_first_mentioned_in_commit_at(issue, merge_request)
+ return unless Feature.enabled?(:store_first_mentioned_in_commit_on_issue_close, issue.project, default_enabled: true)
+
+ metrics = issue.metrics
+ return if metrics.nil? || metrics.first_mentioned_in_commit_at
+
+ first_commit_timestamp = merge_request.commits(limit: 1).first.try(:authored_date)
+ return unless first_commit_timestamp
+
+ metrics.update!(first_mentioned_in_commit_at: first_commit_timestamp)
+ end
end
end
diff --git a/app/services/issues/create_service.rb b/app/services/issues/create_service.rb
index e8879d4df66..7869509aa9c 100644
--- a/app/services/issues/create_service.rb
+++ b/app/services/issues/create_service.rb
@@ -29,6 +29,7 @@ module Issues
todo_service.new_issue(issuable, current_user)
user_agent_detail_service.create
resolve_discussions_with_issue(issuable)
+ delete_milestone_total_issue_counter_cache(issuable.milestone)
super
end
diff --git a/app/services/issues/import_csv_service.rb b/app/services/issues/import_csv_service.rb
index ef08fafa7cc..c01db5fcfe6 100644
--- a/app/services/issues/import_csv_service.rb
+++ b/app/services/issues/import_csv_service.rb
@@ -21,8 +21,19 @@ module Issues
def process_csv
csv_data = @csv_io.open(&:read).force_encoding(Encoding::UTF_8)
- CSV.new(csv_data, col_sep: detect_col_sep(csv_data.lines.first), headers: true).each.with_index(2) do |row, line_no|
- issue = Issues::CreateService.new(@project, @user, title: row[0], description: row[1]).execute
+ csv_parsing_params = {
+ col_sep: detect_col_sep(csv_data.lines.first),
+ headers: true,
+ header_converters: :symbol
+ }
+
+ CSV.new(csv_data, csv_parsing_params).each.with_index(2) do |row, line_no|
+ issue_attributes = {
+ title: row[:title],
+ description: row[:description]
+ }
+
+ issue = Issues::CreateService.new(@project, @user, issue_attributes).execute
if issue.persisted?
@results[:success] += 1
diff --git a/app/services/issues/reopen_service.rb b/app/services/issues/reopen_service.rb
index 56d59b235a7..0ffe33dd317 100644
--- a/app/services/issues/reopen_service.rb
+++ b/app/services/issues/reopen_service.rb
@@ -12,6 +12,7 @@ module Issues
execute_hooks(issue, 'reopen')
invalidate_cache_counts(issue, users: issue.assignees)
issue.update_project_counter_caches
+ delete_milestone_closed_issue_counter_cache(issue.milestone)
end
issue
diff --git a/app/services/issues/update_service.rb b/app/services/issues/update_service.rb
index 68d1657d881..78ebbd7bff2 100644
--- a/app/services/issues/update_service.rb
+++ b/app/services/issues/update_service.rb
@@ -115,10 +115,26 @@ module Issues
end
def handle_milestone_change(issue)
- return if skip_milestone_email
-
return unless issue.previous_changes.include?('milestone_id')
+ invalidate_milestone_issue_counters(issue)
+ send_milestone_change_notification(issue)
+ end
+
+ def invalidate_milestone_issue_counters(issue)
+ issue.previous_changes['milestone_id'].each do |milestone_id|
+ next unless milestone_id
+
+ milestone = Milestone.find_by_id(milestone_id)
+
+ delete_milestone_closed_issue_counter_cache(milestone)
+ delete_milestone_total_issue_counter_cache(milestone)
+ end
+ end
+
+ def send_milestone_change_notification(issue)
+ return if skip_milestone_email
+
if issue.milestone.nil?
notification_service.async.removed_milestone_issue(issue, current_user)
else
diff --git a/app/services/labels/transfer_service.rb b/app/services/labels/transfer_service.rb
index 91984403db3..e6f9cf35fcb 100644
--- a/app/services/labels/transfer_service.rb
+++ b/app/services/labels/transfer_service.rb
@@ -49,7 +49,7 @@ module Labels
Label.joins(:issues)
.where(
issues: { project_id: project.id },
- labels: { type: 'GroupLabel', group_id: old_group.id }
+ labels: { type: 'GroupLabel', group_id: old_group.self_and_ancestors }
)
end
# rubocop: enable CodeReuse/ActiveRecord
@@ -59,14 +59,14 @@ module Labels
Label.joins(:merge_requests)
.where(
merge_requests: { target_project_id: project.id },
- labels: { type: 'GroupLabel', group_id: old_group.id }
+ labels: { type: 'GroupLabel', group_id: old_group.self_and_ancestors }
)
end
# rubocop: enable CodeReuse/ActiveRecord
def find_or_create_label!(label)
params = label.attributes.slice('title', 'description', 'color')
- new_label = FindOrCreateService.new(current_user, project, params).execute
+ new_label = FindOrCreateService.new(current_user, project, params.merge(include_ancestor_groups: true)).execute
new_label.id
end
diff --git a/app/services/lfs/lock_file_service.rb b/app/services/lfs/lock_file_service.rb
index 383a0d6b4e3..1b283018c16 100644
--- a/app/services/lfs/lock_file_service.rb
+++ b/app/services/lfs/lock_file_service.rb
@@ -4,13 +4,13 @@ module Lfs
class LockFileService < BaseService
def execute
unless can?(current_user, :push_code, project)
- raise Gitlab::GitAccess::UnauthorizedError, 'You have no permissions'
+ raise Gitlab::GitAccess::ForbiddenError, 'You have no permissions'
end
create_lock!
rescue ActiveRecord::RecordNotUnique
error('already locked', 409, current_lock)
- rescue Gitlab::GitAccess::UnauthorizedError => ex
+ rescue Gitlab::GitAccess::ForbiddenError => ex
error(ex.message, 403)
rescue => ex
error(ex.message, 500)
diff --git a/app/services/lfs/unlock_file_service.rb b/app/services/lfs/unlock_file_service.rb
index ea5a67b727f..a13e89904a0 100644
--- a/app/services/lfs/unlock_file_service.rb
+++ b/app/services/lfs/unlock_file_service.rb
@@ -4,11 +4,11 @@ module Lfs
class UnlockFileService < BaseService
def execute
unless can?(current_user, :push_code, project)
- raise Gitlab::GitAccess::UnauthorizedError, _('You have no permissions')
+ raise Gitlab::GitAccess::ForbiddenError, _('You have no permissions')
end
unlock_file
- rescue Gitlab::GitAccess::UnauthorizedError => ex
+ rescue Gitlab::GitAccess::ForbiddenError => ex
error(ex.message, 403)
rescue ActiveRecord::RecordNotFound
error(_('Lock not found'), 404)
diff --git a/app/services/merge_requests/after_create_service.rb b/app/services/merge_requests/after_create_service.rb
new file mode 100644
index 00000000000..6da30f8af16
--- /dev/null
+++ b/app/services/merge_requests/after_create_service.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module MergeRequests
+ class AfterCreateService < MergeRequests::BaseService
+ def execute(merge_request)
+ event_service.open_mr(merge_request, current_user)
+ notification_service.new_merge_request(merge_request, current_user)
+
+ # https://gitlab.com/gitlab-org/gitlab/issues/208813
+ if ::Feature.enabled?(:create_merge_request_pipelines_in_sidekiq, project)
+ create_pipeline_for(merge_request, current_user)
+ merge_request.update_head_pipeline
+ end
+
+ merge_request.diffs(include_stats: false).write_cache
+ merge_request.create_cross_references!(current_user)
+ end
+ end
+end
diff --git a/app/services/merge_requests/create_pipeline_service.rb b/app/services/merge_requests/create_pipeline_service.rb
index 8258efba6bf..f802aa44487 100644
--- a/app/services/merge_requests/create_pipeline_service.rb
+++ b/app/services/merge_requests/create_pipeline_service.rb
@@ -9,15 +9,10 @@ module MergeRequests
end
def create_detached_merge_request_pipeline(merge_request)
- if can_use_merge_request_ref?(merge_request)
- Ci::CreatePipelineService.new(merge_request.source_project, current_user,
- ref: merge_request.ref_path)
- .execute(:merge_request_event, merge_request: merge_request)
- else
- Ci::CreatePipelineService.new(merge_request.source_project, current_user,
- ref: merge_request.source_branch)
- .execute(:merge_request_event, merge_request: merge_request)
- end
+ Ci::CreatePipelineService.new(merge_request.source_project,
+ current_user,
+ ref: pipeline_ref_for_detached_merge_request_pipeline(merge_request))
+ .execute(:merge_request_event, merge_request: merge_request)
end
def can_create_pipeline_for?(merge_request)
@@ -33,6 +28,16 @@ module MergeRequests
def allow_duplicate
params[:allow_duplicate]
end
+
+ private
+
+ def pipeline_ref_for_detached_merge_request_pipeline(merge_request)
+ if can_use_merge_request_ref?(merge_request)
+ merge_request.ref_path
+ else
+ merge_request.source_branch
+ end
+ end
end
end
diff --git a/app/services/merge_requests/create_service.rb b/app/services/merge_requests/create_service.rb
index 4a05d1fd7ef..d957a6425b2 100644
--- a/app/services/merge_requests/create_service.rb
+++ b/app/services/merge_requests/create_service.rb
@@ -13,19 +13,20 @@ module MergeRequests
create(merge_request)
end
- def before_create(merge_request)
- # current_user (defined in BaseService) is not available within run_after_commit block
- user = current_user
- merge_request.run_after_commit do
- NewMergeRequestWorker.perform_async(merge_request.id, user.id)
- end
- end
-
def after_create(issuable)
+ # Add new items to MergeRequests::AfterCreateService if they can
+ # be performed in Sidekiq
+ NewMergeRequestWorker.perform_async(issuable.id, current_user.id)
+
todo_service.new_merge_request(issuable, current_user)
issuable.cache_merge_request_closes_issues!(current_user)
- create_pipeline_for(issuable, current_user)
- issuable.update_head_pipeline
+
+ # https://gitlab.com/gitlab-org/gitlab/issues/208813
+ unless ::Feature.enabled?(:create_merge_request_pipelines_in_sidekiq, project)
+ create_pipeline_for(issuable, current_user)
+ issuable.update_head_pipeline
+ end
+
Gitlab::UsageDataCounters::MergeRequestCounter.count(:create)
link_lfs_objects(issuable)
diff --git a/app/services/merge_requests/merge_to_ref_service.rb b/app/services/merge_requests/merge_to_ref_service.rb
index 37b5805ae7e..1876b1096fe 100644
--- a/app/services/merge_requests/merge_to_ref_service.rb
+++ b/app/services/merge_requests/merge_to_ref_service.rb
@@ -60,7 +60,7 @@ module MergeRequests
def commit
repository.merge_to_ref(current_user, source, merge_request, target_ref, commit_message, first_parent_ref)
- rescue Gitlab::Git::PreReceiveError => error
+ rescue Gitlab::Git::PreReceiveError, Gitlab::Git::CommandError => error
raise MergeError, error.message
end
end
diff --git a/app/services/merge_requests/update_service.rb b/app/services/merge_requests/update_service.rb
index 1dc5503d368..1516e33a7c6 100644
--- a/app/services/merge_requests/update_service.rb
+++ b/app/services/merge_requests/update_service.rb
@@ -43,11 +43,7 @@ module MergeRequests
abort_auto_merge(merge_request, 'target branch was changed')
end
- if merge_request.assignees != old_assignees
- create_assignee_note(merge_request, old_assignees)
- notification_service.async.reassigned_merge_request(merge_request, current_user, old_assignees)
- todo_service.reassigned_issuable(merge_request, current_user, old_assignees)
- end
+ handle_assignees_change(merge_request, old_assignees) if merge_request.assignees != old_assignees
if merge_request.previous_changes.include?('target_branch') ||
merge_request.previous_changes.include?('source_branch')
@@ -120,6 +116,12 @@ module MergeRequests
end
end
+ def handle_assignees_change(merge_request, old_assignees)
+ create_assignee_note(merge_request, old_assignees)
+ notification_service.async.reassigned_merge_request(merge_request, current_user, old_assignees)
+ todo_service.reassigned_issuable(merge_request, current_user, old_assignees)
+ end
+
def create_branch_change_note(issuable, branch_type, old_branch, new_branch)
SystemNoteService.change_branch(
issuable, issuable.project, current_user, branch_type,
diff --git a/app/services/metrics/dashboard/base_embed_service.rb b/app/services/metrics/dashboard/base_embed_service.rb
index 8aef9873ac1..4c7fa454460 100644
--- a/app/services/metrics/dashboard/base_embed_service.rb
+++ b/app/services/metrics/dashboard/base_embed_service.rb
@@ -5,6 +5,10 @@
module Metrics
module Dashboard
class BaseEmbedService < ::Metrics::Dashboard::BaseService
+ def self.embedded?(embed_param)
+ ActiveModel::Type::Boolean.new.cast(embed_param)
+ end
+
def cache_key
"dynamic_metrics_dashboard_#{identifiers}"
end
diff --git a/app/services/metrics/dashboard/clone_dashboard_service.rb b/app/services/metrics/dashboard/clone_dashboard_service.rb
index 990dc462432..3b06a7713d7 100644
--- a/app/services/metrics/dashboard/clone_dashboard_service.rb
+++ b/app/services/metrics/dashboard/clone_dashboard_service.rb
@@ -22,9 +22,10 @@ module Metrics
end
end
+ # rubocop:disable Cop/BanCatchThrow
def execute
catch(:error) do
- throw(:error, error(_(%q(You can't commit to this project)), :forbidden)) unless push_authorized?
+ throw(:error, error(_(%q(You are not allowed to push into this branch. Create another branch or open a merge request.)), :forbidden)) unless push_authorized?
result = ::Files::CreateService.new(project, current_user, dashboard_attrs).execute
throw(:error, wrap_error(result)) unless result[:status] == :success
@@ -33,6 +34,7 @@ module Metrics
success(result.merge(http_status: :created, dashboard: dashboard_details))
end
end
+ # rubocop:enable Cop/BanCatchThrow
private
@@ -60,6 +62,7 @@ module Metrics
Gitlab::UserAccess.new(current_user, project: project).can_push_to_branch?(branch)
end
+ # rubocop:disable Cop/BanCatchThrow
def dashboard_template
@dashboard_template ||= begin
throw(:error, error(_('Not found.'), :not_found)) unless self.class.allowed_dashboard_templates.include?(params[:dashboard])
@@ -67,7 +70,9 @@ module Metrics
params[:dashboard]
end
end
+ # rubocop:enable Cop/BanCatchThrow
+ # rubocop:disable Cop/BanCatchThrow
def branch
@branch ||= begin
throw(:error, error(_('There was an error creating the dashboard, branch name is invalid.'), :bad_request)) unless valid_branch_name?
@@ -76,6 +81,7 @@ module Metrics
params[:branch]
end
end
+ # rubocop:enable Cop/BanCatchThrow
def new_or_default_branch?
!repository.branch_exists?(params[:branch]) || project.default_branch == params[:branch]
@@ -89,6 +95,7 @@ module Metrics
@new_dashboard_path ||= File.join(USER_DASHBOARDS_DIR, file_name)
end
+ # rubocop:disable Cop/BanCatchThrow
def file_name
@file_name ||= begin
throw(:error, error(_('The file name should have a .yml extension'), :bad_request)) unless target_file_type_valid?
@@ -96,6 +103,7 @@ module Metrics
File.basename(params[:file_name])
end
end
+ # rubocop:enable Cop/BanCatchThrow
def target_file_type_valid?
File.extname(params[:file_name]) == ALLOWED_FILE_TYPE
diff --git a/app/services/metrics/dashboard/custom_metric_embed_service.rb b/app/services/metrics/dashboard/custom_metric_embed_service.rb
index 9e616f4e379..456074ae6ad 100644
--- a/app/services/metrics/dashboard/custom_metric_embed_service.rb
+++ b/app/services/metrics/dashboard/custom_metric_embed_service.rb
@@ -18,7 +18,7 @@ module Metrics
# custom metrics from the DB.
def valid_params?(params)
[
- params[:embedded],
+ embedded?(params[:embedded]),
valid_dashboard?(params[:dashboard_path]),
valid_group_title?(params[:group]),
params[:title].present?,
diff --git a/app/services/metrics/dashboard/default_embed_service.rb b/app/services/metrics/dashboard/default_embed_service.rb
index 39f7c3943dd..30a8150d6be 100644
--- a/app/services/metrics/dashboard/default_embed_service.rb
+++ b/app/services/metrics/dashboard/default_embed_service.rb
@@ -22,7 +22,7 @@ module Metrics
class << self
def valid_params?(params)
- params[:embedded].present?
+ embedded?(params[:embedded])
end
end
diff --git a/app/services/metrics/dashboard/dynamic_embed_service.rb b/app/services/metrics/dashboard/dynamic_embed_service.rb
index db5b7c9e32a..ff540c30579 100644
--- a/app/services/metrics/dashboard/dynamic_embed_service.rb
+++ b/app/services/metrics/dashboard/dynamic_embed_service.rb
@@ -22,7 +22,7 @@ module Metrics
# for additional info on defining custom dashboards.
def valid_params?(params)
[
- params[:embedded],
+ embedded?(params[:embedded]),
params[:group].present?,
params[:title].present?,
params[:y_label]
diff --git a/app/services/metrics/dashboard/grafana_metric_embed_service.rb b/app/services/metrics/dashboard/grafana_metric_embed_service.rb
index 44b58ad9729..274057b8262 100644
--- a/app/services/metrics/dashboard/grafana_metric_embed_service.rb
+++ b/app/services/metrics/dashboard/grafana_metric_embed_service.rb
@@ -6,7 +6,7 @@
# Use Gitlab::Metrics::Dashboard::Finder to retrieve dashboards.
module Metrics
module Dashboard
- class GrafanaMetricEmbedService < ::Metrics::Dashboard::BaseService
+ class GrafanaMetricEmbedService < ::Metrics::Dashboard::BaseEmbedService
include ReactiveCaching
SEQUENCE = [
@@ -24,7 +24,7 @@ module Metrics
# to uniquely identify a grafana dashboard.
def valid_params?(params)
[
- params[:embedded],
+ embedded?(params[:embedded]),
params[:grafana_url]
].all?
end
@@ -138,7 +138,9 @@ module Metrics
end
# Identifies the name of the datasource for a dashboard
- # based on the panelId query parameter found in the url
+ # based on the panelId query parameter found in the url.
+ #
+ # If no panel is specified, defaults to the first valid panel.
class DatasourceNameParser
def initialize(grafana_url, grafana_dashboard)
@grafana_url, @grafana_dashboard = grafana_url, grafana_dashboard
@@ -146,15 +148,29 @@ module Metrics
def parse
@grafana_dashboard[:dashboard][:panels]
- .find { |panel| panel[:id].to_s == query_params[:panelId] }
+ .find { |panel| panel_id ? matching_panel?(panel) : valid_panel?(panel) }
.try(:[], :datasource)
end
private
+ def panel_id
+ query_params[:panelId]
+ end
+
def query_params
Gitlab::Metrics::Dashboard::Url.parse_query(@grafana_url)
end
+
+ def matching_panel?(panel)
+ panel[:id].to_s == panel_id
+ end
+
+ def valid_panel?(panel)
+ ::Grafana::Validator
+ .new(@grafana_dashboard, nil, panel, query_params)
+ .valid?
+ end
end
end
end
diff --git a/app/services/metrics/dashboard/update_dashboard_service.rb b/app/services/metrics/dashboard/update_dashboard_service.rb
new file mode 100644
index 00000000000..65e6e195f79
--- /dev/null
+++ b/app/services/metrics/dashboard/update_dashboard_service.rb
@@ -0,0 +1,109 @@
+# frozen_string_literal: true
+
+# Updates the content of a specified dashboard in a .yml file inside `.gitlab/dashboards`
+module Metrics
+ module Dashboard
+ class UpdateDashboardService < ::BaseService
+ include Stepable
+
+ ALLOWED_FILE_TYPE = '.yml'
+ USER_DASHBOARDS_DIR = ::Metrics::Dashboard::ProjectDashboardService::DASHBOARD_ROOT
+
+ steps :check_push_authorized,
+ :check_branch_name,
+ :check_file_type,
+ :update_file
+
+ def execute
+ execute_steps
+ end
+
+ private
+
+ def check_push_authorized(result)
+ return error(_('You are not allowed to push into this branch. Create another branch or open a merge request.'), :forbidden) unless push_authorized?
+
+ success(result)
+ end
+
+ def check_branch_name(result)
+ return error(_('There was an error updating the dashboard, branch name is invalid.'), :bad_request) unless valid_branch_name?
+ return error(_('There was an error updating the dashboard, branch named: %{branch} already exists.') % { branch: params[:branch] }, :bad_request) unless new_or_default_branch?
+
+ success(result)
+ end
+
+ def check_file_type(result)
+ return error(_('The file name should have a .yml extension'), :bad_request) unless target_file_type_valid?
+
+ success(result)
+ end
+
+ def update_file(result)
+ file_update_response = ::Files::UpdateService.new(project, current_user, dashboard_attrs).execute
+
+ if file_update_response[:status] == :success
+ success(result.merge(file_update_response, http_status: :created, dashboard: dashboard_details))
+ else
+ error(file_update_response[:message], :bad_request)
+ end
+ end
+
+ def push_authorized?
+ Gitlab::UserAccess.new(current_user, project: project).can_push_to_branch?(branch)
+ end
+
+ def valid_branch_name?
+ Gitlab::GitRefValidator.validate(branch)
+ end
+
+ def new_or_default_branch?
+ !repository.branch_exists?(branch) || project.default_branch == branch
+ end
+
+ def target_file_type_valid?
+ File.extname(params[:file_name]) == ALLOWED_FILE_TYPE
+ end
+
+ def dashboard_attrs
+ {
+ commit_message: params[:commit_message],
+ file_path: update_dashboard_path,
+ file_content: update_dashboard_content,
+ encoding: 'text',
+ branch_name: branch,
+ start_branch: repository.branch_exists?(branch) ? branch : project.default_branch
+ }
+ end
+
+ def update_dashboard_path
+ File.join(USER_DASHBOARDS_DIR, file_name)
+ end
+
+ def file_name
+ @file_name ||= File.basename(CGI.unescape(params[:file_name]))
+ end
+
+ def branch
+ @branch ||= params[:branch]
+ end
+
+ def update_dashboard_content
+ ::PerformanceMonitoring::PrometheusDashboard.from_json(params[:file_content]).to_yaml
+ end
+
+ def repository
+ @repository ||= project.repository
+ end
+
+ def dashboard_details
+ {
+ path: update_dashboard_path,
+ display_name: ::Metrics::Dashboard::ProjectDashboardService.name_for_path(update_dashboard_path),
+ default: false,
+ system_dashboard: false
+ }
+ end
+ end
+ end
+end
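`UpdateDashboardService` builds on the `Stepable` concern: `execute_steps` runs the declared steps in order and stops at the first one that returns an error, so push authorization is checked before the branch name, the branch name before the file type, and the file is only written once every check has passed. A minimal, self-contained sketch of that short-circuiting pattern (not the concern's actual implementation, which is defined elsewhere):

    # Toy step chain that mimics the behaviour assumed of Stepable: each step
    # receives the accumulated result and returns a success or error hash.
    STEPS = [
      ->(result) { result[:authorized] ? { status: :success } : { status: :error, message: 'forbidden' } },
      ->(result) { result[:branch].to_s.match?(/\A[\w\-\/]+\z/) ? { status: :success } : { status: :error, message: 'invalid branch name' } },
      ->(result) { File.extname(result[:file_name].to_s) == '.yml' ? { status: :success } : { status: :error, message: 'not a .yml file' } }
    ].freeze

    def execute_steps(initial)
      STEPS.reduce(initial.merge(status: :success)) do |result, step|
        return result if result[:status] == :error   # stop at the first failing step

        result.merge(step.call(result))
      end
    end

    execute_steps(authorized: true, branch: 'add-cpu-dashboard', file_name: 'cpu.yml')[:status]
    # => :success
    execute_steps(authorized: false, branch: 'x', file_name: 'cpu.yml')[:message]
    # => "forbidden"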
diff --git a/app/services/milestones/closed_issues_count_service.rb b/app/services/milestones/closed_issues_count_service.rb
new file mode 100644
index 00000000000..80aab235e49
--- /dev/null
+++ b/app/services/milestones/closed_issues_count_service.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module Milestones
+ class ClosedIssuesCountService < BaseCountService
+ def initialize(milestone)
+ @milestone = milestone
+ end
+
+ def cache_key
+ "milestone_closed_issues_count_#{@milestone.milestoneish_id}"
+ end
+
+ def relation_for_count
+ @milestone.issues.closed
+ end
+ end
+end
diff --git a/app/services/milestones/issues_count_service.rb b/app/services/milestones/issues_count_service.rb
new file mode 100644
index 00000000000..f8b80fa9aef
--- /dev/null
+++ b/app/services/milestones/issues_count_service.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module Milestones
+ class IssuesCountService < BaseCountService
+ def initialize(milestone)
+ @milestone = milestone
+ end
+
+ def cache_key
+ "milestone_total_issues_count_#{@milestone.milestoneish_id}"
+ end
+
+ def relation_for_count
+ @milestone.issues
+ end
+ end
+end
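Both count services follow the `BaseCountService` shape: a `cache_key` plus a `relation_for_count`, with the base class expected to supply the cached `count` and a `delete_cache`. The transfer service below then drops both caches whenever issues move to a different milestone. A toy sketch of that pattern, with a plain in-memory hash standing in for the real cache store:

    # Stand-in for BaseCountService: caches a computed count under a key and
    # lets callers drop the cache when the underlying data changes.
    class ToyCountService
      CACHE = {}

      def initialize(cache_key, &counter)
        @cache_key = cache_key
        @counter = counter
      end

      def count
        CACHE[@cache_key] ||= @counter.call
      end

      def delete_cache
        CACHE.delete(@cache_key)
      end
    end

    closed = ToyCountService.new('milestone_closed_issues_count_7') { 3 }
    closed.count          # => 3 (computed once, then served from the cache)
    closed.delete_cache   # next call recomputes, e.g. after a milestone transfer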
diff --git a/app/services/milestones/transfer_service.rb b/app/services/milestones/transfer_service.rb
index 1efbfed4853..213c6f8f1dd 100644
--- a/app/services/milestones/transfer_service.rb
+++ b/app/services/milestones/transfer_service.rb
@@ -22,7 +22,7 @@ module Milestones
milestones_to_transfer.find_each do |milestone|
new_milestone = find_or_create_milestone(milestone)
- update_issues_milestone(milestone.id, new_milestone&.id)
+ update_issues_milestone(milestone, new_milestone)
update_merge_requests_milestone(milestone.id, new_milestone&.id)
end
end
@@ -68,9 +68,12 @@ module Milestones
end
# rubocop: disable CodeReuse/ActiveRecord
- def update_issues_milestone(old_milestone_id, new_milestone_id)
- Issue.where(project: project, milestone_id: old_milestone_id)
- .update_all(milestone_id: new_milestone_id)
+ def update_issues_milestone(old_milestone, new_milestone)
+ Issue.where(project: project, milestone_id: old_milestone.id)
+ .update_all(milestone_id: new_milestone&.id)
+
+ delete_milestone_issues_caches(old_milestone)
+ delete_milestone_issues_caches(new_milestone)
end
# rubocop: enable CodeReuse/ActiveRecord
@@ -80,5 +83,12 @@ module Milestones
.update_all(milestone_id: new_milestone_id)
end
# rubocop: enable CodeReuse/ActiveRecord
+
+ def delete_milestone_issues_caches(milestone)
+ return unless milestone
+
+ Milestones::IssuesCountService.new(milestone).delete_cache
+ Milestones::ClosedIssuesCountService.new(milestone).delete_cache
+ end
end
end
diff --git a/app/services/notification_recipient_service.rb b/app/services/notification_recipient_service.rb
deleted file mode 100644
index 0bdf6a0e6bc..00000000000
--- a/app/services/notification_recipient_service.rb
+++ /dev/null
@@ -1,429 +0,0 @@
-# frozen_string_literal: true
-
-#
-# Used by NotificationService to determine who should receive notification
-#
-module NotificationRecipientService
- def self.notifiable_users(users, *args)
- users.compact.map { |u| NotificationRecipient.new(u, *args) }.select(&:notifiable?).map(&:user)
- end
-
- def self.notifiable?(user, *args)
- NotificationRecipient.new(user, *args).notifiable?
- end
-
- def self.build_recipients(*args)
- Builder::Default.new(*args).notification_recipients
- end
-
- def self.build_new_note_recipients(*args)
- Builder::NewNote.new(*args).notification_recipients
- end
-
- def self.build_merge_request_unmergeable_recipients(*args)
- Builder::MergeRequestUnmergeable.new(*args).notification_recipients
- end
-
- def self.build_project_maintainers_recipients(*args)
- Builder::ProjectMaintainers.new(*args).notification_recipients
- end
-
- def self.build_new_release_recipients(*args)
- Builder::NewRelease.new(*args).notification_recipients
- end
-
- module Builder
- class Base
- def initialize(*)
- raise 'abstract'
- end
-
- def build!
- raise 'abstract'
- end
-
- def filter!
- recipients.select!(&:notifiable?)
- end
-
- def acting_user
- current_user
- end
-
- def target
- raise 'abstract'
- end
-
- def project
- target.project
- end
-
- def group
- project&.group || target.try(:group)
- end
-
- def recipients
- @recipients ||= []
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def add_recipients(users, type, reason)
- if users.is_a?(ActiveRecord::Relation)
- users = users.includes(:notification_settings)
- end
-
- users = Array(users).compact
- recipients.concat(users.map { |u| make_recipient(u, type, reason) })
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- # rubocop: disable CodeReuse/ActiveRecord
- def user_scope
- User.includes(:notification_settings)
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- def make_recipient(user, type, reason)
- NotificationRecipient.new(
- user, type,
- reason: reason,
- project: project,
- group: group,
- custom_action: custom_action,
- target: target,
- acting_user: acting_user
- )
- end
-
- def notification_recipients
- @notification_recipients ||=
- begin
- build!
- filter!
- recipients = self.recipients.sort_by { |r| NotificationReason.priority(r.reason) }.uniq(&:user)
- recipients.freeze
- end
- end
-
- def custom_action
- nil
- end
-
- protected
-
- def add_participants(user)
- return unless target.respond_to?(:participants)
-
- add_recipients(target.participants(user), :participating, nil)
- end
-
- def add_mentions(user, target:)
- return unless target.respond_to?(:mentioned_users)
-
- add_recipients(target.mentioned_users(user), :mention, NotificationReason::MENTIONED)
- end
-
- # Get project/group users with CUSTOM notification level
- # rubocop: disable CodeReuse/ActiveRecord
- def add_custom_notifications
- user_ids = []
-
- # Users with a notification setting on group or project
- user_ids += user_ids_notifiable_on(project, :custom)
- user_ids += user_ids_notifiable_on(group, :custom)
-
- # Users with global level custom
- user_ids_with_project_level_global = user_ids_notifiable_on(project, :global)
- user_ids_with_group_level_global = user_ids_notifiable_on(group, :global)
-
- global_users_ids = user_ids_with_project_level_global.concat(user_ids_with_group_level_global)
- user_ids += user_ids_with_global_level_custom(global_users_ids, custom_action)
-
- add_recipients(user_scope.where(id: user_ids), :custom, nil)
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- def add_project_watchers
- add_recipients(project_watchers, :watch, nil) if project
- end
-
- def add_group_watchers
- add_recipients(group_watchers, :watch, nil)
- end
-
- # Get project users with WATCH notification level
- # rubocop: disable CodeReuse/ActiveRecord
- def project_watchers
- project_members_ids = user_ids_notifiable_on(project)
-
- user_ids_with_project_global = user_ids_notifiable_on(project, :global)
- user_ids_with_group_global = user_ids_notifiable_on(project.group, :global)
-
- user_ids = user_ids_with_global_level_watch((user_ids_with_project_global + user_ids_with_group_global).uniq)
-
- user_ids_with_project_setting = select_project_members_ids(user_ids_with_project_global, user_ids)
- user_ids_with_group_setting = select_group_members_ids(project.group, project_members_ids, user_ids_with_group_global, user_ids)
-
- user_scope.where(id: user_ids_with_project_setting.concat(user_ids_with_group_setting).uniq)
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- # rubocop: disable CodeReuse/ActiveRecord
- def group_watchers
- user_ids_with_group_global = user_ids_notifiable_on(group, :global)
- user_ids = user_ids_with_global_level_watch(user_ids_with_group_global)
- user_ids_with_group_setting = select_group_members_ids(group, [], user_ids_with_group_global, user_ids)
-
- user_scope.where(id: user_ids_with_group_setting)
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- def add_subscribed_users
- return unless target.respond_to? :subscribers
-
- add_recipients(target.subscribers(project), :subscription, NotificationReason::SUBSCRIBED)
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def user_ids_notifiable_on(resource, notification_level = nil)
- return [] unless resource
-
- scope = resource.notification_settings
-
- if notification_level
- scope = scope.where(level: NotificationSetting.levels[notification_level])
- end
-
- scope.pluck(:user_id)
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- # Build a list of user_ids based on project notification settings
- def select_project_members_ids(global_setting, user_ids_global_level_watch)
- user_ids = user_ids_notifiable_on(project, :watch)
-
- # If project setting is global, add to watch list if global setting is watch
- user_ids + (global_setting & user_ids_global_level_watch)
- end
-
- # Build a list of user_ids based on group notification settings
- def select_group_members_ids(group, project_members, global_setting, user_ids_global_level_watch)
- uids = user_ids_notifiable_on(group, :watch)
-
- # Group setting is global, add to user_ids list if global setting is watch
- uids + (global_setting & user_ids_global_level_watch) - project_members
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def user_ids_with_global_level_watch(ids)
- settings_with_global_level_of(:watch, ids).pluck(:user_id)
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- # rubocop: disable CodeReuse/ActiveRecord
- def user_ids_with_global_level_custom(ids, action)
- settings_with_global_level_of(:custom, ids).pluck(:user_id)
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- # rubocop: disable CodeReuse/ActiveRecord
- def settings_with_global_level_of(level, ids)
- NotificationSetting.where(
- user_id: ids,
- source_type: nil,
- level: NotificationSetting.levels[level]
- )
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- def add_labels_subscribers(labels: nil)
- return unless target.respond_to? :labels
-
- (labels || target.labels).each do |label|
- add_recipients(label.subscribers(project), :subscription, NotificationReason::SUBSCRIBED)
- end
- end
- end
-
- class Default < Base
- MENTION_TYPE_ACTIONS = [:new_issue, :new_merge_request].freeze
-
- attr_reader :target
- attr_reader :current_user
- attr_reader :action
- attr_reader :previous_assignees
- attr_reader :skip_current_user
-
- def initialize(target, current_user, action:, custom_action: nil, previous_assignees: nil, skip_current_user: true)
- @target = target
- @current_user = current_user
- @action = action
- @custom_action = custom_action
- @previous_assignees = previous_assignees
- @skip_current_user = skip_current_user
- end
-
- def add_watchers
- add_project_watchers
- end
-
- def build!
- add_participants(current_user)
- add_watchers
- add_custom_notifications
-
- # Re-assign is considered as a mention of the new assignee
- case custom_action
- when :reassign_merge_request, :reassign_issue
- add_recipients(previous_assignees, :mention, nil)
- add_recipients(target.assignees, :mention, NotificationReason::ASSIGNED)
- end
-
- add_subscribed_users
-
- if self.class.mention_type_actions.include?(custom_action)
- # These will all be participants as well, but adding with the :mention
- # type ensures that users with the mention notification level will
- # receive them, too.
- add_mentions(current_user, target: target)
-
- # We use the `:participating` notification level in order to match existing legacy behavior as captured
- # in existing specs (notification_service_spec.rb ~ line 507)
- if target.is_a?(Issuable)
- add_recipients(target.assignees, :participating, NotificationReason::ASSIGNED)
- end
-
- add_labels_subscribers
- end
- end
-
- def acting_user
- current_user if skip_current_user
- end
-
- # Build event key to search on custom notification level
- # Check NotificationSetting.email_events
- def custom_action
- @custom_action ||= "#{action}_#{target.class.model_name.name.underscore}".to_sym
- end
-
- def self.mention_type_actions
- MENTION_TYPE_ACTIONS.dup
- end
- end
-
- class NewNote < Base
- attr_reader :note
- def initialize(note)
- @note = note
- end
-
- def target
- note.noteable
- end
-
- # NOTE: may be nil, in the case of a PersonalSnippet
- #
- # (this is okay because NotificationRecipient is written
- # to handle nil projects)
- def project
- note.project
- end
-
- def group
- if note.for_project_noteable?
- project.group
- else
- target.try(:group)
- end
- end
-
- def build!
- # Add all users participating in the thread (author, assignee, comment authors)
- add_participants(note.author)
- add_mentions(note.author, target: note)
-
- if note.for_project_noteable?
- # Merge project watchers
- add_project_watchers
- else
- add_group_watchers
- end
-
- add_custom_notifications
- add_subscribed_users
- end
-
- def custom_action
- :new_note
- end
-
- def acting_user
- note.author
- end
- end
-
- class NewRelease < Base
- attr_reader :target
-
- def initialize(target)
- @target = target
- end
-
- def build!
- add_recipients(target.project.authorized_users, :custom, nil)
- end
-
- def custom_action
- :new_release
- end
-
- def acting_user
- target.author
- end
- end
-
- class MergeRequestUnmergeable < Base
- attr_reader :target
- def initialize(merge_request)
- @target = merge_request
- end
-
- def build!
- target.merge_participants.each do |user|
- add_recipients(user, :participating, nil)
- end
- end
-
- def custom_action
- :unmergeable_merge_request
- end
-
- def acting_user
- nil
- end
- end
-
- class ProjectMaintainers < Base
- attr_reader :target
-
- def initialize(target, action:)
- @target = target
- @action = action
- end
-
- def build!
- return [] unless project
-
- add_recipients(project.team.maintainers, :mention, nil)
- end
-
- def acting_user
- nil
- end
- end
- end
-end
-
-NotificationRecipientService::Builder::Default.prepend_if_ee('EE::NotificationRecipientBuilders::Default') # rubocop: disable Cop/InjectEnterpriseEditionModule
-NotificationRecipientService.prepend_if_ee('EE::NotificationRecipientService')
diff --git a/app/services/notification_recipients/build_service.rb b/app/services/notification_recipients/build_service.rb
new file mode 100644
index 00000000000..df807f11e1b
--- /dev/null
+++ b/app/services/notification_recipients/build_service.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+#
+# Used by NotificationService to determine who should receive notification
+#
+module NotificationRecipients
+ module BuildService
+ def self.notifiable_users(users, *args)
+ users.compact.map { |u| NotificationRecipient.new(u, *args) }.select(&:notifiable?).map(&:user)
+ end
+
+ def self.notifiable?(user, *args)
+ NotificationRecipient.new(user, *args).notifiable?
+ end
+
+ def self.build_recipients(*args)
+ ::NotificationRecipients::Builder::Default.new(*args).notification_recipients
+ end
+
+ def self.build_new_note_recipients(*args)
+ ::NotificationRecipients::Builder::NewNote.new(*args).notification_recipients
+ end
+
+ def self.build_merge_request_unmergeable_recipients(*args)
+ ::NotificationRecipients::Builder::MergeRequestUnmergeable.new(*args).notification_recipients
+ end
+
+ def self.build_project_maintainers_recipients(*args)
+ ::NotificationRecipients::Builder::ProjectMaintainers.new(*args).notification_recipients
+ end
+
+ def self.build_new_release_recipients(*args)
+ ::NotificationRecipients::Builder::NewRelease.new(*args).notification_recipients
+ end
+ end
+end
+
+NotificationRecipients::BuildService.prepend_if_ee('EE::NotificationRecipients::BuildService')
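The monolithic `NotificationRecipientService` above is removed in favour of `NotificationRecipients::BuildService`, a thin facade whose class methods instantiate one builder per notification case and return its `notification_recipients`; the builders themselves move into the `NotificationRecipients::Builder` namespace below. A reduced sketch of that facade-over-builders shape (names and recipient logic here are illustrative only):

    module Recipients
      module Builder
        class NewNote
          def initialize(note)
            @note = note
          end

          # A real builder would add participants, watchers, mentions and
          # subscribers, then filter out anyone who is not notifiable.
          def notification_recipients
            [@note[:author]]
          end
        end
      end

      module BuildService
        def self.build_new_note_recipients(note)
          Builder::NewNote.new(note).notification_recipients
        end
      end
    end

    Recipients::BuildService.build_new_note_recipients(author: 'alice')  # => ["alice"]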
diff --git a/app/services/notification_recipients/builder/base.rb b/app/services/notification_recipients/builder/base.rb
new file mode 100644
index 00000000000..3aa00c09ba2
--- /dev/null
+++ b/app/services/notification_recipients/builder/base.rb
@@ -0,0 +1,217 @@
+# frozen_string_literal: true
+
+module NotificationRecipients
+ module Builder
+ class Base
+ def initialize(*)
+ raise 'abstract'
+ end
+
+ def build!
+ raise 'abstract'
+ end
+
+ def filter!
+ recipients.select!(&:notifiable?)
+ end
+
+ def acting_user
+ current_user
+ end
+
+ def target
+ raise 'abstract'
+ end
+
+ def project
+ target.project
+ end
+
+ def group
+ project&.group || target.try(:group)
+ end
+
+ def recipients
+ @recipients ||= []
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def add_recipients(users, type, reason)
+ if users.is_a?(ActiveRecord::Relation)
+ users = users.includes(:notification_settings)
+ end
+
+ users = Array(users).compact
+ recipients.concat(users.map { |u| make_recipient(u, type, reason) })
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def user_scope
+ User.includes(:notification_settings)
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def make_recipient(user, type, reason)
+ NotificationRecipient.new(
+ user, type,
+ reason: reason,
+ project: project,
+ group: group,
+ custom_action: custom_action,
+ target: target,
+ acting_user: acting_user
+ )
+ end
+
+ def notification_recipients
+ @notification_recipients ||=
+ begin
+ build!
+ filter!
+ recipients = self.recipients.sort_by { |r| NotificationReason.priority(r.reason) }.uniq(&:user)
+ recipients.freeze
+ end
+ end
+
+ def custom_action
+ nil
+ end
+
+ protected
+
+ def add_participants(user)
+ return unless target.respond_to?(:participants)
+
+ add_recipients(target.participants(user), :participating, nil)
+ end
+
+ def add_mentions(user, target:)
+ return unless target.respond_to?(:mentioned_users)
+
+ add_recipients(target.mentioned_users(user), :mention, NotificationReason::MENTIONED)
+ end
+
+ # Get project/group users with CUSTOM notification level
+ # rubocop: disable CodeReuse/ActiveRecord
+ def add_custom_notifications
+ user_ids = []
+
+ # Users with a notification setting on group or project
+ user_ids += user_ids_notifiable_on(project, :custom)
+ user_ids += user_ids_notifiable_on(group, :custom)
+
+ # Users with global level custom
+ user_ids_with_project_level_global = user_ids_notifiable_on(project, :global)
+ user_ids_with_group_level_global = user_ids_notifiable_on(group, :global)
+
+ global_users_ids = user_ids_with_project_level_global.concat(user_ids_with_group_level_global)
+ user_ids += user_ids_with_global_level_custom(global_users_ids, custom_action)
+
+ add_recipients(user_scope.where(id: user_ids), :custom, nil)
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def add_project_watchers
+ add_recipients(project_watchers, :watch, nil) if project
+ end
+
+ def add_group_watchers
+ add_recipients(group_watchers, :watch, nil)
+ end
+
+ # Get project users with WATCH notification level
+ # rubocop: disable CodeReuse/ActiveRecord
+ def project_watchers
+ project_members_ids = user_ids_notifiable_on(project)
+
+ user_ids_with_project_global = user_ids_notifiable_on(project, :global)
+ user_ids_with_group_global = user_ids_notifiable_on(project.group, :global)
+
+ user_ids = user_ids_with_global_level_watch((user_ids_with_project_global + user_ids_with_group_global).uniq)
+
+ user_ids_with_project_setting = select_project_members_ids(user_ids_with_project_global, user_ids)
+ user_ids_with_group_setting = select_group_members_ids(project.group, project_members_ids, user_ids_with_group_global, user_ids)
+
+ user_scope.where(id: user_ids_with_project_setting.concat(user_ids_with_group_setting).uniq)
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def group_watchers
+ user_ids_with_group_global = user_ids_notifiable_on(group, :global)
+ user_ids = user_ids_with_global_level_watch(user_ids_with_group_global)
+ user_ids_with_group_setting = select_group_members_ids(group, [], user_ids_with_group_global, user_ids)
+
+ user_scope.where(id: user_ids_with_group_setting)
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def add_subscribed_users
+ return unless target.respond_to? :subscribers
+
+ add_recipients(target.subscribers(project), :subscription, NotificationReason::SUBSCRIBED)
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def user_ids_notifiable_on(resource, notification_level = nil)
+ return [] unless resource
+
+ scope = resource.notification_settings
+
+ if notification_level
+ scope = scope.where(level: NotificationSetting.levels[notification_level])
+ end
+
+ scope.pluck(:user_id)
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ # Build a list of user_ids based on project notification settings
+ def select_project_members_ids(global_setting, user_ids_global_level_watch)
+ user_ids = user_ids_notifiable_on(project, :watch)
+
+ # If project setting is global, add to watch list if global setting is watch
+ user_ids + (global_setting & user_ids_global_level_watch)
+ end
+
+ # Build a list of user_ids based on group notification settings
+ def select_group_members_ids(group, project_members, global_setting, user_ids_global_level_watch)
+ uids = user_ids_notifiable_on(group, :watch)
+
+ # Group setting is global, add to user_ids list if global setting is watch
+ uids + (global_setting & user_ids_global_level_watch) - project_members
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def user_ids_with_global_level_watch(ids)
+ settings_with_global_level_of(:watch, ids).pluck(:user_id)
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def user_ids_with_global_level_custom(ids, action)
+ settings_with_global_level_of(:custom, ids).pluck(:user_id)
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def settings_with_global_level_of(level, ids)
+ NotificationSetting.where(
+ user_id: ids,
+ source_type: nil,
+ level: NotificationSetting.levels[level]
+ )
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def add_labels_subscribers(labels: nil)
+ return unless target.respond_to? :labels
+
+ (labels || target.labels).each do |label|
+ add_recipients(label.subscribers(project), :subscription, NotificationReason::SUBSCRIBED)
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/notification_recipients/builder/default.rb b/app/services/notification_recipients/builder/default.rb
new file mode 100644
index 00000000000..790ce57452c
--- /dev/null
+++ b/app/services/notification_recipients/builder/default.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+module NotificationRecipients
+ module Builder
+ class Default < Base
+ MENTION_TYPE_ACTIONS = [:new_issue, :new_merge_request].freeze
+
+ attr_reader :target
+ attr_reader :current_user
+ attr_reader :action
+ attr_reader :previous_assignees
+ attr_reader :skip_current_user
+
+ def initialize(target, current_user, action:, custom_action: nil, previous_assignees: nil, skip_current_user: true)
+ @target = target
+ @current_user = current_user
+ @action = action
+ @custom_action = custom_action
+ @previous_assignees = previous_assignees
+ @skip_current_user = skip_current_user
+ end
+
+ def add_watchers
+ add_project_watchers
+ end
+
+ def build!
+ add_participants(current_user)
+ add_watchers
+ add_custom_notifications
+
+ # Re-assign is considered as a mention of the new assignee
+ case custom_action
+ when :reassign_merge_request, :reassign_issue
+ add_recipients(previous_assignees, :mention, nil)
+ add_recipients(target.assignees, :mention, NotificationReason::ASSIGNED)
+ end
+
+ add_subscribed_users
+
+ if self.class.mention_type_actions.include?(custom_action)
+ # These will all be participants as well, but adding with the :mention
+ # type ensures that users with the mention notification level will
+ # receive them, too.
+ add_mentions(current_user, target: target)
+
+ # We use the `:participating` notification level in order to match existing legacy behavior as captured
+ # in existing specs (notification_service_spec.rb ~ line 507)
+ if target.is_a?(Issuable)
+ add_recipients(target.assignees, :participating, NotificationReason::ASSIGNED)
+ end
+
+ add_labels_subscribers
+ end
+ end
+
+ def acting_user
+ current_user if skip_current_user
+ end
+
+ # Build event key to search on custom notification level
+ # Check NotificationSetting.email_events
+ def custom_action
+ @custom_action ||= "#{action}_#{target.class.model_name.name.underscore}".to_sym
+ end
+
+ def self.mention_type_actions
+ MENTION_TYPE_ACTIONS.dup
+ end
+ end
+ end
+end
+
+NotificationRecipients::Builder::Default.prepend_if_ee('EE::NotificationRecipients::Builder::Default')
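The default builder derives its custom notification event key from the action and the target's model name, so a "reassign" action on an Issue becomes `:reassign_issue`, which is then matched against `NotificationSetting.email_events`. A small sketch of that derivation using ActiveSupport's inflections (the helper name here is illustrative):

    require 'active_support/core_ext/string/inflections'

    # Mirrors the key construction used by Builder::Default#custom_action.
    def custom_action_key(action, model_name)
      :"#{action}_#{model_name.underscore}"
    end

    custom_action_key('reassign', 'Issue')     # => :reassign_issue
    custom_action_key('new', 'MergeRequest')   # => :new_merge_request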
diff --git a/app/services/notification_recipients/builder/merge_request_unmergeable.rb b/app/services/notification_recipients/builder/merge_request_unmergeable.rb
new file mode 100644
index 00000000000..24d96b98002
--- /dev/null
+++ b/app/services/notification_recipients/builder/merge_request_unmergeable.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+module NotificationRecipients
+ module Builder
+ class MergeRequestUnmergeable < Base
+ attr_reader :target
+ def initialize(merge_request)
+ @target = merge_request
+ end
+
+ def build!
+ target.merge_participants.each do |user|
+ add_recipients(user, :participating, nil)
+ end
+ end
+
+ def custom_action
+ :unmergeable_merge_request
+ end
+
+ def acting_user
+ nil
+ end
+ end
+ end
+end
diff --git a/app/services/notification_recipients/builder/new_note.rb b/app/services/notification_recipients/builder/new_note.rb
new file mode 100644
index 00000000000..27699a0d9cc
--- /dev/null
+++ b/app/services/notification_recipients/builder/new_note.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+module NotificationRecipients
+ module Builder
+ class NewNote < Base
+ attr_reader :note
+ def initialize(note)
+ @note = note
+ end
+
+ def target
+ note.noteable
+ end
+
+ # NOTE: may be nil, in the case of a PersonalSnippet
+ #
+ # (this is okay because NotificationRecipient is written
+ # to handle nil projects)
+ def project
+ note.project
+ end
+
+ def group
+ if note.for_project_noteable?
+ project.group
+ else
+ target.try(:group)
+ end
+ end
+
+ def build!
+ # Add all users participating in the thread (author, assignee, comment authors)
+ add_participants(note.author)
+ add_mentions(note.author, target: note)
+
+ if note.for_project_noteable?
+ # Merge project watchers
+ add_project_watchers
+ else
+ add_group_watchers
+ end
+
+ add_custom_notifications
+ add_subscribed_users
+ end
+
+ def custom_action
+ :new_note
+ end
+
+ def acting_user
+ note.author
+ end
+ end
+ end
+end
diff --git a/app/services/notification_recipients/builder/new_release.rb b/app/services/notification_recipients/builder/new_release.rb
new file mode 100644
index 00000000000..67676b6eec8
--- /dev/null
+++ b/app/services/notification_recipients/builder/new_release.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module NotificationRecipients
+ module Builder
+ class NewRelease < Base
+ attr_reader :target
+
+ def initialize(target)
+ @target = target
+ end
+
+ def build!
+ add_recipients(target.project.authorized_users, :custom, nil)
+ end
+
+ def custom_action
+ :new_release
+ end
+
+ def acting_user
+ target.author
+ end
+ end
+ end
+end
diff --git a/app/services/notification_recipients/builder/project_maintainers.rb b/app/services/notification_recipients/builder/project_maintainers.rb
new file mode 100644
index 00000000000..e8f22c00a83
--- /dev/null
+++ b/app/services/notification_recipients/builder/project_maintainers.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+module NotificationRecipients
+ module Builder
+ class ProjectMaintainers < Base
+ attr_reader :target
+
+ def initialize(target, action:)
+ @target = target
+ @action = action
+ end
+
+ def build!
+ return [] unless project
+
+ add_recipients(project.team.maintainers, :mention, nil)
+ end
+
+ def acting_user
+ nil
+ end
+ end
+ end
+end
diff --git a/app/services/notification_service.rb b/app/services/notification_service.rb
index a75eaa99c23..6b92e5a5625 100644
--- a/app/services/notification_service.rb
+++ b/app/services/notification_service.rb
@@ -108,7 +108,7 @@ class NotificationService
# * users with custom level checked with "reassign issue"
#
def reassigned_issue(issue, current_user, previous_assignees = [])
- recipients = NotificationRecipientService.build_recipients(
+ recipients = NotificationRecipients::BuildService.build_recipients(
issue,
current_user,
action: "reassign",
@@ -161,7 +161,7 @@ class NotificationService
def push_to_merge_request(merge_request, current_user, new_commits: [], existing_commits: [])
new_commits = new_commits.map { |c| { short_id: c.short_id, title: c.title } }
existing_commits = existing_commits.map { |c| { short_id: c.short_id, title: c.title } }
- recipients = NotificationRecipientService.build_recipients(merge_request, current_user, action: "push_to")
+ recipients = NotificationRecipients::BuildService.build_recipients(merge_request, current_user, action: "push_to")
recipients.each do |recipient|
mailer.send(:push_to_merge_request_email, recipient.user.id, merge_request.id, current_user.id, recipient.reason, new_commits: new_commits, existing_commits: existing_commits).deliver_later
@@ -197,7 +197,7 @@ class NotificationService
# * users with custom level checked with "reassign merge request"
#
def reassigned_merge_request(merge_request, current_user, previous_assignees = [])
- recipients = NotificationRecipientService.build_recipients(
+ recipients = NotificationRecipients::BuildService.build_recipients(
merge_request,
current_user,
action: "reassign",
@@ -260,7 +260,7 @@ class NotificationService
end
def resolve_all_discussions(merge_request, current_user)
- recipients = NotificationRecipientService.build_recipients(
+ recipients = NotificationRecipients::BuildService.build_recipients(
merge_request,
current_user,
action: "resolve_all_discussions")
@@ -283,7 +283,7 @@ class NotificationService
return true unless note.noteable_type.present?
# ignore gitlab service messages
- return true if note.cross_reference? && note.system?
+ return true if note.system_note_with_references?
send_new_note_notifications(note)
end
@@ -291,7 +291,7 @@ class NotificationService
def send_new_note_notifications(note)
notify_method = "note_#{note.noteable_ability_name}_email".to_sym
- recipients = NotificationRecipientService.build_new_note_recipients(note)
+ recipients = NotificationRecipients::BuildService.build_new_note_recipients(note)
recipients.each do |recipient|
mailer.send(notify_method, recipient.user.id, note.id, recipient.reason).deliver_later
end
@@ -299,7 +299,7 @@ class NotificationService
# Notify users when a new release is created
def send_new_release_notifications(release)
- recipients = NotificationRecipientService.build_new_release_recipients(release)
+ recipients = NotificationRecipients::BuildService.build_new_release_recipients(release)
recipients.each do |recipient|
mailer.new_release_email(recipient.user.id, release, recipient.reason).deliver_later
@@ -413,7 +413,7 @@ class NotificationService
end
def issue_moved(issue, new_issue, current_user)
- recipients = NotificationRecipientService.build_recipients(issue, current_user, action: 'moved')
+ recipients = NotificationRecipients::BuildService.build_recipients(issue, current_user, action: 'moved')
recipients.map do |recipient|
email = mailer.issue_moved_email(recipient.user, issue, new_issue, current_user, recipient.reason)
@@ -434,18 +434,19 @@ class NotificationService
mailer.project_was_not_exported_email(current_user, project, errors).deliver_later
end
- def pipeline_finished(pipeline, recipients = nil)
+ def pipeline_finished(pipeline, ref_status: nil, recipients: nil)
# Must always check project configuration since recipients could be a list of emails
# from the PipelinesEmailService integration.
return if pipeline.project.emails_disabled?
- email_template = "pipeline_#{pipeline.status}_email"
+ ref_status ||= pipeline.status
+ email_template = "pipeline_#{ref_status}_email"
return unless mailer.respond_to?(email_template)
recipients ||= notifiable_users(
[pipeline.user], :watch,
- custom_action: :"#{pipeline.status}_pipeline",
+ custom_action: :"#{ref_status}_pipeline",
target: pipeline
).map do |user|
user.notification_email_for(pipeline.project.group)
@@ -489,7 +490,7 @@ class NotificationService
end
def issue_due(issue)
- recipients = NotificationRecipientService.build_recipients(
+ recipients = NotificationRecipients::BuildService.build_recipients(
issue,
issue.author,
action: 'due',
@@ -525,7 +526,7 @@ class NotificationService
protected
def new_resource_email(target, method)
- recipients = NotificationRecipientService.build_recipients(target, target.author, action: "new")
+ recipients = NotificationRecipients::BuildService.build_recipients(target, target.author, action: "new")
recipients.each do |recipient|
mailer.send(method, recipient.user.id, target.id, recipient.reason).deliver_later
@@ -533,7 +534,7 @@ class NotificationService
end
def new_mentions_in_resource_email(target, new_mentioned_users, current_user, method)
- recipients = NotificationRecipientService.build_recipients(target, current_user, action: "new")
+ recipients = NotificationRecipients::BuildService.build_recipients(target, current_user, action: "new")
recipients = recipients.select {|r| new_mentioned_users.include?(r.user) }
recipients.each do |recipient|
@@ -544,7 +545,7 @@ class NotificationService
def close_resource_email(target, current_user, method, skip_current_user: true, closed_via: nil)
action = method == :merged_merge_request_email ? "merge" : "close"
- recipients = NotificationRecipientService.build_recipients(
+ recipients = NotificationRecipients::BuildService.build_recipients(
target,
current_user,
action: action,
@@ -572,7 +573,7 @@ class NotificationService
end
def removed_milestone_resource_email(target, current_user, method)
- recipients = NotificationRecipientService.build_recipients(
+ recipients = NotificationRecipients::BuildService.build_recipients(
target,
current_user,
action: 'removed_milestone'
@@ -584,7 +585,7 @@ class NotificationService
end
def changed_milestone_resource_email(target, milestone, current_user, method)
- recipients = NotificationRecipientService.build_recipients(
+ recipients = NotificationRecipients::BuildService.build_recipients(
target,
current_user,
action: 'changed_milestone'
@@ -596,7 +597,7 @@ class NotificationService
end
def reopen_resource_email(target, current_user, method, status)
- recipients = NotificationRecipientService.build_recipients(target, current_user, action: "reopen")
+ recipients = NotificationRecipients::BuildService.build_recipients(target, current_user, action: "reopen")
recipients.each do |recipient|
mailer.send(method, recipient.user.id, target.id, status, current_user.id, recipient.reason).deliver_later
@@ -604,7 +605,7 @@ class NotificationService
end
def merge_request_unmergeable_email(merge_request)
- recipients = NotificationRecipientService.build_merge_request_unmergeable_recipients(merge_request)
+ recipients = NotificationRecipients::BuildService.build_merge_request_unmergeable_recipients(merge_request)
recipients.each do |recipient|
mailer.merge_request_unmergeable_email(recipient.user.id, merge_request.id).deliver_later
@@ -618,15 +619,15 @@ class NotificationService
private
def project_maintainers_recipients(target, action:)
- NotificationRecipientService.build_project_maintainers_recipients(target, action: action)
+ NotificationRecipients::BuildService.build_project_maintainers_recipients(target, action: action)
end
def notifiable?(*args)
- NotificationRecipientService.notifiable?(*args)
+ NotificationRecipients::BuildService.notifiable?(*args)
end
def notifiable_users(*args)
- NotificationRecipientService.notifiable_users(*args)
+ NotificationRecipients::BuildService.notifiable_users(*args)
end
def deliver_access_request_email(recipient, member)
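`pipeline_finished` now takes a `ref_status:` keyword and falls back to `pipeline.status` when it is absent; the chosen value drives both the mailer template name (`pipeline_#{ref_status}_email`) and the custom notification action, and the `respond_to?` guard silently skips statuses without a matching template. A small stand-alone sketch of that template selection (the template list and helper name are illustrative, not the mailer's actual API):

    KNOWN_TEMPLATES = %w[pipeline_success_email pipeline_failed_email].freeze

    def pipeline_template_for(pipeline_status, ref_status = nil)
      status   = ref_status || pipeline_status
      template = "pipeline_#{status}_email"
      KNOWN_TEMPLATES.include?(template) ? template : nil   # nil mirrors the respond_to? bail-out
    end

    pipeline_template_for('failed')             # => "pipeline_failed_email"
    pipeline_template_for('failed', 'success')  # => "pipeline_success_email" (ref status wins)
    pipeline_template_for('canceled')           # => nil (no matching template, nothing is sent)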
diff --git a/app/services/pod_logs/base_service.rb b/app/services/pod_logs/base_service.rb
new file mode 100644
index 00000000000..bfd6799d711
--- /dev/null
+++ b/app/services/pod_logs/base_service.rb
@@ -0,0 +1,134 @@
+# frozen_string_literal: true
+
+module PodLogs
+ class BaseService < ::BaseService
+ include ReactiveCaching
+ include Stepable
+
+ attr_reader :cluster, :namespace, :params
+
+ CACHE_KEY_GET_POD_LOG = 'get_pod_log'
+ K8S_NAME_MAX_LENGTH = 253
+
+ def id
+ cluster.id
+ end
+
+ def initialize(cluster, namespace, params: {})
+ @cluster = cluster
+ @namespace = namespace
+ @params = filter_params(params.dup.stringify_keys).to_hash
+ end
+
+ def execute
+ with_reactive_cache(
+ CACHE_KEY_GET_POD_LOG,
+ namespace,
+ params
+ ) do |result|
+ result
+ end
+ end
+
+ def calculate_reactive_cache(request, _namespace, _params)
+ case request
+ when CACHE_KEY_GET_POD_LOG
+ execute_steps
+ else
+ exception = StandardError.new('Unknown reactive cache request')
+ Gitlab::ErrorTracking.track_and_raise_for_dev_exception(exception, request: request)
+ error(_('Unknown cache key'))
+ end
+ end
+
+ private
+
+ def valid_params
+ %w(pod_name container_name)
+ end
+
+ def success_return_keys
+ %i(status logs pod_name container_name pods)
+ end
+
+ def check_arguments(result)
+ return error(_('Cluster does not exist')) if cluster.nil?
+ return error(_('Namespace is empty')) if namespace.blank?
+
+ success(result)
+ end
+
+ def check_param_lengths(_result)
+ pod_name = params['pod_name'].presence
+ container_name = params['container_name'].presence
+
+ if pod_name&.length.to_i > K8S_NAME_MAX_LENGTH
+ return error(_('pod_name cannot be larger than %{max_length}'\
+ ' chars' % { max_length: K8S_NAME_MAX_LENGTH }))
+ elsif container_name&.length.to_i > K8S_NAME_MAX_LENGTH
+ return error(_('container_name cannot be larger than'\
+ ' %{max_length} chars' % { max_length: K8S_NAME_MAX_LENGTH }))
+ end
+
+ success(pod_name: pod_name, container_name: container_name)
+ end
+
+ def get_raw_pods(result)
+ result[:raw_pods] = cluster.kubeclient.get_pods(namespace: namespace)
+
+ success(result)
+ end
+
+ def get_pod_names(result)
+ result[:pods] = result[:raw_pods].map(&:metadata).map(&:name)
+
+ success(result)
+ end
+
+ def check_pod_name(result)
+      # If pod_name is not received as a parameter, fetch the logs of the first
+      # pod in this namespace.
+ result[:pod_name] ||= result[:pods].first
+
+ unless result[:pod_name]
+ return error(_('No pods available'))
+ end
+
+ unless result[:pods].include?(result[:pod_name])
+ return error(_('Pod does not exist'))
+ end
+
+ success(result)
+ end
+
+ def check_container_name(result)
+ pod_details = result[:raw_pods].find { |p| p.metadata.name == result[:pod_name] }
+ containers = pod_details.spec.containers.map(&:name)
+
+ # select first container if not specified
+ result[:container_name] ||= containers.first
+
+ unless result[:container_name]
+ return error(_('No containers available'))
+ end
+
+ unless containers.include?(result[:container_name])
+ return error(_('Container does not exist'))
+ end
+
+ success(result)
+ end
+
+ def pod_logs(result)
+ raise NotImplementedError
+ end
+
+ def filter_return_keys(result)
+ result.slice(*success_return_keys)
+ end
+
+ def filter_params(params)
+ params.slice(*valid_params)
+ end
+ end
+end
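`PodLogs::BaseService` whitelists both its input and its output: `filter_params` keeps only the keys listed in `valid_params`, and `filter_return_keys` slices the final result down to `success_return_keys`, with subclasses extending either list (the Elasticsearch service below adds `search`, `start`, `end` and `cursor`). A plain-Ruby sketch of that slice-based whitelisting, with the standalone helpers here being illustrative:

    VALID_PARAMS        = %w[pod_name container_name].freeze
    SUCCESS_RETURN_KEYS = %i[status logs pod_name container_name pods].freeze

    def filter_params(params)
      params.slice(*VALID_PARAMS)         # drop anything the service does not expect
    end

    def filter_return_keys(result)
      result.slice(*SUCCESS_RETURN_KEYS)  # expose only the documented result keys
    end

    filter_params('pod_name' => 'web-1', 'namespace' => 'ignored')
    # => {"pod_name"=>"web-1"}
    filter_return_keys(status: :success, logs: [], raw_pods: :dropped)
    # => {:status=>:success, :logs=>[]}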
diff --git a/app/services/pod_logs/elasticsearch_service.rb b/app/services/pod_logs/elasticsearch_service.rb
new file mode 100644
index 00000000000..3bb6e2bd846
--- /dev/null
+++ b/app/services/pod_logs/elasticsearch_service.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+module PodLogs
+ class ElasticsearchService < PodLogs::BaseService
+ steps :check_arguments,
+ :check_param_lengths,
+ :get_raw_pods,
+ :get_pod_names,
+ :check_pod_name,
+ :check_container_name,
+ :check_times,
+ :check_search,
+ :check_cursor,
+ :pod_logs,
+ :filter_return_keys
+
+ self.reactive_cache_worker_finder = ->(id, _cache_key, namespace, params) { new(::Clusters::Cluster.find(id), namespace, params: params) }
+
+ private
+
+ def valid_params
+ super + %w(search start end cursor)
+ end
+
+ def success_return_keys
+ super + %i(cursor)
+ end
+
+ def check_times(result)
+ result[:start] = params['start'] if params.key?('start') && Time.iso8601(params['start'])
+ result[:end] = params['end'] if params.key?('end') && Time.iso8601(params['end'])
+
+ success(result)
+ rescue ArgumentError
+ error(_('Invalid start or end time format'))
+ end
+
+ def check_search(result)
+ result[:search] = params['search'] if params.key?('search')
+
+ success(result)
+ end
+
+ def check_cursor(result)
+ result[:cursor] = params['cursor'] if params.key?('cursor')
+
+ success(result)
+ end
+
+ def pod_logs(result)
+ client = cluster&.application_elastic_stack&.elasticsearch_client
+ return error(_('Unable to connect to Elasticsearch')) unless client
+
+ response = ::Gitlab::Elasticsearch::Logs.new(client).pod_logs(
+ namespace,
+ result[:pod_name],
+ container_name: result[:container_name],
+ search: result[:search],
+ start_time: result[:start],
+ end_time: result[:end],
+ cursor: result[:cursor]
+ )
+
+ result.merge!(response)
+
+ success(result)
+ rescue Elasticsearch::Transport::Transport::ServerError => e
+ ::Gitlab::ErrorTracking.track_exception(e)
+
+ error(_('Elasticsearch returned status code: %{status_code}') % {
+        # ServerError is the parent class of exceptions named after HTTP status codes, e.g. "Elasticsearch::Transport::Transport::Errors::NotFound";
+        # the class name is the only way to determine which type of error was encountered.
+ status_code: e.class.name.split('::').last
+ })
+ rescue ::Gitlab::Elasticsearch::Logs::InvalidCursor
+ error(_('Invalid cursor value provided'))
+ end
+ end
+end
diff --git a/app/services/pod_logs/kubernetes_service.rb b/app/services/pod_logs/kubernetes_service.rb
new file mode 100644
index 00000000000..6c8ed74f8e1
--- /dev/null
+++ b/app/services/pod_logs/kubernetes_service.rb
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+module PodLogs
+ class KubernetesService < PodLogs::BaseService
+    LOGS_LIMIT = 500
+ REPLACEMENT_CHAR = "\u{FFFD}"
+
+ EncodingHelperError = Class.new(StandardError)
+
+ steps :check_arguments,
+ :check_param_lengths,
+ :get_raw_pods,
+ :get_pod_names,
+ :check_pod_name,
+ :check_container_name,
+ :pod_logs,
+ :encode_logs_to_utf8,
+ :split_logs,
+ :filter_return_keys
+
+ self.reactive_cache_worker_finder = ->(id, _cache_key, namespace, params) { new(::Clusters::Cluster.find(id), namespace, params: params) }
+
+ private
+
+ def pod_logs(result)
+ result[:logs] = cluster.kubeclient.get_pod_log(
+ result[:pod_name],
+ namespace,
+ container: result[:container_name],
+ tail_lines: LOGS_LIMIT,
+ timestamps: true
+ ).body
+
+ success(result)
+ rescue Kubeclient::ResourceNotFoundError
+ error(_('Pod not found'))
+ rescue Kubeclient::HttpError => e
+ ::Gitlab::ErrorTracking.track_exception(e)
+
+ error(_('Kubernetes API returned status code: %{error_code}') % {
+ error_code: e.error_code
+ })
+ end
+
+ # Check https://gitlab.com/gitlab-org/gitlab/issues/34965#note_292261879
+ # for more details on why this is necessary.
+ def encode_logs_to_utf8(result)
+ return success(result) if result[:logs].nil?
+ return success(result) if result[:logs].encoding == Encoding::UTF_8
+
+ result[:logs] = encode_utf8(result[:logs])
+
+ success(result)
+ rescue EncodingHelperError
+ error(_('Unable to convert Kubernetes logs encoding to UTF-8'))
+ end
+
+ def split_logs(result)
+ result[:logs] = result[:logs].strip.lines(chomp: true).map do |line|
+        # Each message contains an RFC3339Nano timestamp, then a space, then the log line.
+ # resolution of the nanoseconds can vary, so we split on the first space
+ values = line.split(' ', 2)
+ {
+ timestamp: values[0],
+ message: values[1]
+ }
+ end
+
+ success(result)
+ end
+
+ def encode_utf8(logs)
+ utf8_logs = Gitlab::EncodingHelper.encode_utf8(logs.dup, replace: REPLACEMENT_CHAR)
+
+ # Gitlab::EncodingHelper.encode_utf8 can return '' or nil if an exception
+ # is raised while encoding. We prefer to return an error rather than wrongly
+ # display blank logs.
+ no_utf8_logs = logs.present? && utf8_logs.blank?
+ unexpected_encoding = utf8_logs&.encoding != Encoding::UTF_8
+
+ if no_utf8_logs || unexpected_encoding
+ raise EncodingHelperError, 'Could not convert Kubernetes logs to UTF-8'
+ end
+
+ utf8_logs
+ end
+ end
+end
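`split_logs` turns the raw Kubernetes response into structured entries by splitting each line on the first space only: the RFC3339Nano timestamp added by `timestamps: true` never contains a space, while the message after it may contain many. A self-contained sketch of that step, with made-up log lines:

    raw = <<~LOGS
      2020-03-18T09:00:00.123456789Z starting worker
      2020-03-18T09:00:01.000000001Z ready to accept connections
    LOGS

    entries = raw.strip.lines(chomp: true).map do |line|
      timestamp, message = line.split(' ', 2)   # split only on the first space
      { timestamp: timestamp, message: message }
    end

    entries.first
    # => {:timestamp=>"2020-03-18T09:00:00.123456789Z", :message=>"starting worker"}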
diff --git a/app/services/post_receive_service.rb b/app/services/post_receive_service.rb
index e3818e76c4c..f12e45d701a 100644
--- a/app/services/post_receive_service.rb
+++ b/app/services/post_receive_service.rb
@@ -4,10 +4,11 @@
#
# Used for scheduling related jobs after a push action has been performed
class PostReceiveService
- attr_reader :user, :project, :params
+ attr_reader :user, :repository, :project, :params
- def initialize(user, project, params)
+ def initialize(user, repository, project, params)
@user = user
+ @repository = repository
@project = project
@params = params
end
@@ -24,11 +25,11 @@ class PostReceiveService
mr_options = push_options.get(:merge_request)
if mr_options.present?
- message = process_mr_push_options(mr_options, project, user, params[:changes])
+ message = process_mr_push_options(mr_options, params[:changes])
response.add_alert_message(message)
end
- broadcast_message = BroadcastMessage.current&.last&.message
+ broadcast_message = BroadcastMessage.current_banner_messages&.last&.message
response.add_alert_message(broadcast_message)
response.add_merge_request_urls(merge_request_urls)
@@ -46,8 +47,13 @@ class PostReceiveService
response
end
- def process_mr_push_options(push_options, project, user, changes)
+ def process_mr_push_options(push_options, changes)
Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-foss/issues/61359')
+ return unless repository
+
+ unless repository.repo_type.project?
+ return push_options_warning('Push options are only supported for projects')
+ end
service = ::MergeRequests::PushOptionsHandlerService.new(
project, user, changes, push_options
@@ -64,6 +70,8 @@ class PostReceiveService
end
def merge_request_urls
+ return [] unless repository&.repo_type&.project?
+
::MergeRequests::GetUrlsService.new(project).execute(params[:changes])
end
end
diff --git a/app/services/projects/alerting/notify_service.rb b/app/services/projects/alerting/notify_service.rb
index 4ca3b154e4b..d34d6f6a915 100644
--- a/app/services/projects/alerting/notify_service.rb
+++ b/app/services/projects/alerting/notify_service.rb
@@ -4,12 +4,14 @@ module Projects
module Alerting
class NotifyService < BaseService
include Gitlab::Utils::StrongMemoize
+ include IncidentManagement::Settings
def execute(token)
return forbidden unless alerts_service_activated?
return unauthorized unless valid_token?(token)
- process_incident_issues
+ process_incident_issues if process_issues?
+ send_alert_email if send_email?
ServiceResponse.success
rescue Gitlab::Alerting::NotificationPayloadParser::BadPayloadError
@@ -20,11 +22,21 @@ module Projects
delegate :alerts_service, :alerts_service_activated?, to: :project
+ def send_email?
+ incident_management_setting.send_email?
+ end
+
def process_incident_issues
IncidentManagement::ProcessAlertWorker
.perform_async(project.id, parsed_payload)
end
+ def send_alert_email
+ notification_service
+ .async
+ .prometheus_alerts_fired(project, [parsed_payload])
+ end
+
def parsed_payload
Gitlab::Alerting::NotificationPayloadParser.call(params.to_h)
end
diff --git a/app/services/projects/container_repository/cleanup_tags_service.rb b/app/services/projects/container_repository/cleanup_tags_service.rb
index 046745d725e..6eb8f5c27d9 100644
--- a/app/services/projects/container_repository/cleanup_tags_service.rb
+++ b/app/services/projects/container_repository/cleanup_tags_service.rb
@@ -61,10 +61,15 @@ module Projects
end
def filter_by_name(tags)
- regex = Gitlab::UntrustedRegexp.new("\\A#{params['name_regex']}\\z")
+ # Technical Debt: https://gitlab.com/gitlab-org/gitlab/issues/207267
+ # name_regex to be removed when container_expiration_policies is updated
+ # to have both regex columns
+ regex_delete = Gitlab::UntrustedRegexp.new("\\A#{params['name_regex_delete'] || params['name_regex']}\\z")
+ regex_retain = Gitlab::UntrustedRegexp.new("\\A#{params['name_regex_keep']}\\z")
tags.select do |tag|
- regex.scan(tag.name).any?
+          # regex_retain takes precedence: tags matching both patterns are kept
+ regex_delete.match?(tag.name) && !regex_retain.match?(tag.name)
end
end
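Tag cleanup now evaluates two anchored patterns: a delete expression (falling back to the legacy `name_regex` param) and a keep expression, and a tag is only selected for removal when it matches the former and not the latter. A runnable sketch of that filter using plain `Regexp`, with made-up tags and patterns (the service itself wraps the patterns in `Gitlab::UntrustedRegexp` because they come from user input):

    tags = %w[latest v1.0.0 v1.1.0 dev-2020 stable]

    regex_delete = /\A(?:v.+|dev-.*)\z/   # what the cleanup policy wants to delete
    regex_retain = /\Av1\.1\.0\z/         # what the policy explicitly keeps

    to_delete = tags.select { |tag| regex_delete.match?(tag) && !regex_retain.match?(tag) }
    # => ["v1.0.0", "dev-2020"]  ("v1.1.0" survives because the keep pattern wins)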
diff --git a/app/services/projects/create_service.rb b/app/services/projects/create_service.rb
index 7bf68e7d315..cef86e9763c 100644
--- a/app/services/projects/create_service.rb
+++ b/app/services/projects/create_service.rb
@@ -98,6 +98,7 @@ module Projects
setup_authorizations
current_user.invalidate_personal_projects_count
+ create_prometheus_service
create_readme if @initialize_with_readme
end
@@ -169,6 +170,20 @@ module Projects
end
# rubocop: enable CodeReuse/ActiveRecord
+ def create_prometheus_service
+ service = @project.find_or_initialize_service(::PrometheusService.to_param)
+
+ if service.prometheus_available?
+ service.save!
+ else
+ @project.prometheus_service = nil
+ end
+
+ rescue ActiveRecord::RecordInvalid => e
+ Gitlab::ErrorTracking.track_exception(e, extra: { project_id: project.id })
+ @project.prometheus_service = nil
+ end
+
def set_project_name_from_path
# Set project name from path
if @project.name.present? && @project.path.present?
diff --git a/app/services/projects/deploy_tokens/create_service.rb b/app/services/projects/deploy_tokens/create_service.rb
new file mode 100644
index 00000000000..51cb68dfb10
--- /dev/null
+++ b/app/services/projects/deploy_tokens/create_service.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module Projects
+ module DeployTokens
+ class CreateService < BaseService
+ include DeployTokenMethods
+
+ def execute
+ create_deploy_token_for(@project, params)
+ end
+ end
+ end
+end
diff --git a/app/services/projects/destroy_service.rb b/app/services/projects/destroy_service.rb
index 066d1f1ca72..fd1366d2c4a 100644
--- a/app/services/projects/destroy_service.rb
+++ b/app/services/projects/destroy_service.rb
@@ -47,7 +47,7 @@ module Projects
private
- def trash_repositories!
+ def trash_project_repositories!
unless remove_repository(project.repository)
raise_error(s_('DeleteProject|Failed to remove project repository. Please try again or contact administrator.'))
end
@@ -57,6 +57,18 @@ module Projects
end
end
+ def trash_relation_repositories!
+ unless remove_snippets
+ raise_error(s_('DeleteProject|Failed to remove project snippets. Please try again or contact administrator.'))
+ end
+ end
+
+ def remove_snippets
+ response = Snippets::BulkDestroyService.new(current_user, project.snippets).execute
+
+ response.success?
+ end
+
def remove_repository(repository)
return true unless repository
@@ -95,7 +107,8 @@ module Projects
Project.transaction do
log_destroy_event
- trash_repositories!
+ trash_relation_repositories!
+ trash_project_repositories!
# Rails attempts to load all related records into memory before
# destroying: https://github.com/rails/rails/issues/22510
@@ -103,7 +116,7 @@ module Projects
#
# Exclude container repositories because its before_destroy would be
# called multiple times, and it doesn't destroy any database records.
- project.destroy_dependent_associations_in_batches(exclude: [:container_repositories])
+ project.destroy_dependent_associations_in_batches(exclude: [:container_repositories, :snippets])
project.destroy!
end
end
diff --git a/app/services/projects/fork_service.rb b/app/services/projects/fork_service.rb
index fcfea567885..6ac53b15ef9 100644
--- a/app/services/projects/fork_service.rb
+++ b/app/services/projects/fork_service.rb
@@ -3,24 +3,25 @@
module Projects
class ForkService < BaseService
def execute(fork_to_project = nil)
- forked_project =
- if fork_to_project
- link_existing_project(fork_to_project)
- else
- fork_new_project
- end
+ forked_project = fork_to_project ? link_existing_project(fork_to_project) : fork_new_project
refresh_forks_count if forked_project&.saved?
forked_project
end
- private
+ def valid_fork_targets
+ @valid_fork_targets ||= ForkTargetsFinder.new(@project, current_user).execute
+ end
- def allowed_fork?
- current_user.can?(:fork_project, @project)
+ def valid_fork_target?
+ return true if current_user.admin?
+
+ valid_fork_targets.include?(target_namespace)
end
+ private
+
def link_existing_project(fork_to_project)
return if fork_to_project.forked?
@@ -30,6 +31,21 @@ module Projects
end
def fork_new_project
+ new_project = CreateService.new(current_user, new_fork_params).execute
+ return new_project unless new_project.persisted?
+
+ # Set the forked_from_project relation after saving to avoid having to
+ # reload the project to reset the association information and cause an
+ # extra query.
+ new_project.forked_from_project = @project
+
+ builds_access_level = @project.project_feature.builds_access_level
+ new_project.project_feature.update(builds_access_level: builds_access_level)
+
+ new_project
+ end
+
+ def new_fork_params
new_params = {
visibility_level: allowed_visibility_level,
description: @project.description,
@@ -57,18 +73,11 @@ module Projects
new_params.merge!(@project.object_pool_params)
- new_project = CreateService.new(current_user, new_params).execute
- return new_project unless new_project.persisted?
-
- # Set the forked_from_project relation after saving to avoid having to
- # reload the project to reset the association information and cause an
- # extra query.
- new_project.forked_from_project = @project
-
- builds_access_level = @project.project_feature.builds_access_level
- new_project.project_feature.update(builds_access_level: builds_access_level)
+ new_params
+ end
- new_project
+ def allowed_fork?
+ current_user.can?(:fork_project, @project)
end
def fork_network
diff --git a/app/services/projects/import_export/export_service.rb b/app/services/projects/import_export/export_service.rb
index 38859c1efa4..f4214410226 100644
--- a/app/services/projects/import_export/export_service.rb
+++ b/app/services/projects/import_export/export_service.rb
@@ -5,15 +5,15 @@ module Projects
class ExportService < BaseService
def execute(after_export_strategy = nil, options = {})
unless project.template_source? || can?(current_user, :admin_project, project)
- raise ::Gitlab::ImportExport::Error.new(
- "User with ID: %s does not have permission to Project %s with ID: %s." %
- [current_user.id, project.name, project.id])
+ raise ::Gitlab::ImportExport::Error.permission_error(current_user, project)
end
@shared = project.import_export_shared
save_all!
execute_after_export_action(after_export_strategy)
+ ensure
+ cleanup
end
private
@@ -24,7 +24,7 @@ module Projects
return unless after_export_strategy
unless after_export_strategy.execute(current_user, project)
- cleanup_and_notify_error
+ notify_error
end
end
@@ -33,7 +33,7 @@ module Projects
Gitlab::ImportExport::Saver.save(exportable: project, shared: shared)
notify_success
else
- cleanup_and_notify_error!
+ notify_error!
end
end
@@ -42,7 +42,7 @@ module Projects
end
def exporters
- [version_saver, avatar_saver, project_tree_saver, uploads_saver, repo_saver, wiki_repo_saver, lfs_saver]
+ [version_saver, avatar_saver, project_tree_saver, uploads_saver, repo_saver, wiki_repo_saver, lfs_saver, snippets_repo_saver]
end
def version_saver
@@ -54,7 +54,16 @@ module Projects
end
def project_tree_saver
- Gitlab::ImportExport::ProjectTreeSaver.new(project: project, current_user: current_user, shared: shared, params: params)
+ tree_saver_class.new(project: project, current_user: current_user, shared: shared, params: params)
+ end
+
+ def tree_saver_class
+ if ::Feature.enabled?(:streaming_serializer, project)
+ Gitlab::ImportExport::Project::TreeSaver
+ else
+        # Once we remove the :streaming_serializer feature flag, Project::LegacyTreeSaver should be removed as well
+ Gitlab::ImportExport::Project::LegacyTreeSaver
+ end
end
def uploads_saver
@@ -73,16 +82,16 @@ module Projects
Gitlab::ImportExport::LfsSaver.new(project: project, shared: shared)
end
- def cleanup_and_notify_error
- Rails.logger.error("Import/Export - Project #{project.name} with ID: #{project.id} export error - #{shared.errors.join(', ')}") # rubocop:disable Gitlab/RailsLogger
-
- FileUtils.rm_rf(shared.export_path)
+ def snippets_repo_saver
+ Gitlab::ImportExport::SnippetsRepoSaver.new(current_user: current_user, project: project, shared: shared)
+ end
- notify_error
+ def cleanup
+ FileUtils.rm_rf(shared.archive_path) if shared&.archive_path
end
- def cleanup_and_notify_error!
- cleanup_and_notify_error
+ def notify_error!
+ notify_error
raise Gitlab::ImportExport::Error.new(shared.errors.to_sentence)
end
@@ -92,6 +101,8 @@ module Projects
end
def notify_error
+ Rails.logger.error("Import/Export - Project #{project.name} with ID: #{project.id} export error - #{shared.errors.join(', ')}") # rubocop:disable Gitlab/RailsLogger
+
notification_service.project_not_exported(project, current_user, shared.errors)
end
end
diff --git a/app/services/projects/import_service.rb b/app/services/projects/import_service.rb
index a4771e864d4..4b294a97516 100644
--- a/app/services/projects/import_service.rb
+++ b/app/services/projects/import_service.rb
@@ -2,8 +2,6 @@
module Projects
class ImportService < BaseService
- include Gitlab::ShellAdapter
-
Error = Class.new(StandardError)
# Returns true if this importer is supposed to perform its work in the
@@ -72,9 +70,9 @@ module Projects
project.ensure_repository
project.repository.fetch_as_mirror(project.import_url, refmap: refmap)
else
- gitlab_shell.import_project_repository(project)
+ project.repository.import_repository(project.import_url)
end
- rescue Gitlab::Shell::Error => e
+ rescue ::Gitlab::Git::CommandError => e
# Expire cache to prevent scenarios such as:
# 1. First import failed, but the repo was imported successfully, so +exists?+ returns true
# 2. Retried import, repo is broken or not imported but +exists?+ still returns true
diff --git a/app/services/projects/lfs_pointers/lfs_download_link_list_service.rb b/app/services/projects/lfs_pointers/lfs_download_link_list_service.rb
index 5ef7e03ea02..48a21bf94ba 100644
--- a/app/services/projects/lfs_pointers/lfs_download_link_list_service.rb
+++ b/app/services/projects/lfs_pointers/lfs_download_link_list_service.rb
@@ -7,8 +7,13 @@ module Projects
class LfsDownloadLinkListService < BaseService
DOWNLOAD_ACTION = 'download'
+ # This could be different per server, but it seems like a reasonable value to start with.
+ # https://github.com/git-lfs/git-lfs/issues/419
+ REQUEST_BATCH_SIZE = 100
+
DownloadLinksError = Class.new(StandardError)
DownloadLinkNotFound = Class.new(StandardError)
+ DownloadLinksRequestEntityTooLargeError = Class.new(StandardError)
attr_reader :remote_uri
@@ -25,16 +30,39 @@ module Projects
def execute(oids)
return [] unless project&.lfs_enabled? && remote_uri && oids.present?
- get_download_links(oids)
+ get_download_links_in_batches(oids)
end
private
+ def get_download_links_in_batches(oids, batch_size = REQUEST_BATCH_SIZE)
+ download_links = []
+
+ oids.each_slice(batch_size) do |batch|
+ download_links += get_download_links(batch)
+ end
+
+ download_links
+
+ rescue DownloadLinksRequestEntityTooLargeError => e
+      # Log this exception to see how often it happens
+ Gitlab::ErrorTracking
+ .track_exception(e, project_id: project&.id, batch_size: batch_size, oids_count: oids.count)
+
+ # Try again with a smaller batch
+ batch_size /= 2
+
+ retry if batch_size > REQUEST_BATCH_SIZE / 3
+
+ raise DownloadLinksError, 'Unable to download due to RequestEntityTooLarge errors'
+ end
+
def get_download_links(oids)
response = Gitlab::HTTP.post(remote_uri,
body: request_body(oids),
headers: headers)
+ raise DownloadLinksRequestEntityTooLargeError if response.request_entity_too_large?
raise DownloadLinksError, response.message unless response.success?
      # Since the LFS Batch API may return a Content-Type of
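A minimal sketch of the batch-halving retry pattern introduced above; the constant value and the halving/`retry` logic come from the diff, while `TooLarge` and the `fetch_links` lambda are stand-ins for the real HTTP 413 error and Gitlab::HTTP call:

TooLarge = Class.new(StandardError)
REQUEST_BATCH_SIZE = 100

def links_in_batches(oids, fetch_links, batch_size = REQUEST_BATCH_SIZE)
  links = []
  oids.each_slice(batch_size) { |batch| links += fetch_links.call(batch) }
  links
rescue TooLarge
  batch_size /= 2                                # try again with a smaller batch
  retry if batch_size > REQUEST_BATCH_SIZE / 3   # give up once batches get too small
  raise 'Unable to download due to RequestEntityTooLarge errors'
end

# The fake server below rejects batches larger than 50, so the first attempt
# (batches of 100) fails and the retry with batches of 50 succeeds.
fake_server = ->(batch) { raise TooLarge if batch.size > 50; batch.map { |oid| "link-#{oid}" } }
links_in_batches((1..200).to_a, fake_server)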
diff --git a/app/services/projects/lfs_pointers/lfs_link_service.rb b/app/services/projects/lfs_pointers/lfs_link_service.rb
index a05c76f5e85..39cd553261f 100644
--- a/app/services/projects/lfs_pointers/lfs_link_service.rb
+++ b/app/services/projects/lfs_pointers/lfs_link_service.rb
@@ -25,7 +25,6 @@ module Projects
private
- # rubocop: disable CodeReuse/ActiveRecord
def link_existing_lfs_objects(oids)
linked_existing_objects = []
iterations = 0
@@ -33,7 +32,7 @@ module Projects
oids.each_slice(BATCH_SIZE) do |oids_batch|
# Load all existing LFS Objects immediately so we don't issue an extra
# query for the `.any?`
- existent_lfs_objects = LfsObject.where(oid: oids_batch).load
+ existent_lfs_objects = LfsObject.for_oids(oids_batch).load
next unless existent_lfs_objects.any?
rows = existent_lfs_objects
@@ -49,7 +48,6 @@ module Projects
linked_existing_objects
end
- # rubocop: enable CodeReuse/ActiveRecord
def log_lfs_link_results(lfs_objects_linked_count, iterations)
Gitlab::Import::Logger.info(
diff --git a/app/services/projects/lsif_data_service.rb b/app/services/projects/lsif_data_service.rb
index 971885b680e..142a5a910d4 100644
--- a/app/services/projects/lsif_data_service.rb
+++ b/app/services/projects/lsif_data_service.rb
@@ -2,20 +2,22 @@
module Projects
class LsifDataService
- attr_reader :file, :project, :path, :commit_id,
- :docs, :doc_ranges, :ranges, :def_refs, :hover_refs
+ attr_reader :file, :project, :commit_id, :docs,
+ :doc_ranges, :ranges, :def_refs, :hover_refs
CACHE_EXPIRE_IN = 1.hour
- def initialize(file, project, params)
+ def initialize(file, project, commit_id)
@file = file
@project = project
- @path = params[:path]
- @commit_id = params[:commit_id]
- end
+ @commit_id = commit_id
- def execute
fetch_data!
+ end
+
+ def execute(path)
+ doc_id = find_doc_id(docs, path)
+ dir_absolute_path = docs[doc_id]&.delete_suffix(path)
doc_ranges[doc_id]&.map do |range_id|
location, ref_id = ranges[range_id].values_at('loc', 'ref_id')
@@ -26,7 +28,7 @@ module Projects
end_line: line_data.last,
start_char: column_data.first,
end_char: column_data.last,
- definition_url: definition_url_for(def_refs[ref_id]),
+ definition_url: definition_url_for(def_refs[ref_id], dir_absolute_path),
hover: highlighted_hover(hover_refs[ref_id])
}
end
@@ -58,8 +60,8 @@ module Projects
@hover_refs = data['hover_refs']
end
- def doc_id
- @doc_id ||= docs.reduce(nil) do |doc_id, (id, doc_path)|
+ def find_doc_id(docs, path)
+ docs.reduce(nil) do |doc_id, (id, doc_path)|
next doc_id unless doc_path =~ /#{path}$/
if doc_id.nil? || docs[doc_id].size > doc_path.size
@@ -70,11 +72,7 @@ module Projects
end
end
- def dir_absolute_path
- @dir_absolute_path ||= docs[doc_id]&.delete_suffix(path)
- end
-
- def definition_url_for(ref_id)
+ def definition_url_for(ref_id, dir_absolute_path)
return unless range = ranges[ref_id]
def_doc_id, location = range.values_at('doc_id', 'loc')
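The refactor above moves the path argument from the constructor to execute, so a single service instance (which parses the LSIF dump once in fetch_data!) can be reused for several paths. A hypothetical call site, assuming an artifact file, project, and commit id are already in hand and the paths are purely illustrative:

service = Projects::LsifDataService.new(artifact_file, project, commit_id)
ranges_for_user  = service.execute('app/models/user.rb')
ranges_for_group = service.execute('app/models/group.rb')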
diff --git a/app/services/projects/protect_default_branch_service.rb b/app/services/projects/protect_default_branch_service.rb
index 245490791bf..1d3fb523448 100644
--- a/app/services/projects/protect_default_branch_service.rb
+++ b/app/services/projects/protect_default_branch_service.rb
@@ -11,7 +11,7 @@ module Projects
@project = project
@default_branch_protection = Gitlab::Access::BranchProtection
- .new(Gitlab::CurrentSettings.default_branch_protection)
+ .new(project.namespace.default_branch_protection)
end
def execute
diff --git a/app/services/projects/update_pages_service.rb b/app/services/projects/update_pages_service.rb
index 8b23f610ad1..59389a0fa65 100644
--- a/app/services/projects/update_pages_service.rb
+++ b/app/services/projects/update_pages_service.rb
@@ -27,7 +27,7 @@ module Projects
@status.run!
raise InvalidStateError, 'missing pages artifacts' unless build.artifacts?
- raise InvalidStateError, 'pages are outdated' unless latest?
+ raise InvalidStateError, 'build SHA is outdated for this ref' unless latest?
# Create temporary directory in which we will extract the artifacts
make_secure_tmp_dir(tmp_path) do |archive_path|
@@ -36,7 +36,7 @@ module Projects
# Check if we did extract public directory
archive_public_path = File.join(archive_path, PUBLIC_DIR)
raise InvalidStateError, 'pages miss the public folder' unless Dir.exist?(archive_public_path)
- raise InvalidStateError, 'pages are outdated' unless latest?
+ raise InvalidStateError, 'build SHA is outdated for this ref' unless latest?
deploy_page!(archive_public_path)
success
diff --git a/app/services/projects/update_repository_storage_service.rb b/app/services/projects/update_repository_storage_service.rb
new file mode 100644
index 00000000000..0602089a3ab
--- /dev/null
+++ b/app/services/projects/update_repository_storage_service.rb
@@ -0,0 +1,125 @@
+# frozen_string_literal: true
+
+module Projects
+ class UpdateRepositoryStorageService < BaseService
+ include Gitlab::ShellAdapter
+
+ Error = Class.new(StandardError)
+
+ def initialize(project)
+ @project = project
+ end
+
+ def execute(new_repository_storage_key)
+ mirror_repositories(new_repository_storage_key)
+
+ mark_old_paths_for_archive
+
+ project.update(repository_storage: new_repository_storage_key, repository_read_only: false)
+ project.leave_pool_repository
+ project.track_project_repository
+
+ enqueue_housekeeping
+
+ success
+
+ rescue Error, ArgumentError, Gitlab::Git::BaseError => e
+ project.update(repository_read_only: false)
+
+ Gitlab::ErrorTracking.track_exception(e, project_path: project.full_path)
+
+ error(s_("UpdateRepositoryStorage|Error moving repository storage for %{project_full_path} - %{message}") % { project_full_path: project.full_path, message: e.message })
+ end
+
+ private
+
+ def mirror_repositories(new_repository_storage_key)
+ mirror_repository(new_repository_storage_key)
+
+ if project.wiki.repository_exists?
+ mirror_repository(new_repository_storage_key, type: Gitlab::GlRepository::WIKI)
+ end
+ end
+
+ def mirror_repository(new_storage_key, type: Gitlab::GlRepository::PROJECT)
+ unless wait_for_pushes(type)
+ raise Error, s_('UpdateRepositoryStorage|Timeout waiting for %{type} repository pushes') % { type: type.name }
+ end
+
+ repository = type.repository_for(project)
+ full_path = repository.full_path
+ raw_repository = repository.raw
+ checksum = repository.checksum
+
+ # Initialize a git repository on the target path
+ new_repository = Gitlab::Git::Repository.new(
+ new_storage_key,
+ raw_repository.relative_path,
+ raw_repository.gl_repository,
+ full_path
+ )
+
+ new_repository.create_repository
+
+ new_repository.replicate(raw_repository)
+ new_checksum = new_repository.checksum
+
+ if checksum != new_checksum
+ raise Error, s_('UpdateRepositoryStorage|Failed to verify %{type} repository checksum from %{old} to %{new}') % { type: type.name, old: checksum, new: new_checksum }
+ end
+ end
+
+ def mark_old_paths_for_archive
+ old_repository_storage = project.repository_storage
+ new_project_path = moved_path(project.disk_path)
+
+ # Notice that the block passed to `run_after_commit` will run with `project`
+ # as its context
+ project.run_after_commit do
+ GitlabShellWorker.perform_async(:mv_repository,
+ old_repository_storage,
+ disk_path,
+ new_project_path)
+
+ if wiki.repository_exists?
+ GitlabShellWorker.perform_async(:mv_repository,
+ old_repository_storage,
+ wiki.disk_path,
+ "#{new_project_path}.wiki")
+ end
+ end
+ end
+
+ def moved_path(path)
+ "#{path}+#{project.id}+moved+#{Time.now.to_i}"
+ end
+
+ # The underlying FetchInternalRemote call uses a `git fetch` to move data
+ # to the new repository, which leaves it in a less-well-packed state,
+ # lacking bitmaps and commit graphs. Housekeeping will boost performance
+ # significantly.
+ def enqueue_housekeeping
+ return unless Gitlab::CurrentSettings.housekeeping_enabled?
+ return unless Feature.enabled?(:repack_after_shard_migration, project)
+
+ Projects::HousekeepingService.new(project, :gc).execute
+ rescue Projects::HousekeepingService::LeaseTaken
+ # No action required
+ end
+
+ def wait_for_pushes(type)
+ reference_counter = project.reference_counter(type: type)
+
+ # Try for 30 seconds, polling every 10
+ 3.times do
+ return true if reference_counter.value == 0
+
+ sleep 10
+ end
+
+ false
+ end
+ end
+end
+
+Projects::UpdateRepositoryStorageService.prepend_if_ee('EE::Projects::UpdateRepositoryStorageService')
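A hypothetical invocation of the new service, assuming 'nfs-file02' is a repository storage shard configured in gitlab.yml; the success/error hash shape follows the BaseService helpers used above:

# Illustrative only: project path and storage key are assumptions.
project = Project.find_by_full_path('group/project')
result  = Projects::UpdateRepositoryStorageService.new(project).execute('nfs-file02')
raise result[:message] if result[:status] == :error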
diff --git a/app/services/projects/update_service.rb b/app/services/projects/update_service.rb
index aedd7252f63..e10dede632a 100644
--- a/app/services/projects/update_service.rb
+++ b/app/services/projects/update_service.rb
@@ -13,6 +13,10 @@ module Projects
ensure_wiki_exists if enabling_wiki?
+ if changing_storage_size?
+ project.change_repository_storage(params.delete(:repository_storage))
+ end
+
yield if block_given?
validate_classification_label(project, :external_authorization_classification_label)
@@ -140,6 +144,13 @@ module Projects
def changing_pages_https_only?
project.previous_changes.include?(:pages_https_only)
end
+
+ def changing_storage_size?
+ new_repository_storage = params[:repository_storage]
+
+ new_repository_storage && project.repository.exists? &&
+ can?(current_user, :change_repository_storage, project)
+ end
end
end
diff --git a/app/services/repositories/base_service.rb b/app/services/repositories/base_service.rb
index 6a39399c791..a99a65b7edb 100644
--- a/app/services/repositories/base_service.rb
+++ b/app/services/repositories/base_service.rb
@@ -7,8 +7,8 @@ class Repositories::BaseService < BaseService
attr_reader :repository
- delegate :project, :disk_path, :full_path, to: :repository
- delegate :repository_storage, to: :project
+ delegate :container, :disk_path, :full_path, to: :repository
+ delegate :repository_storage, to: :container
def initialize(repository)
@repository = repository
@@ -31,7 +31,7 @@ class Repositories::BaseService < BaseService
# gitlab/cookies.git -> gitlab/cookies+119+deleted.git
#
def removal_path
- "#{disk_path}+#{project.id}#{DELETED_FLAG}"
+ "#{disk_path}+#{container.id}#{DELETED_FLAG}"
end
# If we get a Gitaly error, the repository may be corrupted. We can
@@ -40,7 +40,7 @@ class Repositories::BaseService < BaseService
def ignore_git_errors(&block)
yield
rescue Gitlab::Git::CommandError => e
- Gitlab::GitLogger.warn(class: self.class.name, project_id: project.id, disk_path: disk_path, message: e.to_s)
+ Gitlab::GitLogger.warn(class: self.class.name, container_id: container.id, disk_path: disk_path, message: e.to_s)
end
def move_error(path)
diff --git a/app/services/repositories/destroy_service.rb b/app/services/repositories/destroy_service.rb
index 374968f610e..b12d0744387 100644
--- a/app/services/repositories/destroy_service.rb
+++ b/app/services/repositories/destroy_service.rb
@@ -14,11 +14,11 @@ class Repositories::DestroyService < Repositories::BaseService
log_info(%Q{Repository "#{disk_path}" moved to "#{removal_path}" for repository "#{full_path}"})
current_repository = repository
- project.run_after_commit do
+ container.run_after_commit do
Repositories::ShellDestroyService.new(current_repository).execute
end
- log_info("Project \"#{project.full_path}\" was removed")
+ log_info("Repository \"#{full_path}\" was removed")
success
else
diff --git a/app/services/resource_events/change_milestone_service.rb b/app/services/resource_events/change_milestone_service.rb
new file mode 100644
index 00000000000..ea196822f74
--- /dev/null
+++ b/app/services/resource_events/change_milestone_service.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+module ResourceEvents
+ class ChangeMilestoneService
+ attr_reader :resource, :user, :event_created_at, :milestone
+
+ def initialize(resource, user, created_at: Time.now)
+ @resource = resource
+ @user = user
+ @event_created_at = created_at
+ @milestone = resource&.milestone
+ end
+
+ def execute
+ ResourceMilestoneEvent.create(build_resource_args)
+
+ resource.expire_note_etag_cache
+ end
+
+ private
+
+ def build_resource_args
+ action = milestone.blank? ? :remove : :add
+ key = resource.class.name.foreign_key
+
+ {
+ user_id: user.id,
+ created_at: event_created_at,
+ milestone_id: milestone&.id,
+ state: ResourceMilestoneEvent.states[resource.state],
+ action: ResourceMilestoneEvent.actions[action],
+ key => resource.id
+ }
+ end
+ end
+end
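A brief usage sketch, assuming an issue whose milestone was just changed and the user who changed it (both variable names are illustrative):

# Records a ResourceMilestoneEvent (:add when a milestone is set, :remove when
# it is cleared) instead of creating a classic system note row.
ResourceEvents::ChangeMilestoneService.new(issue, current_user).execute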
diff --git a/app/services/resource_events/merge_into_notes_service.rb b/app/services/resource_events/merge_into_notes_service.rb
index 47948fcff6e..4aa9bb80229 100644
--- a/app/services/resource_events/merge_into_notes_service.rb
+++ b/app/services/resource_events/merge_into_notes_service.rb
@@ -9,6 +9,11 @@ module ResourceEvents
class MergeIntoNotesService
include Gitlab::Utils::StrongMemoize
+ SYNTHETIC_NOTE_BUILDER_SERVICES = [
+ SyntheticLabelNotesBuilderService,
+ SyntheticMilestoneNotesBuilderService
+ ].freeze
+
attr_reader :resource, :current_user, :params
def initialize(resource, current_user, params = {})
@@ -24,7 +29,9 @@ module ResourceEvents
private
def synthetic_notes
- SyntheticLabelNotesBuilderService.new(resource, current_user, params).execute
+ SYNTHETIC_NOTE_BUILDER_SERVICES.flat_map do |service|
+ service.new(resource, current_user, params).execute
+ end
end
end
end
diff --git a/app/services/resource_events/synthetic_milestone_notes_builder_service.rb b/app/services/resource_events/synthetic_milestone_notes_builder_service.rb
new file mode 100644
index 00000000000..ad58417834e
--- /dev/null
+++ b/app/services/resource_events/synthetic_milestone_notes_builder_service.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+# We store events about resource milestone changes in a separate table,
+# but we still want to display notes about milestone changes
+# as classic system notes in UI. This service generates "synthetic" notes for
+# milestone event changes.
+
+module ResourceEvents
+ class SyntheticMilestoneNotesBuilderService < BaseSyntheticNotesBuilderService
+ private
+
+ def synthetic_notes
+ return [] unless tracking_enabled?
+
+ milestone_change_events.map do |event|
+ MilestoneNote.from_event(event, resource: resource, resource_parent: resource_parent)
+ end
+ end
+
+ def milestone_change_events
+ return [] unless resource.respond_to?(:resource_milestone_events)
+
+ events = resource.resource_milestone_events.includes(user: :status) # rubocop: disable CodeReuse/ActiveRecord
+ since_fetch_at(events)
+ end
+
+ def tracking_enabled?
+ ::Feature.enabled?(:track_resource_milestone_change_events, resource.project)
+ end
+ end
+end
diff --git a/app/services/search_service.rb b/app/services/search_service.rb
index fe5e823b56c..75cd6c78a52 100644
--- a/app/services/search_service.rb
+++ b/app/services/search_service.rb
@@ -3,6 +3,11 @@
class SearchService
include Gitlab::Allowable
+ REDACTABLE_RESULTS = [
+ ActiveRecord::Relation,
+ Gitlab::Search::FoundBlob
+ ].freeze
+
SEARCH_TERM_LIMIT = 64
SEARCH_CHAR_LIMIT = 4096
@@ -60,11 +65,52 @@ class SearchService
end
def search_objects
- @search_objects ||= search_results.objects(scope, params[:page])
+ @search_objects ||= redact_unauthorized_results(search_results.objects(scope, params[:page]))
+ end
+
+ def redactable_results
+ REDACTABLE_RESULTS
end
private
+ def visible_result?(object)
+ return true unless object.respond_to?(:to_ability_name) && DeclarativePolicy.has_policy?(object)
+
+ Ability.allowed?(current_user, :"read_#{object.to_ability_name}", object)
+ end
+
+ def redact_unauthorized_results(results)
+ return results unless redactable_results.any? { |redactable| results.is_a?(redactable) }
+
+ permitted_results = results.select do |object|
+ visible_result?(object)
+ end
+
+ filtered_results = (results - permitted_results).each_with_object({}) do |object, memo|
+ memo[object.id] = { ability: :"read_#{object.to_ability_name}", id: object.id, class_name: object.class.name }
+ end
+
+ log_redacted_search_results(filtered_results.values) if filtered_results.any?
+
+ return results.id_not_in(filtered_results.keys) if results.is_a?(ActiveRecord::Relation)
+
+ Kaminari.paginate_array(
+ permitted_results,
+ total_count: results.total_count,
+ limit: results.limit_value,
+ offset: results.offset_value
+ )
+ end
+
+ def log_redacted_search_results(filtered_results)
+ logger.error(message: "redacted_search_results", filtered: filtered_results, current_user_id: current_user&.id, query: params[:search])
+ end
+
+ def logger
+ @logger ||= ::Gitlab::RedactedSearchResultsLogger.build
+ end
+
def search_service
@search_service ||=
if project
diff --git a/app/services/serverless/associate_domain_service.rb b/app/services/serverless/associate_domain_service.rb
new file mode 100644
index 00000000000..673f1f83260
--- /dev/null
+++ b/app/services/serverless/associate_domain_service.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module Serverless
+ class AssociateDomainService
+ PLACEHOLDER_HOSTNAME = 'example.com'.freeze
+
+ def initialize(knative, pages_domain_id:, creator:)
+ @knative = knative
+ @pages_domain_id = pages_domain_id
+ @creator = creator
+ end
+
+ def execute
+ return if unchanged?
+
+ knative.hostname ||= PLACEHOLDER_HOSTNAME
+
+ knative.pages_domain = knative.find_available_domain(pages_domain_id)
+ knative.serverless_domain_cluster.update(creator: creator) if knative.pages_domain
+ end
+
+ private
+
+ attr_reader :knative, :pages_domain_id, :creator
+
+ def unchanged?
+ knative.pages_domain&.id == pages_domain_id
+ end
+ end
+end
diff --git a/app/services/snippets/bulk_destroy_service.rb b/app/services/snippets/bulk_destroy_service.rb
new file mode 100644
index 00000000000..d9cc383a5a6
--- /dev/null
+++ b/app/services/snippets/bulk_destroy_service.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+module Snippets
+ class BulkDestroyService
+ include Gitlab::Allowable
+
+ attr_reader :current_user, :snippets
+
+ DeleteRepositoryError = Class.new(StandardError)
+ SnippetAccessError = Class.new(StandardError)
+
+ def initialize(user, snippets)
+ @current_user = user
+ @snippets = snippets
+ end
+
+ def execute
+ return ServiceResponse.success(message: 'No snippets found.') if snippets.empty?
+
+ user_can_delete_snippets!
+ attempt_delete_repositories!
+ snippets.destroy_all # rubocop: disable DestroyAll
+
+ ServiceResponse.success(message: 'Snippets were deleted.')
+ rescue SnippetAccessError
+ service_response_error("You don't have access to delete these snippets.", 403)
+ rescue DeleteRepositoryError
+ attempt_rollback_repositories
+ service_response_error('Failed to delete snippet repositories.', 400)
+ rescue
+ # In case the delete operation fails
+ attempt_rollback_repositories
+ service_response_error('Failed to remove snippets.', 400)
+ end
+
+ private
+
+ def user_can_delete_snippets!
+ allowed = DeclarativePolicy.user_scope do
+ snippets.find_each.all? { |snippet| user_can_delete_snippet?(snippet) }
+ end
+
+ raise SnippetAccessError unless allowed
+ end
+
+ def user_can_delete_snippet?(snippet)
+ can?(current_user, :admin_snippet, snippet)
+ end
+
+ def attempt_delete_repositories!
+ snippets.each do |snippet|
+ result = Repositories::DestroyService.new(snippet.repository).execute
+
+ raise DeleteRepositoryError if result[:status] == :error
+ end
+ end
+
+ def attempt_rollback_repositories
+ snippets.each do |snippet|
+ result = Repositories::DestroyRollbackService.new(snippet.repository).execute
+
+ log_rollback_error(snippet) if result[:status] == :error
+ end
+ end
+
+ def log_rollback_error(snippet)
+ Gitlab::AppLogger.error("Repository #{snippet.full_path} in path #{snippet.disk_path} could not be rolled back")
+ end
+
+ def service_response_error(message, http_status)
+ ServiceResponse.error(message: message, http_status: http_status)
+ end
+ end
+end
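A usage sketch mirroring the call Users::DestroyService makes further down in this diff; `user` is assumed to be the account whose snippets are being removed:

response = Snippets::BulkDestroyService.new(current_user, user.snippets).execute
raise response.message if response.error?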
diff --git a/app/services/snippets/create_service.rb b/app/services/snippets/create_service.rb
index 7ded185a6f9..2998208f50b 100644
--- a/app/services/snippets/create_service.rb
+++ b/app/services/snippets/create_service.rb
@@ -4,6 +4,8 @@ module Snippets
class CreateService < Snippets::BaseService
include SpamCheckMethods
+ CreateRepositoryError = Class.new(StandardError)
+
def execute
filter_spam_check_params
@@ -23,13 +25,7 @@ module Snippets
spam_check(snippet, current_user)
- snippet_saved = snippet.with_transaction_returning_status do
- (snippet.save && snippet.store_mentions!).tap do |saved|
- create_repository_for(snippet, current_user) if saved
- end
- end
-
- if snippet_saved
+ if save_and_commit(snippet)
UserAgentDetailService.new(snippet, @request).create
Gitlab::UsageDataCounters::SnippetCounter.count(:create)
@@ -41,8 +37,50 @@ module Snippets
private
- def create_repository_for(snippet, user)
- snippet.create_repository if Feature.enabled?(:version_snippets, user)
+ def save_and_commit(snippet)
+ result = snippet.with_transaction_returning_status do
+ (snippet.save && snippet.store_mentions!).tap do |saved|
+ break false unless saved
+
+ if Feature.enabled?(:version_snippets, current_user)
+ create_repository_for(snippet)
+ end
+ end
+ end
+
+ create_commit(snippet) if result && snippet.repository_exists?
+
+ result
+ rescue => e # Rescuing all because we can receive Creation exceptions, GRPC exceptions, Git exceptions, ...
+ snippet.errors.add(:base, e.message)
+
+      # If the commit action failed we need to remove the repository if it exists
+ snippet.repository.remove if snippet.repository_exists?
+
+      # If the snippet was created, we need to remove it as we
+      # would if it had failed validation
+ snippet.delete if snippet.persisted?
+
+ false
+ end
+
+ def create_repository_for(snippet)
+ snippet.create_repository
+
+ raise CreateRepositoryError, 'Repository could not be created' unless snippet.repository_exists?
+ end
+
+ def create_commit(snippet)
+ commit_attrs = {
+ branch_name: 'master',
+ message: 'Initial commit'
+ }
+
+ snippet.snippet_repository.multi_files_action(current_user, snippet_files, commit_attrs)
+ end
+
+ def snippet_files
+ [{ file_path: params[:file_name], content: params[:content] }]
end
end
end
diff --git a/app/services/snippets/destroy_service.rb b/app/services/snippets/destroy_service.rb
index c1e87e74aa4..977626fcf17 100644
--- a/app/services/snippets/destroy_service.rb
+++ b/app/services/snippets/destroy_service.rb
@@ -4,12 +4,13 @@ module Snippets
class DestroyService
include Gitlab::Allowable
- attr_reader :current_user, :project
+ attr_reader :current_user, :snippet
+
+ DestroyError = Class.new(StandardError)
def initialize(user, snippet)
@current_user = user
@snippet = snippet
- @project = snippet&.project
end
def execute
@@ -24,16 +25,29 @@ module Snippets
)
end
- if snippet.destroy
- ServiceResponse.success(message: 'Snippet was deleted.')
- else
- service_response_error('Failed to remove snippet.', 400)
- end
+ attempt_destroy!
+
+ ServiceResponse.success(message: 'Snippet was deleted.')
+ rescue DestroyError
+ service_response_error('Failed to remove snippet repository.', 400)
+ rescue
+ attempt_rollback_repository
+ service_response_error('Failed to remove snippet.', 400)
end
private
- attr_reader :snippet
+ def attempt_destroy!
+ result = Repositories::DestroyService.new(snippet.repository).execute
+
+ raise DestroyError if result[:status] == :error
+
+ snippet.destroy!
+ end
+
+ def attempt_rollback_repository
+ Repositories::DestroyRollbackService.new(snippet.repository).execute
+ end
def user_can_delete_snippet?
can?(current_user, :admin_snippet, snippet)
diff --git a/app/services/snippets/update_service.rb b/app/services/snippets/update_service.rb
index c0c0aec2050..874357f36cc 100644
--- a/app/services/snippets/update_service.rb
+++ b/app/services/snippets/update_service.rb
@@ -4,6 +4,9 @@ module Snippets
class UpdateService < Snippets::BaseService
include SpamCheckMethods
+ UpdateError = Class.new(StandardError)
+ CreateRepositoryError = Class.new(StandardError)
+
def execute(snippet)
# check that user is allowed to set specified visibility_level
new_visibility = visibility_level
@@ -20,11 +23,7 @@ module Snippets
snippet.assign_attributes(params)
spam_check(snippet, current_user)
- snippet_saved = snippet.with_transaction_returning_status do
- snippet.save
- end
-
- if snippet_saved
+ if save_and_commit(snippet)
Gitlab::UsageDataCounters::SnippetCounter.count(:update)
ServiceResponse.success(payload: { snippet: snippet } )
@@ -32,5 +31,54 @@ module Snippets
snippet_error_response(snippet, 400)
end
end
+
+ private
+
+ def save_and_commit(snippet)
+ snippet.with_transaction_returning_status do
+ snippet.save.tap do |saved|
+ break false unless saved
+
+          # To handle snippets that have not been migrated yet, we create
+          # a repository if the snippet does not have one.
+          # We don't need to check whether the repository already exists
+          # because `create_repository` handles that.
+ if Feature.enabled?(:version_snippets, current_user)
+ create_repository_for(snippet)
+ end
+
+          # If the snippet repository exists we always commit
+          # the changes
+ create_commit(snippet) if snippet.repository_exists?
+ end
+ rescue
+ snippet.errors.add(:repository, 'Error updating the snippet')
+
+ false
+ end
+ end
+
+ def create_repository_for(snippet)
+ snippet.create_repository
+
+ raise CreateRepositoryError, 'Repository could not be created' unless snippet.repository_exists?
+ end
+
+ def create_commit(snippet)
+ raise UpdateError unless snippet.snippet_repository
+
+ commit_attrs = {
+ branch_name: 'master',
+ message: 'Update snippet'
+ }
+
+ snippet.snippet_repository.multi_files_action(current_user, snippet_files(snippet), commit_attrs)
+ end
+
+ def snippet_files(snippet)
+ [{ previous_path: snippet.blobs.first&.path,
+ file_path: params[:file_name],
+ content: params[:content] }]
+ end
end
end
diff --git a/app/services/spam/ham_service.rb b/app/services/spam/ham_service.rb
index d0f53eea90c..87069d5cd54 100644
--- a/app/services/spam/ham_service.rb
+++ b/app/services/spam/ham_service.rb
@@ -23,6 +23,6 @@ module Spam
end
end
- alias_method :spammable, :spam_log
+ alias_method :target, :spam_log
end
end
diff --git a/app/services/spam/mark_as_spam_service.rb b/app/services/spam/mark_as_spam_service.rb
index 0ebcf17927a..ed5e674d8e9 100644
--- a/app/services/spam/mark_as_spam_service.rb
+++ b/app/services/spam/mark_as_spam_service.rb
@@ -4,21 +4,21 @@ module Spam
class MarkAsSpamService
include ::AkismetMethods
- attr_accessor :spammable, :options
+ attr_accessor :target, :options
- def initialize(spammable:)
- @spammable = spammable
+ def initialize(target:)
+ @target = target
@options = {}
- @options[:ip_address] = @spammable.ip_address
- @options[:user_agent] = @spammable.user_agent
+ @options[:ip_address] = @target.ip_address
+ @options[:user_agent] = @target.user_agent
end
def execute
- return unless spammable.submittable_as_spam?
+ return unless target.submittable_as_spam?
return unless akismet.submit_spam
- spammable.user_agent_detail.update_attribute(:submitted, true)
+ target.user_agent_detail.update_attribute(:submitted, true)
end
end
end
diff --git a/app/services/spam/spam_check_service.rb b/app/services/spam/spam_check_service.rb
index d19ef03976f..3269f9d687a 100644
--- a/app/services/spam/spam_check_service.rb
+++ b/app/services/spam/spam_check_service.rb
@@ -4,11 +4,11 @@ module Spam
class SpamCheckService
include AkismetMethods
- attr_accessor :spammable, :request, :options
+ attr_accessor :target, :request, :options
attr_reader :spam_log
def initialize(spammable:, request:)
- @spammable = spammable
+ @target = spammable
@request = request
@options = {}
@@ -17,8 +17,8 @@ module Spam
@options[:user_agent] = @request.env['HTTP_USER_AGENT']
@options[:referrer] = @request.env['HTTP_REFERRER']
else
- @options[:ip_address] = @spammable.ip_address
- @options[:user_agent] = @spammable.user_agent
+ @options[:ip_address] = @target.ip_address
+ @options[:user_agent] = @target.user_agent
end
end
@@ -31,8 +31,8 @@ module Spam
# Otherwise, it goes to Akismet for spam check.
# If so, it assigns spammable object as "spam" and creates a SpamLog record.
possible_spam = check(api)
- spammable.spam = possible_spam unless spammable.allow_possible_spam?
- spammable.spam_log = spam_log
+ target.spam = possible_spam unless target.allow_possible_spam?
+ target.spam_log = spam_log
end
end
@@ -48,18 +48,18 @@ module Spam
end
def check_for_spam?
- spammable.check_for_spam?
+ target.check_for_spam?
end
def create_spam_log(api)
@spam_log = SpamLog.create!(
{
- user_id: spammable.author_id,
- title: spammable.spam_title,
- description: spammable.spam_description,
+ user_id: target.author_id,
+ title: target.spam_title,
+ description: target.spam_description,
source_ip: options[:ip_address],
user_agent: options[:user_agent],
- noteable_type: spammable.class.to_s,
+ noteable_type: target.class.to_s,
via_api: api
}
)
diff --git a/app/services/system_note_service.rb b/app/services/system_note_service.rb
index 8a0f44b4e93..1b9f5971f73 100644
--- a/app/services/system_note_service.rb
+++ b/app/services/system_note_service.rb
@@ -241,6 +241,10 @@ module SystemNoteService
def zoom_link_removed(issue, project, author)
::SystemNotes::ZoomService.new(noteable: issue, project: project, author: author).zoom_link_removed
end
+
+ def auto_resolve_prometheus_alert(noteable, project, author)
+ ::SystemNotes::IssuablesService.new(noteable: noteable, project: project, author: author).auto_resolve_prometheus_alert
+ end
end
SystemNoteService.prepend_if_ee('EE::SystemNoteService')
diff --git a/app/services/system_notes/issuables_service.rb b/app/services/system_notes/issuables_service.rb
index d7787dac4b8..275c64bea89 100644
--- a/app/services/system_notes/issuables_service.rb
+++ b/app/services/system_notes/issuables_service.rb
@@ -144,7 +144,7 @@ module SystemNotes
#
# Returns Boolean
def cross_reference_disallowed?(mentioner)
- return true if noteable.is_a?(ExternalIssue) && !noteable.project.jira_tracker_active?
+ return true if noteable.is_a?(ExternalIssue) && !noteable.project&.external_references_supported?
return false unless mentioner.is_a?(MergeRequest)
return false unless noteable.is_a?(Commit)
@@ -288,6 +288,12 @@ module SystemNotes
create_note(NoteSummary.new(noteable, project, author, body, action: 'closed'))
end
+ def auto_resolve_prometheus_alert
+ body = 'automatically closed this issue because the alert resolved.'
+
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'closed'))
+ end
+
private
def cross_reference_note_content(gfm_reference)
diff --git a/app/services/system_notes/merge_requests_service.rb b/app/services/system_notes/merge_requests_service.rb
index a26fc0f7d35..baf26245eb9 100644
--- a/app/services/system_notes/merge_requests_service.rb
+++ b/app/services/system_notes/merge_requests_service.rb
@@ -143,7 +143,7 @@ module SystemNotes
def picked_into_branch(branch_name, pick_commit)
link = url_helpers.project_tree_path(project, branch_name)
- body = "picked this merge request into branch [`#{branch_name}`](#{link}) with commit #{pick_commit}"
+ body = "picked the changes into the branch [`#{branch_name}`](#{link}) with commit #{pick_commit}"
summary = NoteSummary.new(noteable, project, author, body, action: 'cherry_pick')
summary.note[:commit_id] = pick_commit
diff --git a/app/services/test_hooks/base_service.rb b/app/services/test_hooks/base_service.rb
index 8b5439c00bf..ebebf29c28b 100644
--- a/app/services/test_hooks/base_service.rb
+++ b/app/services/test_hooks/base_service.rb
@@ -18,7 +18,7 @@ module TestHooks
return error('Testing not available for this hook')
end
- error_message = catch(:validation_error) do
+ error_message = catch(:validation_error) do # rubocop:disable Cop/BanCatchThrow
sample_data = self.__send__(trigger_data_method) # rubocop:disable GitlabSecurity/PublicSend
return hook.execute(sample_data, trigger_key) # rubocop:disable Cop/AvoidReturnFromBlocks
diff --git a/app/services/test_hooks/project_service.rb b/app/services/test_hooks/project_service.rb
index a71278e8b8b..aa80cc928b9 100644
--- a/app/services/test_hooks/project_service.rb
+++ b/app/services/test_hooks/project_service.rb
@@ -11,7 +11,7 @@ module TestHooks
private
def push_events_data
- throw(:validation_error, s_('TestHooks|Ensure the project has at least one commit.')) if project.empty_repo?
+ throw(:validation_error, s_('TestHooks|Ensure the project has at least one commit.')) if project.empty_repo? # rubocop:disable Cop/BanCatchThrow
Gitlab::DataBuilder::Push.build_sample(project, current_user)
end
@@ -20,14 +20,14 @@ module TestHooks
def note_events_data
note = project.notes.first
- throw(:validation_error, s_('TestHooks|Ensure the project has notes.')) unless note.present?
+ throw(:validation_error, s_('TestHooks|Ensure the project has notes.')) unless note.present? # rubocop:disable Cop/BanCatchThrow
Gitlab::DataBuilder::Note.build(note, current_user)
end
def issues_events_data
issue = project.issues.first
- throw(:validation_error, s_('TestHooks|Ensure the project has issues.')) unless issue.present?
+ throw(:validation_error, s_('TestHooks|Ensure the project has issues.')) unless issue.present? # rubocop:disable Cop/BanCatchThrow
issue.to_hook_data(current_user)
end
@@ -36,21 +36,21 @@ module TestHooks
def merge_requests_events_data
merge_request = project.merge_requests.first
- throw(:validation_error, s_('TestHooks|Ensure the project has merge requests.')) unless merge_request.present?
+ throw(:validation_error, s_('TestHooks|Ensure the project has merge requests.')) unless merge_request.present? # rubocop:disable Cop/BanCatchThrow
merge_request.to_hook_data(current_user)
end
def job_events_data
build = project.builds.first
- throw(:validation_error, s_('TestHooks|Ensure the project has CI jobs.')) unless build.present?
+ throw(:validation_error, s_('TestHooks|Ensure the project has CI jobs.')) unless build.present? # rubocop:disable Cop/BanCatchThrow
Gitlab::DataBuilder::Build.build(build)
end
def pipeline_events_data
pipeline = project.ci_pipelines.first
- throw(:validation_error, s_('TestHooks|Ensure the project has CI pipelines.')) unless pipeline.present?
+ throw(:validation_error, s_('TestHooks|Ensure the project has CI pipelines.')) unless pipeline.present? # rubocop:disable Cop/BanCatchThrow
Gitlab::DataBuilder::Pipeline.build(pipeline)
end
@@ -58,7 +58,7 @@ module TestHooks
def wiki_page_events_data
page = project.wiki.list_pages(limit: 1).first
if !project.wiki_enabled? || page.blank?
- throw(:validation_error, s_('TestHooks|Ensure the wiki is enabled and has pages.'))
+ throw(:validation_error, s_('TestHooks|Ensure the wiki is enabled and has pages.')) # rubocop:disable Cop/BanCatchThrow
end
Gitlab::DataBuilder::WikiPage.build(page, current_user, 'create')
diff --git a/app/services/test_hooks/system_service.rb b/app/services/test_hooks/system_service.rb
index fedf9c6799b..5c7961f417d 100644
--- a/app/services/test_hooks/system_service.rb
+++ b/app/services/test_hooks/system_service.rb
@@ -18,7 +18,7 @@ module TestHooks
def merge_requests_events_data
merge_request = MergeRequest.of_projects(current_user.projects.select(:id)).first
- throw(:validation_error, s_('TestHooks|Ensure one of your projects has merge requests.')) unless merge_request.present?
+ throw(:validation_error, s_('TestHooks|Ensure one of your projects has merge requests.')) unless merge_request.present? # rubocop:disable Cop/BanCatchThrow
merge_request.to_hook_data(current_user)
end
diff --git a/app/services/users/build_service.rb b/app/services/users/build_service.rb
index 56631bf2785..4c3ae2d204d 100644
--- a/app/services/users/build_service.rb
+++ b/app/services/users/build_service.rb
@@ -28,9 +28,7 @@ module Users
end
end
- unless identity_params.empty?
- user.identities.build(identity_params)
- end
+ build_identity(user)
user
end
@@ -41,6 +39,12 @@ module Users
[:extern_uid, :provider]
end
+ def build_identity(user)
+ return if identity_params.empty?
+
+ user.identities.build(identity_params)
+ end
+
def can_create_user?
(current_user.nil? && Gitlab::CurrentSettings.allow_signup?) || current_user&.admin?
end
diff --git a/app/services/users/destroy_service.rb b/app/services/users/destroy_service.rb
index ef79ee3d06e..587a8516394 100644
--- a/app/services/users/destroy_service.rb
+++ b/app/services/users/destroy_service.rb
@@ -56,10 +56,13 @@ module Users
MigrateToGhostUserService.new(user).execute unless options[:hard_delete]
+ response = Snippets::BulkDestroyService.new(current_user, user.snippets).execute
+ raise DestroyError, response.message if response.error?
+
# Rails attempts to load all related records into memory before
# destroying: https://github.com/rails/rails/issues/22510
# This ensures we delete records in batches.
- user.destroy_dependent_associations_in_batches
+ user.destroy_dependent_associations_in_batches(exclude: [:snippets])
# Destroy the namespace after destroying the user since certain methods may depend on the namespace existing
user_data = user.destroy
diff --git a/app/services/x509_certificate_revoke_service.rb b/app/services/x509_certificate_revoke_service.rb
new file mode 100644
index 00000000000..fdd0dd74a59
--- /dev/null
+++ b/app/services/x509_certificate_revoke_service.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+class X509CertificateRevokeService
+ def execute(certificate)
+ return unless certificate.revoked?
+
+ certificate.x509_commit_signatures.update_all(verification_status: :unverified)
+ end
+end
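A one-line usage sketch, assuming `certificate` is an X509Certificate record that has just been marked as revoked:

# No-op for non-revoked certificates; otherwise flips all of the certificate's
# commit signatures to :unverified in a single update.
X509CertificateRevokeService.new.execute(certificate)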
diff --git a/app/uploaders/attachment_uploader.rb b/app/uploaders/attachment_uploader.rb
index b488bba00e9..47de6fe0fbd 100644
--- a/app/uploaders/attachment_uploader.rb
+++ b/app/uploaders/attachment_uploader.rb
@@ -11,4 +11,15 @@ class AttachmentUploader < GitlabUploader
def dynamic_segment
File.join(model.class.underscore, mounted_as.to_s, model.id.to_s)
end
+
+ def mounted_as
+    # Geo fails to sync attachments on Notes and LegacyDiffNotes with a missing mount_point.
+ #
+ # See https://gitlab.com/gitlab-org/gitlab/-/issues/209752 for more details.
+ if model.class.underscore.include?('note')
+ super || 'attachment'
+ else
+ super
+ end
+ end
end
diff --git a/app/uploaders/avatar_uploader.rb b/app/uploaders/avatar_uploader.rb
index e4046e4b7e6..73dafaefb41 100644
--- a/app/uploaders/avatar_uploader.rb
+++ b/app/uploaders/avatar_uploader.rb
@@ -5,9 +5,8 @@ class AvatarUploader < GitlabUploader
include RecordsUploads::Concern
include ObjectStorage::Concern
prepend ObjectStorage::Extension::RecordsUploads
- include UploadTypeCheck::Concern
- check_upload_type extensions: AvatarUploader::SAFE_IMAGE_EXT
+ MIME_WHITELIST = %w[image/png image/jpeg image/gif image/bmp image/tiff image/vnd.microsoft.icon].freeze
def exists?
model.avatar.file && model.avatar.file.present?
@@ -29,6 +28,10 @@ class AvatarUploader < GitlabUploader
super || 'avatar'
end
+ def content_type_whitelist
+ MIME_WHITELIST
+ end
+
private
def dynamic_segment
diff --git a/app/uploaders/content_type_whitelist.rb b/app/uploaders/content_type_whitelist.rb
new file mode 100644
index 00000000000..b3975d7e2e0
--- /dev/null
+++ b/app/uploaders/content_type_whitelist.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+# Currently we run CarrierWave 1.3.1, which means we cannot whitelist files
+# by their content type through magic header parsing.
+#
+# This is a patch to hold us over until we get to CarrierWave 2 :) It's a mashup of
+# CarrierWave's lib/carrierwave/uploader/content_type_whitelist.rb and
+# lib/carrierwave/sanitized_file.rb
+#
+# Include this concern and add a content_type_whitelist method to get the same
+# behavior as you would with CarrierWave 2.
+#
+# This is not an exact replacement as we don't override
+# SanitizedFile#content_type but we do set the content_type attribute when we
+# check the whitelist.
+#
+# Remove this after moving to CarrierWave 2, though in practical terms it shouldn't
+# break anything if left for a while.
+module ContentTypeWhitelist
+ module Concern
+ extend ActiveSupport::Concern
+
+ private
+
+    # CarrierWave calls this method as part of its before :cache callbacks.
+ # Here we override and extend CarrierWave's method that does not parse the
+ # magic headers.
+ def check_content_type_whitelist!(new_file)
+ new_file.content_type = mime_magic_content_type(new_file.path)
+
+ if content_type_whitelist && !whitelisted_content_type?(new_file.content_type)
+ message = I18n.translate(:"errors.messages.content_type_whitelist_error", allowed_types: Array(content_type_whitelist).join(", "))
+ raise CarrierWave::IntegrityError, message
+ end
+
+ super(new_file)
+ end
+
+ def whitelisted_content_type?(content_type)
+ Array(content_type_whitelist).any? { |item| content_type =~ /#{item}/ }
+ end
+
+ def mime_magic_content_type(path)
+ if path
+ File.open(path) do |file|
+ MimeMagic.by_magic(file).try(:type) || 'invalid/invalid'
+ end
+ end
+ rescue Errno::ENOENT
+ nil
+ end
+ end
+end
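A hypothetical uploader using the concern; since GitlabUploader now includes ContentTypeWhitelist::Concern in this diff, a subclass only needs to expose content_type_whitelist (the class name and MIME list here are illustrative):

class LogoUploader < GitlabUploader
  MIME_WHITELIST = %w[image/png image/jpeg].freeze

  # Files whose magic-header MIME type is not in this list raise
  # CarrierWave::IntegrityError before they are cached.
  def content_type_whitelist
    MIME_WHITELIST
  end
end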
diff --git a/app/uploaders/favicon_uploader.rb b/app/uploaders/favicon_uploader.rb
index f393fdf0d84..c9be55e001c 100644
--- a/app/uploaders/favicon_uploader.rb
+++ b/app/uploaders/favicon_uploader.rb
@@ -1,16 +1,17 @@
# frozen_string_literal: true
class FaviconUploader < AttachmentUploader
- include UploadTypeCheck::Concern
-
EXTENSION_WHITELIST = %w[png ico].freeze
-
- check_upload_type extensions: EXTENSION_WHITELIST
+ MIME_WHITELIST = %w[image/png image/vnd.microsoft.icon].freeze
def extension_whitelist
EXTENSION_WHITELIST
end
+ def content_type_whitelist
+ MIME_WHITELIST
+ end
+
private
def filename_for_different_format(filename, format)
diff --git a/app/uploaders/gitlab_uploader.rb b/app/uploaders/gitlab_uploader.rb
index 7dc211b14e4..654bb15378c 100644
--- a/app/uploaders/gitlab_uploader.rb
+++ b/app/uploaders/gitlab_uploader.rb
@@ -1,6 +1,8 @@
# frozen_string_literal: true
class GitlabUploader < CarrierWave::Uploader::Base
+ include ContentTypeWhitelist::Concern
+
class_attribute :options
class << self
diff --git a/app/uploaders/object_storage.rb b/app/uploaders/object_storage.rb
index 450ebb00b49..0e83932ff26 100644
--- a/app/uploaders/object_storage.rb
+++ b/app/uploaders/object_storage.rb
@@ -394,7 +394,7 @@ module ObjectStorage
def storage_for(store)
case store
when Store::REMOTE
- raise 'Object Storage is not enabled' unless self.class.object_store_enabled?
+ raise "Object Storage is not enabled for #{self.class}" unless self.class.object_store_enabled?
CarrierWave::Storage::Fog.new(self)
when Store::LOCAL
diff --git a/app/uploaders/upload_type_check.rb b/app/uploaders/upload_type_check.rb
deleted file mode 100644
index 2837b001660..00000000000
--- a/app/uploaders/upload_type_check.rb
+++ /dev/null
@@ -1,98 +0,0 @@
-# frozen_string_literal: true
-
-# Ensure that uploaded files are what they say they are for security and
-# handling purposes. The checks are not 100% reliable so we err on the side of
-# caution and allow by default, and deny when we're confident of a fail state.
-#
-# Include this concern, then call `check_upload_type` to check all
-# uploads. Attach a `mime_type` or `extensions` parameter to only check
-# specific upload types. Both parameters will be normalized to a MIME type and
-# checked against the inferred MIME type of the upload content and filename
-# extension.
-#
-# class YourUploader
-# include UploadTypeCheck::Concern
-# check_upload_type mime_types: ['image/png', /image\/jpe?g/]
-#
-# # or...
-#
-# check_upload_type extensions: ['png', 'jpg', 'jpeg']
-# end
-#
-# The mime_types parameter can accept `NilClass`, `String`, `Regexp`,
-# `Array[String, Regexp]`. This matches the CarrierWave `extension_whitelist`
-# and `content_type_whitelist` family of behavior.
-#
-# The extensions parameter can accept `NilClass`, `String`, `Array[String]`.
-module UploadTypeCheck
- module Concern
- extend ActiveSupport::Concern
-
- class_methods do
- def check_upload_type(mime_types: nil, extensions: nil)
- define_method :check_upload_type_callback do |file|
- magic_file = MagicFile.new(file.to_file)
-
- # Map file extensions back to mime types.
- if extensions
- mime_types = Array(mime_types) +
- Array(extensions).map { |e| MimeMagic::EXTENSIONS[e] }
- end
-
- if mime_types.nil? || magic_file.matches_mime_types?(mime_types)
- check_content_matches_extension!(magic_file)
- end
- end
- before :cache, :check_upload_type_callback
- end
- end
-
- def check_content_matches_extension!(magic_file)
- return if magic_file.ambiguous_type?
-
- if magic_file.magic_type != magic_file.ext_type
- raise CarrierWave::IntegrityError, 'Content type does not match file extension'
- end
- end
- end
-
- # Convenience class to wrap MagicMime objects.
- class MagicFile
- attr_reader :file
-
- def initialize(file)
- @file = file
- end
-
- def magic_type
- @magic_type ||= MimeMagic.by_magic(file)
- end
-
- def ext_type
- @ext_type ||= MimeMagic.by_path(file.path)
- end
-
- def magic_type_type
- magic_type&.type
- end
-
- def ext_type_type
- ext_type&.type
- end
-
- def matches_mime_types?(mime_types)
- Array(mime_types).any? do |mt|
- magic_type_type =~ /\A#{mt}\z/ || ext_type_type =~ /\A#{mt}\z/
- end
- end
-
- # - Both types unknown or text/plain.
- # - Ambiguous magic type with text extension. Plain text file.
- # - Text magic type with ambiguous extension. TeX file missing extension.
- def ambiguous_type?
- (ext_type.to_s.blank? && magic_type.to_s.blank?) ||
- (magic_type.to_s.blank? && ext_type_type == 'text/plain') ||
- (ext_type.to_s.blank? && magic_type_type == 'text/plain')
- end
- end
-end
diff --git a/app/views/admin/application_settings/_ci_cd.html.haml b/app/views/admin/application_settings/_ci_cd.html.haml
index cb9f992bb1d..c7918881bdf 100644
--- a/app/views/admin/application_settings/_ci_cd.html.haml
+++ b/app/views/admin/application_settings/_ci_cd.html.haml
@@ -58,6 +58,6 @@
= f.text_field :default_ci_config_path, class: 'form-control', placeholder: '.gitlab-ci.yml'
%p.form-text.text-muted
= _("The default CI configuration path for new projects.").html_safe
- = link_to icon('question-circle'), help_page_path('user/project/pipelines/settings', anchor: 'custom-ci-configuration-path'), target: '_blank'
+ = link_to icon('question-circle'), help_page_path('ci/pipelines/settings', anchor: 'custom-ci-configuration-path'), target: '_blank'
= f.submit _('Save changes'), class: "btn btn-success"
diff --git a/app/views/admin/application_settings/_signin.html.haml b/app/views/admin/application_settings/_signin.html.haml
index 0e45301b598..007cd343339 100644
--- a/app/views/admin/application_settings/_signin.html.haml
+++ b/app/views/admin/application_settings/_signin.html.haml
@@ -16,7 +16,7 @@
Password authentication enabled for Git over HTTP(S)
.form-text.text-muted
When disabled, a Personal Access Token
- - if Gitlab::Auth::LDAP::Config.enabled?
+ - if Gitlab::Auth::Ldap::Config.enabled?
or LDAP password
must be used to authenticate.
- if omniauth_enabled? && button_based_providers.any?
diff --git a/app/views/admin/application_settings/_signup.html.haml b/app/views/admin/application_settings/_signup.html.haml
index c29e52abaf6..dc6d68e54ec 100644
--- a/app/views/admin/application_settings/_signup.html.haml
+++ b/app/views/admin/application_settings/_signup.html.haml
@@ -49,6 +49,20 @@
= f.label :domain_blacklist, 'Blacklisted domains for sign-ups', class: 'label-bold'
= f.text_area :domain_blacklist_raw, placeholder: 'domain.com', class: 'form-control', rows: 8
.form-text.text-muted Users with e-mail addresses that match these domain(s) will NOT be able to sign-up. Wildcards allowed. Use separate lines for multiple entries. Ex: domain.com, *.domain.com
+ - if Feature.enabled?(:email_restrictions)
+ .form-group
+ = f.label :email_restrictions_enabled, _('Email restrictions'), class: 'label-bold'
+ .form-check
+ = f.check_box :email_restrictions_enabled, class: 'form-check-input'
+ = f.label :email_restrictions_enabled, class: 'form-check-label' do
+ = _('Enable email restrictions for sign ups')
+ .form-group
+ = f.label :email_restrictions, _('Email restrictions for sign-ups'), class: 'label-bold'
+ = f.text_area :email_restrictions, class: 'form-control', rows: 4
+ .form-text.text-muted
+ - supported_syntax_link_url = 'https://github.com/google/re2/wiki/Syntax'
+ - supported_syntax_link_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: supported_syntax_link_url }
+ = _('Restricts sign-ups for email addresses that match the given regex. See the %{supported_syntax_link_start}supported syntax%{supported_syntax_link_end} for more information.').html_safe % { supported_syntax_link_start: supported_syntax_link_start, supported_syntax_link_end: '</a>'.html_safe }
.form-group
= f.label :after_sign_up_text, class: 'label-bold'
diff --git a/app/views/admin/application_settings/_visibility_and_access.html.haml b/app/views/admin/application_settings/_visibility_and_access.html.haml
index ae90ffd9efc..a4acbe6c885 100644
--- a/app/views/admin/application_settings/_visibility_and_access.html.haml
+++ b/app/views/admin/application_settings/_visibility_and_access.html.haml
@@ -2,9 +2,8 @@
= form_errors(@application_setting)
%fieldset
- .form-group
- = f.label :default_branch_protection, class: 'label-bold'
- = f.select :default_branch_protection, options_for_select(Gitlab::Access.protection_options, @application_setting.default_branch_protection), {}, class: 'form-control'
+ = render 'shared/default_branch_protection', f: f, selected_level: @application_setting.default_branch_protection
+
.form-group
= f.label s_('ProjectCreationLevel|Default project creation protection'), class: 'label-bold'
= f.select :default_project_creation, options_for_select(Gitlab::Access.project_creation_options, @application_setting.default_project_creation), {}, class: 'form-control'
diff --git a/app/views/admin/application_settings/ci_cd.html.haml b/app/views/admin/application_settings/ci_cd.html.haml
index a2aa1687f80..81f06926188 100644
--- a/app/views/admin/application_settings/ci_cd.html.haml
+++ b/app/views/admin/application_settings/ci_cd.html.haml
@@ -15,6 +15,8 @@
= render_if_exists 'admin/application_settings/required_instance_ci_setting', expanded: expanded_by_default?
+= render_if_exists 'admin/application_settings/package_registry', expanded: expanded_by_default?
+
- if Gitlab.config.registry.enabled
%section.settings.as-registry.no-animate#js-registry-settings{ class: ('expanded' if expanded_by_default?) }
.settings-header
diff --git a/app/views/admin/broadcast_messages/_form.html.haml b/app/views/admin/broadcast_messages/_form.html.haml
index 9577a2a79df..8b86a024a6e 100644
--- a/app/views/admin/broadcast_messages/_form.html.haml
+++ b/app/views/admin/broadcast_messages/_form.html.haml
@@ -46,6 +46,13 @@
= render_suggested_colors
+ .form-group.row.js-broadcast-message-dismissable-form-group{ class: ('hidden' unless @broadcast_message.banner? ) }
+ .col-sm-2.col-form-label.pt-0
+ = f.label :starts_at, _("Dismissable")
+ .col-sm-10
+ = f.check_box :dismissable
+ = f.label :dismissable do
+ = _('Allow users to dismiss the broadcast message')
.form-group.row.js-toggle-colors-container.toggle-colors.hide
.col-sm-2.col-form-label
= f.label :font, "Font Color"
diff --git a/app/views/admin/dashboard/index.html.haml b/app/views/admin/dashboard/index.html.haml
index 0ec81d0eb04..68f761c75d8 100644
--- a/app/views/admin/dashboard/index.html.haml
+++ b/app/views/admin/dashboard/index.html.haml
@@ -3,6 +3,12 @@
- if show_license_breakdown?
= render_if_exists 'admin/licenses/breakdown', license: @license
+- if @notices
+ - @notices.each do |notice|
+ .js-vue-alert{ 'v-cloak': true, data: { variant: notice[:type],
+ dismissible: true.to_s } }
+ = notice[:message].html_safe
+
.admin-dashboard.prepend-top-default
.row
.col-sm-4
@@ -93,7 +99,7 @@
%p
GitLab Shell
%span.float-right
- = Gitlab::Shell.new.version
+ = Gitlab::Shell.version
%p
GitLab Workhorse
%span.float-right
diff --git a/app/views/admin/integrations/_form.html.haml b/app/views/admin/integrations/_form.html.haml
new file mode 100644
index 00000000000..aa865c3b052
--- /dev/null
+++ b/app/views/admin/integrations/_form.html.haml
@@ -0,0 +1,12 @@
+%h3.page-title
+ = @service.title
+
+%p= @service.description
+
+= form_for @service, as: :service, url: admin_application_settings_integration_path, method: :put, html: { class: 'gl-show-field-errors fieldset-form integration-settings-form js-integration-settings-form', data: { 'can-test' => @service.can_test?, 'test-url' => test_admin_application_settings_integration_path(@service) } } do |form|
+ = render 'shared/service_settings', form: form, service: @service
+
+ - if @service.editable?
+ .footer-block.row-content-block
+ = service_save_button(@service)
+ = link_to _('Cancel'), admin_application_settings_integration_path, class: 'btn btn-cancel'
diff --git a/app/views/admin/integrations/edit.html.haml b/app/views/admin/integrations/edit.html.haml
new file mode 100644
index 00000000000..dea0f524f03
--- /dev/null
+++ b/app/views/admin/integrations/edit.html.haml
@@ -0,0 +1,5 @@
+- add_to_breadcrumbs _('Integrations'), admin_application_settings_integration_path
+- breadcrumb_title @service.title
+- page_title @service.title, _('Integrations')
+
+= render 'form'
diff --git a/app/views/admin/services/_form.html.haml b/app/views/admin/services/_form.html.haml
index 495ee6a04ea..d18e91c0b14 100644
--- a/app/views/admin/services/_form.html.haml
+++ b/app/views/admin/services/_form.html.haml
@@ -4,7 +4,7 @@
%p #{@service.description} template.
= form_for :service, url: admin_application_settings_service_path, method: :put, html: { class: 'fieldset-form' } do |form|
- = render 'shared/service_settings', form: form, subject: @service
+ = render 'shared/service_settings', form: form, service: @service
.footer-block.row-content-block
= form.submit 'Save', class: 'btn btn-success'
diff --git a/app/views/admin/sessions/_new_base.html.haml b/app/views/admin/sessions/_new_base.html.haml
index 50fa48855c0..a8d678d2b61 100644
--- a/app/views/admin/sessions/_new_base.html.haml
+++ b/app/views/admin/sessions/_new_base.html.haml
@@ -1,7 +1,7 @@
= form_tag(admin_session_path, method: :post, html: { class: 'new_user gl-show-field-errors', 'aria-live': 'assertive'}) do
.form-group
- = label_tag :password, _('Password'), class: 'label-bold'
- = password_field_tag :password, nil, class: 'form-control', required: true, title: _('This field is required.'), data: { qa_selector: 'password_field' }
+ = label_tag :user_password, _('Password'), class: 'label-bold'
+ = password_field_tag 'user[password]', nil, class: 'form-control', required: true, title: _('This field is required.'), data: { qa_selector: 'password_field' }
.submit-container.move-submit-down
= submit_tag _('Enter Admin Mode'), class: 'btn btn-success', data: { qa_selector: 'enter_admin_mode_button' }
diff --git a/app/views/admin/sessions/_tabs_normal.html.haml b/app/views/admin/sessions/_tabs_normal.html.haml
index 20830051d31..2e279013720 100644
--- a/app/views/admin/sessions/_tabs_normal.html.haml
+++ b/app/views/admin/sessions/_tabs_normal.html.haml
@@ -1,3 +1,3 @@
%ul.nav-links.new-session-tabs.nav-tabs.nav{ role: 'tablist' }
%li.nav-item{ role: 'presentation' }
- %a.nav-link.active{ href: '#login-pane', data: { toggle: 'tab', qa_selector: 'sign_in_tab' }, role: 'tab' }= _('Enter Admin Mode')
+ %a.nav-link.active{ href: '#login-pane', data: { toggle: 'tab', qa_selector: 'sign_in_tab' }, role: 'tab' }= tab_title
diff --git a/app/views/admin/sessions/_two_factor_otp.html.haml b/app/views/admin/sessions/_two_factor_otp.html.haml
new file mode 100644
index 00000000000..9d4acbf1b99
--- /dev/null
+++ b/app/views/admin/sessions/_two_factor_otp.html.haml
@@ -0,0 +1,9 @@
+= form_tag(admin_session_path, { method: :post, class: "edit_user gl-show-field-errors js-2fa-form #{'hidden' if current_user.two_factor_u2f_enabled?}" }) do
+ .form-group
+ = label_tag :user_otp_attempt, _('Two-Factor Authentication code')
+ = text_field_tag 'user[otp_attempt]', nil, class: 'form-control', required: true, autofocus: true, autocomplete: 'off', title: _('This field is required.')
+ %p.form-text.text-muted.hint
+ = _("Enter the code from the two-factor app on your mobile device. If you've lost your device, you may enter one of your recovery codes.")
+
+ .submit-container.move-submit-down
+ = submit_tag _('Verify code'), class: 'btn btn-success'
diff --git a/app/views/admin/sessions/_two_factor_u2f.html.haml b/app/views/admin/sessions/_two_factor_u2f.html.haml
new file mode 100644
index 00000000000..09b91d76295
--- /dev/null
+++ b/app/views/admin/sessions/_two_factor_u2f.html.haml
@@ -0,0 +1,17 @@
+#js-authenticate-u2f
+%a.btn.btn-block.btn-info#js-login-2fa-device{ href: '#' }= _("Sign in via 2FA code")
+
+%script#js-authenticate-u2f-in-progress{ type: "text/template" }
+ %p= _("Trying to communicate with your device. Plug it in (if you haven't already) and press the button on the device now.")
+
+-# haml-lint:disable NoPlainNodes
+%script#js-authenticate-u2f-error{ type: "text/template" }
+ %div
+ %p <%= error_message %> (#{_("error code:")} <%= error_code %>)
+ %a.btn.btn-block.btn-warning#js-u2f-try-again= _("Try again?")
+
+%script#js-authenticate-u2f-authenticated{ type: "text/template" }
+ %div
+ %p= _("We heard back from your U2F device. You have been authenticated.")
+ = form_tag(admin_session_path, method: :post, id: 'js-login-u2f-form') do |f|
+ = hidden_field_tag 'user[device_response]', nil, class: 'form-control', required: true, id: "js-device-response"
diff --git a/app/views/admin/sessions/new.html.haml b/app/views/admin/sessions/new.html.haml
index a1d440f2cfd..0a7f20b861e 100644
--- a/app/views/admin/sessions/new.html.haml
+++ b/app/views/admin/sessions/new.html.haml
@@ -2,10 +2,10 @@
- page_title _('Enter Admin Mode')
.row.justify-content-center
- .col-6.new-session-forms-container
+ .col-md-5.new-session-forms-container
.login-page
#signin-container
- = render 'admin/sessions/tabs_normal'
+ = render 'admin/sessions/tabs_normal', tab_title: _('Enter Admin Mode')
.tab-content
- if !current_user.require_password_creation_for_web?
.login-box.tab-pane.active{ id: 'login-pane', role: 'tabpanel' }
@@ -14,7 +14,7 @@
- if omniauth_enabled? && button_based_providers_enabled?
.clearfix
- = render 'devise/shared/omniauth_box'
+ = render 'devise/shared/omniauth_box', hide_remember_me: true
-# Show a message if none of the mechanisms above are enabled
- if current_user.require_password_creation_for_web? && !omniauth_enabled?
diff --git a/app/views/admin/sessions/two_factor.html.haml b/app/views/admin/sessions/two_factor.html.haml
new file mode 100644
index 00000000000..3a0cbe3facb
--- /dev/null
+++ b/app/views/admin/sessions/two_factor.html.haml
@@ -0,0 +1,15 @@
+- @hide_breadcrumbs = true
+- page_title _('Enter 2FA for Admin Mode')
+
+.row.justify-content-center
+ .col-md-5.new-session-forms-container
+ .login-page
+ #signin-container
+ = render 'admin/sessions/tabs_normal', tab_title: _('Enter Admin Mode')
+ .tab-content
+ .login-box.tab-pane.active{ id: 'login-pane', role: 'tabpanel' }
+ .login-body
+ - if current_user.two_factor_otp_enabled?
+ = render 'admin/sessions/two_factor_otp'
+ - if current_user.two_factor_u2f_enabled?
+ = render 'admin/sessions/two_factor_u2f'
diff --git a/app/views/award_emoji/_awards_block.html.haml b/app/views/award_emoji/_awards_block.html.haml
index 793ddef2c58..a063fe54c99 100644
--- a/app/views/award_emoji/_awards_block.html.haml
+++ b/app/views/award_emoji/_awards_block.html.haml
@@ -16,5 +16,4 @@
%span{ class: "award-control-icon award-control-icon-neutral" }= sprite_icon('slight-smile')
%span{ class: "award-control-icon award-control-icon-positive" }= sprite_icon('smiley')
%span{ class: "award-control-icon award-control-icon-super-positive" }= sprite_icon('smile')
- = icon('spinner spin', class: "award-control-icon award-control-icon-loading")
= yield
diff --git a/app/views/ci/variables/_index.html.haml b/app/views/ci/variables/_index.html.haml
index 7ae5c48b93c..aadb2c62d83 100644
--- a/app/views/ci/variables/_index.html.haml
+++ b/app/views/ci/variables/_index.html.haml
@@ -5,27 +5,34 @@
- link_start = '<a href="%{url}">'.html_safe % { url: help_page_path('ci/variables/README', anchor: 'protected-variables') }
= s_('Environment variables are configured by your administrator to be %{link_start}protected%{link_end} by default').html_safe % { link_start: link_start, link_end: '</a>'.html_safe }
-.row
- .col-lg-12.js-ci-variable-list-section{ data: { save_endpoint: save_endpoint, maskable_regex: ci_variable_maskable_regex } }
- .hide.alert.alert-danger.js-ci-variable-error-box
+- if Feature.enabled?(:new_variables_ui, @project || @group, default_enabled: true)
+ - is_group = !@group.nil?
- %ul.ci-variable-list
- = render 'ci/variables/variable_header'
- - @variables.each.each do |variable|
- = render 'ci/variables/variable_row', form_field: 'variables', variable: variable
- = render 'ci/variables/variable_row', form_field: 'variables'
- .prepend-top-20
- %button.btn.btn-success.js-ci-variables-save-button{ type: 'button' }
- %span.hide.js-ci-variables-save-loading-icon
- = icon('spinner spin')
- = _('Save variables')
- %button.btn.btn-info.btn-inverted.prepend-left-10.js-secret-value-reveal-button{ type: 'button', data: { secret_reveal_status: "#{@variables.size == 0}" } }
- - if @variables.size == 0
- = n_('Hide value', 'Hide values', @variables.size)
- - else
- = n_('Reveal value', 'Reveal values', @variables.size)
- - if !@group && @project.group
- .settings-header.border-top.prepend-top-20
- = render 'ci/group_variables/header'
- .settings-content.pr-0
- = render 'ci/group_variables/index'
+ #js-ci-project-variables{ data: { endpoint: save_endpoint, project_id: @project&.id || '', group: is_group.to_s, maskable_regex: ci_variable_maskable_regex} }
+
+- else
+ .row
+ .col-lg-12.js-ci-variable-list-section{ data: { save_endpoint: save_endpoint, maskable_regex: ci_variable_maskable_regex } }
+ .hide.alert.alert-danger.js-ci-variable-error-box
+
+ %ul.ci-variable-list
+ = render 'ci/variables/variable_header'
+ - @variables.each do |variable|
+ = render 'ci/variables/variable_row', form_field: 'variables', variable: variable
+ = render 'ci/variables/variable_row', form_field: 'variables'
+ .prepend-top-20
+ %button.btn.btn-success.js-ci-variables-save-button{ type: 'button' }
+ %span.hide.js-ci-variables-save-loading-icon
+ = icon('spinner spin')
+ = _('Save variables')
+ %button.btn.btn-info.btn-inverted.prepend-left-10.js-secret-value-reveal-button{ type: 'button', data: { secret_reveal_status: "#{@variables.size == 0}" } }
+ - if @variables.size == 0
+ = n_('Hide value', 'Hide values', @variables.size)
+ - else
+ = n_('Reveal value', 'Reveal values', @variables.size)
+
+- if !@group && @project.group
+ .settings-header.border-top.prepend-top-20
+ = render 'ci/group_variables/header'
+ .settings-content.pr-0
+ = render 'ci/group_variables/index'
diff --git a/app/views/clusters/clusters/_advanced_settings_container.html.haml b/app/views/clusters/clusters/_advanced_settings_container.html.haml
new file mode 100644
index 00000000000..8aae9bfa462
--- /dev/null
+++ b/app/views/clusters/clusters/_advanced_settings_container.html.haml
@@ -0,0 +1,6 @@
+- if can_admin_cluster?(current_user, @cluster)
+ .settings.expanded.border-0.m-0
+ %p
+ = s_('ClusterIntegration|Advanced options on this Kubernetes cluster’s integration')
+ .settings-content#advanced-settings-section
+ = render 'clusters/clusters/advanced_settings'
diff --git a/app/views/clusters/clusters/_advanced_settings_tab.html.haml b/app/views/clusters/clusters/_advanced_settings_tab.html.haml
new file mode 100644
index 00000000000..b491a64e43d
--- /dev/null
+++ b/app/views/clusters/clusters/_advanced_settings_tab.html.haml
@@ -0,0 +1,6 @@
+- active = params[:tab] == 'settings'
+
+- if can_admin_cluster?(current_user, @cluster)
+ %li.nav-item{ role: 'presentation' }
+ %a#cluster-settings-tab.nav-link{ class: active_when(active), href: clusterable.cluster_path(@cluster.id, params: {tab: 'settings'}) }
+ %span= _('Advanced Settings')
diff --git a/app/views/clusters/clusters/_applications.html.haml b/app/views/clusters/clusters/_applications.html.haml
new file mode 100644
index 00000000000..f83a414a0aa
--- /dev/null
+++ b/app/views/clusters/clusters/_applications.html.haml
@@ -0,0 +1 @@
+.cluster-applications-table#js-cluster-applications
diff --git a/app/views/clusters/clusters/_applications_tab.html.haml b/app/views/clusters/clusters/_applications_tab.html.haml
new file mode 100644
index 00000000000..e1455b0f60a
--- /dev/null
+++ b/app/views/clusters/clusters/_applications_tab.html.haml
@@ -0,0 +1,5 @@
+- active = params[:tab] == 'apps'
+
+%li.nav-item{ role: 'presentation' }
+ %a#cluster-apps-tab.nav-link.qa-applications{ class: active_when(active), href: clusterable.cluster_path(@cluster.id, params: {tab: 'apps'}) }
+ %span= _('Applications')
diff --git a/app/views/clusters/clusters/_configure.html.haml b/app/views/clusters/clusters/_configure.html.haml
deleted file mode 100644
index 4ce00c67866..00000000000
--- a/app/views/clusters/clusters/_configure.html.haml
+++ /dev/null
@@ -1,26 +0,0 @@
-%section#cluster-integration
- - unless @cluster.status_name.in? %i/scheduled creating/
- = render 'form'
-
-- unless @cluster.status_name.in? %i/scheduled creating/
- = render_if_exists 'projects/clusters/prometheus_graphs'
-
- .cluster-applications-table#js-cluster-applications
-
- %section.settings#js-cluster-details{ class: ('expanded' if expanded) }
- .settings-header
- %h4= s_('ClusterIntegration|Kubernetes cluster details')
- %button.btn.js-settings-toggle{ type: 'button' }
- = expanded ? _('Collapse') : _('Expand')
- %p= s_('ClusterIntegration|See and edit the details for your Kubernetes cluster')
- .settings-content
- = render 'clusters/platforms/kubernetes/form', cluster: @cluster, platform: @cluster.platform_kubernetes, update_cluster_url_path: clusterable.cluster_path(@cluster)
-
- %section.settings.no-animate#js-cluster-advanced-settings{ class: ('expanded' if expanded) }
- .settings-header
- %h4= _('Advanced settings')
- %button.btn.js-settings-toggle{ type: 'button' }
- = expanded ? _('Collapse') : _('Expand')
- %p= s_("ClusterIntegration|Advanced options on this Kubernetes cluster's integration")
- .settings-content#advanced-settings-section
- = render 'advanced_settings'
diff --git a/app/views/clusters/clusters/_details.html.haml b/app/views/clusters/clusters/_details.html.haml
new file mode 100644
index 00000000000..fb0a1aaebc4
--- /dev/null
+++ b/app/views/clusters/clusters/_details.html.haml
@@ -0,0 +1,11 @@
+%section#cluster-integration
+ = render 'gitlab_integration_form'
+
+ %section.settings.no-animate{ class: ('expanded' if expanded) }
+ .settings-header
+ %h4= s_('ClusterIntegration|Provider details')
+ %button.btn.js-settings-toggle{ type: 'button' }
+ = expanded ? _('Collapse') : _('Expand')
+ %p= s_('ClusterIntegration|See and edit the details for your Kubernetes cluster')
+ .settings-content
+ = render 'provider_details_form', cluster: @cluster, platform: @cluster.platform_kubernetes, update_cluster_url_path: clusterable.cluster_path(@cluster)
diff --git a/app/views/clusters/clusters/_details_tab.html.haml b/app/views/clusters/clusters/_details_tab.html.haml
new file mode 100644
index 00000000000..564c5103d34
--- /dev/null
+++ b/app/views/clusters/clusters/_details_tab.html.haml
@@ -0,0 +1,5 @@
+- active = params[:tab] == 'details' || !params[:tab].present?
+
+%li.nav-item{ role: 'presentation' }
+ %a#cluster-details-tab.nav-link.qa-details{ class: active_when(active), href: clusterable.cluster_path(@cluster.id, params: {tab: 'details'}) }
+ %span= _('Details')
diff --git a/app/views/clusters/clusters/_form.html.haml b/app/views/clusters/clusters/_form.html.haml
deleted file mode 100644
index a85b005b2b4..00000000000
--- a/app/views/clusters/clusters/_form.html.haml
+++ /dev/null
@@ -1,40 +0,0 @@
-= form_for @cluster, url: clusterable.cluster_path(@cluster), as: :cluster, html: { class: 'cluster_integration_form' } do |field|
- = form_errors(@cluster)
- .form-group
- %h5= s_('ClusterIntegration|Integration status')
- %label.append-bottom-0.js-cluster-enable-toggle-area
- = render "shared/buttons/project_feature_toggle", is_checked: @cluster.enabled?, label: s_("ClusterIntegration|Toggle Kubernetes cluster"), disabled: !can?(current_user, :update_cluster, @cluster) do
- = field.hidden_field :enabled, { class: 'js-project-feature-toggle-input'}
- .form-text.text-muted= s_('ClusterIntegration|Enable or disable GitLab\'s connection to your Kubernetes cluster.')
-
- .form-group
- %h5= s_('ClusterIntegration|Environment scope')
- - if has_multiple_clusters?
- = field.text_field :environment_scope, class: 'col-md-6 form-control js-select-on-focus', placeholder: s_('ClusterIntegration|Environment scope')
- .form-text.text-muted= s_("ClusterIntegration|Choose which of your environments will use this cluster.")
- - else
- = text_field_tag :environment_scope, '*', class: 'col-md-6 form-control disabled', placeholder: s_('ClusterIntegration|Environment scope'), disabled: true
- - environment_scope_url = help_page_path('user/project/clusters/index', anchor: 'base-domain')
- - environment_scope_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: environment_scope_url }
- .form-text.text-muted
- %code *
- = s_("ClusterIntegration| is the default environment scope for this cluster. This means that all jobs, regardless of their environment, will use this cluster. %{environment_scope_start}More information%{environment_scope_end}").html_safe % { environment_scope_start: environment_scope_start, environment_scope_end: '</a>'.html_safe }
-
- .form-group
- %h5= s_('ClusterIntegration|Base domain')
- = field.text_field :base_domain, class: 'col-md-6 form-control js-select-on-focus qa-base-domain'
- .form-text.text-muted
- - auto_devops_url = help_page_path('topics/autodevops/index')
- - auto_devops_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: auto_devops_url }
- = s_('ClusterIntegration|Specifying a domain will allow you to use Auto Review Apps and Auto Deploy stages for %{auto_devops_start}Auto DevOps%{auto_devops_end}. The domain should have a wildcard DNS configured matching the domain.').html_safe % { auto_devops_start: auto_devops_start, auto_devops_end: '</a>'.html_safe }
- %span{ :class => ["js-ingress-domain-help-text", ("hide" unless @cluster.application_ingress_external_ip.present?)] }
- = s_('ClusterIntegration|Alternatively')
- %code{ :class => "js-ingress-domain-snippet" } #{@cluster.application_ingress_external_ip}.nip.io
- = s_('ClusterIntegration| can be used instead of a custom domain.')
- - custom_domain_url = help_page_path('user/clusters/applications.md', anchor: 'pointing-your-dns-at-the-external-endpoint')
- - custom_domain_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: custom_domain_url }
- = s_('ClusterIntegration| %{custom_domain_start}More information%{custom_domain_end}.').html_safe % { custom_domain_start: custom_domain_start, custom_domain_end: '</a>'.html_safe }
-
- - if can?(current_user, :update_cluster, @cluster)
- .form-group
- = field.submit _('Save changes'), class: 'btn btn-success qa-save-domain'
diff --git a/app/views/clusters/clusters/_gitlab_integration_form.html.haml b/app/views/clusters/clusters/_gitlab_integration_form.html.haml
new file mode 100644
index 00000000000..2489f78b403
--- /dev/null
+++ b/app/views/clusters/clusters/_gitlab_integration_form.html.haml
@@ -0,0 +1,43 @@
+= form_for @cluster, url: clusterable.cluster_path(@cluster), as: :cluster, html: { class: 'js-cluster-integration-form' } do |field|
+ = form_errors(@cluster)
+ .form-group
+ .d-flex.align-items-center
+ %h4.pr-2.m-0
+ = s_('ClusterIntegration|GitLab Integration')
+ %label.append-bottom-0.js-cluster-enable-toggle-area{ title: s_('ClusterIntegration|Enable or disable GitLab\'s connection to your Kubernetes cluster.'), data: { toggle: 'tooltip', container: 'body' } }
+ = render "shared/buttons/project_feature_toggle", is_checked: @cluster.enabled?, label: s_("ClusterIntegration|Toggle Kubernetes cluster"), disabled: !can?(current_user, :update_cluster, @cluster), data: { qa_selector: 'integration_status_toggle' } do
+ = field.hidden_field :enabled, { class: 'js-project-feature-toggle-input'}
+
+ .form-group
+ %h5= s_('ClusterIntegration|Environment scope')
+ - if has_multiple_clusters?
+ = field.text_field :environment_scope, class: 'col-md-6 form-control js-select-on-focus', placeholder: s_('ClusterIntegration|Environment scope')
+ .form-text.text-muted= s_("ClusterIntegration|Choose which of your environments will use this cluster.")
+ - else
+ = text_field_tag :environment_scope, '*', class: 'col-md-6 form-control disabled', placeholder: s_('ClusterIntegration|Environment scope'), disabled: true
+ - environment_scope_url = help_page_path('user/project/clusters/index', anchor: 'base-domain')
+ - environment_scope_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: environment_scope_url }
+ .form-text.text-muted
+ %code
+ = _('*')
+ = s_("ClusterIntegration| is the default environment scope for this cluster. This means that all jobs, regardless of their environment, will use this cluster. %{environment_scope_start}More information%{environment_scope_end}").html_safe % { environment_scope_start: environment_scope_start, environment_scope_end: '</a>'.html_safe }
+
+ .form-group
+ %h5= s_('ClusterIntegration|Base domain')
+ = field.text_field :base_domain, class: 'col-md-6 form-control js-select-on-focus', data: { qa_selector: 'base_domain_field' }
+ .form-text.text-muted
+ - auto_devops_url = help_page_path('topics/autodevops/index')
+ - auto_devops_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: auto_devops_url }
+ = s_('ClusterIntegration|Specifying a domain will allow you to use Auto Review Apps and Auto Deploy stages for %{auto_devops_start}Auto DevOps%{auto_devops_end}. The domain should have a wildcard DNS configured matching the domain.').html_safe % { auto_devops_start: auto_devops_start, auto_devops_end: '</a>'.html_safe }
+ %span{ :class => ["js-ingress-domain-help-text", ("hide" unless @cluster.application_ingress_external_ip.present?)] }
+ = s_('ClusterIntegration|Alternatively')
+ %code{ :class => "js-ingress-domain-snippet" }
+ = s_('ClusterIntegration|%{external_ip}.nip.io').html_safe % { external_ip: @cluster.application_ingress_external_ip }
+ = s_('ClusterIntegration| can be used instead of a custom domain.')
+ - custom_domain_url = help_page_path('user/clusters/applications.md', anchor: 'pointing-your-dns-at-the-external-endpoint')
+ - custom_domain_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: custom_domain_url }
+ = s_('ClusterIntegration| %{custom_domain_start}More information%{custom_domain_end}.').html_safe % { custom_domain_start: custom_domain_start, custom_domain_end: '</a>'.html_safe }
+
+ - if can?(current_user, :update_cluster, @cluster)
+ .form-group
+ = field.submit _('Save changes'), class: 'btn btn-success', data: { qa_selector: 'save_changes_button' }
diff --git a/app/views/clusters/clusters/_namespace.html.haml b/app/views/clusters/clusters/_namespace.html.haml
index 8a86fd90963..9b728e7a89b 100644
--- a/app/views/clusters/clusters/_namespace.html.haml
+++ b/app/views/clusters/clusters/_namespace.html.haml
@@ -1,5 +1,5 @@
- managed_namespace_help_text = s_('ClusterIntegration|Set a prefix for your namespaces. If not set, defaults to your project path. If modified, existing environments will use their current namespaces until the cluster cache is cleared.')
-- non_managed_namespace_help_text = s_('ClusterIntegration|The namespace associated with your project. This will be used for deploy boards, pod logs, and Web terminals.')
+- non_managed_namespace_help_text = s_('ClusterIntegration|The namespace associated with your project. This will be used for deploy boards, logs, and Web terminals.')
- managed_namespace_help_link = link_to _('More information'), help_page_path('user/project/clusters/index.md',
anchor: 'gitlab-managed-clusters'), target: '_blank'
diff --git a/app/views/clusters/clusters/_provider_details_form.html.haml b/app/views/clusters/clusters/_provider_details_form.html.haml
new file mode 100644
index 00000000000..dd7d6182e3c
--- /dev/null
+++ b/app/views/clusters/clusters/_provider_details_form.html.haml
@@ -0,0 +1,58 @@
+= bootstrap_form_for cluster, url: update_cluster_url_path, html: { class: 'js-provider-details gl-show-field-errors' },
+ as: :cluster do |field|
+ - copy_name_btn = clipboard_button(text: cluster.name, title: s_('ClusterIntegration|Copy Kubernetes cluster name'),
+ class: 'input-group-text btn-default') if cluster.read_only_kubernetes_platform_fields?
+ = field.text_field :name, class: 'js-select-on-focus cluster-name', required: true,
+ title: s_('ClusterIntegration|Cluster name is required.'),
+ readonly: cluster.read_only_kubernetes_platform_fields?,
+ label: s_('ClusterIntegration|Kubernetes cluster name'), label_class: 'label-bold',
+ input_group_class: 'gl-field-error-anchor', append: copy_name_btn
+
+ = field.fields_for :platform_kubernetes, platform do |platform_field|
+ - copy_api_url = clipboard_button(text: platform.api_url, title: s_('ClusterIntegration|Copy API URL'),
+ class: 'input-group-text btn-default') if cluster.read_only_kubernetes_platform_fields?
+ = platform_field.text_field :api_url, class: 'js-select-on-focus', required: true,
+ title: s_('ClusterIntegration|API URL should be a valid http/https url.'),
+ readonly: cluster.read_only_kubernetes_platform_fields?,
+ label: s_('ClusterIntegration|API URL'), label_class: 'label-bold',
+ input_group_class: 'gl-field-error-anchor', append: copy_api_url
+
+ - copy_ca_cert_btn = clipboard_button(text: platform.ca_cert, title: s_('ClusterIntegration|Copy CA Certificate'),
+ class: 'input-group-text btn-default') if cluster.read_only_kubernetes_platform_fields?
+ = platform_field.text_area :ca_cert, class: 'js-select-on-focus', rows: '10',
+ readonly: cluster.read_only_kubernetes_platform_fields?,
+ placeholder: s_('ClusterIntegration|Certificate Authority bundle (PEM format)'),
+ label: s_('ClusterIntegration|CA Certificate'), label_class: 'label-bold',
+ input_group_class: 'gl-field-error-anchor', append: copy_ca_cert_btn
+
+ - show_token_btn = (platform_field.button s_('ClusterIntegration|Show'),
+ type: 'button', class: 'js-show-cluster-token btn btn-default')
+ - copy_token_btn = clipboard_button(text: platform.token, title: s_('ClusterIntegration|Copy Service Token'),
+ class: 'input-group-text btn-default') if cluster.read_only_kubernetes_platform_fields?
+
+ = platform_field.text_field :token, type: 'password', class: 'js-select-on-focus js-cluster-token',
+ required: true, title: s_('ClusterIntegration|Service token is required.'),
+ readonly: cluster.read_only_kubernetes_platform_fields?,
+ label: s_('ClusterIntegration|Service Token'), label_class: 'label-bold',
+ input_group_class: 'gl-field-error-anchor', append: show_token_btn + copy_token_btn
+
+ = platform_field.form_group :authorization_type do
+ = platform_field.check_box :authorization_type, { disabled: true, label: s_('ClusterIntegration|RBAC-enabled cluster'),
+ label_class: 'label-bold', inline: true }, 'rbac', 'abac'
+ .form-text.text-muted
+ = s_('ClusterIntegration|Enable this setting if using role-based access control (RBAC).')
+ = s_('ClusterIntegration|This option will allow you to install applications on RBAC clusters.')
+
+ .form-group
+ = field.check_box :managed, { label: s_('ClusterIntegration|GitLab-managed cluster'),
+ class: 'js-gl-managed',
+ label_class: 'label-bold' }
+ .form-text.text-muted
+ = s_('ClusterIntegration|Allow GitLab to manage namespace and service accounts for this cluster.')
+ = link_to _('More information'), help_page_path('user/project/clusters/index.md', anchor: 'gitlab-managed-clusters'), target: '_blank'
+
+ - if cluster.allow_user_defined_namespace?
+ = render('clusters/clusters/namespace', platform_field: platform_field)
+
+ .form-group
+ = field.submit s_('ClusterIntegration|Save changes'), class: 'btn btn-success'
diff --git a/app/views/clusters/clusters/index.html.haml b/app/views/clusters/clusters/index.html.haml
index 049010cadf4..28002dbff92 100644
--- a/app/views/clusters/clusters/index.html.haml
+++ b/app/views/clusters/clusters/index.html.haml
@@ -18,13 +18,16 @@
%strong
= link_to _('More information'), help_page_path('user/group/clusters/index', anchor: 'cluster-precedence')
- .clusters-table.js-clusters-list
- .gl-responsive-table-row.table-row-header{ role: "row" }
- .table-section.section-60{ role: "rowheader" }
- = s_("ClusterIntegration|Kubernetes cluster")
- .table-section.section-30{ role: "rowheader" }
- = s_("ClusterIntegration|Environment scope")
- .table-section.section-10{ role: "rowheader" }
- - @clusters.each do |cluster|
- = render "cluster", cluster: cluster.present(current_user: current_user)
- = paginate @clusters, theme: "gitlab"
+ - if Feature.enabled?(:clusters_list_redesign)
+ #js-clusters-list-app{ data: { endpoint: 'todo/add/endpoint' } }
+ - else
+ .clusters-table.js-clusters-list
+ .gl-responsive-table-row.table-row-header{ role: "row" }
+ .table-section.section-60{ role: "rowheader" }
+ = s_("ClusterIntegration|Kubernetes cluster")
+ .table-section.section-30{ role: "rowheader" }
+ = s_("ClusterIntegration|Environment scope")
+ .table-section.section-10{ role: "rowheader" }
+ - @clusters.each do |cluster|
+ = render "cluster", cluster: cluster.present(current_user: current_user)
+ = paginate @clusters, theme: "gitlab"
diff --git a/app/views/clusters/clusters/show.html.haml b/app/views/clusters/clusters/show.html.haml
index e1f011a3225..4aa5b6af636 100644
--- a/app/views/clusters/clusters/show.html.haml
+++ b/app/views/clusters/clusters/show.html.haml
@@ -5,8 +5,6 @@
- manage_prometheus_path = edit_project_service_path(@cluster.project, 'prometheus') if @project
- cluster_environments_path = clusterable.environments_cluster_path(@cluster)
-- expanded = expanded_by_default?
-
- status_path = clusterable.cluster_status_cluster_path(@cluster.id, format: :json) if can?(current_user, :admin_cluster, @cluster)
.edit-cluster-form.js-edit-cluster-form{ data: { status_path: status_path,
install_helm_path: clusterable.install_applications_cluster_path(@cluster, :helm),
@@ -44,7 +42,19 @@
%h4= @cluster.name
= render 'banner'
- - if cluster_environments_path.present?
- = render_if_exists 'clusters/clusters/cluster_environments', expanded: expanded
- - else
- = render 'configure', expanded: expanded
+ - if cluster_created?(@cluster)
+ .js-toggle-container
+ %ul.nav-links.mobile-separator.nav.nav-tabs{ role: 'tablist' }
+ = render 'details_tab'
+ = render_if_exists 'clusters/clusters/environments_tab'
+ = render_if_exists 'clusters/clusters/health_tab'
+ = render 'applications_tab'
+ = render 'advanced_settings_tab'
+
+ .tab-content.py-3
+ .tab-pane.active{ role: 'tabpanel' }
+ = render_cluster_info_tab_content(params[:tab], expanded_by_default?)
+
+
+
+
diff --git a/app/views/clusters/clusters/user/_form.html.haml b/app/views/clusters/clusters/user/_form.html.haml
index 39b6d74d9f9..ce226d29113 100644
--- a/app/views/clusters/clusters/user/_form.html.haml
+++ b/app/views/clusters/clusters/user/_form.html.haml
@@ -54,4 +54,4 @@
= render('clusters/clusters/namespace', platform_field: platform_kubernetes_field)
.form-group
- = field.submit s_('ClusterIntegration|Add Kubernetes cluster'), class: 'btn btn-success'
+ = field.submit s_('ClusterIntegration|Add Kubernetes cluster'), class: 'btn btn-success', data: { qa_selector: 'add_kubernetes_cluster_button' }
diff --git a/app/views/clusters/platforms/kubernetes/_form.html.haml b/app/views/clusters/platforms/kubernetes/_form.html.haml
deleted file mode 100644
index 41701b5614a..00000000000
--- a/app/views/clusters/platforms/kubernetes/_form.html.haml
+++ /dev/null
@@ -1,58 +0,0 @@
-= bootstrap_form_for cluster, url: update_cluster_url_path, html: { class: 'gl-show-field-errors' },
- as: :cluster do |field|
- - copy_name_btn = clipboard_button(text: cluster.name, title: s_('ClusterIntegration|Copy Kubernetes cluster name'),
- class: 'input-group-text btn-default') if cluster.read_only_kubernetes_platform_fields?
- = field.text_field :name, class: 'js-select-on-focus cluster-name', required: true,
- title: s_('ClusterIntegration|Cluster name is required.'),
- readonly: cluster.read_only_kubernetes_platform_fields?,
- label: s_('ClusterIntegration|Kubernetes cluster name'), label_class: 'label-bold',
- input_group_class: 'gl-field-error-anchor', append: copy_name_btn
-
- = field.fields_for :platform_kubernetes, platform do |platform_field|
- - copy_api_url = clipboard_button(text: platform.api_url, title: s_('ClusterIntegration|Copy API URL'),
- class: 'input-group-text btn-default') if cluster.read_only_kubernetes_platform_fields?
- = platform_field.text_field :api_url, class: 'js-select-on-focus', required: true,
- title: s_('ClusterIntegration|API URL should be a valid http/https url.'),
- readonly: cluster.read_only_kubernetes_platform_fields?,
- label: s_('ClusterIntegration|API URL'), label_class: 'label-bold',
- input_group_class: 'gl-field-error-anchor', append: copy_api_url
-
- - copy_ca_cert_btn = clipboard_button(text: platform.ca_cert, title: s_('ClusterIntegration|Copy CA Certificate'),
- class: 'input-group-text btn-default') if cluster.read_only_kubernetes_platform_fields?
- = platform_field.text_area :ca_cert, class: 'js-select-on-focus', rows: '10',
- readonly: cluster.read_only_kubernetes_platform_fields?,
- placeholder: s_('ClusterIntegration|Certificate Authority bundle (PEM format)'),
- label: s_('ClusterIntegration|CA Certificate'), label_class: 'label-bold',
- input_group_class: 'gl-field-error-anchor', append: copy_ca_cert_btn
-
- - show_token_btn = (platform_field.button s_('ClusterIntegration|Show'),
- type: 'button', class: 'js-show-cluster-token btn btn-default')
- - copy_token_btn = clipboard_button(text: platform.token, title: s_('ClusterIntegration|Copy Service Token'),
- class: 'input-group-text btn-default') if cluster.read_only_kubernetes_platform_fields?
-
- = platform_field.text_field :token, type: 'password', class: 'js-select-on-focus js-cluster-token',
- required: true, title: s_('ClusterIntegration|Service token is required.'),
- readonly: cluster.read_only_kubernetes_platform_fields?,
- label: s_('ClusterIntegration|Service Token'), label_class: 'label-bold',
- input_group_class: 'gl-field-error-anchor', append: show_token_btn + copy_token_btn
-
- = platform_field.form_group :authorization_type do
- = platform_field.check_box :authorization_type, { disabled: true, label: s_('ClusterIntegration|RBAC-enabled cluster'),
- label_class: 'label-bold', inline: true }, 'rbac', 'abac'
- .form-text.text-muted
- = s_('ClusterIntegration|Enable this setting if using role-based access control (RBAC).')
- = s_('ClusterIntegration|This option will allow you to install applications on RBAC clusters.')
-
- .form-group
- = field.check_box :managed, { label: s_('ClusterIntegration|GitLab-managed cluster'),
- class: 'js-gl-managed',
- label_class: 'label-bold' }
- .form-text.text-muted
- = s_('ClusterIntegration|Allow GitLab to manage namespace and service accounts for this cluster.')
- = link_to _('More information'), help_page_path('user/project/clusters/index.md', anchor: 'gitlab-managed-clusters'), target: '_blank'
-
- - if cluster.allow_user_defined_namespace?
- = render('clusters/clusters/namespace', platform_field: platform_field)
-
- .form-group
- = field.submit s_('ClusterIntegration|Save changes'), class: 'btn btn-success'
diff --git a/app/views/dashboard/merge_requests.html.haml b/app/views/dashboard/merge_requests.html.haml
index 3956f03a3c8..dd9fd34f284 100644
--- a/app/views/dashboard/merge_requests.html.haml
+++ b/app/views/dashboard/merge_requests.html.haml
@@ -4,11 +4,11 @@
= render_dashboard_gold_trial(current_user)
-.page-title-holder.d-flex.align-items-center
+.page-title-holder.d-flex.align-items-start.flex-column.flex-sm-row.align-items-sm-center
%h1.page-title= _('Merge Requests')
- if current_user
- .page-title-controls
+ .page-title-controls.ml-0.mb-3.ml-sm-auto.mb-sm-0
= render 'shared/new_project_item_select', path: 'merge_requests/new', label: "New merge request", with_feature_enabled: 'merge_requests', type: :merge_requests
.top-area
diff --git a/app/views/dashboard/todos/_todo.html.haml b/app/views/dashboard/todos/_todo.html.haml
index fdb71d3a221..f5ffe8f2e36 100644
--- a/app/views/dashboard/todos/_todo.html.haml
+++ b/app/views/dashboard/todos/_todo.html.haml
@@ -48,14 +48,14 @@
- if todo.pending?
.todo-actions
- = link_to dashboard_todo_path(todo), method: :delete, class: 'btn btn-loading js-done-todo', data: { href: dashboard_todo_path(todo) } do
+ = link_to dashboard_todo_path(todo), method: :delete, class: 'btn btn-loading d-flex align-items-center js-done-todo', data: { href: dashboard_todo_path(todo) } do
Done
- = icon('spinner spin')
- = link_to restore_dashboard_todo_path(todo), method: :patch, class: 'btn btn-loading js-undo-todo hidden', data: { href: restore_dashboard_todo_path(todo) } do
+ %span.spinner.ml-1
+ = link_to restore_dashboard_todo_path(todo), method: :patch, class: 'btn btn-loading d-flex align-items-center js-undo-todo hidden', data: { href: restore_dashboard_todo_path(todo) } do
Undo
- = icon('spinner spin')
+ %span.spinner.ml-1
- else
.todo-actions
- = link_to restore_dashboard_todo_path(todo), method: :patch, class: 'btn btn-loading js-add-todo', data: { href: restore_dashboard_todo_path(todo) } do
+ = link_to restore_dashboard_todo_path(todo), method: :patch, class: 'btn btn-loading d-flex align-items-center js-add-todo', data: { href: restore_dashboard_todo_path(todo) } do
Add a To Do
- = icon('spinner spin')
+ %span.spinner.ml-1
diff --git a/app/views/dashboard/todos/index.html.haml b/app/views/dashboard/todos/index.html.haml
index 731e763f2be..cfc637592d3 100644
--- a/app/views/dashboard/todos/index.html.haml
+++ b/app/views/dashboard/todos/index.html.haml
@@ -26,12 +26,12 @@
.nav-controls
- if @todos.any?(&:pending?)
.append-right-default
- = link_to destroy_all_dashboard_todos_path(todos_filter_params), class: 'btn btn-loading js-todos-mark-all', method: :delete, data: { href: destroy_all_dashboard_todos_path(todos_filter_params) } do
+ = link_to destroy_all_dashboard_todos_path(todos_filter_params), class: 'btn btn-loading d-flex align-items-center js-todos-mark-all', method: :delete, data: { href: destroy_all_dashboard_todos_path(todos_filter_params) } do
Mark all as done
- = icon('spinner spin')
- = link_to bulk_restore_dashboard_todos_path, class: 'btn btn-loading js-todos-undo-all hidden', method: :patch , data: { href: bulk_restore_dashboard_todos_path(todos_filter_params) } do
+ %span.spinner.ml-1
+ = link_to bulk_restore_dashboard_todos_path, class: 'btn btn-loading d-flex align-items-center js-todos-undo-all hidden', method: :patch , data: { href: bulk_restore_dashboard_todos_path(todos_filter_params) } do
Undo mark all as done
- = icon('spinner spin')
+ %span.spinner.ml-1
.todos-filters
.issues-details-filters.row-content-block.second-block
diff --git a/app/views/devise/shared/_omniauth_box.html.haml b/app/views/devise/shared/_omniauth_box.html.haml
index 1b583ea85d6..cca0f756e76 100644
--- a/app/views/devise/shared/_omniauth_box.html.haml
+++ b/app/views/devise/shared/_omniauth_box.html.haml
@@ -10,8 +10,9 @@
= provider_image_tag(provider)
%span
= label_for_provider(provider)
- %fieldset.remember-me
- %label
- = check_box_tag :remember_me, nil, false, class: 'remember-me-checkbox'
- %span
- Remember me
+ - unless defined?(hide_remember_me) && hide_remember_me
+ %fieldset.remember-me
+ %label
+ = check_box_tag :remember_me, nil, false, class: 'remember-me-checkbox'
+ %span
+ Remember me
diff --git a/app/views/discussions/_resolve_all.html.haml b/app/views/discussions/_resolve_all.html.haml
deleted file mode 100644
index 689a22acd27..00000000000
--- a/app/views/discussions/_resolve_all.html.haml
+++ /dev/null
@@ -1,8 +0,0 @@
-%resolve-discussion-btn{ ":discussion-id" => "'#{discussion.id}'",
- ":merge-request-id" => discussion.noteable.iid,
- ":can-resolve" => discussion.can_resolve?(current_user),
- "inline-template" => true }
- .btn-group{ role: "group", "v-if" => "showButton" }
- %button.btn.btn-default{ type: "button", "@click" => "resolve", ":disabled" => "loading", "v-cloak" => "true" }
- = icon("spinner spin", "v-show" => "loading")
- {{ buttonText }}
diff --git a/app/views/groups/registry/repositories/index.html.haml b/app/views/groups/registry/repositories/index.html.haml
index b82910df5d5..eff8d77ac72 100644
--- a/app/views/groups/registry/repositories/index.html.haml
+++ b/app/views/groups/registry/repositories/index.html.haml
@@ -1,9 +1,10 @@
- page_title _("Container Registry")
+- @content_class = "limit-container-width" unless fluid_layout
%section
.row.registry-placeholder.prepend-bottom-10
.col-12
- - if Feature.enabled?(:vue_container_registry_explorer)
+ - if Feature.enabled?(:vue_container_registry_explorer, @group)
#js-container-registry{ data: { endpoint: group_container_registries_path(@group),
"help_page_path" => help_page_path('user/packages/container_registry/index'),
"two_factor_auth_help_link" => help_page_path('user/profile/account/two_factor_authentication'),
diff --git a/app/views/groups/settings/_permanent_deletion.html.haml b/app/views/groups/settings/_permanent_deletion.html.haml
index 31e2bac70be..155efc03ffe 100644
--- a/app/views/groups/settings/_permanent_deletion.html.haml
+++ b/app/views/groups/settings/_permanent_deletion.html.haml
@@ -2,7 +2,7 @@
%h4.danger-title= _('Remove group')
= form_tag(group, method: :delete) do
%p
- = _('Removing group will cause all child projects and resources to be removed.')
+ = _('Removing this group also removes all child projects, including archived projects, and their resources.')
%br
%strong= _('Removed group can not be restored!')
diff --git a/app/views/groups/settings/_permissions.html.haml b/app/views/groups/settings/_permissions.html.haml
index 618cfe57be4..016a9c8e054 100644
--- a/app/views/groups/settings/_permissions.html.haml
+++ b/app/views/groups/settings/_permissions.html.haml
@@ -33,6 +33,7 @@
= render_if_exists 'groups/settings/ip_restriction', f: f, group: @group
= render_if_exists 'groups/settings/allowed_email_domain', f: f, group: @group
= render 'groups/settings/lfs', f: f
+ = render 'shared/default_branch_protection', f: f, selected_level: @group.default_branch_protection
= render 'groups/settings/project_creation_level', f: f, group: @group
= render 'groups/settings/subgroup_creation_level', f: f, group: @group
= render 'groups/settings/two_factor_auth', f: f
diff --git a/app/views/groups/settings/ci_cd/show.html.haml b/app/views/groups/settings/ci_cd/show.html.haml
index 8c9b859e127..4aef30622cd 100644
--- a/app/views/groups/settings/ci_cd/show.html.haml
+++ b/app/views/groups/settings/ci_cd/show.html.haml
@@ -3,6 +3,7 @@
- expanded = expanded_by_default?
- general_expanded = @group.errors.empty? ? expanded : true
+- deploy_token_description = s_('DeployTokens|Group deploy tokens allow read-only access to the repositories and registry images within the group.')
-# Given we only have one field in this form which is also admin-only,
-# we don't want to show an empty section to non-admin users,
@@ -24,6 +25,8 @@
.settings-content
= render 'ci/variables/index', save_endpoint: group_variables_path
+= render "shared/deploy_tokens/index", group_or_project: @group, description: deploy_token_description
+
%section.settings#runners-settings.no-animate{ class: ('expanded' if expanded) }
.settings-header
%h4
diff --git a/app/views/groups/show.html.haml b/app/views/groups/show.html.haml
index 4916c4651dd..a9c19502a7c 100644
--- a/app/views/groups/show.html.haml
+++ b/app/views/groups/show.html.haml
@@ -11,6 +11,8 @@
= render_if_exists 'groups/self_or_ancestor_marked_for_deletion_notice', group: @group
+ = render_if_exists 'groups/group_activity_analytics', group: @group
+
.groups-listing{ data: { endpoints: { default: group_children_path(@group, format: :json), shared: group_shared_projects_path(@group, format: :json) } } }
.top-area.group-nav-container.justify-content-between
.scrolling-tabs-container.inner-page-scroll-tabs
diff --git a/app/views/help/ui.html.haml b/app/views/help/ui.html.haml
index 7e0b444e5d7..d71650ae50c 100644
--- a/app/views/help/ui.html.haml
+++ b/app/views/help/ui.html.haml
@@ -1,5 +1,6 @@
- page_title "UI Development Kit", "Help"
- lorem = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed fermentum nisi sapien, non consequat lectus aliquam ultrices. Suspendisse sodales est euismod nunc condimentum, a consectetur diam ornare."
+- link_classes = "flex-grow-1 mx-1 "
.gitlab-ui-dev-kit
%h1 GitLab UI development kit
@@ -64,7 +65,12 @@
Cover block for profile page with avatar, name and description
%code .cover-block
.example
- .cover-block
+ .cover-block.user-cover-block
+ = render layout: 'users/cover_controls' do
+ = link_to '#', class: link_classes + 'btn btn-default' do
+ = icon('pencil')
+ = link_to '#', class: link_classes + 'btn btn-default' do
+ = icon('rss')
.avatar-holder
= image_tag avatar_icon_for_email('admin@example.com', 90), class: "avatar s90", alt: ''
.cover-title
@@ -73,13 +79,6 @@
.cover-desc.cgray
= lorem
- .cover-controls
- = link_to '#', class: 'btn btn-default' do
- = icon('pencil')
- &nbsp;
- = link_to '#', class: 'btn btn-default' do
- = icon('rss')
-
%h2#lists Lists
.lead
@@ -323,8 +322,8 @@
.dropdown-footer
%strong Tip:
If an author is not a member of this project, you can still filter by their name while using the search field.
- .dropdown-loading
- = icon('spinner spin')
+ .dropdown-loading.text-center
+ .spinner.spinner-md.mt-8
.example
%div
@@ -404,8 +403,8 @@
%input.dropdown-input-field{ type: "search", placeholder: "Filter results" }
= icon('search')
.dropdown-content
- .dropdown-loading
- = icon('spinner spin')
+ .dropdown-loading.text-center
+ .spinner.spinner-md.mt-8
.example
%div
diff --git a/app/views/ide/_show.html.haml b/app/views/ide/_show.html.haml
index 057225d021f..b871f0363f3 100644
--- a/app/views/ide/_show.html.haml
+++ b/app/views/ide/_show.html.haml
@@ -6,5 +6,5 @@
#ide.ide-loading{ data: ide_data }
.text-center
- = icon('spinner spin 2x')
+ .spinner.spinner-md
%h2.clgray= _('Loading the GitLab IDE...')
diff --git a/app/views/import/shared/_new_project_form.html.haml b/app/views/import/shared/_new_project_form.html.haml
index 35059229a55..a558b21b461 100644
--- a/app/views/import/shared/_new_project_form.html.haml
+++ b/app/views/import/shared/_new_project_form.html.haml
@@ -15,7 +15,7 @@
.input-group-prepend.static-namespace.has-tooltip{ title: user_url(current_user.username) + '/' }
.input-group-text.border-0
#{user_url(current_user.username)}/
- = hidden_field_tag :namespace_id, value: current_user.namespace_id
+ = hidden_field_tag :namespace_id, current_user.namespace_id
.form-group.col-12.col-sm-6.project-path
= label_tag :path, _('Project slug'), class: 'label-bold'
= text_field_tag :path, @path, placeholder: "my-awesome-project", class: "js-path-name form-control", tabindex: 2, required: true
diff --git a/app/views/layouts/_page.html.haml b/app/views/layouts/_page.html.haml
index 2b2ffd6abeb..06e3bca99a1 100644
--- a/app/views/layouts/_page.html.haml
+++ b/app/views/layouts/_page.html.haml
@@ -2,9 +2,9 @@
- if defined?(nav) && nav
= render "layouts/nav/sidebar/#{nav}"
.content-wrapper{ class: "#{@content_wrapper_class}" }
- = render 'shared/outdated_browser'
.mobile-overlay
.alert-wrapper
+ = render 'shared/outdated_browser'
= render_if_exists "layouts/header/ee_license_banner"
= render "layouts/broadcast"
= render "layouts/header/read_only_banner"
diff --git a/app/views/layouts/fullscreen.html.haml b/app/views/layouts/fullscreen.html.haml
index 8d0775f6f27..63bb9f8fff5 100644
--- a/app/views/layouts/fullscreen.html.haml
+++ b/app/views/layouts/fullscreen.html.haml
@@ -5,9 +5,9 @@
= render 'peek/bar'
= header_message
= render partial: "layouts/header/default", locals: { project: @project, group: @group }
- = render 'shared/outdated_browser'
.mobile-overlay
.alert-wrapper
+ = render 'shared/outdated_browser'
= render "layouts/broadcast"
= yield :flash_message
= render "layouts/flash"
diff --git a/app/views/layouts/nav/sidebar/_analytics_links.html.haml b/app/views/layouts/nav/sidebar/_analytics_links.html.haml
index e87cf92374a..90f9432af03 100644
--- a/app/views/layouts/nav/sidebar/_analytics_links.html.haml
+++ b/app/views/layouts/nav/sidebar/_analytics_links.html.haml
@@ -10,6 +10,11 @@
= _('Analytics')
%ul.sidebar-sub-level-items{ data: { qa_selector: 'analytics_sidebar_submenu' } }
+ = nav_link(path: navbar_links.first.path, html_options: { class: "fly-out-top-item" } ) do
+ = link_to navbar_links.first.link do
+ %strong.fly-out-top-item-name
+ = _('Analytics')
+ %li.divider.fly-out-top-item
- navbar_links.each do |menu_item|
= nav_link(path: menu_item.path) do
= link_to(menu_item.link, menu_item.link_to_options) do
diff --git a/app/views/layouts/nav/sidebar/_group.html.haml b/app/views/layouts/nav/sidebar/_group.html.haml
index c00c48b623c..588ee63c56f 100644
--- a/app/views/layouts/nav/sidebar/_group.html.haml
+++ b/app/views/layouts/nav/sidebar/_group.html.haml
@@ -1,4 +1,3 @@
-- should_display_analytics_pages_in_sidebar = Feature.enabled?(:analytics_pages_under_group_analytics_sidebar, @group, default_enabled: true)
- issues_count = group_issues_count(state: 'opened')
- merge_requests_count = group_merge_requests_count(state: 'opened')
@@ -13,8 +12,7 @@
%ul.sidebar-top-level-items.qa-group-sidebar
- if group_sidebar_link?(:overview)
- paths = group_overview_nav_link_paths
- - paths << 'contribution_analytics#show' unless should_display_analytics_pages_in_sidebar
- = nav_link(path: paths, unless: -> { should_display_analytics_pages_in_sidebar && current_path?('groups/contribution_analytics#show') }, html_options: { class: 'home' }) do
+ = nav_link(path: paths, unless: -> { current_path?('groups/contribution_analytics#show') }, html_options: { class: 'home' }) do
= link_to group_path(@group) do
.nav-icon-container
= sprite_icon('home')
@@ -45,19 +43,10 @@
%span
= _('Activity')
- - unless should_display_analytics_pages_in_sidebar
- - if group_sidebar_link?(:contribution_analytics)
- = nav_link(path: 'contribution_analytics#show') do
- = link_to group_contribution_analytics_path(@group), title: _('Contribution Analytics'), data: { placement: 'right', qa_selector: 'contribution_analytics_link' } do
- %span
- = _('Contribution Analytics')
-
- = render_if_exists 'layouts/nav/group_insights_link'
-
= render_if_exists "layouts/nav/ee/epic_link", group: @group
- if group_sidebar_link?(:issues)
- = nav_link(path: group_issues_sub_menu_items, unless: -> { should_display_analytics_pages_in_sidebar && current_path?('issues_analytics#show') }) do
+ = nav_link(path: group_issues_sub_menu_items, unless: -> { current_path?('issues_analytics#show') }) do
= link_to issues_group_path(@group), data: { qa_selector: 'group_issues_item' } do
.nav-icon-container
= sprite_icon('issues')
@@ -84,9 +73,6 @@
%span
= boards_link_text
- - unless should_display_analytics_pages_in_sidebar
- = render_if_exists 'layouts/nav/issues_analytics_link'
-
- if group_sidebar_link?(:labels)
= nav_link(path: 'labels#index') do
= link_to group_labels_path(@group), title: _('Labels') do
diff --git a/app/views/layouts/nav/sidebar/_project.html.haml b/app/views/layouts/nav/sidebar/_project.html.haml
index b9324f0596c..c11d1256d21 100644
--- a/app/views/layouts/nav/sidebar/_project.html.haml
+++ b/app/views/layouts/nav/sidebar/_project.html.haml
@@ -1,5 +1,3 @@
-- should_display_analytics_pages_in_sidebar = Feature.enabled?(:analytics_pages_under_project_analytics_sidebar, @project, default_enabled: true)
-
.nav-sidebar{ class: ("sidebar-collapsed-desktop" if collapsed_sidebar?) }
.nav-sidebar-inner-scroll
- can_edit = can?(current_user, :admin_project, @project)
@@ -10,9 +8,7 @@
.sidebar-context-title
= @project.name
%ul.sidebar-top-level-items.qa-project-sidebar
- - paths = sidebar_projects_paths
- - paths << 'cycle_analytics#show' unless should_display_analytics_pages_in_sidebar
- = nav_link(path: paths, html_options: { class: 'home' }) do
+ = nav_link(path: sidebar_projects_paths, html_options: { class: 'home' }) do
= link_to project_path(@project), class: 'shortcuts-project rspec-project-link', data: { qa_selector: 'project_link' } do
.nav-icon-container
= sprite_icon('home')
@@ -39,17 +35,8 @@
%span= _('Releases')
- - unless should_display_analytics_pages_in_sidebar
- - if can?(current_user, :read_cycle_analytics, @project)
- = nav_link(path: 'cycle_analytics#show') do
- = link_to project_cycle_analytics_path(@project), title: _('Value Stream Analytics'), class: 'shortcuts-project-cycle-analytics' do
- %span= _('Value Stream Analytics')
-
- = render_if_exists 'layouts/nav/project_insights_link'
-
-
- if project_nav_tab? :files
- = nav_link(controller: sidebar_repository_paths, unless: -> { should_display_analytics_pages_in_sidebar && current_path?('projects/graphs#charts') }) do
+ = nav_link(controller: sidebar_repository_paths, unless: -> { current_path?('projects/graphs#charts') }) do
= link_to project_tree_path(@project), class: 'shortcuts-tree qa-project-menu-repo' do
.nav-icon-container
= sprite_icon('doc-text')
@@ -90,11 +77,6 @@
= link_to project_compare_index_path(@project, from: @repository.root_ref, to: current_ref) do
= _('Compare')
- - unless should_display_analytics_pages_in_sidebar
- = nav_link(path: 'graphs#charts') do
- = link_to charts_project_graph_path(@project, current_ref) do
- = _('Charts')
-
= render_if_exists 'projects/sidebar/repository_locked_files'
- if project_nav_tab? :issues
@@ -143,7 +125,7 @@
- issue_tracker = @project.external_issue_tracker
= link_to issue_tracker.issue_tracker_path, class: 'shortcuts-external_tracker' do
.nav-icon-container
- = sprite_icon('issue-external')
+ = sprite_icon('external-link')
%span.nav-item-name
= issue_tracker.title
%ul.sidebar-sub-level-items.is-fly-out-only
@@ -177,8 +159,10 @@
%span.badge.badge-pill.count.merge_counter.js-merge-counter.fly-out-badge
= number_with_delimiter(@project.open_merge_requests_count)
+ = render_if_exists "layouts/nav/requirements_link", project: @project
+
- if project_nav_tab? :pipelines
- = nav_link(controller: [:pipelines, :builds, :jobs, :pipeline_schedules, :artifacts], unless: -> { should_display_analytics_pages_in_sidebar && current_path?('projects/pipelines#charts') }) do
+ = nav_link(controller: [:pipelines, :builds, :jobs, :pipeline_schedules, :artifacts], unless: -> { current_path?('projects/pipelines#charts') }) do
= link_to project_pipelines_path(@project), class: 'shortcuts-pipelines qa-link-pipelines rspec-link-pipelines', data: { qa_selector: 'ci_cd_link' } do
.nav-icon-container
= sprite_icon('rocket')
@@ -215,12 +199,6 @@
%span
= _('Schedules')
- - if !should_display_analytics_pages_in_sidebar && @project.feature_available?(:builds, current_user) && !@project.empty_repo?
- = nav_link(path: 'pipelines#charts') do
- = link_to charts_project_pipelines_path(@project), title: _('Charts'), class: 'shortcuts-pipelines-charts' do
- %span
- = _('Charts')
-
= render_if_exists 'layouts/nav/sidebar/project_security_link' # EE-specific
- if project_nav_tab? :operations
@@ -263,7 +241,11 @@
%span
= _('Serverless')
- = render_if_exists 'layouts/nav/sidebar/pod_logs_link' # EE-specific
+ - if project_nav_tab?(:environments) && can?(current_user, :read_pod_logs, @project)
+ = nav_link(controller: :logs, action: [:index]) do
+ = link_to project_logs_path(@project), title: _('Logs') do
+ %span
+ = _('Logs')
- if project_nav_tab? :clusters
- show_cluster_hint = show_gke_cluster_integration_callout?(@project)
@@ -319,7 +301,7 @@
= nav_link do
= link_to external_wiki_url, class: 'shortcuts-external_wiki' do
.nav-icon-container
- = sprite_icon('issue-external')
+ = sprite_icon('external-link')
%span.nav-item-name
= _('External Wiki')
%ul.sidebar-sub-level-items.is-fly-out-only
@@ -366,10 +348,14 @@
%span
= _('Members')
- if can_edit
- = nav_link(controller: [:integrations, :services, :hooks, :hook_logs]) do
+ = nav_link(controller: [:integrations, :services]) do
= link_to project_settings_integrations_path(@project), title: _('Integrations'), data: { qa_selector: 'integrations_settings_link' } do
%span
= _('Integrations')
+ = nav_link(controller: [:hooks, :hook_logs]) do
+ = link_to project_hooks_path(@project), title: _('Webhooks'), data: { qa_selector: 'webhooks_settings_link' } do
+ %span
+ = _('Webhooks')
= nav_link(controller: :repository) do
= link_to project_settings_repository_path(@project), title: _('Repository') do
%span
@@ -418,13 +404,6 @@
= link_to project_network_path(@project, current_ref), title: _('Network'), class: 'shortcuts-network' do
= _('Graph')
- - unless should_display_analytics_pages_in_sidebar
- -# Shortcut to Repository > Charts (formerly, top-nav item "Graphs")
- - unless @project.empty_repo?
- %li.hidden
- = link_to charts_project_graph_path(@project, current_ref), title: _('Charts'), class: 'shortcuts-repository-charts' do
- = _('Charts')
-
-# Shortcut to Issues > New Issue
- if project_nav_tab?(:issues)
%li.hidden
diff --git a/app/views/notify/_successful_pipeline.html.haml b/app/views/notify/_successful_pipeline.html.haml
new file mode 100644
index 00000000000..231df2e9206
--- /dev/null
+++ b/app/views/notify/_successful_pipeline.html.haml
@@ -0,0 +1,118 @@
+- title = local_assigns[:title]
+%tr.table-success
+ %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;padding:10px;border-radius:3px;font-size:14px;line-height:1.3;text-align:center;overflow:hidden;color:#ffffff;background-color:#31af64;" }
+ %table.img{ border: "0", cellpadding: "0", cellspacing: "0", style: "border-collapse:collapse;margin:0 auto;" }
+ %tbody
+ %tr
+ %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;vertical-align:middle;color:#ffffff;text-align:center;padding-right:5px;" }
+ %img{ alt: "✓", height: "13", src: image_url('mailers/ci_pipeline_notif_v1/icon-check-green-inverted.gif'), style: "display:block;", width: "13" }/
+ %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;vertical-align:middle;color:#ffffff;text-align:center;" }
+ = title
+%tr.spacer
+ %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;height:18px;font-size:18px;line-height:18px;" }
+ &nbsp;
+%tr.section
+ %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;padding:0 15px;border:1px solid #ededed;border-radius:3px;overflow:hidden;" }
+ %table.table-info{ border: "0", cellpadding: "0", cellspacing: "0", style: "width:100%;" }
+ %tbody
+ %tr
+ %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;color:#8c8c8c;font-weight:300;padding:14px 0;margin:0;" } Project
+ %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;color:#8c8c8c;font-weight:500;padding:14px 0;margin:0;color:#333333;width:75%;padding-left:5px;" }
+ - namespace_name = @project.group ? @project.group.name : @project.namespace.owner.name
+ - namespace_url = @project.group ? group_url(@project.group) : user_url(@project.namespace.owner)
+ %a.muted{ href: namespace_url, style: "color:#333333;text-decoration:none;" }
+ = namespace_name
+ \/
+ %a.muted{ href: project_url(@project), style: "color:#333333;text-decoration:none;" }
+ = @project.name
+ %tr
+ %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;color:#8c8c8c;font-weight:300;padding:14px 0;margin:0;border-top:1px solid #ededed;" } Branch
+ %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;color:#8c8c8c;font-weight:500;padding:14px 0;margin:0;color:#333333;width:75%;padding-left:5px;border-top:1px solid #ededed;" }
+ %table.img{ border: "0", cellpadding: "0", cellspacing: "0", style: "border-collapse:collapse;" }
+ %tbody
+ %tr
+ %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;vertical-align:middle;padding-right:5px;" }
+ %img{ height: "13", src: image_url('mailers/ci_pipeline_notif_v1/icon-branch-gray.gif'), style: "display:block;", width: "13", alt: "" }/
+ %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;vertical-align:middle;" }
+ %a.muted{ href: commits_url(@pipeline), style: "color:#333333;text-decoration:none;" }
+ = @pipeline.source_ref
+ %tr
+ %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;color:#8c8c8c;font-weight:300;padding:14px 0;margin:0;border-top:1px solid #ededed;" } Commit
+ %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;color:#8c8c8c;font-weight:400;padding:14px 0;margin:0;color:#333333;width:75%;padding-left:5px;border-top:1px solid #ededed;" }
+ %table.img{ border: "0", cellpadding: "0", cellspacing: "0", style: "border-collapse:collapse;" }
+ %tbody
+ %tr
+ %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;vertical-align:middle;padding-right:5px;" }
+ %img{ height: "13", src: image_url('mailers/ci_pipeline_notif_v1/icon-commit-gray.gif'), style: "display:block;", width: "13", alt: "" }/
+ %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;vertical-align:middle;" }
+ %a{ href: commit_url(@pipeline), style: "color:#3777b0;text-decoration:none;" }
+ = @pipeline.short_sha
+ - if @merge_request
+ in
+ %a{ href: merge_request_url(@merge_request), style: "color:#3777b0;text-decoration:none;" }
+ = @merge_request.to_reference
+ .commit{ style: "color:#5c5c5c;font-weight:300;" }
+ = @pipeline.git_commit_message.truncate(50)
+ - commit = @pipeline.commit
+ %tr
+ %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;color:#8c8c8c;font-weight:300;padding:14px 0;margin:0;border-top:1px solid #ededed;" } Commit Author
+ %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;color:#8c8c8c;font-weight:500;padding:14px 0;margin:0;color:#333333;width:75%;padding-left:5px;border-top:1px solid #ededed;" }
+ %table.img{ border: "0", cellpadding: "0", cellspacing: "0", style: "border-collapse:collapse;" }
+ %tbody
+ %tr
+ %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;vertical-align:middle;padding-right:5px;" }
+ %img.avatar{ height: "24", src: avatar_icon_for(commit.author, commit.author_email, 24, only_path: false), style: "display:block;border-radius:12px;margin:-2px 0;", width: "24", alt: "" }/
+ %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;vertical-align:middle;" }
+ - if commit.author
+ %a.muted{ href: user_url(commit.author), style: "color:#333333;text-decoration:none;" }
+ = commit.author.name
+ - else
+ %span
+ = commit.author_name
+ - if commit.different_committer?
+ %tr
+ %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;color:#8c8c8c;font-weight:300;padding:14px 0;margin:0;border-top:1px solid #ededed;" } Committed by
+ %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;color:#8c8c8c;font-weight:500;padding:14px 0;margin:0;color:#333333;width:75%;padding-left:5px;border-top:1px solid #ededed;" }
+ %table.img{ border: "0", cellpadding: "0", cellspacing: "0", style: "border-collapse:collapse;" }
+ %tbody
+ %tr
+ %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;vertical-align:middle;padding-right:5px;" }
+ %img.avatar{ height: "24", src: avatar_icon_for(commit.committer, commit.committer_email, 24, only_path: false), style: "display:block;border-radius:12px;margin:-2px 0;", width: "24", alt: "" }/
+ %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;vertical-align:middle;" }
+ - if commit.committer
+ %a.muted{ href: user_url(commit.committer), style: "color:#333333;text-decoration:none;" }
+ = commit.committer.name
+ - else
+ %span
+ = commit.committer_name
+
+%tr.spacer
+ %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;height:18px;font-size:18px;line-height:18px;" }
+ &nbsp;
+%tr.success-message
+ %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;color:#333333;font-size:15px;font-weight:400;line-height:1.4;padding:15px 5px 0 5px;text-align:center;" }
+ %table.img{ border: "0", cellpadding: "0", cellspacing: "0", style: "border-collapse:collapse;margin:0 auto;" }
+ %tbody
+ %tr
+ %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;font-weight:500;line-height:1.4;vertical-align:baseline;" }
+ Pipeline
+ %a{ href: pipeline_url(@pipeline), style: "color:#3777b0;text-decoration:none;" }
+ = "\##{@pipeline.id}"
+ triggered by
+ - if @pipeline.user
+ %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;vertical-align:middle;padding-right:5px;padding-left:5px", width: "24" }
+ %img.avatar{ height: "24", src: avatar_icon_for_user(@pipeline.user, 24, only_path: false), style: "display:block;border-radius:12px;margin:-2px 0;", width: "24", alt: "" }/
+ %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;font-weight:500;line-height:1.4;vertical-align:baseline;" }
+ %a.muted{ href: user_url(@pipeline.user), style: "color:#333333;text-decoration:none;" }
+ = @pipeline.user.name
+ - else
+ %td{ style: "font-family:'Menlo','Liberation Mono','Consolas','DejaVu Sans Mono','Ubuntu Mono','Courier New','andale mono','lucida console',monospace;font-size:14px;line-height:1.4;vertical-align:baseline;padding:0 5px;" }
+ API
+%tr
+ %td{ colspan: 2, style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;color:#333333;font-size:15px;font-weight:300;line-height:1.4;padding:15px 5px;text-align:center;" }
+ - job_count = @pipeline.total_size
+ - stage_count = @pipeline.stages_count
+ successfully completed
+ #{job_count} #{'job'.pluralize(job_count)}
+ in
+ #{stage_count} #{'stage'.pluralize(stage_count)}.
diff --git a/app/views/notify/_successful_pipeline.text.erb b/app/views/notify/_successful_pipeline.text.erb
new file mode 100644
index 00000000000..628976e2dda
--- /dev/null
+++ b/app/views/notify/_successful_pipeline.text.erb
@@ -0,0 +1,32 @@
+<%= local_assigns[:title] %>
+
+Project: <%= @project.name %> ( <%= project_url(@project) %> )
+Branch: <%= @pipeline.source_ref %> ( <%= commits_url(@pipeline) %> )
+<% if @merge_request -%>
+Merge Request: <%= @merge_request.to_reference %> ( <%= merge_request_url(@merge_request) %> )
+<% end -%>
+
+Commit: <%= @pipeline.short_sha %> ( <%= commit_url(@pipeline) %> )
+Commit Message: <%= @pipeline.git_commit_message.truncate(50) %>
+<% commit = @pipeline.commit -%>
+<% if commit.author -%>
+Commit Author: <%= sanitize_name(commit.author.name) %> ( <%= user_url(commit.author) %> )
+<% else -%>
+Commit Author: <%= commit.author_name %>
+<% end -%>
+<% if commit.different_committer? -%>
+<% if commit.committer -%>
+Committed by: <%= sanitize_name(commit.committer.name) %> ( <%= user_url(commit.committer) %> )
+<% else -%>
+Committed by: <%= commit.committer_name %>
+<% end -%>
+<% end -%>
+
+<% job_count = @pipeline.total_size -%>
+<% stage_count = @pipeline.stages_count -%>
+<% if @pipeline.user -%>
+Pipeline #<%= @pipeline.id %> ( <%= pipeline_url(@pipeline) %> ) triggered by <%= sanitize_name(@pipeline.user.name) %> ( <%= user_url(@pipeline.user) %> )
+<% else -%>
+Pipeline #<%= @pipeline.id %> ( <%= pipeline_url(@pipeline) %> ) triggered by API
+<% end -%>
+successfully completed <%= job_count %> <%= 'job'.pluralize(job_count) %> in <%= stage_count %> <%= 'stage'.pluralize(stage_count) %>.
diff --git a/app/views/notify/pipeline_fixed_email.html.haml b/app/views/notify/pipeline_fixed_email.html.haml
new file mode 100644
index 00000000000..05c0027a6fc
--- /dev/null
+++ b/app/views/notify/pipeline_fixed_email.html.haml
@@ -0,0 +1 @@
+= render 'notify/successful_pipeline', title: 'Your pipeline has been fixed!'
diff --git a/app/views/notify/pipeline_fixed_email.text.erb b/app/views/notify/pipeline_fixed_email.text.erb
new file mode 100644
index 00000000000..75268531bdc
--- /dev/null
+++ b/app/views/notify/pipeline_fixed_email.text.erb
@@ -0,0 +1 @@
+<%= render 'notify/successful_pipeline', title: 'Your pipeline has been fixed!' -%>
diff --git a/app/views/notify/pipeline_success_email.html.haml b/app/views/notify/pipeline_success_email.html.haml
index e575a5569fa..c34e02b5fee 100644
--- a/app/views/notify/pipeline_success_email.html.haml
+++ b/app/views/notify/pipeline_success_email.html.haml
@@ -1,117 +1 @@
-%tr.table-success
- %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;padding:10px;border-radius:3px;font-size:14px;line-height:1.3;text-align:center;overflow:hidden;color:#ffffff;background-color:#31af64;" }
- %table.img{ border: "0", cellpadding: "0", cellspacing: "0", style: "border-collapse:collapse;margin:0 auto;" }
- %tbody
- %tr
- %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;vertical-align:middle;color:#ffffff;text-align:center;padding-right:5px;" }
- %img{ alt: "✓", height: "13", src: image_url('mailers/ci_pipeline_notif_v1/icon-check-green-inverted.gif'), style: "display:block;", width: "13" }/
- %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;vertical-align:middle;color:#ffffff;text-align:center;" }
- Your pipeline has passed.
-%tr.spacer
- %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;height:18px;font-size:18px;line-height:18px;" }
- &nbsp;
-%tr.section
- %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;padding:0 15px;border:1px solid #ededed;border-radius:3px;overflow:hidden;" }
- %table.table-info{ border: "0", cellpadding: "0", cellspacing: "0", style: "width:100%;" }
- %tbody
- %tr
- %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;color:#8c8c8c;font-weight:300;padding:14px 0;margin:0;" } Project
- %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;color:#8c8c8c;font-weight:500;padding:14px 0;margin:0;color:#333333;width:75%;padding-left:5px;" }
- - namespace_name = @project.group ? @project.group.name : @project.namespace.owner.name
- - namespace_url = @project.group ? group_url(@project.group) : user_url(@project.namespace.owner)
- %a.muted{ href: namespace_url, style: "color:#333333;text-decoration:none;" }
- = namespace_name
- \/
- %a.muted{ href: project_url(@project), style: "color:#333333;text-decoration:none;" }
- = @project.name
- %tr
- %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;color:#8c8c8c;font-weight:300;padding:14px 0;margin:0;border-top:1px solid #ededed;" } Branch
- %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;color:#8c8c8c;font-weight:500;padding:14px 0;margin:0;color:#333333;width:75%;padding-left:5px;border-top:1px solid #ededed;" }
- %table.img{ border: "0", cellpadding: "0", cellspacing: "0", style: "border-collapse:collapse;" }
- %tbody
- %tr
- %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;vertical-align:middle;padding-right:5px;" }
- %img{ height: "13", src: image_url('mailers/ci_pipeline_notif_v1/icon-branch-gray.gif'), style: "display:block;", width: "13", alt: "" }/
- %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;vertical-align:middle;" }
- %a.muted{ href: commits_url(@pipeline), style: "color:#333333;text-decoration:none;" }
- = @pipeline.source_ref
- %tr
- %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;color:#8c8c8c;font-weight:300;padding:14px 0;margin:0;border-top:1px solid #ededed;" } Commit
- %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;color:#8c8c8c;font-weight:400;padding:14px 0;margin:0;color:#333333;width:75%;padding-left:5px;border-top:1px solid #ededed;" }
- %table.img{ border: "0", cellpadding: "0", cellspacing: "0", style: "border-collapse:collapse;" }
- %tbody
- %tr
- %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;vertical-align:middle;padding-right:5px;" }
- %img{ height: "13", src: image_url('mailers/ci_pipeline_notif_v1/icon-commit-gray.gif'), style: "display:block;", width: "13", alt: "" }/
- %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;vertical-align:middle;" }
- %a{ href: commit_url(@pipeline), style: "color:#3777b0;text-decoration:none;" }
- = @pipeline.short_sha
- - if @merge_request
- in
- %a{ href: merge_request_url(@merge_request), style: "color:#3777b0;text-decoration:none;" }
- = @merge_request.to_reference
- .commit{ style: "color:#5c5c5c;font-weight:300;" }
- = @pipeline.git_commit_message.truncate(50)
- - commit = @pipeline.commit
- %tr
- %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;color:#8c8c8c;font-weight:300;padding:14px 0;margin:0;border-top:1px solid #ededed;" } Commit Author
- %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;color:#8c8c8c;font-weight:500;padding:14px 0;margin:0;color:#333333;width:75%;padding-left:5px;border-top:1px solid #ededed;" }
- %table.img{ border: "0", cellpadding: "0", cellspacing: "0", style: "border-collapse:collapse;" }
- %tbody
- %tr
- %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;vertical-align:middle;padding-right:5px;" }
- %img.avatar{ height: "24", src: avatar_icon_for(commit.author, commit.author_email, 24, only_path: false), style: "display:block;border-radius:12px;margin:-2px 0;", width: "24", alt: "" }/
- %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;vertical-align:middle;" }
- - if commit.author
- %a.muted{ href: user_url(commit.author), style: "color:#333333;text-decoration:none;" }
- = commit.author.name
- - else
- %span
- = commit.author_name
- - if commit.different_committer?
- %tr
- %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;color:#8c8c8c;font-weight:300;padding:14px 0;margin:0;border-top:1px solid #ededed;" } Committed by
- %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;color:#8c8c8c;font-weight:500;padding:14px 0;margin:0;color:#333333;width:75%;padding-left:5px;border-top:1px solid #ededed;" }
- %table.img{ border: "0", cellpadding: "0", cellspacing: "0", style: "border-collapse:collapse;" }
- %tbody
- %tr
- %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;vertical-align:middle;padding-right:5px;" }
- %img.avatar{ height: "24", src: avatar_icon_for(commit.committer, commit.committer_email, 24, only_path: false), style: "display:block;border-radius:12px;margin:-2px 0;", width: "24", alt: "" }/
- %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;vertical-align:middle;" }
- - if commit.committer
- %a.muted{ href: user_url(commit.committer), style: "color:#333333;text-decoration:none;" }
- = commit.committer.name
- - else
- %span
- = commit.committer_name
-
-%tr.spacer
- %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;height:18px;font-size:18px;line-height:18px;" }
- &nbsp;
-%tr.success-message
- %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;color:#333333;font-size:15px;font-weight:400;line-height:1.4;padding:15px 5px 0 5px;text-align:center;" }
- %table.img{ border: "0", cellpadding: "0", cellspacing: "0", style: "border-collapse:collapse;margin:0 auto;" }
- %tbody
- %tr
- %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;font-weight:500;line-height:1.4;vertical-align:baseline;" }
- Pipeline
- %a{ href: pipeline_url(@pipeline), style: "color:#3777b0;text-decoration:none;" }
- = "\##{@pipeline.id}"
- triggered by
- - if @pipeline.user
- %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;vertical-align:middle;padding-right:5px;padding-left:5px", width: "24" }
- %img.avatar{ height: "24", src: avatar_icon_for_user(@pipeline.user, 24, only_path: false), style: "display:block;border-radius:12px;margin:-2px 0;", width: "24", alt: "" }/
- %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;font-weight:500;line-height:1.4;vertical-align:baseline;" }
- %a.muted{ href: user_url(@pipeline.user), style: "color:#333333;text-decoration:none;" }
- = @pipeline.user.name
- - else
- %td{ style: "font-family:'Menlo','Liberation Mono','Consolas','DejaVu Sans Mono','Ubuntu Mono','Courier New','andale mono','lucida console',monospace;font-size:14px;line-height:1.4;vertical-align:baseline;padding:0 5px;" }
- API
-%tr
- %td{ colspan: 2, style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;color:#333333;font-size:15px;font-weight:300;line-height:1.4;padding:15px 5px;text-align:center;" }
- - job_count = @pipeline.total_size
- - stage_count = @pipeline.stages_count
- successfully completed
- #{job_count} #{'job'.pluralize(job_count)}
- in
- #{stage_count} #{'stage'.pluralize(stage_count)}.
+= render 'notify/successful_pipeline', title: 'Your pipeline has passed.'
diff --git a/app/views/notify/pipeline_success_email.text.erb b/app/views/notify/pipeline_success_email.text.erb
index 4005158dc9e..b554bffc908 100644
--- a/app/views/notify/pipeline_success_email.text.erb
+++ b/app/views/notify/pipeline_success_email.text.erb
@@ -1,32 +1 @@
-Your pipeline has passed.
-
-Project: <%= @project.name %> ( <%= project_url(@project) %> )
-Branch: <%= @pipeline.source_ref %> ( <%= commits_url(@pipeline) %> )
-<% if @merge_request -%>
-Merge Request: <%= @merge_request.to_reference %> ( <%= merge_request_url(@merge_request) %> )
-<% end -%>
-
-Commit: <%= @pipeline.short_sha %> ( <%= commit_url(@pipeline) %> )
-Commit Message: <%= @pipeline.git_commit_message.truncate(50) %>
-<% commit = @pipeline.commit -%>
-<% if commit.author -%>
-Commit Author: <%= sanitize_name(commit.author.name) %> ( <%= user_url(commit.author) %> )
-<% else -%>
-Commit Author: <%= commit.author_name %>
-<% end -%>
-<% if commit.different_committer? -%>
-<% if commit.committer -%>
-Committed by: <%= sanitize_name(commit.committer.name) %> ( <%= user_url(commit.committer) %> )
-<% else -%>
-Committed by: <%= commit.committer_name %>
-<% end -%>
-<% end -%>
-
-<% job_count = @pipeline.total_size -%>
-<% stage_count = @pipeline.stages_count -%>
-<% if @pipeline.user -%>
-Pipeline #<%= @pipeline.id %> ( <%= pipeline_url(@pipeline) %> ) triggered by <%= sanitize_name(@pipeline.user.name) %> ( <%= user_url(@pipeline.user) %> )
-<% else -%>
-Pipeline #<%= @pipeline.id %> ( <%= pipeline_url(@pipeline) %> ) triggered by API
-<% end -%>
-successfully completed <%= job_count %> <%= 'job'.pluralize(job_count) %> in <%= stage_count %> <%= 'stage'.pluralize(stage_count) %>.
+<%= render 'notify/successful_pipeline', title: 'Your pipeline has passed.' -%>
diff --git a/app/views/profiles/_email_settings.html.haml b/app/views/profiles/_email_settings.html.haml
index fb4da08e129..beda6f05f88 100644
--- a/app/views/profiles/_email_settings.html.haml
+++ b/app/views/profiles/_email_settings.html.haml
@@ -5,7 +5,7 @@
- help_text = email_change_disabled ? s_("Your account uses dedicated credentials for the \"%{group_name}\" group and can only be updated through SSO.") % { group_name: @user.managing_group.name } : read_only_help_text
= form.text_field :email, required: true, class: 'input-lg', value: (@user.email unless @user.temp_oauth_email?), help: help_text.html_safe, readonly: readonly || email_change_disabled
-= form.select :public_email, options_for_select(@user.all_emails, selected: @user.public_email),
+= form.select :public_email, options_for_select(@user.all_public_emails, selected: @user.public_email),
{ help: s_("Profiles|This email will be displayed on your public profile"), include_blank: s_("Profiles|Do not show on profile") },
control_class: 'select2 input-lg', disabled: email_change_disabled
- commit_email_link_url = help_page_path('user/profile/index', anchor: 'commit-email', target: '_blank')
diff --git a/app/views/profiles/keys/_form.html.haml b/app/views/profiles/keys/_form.html.haml
index 63ef5eaa172..34e81285328 100644
--- a/app/views/profiles/keys/_form.html.haml
+++ b/app/views/profiles/keys/_form.html.haml
@@ -6,10 +6,15 @@
= f.label :key, s_('Profiles|Key'), class: 'label-bold'
%p= _("Paste your public SSH key, which is usually contained in the file '~/.ssh/id_ed25519.pub' or '~/.ssh/id_rsa.pub' and begins with 'ssh-ed25519' or 'ssh-rsa'. Don't use your private SSH key.")
= f.text_area :key, class: "form-control js-add-ssh-key-validation-input qa-key-public-key-field", rows: 8, required: true, placeholder: s_('Profiles|Typically starts with "ssh-ed25519 …" or "ssh-rsa …"')
- .form-group
- = f.label :title, _('Title'), class: 'label-bold'
- = f.text_field :title, class: "form-control input-lg qa-key-title-field", required: true, placeholder: s_('Profiles|e.g. My MacBook key')
- %p.form-text.text-muted= _('Name your individual key via a title')
+ .form-row
+ .col.form-group
+ = f.label :title, _('Title'), class: 'label-bold'
+ = f.text_field :title, class: "form-control input-lg qa-key-title-field", required: true, placeholder: s_('Profiles|e.g. My MacBook key')
+ %p.form-text.text-muted= s_('Profiles|Give your individual key a title')
+
+ .col.form-group
+ = f.label :expires_at, s_('Profiles|Expires at'), class: 'label-bold'
+ = f.date_field :expires_at, class: "form-control input-lg qa-key-expiry-field", min: Date.tomorrow
.js-add-ssh-key-validation-warning.hide
.bs-callout.bs-callout-warning{ role: 'alert', aria_live: 'assertive' }
diff --git a/app/views/profiles/keys/_key.html.haml b/app/views/profiles/keys/_key.html.haml
index 0e94e6563fd..b227041c9de 100644
--- a/app/views/profiles/keys/_key.html.haml
+++ b/app/views/profiles/keys/_key.html.haml
@@ -1,24 +1,31 @@
-%li.key-list-item
- .float-left.append-right-10
+%li.d-flex.align-items-center.key-list-item
+ .append-right-10
- if key.valid?
- = icon 'key', class: 'settings-list-icon d-none d-sm-block'
+ - if key.expired?
+ %span.d-inline-block.has-tooltip{ title: s_('Profiles|Your key has expired') }
+ = sprite_icon('warning-solid', size: 16, css_class: 'settings-list-icon d-none d-sm-block')
+ - else
+ = sprite_icon('key', size: 16, css_class: 'settings-list-icon d-none d-sm-block ')
- else
- = icon 'exclamation-triangle', class: 'settings-list-icon d-none d-sm-block has-tooltip',
- title: key.errors.full_messages.join(', ')
+ %span.d-inline-block.has-tooltip{ title: key.errors.full_messages.join(', ') }
+ = sprite_icon('warning-solid', size: 16, css_class: 'settings-list-icon d-none d-sm-block')
-
- .key-list-item-info
+ .key-list-item-info.w-100.float-none
= link_to path_to_key(key, is_admin), class: "title" do
= key.title
%span.text-truncate
= key.fingerprint
- .last-used-at
- last used:
- = key.last_used_at ? time_ago_with_tooltip(key.last_used_at) : 'n/a'
- .float-right
- %span.key-created-at
- = s_('Profiles|Created %{time_ago}'.html_safe) % { time_ago:time_ago_with_tooltip(key.created_at)}
- - if key.can_delete?
- = link_to path_to_key(key, is_admin), data: { confirm: _('Are you sure?')}, method: :delete, class: "btn btn-transparent prepend-left-10" do
- %span.sr-only= _('Remove')
- = icon('trash')
+
+ .key-list-item-dates.d-flex.align-items-start.justify-content-between
+ %span.last-used-at.append-right-10
+ = s_('Profiles|Last used:')
+ = key.last_used_at ? time_ago_with_tooltip(key.last_used_at) : _('Never')
+ %span.expires.append-right-10
+ = s_('Profiles|Expires:')
+ = key.expires_at ? key.expires_at.to_date : _('Never')
+ %span.key-created-at
+ = s_('Profiles|Created %{time_ago}'.html_safe) % { time_ago:time_ago_with_tooltip(key.created_at)}
+ - if key.can_delete?
+ = link_to path_to_key(key, is_admin), data: { confirm: _('Are you sure?')}, method: :delete, class: "btn btn-transparent prepend-left-10 align-baseline" do
+ %span.sr-only= _('Remove')
+ = sprite_icon('remove', size: 16)
diff --git a/app/views/profiles/keys/_key_details.html.haml b/app/views/profiles/keys/_key_details.html.haml
index 02f1a267044..88deb0f11cb 100644
--- a/app/views/profiles/keys/_key_details.html.haml
+++ b/app/views/profiles/keys/_key_details.html.haml
@@ -12,8 +12,11 @@
%span.light= _('Created on:')
%strong= @key.created_at.to_s(:medium)
%li
+ %span.light= _('Expires:')
+ %strong= @key.expires_at.try(:to_s, :medium) || _('Never')
+ %li
%span.light= _('Last used on:')
- %strong= @key.last_used_at.try(:to_s, :medium) || 'N/A'
+ %strong= @key.last_used_at.try(:to_s, :medium) || _('Never')
.col-md-8
= form_errors(@key, type: 'key') unless @key.valid?
diff --git a/app/views/profiles/keys/_key_table.html.haml b/app/views/profiles/keys/_key_table.html.haml
index 8b862522645..176d7a42002 100644
--- a/app/views/profiles/keys/_key_table.html.haml
+++ b/app/views/profiles/keys/_key_table.html.haml
@@ -1,7 +1,7 @@
- is_admin = local_assigns.fetch(:admin, false)
- if @keys.any?
- %ul.content-list{ data: { qa_selector: 'ssh_keys_list' } }
+ %ul.content-list.ssh-keys-list{ data: { qa_selector: 'ssh_keys_list' } }
= render partial: 'profiles/keys/key', collection: @keys, locals: { is_admin: is_admin }
- else
%p.settings-message.text-center
diff --git a/app/views/profiles/notifications/_email_settings.html.haml b/app/views/profiles/notifications/_email_settings.html.haml
index 34dcf8f5402..d2c62d3d006 100644
--- a/app/views/profiles/notifications/_email_settings.html.haml
+++ b/app/views/profiles/notifications/_email_settings.html.haml
@@ -1,6 +1,6 @@
- form = local_assigns.fetch(:form)
.form-group
= form.label :notification_email, class: "label-bold"
- = form.select :notification_email, @user.all_emails, { include_blank: false }, class: "select2", disabled: local_assigns.fetch(:email_change_disabled, nil)
+ = form.select :notification_email, @user.all_public_emails, { include_blank: false }, class: "select2", disabled: local_assigns.fetch(:email_change_disabled, nil)
.help-block
= local_assigns.fetch(:help_text, nil)
diff --git a/app/views/profiles/notifications/_group_settings.html.haml b/app/views/profiles/notifications/_group_settings.html.haml
index 33b0aa93d84..5be086948e7 100644
--- a/app/views/profiles/notifications/_group_settings.html.haml
+++ b/app/views/profiles/notifications/_group_settings.html.haml
@@ -13,4 +13,4 @@
.table-section.section-30
= form_for setting, url: profile_notifications_group_path(group), method: :put, html: { class: 'update-notifications' } do |f|
- = f.select :notification_email, @user.all_emails, { include_blank: 'Global notification email' }, class: 'select2 js-group-notification-email'
+ = f.select :notification_email, @user.all_public_emails, { include_blank: 'Global notification email' }, class: 'select2 js-group-notification-email'
diff --git a/app/views/profiles/show.html.haml b/app/views/profiles/show.html.haml
index 49533c18c8f..86e157ee042 100644
--- a/app/views/profiles/show.html.haml
+++ b/app/views/profiles/show.html.haml
@@ -90,7 +90,6 @@
.row
= render 'profiles/name', form: f, user: @user
= f.text_field :id, readonly: true, label: s_('Profiles|User ID'), wrapper: { class: 'col-md-3' }
- = f.select :role, ::User.roles.keys.map { |role| [role.titleize, role] }, { prompt: _('Select your role') }, required: true, class: 'input-md'
= render_if_exists 'profiles/email_settings', form: f
= f.text_field :skype, class: 'input-md', placeholder: s_("Profiles|username")
@@ -101,6 +100,7 @@
= f.text_field :location, readonly: true, help: s_("Profiles|Your location was automatically set based on your %{provider_label} account") % { provider_label: attribute_provider_label(:location) }
- else
= f.text_field :location, label: s_('Profiles|Location'), class: 'input-lg', placeholder: s_("Profiles|City, country")
+ = f.text_field :job_title, class: 'input-md'
= f.text_field :organization, label: s_('Profiles|Organization'), class: 'input-md', help: s_("Profiles|Who you represent or work for")
= f.text_area :bio, label: s_('Profiles|Bio'), rows: 4, maxlength: 250, help: s_("Profiles|Tell us about yourself in fewer than 250 characters")
%hr
diff --git a/app/views/projects/_activity.html.haml b/app/views/projects/_activity.html.haml
index 12da62f4c64..07faf5a66da 100644
--- a/app/views/projects/_activity.html.haml
+++ b/app/views/projects/_activity.html.haml
@@ -6,4 +6,5 @@
= icon('rss')
.content_list.project-activity{ :"data-href" => activity_project_path(@project) }
- = spinner
+ .loading
+ .spinner.spinner-md
diff --git a/app/views/projects/_commit_button.html.haml b/app/views/projects/_commit_button.html.haml
index b6689f4b57a..5f7ed46297b 100644
--- a/app/views/projects/_commit_button.html.haml
+++ b/app/views/projects/_commit_button.html.haml
@@ -1,5 +1,6 @@
.form-actions
- = button_tag 'Commit changes', class: 'btn commit-btn js-commit-button btn-success qa-commit-button'
+ = button_tag 'Commit changes', id: 'commit-changes', class: 'btn commit-btn js-commit-button btn-success qa-commit-button'
+
= link_to 'Cancel', cancel_path,
class: 'btn btn-cancel', data: {confirm: leave_edit_message}
diff --git a/app/views/projects/_wiki.html.haml b/app/views/projects/_wiki.html.haml
index 6103d86bf5a..57a5d3e2e83 100644
--- a/app/views/projects/_wiki.html.haml
+++ b/app/views/projects/_wiki.html.haml
@@ -1,6 +1,6 @@
- if @wiki_home.present?
%div{ class: container_class }
- .md.md-file.prepend-top-default.append-bottom-default
+ .md.prepend-top-default.append-bottom-default
= render_wiki_content(@wiki_home)
- else
- can_create_wiki = can?(current_user, :create_wiki, @project)
diff --git a/app/views/projects/blob/_editor.html.haml b/app/views/projects/blob/_editor.html.haml
index 961b873b571..51b0b2722d1 100644
--- a/app/views/projects/blob/_editor.html.haml
+++ b/app/views/projects/blob/_editor.html.haml
@@ -17,8 +17,14 @@
%span.pull-left.append-right-10
\/
= text_field_tag 'file_name', params[:file_name], placeholder: "File name",
- required: true, class: 'form-control new-file-name js-file-path-name-input'
+ required: true, class: 'form-control new-file-name js-file-path-name-input', value: params[:file_name] || (should_suggest_gitlab_ci_yml? ? '.gitlab-ci.yml' : '')
= render 'template_selectors'
+ - if should_suggest_gitlab_ci_yml?
+ .js-suggest-gitlab-ci-yml{ data: { toggle: 'popover',
+ target: '#gitlab-ci-yml-selector',
+ track_label: 'suggest_gitlab_ci_yml',
+ dismiss_key: "suggest_gitlab_ci_yml_#{@project.id}",
+ human_access: human_access } }
.file-buttons
- if is_markdown
diff --git a/app/views/projects/blob/_pipeline_tour_success.html.haml b/app/views/projects/blob/_pipeline_tour_success.html.haml
new file mode 100644
index 00000000000..7ecbc1974ec
--- /dev/null
+++ b/app/views/projects/blob/_pipeline_tour_success.html.haml
@@ -0,0 +1 @@
+.js-success-pipeline-modal{ 'data-commit-cookie': suggest_pipeline_commit_cookie_name, 'data-pipelines-path': project_pipelines_path(@project) }
diff --git a/app/views/projects/blob/_template_selectors.html.haml b/app/views/projects/blob/_template_selectors.html.haml
index 5ecfa135446..2be95bc5541 100644
--- a/app/views/projects/blob/_template_selectors.html.haml
+++ b/app/views/projects/blob/_template_selectors.html.haml
@@ -1,12 +1,13 @@
.template-selectors-menu.gl-pl-2
.template-selector-dropdowns-wrap
.template-type-selector.js-template-type-selector-wrap.hidden
- = dropdown_tag(_("Select a template type"), options: { toggle_class: 'js-template-type-selector qa-template-type-dropdown', dropdown_class: 'dropdown-menu-selectable'} )
+ - toggle_text = should_suggest_gitlab_ci_yml? ? '.gitlab-ci.yml' : 'Select a template type'
+ = dropdown_tag(_(toggle_text), options: { toggle_class: 'js-template-type-selector qa-template-type-dropdown', dropdown_class: 'dropdown-menu-selectable' })
.license-selector.js-license-selector-wrap.js-template-selector-wrap.hidden
= dropdown_tag(_("Apply a template"), options: { toggle_class: 'js-license-selector qa-license-dropdown', dropdown_class: 'dropdown-menu-selectable', filter: true, placeholder: "Filter", data: { data: licenses_for_select(@project), project: @project.name, fullname: @project.namespace.human_name } } )
.gitignore-selector.js-gitignore-selector-wrap.js-template-selector-wrap.hidden
= dropdown_tag(_("Apply a template"), options: { toggle_class: 'js-gitignore-selector qa-gitignore-dropdown', dropdown_class: 'dropdown-menu-selectable', filter: true, placeholder: "Filter", data: { data: gitignore_names(@project) } } )
- .gitlab-ci-yml-selector.js-gitlab-ci-yml-selector-wrap.js-template-selector-wrap.hidden
+ #gitlab-ci-yml-selector.gitlab-ci-yml-selector.js-gitlab-ci-yml-selector-wrap.js-template-selector-wrap.hidden
= dropdown_tag(_("Apply a template"), options: { toggle_class: 'js-gitlab-ci-yml-selector qa-gitlab-ci-yml-dropdown', dropdown_class: 'dropdown-menu-selectable', filter: true, placeholder: "Filter", data: { data: gitlab_ci_ymls(@project) } } )
.dockerfile-selector.js-dockerfile-selector-wrap.js-template-selector-wrap.hidden
= dropdown_tag(_("Apply a template"), options: { toggle_class: 'js-dockerfile-selector qa-dockerfile-dropdown', dropdown_class: 'dropdown-menu-selectable', filter: true, placeholder: "Filter", data: { data: dockerfile_names(@project) } } )
diff --git a/app/views/projects/blob/new.html.haml b/app/views/projects/blob/new.html.haml
index c5e3923f1e8..1afbe1fe24e 100644
--- a/app/views/projects/blob/new.html.haml
+++ b/app/views/projects/blob/new.html.haml
@@ -13,3 +13,9 @@
= hidden_field_tag 'content', '', id: 'file-content'
= render 'projects/commit_button', ref: @ref,
cancel_path: project_tree_path(@project, @id)
+ - if should_suggest_gitlab_ci_yml?
+ .js-suggest-gitlab-ci-yml-commit-changes{ data: { toggle: 'popover',
+ target: '#commit-changes',
+ track_label: 'suggest_commit_first_project_gitlab_ci_yml',
+ dismiss_key: "suggest_commit_first_project_gitlab_ci_yml_#{@project.id}",
+ human_access: human_access } }
diff --git a/app/views/projects/blob/preview.html.haml b/app/views/projects/blob/preview.html.haml
index 46e76e4d175..41a0045be89 100644
--- a/app/views/projects/blob/preview.html.haml
+++ b/app/views/projects/blob/preview.html.haml
@@ -1,5 +1,5 @@
- if markup?(@blob.name)
- .file-content.md.md-file
+ .file-content.md
= markup(@blob.name, @content)
- else
.diff-file
diff --git a/app/views/projects/blob/show.html.haml b/app/views/projects/blob/show.html.haml
index 7c73bbc7479..c66300aa947 100644
--- a/app/views/projects/blob/show.html.haml
+++ b/app/views/projects/blob/show.html.haml
@@ -14,3 +14,5 @@
- title = "Replace #{@blob.name}"
= render 'projects/blob/upload', title: title, placeholder: title, button_title: 'Replace file', form_path: project_update_blob_path(@project, @id), method: :put
+
+= render partial: 'pipeline_tour_success' if show_suggest_pipeline_creation_celebration?
diff --git a/app/views/projects/blob/viewers/_markup.html.haml b/app/views/projects/blob/viewers/_markup.html.haml
index c71df29354b..8134adcbc32 100644
--- a/app/views/projects/blob/viewers/_markup.html.haml
+++ b/app/views/projects/blob/viewers/_markup.html.haml
@@ -1,4 +1,4 @@
- blob = viewer.blob
- context = blob.respond_to?(:rendered_markup) ? { rendered: blob.rendered_markup } : {}
-.file-content.md.md-file
+.file-content.md
= markup(blob.name, blob.data, context)
diff --git a/app/views/projects/ci/builds/_build.html.haml b/app/views/projects/ci/builds/_build.html.haml
index 18bdbd42d0d..4c20ac84b24 100644
--- a/app/views/projects/ci/builds/_build.html.haml
+++ b/app/views/projects/ci/builds/_build.html.haml
@@ -45,7 +45,7 @@
= tag
- if job.try(:trigger_request)
%span.badge.badge-info= _('triggered')
- - if job.try(:allow_failure)
+ - if job.try(:allow_failure) && !job.success?
%span.badge.badge-warning= _('allowed to fail')
- if job.schedulable?
%span.badge.badge-info= s_('DelayedJobs|delayed')
diff --git a/app/views/projects/commit/_commit_box.html.haml b/app/views/projects/commit/_commit_box.html.haml
index d07407a6d13..88d1ec54cb0 100644
--- a/app/views/projects/commit/_commit_box.html.haml
+++ b/app/views/projects/commit/_commit_box.html.haml
@@ -66,13 +66,13 @@
- @commit.parents.each do |parent|
= link_to parent.short_id, project_commit_path(@project, parent), class: "commit-sha"
.commit-info.branches
- %i.fa.fa-spinner.fa-spin
+ .spinner.vertical-align-middle
.well-segment.merge-request-info
.icon-container
= custom_icon('mr_bold')
%span.commit-info.merge-requests{ 'data-project-commit-path' => merge_requests_project_commit_path(@project, @commit.id, format: :json) }
- = icon('spinner spin')
+ .spinner.vertical-align-middle
- last_pipeline = @commit.last_pipeline
- if can?(current_user, :read_pipeline, last_pipeline)
diff --git a/app/views/projects/commit/x509/_certificate_details.html.haml b/app/views/projects/commit/x509/_certificate_details.html.haml
index 51667010d6f..cea216d0d9d 100644
--- a/app/views/projects/commit/x509/_certificate_details.html.haml
+++ b/app/views/projects/commit/x509/_certificate_details.html.haml
@@ -1,5 +1,7 @@
.gpg-popover-certificate-details
%strong= _('Certificate Subject')
+ - if signature.x509_certificate.revoked?
+ %strong.cred= _('(revoked)')
%ul
- x509_subject(signature.x509_certificate.subject, ["CN", "O"]).map do |key, value|
%li= key + "=" + value
diff --git a/app/views/projects/deploy_tokens/_form.html.haml b/app/views/projects/deploy_tokens/_form.html.haml
deleted file mode 100644
index f846dbd3763..00000000000
--- a/app/views/projects/deploy_tokens/_form.html.haml
+++ /dev/null
@@ -1,34 +0,0 @@
-%p.profile-settings-content
- = s_("DeployTokens|Pick a name for the application, and we'll give you a unique deploy token.")
-
-= form_for token, url: create_deploy_token_namespace_project_settings_repository_path(project.namespace, project, anchor: 'js-deploy-tokens'), method: :post do |f|
- = form_errors(token)
-
- .form-group
- = f.label :name, class: 'label-bold'
- = f.text_field :name, class: 'form-control qa-deploy-token-name', required: true
-
- .form-group
- = f.label :expires_at, class: 'label-bold'
- = f.text_field :expires_at, class: 'datepicker form-control qa-deploy-token-expires-at', value: f.object.expires_at
-
- .form-group
- = f.label :username, class: 'label-bold'
- = f.text_field :username, class: 'form-control qa-deploy-token-username'
- .text-secondary= s_('DeployTokens|Default format is "gitlab+deploy-token-{n}". Enter custom username if you want to change it.')
-
- .form-group
- = f.label :scopes, class: 'label-bold'
- %fieldset.form-group.form-check
- = f.check_box :read_repository, class: 'form-check-input qa-deploy-token-read-repository'
- = label_tag ("deploy_token_read_repository"), 'read_repository', class: 'label-bold form-check-label'
- .text-secondary= s_('DeployTokens|Allows read-only access to the repository')
-
- - if container_registry_enabled?(project)
- %fieldset.form-group.form-check
- = f.check_box :read_registry, class: 'form-check-input qa-deploy-token-read-registry'
- = label_tag ("deploy_token_read_registry"), 'read_registry', class: 'label-bold form-check-label'
- .text-secondary= s_('DeployTokens|Allows read-only access to the registry images')
-
- .prepend-top-default
- = f.submit s_('DeployTokens|Create deploy token'), class: 'btn btn-success qa-create-deploy-token'
diff --git a/app/views/projects/deploy_tokens/_index.html.haml b/app/views/projects/deploy_tokens/_index.html.haml
deleted file mode 100644
index 4619522cfaf..00000000000
--- a/app/views/projects/deploy_tokens/_index.html.haml
+++ /dev/null
@@ -1,17 +0,0 @@
-- expanded = expand_deploy_tokens_section?(@new_deploy_token)
-
-%section.qa-deploy-tokens-settings.settings.no-animate#js-deploy-tokens{ class: ('expanded' if expanded) }
- .settings-header
- %h4= s_('DeployTokens|Deploy Tokens')
- %button.btn.js-settings-toggle.qa-expand-deploy-keys{ type: 'button' }
- = expanded ? 'Collapse' : 'Expand'
- %p
- = s_('DeployTokens|Deploy tokens allow read-only access to your repository and registry images.')
- .settings-content
- - if @new_deploy_token.persisted?
- = render 'projects/deploy_tokens/new_deploy_token', deploy_token: @new_deploy_token
- %h5.prepend-top-0
- = s_('DeployTokens|Add a deploy token')
- = render 'projects/deploy_tokens/form', project: @project, token: @new_deploy_token, presenter: @deploy_tokens
- %hr
- = render 'projects/deploy_tokens/table', project: @project, active_tokens: @deploy_tokens
diff --git a/app/views/projects/deploy_tokens/_revoke_modal.html.haml b/app/views/projects/deploy_tokens/_revoke_modal.html.haml
deleted file mode 100644
index 35eacae2c2e..00000000000
--- a/app/views/projects/deploy_tokens/_revoke_modal.html.haml
+++ /dev/null
@@ -1,17 +0,0 @@
-.modal{ id: "revoke-modal-#{token.id}", tabindex: -1 }
- .modal-dialog
- .modal-content
- .modal-header
- %h4.modal-title
- = s_('DeployTokens|Revoke')
- %b #{token.name}?
- %button.close{ type: "button", "data-dismiss": "modal", "aria-label" => _('Close') }
- %span{ "aria-hidden": true } &times;
- .modal-body
- %p
- = s_('DeployTokens|You are about to revoke')
- %b #{token.name}.
- = s_('DeployTokens|This action cannot be undone.')
- .modal-footer
- %a{ href: '#', data: { dismiss: 'modal' }, class: 'btn btn-default' }= _('Cancel')
- = link_to s_('DeployTokens|Revoke %{name}') % { name: token.name }, revoke_project_deploy_token_path(project, token), method: :put, class: 'btn btn-danger'
diff --git a/app/views/projects/deploy_tokens/_table.html.haml b/app/views/projects/deploy_tokens/_table.html.haml
deleted file mode 100644
index 91466a6736b..00000000000
--- a/app/views/projects/deploy_tokens/_table.html.haml
+++ /dev/null
@@ -1,31 +0,0 @@
-%h5= s_("DeployTokens|Active Deploy Tokens (%{active_tokens})") % { active_tokens: active_tokens.length }
-
-- if active_tokens.present?
- .table-responsive.deploy-tokens
- %table.table
- %thead
- %tr
- %th= s_('DeployTokens|Name')
- %th= s_('DeployTokens|Username')
- %th= s_('DeployTokens|Created')
- %th= s_('DeployTokens|Expires')
- %th= s_('DeployTokens|Scopes')
- %th
- %tbody
- - active_tokens.each do |token|
- %tr
- %td= token.name
- %td= token.username
- %td= token.created_at.to_date.to_s(:medium)
- %td
- - if token.expires?
- %span{ class: ('text-warning' if token.expires_soon?) }
- In #{distance_of_time_in_words_to_now(token.expires_at)}
- - else
- %span.token-never-expires-label Never
- %td= token.scopes.present? ? token.scopes.join(", ") : "<no scopes selected>"
- %td= link_to s_('DeployTokens|Revoke'), "#", class: "btn btn-danger float-right", data: { toggle: "modal", target: "#revoke-modal-#{token.id}"}
- = render 'projects/deploy_tokens/revoke_modal', token: token, project: project
-- else
- .settings-message.text-center
- = s_('DeployTokens|This project has no active Deploy Tokens.')
diff --git a/app/views/projects/edit.html.haml b/app/views/projects/edit.html.haml
index 1c18487f688..3c6fb5b19a4 100644
--- a/app/views/projects/edit.html.haml
+++ b/app/views/projects/edit.html.haml
@@ -124,7 +124,7 @@
.save-project-loader.hide
.center
%h2
- %i.fa.fa-spinner.fa-spin
+ .spinner.spinner-md.align-text-bottom
= _('Saving project.')
%p= _('Please wait a moment, this page will automatically refresh when ready.')
diff --git a/app/views/projects/find_file/show.html.haml b/app/views/projects/find_file/show.html.haml
index caaf164a763..971107675ab 100644
--- a/app/views/projects/find_file/show.html.haml
+++ b/app/views/projects/find_file/show.html.haml
@@ -23,4 +23,5 @@
= _('There are no matching files')
%p.text-secondary
= _('Try using a different search term to find the file you are looking for.')
- = spinner nil, true
+ .text-center.prepend-top-default.loading
+ .spinner.spinner-md
diff --git a/app/views/projects/forks/new.html.haml b/app/views/projects/forks/new.html.haml
index bf03353a565..8a5b08a19c8 100644
--- a/app/views/projects/forks/new.html.haml
+++ b/app/views/projects/forks/new.html.haml
@@ -19,9 +19,3 @@
%p.prepend-top-default
= _("You must have permission to create a project in a namespace before forking.")
- .save-project-loader.hide.js-fork-content
- %h2.text-center
- = icon('spinner spin')
- = _("Forking repository")
- %p.text-center
- = _("Please wait a moment, this page will automatically refresh when ready.")
diff --git a/app/views/projects/graphs/charts.html.haml b/app/views/projects/graphs/charts.html.haml
index b38449b3ab9..cb76e89f736 100644
--- a/app/views/projects/graphs/charts.html.haml
+++ b/app/views/projects/graphs/charts.html.haml
@@ -20,6 +20,7 @@
- end_time = capture do
#{@commits_graph.end_date.strftime('%b %d')}
= (_("Commit statistics for %{ref} %{start_time} - %{end_time}") % { ref: "<strong>#{h @ref}</strong>", start_time: start_time, end_time: end_time }).html_safe
+ = _("Excluding merge commits. Limited to %{limit} commits.") % {limit: number_with_delimiter(@commits_limit, delimiter: ',')}
.col-md-6
.tree-ref-container
diff --git a/app/views/projects/hook_logs/show.html.haml b/app/views/projects/hook_logs/show.html.haml
index a8796cd7b1c..873fb4d47b7 100644
--- a/app/views/projects/hook_logs/show.html.haml
+++ b/app/views/projects/hook_logs/show.html.haml
@@ -1,3 +1,7 @@
+- @content_class = 'limit-container-width' unless fluid_layout
+- add_to_breadcrumbs _('Webhook Settings'), namespace_project_hooks_path
+- page_title _('Webhook Logs')
+
.row.prepend-top-default.append-bottom-default
.col-lg-3
%h4.prepend-top-0
diff --git a/app/views/projects/hooks/_index.html.haml b/app/views/projects/hooks/_index.html.haml
deleted file mode 100644
index 70f2fa0e758..00000000000
--- a/app/views/projects/hooks/_index.html.haml
+++ /dev/null
@@ -1,10 +0,0 @@
-.row.prepend-top-default
- .col-lg-4
- = render 'shared/web_hooks/title_and_docs', hook: @hook
-
- .col-lg-8.append-bottom-default
- = form_for @hook, as: :hook, url: polymorphic_path([@project.namespace.becomes(Namespace), @project, :hooks]) do |f|
- = render partial: 'shared/web_hooks/form', locals: { form: f, hook: @hook }
- = f.submit 'Add webhook', class: 'btn btn-success'
-
- = render 'shared/web_hooks/index', hooks: @hooks, hook_class: @hook.class
diff --git a/app/views/projects/hooks/edit.html.haml b/app/views/projects/hooks/edit.html.haml
index c1fdf619eb5..f7eae802dac 100644
--- a/app/views/projects/hooks/edit.html.haml
+++ b/app/views/projects/hooks/edit.html.haml
@@ -1,5 +1,6 @@
-- add_to_breadcrumbs _('ProjectService|Integrations'), namespace_project_settings_integrations_path
-- page_title _('Edit Project Hook')
+- @content_class = 'limit-container-width' unless fluid_layout
+- add_to_breadcrumbs _('Webhook Settings'), namespace_project_hooks_path
+- page_title _('Webhook')
.row.prepend-top-default
.col-lg-3
diff --git a/app/views/projects/hooks/index.html.haml b/app/views/projects/hooks/index.html.haml
new file mode 100644
index 00000000000..169a5cc9d6b
--- /dev/null
+++ b/app/views/projects/hooks/index.html.haml
@@ -0,0 +1,14 @@
+- @content_class = 'limit-container-width' unless fluid_layout
+- breadcrumb_title _('Webhook Settings')
+- page_title _('Webhooks')
+
+.row.prepend-top-default
+ .col-lg-4
+ = render 'shared/web_hooks/title_and_docs', hook: @hook
+
+ .col-lg-8.append-bottom-default
+ = form_for @hook, as: :hook, url: polymorphic_path([@project.namespace.becomes(Namespace), @project, :hooks]) do |f|
+ = render partial: 'shared/web_hooks/form', locals: { form: f, hook: @hook }
+ = f.submit 'Add webhook', class: 'btn btn-success'
+
+ = render 'shared/web_hooks/index', hooks: @hooks, hook_class: @hook.class
diff --git a/app/views/projects/import/jira/show.html.haml b/app/views/projects/import/jira/show.html.haml
new file mode 100644
index 00000000000..f295a241113
--- /dev/null
+++ b/app/views/projects/import/jira/show.html.haml
@@ -0,0 +1,24 @@
+- title = _('Jira Issue Import')
+- page_title title
+- breadcrumb_title title
+- header_title _("Projects"), root_path
+
+= render 'import/shared/errors'
+
+- if @project.import_state&.in_progress?
+ %h3.page-title.d-flex.align-items-center
+ = sprite_icon('issues', size: 16, css_class: 'mr-1')
+ = _('Import in progress')
+- else
+ %h3.page-title.d-flex.align-items-center
+ = sprite_icon('issues', size: 16, css_class: 'mr-1')
+ = _('Import issues from Jira')
+
+ = form_tag import_project_import_jira_path(@project), method: :post do
+ .form-group.row
+ = label_tag :jira_project_key, _('From project'), class: 'col-form-label col-md-2'
+ .col-md-4
+ = select_tag :jira_project_key, options_for_select(@jira_projects, ''), { class: 'select2' }
+ .form-actions
+ = submit_tag _('Import issues'), class: 'btn btn-success'
+ = link_to _('Cancel'), project_issues_path(@project), class: 'btn btn-cancel'
diff --git a/app/views/projects/issues/_discussion.html.haml b/app/views/projects/issues/_discussion.html.haml
index 42b6aaa2634..9c129fa9ecc 100644
--- a/app/views/projects/issues/_discussion.html.haml
+++ b/app/views/projects/issues/_discussion.html.haml
@@ -7,7 +7,7 @@
%section.issuable-discussion.js-vue-notes-event
#js-vue-notes{ data: { notes_data: notes_data(@issue).to_json,
- noteable_data: serialize_issuable(@issue),
+ noteable_data: serialize_issuable(@issue, with_blocking_issues: Feature.enabled?(:prevent_closing_blocked_issues, @issue.project)),
noteable_type: 'Issue',
target_type: 'issue',
current_user_data: UserSerializer.new.represent(current_user, {only_path: true}, CurrentUserEntity).to_json } }
diff --git a/app/views/projects/issues/_issue.html.haml b/app/views/projects/issues/_issue.html.haml
index c8ab47888d0..a6c6b77c9dd 100644
--- a/app/views/projects/issues/_issue.html.haml
+++ b/app/views/projects/issues/_issue.html.haml
@@ -38,7 +38,7 @@
- if issue.labels.any?
&nbsp;
- presented_labels_sorted_by_title(issue.labels, issue.project).each do |label|
- = link_to_label(label, css_class: 'label-link')
+ = link_to_label(label, small: true)
= render_if_exists "projects/issues/issue_weight", issue: issue
diff --git a/app/views/projects/issues/_new_branch.html.haml b/app/views/projects/issues/_new_branch.html.haml
index eb76326602f..f3a1edd2571 100644
--- a/app/views/projects/issues/_new_branch.html.haml
+++ b/app/views/projects/issues/_new_branch.html.haml
@@ -13,7 +13,7 @@
.create-mr-dropdown-wrap.d-inline-block.full-width-mobile.js-create-mr{ data: { project_path: @project.full_path, project_id: @project.id, can_create_path: can_create_path, create_mr_path: create_mr_path, create_branch_path: create_branch_path, refs_path: refs_path, is_confidential: can_create_confidential_merge_request?.to_s } }
.btn-group.btn-group-sm.unavailable
%button.btn.btn-grouped{ type: 'button', disabled: 'disabled' }
- = icon('spinner', class: 'fa-spin')
+ .spinner.align-text-bottom.mr-1.hide
%span.text
Checking branch availability…
diff --git a/app/views/projects/issues/import_csv/_button.html.haml b/app/views/projects/issues/import_csv/_button.html.haml
index fe89d2fb748..78c561e81ef 100644
--- a/app/views/projects/issues/import_csv/_button.html.haml
+++ b/app/views/projects/issues/import_csv/_button.html.haml
@@ -7,3 +7,5 @@
- else
= _('Import CSV')
+- if Feature.enabled?(:jira_issue_import, @project)
+ = link_to _("Import Jira issues"), project_import_jira_path(@project), class: "btn btn-default"
diff --git a/app/views/projects/logs/empty_logs.html.haml b/app/views/projects/logs/empty_logs.html.haml
new file mode 100644
index 00000000000..52598e0be8d
--- /dev/null
+++ b/app/views/projects/logs/empty_logs.html.haml
@@ -0,0 +1,14 @@
+- page_title _('Logs')
+
+.row.empty-state
+ .col-sm-12
+ .svg-content
+ = image_tag 'illustrations/operations_log_pods_empty.svg'
+ .col-12
+ .text-content
+ %h4.text-center
+ = s_('Environments|No deployed environments')
+ %p.state-description.text-center
+ = s_('Logs|To see the logs, deploy your code to an environment.')
+ .text-center
+ = link_to s_('Environments|Learn about environments'), help_page_path('ci/environments'), class: 'btn btn-success'
diff --git a/app/views/projects/logs/index.html.haml b/app/views/projects/logs/index.html.haml
new file mode 100644
index 00000000000..1f74eb52fd9
--- /dev/null
+++ b/app/views/projects/logs/index.html.haml
@@ -0,0 +1 @@
+#environment-logs{ data: environment_logs_data(@project, @environment) }
diff --git a/app/views/projects/merge_requests/_merge_request.html.haml b/app/views/projects/merge_requests/_merge_request.html.haml
index 36f19ee6175..744dca1c462 100644
--- a/app/views/projects/merge_requests/_merge_request.html.haml
+++ b/app/views/projects/merge_requests/_merge_request.html.haml
@@ -35,7 +35,7 @@
- if merge_request.labels.any?
&nbsp;
- presented_labels_sorted_by_title(merge_request.labels, merge_request.project).each do |label|
- = link_to_label(label, type: :merge_request, css_class: 'label-link')
+ = link_to_label(label, type: :merge_request, small: true)
.issuable-meta
%ul.controls.d-flex.align-items-end
diff --git a/app/views/projects/merge_requests/_widget.html.haml b/app/views/projects/merge_requests/_widget.html.haml
index 3fe6f0a6640..1853d40c2e4 100644
--- a/app/views/projects/merge_requests/_widget.html.haml
+++ b/app/views/projects/merge_requests/_widget.html.haml
@@ -10,5 +10,6 @@
window.gl.mrWidgetData.troubleshooting_docs_path = '#{help_page_path('user/project/merge_requests/reviewing_and_managing_merge_requests.md', anchor: 'troubleshooting')}';
window.gl.mrWidgetData.security_approvals_help_page_path = '#{help_page_path('user/application_security/index.html', anchor: 'security-approvals-in-merge-requests-ultimate')}';
window.gl.mrWidgetData.eligible_approvers_docs_path = '#{help_page_path('user/project/merge_requests/merge_request_approvals', anchor: 'eligible-approvers')}';
+ window.gl.mrWidgetData.pipelines_empty_svg_path = '#{image_path('illustrations/pipelines_empty.svg')}';
#js-vue-mr-widget.mr-widget
diff --git a/app/views/projects/merge_requests/show.html.haml b/app/views/projects/merge_requests/show.html.haml
index d65c874f245..4304a18558e 100644
--- a/app/views/projects/merge_requests/show.html.haml
+++ b/app/views/projects/merge_requests/show.html.haml
@@ -78,6 +78,7 @@
endpoint: diffs_project_merge_request_path(@project, @merge_request, 'json', request.query_parameters),
endpoint_metadata: diffs_metadata_project_json_merge_request_path(@project, @merge_request, 'json', request.query_parameters),
endpoint_batch: diffs_batch_project_json_merge_request_path(@project, @merge_request, 'json', request.query_parameters),
+ endpoint_coverage: @coverage_path,
help_page_path: suggest_changes_help_path,
current_user_data: @current_user_data,
project_path: project_path(@merge_request.project),
diff --git a/app/views/projects/milestones/show.html.haml b/app/views/projects/milestones/show.html.haml
index 5f244d3a6c3..b83204c27e3 100644
--- a/app/views/projects/milestones/show.html.haml
+++ b/app/views/projects/milestones/show.html.haml
@@ -8,10 +8,10 @@
= render_if_exists 'shared/milestones/burndown', milestone: @milestone, project: @project
-- if can?(current_user, :read_issue, @project) && @milestone.total_issues_count(current_user).zero?
+- if can?(current_user, :read_issue, @project) && @milestone.total_issues_count.zero?
.alert.alert-success.prepend-top-default
%span= _('Assign some issues to this milestone.')
-- elsif @milestone.complete?(current_user) && @milestone.active?
+- elsif @milestone.complete? && @milestone.active?
.alert.alert-success.prepend-top-default
%span= _('All issues for this milestone are closed. You may close this milestone now.')
diff --git a/app/views/projects/new.html.haml b/app/views/projects/new.html.haml
index fabe636b05c..3ff4ab354b9 100644
--- a/app/views/projects/new.html.haml
+++ b/app/views/projects/new.html.haml
@@ -75,7 +75,7 @@
.save-project-loader.d-none
.center
%h2
- %i.fa.fa-spinner.fa-spin
+ .spinner.spinner-md.align-text-bottom
= s_('ProjectsNew|Creating project & repository.')
%p
= s_('ProjectsNew|Please wait a moment, this page will automatically refresh when ready.')
diff --git a/app/views/projects/notes/_actions.html.haml b/app/views/projects/notes/_actions.html.haml
index 407de590efb..7de7dd3b98b 100644
--- a/app/views/projects/notes/_actions.html.haml
+++ b/app/views/projects/notes/_actions.html.haml
@@ -29,8 +29,7 @@
":title" => "buttonText",
":ref" => "'button'" }
- = icon('spin spinner', 'v-if' => 'loading', class: 'loading', 'aria-hidden' => 'true', 'aria-label' => 'Loading')
- %div{ 'v-else' => '' }
+ %div
%template{ 'v-if' => 'isResolved' }
= render 'shared/icons/icon_status_success_solid.svg'
%template{ 'v-else' => '' }
@@ -40,7 +39,6 @@
- if note.emoji_awardable?
.note-actions-item
= button_tag title: 'Add reaction', class: "note-action-button note-emoji-button js-add-award js-note-emoji has-tooltip btn btn-transparent", data: { position: 'right', container: 'body' } do
- = icon('spinner spin')
%span{ class: 'link-highlight award-control-icon-neutral' }= sprite_icon('slight-smile')
%span{ class: 'link-highlight award-control-icon-positive' }= sprite_icon('smiley')
%span{ class: 'link-highlight award-control-icon-super-positive' }= sprite_icon('smile')
diff --git a/app/views/projects/pipeline_schedules/index.html.haml b/app/views/projects/pipeline_schedules/index.html.haml
index 4a0be9e67cb..2b2b79d886b 100644
--- a/app/views/projects/pipeline_schedules/index.html.haml
+++ b/app/views/projects/pipeline_schedules/index.html.haml
@@ -2,7 +2,7 @@
- page_title _("Pipeline Schedules")
-#pipeline-schedules-callout{ data: { docs_url: help_page_path('user/project/pipelines/schedules') } }
+#pipeline-schedules-callout{ data: { docs_url: help_page_path('ci/pipelines/schedules') } }
.top-area
- schedule_path_proc = ->(scope) { pipeline_schedules_path(@project, scope: scope) }
= render "tabs", schedule_path_proc: schedule_path_proc, all_schedules: @all_schedules, scope: @scope
diff --git a/app/views/projects/pipelines/_with_tabs.html.haml b/app/views/projects/pipelines/_with_tabs.html.haml
index cdd75d43a78..37ca020cfb6 100644
--- a/app/views/projects/pipelines/_with_tabs.html.haml
+++ b/app/views/projects/pipelines/_with_tabs.html.haml
@@ -3,7 +3,7 @@
.tabs-holder
%ul.pipelines-tabs.nav-links.no-top.no-bottom.mobile-separator.nav.nav-tabs
%li.js-pipeline-tab-link
- = link_to @pipeline_path, data: { target: '#js-tab-pipeline', action: 'pipelines', toggle: 'tab' }, class: 'pipeline-tab' do
+ = link_to project_pipeline_path(@project, @pipeline), data: { target: '#js-tab-pipeline', action: 'pipelines', toggle: 'tab' }, class: 'pipeline-tab' do
= _('Pipeline')
%li.js-builds-tab-link
= link_to builds_project_pipeline_path(@project, @pipeline), data: { target: '#js-tab-builds', action: 'builds', toggle: 'tab' }, class: 'builds-tab' do
diff --git a/app/views/projects/registry/repositories/index.html.haml b/app/views/projects/registry/repositories/index.html.haml
index 6ff7c27b1bc..810830fd0c4 100644
--- a/app/views/projects/registry/repositories/index.html.haml
+++ b/app/views/projects/registry/repositories/index.html.haml
@@ -1,11 +1,13 @@
- page_title _("Container Registry")
+- @content_class = "limit-container-width" unless fluid_layout
%section
.row.registry-placeholder.prepend-bottom-10
.col-12
- - if Feature.enabled?(:vue_container_registry_explorer)
+ - if Feature.enabled?(:vue_container_registry_explorer, @project.group)
#js-container-registry{ data: { endpoint: project_container_registry_index_path(@project),
- project_path: @project.full_path,
+ settings_path: project_settings_ci_cd_path(@project),
+ expiration_policy: @project.container_expiration_policy.to_json,
"help_page_path" => help_page_path('user/packages/container_registry/index'),
"two_factor_auth_help_link" => help_page_path('user/profile/account/two_factor_authentication'),
"personal_access_tokens_help_link" => help_page_path('user/profile/personal_access_tokens'),
@@ -13,6 +15,7 @@
"containers_error_image" => image_path('illustrations/docker-error-state.svg'),
"repository_url" => escape_once(@project.container_registry_url),
"registry_host_url_with_port" => escape_once(registry_config.host_port),
+ "expiration_policy_help_page_path" => help_page_path('user/packages/container_registry/index', anchor: 'expiration-policy'),
character_error: @character_error.to_s } }
- else
#js-vue-registry-images{ data: { endpoint: project_container_registry_index_path(@project, format: :json),
diff --git a/app/views/projects/runners/_runner.html.haml b/app/views/projects/runners/_runner.html.haml
index 548977d6a80..92680a70da2 100644
--- a/app/views/projects/runners/_runner.html.haml
+++ b/app/views/projects/runners/_runner.html.haml
@@ -3,7 +3,7 @@
= runner_status_icon(runner)
- if @project_runners.include?(runner)
- = link_to runner.short_sha, project_runner_path(@project, runner), class: 'commit-sha'
+ = link_to _("%{token}...") % { token: runner.short_sha }, project_runner_path(@project, runner), class: 'commit-sha has-tooltip', title: _("Partial token for reference only")
- if runner.locked?
= icon('lock', class: 'has-tooltip', title: _('Locked to current projects'))
diff --git a/app/views/projects/services/_form.html.haml b/app/views/projects/services/_form.html.haml
index 582f3d6fce4..a0d9d29a7ae 100644
--- a/app/views/projects/services/_form.html.haml
+++ b/app/views/projects/services/_form.html.haml
@@ -11,7 +11,7 @@
%p= @service.detailed_description
.col-lg-9
= form_for(@service, as: :service, url: project_service_path(@project, @service.to_param), method: :put, html: { class: 'gl-show-field-errors integration-settings-form js-integration-settings-form', data: { 'can-test' => @service.can_test?, 'test-url' => test_project_service_path(@project, @service) } }) do |form|
- = render 'shared/service_settings', form: form, subject: @service
+ = render 'shared/service_settings', form: form, service: @service
- if @service.editable?
.footer-block.row-content-block
= service_save_button(@service)
diff --git a/app/views/projects/services/_index.html.haml b/app/views/projects/services/_index.html.haml
index 3f33d72d3ec..a4041d09415 100644
--- a/app/views/projects/services/_index.html.haml
+++ b/app/views/projects/services/_index.html.haml
@@ -1,8 +1,8 @@
-.row.prepend-top-default.append-bottom-default
+.row.prepend-top-default
.col-lg-4
%h4.prepend-top-0
- = s_("ProjectService|Project services")
- %p= s_("ProjectService|Project services allow you to integrate GitLab with other applications")
+ = _('Integrations')
+ %p= _('Integrations allow you to integrate GitLab with other applications')
.col-lg-8
%table.table
%colgroup
diff --git a/app/views/projects/services/edit.html.haml b/app/views/projects/services/edit.html.haml
index e3e8a312431..ef799d2c046 100644
--- a/app/views/projects/services/edit.html.haml
+++ b/app/views/projects/services/edit.html.haml
@@ -1,7 +1,6 @@
- breadcrumb_title @service.title
+- add_to_breadcrumbs _('Integration Settings'), project_settings_integrations_path(@project)
- page_title @service.title, s_("ProjectService|Services")
-- add_to_breadcrumbs(s_("ProjectService|Settings"), edit_project_path(@project))
-- add_to_breadcrumbs(s_("ProjectService|Integrations"), project_settings_integrations_path(@project))
= render 'deprecated_message' if @service.deprecation_message
diff --git a/app/views/projects/services/slack/_help.haml b/app/views/projects/services/slack/_help.haml
new file mode 100644
index 00000000000..d7ea1b270f5
--- /dev/null
+++ b/app/views/projects/services/slack/_help.haml
@@ -0,0 +1,16 @@
+- webhooks_link_url = 'https://slack.com/apps/A0F7XDUAZ-incoming-webhooks'
+- webhooks_link_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: webhooks_link_url }
+
+.info-well
+ .well-segment
+ %p= s_('SlackIntegration|This service sends notifications about projects\' events to Slack channels. To set up this service:')
+ %ol
+ %li
+ = s_('SlackIntegration|%{webhooks_link_start}Add an incoming webhook%{webhooks_link_end} in your Slack team. The default channel can be overridden for each event.').html_safe % { webhooks_link_start: webhooks_link_start, webhooks_link_end: '</a>'.html_safe }
+ %li
+ = s_('SlackIntegration|Paste the <strong>Webhook URL</strong> into the field below.').html_safe
+ %li
+ = s_('SlackIntegration|Select events below to enable notifications. The <strong>Slack channel names</strong> and <strong>Slack username</strong> fields are optional.').html_safe
+ %p.mt-3.mb-0
+ = s_('SlackIntegration|<strong>Note:</strong> Usernames and private channels are not supported.').html_safe
+ = link_to _('Learn more'), help_page_path('user/project/integrations/slack')
diff --git a/app/views/projects/settings/ci_cd/_form.html.haml b/app/views/projects/settings/ci_cd/_form.html.haml
index a72179f40ad..54e46501287 100644
--- a/app/views/projects/settings/ci_cd/_form.html.haml
+++ b/app/views/projects/settings/ci_cd/_form.html.haml
@@ -8,7 +8,7 @@
= _("Git strategy for pipelines")
%p
= _("Choose between <code>clone</code> or <code>fetch</code> to get the recent application code").html_safe
- = link_to icon('question-circle'), help_page_path('user/project/pipelines/settings', anchor: 'git-strategy'), target: '_blank'
+ = link_to icon('question-circle'), help_page_path('ci/pipelines/settings', anchor: 'git-strategy'), target: '_blank'
.form-check
= f.radio_button :build_allow_git_fetch, 'false', { class: 'form-check-input' }
= f.label :build_allow_git_fetch_false, class: 'form-check-label' do
@@ -38,7 +38,7 @@
= f.text_field :build_timeout_human_readable, class: 'form-control'
%p.form-text.text-muted
= _('If any job surpasses this timeout threshold, it will be marked as failed. Human readable time input language is accepted like "1 hour". Values without specification represent seconds.')
- = link_to icon('question-circle'), help_page_path('user/project/pipelines/settings', anchor: 'timeout'), target: '_blank'
+ = link_to icon('question-circle'), help_page_path('ci/pipelines/settings', anchor: 'timeout'), target: '_blank'
- if can?(current_user, :update_max_artifacts_size, @project)
%hr
@@ -55,7 +55,7 @@
= f.text_field :ci_config_path, class: 'form-control', placeholder: '.gitlab-ci.yml'
%p.form-text.text-muted
= _("The path to the CI configuration file. Defaults to <code>.gitlab-ci.yml</code>").html_safe
- = link_to icon('question-circle'), help_page_path('user/project/pipelines/settings', anchor: 'custom-ci-configuration-path'), target: '_blank'
+ = link_to icon('question-circle'), help_page_path('ci/pipelines/settings', anchor: 'custom-ci-configuration-path'), target: '_blank'
%hr
.form-group
@@ -65,7 +65,7 @@
%strong= _("Public pipelines")
.form-text.text-muted
= _("Allow public access to pipelines and job details, including output logs and artifacts")
- = link_to icon('question-circle'), help_page_path('user/project/pipelines/settings', anchor: 'visibility-of-pipelines'), target: '_blank'
+ = link_to icon('question-circle'), help_page_path('ci/pipelines/settings', anchor: 'visibility-of-pipelines'), target: '_blank'
.bs-callout.bs-callout-info
%p #{_("If enabled")}:
%ul
@@ -86,7 +86,16 @@
%strong= _("Auto-cancel redundant, pending pipelines")
.form-text.text-muted
= _("New pipelines will cancel older, pending pipelines on the same branch")
- = link_to icon('question-circle'), help_page_path('user/project/pipelines/settings', anchor: 'auto-cancel-pending-pipelines'), target: '_blank'
+ = link_to icon('question-circle'), help_page_path('ci/pipelines/settings', anchor: 'auto-cancel-pending-pipelines'), target: '_blank'
+
+ .form-group
+ .form-check
+ = f.check_box :forward_deployment_enabled, { class: 'form-check-input' }
+ = f.label :forward_deployment_enabled, class: 'form-check-label' do
+ %strong= _("Skip older, pending deployment jobs")
+ .form-text.text-muted
+ = _("When a deployment job is successful, skip older deployment jobs that are still pending")
+ = link_to icon('question-circle'), help_page_path('ci/pipelines/settings', anchor: 'skip-older-pending-deployment-jobs'), target: '_blank'
%hr
.form-group
@@ -99,7 +108,7 @@
.input-group-text /
%p.form-text.text-muted
= _("A regular expression that will be used to find the test coverage output in the job log. Leave blank to disable")
- = link_to icon('question-circle'), help_page_path('user/project/pipelines/settings', anchor: 'test-coverage-parsing'), target: '_blank'
+ = link_to icon('question-circle'), help_page_path('ci/pipelines/settings', anchor: 'test-coverage-parsing'), target: '_blank'
.bs-callout.bs-callout-info
%p= _("Below are examples of regex for existing tools:")
%ul
diff --git a/app/views/projects/settings/ci_cd/show.html.haml b/app/views/projects/settings/ci_cd/show.html.haml
index 1358077f2b2..ab2f64cdc21 100644
--- a/app/views/projects/settings/ci_cd/show.html.haml
+++ b/app/views/projects/settings/ci_cd/show.html.haml
@@ -4,6 +4,7 @@
- expanded = expanded_by_default?
- general_expanded = @project.errors.empty? ? expanded : true
+- deploy_token_description = s_('DeployTokens|Deploy tokens allow read-only access to your repository and registry images.')
%section.settings#js-general-pipeline-settings.no-animate{ class: ('expanded' if general_expanded) }
.settings-header
@@ -51,6 +52,10 @@
.settings-content
= render 'ci/variables/index', save_endpoint: project_variables_path(@project)
+= render "shared/deploy_tokens/index", group_or_project: @project, description: deploy_token_description
+
+= render @deploy_keys
+
%section.settings.no-animate#js-pipeline-triggers{ class: ('expanded' if expanded) }
.settings-header
%h4
diff --git a/app/views/projects/settings/integrations/show.html.haml b/app/views/projects/settings/integrations/show.html.haml
index 76770290f36..f603f23a2c7 100644
--- a/app/views/projects/settings/integrations/show.html.haml
+++ b/app/views/projects/settings/integrations/show.html.haml
@@ -1,5 +1,15 @@
- @content_class = "limit-container-width" unless fluid_layout
-- breadcrumb_title _("Integrations Settings")
+- breadcrumb_title _('Integration Settings')
- page_title _('Integrations')
-= render 'projects/hooks/index'
+
+- if show_webhooks_moved_alert?
+ .gl-alert.gl-alert-info.js-webhooks-moved-alert.prepend-top-default{ role: 'alert', data: { feature_id: UserCalloutsHelper::WEBHOOKS_MOVED, dismiss_endpoint: user_callouts_path } }
+ = sprite_icon('information-o', size: 16, css_class: 'gl-icon gl-alert-icon gl-alert-icon-no-title')
+ %button.js-close.gl-alert-dismiss{ type: 'button', 'aria-label' => _('Dismiss') }
+ = sprite_icon('close', size: 16, css_class: 'gl-icon')
+ .gl-alert-body
+ = _('Webhooks have moved. They can now be found under the Settings menu.')
+ .gl-alert-actions
+ = link_to _('Go to Webhooks'), project_hooks_path(@project), class: 'btn gl-alert-action btn-info new-gl-button'
+
= render 'projects/services/index'
diff --git a/app/views/projects/settings/operations/_configuration_banner.html.haml b/app/views/projects/settings/operations/_configuration_banner.html.haml
new file mode 100644
index 00000000000..bdbc9b7d69d
--- /dev/null
+++ b/app/views/projects/settings/operations/_configuration_banner.html.haml
@@ -0,0 +1,24 @@
+%b
+ = s_('PrometheusService|Auto configuration')
+
+- if service.manual_configuration?
+ .info-well.p-2.mt-2
+ = s_('PrometheusService|To enable the installation of Prometheus on your clusters, deactivate the manual configuration below')
+- else
+ .container-fluid
+ .row
+ - if service.prometheus_available?
+ .col-sm-2
+ .svg-container
+ = image_tag 'illustrations/monitoring/getting_started.svg'
+ .col-sm-10
+ %p.text-success.prepend-top-default
+ = s_('PrometheusService|Prometheus is being automatically managed on your clusters')
+ = link_to s_('PrometheusService|Manage clusters'), project_clusters_path(project), class: 'btn'
+ - else
+ .col-sm-2
+ = image_tag 'illustrations/monitoring/loading.svg'
+ .col-sm-10
+ %p.prepend-top-default
+ = s_('PrometheusService|Automatically deploy and configure Prometheus on your clusters to monitor your project’s environments')
+ = link_to s_('PrometheusService|Install Prometheus on clusters'), project_clusters_path(project), class: 'btn btn-success'
diff --git a/app/views/projects/settings/operations/_error_tracking.html.haml b/app/views/projects/settings/operations/_error_tracking.html.haml
index 06b5243dfd9..393b1f9d21a 100644
--- a/app/views/projects/settings/operations/_error_tracking.html.haml
+++ b/app/views/projects/settings/operations/_error_tracking.html.haml
@@ -4,7 +4,7 @@
%section.settings.no-animate.js-error-tracking-settings
.settings-header
- %h4
+ %h3{ :class => "h4" }
= _('Error Tracking')
%button.btn.js-settings-toggle{ type: 'button' }
= _('Expand')
diff --git a/app/views/projects/settings/operations/_incidents.html.haml b/app/views/projects/settings/operations/_incidents.html.haml
index 756d4042613..a96a41b78c2 100644
--- a/app/views/projects/settings/operations/_incidents.html.haml
+++ b/app/views/projects/settings/operations/_incidents.html.haml
@@ -4,7 +4,7 @@
%section.settings.no-animate.qa-incident-management-settings
.settings-header
- %h4= _('Incidents')
+ %h3{ :class => "h4" }= _('Incidents')
%button.btn.js-settings-toggle{ type: 'button' }
= _('Expand')
%p
diff --git a/app/views/projects/settings/operations/_prometheus.html.haml b/app/views/projects/settings/operations/_prometheus.html.haml
new file mode 100644
index 00000000000..3d7a6b021a8
--- /dev/null
+++ b/app/views/projects/settings/operations/_prometheus.html.haml
@@ -0,0 +1,19 @@
+%section.settings.no-animate.js-prometheus-settings
+ .settings-header
+ %h4
+ = _('Prometheus')
+ %button.btn.js-settings-toggle{ type: 'button' }
+ = _('Expand')
+ %p
+ = _('Link Prometheus monitoring to GitLab.')
+ = link_to _('More information'), help_page_path('user/project/integrations/prometheus'), target: '_blank', rel: 'noopener noreferrer'
+ .settings-content
+ - if @project
+ = render 'projects/settings/operations/configuration_banner', project: @project, service: service
+
+ %b.append-bottom-default
+ = s_('PrometheusService|Manual configuration')
+
+ - unless service.editable?
+ .info-well
+ = s_('PrometheusService|To enable manual configuration, uninstall Prometheus from your clusters')
diff --git a/app/views/projects/settings/operations/show.html.haml b/app/views/projects/settings/operations/show.html.haml
index 30b914b5199..ee47d70171b 100644
--- a/app/views/projects/settings/operations/show.html.haml
+++ b/app/views/projects/settings/operations/show.html.haml
@@ -4,6 +4,8 @@
= render 'projects/settings/operations/incidents'
= render 'projects/settings/operations/error_tracking'
+= render 'projects/settings/operations/prometheus', service: prometheus_service if Feature.enabled?(:settings_operations_prometheus_service)
= render 'projects/settings/operations/external_dashboard'
= render 'projects/settings/operations/grafana_integration'
= render_if_exists 'projects/settings/operations/tracing'
+= render_if_exists 'projects/settings/operations/status_page'
diff --git a/app/views/projects/settings/repository/show.html.haml b/app/views/projects/settings/repository/show.html.haml
index ff30cc4f6db..5bf92d32474 100644
--- a/app/views/projects/settings/repository/show.html.haml
+++ b/app/views/projects/settings/repository/show.html.haml
@@ -11,9 +11,6 @@
-# Those are used throughout the actual views. These `shared` views are then
-# reused in EE.
= render "projects/settings/repository/protected_branches"
-
-= render @deploy_keys
-= render "projects/deploy_tokens/index"
= render "projects/cleanup/show"
= render_if_exists 'shared/promotions/promote_repository_features'
diff --git a/app/views/projects/snippets/show.html.haml b/app/views/projects/snippets/show.html.haml
index 422a467574b..ccf109968fc 100644
--- a/app/views/projects/snippets/show.html.haml
+++ b/app/views/projects/snippets/show.html.haml
@@ -9,8 +9,7 @@
= render 'shared/snippets/header'
.project-snippets
- %article.file-holder.snippet-file-content
- = render 'shared/snippets/blob'
+ = render 'shared/snippets/blob', blob: @blob
.row-content-block.top-block.content-component-block
= render 'award_emoji/awards_block', awardable: @snippet, inline: true
diff --git a/app/views/projects/tags/new.html.haml b/app/views/projects/tags/new.html.haml
index a7f739ab13d..1b3b0972744 100644
--- a/app/views/projects/tags/new.html.haml
+++ b/app/views/projects/tags/new.html.haml
@@ -36,11 +36,19 @@
.form-group.row
= label_tag :release_description, s_('TagsPage|Release notes'), class: 'col-form-label col-sm-2'
.col-sm-10
+ .form-text.mb-3
+ - link_start = '<a href="%{url}" rel="noopener noreferrer" target="_blank">'.html_safe
+ - releases_page_path = project_releases_path(@project)
+ - releases_page_link_start = link_start % { url: releases_page_path }
+ - docs_url = help_page_path('user/project/releases/index.md', anchor: 'creating-a-release')
+ - docs_link_start = link_start % { url: docs_url }
+ - link_end = '</a>'.html_safe
+ - replacements = { releases_page_link_start: releases_page_link_start, docs_link_start: docs_link_start, link_end: link_end }
+ = s_('TagsPage|Optionally, create a public Release of your project, based on this tag. Release notes are displayed on the %{releases_page_link_start}Releases%{link_end} page. %{docs_link_start}More information%{link_end}').html_safe % replacements
+
= render layout: 'projects/md_preview', locals: { url: preview_markdown_path(@project), referenced_users: true } do
= render 'projects/zen', attr: :release_description, classes: 'note-textarea', placeholder: s_('TagsPage|Write your release notes or drag files here…'), current_text: @release_description
= render 'shared/notes/hints'
- .form-text.text-muted
- = s_('TagsPage|Optionally, add release notes to the tag. They will be stored in the GitLab database and displayed on the tags page.')
.form-actions
= button_tag s_('TagsPage|Create tag'), class: 'btn btn-success'
= link_to s_('TagsPage|Cancel'), project_tags_path(@project), class: 'btn btn-cancel'
diff --git a/app/views/projects/wikis/_form.html.haml b/app/views/projects/wikis/_form.html.haml
index 438d390389c..d29abfa937d 100644
--- a/app/views/projects/wikis/_form.html.haml
+++ b/app/views/projects/wikis/_form.html.haml
@@ -4,7 +4,7 @@
= form_for [@project.namespace.becomes(Namespace), @project, @page], method: @page.persisted? ? :put : :post,
html: { class: form_classes },
data: { uploads_path: uploads_path } do |f|
- = form_errors(@page)
+ = form_errors(@page, truncate: :title)
- if @page.persisted?
= f.hidden_field :last_commit_sha, value: @page.last_commit_sha
@@ -12,7 +12,7 @@
.form-group.row
.col-sm-12= f.label :title, class: 'control-label-full-width'
.col-sm-12
- = f.text_field :title, class: 'form-control qa-wiki-title-textbox', value: @page.title, required: true, autofocus: !@page.persisted?, placeholder: _('Wiki|Page title')
+ = f.text_field :title, class: 'form-control qa-wiki-title-textbox', value: @page.title, required: true, autofocus: !@page.persisted?, placeholder: s_('Wiki|Page title')
%span.d-inline-block.mw-100.prepend-top-5
= icon('lightbulb-o')
- if @page.persisted?
@@ -43,8 +43,16 @@
.form-text.text-muted
= succeed '.' do
- = (s_("WikiMarkdownTip|To link to a (new) page, simply type %{link_example}") % { link_example: '<code>[Link Title](page-slug)</code>' }).html_safe
-
+ - case @page.format.to_s
+ - when 'rdoc'
+ - link_example = '{Link title}[link:page-slug]'
+ - when 'asciidoc'
+ - link_example = 'link:page-slug[Link title]'
+ - when 'org'
+ - link_example = '[[page-slug]]'
+ - else
+ - link_example = '[Link Title](page-slug)'
+ = (s_('WikiMarkdownTip|To link to a (new) page, simply type <code class="js-markup-link-example">%{link_example}</code>') % { link_example: link_example }).html_safe
= succeed '.' do
- markdown_link = link_to s_("WikiMarkdownDocs|documentation"), help_page_path('user/markdown', anchor: 'wiki-specific-markdown')
= (s_("WikiMarkdownDocs|More examples are in the %{docs_link}") % { docs_link: markdown_link }).html_safe
diff --git a/app/views/projects/wikis/show.html.haml b/app/views/projects/wikis/show.html.haml
index ebd99cf8605..74798311c2e 100644
--- a/app/views/projects/wikis/show.html.haml
+++ b/app/views/projects/wikis/show.html.haml
@@ -26,7 +26,7 @@
= (s_("WikiHistoricalPage|You can view the %{most_recent_link} or browse the %{history_link}.") % { most_recent_link: most_recent_link, history_link: history_link }).html_safe
.prepend-top-default.append-bottom-default
- .md.md-file{ data: { qa_selector: 'wiki_page_content' } }
+ .md{ data: { qa_selector: 'wiki_page_content' } }
= render_wiki_content(@page)
= render 'sidebar'
diff --git a/app/views/search/_category.html.haml b/app/views/search/_category.html.haml
index 255a62d0d06..6ad155eb715 100644
--- a/app/views/search/_category.html.haml
+++ b/app/views/search/_category.html.haml
@@ -24,7 +24,6 @@
= users
- elsif @show_snippets
- = search_filter_link 'snippet_blobs', _("Snippet Contents"), search: { snippets: true, group_id: nil, project_id: nil }
= search_filter_link 'snippet_titles', _("Titles and Filenames"), search: { snippets: true, group_id: nil, project_id: nil }
- else
= search_filter_link 'projects', _("Projects"), data: { qa_selector: 'projects_tab' }
diff --git a/app/views/search/results/_snippet_blob.html.haml b/app/views/search/results/_snippet_blob.html.haml
index 5126351b0bb..fa77566dddb 100644
--- a/app/views/search/results/_snippet_blob.html.haml
+++ b/app/views/search/results/_snippet_blob.html.haml
@@ -23,7 +23,7 @@
%i.fa.fa-file
%strong= snippet.file_name
- if markup?(snippet.file_name)
- .file-content.md.md-file
+ .file-content.md
- snippet_chunks.each do |chunk|
- unless chunk[:data].empty?
= markup(snippet.file_name, chunk[:data])
diff --git a/app/views/shared/_broadcast_message.html.haml b/app/views/shared/_broadcast_message.html.haml
index c058b210688..bc4db672938 100644
--- a/app/views/shared/_broadcast_message.html.haml
+++ b/app/views/shared/_broadcast_message.html.haml
@@ -1,8 +1,10 @@
%div{ class: "broadcast-#{message.broadcast_type}-message #{opts[:preview] && 'preview'} js-broadcast-notification-#{message.id} d-flex",
style: broadcast_message_style(message), dir: 'auto' }
- %div
+ .flex-grow-1.text-right.pr-2
= sprite_icon('bullhorn', size: 16, css_class: 'vertical-align-text-top')
+ %div{ class: !fluid_layout && 'container-limited' }
= render_broadcast_message(message)
- - if message.notification? && opts[:preview].blank?
- %button.js-dismiss-current-broadcast-notification.btn.btn-link.text-dark.pl-2.pr-2{ 'aria-label' => _('Close'), :type => 'button', data: { id: message.id } }
- %i.fa.fa-times
+ .flex-grow-1.text-right{ style: 'flex-basis: 0' }
+ - if (message.notification? || message.dismissable?) && opts[:preview].blank?
+ %button.broadcast-message-dismiss.js-dismiss-current-broadcast-notification.btn.btn-link.pl-2.pr-2{ 'aria-label' => _('Close'), :type => 'button', data: { id: message.id } }
+ %i.fa.fa-times
diff --git a/app/views/shared/_default_branch_protection.html.haml b/app/views/shared/_default_branch_protection.html.haml
new file mode 100644
index 00000000000..d7ae21debd8
--- /dev/null
+++ b/app/views/shared/_default_branch_protection.html.haml
@@ -0,0 +1,3 @@
+.form-group
+ = f.label :default_branch_protection, class: 'label-bold'
+ = f.select :default_branch_protection, options_for_select(Gitlab::Access.protection_options, selected_level), {}, class: 'form-control'
diff --git a/app/views/shared/_delete_label_modal.html.haml b/app/views/shared/_delete_label_modal.html.haml
index f37dd2cdf02..c6629cd33a5 100644
--- a/app/views/shared/_delete_label_modal.html.haml
+++ b/app/views/shared/_delete_label_modal.html.haml
@@ -2,7 +2,7 @@
.modal-dialog
.modal-content
.modal-header
- %h3.page-title Delete #{render_label(label, tooltip: false)} ?
+ %h3.page-title Delete label: #{label.name} ?
%button.close{ type: "button", "data-dismiss": "modal", "aria-label" => _('Close') }
%span{ "aria-hidden": true } &times;
diff --git a/app/views/shared/_no_ssh.html.haml b/app/views/shared/_no_ssh.html.haml
index 17ef5327341..fbfd4d0e9a9 100644
--- a/app/views/shared/_no_ssh.html.haml
+++ b/app/views/shared/_no_ssh.html.haml
@@ -1,9 +1,10 @@
- if show_no_ssh_key_message?
- .no-ssh-key-message.alert.alert-warning
- - add_ssh_key_link = link_to s_('MissingSSHKeyWarningLink|add an SSH key'), profile_keys_path, class: 'alert-link'
- - ssh_message = _("You won't be able to pull or push project code via SSH until you %{add_ssh_key_link} to your profile") % { add_ssh_key_link: add_ssh_key_link }
- = ssh_message.html_safe
- .alert-link-group
- = link_to _("Don't show again"), profile_path(user: {hide_no_ssh_key: true}), method: :put, class: 'alert-link'
- |
- = link_to _('Remind later'), '#', class: 'hide-no-ssh-message alert-link'
+ %div{ class: 'no-ssh-key-message gl-alert gl-alert-warning', role: 'alert' }
+ = sprite_icon('warning', size: 16, css_class: 'gl-icon s16 gl-alert-icon gl-alert-icon-no-title')
+ %button{ class: 'gl-alert-dismiss hide-no-ssh-message', type: 'button', 'aria-label': 'Dismiss' }
+ = sprite_icon('close', size: 16, css_class: 'gl-icon s16')
+ .gl-alert-body
+ = s_("MissingSSHKeyWarningLink|You won't be able to pull or push project code via SSH until you add an SSH key to your profile").html_safe
+ .gl-alert-actions
+ = link_to s_('MissingSSHKeyWarningLink|Add SSH key'), profile_keys_path, class: "btn gl-alert-action btn-warning btn-md new-gl-button"
+ = link_to s_("MissingSSHKeyWarningLink|Don't show again"), profile_path(user: {hide_no_ssh_key: true}), method: :put, role: 'button', class: 'btn gl-alert-action btn-md btn-warning btn-secondary new-gl-button'
diff --git a/app/views/shared/_outdated_browser.html.haml b/app/views/shared/_outdated_browser.html.haml
index 8ddb1b2bc99..30255e18f04 100644
--- a/app/views/shared/_outdated_browser.html.haml
+++ b/app/views/shared/_outdated_browser.html.haml
@@ -1,8 +1,15 @@
- if outdated_browser?
- .flash-container
- .flash-alert.text-center
- GitLab may not work properly because you are using an outdated web browser.
+ .gl-alert.gl-alert-danger.outdated-browser{ :role => "alert" }
+ = sprite_icon('error', size: 16, css_class: "gl-alert-icon gl-alert-icon-no-title gl-icon")
+ .gl-alert-body
+ - if browser.ie? && browser.version.to_i == 11
+ - feedback_link_url = 'https://gitlab.com/gitlab-org/gitlab/issues/197987'
+ - feedback_link_start = '<a href="%{url}" class="gl-link" target="_blank" rel="noopener noreferrer">'.html_safe % { url: feedback_link_url }
+ = s_('OutdatedBrowser|From May 2020 GitLab no longer supports Internet Explorer 11.')
+ %br
+ = s_('OutdatedBrowser|You can provide feedback %{feedback_link_start}on this issue%{feedback_link_end} or via your usual support channels.').html_safe % { feedback_link_start: feedback_link_start, feedback_link_end: '</a>'.html_safe }
+ - else
+ = s_('OutdatedBrowser|GitLab may not work properly, because you are using an outdated web browser.')
%br
- Please install a
- = link_to 'supported web browser', help_page_path('install/requirements', anchor: 'supported-web-browsers')
- for a better experience.
+ - browser_link_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: help_page_path('install/requirements', anchor: 'supported-web-browsers') }
+ = s_('OutdatedBrowser|Please install a %{browser_link_start}supported web browser%{browser_link_end} for a better experience.').html_safe % { browser_link_start: browser_link_start, browser_link_end: '</a>'.html_safe }
diff --git a/app/views/shared/_service_settings.html.haml b/app/views/shared/_service_settings.html.haml
index 4415c654ab9..aeda7ea9909 100644
--- a/app/views/shared/_service_settings.html.haml
+++ b/app/views/shared/_service_settings.html.haml
@@ -1,7 +1,7 @@
= form_errors(@service)
- if lookup_context.template_exists?('help', "projects/services/#{@service.to_param}", true)
- = render "projects/services/#{@service.to_param}/help", subject: subject
+ = render "projects/services/#{@service.to_param}/help", subject: @service
- elsif @service.help.present?
.info-well
.well-segment
diff --git a/app/views/shared/badges/_badge_settings.html.haml b/app/views/shared/badges/_badge_settings.html.haml
index b7c250d3b1c..4d54981cb0a 100644
--- a/app/views/shared/badges/_badge_settings.html.haml
+++ b/app/views/shared/badges/_badge_settings.html.haml
@@ -1,4 +1,2 @@
#badge-settings{ data: { api_endpoint_url: @badge_api_endpoint,
docs_url: help_page_path('user/project/badges')} }
- .text-center.prepend-top-default
- = icon('spinner spin 2x')
diff --git a/app/views/shared/boards/components/_board.html.haml b/app/views/shared/boards/components/_board.html.haml
index 3db96db73ce..e42d8650708 100644
--- a/app/views/shared/boards/components/_board.html.haml
+++ b/app/views/shared/boards/components/_board.html.haml
@@ -29,11 +29,14 @@
":title" => '(list.assignee && list.assignee.username || "")' }
@{{ list.assignee.username }}
- %span.has-tooltip.badge.color-label.title.d-inline-block.mw-100.text-truncate.align-middle{ "v-if": "list.type === \"label\"",
- ":title" => '(list.label ? list.label.description : "")',
- data: { container: "body", placement: "bottom" },
- ":style" => "{ backgroundColor: (list.label && list.label.color ? list.label.color : null), color: (list.label && list.label.textColor ? list.label.textColor : \"#2e2e2e\") }" }
- {{ list.title }}
+ %gl-label{ "v-if" => " list.type === \"label\"",
+ ":background-color" => "list.label.color",
+ ":title" => "list.label.title",
+ ":description" => "list.label.description",
+ "tooltipPlacement" => "bottom",
+ ":size" => '(!list.isExpanded ? "sm" : "")',
+ ":scoped" => "showScopedLabels(list.label)",
+ ":scoped-labels-documentation-link" => "helpLink" }
- if can?(current_user, :admin_list, current_board_parent)
%board-delete{ "inline-template" => true,
diff --git a/app/views/shared/boards/components/sidebar/_labels.html.haml b/app/views/shared/boards/components/sidebar/_labels.html.haml
index c50826a7cda..a1088dc5222 100644
--- a/app/views/shared/boards/components/sidebar/_labels.html.haml
+++ b/app/views/shared/boards/components/sidebar/_labels.html.haml
@@ -8,15 +8,12 @@
%span.no-value{ "v-if" => "issue.labels && issue.labels.length === 0" }
= _("None")
%span{ "v-for" => "label in issue.labels" }
- %span.d-inline-block.position-relative.scoped-label-wrapper{ "v-if" => "showScopedLabels(label)" }
- %a{ href: '#' }
- %span.badge.color-label.label{ ":style" => "{ backgroundColor: label.color, color: label.textColor }" }
- {{ label.title }}
- %a.label.scoped-label{ ":href" => "helpLink()" }
- %i.fa.fa-question-circle{ ":style" => "{ backgroundColor: label.color, color: label.textColor }" }
- %a{ href: "#", "v-else" => true }
- .badge.color-label.has-tooltip{ ":style" => "{ backgroundColor: label.color, color: label.textColor }" }
- {{ label.title }}
+ %gl-label{ ":key" => "label.id",
+ ":background-color" => "label.color",
+ ":title" => "label.title",
+ ":description" => "label.description",
+ ":scoped" => "showScopedLabels(label)",
+ ":scoped-labels-documentation-link" => "helpLink" }
- if can_admin_issue?
.selectbox
diff --git a/app/views/shared/deploy_tokens/_form.html.haml b/app/views/shared/deploy_tokens/_form.html.haml
new file mode 100644
index 00000000000..99e259ba944
--- /dev/null
+++ b/app/views/shared/deploy_tokens/_form.html.haml
@@ -0,0 +1,34 @@
+%p.profile-settings-content
+ = s_("DeployTokens|Pick a name for the application, and we'll give you a unique deploy token.")
+
+= form_for token, url: create_deploy_token_path(group_or_project, anchor: 'js-deploy-tokens'), method: :post do |f|
+ = form_errors(token)
+
+ .form-group
+ = f.label :name, class: 'label-bold'
+ = f.text_field :name, class: 'form-control qa-deploy-token-name', required: true
+
+ .form-group
+ = f.label :expires_at, class: 'label-bold'
+ = f.text_field :expires_at, class: 'datepicker form-control qa-deploy-token-expires-at', value: f.object.expires_at
+
+ .form-group
+ = f.label :username, class: 'label-bold'
+ = f.text_field :username, class: 'form-control qa-deploy-token-username'
+ .text-secondary= s_('DeployTokens|Default format is "gitlab+deploy-token-{n}". Enter custom username if you want to change it.')
+
+ .form-group
+ = f.label :scopes, class: 'label-bold'
+ %fieldset.form-group.form-check
+ = f.check_box :read_repository, class: 'form-check-input qa-deploy-token-read-repository'
+ = label_tag ("deploy_token_read_repository"), 'read_repository', class: 'label-bold form-check-label'
+ .text-secondary= s_('DeployTokens|Allows read-only access to the repository')
+
+ - if container_registry_enabled?(group_or_project)
+ %fieldset.form-group.form-check
+ = f.check_box :read_registry, class: 'form-check-input qa-deploy-token-read-registry'
+ = label_tag ("deploy_token_read_registry"), 'read_registry', class: 'label-bold form-check-label'
+ .text-secondary= s_('DeployTokens|Allows read-only access to the registry images')
+
+ .prepend-top-default
+ = f.submit s_('DeployTokens|Create deploy token'), class: 'btn btn-success qa-create-deploy-token'
diff --git a/app/views/shared/deploy_tokens/_index.html.haml b/app/views/shared/deploy_tokens/_index.html.haml
new file mode 100644
index 00000000000..b0c9c72dfaa
--- /dev/null
+++ b/app/views/shared/deploy_tokens/_index.html.haml
@@ -0,0 +1,18 @@
+- expanded = expand_deploy_tokens_section?(@new_deploy_token)
+
+%section.qa-deploy-tokens-settings.settings.no-animate#js-deploy-tokens{ class: ('expanded' if expanded), data: { qa_selector: 'deploy_tokens_settings' } }
+ .settings-header
+ %h4= s_('DeployTokens|Deploy Tokens')
+ %button.btn.js-settings-toggle.qa-expand-deploy-keys{ type: 'button' }
+ = expanded ? 'Collapse' : 'Expand'
+ %p
+ = description
+ .settings-content
+ - if @new_deploy_token.persisted?
+ = render 'shared/deploy_tokens/new_deploy_token', deploy_token: @new_deploy_token
+ %h5.prepend-top-0
+ = s_('DeployTokens|Add a deploy token')
+ = render 'shared/deploy_tokens/form', group_or_project: group_or_project, token: @new_deploy_token, presenter: @deploy_tokens
+ %hr
+ = render 'shared/deploy_tokens/table', group_or_project: group_or_project, active_tokens: @deploy_tokens
+
diff --git a/app/views/projects/deploy_tokens/_new_deploy_token.html.haml b/app/views/shared/deploy_tokens/_new_deploy_token.html.haml
index f295fa82192..f295fa82192 100644
--- a/app/views/projects/deploy_tokens/_new_deploy_token.html.haml
+++ b/app/views/shared/deploy_tokens/_new_deploy_token.html.haml
diff --git a/app/views/shared/deploy_tokens/_revoke_modal.html.haml b/app/views/shared/deploy_tokens/_revoke_modal.html.haml
new file mode 100644
index 00000000000..5a3759ef755
--- /dev/null
+++ b/app/views/shared/deploy_tokens/_revoke_modal.html.haml
@@ -0,0 +1,15 @@
+.modal{ id: "revoke-modal-#{token.id}", tabindex: -1 }
+ .modal-dialog
+ .modal-content
+ .modal-header
+ %h4.modal-title
+ = s_('DeployTokens|Revoke %{b_start}%{name}%{b_end}?').html_safe % { b_start: '<b>'.html_safe, name: token.name, b_end: '</b>'.html_safe }
+ %button.close{ type: "button", "data-dismiss": "modal", "aria-label" => _('Close') }
+ %span{ "aria-hidden": true } &times;
+ .modal-body
+ %p
+ = s_('DeployTokens|You are about to revoke %{b_start}%{name}%{b_end}.').html_safe % { b_start: '<b>'.html_safe, name: token.name, b_end: '</b>'.html_safe }
+ = s_('DeployTokens|This action cannot be undone.')
+ .modal-footer
+ %a{ href: '#', data: { dismiss: 'modal' }, class: 'btn btn-default' }= _('Cancel')
+ = link_to s_('DeployTokens|Revoke %{name}') % { name: token.name }, revoke_deploy_token_path(group_or_project, token), method: :put, class: 'btn btn-danger'
diff --git a/app/views/shared/deploy_tokens/_table.html.haml b/app/views/shared/deploy_tokens/_table.html.haml
new file mode 100644
index 00000000000..d4e20805a2a
--- /dev/null
+++ b/app/views/shared/deploy_tokens/_table.html.haml
@@ -0,0 +1,31 @@
+%h5= s_("DeployTokens|Active Deploy Tokens (%{active_tokens})") % { active_tokens: active_tokens.length }
+
+- if active_tokens.present?
+ .table-responsive.deploy-tokens
+ %table.table
+ %thead
+ %tr
+ %th= s_('DeployTokens|Name')
+ %th= s_('DeployTokens|Username')
+ %th= s_('DeployTokens|Created')
+ %th= s_('DeployTokens|Expires')
+ %th= s_('DeployTokens|Scopes')
+ %th
+ %tbody
+ - active_tokens.each do |token|
+ %tr
+ %td= token.name
+ %td= token.username
+ %td= token.created_at.to_date.to_s(:medium)
+ %td
+ - if token.expires?
+ %span{ class: ('text-warning' if token.expires_soon?) }
+ In #{distance_of_time_in_words_to_now(token.expires_at)}
+ - else
+ %span.token-never-expires-label= _('Never')
+ %td= token.scopes.present? ? token.scopes.join(", ") : _('<no scopes selected>')
+ %td= link_to s_('DeployTokens|Revoke'), "#", class: "btn btn-danger float-right", data: { toggle: "modal", target: "#revoke-modal-#{token.id}"}
+ = render 'shared/deploy_tokens/revoke_modal', token: token, group_or_project: group_or_project
+- else
+ .settings-message.text-center
+ = s_('DeployTokens|This %{entity_type} has no active Deploy Tokens.') % { entity_type: group_or_project.class.name.downcase }
diff --git a/app/views/shared/issuable/_search_bar.html.haml b/app/views/shared/issuable/_search_bar.html.haml
index a27ceaff782..d9ca0b8869f 100644
--- a/app/views/shared/issuable/_search_bar.html.haml
+++ b/app/views/shared/issuable/_search_bar.html.haml
@@ -159,6 +159,8 @@
= render_if_exists 'shared/issuable/filter_weight', type: type
+ = render_if_exists 'shared/issuable/filter_epic', type: type
+
%button.clear-search.hidden{ type: 'button' }
= icon('times')
.filter-dropdown-container.d-flex.flex-column.flex-md-row
diff --git a/app/views/shared/issuable/_sidebar.html.haml b/app/views/shared/issuable/_sidebar.html.haml
index 2a853de12a4..1a1da6b3801 100644
--- a/app/views/shared/issuable/_sidebar.html.haml
+++ b/app/views/shared/issuable/_sidebar.html.haml
@@ -129,6 +129,9 @@
= render_if_exists 'shared/issuable/sidebar_weight', issuable_sidebar: issuable_sidebar
+ - if Feature.enabled?(:save_issuable_health_status, @project.group) && issuable_sidebar[:type] == "issue"
+ .js-sidebar-status-entry-point
+
- if issuable_sidebar.has_key?(:confidential)
-# haml-lint:disable InlineJavaScript
%script#js-confidential-issue-data{ type: "application/json" }= { is_confidential: issuable_sidebar[:confidential], is_editable: can_edit_issuable }.to_json.html_safe
diff --git a/app/views/shared/issuable/_sidebar_assignees.html.haml b/app/views/shared/issuable/_sidebar_assignees.html.haml
index e6b8e299e1c..b5a27f2f17d 100644
--- a/app/views/shared/issuable/_sidebar_assignees.html.haml
+++ b/app/views/shared/issuable/_sidebar_assignees.html.haml
@@ -4,7 +4,7 @@
#js-vue-sidebar-assignees{ data: { field: "#{issuable_type}", signed_in: signed_in } }
.title.hide-collapsed
= _('Assignee')
- = icon('spinner spin')
+ .spinner.spinner-sm.align-bottom
.selectbox.hide-collapsed
- if assignees.none?
diff --git a/app/views/shared/milestones/_issuable.html.haml b/app/views/shared/milestones/_issuable.html.haml
index ae3ab2adfd0..0adfe2f0c04 100644
--- a/app/views/shared/milestones/_issuable.html.haml
+++ b/app/views/shared/milestones/_issuable.html.haml
@@ -1,9 +1,8 @@
-# @project is present when viewing Project's milestone
- project = @project || issuable.project
-- namespace = @project_namespace || project.namespace.becomes(Namespace)
- labels = issuable.labels
- assignees = issuable.assignees
-- base_url_args = [namespace, project]
+- base_url_args = [project]
- issuable_type_args = base_url_args + [issuable.class.table_name]
- issuable_url_args = base_url_args + [issuable]
@@ -17,11 +16,11 @@
= confidential_icon(issuable)
= link_to issuable.title, issuable_url_args, title: issuable.title
.issuable-detail
- = link_to [namespace, project, issuable], class: 'issue-link' do
+ = link_to issuable_url_args, class: 'issue-link' do
%span.issuable-number= issuable.to_reference
- labels.each do |label|
- = render_label(label.present(issuable_subject: project), link: polymorphic_path(issuable_type_args, { milestone_title: @milestone.title, label_name: label.title, state: 'all' }))
+ = render_label(label.present(issuable_subject: project), link: polymorphic_path(issuable_type_args, { milestone_title: @milestone.title, label_name: label.title, state: 'all' }), small: true)
%span.assignee-icon
- assignees.each do |assignee|
diff --git a/app/views/shared/milestones/_labels_tab.html.haml b/app/views/shared/milestones/_labels_tab.html.haml
index ecab037e378..4c930b90ce7 100644
--- a/app/views/shared/milestones/_labels_tab.html.haml
+++ b/app/views/shared/milestones/_labels_tab.html.haml
@@ -3,11 +3,9 @@
- options = { milestone_title: @milestone.title, label_name: label.title }
%li.no-border
- %span.label-row
- %span.label-name
- = render_label(label, tooltip: false, link: milestones_label_path(options))
- %span.prepend-description-left
- = markdown_field(label, :description)
+ = render_label(label, tooltip: false, link: milestones_label_path(options))
+ %span.prepend-description-left
+ = markdown_field(label, :description)
.float-right.d-none.d-lg-block.d-xl-block
= link_to milestones_label_path(options.merge(state: 'opened')), class: 'btn btn-transparent btn-action' do
diff --git a/app/views/shared/milestones/_milestone.html.haml b/app/views/shared/milestones/_milestone.html.haml
index 6e50b31fd71..451c2c2ba10 100644
--- a/app/views/shared/milestones/_milestone.html.haml
+++ b/app/views/shared/milestones/_milestone.html.haml
@@ -42,11 +42,11 @@
.col-sm-4.milestone-progress
= milestone_progress_bar(milestone)
- = link_to pluralize(milestone.total_issues_count(current_user), 'Issue'), issues_path
+ = link_to pluralize(milestone.total_issues_count, 'Issue'), issues_path
- if milestone.merge_requests_enabled?
&middot;
= link_to pluralize(milestone.merge_requests_visible_to_user(current_user).size, 'Merge Request'), merge_requests_path
- .float-lg-right.light #{milestone.percent_complete(current_user)}% complete
+ .float-lg-right.light #{milestone.percent_complete}% complete
.col-sm-2
.milestone-actions.d-flex.justify-content-sm-start.justify-content-md-end
- if @project
diff --git a/app/views/shared/milestones/_sidebar.html.haml b/app/views/shared/milestones/_sidebar.html.haml
index a6fb8e6d4fc..aa9c4be1cc1 100644
--- a/app/views/shared/milestones/_sidebar.html.haml
+++ b/app/views/shared/milestones/_sidebar.html.haml
@@ -7,7 +7,7 @@
%a.gutter-toggle.float-right.js-sidebar-toggle.has-tooltip{ role: "button", href: "#", "aria-label" => "Toggle sidebar", title: sidebar_gutter_tooltip_text, data: { container: 'body', placement: 'left', boundary: 'viewport' } }
= sidebar_gutter_toggle_icon
.title.hide-collapsed
- %strong.bold== #{milestone.percent_complete(current_user)}%
+ %strong.bold== #{milestone.percent_complete}%
%span.hide-collapsed
complete
.value.hide-collapsed
@@ -15,7 +15,7 @@
.block.milestone-progress.hide-expanded
.sidebar-collapsed-icon.has-tooltip{ title: milestone_progress_tooltip_text(milestone), data: { container: 'body', html: 'true', placement: 'left', boundary: 'viewport' } }
- %span== #{milestone.percent_complete(current_user)}%
+ %span== #{milestone.percent_complete}%
= milestone_progress_bar(milestone)
.block.start_date.hide-collapsed
diff --git a/app/views/shared/milestones/_top.html.haml b/app/views/shared/milestones/_top.html.haml
index 12575b30a6c..8d911d4247e 100644
--- a/app/views/shared/milestones/_top.html.haml
+++ b/app/views/shared/milestones/_top.html.haml
@@ -8,7 +8,7 @@
= render 'shared/milestones/deprecation_message' if is_dynamic_milestone
= render 'shared/milestones/description', milestone: milestone
-- if milestone.complete?(current_user) && milestone.active?
+- if milestone.complete? && milestone.active?
.alert.alert-success.prepend-top-default
%span
= _('All issues for this milestone are closed.')
diff --git a/app/views/shared/notes/_note.html.haml b/app/views/shared/notes/_note.html.haml
index 5c9dd72418e..50bc4fb35df 100644
--- a/app/views/shared/notes/_note.html.haml
+++ b/app/views/shared/notes/_note.html.haml
@@ -1,5 +1,5 @@
- return unless note.author
-- return if note.cross_reference_not_visible_for?(current_user)
+- return unless note.readable_by?(current_user)
- show_image_comment_badge = local_assigns.fetch(:show_image_comment_badge, false)
- note_editable = can?(current_user, :admin_note, note)
diff --git a/app/views/shared/notifications/_custom_notifications.html.haml b/app/views/shared/notifications/_custom_notifications.html.haml
index 0e1e3beeb1c..58d02602423 100644
--- a/app/views/shared/notifications/_custom_notifications.html.haml
+++ b/app/views/shared/notifications/_custom_notifications.html.haml
@@ -23,6 +23,7 @@
#{ paragraph.html_safe }
.col-lg-8
- notification_setting.email_events.each_with_index do |event, index|
+ - next if event == :fixed_pipeline && Feature.disabled?(:ci_pipeline_fixed_notifications)
- field_id = "#{notifications_menu_identifier("modal", notification_setting)}_notification_setting[#{event}]"
.form-group
.form-check{ class: ("prepend-top-0" if index == 0) }
diff --git a/app/views/shared/projects/_project.html.haml b/app/views/shared/projects/_project.html.haml
index 45e95685677..d29ba3eedc6 100644
--- a/app/views/shared/projects/_project.html.haml
+++ b/app/views/shared/projects/_project.html.haml
@@ -16,6 +16,7 @@
- css_controls_class = compact_mode ? [] : ["flex-lg-row", "justify-content-lg-between"]
- css_controls_class << "with-pipeline-status" if show_pipeline_status_icon
- avatar_container_class = project.creator && use_creator_avatar ? '' : 'rect-avatar'
+- license_name = project_license_name(project)
%li.project-row.d-flex{ class: css_class }
= cache(cache_key) do
@@ -42,10 +43,10 @@
%span.metadata-info.visibility-icon.append-right-10.prepend-top-8.text-secondary.has-tooltip{ data: { container: 'body', placement: 'top' }, title: visibility_icon_description(project) }
= visibility_level_icon(project.visibility_level, fw: true)
- - if explore_projects_tab? && project.repository.license
+ - if explore_projects_tab? && license_name
%span.metadata-info.d-inline-flex.align-items-center.append-right-10.prepend-top-8
= sprite_icon('scale', size: 14, css_class: 'append-right-4')
- = project.repository.license.name
+ = license_name
- if !explore_projects_tab? && access&.nonzero?
-# haml-lint:disable UnnecessaryStringOutput
diff --git a/app/views/shared/snippets/_blob.html.haml b/app/views/shared/snippets/_blob.html.haml
index 6a5e777706c..a2169deb592 100644
--- a/app/views/shared/snippets/_blob.html.haml
+++ b/app/views/shared/snippets/_blob.html.haml
@@ -1,13 +1,13 @@
-- blob = @snippet.blob
-.js-file-title.file-title-flex-parent
- = render 'projects/blob/header_content', blob: blob
+%article.file-holder.snippet-file-content
+ .js-file-title.file-title-flex-parent
+ = render 'projects/blob/header_content', blob: blob
- .file-actions.d-none.d-sm-block
- = render 'projects/blob/viewer_switcher', blob: blob
+ .file-actions.d-none.d-sm-block
+ = render 'projects/blob/viewer_switcher', blob: blob
- .btn-group{ role: "group" }<
- = copy_blob_source_button(blob)
- = open_raw_blob_button(blob)
- = download_raw_snippet_button(@snippet)
+ .btn-group{ role: "group" }<
+ = copy_blob_source_button(blob)
+ = open_raw_blob_button(blob)
+ = download_raw_snippet_button(@snippet)
-= render 'projects/blob/content', blob: blob
+ = render 'projects/blob/content', blob: blob
diff --git a/app/views/shared/snippets/_embed.html.haml b/app/views/shared/snippets/_embed.html.haml
index b401820daf6..f1abd3a2ce4 100644
--- a/app/views/shared/snippets/_embed.html.haml
+++ b/app/views/shared/snippets/_embed.html.haml
@@ -1,4 +1,3 @@
-- blob = @snippet.blob
.gitlab-embed-snippets
.js-file-title.file-title-flex-parent
.file-header-content
@@ -6,10 +5,10 @@
%strong.file-title-name
%a.gitlab-embedded-snippets-title{ href: url_for(only_path: false, overwrite_params: nil) }
- = blob.name
+ = @blob.name
%small
- = number_to_human_size(blob.raw_size)
+ = number_to_human_size(@blob.raw_size)
%a.gitlab-logo-wrapper{ href: url_for(only_path: false, overwrite_params: nil), title: 'view on gitlab' }
%img.gitlab-logo{ src: image_url('ext_snippet_icons/logo.svg'), alt: "GitLab logo" }
@@ -19,4 +18,4 @@
= embedded_snippet_download_button
%article.file-holder.snippet-file-content
- = render 'projects/blob/viewer', viewer: @snippet.blob.simple_viewer, load_async: false, external_embed: true
+ = render 'projects/blob/viewer', viewer: @blob.simple_viewer, load_async: false, external_embed: true
diff --git a/app/views/shared/snippets/_form.html.haml b/app/views/shared/snippets/_form.html.haml
index 3c2c751c579..828015d29f5 100644
--- a/app/views/shared/snippets/_form.html.haml
+++ b/app/views/shared/snippets/_form.html.haml
@@ -26,9 +26,9 @@
= f.label :file_name, s_('Snippets|File')
.file-holder.snippet
.js-file-title.file-title-flex-parent
- = f.text_field :file_name, placeholder: s_("Snippets|Give your file a name to add code highlighting, e.g. example.rb for Ruby"), class: 'form-control snippet-file-name qa-snippet-file-name'
+ = f.text_field :file_name, placeholder: s_("Snippets|Give your file a name to add code highlighting, e.g. example.rb for Ruby"), class: 'form-control js-snippet-file-name qa-snippet-file-name'
.file-content.code
- %pre#editor= @snippet.content
+ %pre#editor{ data: { 'editor-loading': true } }= @snippet.content
= f.hidden_field :content, class: 'snippet-file-content'
.form-group
diff --git a/app/views/shared/snippets/_list.html.haml b/app/views/shared/snippets/_list.html.haml
index 766f48fff3d..ca3a291ae27 100644
--- a/app/views/shared/snippets/_list.html.haml
+++ b/app/views/shared/snippets/_list.html.haml
@@ -8,4 +8,4 @@
%ul.content-list
= render partial: 'shared/snippets/snippet', collection: @snippets, locals: { link_project: link_project }
- = paginate @snippets, theme: 'gitlab', remote: remote
+ = paginate_collection @snippets, remote: remote
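The snippet list swaps a direct Kaminari `paginate ... theme: 'gitlab'` call for a `paginate_collection` helper. A minimal sketch of a wrapper in that spirit, assuming it only centralises the theme and remote options; the real helper is defined elsewhere in the codebase and may do more:

    # Illustrative wrapper: one place to keep the pagination theme instead of
    # repeating `theme: 'gitlab'` in every view.
    def paginate_collection(collection, remote: nil)
      paginate(collection, theme: 'gitlab', remote: remote)
    end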
diff --git a/app/views/shared/web_hooks/_form.html.haml b/app/views/shared/web_hooks/_form.html.haml
index 9c5b9593bba..ce85cbd7f07 100644
--- a/app/views/shared/web_hooks/_form.html.haml
+++ b/app/views/shared/web_hooks/_form.html.haml
@@ -15,62 +15,62 @@
= form.check_box :push_events, class: 'form-check-input'
= form.label :push_events, class: 'list-label form-check-label ml-1' do
%strong Push events
- %p.light.ml-1
+ = form.text_field :push_events_branch_filter, class: 'form-control', placeholder: 'Branch name or wildcard pattern to trigger on (leave blank for all)'
+ %p.text-muted.ml-1
This URL will be triggered by a push to the repository
- = form.text_field :push_events_branch_filter, class: 'form-control', placeholder: 'Branch name or wildcard pattern to trigger on (leave blank for all)'
%li
= form.check_box :tag_push_events, class: 'form-check-input'
= form.label :tag_push_events, class: 'list-label form-check-label ml-1' do
%strong Tag push events
- %p.light.ml-1
+ %p.text-muted.ml-1
This URL will be triggered when a new tag is pushed to the repository
%li
= form.check_box :note_events, class: 'form-check-input'
= form.label :note_events, class: 'list-label form-check-label ml-1' do
%strong Comments
- %p.light.ml-1
+ %p.text-muted.ml-1
This URL will be triggered when someone adds a comment
%li
= form.check_box :confidential_note_events, class: 'form-check-input'
= form.label :confidential_note_events, class: 'list-label form-check-label ml-1' do
%strong Confidential Comments
- %p.light.ml-1
+ %p.text-muted.ml-1
This URL will be triggered when someone adds a comment on a confidential issue
%li
= form.check_box :issues_events, class: 'form-check-input'
= form.label :issues_events, class: 'list-label form-check-label ml-1' do
%strong Issues events
- %p.light.ml-1
+ %p.text-muted.ml-1
This URL will be triggered when an issue is created/updated/merged
%li
= form.check_box :confidential_issues_events, class: 'form-check-input'
= form.label :confidential_issues_events, class: 'list-label form-check-label ml-1' do
%strong Confidential Issues events
- %p.light.ml-1
+ %p.text-muted.ml-1
This URL will be triggered when a confidential issue is created/updated/merged
%li
= form.check_box :merge_requests_events, class: 'form-check-input'
= form.label :merge_requests_events, class: 'list-label form-check-label ml-1' do
%strong Merge request events
- %p.light.ml-1
+ %p.text-muted.ml-1
This URL will be triggered when a merge request is created/updated/merged
%li
= form.check_box :job_events, class: 'form-check-input'
= form.label :job_events, class: 'list-label form-check-label ml-1' do
%strong Job events
- %p.light.ml-1
+ %p.text-muted.ml-1
This URL will be triggered when the job status changes
%li
= form.check_box :pipeline_events, class: 'form-check-input'
= form.label :pipeline_events, class: 'list-label form-check-label ml-1' do
%strong Pipeline events
- %p.light.ml-1
+ %p.text-muted.ml-1
This URL will be triggered when the pipeline status changes
%li
= form.check_box :wiki_page_events, class: 'form-check-input'
= form.label :wiki_page_events, class: 'list-label form-check-label ml-1' do
%strong Wiki Page events
- %p.light.ml-1
+ %p.text-muted.ml-1
This URL will be triggered when a wiki page is created/updated
.form-group
= form.label :enable_ssl_verification, 'SSL verification', class: 'label-bold checkbox'
diff --git a/app/views/snippets/notes/_actions.html.haml b/app/views/snippets/notes/_actions.html.haml
index 6e20890a47f..28fbeaa25f0 100644
--- a/app/views/snippets/notes/_actions.html.haml
+++ b/app/views/snippets/notes/_actions.html.haml
@@ -2,7 +2,6 @@
- if note.emoji_awardable?
.note-actions-item
= link_to '#', title: _('Add reaction'), class: "note-action-button note-emoji-button js-add-award js-note-emoji has-tooltip", data: { position: 'right' } do
- = icon('spinner spin')
%span{ class: 'link-highlight award-control-icon-neutral' }= sprite_icon('slight-smile')
%span{ class: 'link-highlight award-control-icon-positive' }= sprite_icon('smiley')
%span{ class: 'link-highlight award-control-icon-super-positive' }= sprite_icon('smile')
diff --git a/app/views/snippets/show.html.haml b/app/views/snippets/show.html.haml
index 30f760f2122..741e38e3d84 100644
--- a/app/views/snippets/show.html.haml
+++ b/app/views/snippets/show.html.haml
@@ -10,8 +10,7 @@
= render 'shared/snippets/header'
.personal-snippets
- %article.file-holder.snippet-file-content
- = render 'shared/snippets/blob'
+ = render 'shared/snippets/blob', blob: @blob
.row-content-block.top-block.content-component-block
= render 'award_emoji/awards_block', awardable: @snippet, inline: true
diff --git a/app/views/u2f/_authenticate.html.haml b/app/views/u2f/_authenticate.html.haml
index 979f6862de3..51018428b1b 100644
--- a/app/views/u2f/_authenticate.html.haml
+++ b/app/views/u2f/_authenticate.html.haml
@@ -1,7 +1,6 @@
#js-authenticate-u2f
%a.btn.btn-block.btn-info#js-login-2fa-device{ href: '#' }= _("Sign in via 2FA code")
--# haml-lint:disable InlineJavaScript
%script#js-authenticate-u2f-in-progress{ type: "text/template" }
%p= _("Trying to communicate with your device. Plug it in (if you haven't already) and press the button on the device now.")
diff --git a/app/views/users/_cover_controls.html.haml b/app/views/users/_cover_controls.html.haml
new file mode 100644
index 00000000000..43278e9d232
--- /dev/null
+++ b/app/views/users/_cover_controls.html.haml
@@ -0,0 +1,2 @@
+.cover-controls.d-flex.px-2.pb-4.d-sm-block.p-sm-0
+ = yield
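The new `_cover_controls.html.haml` partial is consumed through Rails' `render layout:` form (see the users/show.html.haml hunk below): the block passed at the call site becomes the partial's `yield`, so each profile button ends up inside the shared `.cover-controls` container.

    # Call-site shape used below in app/views/users/show.html.haml:
    render layout: 'users/cover_controls' do
      # link_to / button markup rendered here is emitted where the partial yields
    end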
diff --git a/app/views/users/_profile_basic_info.html.haml b/app/views/users/_profile_basic_info.html.haml
index af0a766bab0..c431a72d0e7 100644
--- a/app/views/users/_profile_basic_info.html.haml
+++ b/app/views/users/_profile_basic_info.html.haml
@@ -1,4 +1,4 @@
-%p
+%p.mb-1.mb-sm-2.mt-2.mt-sm-3
%span.middle-dot-divider
@#{@user.username}
- if can?(current_user, :read_user_profile, @user)
diff --git a/app/views/users/show.html.haml b/app/views/users/show.html.haml
index 3c164588b13..9f5124afc16 100644
--- a/app/views/users/show.html.haml
+++ b/app/views/users/show.html.haml
@@ -4,30 +4,31 @@
- page_title @user.blocked? ? s_('UserProfile|Blocked user') : @user.name
- page_description @user.bio
- header_title @user.name, user_path(@user)
+- link_classes = "flex-grow-1 mx-1 "
= content_for :meta_tags do
= auto_discovery_link_tag(:atom, user_url(@user, format: :atom), title: "#{@user.name} activity")
.user-profile
.cover-block.user-cover-block{ class: [('border-bottom' if profile_tabs.empty?)] }
- .cover-controls
+ = render layout: 'users/cover_controls' do
- if @user == current_user
- = link_to profile_path, class: 'btn btn-default has-tooltip', title: s_('UserProfile|Edit profile'), 'aria-label': 'Edit profile' do
+ = link_to profile_path, class: link_classes + 'btn btn-default has-tooltip', title: s_('UserProfile|Edit profile'), 'aria-label': 'Edit profile' do
= icon('pencil')
- elsif current_user
- if @user.abuse_report
- %button.btn.btn-danger{ title: s_('UserProfile|Already reported for abuse'),
+ %button{ class: link_classes + 'btn btn-danger mr-1', title: s_('UserProfile|Already reported for abuse'),
data: { toggle: 'tooltip', placement: 'bottom', container: 'body' } }
= icon('exclamation-circle')
- else
- = link_to new_abuse_report_path(user_id: @user.id, ref_url: request.referrer), class: 'btn',
+ = link_to new_abuse_report_path(user_id: @user.id, ref_url: request.referrer), class: link_classes + 'btn',
title: s_('UserProfile|Report abuse'), data: { toggle: 'tooltip', placement: 'bottom', container: 'body' } do
= icon('exclamation-circle')
- if can?(current_user, :read_user_profile, @user)
- = link_to user_path(@user, rss_url_options), class: 'btn btn-default has-tooltip', title: s_('UserProfile|Subscribe'), 'aria-label': 'Subscribe' do
+ = link_to user_path(@user, rss_url_options), class: link_classes + 'btn btn-default has-tooltip', title: s_('UserProfile|Subscribe'), 'aria-label': 'Subscribe' do
= icon('rss')
- if current_user && current_user.admin?
- = link_to [:admin, @user], class: 'btn btn-default', title: s_('UserProfile|View user in admin area'),
+ = link_to [:admin, @user], class: link_classes + 'btn btn-default', title: s_('UserProfile|View user in admin area'),
data: {toggle: 'tooltip', placement: 'bottom', container: 'body'} do
= icon('users')
@@ -51,10 +52,18 @@
= emoji_icon(@user.status.emoji)
= markdown_field(@user.status, :message)
= render "users/profile_basic_info"
- .cover-desc.cgray
- - unless @user.public_email.blank?
- .profile-link-holder.middle-dot-divider
- = link_to @user.public_email, "mailto:#{@user.public_email}", class: 'text-link'
+ .cover-desc.cgray.mb-1.mb-sm-2
+ - unless @user.location.blank?
+ .profile-link-holder.middle-dot-divider-sm.d-block.d-sm-inline.mb-1.mb-sm-0
+ = sprite_icon('location', size: 16, css_class: 'vertical-align-sub fgray')
+ %span.vertical-align-middle
+ = @user.location
+ - unless work_information(@user).blank?
+ .profile-link-holder.middle-dot-divider-sm.d-block.d-sm-inline
+ = sprite_icon('work', size: 16, css_class: 'vertical-align-middle fgray')
+ %span.vertical-align-middle
+ = work_information(@user)
+ .cover-desc.cgray.mb-1.mb-sm-2
- unless @user.skype.blank?
.profile-link-holder.middle-dot-divider
= link_to "skype:#{@user.skype}", title: "Skype" do
@@ -64,24 +73,18 @@
= link_to linkedin_url(@user), title: "LinkedIn", target: '_blank', rel: 'noopener noreferrer nofollow' do
= icon('linkedin-square')
- unless @user.twitter.blank?
- .profile-link-holder.middle-dot-divider
+ .profile-link-holder.middle-dot-divider-sm
= link_to twitter_url(@user), title: "Twitter", target: '_blank', rel: 'noopener noreferrer nofollow' do
= icon('twitter-square')
- unless @user.website_url.blank?
- .profile-link-holder.middle-dot-divider
+ .profile-link-holder.middle-dot-divider-sm.d-block.d-sm-inline.mt-1.mt-sm-0
= link_to @user.short_website_url, @user.full_website_url, class: 'text-link', target: '_blank', rel: 'me noopener noreferrer nofollow'
- - unless @user.location.blank?
- .profile-link-holder.middle-dot-divider
- = sprite_icon('location', size: 16, css_class: 'vertical-align-sub')
- = @user.location
- - unless @user.organization.blank?
- .profile-link-holder.middle-dot-divider
- = sprite_icon('work', size: 16, css_class: 'vertical-align-sub')
- = @user.organization
-
+ - unless @user.public_email.blank?
+ .profile-link-holder.middle-dot-divider-sm.d-block.d-sm-inline.mt-1.mt-sm-0
+ = link_to @user.public_email, "mailto:#{@user.public_email}", class: 'text-link'
- if @user.bio.present?
.cover-desc.cgray
- %p.profile-user-bio
+ %p.profile-user-bio.font-italic
= @user.bio
- unless profile_tabs.empty?
diff --git a/app/workers/admin_email_worker.rb b/app/workers/admin_email_worker.rb
index a7cc4fb0d11..c84ac60d777 100644
--- a/app/workers/admin_email_worker.rb
+++ b/app/workers/admin_email_worker.rb
@@ -1,13 +1,13 @@
# frozen_string_literal: true
-class AdminEmailWorker
+class AdminEmailWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
# rubocop:disable Scalability/CronWorkerContext
# This worker does not perform work scoped to a context
include CronjobQueue
# rubocop:enable Scalability/CronWorkerContext
- feature_category_not_owned!
+ feature_category :source_code_management
def perform
send_repository_check_mail if Gitlab::CurrentSettings.repository_checks_enabled
diff --git a/app/workers/all_queues.yml b/app/workers/all_queues.yml
index f6daab73689..28fab10d931 100644
--- a/app/workers/all_queues.yml
+++ b/app/workers/all_queues.yml
@@ -6,1080 +6,1295 @@
- :name: auto_devops:auto_devops_disable
:feature_category: :auto_devops
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 2
+ :idempotent:
- :name: auto_merge:auto_merge_process
:feature_category: :continuous_delivery
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :cpu
:weight: 3
+ :idempotent:
- :name: chaos:chaos_cpu_spin
:feature_category: :chaos_engineering
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 2
+ :idempotent:
- :name: chaos:chaos_db_spin
:feature_category: :chaos_engineering
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 2
+ :idempotent:
- :name: chaos:chaos_kill
:feature_category: :chaos_engineering
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 2
+ :idempotent:
- :name: chaos:chaos_leak_mem
:feature_category: :chaos_engineering
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 2
+ :idempotent:
- :name: chaos:chaos_sleep
:feature_category: :chaos_engineering
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 2
+ :idempotent:
- :name: container_repository:cleanup_container_repository
:feature_category: :container_registry
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: container_repository:delete_container_repository
:feature_category: :container_registry
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: cronjob:admin_email
- :feature_category: :not_owned
+ :feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: cronjob:ci_archive_traces_cron
:feature_category: :continuous_integration
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: cronjob:container_expiration_policy
:feature_category: :container_registry
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: cronjob:environments_auto_stop_cron
:feature_category: :continuous_delivery
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: cronjob:expire_build_artifacts
:feature_category: :continuous_integration
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: cronjob:gitlab_usage_ping
- :feature_category: :not_owned
+ :feature_category: :collection
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: cronjob:import_export_project_cleanup
:feature_category: :importers
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: cronjob:issue_due_scheduler
:feature_category: :issue_tracking
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: cronjob:namespaces_prune_aggregation_schedules
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :cpu
:weight: 1
+ :idempotent:
- :name: cronjob:pages_domain_removal_cron
:feature_category: :pages
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :cpu
:weight: 1
+ :idempotent:
- :name: cronjob:pages_domain_ssl_renewal_cron
:feature_category: :pages
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: cronjob:pages_domain_verification_cron
:feature_category: :pages
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: cronjob:personal_access_tokens_expiring
:feature_category: :authentication_and_authorization
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: cronjob:pipeline_schedule
:feature_category: :continuous_integration
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :cpu
:weight: 1
+ :idempotent:
- :name: cronjob:prune_old_events
- :feature_category: :not_owned
+ :feature_category: :users
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: cronjob:prune_web_hook_logs
:feature_category: :integrations
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: cronjob:remove_expired_group_links
:feature_category: :authentication_and_authorization
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: cronjob:remove_expired_members
:feature_category: :authentication_and_authorization
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :cpu
:weight: 1
+ :idempotent:
- :name: cronjob:remove_unreferenced_lfs_objects
:feature_category: :git_lfs
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: cronjob:repository_archive_cache
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: cronjob:repository_check_dispatch
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: cronjob:requests_profiles
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: cronjob:schedule_migrate_external_diffs
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: cronjob:stuck_ci_jobs
:feature_category: :continuous_integration
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :cpu
:weight: 1
+ :idempotent:
+- :name: cronjob:stuck_export_jobs
+ :feature_category: :importers
+ :has_external_dependencies:
+ :urgency: :low
+ :resource_boundary: :cpu
+ :weight: 1
+ :idempotent:
- :name: cronjob:stuck_import_jobs
:feature_category: :importers
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :cpu
:weight: 1
+ :idempotent:
- :name: cronjob:stuck_merge_jobs
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: cronjob:trending_projects
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: deployment:deployments_finished
:feature_category: :continuous_delivery
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :cpu
:weight: 3
+ :idempotent:
- :name: deployment:deployments_forward_deployment
:feature_category: :continuous_delivery
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 3
+ :idempotent:
- :name: deployment:deployments_success
:feature_category: :continuous_delivery
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :cpu
:weight: 3
+ :idempotent:
- :name: gcp_cluster:cluster_configure
:feature_category: :kubernetes_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: gcp_cluster:cluster_configure_istio
:feature_category: :kubernetes_management
:has_external_dependencies: true
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: gcp_cluster:cluster_install_app
:feature_category: :kubernetes_management
:has_external_dependencies: true
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: gcp_cluster:cluster_patch_app
:feature_category: :kubernetes_management
:has_external_dependencies: true
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: gcp_cluster:cluster_project_configure
:feature_category: :kubernetes_management
:has_external_dependencies: true
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: gcp_cluster:cluster_provision
:feature_category: :kubernetes_management
:has_external_dependencies: true
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: gcp_cluster:cluster_upgrade_app
:feature_category: :kubernetes_management
:has_external_dependencies: true
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: gcp_cluster:cluster_wait_for_app_installation
:feature_category: :kubernetes_management
:has_external_dependencies: true
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :cpu
:weight: 1
+ :idempotent:
- :name: gcp_cluster:cluster_wait_for_ingress_ip_address
:feature_category: :kubernetes_management
:has_external_dependencies: true
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: gcp_cluster:clusters_applications_activate_service
:feature_category: :kubernetes_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: gcp_cluster:clusters_applications_deactivate_service
:feature_category: :kubernetes_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: gcp_cluster:clusters_applications_uninstall
:feature_category: :kubernetes_management
:has_external_dependencies: true
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: gcp_cluster:clusters_applications_wait_for_uninstall_app
:feature_category: :kubernetes_management
:has_external_dependencies: true
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :cpu
:weight: 1
+ :idempotent:
- :name: gcp_cluster:clusters_cleanup_app
:feature_category: :kubernetes_management
:has_external_dependencies: true
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: gcp_cluster:clusters_cleanup_project_namespace
:feature_category: :kubernetes_management
:has_external_dependencies: true
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: gcp_cluster:clusters_cleanup_service_account
:feature_category: :kubernetes_management
:has_external_dependencies: true
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: gcp_cluster:wait_for_cluster_creation
:feature_category: :kubernetes_management
:has_external_dependencies: true
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: github_importer:github_import_import_diff_note
:feature_category: :importers
:has_external_dependencies: true
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: github_importer:github_import_import_issue
:feature_category: :importers
:has_external_dependencies: true
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: github_importer:github_import_import_lfs_object
:feature_category: :importers
:has_external_dependencies: true
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: github_importer:github_import_import_note
:feature_category: :importers
:has_external_dependencies: true
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: github_importer:github_import_import_pull_request
:feature_category: :importers
:has_external_dependencies: true
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: github_importer:github_import_refresh_import_jid
:feature_category: :importers
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: github_importer:github_import_stage_finish_import
:feature_category: :importers
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: github_importer:github_import_stage_import_base_data
:feature_category: :importers
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: github_importer:github_import_stage_import_issues_and_diff_notes
:feature_category: :importers
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: github_importer:github_import_stage_import_lfs_objects
:feature_category: :importers
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: github_importer:github_import_stage_import_notes
:feature_category: :importers
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: github_importer:github_import_stage_import_pull_requests
:feature_category: :importers
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: github_importer:github_import_stage_import_repository
:feature_category: :importers
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: hashed_storage:hashed_storage_migrator
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: hashed_storage:hashed_storage_project_migrate
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: hashed_storage:hashed_storage_project_rollback
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: hashed_storage:hashed_storage_rollbacker
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: incident_management:incident_management_process_alert
:feature_category: :incident_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 2
+ :idempotent:
- :name: mail_scheduler:mail_scheduler_issue_due
:feature_category: :issue_tracking
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 2
+ :idempotent:
- :name: mail_scheduler:mail_scheduler_notification_service
:feature_category: :issue_tracking
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :cpu
:weight: 2
+ :idempotent:
- :name: notifications:new_release
:feature_category: :release_orchestration
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 2
+ :idempotent:
- :name: object_pool:object_pool_create
:feature_category: :gitaly
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: object_pool:object_pool_destroy
:feature_category: :gitaly
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: object_pool:object_pool_join
:feature_category: :gitaly
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :cpu
:weight: 1
+ :idempotent:
- :name: object_pool:object_pool_schedule_join
:feature_category: :gitaly
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: object_storage:object_storage_background_move
:feature_category: :not_owned
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: object_storage:object_storage_migrate_uploads
:feature_category: :not_owned
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: pipeline_background:archive_trace
:feature_category: :continuous_integration
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: pipeline_background:ci_build_trace_chunk_flush
:feature_category: :continuous_integration
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: pipeline_cache:expire_job_cache
:feature_category: :continuous_integration
:has_external_dependencies:
- :latency_sensitive: true
+ :urgency: :high
:resource_boundary: :unknown
:weight: 3
+ :idempotent: true
- :name: pipeline_cache:expire_pipeline_cache
:feature_category: :continuous_integration
:has_external_dependencies:
- :latency_sensitive: true
+ :urgency: :high
:resource_boundary: :cpu
:weight: 3
+ :idempotent:
- :name: pipeline_creation:create_pipeline
:feature_category: :continuous_integration
:has_external_dependencies:
- :latency_sensitive: true
+ :urgency: :high
:resource_boundary: :cpu
:weight: 4
+ :idempotent:
- :name: pipeline_creation:run_pipeline_schedule
:feature_category: :continuous_integration
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 4
+ :idempotent:
- :name: pipeline_default:build_coverage
:feature_category: :continuous_integration
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 3
+ :idempotent:
- :name: pipeline_default:build_trace_sections
:feature_category: :continuous_integration
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 3
+ :idempotent:
- :name: pipeline_default:ci_create_cross_project_pipeline
:feature_category: :continuous_integration
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :cpu
:weight: 3
+ :idempotent:
- :name: pipeline_default:ci_pipeline_bridge_status
:feature_category: :continuous_integration
:has_external_dependencies:
- :latency_sensitive: true
+ :urgency: :high
:resource_boundary: :cpu
:weight: 3
+ :idempotent:
- :name: pipeline_default:pipeline_metrics
:feature_category: :continuous_integration
:has_external_dependencies:
- :latency_sensitive: true
+ :urgency: :high
:resource_boundary: :unknown
:weight: 3
+ :idempotent:
- :name: pipeline_default:pipeline_notification
:feature_category: :continuous_integration
:has_external_dependencies:
- :latency_sensitive: true
+ :urgency: :high
+ :resource_boundary: :cpu
+ :weight: 3
+ :idempotent:
+- :name: pipeline_default:pipeline_update_ci_ref_status
+ :feature_category: :continuous_integration
+ :has_external_dependencies:
+ :urgency: :high
:resource_boundary: :cpu
:weight: 3
+ :idempotent:
- :name: pipeline_hooks:build_hooks
:feature_category: :continuous_integration
:has_external_dependencies:
- :latency_sensitive: true
+ :urgency: :high
:resource_boundary: :unknown
:weight: 2
+ :idempotent:
- :name: pipeline_hooks:pipeline_hooks
:feature_category: :continuous_integration
:has_external_dependencies:
- :latency_sensitive: true
+ :urgency: :high
:resource_boundary: :cpu
:weight: 2
+ :idempotent:
- :name: pipeline_processing:build_finished
:feature_category: :continuous_integration
:has_external_dependencies:
- :latency_sensitive: true
+ :urgency: :high
:resource_boundary: :cpu
:weight: 5
+ :idempotent:
- :name: pipeline_processing:build_queue
:feature_category: :continuous_integration
:has_external_dependencies:
- :latency_sensitive: true
+ :urgency: :high
:resource_boundary: :cpu
:weight: 5
+ :idempotent:
- :name: pipeline_processing:build_success
:feature_category: :continuous_integration
:has_external_dependencies:
- :latency_sensitive: true
+ :urgency: :high
:resource_boundary: :unknown
:weight: 5
+ :idempotent:
- :name: pipeline_processing:ci_build_prepare
:feature_category: :continuous_integration
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 5
+ :idempotent:
- :name: pipeline_processing:ci_build_schedule
:feature_category: :continuous_integration
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :cpu
:weight: 5
+ :idempotent:
- :name: pipeline_processing:ci_resource_groups_assign_resource_from_resource_group
:feature_category: :continuous_delivery
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 5
+ :idempotent:
- :name: pipeline_processing:pipeline_process
:feature_category: :continuous_integration
:has_external_dependencies:
- :latency_sensitive: true
+ :urgency: :high
:resource_boundary: :unknown
:weight: 5
+ :idempotent:
- :name: pipeline_processing:pipeline_success
:feature_category: :continuous_integration
:has_external_dependencies:
- :latency_sensitive: true
+ :urgency: :high
:resource_boundary: :unknown
:weight: 5
+ :idempotent:
- :name: pipeline_processing:pipeline_update
:feature_category: :continuous_integration
:has_external_dependencies:
- :latency_sensitive: true
+ :urgency: :high
:resource_boundary: :unknown
:weight: 5
+ :idempotent:
- :name: pipeline_processing:stage_update
:feature_category: :continuous_integration
:has_external_dependencies:
- :latency_sensitive: true
+ :urgency: :high
:resource_boundary: :unknown
:weight: 5
+ :idempotent:
- :name: pipeline_processing:update_head_pipeline_for_merge_request
:feature_category: :continuous_integration
:has_external_dependencies:
- :latency_sensitive: true
+ :urgency: :high
:resource_boundary: :cpu
:weight: 5
+ :idempotent:
- :name: repository_check:repository_check_batch
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: repository_check:repository_check_clear
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: repository_check:repository_check_single_repository
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: todos_destroyer:todos_destroyer_confidential_issue
:feature_category: :issue_tracking
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: todos_destroyer:todos_destroyer_entity_leave
:feature_category: :issue_tracking
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: todos_destroyer:todos_destroyer_group_private
:feature_category: :issue_tracking
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: todos_destroyer:todos_destroyer_private_features
:feature_category: :issue_tracking
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: todos_destroyer:todos_destroyer_project_private
:feature_category: :issue_tracking
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: update_namespace_statistics:namespaces_root_statistics
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: update_namespace_statistics:namespaces_schedule_aggregation
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
+- :name: authorized_keys
+ :feature_category: :source_code_management
+ :has_external_dependencies:
+ :urgency: :high
+ :resource_boundary: :unknown
+ :weight: 2
+ :idempotent: true
- :name: authorized_projects
:feature_category: :authentication_and_authorization
:has_external_dependencies:
- :latency_sensitive: true
+ :urgency: :high
:resource_boundary: :unknown
:weight: 2
+ :idempotent: true
- :name: background_migration
:feature_category: :not_owned
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: chat_notification
:feature_category: :chatops
- :has_external_dependencies:
- :latency_sensitive: true
+ :has_external_dependencies: true
+ :urgency: :low
:resource_boundary: :unknown
:weight: 2
+ :idempotent:
- :name: create_commit_signature
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 2
+ :idempotent:
- :name: create_evidence
- :feature_category: :release_governance
+ :feature_category: :release_evidence
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 2
+ :idempotent:
- :name: create_note_diff_file
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: default
:feature_category:
:has_external_dependencies:
- :latency_sensitive:
+ :urgency:
:resource_boundary:
:weight: 1
+ :idempotent:
- :name: delete_diff_files
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: delete_merged_branches
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: delete_stored_files
:feature_category: :not_owned
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: delete_user
:feature_category: :authentication_and_authorization
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: detect_repository_languages
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: email_receiver
:feature_category: :issue_tracking
:has_external_dependencies:
- :latency_sensitive: true
+ :urgency: :high
:resource_boundary: :unknown
:weight: 2
+ :idempotent:
- :name: emails_on_push
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive: true
+ :urgency: :high
:resource_boundary: :cpu
:weight: 2
+ :idempotent:
- :name: error_tracking_issue_link
:feature_category: :error_tracking
:has_external_dependencies: true
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: expire_build_instance_artifacts
:feature_category: :continuous_integration
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: file_hook
:feature_category: :integrations
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: git_garbage_collect
:feature_category: :gitaly
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: github_import_advance_stage
:feature_category: :importers
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: gitlab_shell
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive: true
+ :urgency: :high
:resource_boundary: :unknown
:weight: 2
+ :idempotent:
- :name: group_destroy
:feature_category: :subgroups
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: group_export
:feature_category: :importers
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: group_import
:feature_category: :importers
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: import_issues_csv
:feature_category: :issue_tracking
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :cpu
:weight: 2
+ :idempotent:
- :name: invalid_gpg_signature_update
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 2
+ :idempotent:
- :name: irker
:feature_category: :integrations
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: mailers
:feature_category:
:has_external_dependencies:
- :latency_sensitive:
+ :urgency:
:resource_boundary:
:weight: 2
+ :idempotent:
- :name: merge
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive: true
+ :urgency: :high
:resource_boundary: :unknown
:weight: 5
+ :idempotent:
- :name: merge_request_mergeability_check
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: migrate_external_diffs
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: namespaceless_project_destroy
:feature_category: :authentication_and_authorization
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: new_issue
:feature_category: :issue_tracking
:has_external_dependencies:
- :latency_sensitive: true
+ :urgency: :high
:resource_boundary: :cpu
:weight: 2
+ :idempotent:
- :name: new_merge_request
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive: true
+ :urgency: :high
:resource_boundary: :cpu
:weight: 2
+ :idempotent:
- :name: new_note
:feature_category: :issue_tracking
:has_external_dependencies:
- :latency_sensitive: true
+ :urgency: :high
:resource_boundary: :cpu
:weight: 2
+ :idempotent:
- :name: pages
:feature_category: :pages
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: pages_domain_ssl_renewal
:feature_category: :pages
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: pages_domain_verification
:feature_category: :pages
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: phabricator_import_import_tasks
:feature_category: :importers
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: post_receive
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive: true
+ :urgency: :high
:resource_boundary: :cpu
:weight: 5
+ :idempotent:
- :name: process_commit
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive: true
+ :urgency: :high
:resource_boundary: :unknown
:weight: 3
+ :idempotent:
- :name: project_cache
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive: true
+ :urgency: :high
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: project_daily_statistics
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: project_destroy
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: project_export
:feature_category: :importers
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :memory
:weight: 1
+ :idempotent:
- :name: project_service
:feature_category: :integrations
:has_external_dependencies: true
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
+- :name: project_update_repository_storage
+ :feature_category: :gitaly
+ :has_external_dependencies:
+ :urgency: :low
+ :resource_boundary: :unknown
+ :weight: 1
+ :idempotent:
- :name: propagate_service_template
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: reactive_caching
:feature_category: :not_owned
:has_external_dependencies:
- :latency_sensitive: true
+ :urgency: :high
:resource_boundary: :cpu
:weight: 1
+ :idempotent:
- :name: rebase
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 2
+ :idempotent:
- :name: remote_mirror_notification
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 2
+ :idempotent:
- :name: repository_cleanup
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: repository_fork
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: repository_import
:feature_category: :importers
:has_external_dependencies: true
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: repository_remove_remote
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: repository_update_remote_mirror
:feature_category: :source_code_management
:has_external_dependencies: true
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: self_monitoring_project_create
:feature_category: :metrics
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 2
+ :idempotent:
- :name: self_monitoring_project_delete
:feature_category: :metrics
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 2
+ :idempotent:
- :name: system_hook_push
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: update_external_pull_requests
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 3
+ :idempotent:
- :name: update_merge_requests
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive: true
+ :urgency: :high
:resource_boundary: :cpu
:weight: 3
+ :idempotent:
- :name: update_project_statistics
:feature_category: :source_code_management
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: upload_checksum
:feature_category: :geo_replication
:has_external_dependencies:
- :latency_sensitive:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent:
- :name: web_hook
:feature_category: :integrations
:has_external_dependencies: true
- :latency_sensitive:
+ :urgency: :low
+ :resource_boundary: :unknown
+ :weight: 1
+ :idempotent:
+- :name: x509_certificate_revoke
+ :feature_category: :source_code_management
+ :has_external_dependencies:
+ :urgency: :low
:resource_boundary: :unknown
:weight: 1
+ :idempotent: true
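Throughout `all_queues.yml`, the boolean `:latency_sensitive:` flag becomes an `:urgency:` level and every entry gains an `:idempotent:` key. These values are kept in line with class-level declarations in the workers themselves, as the worker diffs below show; a compact sketch of the declaration side, with a made-up class name:

    # Hypothetical worker showing the attribute DSL the YAML entries mirror
    # (compare AuthorizedKeysWorker below): `urgency` replaces the old
    # `latency_sensitive_worker!` marking and `idempotent!` feeds :idempotent.
    class ExampleWorker
      include ApplicationWorker

      feature_category :source_code_management
      urgency :high
      weight 2
      idempotent!
    end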
diff --git a/app/workers/archive_trace_worker.rb b/app/workers/archive_trace_worker.rb
index 66f9b8d9e80..3ddb5686bf2 100644
--- a/app/workers/archive_trace_worker.rb
+++ b/app/workers/archive_trace_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class ArchiveTraceWorker
+class ArchiveTraceWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include PipelineBackgroundQueue
diff --git a/app/workers/authorized_keys_worker.rb b/app/workers/authorized_keys_worker.rb
new file mode 100644
index 00000000000..b2333033e56
--- /dev/null
+++ b/app/workers/authorized_keys_worker.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+class AuthorizedKeysWorker
+ include ApplicationWorker
+
+ PERMITTED_ACTIONS = [:add_key, :remove_key].freeze
+
+ feature_category :source_code_management
+ urgency :high
+ weight 2
+ idempotent!
+
+ def perform(action, *args)
+ return unless Gitlab::CurrentSettings.authorized_keys_enabled?
+
+ case action
+ when :add_key
+ authorized_keys.add_key(*args)
+ when :remove_key
+ authorized_keys.remove_key(*args)
+ end
+ end
+
+ private
+
+ def authorized_keys
+ @authorized_keys ||= Gitlab::AuthorizedKeys.new
+ end
+end
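A minimal sketch of exercising the new worker's `perform(action, *args)` contract synchronously; the key id and content are invented for illustration, and real call sites (not shown in this hunk) would enqueue the job through Sidekiq instead:

    # Synchronous illustration only; arguments are made up, and the guard
    # clause assumes authorized_keys file management is enabled.
    AuthorizedKeysWorker.new.perform(:add_key, 'key-42', 'ssh-ed25519 AAAAC3Nz... user@example.com')

    # Unknown actions fall through the `case` and the job returns without
    # touching authorized_keys, which is part of what makes `idempotent!` safe.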
diff --git a/app/workers/authorized_projects_worker.rb b/app/workers/authorized_projects_worker.rb
index 1ab2fd6023f..a35e0320553 100644
--- a/app/workers/authorized_projects_worker.rb
+++ b/app/workers/authorized_projects_worker.rb
@@ -5,9 +5,11 @@ class AuthorizedProjectsWorker
prepend WaitableWorker
feature_category :authentication_and_authorization
- latency_sensitive_worker!
+ urgency :high
weight 2
+ idempotent!
+
# This is a workaround for a Ruby 2.3.7 bug. rspec-mocks cannot restore the
# visibility of prepended modules. See https://github.com/rspec/rspec-mocks/issues/1231
# for more details.
diff --git a/app/workers/auto_devops/disable_worker.rb b/app/workers/auto_devops/disable_worker.rb
index 73ddc591505..bae08cf9e18 100644
--- a/app/workers/auto_devops/disable_worker.rb
+++ b/app/workers/auto_devops/disable_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module AutoDevops
- class DisableWorker
+ class DisableWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include AutoDevopsQueue
diff --git a/app/workers/auto_merge_process_worker.rb b/app/workers/auto_merge_process_worker.rb
index 1681fac3363..2599c76c900 100644
--- a/app/workers/auto_merge_process_worker.rb
+++ b/app/workers/auto_merge_process_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class AutoMergeProcessWorker
+class AutoMergeProcessWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
queue_namespace :auto_merge
diff --git a/app/workers/background_migration_worker.rb b/app/workers/background_migration_worker.rb
index 20e2cdd7f96..231c2bcd83b 100644
--- a/app/workers/background_migration_worker.rb
+++ b/app/workers/background_migration_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class BackgroundMigrationWorker
+class BackgroundMigrationWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category_not_owned!
@@ -22,17 +22,19 @@ class BackgroundMigrationWorker
# class_name - The class name of the background migration to run.
# arguments - The arguments to pass to the migration class.
def perform(class_name, arguments = [])
- should_perform, ttl = perform_and_ttl(class_name)
+ with_context(caller_id: class_name.to_s) do
+ should_perform, ttl = perform_and_ttl(class_name)
- if should_perform
- Gitlab::BackgroundMigration.perform(class_name, arguments)
- else
- # If the lease could not be obtained this means either another process is
- # running a migration of this class or we ran one recently. In this case
- # we'll reschedule the job in such a way that it is picked up again around
- # the time the lease expires.
- self.class
- .perform_in(ttl || self.class.minimum_interval, class_name, arguments)
+ if should_perform
+ Gitlab::BackgroundMigration.perform(class_name, arguments)
+ else
+ # If the lease could not be obtained this means either another process is
+ # running a migration of this class or we ran one recently. In this case
+ # we'll reschedule the job in such a way that it is picked up again around
+ # the time the lease expires.
+ self.class
+ .perform_in(ttl || self.class.minimum_interval, class_name, arguments)
+ end
end
end
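The migration body is now wrapped in `with_context(caller_id: ...)`, so logging and instrumentation emitted while a migration runs is attributed to the migration class rather than to the generic BackgroundMigrationWorker. A sketch of the effect as it would read inside `perform`, using a placeholder migration name and the same calls shown above:

    # 'PopulateHypotheticalColumn' is a placeholder class name.
    with_context(caller_id: 'PopulateHypotheticalColumn') do
      Gitlab::BackgroundMigration.perform('PopulateHypotheticalColumn', [1, 1_000])
    end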
diff --git a/app/workers/build_coverage_worker.rb b/app/workers/build_coverage_worker.rb
index 912c53e11f8..7d893024abc 100644
--- a/app/workers/build_coverage_worker.rb
+++ b/app/workers/build_coverage_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class BuildCoverageWorker
+class BuildCoverageWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include PipelineQueue
diff --git a/app/workers/build_finished_worker.rb b/app/workers/build_finished_worker.rb
index 77ce0923307..b6ef9ab4710 100644
--- a/app/workers/build_finished_worker.rb
+++ b/app/workers/build_finished_worker.rb
@@ -1,11 +1,11 @@
# frozen_string_literal: true
-class BuildFinishedWorker
+class BuildFinishedWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include PipelineQueue
queue_namespace :pipeline_processing
- latency_sensitive_worker!
+ urgency :high
worker_resource_boundary :cpu
# rubocop: disable CodeReuse/ActiveRecord
diff --git a/app/workers/build_hooks_worker.rb b/app/workers/build_hooks_worker.rb
index fa55769e486..9693d3eb57f 100644
--- a/app/workers/build_hooks_worker.rb
+++ b/app/workers/build_hooks_worker.rb
@@ -1,12 +1,12 @@
# frozen_string_literal: true
-class BuildHooksWorker
+class BuildHooksWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include PipelineQueue
queue_namespace :pipeline_hooks
feature_category :continuous_integration
- latency_sensitive_worker!
+ urgency :high
# rubocop: disable CodeReuse/ActiveRecord
def perform(build_id)
diff --git a/app/workers/build_queue_worker.rb b/app/workers/build_queue_worker.rb
index 6f75f403e6e..b71afbbeb8f 100644
--- a/app/workers/build_queue_worker.rb
+++ b/app/workers/build_queue_worker.rb
@@ -1,12 +1,12 @@
# frozen_string_literal: true
-class BuildQueueWorker
+class BuildQueueWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include PipelineQueue
queue_namespace :pipeline_processing
feature_category :continuous_integration
- latency_sensitive_worker!
+ urgency :high
worker_resource_boundary :cpu
# rubocop: disable CodeReuse/ActiveRecord
diff --git a/app/workers/build_success_worker.rb b/app/workers/build_success_worker.rb
index b7dbd367fee..e4a2dd500cc 100644
--- a/app/workers/build_success_worker.rb
+++ b/app/workers/build_success_worker.rb
@@ -1,11 +1,11 @@
# frozen_string_literal: true
-class BuildSuccessWorker
+class BuildSuccessWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include PipelineQueue
queue_namespace :pipeline_processing
- latency_sensitive_worker!
+ urgency :high
# rubocop: disable CodeReuse/ActiveRecord
def perform(build_id)
diff --git a/app/workers/build_trace_sections_worker.rb b/app/workers/build_trace_sections_worker.rb
index 0641130fd64..c25f77974e9 100644
--- a/app/workers/build_trace_sections_worker.rb
+++ b/app/workers/build_trace_sections_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class BuildTraceSectionsWorker
+class BuildTraceSectionsWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include PipelineQueue
diff --git a/app/workers/chaos/cpu_spin_worker.rb b/app/workers/chaos/cpu_spin_worker.rb
index 43a32c3274f..0b565e0d49c 100644
--- a/app/workers/chaos/cpu_spin_worker.rb
+++ b/app/workers/chaos/cpu_spin_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module Chaos
- class CpuSpinWorker
+ class CpuSpinWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ChaosQueue
diff --git a/app/workers/chaos/db_spin_worker.rb b/app/workers/chaos/db_spin_worker.rb
index 217ddabbcb6..099660d440c 100644
--- a/app/workers/chaos/db_spin_worker.rb
+++ b/app/workers/chaos/db_spin_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module Chaos
- class DbSpinWorker
+ class DbSpinWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ChaosQueue
diff --git a/app/workers/chaos/kill_worker.rb b/app/workers/chaos/kill_worker.rb
index 80f04db1be4..3dedd47a1f9 100644
--- a/app/workers/chaos/kill_worker.rb
+++ b/app/workers/chaos/kill_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module Chaos
- class KillWorker
+ class KillWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ChaosQueue
diff --git a/app/workers/chaos/leak_mem_worker.rb b/app/workers/chaos/leak_mem_worker.rb
index 0caa99e0de9..b77d1a20541 100644
--- a/app/workers/chaos/leak_mem_worker.rb
+++ b/app/workers/chaos/leak_mem_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module Chaos
- class LeakMemWorker
+ class LeakMemWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ChaosQueue
diff --git a/app/workers/chaos/sleep_worker.rb b/app/workers/chaos/sleep_worker.rb
index 7c724c4cb4e..6887258e961 100644
--- a/app/workers/chaos/sleep_worker.rb
+++ b/app/workers/chaos/sleep_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module Chaos
- class SleepWorker
+ class SleepWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ChaosQueue
diff --git a/app/workers/chat_notification_worker.rb b/app/workers/chat_notification_worker.rb
index f23c787559f..5fab437f49f 100644
--- a/app/workers/chat_notification_worker.rb
+++ b/app/workers/chat_notification_worker.rb
@@ -1,19 +1,15 @@
# frozen_string_literal: true
-class ChatNotificationWorker
+class ChatNotificationWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
TimeoutExceeded = Class.new(StandardError)
sidekiq_options retry: false
feature_category :chatops
- latency_sensitive_worker!
+ urgency :low # Can't be high as it has external dependencies
weight 2
-
- # TODO: break this into multiple jobs
- # as the `responder` uses external dependencies
- # See https://gitlab.com/gitlab-com/gl-infra/scalability/issues/34
- # worker_has_external_dependencies!
+ worker_has_external_dependencies!
RESCHEDULE_INTERVAL = 2.seconds
RESCHEDULE_TIMEOUT = 5.minutes
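The worker above now declares worker_has_external_dependencies! and drops to urgency :low, since a job that calls out to an external chat service should not be treated as high urgency. A hedged sketch of the same declarations on an illustrative worker, assuming GitLab's ApplicationWorker concern is loaded:

    class ExternalChatNotifierWorker # illustrative name, not a real GitLab worker
      include ApplicationWorker

      feature_category :chatops
      urgency :low                      # external calls rule out :high
      worker_has_external_dependencies!
      sidekiq_options retry: false

      def perform(payload)
        # ... call the external chat service here ...
      end
    end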
diff --git a/app/workers/ci/archive_traces_cron_worker.rb b/app/workers/ci/archive_traces_cron_worker.rb
index c73c7ba2dd8..0171c1d482d 100644
--- a/app/workers/ci/archive_traces_cron_worker.rb
+++ b/app/workers/ci/archive_traces_cron_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module Ci
- class ArchiveTracesCronWorker
+ class ArchiveTracesCronWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include CronjobQueue # rubocop:disable Scalability/CronWorkerContext
diff --git a/app/workers/ci/build_prepare_worker.rb b/app/workers/ci/build_prepare_worker.rb
index 20208c18d03..7f640633070 100644
--- a/app/workers/ci/build_prepare_worker.rb
+++ b/app/workers/ci/build_prepare_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module Ci
- class BuildPrepareWorker
+ class BuildPrepareWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include PipelineQueue
diff --git a/app/workers/ci/build_schedule_worker.rb b/app/workers/ci/build_schedule_worker.rb
index e34f16f46c2..9231b40978d 100644
--- a/app/workers/ci/build_schedule_worker.rb
+++ b/app/workers/ci/build_schedule_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module Ci
- class BuildScheduleWorker
+ class BuildScheduleWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include PipelineQueue
diff --git a/app/workers/ci/build_trace_chunk_flush_worker.rb b/app/workers/ci/build_trace_chunk_flush_worker.rb
index 23a11c28f9b..fe59ba896a4 100644
--- a/app/workers/ci/build_trace_chunk_flush_worker.rb
+++ b/app/workers/ci/build_trace_chunk_flush_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module Ci
- class BuildTraceChunkFlushWorker
+ class BuildTraceChunkFlushWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include PipelineBackgroundQueue
diff --git a/app/workers/ci/create_cross_project_pipeline_worker.rb b/app/workers/ci/create_cross_project_pipeline_worker.rb
index 91e9317713e..713d0092b32 100644
--- a/app/workers/ci/create_cross_project_pipeline_worker.rb
+++ b/app/workers/ci/create_cross_project_pipeline_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module Ci
- class CreateCrossProjectPipelineWorker
+ class CreateCrossProjectPipelineWorker # rubocop:disable Scalability/IdempotentWorker
include ::ApplicationWorker
include ::PipelineQueue
diff --git a/app/workers/ci/pipeline_bridge_status_worker.rb b/app/workers/ci/pipeline_bridge_status_worker.rb
index f196573deaa..3f92f4561e0 100644
--- a/app/workers/ci/pipeline_bridge_status_worker.rb
+++ b/app/workers/ci/pipeline_bridge_status_worker.rb
@@ -1,11 +1,11 @@
# frozen_string_literal: true
module Ci
- class PipelineBridgeStatusWorker
+ class PipelineBridgeStatusWorker # rubocop:disable Scalability/IdempotentWorker
include ::ApplicationWorker
include ::PipelineQueue
- latency_sensitive_worker!
+ urgency :high
worker_resource_boundary :cpu
def perform(pipeline_id)
diff --git a/app/workers/ci/resource_groups/assign_resource_from_resource_group_worker.rb b/app/workers/ci/resource_groups/assign_resource_from_resource_group_worker.rb
index 62233d19516..8063e34a1b8 100644
--- a/app/workers/ci/resource_groups/assign_resource_from_resource_group_worker.rb
+++ b/app/workers/ci/resource_groups/assign_resource_from_resource_group_worker.rb
@@ -2,7 +2,7 @@
module Ci
module ResourceGroups
- class AssignResourceFromResourceGroupWorker
+ class AssignResourceFromResourceGroupWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include PipelineQueue
diff --git a/app/workers/cleanup_container_repository_worker.rb b/app/workers/cleanup_container_repository_worker.rb
index 83397a1dda2..c3fac453e73 100644
--- a/app/workers/cleanup_container_repository_worker.rb
+++ b/app/workers/cleanup_container_repository_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class CleanupContainerRepositoryWorker
+class CleanupContainerRepositoryWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
queue_namespace :container_repository
diff --git a/app/workers/cluster_configure_istio_worker.rb b/app/workers/cluster_configure_istio_worker.rb
index dfdd408f286..ec6bdfbd6b6 100644
--- a/app/workers/cluster_configure_istio_worker.rb
+++ b/app/workers/cluster_configure_istio_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class ClusterConfigureIstioWorker
+class ClusterConfigureIstioWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ClusterQueue
diff --git a/app/workers/cluster_configure_worker.rb b/app/workers/cluster_configure_worker.rb
index e7a4797e68e..f9364ab7144 100644
--- a/app/workers/cluster_configure_worker.rb
+++ b/app/workers/cluster_configure_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class ClusterConfigureWorker
+class ClusterConfigureWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ClusterQueue
diff --git a/app/workers/cluster_install_app_worker.rb b/app/workers/cluster_install_app_worker.rb
index 0e075b295dd..002932a0fa5 100644
--- a/app/workers/cluster_install_app_worker.rb
+++ b/app/workers/cluster_install_app_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class ClusterInstallAppWorker
+class ClusterInstallAppWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ClusterQueue
include ClusterApplications
diff --git a/app/workers/cluster_patch_app_worker.rb b/app/workers/cluster_patch_app_worker.rb
index 3f95a764567..f75004aa3e5 100644
--- a/app/workers/cluster_patch_app_worker.rb
+++ b/app/workers/cluster_patch_app_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class ClusterPatchAppWorker
+class ClusterPatchAppWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ClusterQueue
include ClusterApplications
diff --git a/app/workers/cluster_project_configure_worker.rb b/app/workers/cluster_project_configure_worker.rb
index 614029c2b5c..b68df01dc7a 100644
--- a/app/workers/cluster_project_configure_worker.rb
+++ b/app/workers/cluster_project_configure_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class ClusterProjectConfigureWorker
+class ClusterProjectConfigureWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ClusterQueue
diff --git a/app/workers/cluster_provision_worker.rb b/app/workers/cluster_provision_worker.rb
index c34284319dd..cb750f3021e 100644
--- a/app/workers/cluster_provision_worker.rb
+++ b/app/workers/cluster_provision_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class ClusterProvisionWorker
+class ClusterProvisionWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ClusterQueue
diff --git a/app/workers/cluster_upgrade_app_worker.rb b/app/workers/cluster_upgrade_app_worker.rb
index cd06f0a2224..99f48415f08 100644
--- a/app/workers/cluster_upgrade_app_worker.rb
+++ b/app/workers/cluster_upgrade_app_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class ClusterUpgradeAppWorker
+class ClusterUpgradeAppWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ClusterQueue
include ClusterApplications
diff --git a/app/workers/cluster_wait_for_app_installation_worker.rb b/app/workers/cluster_wait_for_app_installation_worker.rb
index 7155dc6f835..e098c3b86b5 100644
--- a/app/workers/cluster_wait_for_app_installation_worker.rb
+++ b/app/workers/cluster_wait_for_app_installation_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class ClusterWaitForAppInstallationWorker
+class ClusterWaitForAppInstallationWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ClusterQueue
include ClusterApplications
diff --git a/app/workers/cluster_wait_for_ingress_ip_address_worker.rb b/app/workers/cluster_wait_for_ingress_ip_address_worker.rb
index 14b1651cc72..c7336ee515d 100644
--- a/app/workers/cluster_wait_for_ingress_ip_address_worker.rb
+++ b/app/workers/cluster_wait_for_ingress_ip_address_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class ClusterWaitForIngressIpAddressWorker
+class ClusterWaitForIngressIpAddressWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ClusterQueue
include ClusterApplications
diff --git a/app/workers/clusters/applications/activate_service_worker.rb b/app/workers/clusters/applications/activate_service_worker.rb
index 4f285d55162..abd7f8eaea4 100644
--- a/app/workers/clusters/applications/activate_service_worker.rb
+++ b/app/workers/clusters/applications/activate_service_worker.rb
@@ -2,7 +2,7 @@
module Clusters
module Applications
- class ActivateServiceWorker
+ class ActivateServiceWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ClusterQueue
diff --git a/app/workers/clusters/applications/deactivate_service_worker.rb b/app/workers/clusters/applications/deactivate_service_worker.rb
index 2c560cc998c..fecbb6dde45 100644
--- a/app/workers/clusters/applications/deactivate_service_worker.rb
+++ b/app/workers/clusters/applications/deactivate_service_worker.rb
@@ -2,7 +2,7 @@
module Clusters
module Applications
- class DeactivateServiceWorker
+ class DeactivateServiceWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ClusterQueue
diff --git a/app/workers/clusters/applications/uninstall_worker.rb b/app/workers/clusters/applications/uninstall_worker.rb
index 6180998c8d9..977a25e8442 100644
--- a/app/workers/clusters/applications/uninstall_worker.rb
+++ b/app/workers/clusters/applications/uninstall_worker.rb
@@ -2,7 +2,7 @@
module Clusters
module Applications
- class UninstallWorker
+ class UninstallWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ClusterQueue
include ClusterApplications
diff --git a/app/workers/clusters/applications/wait_for_uninstall_app_worker.rb b/app/workers/clusters/applications/wait_for_uninstall_app_worker.rb
index 7907aa8dfff..a486cfa90b7 100644
--- a/app/workers/clusters/applications/wait_for_uninstall_app_worker.rb
+++ b/app/workers/clusters/applications/wait_for_uninstall_app_worker.rb
@@ -2,7 +2,7 @@
module Clusters
module Applications
- class WaitForUninstallAppWorker
+ class WaitForUninstallAppWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ClusterQueue
include ClusterApplications
diff --git a/app/workers/clusters/cleanup/app_worker.rb b/app/workers/clusters/cleanup/app_worker.rb
index 8b2fddd3164..1d01cec174b 100644
--- a/app/workers/clusters/cleanup/app_worker.rb
+++ b/app/workers/clusters/cleanup/app_worker.rb
@@ -2,7 +2,7 @@
module Clusters
module Cleanup
- class AppWorker
+ class AppWorker # rubocop:disable Scalability/IdempotentWorker
include ClusterCleanupMethods
def perform(cluster_id, execution_count = 0)
diff --git a/app/workers/clusters/cleanup/project_namespace_worker.rb b/app/workers/clusters/cleanup/project_namespace_worker.rb
index 8a7fbf0fde7..a7a951f2937 100644
--- a/app/workers/clusters/cleanup/project_namespace_worker.rb
+++ b/app/workers/clusters/cleanup/project_namespace_worker.rb
@@ -2,7 +2,7 @@
module Clusters
module Cleanup
- class ProjectNamespaceWorker
+ class ProjectNamespaceWorker # rubocop:disable Scalability/IdempotentWorker
include ClusterCleanupMethods
def perform(cluster_id, execution_count = 0)
diff --git a/app/workers/clusters/cleanup/service_account_worker.rb b/app/workers/clusters/cleanup/service_account_worker.rb
index 95de56d8ebe..a829d68fb20 100644
--- a/app/workers/clusters/cleanup/service_account_worker.rb
+++ b/app/workers/clusters/cleanup/service_account_worker.rb
@@ -2,7 +2,7 @@
module Clusters
module Cleanup
- class ServiceAccountWorker
+ class ServiceAccountWorker # rubocop:disable Scalability/IdempotentWorker
include ClusterCleanupMethods
def perform(cluster_id)
diff --git a/app/workers/concerns/application_worker.rb b/app/workers/concerns/application_worker.rb
index 733156ab758..c0062780688 100644
--- a/app/workers/concerns/application_worker.rb
+++ b/app/workers/concerns/application_worker.rb
@@ -13,6 +13,17 @@ module ApplicationWorker
included do
set_queue
+
+ def structured_payload(payload = {})
+ context = Labkit::Context.current.to_h.merge(
+ 'class' => self.class,
+ 'job_status' => 'running',
+ 'queue' => self.class.queue,
+ 'jid' => jid
+ )
+
+ payload.stringify_keys.merge(context)
+ end
end
class_methods do
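structured_payload merges the caller's hash with the current Labkit context plus the worker class, queue, jid, and a 'running' job status, keeping worker log lines structured. A hedged usage sketch inside a worker's perform method, assuming GitLab's JSON Sidekiq log formatter is in place (the argument names are illustrative):

    def perform(project_id)
      Sidekiq.logger.info(structured_payload(project_id: project_id, message: 'starting work'))
      # The emitted hash also carries 'class', 'queue', 'jid' and 'job_status' => 'running'.
    end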
diff --git a/app/workers/concerns/gitlab/github_import/notify_upon_death.rb b/app/workers/concerns/gitlab/github_import/notify_upon_death.rb
deleted file mode 100644
index 3d7120665b6..00000000000
--- a/app/workers/concerns/gitlab/github_import/notify_upon_death.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module GithubImport
- # NotifyUponDeath can be included into a GitHub worker class if it should
- # notify any JobWaiter instances upon being moved to the Sidekiq dead queue.
- #
- # Note that this will only notify the waiter upon graceful termination, a
- # SIGKILL will still result in the waiter _not_ being notified.
- #
- # Workers including this module must have jobs passed where the last
- # argument is the key to notify, as a String.
- module NotifyUponDeath
- extend ActiveSupport::Concern
-
- included do
- # If a job is being exhausted we still want to notify the
- # AdvanceStageWorker. This prevents the entire import from getting stuck
- # just because 1 job threw too many errors.
- sidekiq_retries_exhausted do |job|
- args = job['args']
- jid = job['jid']
-
- if args.length == 3 && (key = args.last) && key.is_a?(String)
- JobWaiter.notify(key, jid)
- end
- end
- end
- end
- end
-end
diff --git a/app/workers/concerns/gitlab/github_import/object_importer.rb b/app/workers/concerns/gitlab/github_import/object_importer.rb
index bd0b566658e..63c1ba8e699 100644
--- a/app/workers/concerns/gitlab/github_import/object_importer.rb
+++ b/app/workers/concerns/gitlab/github_import/object_importer.rb
@@ -11,7 +11,7 @@ module Gitlab
include ApplicationWorker
include GithubImport::Queue
include ReschedulingMethods
- include NotifyUponDeath
+ include Gitlab::NotifyUponDeath
feature_category :importers
worker_has_external_dependencies!
diff --git a/app/workers/concerns/gitlab/notify_upon_death.rb b/app/workers/concerns/gitlab/notify_upon_death.rb
new file mode 100644
index 00000000000..66dc6270637
--- /dev/null
+++ b/app/workers/concerns/gitlab/notify_upon_death.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+module Gitlab
+ # NotifyUponDeath can be included into a worker class if it should
+ # notify any JobWaiter instances upon being moved to the Sidekiq dead queue.
+ #
+ # Note that this will only notify the waiter upon graceful termination, a
+ # SIGKILL will still result in the waiter _not_ being notified.
+ #
+ # Workers including this module must have jobs passed where the last
+ # argument is the key to notify, as a String.
+ module NotifyUponDeath
+ extend ActiveSupport::Concern
+
+ included do
+ # If a job is being exhausted we still want to notify the
+ # Gitlab::Import::AdvanceStageWorker. This prevents the entire import from getting stuck
+ # just because 1 job threw too many errors.
+ sidekiq_retries_exhausted do |job|
+ args = job['args']
+ jid = job['jid']
+
+ if args.length == 3 && (key = args.last) && key.is_a?(String)
+ JobWaiter.notify(key, jid)
+ end
+ end
+ end
+ end
+end
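Workers including the relocated Gitlab::NotifyUponDeath must enqueue jobs whose last of three arguments is the JobWaiter key, so the dead-queue hook above can notify the right waiter. A hedged sketch of an including worker; the worker name and the success-path notification are illustrative:

    class ImportChunkWorker # illustrative
      include ApplicationWorker
      include Gitlab::NotifyUponDeath

      # The JobWaiter key must be passed as the last argument, as a String.
      def perform(project_id, payload, notify_key)
        # ... do the work ...
      ensure
        # Notify on normal completion too; the concern only covers the dead queue.
        JobWaiter.notify(notify_key, jid)
      end
    end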
diff --git a/app/workers/concerns/project_export_options.rb b/app/workers/concerns/project_export_options.rb
new file mode 100644
index 00000000000..e9318c1ba43
--- /dev/null
+++ b/app/workers/concerns/project_export_options.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module ProjectExportOptions
+ extend ActiveSupport::Concern
+
+ EXPORT_RETRY_COUNT = 3
+
+ included do
+ sidekiq_options retry: EXPORT_RETRY_COUNT, status_expiration: StuckExportJobsWorker::EXPORT_JOBS_EXPIRATION
+
+ # We mark the project export as failed once we have exhausted all retries
+ sidekiq_retries_exhausted do |job|
+ project = Project.find(job['args'][1])
+ # rubocop: disable CodeReuse/ActiveRecord
+ job = project.export_jobs.find_by(jid: job["jid"])
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ if job&.fail_op
+ Sidekiq.logger.info "Job #{job['jid']} for project #{project.id} has been set to failed state"
+ else
+ Sidekiq.logger.error "Failed to set Job #{job['jid']} for project #{project.id} to failed state"
+ end
+ end
+ end
+end
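The concern above pins the retry count and, once retries are exhausted, looks up the export job via job['args'][1] (the project ID) and marks it failed. A hedged sketch of an including worker whose argument order matches that expectation:

    class ProjectExportWorker # second positional argument must be the project ID
      include ApplicationWorker
      include ProjectExportOptions

      def perform(current_user_id, project_id, params = {})
        # ... run the export, recording this job's jid on the export job ...
      end
    end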
diff --git a/app/workers/concerns/waitable_worker.rb b/app/workers/concerns/waitable_worker.rb
index 17946bbc5ca..f995aced542 100644
--- a/app/workers/concerns/waitable_worker.rb
+++ b/app/workers/concerns/waitable_worker.rb
@@ -9,7 +9,7 @@ module WaitableWorker
# Short-circuit: it's more efficient to do small numbers of jobs inline
return bulk_perform_inline(args_list) if args_list.size <= 3
- waiter = Gitlab::JobWaiter.new(args_list.size)
+ waiter = Gitlab::JobWaiter.new(args_list.size, worker_label: self.to_s)
# Point all the bulk jobs at the same JobWaiter. Converts, [[1], [2], [3]]
# into [[1, "key"], [2, "key"], [3, "key"]]
diff --git a/app/workers/concerns/worker_attributes.rb b/app/workers/concerns/worker_attributes.rb
index babdb46bb85..b60179531af 100644
--- a/app/workers/concerns/worker_attributes.rb
+++ b/app/workers/concerns/worker_attributes.rb
@@ -4,9 +4,12 @@ module WorkerAttributes
extend ActiveSupport::Concern
# Resource boundaries that workers can declare through the
- # `worker_resource_boundary` attribute
+ # `resource_boundary` attribute
VALID_RESOURCE_BOUNDARIES = [:memory, :cpu, :unknown].freeze
+ # Urgencies that workers can declare through the `urgencies` attribute
+ VALID_URGENCIES = [:high, :low, :throttled].freeze
+
NAMESPACE_WEIGHTS = {
auto_devops: 2,
auto_merge: 3,
@@ -47,21 +50,22 @@ module WorkerAttributes
get_worker_attribute(:feature_category) == :not_owned
end
- # This should be set for jobs that need to be run immediately, or, if
- # they are delayed, risk creating inconsistencies in the application
- # that could being perceived by the user as incorrect behavior
- # (ie, a bug)
- # See doc/development/sidekiq_style_guide.md#Latency-Sensitive-Jobs
+ # This should be set to :high for jobs that need to be run
+ # immediately, or, if they are delayed, risk creating
+ # inconsistencies in the application that could be perceived by
+ # the user as incorrect behavior (ie, a bug)
+ #
+ # See
+ # doc/development/sidekiq_style_guide.md#urgency
# for details
- def latency_sensitive_worker!
- worker_attributes[:latency_sensitive] = true
+ def urgency(urgency)
+ raise "Invalid urgency: #{urgency}" unless VALID_URGENCIES.include?(urgency)
+
+ worker_attributes[:urgency] = urgency
end
- # Returns a truthy value if the worker is latency sensitive.
- # See doc/development/sidekiq_style_guide.md#Latency-Sensitive-Jobs
- # for details
- def latency_sensitive_worker?
- worker_attributes[:latency_sensitive]
+ def get_urgency
+ worker_attributes[:urgency] || :low
end
# Set this attribute on a job when it will call to services outside of the
@@ -89,6 +93,14 @@ module WorkerAttributes
worker_attributes[:resource_boundary] || :unknown
end
+ def idempotent!
+ worker_attributes[:idempotent] = true
+ end
+
+ def idempotent?
+ worker_attributes[:idempotent]
+ end
+
def weight(value)
worker_attributes[:weight] = value
end
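With the new attributes, a worker declares its urgency (validated against VALID_URGENCIES and defaulting to :low when unset) and can mark itself idempotent. A hedged sketch of the DSL on an illustrative worker, using the class-level readers added above:

    class CacheExpiryWorker # illustrative
      include ApplicationWorker

      urgency :high   # one of :high, :low, :throttled; anything else raises
      idempotent!     # re-running the job with the same arguments is safe

      def perform(key); end
    end

    CacheExpiryWorker.get_urgency  # => :high
    CacheExpiryWorker.idempotent?  # => true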
diff --git a/app/workers/container_expiration_policy_worker.rb b/app/workers/container_expiration_policy_worker.rb
index e07a6546e2d..e1544be5aed 100644
--- a/app/workers/container_expiration_policy_worker.rb
+++ b/app/workers/container_expiration_policy_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class ContainerExpirationPolicyWorker
+class ContainerExpirationPolicyWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include CronjobQueue
diff --git a/app/workers/create_commit_signature_worker.rb b/app/workers/create_commit_signature_worker.rb
index 027fea3e402..3da21c56eff 100644
--- a/app/workers/create_commit_signature_worker.rb
+++ b/app/workers/create_commit_signature_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class CreateCommitSignatureWorker
+class CreateCommitSignatureWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category :source_code_management
diff --git a/app/workers/create_evidence_worker.rb b/app/workers/create_evidence_worker.rb
index e6fbf59d702..c2faba84cfc 100644
--- a/app/workers/create_evidence_worker.rb
+++ b/app/workers/create_evidence_worker.rb
@@ -1,9 +1,9 @@
# frozen_string_literal: true
-class CreateEvidenceWorker
+class CreateEvidenceWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
- feature_category :release_governance
+ feature_category :release_evidence
weight 2
def perform(release_id)
diff --git a/app/workers/create_note_diff_file_worker.rb b/app/workers/create_note_diff_file_worker.rb
index ca200bd17b4..8a1709f04e1 100644
--- a/app/workers/create_note_diff_file_worker.rb
+++ b/app/workers/create_note_diff_file_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class CreateNoteDiffFileWorker
+class CreateNoteDiffFileWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category :source_code_management
diff --git a/app/workers/create_pipeline_worker.rb b/app/workers/create_pipeline_worker.rb
index a75cc643038..54698518e4f 100644
--- a/app/workers/create_pipeline_worker.rb
+++ b/app/workers/create_pipeline_worker.rb
@@ -1,12 +1,12 @@
# frozen_string_literal: true
-class CreatePipelineWorker
+class CreatePipelineWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include PipelineQueue
queue_namespace :pipeline_creation
feature_category :continuous_integration
- latency_sensitive_worker!
+ urgency :high
worker_resource_boundary :cpu
def perform(project_id, user_id, ref, source, params = {})
diff --git a/app/workers/delete_container_repository_worker.rb b/app/workers/delete_container_repository_worker.rb
index e70b4fb0a58..dbfc273a5ce 100644
--- a/app/workers/delete_container_repository_worker.rb
+++ b/app/workers/delete_container_repository_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class DeleteContainerRepositoryWorker
+class DeleteContainerRepositoryWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ExclusiveLeaseGuard
diff --git a/app/workers/delete_diff_files_worker.rb b/app/workers/delete_diff_files_worker.rb
index e0c1724f1f7..a6759a9d7c4 100644
--- a/app/workers/delete_diff_files_worker.rb
+++ b/app/workers/delete_diff_files_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class DeleteDiffFilesWorker
+class DeleteDiffFilesWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category :source_code_management
diff --git a/app/workers/delete_merged_branches_worker.rb b/app/workers/delete_merged_branches_worker.rb
index f3d86233c1b..ab3d42e5384 100644
--- a/app/workers/delete_merged_branches_worker.rb
+++ b/app/workers/delete_merged_branches_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class DeleteMergedBranchesWorker
+class DeleteMergedBranchesWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category :source_code_management
diff --git a/app/workers/delete_stored_files_worker.rb b/app/workers/delete_stored_files_worker.rb
index e1e2f66f573..463f26fdd5a 100644
--- a/app/workers/delete_stored_files_worker.rb
+++ b/app/workers/delete_stored_files_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class DeleteStoredFilesWorker
+class DeleteStoredFilesWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category_not_owned!
diff --git a/app/workers/delete_user_worker.rb b/app/workers/delete_user_worker.rb
index 0e49e787d8a..d3b87c133d3 100644
--- a/app/workers/delete_user_worker.rb
+++ b/app/workers/delete_user_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class DeleteUserWorker
+class DeleteUserWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category :authentication_and_authorization
diff --git a/app/workers/deployments/finished_worker.rb b/app/workers/deployments/finished_worker.rb
index 6196b032f63..0be420af718 100644
--- a/app/workers/deployments/finished_worker.rb
+++ b/app/workers/deployments/finished_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module Deployments
- class FinishedWorker
+ class FinishedWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
queue_namespace :deployment
diff --git a/app/workers/deployments/forward_deployment_worker.rb b/app/workers/deployments/forward_deployment_worker.rb
index a25b8ca0478..a6f246dbbbd 100644
--- a/app/workers/deployments/forward_deployment_worker.rb
+++ b/app/workers/deployments/forward_deployment_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module Deployments
- class ForwardDeploymentWorker
+ class ForwardDeploymentWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
queue_namespace :deployment
diff --git a/app/workers/deployments/success_worker.rb b/app/workers/deployments/success_worker.rb
index 4a29f1aef52..17f790d2f6f 100644
--- a/app/workers/deployments/success_worker.rb
+++ b/app/workers/deployments/success_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module Deployments
- class SuccessWorker
+ class SuccessWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
queue_namespace :deployment
diff --git a/app/workers/detect_repository_languages_worker.rb b/app/workers/detect_repository_languages_worker.rb
index 954d0f9336b..ef66287a692 100644
--- a/app/workers/detect_repository_languages_worker.rb
+++ b/app/workers/detect_repository_languages_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class DetectRepositoryLanguagesWorker
+class DetectRepositoryLanguagesWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ExceptionBacktrace
include ExclusiveLeaseGuard
diff --git a/app/workers/email_receiver_worker.rb b/app/workers/email_receiver_worker.rb
index c2b1e642604..fcb88982c0b 100644
--- a/app/workers/email_receiver_worker.rb
+++ b/app/workers/email_receiver_worker.rb
@@ -1,10 +1,10 @@
# frozen_string_literal: true
-class EmailReceiverWorker
+class EmailReceiverWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category :issue_tracking
- latency_sensitive_worker!
+ urgency :high
weight 2
def perform(raw)
diff --git a/app/workers/emails_on_push_worker.rb b/app/workers/emails_on_push_worker.rb
index be66e2b1188..cc114acf7e9 100644
--- a/app/workers/emails_on_push_worker.rb
+++ b/app/workers/emails_on_push_worker.rb
@@ -1,12 +1,12 @@
# frozen_string_literal: true
-class EmailsOnPushWorker
+class EmailsOnPushWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
attr_reader :email, :skip_premailer
feature_category :source_code_management
- latency_sensitive_worker!
+ urgency :high
worker_resource_boundary :cpu
weight 2
diff --git a/app/workers/environments/auto_stop_cron_worker.rb b/app/workers/environments/auto_stop_cron_worker.rb
index fdc9490453c..de5e10a0976 100644
--- a/app/workers/environments/auto_stop_cron_worker.rb
+++ b/app/workers/environments/auto_stop_cron_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module Environments
- class AutoStopCronWorker
+ class AutoStopCronWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include CronjobQueue # rubocop:disable Scalability/CronWorkerContext
diff --git a/app/workers/error_tracking_issue_link_worker.rb b/app/workers/error_tracking_issue_link_worker.rb
index b306ecc154b..9febd5cfecc 100644
--- a/app/workers/error_tracking_issue_link_worker.rb
+++ b/app/workers/error_tracking_issue_link_worker.rb
@@ -5,7 +5,7 @@
# If a link to a different GitLab issue exists, a new link
# will still be created, but will not be visible in Sentry
# until the prior link is deleted.
-class ErrorTrackingIssueLinkWorker
+class ErrorTrackingIssueLinkWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ExclusiveLeaseGuard
include Gitlab::Utils::StrongMemoize
@@ -26,8 +26,8 @@ class ErrorTrackingIssueLinkWorker
logger.info("Linking Sentry issue #{sentry_issue_id} to GitLab issue #{issue.id}")
sentry_client.create_issue_link(integration_id, sentry_issue_id, issue)
- rescue Sentry::Client::Error
- logger.info("Failed to link Sentry issue #{sentry_issue_id} to GitLab issue #{issue.id}")
+ rescue Sentry::Client::Error => e
+ logger.info("Failed to link Sentry issue #{sentry_issue_id} to GitLab issue #{issue.id} with error: #{e.message}")
end
end
diff --git a/app/workers/expire_build_artifacts_worker.rb b/app/workers/expire_build_artifacts_worker.rb
index 07f516a3390..12372961250 100644
--- a/app/workers/expire_build_artifacts_worker.rb
+++ b/app/workers/expire_build_artifacts_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class ExpireBuildArtifactsWorker
+class ExpireBuildArtifactsWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
# rubocop:disable Scalability/CronWorkerContext
# This worker does not perform work scoped to a context
diff --git a/app/workers/expire_build_instance_artifacts_worker.rb b/app/workers/expire_build_instance_artifacts_worker.rb
index db5240d5c8e..48fd086f88f 100644
--- a/app/workers/expire_build_instance_artifacts_worker.rb
+++ b/app/workers/expire_build_instance_artifacts_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class ExpireBuildInstanceArtifactsWorker
+class ExpireBuildInstanceArtifactsWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category :continuous_integration
diff --git a/app/workers/expire_job_cache_worker.rb b/app/workers/expire_job_cache_worker.rb
index 0363429587e..ce27fed7fb1 100644
--- a/app/workers/expire_job_cache_worker.rb
+++ b/app/workers/expire_job_cache_worker.rb
@@ -5,7 +5,8 @@ class ExpireJobCacheWorker
include PipelineQueue
queue_namespace :pipeline_cache
- latency_sensitive_worker!
+ urgency :high
+ idempotent!
# rubocop: disable CodeReuse/ActiveRecord
def perform(job_id)
diff --git a/app/workers/expire_pipeline_cache_worker.rb b/app/workers/expire_pipeline_cache_worker.rb
index 1d204e0a19e..1d2708cdb44 100644
--- a/app/workers/expire_pipeline_cache_worker.rb
+++ b/app/workers/expire_pipeline_cache_worker.rb
@@ -1,11 +1,11 @@
# frozen_string_literal: true
-class ExpirePipelineCacheWorker
+class ExpirePipelineCacheWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include PipelineQueue
queue_namespace :pipeline_cache
- latency_sensitive_worker!
+ urgency :high
worker_resource_boundary :cpu
# rubocop: disable CodeReuse/ActiveRecord
diff --git a/app/workers/file_hook_worker.rb b/app/workers/file_hook_worker.rb
index 24fc2d75d24..f8cdea54a17 100644
--- a/app/workers/file_hook_worker.rb
+++ b/app/workers/file_hook_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class FileHookWorker
+class FileHookWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
sidekiq_options retry: false
diff --git a/app/workers/git_garbage_collect_worker.rb b/app/workers/git_garbage_collect_worker.rb
index ad119917774..37ca3af517f 100644
--- a/app/workers/git_garbage_collect_worker.rb
+++ b/app/workers/git_garbage_collect_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class GitGarbageCollectWorker
+class GitGarbageCollectWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
sidekiq_options retry: false
diff --git a/app/workers/gitlab/github_import/advance_stage_worker.rb b/app/workers/gitlab/github_import/advance_stage_worker.rb
index 44e69e48694..8fbf88a1762 100644
--- a/app/workers/gitlab/github_import/advance_stage_worker.rb
+++ b/app/workers/gitlab/github_import/advance_stage_worker.rb
@@ -6,17 +6,14 @@ module Gitlab
# number of jobs to complete, without blocking a thread. Once all jobs have
# been completed this worker will advance the import process to the next
# stage.
- class AdvanceStageWorker
+ class AdvanceStageWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
+ include ::Gitlab::Import::AdvanceStage
sidekiq_options dead: false
feature_category :importers
- INTERVAL = 30.seconds.to_i
-
- # The number of seconds to wait (while blocking the thread) before
- # continuing to the next waiter.
- BLOCKING_WAIT_TIME = 5
+ private
# The known importer stages and their corresponding Sidekiq workers.
STAGES = {
@@ -26,49 +23,9 @@ module Gitlab
finish: Stage::FinishImportWorker
}.freeze
- # project_id - The ID of the project being imported.
- # waiters - A Hash mapping Gitlab::JobWaiter keys to the number of
- # remaining jobs.
- # next_stage - The name of the next stage to start when all jobs have been
- # completed.
- def perform(project_id, waiters, next_stage)
- return unless import_state = find_import_state(project_id)
-
- new_waiters = wait_for_jobs(waiters)
-
- if new_waiters.empty?
- # We refresh the import JID here so workers importing individual
- # resources (e.g. notes) don't have to do this all the time, reducing
- # the pressure on Redis. We _only_ do this once all jobs are done so
- # we don't get stuck forever if one or more jobs failed to notify the
- # JobWaiter.
- import_state.refresh_jid_expiration
-
- STAGES.fetch(next_stage.to_sym).perform_async(project_id)
- else
- self.class.perform_in(INTERVAL, project_id, new_waiters, next_stage)
- end
- end
-
- def wait_for_jobs(waiters)
- waiters.each_with_object({}) do |(key, remaining), new_waiters|
- waiter = JobWaiter.new(remaining, key)
-
- # We wait for a brief moment of time so we don't reschedule if we can
- # complete the work fast enough.
- waiter.wait(BLOCKING_WAIT_TIME)
-
- next unless waiter.jobs_remaining.positive?
-
- new_waiters[waiter.key] = waiter.jobs_remaining
- end
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def find_import_state(project_id)
- ProjectImportState.select(:jid).with_status(:started).find_by(project_id: project_id)
+ def next_stage_worker(next_stage)
+ STAGES.fetch(next_stage.to_sym)
end
- # rubocop: enable CodeReuse/ActiveRecord
end
end
end
diff --git a/app/workers/gitlab/github_import/import_diff_note_worker.rb b/app/workers/gitlab/github_import/import_diff_note_worker.rb
index ef2a74c51c5..25fb0375692 100644
--- a/app/workers/gitlab/github_import/import_diff_note_worker.rb
+++ b/app/workers/gitlab/github_import/import_diff_note_worker.rb
@@ -2,7 +2,7 @@
module Gitlab
module GithubImport
- class ImportDiffNoteWorker
+ class ImportDiffNoteWorker # rubocop:disable Scalability/IdempotentWorker
include ObjectImporter
def representation_class
diff --git a/app/workers/gitlab/github_import/import_issue_worker.rb b/app/workers/gitlab/github_import/import_issue_worker.rb
index 1b081ae5966..d9c496e3eb3 100644
--- a/app/workers/gitlab/github_import/import_issue_worker.rb
+++ b/app/workers/gitlab/github_import/import_issue_worker.rb
@@ -2,7 +2,7 @@
module Gitlab
module GithubImport
- class ImportIssueWorker
+ class ImportIssueWorker # rubocop:disable Scalability/IdempotentWorker
include ObjectImporter
def representation_class
diff --git a/app/workers/gitlab/github_import/import_lfs_object_worker.rb b/app/workers/gitlab/github_import/import_lfs_object_worker.rb
index 520c5cb091a..78f78fdb160 100644
--- a/app/workers/gitlab/github_import/import_lfs_object_worker.rb
+++ b/app/workers/gitlab/github_import/import_lfs_object_worker.rb
@@ -2,7 +2,7 @@
module Gitlab
module GithubImport
- class ImportLfsObjectWorker
+ class ImportLfsObjectWorker # rubocop:disable Scalability/IdempotentWorker
include ObjectImporter
def representation_class
diff --git a/app/workers/gitlab/github_import/import_note_worker.rb b/app/workers/gitlab/github_import/import_note_worker.rb
index d2b4c36a5b9..d0f97a15afd 100644
--- a/app/workers/gitlab/github_import/import_note_worker.rb
+++ b/app/workers/gitlab/github_import/import_note_worker.rb
@@ -2,7 +2,7 @@
module Gitlab
module GithubImport
- class ImportNoteWorker
+ class ImportNoteWorker # rubocop:disable Scalability/IdempotentWorker
include ObjectImporter
def representation_class
diff --git a/app/workers/gitlab/github_import/import_pull_request_worker.rb b/app/workers/gitlab/github_import/import_pull_request_worker.rb
index 62a6da152a3..ec806ad170b 100644
--- a/app/workers/gitlab/github_import/import_pull_request_worker.rb
+++ b/app/workers/gitlab/github_import/import_pull_request_worker.rb
@@ -2,7 +2,7 @@
module Gitlab
module GithubImport
- class ImportPullRequestWorker
+ class ImportPullRequestWorker # rubocop:disable Scalability/IdempotentWorker
include ObjectImporter
def representation_class
diff --git a/app/workers/gitlab/github_import/refresh_import_jid_worker.rb b/app/workers/gitlab/github_import/refresh_import_jid_worker.rb
index 76723e4a61f..0ddd893d0d1 100644
--- a/app/workers/gitlab/github_import/refresh_import_jid_worker.rb
+++ b/app/workers/gitlab/github_import/refresh_import_jid_worker.rb
@@ -2,7 +2,7 @@
module Gitlab
module GithubImport
- class RefreshImportJidWorker
+ class RefreshImportJidWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include GithubImport::Queue
diff --git a/app/workers/gitlab/github_import/stage/finish_import_worker.rb b/app/workers/gitlab/github_import/stage/finish_import_worker.rb
index ee64f62637e..73699a74a4a 100644
--- a/app/workers/gitlab/github_import/stage/finish_import_worker.rb
+++ b/app/workers/gitlab/github_import/stage/finish_import_worker.rb
@@ -3,7 +3,7 @@
module Gitlab
module GithubImport
module Stage
- class FinishImportWorker
+ class FinishImportWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include GithubImport::Queue
include StageMethods
diff --git a/app/workers/gitlab/github_import/stage/import_base_data_worker.rb b/app/workers/gitlab/github_import/stage/import_base_data_worker.rb
index ccfed2ae187..11c2a2ac9b4 100644
--- a/app/workers/gitlab/github_import/stage/import_base_data_worker.rb
+++ b/app/workers/gitlab/github_import/stage/import_base_data_worker.rb
@@ -3,7 +3,7 @@
module Gitlab
module GithubImport
module Stage
- class ImportBaseDataWorker
+ class ImportBaseDataWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include GithubImport::Queue
include StageMethods
diff --git a/app/workers/gitlab/github_import/stage/import_issues_and_diff_notes_worker.rb b/app/workers/gitlab/github_import/stage/import_issues_and_diff_notes_worker.rb
index 7007754ff2e..68b6e159fa4 100644
--- a/app/workers/gitlab/github_import/stage/import_issues_and_diff_notes_worker.rb
+++ b/app/workers/gitlab/github_import/stage/import_issues_and_diff_notes_worker.rb
@@ -3,7 +3,7 @@
module Gitlab
module GithubImport
module Stage
- class ImportIssuesAndDiffNotesWorker
+ class ImportIssuesAndDiffNotesWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include GithubImport::Queue
include StageMethods
diff --git a/app/workers/gitlab/github_import/stage/import_lfs_objects_worker.rb b/app/workers/gitlab/github_import/stage/import_lfs_objects_worker.rb
index 29257603a9d..a19df399969 100644
--- a/app/workers/gitlab/github_import/stage/import_lfs_objects_worker.rb
+++ b/app/workers/gitlab/github_import/stage/import_lfs_objects_worker.rb
@@ -3,7 +3,7 @@
module Gitlab
module GithubImport
module Stage
- class ImportLfsObjectsWorker
+ class ImportLfsObjectsWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include GithubImport::Queue
include StageMethods
diff --git a/app/workers/gitlab/github_import/stage/import_notes_worker.rb b/app/workers/gitlab/github_import/stage/import_notes_worker.rb
index ccf0013180d..49b9821cd45 100644
--- a/app/workers/gitlab/github_import/stage/import_notes_worker.rb
+++ b/app/workers/gitlab/github_import/stage/import_notes_worker.rb
@@ -3,7 +3,7 @@
module Gitlab
module GithubImport
module Stage
- class ImportNotesWorker
+ class ImportNotesWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include GithubImport::Queue
include StageMethods
diff --git a/app/workers/gitlab/github_import/stage/import_pull_requests_worker.rb b/app/workers/gitlab/github_import/stage/import_pull_requests_worker.rb
index 37a7a7f4ba0..3299db5653b 100644
--- a/app/workers/gitlab/github_import/stage/import_pull_requests_worker.rb
+++ b/app/workers/gitlab/github_import/stage/import_pull_requests_worker.rb
@@ -3,7 +3,7 @@
module Gitlab
module GithubImport
module Stage
- class ImportPullRequestsWorker
+ class ImportPullRequestsWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include GithubImport::Queue
include StageMethods
diff --git a/app/workers/gitlab/github_import/stage/import_repository_worker.rb b/app/workers/gitlab/github_import/stage/import_repository_worker.rb
index b5e30470070..cb9ef1cd198 100644
--- a/app/workers/gitlab/github_import/stage/import_repository_worker.rb
+++ b/app/workers/gitlab/github_import/stage/import_repository_worker.rb
@@ -3,7 +3,7 @@
module Gitlab
module GithubImport
module Stage
- class ImportRepositoryWorker
+ class ImportRepositoryWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include GithubImport::Queue
include StageMethods
diff --git a/app/workers/gitlab/import/advance_stage.rb b/app/workers/gitlab/import/advance_stage.rb
new file mode 100644
index 00000000000..5c836413ae3
--- /dev/null
+++ b/app/workers/gitlab/import/advance_stage.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Import
+ module AdvanceStage
+ INTERVAL = 30.seconds.to_i
+
+ # The number of seconds to wait (while blocking the thread) before
+ # continuing to the next waiter.
+ BLOCKING_WAIT_TIME = 5
+
+ # project_id - The ID of the project being imported.
+ # waiters - A Hash mapping Gitlab::JobWaiter keys to the number of
+ # remaining jobs.
+ # next_stage - The name of the next stage to start when all jobs have been
+ # completed.
+ def perform(project_id, waiters, next_stage)
+ return unless import_state = find_import_state(project_id)
+
+ new_waiters = wait_for_jobs(waiters)
+
+ if new_waiters.empty?
+ # We refresh the import JID here so workers importing individual
+ # resources (e.g. notes) don't have to do this all the time, reducing
+ # the pressure on Redis. We _only_ do this once all jobs are done so
+ # we don't get stuck forever if one or more jobs failed to notify the
+ # JobWaiter.
+ import_state.refresh_jid_expiration
+
+ next_stage_worker(next_stage).perform_async(project_id)
+ else
+ self.class.perform_in(INTERVAL, project_id, new_waiters, next_stage)
+ end
+ end
+
+ def wait_for_jobs(waiters)
+ waiters.each_with_object({}) do |(key, remaining), new_waiters|
+ waiter = JobWaiter.new(remaining, key)
+
+ # We wait for a brief moment of time so we don't reschedule if we can
+ # complete the work fast enough.
+ waiter.wait(BLOCKING_WAIT_TIME)
+
+ next unless waiter.jobs_remaining.positive?
+
+ new_waiters[waiter.key] = waiter.jobs_remaining
+ end
+ end
+
+ def find_import_state(project_id)
+ ProjectImportState.jid_by(project_id: project_id, status: :started)
+ end
+
+ private
+
+ def next_stage_worker(next_stage)
+ raise NotImplementedError
+ end
+ end
+ end
+end
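The shared module leaves next_stage_worker abstract; the GitHub importer's AdvanceStageWorker above satisfies it by fetching from its STAGES hash. A hedged sketch of another includer, with the importer namespace and stage map purely illustrative:

    module MyImporter # illustrative namespace
      class AdvanceStageWorker
        include ApplicationWorker
        include ::Gitlab::Import::AdvanceStage

        STAGES = { finish: Stage::FinishImportWorker }.freeze # illustrative stage map

        private

        def next_stage_worker(next_stage)
          STAGES.fetch(next_stage.to_sym)
        end
      end
    end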
diff --git a/app/workers/gitlab/phabricator_import/base_worker.rb b/app/workers/gitlab/phabricator_import/base_worker.rb
index faae71d4627..82ef9e825f9 100644
--- a/app/workers/gitlab/phabricator_import/base_worker.rb
+++ b/app/workers/gitlab/phabricator_import/base_worker.rb
@@ -18,7 +18,7 @@
# - It marks the import as finished when all remaining jobs are done
module Gitlab
module PhabricatorImport
- class BaseWorker
+ class BaseWorker # rubocop:disable Scalability/IdempotentWorker
include WorkerAttributes
include Gitlab::ExclusiveLeaseHelpers
diff --git a/app/workers/gitlab/phabricator_import/import_tasks_worker.rb b/app/workers/gitlab/phabricator_import/import_tasks_worker.rb
index b5d9e80797b..1b1d7b35dd5 100644
--- a/app/workers/gitlab/phabricator_import/import_tasks_worker.rb
+++ b/app/workers/gitlab/phabricator_import/import_tasks_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module Gitlab
module PhabricatorImport
- class ImportTasksWorker < BaseWorker
+ class ImportTasksWorker < BaseWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ProjectImportOptions # This marks the project as failed after too many tries
diff --git a/app/workers/gitlab_shell_worker.rb b/app/workers/gitlab_shell_worker.rb
index bd2225e6d7c..0db794793d4 100644
--- a/app/workers/gitlab_shell_worker.rb
+++ b/app/workers/gitlab_shell_worker.rb
@@ -1,14 +1,24 @@
# frozen_string_literal: true
-class GitlabShellWorker
+class GitlabShellWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include Gitlab::ShellAdapter
feature_category :source_code_management
- latency_sensitive_worker!
+ urgency :high
weight 2
def perform(action, *arg)
+ # Gitlab::Shell is being removed but we need to continue to process jobs
+ # enqueued in the previous release, so handle them here.
+ #
+ # See https://gitlab.com/gitlab-org/gitlab/-/issues/25095 for more details
+ if AuthorizedKeysWorker::PERMITTED_ACTIONS.include?(action)
+ AuthorizedKeysWorker.new.perform(action, *arg)
+
+ return
+ end
+
Gitlab::GitalyClient::NamespaceService.allow do
gitlab_shell.__send__(action, *arg) # rubocop:disable GitlabSecurity/PublicSend
end
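Because Gitlab::Shell is being removed, jobs already enqueued with an action listed in AuthorizedKeysWorker::PERMITTED_ACTIONS are executed inline by that worker instead. A hedged usage sketch; treating 'add_key' as a permitted action is an assumption based on the key handling this worker historically performed:

    GitlabShellWorker.perform_async('add_key', 'key-42', 'ssh-rsa AAAA...')
    # If 'add_key' is in AuthorizedKeysWorker::PERMITTED_ACTIONS, the job above is
    # handled by AuthorizedKeysWorker.new.perform('add_key', 'key-42', 'ssh-rsa AAAA...')
    # instead of going through Gitlab::Shell.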
diff --git a/app/workers/gitlab_usage_ping_worker.rb b/app/workers/gitlab_usage_ping_worker.rb
index bf0dc0fdd59..9f0cf1728dd 100644
--- a/app/workers/gitlab_usage_ping_worker.rb
+++ b/app/workers/gitlab_usage_ping_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class GitlabUsagePingWorker
+class GitlabUsagePingWorker # rubocop:disable Scalability/IdempotentWorker
LEASE_TIMEOUT = 86400
include ApplicationWorker
@@ -9,7 +9,7 @@ class GitlabUsagePingWorker
include CronjobQueue
# rubocop:enable Scalability/CronWorkerContext
- feature_category_not_owned!
+ feature_category :collection
# Retry for up to approximately three hours then give up.
sidekiq_options retry: 10, dead: false
diff --git a/app/workers/group_destroy_worker.rb b/app/workers/group_destroy_worker.rb
index fc751f8b612..d80a2dad7d9 100644
--- a/app/workers/group_destroy_worker.rb
+++ b/app/workers/group_destroy_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class GroupDestroyWorker
+class GroupDestroyWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ExceptionBacktrace
diff --git a/app/workers/group_export_worker.rb b/app/workers/group_export_worker.rb
index a2d34e8c8bf..3e0390429d6 100644
--- a/app/workers/group_export_worker.rb
+++ b/app/workers/group_export_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class GroupExportWorker
+class GroupExportWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ExceptionBacktrace
diff --git a/app/workers/group_import_worker.rb b/app/workers/group_import_worker.rb
index f283eab5814..b6fc5afc28c 100644
--- a/app/workers/group_import_worker.rb
+++ b/app/workers/group_import_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class GroupImportWorker
+class GroupImportWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ExceptionBacktrace
diff --git a/app/workers/hashed_storage/base_worker.rb b/app/workers/hashed_storage/base_worker.rb
index 1ab2108f6bb..372440996d9 100644
--- a/app/workers/hashed_storage/base_worker.rb
+++ b/app/workers/hashed_storage/base_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module HashedStorage
- class BaseWorker
+ class BaseWorker # rubocop:disable Scalability/IdempotentWorker
include ExclusiveLeaseGuard
include WorkerAttributes
diff --git a/app/workers/hashed_storage/migrator_worker.rb b/app/workers/hashed_storage/migrator_worker.rb
index 72a3faec5f4..5cbdfcb0602 100644
--- a/app/workers/hashed_storage/migrator_worker.rb
+++ b/app/workers/hashed_storage/migrator_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module HashedStorage
- class MigratorWorker
+ class MigratorWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
queue_namespace :hashed_storage
diff --git a/app/workers/hashed_storage/project_migrate_worker.rb b/app/workers/hashed_storage/project_migrate_worker.rb
index 0174467923d..3ce60ce7eb6 100644
--- a/app/workers/hashed_storage/project_migrate_worker.rb
+++ b/app/workers/hashed_storage/project_migrate_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module HashedStorage
- class ProjectMigrateWorker < BaseWorker
+ class ProjectMigrateWorker < BaseWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
queue_namespace :hashed_storage
diff --git a/app/workers/hashed_storage/project_rollback_worker.rb b/app/workers/hashed_storage/project_rollback_worker.rb
index 55e1d7ab23e..17b3cca83e1 100644
--- a/app/workers/hashed_storage/project_rollback_worker.rb
+++ b/app/workers/hashed_storage/project_rollback_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module HashedStorage
- class ProjectRollbackWorker < BaseWorker
+ class ProjectRollbackWorker < BaseWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
queue_namespace :hashed_storage
diff --git a/app/workers/hashed_storage/rollbacker_worker.rb b/app/workers/hashed_storage/rollbacker_worker.rb
index 8babdcfb96d..a220d3b2226 100644
--- a/app/workers/hashed_storage/rollbacker_worker.rb
+++ b/app/workers/hashed_storage/rollbacker_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module HashedStorage
- class RollbackerWorker
+ class RollbackerWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
queue_namespace :hashed_storage
diff --git a/app/workers/import_export_project_cleanup_worker.rb b/app/workers/import_export_project_cleanup_worker.rb
index ae236fa1fcd..dd345434d08 100644
--- a/app/workers/import_export_project_cleanup_worker.rb
+++ b/app/workers/import_export_project_cleanup_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class ImportExportProjectCleanupWorker
+class ImportExportProjectCleanupWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
# rubocop:disable Scalability/CronWorkerContext
# This worker does not perform work scoped to a context
diff --git a/app/workers/import_issues_csv_worker.rb b/app/workers/import_issues_csv_worker.rb
index 7c5584146ca..c7b5f8cd0a7 100644
--- a/app/workers/import_issues_csv_worker.rb
+++ b/app/workers/import_issues_csv_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class ImportIssuesCsvWorker
+class ImportIssuesCsvWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category :issue_tracking
diff --git a/app/workers/incident_management/process_alert_worker.rb b/app/workers/incident_management/process_alert_worker.rb
index f3d5bc5c66b..8d4294cc231 100644
--- a/app/workers/incident_management/process_alert_worker.rb
+++ b/app/workers/incident_management/process_alert_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module IncidentManagement
- class ProcessAlertWorker
+ class ProcessAlertWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
queue_namespace :incident_management
diff --git a/app/workers/invalid_gpg_signature_update_worker.rb b/app/workers/invalid_gpg_signature_update_worker.rb
index e1c2eefbf0f..1fd959c8763 100644
--- a/app/workers/invalid_gpg_signature_update_worker.rb
+++ b/app/workers/invalid_gpg_signature_update_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class InvalidGpgSignatureUpdateWorker
+class InvalidGpgSignatureUpdateWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category :source_code_management
diff --git a/app/workers/irker_worker.rb b/app/workers/irker_worker.rb
index a133ed6ed1b..91ab0d69ad1 100644
--- a/app/workers/irker_worker.rb
+++ b/app/workers/irker_worker.rb
@@ -3,7 +3,7 @@
require 'json'
require 'socket'
-class IrkerWorker
+class IrkerWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category :integrations
diff --git a/app/workers/issue_due_scheduler_worker.rb b/app/workers/issue_due_scheduler_worker.rb
index 59027907284..d735295d046 100644
--- a/app/workers/issue_due_scheduler_worker.rb
+++ b/app/workers/issue_due_scheduler_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class IssueDueSchedulerWorker
+class IssueDueSchedulerWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include CronjobQueue # rubocop:disable Scalability/CronWorkerContext
diff --git a/app/workers/mail_scheduler/issue_due_worker.rb b/app/workers/mail_scheduler/issue_due_worker.rb
index 6df816de71f..309d3e13477 100644
--- a/app/workers/mail_scheduler/issue_due_worker.rb
+++ b/app/workers/mail_scheduler/issue_due_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module MailScheduler
- class IssueDueWorker
+ class IssueDueWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include MailSchedulerQueue
diff --git a/app/workers/mail_scheduler/notification_service_worker.rb b/app/workers/mail_scheduler/notification_service_worker.rb
index ec659e39b24..691af2a724d 100644
--- a/app/workers/mail_scheduler/notification_service_worker.rb
+++ b/app/workers/mail_scheduler/notification_service_worker.rb
@@ -3,7 +3,7 @@
require 'active_job/arguments'
module MailScheduler
- class NotificationServiceWorker
+ class NotificationServiceWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include MailSchedulerQueue
diff --git a/app/workers/merge_request_mergeability_check_worker.rb b/app/workers/merge_request_mergeability_check_worker.rb
index ed35284b66c..a26c1a886f6 100644
--- a/app/workers/merge_request_mergeability_check_worker.rb
+++ b/app/workers/merge_request_mergeability_check_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class MergeRequestMergeabilityCheckWorker
+class MergeRequestMergeabilityCheckWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category :source_code_management
diff --git a/app/workers/merge_worker.rb b/app/workers/merge_worker.rb
index 48bc205113f..cc5fe884aec 100644
--- a/app/workers/merge_worker.rb
+++ b/app/workers/merge_worker.rb
@@ -1,10 +1,10 @@
# frozen_string_literal: true
-class MergeWorker
+class MergeWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category :source_code_management
- latency_sensitive_worker!
+ urgency :high
weight 5
def perform(merge_request_id, current_user_id, params)
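Throughout this patch, the class-level latency_sensitive_worker! declaration is renamed to urgency :high, and each worker not yet audited for idempotency gains an inline rubocop:disable Scalability/IdempotentWorker marker. A minimal sketch of a worker written against the renamed attribute (illustrative only, not part of this commit):

class ExampleWorker # rubocop:disable Scalability/IdempotentWorker
  include ApplicationWorker

  feature_category :source_code_management
  urgency :high # formerly declared as latency_sensitive_worker!

  def perform(record_id)
    # work that must run promptly but has not been verified as safely retryable
  end
end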
diff --git a/app/workers/migrate_external_diffs_worker.rb b/app/workers/migrate_external_diffs_worker.rb
index d248e2b5500..0a95f40aa8f 100644
--- a/app/workers/migrate_external_diffs_worker.rb
+++ b/app/workers/migrate_external_diffs_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class MigrateExternalDiffsWorker
+class MigrateExternalDiffsWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category :source_code_management
diff --git a/app/workers/namespaceless_project_destroy_worker.rb b/app/workers/namespaceless_project_destroy_worker.rb
index 113afc268f2..1c8054d8fbd 100644
--- a/app/workers/namespaceless_project_destroy_worker.rb
+++ b/app/workers/namespaceless_project_destroy_worker.rb
@@ -6,7 +6,7 @@
# used to belong to. Projects in this state should be rare.
# The worker will reject doing anything for projects that *do* have a
# namespace. For those use ProjectDestroyWorker instead.
-class NamespacelessProjectDestroyWorker
+class NamespacelessProjectDestroyWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ExceptionBacktrace
diff --git a/app/workers/namespaces/prune_aggregation_schedules_worker.rb b/app/workers/namespaces/prune_aggregation_schedules_worker.rb
index aeb5aa37a10..b94c8b7b4ba 100644
--- a/app/workers/namespaces/prune_aggregation_schedules_worker.rb
+++ b/app/workers/namespaces/prune_aggregation_schedules_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module Namespaces
- class PruneAggregationSchedulesWorker
+ class PruneAggregationSchedulesWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include CronjobQueue # rubocop:disable Scalability/CronWorkerContext
diff --git a/app/workers/namespaces/root_statistics_worker.rb b/app/workers/namespaces/root_statistics_worker.rb
index fd772c8cff6..70b2510488b 100644
--- a/app/workers/namespaces/root_statistics_worker.rb
+++ b/app/workers/namespaces/root_statistics_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module Namespaces
- class RootStatisticsWorker
+ class RootStatisticsWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
queue_namespace :update_namespace_statistics
@@ -16,13 +16,7 @@ module Namespaces
namespace.aggregation_schedule.destroy
rescue ::Namespaces::StatisticsRefresherService::RefresherError, ActiveRecord::RecordNotFound => ex
- log_error(namespace.full_path, ex.message) if namespace
- end
-
- private
-
- def log_error(namespace_path, error_message)
- Gitlab::SidekiqLogger.error("Namespace statistics can't be updated for #{namespace_path}: #{error_message}")
+ Gitlab::ErrorTracking.track_exception(ex, namespace_id: namespace_id, namespace: namespace&.full_path)
end
end
end
diff --git a/app/workers/namespaces/schedule_aggregation_worker.rb b/app/workers/namespaces/schedule_aggregation_worker.rb
index 87e135fbf21..94343a9e378 100644
--- a/app/workers/namespaces/schedule_aggregation_worker.rb
+++ b/app/workers/namespaces/schedule_aggregation_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module Namespaces
- class ScheduleAggregationWorker
+ class ScheduleAggregationWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
queue_namespace :update_namespace_statistics
@@ -16,8 +16,8 @@ module Namespaces
return if root_ancestor.aggregation_scheduled?
Namespace::AggregationSchedule.safe_find_or_create_by!(namespace_id: root_ancestor.id)
- rescue ActiveRecord::RecordNotFound
- log_error(namespace_id)
+ rescue ActiveRecord::RecordNotFound => ex
+ Gitlab::ErrorTracking.track_exception(ex, namespace_id: namespace_id)
end
private
@@ -34,9 +34,5 @@ module Namespaces
Namespace::AggregationSchedule.table_exists?
end
-
- def log_error(root_ancestor_id)
- Gitlab::SidekiqLogger.error("Namespace can't be scheduled for aggregation: #{root_ancestor_id} does not exist")
- end
end
end
diff --git a/app/workers/new_issue_worker.rb b/app/workers/new_issue_worker.rb
index d696165b447..e0e28767f8d 100644
--- a/app/workers/new_issue_worker.rb
+++ b/app/workers/new_issue_worker.rb
@@ -1,11 +1,11 @@
# frozen_string_literal: true
-class NewIssueWorker
+class NewIssueWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include NewIssuable
feature_category :issue_tracking
- latency_sensitive_worker!
+ urgency :high
worker_resource_boundary :cpu
weight 2
diff --git a/app/workers/new_merge_request_worker.rb b/app/workers/new_merge_request_worker.rb
index e31ddae1f13..f672d37a83e 100644
--- a/app/workers/new_merge_request_worker.rb
+++ b/app/workers/new_merge_request_worker.rb
@@ -1,22 +1,20 @@
# frozen_string_literal: true
-class NewMergeRequestWorker
+class NewMergeRequestWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include NewIssuable
feature_category :source_code_management
- latency_sensitive_worker!
+ urgency :high
worker_resource_boundary :cpu
weight 2
def perform(merge_request_id, user_id)
return unless objects_found?(merge_request_id, user_id)
- EventCreateService.new.open_mr(issuable, user)
- NotificationService.new.new_merge_request(issuable, user)
-
- issuable.diffs(include_stats: false).write_cache
- issuable.create_cross_references!(user)
+ MergeRequests::AfterCreateService
+ .new(issuable.target_project, user)
+ .execute(issuable)
end
def issuable_class
diff --git a/app/workers/new_note_worker.rb b/app/workers/new_note_worker.rb
index b446e376007..8ead87a9230 100644
--- a/app/workers/new_note_worker.rb
+++ b/app/workers/new_note_worker.rb
@@ -1,10 +1,10 @@
# frozen_string_literal: true
-class NewNoteWorker
+class NewNoteWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category :issue_tracking
- latency_sensitive_worker!
+ urgency :high
worker_resource_boundary :cpu
weight 2
diff --git a/app/workers/new_release_worker.rb b/app/workers/new_release_worker.rb
index edfdb2d7aff..3c19e5f3d2b 100644
--- a/app/workers/new_release_worker.rb
+++ b/app/workers/new_release_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class NewReleaseWorker
+class NewReleaseWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
queue_namespace :notifications
diff --git a/app/workers/object_pool/create_worker.rb b/app/workers/object_pool/create_worker.rb
index 135b99886dc..cf87ad95077 100644
--- a/app/workers/object_pool/create_worker.rb
+++ b/app/workers/object_pool/create_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module ObjectPool
- class CreateWorker
+ class CreateWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ObjectPoolQueue
include ExclusiveLeaseGuard
diff --git a/app/workers/object_pool/destroy_worker.rb b/app/workers/object_pool/destroy_worker.rb
index ca00d467d9b..d42cee59d03 100644
--- a/app/workers/object_pool/destroy_worker.rb
+++ b/app/workers/object_pool/destroy_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module ObjectPool
- class DestroyWorker
+ class DestroyWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ObjectPoolQueue
diff --git a/app/workers/object_pool/join_worker.rb b/app/workers/object_pool/join_worker.rb
index ddd002eabb8..f1008d3be83 100644
--- a/app/workers/object_pool/join_worker.rb
+++ b/app/workers/object_pool/join_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module ObjectPool
- class JoinWorker
+ class JoinWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ObjectPoolQueue
diff --git a/app/workers/object_pool/schedule_join_worker.rb b/app/workers/object_pool/schedule_join_worker.rb
index 647a8b72435..c00bb2967f2 100644
--- a/app/workers/object_pool/schedule_join_worker.rb
+++ b/app/workers/object_pool/schedule_join_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module ObjectPool
- class ScheduleJoinWorker
+ class ScheduleJoinWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ObjectPoolQueue
diff --git a/app/workers/object_storage/background_move_worker.rb b/app/workers/object_storage/background_move_worker.rb
index 55f8e1c3ede..7b0a7c7ec58 100644
--- a/app/workers/object_storage/background_move_worker.rb
+++ b/app/workers/object_storage/background_move_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module ObjectStorage
- class BackgroundMoveWorker
+ class BackgroundMoveWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ObjectStorageQueue
diff --git a/app/workers/object_storage/migrate_uploads_worker.rb b/app/workers/object_storage/migrate_uploads_worker.rb
index 01e6fdb2d3e..d9d21f2cb7e 100644
--- a/app/workers/object_storage/migrate_uploads_worker.rb
+++ b/app/workers/object_storage/migrate_uploads_worker.rb
@@ -1,5 +1,6 @@
# frozen_string_literal: true
+# rubocop:disable Scalability/IdempotentWorker
module ObjectStorage
class MigrateUploadsWorker
include ApplicationWorker
@@ -137,3 +138,4 @@ module ObjectStorage
end
end
end
+# rubocop:enable Scalability/IdempotentWorker
diff --git a/app/workers/pages_domain_removal_cron_worker.rb b/app/workers/pages_domain_removal_cron_worker.rb
index 1c96dd6ad8c..cb24441d2f7 100644
--- a/app/workers/pages_domain_removal_cron_worker.rb
+++ b/app/workers/pages_domain_removal_cron_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class PagesDomainRemovalCronWorker
+class PagesDomainRemovalCronWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include CronjobQueue
diff --git a/app/workers/pages_domain_ssl_renewal_cron_worker.rb b/app/workers/pages_domain_ssl_renewal_cron_worker.rb
index c1201b935d1..fe6d516d3cf 100644
--- a/app/workers/pages_domain_ssl_renewal_cron_worker.rb
+++ b/app/workers/pages_domain_ssl_renewal_cron_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class PagesDomainSslRenewalCronWorker
+class PagesDomainSslRenewalCronWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include CronjobQueue
diff --git a/app/workers/pages_domain_ssl_renewal_worker.rb b/app/workers/pages_domain_ssl_renewal_worker.rb
index 4db7d22ef7e..561fd59d471 100644
--- a/app/workers/pages_domain_ssl_renewal_worker.rb
+++ b/app/workers/pages_domain_ssl_renewal_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class PagesDomainSslRenewalWorker
+class PagesDomainSslRenewalWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category :pages
diff --git a/app/workers/pages_domain_verification_cron_worker.rb b/app/workers/pages_domain_verification_cron_worker.rb
index b06aa65a8e5..a30f0b981d8 100644
--- a/app/workers/pages_domain_verification_cron_worker.rb
+++ b/app/workers/pages_domain_verification_cron_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class PagesDomainVerificationCronWorker
+class PagesDomainVerificationCronWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include CronjobQueue
diff --git a/app/workers/pages_domain_verification_worker.rb b/app/workers/pages_domain_verification_worker.rb
index b0888036498..1b4d9d3994c 100644
--- a/app/workers/pages_domain_verification_worker.rb
+++ b/app/workers/pages_domain_verification_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class PagesDomainVerificationWorker
+class PagesDomainVerificationWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category :pages
diff --git a/app/workers/pages_worker.rb b/app/workers/pages_worker.rb
index 484d9053849..875f17282f9 100644
--- a/app/workers/pages_worker.rb
+++ b/app/workers/pages_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class PagesWorker
+class PagesWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
sidekiq_options retry: 3
diff --git a/app/workers/personal_access_tokens/expiring_worker.rb b/app/workers/personal_access_tokens/expiring_worker.rb
index 84f7ce9d5d7..86240f827fc 100644
--- a/app/workers/personal_access_tokens/expiring_worker.rb
+++ b/app/workers/personal_access_tokens/expiring_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module PersonalAccessTokens
- class ExpiringWorker
+ class ExpiringWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include CronjobQueue
diff --git a/app/workers/pipeline_hooks_worker.rb b/app/workers/pipeline_hooks_worker.rb
index 04abc9c88fd..85ecdd02fb5 100644
--- a/app/workers/pipeline_hooks_worker.rb
+++ b/app/workers/pipeline_hooks_worker.rb
@@ -1,11 +1,11 @@
# frozen_string_literal: true
-class PipelineHooksWorker
+class PipelineHooksWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include PipelineQueue
queue_namespace :pipeline_hooks
- latency_sensitive_worker!
+ urgency :high
worker_resource_boundary :cpu
# rubocop: disable CodeReuse/ActiveRecord
diff --git a/app/workers/pipeline_metrics_worker.rb b/app/workers/pipeline_metrics_worker.rb
index 3830522aaa1..1eb9b4ce089 100644
--- a/app/workers/pipeline_metrics_worker.rb
+++ b/app/workers/pipeline_metrics_worker.rb
@@ -1,10 +1,10 @@
# frozen_string_literal: true
-class PipelineMetricsWorker
+class PipelineMetricsWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include PipelineQueue
- latency_sensitive_worker!
+ urgency :high
# rubocop: disable CodeReuse/ActiveRecord
def perform(pipeline_id)
diff --git a/app/workers/pipeline_notification_worker.rb b/app/workers/pipeline_notification_worker.rb
index 62ecbc8a047..3336383adf7 100644
--- a/app/workers/pipeline_notification_worker.rb
+++ b/app/workers/pipeline_notification_worker.rb
@@ -1,19 +1,27 @@
# frozen_string_literal: true
-class PipelineNotificationWorker
+class PipelineNotificationWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include PipelineQueue
- latency_sensitive_worker!
+ urgency :high
worker_resource_boundary :cpu
# rubocop: disable CodeReuse/ActiveRecord
- def perform(pipeline_id, recipients = nil)
- pipeline = Ci::Pipeline.find_by(id: pipeline_id)
+ def perform(pipeline_id, args = {})
+ case args
+ when Hash
+ ref_status = args[:ref_status]
+ recipients = args[:recipients]
+ else # TODO: backward compatible interface, can be removed in 12.10
+ recipients = args
+ ref_status = nil
+ end
+ pipeline = Ci::Pipeline.find_by(id: pipeline_id)
return unless pipeline
- NotificationService.new.pipeline_finished(pipeline, recipients)
+ NotificationService.new.pipeline_finished(pipeline, ref_status: ref_status, recipients: recipients)
end
# rubocop: enable CodeReuse/ActiveRecord
end
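The PipelineNotificationWorker change keeps the old positional recipients argument working while adding a hash-based interface that also carries ref_status. Hypothetical call sites showing both accepted shapes, assuming a pipeline record is in scope (these calls are for illustration and are not taken from this commit):

# New hash-based interface:
PipelineNotificationWorker.perform_async(pipeline.id, ref_status: 'success', recipients: ['user@example.com'])
# Legacy positional interface, still supported until 12.10:
PipelineNotificationWorker.perform_async(pipeline.id, ['user@example.com'])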
diff --git a/app/workers/pipeline_process_worker.rb b/app/workers/pipeline_process_worker.rb
index 200f3619332..66a661dde71 100644
--- a/app/workers/pipeline_process_worker.rb
+++ b/app/workers/pipeline_process_worker.rb
@@ -1,12 +1,12 @@
# frozen_string_literal: true
-class PipelineProcessWorker
+class PipelineProcessWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include PipelineQueue
queue_namespace :pipeline_processing
feature_category :continuous_integration
- latency_sensitive_worker!
+ urgency :high
# rubocop: disable CodeReuse/ActiveRecord
def perform(pipeline_id, build_ids = nil)
diff --git a/app/workers/pipeline_schedule_worker.rb b/app/workers/pipeline_schedule_worker.rb
index 8b326b9dbb6..d81b978f9b0 100644
--- a/app/workers/pipeline_schedule_worker.rb
+++ b/app/workers/pipeline_schedule_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class PipelineScheduleWorker
+class PipelineScheduleWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include CronjobQueue
diff --git a/app/workers/pipeline_success_worker.rb b/app/workers/pipeline_success_worker.rb
index 5c24f00e0c3..d84612c52d1 100644
--- a/app/workers/pipeline_success_worker.rb
+++ b/app/workers/pipeline_success_worker.rb
@@ -1,11 +1,11 @@
# frozen_string_literal: true
-class PipelineSuccessWorker
+class PipelineSuccessWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include PipelineQueue
queue_namespace :pipeline_processing
- latency_sensitive_worker!
+ urgency :high
def perform(pipeline_id)
# no-op
diff --git a/app/workers/pipeline_update_ci_ref_status_worker.rb b/app/workers/pipeline_update_ci_ref_status_worker.rb
new file mode 100644
index 00000000000..96e14e126de
--- /dev/null
+++ b/app/workers/pipeline_update_ci_ref_status_worker.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class PipelineUpdateCiRefStatusWorker # rubocop:disable Scalability/IdempotentWorker
+ include ApplicationWorker
+ include PipelineQueue
+
+ urgency :high
+ worker_resource_boundary :cpu
+
+ def perform(pipeline_id)
+ pipeline = Ci::Pipeline.find_by_id(pipeline_id)
+
+ return unless pipeline
+
+ Ci::UpdateCiRefStatusService.new(pipeline).call
+ end
+end
diff --git a/app/workers/pipeline_update_worker.rb b/app/workers/pipeline_update_worker.rb
index 0321ea5a6ce..7f667057af6 100644
--- a/app/workers/pipeline_update_worker.rb
+++ b/app/workers/pipeline_update_worker.rb
@@ -1,11 +1,11 @@
# frozen_string_literal: true
-class PipelineUpdateWorker
+class PipelineUpdateWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include PipelineQueue
queue_namespace :pipeline_processing
- latency_sensitive_worker!
+ urgency :high
def perform(pipeline_id)
Ci::Pipeline.find_by_id(pipeline_id)&.update_legacy_status
diff --git a/app/workers/post_receive.rb b/app/workers/post_receive.rb
index d5038f1152b..5178fabb2d8 100644
--- a/app/workers/post_receive.rb
+++ b/app/workers/post_receive.rb
@@ -1,17 +1,17 @@
# frozen_string_literal: true
-class PostReceive
+class PostReceive # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category :source_code_management
- latency_sensitive_worker!
+ urgency :high
worker_resource_boundary :cpu
weight 5
def perform(gl_repository, identifier, changes, push_options = {})
- project, repo_type = Gitlab::GlRepository.parse(gl_repository)
+ container, project, repo_type = Gitlab::GlRepository.parse(gl_repository)
- if project.nil?
+ if project.nil? && (!repo_type.snippet? || container.is_a?(ProjectSnippet))
log("Triggered hook for non-existing project with gl_repository \"#{gl_repository}\"")
return false
end
@@ -20,12 +20,14 @@ class PostReceive
# Use Sidekiq.logger so arguments can be correlated with execution
# time and thread ID's.
Sidekiq.logger.info "changes: #{changes.inspect}" if ENV['SIDEKIQ_LOG_ARGUMENTS']
- post_received = Gitlab::GitPostReceive.new(project, identifier, changes, push_options)
+ post_received = Gitlab::GitPostReceive.new(container, identifier, changes, push_options)
if repo_type.wiki?
- process_wiki_changes(post_received)
+ process_wiki_changes(post_received, container)
elsif repo_type.project?
- process_project_changes(post_received)
+ process_project_changes(post_received, container)
+ elsif repo_type.snippet?
+ process_snippet_changes(post_received, container)
else
# Other repos don't have hooks for now
end
@@ -39,24 +41,50 @@ class PostReceive
end
end
- def process_project_changes(post_received)
+ def process_project_changes(post_received, project)
user = identify_user(post_received)
return false unless user
- project = post_received.project
push_options = post_received.push_options
changes = post_received.changes
# We only need to expire certain caches once per push
- expire_caches(post_received, post_received.project.repository)
- enqueue_repository_cache_update(post_received)
+ expire_caches(post_received, project.repository)
+ enqueue_project_cache_update(post_received, project)
process_ref_changes(project, user, push_options: push_options, changes: changes)
- update_remote_mirrors(post_received)
+ update_remote_mirrors(post_received, project)
after_project_changes_hooks(project, user, changes.refs, changes.repository_data)
end
+ def process_wiki_changes(post_received, project)
+ project.touch(:last_activity_at, :last_repository_updated_at)
+ project.wiki.repository.expire_statistics_caches
+ ProjectCacheWorker.perform_async(project.id, [], [:wiki_size])
+
+ user = identify_user(post_received)
+ return false unless user
+
+ # We only need to expire certain caches once per push
+ expire_caches(post_received, project.wiki.repository)
+
+ ::Git::WikiPushService.new(project, user, changes: post_received.changes).execute
+ end
+
+ def process_snippet_changes(post_received, snippet)
+ user = identify_user(post_received)
+
+ return false unless user
+
+ # At the moment, we only expire the repository caches.
+ # In the future we might need to call ProjectCacheWorker
+ # (or the custom class we create) to update the snippet
+ # repository size or any other key.
+ # We might also need to update the repository statistics.
+ expire_caches(post_received, snippet.repository)
+ end
+
# Expire the repository status, branch, and tag cache once per push.
def expire_caches(post_received, repository)
repository.expire_status_cache if repository.empty?
@@ -65,12 +93,12 @@ class PostReceive
end
# Schedule an update for the repository size and commit count if necessary.
- def enqueue_repository_cache_update(post_received)
+ def enqueue_project_cache_update(post_received, project)
stats_to_invalidate = [:repository_size]
stats_to_invalidate << :commit_count if post_received.includes_default_branch?
ProjectCacheWorker.perform_async(
- post_received.project.id,
+ project.id,
[],
stats_to_invalidate,
true
@@ -83,10 +111,9 @@ class PostReceive
Git::ProcessRefChangesService.new(project, user, params).execute
end
- def update_remote_mirrors(post_received)
+ def update_remote_mirrors(post_received, project)
return unless post_received.includes_branches? || post_received.includes_tags?
- project = post_received.project
return unless project.has_remote_mirror?
project.mark_stuck_remote_mirrors_as_failed!
@@ -99,20 +126,6 @@ class PostReceive
Gitlab::UsageDataCounters::SourceCodeCounter.count(:pushes)
end
- def process_wiki_changes(post_received)
- post_received.project.touch(:last_activity_at, :last_repository_updated_at)
- post_received.project.wiki.repository.expire_statistics_caches
- ProjectCacheWorker.perform_async(post_received.project.id, [], [:wiki_size])
-
- user = identify_user(post_received)
- return false unless user
-
- # We only need to expire certain caches once per push
- expire_caches(post_received, post_received.project.wiki.repository)
-
- ::Git::WikiPushService.new(post_received.project, user, changes: post_received.changes).execute
- end
-
def log(message)
Gitlab::GitLogger.error("POST-RECEIVE: #{message}")
end
diff --git a/app/workers/process_commit_worker.rb b/app/workers/process_commit_worker.rb
index ca2896946c9..9960e812a2f 100644
--- a/app/workers/process_commit_worker.rb
+++ b/app/workers/process_commit_worker.rb
@@ -7,11 +7,11 @@
# result of this the workload of this worker should be kept to a bare minimum.
# Consider using an extra worker if you need to add any extra (and potentially
# slow) processing of commits.
-class ProcessCommitWorker
+class ProcessCommitWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category :source_code_management
- latency_sensitive_worker!
+ urgency :high
weight 3
# project_id - The ID of the project this commit belongs to.
@@ -19,13 +19,12 @@ class ProcessCommitWorker
# commit_hash - Hash containing commit details to use for constructing a
# Commit object without having to use the Git repository.
# default - The data was pushed to the default branch.
- # rubocop: disable CodeReuse/ActiveRecord
def perform(project_id, user_id, commit_hash, default = false)
- project = Project.find_by(id: project_id)
+ project = Project.id_in(project_id).first
return unless project
- user = User.find_by(id: user_id)
+ user = User.id_in(user_id).first
return unless user
@@ -35,12 +34,11 @@ class ProcessCommitWorker
process_commit_message(project, commit, user, author, default)
update_issue_metrics(commit, author)
end
- # rubocop: enable CodeReuse/ActiveRecord
def process_commit_message(project, commit, user, author, default = false)
# Ignore closing references from GitLab-generated commit messages.
find_closing_issues = default && !commit.merged_merge_request?(user)
- closed_issues = find_closing_issues ? commit.closes_issues(user) : []
+ closed_issues = find_closing_issues ? issues_to_close(project, commit, user) : []
close_issues(project, user, author, commit, closed_issues) if closed_issues.any?
commit.create_cross_references!(author, closed_issues)
@@ -56,6 +54,12 @@ class ProcessCommitWorker
end
end
+ def issues_to_close(project, commit, user)
+ Gitlab::ClosingIssueExtractor
+ .new(project, user)
+ .closed_by_message(commit.safe_message)
+ end
+
def update_issue_metrics(commit, author)
mentioned_issues = commit.all_references(author).issues
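The find_by(id: ...) lookups in ProcessCommitWorker are swapped for the id_in scope so the CodeReuse/ActiveRecord disable can be dropped. A minimal sketch of the scope this relies on, assuming it is defined on the shared base class roughly the way GitLab's ApplicationRecord defines it:

# Assumed shape of the scope used above; shown only to illustrate why
# Project.id_in(project_id).first behaves like Project.find_by(id: project_id)
# while keeping the raw ActiveRecord query inside the model layer.
class ApplicationRecord < ActiveRecord::Base
  self.abstract_class = true

  scope :id_in, ->(ids) { where(id: ids) }
end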
diff --git a/app/workers/project_cache_worker.rb b/app/workers/project_cache_worker.rb
index ae1d57aa124..573f903f4e0 100644
--- a/app/workers/project_cache_worker.rb
+++ b/app/workers/project_cache_worker.rb
@@ -1,10 +1,10 @@
# frozen_string_literal: true
# Worker for updating any project specific caches.
-class ProjectCacheWorker
+class ProjectCacheWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
- latency_sensitive_worker!
+ urgency :high
LEASE_TIMEOUT = 15.minutes.to_i
diff --git a/app/workers/project_daily_statistics_worker.rb b/app/workers/project_daily_statistics_worker.rb
index 19c2fd67763..c60bee0ffdc 100644
--- a/app/workers/project_daily_statistics_worker.rb
+++ b/app/workers/project_daily_statistics_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class ProjectDailyStatisticsWorker
+class ProjectDailyStatisticsWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category :source_code_management
diff --git a/app/workers/project_destroy_worker.rb b/app/workers/project_destroy_worker.rb
index 1d20837faa2..b3e7996f4a4 100644
--- a/app/workers/project_destroy_worker.rb
+++ b/app/workers/project_destroy_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class ProjectDestroyWorker
+class ProjectDestroyWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ExceptionBacktrace
diff --git a/app/workers/project_export_worker.rb b/app/workers/project_export_worker.rb
index 4d2cc3cd32d..aaaf70f09b5 100644
--- a/app/workers/project_export_worker.rb
+++ b/app/workers/project_export_worker.rb
@@ -1,19 +1,26 @@
# frozen_string_literal: true
-class ProjectExportWorker
+class ProjectExportWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ExceptionBacktrace
+ include ProjectExportOptions
- sidekiq_options retry: 3
feature_category :importers
worker_resource_boundary :memory
def perform(current_user_id, project_id, after_export_strategy = {}, params = {})
current_user = User.find(current_user_id)
project = Project.find(project_id)
+ export_job = project.export_jobs.safe_find_or_create_by(jid: self.jid)
after_export = build!(after_export_strategy)
+ export_job&.start
+
::Projects::ImportExport::ExportService.new(project, current_user, params).execute(after_export)
+
+ export_job&.finish
+ rescue ActiveRecord::RecordNotFound, Gitlab::ImportExport::AfterExportStrategyBuilder::StrategyNotFoundError => e
+ logger.error("Failed to export project #{project_id}: #{e.message}")
end
private
diff --git a/app/workers/project_service_worker.rb b/app/workers/project_service_worker.rb
index 38a2a7414a5..84c3a3e52d0 100644
--- a/app/workers/project_service_worker.rb
+++ b/app/workers/project_service_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class ProjectServiceWorker
+class ProjectServiceWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
sidekiq_options dead: false
diff --git a/app/workers/project_update_repository_storage_worker.rb b/app/workers/project_update_repository_storage_worker.rb
new file mode 100644
index 00000000000..ecee33e6421
--- /dev/null
+++ b/app/workers/project_update_repository_storage_worker.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+class ProjectUpdateRepositoryStorageWorker # rubocop:disable Scalability/IdempotentWorker
+ include ApplicationWorker
+
+ feature_category :gitaly
+
+ def perform(project_id, new_repository_storage_key)
+ project = Project.find(project_id)
+
+ ::Projects::UpdateRepositoryStorageService.new(project).execute(new_repository_storage_key)
+ end
+end
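A hypothetical enqueue matching the new worker's signature; the storage key name below is made up and would need to match a repository storage configured for the instance:

# Moves the project's repository to the storage configured under 'extra_storage'.
ProjectUpdateRepositoryStorageWorker.perform_async(project.id, 'extra_storage')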
diff --git a/app/workers/propagate_service_template_worker.rb b/app/workers/propagate_service_template_worker.rb
index 73a2b453207..f3a6bda1821 100644
--- a/app/workers/propagate_service_template_worker.rb
+++ b/app/workers/propagate_service_template_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
# Worker for updating any project specific caches.
-class PropagateServiceTemplateWorker
+class PropagateServiceTemplateWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category :source_code_management
diff --git a/app/workers/prune_old_events_worker.rb b/app/workers/prune_old_events_worker.rb
index 835c51ec846..330de4c7cba 100644
--- a/app/workers/prune_old_events_worker.rb
+++ b/app/workers/prune_old_events_worker.rb
@@ -1,13 +1,13 @@
# frozen_string_literal: true
-class PruneOldEventsWorker
+class PruneOldEventsWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
# rubocop:disable Scalability/CronWorkerContext
# This worker does not perform work scoped to a context
include CronjobQueue
# rubocop:enable Scalability/CronWorkerContext
- feature_category_not_owned!
+ feature_category :users
DELETE_LIMIT = 10_000
diff --git a/app/workers/prune_web_hook_logs_worker.rb b/app/workers/prune_web_hook_logs_worker.rb
index dd4f16a69da..a8e81a24ecd 100644
--- a/app/workers/prune_web_hook_logs_worker.rb
+++ b/app/workers/prune_web_hook_logs_worker.rb
@@ -2,7 +2,7 @@
# Worker that deletes a fixed number of outdated rows from the "web_hook_logs"
# table.
-class PruneWebHookLogsWorker
+class PruneWebHookLogsWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
# rubocop:disable Scalability/CronWorkerContext
# This worker does not perform work scoped to a context
diff --git a/app/workers/reactive_caching_worker.rb b/app/workers/reactive_caching_worker.rb
index 6f82ad83137..716b1de2bf5 100644
--- a/app/workers/reactive_caching_worker.rb
+++ b/app/workers/reactive_caching_worker.rb
@@ -1,16 +1,16 @@
# frozen_string_literal: true
-class ReactiveCachingWorker
+class ReactiveCachingWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category_not_owned!
# TODO: The reactive caching worker should be split into
- # two different workers, one for latency_sensitive jobs without external dependencies
- # and another worker without latency_sensitivity, but with external dependencies
+ # two different workers, one for high urgency jobs without external dependencies
+ # and another worker without high urgency, but with external dependencies
# https://gitlab.com/gitlab-com/gl-infra/scalability/issues/34
# This worker should also have `worker_has_external_dependencies!` enabled
- latency_sensitive_worker!
+ urgency :high
worker_resource_boundary :cpu
def perform(class_name, id, *args)
diff --git a/app/workers/rebase_worker.rb b/app/workers/rebase_worker.rb
index ddf5c31a1c2..2e13af5e0aa 100644
--- a/app/workers/rebase_worker.rb
+++ b/app/workers/rebase_worker.rb
@@ -2,7 +2,7 @@
# The RebaseWorker must be wrapped in important concurrency code, so should only
# be scheduled via MergeRequest#rebase_async
-class RebaseWorker
+class RebaseWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category :source_code_management
diff --git a/app/workers/remote_mirror_notification_worker.rb b/app/workers/remote_mirror_notification_worker.rb
index 706131d4e4b..33f5002014d 100644
--- a/app/workers/remote_mirror_notification_worker.rb
+++ b/app/workers/remote_mirror_notification_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class RemoteMirrorNotificationWorker
+class RemoteMirrorNotificationWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category :source_code_management
diff --git a/app/workers/remove_expired_group_links_worker.rb b/app/workers/remove_expired_group_links_worker.rb
index db35dfb3ca8..8226f22837c 100644
--- a/app/workers/remove_expired_group_links_worker.rb
+++ b/app/workers/remove_expired_group_links_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class RemoveExpiredGroupLinksWorker
+class RemoveExpiredGroupLinksWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include CronjobQueue # rubocop:disable Scalability/CronWorkerContext
diff --git a/app/workers/remove_expired_members_worker.rb b/app/workers/remove_expired_members_worker.rb
index 278adee98e9..f56a6cd9fa2 100644
--- a/app/workers/remove_expired_members_worker.rb
+++ b/app/workers/remove_expired_members_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class RemoveExpiredMembersWorker
+class RemoveExpiredMembersWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include CronjobQueue # rubocop:disable Scalability/CronWorkerContext
diff --git a/app/workers/remove_unreferenced_lfs_objects_worker.rb b/app/workers/remove_unreferenced_lfs_objects_worker.rb
index 5e3998f3915..76ab23ebbd5 100644
--- a/app/workers/remove_unreferenced_lfs_objects_worker.rb
+++ b/app/workers/remove_unreferenced_lfs_objects_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class RemoveUnreferencedLfsObjectsWorker
+class RemoveUnreferencedLfsObjectsWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
# rubocop:disable Scalability/CronWorkerContext
# This worker does not perform work scoped to a context
diff --git a/app/workers/repository_archive_cache_worker.rb b/app/workers/repository_archive_cache_worker.rb
index 76e08a80c15..84f61a60953 100644
--- a/app/workers/repository_archive_cache_worker.rb
+++ b/app/workers/repository_archive_cache_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class RepositoryArchiveCacheWorker
+class RepositoryArchiveCacheWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
# rubocop:disable Scalability/CronWorkerContext
# This worker does not perform work scoped to a context
diff --git a/app/workers/repository_check/batch_worker.rb b/app/workers/repository_check/batch_worker.rb
index 4091c30f498..3e5e6a25228 100644
--- a/app/workers/repository_check/batch_worker.rb
+++ b/app/workers/repository_check/batch_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module RepositoryCheck
- class BatchWorker
+ class BatchWorker # rubocop:disable Scalability/IdempotentWorker
prepend_if_ee('::EE::RepositoryCheck::BatchWorker') # rubocop: disable Cop/InjectEnterpriseEditionModule
include ApplicationWorker
diff --git a/app/workers/repository_check/clear_worker.rb b/app/workers/repository_check/clear_worker.rb
index 01964c69fb2..1689b9bf251 100644
--- a/app/workers/repository_check/clear_worker.rb
+++ b/app/workers/repository_check/clear_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module RepositoryCheck
- class ClearWorker
+ class ClearWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include RepositoryCheckQueue
diff --git a/app/workers/repository_check/dispatch_worker.rb b/app/workers/repository_check/dispatch_worker.rb
index f68be8832eb..d7a145011fa 100644
--- a/app/workers/repository_check/dispatch_worker.rb
+++ b/app/workers/repository_check/dispatch_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module RepositoryCheck
- class DispatchWorker
+ class DispatchWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
# rubocop:disable Scalability/CronWorkerContext
# This worker does not perform work scoped to a context
diff --git a/app/workers/repository_check/single_repository_worker.rb b/app/workers/repository_check/single_repository_worker.rb
index cadb1de356c..b25a20b3eff 100644
--- a/app/workers/repository_check/single_repository_worker.rb
+++ b/app/workers/repository_check/single_repository_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module RepositoryCheck
- class SingleRepositoryWorker
+ class SingleRepositoryWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include RepositoryCheckQueue
diff --git a/app/workers/repository_cleanup_worker.rb b/app/workers/repository_cleanup_worker.rb
index dd2cbd42d1f..33b7223dd95 100644
--- a/app/workers/repository_cleanup_worker.rb
+++ b/app/workers/repository_cleanup_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class RepositoryCleanupWorker
+class RepositoryCleanupWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
sidekiq_options retry: 3
diff --git a/app/workers/repository_fork_worker.rb b/app/workers/repository_fork_worker.rb
index ba141f808a7..395fce0696c 100644
--- a/app/workers/repository_fork_worker.rb
+++ b/app/workers/repository_fork_worker.rb
@@ -1,8 +1,7 @@
# frozen_string_literal: true
-class RepositoryForkWorker
+class RepositoryForkWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
- include Gitlab::ShellAdapter
include ProjectStartImport
include ProjectImportOptions
@@ -27,18 +26,8 @@ class RepositoryForkWorker
Gitlab::Metrics.add_event(:fork_repository)
- result = gitlab_shell.fork_repository(source_project, target_project)
-
- if result
- link_lfs_objects(source_project, target_project)
- else
- raise_fork_failure(
- source_project,
- target_project,
- 'Failed to create fork repository'
- )
- end
-
+ gitaly_fork!(source_project, target_project)
+ link_lfs_objects(source_project, target_project)
target_project.after_import
end
@@ -49,10 +38,21 @@ class RepositoryForkWorker
false
end
+ def gitaly_fork!(source_project, target_project)
+ source_repo = source_project.repository.raw
+ target_repo = target_project.repository.raw
+
+ ::Gitlab::GitalyClient::RepositoryService.new(target_repo).fork_repository(source_repo)
+ rescue GRPC::BadStatus => e
+ Gitlab::ErrorTracking.track_exception(e, source_project_id: source_project.id, target_project_id: target_project.id)
+
+ raise_fork_failure(source_project, target_project, 'Failed to create fork repository')
+ end
+
def link_lfs_objects(source_project, target_project)
Projects::LfsPointers::LfsLinkService
.new(target_project)
- .execute(source_project.lfs_objects_oids)
+ .execute(source_project.all_lfs_objects_oids)
rescue Projects::LfsPointers::LfsLinkService::TooManyOidsError
raise_fork_failure(
source_project,
diff --git a/app/workers/repository_import_worker.rb b/app/workers/repository_import_worker.rb
index 15677fb0a95..9f17ef467e3 100644
--- a/app/workers/repository_import_worker.rb
+++ b/app/workers/repository_import_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class RepositoryImportWorker
+class RepositoryImportWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ExceptionBacktrace
include ProjectStartImport
diff --git a/app/workers/repository_remove_remote_worker.rb b/app/workers/repository_remove_remote_worker.rb
index 3e55ebc77ed..23a9ec1e202 100644
--- a/app/workers/repository_remove_remote_worker.rb
+++ b/app/workers/repository_remove_remote_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class RepositoryRemoveRemoteWorker
+class RepositoryRemoveRemoteWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ExclusiveLeaseGuard
diff --git a/app/workers/repository_update_remote_mirror_worker.rb b/app/workers/repository_update_remote_mirror_worker.rb
index d1dec4cb732..cfff2382f04 100644
--- a/app/workers/repository_update_remote_mirror_worker.rb
+++ b/app/workers/repository_update_remote_mirror_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class RepositoryUpdateRemoteMirrorWorker
+class RepositoryUpdateRemoteMirrorWorker # rubocop:disable Scalability/IdempotentWorker
UpdateError = Class.new(StandardError)
include ApplicationWorker
diff --git a/app/workers/requests_profiles_worker.rb b/app/workers/requests_profiles_worker.rb
index b711cb99082..106f04d9409 100644
--- a/app/workers/requests_profiles_worker.rb
+++ b/app/workers/requests_profiles_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class RequestsProfilesWorker
+class RequestsProfilesWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
# rubocop:disable Scalability/CronWorkerContext
# This worker does not perform work scoped to a context
diff --git a/app/workers/run_pipeline_schedule_worker.rb b/app/workers/run_pipeline_schedule_worker.rb
index f8f8a2fe7ae..7d76cbed77f 100644
--- a/app/workers/run_pipeline_schedule_worker.rb
+++ b/app/workers/run_pipeline_schedule_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class RunPipelineScheduleWorker
+class RunPipelineScheduleWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include PipelineQueue
diff --git a/app/workers/schedule_migrate_external_diffs_worker.rb b/app/workers/schedule_migrate_external_diffs_worker.rb
index 0e3c62cf282..4e7b60c4ab7 100644
--- a/app/workers/schedule_migrate_external_diffs_worker.rb
+++ b/app/workers/schedule_migrate_external_diffs_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class ScheduleMigrateExternalDiffsWorker
+class ScheduleMigrateExternalDiffsWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
# rubocop:disable Scalability/CronWorkerContext:
# This schedules the `MigrateExternalDiffsWorker`
diff --git a/app/workers/self_monitoring_project_create_worker.rb b/app/workers/self_monitoring_project_create_worker.rb
index 429ac8aacc4..8177efb1683 100644
--- a/app/workers/self_monitoring_project_create_worker.rb
+++ b/app/workers/self_monitoring_project_create_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class SelfMonitoringProjectCreateWorker
+class SelfMonitoringProjectCreateWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ExclusiveLeaseGuard
include SelfMonitoringProjectWorker
diff --git a/app/workers/self_monitoring_project_delete_worker.rb b/app/workers/self_monitoring_project_delete_worker.rb
index 07a7d3f6c45..4fa05d71de5 100644
--- a/app/workers/self_monitoring_project_delete_worker.rb
+++ b/app/workers/self_monitoring_project_delete_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class SelfMonitoringProjectDeleteWorker
+class SelfMonitoringProjectDeleteWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ExclusiveLeaseGuard
include SelfMonitoringProjectWorker
diff --git a/app/workers/stage_update_worker.rb b/app/workers/stage_update_worker.rb
index a96c4c6dda2..aface8288e3 100644
--- a/app/workers/stage_update_worker.rb
+++ b/app/workers/stage_update_worker.rb
@@ -1,11 +1,11 @@
# frozen_string_literal: true
-class StageUpdateWorker
+class StageUpdateWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include PipelineQueue
queue_namespace :pipeline_processing
- latency_sensitive_worker!
+ urgency :high
def perform(stage_id)
Ci::Stage.find_by_id(stage_id)&.update_legacy_status
diff --git a/app/workers/stuck_ci_jobs_worker.rb b/app/workers/stuck_ci_jobs_worker.rb
index 6e4ffa36854..b3b1ed66efc 100644
--- a/app/workers/stuck_ci_jobs_worker.rb
+++ b/app/workers/stuck_ci_jobs_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class StuckCiJobsWorker
+class StuckCiJobsWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include CronjobQueue
diff --git a/app/workers/stuck_export_jobs_worker.rb b/app/workers/stuck_export_jobs_worker.rb
new file mode 100644
index 00000000000..6d8d60d2fc0
--- /dev/null
+++ b/app/workers/stuck_export_jobs_worker.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+# rubocop:disable Scalability/IdempotentWorker
+class StuckExportJobsWorker
+ include ApplicationWorker
+ # rubocop:disable Scalability/CronWorkerContext
+ # This worker updates export states inline and does not schedule
+ # other jobs.
+ include CronjobQueue
+ # rubocop:enable Scalability/CronWorkerContext
+
+ feature_category :importers
+ worker_resource_boundary :cpu
+
+ EXPORT_JOBS_EXPIRATION = 6.hours.to_i
+
+ def perform
+ failed_jobs_count = mark_stuck_jobs_as_failed!
+
+ Gitlab::Metrics.add_event(:stuck_export_jobs,
+ failed_jobs_count: failed_jobs_count)
+ end
+
+ private
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def mark_stuck_jobs_as_failed!
+ jids_and_ids = enqueued_exports.pluck(:jid, :id).to_h
+
+ completed_jids = Gitlab::SidekiqStatus.completed_jids(jids_and_ids.keys)
+ return unless completed_jids.any?
+
+ completed_ids = jids_and_ids.values_at(*completed_jids)
+
+ # We select the export states again, because they may have transitioned from
+ # started to finished while we were looking up their Sidekiq status.
+ completed_jobs = enqueued_exports.where(id: completed_ids)
+
+ Sidekiq.logger.info(
+ message: 'Marked stuck export jobs as failed',
+ job_ids: completed_jobs.map(&:jid)
+ )
+
+ completed_jobs.each do |job|
+ job.fail_op
+ end.count
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def enqueued_exports
+ ProjectExportJob.with_status([:started, :queued])
+ end
+end
+# rubocop:enable Scalability/IdempotentWorker
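The jid-to-id lookup in mark_stuck_jobs_as_failed! leans on pluck(:jid, :id).to_h plus Hash#values_at. A standalone illustration of that mapping with made-up values:

# Made-up data illustrating the lookup used above.
jids_and_ids   = { 'abc123' => 1, 'def456' => 2, 'ghi789' => 3 }
completed_jids = ['abc123', 'ghi789'] # as returned by Gitlab::SidekiqStatus.completed_jids
completed_ids  = jids_and_ids.values_at(*completed_jids)
completed_ids  # => [1, 3]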
diff --git a/app/workers/stuck_import_jobs_worker.rb b/app/workers/stuck_import_jobs_worker.rb
index c9675417aa4..6a48b78b22c 100644
--- a/app/workers/stuck_import_jobs_worker.rb
+++ b/app/workers/stuck_import_jobs_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class StuckImportJobsWorker
+class StuckImportJobsWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
# rubocop:disable Scalability/CronWorkerContext
# This worker updates several import states inline and does not schedule
diff --git a/app/workers/stuck_merge_jobs_worker.rb b/app/workers/stuck_merge_jobs_worker.rb
index 9214ae038a8..e0209b8237a 100644
--- a/app/workers/stuck_merge_jobs_worker.rb
+++ b/app/workers/stuck_merge_jobs_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class StuckMergeJobsWorker
+class StuckMergeJobsWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include CronjobQueue # rubocop:disable Scalability/CronWorkerContext
diff --git a/app/workers/system_hook_push_worker.rb b/app/workers/system_hook_push_worker.rb
index fc6237f359a..ff1f2baf058 100644
--- a/app/workers/system_hook_push_worker.rb
+++ b/app/workers/system_hook_push_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class SystemHookPushWorker
+class SystemHookPushWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category :source_code_management
diff --git a/app/workers/todos_destroyer/confidential_issue_worker.rb b/app/workers/todos_destroyer/confidential_issue_worker.rb
index 240a5f98ad5..b29d4168162 100644
--- a/app/workers/todos_destroyer/confidential_issue_worker.rb
+++ b/app/workers/todos_destroyer/confidential_issue_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module TodosDestroyer
- class ConfidentialIssueWorker
+ class ConfidentialIssueWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include TodosDestroyerQueue
diff --git a/app/workers/todos_destroyer/entity_leave_worker.rb b/app/workers/todos_destroyer/entity_leave_worker.rb
index 7db3f6c84b4..558cc32d158 100644
--- a/app/workers/todos_destroyer/entity_leave_worker.rb
+++ b/app/workers/todos_destroyer/entity_leave_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module TodosDestroyer
- class EntityLeaveWorker
+ class EntityLeaveWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include TodosDestroyerQueue
diff --git a/app/workers/todos_destroyer/group_private_worker.rb b/app/workers/todos_destroyer/group_private_worker.rb
index 21ec4abe478..a1943bee2ec 100644
--- a/app/workers/todos_destroyer/group_private_worker.rb
+++ b/app/workers/todos_destroyer/group_private_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module TodosDestroyer
- class GroupPrivateWorker
+ class GroupPrivateWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include TodosDestroyerQueue
diff --git a/app/workers/todos_destroyer/private_features_worker.rb b/app/workers/todos_destroyer/private_features_worker.rb
index 1e68f0fd5ae..6e55467234a 100644
--- a/app/workers/todos_destroyer/private_features_worker.rb
+++ b/app/workers/todos_destroyer/private_features_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module TodosDestroyer
- class PrivateFeaturesWorker
+ class PrivateFeaturesWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include TodosDestroyerQueue
diff --git a/app/workers/todos_destroyer/project_private_worker.rb b/app/workers/todos_destroyer/project_private_worker.rb
index 064e001530c..2a06edc666e 100644
--- a/app/workers/todos_destroyer/project_private_worker.rb
+++ b/app/workers/todos_destroyer/project_private_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module TodosDestroyer
- class ProjectPrivateWorker
+ class ProjectPrivateWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include TodosDestroyerQueue
diff --git a/app/workers/trending_projects_worker.rb b/app/workers/trending_projects_worker.rb
index 208d8b3b9b5..ee7724e0fa8 100644
--- a/app/workers/trending_projects_worker.rb
+++ b/app/workers/trending_projects_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class TrendingProjectsWorker
+class TrendingProjectsWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
# rubocop:disable Scalability/CronWorkerContext
# This worker does not perform work scoped to a context
diff --git a/app/workers/update_external_pull_requests_worker.rb b/app/workers/update_external_pull_requests_worker.rb
index e363b33f1b9..b459d26e487 100644
--- a/app/workers/update_external_pull_requests_worker.rb
+++ b/app/workers/update_external_pull_requests_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class UpdateExternalPullRequestsWorker
+class UpdateExternalPullRequestsWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category :source_code_management
diff --git a/app/workers/update_head_pipeline_for_merge_request_worker.rb b/app/workers/update_head_pipeline_for_merge_request_worker.rb
index e069b16eb90..69698ba81bd 100644
--- a/app/workers/update_head_pipeline_for_merge_request_worker.rb
+++ b/app/workers/update_head_pipeline_for_merge_request_worker.rb
@@ -1,12 +1,12 @@
# frozen_string_literal: true
-class UpdateHeadPipelineForMergeRequestWorker
+class UpdateHeadPipelineForMergeRequestWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include PipelineQueue
queue_namespace :pipeline_processing
feature_category :continuous_integration
- latency_sensitive_worker!
+ urgency :high
worker_resource_boundary :cpu
def perform(merge_request_id)
diff --git a/app/workers/update_merge_requests_worker.rb b/app/workers/update_merge_requests_worker.rb
index ec9739e8a11..63bb6171b9c 100644
--- a/app/workers/update_merge_requests_worker.rb
+++ b/app/workers/update_merge_requests_worker.rb
@@ -1,10 +1,10 @@
# frozen_string_literal: true
-class UpdateMergeRequestsWorker
+class UpdateMergeRequestsWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category :source_code_management
- latency_sensitive_worker!
+ urgency :high
worker_resource_boundary :cpu
weight 3
@@ -18,21 +18,7 @@ class UpdateMergeRequestsWorker
user = User.find_by(id: user_id)
return unless user
- # TODO: remove this benchmarking when we have rich logging
- time = Benchmark.measure do
- MergeRequests::RefreshService.new(project, user).execute(oldrev, newrev, ref)
- end
-
- args_log = [
- "elapsed=#{time.real}",
- "project_id=#{project_id}",
- "user_id=#{user_id}",
- "oldrev=#{oldrev}",
- "newrev=#{newrev}",
- "ref=#{ref}"
- ].join(',')
-
- Rails.logger.info("UpdateMergeRequestsWorker#perform #{args_log}") if time.real > LOG_TIME_THRESHOLD # rubocop:disable Gitlab/RailsLogger
+ MergeRequests::RefreshService.new(project, user).execute(oldrev, newrev, ref)
end
# rubocop: enable CodeReuse/ActiveRecord
end
diff --git a/app/workers/update_project_statistics_worker.rb b/app/workers/update_project_statistics_worker.rb
index e36cebf6f4f..336877d9f57 100644
--- a/app/workers/update_project_statistics_worker.rb
+++ b/app/workers/update_project_statistics_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
# Worker for updating project statistics.
-class UpdateProjectStatisticsWorker
+class UpdateProjectStatisticsWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category :source_code_management
diff --git a/app/workers/upload_checksum_worker.rb b/app/workers/upload_checksum_worker.rb
index d35367145b8..dc2511f718c 100644
--- a/app/workers/upload_checksum_worker.rb
+++ b/app/workers/upload_checksum_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class UploadChecksumWorker
+class UploadChecksumWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category :geo_replication
diff --git a/app/workers/wait_for_cluster_creation_worker.rb b/app/workers/wait_for_cluster_creation_worker.rb
index 621125c8503..2e3feb1a4d1 100644
--- a/app/workers/wait_for_cluster_creation_worker.rb
+++ b/app/workers/wait_for_cluster_creation_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class WaitForClusterCreationWorker
+class WaitForClusterCreationWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ClusterQueue
diff --git a/app/workers/web_hook_worker.rb b/app/workers/web_hook_worker.rb
index c3fa3162c14..6e1e7e7d62e 100644
--- a/app/workers/web_hook_worker.rb
+++ b/app/workers/web_hook_worker.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class WebHookWorker
+class WebHookWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category :integrations
diff --git a/app/workers/x509_certificate_revoke_worker.rb b/app/workers/x509_certificate_revoke_worker.rb
new file mode 100644
index 00000000000..abd0e5eefa7
--- /dev/null
+++ b/app/workers/x509_certificate_revoke_worker.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class X509CertificateRevokeWorker
+ include ApplicationWorker
+
+ feature_category :source_code_management
+
+ idempotent!
+
+ def perform(certificate_id)
+ return unless certificate_id
+
+ X509Certificate.find_by_id(certificate_id).try do |certificate|
+ X509CertificateRevokeService.new.execute(certificate)
+ end
+ end
+end
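This is the only new worker in the batch that declares idempotent!, signalling that running it twice with the same arguments is expected to leave the system in the same state. A hypothetical call site:

# Hypothetical enqueue; revoking an already-revoked certificate should be a no-op,
# which is what makes the idempotent! declaration safe.
X509CertificateRevokeWorker.perform_async(certificate.id)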
diff --git a/babel.config.js b/babel.config.js
index dfb3bf19c36..9c419b93b33 100644
--- a/babel.config.js
+++ b/babel.config.js
@@ -18,12 +18,10 @@ const presets = [
// include stage 3 proposals
const plugins = [
- '@babel/plugin-syntax-dynamic-import',
'@babel/plugin-syntax-import-meta',
'@babel/plugin-proposal-class-properties',
'@babel/plugin-proposal-json-strings',
'@babel/plugin-proposal-private-methods',
- '@babel/plugin-proposal-optional-chaining',
'lodash',
];
diff --git a/bin/actioncable b/bin/actioncable
new file mode 100755
index 00000000000..0aacb19e070
--- /dev/null
+++ b/bin/actioncable
@@ -0,0 +1,63 @@
+#!/bin/sh
+
+set -e
+
+cd $(dirname $0)/..
+app_root=$(pwd)
+
+puma_pidfile="$app_root/tmp/pids/puma_actioncable.pid"
+puma_config="$app_root/config/puma_actioncable.rb"
+
+spawn_puma()
+{
+ exec bundle exec puma --config "${puma_config}" --environment "$RAILS_ENV" "$@"
+}
+
+get_puma_pid()
+{
+ pid=$(cat "${puma_pidfile}")
+ if [ -z "$pid" ] ; then
+ echo "Could not find a PID in $puma_pidfile"
+ exit 1
+ fi
+ echo "${pid}"
+}
+
+start()
+{
+ spawn_puma -d
+}
+
+start_foreground()
+{
+ spawn_puma
+}
+
+stop()
+{
+ get_puma_pid
+ kill -QUIT "$(get_puma_pid)"
+}
+
+reload()
+{
+ kill -USR2 "$(get_puma_pid)"
+}
+
+case "$1" in
+ start)
+ start
+ ;;
+ start_foreground)
+ start_foreground
+ ;;
+ stop)
+ stop
+ ;;
+ reload)
+ reload
+ ;;
+ *)
+ echo "Usage: RAILS_ENV=your_env $0 {start|start_foreground|stop|reload}"
+ ;;
+esac
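A usage sketch for the new ActionCable wrapper above (the environment name is illustrative; the script expects RAILS_ENV to be set, as its usage line shows):

    # Run the ActionCable Puma server in the foreground
    RAILS_ENV=production bin/actioncable start_foreground

    # Or daemonize it and stop it later
    RAILS_ENV=production bin/actioncable start
    RAILS_ENV=production bin/actioncable stop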
diff --git a/bin/background_jobs b/bin/background_jobs
index 06f26df5409..b3d7cc04d4f 100755
--- a/bin/background_jobs
+++ b/bin/background_jobs
@@ -1,91 +1,9 @@
#!/bin/sh
cd $(dirname $0)/..
-app_root=$(pwd)
-sidekiq_pidfile="$app_root/tmp/pids/sidekiq.pid"
-sidekiq_logfile="$app_root/log/sidekiq.log"
-sidekiq_config="$app_root/config/sidekiq_queues.yml"
-gitlab_user=$(ls -l config.ru | awk '{print $3}')
-warn()
-{
- echo "$@" 1>&2
-}
-
-stop()
-{
- bundle exec sidekiqctl stop $sidekiq_pidfile >> $sidekiq_logfile 2>&1
-}
-
-killall()
-{
- pkill -u $gitlab_user -f 'sidekiq [0-9]'
-}
-
-restart()
-{
- if [ -f $sidekiq_pidfile ]; then
- stop
- fi
- killall
- start_sidekiq -P $sidekiq_pidfile -d -L $sidekiq_logfile >> $sidekiq_logfile 2>&1
-}
-
-start_no_deamonize()
-{
- start_sidekiq >> $sidekiq_logfile 2>&1
-}
-
-start_sidekiq()
-{
- cmd="exec"
- chpst=$(which chpst)
-
- if [ -n "$chpst" ]; then
- cmd="${cmd} ${chpst} -P"
- fi
-
- ${cmd} bundle exec sidekiq -C "${sidekiq_config}" -e $RAILS_ENV "$@"
-}
-
-load_ok()
-{
- sidekiq_pid=$(cat $sidekiq_pidfile)
- if [ -z "$sidekiq_pid" ] ; then
- warn "Could not find a PID in $sidekiq_pidfile"
- exit 0
- fi
-
- if (ps -p $sidekiq_pid -o args | grep '\([0-9]\+\) of \1 busy' 1>&2) ; then
- warn "Too many busy Sidekiq workers"
- exit 1
- fi
-
- exit 0
-}
-
-case "$1" in
- stop)
- stop
- ;;
- start)
- restart
- ;;
- start_no_deamonize)
- start_no_deamonize
- ;;
- start_foreground)
- start_sidekiq
- ;;
- restart)
- restart
- ;;
- killall)
- killall
- ;;
- load_ok)
- load_ok
- ;;
- *)
- echo "Usage: RAILS_ENV=your_env $0 {stop|start|start_no_deamonize|restart|killall|load_ok}"
-esac
+if [ -n "$SIDEKIQ_WORKERS" ] ; then
+ exec bin/background_jobs_sk_cluster "$@"
+else
+ exec bin/background_jobs_sk "$@"
+fi
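With this change bin/background_jobs is only a dispatcher: it delegates to the cluster script when SIDEKIQ_WORKERS is set and to the single-process script otherwise. A usage sketch (values illustrative):

    # Single Sidekiq process (delegates to bin/background_jobs_sk)
    RAILS_ENV=production bin/background_jobs start

    # Sidekiq cluster with two worker processes (delegates to bin/background_jobs_sk_cluster)
    RAILS_ENV=production SIDEKIQ_WORKERS=2 bin/background_jobs start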
diff --git a/bin/background_jobs_sk b/bin/background_jobs_sk
new file mode 100755
index 00000000000..25218718bb8
--- /dev/null
+++ b/bin/background_jobs_sk
@@ -0,0 +1,67 @@
+#!/bin/sh
+
+cd $(dirname $0)/..
+app_root=$(pwd)
+sidekiq_pidfile="$app_root/tmp/pids/sidekiq.pid"
+sidekiq_logfile="$app_root/log/sidekiq.log"
+sidekiq_config="$app_root/config/sidekiq_queues.yml"
+gitlab_user=$(ls -l config.ru | awk '{print $3}')
+
+warn()
+{
+ echo "$@" 1>&2
+}
+
+stop()
+{
+ bundle exec sidekiqctl stop $sidekiq_pidfile >> $sidekiq_logfile 2>&1
+}
+
+restart()
+{
+ if [ -f $sidekiq_pidfile ]; then
+ stop
+ fi
+
+ pkill -u $gitlab_user -f 'sidekiq [0-9]'
+ start_sidekiq -P $sidekiq_pidfile -d -L $sidekiq_logfile >> $sidekiq_logfile 2>&1
+}
+
+# Starts in the foreground but writes output to the logfile instead of stdout.
+start_silent()
+{
+ start_sidekiq >> $sidekiq_logfile 2>&1
+}
+
+start_sidekiq()
+{
+ cmd="exec"
+ chpst=$(which chpst)
+
+ if [ -n "$chpst" ]; then
+ cmd="${cmd} ${chpst} -P"
+ fi
+
+ ${cmd} bundle exec sidekiq -C "${sidekiq_config}" -e $RAILS_ENV "$@"
+}
+
+case "$1" in
+ stop)
+ stop
+ ;;
+ start)
+ restart
+ ;;
+ start_silent)
+ warn "Deprecated: Will be removed at 13.0 (see https://gitlab.com/gitlab-org/gitlab/-/issues/196731)."
+ start_silent
+ ;;
+ start_foreground)
+ start_sidekiq
+ ;;
+ restart)
+ restart
+ ;;
+ *)
+ echo "Usage: RAILS_ENV=<env> $0 {stop|start|start_silent|start_foreground|restart}"
+esac
diff --git a/bin/background_jobs_sk_cluster b/bin/background_jobs_sk_cluster
new file mode 100755
index 00000000000..9f44bb7381f
--- /dev/null
+++ b/bin/background_jobs_sk_cluster
@@ -0,0 +1,76 @@
+#!/bin/sh
+
+cd $(dirname $0)/..
+app_root=$(pwd)
+sidekiq_pidfile="$app_root/tmp/pids/sidekiq-cluster.pid"
+sidekiq_logfile="$app_root/log/sidekiq.log"
+gitlab_user=$(ls -l config.ru | awk '{print $3}')
+
+warn()
+{
+ echo "$@" 1>&2
+}
+
+get_sidekiq_pid()
+{
+ if [ ! -f $sidekiq_pidfile ]; then
+ warn "No pidfile found at $sidekiq_pidfile; is Sidekiq running?"
+ return
+ fi
+
+ cat $sidekiq_pidfile
+}
+
+stop()
+{
+ sidekiq_pid=$(get_sidekiq_pid)
+
+ if [ $sidekiq_pid ]; then
+ kill -TERM $sidekiq_pid
+ fi
+}
+
+restart()
+{
+ if [ -f $sidekiq_pidfile ]; then
+ stop
+ fi
+
+ warn "Sidekiq output will be written to $sidekiq_logfile"
+ start_sidekiq >> $sidekiq_logfile 2>&1
+}
+
+start_sidekiq()
+{
+ cmd="exec"
+ chpst=$(which chpst)
+
+ if [ -n "$chpst" ]; then
+ cmd="${cmd} ${chpst} -P"
+ fi
+
+ # sidekiq-cluster expects '*' '*' arguments (one wildcard for each process).
+ for (( i=1; i<=$SIDEKIQ_WORKERS; i++ ))
+ do
+ processes_args+=("*")
+ done
+
+ ${cmd} bin/sidekiq-cluster "${processes_args[@]}" -P $sidekiq_pidfile -e $RAILS_ENV
+}
+
+case "$1" in
+ stop)
+ stop
+ ;;
+ start)
+ restart &
+ ;;
+ start_foreground)
+ start_sidekiq
+ ;;
+ restart)
+ restart &
+ ;;
+ *)
+ echo "Usage: RAILS_ENV=<env> SIDEKIQ_WORKERS=<n> $0 {stop|start|start_foreground|restart}"
+esac
diff --git a/bin/mail_room b/bin/mail_room
index 74a84f5b2b4..2539e3d388e 100755
--- a/bin/mail_room
+++ b/bin/mail_room
@@ -19,7 +19,7 @@ get_mail_room_pid()
start()
{
- bin/daemon_with_pidfile $mail_room_pidfile bundle exec mail_room -q -c $mail_room_config >> $mail_room_logfile 2>&1
+ bin/daemon_with_pidfile $mail_room_pidfile bundle exec mail_room --log-exit-as json -q -c $mail_room_config >> $mail_room_logfile 2>&1
}
stop()
diff --git a/bin/sidekiq-cluster b/bin/sidekiq-cluster
new file mode 100755
index 00000000000..2204a222b88
--- /dev/null
+++ b/bin/sidekiq-cluster
@@ -0,0 +1,19 @@
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+require 'optparse'
+require_relative '../lib/gitlab'
+require_relative '../lib/gitlab/utils'
+require_relative '../lib/gitlab/sidekiq_config/cli_methods'
+require_relative '../lib/gitlab/sidekiq_cluster'
+require_relative '../lib/gitlab/sidekiq_cluster/cli'
+
+Thread.abort_on_exception = true
+
+cli = Gitlab::SidekiqCluster::CLI.new
+
+begin
+ cli.run
+rescue Gitlab::SidekiqCluster::CLI::CommandError => error
+ abort error.message
+end
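bin/background_jobs_sk_cluster above invokes this executable with one '*' wildcard per requested process. A direct invocation sketch under the same assumptions (pidfile path and environment are illustrative):

    # Start two Sidekiq processes, each listening on all queues
    bin/sidekiq-cluster '*' '*' -P tmp/pids/sidekiq-cluster.pid -e production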
diff --git a/cable/config.ru b/cable/config.ru
new file mode 100644
index 00000000000..3b93c483ded
--- /dev/null
+++ b/cable/config.ru
@@ -0,0 +1,6 @@
+# frozen_string_literal: true
+
+require ::File.expand_path('../../config/environment', __FILE__)
+Rails.application.eager_load!
+
+run ActionCable.server
diff --git a/changelogs/unreleased/10429_set_dast_default_git_strategy_to_none.yml b/changelogs/unreleased/10429_set_dast_default_git_strategy_to_none.yml
new file mode 100644
index 00000000000..35ef99383fc
--- /dev/null
+++ b/changelogs/unreleased/10429_set_dast_default_git_strategy_to_none.yml
@@ -0,0 +1,5 @@
+---
+title: Project repositories are no longer cloned by default when running DAST
+merge_request: 25320
+author:
+type: performance
diff --git a/changelogs/unreleased/10526-smartcard_support_different_hostname.yml b/changelogs/unreleased/10526-smartcard_support_different_hostname.yml
new file mode 100644
index 00000000000..6990449d171
--- /dev/null
+++ b/changelogs/unreleased/10526-smartcard_support_different_hostname.yml
@@ -0,0 +1,5 @@
+---
+title: Make hostname configurable for smartcard authentication
+merge_request: 26411
+author:
+type: added
diff --git a/changelogs/unreleased/11821-insights-back-json-fix.yml b/changelogs/unreleased/11821-insights-back-json-fix.yml
new file mode 100644
index 00000000000..69fde018420
--- /dev/null
+++ b/changelogs/unreleased/11821-insights-back-json-fix.yml
@@ -0,0 +1,5 @@
+---
+title: Fix Insights displaying JSON on back navigation
+merge_request: 25801
+author:
+type: fixed
diff --git a/changelogs/unreleased/118503.yml b/changelogs/unreleased/118503.yml
new file mode 100644
index 00000000000..3aa2e0399a8
--- /dev/null
+++ b/changelogs/unreleased/118503.yml
@@ -0,0 +1,5 @@
+---
+title: Fix infinite spinner on error detail page
+merge_request: 26188
+author:
+type: fixed
diff --git a/changelogs/unreleased/119107-respect-dnt-for-experiments.yml b/changelogs/unreleased/119107-respect-dnt-for-experiments.yml
new file mode 100644
index 00000000000..0132599d40b
--- /dev/null
+++ b/changelogs/unreleased/119107-respect-dnt-for-experiments.yml
@@ -0,0 +1,5 @@
+---
+title: 'Use DNT: 1 as an experiment opt-out mechanism'
+merge_request: 22100
+author:
+type: other
diff --git a/changelogs/unreleased/119429-decouple-webhooks-from-integrations-within-project-settings.yml b/changelogs/unreleased/119429-decouple-webhooks-from-integrations-within-project-settings.yml
new file mode 100644
index 00000000000..32a15defb58
--- /dev/null
+++ b/changelogs/unreleased/119429-decouple-webhooks-from-integrations-within-project-settings.yml
@@ -0,0 +1,5 @@
+---
+title: Decouple Webhooks from Integrations within Project > Settings
+merge_request: 23136
+author:
+type: changed
diff --git a/changelogs/unreleased/12577-generate-smaller-image-sizes-for-designs.yml b/changelogs/unreleased/12577-generate-smaller-image-sizes-for-designs.yml
new file mode 100644
index 00000000000..5a013bd8e96
--- /dev/null
+++ b/changelogs/unreleased/12577-generate-smaller-image-sizes-for-designs.yml
@@ -0,0 +1,5 @@
+---
+title: Add id and image_v432x230 columns to design_management_designs_versions
+merge_request: 22860
+author:
+type: changed
diff --git a/changelogs/unreleased/13005-allow-to-use-issue-issues-and-merge_request-merge_requests-for-issu.yml b/changelogs/unreleased/13005-allow-to-use-issue-issues-and-merge_request-merge_requests-for-issu.yml
new file mode 100644
index 00000000000..658a27723ea
--- /dev/null
+++ b/changelogs/unreleased/13005-allow-to-use-issue-issues-and-merge_request-merge_requests-for-issu.yml
@@ -0,0 +1,5 @@
+---
+title: Allow issues/merge_requests as an issuable_type in Insights configuration
+merge_request: 26061
+author:
+type: added
diff --git a/changelogs/unreleased/13717-es-indexing-without-index.yml b/changelogs/unreleased/13717-es-indexing-without-index.yml
new file mode 100644
index 00000000000..c232ee575c7
--- /dev/null
+++ b/changelogs/unreleased/13717-es-indexing-without-index.yml
@@ -0,0 +1,5 @@
+---
+title: 'Elasticsearch: when index is absent warn users and disable index button'
+merge_request: 25254
+author:
+type: fixed
diff --git a/changelogs/unreleased/13810-cluster-environments-table.yml b/changelogs/unreleased/13810-cluster-environments-table.yml
new file mode 100644
index 00000000000..bbcebec9106
--- /dev/null
+++ b/changelogs/unreleased/13810-cluster-environments-table.yml
@@ -0,0 +1,5 @@
+---
+title: Add responsivity to cluster environments table
+merge_request: 25501
+author:
+type: fixed
diff --git a/changelogs/unreleased/14080-slack-multiple-channels.yml b/changelogs/unreleased/14080-slack-multiple-channels.yml
new file mode 100644
index 00000000000..efc7299783b
--- /dev/null
+++ b/changelogs/unreleased/14080-slack-multiple-channels.yml
@@ -0,0 +1,5 @@
+---
+title: Allow multiple Slack channels for notifications
+merge_request: 24132
+author:
+type: added
diff --git a/changelogs/unreleased/15103-markup-tips-for-markdown-shown-while-editing-wiki-pages-in-other-fo.yml b/changelogs/unreleased/15103-markup-tips-for-markdown-shown-while-editing-wiki-pages-in-other-fo.yml
new file mode 100644
index 00000000000..723a52d65a1
--- /dev/null
+++ b/changelogs/unreleased/15103-markup-tips-for-markdown-shown-while-editing-wiki-pages-in-other-fo.yml
@@ -0,0 +1,5 @@
+---
+title: Markup tips for Markdown shown while editing wiki pages in other formats
+merge_request: 25974
+author:
+type: fixed
diff --git a/changelogs/unreleased/16131-wiki-titles-with-special-characters.yml b/changelogs/unreleased/16131-wiki-titles-with-special-characters.yml
new file mode 100644
index 00000000000..3ca25e52d44
--- /dev/null
+++ b/changelogs/unreleased/16131-wiki-titles-with-special-characters.yml
@@ -0,0 +1,5 @@
+---
+title: Tweak wiki page title handling
+merge_request: 25647
+author:
+type: changed
diff --git a/changelogs/unreleased/193170-fix-deployment-ref-validation.yml b/changelogs/unreleased/193170-fix-deployment-ref-validation.yml
new file mode 100644
index 00000000000..921cf96b7ba
--- /dev/null
+++ b/changelogs/unreleased/193170-fix-deployment-ref-validation.yml
@@ -0,0 +1,5 @@
+---
+title: Use sha instead of ref when creating a new ref on deployment creation
+merge_request: 23170
+author:
+type: fixed
diff --git a/changelogs/unreleased/195871-fix-duplicate-weight-change-notes.yml b/changelogs/unreleased/195871-fix-duplicate-weight-change-notes.yml
new file mode 100644
index 00000000000..24bcfd97392
--- /dev/null
+++ b/changelogs/unreleased/195871-fix-duplicate-weight-change-notes.yml
@@ -0,0 +1,5 @@
+---
+title: Ensure weight changes no longer render duplicate system notes
+merge_request: 26014
+author:
+type: fixed
diff --git a/changelogs/unreleased/195969-multi-select-on-issue-boards-inconsistent-erratic.yml b/changelogs/unreleased/195969-multi-select-on-issue-boards-inconsistent-erratic.yml
new file mode 100644
index 00000000000..614379d7ad8
--- /dev/null
+++ b/changelogs/unreleased/195969-multi-select-on-issue-boards-inconsistent-erratic.yml
@@ -0,0 +1,5 @@
+---
+title: Improved selection of multiple cards
+merge_request:
+author: Gwen_
+type: fixed
diff --git a/changelogs/unreleased/196609-remove-staging.yml b/changelogs/unreleased/196609-remove-staging.yml
new file mode 100644
index 00000000000..fc60091ba4d
--- /dev/null
+++ b/changelogs/unreleased/196609-remove-staging.yml
@@ -0,0 +1,5 @@
+---
+title: Remove staging from commit workflow in the Web IDE
+merge_request: 26151
+author:
+type: removed
diff --git a/changelogs/unreleased/196646-replace-underscore-with-lodash-for-app-assets-javascripts-badges.yml b/changelogs/unreleased/196646-replace-underscore-with-lodash-for-app-assets-javascripts-badges.yml
new file mode 100644
index 00000000000..691fddc753e
--- /dev/null
+++ b/changelogs/unreleased/196646-replace-underscore-with-lodash-for-app-assets-javascripts-badges.yml
@@ -0,0 +1,5 @@
+---
+title: Replace underscore with lodash for ./app/assets/javascripts/badges
+merge_request: 24966
+author: Jacopo Beschi @jacopo-beschi
+type: changed
diff --git a/changelogs/unreleased/196648-replace-_-with-lodash.yml b/changelogs/unreleased/196648-replace-_-with-lodash.yml
new file mode 100644
index 00000000000..397e2ded94d
--- /dev/null
+++ b/changelogs/unreleased/196648-replace-_-with-lodash.yml
@@ -0,0 +1,5 @@
+---
+title: Replaced underscore with lodash for app/assets/javascripts/lib
+merge_request: 25042
+author: Shubham Pandey
+type: other
diff --git a/changelogs/unreleased/196688-replace-underscore-with-lodash-for-app-assets-javascripts-deploy_k.yml b/changelogs/unreleased/196688-replace-underscore-with-lodash-for-app-assets-javascripts-deploy_k.yml
new file mode 100644
index 00000000000..42099b8771f
--- /dev/null
+++ b/changelogs/unreleased/196688-replace-underscore-with-lodash-for-app-assets-javascripts-deploy_k.yml
@@ -0,0 +1,5 @@
+---
+title: Replace underscore with lodash for ./app/assets/javascripts/deploy_keys
+merge_request: 24965
+author: Jacopo Beschi @jacopo-beschi
+type: changed
diff --git a/changelogs/unreleased/196718-remove-filter-epic-counts.yml b/changelogs/unreleased/196718-remove-filter-epic-counts.yml
new file mode 100644
index 00000000000..a3616ab490e
--- /dev/null
+++ b/changelogs/unreleased/196718-remove-filter-epic-counts.yml
@@ -0,0 +1,5 @@
+---
+title: Remove visibility check from epic descendant counts
+merge_request: 25975
+author:
+type: changed
diff --git a/changelogs/unreleased/196832-drop-feature-toggle.yml b/changelogs/unreleased/196832-drop-feature-toggle.yml
new file mode 100644
index 00000000000..bd9de0e33ca
--- /dev/null
+++ b/changelogs/unreleased/196832-drop-feature-toggle.yml
@@ -0,0 +1,5 @@
+---
+title: Optimize project representation in large imports
+merge_request: !22598
+author:
+type: performance
diff --git a/changelogs/unreleased/196883-repository-link-grpc-graceful-failure.yml b/changelogs/unreleased/196883-repository-link-grpc-graceful-failure.yml
new file mode 100644
index 00000000000..3cc335c7ee9
--- /dev/null
+++ b/changelogs/unreleased/196883-repository-link-grpc-graceful-failure.yml
@@ -0,0 +1,5 @@
+---
+title: Ensure RepositoryLinkFilter handles Gitaly failures gracefully
+merge_request: 26531
+author:
+type: performance
diff --git a/changelogs/unreleased/197227-milestone-tab-async.yml b/changelogs/unreleased/197227-milestone-tab-async.yml
new file mode 100644
index 00000000000..c2aa3dd4485
--- /dev/null
+++ b/changelogs/unreleased/197227-milestone-tab-async.yml
@@ -0,0 +1,5 @@
+---
+title: Search issues in GraphQL API by milestone title and assignees
+merge_request: 25794
+author:
+type: added
diff --git a/changelogs/unreleased/197480-remove-package-file_type.yml b/changelogs/unreleased/197480-remove-package-file_type.yml
new file mode 100644
index 00000000000..c315a0289f5
--- /dev/null
+++ b/changelogs/unreleased/197480-remove-package-file_type.yml
@@ -0,0 +1,5 @@
+---
+title: Remove unused file_type column from packages_package_files
+merge_request: 26527
+author:
+type: changed
diff --git a/changelogs/unreleased/197790-hide-private-commit-emails-in-notification-settings.yml b/changelogs/unreleased/197790-hide-private-commit-emails-in-notification-settings.yml
new file mode 100644
index 00000000000..d8331b9a160
--- /dev/null
+++ b/changelogs/unreleased/197790-hide-private-commit-emails-in-notification-settings.yml
@@ -0,0 +1,5 @@
+---
+title: Hide the private commit email in Notification email list
+merge_request: 25099
+author: briankabiro
+type: changed
diff --git a/changelogs/unreleased/197918-add-package-type-param-to-group-packages-api.yml b/changelogs/unreleased/197918-add-package-type-param-to-group-packages-api.yml
new file mode 100644
index 00000000000..ff9eb13bea6
--- /dev/null
+++ b/changelogs/unreleased/197918-add-package-type-param-to-group-packages-api.yml
@@ -0,0 +1,5 @@
+---
+title: Add package_type as a filter option to the group packages list API endpoint
+merge_request: 26833
+author:
+type: added
diff --git a/changelogs/unreleased/197918-add-package-type-to-project-packages-api.yml b/changelogs/unreleased/197918-add-package-type-to-project-packages-api.yml
new file mode 100644
index 00000000000..9f7f6888092
--- /dev/null
+++ b/changelogs/unreleased/197918-add-package-type-to-project-packages-api.yml
@@ -0,0 +1,5 @@
+---
+title: Add package_type as a filter option to the packages list API endpoint
+merge_request: 25816
+author:
+type: added
diff --git a/changelogs/unreleased/197960-package-detail-activity.yml b/changelogs/unreleased/197960-package-detail-activity.yml
new file mode 100644
index 00000000000..3e7d9328d0a
--- /dev/null
+++ b/changelogs/unreleased/197960-package-detail-activity.yml
@@ -0,0 +1,5 @@
+---
+title: Adds new activity panel to package details page
+merge_request: 25534
+author:
+type: added
diff --git a/changelogs/unreleased/198050-frontend-pagination-in-log-explorer.yml b/changelogs/unreleased/198050-frontend-pagination-in-log-explorer.yml
new file mode 100644
index 00000000000..a8c8466a613
--- /dev/null
+++ b/changelogs/unreleased/198050-frontend-pagination-in-log-explorer.yml
@@ -0,0 +1,5 @@
+---
+title: More log entries are loaded when logs are scrolled to the top
+merge_request: 26254
+author:
+type: added
diff --git a/changelogs/unreleased/198052-in-the-jobs-page-the-allowed-to-fail-badge-looks-like-an-error.yml b/changelogs/unreleased/198052-in-the-jobs-page-the-allowed-to-fail-badge-looks-like-an-error.yml
new file mode 100644
index 00000000000..c69fb956219
--- /dev/null
+++ b/changelogs/unreleased/198052-in-the-jobs-page-the-allowed-to-fail-badge-looks-like-an-error.yml
@@ -0,0 +1,5 @@
+---
+title: Hides the "Allowed to fail" tag on jobs that are successful
+merge_request: 25458
+author:
+type: changed
diff --git a/changelogs/unreleased/198325-migrate-design-mentions-to-db-table.yml b/changelogs/unreleased/198325-migrate-design-mentions-to-db-table.yml
new file mode 100644
index 00000000000..3a26827378c
--- /dev/null
+++ b/changelogs/unreleased/198325-migrate-design-mentions-to-db-table.yml
@@ -0,0 +1,5 @@
+---
+title: Migrate mentions for design notes to design_user_mentions DB table
+merge_request: 23704
+author:
+type: changed
diff --git a/changelogs/unreleased/198326-migrate-commit-notes-mentions-to-db-table.yml b/changelogs/unreleased/198326-migrate-commit-notes-mentions-to-db-table.yml
new file mode 100644
index 00000000000..c8808425251
--- /dev/null
+++ b/changelogs/unreleased/198326-migrate-commit-notes-mentions-to-db-table.yml
@@ -0,0 +1,5 @@
+---
+title: Migrate mentions for commit notes to commit_user_mentions DB table
+merge_request: 23859
+author:
+type: changed
diff --git a/changelogs/unreleased/198338-migrate-mr-mentions-to-db-table.yml b/changelogs/unreleased/198338-migrate-mr-mentions-to-db-table.yml
new file mode 100644
index 00000000000..578684589b2
--- /dev/null
+++ b/changelogs/unreleased/198338-migrate-mr-mentions-to-db-table.yml
@@ -0,0 +1,5 @@
+---
+title: Migrate mentions for merge requests to DB table
+merge_request: 25826
+author:
+type: changed
diff --git a/changelogs/unreleased/198391-add-user-plan-and-trial-status-to-api.yml b/changelogs/unreleased/198391-add-user-plan-and-trial-status-to-api.yml
new file mode 100644
index 00000000000..0389d7ff9ce
--- /dev/null
+++ b/changelogs/unreleased/198391-add-user-plan-and-trial-status-to-api.yml
@@ -0,0 +1,5 @@
+---
+title: Expose `plan` and `trial` to `/users/:id` endpoint
+merge_request: 25151
+author:
+type: added
diff --git a/changelogs/unreleased/198604-monaco-snippets.yml b/changelogs/unreleased/198604-monaco-snippets.yml
new file mode 100644
index 00000000000..79f9584ec56
--- /dev/null
+++ b/changelogs/unreleased/198604-monaco-snippets.yml
@@ -0,0 +1,5 @@
+---
+title: Replaced ACE with Monaco editor for Snippets
+merge_request: 25465
+author:
+type: added
diff --git a/changelogs/unreleased/19880-sort-closed-issues-by-recently-closed.yml b/changelogs/unreleased/19880-sort-closed-issues-by-recently-closed.yml
new file mode 100644
index 00000000000..a015ef1b132
--- /dev/null
+++ b/changelogs/unreleased/19880-sort-closed-issues-by-recently-closed.yml
@@ -0,0 +1,5 @@
+---
+title: Sort closed issues on issue boards using time of closing
+merge_request: 23442
+author: briankabiro
+type: changed
diff --git a/changelogs/unreleased/199197-make-has-been-reverted-check-cheaper.yml b/changelogs/unreleased/199197-make-has-been-reverted-check-cheaper.yml
new file mode 100644
index 00000000000..0337a77eeb3
--- /dev/null
+++ b/changelogs/unreleased/199197-make-has-been-reverted-check-cheaper.yml
@@ -0,0 +1,5 @@
+---
+title: Improve performance of the "has this commit been reverted?" check
+merge_request: 26784
+author:
+type: performance
diff --git a/changelogs/unreleased/199220-hide-snippet-blob-search.yml b/changelogs/unreleased/199220-hide-snippet-blob-search.yml
new file mode 100644
index 00000000000..083d8edaeb3
--- /dev/null
+++ b/changelogs/unreleased/199220-hide-snippet-blob-search.yml
@@ -0,0 +1,5 @@
+---
+title: Remove and deprecate snippet content search
+merge_request: 26359
+author:
+type: removed
diff --git a/changelogs/unreleased/199220-snippet-index-desc.yml b/changelogs/unreleased/199220-snippet-index-desc.yml
new file mode 100644
index 00000000000..c1a1063eef5
--- /dev/null
+++ b/changelogs/unreleased/199220-snippet-index-desc.yml
@@ -0,0 +1,5 @@
+---
+title: Add trigram index on snippet description
+merge_request: 26341
+author:
+type: performance
diff --git a/changelogs/unreleased/199220-snippet-search.yml b/changelogs/unreleased/199220-snippet-search.yml
new file mode 100644
index 00000000000..d0b17fd3135
--- /dev/null
+++ b/changelogs/unreleased/199220-snippet-search.yml
@@ -0,0 +1,5 @@
+---
+title: Include snippet description as part of snippet title search (basic search).
+merge_request: 25961
+author:
+type: added
diff --git a/changelogs/unreleased/199370-move-deploy_keys-to-cicd.yml b/changelogs/unreleased/199370-move-deploy_keys-to-cicd.yml
new file mode 100644
index 00000000000..04d07bbc550
--- /dev/null
+++ b/changelogs/unreleased/199370-move-deploy_keys-to-cicd.yml
@@ -0,0 +1,5 @@
+---
+title: Moved Deploy Keys from Repository to CI/CD settings.
+merge_request: 25444
+author:
+type: changed
diff --git a/changelogs/unreleased/199400-send-email-notifications-for-generic-alerts.yml b/changelogs/unreleased/199400-send-email-notifications-for-generic-alerts.yml
new file mode 100644
index 00000000000..dc5e40f42ea
--- /dev/null
+++ b/changelogs/unreleased/199400-send-email-notifications-for-generic-alerts.yml
@@ -0,0 +1,5 @@
+---
+title: Send alert emails for generic incident alerts
+merge_request: 24414
+author:
+type: added
diff --git a/changelogs/unreleased/199422-maximum-size-for-gitlab-pages-says-to-set-it-to-0-for-unlimited-bu.yml b/changelogs/unreleased/199422-maximum-size-for-gitlab-pages-says-to-set-it-to-0-for-unlimited-bu.yml
new file mode 100644
index 00000000000..5643e11e424
--- /dev/null
+++ b/changelogs/unreleased/199422-maximum-size-for-gitlab-pages-says-to-set-it-to-0-for-unlimited-bu.yml
@@ -0,0 +1,6 @@
+---
+title: Allow 0 to be set for pages maximum size per project/group to indicate unlimited
+ size
+merge_request: 25677
+author:
+type: fixed
diff --git a/changelogs/unreleased/199438-fix-logs-encoding.yml b/changelogs/unreleased/199438-fix-logs-encoding.yml
new file mode 100644
index 00000000000..c8a803f43e6
--- /dev/null
+++ b/changelogs/unreleased/199438-fix-logs-encoding.yml
@@ -0,0 +1,5 @@
+---
+title: Fix 500 error caused by Kubernetes logs not being encoded in UTF-8
+merge_request: 25999
+author:
+type: fixed
diff --git a/changelogs/unreleased/199442-explore-projects.yml b/changelogs/unreleased/199442-explore-projects.yml
new file mode 100644
index 00000000000..4acfae2da04
--- /dev/null
+++ b/changelogs/unreleased/199442-explore-projects.yml
@@ -0,0 +1,5 @@
+---
+title: Handle Gitaly failure when fetching license
+merge_request: 24310
+author:
+type: fixed
diff --git a/changelogs/unreleased/199790-approval-settings-target-branch-api.yml b/changelogs/unreleased/199790-approval-settings-target-branch-api.yml
new file mode 100644
index 00000000000..b71e9323624
--- /dev/null
+++ b/changelogs/unreleased/199790-approval-settings-target-branch-api.yml
@@ -0,0 +1,5 @@
+---
+title: Filter rules by target_branch in approval_settings
+merge_request: 26439
+author:
+type: added
diff --git a/changelogs/unreleased/199908-use-only-the-first-line-of-the-commit-message-on-chat-service-noti.yml b/changelogs/unreleased/199908-use-only-the-first-line-of-the-commit-message-on-chat-service-noti.yml
new file mode 100644
index 00000000000..a0924d76513
--- /dev/null
+++ b/changelogs/unreleased/199908-use-only-the-first-line-of-the-commit-message-on-chat-service-noti.yml
@@ -0,0 +1,5 @@
+---
+title: Use only the first line of the commit message on chat service notification
+merge_request: 25224
+author: Takuya Noguchi
+type: changed
diff --git a/changelogs/unreleased/199998-container-expiration-policy-settings-hide-form-on-api-error-2.yml b/changelogs/unreleased/199998-container-expiration-policy-settings-hide-form-on-api-error-2.yml
new file mode 100644
index 00000000000..2a00007e589
--- /dev/null
+++ b/changelogs/unreleased/199998-container-expiration-policy-settings-hide-form-on-api-error-2.yml
@@ -0,0 +1,5 @@
+---
+title: Container expiration policy settings hide form on API error
+merge_request: 26303
+author:
+type: fixed
diff --git a/changelogs/unreleased/200107-avatar-content-type-does-not-match-file-extension.yml b/changelogs/unreleased/200107-avatar-content-type-does-not-match-file-extension.yml
new file mode 100644
index 00000000000..09ab5cd6e3b
--- /dev/null
+++ b/changelogs/unreleased/200107-avatar-content-type-does-not-match-file-extension.yml
@@ -0,0 +1,5 @@
+---
+title: Replace avatar and favicon upload type consistency validation with content whitelist validation
+merge_request: 25401
+author:
+type: changed
diff --git a/changelogs/unreleased/20083-conflict-between-project-s-permission-settings-description-and-actu.yml b/changelogs/unreleased/20083-conflict-between-project-s-permission-settings-description-and-actu.yml
new file mode 100644
index 00000000000..0fe1c7d6b9d
--- /dev/null
+++ b/changelogs/unreleased/20083-conflict-between-project-s-permission-settings-description-and-actu.yml
@@ -0,0 +1,5 @@
+---
+title: Update project's permission settings description to reflect actual permissions
+merge_request: 25523
+author:
+type: other
diff --git a/changelogs/unreleased/201427-issue-board-due-date-picker-unable-to-select-custom-year-or-month.yml b/changelogs/unreleased/201427-issue-board-due-date-picker-unable-to-select-custom-year-or-month.yml
new file mode 100644
index 00000000000..d831a674efe
--- /dev/null
+++ b/changelogs/unreleased/201427-issue-board-due-date-picker-unable-to-select-custom-year-or-month.yml
@@ -0,0 +1,6 @@
+---
+title: Added the multiSelect option to stop event propagation when clicking on the
+ dropdown
+merge_request: 24611
+author: Gwen_
+type: fixed
diff --git a/changelogs/unreleased/201771.yml b/changelogs/unreleased/201771.yml
new file mode 100644
index 00000000000..8677b3d853c
--- /dev/null
+++ b/changelogs/unreleased/201771.yml
@@ -0,0 +1,5 @@
+---
+title: Replace content_viewer_spec setTimeouts with semantic actions / events
+merge_request:
+author: Oregand
+type: other
diff --git a/changelogs/unreleased/201931-white-syntax-highlighting-theme-for-web-ide.yml b/changelogs/unreleased/201931-white-syntax-highlighting-theme-for-web-ide.yml
new file mode 100644
index 00000000000..ee11d6b374a
--- /dev/null
+++ b/changelogs/unreleased/201931-white-syntax-highlighting-theme-for-web-ide.yml
@@ -0,0 +1,5 @@
+---
+title: Fix White syntax highlighting theme in Monaco to closely match the Pygments theme.
+merge_request: 25966
+author:
+type: fixed
diff --git a/changelogs/unreleased/201999-define-formatter-y-axis.yml b/changelogs/unreleased/201999-define-formatter-y-axis.yml
new file mode 100644
index 00000000000..3a6039cd1bf
--- /dev/null
+++ b/changelogs/unreleased/201999-define-formatter-y-axis.yml
@@ -0,0 +1,5 @@
+---
+title: Add properties to the dashboard definition to customize y-axis format
+merge_request: 25785
+author:
+type: added
diff --git a/changelogs/unreleased/201999-formatter-column-chart.yml b/changelogs/unreleased/201999-formatter-column-chart.yml
new file mode 100644
index 00000000000..c5d8935f4f7
--- /dev/null
+++ b/changelogs/unreleased/201999-formatter-column-chart.yml
@@ -0,0 +1,5 @@
+---
+title: Use y-axis format configuration in column charts
+merge_request: 26356
+author:
+type: changed
diff --git a/changelogs/unreleased/202008-disable-drag-from-epic-tree-dropdown-button.yml b/changelogs/unreleased/202008-disable-drag-from-epic-tree-dropdown-button.yml
new file mode 100644
index 00000000000..a1f92122210
--- /dev/null
+++ b/changelogs/unreleased/202008-disable-drag-from-epic-tree-dropdown-button.yml
@@ -0,0 +1,5 @@
+---
+title: Disable draggable behavior on the epic tree chevron (collapse/expand) button
+merge_request: 25729
+author:
+type: changed
diff --git a/changelogs/unreleased/202094-enable-ff-by-default.yml b/changelogs/unreleased/202094-enable-ff-by-default.yml
new file mode 100644
index 00000000000..2b307d945c8
--- /dev/null
+++ b/changelogs/unreleased/202094-enable-ff-by-default.yml
@@ -0,0 +1,5 @@
+---
+title: Store first commit's authored_date for value stream calculation on merge
+merge_request: 26885
+author:
+type: changed
diff --git a/changelogs/unreleased/202233-migrating-fa-spinner-whithin-top-level-projects.yml b/changelogs/unreleased/202233-migrating-fa-spinner-whithin-top-level-projects.yml
new file mode 100644
index 00000000000..baf8183b18e
--- /dev/null
+++ b/changelogs/unreleased/202233-migrating-fa-spinner-whithin-top-level-projects.yml
@@ -0,0 +1,5 @@
+---
+title: Update loader for various project views
+merge_request: 25755
+author: Phellipe K Ribeiro
+type: other
diff --git a/changelogs/unreleased/202271-migrate-fa-spinner-for-notifications_dropdown-js.yml b/changelogs/unreleased/202271-migrate-fa-spinner-for-notifications_dropdown-js.yml
new file mode 100644
index 00000000000..9dbfcd33188
--- /dev/null
+++ b/changelogs/unreleased/202271-migrate-fa-spinner-for-notifications_dropdown-js.yml
@@ -0,0 +1,5 @@
+---
+title: Migrate fa spinner for notification_dropdown.js
+merge_request: 25141
+author: minghuan
+type: other
diff --git a/changelogs/unreleased/202274-migrate-fa-spinner-to-spinner.yml b/changelogs/unreleased/202274-migrate-fa-spinner-to-spinner.yml
new file mode 100644
index 00000000000..a6a5f0c4a96
--- /dev/null
+++ b/changelogs/unreleased/202274-migrate-fa-spinner-to-spinner.yml
@@ -0,0 +1,5 @@
+---
+title: New loading spinner for attachment uploads via discussion boxes
+merge_request: 25057
+author: Philip Jonas
+type: changed
diff --git a/changelogs/unreleased/202426-editor-lite-theme-preference.yml b/changelogs/unreleased/202426-editor-lite-theme-preference.yml
new file mode 100644
index 00000000000..38e4004657f
--- /dev/null
+++ b/changelogs/unreleased/202426-editor-lite-theme-preference.yml
@@ -0,0 +1,5 @@
+---
+title: In single-file editor set syntax highlighting theme according to user's preference
+merge_request: 26606
+author:
+type: changed
diff --git a/changelogs/unreleased/204723-nomethoderror-undefined-method-term_agreements-for-nil-nilclass.yml b/changelogs/unreleased/204723-nomethoderror-undefined-method-term_agreements-for-nil-nilclass.yml
new file mode 100644
index 00000000000..db99c44945d
--- /dev/null
+++ b/changelogs/unreleased/204723-nomethoderror-undefined-method-term_agreements-for-nil-nilclass.yml
@@ -0,0 +1,5 @@
+---
+title: Require a logged-in user to accept or decline a term
+merge_request: 24771
+author:
+type: fixed
diff --git a/changelogs/unreleased/204774-quick-actions-executed-in-multiline-inline-code.yml b/changelogs/unreleased/204774-quick-actions-executed-in-multiline-inline-code.yml
new file mode 100644
index 00000000000..d626875a47d
--- /dev/null
+++ b/changelogs/unreleased/204774-quick-actions-executed-in-multiline-inline-code.yml
@@ -0,0 +1,5 @@
+---
+title: Fix quick actions executing in multiline inline code when placed on its own line
+merge_request: 24933
+author: Pavlo Dudchenko
+type: fixed
diff --git a/changelogs/unreleased/204801-add-instance-to-services.yml b/changelogs/unreleased/204801-add-instance-to-services.yml
new file mode 100644
index 00000000000..458644d7dd6
--- /dev/null
+++ b/changelogs/unreleased/204801-add-instance-to-services.yml
@@ -0,0 +1,5 @@
+---
+title: Add instance column to services table
+merge_request: 25714
+author:
+type: other
diff --git a/changelogs/unreleased/204858-update-self-monitor-environments.yml b/changelogs/unreleased/204858-update-self-monitor-environments.yml
new file mode 100644
index 00000000000..9c8d3bbc37b
--- /dev/null
+++ b/changelogs/unreleased/204858-update-self-monitor-environments.yml
@@ -0,0 +1,5 @@
+---
+title: Add migration to create self monitoring project environment
+merge_request: 25289
+author:
+type: added
diff --git a/changelogs/unreleased/205184-change-omniauth-log-format-to-json.yml b/changelogs/unreleased/205184-change-omniauth-log-format-to-json.yml
new file mode 100644
index 00000000000..400640bf053
--- /dev/null
+++ b/changelogs/unreleased/205184-change-omniauth-log-format-to-json.yml
@@ -0,0 +1,5 @@
+---
+title: Change OmniAuth log format to JSON
+merge_request: 25086
+author:
+type: other
diff --git a/changelogs/unreleased/205399-add-tooltip-to-file-tree-state.yml b/changelogs/unreleased/205399-add-tooltip-to-file-tree-state.yml
new file mode 100644
index 00000000000..82bffbd80ab
--- /dev/null
+++ b/changelogs/unreleased/205399-add-tooltip-to-file-tree-state.yml
@@ -0,0 +1,5 @@
+---
+title: Add tooltip to modification icon in the file tree
+merge_request: 27158
+author:
+type: other
diff --git a/changelogs/unreleased/205435.yml b/changelogs/unreleased/205435.yml
new file mode 100644
index 00000000000..c004bdf35c3
--- /dev/null
+++ b/changelogs/unreleased/205435.yml
@@ -0,0 +1,5 @@
+---
+title: Clean up conditional `col-` classes in `nav_dropdown_button.vue`
+merge_request: 25312
+author:
+type: other
diff --git a/changelogs/unreleased/205596-empty-state-for-code-review-analytics.yml b/changelogs/unreleased/205596-empty-state-for-code-review-analytics.yml
new file mode 100644
index 00000000000..ef81a166231
--- /dev/null
+++ b/changelogs/unreleased/205596-empty-state-for-code-review-analytics.yml
@@ -0,0 +1,5 @@
+---
+title: Empty state for Code Review Analytics
+merge_request: 25793
+author:
+type: added
diff --git a/changelogs/unreleased/206899-move-system-metrics-chart-group-to-the-top-of-the-default-dashbord.yml b/changelogs/unreleased/206899-move-system-metrics-chart-group-to-the-top-of-the-default-dashbord.yml
new file mode 100644
index 00000000000..afba03b584a
--- /dev/null
+++ b/changelogs/unreleased/206899-move-system-metrics-chart-group-to-the-top-of-the-default-dashbord.yml
@@ -0,0 +1,5 @@
+---
+title: Put System Metrics chart group first in default dashboard
+merge_request: 26355
+author:
+type: other
diff --git a/changelogs/unreleased/207087-blocked-status-issue.yml b/changelogs/unreleased/207087-blocked-status-issue.yml
new file mode 100644
index 00000000000..e708a8a635e
--- /dev/null
+++ b/changelogs/unreleased/207087-blocked-status-issue.yml
@@ -0,0 +1,5 @@
+---
+title: Don't show issue as blocked on the issue board if blocking issue is closed
+merge_request: 25817
+author:
+type: fixed
diff --git a/changelogs/unreleased/207126-more-descriptive-error-messages-in-migration-helpers.yml b/changelogs/unreleased/207126-more-descriptive-error-messages-in-migration-helpers.yml
new file mode 100644
index 00000000000..36ed216a505
--- /dev/null
+++ b/changelogs/unreleased/207126-more-descriptive-error-messages-in-migration-helpers.yml
@@ -0,0 +1,5 @@
+---
+title: Improve error messages of failed migrations
+merge_request: 25457
+author:
+type: changed
diff --git a/changelogs/unreleased/207181-status-page-settings-backend.yml b/changelogs/unreleased/207181-status-page-settings-backend.yml
new file mode 100644
index 00000000000..69758009099
--- /dev/null
+++ b/changelogs/unreleased/207181-status-page-settings-backend.yml
@@ -0,0 +1,5 @@
+---
+title: Create table & setup operations endpoint for Status Page Settings
+merge_request: 25863
+author:
+type: added
diff --git a/changelogs/unreleased/207203-forward-deployment-ui.yml b/changelogs/unreleased/207203-forward-deployment-ui.yml
new file mode 100644
index 00000000000..bd2165beb30
--- /dev/null
+++ b/changelogs/unreleased/207203-forward-deployment-ui.yml
@@ -0,0 +1,5 @@
+---
+title: Added Drop older active deployments project setting
+merge_request: 25520
+author:
+type: added
diff --git a/changelogs/unreleased/207216-lfs-batch-upload-fix.yml b/changelogs/unreleased/207216-lfs-batch-upload-fix.yml
new file mode 100644
index 00000000000..32ddea25ddd
--- /dev/null
+++ b/changelogs/unreleased/207216-lfs-batch-upload-fix.yml
@@ -0,0 +1,5 @@
+---
+title: Mark existing LFS object for upload for forks
+merge_request: 26344
+author:
+type: fixed
diff --git a/changelogs/unreleased/207223-fix-self-monitoring-project.yml b/changelogs/unreleased/207223-fix-self-monitoring-project.yml
new file mode 100644
index 00000000000..3db9eb9977b
--- /dev/null
+++ b/changelogs/unreleased/207223-fix-self-monitoring-project.yml
@@ -0,0 +1,5 @@
+---
+title: Fix self monitoring project link
+merge_request: 25516
+author:
+type: fixed
diff --git a/changelogs/unreleased/207237-snippet-edit-description-vue.yml b/changelogs/unreleased/207237-snippet-edit-description-vue.yml
new file mode 100644
index 00000000000..cc97faf5158
--- /dev/null
+++ b/changelogs/unreleased/207237-snippet-edit-description-vue.yml
@@ -0,0 +1,5 @@
+---
+title: Added Blob Description Edit component in Vue
+merge_request: 26762
+author:
+type: added
diff --git a/changelogs/unreleased/207242-vsibility-level-vue.yml b/changelogs/unreleased/207242-vsibility-level-vue.yml
new file mode 100644
index 00000000000..1b73a41a55b
--- /dev/null
+++ b/changelogs/unreleased/207242-vsibility-level-vue.yml
@@ -0,0 +1,5 @@
+---
+title: Added Edit Visibility Vue component for Snippet
+merge_request: 26799
+author:
+type: added
diff --git a/changelogs/unreleased/207249-prevent-editing-weight-to-scroll-to-the-top.yml b/changelogs/unreleased/207249-prevent-editing-weight-to-scroll-to-the-top.yml
new file mode 100644
index 00000000000..59382ad677e
--- /dev/null
+++ b/changelogs/unreleased/207249-prevent-editing-weight-to-scroll-to-the-top.yml
@@ -0,0 +1,5 @@
+---
+title: Prevent weight editing from scrolling to the top.
+merge_request: 26613
+author: Gilang Gumilar
+type: fixed
diff --git a/changelogs/unreleased/207367-change-link-icons-on-security-configuration-page-to-follow-design-.yml b/changelogs/unreleased/207367-change-link-icons-on-security-configuration-page-to-follow-design-.yml
new file mode 100644
index 00000000000..3cb599d175e
--- /dev/null
+++ b/changelogs/unreleased/207367-change-link-icons-on-security-configuration-page-to-follow-design-.yml
@@ -0,0 +1,5 @@
+---
+title: Change link icons on security configuration page to follow the design system
+merge_request: 26340
+author:
+type: other
diff --git a/changelogs/unreleased/207390-pages-api-case-insensitive-domain-lookup.yml b/changelogs/unreleased/207390-pages-api-case-insensitive-domain-lookup.yml
new file mode 100644
index 00000000000..e2dd0f8b082
--- /dev/null
+++ b/changelogs/unreleased/207390-pages-api-case-insensitive-domain-lookup.yml
@@ -0,0 +1,5 @@
+---
+title: 'Add index on LOWER(domain) for pages_domains'
+merge_request: 25664
+author:
+type: other
diff --git a/changelogs/unreleased/207455-frontend-fix-epic-blform.yml b/changelogs/unreleased/207455-frontend-fix-epic-blform.yml
new file mode 100644
index 00000000000..3b4ff6dda36
--- /dev/null
+++ b/changelogs/unreleased/207455-frontend-fix-epic-blform.yml
@@ -0,0 +1,5 @@
+---
+title: Fix "Add an epic" form
+merge_request: 26003
+author:
+type: fixed
diff --git a/changelogs/unreleased/207462-scoped-labels-rendering-is-broken-in-todos.yml b/changelogs/unreleased/207462-scoped-labels-rendering-is-broken-in-todos.yml
new file mode 100644
index 00000000000..d365e4cff58
--- /dev/null
+++ b/changelogs/unreleased/207462-scoped-labels-rendering-is-broken-in-todos.yml
@@ -0,0 +1,5 @@
+---
+title: Fix scoped labels rendering in To-Do List
+merge_request: 26146
+author:
+type: fixed
diff --git a/changelogs/unreleased/207464-prevent-unauthorized-user-to-lock-an-issue-when-the-sidebar-is-col.yml b/changelogs/unreleased/207464-prevent-unauthorized-user-to-lock-an-issue-when-the-sidebar-is-col.yml
new file mode 100644
index 00000000000..895fd7f95de
--- /dev/null
+++ b/changelogs/unreleased/207464-prevent-unauthorized-user-to-lock-an-issue-when-the-sidebar-is-col.yml
@@ -0,0 +1,5 @@
+---
+title: Prevent unauthorized users from locking an issue from the collapsed sidebar.
+merge_request: 26324
+author: Gilang Gumilar
+type: fixed
diff --git a/changelogs/unreleased/207468-note-confidential-attribute.yml b/changelogs/unreleased/207468-note-confidential-attribute.yml
new file mode 100644
index 00000000000..f3161aeee74
--- /dev/null
+++ b/changelogs/unreleased/207468-note-confidential-attribute.yml
@@ -0,0 +1,5 @@
+---
+title: Add confidential attribute to notes table
+merge_request:
+author:
+type: other
diff --git a/changelogs/unreleased/207536-retried-jobs-are-not-able-to-find-knapsack-report.yml b/changelogs/unreleased/207536-retried-jobs-are-not-able-to-find-knapsack-report.yml
new file mode 100644
index 00000000000..7c40b21769c
--- /dev/null
+++ b/changelogs/unreleased/207536-retried-jobs-are-not-able-to-find-knapsack-report.yml
@@ -0,0 +1,5 @@
+---
+title: Keep needs association on the retried build
+merge_request: 25888
+author:
+type: fixed
diff --git a/changelogs/unreleased/207623-fix-code-search-pagination.yml b/changelogs/unreleased/207623-fix-code-search-pagination.yml
new file mode 100644
index 00000000000..7cb90f0bef4
--- /dev/null
+++ b/changelogs/unreleased/207623-fix-code-search-pagination.yml
@@ -0,0 +1,5 @@
+---
+title: Fix code search pagination on a custom branch
+merge_request: 25984
+author:
+type: fixed
diff --git a/changelogs/unreleased/207808-geo-bug-filedownloaddispatchworker-may-sometimes-excessively-resyn.yml b/changelogs/unreleased/207808-geo-bug-filedownloaddispatchworker-may-sometimes-excessively-resyn.yml
new file mode 100644
index 00000000000..37e73e7b3cc
--- /dev/null
+++ b/changelogs/unreleased/207808-geo-bug-filedownloaddispatchworker-may-sometimes-excessively-resyn.yml
@@ -0,0 +1,5 @@
+---
+title: 'Use uncached SQL queries for Geo long-running workers'
+merge_request: 26187
+author:
+type: fixed
diff --git a/changelogs/unreleased/207927-validate-actor-user-against-codeowners.yml b/changelogs/unreleased/207927-validate-actor-user-against-codeowners.yml
new file mode 100644
index 00000000000..cbdfaa48c44
--- /dev/null
+++ b/changelogs/unreleased/207927-validate-actor-user-against-codeowners.yml
@@ -0,0 +1,5 @@
+---
+title: Validate actor against CODEOWNERS entries
+merge_request:
+author:
+type: fixed
diff --git a/changelogs/unreleased/207962-deploy-ecs.yml b/changelogs/unreleased/207962-deploy-ecs.yml
new file mode 100644
index 00000000000..d7bc7f5ed01
--- /dev/null
+++ b/changelogs/unreleased/207962-deploy-ecs.yml
@@ -0,0 +1,5 @@
+---
+title: Add CI template to deploy to ECS
+merge_request: 26371
+author:
+type: added
diff --git a/changelogs/unreleased/207976-stop-markdown-caching-of-non-markdown-snippet-content.yml b/changelogs/unreleased/207976-stop-markdown-caching-of-non-markdown-snippet-content.yml
new file mode 100644
index 00000000000..dd249140092
--- /dev/null
+++ b/changelogs/unreleased/207976-stop-markdown-caching-of-non-markdown-snippet-content.yml
@@ -0,0 +1,5 @@
+---
+title: Fix Snippet content being incorrectly cached
+merge_request: 25985
+author:
+type: fixed
diff --git a/changelogs/unreleased/208128-add-external-pull-request-to-existing-object-relations.yml b/changelogs/unreleased/208128-add-external-pull-request-to-existing-object-relations.yml
new file mode 100644
index 00000000000..87178569f95
--- /dev/null
+++ b/changelogs/unreleased/208128-add-external-pull-request-to-existing-object-relations.yml
@@ -0,0 +1,5 @@
+---
+title: Fix ImportFailure when restoring the ci_pipelines:external_pull_request relation
+merge_request: 26041
+author:
+type: fixed
diff --git a/changelogs/unreleased/208151-code-review-analytics-shows-no-data-for-mrs-in-review-for-less-tha.yml b/changelogs/unreleased/208151-code-review-analytics-shows-no-data-for-mrs-in-review-for-less-tha.yml
new file mode 100644
index 00000000000..a891fe33277
--- /dev/null
+++ b/changelogs/unreleased/208151-code-review-analytics-shows-no-data-for-mrs-in-review-for-less-tha.yml
@@ -0,0 +1,5 @@
+---
+title: 'Code Review Analytics: Fix review time display'
+merge_request: 26057
+author:
+type: fixed
diff --git a/changelogs/unreleased/208153-add-anchor-to-related-issues-and-related-merge-requests.yml b/changelogs/unreleased/208153-add-anchor-to-related-issues-and-related-merge-requests.yml
new file mode 100644
index 00000000000..f4f13f58cee
--- /dev/null
+++ b/changelogs/unreleased/208153-add-anchor-to-related-issues-and-related-merge-requests.yml
@@ -0,0 +1,5 @@
+---
+title: Add anchor tags to related issues and related merge requests.
+merge_request: 26756
+author: Gilang Gumilar
+type: added
diff --git a/changelogs/unreleased/208167-bigfix-unable-to-fork-project-to-the-same-namespace.yml b/changelogs/unreleased/208167-bigfix-unable-to-fork-project-to-the-same-namespace.yml
new file mode 100644
index 00000000000..8685c8ca1f3
--- /dev/null
+++ b/changelogs/unreleased/208167-bigfix-unable-to-fork-project-to-the-same-namespace.yml
@@ -0,0 +1,5 @@
+---
+title: Allow forking to the same namespace with a different path via API call
+merge_request: 26062
+author:
+type: fixed
diff --git a/changelogs/unreleased/208242-scoped-label-rendering-in-emails-is-broken.yml b/changelogs/unreleased/208242-scoped-label-rendering-in-emails-is-broken.yml
new file mode 100644
index 00000000000..a2bce06c890
--- /dev/null
+++ b/changelogs/unreleased/208242-scoped-label-rendering-in-emails-is-broken.yml
@@ -0,0 +1,5 @@
+---
+title: Fix scoped labels rendering in emails
+merge_request: 26347
+author:
+type: fixed
diff --git a/changelogs/unreleased/208258-update-documentation-and-common_metrics-yml-to-match-new-y_axis-pr.yml b/changelogs/unreleased/208258-update-documentation-and-common_metrics-yml-to-match-new-y_axis-pr.yml
new file mode 100644
index 00000000000..43f28b85f15
--- /dev/null
+++ b/changelogs/unreleased/208258-update-documentation-and-common_metrics-yml-to-match-new-y_axis-pr.yml
@@ -0,0 +1,5 @@
+---
+title: Update charts documentation and common_metrics.yml to enable data formatting
+merge_request: 26048
+author:
+type: added
diff --git a/changelogs/unreleased/208403-attachment-file-not-found.yml b/changelogs/unreleased/208403-attachment-file-not-found.yml
new file mode 100644
index 00000000000..362a44574a2
--- /dev/null
+++ b/changelogs/unreleased/208403-attachment-file-not-found.yml
@@ -0,0 +1,5 @@
+---
+title: Ensure valid mount point is used by attachments on notes
+merge_request: 26849
+author:
+type: fixed
diff --git a/changelogs/unreleased/208453-add-title-to-analytics-sidebar-menus.yml b/changelogs/unreleased/208453-add-title-to-analytics-sidebar-menus.yml
new file mode 100644
index 00000000000..81367aece8e
--- /dev/null
+++ b/changelogs/unreleased/208453-add-title-to-analytics-sidebar-menus.yml
@@ -0,0 +1,5 @@
+---
+title: Add title to Analytics sidebar menus
+merge_request: 26265
+author:
+type: added
diff --git a/changelogs/unreleased/208455-remove-analytics-suffixes-from-analytics-sidebar-menu-items.yml b/changelogs/unreleased/208455-remove-analytics-suffixes-from-analytics-sidebar-menu-items.yml
new file mode 100644
index 00000000000..836eea0d23b
--- /dev/null
+++ b/changelogs/unreleased/208455-remove-analytics-suffixes-from-analytics-sidebar-menu-items.yml
@@ -0,0 +1,5 @@
+---
+title: Remove "Analytics" suffix from the sidebar menu items
+merge_request: 26415
+author:
+type: removed
diff --git a/changelogs/unreleased/208471-actionview-template-error-undefined-method-concat-for-nil-nilclass.yml b/changelogs/unreleased/208471-actionview-template-error-undefined-method-concat-for-nil-nilclass.yml
new file mode 100644
index 00000000000..3a4eb8596d8
--- /dev/null
+++ b/changelogs/unreleased/208471-actionview-template-error-undefined-method-concat-for-nil-nilclass.yml
@@ -0,0 +1,5 @@
+---
+title: Fix an error with concat method
+merge_request:
+author:
+type: fixed
diff --git a/changelogs/unreleased/208473-fix-pipeline-tab-url.yml b/changelogs/unreleased/208473-fix-pipeline-tab-url.yml
new file mode 100644
index 00000000000..70451260a6f
--- /dev/null
+++ b/changelogs/unreleased/208473-fix-pipeline-tab-url.yml
@@ -0,0 +1,5 @@
+---
+title: Generate proper link for Pipeline tab
+merge_request: 26193
+author:
+type: fixed
diff --git a/changelogs/unreleased/208479-requests-for-svgs-returning-404-in-issues-analytics-feature.yml b/changelogs/unreleased/208479-requests-for-svgs-returning-404-in-issues-analytics-feature.yml
new file mode 100644
index 00000000000..913569985b8
--- /dev/null
+++ b/changelogs/unreleased/208479-requests-for-svgs-returning-404-in-issues-analytics-feature.yml
@@ -0,0 +1,5 @@
+---
+title: 'Issue Analytics: Fix svg illustration path for empty state'
+merge_request: 26219
+author:
+type: fixed
diff --git a/changelogs/unreleased/208524-error-in-custom-dashboard-yml-file-breaks-the-dashboards-dropdown.yml b/changelogs/unreleased/208524-error-in-custom-dashboard-yml-file-breaks-the-dashboards-dropdown.yml
new file mode 100644
index 00000000000..971765fc88f
--- /dev/null
+++ b/changelogs/unreleased/208524-error-in-custom-dashboard-yml-file-breaks-the-dashboards-dropdown.yml
@@ -0,0 +1,5 @@
+---
+title: Fix dashboards dropdown if custom dashboard is broken
+merge_request: 26228
+author:
+type: fixed
diff --git a/changelogs/unreleased/208674-use-wh-acceleration-for-ui-project-import.yml b/changelogs/unreleased/208674-use-wh-acceleration-for-ui-project-import.yml
new file mode 100644
index 00000000000..118e7efa2fb
--- /dev/null
+++ b/changelogs/unreleased/208674-use-wh-acceleration-for-ui-project-import.yml
@@ -0,0 +1,5 @@
+---
+title: Use Workhorse acceleration for Project Import file upload via UI
+merge_request: 26278
+author:
+type: performance
diff --git a/changelogs/unreleased/208675-add-package_name-as-option-to-packages-api.yml b/changelogs/unreleased/208675-add-package_name-as-option-to-packages-api.yml
new file mode 100644
index 00000000000..3f7a0e3f62d
--- /dev/null
+++ b/changelogs/unreleased/208675-add-package_name-as-option-to-packages-api.yml
@@ -0,0 +1,5 @@
+---
+title: Added package_name as filter parameter to packages API
+merge_request: 26291
+author:
+type: added
diff --git a/changelogs/unreleased/208678-packages-project-and-group-api-will-return-processing-nuget-packag.yml b/changelogs/unreleased/208678-packages-project-and-group-api-will-return-processing-nuget-packag.yml
new file mode 100644
index 00000000000..0596f0e4b85
--- /dev/null
+++ b/changelogs/unreleased/208678-packages-project-and-group-api-will-return-processing-nuget-packag.yml
@@ -0,0 +1,5 @@
+---
+title: Fixed bug where processing NuGet packages are returned from the Packages API
+merge_request: 26270
+author:
+type: fixed
diff --git a/changelogs/unreleased/208788-fix-avg_cycle_analytics-giving-an-uncaught-error.yml b/changelogs/unreleased/208788-fix-avg_cycle_analytics-giving-an-uncaught-error.yml
new file mode 100644
index 00000000000..60d88989880
--- /dev/null
+++ b/changelogs/unreleased/208788-fix-avg_cycle_analytics-giving-an-uncaught-error.yml
@@ -0,0 +1,5 @@
+---
+title: Fix avg_cycle_analytics uncaught error and optimize query
+merge_request: 26381
+author:
+type: fixed
diff --git a/changelogs/unreleased/208798-replace-instances-of-the-issue-duplicate-icon-with-the-duplicate-i.yml b/changelogs/unreleased/208798-replace-instances-of-the-issue-duplicate-icon-with-the-duplicate-i.yml
new file mode 100644
index 00000000000..9cbee38da7d
--- /dev/null
+++ b/changelogs/unreleased/208798-replace-instances-of-the-issue-duplicate-icon-with-the-duplicate-i.yml
@@ -0,0 +1,5 @@
+---
+title: Replace issue-duplicate icon with duplicate icon
+merge_request:
+author:
+type: other
diff --git a/changelogs/unreleased/208827-replace-issue-external-icon-with-external-link.yml b/changelogs/unreleased/208827-replace-issue-external-icon-with-external-link.yml
new file mode 100644
index 00000000000..a929facdbbc
--- /dev/null
+++ b/changelogs/unreleased/208827-replace-issue-external-icon-with-external-link.yml
@@ -0,0 +1,5 @@
+---
+title: Replace issue-external icon with external-link
+merge_request: 208827
+author:
+type: other
diff --git a/changelogs/unreleased/208830-conan-package-reference-fix.yml b/changelogs/unreleased/208830-conan-package-reference-fix.yml
new file mode 100644
index 00000000000..9318b5d71f1
--- /dev/null
+++ b/changelogs/unreleased/208830-conan-package-reference-fix.yml
@@ -0,0 +1,5 @@
+---
+title: Fix package file finder for conan packages with a conan_package_reference filter
+merge_request: 26240
+author:
+type: fixed
diff --git a/changelogs/unreleased/208830-download-urls-conan-reference.yml b/changelogs/unreleased/208830-download-urls-conan-reference.yml
new file mode 100644
index 00000000000..6b9732d514c
--- /dev/null
+++ b/changelogs/unreleased/208830-download-urls-conan-reference.yml
@@ -0,0 +1,6 @@
+---
+title: Fix Conan package download_urls and snapshot to return files based on requested
+ conan_package_reference
+merge_request: 27250
+author:
+type: fixed
diff --git a/changelogs/unreleased/208885-optimize-ci_pipeline-counters-related-to-the-ci-pipeline.yml b/changelogs/unreleased/208885-optimize-ci_pipeline-counters-related-to-the-ci-pipeline.yml
new file mode 100644
index 00000000000..b08b4dbbd27
--- /dev/null
+++ b/changelogs/unreleased/208885-optimize-ci_pipeline-counters-related-to-the-ci-pipeline.yml
@@ -0,0 +1,5 @@
+---
+title: Optimize ci_pipelines counters in usage data
+merge_request: 26774
+author:
+type: performance
diff --git a/changelogs/unreleased/208886-optimize-deployment-counters-related-to-the-deployment.yml b/changelogs/unreleased/208886-optimize-deployment-counters-related-to-the-deployment.yml
new file mode 100644
index 00000000000..d8e0eca9075
--- /dev/null
+++ b/changelogs/unreleased/208886-optimize-deployment-counters-related-to-the-deployment.yml
@@ -0,0 +1,5 @@
+---
+title: Optimize Deployment related counters
+merge_request: 26757
+author:
+type: performance
diff --git a/changelogs/unreleased/208887-optimize-project-counters-mirrored-pipelines.yml b/changelogs/unreleased/208887-optimize-project-counters-mirrored-pipelines.yml
new file mode 100644
index 00000000000..7662291ea64
--- /dev/null
+++ b/changelogs/unreleased/208887-optimize-project-counters-mirrored-pipelines.yml
@@ -0,0 +1,5 @@
+---
+title: Optimize Project counters with pipelines enabled counter
+merge_request: 26802
+author:
+type: performance
diff --git a/changelogs/unreleased/208887-optimize-project-counters-projects_with_repositories_enabled.yml b/changelogs/unreleased/208887-optimize-project-counters-projects_with_repositories_enabled.yml
new file mode 100644
index 00000000000..e2014464d8c
--- /dev/null
+++ b/changelogs/unreleased/208887-optimize-project-counters-projects_with_repositories_enabled.yml
@@ -0,0 +1,5 @@
+---
+title: Optimize Project counters with repository enabled counter
+merge_request: 26698
+author:
+type: performance
diff --git a/changelogs/unreleased/208887-optimize-project-counters-service-desk.yml b/changelogs/unreleased/208887-optimize-project-counters-service-desk.yml
new file mode 100644
index 00000000000..5c422ecb959
--- /dev/null
+++ b/changelogs/unreleased/208887-optimize-project-counters-service-desk.yml
@@ -0,0 +1,5 @@
+---
+title: Optimize Project related count with service desk enabled
+merge_request: 27115
+author:
+type: performance
diff --git a/changelogs/unreleased/208887-optimize-project-counters-with-slack-service.yml b/changelogs/unreleased/208887-optimize-project-counters-with-slack-service.yml
new file mode 100644
index 00000000000..e63461764b0
--- /dev/null
+++ b/changelogs/unreleased/208887-optimize-project-counters-with-slack-service.yml
@@ -0,0 +1,5 @@
+---
+title: Optimize Project related count with Slack service
+merge_request: 26686
+author:
+type: performance
diff --git a/changelogs/unreleased/208889-optimize-event-counters.yml b/changelogs/unreleased/208889-optimize-event-counters.yml
new file mode 100644
index 00000000000..db97c395aff
--- /dev/null
+++ b/changelogs/unreleased/208889-optimize-event-counters.yml
@@ -0,0 +1,5 @@
+---
+title: Optimize event counters query performance in usage data
+merge_request: 26444
+author:
+type: performance
diff --git a/changelogs/unreleased/208890-optimize-notes-counters.yml b/changelogs/unreleased/208890-optimize-notes-counters.yml
new file mode 100644
index 00000000000..ac1c1359ddf
--- /dev/null
+++ b/changelogs/unreleased/208890-optimize-notes-counters.yml
@@ -0,0 +1,5 @@
+---
+title: Optimize notes counters in usage data
+merge_request: 26871
+author:
+type: performance
diff --git a/changelogs/unreleased/208891-optimize-todos-counters.yml b/changelogs/unreleased/208891-optimize-todos-counters.yml
new file mode 100644
index 00000000000..85f1a123f78
--- /dev/null
+++ b/changelogs/unreleased/208891-optimize-todos-counters.yml
@@ -0,0 +1,5 @@
+---
+title: Optimize todos counters in usage data
+merge_request: 26442
+author:
+type: performance
diff --git a/changelogs/unreleased/208914-crud-for-instance-level-integrations.yml b/changelogs/unreleased/208914-crud-for-instance-level-integrations.yml
new file mode 100644
index 00000000000..94311b14adb
--- /dev/null
+++ b/changelogs/unreleased/208914-crud-for-instance-level-integrations.yml
@@ -0,0 +1,5 @@
+---
+title: Add CRUD for Instance-Level Integrations
+merge_request: 26454
+author:
+type: added
diff --git a/changelogs/unreleased/208923-enable-batch-counting-for-some-individual-queries.yml b/changelogs/unreleased/208923-enable-batch-counting-for-some-individual-queries.yml
new file mode 100644
index 00000000000..891ec3c0b58
--- /dev/null
+++ b/changelogs/unreleased/208923-enable-batch-counting-for-some-individual-queries.yml
@@ -0,0 +1,5 @@
+---
+title: Optimize clusters counters query performance in usage data
+merge_request: 26887
+author:
+type: performance
diff --git a/changelogs/unreleased/208936.yml b/changelogs/unreleased/208936.yml
new file mode 100644
index 00000000000..ce561fd6027
--- /dev/null
+++ b/changelogs/unreleased/208936.yml
@@ -0,0 +1,5 @@
+---
+title: Update table layout for error tracking list on medium viewports
+merge_request: 26479
+author:
+type: other
diff --git a/changelogs/unreleased/209002-change-evidence-sha-clipboard-button-hover-text.yml b/changelogs/unreleased/209002-change-evidence-sha-clipboard-button-hover-text.yml
new file mode 100644
index 00000000000..42cda8ff0c2
--- /dev/null
+++ b/changelogs/unreleased/209002-change-evidence-sha-clipboard-button-hover-text.yml
@@ -0,0 +1,5 @@
+---
+title: Fix evidence SHA clipboard hover text.
+merge_request: 26608
+author: Gilang Gumilar
+type: fixed
diff --git a/changelogs/unreleased/209207-spinner-appears-to-be-broken.yml b/changelogs/unreleased/209207-spinner-appears-to-be-broken.yml
new file mode 100644
index 00000000000..0b1ae5d1838
--- /dev/null
+++ b/changelogs/unreleased/209207-spinner-appears-to-be-broken.yml
@@ -0,0 +1,5 @@
+---
+title: Fix spinner in Create MR dropdown
+merge_request: 26679
+author:
+type: fixed
diff --git a/changelogs/unreleased/209277-introduce-a-feature-flag-for-resolve-notifications-for-when-pipeli.yml b/changelogs/unreleased/209277-introduce-a-feature-flag-for-resolve-notifications-for-when-pipeli.yml
new file mode 100644
index 00000000000..8ae56e59371
--- /dev/null
+++ b/changelogs/unreleased/209277-introduce-a-feature-flag-for-resolve-notifications-for-when-pipeli.yml
@@ -0,0 +1,5 @@
+---
+title: Introduce a feature flag for Notifications for when pipelines are fixed
+merge_request: 26682
+author: Jacopo Beschi @jacopo-beschi
+type: changed
diff --git a/changelogs/unreleased/209761-fix-wiki-directories-with-hyphens.yml b/changelogs/unreleased/209761-fix-wiki-directories-with-hyphens.yml
new file mode 100644
index 00000000000..3a18143ba7a
--- /dev/null
+++ b/changelogs/unreleased/209761-fix-wiki-directories-with-hyphens.yml
@@ -0,0 +1,5 @@
+---
+title: Fix WikiPage#title_changed for paths with spaces
+merge_request: 27087
+author:
+type: fixed
diff --git a/changelogs/unreleased/209783-follow-up-from-resolve-notifications-for-when-pipelines-are-fixed.yml b/changelogs/unreleased/209783-follow-up-from-resolve-notifications-for-when-pipelines-are-fixed.yml
new file mode 100644
index 00000000000..dcf162adc85
--- /dev/null
+++ b/changelogs/unreleased/209783-follow-up-from-resolve-notifications-for-when-pipelines-are-fixed.yml
@@ -0,0 +1,5 @@
+---
+title: Correctly send notification on pipeline retry
+merge_request: 26803
+author: Jacopo Beschi @jacopo-beschi
+type: fixed
diff --git a/changelogs/unreleased/210007-optimize-services_usage-counters.yml b/changelogs/unreleased/210007-optimize-services_usage-counters.yml
new file mode 100644
index 00000000000..8b2dc2764ac
--- /dev/null
+++ b/changelogs/unreleased/210007-optimize-services_usage-counters.yml
@@ -0,0 +1,5 @@
+---
+title: Optimize services usage counters using batch counters
+merge_request: 26973
+author:
+type: performance
diff --git a/changelogs/unreleased/210008-the-same-chart-appears-twice-for-different-embeds.yml b/changelogs/unreleased/210008-the-same-chart-appears-twice-for-different-embeds.yml
new file mode 100644
index 00000000000..20b74415c22
--- /dev/null
+++ b/changelogs/unreleased/210008-the-same-chart-appears-twice-for-different-embeds.yml
@@ -0,0 +1,5 @@
+---
+title: Fix embeds so that a chart appears only once
+merge_request: 26997
+author:
+type: fixed
diff --git a/changelogs/unreleased/210051-optimize-or-remove-ldap_users-counter.yml b/changelogs/unreleased/210051-optimize-or-remove-ldap_users-counter.yml
new file mode 100644
index 00000000000..5f9a93c8d8d
--- /dev/null
+++ b/changelogs/unreleased/210051-optimize-or-remove-ldap_users-counter.yml
@@ -0,0 +1,5 @@
+---
+title: Optimize members counters query performance in usage data
+merge_request: 27197
+author:
+type: performance
diff --git a/changelogs/unreleased/210332-approximate-counters-are-not-working-on-gitlab-com.yml b/changelogs/unreleased/210332-approximate-counters-are-not-working-on-gitlab-com.yml
new file mode 100644
index 00000000000..e7b614173ec
--- /dev/null
+++ b/changelogs/unreleased/210332-approximate-counters-are-not-working-on-gitlab-com.yml
@@ -0,0 +1,5 @@
+---
+title: Use batch counters instead of approximate counters in usage data
+merge_request: 27218
+author:
+type: performance
diff --git a/changelogs/unreleased/210335-fix-handling-of-numeric-emoji-names.yml b/changelogs/unreleased/210335-fix-handling-of-numeric-emoji-names.yml
new file mode 100644
index 00000000000..5686fb2a7a4
--- /dev/null
+++ b/changelogs/unreleased/210335-fix-handling-of-numeric-emoji-names.yml
@@ -0,0 +1,5 @@
+---
+title: Fix backend validation of numeric emoji names
+merge_request: 27101
+author:
+type: fixed
diff --git a/changelogs/unreleased/21765-group-token-refactor.yml b/changelogs/unreleased/21765-group-token-refactor.yml
new file mode 100644
index 00000000000..3a93d957c86
--- /dev/null
+++ b/changelogs/unreleased/21765-group-token-refactor.yml
@@ -0,0 +1,5 @@
+---
+title: Addition of the Group Deploy Token interface
+merge_request: 24102
+author:
+type: added
diff --git a/changelogs/unreleased/21811-group-create-deploy-tokens.yml b/changelogs/unreleased/21811-group-create-deploy-tokens.yml
new file mode 100644
index 00000000000..74400d73166
--- /dev/null
+++ b/changelogs/unreleased/21811-group-create-deploy-tokens.yml
@@ -0,0 +1,5 @@
+---
+title: Add API endpoint for creating group deploy tokens
+merge_request: 25629
+author:
+type: added
diff --git a/changelogs/unreleased/21811-group-delete-deploy-token.yml b/changelogs/unreleased/21811-group-delete-deploy-token.yml
new file mode 100644
index 00000000000..9f359c7edcc
--- /dev/null
+++ b/changelogs/unreleased/21811-group-delete-deploy-token.yml
@@ -0,0 +1,5 @@
+---
+title: Add API endpoint for deleting group deploy tokens
+merge_request: 25222
+author:
+type: added
diff --git a/changelogs/unreleased/21811-group-list-deploy-tokens.yml b/changelogs/unreleased/21811-group-list-deploy-tokens.yml
new file mode 100644
index 00000000000..c2cdc65fa53
--- /dev/null
+++ b/changelogs/unreleased/21811-group-list-deploy-tokens.yml
@@ -0,0 +1,5 @@
+---
+title: Add API endpoint for listing deploy tokens for a group
+merge_request: 25219
+author:
+type: added
diff --git a/changelogs/unreleased/21811-instance-deploy-tokens.yml b/changelogs/unreleased/21811-instance-deploy-tokens.yml
new file mode 100644
index 00000000000..b406bdacc76
--- /dev/null
+++ b/changelogs/unreleased/21811-instance-deploy-tokens.yml
@@ -0,0 +1,5 @@
+---
+title: Add deploy tokens instance API endpoint
+merge_request: 25066
+author:
+type: added
diff --git a/changelogs/unreleased/21811-project-create-deploy-tokens.yml b/changelogs/unreleased/21811-project-create-deploy-tokens.yml
new file mode 100644
index 00000000000..6194efc3838
--- /dev/null
+++ b/changelogs/unreleased/21811-project-create-deploy-tokens.yml
@@ -0,0 +1,5 @@
+---
+title: Add API endpoint to create deploy tokens
+merge_request: 25270
+author:
+type: added
diff --git a/changelogs/unreleased/21811-project-delete-deploy-token.yml b/changelogs/unreleased/21811-project-delete-deploy-token.yml
new file mode 100644
index 00000000000..18a41be8896
--- /dev/null
+++ b/changelogs/unreleased/21811-project-delete-deploy-token.yml
@@ -0,0 +1,5 @@
+---
+title: Add API endpoint for deleting project deploy tokens
+merge_request: 25220
+author:
+type: added
diff --git a/changelogs/unreleased/21811-project-list-deploy-tokens.yml b/changelogs/unreleased/21811-project-list-deploy-tokens.yml
new file mode 100644
index 00000000000..ead28fe7595
--- /dev/null
+++ b/changelogs/unreleased/21811-project-list-deploy-tokens.yml
@@ -0,0 +1,5 @@
+---
+title: Add endpoint for listing all deploy tokens for a project
+merge_request: 25186
+author:
+type: added
diff --git a/changelogs/unreleased/22103-make-code-tags-consistent-in-discussions.yml b/changelogs/unreleased/22103-make-code-tags-consistent-in-discussions.yml
new file mode 100644
index 00000000000..42e02621378
--- /dev/null
+++ b/changelogs/unreleased/22103-make-code-tags-consistent-in-discussions.yml
@@ -0,0 +1,5 @@
+---
+title: Deemphasized styles for inline code blocks
+merge_request:
+author:
+type: changed
diff --git a/changelogs/unreleased/24072-user-profile-add-job-title.yml b/changelogs/unreleased/24072-user-profile-add-job-title.yml
new file mode 100644
index 00000000000..0e77bd197fb
--- /dev/null
+++ b/changelogs/unreleased/24072-user-profile-add-job-title.yml
@@ -0,0 +1,5 @@
+---
+title: Add "Job Title" field in user settings and display on profile
+merge_request: 25155
+author:
+type: added
diff --git a/changelogs/unreleased/24083-tableflip-quick-action-is-interpreted-even-if-inside-code-block.yml b/changelogs/unreleased/24083-tableflip-quick-action-is-interpreted-even-if-inside-code-block.yml
new file mode 100644
index 00000000000..f63390550b2
--- /dev/null
+++ b/changelogs/unreleased/24083-tableflip-quick-action-is-interpreted-even-if-inside-code-block.yml
@@ -0,0 +1,5 @@
+---
+title: 'Fix: tableflip quick action is interpreted even if inside code block'
+merge_request:
+author: Pavlo Dudchenko
+type: fixed
diff --git a/changelogs/unreleased/24309-notifications-for-when-pipelines-are-fixed.yml b/changelogs/unreleased/24309-notifications-for-when-pipelines-are-fixed.yml
new file mode 100644
index 00000000000..fc2d1ecbef1
--- /dev/null
+++ b/changelogs/unreleased/24309-notifications-for-when-pipelines-are-fixed.yml
@@ -0,0 +1,5 @@
+---
+title: Notifications for when pipelines are fixed
+merge_request: 16951
+author: Jacopo Beschi @jacopo-beschi
+type: added
diff --git a/changelogs/unreleased/25095-remove-gitlab-shell-indirection-for-authorized-keys.yml b/changelogs/unreleased/25095-remove-gitlab-shell-indirection-for-authorized-keys.yml
new file mode 100644
index 00000000000..4c2c74d4bf1
--- /dev/null
+++ b/changelogs/unreleased/25095-remove-gitlab-shell-indirection-for-authorized-keys.yml
@@ -0,0 +1,5 @@
+---
+title: Move authorized_keys operations into their own Sidekiq queue
+merge_request: 26913
+author:
+type: changed
diff --git a/changelogs/unreleased/25283-add-masked-param-group-vars-api.yml b/changelogs/unreleased/25283-add-masked-param-group-vars-api.yml
new file mode 100644
index 00000000000..01abb31dd85
--- /dev/null
+++ b/changelogs/unreleased/25283-add-masked-param-group-vars-api.yml
@@ -0,0 +1,5 @@
+---
+title: "Allow to create masked variable from group variables API"
+merge_request: 25283
+author: Emmanuel CARRE
+type: added
diff --git a/changelogs/unreleased/25334-update-rouge.yml b/changelogs/unreleased/25334-update-rouge.yml
new file mode 100644
index 00000000000..cfe688ab1ee
--- /dev/null
+++ b/changelogs/unreleased/25334-update-rouge.yml
@@ -0,0 +1,5 @@
+---
+title: Update rouge to v3.16.0
+merge_request: 25334
+author: Konrad Borowski
+type: other
diff --git a/changelogs/unreleased/25351-add-buttons.yml b/changelogs/unreleased/25351-add-buttons.yml
new file mode 100644
index 00000000000..3a324b41c45
--- /dev/null
+++ b/changelogs/unreleased/25351-add-buttons.yml
@@ -0,0 +1,5 @@
+---
+title: Add deploy and re-deploy buttons to deployments
+merge_request: 25427
+author:
+type: added
diff --git a/changelogs/unreleased/25550-there-is-a-drag-and-drop-bug-in-boards.yml b/changelogs/unreleased/25550-there-is-a-drag-and-drop-bug-in-boards.yml
new file mode 100644
index 00000000000..e4777065f07
--- /dev/null
+++ b/changelogs/unreleased/25550-there-is-a-drag-and-drop-bug-in-boards.yml
@@ -0,0 +1,5 @@
+---
+title: Resolve the disappearance of an issue when it is moved from the closed list
+merge_request:
+author: Gwen_
+type: fixed
diff --git a/changelogs/unreleased/25744-optional-custom-icon-in-omniauth-login-label.yml b/changelogs/unreleased/25744-optional-custom-icon-in-omniauth-login-label.yml
new file mode 100644
index 00000000000..c82c5e0407c
--- /dev/null
+++ b/changelogs/unreleased/25744-optional-custom-icon-in-omniauth-login-label.yml
@@ -0,0 +1,5 @@
+---
+title: Optional custom icon in the OmniAuth login labels
+merge_request: 25744
+author: Tobias Wawryniuk, Luca Leonardo Scorcia
+type: added
diff --git a/changelogs/unreleased/25838-include-full-upload-url-in-api-response.yml b/changelogs/unreleased/25838-include-full-upload-url-in-api-response.yml
new file mode 100644
index 00000000000..879273efefa
--- /dev/null
+++ b/changelogs/unreleased/25838-include-full-upload-url-in-api-response.yml
@@ -0,0 +1,5 @@
+---
+title: Include full path to an upload in API response
+merge_request: 23500
+author: briankabiro
+type: other
diff --git a/changelogs/unreleased/25995-default-relative-links-to-blobs.yml b/changelogs/unreleased/25995-default-relative-links-to-blobs.yml
new file mode 100644
index 00000000000..dea0ac22ee3
--- /dev/null
+++ b/changelogs/unreleased/25995-default-relative-links-to-blobs.yml
@@ -0,0 +1,5 @@
+---
+title: Default to generating blob links for missing paths
+merge_request: 26817
+author:
+type: fixed
diff --git a/changelogs/unreleased/26111-fix-github-gist-links.yml b/changelogs/unreleased/26111-fix-github-gist-links.yml
new file mode 100644
index 00000000000..0f6954d2198
--- /dev/null
+++ b/changelogs/unreleased/26111-fix-github-gist-links.yml
@@ -0,0 +1,5 @@
+---
+title: Fix submodule links to gist.github.com
+merge_request: 27346
+author:
+type: fixed
diff --git a/changelogs/unreleased/26113-file-type-issue.yml b/changelogs/unreleased/26113-file-type-issue.yml
new file mode 100644
index 00000000000..a63d34aca99
--- /dev/null
+++ b/changelogs/unreleased/26113-file-type-issue.yml
@@ -0,0 +1,6 @@
+---
+title: Fix issues with non-ASCII plain text files being incorrectly uploaded as binary
+ in the Web IDE
+merge_request: 26360
+author:
+type: fixed
diff --git a/changelogs/unreleased/26556-create-merge-request-button-extends-past-edge-on-mobile.yml b/changelogs/unreleased/26556-create-merge-request-button-extends-past-edge-on-mobile.yml
new file mode 100644
index 00000000000..a69384079dd
--- /dev/null
+++ b/changelogs/unreleased/26556-create-merge-request-button-extends-past-edge-on-mobile.yml
@@ -0,0 +1,6 @@
+---
+title: Prevent "Select project to create merge request" button from overflowing out
+ of the viewport on mobile
+merge_request: 25195
+author:
+type: fixed
diff --git a/changelogs/unreleased/26712-Update-GitLab-codeclimate-to-head.yml b/changelogs/unreleased/26712-Update-GitLab-codeclimate-to-head.yml
new file mode 100644
index 00000000000..8c9aca822d4
--- /dev/null
+++ b/changelogs/unreleased/26712-Update-GitLab-codeclimate-to-head.yml
@@ -0,0 +1,5 @@
+---
+title: Update GitLab's codeclimate to 0.85.9
+merge_request: 26712
+author: Eddie Stubbington
+type: other
diff --git a/changelogs/unreleased/27072-name-regex-allow-bulk-api.yml b/changelogs/unreleased/27072-name-regex-allow-bulk-api.yml
new file mode 100644
index 00000000000..69021920407
--- /dev/null
+++ b/changelogs/unreleased/27072-name-regex-allow-bulk-api.yml
@@ -0,0 +1,5 @@
+---
+title: Add name_regex_keep param to container registry bulk delete API endpoint
+merge_request: 25484
+author:
+type: added
diff --git a/changelogs/unreleased/27144-gitlab-hosted-codesandbox.yml b/changelogs/unreleased/27144-gitlab-hosted-codesandbox.yml
new file mode 100644
index 00000000000..bb7e011f94c
--- /dev/null
+++ b/changelogs/unreleased/27144-gitlab-hosted-codesandbox.yml
@@ -0,0 +1,5 @@
+---
+title: Update Web IDE clientside preview bundler to use GitLab managed server
+merge_request: 21520
+author:
+type: changed
diff --git a/changelogs/unreleased/27227-widget-showing-changed-pages-for-visual-reviews.yml b/changelogs/unreleased/27227-widget-showing-changed-pages-for-visual-reviews.yml
new file mode 100644
index 00000000000..7d2cdcd4bde
--- /dev/null
+++ b/changelogs/unreleased/27227-widget-showing-changed-pages-for-visual-reviews.yml
@@ -0,0 +1,5 @@
+---
+title: Add changed pages dropdown to visual review modal
+merge_request:
+author:
+type: added
diff --git a/changelogs/unreleased/27300-add-filepath-redirect-url.yml b/changelogs/unreleased/27300-add-filepath-redirect-url.yml
new file mode 100644
index 00000000000..31645ba3050
--- /dev/null
+++ b/changelogs/unreleased/27300-add-filepath-redirect-url.yml
@@ -0,0 +1,5 @@
+---
+title: Add filepath redirect URL
+merge_request: 25541
+author:
+type: added
diff --git a/changelogs/unreleased/27300-add-filepath-to-release-links-api.yml b/changelogs/unreleased/27300-add-filepath-to-release-links-api.yml
new file mode 100644
index 00000000000..2616aa2cc5b
--- /dev/null
+++ b/changelogs/unreleased/27300-add-filepath-to-release-links-api.yml
@@ -0,0 +1,5 @@
+---
+title: Add filepath to release links API
+merge_request: 25533
+author:
+type: added
diff --git a/changelogs/unreleased/27300-enable-a-direct-link-to-a-release-and-release-assets-2.yml b/changelogs/unreleased/27300-enable-a-direct-link-to-a-release-and-release-assets-2.yml
new file mode 100644
index 00000000000..3ea5419e3f7
--- /dev/null
+++ b/changelogs/unreleased/27300-enable-a-direct-link-to-a-release-and-release-assets-2.yml
@@ -0,0 +1,5 @@
+---
+title: Add filepath to ReleaseLink
+merge_request: 25512
+author:
+type: added
diff --git a/changelogs/unreleased/27300-expose-filepath-url-on-ui.yml b/changelogs/unreleased/27300-expose-filepath-url-on-ui.yml
new file mode 100644
index 00000000000..2027e27ef81
--- /dev/null
+++ b/changelogs/unreleased/27300-expose-filepath-url-on-ui.yml
@@ -0,0 +1,5 @@
+---
+title: Expose assets filepath URL on UI
+merge_request: 25635
+author:
+type: added
diff --git a/changelogs/unreleased/27880-clearing-release-note-from-the-tags-page-deletes-release.yml b/changelogs/unreleased/27880-clearing-release-note-from-the-tags-page-deletes-release.yml
new file mode 100644
index 00000000000..97d990069da
--- /dev/null
+++ b/changelogs/unreleased/27880-clearing-release-note-from-the-tags-page-deletes-release.yml
@@ -0,0 +1,5 @@
+---
+title: Make release notes optional and do not delete the release when they are removed
+merge_request: 26231
+author: Pavlo Dudchenko
+type: changed
diff --git a/changelogs/unreleased/28085-index-options-tuning.yml b/changelogs/unreleased/28085-index-options-tuning.yml
new file mode 100644
index 00000000000..61f46fbadb1
--- /dev/null
+++ b/changelogs/unreleased/28085-index-options-tuning.yml
@@ -0,0 +1,5 @@
+---
+title: Optimize storage usage for newly created ES indices
+merge_request: 25992
+author:
+type: other
diff --git a/changelogs/unreleased/28560_cleanup_optimistic_locking_db.yml b/changelogs/unreleased/28560_cleanup_optimistic_locking_db.yml
new file mode 100644
index 00000000000..30d1b6ce94d
--- /dev/null
+++ b/changelogs/unreleased/28560_cleanup_optimistic_locking_db.yml
@@ -0,0 +1,5 @@
+---
+title: Set all NULL `lock_version` values to 0 for issuables
+merge_request: 18418
+author:
+type: fixed
diff --git a/changelogs/unreleased/28627-adjust-commit-stats-over-limit-indication.yml b/changelogs/unreleased/28627-adjust-commit-stats-over-limit-indication.yml
new file mode 100644
index 00000000000..08ad9a7c183
--- /dev/null
+++ b/changelogs/unreleased/28627-adjust-commit-stats-over-limit-indication.yml
@@ -0,0 +1,5 @@
+---
+title: Add commits limit text to the graphs page
+merge_request: 24990
+author:
+type: changed
diff --git a/changelogs/unreleased/28725-paginate-lfs-object-import.yml b/changelogs/unreleased/28725-paginate-lfs-object-import.yml
new file mode 100644
index 00000000000..25f59566316
--- /dev/null
+++ b/changelogs/unreleased/28725-paginate-lfs-object-import.yml
@@ -0,0 +1,5 @@
+---
+title: Batch processing of LFS object downloads
+merge_request: 26434
+author:
+type: changed
diff --git a/changelogs/unreleased/31289-show-issue-summary-on-releases-page.yml b/changelogs/unreleased/31289-show-issue-summary-on-releases-page.yml
new file mode 100644
index 00000000000..ff17ee0a3cb
--- /dev/null
+++ b/changelogs/unreleased/31289-show-issue-summary-on-releases-page.yml
@@ -0,0 +1,5 @@
+---
+title: Cache milestone issue counters and make them independent of user permissions
+merge_request: 21554
+author:
+type: performance
diff --git a/changelogs/unreleased/32046-differentiate-between-errors-failures-in-xunit-result.yml b/changelogs/unreleased/32046-differentiate-between-errors-failures-in-xunit-result.yml
new file mode 100644
index 00000000000..632b673f2b9
--- /dev/null
+++ b/changelogs/unreleased/32046-differentiate-between-errors-failures-in-xunit-result.yml
@@ -0,0 +1,5 @@
+---
+title: Differentiate between errors and failures in xUnit result
+merge_request: 23476
+author:
+type: changed
diff --git a/changelogs/unreleased/32882-render-special-references-for-releases.yml b/changelogs/unreleased/32882-render-special-references-for-releases.yml
new file mode 100644
index 00000000000..684625e9941
--- /dev/null
+++ b/changelogs/unreleased/32882-render-special-references-for-releases.yml
@@ -0,0 +1,5 @@
+---
+title: Render special references for releases
+merge_request: 26554
+author:
+type: fixed
diff --git a/changelogs/unreleased/33641-fix_smartcard_param_check_in_user_build.yml b/changelogs/unreleased/33641-fix_smartcard_param_check_in_user_build.yml
new file mode 100644
index 00000000000..eb5e94b4119
--- /dev/null
+++ b/changelogs/unreleased/33641-fix_smartcard_param_check_in_user_build.yml
@@ -0,0 +1,5 @@
+---
+title: Fix user registration when smartcard authentication is enabled
+merge_request: 26800
+author:
+type: fixed
diff --git a/changelogs/unreleased/34086-es-bulk-incremental-index-updates.yml b/changelogs/unreleased/34086-es-bulk-incremental-index-updates.yml
new file mode 100644
index 00000000000..67cceb21af0
--- /dev/null
+++ b/changelogs/unreleased/34086-es-bulk-incremental-index-updates.yml
@@ -0,0 +1,5 @@
+---
+title: 'Add a bulk processor for elasticsearch incremental updates'
+merge_request: 24298
+author:
+type: added
diff --git a/changelogs/unreleased/34420-optimize-pagination-on-explore-snippets.yml b/changelogs/unreleased/34420-optimize-pagination-on-explore-snippets.yml
new file mode 100644
index 00000000000..48748c57a0c
--- /dev/null
+++ b/changelogs/unreleased/34420-optimize-pagination-on-explore-snippets.yml
@@ -0,0 +1,5 @@
+---
+title: Show only the "Next" button on the snippet explore page
+merge_request: 25404
+author:
+type: changed
diff --git a/changelogs/unreleased/34525-update-custom-dashboard.yml b/changelogs/unreleased/34525-update-custom-dashboard.yml
new file mode 100644
index 00000000000..acc03e927b0
--- /dev/null
+++ b/changelogs/unreleased/34525-update-custom-dashboard.yml
@@ -0,0 +1,5 @@
+---
+title: Update file content of an existing custom dashboard
+merge_request: 25024
+author:
+type: added
diff --git a/changelogs/unreleased/35475-add-prometheus-ci-vars.yml b/changelogs/unreleased/35475-add-prometheus-ci-vars.yml
new file mode 100644
index 00000000000..2f6d2f2b331
--- /dev/null
+++ b/changelogs/unreleased/35475-add-prometheus-ci-vars.yml
@@ -0,0 +1,5 @@
+---
+title: Support more query variables in custom dashboards per project
+merge_request: 25732
+author:
+type: added
diff --git a/changelogs/unreleased/36243-introduce-an-optional-expiration-date-for-ssh-keys.yml b/changelogs/unreleased/36243-introduce-an-optional-expiration-date-for-ssh-keys.yml
new file mode 100644
index 00000000000..1a51203095f
--- /dev/null
+++ b/changelogs/unreleased/36243-introduce-an-optional-expiration-date-for-ssh-keys.yml
@@ -0,0 +1,5 @@
+---
+title: Introduce optional expiry date for SSH Keys
+merge_request: 26351
+author:
+type: added
diff --git a/changelogs/unreleased/37256-bump-wh-version.yml b/changelogs/unreleased/37256-bump-wh-version.yml
new file mode 100644
index 00000000000..6f3392e5ac8
--- /dev/null
+++ b/changelogs/unreleased/37256-bump-wh-version.yml
@@ -0,0 +1,5 @@
+---
+title: Enable Workhorse upload acceleration for Project Import uploads via API
+merge_request: 26914
+author:
+type: performance
diff --git a/changelogs/unreleased/37256-use-workhorse-acceleration-on-project-import.yml b/changelogs/unreleased/37256-use-workhorse-acceleration-on-project-import.yml
new file mode 100644
index 00000000000..29b9c11d595
--- /dev/null
+++ b/changelogs/unreleased/37256-use-workhorse-acceleration-on-project-import.yml
@@ -0,0 +1,5 @@
+---
+title: Enable Workhorse upload acceleration for Project Import API
+merge_request: 25361
+author:
+type: performance
diff --git a/changelogs/unreleased/37320-ensure-project-snippet-api-status.yml b/changelogs/unreleased/37320-ensure-project-snippet-api-status.yml
new file mode 100644
index 00000000000..e727bd726a9
--- /dev/null
+++ b/changelogs/unreleased/37320-ensure-project-snippet-api-status.yml
@@ -0,0 +1,5 @@
+---
+title: Project Snippets API endpoints check feature status
+merge_request: 26064
+author:
+type: performance
diff --git a/changelogs/unreleased/37320-ensure-project-snippet-feature-status-in-project-snippet-api-endpoi.yml b/changelogs/unreleased/37320-ensure-project-snippet-feature-status-in-project-snippet-api-endpoi.yml
new file mode 100644
index 00000000000..8c8f629d03c
--- /dev/null
+++ b/changelogs/unreleased/37320-ensure-project-snippet-feature-status-in-project-snippet-api-endpoi.yml
@@ -0,0 +1,5 @@
+---
+title: Project Snippets GraphQL resolver checks feature status
+merge_request: 26158
+author:
+type: performance
diff --git a/changelogs/unreleased/37951-project-settings-required-approval-input-not-sequential-order.yml b/changelogs/unreleased/37951-project-settings-required-approval-input-not-sequential-order.yml
new file mode 100644
index 00000000000..909dd38bbee
--- /dev/null
+++ b/changelogs/unreleased/37951-project-settings-required-approval-input-not-sequential-order.yml
@@ -0,0 +1,5 @@
+---
+title: Fix project setting approval input in non-sequential order
+merge_request: 25391
+author:
+type: fixed
diff --git a/changelogs/unreleased/38096-splitmr-write-resource-milestone-events-pd.yml b/changelogs/unreleased/38096-splitmr-write-resource-milestone-events-pd.yml
new file mode 100644
index 00000000000..def579b44e8
--- /dev/null
+++ b/changelogs/unreleased/38096-splitmr-write-resource-milestone-events-pd.yml
@@ -0,0 +1,5 @@
+---
+title: Add possibility to track milestone changes on issues and merge requests
+merge_request: 24780
+author:
+type: added
diff --git a/changelogs/unreleased/38143-replace-labels-in-vue-with-gitlab-ui-component.yml b/changelogs/unreleased/38143-replace-labels-in-vue-with-gitlab-ui-component.yml
new file mode 100644
index 00000000000..8f5af1bcc54
--- /dev/null
+++ b/changelogs/unreleased/38143-replace-labels-in-vue-with-gitlab-ui-component.yml
@@ -0,0 +1,5 @@
+---
+title: Update labels in Vue with GlLabel component
+merge_request: 21465
+author:
+type: changed
diff --git a/changelogs/unreleased/38144-replace-labels-in-haml-with-gitlab-ui-css.yml b/changelogs/unreleased/38144-replace-labels-in-haml-with-gitlab-ui-css.yml
new file mode 100644
index 00000000000..fcb22a03e9f
--- /dev/null
+++ b/changelogs/unreleased/38144-replace-labels-in-haml-with-gitlab-ui-css.yml
@@ -0,0 +1,5 @@
+---
+title: New styles for scoped labels
+merge_request: 21377
+author:
+type: changed
diff --git a/changelogs/unreleased/38145-replace-labels-in-non-vue-js-with-gitlab-ui-component.yml b/changelogs/unreleased/38145-replace-labels-in-non-vue-js-with-gitlab-ui-component.yml
new file mode 100644
index 00000000000..04302d3653f
--- /dev/null
+++ b/changelogs/unreleased/38145-replace-labels-in-non-vue-js-with-gitlab-ui-component.yml
@@ -0,0 +1,5 @@
+---
+title: Correctly style scoped labels in sidebar after updating
+merge_request: 22071
+author:
+type: changed
diff --git a/changelogs/unreleased/38414.yml b/changelogs/unreleased/38414.yml
new file mode 100644
index 00000000000..759319e7f68
--- /dev/null
+++ b/changelogs/unreleased/38414.yml
@@ -0,0 +1,5 @@
+---
+title: Fix error details layout and alignment for mobile view
+merge_request: 24390
+author:
+type: fixed
diff --git a/changelogs/unreleased/40585-token-disclaimer.yml b/changelogs/unreleased/40585-token-disclaimer.yml
new file mode 100644
index 00000000000..ab0a5b4e148
--- /dev/null
+++ b/changelogs/unreleased/40585-token-disclaimer.yml
@@ -0,0 +1,5 @@
+---
+title: Improvement in token reference
+merge_request:
+author:
+type: other
diff --git a/changelogs/unreleased/55487-backfill-lfs-objects-projects.yml b/changelogs/unreleased/55487-backfill-lfs-objects-projects.yml
new file mode 100644
index 00000000000..fc892457c9e
--- /dev/null
+++ b/changelogs/unreleased/55487-backfill-lfs-objects-projects.yml
@@ -0,0 +1,5 @@
+---
+title: Backfill LfsObjectsProject records of forks
+merge_request: 26964
+author:
+type: other
diff --git a/changelogs/unreleased/63-nudge-users-to-select-a-template-to-set-up-a-pipeline.yml b/changelogs/unreleased/63-nudge-users-to-select-a-template-to-set-up-a-pipeline.yml
new file mode 100644
index 00000000000..57801ee4ef0
--- /dev/null
+++ b/changelogs/unreleased/63-nudge-users-to-select-a-template-to-set-up-a-pipeline.yml
@@ -0,0 +1,5 @@
+---
+title: Nudge users to select a gitlab-ci.yml template
+merge_request: 24622
+author:
+type: added
diff --git a/changelogs/unreleased/7003-fail-to-start-server-without-ar-connection.yml b/changelogs/unreleased/7003-fail-to-start-server-without-ar-connection.yml
new file mode 100644
index 00000000000..806e1ccc362
--- /dev/null
+++ b/changelogs/unreleased/7003-fail-to-start-server-without-ar-connection.yml
@@ -0,0 +1,5 @@
+---
+title: Refuse to start web server without a working ActiveRecord connection
+merge_request: 25160
+author:
+type: other
diff --git a/changelogs/unreleased/7583-developer-cannot-push-to-projects-they-create-in-groups.yml b/changelogs/unreleased/7583-developer-cannot-push-to-projects-they-create-in-groups.yml
new file mode 100644
index 00000000000..2055abc4551
--- /dev/null
+++ b/changelogs/unreleased/7583-developer-cannot-push-to-projects-they-create-in-groups.yml
@@ -0,0 +1,5 @@
+---
+title: Introduce default branch protection at the group level
+merge_request: 24426
+author:
+type: added
diff --git a/changelogs/unreleased/Remove-refreshData-function-logic-from-issue-js.yml b/changelogs/unreleased/Remove-refreshData-function-logic-from-issue-js.yml
new file mode 100644
index 00000000000..404a231ca00
--- /dev/null
+++ b/changelogs/unreleased/Remove-refreshData-function-logic-from-issue-js.yml
@@ -0,0 +1,5 @@
+---
+title: Moves refreshData from issue model to board store
+merge_request: 21409
+author: nuwe1
+type: other
diff --git a/changelogs/unreleased/Resolve-Migrate--fa-spinner-app-assets-javascripts-notes-components-discu.yml b/changelogs/unreleased/Resolve-Migrate--fa-spinner-app-assets-javascripts-notes-components-discu.yml
new file mode 100644
index 00000000000..1506f672ed2
--- /dev/null
+++ b/changelogs/unreleased/Resolve-Migrate--fa-spinner-app-assets-javascripts-notes-components-discu.yml
@@ -0,0 +1,5 @@
+---
+title: Migrate .fa-spinner to .spinner for app/assets/javascripts/notes/components/discussion_resolve_button.vue
+merge_request: 25055
+author: nuwe1
+type: other
diff --git a/changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-award_emoji.yml b/changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-award_emoji.yml
new file mode 100644
index 00000000000..cf1cce15919
--- /dev/null
+++ b/changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-award_emoji.yml
@@ -0,0 +1,5 @@
+---
+title: Remove spinner from app/views/award_emoji
+merge_request: 25032
+author: nuwe1
+type: other
diff --git a/changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-help.yml b/changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-help.yml
new file mode 100644
index 00000000000..69f1496ff3f
--- /dev/null
+++ b/changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-help.yml
@@ -0,0 +1,5 @@
+---
+title: Migrate .fa-spinner to .spinner for app/views/help
+merge_request: 25037
+author: nuwe1
+type: other
diff --git a/changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-ide.yml b/changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-ide.yml
new file mode 100644
index 00000000000..2b206e3ddb1
--- /dev/null
+++ b/changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-ide.yml
@@ -0,0 +1,5 @@
+---
+title: Migrate .fa-spinner to .spinner for app/views/ide
+merge_request: 25022
+author: nuwe1
+type: other
diff --git a/changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-projects-find_file.yml b/changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-projects-find_file.yml
new file mode 100644
index 00000000000..47da290fb43
--- /dev/null
+++ b/changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-projects-find_file.yml
@@ -0,0 +1,5 @@
+---
+title: Migrate .fa-spinner to .spinner for app/views/projects/find_file
+merge_request: 25051
+author: nuwe1
+type: other
diff --git a/changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-projects-forks.yml b/changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-projects-forks.yml
new file mode 100644
index 00000000000..4b46edbde8d
--- /dev/null
+++ b/changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-projects-forks.yml
@@ -0,0 +1,5 @@
+---
+title: Remove .fa-spinner from app/views/projects/forks
+merge_request: 25034
+author: nuwe1
+type: other
diff --git a/changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-projects-notes.yml b/changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-projects-notes.yml
new file mode 100644
index 00000000000..339b2b61e3e
--- /dev/null
+++ b/changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-projects-notes.yml
@@ -0,0 +1,5 @@
+---
+title: Remove spinner from app/views/projects/notes
+merge_request: 25015
+author: nuwe1
+type: other
diff --git a/changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-shared-badges.yml b/changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-shared-badges.yml
new file mode 100644
index 00000000000..3cac9a566a0
--- /dev/null
+++ b/changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-shared-badges.yml
@@ -0,0 +1,5 @@
+---
+title: Remove unused loading spinner from badge_settings partial
+merge_request: 25044
+author: nuwe1
+type: other
diff --git a/changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-snippets-notes.yml b/changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-snippets-notes.yml
new file mode 100644
index 00000000000..4f28db06eab
--- /dev/null
+++ b/changelogs/unreleased/Resolve-Migrate--fa-spinner-app-views-snippets-notes.yml
@@ -0,0 +1,5 @@
+---
+title: Remove .fa-spinner from app/views/snippets/notes
+merge_request: 25036
+author: nuwe1
+type: other
diff --git a/changelogs/unreleased/Resolve-Migrate--fa-spinner-ee-app-views-shared-members.yml b/changelogs/unreleased/Resolve-Migrate--fa-spinner-ee-app-views-shared-members.yml
new file mode 100644
index 00000000000..1e592d40992
--- /dev/null
+++ b/changelogs/unreleased/Resolve-Migrate--fa-spinner-ee-app-views-shared-members.yml
@@ -0,0 +1,5 @@
+---
+title: Migrate .fa-spinner to .spinner for ee/app/views/shared/members
+merge_request: 25019
+author: nuwe1
+type: other
diff --git a/changelogs/unreleased/a11y-ci-template.yml b/changelogs/unreleased/a11y-ci-template.yml
new file mode 100644
index 00000000000..b8312508144
--- /dev/null
+++ b/changelogs/unreleased/a11y-ci-template.yml
@@ -0,0 +1,5 @@
+---
+title: Add accessibility scanning CI template
+merge_request: 25144
+author:
+type: added
diff --git a/changelogs/unreleased/a11y-gitlab-script.yml b/changelogs/unreleased/a11y-gitlab-script.yml
new file mode 100644
index 00000000000..7f0e085ddbd
--- /dev/null
+++ b/changelogs/unreleased/a11y-gitlab-script.yml
@@ -0,0 +1,5 @@
+---
+title: Generate JSON-formatted a11y CI artifacts
+merge_request: 26687
+author:
+type: added
diff --git a/changelogs/unreleased/ac-remove-track_mr_picking-ff.yml b/changelogs/unreleased/ac-remove-track_mr_picking-ff.yml
new file mode 100644
index 00000000000..b1ebe525f2f
--- /dev/null
+++ b/changelogs/unreleased/ac-remove-track_mr_picking-ff.yml
@@ -0,0 +1,5 @@
+---
+title: Track merge request cherry-picks
+merge_request: 26907
+author:
+type: added
diff --git a/changelogs/unreleased/add-endpoint-to-remove-sidekiq-jobs-based-on-metadata.yml b/changelogs/unreleased/add-endpoint-to-remove-sidekiq-jobs-based-on-metadata.yml
new file mode 100644
index 00000000000..09dea432cb3
--- /dev/null
+++ b/changelogs/unreleased/add-endpoint-to-remove-sidekiq-jobs-based-on-metadata.yml
@@ -0,0 +1,5 @@
+---
+title: Add admin API endpoint to delete Sidekiq jobs matching metadata
+merge_request: 25998
+author:
+type: added
diff --git a/changelogs/unreleased/add-feature-flags-version.yml b/changelogs/unreleased/add-feature-flags-version.yml
new file mode 100644
index 00000000000..580bd8da7e1
--- /dev/null
+++ b/changelogs/unreleased/add-feature-flags-version.yml
@@ -0,0 +1,5 @@
+---
+title: Add version column to operations_feature_flags table
+merge_request: 25552
+author:
+type: added
diff --git a/changelogs/unreleased/add-gatsby.yml b/changelogs/unreleased/add-gatsby.yml
new file mode 100644
index 00000000000..28d57bda6d8
--- /dev/null
+++ b/changelogs/unreleased/add-gatsby.yml
@@ -0,0 +1,5 @@
+---
+title: Add Project template for Gatsby
+merge_request: 25486
+author:
+type: added
diff --git a/changelogs/unreleased/add-missing-secure-queues-changelog.yml b/changelogs/unreleased/add-missing-secure-queues-changelog.yml
new file mode 100644
index 00000000000..dc9b492b543
--- /dev/null
+++ b/changelogs/unreleased/add-missing-secure-queues-changelog.yml
@@ -0,0 +1,5 @@
+---
+title: Move namespace of Secure Sidekiq queues
+merge_request: 24340
+author:
+type: other
diff --git a/changelogs/unreleased/add-retiresjs-vars-to-dependency-scanning.yml b/changelogs/unreleased/add-retiresjs-vars-to-dependency-scanning.yml
new file mode 100644
index 00000000000..5c9f312f44e
--- /dev/null
+++ b/changelogs/unreleased/add-retiresjs-vars-to-dependency-scanning.yml
@@ -0,0 +1,5 @@
+---
+title: Add vars to allow air-gapped usage of Retire.js (Dependency Scanning)
+merge_request: 26463
+author:
+type: added
diff --git a/changelogs/unreleased/add-shard-label-to-queue-timing-histogram-metric.yml b/changelogs/unreleased/add-shard-label-to-queue-timing-histogram-metric.yml
new file mode 100644
index 00000000000..5d2ba73dc5d
--- /dev/null
+++ b/changelogs/unreleased/add-shard-label-to-queue-timing-histogram-metric.yml
@@ -0,0 +1,5 @@
+---
+title: Add 'shard' label for 'job_queue_duration_seconds' metric
+merge_request: 23536
+author:
+type: changed
diff --git a/changelogs/unreleased/add-sidekiq-metrics-for-gitaly-and-db-time.yml b/changelogs/unreleased/add-sidekiq-metrics-for-gitaly-and-db-time.yml
new file mode 100644
index 00000000000..7289637689f
--- /dev/null
+++ b/changelogs/unreleased/add-sidekiq-metrics-for-gitaly-and-db-time.yml
@@ -0,0 +1,5 @@
+---
+title: Add Prometheus metrics for Gitaly and database time in background jobs
+merge_request: 26384
+author:
+type: changed
diff --git a/changelogs/unreleased/add-static-object-external-storage-url-to-csp-rules.yml b/changelogs/unreleased/add-static-object-external-storage-url-to-csp-rules.yml
new file mode 100644
index 00000000000..a5b60d127ba
--- /dev/null
+++ b/changelogs/unreleased/add-static-object-external-storage-url-to-csp-rules.yml
@@ -0,0 +1,5 @@
+---
+title: Inject CSP values when repository static objects external caching is enabled
+merge_request: 25711
+author:
+type: fixed
diff --git a/changelogs/unreleased/add-trigger-include-artifact.yml b/changelogs/unreleased/add-trigger-include-artifact.yml
new file mode 100644
index 00000000000..acfad941751
--- /dev/null
+++ b/changelogs/unreleased/add-trigger-include-artifact.yml
@@ -0,0 +1,5 @@
+---
+title: 'Create child pipelines dynamically using content from artifact as CI configuration'
+merge_request: 23790
+author:
+type: fixed
diff --git a/changelogs/unreleased/add-user-job-title-column.yml b/changelogs/unreleased/add-user-job-title-column.yml
new file mode 100644
index 00000000000..5090f6f4eb1
--- /dev/null
+++ b/changelogs/unreleased/add-user-job-title-column.yml
@@ -0,0 +1,5 @@
+---
+title: Add support for user Job Title
+merge_request: 25483
+author:
+type: added
diff --git a/changelogs/unreleased/ak-fix-multi-pod.yml b/changelogs/unreleased/ak-fix-multi-pod.yml
new file mode 100644
index 00000000000..7f19dfd6875
--- /dev/null
+++ b/changelogs/unreleased/ak-fix-multi-pod.yml
@@ -0,0 +1,5 @@
+---
+title: Fix access to logs when multiple pods exist
+merge_request: 27008
+author:
+type: fixed
diff --git a/changelogs/unreleased/ak-move-logs-to-core.yml b/changelogs/unreleased/ak-move-logs-to-core.yml
new file mode 100644
index 00000000000..488059161ad
--- /dev/null
+++ b/changelogs/unreleased/ak-move-logs-to-core.yml
@@ -0,0 +1,5 @@
+---
+title: Move pod logs to core
+merge_request: 25455
+author:
+type: changed
diff --git a/changelogs/unreleased/ak-rescue-error.yml b/changelogs/unreleased/ak-rescue-error.yml
new file mode 100644
index 00000000000..2815477d758
--- /dev/null
+++ b/changelogs/unreleased/ak-rescue-error.yml
@@ -0,0 +1,5 @@
+---
+title: Rescue elasticsearch server error in pod logs
+merge_request: 25367
+author:
+type: fixed
diff --git a/changelogs/unreleased/ak-upgrade-es.yml b/changelogs/unreleased/ak-upgrade-es.yml
new file mode 100644
index 00000000000..733c3582bbd
--- /dev/null
+++ b/changelogs/unreleased/ak-upgrade-es.yml
@@ -0,0 +1,5 @@
+---
+title: Upgrade Elastic Stack helm chart to 1.9.0
+merge_request: 27011
+author:
+type: changed
diff --git a/changelogs/unreleased/al-205505-fix-snippet-blob-viewers.yml b/changelogs/unreleased/al-205505-fix-snippet-blob-viewers.yml
new file mode 100644
index 00000000000..c7cda97c9e0
--- /dev/null
+++ b/changelogs/unreleased/al-205505-fix-snippet-blob-viewers.yml
@@ -0,0 +1,5 @@
+---
+title: Fix snippet blob viewers for rich and plain data
+merge_request: 25945
+author:
+type: fixed
diff --git a/changelogs/unreleased/allow-selecting-all-queues-with-sidekiq-cluster.yml b/changelogs/unreleased/allow-selecting-all-queues-with-sidekiq-cluster.yml
new file mode 100644
index 00000000000..7dba322b07e
--- /dev/null
+++ b/changelogs/unreleased/allow-selecting-all-queues-with-sidekiq-cluster.yml
@@ -0,0 +1,5 @@
+---
+title: Allow selecting all queues with sidekiq-cluster
+merge_request: 26594
+author:
+type: added
diff --git a/changelogs/unreleased/allow-to-disable-defaults.yml b/changelogs/unreleased/allow-to-disable-defaults.yml
new file mode 100644
index 00000000000..c5c6616a8c2
--- /dev/null
+++ b/changelogs/unreleased/allow-to-disable-defaults.yml
@@ -0,0 +1,5 @@
+---
+title: Allow disabling inheritance of default job settings
+merge_request: 25690
+author:
+type: added
diff --git a/changelogs/unreleased/allow_toggle_modsecurity_settings.yml b/changelogs/unreleased/allow_toggle_modsecurity_settings.yml
new file mode 100644
index 00000000000..7535d00c58d
--- /dev/null
+++ b/changelogs/unreleased/allow_toggle_modsecurity_settings.yml
@@ -0,0 +1,5 @@
+---
+title: Allow enabling/disabling ModSecurity from the UI
+merge_request: 24747
+author:
+type: added
diff --git a/changelogs/unreleased/auto-deploy-image-v0-12-0.yml b/changelogs/unreleased/auto-deploy-image-v0-12-0.yml
new file mode 100644
index 00000000000..f6a6b5e9f6b
--- /dev/null
+++ b/changelogs/unreleased/auto-deploy-image-v0-12-0.yml
@@ -0,0 +1,5 @@
+---
+title: Bump Auto Deploy image to v0.12.1
+merge_request: 26336
+author:
+type: changed
diff --git a/changelogs/unreleased/broaden-access-scope-for-version-api.yml b/changelogs/unreleased/broaden-access-scope-for-version-api.yml
new file mode 100644
index 00000000000..90294f2978e
--- /dev/null
+++ b/changelogs/unreleased/broaden-access-scope-for-version-api.yml
@@ -0,0 +1,5 @@
+---
+title: Allow access to /version API endpoint with read_user scope
+merge_request: 25211
+author:
+type: changed
diff --git a/changelogs/unreleased/bvl-validate-changed-values.yml b/changelogs/unreleased/bvl-validate-changed-values.yml
new file mode 100644
index 00000000000..950fba03e69
--- /dev/null
+++ b/changelogs/unreleased/bvl-validate-changed-values.yml
@@ -0,0 +1,5 @@
+---
+title: Fix saving preferences with unrelated changes when Gitaly timeouts became invalid
+merge_request: 26292
+author:
+type: fixed
diff --git a/changelogs/unreleased/bw-board-query-by-id.yml b/changelogs/unreleased/bw-board-query-by-id.yml
new file mode 100644
index 00000000000..51246a4519f
--- /dev/null
+++ b/changelogs/unreleased/bw-board-query-by-id.yml
@@ -0,0 +1,5 @@
+---
+title: Allow group/project board to be queried by ID via GraphQL
+merge_request: 24825
+author:
+type: added
diff --git a/changelogs/unreleased/bw-graphql-board-type.yml b/changelogs/unreleased/bw-graphql-board-type.yml
new file mode 100644
index 00000000000..e4d97c4bfc1
--- /dev/null
+++ b/changelogs/unreleased/bw-graphql-board-type.yml
@@ -0,0 +1,5 @@
+---
+title: 'GraphQL: Add Board type'
+merge_request: 22497
+author: Alexander Koval
+type: added
diff --git a/changelogs/unreleased/cat-fix-env-passthrough-sast-205694.yml b/changelogs/unreleased/cat-fix-env-passthrough-sast-205694.yml
new file mode 100644
index 00000000000..6678026b896
--- /dev/null
+++ b/changelogs/unreleased/cat-fix-env-passthrough-sast-205694.yml
@@ -0,0 +1,5 @@
+---
+title: Fix variable passthrough in the SAST CI/CD template when using DinD
+merge_request: 25697
+author:
+type: fixed
diff --git a/changelogs/unreleased/cat-fix-namespaceid-import-39078.yml b/changelogs/unreleased/cat-fix-namespaceid-import-39078.yml
new file mode 100644
index 00000000000..993662fff97
--- /dev/null
+++ b/changelogs/unreleased/cat-fix-namespaceid-import-39078.yml
@@ -0,0 +1,5 @@
+---
+title: Fix project import failures when the user is not part of any group
+merge_request: 26038
+author:
+type: fixed
diff --git a/changelogs/unreleased/changed-pages-ci-var.yml b/changelogs/unreleased/changed-pages-ci-var.yml
new file mode 100644
index 00000000000..62da65a2ddd
--- /dev/null
+++ b/changelogs/unreleased/changed-pages-ci-var.yml
@@ -0,0 +1,5 @@
+---
+title: Added CI_MERGE_REQUEST_CHANGED_PAGE_* to Predefined Variables reference
+merge_request: 25256
+author:
+type: added
diff --git a/changelogs/unreleased/closed-issue-weight-grey.yml b/changelogs/unreleased/closed-issue-weight-grey.yml
new file mode 100644
index 00000000000..283f88aa606
--- /dev/null
+++ b/changelogs/unreleased/closed-issue-weight-grey.yml
@@ -0,0 +1,5 @@
+---
+title: Board issue due dates appear grey for closed past-due issues
+merge_request: 25507
+author: rachelfox
+type: fixed
diff --git a/changelogs/unreleased/cluster-apps-0-9-0.yml b/changelogs/unreleased/cluster-apps-0-9-0.yml
new file mode 100644
index 00000000000..cba68cc19b5
--- /dev/null
+++ b/changelogs/unreleased/cluster-apps-0-9-0.yml
@@ -0,0 +1,5 @@
+---
+title: Update cluster-applications to v0.9.0
+merge_request: 26242
+author:
+type: added
diff --git a/changelogs/unreleased/cluster-info-tabs.yml b/changelogs/unreleased/cluster-info-tabs.yml
new file mode 100644
index 00000000000..bc62195596f
--- /dev/null
+++ b/changelogs/unreleased/cluster-info-tabs.yml
@@ -0,0 +1,5 @@
+---
+title: Split cluster info page into tabs
+merge_request: 25940
+author:
+type: changed
diff --git a/changelogs/unreleased/confapi-repo-push-mirror.yml b/changelogs/unreleased/confapi-repo-push-mirror.yml
new file mode 100644
index 00000000000..2fb52ed31d9
--- /dev/null
+++ b/changelogs/unreleased/confapi-repo-push-mirror.yml
@@ -0,0 +1,5 @@
+---
+title: Add support for configuring remote mirrors via API
+merge_request: 25825
+author: Rajendra Kadam
+type: added
diff --git a/changelogs/unreleased/create-approval-todos-on-update.yml b/changelogs/unreleased/create-approval-todos-on-update.yml
new file mode 100644
index 00000000000..87ab5539886
--- /dev/null
+++ b/changelogs/unreleased/create-approval-todos-on-update.yml
@@ -0,0 +1,5 @@
+---
+title: Create approval todos on update
+merge_request: 26077
+author:
+type: fixed
diff --git a/changelogs/unreleased/create-puma-rugged-suboptimal-config-checker.yml b/changelogs/unreleased/create-puma-rugged-suboptimal-config-checker.yml
new file mode 100644
index 00000000000..bdf38079460
--- /dev/null
+++ b/changelogs/unreleased/create-puma-rugged-suboptimal-config-checker.yml
@@ -0,0 +1,6 @@
+---
+title: 'Show notices in Admin area when any of these cases is detected: Puma, multi-threaded
+  Puma, multi-threaded Puma + Rugged'
+merge_request: 21403
+author:
+type: added
diff --git a/changelogs/unreleased/create-remote-mirrors-docs.yml b/changelogs/unreleased/create-remote-mirrors-docs.yml
new file mode 100644
index 00000000000..16b438d53bb
--- /dev/null
+++ b/changelogs/unreleased/create-remote-mirrors-docs.yml
@@ -0,0 +1,5 @@
+---
+title: Add documentation for create remote mirrors API
+merge_request: 26012
+author: Rajendra Kadam
+type: added
diff --git a/changelogs/unreleased/create_single_temporary_index_for_notes_mentions.yml b/changelogs/unreleased/create_single_temporary_index_for_notes_mentions.yml
new file mode 100644
index 00000000000..bf3684b0630
--- /dev/null
+++ b/changelogs/unreleased/create_single_temporary_index_for_notes_mentions.yml
@@ -0,0 +1,5 @@
+---
+title: Replace several temporary indexes with a single one to save time when running mentions migration
+merge_request:
+author:
+type: performance
diff --git a/changelogs/unreleased/create_users_statistics.yml b/changelogs/unreleased/create_users_statistics.yml
new file mode 100644
index 00000000000..4f98141d447
--- /dev/null
+++ b/changelogs/unreleased/create_users_statistics.yml
@@ -0,0 +1,5 @@
+---
+title: Introduce a database table to store user statistics
+merge_request: 26261
+author:
+type: added
diff --git a/changelogs/unreleased/creator-pairing-fix-alert.yml b/changelogs/unreleased/creator-pairing-fix-alert.yml
new file mode 100644
index 00000000000..1b027d29d73
--- /dev/null
+++ b/changelogs/unreleased/creator-pairing-fix-alert.yml
@@ -0,0 +1,5 @@
+---
+title: Fixed SSH warning style
+merge_request: 26992
+author:
+type: other
diff --git a/changelogs/unreleased/creator-pairing-group-list-padding.yml b/changelogs/unreleased/creator-pairing-group-list-padding.yml
new file mode 100644
index 00000000000..1e1431208e4
--- /dev/null
+++ b/changelogs/unreleased/creator-pairing-group-list-padding.yml
@@ -0,0 +1,5 @@
+---
+title: Added a padding-right to items in subgroup list
+merge_request: 26791
+author:
+type: fixed
diff --git a/changelogs/unreleased/dblessing-add-scim-identities.yml b/changelogs/unreleased/dblessing-add-scim-identities.yml
new file mode 100644
index 00000000000..4e7663b12a7
--- /dev/null
+++ b/changelogs/unreleased/dblessing-add-scim-identities.yml
@@ -0,0 +1,6 @@
+---
+title: Create scim_identities table in preparation for newer SCIM features in the
+ future
+merge_request: 26124
+author:
+type: added
diff --git a/changelogs/unreleased/dennis-update-ios-swift-project-template-logo.yml b/changelogs/unreleased/dennis-update-ios-swift-project-template-logo.yml
new file mode 100644
index 00000000000..2f5ecb0e280
--- /dev/null
+++ b/changelogs/unreleased/dennis-update-ios-swift-project-template-logo.yml
@@ -0,0 +1,5 @@
+---
+title: Update iOS (Swift) project template logo
+merge_request: 25049
+author:
+type: changed
diff --git a/changelogs/unreleased/deploy-mr-once-take-2.yml b/changelogs/unreleased/deploy-mr-once-take-2.yml
new file mode 100644
index 00000000000..59b612def4a
--- /dev/null
+++ b/changelogs/unreleased/deploy-mr-once-take-2.yml
@@ -0,0 +1,5 @@
+---
+title: Don't track MR deployment multiple times
+merge_request: 25537
+author:
+type: fixed
diff --git a/changelogs/unreleased/descriptive_pages_error.yml b/changelogs/unreleased/descriptive_pages_error.yml
new file mode 100644
index 00000000000..bbd29db2141
--- /dev/null
+++ b/changelogs/unreleased/descriptive_pages_error.yml
@@ -0,0 +1,5 @@
+---
+title: Use clearer error message for pages deploy job when the SHA is outdated
+merge_request: 25659
+author:
+type: other
diff --git a/changelogs/unreleased/dmishunov-rich-viewers.yml b/changelogs/unreleased/dmishunov-rich-viewers.yml
new file mode 100644
index 00000000000..9634e570215
--- /dev/null
+++ b/changelogs/unreleased/dmishunov-rich-viewers.yml
@@ -0,0 +1,5 @@
+---
+title: Special handling for the rich viewer on specific file types
+merge_request: 26260
+author:
+type: changed
diff --git a/changelogs/unreleased/do-not-parse-undefined-severity-confidence.yml b/changelogs/unreleased/do-not-parse-undefined-severity-confidence.yml
new file mode 100644
index 00000000000..32efeccf971
--- /dev/null
+++ b/changelogs/unreleased/do-not-parse-undefined-severity-confidence.yml
@@ -0,0 +1,5 @@
+---
+title: Do not parse undefined severity and confidence from reports
+merge_request: 25884
+author:
+type: other
diff --git a/changelogs/unreleased/dotenv-report-artifact.yml b/changelogs/unreleased/dotenv-report-artifact.yml
new file mode 100644
index 00000000000..54ed75bc7ab
--- /dev/null
+++ b/changelogs/unreleased/dotenv-report-artifact.yml
@@ -0,0 +1,5 @@
+---
+title: Support DotEnv Variables through report type artifact
+merge_request: 26247
+author:
+type: added
diff --git a/changelogs/unreleased/drop_forked_project_links_table.yml b/changelogs/unreleased/drop_forked_project_links_table.yml
new file mode 100644
index 00000000000..105b921abc9
--- /dev/null
+++ b/changelogs/unreleased/drop_forked_project_links_table.yml
@@ -0,0 +1,5 @@
+---
+title: Drop forked_project_links table
+merge_request: 20771
+author: Lee Tickett
+type: other
diff --git a/changelogs/unreleased/drop_old_state_column_from_issues.yml b/changelogs/unreleased/drop_old_state_column_from_issues.yml
new file mode 100644
index 00000000000..ed94c381cea
--- /dev/null
+++ b/changelogs/unreleased/drop_old_state_column_from_issues.yml
@@ -0,0 +1,5 @@
+---
+title: Remove state column from issues and merge_requests
+merge_request: 25561
+author:
+type: deprecated
diff --git a/changelogs/unreleased/dz-scope-issue-route-default.yml b/changelogs/unreleased/dz-scope-issue-route-default.yml
new file mode 100644
index 00000000000..d39c70ef620
--- /dev/null
+++ b/changelogs/unreleased/dz-scope-issue-route-default.yml
@@ -0,0 +1,5 @@
+---
+title: Move issues routes under /-/ scope
+merge_request: 24791
+author:
+type: changed
diff --git a/changelogs/unreleased/ee-insert-all-for-load-balancing.yml b/changelogs/unreleased/ee-insert-all-for-load-balancing.yml
new file mode 100644
index 00000000000..135f97cb488
--- /dev/null
+++ b/changelogs/unreleased/ee-insert-all-for-load-balancing.yml
@@ -0,0 +1,5 @@
+---
+title: Support Rails 6 `insert_all!`
+merge_request: 26595
+author:
+type: fixed
diff --git a/changelogs/unreleased/enable-customizable-cycle-analytics.yml b/changelogs/unreleased/enable-customizable-cycle-analytics.yml
new file mode 100644
index 00000000000..3f981cb4fd3
--- /dev/null
+++ b/changelogs/unreleased/enable-customizable-cycle-analytics.yml
@@ -0,0 +1,5 @@
+---
+title: Enable customizable_cycle_analytics feature flag by default
+merge_request: 27418
+author:
+type: changed
diff --git a/changelogs/unreleased/feat-2fa-for-admin-mode.yml b/changelogs/unreleased/feat-2fa-for-admin-mode.yml
new file mode 100644
index 00000000000..c95f80da045
--- /dev/null
+++ b/changelogs/unreleased/feat-2fa-for-admin-mode.yml
@@ -0,0 +1,5 @@
+---
+title: Add 2FA support to admin mode feature
+merge_request: 22281
+author: Diego Louzán
+type: added
diff --git a/changelogs/unreleased/feat-broadcast-message-dimsiss.yml b/changelogs/unreleased/feat-broadcast-message-dimsiss.yml
new file mode 100644
index 00000000000..fd03f36d07a
--- /dev/null
+++ b/changelogs/unreleased/feat-broadcast-message-dimsiss.yml
@@ -0,0 +1,5 @@
+---
+title: Add user dismiss option to broadcast messages
+merge_request: 20665
+author: Fabio Huser
+type: added
diff --git a/changelogs/unreleased/feat-mr-diff-coverage-visualisation.yml b/changelogs/unreleased/feat-mr-diff-coverage-visualisation.yml
new file mode 100644
index 00000000000..f0ff247edbe
--- /dev/null
+++ b/changelogs/unreleased/feat-mr-diff-coverage-visualisation.yml
@@ -0,0 +1,5 @@
+---
+title: Add Cobertura XML coverage visualization to merge request diff view
+merge_request: 21791
+author: Fabio Huser
+type: added
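
The Cobertura visualization entry above corresponds to the `artifacts:reports:cobertura` keyword. A minimal sketch, assuming the test runner already emits a Cobertura XML file; the job name and report path are illustrative:

```yaml
test:
  stage: test
  script:
    - npm test -- --coverage          # assumed to produce Cobertura XML output
  artifacts:
    reports:
      cobertura: coverage/cobertura-coverage.xml
```
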
diff --git a/changelogs/unreleased/feat-x509-crl.yml b/changelogs/unreleased/feat-x509-crl.yml
new file mode 100644
index 00000000000..be093ce69be
--- /dev/null
+++ b/changelogs/unreleased/feat-x509-crl.yml
@@ -0,0 +1,5 @@
+---
+title: Add functionality to revoke an X509Certificate and update related X509CommitSignatures
+merge_request: 24889
+author: Roger Meier
+type: added
diff --git a/changelogs/unreleased/feature-199458-track-jump-to-next-unresolved-thread.yml b/changelogs/unreleased/feature-199458-track-jump-to-next-unresolved-thread.yml
new file mode 100644
index 00000000000..a34de5df008
--- /dev/null
+++ b/changelogs/unreleased/feature-199458-track-jump-to-next-unresolved-thread.yml
@@ -0,0 +1,5 @@
+---
+title: Added tracking to merge request jump to next thread buttons
+merge_request: 26319
+author: Martin Hobert
+type: added
diff --git a/changelogs/unreleased/feature-enable-split-diffs-by-default.yml b/changelogs/unreleased/feature-enable-split-diffs-by-default.yml
new file mode 100644
index 00000000000..9f703a226c6
--- /dev/null
+++ b/changelogs/unreleased/feature-enable-split-diffs-by-default.yml
@@ -0,0 +1,5 @@
+---
+title: Diffs load each view style separately, on demand
+merge_request: 24821
+author:
+type: performance
diff --git a/changelogs/unreleased/feature-image-diff-size.yml b/changelogs/unreleased/feature-image-diff-size.yml
new file mode 100644
index 00000000000..0217f6b8018
--- /dev/null
+++ b/changelogs/unreleased/feature-image-diff-size.yml
@@ -0,0 +1,6 @@
+---
+title: All image diffs (except for renamed files) show the image file size in the
+ diff
+merge_request: 25734
+author:
+type: added
diff --git a/changelogs/unreleased/file-path-validator.yml b/changelogs/unreleased/file-path-validator.yml
new file mode 100644
index 00000000000..313177d3682
--- /dev/null
+++ b/changelogs/unreleased/file-path-validator.yml
@@ -0,0 +1,5 @@
+---
+title: Add custom validator for file paths
+merge_request: 24223
+author: Rajendra Kadam
+type: added
diff --git a/changelogs/unreleased/filter-sentry-error-list.yml b/changelogs/unreleased/filter-sentry-error-list.yml
new file mode 100644
index 00000000000..36d752ca8ba
--- /dev/null
+++ b/changelogs/unreleased/filter-sentry-error-list.yml
@@ -0,0 +1,5 @@
+---
+title: Filter sentry error list by status (unresolved/ignored/resolved)
+merge_request: 26205
+author:
+type: added
diff --git a/changelogs/unreleased/find-commits-by-author.yml b/changelogs/unreleased/find-commits-by-author.yml
new file mode 100644
index 00000000000..a0ef9c1f3af
--- /dev/null
+++ b/changelogs/unreleased/find-commits-by-author.yml
@@ -0,0 +1,5 @@
+---
+title: Filter commits by author
+merge_request: 25597
+author:
+type: added
diff --git a/changelogs/unreleased/fix-api.yml b/changelogs/unreleased/fix-api.yml
new file mode 100644
index 00000000000..867c534663b
--- /dev/null
+++ b/changelogs/unreleased/fix-api.yml
@@ -0,0 +1,5 @@
+---
+title: Support finding namespace by ID or path on fork API
+merge_request: 20603
+author: leoleoasd
+type: fixed
diff --git a/changelogs/unreleased/fix-ci-delete-variable-bug.yml b/changelogs/unreleased/fix-ci-delete-variable-bug.yml
new file mode 100644
index 00000000000..9a47f2d74d4
--- /dev/null
+++ b/changelogs/unreleased/fix-ci-delete-variable-bug.yml
@@ -0,0 +1,5 @@
+---
+title: Update UI for project and group settings CI variables
+merge_request: 27411
+author:
+type: added
diff --git a/changelogs/unreleased/fix-dependency-proxy-link.yml b/changelogs/unreleased/fix-dependency-proxy-link.yml
new file mode 100644
index 00000000000..547d0334d1e
--- /dev/null
+++ b/changelogs/unreleased/fix-dependency-proxy-link.yml
@@ -0,0 +1,5 @@
+---
+title: Add link to dependency proxy docs on the dependency proxy page
+merge_request: 26092
+author:
+type: changed
diff --git a/changelogs/unreleased/fix-deployment-namespace-resolution.yml b/changelogs/unreleased/fix-deployment-namespace-resolution.yml
new file mode 100644
index 00000000000..389e6c6cb56
--- /dev/null
+++ b/changelogs/unreleased/fix-deployment-namespace-resolution.yml
@@ -0,0 +1,5 @@
+---
+title: Fix Kubernetes namespace resolution for new DeploymentCluster records
+merge_request: 25853
+author:
+type: fixed
diff --git a/changelogs/unreleased/fix-deployments-pagination.yml b/changelogs/unreleased/fix-deployments-pagination.yml
new file mode 100644
index 00000000000..5fe5e95f077
--- /dev/null
+++ b/changelogs/unreleased/fix-deployments-pagination.yml
@@ -0,0 +1,5 @@
+---
+title: Add API pagination for deployed merge requests
+merge_request: 25733
+author:
+type: performance
diff --git a/changelogs/unreleased/fix-duplicate-labels-when-moving-projects.yml b/changelogs/unreleased/fix-duplicate-labels-when-moving-projects.yml
new file mode 100644
index 00000000000..a5827b08f33
--- /dev/null
+++ b/changelogs/unreleased/fix-duplicate-labels-when-moving-projects.yml
@@ -0,0 +1,5 @@
+---
+title: Fix duplicate labels when moving projects within the same ancestor group
+merge_request: 27261
+author:
+type: fixed
diff --git a/changelogs/unreleased/fix-export-state-logic.yml b/changelogs/unreleased/fix-export-state-logic.yml
new file mode 100644
index 00000000000..7b4bc2186a0
--- /dev/null
+++ b/changelogs/unreleased/fix-export-state-logic.yml
@@ -0,0 +1,5 @@
+---
+title: Fix logic to determine project export state and add regeneration_in_progress state
+merge_request: 23664
+author:
+type: fixed
diff --git a/changelogs/unreleased/fix-merge-to-ref-service-raise-command-error.yml b/changelogs/unreleased/fix-merge-to-ref-service-raise-command-error.yml
new file mode 100644
index 00000000000..73b09188a6e
--- /dev/null
+++ b/changelogs/unreleased/fix-merge-to-ref-service-raise-command-error.yml
@@ -0,0 +1,5 @@
+---
+title: Fix MergeToRefService raising Gitlab::Git::CommandError
+merge_request: 26465
+author:
+type: fixed
diff --git a/changelogs/unreleased/fix-mermaid-flow-chart-width.yml b/changelogs/unreleased/fix-mermaid-flow-chart-width.yml
new file mode 100644
index 00000000000..20258d40728
--- /dev/null
+++ b/changelogs/unreleased/fix-mermaid-flow-chart-width.yml
@@ -0,0 +1,5 @@
+---
+title: Fix Mermaid flowchart width
+merge_request: 26848
+author: julien MILLAU
+type: fixed
diff --git a/changelogs/unreleased/fix-pipeline-creation-race-conditions.yml b/changelogs/unreleased/fix-pipeline-creation-race-conditions.yml
new file mode 100644
index 00000000000..283e1541fb9
--- /dev/null
+++ b/changelogs/unreleased/fix-pipeline-creation-race-conditions.yml
@@ -0,0 +1,5 @@
+---
+title: Drop bridge if downstream pipeline has errors
+merge_request: 25706
+author:
+type: fixed
diff --git a/changelogs/unreleased/fix-pipeline-details-invalid-buttons.yml b/changelogs/unreleased/fix-pipeline-details-invalid-buttons.yml
new file mode 100644
index 00000000000..afb162f7440
--- /dev/null
+++ b/changelogs/unreleased/fix-pipeline-details-invalid-buttons.yml
@@ -0,0 +1,5 @@
+---
+title: Fix pipeline details page initialisation on invalid pipeline
+merge_request: 25302
+author: Fabio Huser
+type: fixed
diff --git a/changelogs/unreleased/fix-pipeline-tooltip.yml b/changelogs/unreleased/fix-pipeline-tooltip.yml
new file mode 100644
index 00000000000..07d2a1b78de
--- /dev/null
+++ b/changelogs/unreleased/fix-pipeline-tooltip.yml
@@ -0,0 +1,5 @@
+---
+title: Change tooltip text for pipeline on last commit widget
+merge_request: 26315
+author:
+type: other
diff --git a/changelogs/unreleased/fix-prevent-user-theme-color-api-overwrite.yml b/changelogs/unreleased/fix-prevent-user-theme-color-api-overwrite.yml
new file mode 100644
index 00000000000..9fd3cb83869
--- /dev/null
+++ b/changelogs/unreleased/fix-prevent-user-theme-color-api-overwrite.yml
@@ -0,0 +1,5 @@
+---
+title: Prevent default overwrite for theme and color ID in user API
+merge_request: 26792
+author: Fabio Huser
+type: fixed
diff --git a/changelogs/unreleased/fixes-caret-position-after-pasting-an-image-15011.yml b/changelogs/unreleased/fixes-caret-position-after-pasting-an-image-15011.yml
new file mode 100644
index 00000000000..f111a2e075b
--- /dev/null
+++ b/changelogs/unreleased/fixes-caret-position-after-pasting-an-image-15011.yml
@@ -0,0 +1,5 @@
+---
+title: Fix caret position after pasting an image (issue 15011)
+merge_request: 21382
+author: Carolina Carvalhosa
+type: fixed
diff --git a/changelogs/unreleased/fj-195517-single-blob-snippet-view-render.yml b/changelogs/unreleased/fj-195517-single-blob-snippet-view-render.yml
new file mode 100644
index 00000000000..5c95aee814d
--- /dev/null
+++ b/changelogs/unreleased/fj-195517-single-blob-snippet-view-render.yml
@@ -0,0 +1,5 @@
+---
+title: Render single snippet blob in repository
+merge_request: 23848
+author:
+type: added
diff --git a/changelogs/unreleased/fj-205646-fix-project-moved-message.yml b/changelogs/unreleased/fj-205646-fix-project-moved-message.yml
new file mode 100644
index 00000000000..9f5865d78ef
--- /dev/null
+++ b/changelogs/unreleased/fj-205646-fix-project-moved-message.yml
@@ -0,0 +1,5 @@
+---
+title: Fix project moved message after git operation
+merge_request: 27341
+author:
+type: fixed
diff --git a/changelogs/unreleased/fj-207803-fix-project-snippet-policy-bug.yml b/changelogs/unreleased/fj-207803-fix-project-snippet-policy-bug.yml
new file mode 100644
index 00000000000..97659b6019a
--- /dev/null
+++ b/changelogs/unreleased/fj-207803-fix-project-snippet-policy-bug.yml
@@ -0,0 +1,5 @@
+---
+title: Fix bug deleting internal project snippets by project maintainer
+merge_request: 25792
+author:
+type: fixed
diff --git a/changelogs/unreleased/fj-208693-fix-bug-creating-snippet.yml b/changelogs/unreleased/fj-208693-fix-bug-creating-snippet.yml
new file mode 100644
index 00000000000..8b10f9b59b3
--- /dev/null
+++ b/changelogs/unreleased/fj-208693-fix-bug-creating-snippet.yml
@@ -0,0 +1,5 @@
+---
+title: Fix bug committing snippet content when creating the snippet
+merge_request: 26287
+author:
+type: fixed
diff --git a/changelogs/unreleased/fj-39176-create-project-snippet-repository.yml b/changelogs/unreleased/fj-39176-create-project-snippet-repository.yml
new file mode 100644
index 00000000000..81645e30b67
--- /dev/null
+++ b/changelogs/unreleased/fj-39176-create-project-snippet-repository.yml
@@ -0,0 +1,5 @@
+---
+title: Update git workflows and routes to allow snippets
+merge_request: 21739
+author:
+type: added
diff --git a/changelogs/unreleased/fj-39201-import-export-project-snippets.yml b/changelogs/unreleased/fj-39201-import-export-project-snippets.yml
new file mode 100644
index 00000000000..790052f3f20
--- /dev/null
+++ b/changelogs/unreleased/fj-39201-import-export-project-snippets.yml
@@ -0,0 +1,5 @@
+---
+title: Import/Export snippet repositories
+merge_request: 24150
+author:
+type: added
diff --git a/changelogs/unreleased/fj-39265-create-snippet-repository-content.yml b/changelogs/unreleased/fj-39265-create-snippet-repository-content.yml
new file mode 100644
index 00000000000..25a12005e95
--- /dev/null
+++ b/changelogs/unreleased/fj-39265-create-snippet-repository-content.yml
@@ -0,0 +1,5 @@
+---
+title: Commit file when snippet is created
+merge_request: 23953
+author:
+type: added
diff --git a/changelogs/unreleased/fj-39265-update-snippet-repository-content.yml b/changelogs/unreleased/fj-39265-update-snippet-repository-content.yml
new file mode 100644
index 00000000000..3a8b3684531
--- /dev/null
+++ b/changelogs/unreleased/fj-39265-update-snippet-repository-content.yml
@@ -0,0 +1,5 @@
+---
+title: Update files when snippet is updated
+merge_request: 23993
+author:
+type: changed
diff --git a/changelogs/unreleased/fj-39515-delete-snippet-repositories.yml b/changelogs/unreleased/fj-39515-delete-snippet-repositories.yml
new file mode 100644
index 00000000000..a5ed472ddfa
--- /dev/null
+++ b/changelogs/unreleased/fj-39515-delete-snippet-repositories.yml
@@ -0,0 +1,5 @@
+---
+title: Add/update services to delete snippets repositories
+merge_request: 22672
+author:
+type: added
diff --git a/changelogs/unreleased/fj-allow-create-snippet-default-branch.yml b/changelogs/unreleased/fj-allow-create-snippet-default-branch.yml
new file mode 100644
index 00000000000..0f945ab6de8
--- /dev/null
+++ b/changelogs/unreleased/fj-allow-create-snippet-default-branch.yml
@@ -0,0 +1,5 @@
+---
+title: Allow creating default branch in snippet repositories
+merge_request: 26294
+author:
+type: fixed
diff --git a/changelogs/unreleased/fj-fix-bug-hook-env.yml b/changelogs/unreleased/fj-fix-bug-hook-env.yml
new file mode 100644
index 00000000000..351dcdee689
--- /dev/null
+++ b/changelogs/unreleased/fj-fix-bug-hook-env.yml
@@ -0,0 +1,5 @@
+---
+title: Fix bug setting hook env with personal snippets
+merge_request: 27235
+author:
+type: fixed
diff --git a/changelogs/unreleased/fj-fix-git-error-message-update-snippet.yml b/changelogs/unreleased/fj-fix-git-error-message-update-snippet.yml
new file mode 100644
index 00000000000..cfb2575ac21
--- /dev/null
+++ b/changelogs/unreleased/fj-fix-git-error-message-update-snippet.yml
@@ -0,0 +1,5 @@
+---
+title: Show git error message when updating a snippet
+merge_request: 26570
+author:
+type: fixed
diff --git a/changelogs/unreleased/fj-fix-internal-api-return-code.yml b/changelogs/unreleased/fj-fix-internal-api-return-code.yml
new file mode 100644
index 00000000000..6809d48013a
--- /dev/null
+++ b/changelogs/unreleased/fj-fix-internal-api-return-code.yml
@@ -0,0 +1,5 @@
+---
+title: Change back internal API return code
+merge_request: 26063
+author:
+type: fixed
diff --git a/changelogs/unreleased/fj-fix-snippet-update-error-message.yml b/changelogs/unreleased/fj-fix-snippet-update-error-message.yml
new file mode 100644
index 00000000000..dbf1c0b2919
--- /dev/null
+++ b/changelogs/unreleased/fj-fix-snippet-update-error-message.yml
@@ -0,0 +1,5 @@
+---
+title: Fix bug displaying snippet update error
+merge_request: 27082
+author:
+type: fixed
diff --git a/changelogs/unreleased/fj-fix-snippet-url-to-repo.yml b/changelogs/unreleased/fj-fix-snippet-url-to-repo.yml
new file mode 100644
index 00000000000..8f84d3ad9f5
--- /dev/null
+++ b/changelogs/unreleased/fj-fix-snippet-url-to-repo.yml
@@ -0,0 +1,5 @@
+---
+title: Fix snippet url_to_repo by removing special characters
+merge_request: 27390
+author:
+type: fixed
diff --git a/changelogs/unreleased/fj-remove-repository-storage-column-from-snippets.yml b/changelogs/unreleased/fj-remove-repository-storage-column-from-snippets.yml
new file mode 100644
index 00000000000..b3cc3143fc4
--- /dev/null
+++ b/changelogs/unreleased/fj-remove-repository-storage-column-from-snippets.yml
@@ -0,0 +1,5 @@
+---
+title: Remove repository_storage column from snippets
+merge_request: 25699
+author:
+type: other
diff --git a/changelogs/unreleased/fj-rename-unauthorized-error-to-forbidden-error.yml b/changelogs/unreleased/fj-rename-unauthorized-error-to-forbidden-error.yml
new file mode 100644
index 00000000000..ca2c28c13ae
--- /dev/null
+++ b/changelogs/unreleased/fj-rename-unauthorized-error-to-forbidden-error.yml
@@ -0,0 +1,5 @@
+---
+title: Align git returned error codes
+merge_request: 25936
+author:
+type: changed
diff --git a/changelogs/unreleased/fj-update-snippet-from-git-action.yml b/changelogs/unreleased/fj-update-snippet-from-git-action.yml
new file mode 100644
index 00000000000..bf210ba1a14
--- /dev/null
+++ b/changelogs/unreleased/fj-update-snippet-from-git-action.yml
@@ -0,0 +1,5 @@
+---
+title: Sync snippet after Git action
+merge_request: 26565
+author:
+type: changed
diff --git a/changelogs/unreleased/fooishbar-gitlab-fix-project-job-path-exposed-artifacts.yml b/changelogs/unreleased/fooishbar-gitlab-fix-project-job-path-exposed-artifacts.yml
new file mode 100644
index 00000000000..2a361fcc264
--- /dev/null
+++ b/changelogs/unreleased/fooishbar-gitlab-fix-project-job-path-exposed-artifacts.yml
@@ -0,0 +1,5 @@
+---
+title: Fix links to exposed artifacts in MRs from forks
+merge_request: 25868
+author: Daniel Stone
+type: fixed
diff --git a/changelogs/unreleased/georgekoltsov-196188-cleanup-temp-exports.yml b/changelogs/unreleased/georgekoltsov-196188-cleanup-temp-exports.yml
new file mode 100644
index 00000000000..d74a628dfe5
--- /dev/null
+++ b/changelogs/unreleased/georgekoltsov-196188-cleanup-temp-exports.yml
@@ -0,0 +1,5 @@
+---
+title: Ensure temp export data is removed if Group/Project export failed
+merge_request: 25828
+author:
+type: fixed
diff --git a/changelogs/unreleased/georgekoltsov-27883-fix-import-pipeline-order.yml b/changelogs/unreleased/georgekoltsov-27883-fix-import-pipeline-order.yml
new file mode 100644
index 00000000000..1ccabf0af1a
--- /dev/null
+++ b/changelogs/unreleased/georgekoltsov-27883-fix-import-pipeline-order.yml
@@ -0,0 +1,5 @@
+---
+title: Fix reversed pipeline order on Project Import
+merge_request: 26390
+author:
+type: fixed
diff --git a/changelogs/unreleased/georgekoltsov-bump-project-import-limit.yml b/changelogs/unreleased/georgekoltsov-bump-project-import-limit.yml
new file mode 100644
index 00000000000..1b692060624
--- /dev/null
+++ b/changelogs/unreleased/georgekoltsov-bump-project-import-limit.yml
@@ -0,0 +1,5 @@
+---
+title: Update Project Import API rate limit
+merge_request: 26903
+author:
+type: other
diff --git a/changelogs/unreleased/georgekoltsov-fix-500-on-gitea-importer.yml b/changelogs/unreleased/georgekoltsov-fix-500-on-gitea-importer.yml
new file mode 100644
index 00000000000..d1d37115c9c
--- /dev/null
+++ b/changelogs/unreleased/georgekoltsov-fix-500-on-gitea-importer.yml
@@ -0,0 +1,5 @@
+---
+title: Fix 500 Error when using Gitea Importer
+merge_request: 26166
+author:
+type: fixed
diff --git a/changelogs/unreleased/georgekoltsov-fix-epic-issues.yml b/changelogs/unreleased/georgekoltsov-fix-epic-issues.yml
new file mode 100644
index 00000000000..7a856dc6e28
--- /dev/null
+++ b/changelogs/unreleased/georgekoltsov-fix-epic-issues.yml
@@ -0,0 +1,5 @@
+---
+title: Fix issues missing on epic's page after project import
+merge_request: 26099
+author:
+type: fixed
diff --git a/changelogs/unreleased/georgekoltsov-fix-group-members-owner-access-level.yml b/changelogs/unreleased/georgekoltsov-fix-group-members-owner-access-level.yml
new file mode 100644
index 00000000000..3cdc56e553c
--- /dev/null
+++ b/changelogs/unreleased/georgekoltsov-fix-group-members-owner-access-level.yml
@@ -0,0 +1,5 @@
+---
+title: Fix Group Import importing members with Owner access level as Maintainer; Owner access level is now preserved
+merge_request: 25595
+author:
+type: fixed
diff --git a/changelogs/unreleased/georgekoltsov-fix-relations-order-on-export.yml b/changelogs/unreleased/georgekoltsov-fix-relations-order-on-export.yml
new file mode 100644
index 00000000000..bdcaf31877e
--- /dev/null
+++ b/changelogs/unreleased/georgekoltsov-fix-relations-order-on-export.yml
@@ -0,0 +1,5 @@
+---
+title: Reorder exported relations by primary_key when using Project Export
+merge_request: 27117
+author:
+type: fixed
diff --git a/changelogs/unreleased/gitaly_keepalive.yml b/changelogs/unreleased/gitaly_keepalive.yml
new file mode 100644
index 00000000000..c975f0f0df2
--- /dev/null
+++ b/changelogs/unreleased/gitaly_keepalive.yml
@@ -0,0 +1,5 @@
+---
+title: Enable client-side GRPC keepalive for Gitaly
+merge_request: 26536
+author:
+type: changed
diff --git a/changelogs/unreleased/gitlab-middleware-refactoring.yml b/changelogs/unreleased/gitlab-middleware-refactoring.yml
new file mode 100644
index 00000000000..bb9f4c4a9fa
--- /dev/null
+++ b/changelogs/unreleased/gitlab-middleware-refactoring.yml
@@ -0,0 +1,5 @@
+---
+title: Refactor workhorse passthrough URL checker
+merge_request: 26157
+author: Takuya Noguchi
+type: performance
diff --git a/changelogs/unreleased/group_milestones_n_1.yml b/changelogs/unreleased/group_milestones_n_1.yml
new file mode 100644
index 00000000000..154d29784f7
--- /dev/null
+++ b/changelogs/unreleased/group_milestones_n_1.yml
@@ -0,0 +1,5 @@
+---
+title: Fix N+1 in Group milestone view
+merge_request: 26051
+author:
+type: performance
diff --git a/changelogs/unreleased/groupapi-avatar-support.yml b/changelogs/unreleased/groupapi-avatar-support.yml
new file mode 100644
index 00000000000..8219240f64e
--- /dev/null
+++ b/changelogs/unreleased/groupapi-avatar-support.yml
@@ -0,0 +1,5 @@
+---
+title: Add avatar upload support for create and update group APIs
+merge_request: 25751
+author: Rajendra Kadam
+type: added
diff --git a/changelogs/unreleased/handle-object-storage-errors.yml b/changelogs/unreleased/handle-object-storage-errors.yml
new file mode 100644
index 00000000000..8140a30fa78
--- /dev/null
+++ b/changelogs/unreleased/handle-object-storage-errors.yml
@@ -0,0 +1,5 @@
+---
+title: Return 503 to the Runner when the object storage is unavailable
+merge_request: 25822
+author:
+type: fixed
diff --git a/changelogs/unreleased/insights-description-for-chart.yml b/changelogs/unreleased/insights-description-for-chart.yml
new file mode 100644
index 00000000000..79105e40a6c
--- /dev/null
+++ b/changelogs/unreleased/insights-description-for-chart.yml
@@ -0,0 +1,5 @@
+---
+title: Allow chart descriptions for Insights
+merge_request: 25686
+author:
+type: added
diff --git a/changelogs/unreleased/introduce_highest_role_per_user_database_table.yml b/changelogs/unreleased/introduce_highest_role_per_user_database_table.yml
new file mode 100644
index 00000000000..f3f7f5a3179
--- /dev/null
+++ b/changelogs/unreleased/introduce_highest_role_per_user_database_table.yml
@@ -0,0 +1,5 @@
+---
+title: Introduce database table for user highest roles
+merge_request: 26987
+author:
+type: added
diff --git a/changelogs/unreleased/issue-updated_at-not-nil.yml b/changelogs/unreleased/issue-updated_at-not-nil.yml
new file mode 100644
index 00000000000..359994f371b
--- /dev/null
+++ b/changelogs/unreleased/issue-updated_at-not-nil.yml
@@ -0,0 +1,5 @@
+---
+title: Add validation for updated_at parameter in update Issue API
+merge_request: 25201
+author: Filip Stybel
+type: fixed
diff --git a/changelogs/unreleased/issue_11391.yml b/changelogs/unreleased/issue_11391.yml
new file mode 100644
index 00000000000..4b6e83728d6
--- /dev/null
+++ b/changelogs/unreleased/issue_11391.yml
@@ -0,0 +1,5 @@
+---
+title: Update notifications for moved Service Desk issues
+merge_request: 25640
+author:
+type: added
diff --git a/changelogs/unreleased/issue_205500_1.yml b/changelogs/unreleased/issue_205500_1.yml
new file mode 100644
index 00000000000..a1ab7b7bf18
--- /dev/null
+++ b/changelogs/unreleased/issue_205500_1.yml
@@ -0,0 +1,5 @@
+---
+title: Add missing arguments to UpdateIssue mutation
+merge_request: 25268
+author:
+type: added
diff --git a/changelogs/unreleased/issue_205690.yml b/changelogs/unreleased/issue_205690.yml
new file mode 100644
index 00000000000..acdcafe406f
--- /dev/null
+++ b/changelogs/unreleased/issue_205690.yml
@@ -0,0 +1,5 @@
+---
+title: Remove promoted notes temporary index
+merge_request: 26896
+author:
+type: other
diff --git a/changelogs/unreleased/jdb-display-base-label-versions-dropdown.yml b/changelogs/unreleased/jdb-display-base-label-versions-dropdown.yml
new file mode 100644
index 00000000000..e18f2f1857b
--- /dev/null
+++ b/changelogs/unreleased/jdb-display-base-label-versions-dropdown.yml
@@ -0,0 +1,5 @@
+---
+title: Display base label in versions drop down
+merge_request: 25834
+author:
+type: added
diff --git a/changelogs/unreleased/jdb-fix-jump-to-next-unresolved-thread.yml b/changelogs/unreleased/jdb-fix-jump-to-next-unresolved-thread.yml
new file mode 100644
index 00000000000..72d37699fee
--- /dev/null
+++ b/changelogs/unreleased/jdb-fix-jump-to-next-unresolved-thread.yml
@@ -0,0 +1,5 @@
+---
+title: Fix Jump to next unresolved thread
+merge_request: 24728
+author:
+type: fixed
diff --git a/changelogs/unreleased/jdb-hide-dont-remove-collapsed-files.yml b/changelogs/unreleased/jdb-hide-dont-remove-collapsed-files.yml
new file mode 100644
index 00000000000..0ce204583ce
--- /dev/null
+++ b/changelogs/unreleased/jdb-hide-dont-remove-collapsed-files.yml
@@ -0,0 +1,5 @@
+---
+title: Improve MR file toggle performance by hiding files instead of removing them
+merge_request: 26181
+author:
+type: performance
diff --git a/changelogs/unreleased/jhyson-export_failures.yml b/changelogs/unreleased/jhyson-export_failures.yml
new file mode 100644
index 00000000000..d536230e87e
--- /dev/null
+++ b/changelogs/unreleased/jhyson-export_failures.yml
@@ -0,0 +1,5 @@
+---
+title: Ensure all errors are logged in Group Import
+merge_request: 25619
+author:
+type: changed
diff --git a/changelogs/unreleased/jhyson-issue-import-export-consistency.yml b/changelogs/unreleased/jhyson-issue-import-export-consistency.yml
new file mode 100644
index 00000000000..0d2b8167d94
--- /dev/null
+++ b/changelogs/unreleased/jhyson-issue-import-export-consistency.yml
@@ -0,0 +1,5 @@
+---
+title: Fix issue importer so it matches issue export format
+merge_request: 25896
+author:
+type: fixed
diff --git a/changelogs/unreleased/jivanvl-add-edit-custom-metric-link.yml b/changelogs/unreleased/jivanvl-add-edit-custom-metric-link.yml
new file mode 100644
index 00000000000..ca4dd7a0b50
--- /dev/null
+++ b/changelogs/unreleased/jivanvl-add-edit-custom-metric-link.yml
@@ -0,0 +1,5 @@
+---
+title: Add edit custom metric link to metrics dashboard
+merge_request: 26511
+author:
+type: changed
diff --git a/changelogs/unreleased/jivanvl-change-pod-logs-name.yml b/changelogs/unreleased/jivanvl-change-pod-logs-name.yml
new file mode 100644
index 00000000000..9180e7f6b3b
--- /dev/null
+++ b/changelogs/unreleased/jivanvl-change-pod-logs-name.yml
@@ -0,0 +1,5 @@
+---
+title: Rename pod logs to logs
+merge_request: 26313
+author:
+type: changed
diff --git a/changelogs/unreleased/jivavnvl-add-refresh-button-monitoring-dashboard.yml b/changelogs/unreleased/jivavnvl-add-refresh-button-monitoring-dashboard.yml
new file mode 100644
index 00000000000..75052ff337f
--- /dev/null
+++ b/changelogs/unreleased/jivavnvl-add-refresh-button-monitoring-dashboard.yml
@@ -0,0 +1,5 @@
+---
+title: Add refresh dashboard button
+merge_request: 25716
+author:
+type: changed
diff --git a/changelogs/unreleased/jlouw-improve-audit-log-header-layout.yml b/changelogs/unreleased/jlouw-improve-audit-log-header-layout.yml
new file mode 100644
index 00000000000..8a79e699a28
--- /dev/null
+++ b/changelogs/unreleased/jlouw-improve-audit-log-header-layout.yml
@@ -0,0 +1,5 @@
+---
+title: Improve audit log header layout
+merge_request: 25821
+author:
+type: changed
diff --git a/changelogs/unreleased/jswain_update_renewal_link.yml b/changelogs/unreleased/jswain_update_renewal_link.yml
new file mode 100644
index 00000000000..aec30d9fdbf
--- /dev/null
+++ b/changelogs/unreleased/jswain_update_renewal_link.yml
@@ -0,0 +1,5 @@
+---
+title: Update renewal banner link for clearer instructions
+merge_request: 26240
+author:
+type: changed
diff --git a/changelogs/unreleased/kassio-fix-dev-seed.yml b/changelogs/unreleased/kassio-fix-dev-seed.yml
new file mode 100644
index 00000000000..e55193184dd
--- /dev/null
+++ b/changelogs/unreleased/kassio-fix-dev-seed.yml
@@ -0,0 +1,5 @@
+---
+title: Fix dev vulnerabilities seeder
+merge_request: 26169
+author:
+type: fixed
diff --git a/changelogs/unreleased/leipert-drop-node-8-support.yml b/changelogs/unreleased/leipert-drop-node-8-support.yml
new file mode 100644
index 00000000000..6cb80a6a23f
--- /dev/null
+++ b/changelogs/unreleased/leipert-drop-node-8-support.yml
@@ -0,0 +1,5 @@
+---
+title: Bump minimum node version to v10.13.0
+merge_request: 26831
+author:
+type: other
diff --git a/changelogs/unreleased/leipert-polyfills-improvements.yml b/changelogs/unreleased/leipert-polyfills-improvements.yml
new file mode 100644
index 00000000000..25c27ba60c6
--- /dev/null
+++ b/changelogs/unreleased/leipert-polyfills-improvements.yml
@@ -0,0 +1,5 @@
+---
+title: Polyfill fetch for Internet Explorer 11
+merge_request: 26366
+author:
+type: fixed
diff --git a/changelogs/unreleased/limit-broadcast-notifications-to-ui.yml b/changelogs/unreleased/limit-broadcast-notifications-to-ui.yml
new file mode 100644
index 00000000000..f10b3b4fb95
--- /dev/null
+++ b/changelogs/unreleased/limit-broadcast-notifications-to-ui.yml
@@ -0,0 +1,5 @@
+---
+title: Limit notification-type broadcast display to web interface
+merge_request: 26236
+author: Aleksandrs Ļedovskis
+type: changed
diff --git a/changelogs/unreleased/lodash_blob.yml b/changelogs/unreleased/lodash_blob.yml
new file mode 100644
index 00000000000..c4404157746
--- /dev/null
+++ b/changelogs/unreleased/lodash_blob.yml
@@ -0,0 +1,5 @@
+---
+title: Replace underscore with lodash in /app/assets/javascripts/blob/
+merge_request: 25113
+author: rkpattnaik780
+type: changed
diff --git a/changelogs/unreleased/make_design_management_versions_created_at_not_null.yml b/changelogs/unreleased/make_design_management_versions_created_at_not_null.yml
new file mode 100644
index 00000000000..ee748c25c79
--- /dev/null
+++ b/changelogs/unreleased/make_design_management_versions_created_at_not_null.yml
@@ -0,0 +1,5 @@
+---
+title: Make design_management_versions.created_at not null
+merge_request: 20182
+author: Lee Tickett
+type: other
diff --git a/changelogs/unreleased/mc-feature-trigger-pipelines-project-subscriptions.yml b/changelogs/unreleased/mc-feature-trigger-pipelines-project-subscriptions.yml
new file mode 100644
index 00000000000..b028d619816
--- /dev/null
+++ b/changelogs/unreleased/mc-feature-trigger-pipelines-project-subscriptions.yml
@@ -0,0 +1,5 @@
+---
+title: Add ability to trigger pipelines when project is rebuilt.
+merge_request: 20063
+author:
+type: added
diff --git a/changelogs/unreleased/mermaid-fix.yml b/changelogs/unreleased/mermaid-fix.yml
new file mode 100644
index 00000000000..54bdfba6a15
--- /dev/null
+++ b/changelogs/unreleased/mermaid-fix.yml
@@ -0,0 +1,5 @@
+---
+title: Add functionality to render individual Mermaid diagrams
+merge_request: 26564
+author:
+type: changed
diff --git a/changelogs/unreleased/migrate-fa-spinner-in-views-dashboard-todos.yml b/changelogs/unreleased/migrate-fa-spinner-in-views-dashboard-todos.yml
new file mode 100644
index 00000000000..7007400ede6
--- /dev/null
+++ b/changelogs/unreleased/migrate-fa-spinner-in-views-dashboard-todos.yml
@@ -0,0 +1,5 @@
+---
+title: Use new loading spinner in Todos dashboard buttons
+merge_request: 25142
+author: Tsegaselassie Tadesse
+type: other
diff --git a/changelogs/unreleased/migrate-security-scans.yml b/changelogs/unreleased/migrate-security-scans.yml
new file mode 100644
index 00000000000..3806aef93a1
--- /dev/null
+++ b/changelogs/unreleased/migrate-security-scans.yml
@@ -0,0 +1,5 @@
+---
+title: Schedule worker to migrate security job artifacts to security scans
+merge_request: 24125
+author:
+type: other
diff --git a/changelogs/unreleased/mk-hide-secondary-only-setting.yml b/changelogs/unreleased/mk-hide-secondary-only-setting.yml
new file mode 100644
index 00000000000..49107c23b16
--- /dev/null
+++ b/changelogs/unreleased/mk-hide-secondary-only-setting.yml
@@ -0,0 +1,5 @@
+---
+title: 'Geo: Show secondary-only setting only on secondaries'
+merge_request: 26029
+author:
+type: fixed
diff --git a/changelogs/unreleased/mo-change-capybara-screenshots-name.yml b/changelogs/unreleased/mo-change-capybara-screenshots-name.yml
new file mode 100644
index 00000000000..d20ccd25522
--- /dev/null
+++ b/changelogs/unreleased/mo-change-capybara-screenshots-name.yml
@@ -0,0 +1,5 @@
+---
+title: Change capybara screenshot file names taken on test failures
+merge_request: 26788
+author:
+type: changed
diff --git a/changelogs/unreleased/mo-fix-capybara-screenshots-rails.yml b/changelogs/unreleased/mo-fix-capybara-screenshots-rails.yml
new file mode 100644
index 00000000000..ae01f5503d8
--- /dev/null
+++ b/changelogs/unreleased/mo-fix-capybara-screenshots-rails.yml
@@ -0,0 +1,5 @@
+---
+title: Fix capybara screenshot path name for Rails configuration
+merge_request: 27002
+author:
+type: fixed
diff --git a/changelogs/unreleased/mo-use-new-code-quality-image.yml b/changelogs/unreleased/mo-use-new-code-quality-image.yml
new file mode 100644
index 00000000000..c95a5abc1e3
--- /dev/null
+++ b/changelogs/unreleased/mo-use-new-code-quality-image.yml
@@ -0,0 +1,5 @@
+---
+title: Use new codequality docker image from ci-cd group
+merge_request: 27098
+author:
+type: other
diff --git a/changelogs/unreleased/move-storage-shards.yml b/changelogs/unreleased/move-storage-shards.yml
new file mode 100644
index 00000000000..442d86f80d8
--- /dev/null
+++ b/changelogs/unreleased/move-storage-shards.yml
@@ -0,0 +1,5 @@
+---
+title: "Backport API support to move between repository storages/shards"
+merge_request: 18721
+author: Ben Bodenmiller
+type: added
diff --git a/changelogs/unreleased/mwaw-197871-improve-duplicated-dashboard-error-messages.yml b/changelogs/unreleased/mwaw-197871-improve-duplicated-dashboard-error-messages.yml
new file mode 100644
index 00000000000..a5e1fa98049
--- /dev/null
+++ b/changelogs/unreleased/mwaw-197871-improve-duplicated-dashboard-error-messages.yml
@@ -0,0 +1,5 @@
+---
+title: Fix error messages for the dashboard cloning process
+merge_request: 26290
+author:
+type: fixed
diff --git a/changelogs/unreleased/mwaw-activate_shared_services_on_project_creation.yml b/changelogs/unreleased/mwaw-activate_shared_services_on_project_creation.yml
new file mode 100644
index 00000000000..b7641675f88
--- /dev/null
+++ b/changelogs/unreleased/mwaw-activate_shared_services_on_project_creation.yml
@@ -0,0 +1,6 @@
+---
+title: Activate Prometheus integration service for a newly created project if this project
+  has access to a shared Prometheus application.
+merge_request: 24676
+author:
+type: fixed
diff --git a/changelogs/unreleased/mwaw-remove_logs_path_for_not_authorised_users.yml b/changelogs/unreleased/mwaw-remove_logs_path_for_not_authorised_users.yml
new file mode 100644
index 00000000000..249aa36e9fd
--- /dev/null
+++ b/changelogs/unreleased/mwaw-remove_logs_path_for_not_authorised_users.yml
@@ -0,0 +1,5 @@
+---
+title: Remove unreachable link from embedded dashboard context menu
+merge_request: 25892
+author:
+type: fixed
diff --git a/changelogs/unreleased/nfriend-create-release-through-ui.yml b/changelogs/unreleased/nfriend-create-release-through-ui.yml
new file mode 100644
index 00000000000..230bc3b31a7
--- /dev/null
+++ b/changelogs/unreleased/nfriend-create-release-through-ui.yml
@@ -0,0 +1,5 @@
+---
+title: Add "New release" button to Releases page
+merge_request: 24516
+author:
+type: added
diff --git a/changelogs/unreleased/nfriend-enable-issues-summary.yml b/changelogs/unreleased/nfriend-enable-issues-summary.yml
new file mode 100644
index 00000000000..45a816ac198
--- /dev/null
+++ b/changelogs/unreleased/nfriend-enable-issues-summary.yml
@@ -0,0 +1,5 @@
+---
+title: Add issue summary to Release blocks on the Releases page
+merge_request: 27032
+author:
+type: added
diff --git a/changelogs/unreleased/nfriend-enable-release-show-page-feature-flag.yml b/changelogs/unreleased/nfriend-enable-release-show-page-feature-flag.yml
new file mode 100644
index 00000000000..882d489613d
--- /dev/null
+++ b/changelogs/unreleased/nfriend-enable-release-show-page-feature-flag.yml
@@ -0,0 +1,5 @@
+---
+title: Add dedicated Release page for viewing a single Release
+merge_request: 26502
+author:
+type: added
diff --git a/changelogs/unreleased/nicolasdular-email-restriction-regex.yml b/changelogs/unreleased/nicolasdular-email-restriction-regex.yml
new file mode 100644
index 00000000000..d7f89cc614d
--- /dev/null
+++ b/changelogs/unreleased/nicolasdular-email-restriction-regex.yml
@@ -0,0 +1,5 @@
+---
+title: Add restrictions for signup email addresses
+merge_request: 25122
+author:
+type: added
diff --git a/changelogs/unreleased/open-project-integration-2.yml b/changelogs/unreleased/open-project-integration-2.yml
new file mode 100644
index 00000000000..4bb17b3e30a
--- /dev/null
+++ b/changelogs/unreleased/open-project-integration-2.yml
@@ -0,0 +1,5 @@
+---
+title: Add migration for creating open_project_tracker_data table
+merge_request: 26966
+author:
+type: other
diff --git a/changelogs/unreleased/osw-move-sidekiq-cluster-to-core.yml b/changelogs/unreleased/osw-move-sidekiq-cluster-to-core.yml
new file mode 100644
index 00000000000..faec47f41b5
--- /dev/null
+++ b/changelogs/unreleased/osw-move-sidekiq-cluster-to-core.yml
@@ -0,0 +1,5 @@
+---
+title: Move sidekiq-cluster script to Core
+merge_request: 26703
+author:
+type: other
diff --git a/changelogs/unreleased/osw-support-opt-in-cluster-in-bg-jobs-script.yml b/changelogs/unreleased/osw-support-opt-in-cluster-in-bg-jobs-script.yml
new file mode 100644
index 00000000000..847b0f43919
--- /dev/null
+++ b/changelogs/unreleased/osw-support-opt-in-cluster-in-bg-jobs-script.yml
@@ -0,0 +1,5 @@
+---
+title: Support sidekiq-cluster supervision through bin/background_jobs
+merge_request: 27042
+author:
+type: added
diff --git a/changelogs/unreleased/pages-1-17.yml b/changelogs/unreleased/pages-1-17.yml
new file mode 100644
index 00000000000..16290eef158
--- /dev/null
+++ b/changelogs/unreleased/pages-1-17.yml
@@ -0,0 +1,5 @@
+---
+title: Upgrade Pages to 1.17.0
+merge_request: 26478
+author:
+type: added
diff --git a/changelogs/unreleased/persist-expanded-environment-name-in-build-metadata.yml b/changelogs/unreleased/persist-expanded-environment-name-in-build-metadata.yml
new file mode 100644
index 00000000000..a6de3aa78c5
--- /dev/null
+++ b/changelogs/unreleased/persist-expanded-environment-name-in-build-metadata.yml
@@ -0,0 +1,5 @@
+---
+title: Persist expanded environment name in CI build metadata
+merge_request: 22374
+author:
+type: performance
diff --git a/changelogs/unreleased/pokstad1-remove-branch-user-squash.yml b/changelogs/unreleased/pokstad1-remove-branch-user-squash.yml
new file mode 100644
index 00000000000..60efd000094
--- /dev/null
+++ b/changelogs/unreleased/pokstad1-remove-branch-user-squash.yml
@@ -0,0 +1,5 @@
+---
+title: Upgrade Gitaly gem and fix UserSquash RPC usage
+merge_request: 27372
+author:
+type: other
diff --git a/changelogs/unreleased/public-api-for-merge-trains.yml b/changelogs/unreleased/public-api-for-merge-trains.yml
new file mode 100644
index 00000000000..97da15794e8
--- /dev/null
+++ b/changelogs/unreleased/public-api-for-merge-trains.yml
@@ -0,0 +1,5 @@
+---
+title: Allow users to get Merge Trains entries via Public API
+merge_request: 25229
+author:
+type: added
diff --git a/changelogs/unreleased/rc-whitelist_ports.yml b/changelogs/unreleased/rc-whitelist_ports.yml
new file mode 100644
index 00000000000..d3e3bdc1b7a
--- /dev/null
+++ b/changelogs/unreleased/rc-whitelist_ports.yml
@@ -0,0 +1,5 @@
+---
+title: Add ability to whitelist ports
+merge_request: 27025
+author:
+type: added
diff --git a/changelogs/unreleased/refactor-bypass-session-admin-mode.yml b/changelogs/unreleased/refactor-bypass-session-admin-mode.yml
new file mode 100644
index 00000000000..902f3239fd0
--- /dev/null
+++ b/changelogs/unreleased/refactor-bypass-session-admin-mode.yml
@@ -0,0 +1,5 @@
+---
+title: Sessionless and API endpoints bypass session for admin mode
+merge_request: 25056
+author: Diego Louzán
+type: changed
diff --git a/changelogs/unreleased/refactor-disable-csrf-in-session-destroy.yml b/changelogs/unreleased/refactor-disable-csrf-in-session-destroy.yml
new file mode 100644
index 00000000000..57bf2e816b4
--- /dev/null
+++ b/changelogs/unreleased/refactor-disable-csrf-in-session-destroy.yml
@@ -0,0 +1,5 @@
+---
+title: Disable CSRF protection on logout endpoint
+merge_request: 25521
+author: Diego Louzán
+type: changed
diff --git a/changelogs/unreleased/remove-cs-kubernetes-workaround.yml b/changelogs/unreleased/remove-cs-kubernetes-workaround.yml
new file mode 100644
index 00000000000..879da58aa3c
--- /dev/null
+++ b/changelogs/unreleased/remove-cs-kubernetes-workaround.yml
@@ -0,0 +1,5 @@
+---
+title: Remove kubernetes workaround in container scanning
+merge_request: 21188
+author:
+type: changed
diff --git a/changelogs/unreleased/remove-duplicate-auth-refresh-on-project-create.yml b/changelogs/unreleased/remove-duplicate-auth-refresh-on-project-create.yml
new file mode 100644
index 00000000000..db0fe9e34e6
--- /dev/null
+++ b/changelogs/unreleased/remove-duplicate-auth-refresh-on-project-create.yml
@@ -0,0 +1,5 @@
+---
+title: Remove duplicate authorization refresh for group members on project creation
+merge_request:
+author:
+type: performance
diff --git a/changelogs/unreleased/remove-merged-branch-names-ff.yml b/changelogs/unreleased/remove-merged-branch-names-ff.yml
new file mode 100644
index 00000000000..103ac3d3792
--- /dev/null
+++ b/changelogs/unreleased/remove-merged-branch-names-ff.yml
@@ -0,0 +1,5 @@
+---
+title: Improve performance of Repository#merged_branch_names
+merge_request: 26005
+author:
+type: performance
diff --git a/changelogs/unreleased/remove-puma-notices-from-admin-area-banner.yml b/changelogs/unreleased/remove-puma-notices-from-admin-area-banner.yml
new file mode 100644
index 00000000000..a5dd6b83f4d
--- /dev/null
+++ b/changelogs/unreleased/remove-puma-notices-from-admin-area-banner.yml
@@ -0,0 +1,5 @@
+---
+title: Remove Puma notices from AdminArea banner
+merge_request: 26137
+author:
+type: changed
diff --git a/changelogs/unreleased/replace-undefined-with-unknown.yml b/changelogs/unreleased/replace-undefined-with-unknown.yml
new file mode 100644
index 00000000000..38f1ac169f8
--- /dev/null
+++ b/changelogs/unreleased/replace-undefined-with-unknown.yml
@@ -0,0 +1,5 @@
+---
+title: Replace undefined severity with unknown severity for occurrences
+merge_request: 26085
+author:
+type: other
diff --git a/changelogs/unreleased/replace-undefined-with-unkown-vulnerabilities.yml b/changelogs/unreleased/replace-undefined-with-unkown-vulnerabilities.yml
new file mode 100644
index 00000000000..bc06524fead
--- /dev/null
+++ b/changelogs/unreleased/replace-undefined-with-unkown-vulnerabilities.yml
@@ -0,0 +1,5 @@
+---
+title: Replace undefined severity with unknown severity for vulnerabilities
+merge_request: 26305
+author:
+type: other
diff --git a/changelogs/unreleased/replace_checkbox_by_toggle_for_modsecurity.yml b/changelogs/unreleased/replace_checkbox_by_toggle_for_modsecurity.yml
new file mode 100644
index 00000000000..73ec392fbaa
--- /dev/null
+++ b/changelogs/unreleased/replace_checkbox_by_toggle_for_modsecurity.yml
@@ -0,0 +1,5 @@
+---
+title: Replace checkbox by toggle for ModSecurity on Cluster App Page
+merge_request: 26720
+author:
+type: changed
diff --git a/changelogs/unreleased/repository-contributors-group-by-email.yml b/changelogs/unreleased/repository-contributors-group-by-email.yml
new file mode 100644
index 00000000000..7ee48907e6a
--- /dev/null
+++ b/changelogs/unreleased/repository-contributors-group-by-email.yml
@@ -0,0 +1,5 @@
+---
+title: Group repository contributors by email instead of name
+merge_request: 26899
+author: Hilco van der Wilk
+type: changed
diff --git a/changelogs/unreleased/requirements-model.yml b/changelogs/unreleased/requirements-model.yml
new file mode 100644
index 00000000000..daf2c649db2
--- /dev/null
+++ b/changelogs/unreleased/requirements-model.yml
@@ -0,0 +1,5 @@
+---
+title: Add migration for Requirement model
+merge_request: 26097
+author:
+type: added
diff --git a/changelogs/unreleased/rk4bir-master-patch-60164.yml b/changelogs/unreleased/rk4bir-master-patch-60164.yml
new file mode 100644
index 00000000000..e8f5203dad4
--- /dev/null
+++ b/changelogs/unreleased/rk4bir-master-patch-60164.yml
@@ -0,0 +1,5 @@
+---
+title: Migrated from .fa-spinner to .spinner in app/assets/javascripts/notes.js
+merge_request: 24916
+author: Raihan Kabir (gitlab/rk4bir)
+type: changed
diff --git a/changelogs/unreleased/rk4bir-master-patch-77755.yml b/changelogs/unreleased/rk4bir-master-patch-77755.yml
new file mode 100644
index 00000000000..b904dad6660
--- /dev/null
+++ b/changelogs/unreleased/rk4bir-master-patch-77755.yml
@@ -0,0 +1,5 @@
+---
+title: Migrated from .fa-spinner to .spinner in app/assets/javascripts/blob/template_selector.js
+merge_request: 25045
+author: Raihan Kabir (gitlab/rk4bir)
+type: changed
diff --git a/changelogs/unreleased/rk4bir-master-patch-85217.yml b/changelogs/unreleased/rk4bir-master-patch-85217.yml
new file mode 100644
index 00000000000..6825c70206f
--- /dev/null
+++ b/changelogs/unreleased/rk4bir-master-patch-85217.yml
@@ -0,0 +1,5 @@
+---
+title: Migrated from .fa-spinner to .spinner in app/assets/javascripts/create_merge_request_dropdown.js
+merge_request: 24917
+author: Raihan Kabir (gitlab/rk4bir)
+type: changed
diff --git a/changelogs/unreleased/rk4bir-master-patch-92247.yml b/changelogs/unreleased/rk4bir-master-patch-92247.yml
new file mode 100644
index 00000000000..d38ee386b6b
--- /dev/null
+++ b/changelogs/unreleased/rk4bir-master-patch-92247.yml
@@ -0,0 +1,5 @@
+---
+title: Migrated from .fa-spinner to .spinner in app/assets/javascripts/sidebar/components/assignees/assignee_title.vue
+merge_request: 24919
+author: rk4bir
+type: changed
diff --git a/changelogs/unreleased/rk4bir-master-patch-97031.yml b/changelogs/unreleased/rk4bir-master-patch-97031.yml
new file mode 100644
index 00000000000..7271f9ab92b
--- /dev/null
+++ b/changelogs/unreleased/rk4bir-master-patch-97031.yml
@@ -0,0 +1,5 @@
+---
+title: Migrated the sidebar label select dropdown title component spinner to utilize GlLoadingIcon
+merge_request: 24914
+author: Raihan Kabir
+type: changed
diff --git a/changelogs/unreleased/rs-commit-web_url.yml b/changelogs/unreleased/rs-commit-web_url.yml
new file mode 100644
index 00000000000..c9d521660e8
--- /dev/null
+++ b/changelogs/unreleased/rs-commit-web_url.yml
@@ -0,0 +1,5 @@
+---
+title: Add web_url attribute to API response for Commits
+merge_request: 26173
+author:
+type: added
diff --git a/changelogs/unreleased/rs-keep-divergent-refs-column.yml b/changelogs/unreleased/rs-keep-divergent-refs-column.yml
new file mode 100644
index 00000000000..b6d83fbc16a
--- /dev/null
+++ b/changelogs/unreleased/rs-keep-divergent-refs-column.yml
@@ -0,0 +1,5 @@
+---
+title: Add keep_divergent_refs to remote_mirrors table
+merge_request:
+author:
+type: other
diff --git a/changelogs/unreleased/rs-remote-mirrors-docs.yml b/changelogs/unreleased/rs-remote-mirrors-docs.yml
new file mode 100644
index 00000000000..3711aac63ef
--- /dev/null
+++ b/changelogs/unreleased/rs-remote-mirrors-docs.yml
@@ -0,0 +1,5 @@
+---
+title: Add remote mirrors API
+merge_request:
+author:
+type: added
diff --git a/changelogs/unreleased/sarnold-format-timestamps-locally.yml b/changelogs/unreleased/sarnold-format-timestamps-locally.yml
new file mode 100644
index 00000000000..0d2186ac32e
--- /dev/null
+++ b/changelogs/unreleased/sarnold-format-timestamps-locally.yml
@@ -0,0 +1,5 @@
+---
+title: Fix timezones for popovers.
+merge_request: 24942
+author:
+type: fixed
diff --git a/changelogs/unreleased/sh-avoid-current-settings-rake-task.yml b/changelogs/unreleased/sh-avoid-current-settings-rake-task.yml
new file mode 100644
index 00000000000..06c21785e33
--- /dev/null
+++ b/changelogs/unreleased/sh-avoid-current-settings-rake-task.yml
@@ -0,0 +1,5 @@
+---
+title: Only load usage ping cron schedule for Sidekiq
+merge_request: 25325
+author:
+type: other
diff --git a/changelogs/unreleased/sh-cache-ci-variables.yml b/changelogs/unreleased/sh-cache-ci-variables.yml
new file mode 100644
index 00000000000..8534af37808
--- /dev/null
+++ b/changelogs/unreleased/sh-cache-ci-variables.yml
@@ -0,0 +1,5 @@
+---
+title: Memoize loading of CI variables
+merge_request: 26147
+author:
+type: performance
diff --git a/changelogs/unreleased/sh-cleaup-prom-background-migrations.yml b/changelogs/unreleased/sh-cleaup-prom-background-migrations.yml
new file mode 100644
index 00000000000..f74d6560eb0
--- /dev/null
+++ b/changelogs/unreleased/sh-cleaup-prom-background-migrations.yml
@@ -0,0 +1,5 @@
+---
+title: Clean stale background migration jobs
+merge_request: 25707
+author:
+type: fixed
diff --git a/changelogs/unreleased/sh-enable-redis-key-compression.yml b/changelogs/unreleased/sh-enable-redis-key-compression.yml
new file mode 100644
index 00000000000..7ea56b0b0c3
--- /dev/null
+++ b/changelogs/unreleased/sh-enable-redis-key-compression.yml
@@ -0,0 +1,5 @@
+---
+title: Enable Redis cache key compression
+merge_request: 27254
+author:
+type: performance
diff --git a/changelogs/unreleased/sh-ensure-fresh-project-settings.yml b/changelogs/unreleased/sh-ensure-fresh-project-settings.yml
new file mode 100644
index 00000000000..9b37d06f75b
--- /dev/null
+++ b/changelogs/unreleased/sh-ensure-fresh-project-settings.yml
@@ -0,0 +1,5 @@
+---
+title: Ensure freshness of settings with project creation
+merge_request: 27156
+author:
+type: fixed
diff --git a/changelogs/unreleased/sh-feature-flag-persisted-process-cache.yml b/changelogs/unreleased/sh-feature-flag-persisted-process-cache.yml
new file mode 100644
index 00000000000..d1ef2424b15
--- /dev/null
+++ b/changelogs/unreleased/sh-feature-flag-persisted-process-cache.yml
@@ -0,0 +1,5 @@
+---
+title: Move feature flag list into process cache
+merge_request: 27511
+author:
+type: performance
diff --git a/changelogs/unreleased/sh-optimize-pipeline-for-mrs.yml b/changelogs/unreleased/sh-optimize-pipeline-for-mrs.yml
new file mode 100644
index 00000000000..fd604819982
--- /dev/null
+++ b/changelogs/unreleased/sh-optimize-pipeline-for-mrs.yml
@@ -0,0 +1,5 @@
+---
+title: Fix N+1 queries for PipelinesController#index.json
+merge_request: 26643
+author:
+type: performance
diff --git a/changelogs/unreleased/sh-rate-limit-archive-endpoint.yml b/changelogs/unreleased/sh-rate-limit-archive-endpoint.yml
new file mode 100644
index 00000000000..55c20d1a6e3
--- /dev/null
+++ b/changelogs/unreleased/sh-rate-limit-archive-endpoint.yml
@@ -0,0 +1,5 @@
+---
+title: Rate limit archive endpoint by user
+merge_request: 25750
+author:
+type: changed
diff --git a/changelogs/unreleased/sh-refresh-mr-widget-upon-cancel.yml b/changelogs/unreleased/sh-refresh-mr-widget-upon-cancel.yml
new file mode 100644
index 00000000000..66e8ebf1992
--- /dev/null
+++ b/changelogs/unreleased/sh-refresh-mr-widget-upon-cancel.yml
@@ -0,0 +1,5 @@
+---
+title: Refresh widget after canceling "Merge When Pipeline Succeeds"
+merge_request: 26232
+author:
+type: fixed
diff --git a/changelogs/unreleased/sh-upgrade-bootsnap-1-4-6.yml b/changelogs/unreleased/sh-upgrade-bootsnap-1-4-6.yml
new file mode 100644
index 00000000000..a33c056106f
--- /dev/null
+++ b/changelogs/unreleased/sh-upgrade-bootsnap-1-4-6.yml
@@ -0,0 +1,5 @@
+---
+title: Upgrade to Bootsnap 1.4.6
+merge_request: 25844
+author:
+type: performance
diff --git a/changelogs/unreleased/sh-use-process-cache-for-feature-flags.yml b/changelogs/unreleased/sh-use-process-cache-for-feature-flags.yml
new file mode 100644
index 00000000000..a812d61dadf
--- /dev/null
+++ b/changelogs/unreleased/sh-use-process-cache-for-feature-flags.yml
@@ -0,0 +1,5 @@
+---
+title: Use process-wide memory cache for feature flags
+merge_request: 26935
+author:
+type: performance
diff --git a/changelogs/unreleased/sha-params-validator.yml b/changelogs/unreleased/sha-params-validator.yml
new file mode 100644
index 00000000000..ff8717bd3a1
--- /dev/null
+++ b/changelogs/unreleased/sha-params-validator.yml
@@ -0,0 +1,5 @@
+---
+title: Add Grape custom validator for SHA params
+merge_request: 26220
+author: Rajendra Kadam
+type: added
diff --git a/changelogs/unreleased/show-cluster-status-fe.yml b/changelogs/unreleased/show-cluster-status-fe.yml
new file mode 100644
index 00000000000..b0ec9fa702b
--- /dev/null
+++ b/changelogs/unreleased/show-cluster-status-fe.yml
@@ -0,0 +1,5 @@
+---
+title: Show cluster status (FE)
+merge_request: 26368
+author:
+type: added
diff --git a/changelogs/unreleased/sidebar_not_expanding_at_certain_resolutions.yml b/changelogs/unreleased/sidebar_not_expanding_at_certain_resolutions.yml
new file mode 100644
index 00000000000..c6a2ea74a5a
--- /dev/null
+++ b/changelogs/unreleased/sidebar_not_expanding_at_certain_resolutions.yml
@@ -0,0 +1,5 @@
+---
+title: Fix bug with sidebar not expanding at certain resolutions
+merge_request: 25313
+author: Lee Tickett
+type: fixed
diff --git a/changelogs/unreleased/simplify-ide-colors.yml b/changelogs/unreleased/simplify-ide-colors.yml
new file mode 100644
index 00000000000..f8d174998b9
--- /dev/null
+++ b/changelogs/unreleased/simplify-ide-colors.yml
@@ -0,0 +1,5 @@
+---
+title: Simplify colors in the Web IDE
+merge_request: 25304
+author:
+type: other
diff --git a/changelogs/unreleased/stop_environments.yml b/changelogs/unreleased/stop_environments.yml
new file mode 100644
index 00000000000..ea92be202af
--- /dev/null
+++ b/changelogs/unreleased/stop_environments.yml
@@ -0,0 +1,5 @@
+---
+title: 'Fix stop_review job failing when artifacts from previous stages have expired'
+merge_request: 27258
+author: Jack Lei
+type: fixed
diff --git a/changelogs/unreleased/support-airgap-in-dependency-scanning-template.yml b/changelogs/unreleased/support-airgap-in-dependency-scanning-template.yml
new file mode 100644
index 00000000000..0d70bb836c1
--- /dev/null
+++ b/changelogs/unreleased/support-airgap-in-dependency-scanning-template.yml
@@ -0,0 +1,5 @@
+---
+title: Add airgap support to Dependency Scanning template
+merge_request: 26145
+author:
+type: changed
diff --git a/changelogs/unreleased/switch-ff-ci-dynamic-child-pipeline.yml b/changelogs/unreleased/switch-ff-ci-dynamic-child-pipeline.yml
new file mode 100644
index 00000000000..84aac82fe1e
--- /dev/null
+++ b/changelogs/unreleased/switch-ff-ci-dynamic-child-pipeline.yml
@@ -0,0 +1,5 @@
+---
+title: Enable the Dynamic Child Pipeline creation via artifact feature
+merge_request: 26648
+author:
+type: added
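
The dynamic child pipeline entry above refers to triggering a child pipeline from a CI configuration file that an earlier job generated and saved as an artifact. A minimal sketch; the generator script and the `generated-config.yml` file name are illustrative:

```yaml
generate-config:
  stage: build
  script:
    # Any command that writes a valid CI configuration file
    - ./generate-ci-config.sh > generated-config.yml
  artifacts:
    paths:
      - generated-config.yml

trigger-child:
  stage: test
  trigger:
    include:
      - artifact: generated-config.yml
        job: generate-config
```
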
diff --git a/changelogs/unreleased/sy-alert-embeds.yml b/changelogs/unreleased/sy-alert-embeds.yml
new file mode 100644
index 00000000000..f18b93212dd
--- /dev/null
+++ b/changelogs/unreleased/sy-alert-embeds.yml
@@ -0,0 +1,5 @@
+---
+title: Add support for alert-based metric embeds in GFM
+merge_request: 25075
+author:
+type: added
diff --git a/changelogs/unreleased/sy-auto-embed-alert.yml b/changelogs/unreleased/sy-auto-embed-alert.yml
new file mode 100644
index 00000000000..d89b0e161f5
--- /dev/null
+++ b/changelogs/unreleased/sy-auto-embed-alert.yml
@@ -0,0 +1,5 @@
+---
+title: Automatically include embedded metrics for GitLab alert incidents
+merge_request: 25277
+author:
+type: added
diff --git a/changelogs/unreleased/sy-global-integration.yml b/changelogs/unreleased/sy-global-integration.yml
new file mode 100644
index 00000000000..34ecd9a509d
--- /dev/null
+++ b/changelogs/unreleased/sy-global-integration.yml
@@ -0,0 +1,5 @@
+---
+title: Display GitLab issues created via Sentry global integration
+merge_request: 26418
+author:
+type: fixed
diff --git a/changelogs/unreleased/sy-grafana-default-panel.yml b/changelogs/unreleased/sy-grafana-default-panel.yml
new file mode 100644
index 00000000000..9ad5c824b32
--- /dev/null
+++ b/changelogs/unreleased/sy-grafana-default-panel.yml
@@ -0,0 +1,5 @@
+---
+title: Default to first valid panel in unspecified Grafana embeds
+merge_request: 21932
+author:
+type: changed
diff --git a/changelogs/unreleased/sy-grafana-default-times.yml b/changelogs/unreleased/sy-grafana-default-times.yml
new file mode 100644
index 00000000000..54b4f76c082
--- /dev/null
+++ b/changelogs/unreleased/sy-grafana-default-times.yml
@@ -0,0 +1,5 @@
+---
+title: Allow default time window on Grafana embeds
+merge_request: 21884
+author:
+type: changed
diff --git a/changelogs/unreleased/tokenize-filtered-search.yml b/changelogs/unreleased/tokenize-filtered-search.yml
new file mode 100644
index 00000000000..517d2dbe63c
--- /dev/null
+++ b/changelogs/unreleased/tokenize-filtered-search.yml
@@ -0,0 +1,5 @@
+---
+title: Use colon to tokenize input in filtered search
+merge_request: 26072
+author:
+type: changed
diff --git a/changelogs/unreleased/turn-on-new-variables-ui-ff.yml b/changelogs/unreleased/turn-on-new-variables-ui-ff.yml
new file mode 100644
index 00000000000..9cb100481db
--- /dev/null
+++ b/changelogs/unreleased/turn-on-new-variables-ui-ff.yml
@@ -0,0 +1,5 @@
+---
+title: Update UI for project and group settings CI variables
+merge_request: 26901
+author:
+type: added
diff --git a/changelogs/unreleased/tweak-wiki-title-validation-message.yml b/changelogs/unreleased/tweak-wiki-title-validation-message.yml
new file mode 100644
index 00000000000..05e2c57fcff
--- /dev/null
+++ b/changelogs/unreleased/tweak-wiki-title-validation-message.yml
@@ -0,0 +1,5 @@
+---
+title: Include invalid directories in wiki title message
+merge_request: 25376
+author:
+type: changed
diff --git a/changelogs/unreleased/udpate-cluster-application-image-to-0-11.yml b/changelogs/unreleased/udpate-cluster-application-image-to-0-11.yml
new file mode 100644
index 00000000000..27e11028441
--- /dev/null
+++ b/changelogs/unreleased/udpate-cluster-application-image-to-0-11.yml
@@ -0,0 +1,6 @@
+---
+title: Update cluster-applications image to v0.11 with a runner bugfix, updated cert-manager,
+ and vault as a new app
+merge_request: 26842
+author:
+type: changed
diff --git a/changelogs/unreleased/udpate-cluster-application-image-to-0-12.yml b/changelogs/unreleased/udpate-cluster-application-image-to-0-12.yml
new file mode 100644
index 00000000000..786e088088c
--- /dev/null
+++ b/changelogs/unreleased/udpate-cluster-application-image-to-0-12.yml
@@ -0,0 +1,5 @@
+---
+title: Adds crossplane as CI/CD Managed App
+merge_request: 27374
+author:
+type: added
diff --git a/changelogs/unreleased/unique-service-template-per-type.yml b/changelogs/unreleased/unique-service-template-per-type.yml
new file mode 100644
index 00000000000..e394b959e7d
--- /dev/null
+++ b/changelogs/unreleased/unique-service-template-per-type.yml
@@ -0,0 +1,5 @@
+---
+title: Validates only one service template per type
+merge_request: 26380
+author:
+type: other
diff --git a/changelogs/unreleased/unlink-cache-deletions.yml b/changelogs/unreleased/unlink-cache-deletions.yml
new file mode 100644
index 00000000000..699ec94d1e1
--- /dev/null
+++ b/changelogs/unreleased/unlink-cache-deletions.yml
@@ -0,0 +1,5 @@
+---
+title: Swap to UNLINK for Redis set cache
+merge_request: 27116
+author:
+type: performance
diff --git a/changelogs/unreleased/update-ado-image-to-0-10-0.yml b/changelogs/unreleased/update-ado-image-to-0-10-0.yml
new file mode 100644
index 00000000000..45bc5a41fce
--- /dev/null
+++ b/changelogs/unreleased/update-ado-image-to-0-10-0.yml
@@ -0,0 +1,5 @@
+---
+title: Update Auto DevOps deployment template's auto-deploy-image to v0.10.0 (updates the included glibc)
+merge_request: 25920
+author:
+type: other
diff --git a/changelogs/unreleased/update-cert-manager-to-0-10-1.yml b/changelogs/unreleased/update-cert-manager-to-0-10-1.yml
new file mode 100644
index 00000000000..886ab67dfc4
--- /dev/null
+++ b/changelogs/unreleased/update-cert-manager-to-0-10-1.yml
@@ -0,0 +1,5 @@
+---
+title: Use cert-manager 0.10 instead of 0.9 for new chart installations
+merge_request: 26345
+author:
+type: changed
diff --git a/changelogs/unreleased/update-dast-ado-image-to-0-10-0.yml b/changelogs/unreleased/update-dast-ado-image-to-0-10-0.yml
new file mode 100644
index 00000000000..335d553239f
--- /dev/null
+++ b/changelogs/unreleased/update-dast-ado-image-to-0-10-0.yml
@@ -0,0 +1,5 @@
+---
+title: Update DAST auto-deploy-image to v0.10.0
+merge_request: 25922
+author:
+type: other
diff --git a/changelogs/unreleased/update-gitlab-runner-helm-chart-to-0-14-0.yml b/changelogs/unreleased/update-gitlab-runner-helm-chart-to-0-14-0.yml
new file mode 100644
index 00000000000..5342809ca07
--- /dev/null
+++ b/changelogs/unreleased/update-gitlab-runner-helm-chart-to-0-14-0.yml
@@ -0,0 +1,5 @@
+---
+title: Update GitLab Runner Helm Chart to 0.14.0
+merge_request: 25749
+author:
+type: other
diff --git a/changelogs/unreleased/update-private-project-wording.yml b/changelogs/unreleased/update-private-project-wording.yml
new file mode 100644
index 00000000000..db2434589f3
--- /dev/null
+++ b/changelogs/unreleased/update-private-project-wording.yml
@@ -0,0 +1,5 @@
+---
+title: Clarify private visibility for projects
+merge_request: 25852
+author:
+type: other
diff --git a/changelogs/unreleased/update-puma-to-4-3-3.yml b/changelogs/unreleased/update-puma-to-4-3-3.yml
new file mode 100644
index 00000000000..f01fa09a417
--- /dev/null
+++ b/changelogs/unreleased/update-puma-to-4-3-3.yml
@@ -0,0 +1,5 @@
+---
+title: Update Puma to 4.3.3
+merge_request: 27232
+author:
+type: security
diff --git a/changelogs/unreleased/update-ruby-version-on-official-ci-templates.yml b/changelogs/unreleased/update-ruby-version-on-official-ci-templates.yml
new file mode 100644
index 00000000000..73e98808301
--- /dev/null
+++ b/changelogs/unreleased/update-ruby-version-on-official-ci-templates.yml
@@ -0,0 +1,5 @@
+---
+title: Update Ruby version in official CI templates
+merge_request: 23585
+author: Takuya Noguchi
+type: other
diff --git a/changelogs/unreleased/update_ingress_chart_version.yml b/changelogs/unreleased/update_ingress_chart_version.yml
new file mode 100644
index 00000000000..9a3c85c5637
--- /dev/null
+++ b/changelogs/unreleased/update_ingress_chart_version.yml
@@ -0,0 +1,5 @@
+---
+title: 'Update Ingress chart version to 1.29.7'
+merge_request: 25949
+author:
+type: added
diff --git a/changelogs/unreleased/update_repo_storage_checksum.yml b/changelogs/unreleased/update_repo_storage_checksum.yml
new file mode 100644
index 00000000000..b2f8b673730
--- /dev/null
+++ b/changelogs/unreleased/update_repo_storage_checksum.yml
@@ -0,0 +1,5 @@
+---
+title: Ensure checksums match when updating repository storage
+merge_request: 26334
+author:
+type: changed
diff --git a/changelogs/unreleased/upgrade-gitlab-ui.yml b/changelogs/unreleased/upgrade-gitlab-ui.yml
new file mode 100644
index 00000000000..b881a643a72
--- /dev/null
+++ b/changelogs/unreleased/upgrade-gitlab-ui.yml
@@ -0,0 +1,5 @@
+---
+title: Remove special chars from previous and next items in pagination
+merge_request: 25891
+author:
+type: other
diff --git a/changelogs/unreleased/use-default-crossplane-stack-versions.yml b/changelogs/unreleased/use-default-crossplane-stack-versions.yml
new file mode 100644
index 00000000000..b65c27e637b
--- /dev/null
+++ b/changelogs/unreleased/use-default-crossplane-stack-versions.yml
@@ -0,0 +1,5 @@
+---
+title: Fix installation of GitLab-managed crossplane chart
+merge_request: 27040
+author:
+type: fixed
diff --git a/changelogs/unreleased/use_replicate_repo_for_repo_move.yml b/changelogs/unreleased/use_replicate_repo_for_repo_move.yml
new file mode 100644
index 00000000000..aca0d6c5380
--- /dev/null
+++ b/changelogs/unreleased/use_replicate_repo_for_repo_move.yml
@@ -0,0 +1,5 @@
+---
+title: Use ReplicateRepository when moving repo storage
+merge_request: 26550
+author:
+type: changed
diff --git a/changelogs/unreleased/validate-subnets-field.yml b/changelogs/unreleased/validate-subnets-field.yml
new file mode 100644
index 00000000000..9d444f88255
--- /dev/null
+++ b/changelogs/unreleased/validate-subnets-field.yml
@@ -0,0 +1,5 @@
+---
+title: Validate that users select at least two subnets in the EKS form
+merge_request: 26936
+author:
+type: fixed
diff --git a/changelogs/unreleased/validate_service_project_id_nil_if_template.yml b/changelogs/unreleased/validate_service_project_id_nil_if_template.yml
new file mode 100644
index 00000000000..a04683705ef
--- /dev/null
+++ b/changelogs/unreleased/validate_service_project_id_nil_if_template.yml
@@ -0,0 +1,5 @@
+---
+title: Validate absence of project_id if service is a template
+merge_request: 26563
+author:
+type: other
diff --git a/changelogs/unreleased/vh-snippets-content-types.yml b/changelogs/unreleased/vh-snippets-content-types.yml
new file mode 100644
index 00000000000..f6589abb522
--- /dev/null
+++ b/changelogs/unreleased/vh-snippets-content-types.yml
@@ -0,0 +1,5 @@
+---
+title: Remove unused Snippets#content_types method
+merge_request: 26306
+author:
+type: other
diff --git a/changelogs/unreleased/vh-snippets-finder-tweak.yml b/changelogs/unreleased/vh-snippets-finder-tweak.yml
new file mode 100644
index 00000000000..7979245a908
--- /dev/null
+++ b/changelogs/unreleased/vh-snippets-finder-tweak.yml
@@ -0,0 +1,5 @@
+---
+title: Improve SnippetsFinder performance with disabled project snippets
+merge_request: 26295
+author:
+type: performance
diff --git a/config/application.rb b/config/application.rb
index be7d366c927..14e92bf5905 100644
--- a/config/application.rb
+++ b/config/application.rb
@@ -8,6 +8,7 @@ require 'active_record/railtie'
require 'action_controller/railtie'
require 'action_view/railtie'
require 'action_mailer/railtie'
+require 'action_cable/engine'
require 'rails/test_unit/railtie'
Bundler.require(*Rails.groups)
@@ -258,7 +259,7 @@ module Gitlab
# Full list of options:
# https://api.rubyonrails.org/classes/ActiveSupport/Cache/RedisCacheStore.html#method-c-new
caching_config_hash = Gitlab::Redis::Cache.params
- caching_config_hash[:compress] = false
+ caching_config_hash[:compress] = Gitlab::Utils.to_boolean(ENV.fetch('ENABLE_REDIS_CACHE_COMPRESSION', '1'))
caching_config_hash[:namespace] = Gitlab::Redis::Cache::CACHE_NAMESPACE
caching_config_hash[:expires_in] = 2.weeks # Cache should not grow forever
if Gitlab::Runtime.multi_threaded?
@@ -285,6 +286,20 @@ module Gitlab
g.factory_bot false
end
+ # This empty initializer forces the :let_zeitwerk_take_over initializer to run before we load
+ # initializers in config/initializers. This is done because autoloading before Zeitwerk takes
+ # over is deprecated but our initializers do a lot of autoloading.
+ # See https://gitlab.com/gitlab-org/gitlab/issues/197346 for more details
+ initializer :move_initializers, before: :load_config_initializers, after: :let_zeitwerk_take_over do
+ end
+
+ # We need this for initializers that need to be run before Zeitwerk is loaded
+ initializer :before_zeitwerk, before: :let_zeitwerk_take_over, after: :prepend_helpers_path do
+ Dir[Rails.root.join('config/initializers_before_autoloader/*.rb')].sort.each do |initializer|
+ load_config_initializer(initializer)
+ end
+ end
+
config.after_initialize do
Rails.application.reload_routes!
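
Because the new before_zeitwerk initializer loads config/initializers_before_autoloader/*.rb with Dir[...].sort, files in that directory run in lexicographic order; that is why the files added later in this diff carry numeric prefixes. A minimal sketch of the resulting order (paths shortened):

    # Lexicographic load order produced by Dir + sort:
    Dir[Rails.root.join('config/initializers_before_autoloader/*.rb')].sort
    # => [".../000_inflections.rb", ".../001_fast_gettext.rb", ".../100_patch_omniauth_saml.rb"]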
diff --git a/config/environments/development.rb b/config/environments/development.rb
index dc804197fef..25d57467060 100644
--- a/config/environments/development.rb
+++ b/config/environments/development.rb
@@ -11,6 +11,9 @@ Rails.application.configure do
config.consider_all_requests_local = true
config.action_controller.perform_caching = false
+ # Show a warning when a large data set is loaded into memory
+ config.active_record.warn_on_records_fetched_greater_than = 1000
+
# Print deprecation notices to the Rails logger
config.active_support.deprecation = :log
@@ -50,4 +53,16 @@ Rails.application.configure do
# BetterErrors live shell (REPL) on every stack frame
BetterErrors::Middleware.allow_ip!("127.0.0.1/0")
+
+ # Reassign some performance related settings when we profile the app
+ if Gitlab::Utils.to_boolean(ENV['RAILS_PROFILE'].to_s)
+ warn "Hot-reloading is disabled as you are running with RAILS_PROFILE enabled"
+ config.cache_classes = true
+ config.eager_load = true
+ config.active_record.migration_error = false
+ config.active_record.verbose_query_logs = false
+ config.action_view.cache_template_loading = true
+
+ config.middleware.delete BetterErrors::Middleware
+ end
end
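
The RAILS_PROFILE guard above only switches to the production-like settings when the variable resolves to a truthy string. A rough sketch, assuming the usual Gitlab::Utils.to_boolean semantics:

    Gitlab::Utils.to_boolean('')       # => nil   (unset: hot reloading stays enabled)
    Gitlab::Utils.to_boolean('1')      # => true  (cache_classes and eager_load are forced on)
    Gitlab::Utils.to_boolean('true')   # => true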
diff --git a/config/environments/test.rb b/config/environments/test.rb
index d8235c6220c..71cd5200415 100644
--- a/config/environments/test.rb
+++ b/config/environments/test.rb
@@ -1,7 +1,12 @@
+require 'gitlab/testing/request_blocker_middleware'
+require 'gitlab/testing/request_inspector_middleware'
+require 'gitlab/testing/clear_thread_memory_cache_middleware'
+
Rails.application.configure do
# Make sure the middleware is inserted first in middleware chain
config.middleware.insert_before(ActionDispatch::Static, Gitlab::Testing::RequestBlockerMiddleware)
config.middleware.insert_before(ActionDispatch::Static, Gitlab::Testing::RequestInspectorMiddleware)
+ config.middleware.insert_before(ActionDispatch::Static, Gitlab::Testing::ClearThreadMemoryCacheMiddleware)
# Settings specified here will take precedence over those in config/application.rb
diff --git a/config/feature_categories.yml b/config/feature_categories.yml
index 924bdb58682..7e3746baec5 100644
--- a/config/feature_categories.yml
+++ b/config/feature_categories.yml
@@ -25,8 +25,7 @@
- code_quality
- code_review
- collection
-- compliance_controls
-- compliance_frameworks
+- compliance_management
- container_network_security
- container_registry
- container_scanning
@@ -37,7 +36,7 @@
- dependency_proxy
- dependency_scanning
- design_management
-- devops_score
+- devops_reports
- digital_experience_management
- disaster_recovery
- dynamic_application_security_testing
@@ -52,6 +51,7 @@
- gitaly
- gitlab_handbook
- gitter
+- global_search
- helm_chart_registry
- importers
- incident_management
@@ -61,6 +61,8 @@
- interactive_application_security_testing
- internationalization
- issue_tracking
+- jenkins_importer
+- jira_importer
- jupyter_notebooks
- kanban_boards
- kubernetes_management
@@ -70,13 +72,14 @@
- load_testing
- logging
- malware_scanning
-- merge_trains
- metrics
- omnibus_package
- package_registry
- pages
+- pki_management
+- planning_analytics
- quality_management
-- release_governance
+- release_evidence
- release_orchestration
- requirements_management
- responsible_disclosure
@@ -86,7 +89,6 @@
- runner
- runtime_application_self_protection
- sdk
-- search
- secret_detection
- secrets_management
- serverless
@@ -97,8 +99,6 @@
- static_site_editor
- status_page
- subgroups
-- system_testing
-- teams
- templates
- threat_detection
- time_tracking
@@ -113,4 +113,3 @@
- web_ide
- web_performance
- wiki
-- workspaces
diff --git a/config/gitlab.yml.example b/config/gitlab.yml.example
index 20c75a6e255..81085d4641e 100644
--- a/config/gitlab.yml.example
+++ b/config/gitlab.yml.example
@@ -454,6 +454,11 @@ production: &base
pseudonymizer_worker:
cron: "0 * * * *"
+ # Elasticsearch bulk updater for incremental updates.
+ # NOTE: This will only take effect if elasticsearch is enabled.
+ elastic_index_bulk_cron_worker:
+ cron: "*/1 * * * *"
+
registry:
# enabled: true
# host: registry.example.com
@@ -747,7 +752,9 @@ production: &base
# Path to a file containing a CA certificate
ca_file: '/etc/ssl/certs/CA.pem'
- # Port where the client side certificate is requested by the webserver (NGINX/Apache)
+ # Host and port where the client side certificate is requested by the
+ # webserver (NGINX/Apache)
+ # client_certificate_required_host: smartcard.gitlab.example.com
# client_certificate_required_port: 3444
# Browser session with smartcard sign-in is required for Git access
@@ -1221,6 +1228,8 @@ test:
gitaly:
client_path: tmp/tests/gitaly
token: secret
+ workhorse:
+ secret_file: tmp/gitlab_workhorse_test_secret
backup:
path: tmp/tests/backups
pseudonymizer:
diff --git a/config/initializers/0_eager_load_http_cookie.rb b/config/initializers/0_eager_load_http_cookie.rb
new file mode 100644
index 00000000000..ed633fdb079
--- /dev/null
+++ b/config/initializers/0_eager_load_http_cookie.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+# https://gitlab.com/gitlab-org/gitlab/issues/207937
+# http-cookie is not thread-safe while loading it the first time, see:
+# https://github.com/sparklemotion/http-cookie/issues/6#issuecomment-543570876
+# If we're using it, we should eagerly load it.
+# For now, we have an implicit dependency on it via:
+# * http
+# * rest-client
+require 'http/cookie_jar/hash_store' if Gem.loaded_specs.key?('http-cookie')
diff --git a/config/initializers/0_inflections.rb b/config/initializers/0_inflections.rb
deleted file mode 100644
index 5c38859a667..00000000000
--- a/config/initializers/0_inflections.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-# Be sure to restart your server when you modify this file.
-
-# Add new inflection rules using the following format
-# (all these examples are active by default):
-# ActiveSupport::Inflector.inflections do |inflect|
-# inflect.plural /^(ox)$/i, '\1en'
-# inflect.singular /^(ox)en/i, '\1'
-# inflect.irregular 'person', 'people'
-# inflect.uncountable %w( fish sheep )
-# end
-#
-ActiveSupport::Inflector.inflections do |inflect|
- inflect.uncountable %w(
- award_emoji
- container_repository_registry
- design_registry
- event_log
- file_registry
- group_view
- job_artifact_registry
- lfs_object_registry
- package_file_registry
- project_auto_devops
- project_registry
- project_statistics
- system_note_metadata
- vulnerabilities_feedback
- vulnerability_feedback
- )
- inflect.acronym 'EE'
-end
diff --git a/config/initializers/0_license.rb b/config/initializers/0_license.rb
index 19c71c34904..e7b46a14630 100644
--- a/config/initializers/0_license.rb
+++ b/config/initializers/0_license.rb
@@ -1,19 +1,9 @@
# frozen_string_literal: true
Gitlab.ee do
- begin
- public_key_file = File.read(Rails.root.join(".license_encryption_key.pub"))
- public_key = OpenSSL::PKey::RSA.new(public_key_file)
- Gitlab::License.encryption_key = public_key
- rescue
- warn "WARNING: No valid license encryption key provided."
- end
-
- # Needed to run migration
- if ActiveRecord::Base.connected? && ActiveRecord::Base.connection.table_exists?('licenses')
- message = LicenseHelper.license_message(signed_in: true, is_admin: true, in_html: false)
- if ::License.block_changes? && message.present?
- warn "WARNING: #{message}"
- end
- end
+ public_key_file = File.read(Rails.root.join(".license_encryption_key.pub"))
+ public_key = OpenSSL::PKey::RSA.new(public_key_file)
+ Gitlab::License.encryption_key = public_key
+rescue
+ warn "WARNING: No valid license encryption key provided."
end
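
The simplified initializer relies on Ruby allowing rescue directly inside a do...end block (available since Ruby 2.5), so the explicit begin/end wrapper could be dropped. The same pattern in plain Ruby:

    [1].each do |_|
      Integer('not a number')   # raises ArgumentError
    rescue
      warn 'handled without an explicit begin/end'
    end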
diff --git a/config/initializers/1_settings.rb b/config/initializers/1_settings.rb
index aa743416e99..8d88d1bcf7c 100644
--- a/config/initializers/1_settings.rb
+++ b/config/initializers/1_settings.rb
@@ -77,6 +77,7 @@ end
Gitlab.ee do
Settings['smartcard'] ||= Settingslogic.new({})
Settings.smartcard['enabled'] = false if Settings.smartcard['enabled'].nil?
+ Settings.smartcard['client_certificate_required_host'] = Settings.gitlab['host'] if Settings.smartcard['client_certificate_required_host'].nil?
Settings.smartcard['client_certificate_required_port'] = 3444 if Settings.smartcard['client_certificate_required_port'].nil?
Settings.smartcard['required_for_git_access'] = false if Settings.smartcard['required_for_git_access'].nil?
Settings.smartcard['san_extensions'] = false if Settings.smartcard['san_extensions'].nil?
@@ -179,6 +180,8 @@ Settings.gitlab['email_smime'] = SmimeSignatureSettings.parse(Settings.gitlab['e
Settings.gitlab['base_url'] ||= Settings.__send__(:build_base_gitlab_url)
Settings.gitlab['url'] ||= Settings.__send__(:build_gitlab_url)
Settings.gitlab['user'] ||= 'git'
+# External configuration may cause the ssh user to differ from the GitLab user
+Settings.gitlab['ssh_user'] ||= Settings.gitlab.user
Settings.gitlab['user_home'] ||= begin
Etc.getpwnam(Settings.gitlab['user']).dir
rescue ArgumentError # no user configured
@@ -245,6 +248,12 @@ Settings['incoming_email'] ||= Settingslogic.new({})
Settings.incoming_email['enabled'] = false if Settings.incoming_email['enabled'].nil?
#
+# Service desk email
+#
+Settings['service_desk_email'] ||= Settingslogic.new({})
+Settings.service_desk_email['enabled'] = false if Settings.service_desk_email['enabled'].nil?
+
+#
# Build Artifacts
#
Settings['artifacts'] ||= Settingslogic.new({})
@@ -445,8 +454,11 @@ Settings.cron_jobs['remove_unreferenced_lfs_objects_worker']['job_class'] = 'Rem
Settings.cron_jobs['stuck_import_jobs_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['stuck_import_jobs_worker']['cron'] ||= '15 * * * *'
Settings.cron_jobs['stuck_import_jobs_worker']['job_class'] = 'StuckImportJobsWorker'
+Settings.cron_jobs['stuck_export_jobs_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['stuck_export_jobs_worker']['cron'] ||= '30 * * * *'
+Settings.cron_jobs['stuck_export_jobs_worker']['job_class'] = 'StuckExportJobsWorker'
Settings.cron_jobs['gitlab_usage_ping_worker'] ||= Settingslogic.new({})
-Settings.cron_jobs['gitlab_usage_ping_worker']['cron'] ||= Settings.__send__(:cron_for_usage_ping)
+Settings.cron_jobs['gitlab_usage_ping_worker']['cron'] ||= nil # This is dynamically loaded in the sidekiq initializer
Settings.cron_jobs['gitlab_usage_ping_worker']['job_class'] = 'GitlabUsagePingWorker'
Settings.cron_jobs['stuck_merge_jobs_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['stuck_merge_jobs_worker']['cron'] ||= '0 */2 * * *'
@@ -531,6 +543,12 @@ Gitlab.ee do
Settings.cron_jobs['update_max_seats_used_for_gitlab_com_subscriptions_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['update_max_seats_used_for_gitlab_com_subscriptions_worker']['cron'] ||= '0 12 * * *'
Settings.cron_jobs['update_max_seats_used_for_gitlab_com_subscriptions_worker']['job_class'] = 'UpdateMaxSeatsUsedForGitlabComSubscriptionsWorker'
+ Settings.cron_jobs['elastic_index_bulk_cron_worker'] ||= Settingslogic.new({})
+ Settings.cron_jobs['elastic_index_bulk_cron_worker']['cron'] ||= '*/1 * * * *'
+ Settings.cron_jobs['elastic_index_bulk_cron_worker']['job_class'] ||= 'ElasticIndexBulkCronWorker'
+ Settings.cron_jobs['sync_seat_link_worker'] ||= Settingslogic.new({})
+ Settings.cron_jobs['sync_seat_link_worker']['cron'] ||= "#{rand(60)} 0 * * *"
+ Settings.cron_jobs['sync_seat_link_worker']['job_class'] = 'SyncSeatLinkWorker'
end
#
@@ -551,7 +569,7 @@ Settings.gitlab_shell['receive_pack'] = true if Settings.gitlab_shell['receive
Settings.gitlab_shell['upload_pack'] = true if Settings.gitlab_shell['upload_pack'].nil?
Settings.gitlab_shell['ssh_host'] ||= Settings.gitlab.ssh_host
Settings.gitlab_shell['ssh_port'] ||= 22
-Settings.gitlab_shell['ssh_user'] ||= Settings.gitlab.user
+Settings.gitlab_shell['ssh_user'] = Settings.gitlab.ssh_user
Settings.gitlab_shell['owner_group'] ||= Settings.gitlab.user
Settings.gitlab_shell['ssh_path_prefix'] ||= Settings.__send__(:build_gitlab_shell_ssh_path_prefix)
Settings.gitlab_shell['git_timeout'] ||= 10800
diff --git a/config/initializers/5_backend.rb b/config/initializers/5_backend.rb
index 482613dacc9..46854af9b55 100644
--- a/config/initializers/5_backend.rb
+++ b/config/initializers/5_backend.rb
@@ -1,6 +1,6 @@
unless Rails.env.test?
required_version = Gitlab::VersionInfo.parse(Gitlab::Shell.version_required)
- current_version = Gitlab::VersionInfo.parse(Gitlab::Shell.new.version)
+ current_version = Gitlab::VersionInfo.parse(Gitlab::Shell.version)
unless current_version.valid? && required_version <= current_version
warn "WARNING: This version of GitLab depends on gitlab-shell #{required_version}, but you're running #{current_version}. Please update gitlab-shell."
diff --git a/config/initializers/8_devise.rb b/config/initializers/8_devise.rb
index 6ed56598e15..3daddb10b29 100644
--- a/config/initializers/8_devise.rb
+++ b/config/initializers/8_devise.rb
@@ -226,9 +226,9 @@ Devise.setup do |config|
manager.failure_app = Gitlab::DeviseFailure
end
- if Gitlab::Auth::LDAP::Config.enabled?
- Gitlab::Auth::LDAP::Config.providers.each do |provider|
- ldap_config = Gitlab::Auth::LDAP::Config.new(provider)
+ if Gitlab::Auth::Ldap::Config.enabled?
+ Gitlab::Auth::Ldap::Config.providers.each do |provider|
+ ldap_config = Gitlab::Auth::Ldap::Config.new(provider)
config.omniauth(provider, ldap_config.omniauth_options)
end
end
diff --git a/config/initializers/9_fast_gettext.rb b/config/initializers/9_fast_gettext.rb
index fd0167aa476..f836e6e971d 100644
--- a/config/initializers/9_fast_gettext.rb
+++ b/config/initializers/9_fast_gettext.rb
@@ -1,9 +1,2 @@
-FastGettext.add_text_domain 'gitlab',
- path: File.join(Rails.root, 'locale'),
- type: :po,
- ignore_fuzzy: true
-FastGettext.default_text_domain = 'gitlab'
FastGettext.default_available_locales = Gitlab::I18n.available_locales
-FastGettext.default_locale = :en
-
I18n.available_locales = Gitlab::I18n.available_locales
diff --git a/config/initializers/actioncable.rb b/config/initializers/actioncable.rb
new file mode 100644
index 00000000000..ed96f965150
--- /dev/null
+++ b/config/initializers/actioncable.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+Rails.application.configure do
+ # Prevents the default engine from being mounted because
+ # we're running ActionCable as a standalone server
+ config.action_cable.mount_path = nil
+ config.action_cable.url = Gitlab::Utils.append_path(Gitlab.config.gitlab.relative_url_root, '/-/cable')
+end
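
Gitlab::Utils.append_path joins its two arguments with a single slash regardless of leading or trailing slashes, so the cable endpoint stays stable with or without a relative URL root. A sketch of the expected results (the exact behaviour lives in lib/gitlab/utils.rb):

    Gitlab::Utils.append_path(nil, '/-/cable')         # => "/-/cable"
    Gitlab::Utils.append_path('/gitlab', '/-/cable')   # => "/gitlab/-/cable"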
diff --git a/config/initializers/active_record_force_reconnects.rb b/config/initializers/active_record_force_reconnects.rb
new file mode 100644
index 00000000000..73dfaf5e121
--- /dev/null
+++ b/config/initializers/active_record_force_reconnects.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+Gitlab::Database::ConnectionTimer.configure do |config|
+ config.interval = Rails.application.config_for(:database)[:force_reconnect_interval]
+end
+
+ActiveRecord::ConnectionAdapters::PostgreSQLAdapter.prepend(Gitlab::Database::PostgresqlAdapter::ForceDisconnectableMixin)
diff --git a/config/initializers/active_record_lifecycle.rb b/config/initializers/active_record_lifecycle.rb
index 2cf0f0439a9..493d328b93e 100644
--- a/config/initializers/active_record_lifecycle.rb
+++ b/config/initializers/active_record_lifecycle.rb
@@ -14,6 +14,8 @@ end
if defined?(ActiveRecord::Base)
Gitlab::Cluster::LifecycleEvents.on_before_fork do
+ raise 'ActiveRecord connection not established. Unable to start.' unless Gitlab::Database.exists?
+
# the following is highly recommended for Rails + "preload_app true"
# as there's no need for the master process to hold a connection
ActiveRecord::Base.connection.disconnect!
diff --git a/config/initializers/console_message.rb b/config/initializers/console_message.rb
index 7272583f262..523a3898043 100644
--- a/config/initializers/console_message.rb
+++ b/config/initializers/console_message.rb
@@ -5,7 +5,7 @@ if Gitlab::Runtime.console?
puts '-' * 80
puts " GitLab:".ljust(justify) + "#{Gitlab::VERSION} (#{Gitlab.revision}) #{Gitlab.ee? ? 'EE' : 'FOSS'}"
- puts " GitLab Shell:".ljust(justify) + "#{Gitlab::VersionInfo.parse(Gitlab::Shell.new.version)}"
+ puts " GitLab Shell:".ljust(justify) + "#{Gitlab::VersionInfo.parse(Gitlab::Shell.version)}"
if Gitlab::Database.exists?
puts " #{Gitlab::Database.human_adapter_name}:".ljust(justify) + Gitlab::Database.version
diff --git a/config/initializers/graphql.rb b/config/initializers/graphql.rb
index 44a9644f481..f1bc289f1f0 100644
--- a/config/initializers/graphql.rb
+++ b/config/initializers/graphql.rb
@@ -5,9 +5,3 @@ GraphQL::Field.accepts_definitions(authorize: GraphQL::Define.assign_metadata_ke
GraphQL::Schema::Object.accepts_definition(:authorize)
GraphQL::Schema::Field.accepts_definition(:authorize)
-
-Gitlab::Application.config.after_initialize do
- GitlabSchema.middleware << GraphQL::Schema::TimeoutMiddleware.new(max_seconds: Gitlab.config.gitlab.graphql_timeout) do |timeout_error, query|
- Gitlab::GraphqlLogger.error(message: timeout_error.to_s, query: query.query_string, query_variables: query.provided_variables)
- end
-end
diff --git a/config/initializers/lograge.rb b/config/initializers/lograge.rb
index fb93c3a6e12..6ba2fa39aa6 100644
--- a/config/initializers/lograge.rb
+++ b/config/initializers/lograge.rb
@@ -20,38 +20,6 @@ unless Gitlab::Runtime.sidekiq?
config.lograge.ignore_actions = ['Gitlab::RequestForgeryProtection::Controller#index']
# Add request parameters to log output
- config.lograge.custom_options = lambda do |event|
- params = event.payload[:params]
- .except(*%w(controller action format))
- .each_pair
- .map { |k, v| { key: k, value: v } }
-
- payload = {
- time: Time.now.utc.iso8601(3),
- params: Gitlab::Utils::LogLimitedArray.log_limited_array(params),
- remote_ip: event.payload[:remote_ip],
- user_id: event.payload[:user_id],
- username: event.payload[:username],
- ua: event.payload[:ua],
- queue_duration: event.payload[:queue_duration]
- }
-
- ::Gitlab::InstrumentationHelper.add_instrumentation_data(payload)
-
- payload[:response] = event.payload[:response] if event.payload[:response]
- payload[:etag_route] = event.payload[:etag_route] if event.payload[:etag_route]
- payload[Labkit::Correlation::CorrelationId::LOG_KEY] = Labkit::Correlation::CorrelationId.current_id
-
- if cpu_s = Gitlab::Metrics::System.thread_cpu_duration(::Gitlab::RequestContext.instance.start_thread_cpu_time)
- payload[:cpu_s] = cpu_s
- end
-
- # https://github.com/roidrage/lograge#logging-errors--exceptions
- exception = event.payload[:exception_object]
-
- ::Gitlab::ExceptionLogFormatter.format!(exception, payload)
-
- payload
- end
+ config.lograge.custom_options = Gitlab::Lograge::CustomOptions
end
end
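
The inlined lambda is replaced by a callable object; Lograge only requires that custom_options respond to call(event) and return a hash. A minimal sketch of that shape (the real class lives in lib/gitlab/lograge/custom_options.rb and keeps the fields removed above):

    require 'time'

    module Gitlab
      module Lograge
        module CustomOptions
          # Lograge invokes this with the ActiveSupport::Notifications event for each request
          def self.call(event)
            {
              time: Time.now.utc.iso8601(3),
              params: event.payload[:params],
              remote_ip: event.payload[:remote_ip]
            }
          end
        end
      end
    end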
diff --git a/config/initializers/omniauth.rb b/config/initializers/omniauth.rb
index ef23ca065c6..a2720ab9986 100644
--- a/config/initializers/omniauth.rb
+++ b/config/initializers/omniauth.rb
@@ -1,6 +1,6 @@
-if Gitlab::Auth::LDAP::Config.enabled?
+if Gitlab::Auth::Ldap::Config.enabled?
module OmniAuth::Strategies
- Gitlab::Auth::LDAP::Config.available_servers.each do |server|
+ Gitlab::Auth::Ldap::Config.available_servers.each do |server|
# do not redeclare LDAP
next if server['provider_name'] == 'ldap'
@@ -16,3 +16,7 @@ OmniAuth.config.allowed_request_methods << :get if Gitlab.config.omniauth.auto_s
OmniAuth.config.before_request_phase do |env|
Gitlab::RequestForgeryProtection.call(env)
end
+
+# Use json formatter
+OmniAuth.config.logger.formatter = Gitlab::OmniauthLogging::JSONFormatter.new
+OmniAuth.config.logger.level = Logger::ERROR if Rails.env.production?
diff --git a/config/initializers/sidekiq.rb b/config/initializers/sidekiq.rb
index b90a04a19e1..fa4fc2d2c7b 100644
--- a/config/initializers/sidekiq.rb
+++ b/config/initializers/sidekiq.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'sidekiq/web'
def enable_reliable_fetch?
@@ -34,15 +36,6 @@ use_sidekiq_legacy_memory_killer = !use_sidekiq_daemon_memory_killer
use_request_store = ENV.fetch('SIDEKIQ_REQUEST_STORE', 1).to_i.nonzero?
Sidekiq.configure_server do |config|
- config.redis = queues_config_hash
-
- config.server_middleware(&Gitlab::SidekiqMiddleware.server_configurator({
- metrics: Settings.monitoring.sidekiq_exporter,
- arguments_logger: ENV['SIDEKIQ_LOG_ARGUMENTS'] && !enable_json_logs,
- memory_killer: enable_sidekiq_memory_killer && use_sidekiq_legacy_memory_killer,
- request_store: use_request_store
- }))
-
if enable_json_logs
Sidekiq.logger.formatter = Gitlab::SidekiqLogging::JSONFormatter.new
config.options[:job_logger] = Gitlab::SidekiqLogging::StructuredLogger
@@ -52,6 +45,15 @@ Sidekiq.configure_server do |config|
config.error_handlers << Gitlab::SidekiqLogging::ExceptionHandler.new
end
+ config.redis = queues_config_hash
+
+ config.server_middleware(&Gitlab::SidekiqMiddleware.server_configurator({
+ metrics: Settings.monitoring.sidekiq_exporter,
+ arguments_logger: ENV['SIDEKIQ_LOG_ARGUMENTS'] && !enable_json_logs,
+ memory_killer: enable_sidekiq_memory_killer && use_sidekiq_legacy_memory_killer,
+ request_store: use_request_store
+ }))
+
config.client_middleware(&Gitlab::SidekiqMiddleware.client_configurator)
config.on :startup do
@@ -71,6 +73,8 @@ Sidekiq.configure_server do |config|
Sidekiq::ReliableFetch.setup_reliable_fetch!(config)
end
+ Gitlab.config.load_dynamic_cron_schedules!
+
# Sidekiq-cron: load recurring jobs from gitlab.yml
# UGLY Hack to get nested hash from settingslogic
cron_jobs = JSON.parse(Gitlab.config.cron_jobs.to_json)
@@ -104,6 +108,11 @@ end
Sidekiq.configure_client do |config|
config.redis = queues_config_hash
+ # We only need to do this for other clients. If Sidekiq-server is the
+ # client scheduling jobs, we have access to the regular sidekiq logger that
+ # writes to STDOUT
+ Sidekiq.logger = Gitlab::SidekiqLogging::ClientLogger.build
+ Sidekiq.logger.formatter = Gitlab::SidekiqLogging::JSONFormatter.new if enable_json_logs
config.client_middleware(&Gitlab::SidekiqMiddleware.client_configurator)
end
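
Gitlab.config.load_dynamic_cron_schedules! (called in the configure_server block above and defined in config/settings.rb later in this diff) fills in the usage-ping schedule just before sidekiq-cron reads the job table, instead of computing it when the settings file is loaded. Roughly:

    # Sketch of the ordering inside the Sidekiq server block:
    Settings.cron_jobs['gitlab_usage_ping_worker']['cron']    # => nil, left unset by 1_settings.rb
    Gitlab.config.load_dynamic_cron_schedules!                 # derives it via cron_for_usage_ping
    cron_jobs = JSON.parse(Gitlab.config.cron_jobs.to_json)    # now includes the resolved schedule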
diff --git a/config/initializers/sidekiq_cluster.rb b/config/initializers/sidekiq_cluster.rb
index baa7495aa29..4622984fe0c 100644
--- a/config/initializers/sidekiq_cluster.rb
+++ b/config/initializers/sidekiq_cluster.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-if ENV['ENABLE_SIDEKIQ_CLUSTER'] && Gitlab.ee?
+if ENV['ENABLE_SIDEKIQ_CLUSTER']
Thread.new do
Thread.current.abort_on_exception = true
diff --git a/config/initializers/tracing.rb b/config/initializers/tracing.rb
index aaf74eb4cd3..f26fb18f3ea 100644
--- a/config/initializers/tracing.rb
+++ b/config/initializers/tracing.rb
@@ -5,22 +5,6 @@ if Labkit::Tracing.enabled?
config.middleware.insert_after Labkit::Middleware::Rack, ::Labkit::Tracing::RackMiddleware
end
- # Instrument the Sidekiq client
- Sidekiq.configure_client do |config|
- config.client_middleware do |chain|
- chain.add Labkit::Tracing::Sidekiq::ClientMiddleware
- end
- end
-
- # Instrument Sidekiq server calls when running Sidekiq server
- if Gitlab::Runtime.sidekiq?
- Sidekiq.configure_server do |config|
- config.server_middleware do |chain|
- chain.add Labkit::Tracing::Sidekiq::ServerMiddleware
- end
- end
- end
-
# Instrument Redis
Labkit::Tracing::Redis.instrument
diff --git a/config/initializers_before_autoloader/000_inflections.rb b/config/initializers_before_autoloader/000_inflections.rb
new file mode 100644
index 00000000000..1fabce9a57e
--- /dev/null
+++ b/config/initializers_before_autoloader/000_inflections.rb
@@ -0,0 +1,32 @@
+# Be sure to restart your server when you modify this file.
+
+# Add new inflection rules using the following format
+# (all these examples are active by default):
+# ActiveSupport::Inflector.inflections do |inflect|
+# inflect.plural /^(ox)$/i, '\1en'
+# inflect.singular /^(ox)en/i, '\1'
+# inflect.irregular 'person', 'people'
+# inflect.uncountable %w( fish sheep )
+# end
+#
+ActiveSupport::Inflector.inflections do |inflect|
+ inflect.uncountable %w(
+ award_emoji
+ container_repository_registry
+ design_registry
+ event_log
+ file_registry
+ group_view
+ job_artifact_registry
+ lfs_object_registry
+ package_file_registry
+ project_auto_devops
+ project_registry
+ project_statistics
+ system_note_metadata
+ vulnerabilities_feedback
+ vulnerability_feedback
+ )
+ inflect.acronym 'EE'
+ inflect.acronym 'CSP'
+end
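
Registering 'CSP' as an acronym changes how ActiveSupport camelizes and underscores identifiers that contain it, for example (hypothetical identifier):

    'csp_report'.camelize     # => "CSPReport" instead of "CspReport"
    'CSPReport'.underscore    # => "csp_report"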
diff --git a/config/initializers_before_autoloader/001_fast_gettext.rb b/config/initializers_before_autoloader/001_fast_gettext.rb
new file mode 100644
index 00000000000..ede38450582
--- /dev/null
+++ b/config/initializers_before_autoloader/001_fast_gettext.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+FastGettext.add_text_domain 'gitlab',
+ path: File.join(Rails.root, 'locale'),
+ type: :po,
+ ignore_fuzzy: true
+FastGettext.default_text_domain = 'gitlab'
+FastGettext.default_locale = :en
diff --git a/config/initializers_before_autoloader/100_patch_omniauth_saml.rb b/config/initializers_before_autoloader/100_patch_omniauth_saml.rb
new file mode 100644
index 00000000000..d46842ed9a0
--- /dev/null
+++ b/config/initializers_before_autoloader/100_patch_omniauth_saml.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'omniauth/strategies/saml'
+
+module OmniAuth
+ module Strategies
+ class SAML
+ # NOTE: This method duplicates code from omniauth-saml
+ # so that we can access authn_request to store it
+ # See: https://github.com/omniauth/omniauth-saml/issues/172
+ def request_phase
+ authn_request = OneLogin::RubySaml::Authrequest.new
+
+ store_authn_request_id(authn_request)
+
+ with_settings do |settings|
+ redirect(authn_request.create(settings, additional_params_for_authn_request))
+ end
+ end
+
+ private
+
+ def store_authn_request_id(authn_request)
+ Gitlab::Auth::Saml::OriginValidator.new(session).store_origin(authn_request)
+ end
+ end
+ end
+end
diff --git a/config/locales/carrierwave.en.yml b/config/locales/carrierwave.en.yml
index 12619226460..864ec8fd73f 100644
--- a/config/locales/carrierwave.en.yml
+++ b/config/locales/carrierwave.en.yml
@@ -6,8 +6,8 @@ en:
carrierwave_download_error: could not be downloaded
extension_whitelist_error: "You are not allowed to upload %{extension} files, allowed types: %{allowed_types}"
extension_blacklist_error: "You are not allowed to upload %{extension} files, prohibited types: %{prohibited_types}"
- content_type_whitelist_error: "You are not allowed to upload %{content_type} files"
- content_type_blacklist_error: "You are not allowed to upload %{content_type} files"
+ content_type_whitelist_error: "file format is not supported. Please try one of the following supported formats: %{allowed_types}"
+ content_type_blacklist_error: "You are not allowed to upload %{content_type} files, prohibited types: %{allowed_types}"
rmagick_processing_error: "Failed to manipulate with rmagick, maybe it is not an image?"
mini_magick_processing_error: "Failed to manipulate with MiniMagick, maybe it is not an image? Original Error: %{e}"
min_size_error: "File size should be greater than %{min_size}"
diff --git a/config/locales/en.yml b/config/locales/en.yml
index dabcefba169..c95232ae540 100644
--- a/config/locales/en.yml
+++ b/config/locales/en.yml
@@ -20,6 +20,8 @@ en:
token: "Grafana HTTP API Token"
grafana_url: "Grafana API URL"
grafana_enabled: "Grafana integration enabled"
+ user/user_detail:
+ job_title: 'Job title'
views:
pagination:
previous: "Prev"
diff --git a/config/prometheus/common_metrics.yml b/config/prometheus/common_metrics.yml
index 314ee44ed71..85833cc1968 100644
--- a/config/prometheus/common_metrics.yml
+++ b/config/prometheus/common_metrics.yml
@@ -1,6 +1,74 @@
dashboard: 'Environment metrics'
priority: 1
panel_groups:
+- group: System metrics (Kubernetes)
+ priority: 15
+ panels:
+ - title: "Memory Usage (Total)"
+ type: "area-chart"
+ y_label: "Total Memory Used (GB)"
+ weight: 4
+ metrics:
+ - id: system_metrics_kubernetes_container_memory_total
+ query_range: 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) /1024/1024/1024'
+ label: Total (GB)
+ unit: GB
+ - title: "Core Usage (Total)"
+ type: "area-chart"
+ y_label: "Total Cores"
+ weight: 3
+ metrics:
+ - id: system_metrics_kubernetes_container_cores_total
+ query_range: 'avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job)'
+ label: Total (cores)
+ unit: "cores"
+ - title: "Memory Usage (Pod average)"
+ type: "line-chart"
+ y_label: "Memory Used per Pod (MB)"
+ weight: 2
+ metrics:
+ - id: system_metrics_kubernetes_container_memory_average
+ query_range: 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024'
+ label: Pod average (MB)
+ unit: MB
+ - title: "Canary: Memory Usage (Pod Average)"
+ type: "line-chart"
+ y_label: "Memory Used per Pod (MB)"
+ weight: 2
+ metrics:
+ - id: system_metrics_kubernetes_container_memory_average_canary
+ query_range: 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-canary-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-canary-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024'
+ label: Pod average (MB)
+ unit: MB
+ track: canary
+ - title: "Core Usage (Pod Average)"
+ type: "line-chart"
+ y_label: "Cores per Pod"
+ weight: 1
+ metrics:
+ - id: system_metrics_kubernetes_container_core_usage
+ query_range: 'avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job) / count(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}[15m])) by (pod_name))'
+ label: Pod average (cores)
+ unit: "cores"
+ - title: "Canary: Core Usage (Pod Average)"
+ type: "line-chart"
+ y_label: "Cores per Pod"
+ weight: 1
+ metrics:
+ - id: system_metrics_kubernetes_container_core_usage_canary
+ query_range: 'avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-canary-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job) / count(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-canary-(.*)",namespace="%{kube_namespace}"}[15m])) by (pod_name))'
+ label: Pod average (cores)
+ unit: "cores"
+ track: canary
+ - title: "Knative function invocations"
+ type: "area-chart"
+ y_label: "Invocations"
+ weight: 1
+ metrics:
+ - id: system_metrics_knative_function_invocation_count
+ query_range: 'sum(ceil(rate(istio_requests_total{destination_service_namespace="%{kube_namespace}", destination_service=~"%{function_name}.*"}[1m])*60))'
+ label: invocations / minute
+ unit: requests
# NGINX Ingress metrics for pre-0.16.0 versions
- group: Response metrics (NGINX Ingress VTS)
priority: 10
@@ -17,6 +85,8 @@ panel_groups:
- title: "Latency"
type: "area-chart"
y_label: "Latency (ms)"
+ y_axis:
+ format: milliseconds
weight: 1
metrics:
- id: response_metrics_nginx_ingress_latency_pod_average
@@ -26,6 +96,8 @@ panel_groups:
- title: "HTTP Error Rate"
type: "area-chart"
y_label: "HTTP Errors (%)"
+ y_axis:
+ format: percentHundred
weight: 1
metrics:
- id: response_metrics_nginx_ingress_http_error_rate
@@ -138,77 +210,11 @@ panel_groups:
- title: "HTTP Error Rate (Errors / Sec)"
type: "area-chart"
y_label: "HTTP 500 Errors / Sec"
+ y_axis:
+ precision: 0
weight: 1
metrics:
- id: response_metrics_nginx_http_error_rate
query_range: 'sum(rate(nginx_server_requests{code="5xx", %{environment_filter}}[2m]))'
label: HTTP Errors
unit: "errors / sec"
-- group: System metrics (Kubernetes)
- priority: 5
- panels:
- - title: "Memory Usage (Total)"
- type: "area-chart"
- y_label: "Total Memory Used (GB)"
- weight: 4
- metrics:
- - id: system_metrics_kubernetes_container_memory_total
- query_range: 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) /1024/1024/1024'
- label: Total (GB)
- unit: GB
- - title: "Core Usage (Total)"
- type: "area-chart"
- y_label: "Total Cores"
- weight: 3
- metrics:
- - id: system_metrics_kubernetes_container_cores_total
- query_range: 'avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job)'
- label: Total (cores)
- unit: "cores"
- - title: "Memory Usage (Pod average)"
- type: "line-chart"
- y_label: "Memory Used per Pod (MB)"
- weight: 2
- metrics:
- - id: system_metrics_kubernetes_container_memory_average
- query_range: 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024'
- label: Pod average (MB)
- unit: MB
- - title: "Canary: Memory Usage (Pod Average)"
- type: "line-chart"
- y_label: "Memory Used per Pod (MB)"
- weight: 2
- metrics:
- - id: system_metrics_kubernetes_container_memory_average_canary
- query_range: 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-canary-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-canary-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024'
- label: Pod average (MB)
- unit: MB
- track: canary
- - title: "Core Usage (Pod Average)"
- type: "line-chart"
- y_label: "Cores per Pod"
- weight: 1
- metrics:
- - id: system_metrics_kubernetes_container_core_usage
- query_range: 'avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job) / count(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}[15m])) by (pod_name))'
- label: Pod average (cores)
- unit: "cores"
- - title: "Canary: Core Usage (Pod Average)"
- type: "line-chart"
- y_label: "Cores per Pod"
- weight: 1
- metrics:
- - id: system_metrics_kubernetes_container_core_usage_canary
- query_range: 'avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-canary-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job) / count(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-canary-(.*)",namespace="%{kube_namespace}"}[15m])) by (pod_name))'
- label: Pod average (cores)
- unit: "cores"
- track: canary
- - title: "Knative function invocations"
- type: "area-chart"
- y_label: "Invocations"
- weight: 1
- metrics:
- - id: system_metrics_knative_function_invocation_count
- query_range: 'sum(ceil(rate(istio_requests_total{destination_service_namespace="%{kube_namespace}", destination_service=~"%{function_name}.*"}[1m])*60))'
- label: invocations / minute
- unit: requests
diff --git a/config/puma_actioncable.example.development.rb b/config/puma_actioncable.example.development.rb
new file mode 100644
index 00000000000..aef15da54f9
--- /dev/null
+++ b/config/puma_actioncable.example.development.rb
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+# -----------------------------------------------------------------------
+# This file is used by the GDK to generate a default config/puma_actioncable.rb file
+# Note that `/home/git` will be substituted for the actual GDK root
+# directory when this file is generated
+# -----------------------------------------------------------------------
+
+# Load "path" as a rackup file.
+#
+# The default is "cable/config.ru".
+#
+rackup 'cable/config.ru'
+pidfile '/home/git/gitlab/tmp/pids/puma_actioncable.pid'
+state_path '/home/git/gitlab/tmp/pids/puma_actioncable.state'
+
+## Uncomment the following lines if you would like to write Puma stdout & stderr
+## streams to a different location than the Rails logs.
+## When using GitLab Development Kit, by default, these logs will be consumed
+## by runit and can be accessed using `gdk tail rails-actioncable`
+# stdout_redirect '/home/git/gitlab/log/puma_actioncable.stdout.log',
+# '/home/git/gitlab/log/puma_actioncable.stderr.log',
+# true
+
+# Configure "min" to be the minimum number of threads to use to answer
+# requests and "max" the maximum.
+#
+# The default is "0, 16".
+#
+threads 1, 4
+
+# By default, workers accept all requests and queue them to pass to handlers.
+# When false, workers accept the number of simultaneous requests configured.
+#
+# Queueing requests generally improves performance, but can cause deadlocks if
+# the app is waiting on a request to itself. See https://github.com/puma/puma/issues/612
+#
+# When set to false, this may require a reverse proxy to handle slow clients and
+# queue requests before they reach Puma, because HTTP keepalive is disabled.
+queue_requests false
+
+# Bind the server to "url". "tcp://", "unix://" and "ssl://" are the only
+# accepted protocols.
+bind 'unix:///home/git/gitlab_actioncable.socket'
+
+workers 2
+
+require_relative "/home/git/gitlab/lib/gitlab/cluster/lifecycle_events"
+
+on_restart do
+ # Signal application hooks that we're about to restart
+ Gitlab::Cluster::LifecycleEvents.do_before_master_restart
+end
+
+before_fork do
+ # Signal to the puma killer
+ Gitlab::Cluster::PumaWorkerKillerInitializer.start @config.options unless ENV['DISABLE_PUMA_WORKER_KILLER']
+
+ # Signal application hooks that we're about to fork
+ Gitlab::Cluster::LifecycleEvents.do_before_fork
+end
+
+Gitlab::Cluster::LifecycleEvents.set_puma_options @config.options
+on_worker_boot do
+ # Signal application hooks of worker start
+ Gitlab::Cluster::LifecycleEvents.do_worker_start
+end
+
+# Preload the application before starting the workers; this conflicts with the
+# phased restart feature (off by default).
+
+preload_app!
+
+tag 'gitlab-actioncable-puma-worker'
+
+# Verifies that all workers have checked in to the master process within
+# the given timeout. If not the worker process will be restarted. Default
+# value is 60 seconds.
+#
+worker_timeout 60
+
+# Use json formatter
+require_relative "/home/git/gitlab/lib/gitlab/puma_logging/json_formatter"
+
+json_formatter = Gitlab::PumaLogging::JSONFormatter.new
+log_formatter do |str|
+ json_formatter.call(str)
+end
diff --git a/config/routes.rb b/config/routes.rb
index 16b15e5300a..466555eeee8 100644
--- a/config/routes.rb
+++ b/config/routes.rb
@@ -121,10 +121,7 @@ Rails.application.routes.draw do
draw :country
draw :country_state
draw :subscription
-
- constraints(-> (*) { Gitlab::Analytics.any_features_enabled? }) do
- draw :analytics
- end
+ draw :analytics
end
if ENV['GITLAB_CHAOS_SECRET'] || Rails.env.development? || Rails.env.test?
@@ -136,6 +133,9 @@ Rails.application.routes.draw do
get :kill
end
end
+
+ # Notification settings
+ resources :notification_settings, only: [:create, :update]
end
concern :clusterable do
@@ -184,9 +184,6 @@ Rails.application.routes.draw do
# Spam reports
resources :abuse_reports, only: [:new, :create]
- # Notification settings
- resources :notification_settings, only: [:create, :update]
-
resources :groups, only: [:index, :new, :create] do
post :preview_markdown
end
diff --git a/config/routes/admin.rb b/config/routes/admin.rb
index 5210b84c8ba..c92484316e4 100644
--- a/config/routes/admin.rb
+++ b/config/routes/admin.rb
@@ -33,7 +33,7 @@ namespace :admin do
resources :gitaly_servers, only: [:index]
namespace :serverless do
- resources :domains, only: [:index, :create, :update] do
+ resources :domains, only: [:index, :create, :update, :destroy] do
member do
post '/verify', to: 'domains#verify'
end
@@ -121,6 +121,11 @@ namespace :admin do
get '/', to: redirect('admin/application_settings/general'), as: nil
resources :services, only: [:index, :edit, :update]
+ resources :integrations, only: [:edit, :update, :test] do
+ member do
+ put :test
+ end
+ end
get :usage_data
put :reset_registration_token
diff --git a/config/routes/git_http.rb b/config/routes/git_http.rb
index 593f818e434..fb8119904ea 100644
--- a/config/routes/git_http.rb
+++ b/config/routes/git_http.rb
@@ -32,6 +32,14 @@ concern :lfsable do
end
end
+# Git route for personal and project snippets
+scope(path: ':namespace_id/:repository_id',
+ format: nil,
+ constraints: { namespace_id: Gitlab::PathRegex.personal_and_project_snippets_path_regex, repository_id: /\d+\.git/ },
+ module: :repositories) do
+ concerns :gitactionable
+end
+
scope(path: '*namespace_id/:repository_id',
format: nil,
constraints: { namespace_id: Gitlab::PathRegex.full_namespace_route_regex }) do
diff --git a/config/routes/group.rb b/config/routes/group.rb
index 68e239faf6d..1d51b3fb6fe 100644
--- a/config/routes/group.rb
+++ b/config/routes/group.rb
@@ -29,6 +29,7 @@ constraints(::Constraints::GroupUrlConstrainer.new) do
resource :ci_cd, only: [:show, :update], controller: 'ci_cd' do
put :reset_registration_token
patch :update_auto_devops
+ post :create_deploy_token, path: 'deploy_token/create'
end
end
@@ -49,6 +50,12 @@ constraints(::Constraints::GroupUrlConstrainer.new) do
end
end
+ resources :deploy_tokens, constraints: { id: /\d+/ }, only: [] do
+ member do
+ put :revoke
+ end
+ end
+
resource :avatar, only: [:destroy]
concerns :clusterable
diff --git a/config/routes/import.rb b/config/routes/import.rb
index 9fe2688de1e..57a1fab48e9 100644
--- a/config/routes/import.rb
+++ b/config/routes/import.rb
@@ -60,6 +60,7 @@ namespace :import do
resource :gitlab_project, only: [:create, :new] do
post :create
+ post :authorize
end
resource :manifest, only: [:create, :new], controller: :manifest do
diff --git a/config/routes/merge_requests.rb b/config/routes/merge_requests.rb
index f9670a5bf6e..fe58649b684 100644
--- a/config/routes/merge_requests.rb
+++ b/config/routes/merge_requests.rb
@@ -14,6 +14,7 @@ resources :merge_requests, concerns: :awardable, except: [:new, :create, :show],
post :rebase
get :test_reports
get :exposed_artifacts
+ get :coverage_reports
scope constraints: ->(req) { req.format == :json }, as: :json do
get :commits
diff --git a/config/routes/project.rb b/config/routes/project.rb
index f153082f118..c37b5528f71 100644
--- a/config/routes/project.rb
+++ b/config/routes/project.rb
@@ -68,7 +68,7 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do
end
namespace :settings do
- get :members, to: redirect("%{namespace_id}/%{project_id}/project_members")
+ get :members, to: redirect("%{namespace_id}/%{project_id}/-/project_members")
resource :ci_cd, only: [:show, :update], controller: 'ci_cd' do
post :reset_cache
@@ -79,7 +79,9 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do
resource :integrations, only: [:show]
resource :repository, only: [:show], controller: :repository do
- post :create_deploy_token, path: 'deploy_token/create'
+ # TODO: Move 'create_deploy_token' here to the ':ci_cd' resource above during 12.9.
+ # More details here: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/24102#note_287572556
+ post :create_deploy_token, path: 'deploy_token/create', to: 'ci_cd#create_deploy_token'
post :cleanup
end
end
@@ -169,6 +171,14 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do
resources :releases, only: [:index, :show, :edit], param: :tag, constraints: { tag: %r{[^/]+} } do
member do
get :evidence
+ get :downloads, path: 'downloads/*filepath', format: false
+ end
+ end
+
+ resources :logs, only: [:index] do
+ collection do
+ get :k8s
+ get :elasticsearch
end
end
@@ -252,7 +262,11 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do
end
namespace :performance_monitoring do
- resources :dashboards, only: [:create]
+ resources :dashboards, only: [:create] do
+ collection do
+ put '/:file_name', to: 'dashboards#update', constraints: { file_name: /.+\.yml/ }
+ end
+ end
end
namespace :error_tracking do
@@ -273,6 +287,7 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do
end
end
+ draw :issues
draw :merge_requests
# The wiki and repository routing contains wildcard characters so
@@ -280,6 +295,12 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do
draw :repository_scoped
draw :repository
draw :wiki
+
+ namespace :import do
+ resource :jira, only: [:show], controller: :jira do
+ post :import
+ end
+ end
end
# End of the /-/ scope.
@@ -395,12 +416,7 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do
# Unscoped route. It will be replaced with redirect to /-/issues/
# Issue https://gitlab.com/gitlab-org/gitlab/issues/118849
- draw :issues
-
- # To ensure an old unscoped routing is used for the UI we need to
- # add prefix 'as' to the scope routing and place it below original routing.
- # Issue https://gitlab.com/gitlab-org/gitlab/issues/118849
- scope '-', as: 'scoped' do
+ scope as: 'deprecated' do
draw :issues
end
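Editor's note: among the project route changes above, the new `performance_monitoring` dashboards route restricts `:file_name` with `/.+\.yml/`. A quick standalone check of that constraint (regex copied from the diff; the sample file names are invented):

```
FILE_NAME = /.+\.yml/

['config/dashboard.yml', 'dashboard.yml', 'dashboard.json'].each do |name|
  puts "#{name}: #{name.match?(FILE_NAME)}"
end
# Only the .yml names satisfy the constraint, so non-YAML paths never reach dashboards#update.
```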
diff --git a/config/routes/user.rb b/config/routes/user.rb
index fe7a0aa3233..9db3a71a270 100644
--- a/config/routes/user.rb
+++ b/config/routes/user.rb
@@ -10,9 +10,9 @@ def override_omniauth(provider, controller, path_prefix = '/users/auth')
end
# Use custom controller for LDAP omniauth callback
-if Gitlab::Auth::LDAP::Config.sign_in_enabled?
+if Gitlab::Auth::Ldap::Config.sign_in_enabled?
devise_scope :user do
- Gitlab::Auth::LDAP::Config.available_servers.each do |server|
+ Gitlab::Auth::Ldap::Config.available_servers.each do |server|
override_omniauth(server['provider_name'], 'ldap/omniauth_callbacks')
end
end
diff --git a/config/settings.rb b/config/settings.rb
index 767c6c56337..144a068ef2a 100644
--- a/config/settings.rb
+++ b/config/settings.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'settingslogic'
require 'digest/md5'
@@ -42,7 +44,8 @@ class Settings < Settingslogic
end
def build_gitlab_shell_ssh_path_prefix
- user_host = "#{gitlab_shell.ssh_user}@#{gitlab_shell.ssh_host}"
+ user = "#{gitlab_shell.ssh_user}@" unless gitlab_shell.ssh_user.empty?
+ user_host = "#{user}#{gitlab_shell.ssh_host}"
if gitlab_shell.ssh_port != 22
"ssh://#{user_host}:#{gitlab_shell.ssh_port}/"
@@ -143,6 +146,10 @@ class Settings < Settingslogic
Gitlab::Application.secrets.db_key_base
end
+ def load_dynamic_cron_schedules!
+ cron_jobs['gitlab_usage_ping_worker']['cron'] ||= cron_for_usage_ping
+ end
+
private
def base_url(config)
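Editor's note: the `build_gitlab_shell_ssh_path_prefix` change above drops the leading `user@` when `ssh_user` is blank. A minimal runnable sketch of the new behaviour follows; the `else` branch is not shown in the diff and is assumed here from the customary scp-style clone prefix, and the settings object is faked with `OpenStruct`.

```
require 'ostruct'

# Stand-in for Settings#build_gitlab_shell_ssh_path_prefix; the else branch is an assumption.
def build_gitlab_shell_ssh_path_prefix(gitlab_shell)
  user = "#{gitlab_shell.ssh_user}@" unless gitlab_shell.ssh_user.empty?
  user_host = "#{user}#{gitlab_shell.ssh_host}"

  if gitlab_shell.ssh_port != 22
    "ssh://#{user_host}:#{gitlab_shell.ssh_port}/"
  else
    "#{user_host}:"
  end
end

with_user    = OpenStruct.new(ssh_user: 'git', ssh_host: 'gitlab.example.com', ssh_port: 22)
without_user = OpenStruct.new(ssh_user: '',    ssh_host: 'gitlab.example.com', ssh_port: 2222)

puts build_gitlab_shell_ssh_path_prefix(with_user)    # => "git@gitlab.example.com:"
puts build_gitlab_shell_ssh_path_prefix(without_user) # => "ssh://gitlab.example.com:2222/"
```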
diff --git a/config/sidekiq_queues.yml b/config/sidekiq_queues.yml
index 1cb19d18a0d..2dc2f33e71e 100644
--- a/config/sidekiq_queues.yml
+++ b/config/sidekiq_queues.yml
@@ -28,6 +28,10 @@
- 1
- - admin_emails
- 1
+- - analytics_code_review_metrics
+ - 1
+- - authorized_keys
+ - 2
- - authorized_projects
- 2
- - auto_devops
@@ -228,6 +232,10 @@
- 2
- - service_desk_email_receiver
- 1
+- - status_page_publish_incident
+ - 1
+- - sync_seat_link_request
+ - 1
- - system_hook_push
- 1
- - todos_destroyer
@@ -244,3 +252,5 @@
- 1
- - web_hook
- 1
+- - x509_certificate_revoke
+ - 1
diff --git a/config/webpack.config.js b/config/webpack.config.js
index 639de770fd8..e220482d769 100644
--- a/config/webpack.config.js
+++ b/config/webpack.config.js
@@ -117,23 +117,18 @@ if (IS_EE) {
});
}
-// if there is a compiled DLL with a matching hash string, use it
let dll;
if (VENDOR_DLL && !IS_PRODUCTION) {
const dllHash = vendorDllHash();
const dllCachePath = path.join(ROOT_PATH, `tmp/cache/webpack-dlls/${dllHash}`);
- if (fs.existsSync(dllCachePath)) {
- console.log(`Using vendor DLL found at: ${dllCachePath}`);
- dll = {
- manifestPath: path.join(dllCachePath, 'vendor.dll.manifest.json'),
- cacheFrom: dllCachePath,
- cacheTo: path.join(ROOT_PATH, `public/assets/webpack/dll.${dllHash}/`),
- publicPath: `dll.${dllHash}/vendor.dll.bundle.js`,
- };
- } else {
- console.log(`Warning: No vendor DLL found at: ${dllCachePath}. DllPlugin disabled.`);
- }
+ dll = {
+ manifestPath: path.join(dllCachePath, 'vendor.dll.manifest.json'),
+ cacheFrom: dllCachePath,
+ cacheTo: path.join(ROOT_PATH, `public/assets/webpack/dll.${dllHash}/`),
+ publicPath: `dll.${dllHash}/vendor.dll.bundle.js`,
+ exists: null,
+ };
}
module.exports = {
@@ -166,7 +161,9 @@ module.exports = {
},
{
test: /\.js$/,
- exclude: path => /node_modules|vendor[\\/]assets/.test(path) && !/\.vue\.js/.test(path),
+ exclude: path =>
+ /node_modules\/(?!tributejs)|node_modules|vendor[\\/]assets/.test(path) &&
+ !/\.vue\.js/.test(path),
loader: 'babel-loader',
options: {
cacheDirectory: path.join(CACHE_PATH, 'babel-loader'),
@@ -314,6 +311,51 @@ module.exports = {
jQuery: 'jquery',
}),
+ // if DLLs are enabled, detect whether the DLL exists and create it automatically if necessary
+ dll && {
+ apply(compiler) {
+ compiler.hooks.beforeCompile.tapAsync('DllAutoCompilePlugin', (params, callback) => {
+ if (dll.exists) {
+ callback();
+ } else if (fs.existsSync(dll.manifestPath)) {
+ console.log(`Using vendor DLL found at: ${dll.cacheFrom}`);
+ dll.exists = true;
+ callback();
+ } else {
+ console.log(
+ `Warning: No vendor DLL found at: ${dll.cacheFrom}. Compiling DLL automatically.`,
+ );
+
+ const dllConfig = require('./webpack.vendor.config.js');
+ const dllCompiler = webpack(dllConfig);
+
+ dllCompiler.run((err, stats) => {
+ if (err) {
+ return callback(err);
+ }
+
+ const info = stats.toJson();
+
+ if (stats.hasErrors()) {
+ console.error(info.errors.join('\n\n'));
+ return callback('DLL not compiled successfully.');
+ }
+
+ if (stats.hasWarnings()) {
+ console.warn(info.warnings.join('\n\n'));
+ console.warn('DLL compiled with warnings.');
+ } else {
+ console.log('DLL compiled successfully.');
+ }
+
+ dll.exists = true;
+ callback();
+ });
+ }
+ });
+ },
+ },
+
// reference our compiled DLL modules
dll &&
new webpack.DllReferencePlugin({
diff --git a/config/webpack.vendor.config.js b/config/webpack.vendor.config.js
index 90736349d91..7ecb9b06fdd 100644
--- a/config/webpack.vendor.config.js
+++ b/config/webpack.vendor.config.js
@@ -15,6 +15,9 @@ module.exports = {
extensions: ['.js'],
},
+ // ensure output is not generated when errors are encountered
+ bail: true,
+
context: ROOT_PATH,
entry: {
diff --git a/danger/database/Dangerfile b/danger/database/Dangerfile
index 16740cb867d..a0a2959bab5 100644
--- a/danger/database/Dangerfile
+++ b/danger/database/Dangerfile
@@ -29,6 +29,8 @@ DB_FILES_MESSAGE = <<~MSG
The following files require a review from the Database team:
MSG
+DATABASE_APPROVED_LABEL = 'database::approved'
+
non_geo_db_schema_updated = !git.modified_files.grep(%r{\Adb/schema\.rb}).empty?
geo_db_schema_updated = !git.modified_files.grep(%r{\Aee/db/geo/schema\.rb}).empty?
@@ -46,6 +48,7 @@ if geo_migration_created && !geo_db_schema_updated
end
return unless gitlab_danger.ci?
+return if gitlab.mr_labels.include?(DATABASE_APPROVED_LABEL)
db_paths_to_review = helper.changes_by_category[:database]
diff --git a/danger/gemfile/Dangerfile b/danger/gemfile/Dangerfile
deleted file mode 100644
index 07c4c07cfe8..00000000000
--- a/danger/gemfile/Dangerfile
+++ /dev/null
@@ -1,36 +0,0 @@
-GEMFILE_LOCK_NOT_UPDATED_MESSAGE_SHORT = <<~MSG.freeze
-%<gemfile>s was updated but %<gemfile_lock>s wasn't updated.
-MSG
-
-GEMFILE_LOCK_NOT_UPDATED_MESSAGE_FULL = <<~MSG.freeze
-**#{GEMFILE_LOCK_NOT_UPDATED_MESSAGE_SHORT}**
-
-Usually, when %<gemfile>s is updated, you should run
-```
-bundle install
-```
-
-or
-
-```
-bundle update <the-added-or-updated-gem>
-```
-
-and commit the %<gemfile_lock>s changes.
-MSG
-
-gemfile_modified = git.modified_files.include?("Gemfile")
-gemfile_lock_modified = git.modified_files.include?("Gemfile.lock")
-
-if gemfile_modified && !gemfile_lock_modified
- gitlab_danger = GitlabDanger.new(helper.gitlab_helper)
-
- format_str = gitlab_danger.ci? ? GEMFILE_LOCK_NOT_UPDATED_MESSAGE_FULL : GEMFILE_LOCK_NOT_UPDATED_MESSAGE_SHORT
-
- message = format(format_str,
- gemfile: gitlab_danger.html_link("Gemfile"),
- gemfile_lock: gitlab_danger.html_link("Gemfile.lock")
- )
-
- warn(message)
-end
diff --git a/danger/karma/Dangerfile b/danger/karma/Dangerfile
new file mode 100644
index 00000000000..6d692a89e13
--- /dev/null
+++ b/danger/karma/Dangerfile
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+# rubocop:disable Style/SignalException
+
+def get_karma_files(files)
+ files.select do |file|
+ file.start_with?('ee/spec/javascripts', 'spec/javascripts') &&
+ !file.end_with?('browser_spec.js')
+ end
+end
+
+new_karma_files = get_karma_files(git.added_files.to_a)
+
+unless new_karma_files.empty?
+
+ if GitlabDanger.new(helper.gitlab_helper).ci?
+ markdown(<<~MARKDOWN)
+ ## New karma spec file
+
+ New frontend specs ([except `browser_specs`](https://gitlab.com/gitlab-org/gitlab/blob/3b6fe2f1077eedb0b8aff02a7350234f0b7dc4f9/spec/javascripts/lib/utils/browser_spec.js#L2)) should be
+ [written in jest](https://docs.gitlab.com/ee/development/testing_guide/frontend_testing.html#jest).
+
+ You have created the following tests, please migrate them over to jest:
+
+ * #{new_karma_files.map { |path| "`#{path}`" }.join("\n* ")}
+ MARKDOWN
+ end
+
+ fail "You have created a new karma spec file"
+
+end
+
+changed_karma_files = get_karma_files(helper.all_changed_files) - new_karma_files
+
+return if changed_karma_files.empty?
+
+warn 'You have edited karma spec files. Please consider migrating them to jest.'
+
+if GitlabDanger.new(helper.gitlab_helper).ci?
+ markdown(<<~MARKDOWN)
+ ## Edited karma files
+
+ You have edited the following karma spec files. Please consider migrating them to jest:
+
+ * #{changed_karma_files.map { |path| "`#{path}`" }.join("\n* ")}
+
+ In order to align with our Iteration value, migration can also be done as a follow-up.
+
+ For more information: [Jestodus epic](https://gitlab.com/groups/gitlab-org/-/epics/895)
+ MARKDOWN
+end
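Editor's note: the `get_karma_files` filter in the new Dangerfile above keeps only specs under the karma directories while skipping `browser_spec.js`. A small self-contained example of how that filter behaves (the paths below are invented):

```
files = [
  'spec/javascripts/boards/board_spec.js',
  'spec/javascripts/lib/utils/browser_spec.js',
  'spec/frontend/boards/board_spec.js'
]

karma_files = files.select do |file|
  file.start_with?('ee/spec/javascripts', 'spec/javascripts') &&
    !file.end_with?('browser_spec.js')
end

puts karma_files # => only spec/javascripts/boards/board_spec.js
```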
diff --git a/danger/metadata/Dangerfile b/danger/metadata/Dangerfile
index 5edd134cbab..b3313674951 100644
--- a/danger/metadata/Dangerfile
+++ b/danger/metadata/Dangerfile
@@ -5,7 +5,8 @@ THROUGHPUT_LABELS = [
'security',
'bug',
'feature',
- 'backstage'
+ 'backstage',
+ 'documentation'
].freeze
if gitlab.mr_body.size < 5
diff --git a/danger/telemetry/Dangerfile b/danger/telemetry/Dangerfile
new file mode 100644
index 00000000000..68a226ef11b
--- /dev/null
+++ b/danger/telemetry/Dangerfile
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+TELEMETRY_CHANGED_FILES_MESSAGE = <<~MSG
+This merge request adds or changes files for which a
+review from the Data team and Telemetry team is recommended.
+@gitlab-org/growth/telemetry group is mentioned in order to notify team members.
+MSG
+
+usage_data_changed_files = git.modified_files.grep(%r{usage_data})
+
+if usage_data_changed_files.any?
+ warn format(TELEMETRY_CHANGED_FILES_MESSAGE)
+
+ USAGE_DATA_FILES_MESSAGE = <<~MSG
+ For the following files, a review from the [Data team and Telemetry team](https://gitlab.com/groups/gitlab-org/growth/telemetry/-/group_members?with_inherited_permissions=exclude) is recommended:
+ MSG
+
+ markdown(USAGE_DATA_FILES_MESSAGE + helper.markdown_list(usage_data_changed_files))
+end
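Editor's note: the telemetry Dangerfile above relies on `Array#grep` with `%r{usage_data}` to pick out the files that need a Data/Telemetry review. A tiny illustration with made-up file names:

```
changed = [
  'lib/gitlab/usage_data.rb',
  'spec/lib/gitlab/usage_data_spec.rb',
  'app/models/user.rb'
]

puts changed.grep(%r{usage_data})
# => lib/gitlab/usage_data.rb
#    spec/lib/gitlab/usage_data_spec.rb
```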
diff --git a/db/fixtures/development/11_keys.rb b/db/fixtures/development/11_keys.rb
index eeee2388d01..958d999f475 100644
--- a/db/fixtures/development/11_keys.rb
+++ b/db/fixtures/development/11_keys.rb
@@ -5,9 +5,9 @@ require './spec/support/sidekiq_middleware'
# gitlab-shell path set (yet) we need to disable this for these fixtures.
Sidekiq::Testing.disable! do
Gitlab::Seeder.quiet do
- # We want to run `add_to_shell` immediately instead of after the commit, so
+ # We want to run `add_to_authorized_keys` immediately instead of after the commit, so
# that it falls under `Sidekiq::Testing.disable!`.
- Key.skip_callback(:commit, :after, :add_to_shell)
+ Key.skip_callback(:commit, :after, :add_to_authorized_keys)
User.not_mass_generated.first(10).each do |user|
key = "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAIEAiPWx6WM4lhHNedGfBpPJNPpZ7yKu+dnn1SJejgt#{user.id + 100}6k6YjzGGphH2TUxwKzxcKDKKezwkpfnxPkSMkuEspGRt/aZZ9wa++Oi7Qkr8prgHc4soW6NUlfDzpvZK2H5E7eQaSeP3SAwGmQKUFHCddNaP0L+hM7zhFNzjFvpaMgJw0="
@@ -18,7 +18,7 @@ Sidekiq::Testing.disable! do
)
Sidekiq::Worker.skipping_transaction_check do
- key.add_to_shell
+ key.add_to_authorized_keys
end
print '.'
diff --git a/db/migrate/20180305144721_add_privileged_to_runner.rb b/db/migrate/20180305144721_add_privileged_to_runner.rb
index 1ad3c045d60..359498bf9b0 100644
--- a/db/migrate/20180305144721_add_privileged_to_runner.rb
+++ b/db/migrate/20180305144721_add_privileged_to_runner.rb
@@ -9,7 +9,7 @@ class AddPrivilegedToRunner < ActiveRecord::Migration[4.2]
disable_ddl_transaction!
def up
- add_column_with_default :clusters_applications_runners, :privileged, :boolean, default: true, allow_null: false # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default :clusters_applications_runners, :privileged, :boolean, default: true, allow_null: false
end
def down
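Editor's note: many of the migrations in this patch simply drop a `# rubocop:disable Migration/AddColumnWithDefault` comment around `add_column_with_default`. For orientation, here is a rough, hedged sketch of what that GitLab migration helper does; the real implementation lives in `Gitlab::Database::MigrationHelpers`, and the parameter defaults and steps below are assumptions inferred from how the helper is called, not the actual code.

```
# Approximate sketch only, intended to run inside a migration class that
# includes Gitlab::Database::MigrationHelpers.
def add_column_with_default(table, column, type, default:, allow_null: true, limit: nil)
  add_column(table, column, type, limit: limit)               # add the column first
  change_column_default(table, column, default)               # cheap default for new rows
  update_column_in_batches(table, column, default)            # backfill existing rows in batches
  change_column_null(table, column, false) unless allow_null  # enforce NOT NULL last
end
```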
diff --git a/db/migrate/20180423204600_add_pages_access_level_to_project_feature.rb b/db/migrate/20180423204600_add_pages_access_level_to_project_feature.rb
index c841c7eb77b..0c536f917ce 100644
--- a/db/migrate/20180423204600_add_pages_access_level_to_project_feature.rb
+++ b/db/migrate/20180423204600_add_pages_access_level_to_project_feature.rb
@@ -5,7 +5,7 @@ class AddPagesAccessLevelToProjectFeature < ActiveRecord::Migration[4.2]
DOWNTIME = false
def up
- add_column_with_default(:project_features, :pages_access_level, :integer, default: ProjectFeature::PUBLIC, allow_null: false) # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default(:project_features, :pages_access_level, :integer, default: ProjectFeature::PUBLIC, allow_null: false)
change_column_default(:project_features, :pages_access_level, ProjectFeature::ENABLED)
end
diff --git a/db/migrate/20180529093006_ensure_remote_mirror_columns.rb b/db/migrate/20180529093006_ensure_remote_mirror_columns.rb
index 3c61729dca8..a0a1150f022 100644
--- a/db/migrate/20180529093006_ensure_remote_mirror_columns.rb
+++ b/db/migrate/20180529093006_ensure_remote_mirror_columns.rb
@@ -11,7 +11,7 @@ class EnsureRemoteMirrorColumns < ActiveRecord::Migration[4.2]
add_column :remote_mirrors, :remote_name, :string unless column_exists?(:remote_mirrors, :remote_name) # rubocop:disable Migration/AddLimitToStringColumns
unless column_exists?(:remote_mirrors, :only_protected_branches)
- add_column_with_default(:remote_mirrors, # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default(:remote_mirrors,
:only_protected_branches,
:boolean,
default: false,
diff --git a/db/migrate/20180601213245_add_deploy_strategy_to_project_auto_devops.rb b/db/migrate/20180601213245_add_deploy_strategy_to_project_auto_devops.rb
index 67d20b949d9..78a3617ec93 100644
--- a/db/migrate/20180601213245_add_deploy_strategy_to_project_auto_devops.rb
+++ b/db/migrate/20180601213245_add_deploy_strategy_to_project_auto_devops.rb
@@ -10,7 +10,7 @@ class AddDeployStrategyToProjectAutoDevops < ActiveRecord::Migration[4.2]
disable_ddl_transaction!
def up
- add_column_with_default :project_auto_devops, :deploy_strategy, :integer, default: 0, allow_null: false # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default :project_auto_devops, :deploy_strategy, :integer, default: 0, allow_null: false
end
def down
diff --git a/db/migrate/20180831164905_add_common_to_prometheus_metrics.rb b/db/migrate/20180831164905_add_common_to_prometheus_metrics.rb
index 6654e6d1957..5eb77d0480d 100644
--- a/db/migrate/20180831164905_add_common_to_prometheus_metrics.rb
+++ b/db/migrate/20180831164905_add_common_to_prometheus_metrics.rb
@@ -8,7 +8,7 @@ class AddCommonToPrometheusMetrics < ActiveRecord::Migration[4.2]
disable_ddl_transaction!
def up
- add_column_with_default(:prometheus_metrics, :common, :boolean, default: false) # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default(:prometheus_metrics, :common, :boolean, default: false)
end
def down
diff --git a/db/migrate/20180907015926_add_legacy_abac_to_cluster_providers_gcp.rb b/db/migrate/20180907015926_add_legacy_abac_to_cluster_providers_gcp.rb
index 8bfb0c5612a..c57611a0f7d 100644
--- a/db/migrate/20180907015926_add_legacy_abac_to_cluster_providers_gcp.rb
+++ b/db/migrate/20180907015926_add_legacy_abac_to_cluster_providers_gcp.rb
@@ -8,7 +8,7 @@ class AddLegacyAbacToClusterProvidersGcp < ActiveRecord::Migration[4.2]
disable_ddl_transaction!
def up
- add_column_with_default(:cluster_providers_gcp, :legacy_abac, :boolean, default: true) # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default(:cluster_providers_gcp, :legacy_abac, :boolean, default: true)
end
def down
diff --git a/db/migrate/20181017001059_add_cluster_type_to_clusters.rb b/db/migrate/20181017001059_add_cluster_type_to_clusters.rb
index 75abcfedfc9..d032afe1a43 100644
--- a/db/migrate/20181017001059_add_cluster_type_to_clusters.rb
+++ b/db/migrate/20181017001059_add_cluster_type_to_clusters.rb
@@ -9,7 +9,7 @@ class AddClusterTypeToClusters < ActiveRecord::Migration[4.2]
disable_ddl_transaction!
def up
- add_column_with_default(:clusters, :cluster_type, :smallint, default: PROJECT_CLUSTER_TYPE) # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default(:clusters, :cluster_type, :smallint, default: PROJECT_CLUSTER_TYPE)
end
def down
diff --git a/db/migrate/20190218134158_add_masked_to_ci_variables.rb b/db/migrate/20190218134158_add_masked_to_ci_variables.rb
index 60dcc0d7af5..b4999d5b4a9 100644
--- a/db/migrate/20190218134158_add_masked_to_ci_variables.rb
+++ b/db/migrate/20190218134158_add_masked_to_ci_variables.rb
@@ -12,7 +12,7 @@ class AddMaskedToCiVariables < ActiveRecord::Migration[5.0]
disable_ddl_transaction!
def up
- add_column_with_default :ci_variables, :masked, :boolean, default: false, allow_null: false # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default :ci_variables, :masked, :boolean, default: false, allow_null: false
end
def down
diff --git a/db/migrate/20190218134209_add_masked_to_ci_group_variables.rb b/db/migrate/20190218134209_add_masked_to_ci_group_variables.rb
index c25881410d0..8633875b341 100644
--- a/db/migrate/20190218134209_add_masked_to_ci_group_variables.rb
+++ b/db/migrate/20190218134209_add_masked_to_ci_group_variables.rb
@@ -12,7 +12,7 @@ class AddMaskedToCiGroupVariables < ActiveRecord::Migration[5.0]
disable_ddl_transaction!
def up
- add_column_with_default :ci_group_variables, :masked, :boolean, default: false, allow_null: false # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default :ci_group_variables, :masked, :boolean, default: false, allow_null: false
end
def down
diff --git a/db/migrate/20190220142344_add_email_header_and_footer_enabled_flag_to_appearances_table.rb b/db/migrate/20190220142344_add_email_header_and_footer_enabled_flag_to_appearances_table.rb
index 33fb6b8ef0d..85b9e0580f4 100644
--- a/db/migrate/20190220142344_add_email_header_and_footer_enabled_flag_to_appearances_table.rb
+++ b/db/migrate/20190220142344_add_email_header_and_footer_enabled_flag_to_appearances_table.rb
@@ -8,7 +8,7 @@ class AddEmailHeaderAndFooterEnabledFlagToAppearancesTable < ActiveRecord::Migra
DOWNTIME = false
def up
- add_column_with_default(:appearances, :email_header_and_footer_enabled, :boolean, default: false) # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default(:appearances, :email_header_and_footer_enabled, :boolean, default: false)
end
def down
diff --git a/db/migrate/20190228192410_add_multi_line_attributes_to_suggestion.rb b/db/migrate/20190228192410_add_multi_line_attributes_to_suggestion.rb
index 766ea50161d..856dfc89fa3 100644
--- a/db/migrate/20190228192410_add_multi_line_attributes_to_suggestion.rb
+++ b/db/migrate/20190228192410_add_multi_line_attributes_to_suggestion.rb
@@ -8,11 +8,9 @@ class AddMultiLineAttributesToSuggestion < ActiveRecord::Migration[5.0]
disable_ddl_transaction!
def up
- # rubocop:disable Migration/AddColumnWithDefault
add_column_with_default :suggestions, :lines_above, :integer, default: 0, allow_null: false
add_column_with_default :suggestions, :lines_below, :integer, default: 0, allow_null: false
add_column_with_default :suggestions, :outdated, :boolean, default: false, allow_null: false
- # rubocop:enable Migration/AddColumnWithDefault
end
def down
diff --git a/db/migrate/20190322164830_add_auto_ssl_enabled_to_pages_domain.rb b/db/migrate/20190322164830_add_auto_ssl_enabled_to_pages_domain.rb
index 41552b0e2e3..e74a9535ddf 100644
--- a/db/migrate/20190322164830_add_auto_ssl_enabled_to_pages_domain.rb
+++ b/db/migrate/20190322164830_add_auto_ssl_enabled_to_pages_domain.rb
@@ -8,7 +8,7 @@ class AddAutoSslEnabledToPagesDomain < ActiveRecord::Migration[5.0]
disable_ddl_transaction!
def up
- add_column_with_default :pages_domains, :auto_ssl_enabled, :boolean, default: false # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default :pages_domains, :auto_ssl_enabled, :boolean, default: false
end
def down
diff --git a/db/migrate/20190325165127_add_managed_to_cluster.rb b/db/migrate/20190325165127_add_managed_to_cluster.rb
index 14ed4db143e..e960df9d502 100644
--- a/db/migrate/20190325165127_add_managed_to_cluster.rb
+++ b/db/migrate/20190325165127_add_managed_to_cluster.rb
@@ -8,7 +8,7 @@ class AddManagedToCluster < ActiveRecord::Migration[5.0]
DOWNTIME = false
def up
- add_column_with_default(:clusters, :managed, :boolean, default: true) # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default(:clusters, :managed, :boolean, default: true)
end
def down
diff --git a/db/migrate/20190415030217_add_variable_type_to_ci_variables.rb b/db/migrate/20190415030217_add_variable_type_to_ci_variables.rb
index ed7af455e12..433f510299a 100644
--- a/db/migrate/20190415030217_add_variable_type_to_ci_variables.rb
+++ b/db/migrate/20190415030217_add_variable_type_to_ci_variables.rb
@@ -8,7 +8,7 @@ class AddVariableTypeToCiVariables < ActiveRecord::Migration[5.0]
ENV_VAR_VARIABLE_TYPE = 1
def up
- add_column_with_default(:ci_variables, :variable_type, :smallint, default: ENV_VAR_VARIABLE_TYPE) # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default(:ci_variables, :variable_type, :smallint, default: ENV_VAR_VARIABLE_TYPE)
end
def down
diff --git a/db/migrate/20190416185130_add_merge_train_enabled_to_ci_cd_settings.rb b/db/migrate/20190416185130_add_merge_train_enabled_to_ci_cd_settings.rb
index 55ef3c79f3f..60d4c2554f7 100644
--- a/db/migrate/20190416185130_add_merge_train_enabled_to_ci_cd_settings.rb
+++ b/db/migrate/20190416185130_add_merge_train_enabled_to_ci_cd_settings.rb
@@ -8,12 +8,10 @@ class AddMergeTrainEnabledToCiCdSettings < ActiveRecord::Migration[5.1]
disable_ddl_transaction!
# rubocop:disable Migration/UpdateLargeTable
- # rubocop:disable Migration/AddColumnWithDefault
def up
add_column_with_default :project_ci_cd_settings, :merge_trains_enabled, :boolean, default: false, allow_null: false
end
# rubocop:enable Migration/UpdateLargeTable
- # rubocop:enable Migration/AddColumnWithDefault
def down
remove_column :project_ci_cd_settings, :merge_trains_enabled
diff --git a/db/migrate/20190416213556_add_variable_type_to_ci_group_variables.rb b/db/migrate/20190416213556_add_variable_type_to_ci_group_variables.rb
index 4d329cea1b5..dce73caeb5e 100644
--- a/db/migrate/20190416213556_add_variable_type_to_ci_group_variables.rb
+++ b/db/migrate/20190416213556_add_variable_type_to_ci_group_variables.rb
@@ -8,7 +8,7 @@ class AddVariableTypeToCiGroupVariables < ActiveRecord::Migration[5.0]
ENV_VAR_VARIABLE_TYPE = 1
def up
- add_column_with_default(:ci_group_variables, :variable_type, :smallint, default: ENV_VAR_VARIABLE_TYPE) # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default(:ci_group_variables, :variable_type, :smallint, default: ENV_VAR_VARIABLE_TYPE)
end
def down
diff --git a/db/migrate/20190416213631_add_variable_type_to_ci_pipeline_schedule_variables.rb b/db/migrate/20190416213631_add_variable_type_to_ci_pipeline_schedule_variables.rb
index b7d80cb2d0d..3079b2afd9c 100644
--- a/db/migrate/20190416213631_add_variable_type_to_ci_pipeline_schedule_variables.rb
+++ b/db/migrate/20190416213631_add_variable_type_to_ci_pipeline_schedule_variables.rb
@@ -8,7 +8,7 @@ class AddVariableTypeToCiPipelineScheduleVariables < ActiveRecord::Migration[5.0
ENV_VAR_VARIABLE_TYPE = 1
def up
- add_column_with_default(:ci_pipeline_schedule_variables, :variable_type, :smallint, default: ENV_VAR_VARIABLE_TYPE) # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default(:ci_pipeline_schedule_variables, :variable_type, :smallint, default: ENV_VAR_VARIABLE_TYPE)
end
def down
diff --git a/db/migrate/20190426180107_add_deployment_events_to_services.rb b/db/migrate/20190426180107_add_deployment_events_to_services.rb
index e8e53728010..1fb137fb5f9 100644
--- a/db/migrate/20190426180107_add_deployment_events_to_services.rb
+++ b/db/migrate/20190426180107_add_deployment_events_to_services.rb
@@ -8,7 +8,7 @@ class AddDeploymentEventsToServices < ActiveRecord::Migration[5.0]
disable_ddl_transaction!
def up
- add_column_with_default(:services, :deployment_events, :boolean, default: false, allow_null: false) # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default(:services, :deployment_events, :boolean, default: false, allow_null: false)
end
def down
diff --git a/db/migrate/20190520200123_add_rule_type_to_approval_merge_request_approval_rules.rb b/db/migrate/20190520200123_add_rule_type_to_approval_merge_request_approval_rules.rb
index 7bdb48f3eec..7339a4fccba 100644
--- a/db/migrate/20190520200123_add_rule_type_to_approval_merge_request_approval_rules.rb
+++ b/db/migrate/20190520200123_add_rule_type_to_approval_merge_request_approval_rules.rb
@@ -12,7 +12,7 @@ class AddRuleTypeToApprovalMergeRequestApprovalRules < ActiveRecord::Migration[5
disable_ddl_transaction!
def up
- add_column_with_default(:approval_merge_request_rules, :rule_type, :integer, limit: 2, default: 1) # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default(:approval_merge_request_rules, :rule_type, :integer, limit: 2, default: 1)
end
def down
diff --git a/db/migrate/20190607085356_add_source_to_pages_domains.rb b/db/migrate/20190607085356_add_source_to_pages_domains.rb
index d681ab67431..0a845d7d11f 100644
--- a/db/migrate/20190607085356_add_source_to_pages_domains.rb
+++ b/db/migrate/20190607085356_add_source_to_pages_domains.rb
@@ -12,7 +12,7 @@ class AddSourceToPagesDomains < ActiveRecord::Migration[5.1]
disable_ddl_transaction!
def up
- add_column_with_default(:pages_domains, :certificate_source, :smallint, default: 0) # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default(:pages_domains, :certificate_source, :smallint, default: 0)
end
def down
diff --git a/db/migrate/20190628145246_add_strategies_to_operations_feature_flag_scopes.rb b/db/migrate/20190628145246_add_strategies_to_operations_feature_flag_scopes.rb
index 030ef9e4bd6..ed1f16ee69a 100644
--- a/db/migrate/20190628145246_add_strategies_to_operations_feature_flag_scopes.rb
+++ b/db/migrate/20190628145246_add_strategies_to_operations_feature_flag_scopes.rb
@@ -8,7 +8,7 @@ class AddStrategiesToOperationsFeatureFlagScopes < ActiveRecord::Migration[5.1]
disable_ddl_transaction!
def up
- add_column_with_default :operations_feature_flag_scopes, :strategies, :jsonb, default: [{ name: "default", parameters: {} }] # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default :operations_feature_flag_scopes, :strategies, :jsonb, default: [{ name: "default", parameters: {} }]
end
def down
diff --git a/db/migrate/20190709204413_add_rule_type_to_approval_project_rules.rb b/db/migrate/20190709204413_add_rule_type_to_approval_project_rules.rb
index b11154d0f26..87a228c9bf9 100644
--- a/db/migrate/20190709204413_add_rule_type_to_approval_project_rules.rb
+++ b/db/migrate/20190709204413_add_rule_type_to_approval_project_rules.rb
@@ -8,7 +8,7 @@ class AddRuleTypeToApprovalProjectRules < ActiveRecord::Migration[5.1]
disable_ddl_transaction!
def up
- add_column_with_default :approval_project_rules, :rule_type, :integer, limit: 2, default: 0, allow_null: false # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default :approval_project_rules, :rule_type, :integer, limit: 2, default: 0, allow_null: false
end
def down
diff --git a/db/migrate/20190712064021_add_namespace_per_environment_flag_to_clusters.rb b/db/migrate/20190712064021_add_namespace_per_environment_flag_to_clusters.rb
index 771eb21c4b6..4c8a0ab3def 100644
--- a/db/migrate/20190712064021_add_namespace_per_environment_flag_to_clusters.rb
+++ b/db/migrate/20190712064021_add_namespace_per_environment_flag_to_clusters.rb
@@ -11,7 +11,7 @@ class AddNamespacePerEnvironmentFlagToClusters < ActiveRecord::Migration[5.1]
disable_ddl_transaction!
def up
- add_column_with_default :clusters, :namespace_per_environment, :boolean, default: false # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default :clusters, :namespace_per_environment, :boolean, default: false
end
def down
diff --git a/db/migrate/20190715173819_add_object_storage_flag_to_geo_node.rb b/db/migrate/20190715173819_add_object_storage_flag_to_geo_node.rb
index cbc353b6282..2d3243f3357 100644
--- a/db/migrate/20190715173819_add_object_storage_flag_to_geo_node.rb
+++ b/db/migrate/20190715173819_add_object_storage_flag_to_geo_node.rb
@@ -12,7 +12,7 @@ class AddObjectStorageFlagToGeoNode < ActiveRecord::Migration[5.2]
disable_ddl_transaction!
def up
- add_column_with_default :geo_nodes, :sync_object_storage, :boolean, default: false # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default :geo_nodes, :sync_object_storage, :boolean, default: false
end
def down
diff --git a/db/migrate/20190729180447_add_merge_requests_require_code_owner_approval_to_protected_branches.rb b/db/migrate/20190729180447_add_merge_requests_require_code_owner_approval_to_protected_branches.rb
index bfac67606d6..098fcff9ace 100644
--- a/db/migrate/20190729180447_add_merge_requests_require_code_owner_approval_to_protected_branches.rb
+++ b/db/migrate/20190729180447_add_merge_requests_require_code_owner_approval_to_protected_branches.rb
@@ -9,7 +9,7 @@ class AddMergeRequestsRequireCodeOwnerApprovalToProtectedBranches < ActiveRecord
disable_ddl_transaction!
def up
- add_column_with_default( # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default(
:protected_branches,
:code_owner_approval_required,
:boolean,
diff --git a/db/migrate/20190816151221_add_active_jobs_limit_to_plans.rb b/db/migrate/20190816151221_add_active_jobs_limit_to_plans.rb
index 193e6cb188e..951ff41f1a8 100644
--- a/db/migrate/20190816151221_add_active_jobs_limit_to_plans.rb
+++ b/db/migrate/20190816151221_add_active_jobs_limit_to_plans.rb
@@ -8,7 +8,7 @@ class AddActiveJobsLimitToPlans < ActiveRecord::Migration[5.2]
disable_ddl_transaction!
def up
- add_column_with_default :plans, :active_jobs_limit, :integer, default: 0 # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default :plans, :active_jobs_limit, :integer, default: 0
end
def down
diff --git a/db/migrate/20190901174200_add_max_issue_count_to_list.rb b/db/migrate/20190901174200_add_max_issue_count_to_list.rb
index 7408d2f1c93..59359f28d6a 100644
--- a/db/migrate/20190901174200_add_max_issue_count_to_list.rb
+++ b/db/migrate/20190901174200_add_max_issue_count_to_list.rb
@@ -7,7 +7,7 @@ class AddMaxIssueCountToList < ActiveRecord::Migration[4.2]
DOWNTIME = false
def up
- add_column_with_default :lists, :max_issue_count, :integer, default: 0 # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default :lists, :max_issue_count, :integer, default: 0
end
def down
diff --git a/db/migrate/20190905140605_add_cloud_run_to_clusters_providers_gcp.rb b/db/migrate/20190905140605_add_cloud_run_to_clusters_providers_gcp.rb
index cd6b2fb7d4f..e7ffd7cd4d3 100644
--- a/db/migrate/20190905140605_add_cloud_run_to_clusters_providers_gcp.rb
+++ b/db/migrate/20190905140605_add_cloud_run_to_clusters_providers_gcp.rb
@@ -8,7 +8,7 @@ class AddCloudRunToClustersProvidersGcp < ActiveRecord::Migration[5.2]
disable_ddl_transaction!
def up
- add_column_with_default(:cluster_providers_gcp, :cloud_run, :boolean, default: false) # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default(:cluster_providers_gcp, :cloud_run, :boolean, default: false)
end
def down
diff --git a/db/migrate/20190907184714_add_show_whitespace_in_diffs_to_user_preferences.rb b/db/migrate/20190907184714_add_show_whitespace_in_diffs_to_user_preferences.rb
index 41f9b36278a..50d5d2b0574 100644
--- a/db/migrate/20190907184714_add_show_whitespace_in_diffs_to_user_preferences.rb
+++ b/db/migrate/20190907184714_add_show_whitespace_in_diffs_to_user_preferences.rb
@@ -11,7 +11,7 @@ class AddShowWhitespaceInDiffsToUserPreferences < ActiveRecord::Migration[5.2]
disable_ddl_transaction!
def up
- add_column_with_default :user_preferences, :show_whitespace_in_diffs, :boolean, default: true, allow_null: false # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default :user_preferences, :show_whitespace_in_diffs, :boolean, default: true, allow_null: false
end
def down
diff --git a/db/migrate/20190918104731_add_cleanup_status_to_cluster.rb b/db/migrate/20190918104731_add_cleanup_status_to_cluster.rb
index 62290fb0fa6..0ba9d8e6c89 100644
--- a/db/migrate/20190918104731_add_cleanup_status_to_cluster.rb
+++ b/db/migrate/20190918104731_add_cleanup_status_to_cluster.rb
@@ -9,7 +9,7 @@ class AddCleanupStatusToCluster < ActiveRecord::Migration[5.2]
disable_ddl_transaction!
def up
- add_column_with_default(:clusters, :cleanup_status, # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default(:clusters, :cleanup_status,
:smallint,
default: 1,
allow_null: false)
diff --git a/db/migrate/20191001170300_create_ci_ref.rb b/db/migrate/20191001170300_create_ci_ref.rb
new file mode 100644
index 00000000000..af25e67430b
--- /dev/null
+++ b/db/migrate/20191001170300_create_ci_ref.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class CreateCiRef < ActiveRecord::Migration[5.2]
+ DOWNTIME = false
+
+ def change
+ create_table :ci_refs do |t|
+ t.references :project, null: false, index: false, foreign_key: { on_delete: :cascade }, type: :integer
+ t.integer :lock_version, default: 0
+ t.integer :last_updated_by_pipeline_id
+ t.boolean :tag, default: false, null: false
+ t.string :ref, null: false, limit: 255
+ t.string :status, null: false, limit: 255
+ t.foreign_key :ci_pipelines, column: :last_updated_by_pipeline_id, on_delete: :nullify
+ t.index [:project_id, :ref, :tag], unique: true
+ t.index [:last_updated_by_pipeline_id]
+ end
+ end
+end
diff --git a/db/migrate/20191014123159_add_expire_notification_delivered_to_personal_access_tokens.rb b/db/migrate/20191014123159_add_expire_notification_delivered_to_personal_access_tokens.rb
index 41a81e3ac87..f172d3bdcbd 100644
--- a/db/migrate/20191014123159_add_expire_notification_delivered_to_personal_access_tokens.rb
+++ b/db/migrate/20191014123159_add_expire_notification_delivered_to_personal_access_tokens.rb
@@ -8,7 +8,7 @@ class AddExpireNotificationDeliveredToPersonalAccessTokens < ActiveRecord::Migra
disable_ddl_transaction!
def up
- add_column_with_default :personal_access_tokens, :expire_notification_delivered, :boolean, default: false # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default :personal_access_tokens, :expire_notification_delivered, :boolean, default: false
end
def down
diff --git a/db/migrate/20191023093207_add_comment_actions_to_services.rb b/db/migrate/20191023093207_add_comment_actions_to_services.rb
index 0bd528cc85d..f3fc12ac7c7 100644
--- a/db/migrate/20191023093207_add_comment_actions_to_services.rb
+++ b/db/migrate/20191023093207_add_comment_actions_to_services.rb
@@ -8,7 +8,7 @@ class AddCommentActionsToServices < ActiveRecord::Migration[5.2]
disable_ddl_transaction!
def up
- add_column_with_default(:services, :comment_on_event_enabled, :boolean, default: true) # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default(:services, :comment_on_event_enabled, :boolean, default: true)
end
def down
diff --git a/db/migrate/20191028130054_add_max_issue_weight_to_list.rb b/db/migrate/20191028130054_add_max_issue_weight_to_list.rb
index f15b65067f6..eec7c42c907 100644
--- a/db/migrate/20191028130054_add_max_issue_weight_to_list.rb
+++ b/db/migrate/20191028130054_add_max_issue_weight_to_list.rb
@@ -8,7 +8,7 @@ class AddMaxIssueWeightToList < ActiveRecord::Migration[5.2]
DOWNTIME = false
def up
- add_column_with_default :lists, :max_issue_weight, :integer, default: 0 # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default :lists, :max_issue_weight, :integer, default: 0
end
def down
diff --git a/db/migrate/20191029191901_add_enabled_to_grafana_integrations.rb b/db/migrate/20191029191901_add_enabled_to_grafana_integrations.rb
index 40e361e2150..8db11724874 100644
--- a/db/migrate/20191029191901_add_enabled_to_grafana_integrations.rb
+++ b/db/migrate/20191029191901_add_enabled_to_grafana_integrations.rb
@@ -8,7 +8,7 @@ class AddEnabledToGrafanaIntegrations < ActiveRecord::Migration[5.2]
disable_ddl_transaction!
def up
- add_column_with_default( # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default(
:grafana_integrations,
:enabled,
:boolean,
diff --git a/db/migrate/20191105155113_add_secret_to_snippet.rb b/db/migrate/20191105155113_add_secret_to_snippet.rb
index 8f0a330238b..ae514d48494 100644
--- a/db/migrate/20191105155113_add_secret_to_snippet.rb
+++ b/db/migrate/20191105155113_add_secret_to_snippet.rb
@@ -9,7 +9,7 @@ class AddSecretToSnippet < ActiveRecord::Migration[5.2]
def up
unless column_exists?(:snippets, :secret)
- add_column_with_default :snippets, :secret, :boolean, default: false # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default :snippets, :secret, :boolean, default: false
end
add_concurrent_index :snippets, [:visibility_level, :secret]
diff --git a/db/migrate/20191106144901_add_state_to_merge_trains.rb b/db/migrate/20191106144901_add_state_to_merge_trains.rb
index 64a70575c91..e2256705f53 100644
--- a/db/migrate/20191106144901_add_state_to_merge_trains.rb
+++ b/db/migrate/20191106144901_add_state_to_merge_trains.rb
@@ -9,7 +9,7 @@ class AddStateToMergeTrains < ActiveRecord::Migration[5.2]
disable_ddl_transaction!
def up
- add_column_with_default :merge_trains, :status, :integer, limit: 2, default: MERGE_TRAIN_STATUS_CREATED # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default :merge_trains, :status, :integer, limit: 2, default: MERGE_TRAIN_STATUS_CREATED
end
def down
diff --git a/db/migrate/20191111165017_add_fixed_pipeline_to_notification_settings.rb b/db/migrate/20191111165017_add_fixed_pipeline_to_notification_settings.rb
new file mode 100644
index 00000000000..7a857807468
--- /dev/null
+++ b/db/migrate/20191111165017_add_fixed_pipeline_to_notification_settings.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+class AddFixedPipelineToNotificationSettings < ActiveRecord::Migration[5.2]
+ DOWNTIME = false
+
+ def change
+ add_column :notification_settings, :fixed_pipeline, :boolean
+ end
+end
diff --git a/db/migrate/20191112090226_add_artifacts_to_ci_build_need.rb b/db/migrate/20191112090226_add_artifacts_to_ci_build_need.rb
index b868e0b44a8..2fbd003b2e5 100644
--- a/db/migrate/20191112090226_add_artifacts_to_ci_build_need.rb
+++ b/db/migrate/20191112090226_add_artifacts_to_ci_build_need.rb
@@ -8,7 +8,7 @@ class AddArtifactsToCiBuildNeed < ActiveRecord::Migration[5.2]
disable_ddl_transaction!
def up
- add_column_with_default(:ci_build_needs, :artifacts, # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default(:ci_build_needs, :artifacts,
:boolean,
default: true,
allow_null: false)
diff --git a/db/migrate/20191114201118_make_created_at_not_null_in_design_management_versions.rb b/db/migrate/20191114201118_make_created_at_not_null_in_design_management_versions.rb
new file mode 100644
index 00000000000..7b9d70c1a50
--- /dev/null
+++ b/db/migrate/20191114201118_make_created_at_not_null_in_design_management_versions.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+class MakeCreatedAtNotNullInDesignManagementVersions < ActiveRecord::Migration[5.2]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def up
+ change_column_null :design_management_versions, :created_at, false, Time.now.to_s(:db)
+ end
+
+ def down
+ change_column_null :design_management_versions, :created_at, true
+ end
+end
diff --git a/db/migrate/20191121193110_add_issue_links_type.rb b/db/migrate/20191121193110_add_issue_links_type.rb
index 86bfd41b916..61ef2e7d7e8 100644
--- a/db/migrate/20191121193110_add_issue_links_type.rb
+++ b/db/migrate/20191121193110_add_issue_links_type.rb
@@ -8,7 +8,7 @@ class AddIssueLinksType < ActiveRecord::Migration[5.1]
disable_ddl_transaction!
def up
- add_column_with_default :issue_links, :link_type, :integer, default: 0, limit: 2 # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default :issue_links, :link_type, :integer, default: 0, limit: 2
end
def down
diff --git a/db/migrate/20191123081456_add_dismissable_to_broadcast_messages.rb b/db/migrate/20191123081456_add_dismissable_to_broadcast_messages.rb
new file mode 100644
index 00000000000..40235771d80
--- /dev/null
+++ b/db/migrate/20191123081456_add_dismissable_to_broadcast_messages.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+class AddDismissableToBroadcastMessages < ActiveRecord::Migration[5.2]
+ DOWNTIME = false
+
+ def change
+ add_column :broadcast_messages, :dismissable, :boolean
+ end
+end
diff --git a/db/migrate/20191127163053_add_confidential_to_doorkeeper_application.rb b/db/migrate/20191127163053_add_confidential_to_doorkeeper_application.rb
index 12e22b4744c..1fb02085c37 100644
--- a/db/migrate/20191127163053_add_confidential_to_doorkeeper_application.rb
+++ b/db/migrate/20191127163053_add_confidential_to_doorkeeper_application.rb
@@ -8,7 +8,7 @@ class AddConfidentialToDoorkeeperApplication < ActiveRecord::Migration[5.2]
disable_ddl_transaction!
def up
- add_column_with_default( # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default(
:oauth_applications,
:confidential,
:boolean,
diff --git a/db/migrate/20191127221608_add_wildcard_and_domain_type_to_pages_domains.rb b/db/migrate/20191127221608_add_wildcard_and_domain_type_to_pages_domains.rb
index 4ca7ad29691..6893a02bcad 100644
--- a/db/migrate/20191127221608_add_wildcard_and_domain_type_to_pages_domains.rb
+++ b/db/migrate/20191127221608_add_wildcard_and_domain_type_to_pages_domains.rb
@@ -9,10 +9,8 @@ class AddWildcardAndDomainTypeToPagesDomains < ActiveRecord::Migration[5.2]
disable_ddl_transaction!
def up
- # rubocop:disable Migration/AddColumnWithDefault
add_column_with_default :pages_domains, :wildcard, :boolean, default: false
add_column_with_default :pages_domains, :domain_type, :integer, limit: 2, default: PROJECT_TYPE
- # rubocop:enable Migration/AddColumnWithDefault
end
def down
diff --git a/db/migrate/20191129134844_add_broadcast_type_to_broadcast_message.rb b/db/migrate/20191129134844_add_broadcast_type_to_broadcast_message.rb
index 884d9ac6d7f..84d17f558d1 100644
--- a/db/migrate/20191129134844_add_broadcast_type_to_broadcast_message.rb
+++ b/db/migrate/20191129134844_add_broadcast_type_to_broadcast_message.rb
@@ -10,7 +10,7 @@ class AddBroadcastTypeToBroadcastMessage < ActiveRecord::Migration[5.2]
disable_ddl_transaction!
def up
- add_column_with_default(:broadcast_messages, :broadcast_type, :smallint, default: BROADCAST_MESSAGE_BANNER_TYPE) # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default(:broadcast_messages, :broadcast_type, :smallint, default: BROADCAST_MESSAGE_BANNER_TYPE)
end
def down
diff --git a/db/migrate/20191206014412_add_image_to_design_management_designs_versions.rb b/db/migrate/20191206014412_add_image_to_design_management_designs_versions.rb
new file mode 100644
index 00000000000..d8e2269d21a
--- /dev/null
+++ b/db/migrate/20191206014412_add_image_to_design_management_designs_versions.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+class AddImageToDesignManagementDesignsVersions < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def change
+ add_column :design_management_designs_versions, :image_v432x230, :string, limit: 255
+ end
+end
diff --git a/db/migrate/20191218124915_add_repository_storage_to_snippets.rb b/db/migrate/20191218124915_add_repository_storage_to_snippets.rb
index df9a9d2ff43..ff391c04062 100644
--- a/db/migrate/20191218124915_add_repository_storage_to_snippets.rb
+++ b/db/migrate/20191218124915_add_repository_storage_to_snippets.rb
@@ -8,7 +8,7 @@ class AddRepositoryStorageToSnippets < ActiveRecord::Migration[5.2]
disable_ddl_transaction!
def up
- add_column_with_default( # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default(
:snippets,
:repository_storage,
:string,
diff --git a/db/migrate/20191218125015_add_storage_version_to_snippets.rb b/db/migrate/20191218125015_add_storage_version_to_snippets.rb
index b1bd3589692..659f36f42b5 100644
--- a/db/migrate/20191218125015_add_storage_version_to_snippets.rb
+++ b/db/migrate/20191218125015_add_storage_version_to_snippets.rb
@@ -8,7 +8,7 @@ class AddStorageVersionToSnippets < ActiveRecord::Migration[5.2]
disable_ddl_transaction!
def up
- add_column_with_default( # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default(
:snippets,
:storage_version,
:integer,
diff --git a/db/migrate/20200102140148_add_expanded_environment_name_to_ci_build_metadata.rb b/db/migrate/20200102140148_add_expanded_environment_name_to_ci_build_metadata.rb
new file mode 100644
index 00000000000..e76806c5d3f
--- /dev/null
+++ b/db/migrate/20200102140148_add_expanded_environment_name_to_ci_build_metadata.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+class AddExpandedEnvironmentNameToCiBuildMetadata < ActiveRecord::Migration[5.2]
+ DOWNTIME = false
+
+ def up
+ add_column :ci_builds_metadata, :expanded_environment_name, :string, limit: 255
+ end
+
+ def down
+ remove_column :ci_builds_metadata, :expanded_environment_name
+ end
+end
diff --git a/db/migrate/20200122161638_add_deploy_token_type_to_deploy_tokens.rb b/db/migrate/20200122161638_add_deploy_token_type_to_deploy_tokens.rb
index e0cf18caf9c..2fe79250ea2 100644
--- a/db/migrate/20200122161638_add_deploy_token_type_to_deploy_tokens.rb
+++ b/db/migrate/20200122161638_add_deploy_token_type_to_deploy_tokens.rb
@@ -8,7 +8,7 @@ class AddDeployTokenTypeToDeployTokens < ActiveRecord::Migration[5.2]
DOWNTIME = false
def up
- add_column_with_default :deploy_tokens, :deploy_token_type, :integer, default: 2, limit: 2, allow_null: false # rubocop: disable Migration/AddColumnWithDefault
+ add_column_with_default :deploy_tokens, :deploy_token_type, :integer, default: 2, limit: 2, allow_null: false
end
def down
diff --git a/db/migrate/20200128184209_add_usage_to_pages_domains.rb b/db/migrate/20200128184209_add_usage_to_pages_domains.rb
index 292490078cd..ac644814076 100644
--- a/db/migrate/20200128184209_add_usage_to_pages_domains.rb
+++ b/db/migrate/20200128184209_add_usage_to_pages_domains.rb
@@ -9,7 +9,7 @@ class AddUsageToPagesDomains < ActiveRecord::Migration[5.2]
disable_ddl_transaction!
def up
- add_column_with_default :pages_domains, :usage, :integer, limit: 2, default: PAGES_USAGE, allow_null: false # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default :pages_domains, :usage, :integer, limit: 2, default: PAGES_USAGE, allow_null: false
end
def down
diff --git a/db/migrate/20200203015140_add_id_to_design_management_designs_versions.rb b/db/migrate/20200203015140_add_id_to_design_management_designs_versions.rb
new file mode 100644
index 00000000000..f809bc84fae
--- /dev/null
+++ b/db/migrate/20200203015140_add_id_to_design_management_designs_versions.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+class AddIdToDesignManagementDesignsVersions < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def change
+ add_column :design_management_designs_versions, :id, :primary_key
+ end
+end
diff --git a/db/migrate/20200203025400_default_lock_version_to_zero_for_merge_requests.rb b/db/migrate/20200203025400_default_lock_version_to_zero_for_merge_requests.rb
index c0c58cfa3a7..8e48490af77 100644
--- a/db/migrate/20200203025400_default_lock_version_to_zero_for_merge_requests.rb
+++ b/db/migrate/20200203025400_default_lock_version_to_zero_for_merge_requests.rb
@@ -9,9 +9,15 @@ class DefaultLockVersionToZeroForMergeRequests < ActiveRecord::Migration[6.0]
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
- def change
+ def up
with_lock_retries do
change_column_default :merge_requests, :lock_version, from: nil, to: 0
end
end
+
+ def down
+ with_lock_retries do
+ change_column_default :merge_requests, :lock_version, from: 0, to: nil
+ end
+ end
end
diff --git a/db/migrate/20200203025602_default_lock_version_to_zero_for_issues.rb b/db/migrate/20200203025602_default_lock_version_to_zero_for_issues.rb
index 72c265fdf38..1265de70728 100644
--- a/db/migrate/20200203025602_default_lock_version_to_zero_for_issues.rb
+++ b/db/migrate/20200203025602_default_lock_version_to_zero_for_issues.rb
@@ -9,9 +9,15 @@ class DefaultLockVersionToZeroForIssues < ActiveRecord::Migration[6.0]
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
- def change
+ def up
with_lock_retries do
change_column_default :issues, :lock_version, from: nil, to: 0
end
end
+
+ def down
+ with_lock_retries do
+ change_column_default :issues, :lock_version, from: 0, to: nil
+ end
+ end
end
diff --git a/db/migrate/20200203025619_default_lock_version_to_zero_for_epics.rb b/db/migrate/20200203025619_default_lock_version_to_zero_for_epics.rb
index f44cfd07ee2..b2af8cbe707 100644
--- a/db/migrate/20200203025619_default_lock_version_to_zero_for_epics.rb
+++ b/db/migrate/20200203025619_default_lock_version_to_zero_for_epics.rb
@@ -9,9 +9,15 @@ class DefaultLockVersionToZeroForEpics < ActiveRecord::Migration[6.0]
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
- def change
+ def up
with_lock_retries do
change_column_default :epics, :lock_version, from: nil, to: 0
end
end
+
+ def down
+ with_lock_retries do
+ change_column_default :epics, :lock_version, from: 0, to: nil
+ end
+ end
end
diff --git a/db/migrate/20200203025744_default_lock_version_to_zero_for_ci_builds.rb b/db/migrate/20200203025744_default_lock_version_to_zero_for_ci_builds.rb
index feda8c36947..f40bf7d6309 100644
--- a/db/migrate/20200203025744_default_lock_version_to_zero_for_ci_builds.rb
+++ b/db/migrate/20200203025744_default_lock_version_to_zero_for_ci_builds.rb
@@ -9,9 +9,15 @@ class DefaultLockVersionToZeroForCiBuilds < ActiveRecord::Migration[6.0]
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
- def change
+ def up
with_lock_retries do
change_column_default :ci_builds, :lock_version, from: nil, to: 0
end
end
+
+ def down
+ with_lock_retries do
+ change_column_default :ci_builds, :lock_version, from: 0, to: nil
+ end
+ end
end
diff --git a/db/migrate/20200203025801_default_lock_version_to_zero_for_ci_stages.rb b/db/migrate/20200203025801_default_lock_version_to_zero_for_ci_stages.rb
index b825c4ff1c4..fd3194a742a 100644
--- a/db/migrate/20200203025801_default_lock_version_to_zero_for_ci_stages.rb
+++ b/db/migrate/20200203025801_default_lock_version_to_zero_for_ci_stages.rb
@@ -9,9 +9,15 @@ class DefaultLockVersionToZeroForCiStages < ActiveRecord::Migration[6.0]
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
- def change
+ def up
with_lock_retries do
change_column_default :ci_stages, :lock_version, from: nil, to: 0
end
end
+
+ def down
+ with_lock_retries do
+ change_column_default :ci_stages, :lock_version, from: 0, to: nil
+ end
+ end
end
diff --git a/db/migrate/20200203025821_default_lock_version_to_zero_for_ci_pipelines.rb b/db/migrate/20200203025821_default_lock_version_to_zero_for_ci_pipelines.rb
index 6c4c84cb7e7..6b4568cc7e8 100644
--- a/db/migrate/20200203025821_default_lock_version_to_zero_for_ci_pipelines.rb
+++ b/db/migrate/20200203025821_default_lock_version_to_zero_for_ci_pipelines.rb
@@ -9,9 +9,15 @@ class DefaultLockVersionToZeroForCiPipelines < ActiveRecord::Migration[6.0]
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
- def change
+ def up
with_lock_retries do
change_column_default :ci_pipelines, :lock_version, from: nil, to: 0
end
end
+
+ def down
+ with_lock_retries do
+ change_column_default :ci_pipelines, :lock_version, from: 0, to: nil
+ end
+ end
end
diff --git a/db/migrate/20200206141511_change_saml_provider_outer_forks_default.rb b/db/migrate/20200206141511_change_saml_provider_outer_forks_default.rb
new file mode 100644
index 00000000000..971cc4da921
--- /dev/null
+++ b/db/migrate/20200206141511_change_saml_provider_outer_forks_default.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+class ChangeSamlProviderOuterForksDefault < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def up
+ change_column_null :saml_providers, :prohibited_outer_forks, false
+ change_column_default :saml_providers, :prohibited_outer_forks, true
+ end
+
+ def down
+ change_column_default :saml_providers, :prohibited_outer_forks, false
+ change_column_null :saml_providers, :prohibited_outer_forks, true
+ end
+end
diff --git a/db/migrate/20200207062728_add_default_branch_protection_to_namespaces.rb b/db/migrate/20200207062728_add_default_branch_protection_to_namespaces.rb
new file mode 100644
index 00000000000..6eb650e6b6a
--- /dev/null
+++ b/db/migrate/20200207062728_add_default_branch_protection_to_namespaces.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class AddDefaultBranchProtectionToNamespaces < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def up
+ with_lock_retries do
+ add_column :namespaces, :default_branch_protection, :integer, limit: 2
+ end
+ end
+
+ def down
+ with_lock_retries do
+ remove_column :namespaces, :default_branch_protection
+ end
+ end
+end
diff --git a/db/migrate/20200211174946_add_auto_renew_to_gitlab_subscriptions.rb b/db/migrate/20200211174946_add_auto_renew_to_gitlab_subscriptions.rb
new file mode 100644
index 00000000000..6bce74bb8f8
--- /dev/null
+++ b/db/migrate/20200211174946_add_auto_renew_to_gitlab_subscriptions.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+class AddAutoRenewToGitlabSubscriptions < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def change
+ add_column :gitlab_subscription_histories, :auto_renew, :boolean
+ add_column :gitlab_subscriptions, :auto_renew, :boolean
+ end
+end
diff --git a/db/migrate/20200212014653_rename_security_dashboard_feature_flag_to_instance_security_dashboard.rb b/db/migrate/20200212014653_rename_security_dashboard_feature_flag_to_instance_security_dashboard.rb
new file mode 100644
index 00000000000..8d37f6c1dd4
--- /dev/null
+++ b/db/migrate/20200212014653_rename_security_dashboard_feature_flag_to_instance_security_dashboard.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+class RenameSecurityDashboardFeatureFlagToInstanceSecurityDashboard < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ class FeatureGate < ApplicationRecord
+ self.table_name = 'feature_gates'
+ end
+
+ def up
+ security_dashboard_feature = FeatureGate.find_by(feature_key: :security_dashboard, key: :boolean)
+
+ if security_dashboard_feature.present?
+ FeatureGate.safe_find_or_create_by!(
+ feature_key: :instance_security_dashboard,
+ key: :boolean,
+ value: security_dashboard_feature.value
+ )
+ end
+ end
+
+ def down
+ FeatureGate.find_by(feature_key: :instance_security_dashboard, key: :boolean)&.delete
+ end
+end
diff --git a/db/migrate/20200212133945_add_group_hooks_to_plan_limits.rb b/db/migrate/20200212133945_add_group_hooks_to_plan_limits.rb
new file mode 100644
index 00000000000..4e279ff903d
--- /dev/null
+++ b/db/migrate/20200212133945_add_group_hooks_to_plan_limits.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+class AddGroupHooksToPlanLimits < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def change
+ add_column(:plan_limits, :group_hooks, :integer, default: 0, null: false)
+ end
+end
diff --git a/db/migrate/20200212134201_insert_group_hooks_plan_limits.rb b/db/migrate/20200212134201_insert_group_hooks_plan_limits.rb
new file mode 100644
index 00000000000..a2963cfaf4d
--- /dev/null
+++ b/db/migrate/20200212134201_insert_group_hooks_plan_limits.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+class InsertGroupHooksPlanLimits < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
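+ # create_or_update_plan_limit (Gitlab::Database::MigrationHelpers) behaves roughly like an
+ # upsert keyed on the plan, e.g. for the bronze row:
+ #
+ #   INSERT INTO plan_limits (plan_id, group_hooks)
+ #   SELECT id, 50 FROM plans WHERE name = 'bronze'
+ #   ON CONFLICT (plan_id) DO UPDATE SET group_hooks = EXCLUDED.group_hooks
+ #
+ # (a sketch of the helper's behaviour, not its exact SQL)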
+ def up
+ return unless Gitlab.com?
+
+ create_or_update_plan_limit('group_hooks', 'bronze', 50)
+ create_or_update_plan_limit('group_hooks', 'silver', 50)
+ create_or_update_plan_limit('group_hooks', 'gold', 50)
+ end
+
+ def down
+ return unless Gitlab.com?
+
+ create_or_update_plan_limit('group_hooks', 'bronze', 0)
+ create_or_update_plan_limit('group_hooks', 'silver', 0)
+ create_or_update_plan_limit('group_hooks', 'gold', 0)
+ end
+end
diff --git a/db/migrate/20200213093702_add_email_restrictions_to_application_settings.rb b/db/migrate/20200213093702_add_email_restrictions_to_application_settings.rb
new file mode 100644
index 00000000000..22d7b9f37e0
--- /dev/null
+++ b/db/migrate/20200213093702_add_email_restrictions_to_application_settings.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+class AddEmailRestrictionsToApplicationSettings < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def up
+ add_column(:application_settings, :email_restrictions_enabled, :boolean, default: false, null: false)
+ add_column(:application_settings, :email_restrictions, :text, null: true)
+ end
+
+ def down
+ remove_column(:application_settings, :email_restrictions_enabled)
+ remove_column(:application_settings, :email_restrictions)
+ end
+end
diff --git a/db/migrate/20200213155311_add_npm_package_requests_forwarding_to_application_settings.rb b/db/migrate/20200213155311_add_npm_package_requests_forwarding_to_application_settings.rb
new file mode 100644
index 00000000000..db67437232c
--- /dev/null
+++ b/db/migrate/20200213155311_add_npm_package_requests_forwarding_to_application_settings.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+class AddNpmPackageRequestsForwardingToApplicationSettings < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
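+ # add_column_with_default adds the column, backfills existing rows in batches and only then
+ # enforces the default/NOT NULL, which is why the DDL transaction is disabled above.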
+ def up
+ add_column_with_default(:application_settings, :npm_package_requests_forwarding,
+ :boolean,
+ default: false,
+ allow_null: false)
+ end
+
+ def down
+ remove_column(:application_settings, :npm_package_requests_forwarding)
+ end
+end
diff --git a/db/migrate/20200215222507_drop_forked_project_links_fk.rb b/db/migrate/20200215222507_drop_forked_project_links_fk.rb
new file mode 100644
index 00000000000..f3ee36e9037
--- /dev/null
+++ b/db/migrate/20200215222507_drop_forked_project_links_fk.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+class DropForkedProjectLinksFk < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ with_lock_retries do
+ remove_foreign_key_if_exists :forked_project_links, column: :forked_to_project_id
+ end
+ end
+
+ def down
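+ # Re-add the FK without validation inside the lock-retry block, then validate it in a
+ # separate statement so the full-table scan does not run under the heavier lock.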
+ unless foreign_key_exists?(:forked_project_links, :projects, column: :forked_to_project_id)
+ with_lock_retries do
+ # rubocop: disable Migration/AddConcurrentForeignKey
+ add_foreign_key :forked_project_links, :projects, column: :forked_to_project_id, on_delete: :cascade, validate: false
+ end
+ end
+
+ fk_name = concurrent_foreign_key_name(:forked_project_links, :forked_to_project_id, prefix: 'fk_rails_')
+ validate_foreign_key(:forked_project_links, :forked_to_project_id, name: fk_name)
+ end
+end
diff --git a/db/migrate/20200215225103_drop_forked_project_links_table.rb b/db/migrate/20200215225103_drop_forked_project_links_table.rb
new file mode 100644
index 00000000000..f8dbd19980e
--- /dev/null
+++ b/db/migrate/20200215225103_drop_forked_project_links_table.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class DropForkedProjectLinksTable < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def change
+ drop_table "forked_project_links", id: :serial do |t|
+ t.integer "forked_to_project_id", null: false
+ t.integer "forked_from_project_id", null: false
+ t.datetime "created_at"
+ t.datetime "updated_at"
+ t.index ["forked_to_project_id"], name: "index_forked_project_links_on_forked_to_project_id", unique: true
+ end
+ end
+end
diff --git a/db/migrate/20200219105209_add_filepath_to_release_links.rb b/db/migrate/20200219105209_add_filepath_to_release_links.rb
new file mode 100644
index 00000000000..bcc204c22e8
--- /dev/null
+++ b/db/migrate/20200219105209_add_filepath_to_release_links.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+class AddFilepathToReleaseLinks < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def change
+ add_column :release_links, :filepath, :string, limit: 128
+ end
+end
diff --git a/db/migrate/20200219133859_add_environment_id_to_deployment_merge_requests.rb b/db/migrate/20200219133859_add_environment_id_to_deployment_merge_requests.rb
new file mode 100644
index 00000000000..a57d0b44c52
--- /dev/null
+++ b/db/migrate/20200219133859_add_environment_id_to_deployment_merge_requests.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+class AddEnvironmentIdToDeploymentMergeRequests < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def change
+ add_column :deployment_merge_requests, :environment_id, :integer, null: true
+ end
+end
diff --git a/db/migrate/20200219141307_add_environment_id_fk_to_deployment_merge_requests.rb b/db/migrate/20200219141307_add_environment_id_fk_to_deployment_merge_requests.rb
new file mode 100644
index 00000000000..76980b21feb
--- /dev/null
+++ b/db/migrate/20200219141307_add_environment_id_fk_to_deployment_merge_requests.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class AddEnvironmentIdFkToDeploymentMergeRequests < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_foreign_key :deployment_merge_requests, :environments, column: :environment_id, on_delete: :cascade
+ end
+
+ def down
+ remove_foreign_key_if_exists :deployment_merge_requests, column: :environment_id
+ end
+end
diff --git a/db/migrate/20200219142522_add_environment_id_merge_request_id_uniq_idx_to_deployment_merge_requests.rb b/db/migrate/20200219142522_add_environment_id_merge_request_id_uniq_idx_to_deployment_merge_requests.rb
new file mode 100644
index 00000000000..a557f3f88d7
--- /dev/null
+++ b/db/migrate/20200219142522_add_environment_id_merge_request_id_uniq_idx_to_deployment_merge_requests.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class AddEnvironmentIdMergeRequestIdUniqIdxToDeploymentMergeRequests < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :deployment_merge_requests, [:environment_id, :merge_request_id], unique: true, name: 'idx_environment_merge_requests_unique_index'
+ end
+
+ def down
+ remove_concurrent_index_by_name :deployment_merge_requests, 'idx_environment_merge_requests_unique_index'
+ end
+end
diff --git a/db/migrate/20200220180944_add_keep_divergent_refs.rb b/db/migrate/20200220180944_add_keep_divergent_refs.rb
new file mode 100644
index 00000000000..0b123bc9d72
--- /dev/null
+++ b/db/migrate/20200220180944_add_keep_divergent_refs.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+class AddKeepDivergentRefs < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def change
+ add_column :remote_mirrors, :keep_divergent_refs, :boolean
+ end
+end
diff --git a/db/migrate/20200221023320_add_index_on_pages_domain_on_domain_lowercase.rb b/db/migrate/20200221023320_add_index_on_pages_domain_on_domain_lowercase.rb
new file mode 100644
index 00000000000..53f8f5e7f81
--- /dev/null
+++ b/db/migrate/20200221023320_add_index_on_pages_domain_on_domain_lowercase.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class AddIndexOnPagesDomainOnDomainLowercase < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ INDEX_NAME = 'index_pages_domains_on_domain_lowercase'
+
+ disable_ddl_transaction!
+
+ def up
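+ # Expression index on LOWER(domain) so case-insensitive domain lookups can use an index.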
+ add_concurrent_index :pages_domains, 'LOWER(domain)', name: INDEX_NAME
+ end
+
+ def down
+ remove_concurrent_index_by_name :pages_domains, INDEX_NAME
+ end
+end
diff --git a/db/migrate/20200221074028_add_mr_metrics_first_approved_at.rb b/db/migrate/20200221074028_add_mr_metrics_first_approved_at.rb
new file mode 100644
index 00000000000..993905f66ce
--- /dev/null
+++ b/db/migrate/20200221074028_add_mr_metrics_first_approved_at.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+class AddMrMetricsFirstApprovedAt < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def up
+ add_column :merge_request_metrics, :first_approved_at, :datetime_with_timezone
+ end
+
+ def down
+ remove_column :merge_request_metrics, :first_approved_at
+ end
+end
diff --git a/db/migrate/20200221100514_create_users_statistics.rb b/db/migrate/20200221100514_create_users_statistics.rb
new file mode 100644
index 00000000000..c02e635bdd4
--- /dev/null
+++ b/db/migrate/20200221100514_create_users_statistics.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class CreateUsersStatistics < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def change
+ create_table :users_statistics do |t|
+ t.timestamps_with_timezone null: false
+ t.integer :without_groups_and_projects, null: false, default: 0
+ t.integer :with_highest_role_guest, null: false, default: 0
+ t.integer :with_highest_role_reporter, null: false, default: 0
+ t.integer :with_highest_role_developer, null: false, default: 0
+ t.integer :with_highest_role_maintainer, null: false, default: 0
+ t.integer :with_highest_role_owner, null: false, default: 0
+ t.integer :bots, null: false, default: 0
+ t.integer :blocked, null: false, default: 0
+ end
+ end
+end
diff --git a/db/migrate/20200221105436_update_application_setting_npm_package_requests_forwarding_default.rb b/db/migrate/20200221105436_update_application_setting_npm_package_requests_forwarding_default.rb
new file mode 100644
index 00000000000..2479df2737d
--- /dev/null
+++ b/db/migrate/20200221105436_update_application_setting_npm_package_requests_forwarding_default.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class UpdateApplicationSettingNpmPackageRequestsForwardingDefault < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def up
+ change_column_default :application_settings, :npm_package_requests_forwarding, true
+
+ execute('UPDATE application_settings SET npm_package_requests_forwarding = TRUE')
+ end
+
+ def down
+ change_column_default :application_settings, :npm_package_requests_forwarding, false
+
+ execute('UPDATE application_settings SET npm_package_requests_forwarding = FALSE')
+ end
+end
diff --git a/db/migrate/20200221144534_drop_activate_prometheus_services_background_jobs.rb b/db/migrate/20200221144534_drop_activate_prometheus_services_background_jobs.rb
new file mode 100644
index 00000000000..13b041d8f95
--- /dev/null
+++ b/db/migrate/20200221144534_drop_activate_prometheus_services_background_jobs.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+class DropActivatePrometheusServicesBackgroundJobs < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+ DROPPED_JOB_CLASS = 'ActivatePrometheusServicesForSharedClusterApplications'.freeze
+ QUEUE = 'background_migration'.freeze
+
+ def up
+ sidekiq_queues.each do |queue|
+ queue.each do |job|
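+ # Jobs for the dropped worker carry args of the form [DROPPED_JOB_CLASS, <project_id>];
+ # anything with a different shape is left in the queue untouched.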
+ klass, project_id, *should_be_empty = job.args
+ next unless klass == DROPPED_JOB_CLASS && project_id.is_a?(Integer) && should_be_empty.empty?
+
+ job.delete
+ end
+ end
+ end
+
+ def down
+ # no-op
+ end
+
+ def sidekiq_queues
+ [Sidekiq::ScheduledSet.new, Sidekiq::RetrySet.new, Sidekiq::Queue.new(QUEUE)]
+ end
+end
diff --git a/db/migrate/20200222055543_add_confidential_to_note.rb b/db/migrate/20200222055543_add_confidential_to_note.rb
new file mode 100644
index 00000000000..d7bf64dca44
--- /dev/null
+++ b/db/migrate/20200222055543_add_confidential_to_note.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+class AddConfidentialToNote < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def up
+ with_lock_retries do
+ add_column :notes, :confidential, :boolean
+ end
+ end
+
+ def down
+ with_lock_retries do
+ remove_column :notes, :confidential
+ end
+ end
+end
diff --git a/db/migrate/20200224020219_add_status_page_settings.rb b/db/migrate/20200224020219_add_status_page_settings.rb
new file mode 100644
index 00000000000..b960b60881e
--- /dev/null
+++ b/db/migrate/20200224020219_add_status_page_settings.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+class AddStatusPageSettings < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def change
+ create_table :status_page_settings, id: false do |t|
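+ # project acts as both primary key and foreign key, so each project has at most one row.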
+ t.references :project, index: true, primary_key: true, foreign_key: { on_delete: :cascade }, unique: true, null: false
+ t.timestamps_with_timezone null: false
+ t.boolean :enabled, default: false, null: false
+ t.string :aws_s3_bucket_name, limit: 63, null: false
+ t.string :aws_region, limit: 255, null: false
+ t.string :aws_access_key, limit: 255, null: false
+ t.string :encrypted_aws_secret_key, limit: 255, null: false
+ t.string :encrypted_aws_secret_key_iv, limit: 255, null: false
+ end
+ end
+end
diff --git a/db/migrate/20200224163804_add_version_to_feature_flags_table.rb b/db/migrate/20200224163804_add_version_to_feature_flags_table.rb
new file mode 100644
index 00000000000..bf3179f070c
--- /dev/null
+++ b/db/migrate/20200224163804_add_version_to_feature_flags_table.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+class AddVersionToFeatureFlagsTable < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ FEATURE_FLAG_LEGACY_VERSION = 1
+
+ def up
+ # The operations_feature_flags table is small enough that we can disable this cop.
+ # See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/25552#note_291202882
+ add_column_with_default(:operations_feature_flags, :version, :smallint, default: FEATURE_FLAG_LEGACY_VERSION, allow_null: false)
+ end
+
+ def down
+ remove_column(:operations_feature_flags, :version)
+ end
+end
diff --git a/db/migrate/20200224185814_add_project_subscriptions_to_plan_limits.rb b/db/migrate/20200224185814_add_project_subscriptions_to_plan_limits.rb
new file mode 100644
index 00000000000..789f23501fb
--- /dev/null
+++ b/db/migrate/20200224185814_add_project_subscriptions_to_plan_limits.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+class AddProjectSubscriptionsToPlanLimits < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def change
+ add_column(:plan_limits, :ci_project_subscriptions, :integer, default: 0, null: false)
+ end
+end
diff --git a/db/migrate/20200225111018_add_index_for_group_and_iid_search_to_epics.rb b/db/migrate/20200225111018_add_index_for_group_and_iid_search_to_epics.rb
new file mode 100644
index 00000000000..98f35d9f1ae
--- /dev/null
+++ b/db/migrate/20200225111018_add_index_for_group_and_iid_search_to_epics.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+class AddIndexForGroupAndIidSearchToEpics < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+ INDEX_NAME = 'index_epics_on_group_id_and_iid_varchar_pattern'
+
+ disable_ddl_transaction!
+
+ def up
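+ # Casting iid to VARCHAR with varchar_pattern_ops lets LIKE 'prefix%' searches on the iid
+ # use this index when looking up epics by group and partial iid.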
+ disable_statement_timeout do
+ execute "CREATE INDEX CONCURRENTLY \"#{INDEX_NAME}\" ON epics (group_id, CAST(iid AS VARCHAR) varchar_pattern_ops);"
+ end
+ end
+
+ def down
+ disable_statement_timeout do
+ remove_concurrent_index_by_name :epics, INDEX_NAME
+ end
+ end
+end
diff --git a/db/migrate/20200225123228_insert_project_subscriptions_plan_limits.rb b/db/migrate/20200225123228_insert_project_subscriptions_plan_limits.rb
new file mode 100644
index 00000000000..f04e0c68cf6
--- /dev/null
+++ b/db/migrate/20200225123228_insert_project_subscriptions_plan_limits.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+class InsertProjectSubscriptionsPlanLimits < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def up
+ return unless Gitlab.com?
+
+ create_or_update_plan_limit('ci_project_subscriptions', 'free', 2)
+ create_or_update_plan_limit('ci_project_subscriptions', 'bronze', 2)
+ create_or_update_plan_limit('ci_project_subscriptions', 'silver', 2)
+ create_or_update_plan_limit('ci_project_subscriptions', 'gold', 2)
+ end
+
+ def down
+ return unless Gitlab.com?
+
+ create_or_update_plan_limit('ci_project_subscriptions', 'free', 0)
+ create_or_update_plan_limit('ci_project_subscriptions', 'bronze', 0)
+ create_or_update_plan_limit('ci_project_subscriptions', 'silver', 0)
+ create_or_update_plan_limit('ci_project_subscriptions', 'gold', 0)
+ end
+end
diff --git a/db/migrate/20200226100614_create_requirements.rb b/db/migrate/20200226100614_create_requirements.rb
new file mode 100644
index 00000000000..4ebbf38b8d1
--- /dev/null
+++ b/db/migrate/20200226100614_create_requirements.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+class CreateRequirements < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def change
+ create_table :requirements do |t|
+ t.timestamps_with_timezone null: false
+ t.integer :project_id, null: false
+ t.integer :author_id
+ t.integer :iid, null: false
+ t.integer :cached_markdown_version
+ t.integer :state, limit: 2, default: 1, null: false
+ t.string :title, limit: 255, null: false
+ t.text :title_html
+
+ t.index :project_id
+ t.index :author_id
+ t.index :title, name: "index_requirements_on_title_trigram", using: :gin, opclass: :gin_trgm_ops
+ t.index :state
+ t.index :created_at
+ t.index :updated_at
+ t.index %w(project_id iid), name: 'index_requirements_on_project_id_and_iid', where: 'project_id IS NOT NULL', unique: true, using: :btree
+ end
+ end
+end
diff --git a/db/migrate/20200226100624_requirements_add_project_fk.rb b/db/migrate/20200226100624_requirements_add_project_fk.rb
new file mode 100644
index 00000000000..7c133e820f3
--- /dev/null
+++ b/db/migrate/20200226100624_requirements_add_project_fk.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class RequirementsAddProjectFk < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def up
+ with_lock_retries do
+ add_foreign_key(:requirements, :projects, column: :project_id, on_delete: :cascade) # rubocop: disable Migration/AddConcurrentForeignKey
+ end
+ end
+
+ def down
+ with_lock_retries do
+ remove_foreign_key(:requirements, column: :project_id)
+ end
+ end
+end
diff --git a/db/migrate/20200226100634_requirements_add_author_fk.rb b/db/migrate/20200226100634_requirements_add_author_fk.rb
new file mode 100644
index 00000000000..8e1a726bb76
--- /dev/null
+++ b/db/migrate/20200226100634_requirements_add_author_fk.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class RequirementsAddAuthorFk < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def up
+ with_lock_retries do
+ add_foreign_key(:requirements, :users, column: :author_id, on_delete: :nullify) # rubocop: disable Migration/AddConcurrentForeignKey
+ end
+ end
+
+ def down
+ with_lock_retries do
+ remove_foreign_key(:requirements, column: :author_id)
+ end
+ end
+end
diff --git a/db/migrate/20200226162156_rename_closed_at_to_dismissed_at_in_vulnerabilities.rb b/db/migrate/20200226162156_rename_closed_at_to_dismissed_at_in_vulnerabilities.rb
new file mode 100644
index 00000000000..ce7170cb335
--- /dev/null
+++ b/db/migrate/20200226162156_rename_closed_at_to_dismissed_at_in_vulnerabilities.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class RenameClosedAtToDismissedAtInVulnerabilities < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
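+ # rename_column_concurrently adds dismissed_at, keeps it in sync with closed_at via a
+ # trigger and copies the existing data; the old column is dropped by a later cleanup migration.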
+ def up
+ rename_column_concurrently :vulnerabilities, :closed_at, :dismissed_at
+ end
+
+ def down
+ undo_rename_column_concurrently :vulnerabilities, :closed_at, :dismissed_at
+ end
+end
diff --git a/db/migrate/20200226162634_rename_closed_by_to_dismissed_by_in_vulnerabilities.rb b/db/migrate/20200226162634_rename_closed_by_to_dismissed_by_in_vulnerabilities.rb
new file mode 100644
index 00000000000..04b1f0d7136
--- /dev/null
+++ b/db/migrate/20200226162634_rename_closed_by_to_dismissed_by_in_vulnerabilities.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class RenameClosedByToDismissedByInVulnerabilities < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ rename_column_concurrently :vulnerabilities, :closed_by_id, :dismissed_by_id
+ end
+
+ def down
+ undo_rename_column_concurrently :vulnerabilities, :closed_by_id, :dismissed_by_id
+ end
+end
diff --git a/db/migrate/20200227164113_create_scim_identities.rb b/db/migrate/20200227164113_create_scim_identities.rb
new file mode 100644
index 00000000000..1942270761b
--- /dev/null
+++ b/db/migrate/20200227164113_create_scim_identities.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+class CreateScimIdentities < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def change
+ create_table :scim_identities do |t|
+ t.references :group, foreign_key: { to_table: :namespaces, on_delete: :cascade }, null: false
+ t.references :user, index: false, foreign_key: { on_delete: :cascade }, null: false
+ t.timestamps_with_timezone
+ t.boolean :active, default: false
+ t.string :extern_uid, null: false, limit: 255
+
+ t.index 'LOWER(extern_uid),group_id', name: 'index_scim_identities_on_lower_extern_uid_and_group_id', unique: true
+ t.index [:user_id, :group_id], unique: true
+ end
+ end
+end
diff --git a/db/migrate/20200227165129_create_user_details.rb b/db/migrate/20200227165129_create_user_details.rb
new file mode 100644
index 00000000000..fe9f23fcb21
--- /dev/null
+++ b/db/migrate/20200227165129_create_user_details.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+class CreateUserDetails < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def up
+ with_lock_retries do
+ create_table :user_details, id: false do |t|
+ t.references :user, index: false, foreign_key: { on_delete: :cascade }, null: false, primary_key: true
+ t.string :job_title, limit: 200, default: "", null: false
+ end
+ end
+
+ add_index :user_details, :user_id, unique: true
+ end
+
+ def down
+ with_lock_retries do
+ drop_table :user_details
+ end
+ end
+end
diff --git a/db/migrate/20200228160542_create_ci_sources_projects.rb b/db/migrate/20200228160542_create_ci_sources_projects.rb
new file mode 100644
index 00000000000..36f5167a784
--- /dev/null
+++ b/db/migrate/20200228160542_create_ci_sources_projects.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class CreateCiSourcesProjects < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def change
+ create_table :ci_sources_projects do |t|
+ t.bigint :pipeline_id, null: false
+ t.bigint :source_project_id, null: false
+
+ t.index [:source_project_id, :pipeline_id], unique: true
+ t.index :pipeline_id
+ end
+ end
+end
diff --git a/db/migrate/20200303055348_add_expires_at_to_keys.rb b/db/migrate/20200303055348_add_expires_at_to_keys.rb
new file mode 100644
index 00000000000..ef7b813a2ef
--- /dev/null
+++ b/db/migrate/20200303055348_add_expires_at_to_keys.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+class AddExpiresAtToKeys < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def change
+ add_column :keys, :expires_at, :datetime_with_timezone
+ end
+end
diff --git a/db/migrate/20200303074328_add_index_on_snippet_description.rb b/db/migrate/20200303074328_add_index_on_snippet_description.rb
new file mode 100644
index 00000000000..f23e5f8bf8e
--- /dev/null
+++ b/db/migrate/20200303074328_add_index_on_snippet_description.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+class AddIndexOnSnippetDescription < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+ INDEX_NAME = 'index_snippets_on_description_trigram'
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :snippets, :description, name: INDEX_NAME, using: :gin, opclass: { description: :gin_trgm_ops }
+ end
+
+ def down
+ remove_concurrent_index_by_name :snippets, INDEX_NAME
+ end
+end
diff --git a/db/migrate/20200304085423_add_user_type.rb b/db/migrate/20200304085423_add_user_type.rb
new file mode 100644
index 00000000000..68db44c6847
--- /dev/null
+++ b/db/migrate/20200304085423_add_user_type.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+class AddUserType < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ with_lock_retries do
+ add_column :users, :user_type, :integer, limit: 2
+ end
+ end
+
+ def down
+ with_lock_retries do
+ remove_column :users, :user_type
+ end
+ end
+end
diff --git a/db/migrate/20200304090155_add_user_type_index.rb b/db/migrate/20200304090155_add_user_type_index.rb
new file mode 100644
index 00000000000..cd3b87d0a45
--- /dev/null
+++ b/db/migrate/20200304090155_add_user_type_index.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class AddUserTypeIndex < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :users, :user_type
+ end
+
+ def down
+ remove_concurrent_index :users, :user_type
+ end
+end
diff --git a/db/migrate/20200304121828_add_ci_sources_project_pipeline_foreign_key.rb b/db/migrate/20200304121828_add_ci_sources_project_pipeline_foreign_key.rb
new file mode 100644
index 00000000000..d5b0af41d4a
--- /dev/null
+++ b/db/migrate/20200304121828_add_ci_sources_project_pipeline_foreign_key.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class AddCiSourcesProjectPipelineForeignKey < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def up
+ with_lock_retries do
+ add_foreign_key :ci_sources_projects, :ci_pipelines, column: :pipeline_id, on_delete: :cascade # rubocop:disable Migration/AddConcurrentForeignKey
+ end
+ end
+
+ def down
+ with_lock_retries do
+ remove_foreign_key :ci_sources_projects, :ci_pipelines, column: :pipeline_id
+ end
+ end
+end
diff --git a/db/migrate/20200304121844_add_ci_sources_project_source_project_foreign_key.rb b/db/migrate/20200304121844_add_ci_sources_project_source_project_foreign_key.rb
new file mode 100644
index 00000000000..4f679bef85e
--- /dev/null
+++ b/db/migrate/20200304121844_add_ci_sources_project_source_project_foreign_key.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class AddCiSourcesProjectSourceProjectForeignKey < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def up
+ with_lock_retries do
+ add_foreign_key :ci_sources_projects, :projects, column: :source_project_id, on_delete: :cascade # rubocop:disable Migration/AddConcurrentForeignKey
+ end
+ end
+
+ def down
+ with_lock_retries do
+ remove_foreign_key :ci_sources_projects, :projects, column: :source_project_id
+ end
+ end
+end
diff --git a/db/migrate/20200304160800_add_index_services_on_template.rb b/db/migrate/20200304160800_add_index_services_on_template.rb
new file mode 100644
index 00000000000..731fa04123c
--- /dev/null
+++ b/db/migrate/20200304160800_add_index_services_on_template.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class AddIndexServicesOnTemplate < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ # This migration is a corrective action to add the missing
+ # index_services_on_template index on staging.
+ def up
+ add_concurrent_index(:services, :template) unless index_exists?(:services, :template)
+ end
+
+ def down
+ # No reverse action as this is a corrective migration.
+ end
+end
diff --git a/db/migrate/20200304160801_delete_template_services_duplicated_by_type.rb b/db/migrate/20200304160801_delete_template_services_duplicated_by_type.rb
new file mode 100644
index 00000000000..a1c5161aea0
--- /dev/null
+++ b/db/migrate/20200304160801_delete_template_services_duplicated_by_type.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+class DeleteTemplateServicesDuplicatedByType < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def up
+ # Delete service templates with duplicated types. Keep the service
+ # template with the lowest `id` because that is the service template used:
+ # https://gitlab.com/gitlab-org/gitlab/-/blob/v12.8.1-ee/app/controllers/admin/services_controller.rb#L37
+ execute <<~SQL
+ DELETE
+ FROM services
+ WHERE TEMPLATE = TRUE
+ AND id NOT IN
+ (SELECT MIN(id)
+ FROM services
+ WHERE TEMPLATE = TRUE
+ GROUP BY TYPE);
+ SQL
+ end
+
+ def down
+ # This migration cannot be reversed.
+ end
+end
diff --git a/db/migrate/20200304160823_add_index_to_service_unique_template_per_type.rb b/db/migrate/20200304160823_add_index_to_service_unique_template_per_type.rb
new file mode 100644
index 00000000000..b81e5acf67f
--- /dev/null
+++ b/db/migrate/20200304160823_add_index_to_service_unique_template_per_type.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class AddIndexToServiceUniqueTemplatePerType < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index(:services, [:type, :template], unique: true, where: 'template IS TRUE')
+ end
+
+ def down
+ remove_concurrent_index(:services, [:type, :template])
+ end
+end
diff --git a/db/migrate/20200305121159_add_merge_request_metrics_first_reassigned_at.rb b/db/migrate/20200305121159_add_merge_request_metrics_first_reassigned_at.rb
new file mode 100644
index 00000000000..714adf925ed
--- /dev/null
+++ b/db/migrate/20200305121159_add_merge_request_metrics_first_reassigned_at.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class AddMergeRequestMetricsFirstReassignedAt < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def up
+ with_lock_retries do
+ add_column :merge_request_metrics, :first_reassigned_at, :datetime_with_timezone
+ end
+ end
+
+ def down
+ with_lock_retries do
+ remove_column :merge_request_metrics, :first_reassigned_at, :datetime_with_timezone
+ end
+ end
+end
diff --git a/db/migrate/20200305151736_delete_template_project_services.rb b/db/migrate/20200305151736_delete_template_project_services.rb
new file mode 100644
index 00000000000..2ab8d46a94e
--- /dev/null
+++ b/db/migrate/20200305151736_delete_template_project_services.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class DeleteTemplateProjectServices < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def up
+ # 12.9 introduces an ActiveRecord validation that prevents a service from being a template
+ # and attached to a project at the same time. This migration cleans up data that violates it.
+ execute <<~SQL
+ DELETE
+ FROM services
+ WHERE TEMPLATE = TRUE AND project_id IS NOT NULL
+ SQL
+ end
+
+ def down
+ # This migration cannot be reversed.
+ end
+end
diff --git a/db/migrate/20200306095654_add_merge_request_assignee_created_at.rb b/db/migrate/20200306095654_add_merge_request_assignee_created_at.rb
new file mode 100644
index 00000000000..127a20f127b
--- /dev/null
+++ b/db/migrate/20200306095654_add_merge_request_assignee_created_at.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class AddMergeRequestAssigneeCreatedAt < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def up
+ with_lock_retries do
+ add_column :merge_request_assignees, :created_at, :datetime_with_timezone
+ end
+ end
+
+ def down
+ with_lock_retries do
+ remove_column :merge_request_assignees, :created_at
+ end
+ end
+end
diff --git a/db/migrate/20200306160521_add_index_on_author_id_and_created_at_to_events.rb b/db/migrate/20200306160521_add_index_on_author_id_and_created_at_to_events.rb
new file mode 100644
index 00000000000..3328a14bb65
--- /dev/null
+++ b/db/migrate/20200306160521_add_index_on_author_id_and_created_at_to_events.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class AddIndexOnAuthorIdAndCreatedAtToEvents < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :events, [:author_id, :created_at]
+ end
+
+ def down
+ remove_concurrent_index :events, [:author_id, :created_at]
+ end
+end
diff --git a/db/migrate/20200306170211_add_index_on_author_id_and_id_and_created_at_to_issues.rb b/db/migrate/20200306170211_add_index_on_author_id_and_id_and_created_at_to_issues.rb
new file mode 100644
index 00000000000..c581ca3874f
--- /dev/null
+++ b/db/migrate/20200306170211_add_index_on_author_id_and_id_and_created_at_to_issues.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class AddIndexOnAuthorIdAndIdAndCreatedAtToIssues < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :issues, [:author_id, :id, :created_at]
+ end
+
+ def down
+ remove_concurrent_index :issues, [:author_id, :id, :created_at]
+ end
+end
diff --git a/db/migrate/20200306170321_add_index_on_user_id_and_created_at_to_ci_pipelines.rb b/db/migrate/20200306170321_add_index_on_user_id_and_created_at_to_ci_pipelines.rb
new file mode 100644
index 00000000000..b88f938d1c2
--- /dev/null
+++ b/db/migrate/20200306170321_add_index_on_user_id_and_created_at_to_ci_pipelines.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class AddIndexOnUserIdAndCreatedAtToCiPipelines < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :ci_pipelines, [:user_id, :created_at]
+ remove_concurrent_index :ci_pipelines, [:user_id]
+ end
+
+ def down
+ add_concurrent_index :ci_pipelines, [:user_id]
+ remove_concurrent_index :ci_pipelines, [:user_id, :created_at]
+ end
+end
diff --git a/db/migrate/20200306170531_add_index_on_author_id_and_created_at_to_todos.rb b/db/migrate/20200306170531_add_index_on_author_id_and_created_at_to_todos.rb
new file mode 100644
index 00000000000..d0d31ca7c52
--- /dev/null
+++ b/db/migrate/20200306170531_add_index_on_author_id_and_created_at_to_todos.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+class AddIndexOnAuthorIdAndCreatedAtToTodos < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :todos, [:author_id, :created_at]
+ end
+
+ def down
+ remove_concurrent_index :todos, [:author_id, :created_at]
+ end
+end
diff --git a/db/migrate/20200306192548_add_index_on_project_id_and_type_to_services.rb b/db/migrate/20200306192548_add_index_on_project_id_and_type_to_services.rb
new file mode 100644
index 00000000000..9deb3c2832d
--- /dev/null
+++ b/db/migrate/20200306192548_add_index_on_project_id_and_type_to_services.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+class AddIndexOnProjectIdAndTypeToServices < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+ INDEX_NAME = 'index_services_on_project_id'
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :services, [:project_id, :type]
+
+ remove_concurrent_index_by_name :services, INDEX_NAME
+ end
+
+ def down
+ add_concurrent_index :services, :project_id, name: INDEX_NAME
+
+ remove_concurrent_index :services, [:project_id, :type]
+ end
+end
diff --git a/db/migrate/20200306193236_add_index_on_creator_id_and_created_at_to_projects.rb b/db/migrate/20200306193236_add_index_on_creator_id_and_created_at_to_projects.rb
new file mode 100644
index 00000000000..913383d32f4
--- /dev/null
+++ b/db/migrate/20200306193236_add_index_on_creator_id_and_created_at_to_projects.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+class AddIndexOnCreatorIdAndCreatedAtToProjects < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+ INDEX_NAME = 'index_projects_on_creator_id'
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :projects, [:creator_id, :created_at]
+
+ remove_concurrent_index_by_name :projects, INDEX_NAME
+ end
+
+ def down
+ add_concurrent_index :projects, :creator_id, name: INDEX_NAME
+
+ remove_concurrent_index :projects, [:creator_id, :created_at]
+ end
+end
diff --git a/db/migrate/20200309140540_add_index_on_project_id_and_repository_access_level_to_project_features.rb b/db/migrate/20200309140540_add_index_on_project_id_and_repository_access_level_to_project_features.rb
new file mode 100644
index 00000000000..a2093db0b3b
--- /dev/null
+++ b/db/migrate/20200309140540_add_index_on_project_id_and_repository_access_level_to_project_features.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+class AddIndexOnProjectIdAndRepositoryAccessLevelToProjectFeatures < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+ INDEX_NAME = 'index_project_features_on_project_id_ral_20'
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :project_features, :project_id, where: 'repository_access_level = 20', name: INDEX_NAME
+ end
+
+ def down
+ remove_concurrent_index_by_name :project_features, INDEX_NAME
+ end
+end
diff --git a/db/migrate/20200309162244_add_open_project_tracker_data.rb b/db/migrate/20200309162244_add_open_project_tracker_data.rb
new file mode 100644
index 00000000000..672dde4d518
--- /dev/null
+++ b/db/migrate/20200309162244_add_open_project_tracker_data.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+# See https://docs.gitlab.com/ee/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class AddOpenProjectTrackerData < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def change
+ create_table :open_project_tracker_data do |t|
+ t.references :service, foreign_key: { on_delete: :cascade }, type: :integer, index: true, null: false
+ t.timestamps_with_timezone
+ t.string :encrypted_url, limit: 255
+ t.string :encrypted_url_iv, limit: 255
+ t.string :encrypted_api_url, limit: 255
+ t.string :encrypted_api_url_iv, limit: 255
+ t.string :encrypted_token, limit: 255
+ t.string :encrypted_token_iv, limit: 255
+ t.string :closed_status_id, limit: 5
+ t.string :project_identifier_code, limit: 100
+ end
+ end
+end
diff --git a/db/migrate/20200309195209_add_index_on_project_id_and_builds_access_level_to_project_features.rb b/db/migrate/20200309195209_add_index_on_project_id_and_builds_access_level_to_project_features.rb
new file mode 100644
index 00000000000..8d6ab60ca48
--- /dev/null
+++ b/db/migrate/20200309195209_add_index_on_project_id_and_builds_access_level_to_project_features.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+class AddIndexOnProjectIdAndBuildsAccessLevelToProjectFeatures < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+ INDEX_NAME = 'index_project_features_on_project_id_bal_20'
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :project_features, :project_id, where: 'builds_access_level = 20', name: INDEX_NAME
+ end
+
+ def down
+ remove_concurrent_index_by_name :project_features, INDEX_NAME
+ end
+end
diff --git a/db/migrate/20200309195710_add_index_on_mirror_and_creator_id_and_created_at_to_projects.rb b/db/migrate/20200309195710_add_index_on_mirror_and_creator_id_and_created_at_to_projects.rb
new file mode 100644
index 00000000000..e1bdb5d7a0d
--- /dev/null
+++ b/db/migrate/20200309195710_add_index_on_mirror_and_creator_id_and_created_at_to_projects.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+class AddIndexOnMirrorAndCreatorIdAndCreatedAtToProjects < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+ INDEX_NAME = 'index_projects_on_mirror_creator_id_created_at'
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :projects, [:creator_id, :created_at], where: 'mirror = true and mirror_trigger_builds = true', name: INDEX_NAME
+ end
+
+ def down
+ remove_concurrent_index_by_name :projects, INDEX_NAME
+ end
+end
diff --git a/db/migrate/20200310123229_add_index_on_enabled_and_provider_type_and_id_to_clusters.rb b/db/migrate/20200310123229_add_index_on_enabled_and_provider_type_and_id_to_clusters.rb
new file mode 100644
index 00000000000..edd9343e743
--- /dev/null
+++ b/db/migrate/20200310123229_add_index_on_enabled_and_provider_type_and_id_to_clusters.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class AddIndexOnEnabledAndProviderTypeAndIdToClusters < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :clusters, [:enabled, :provider_type, :id]
+ remove_concurrent_index :clusters, :enabled
+ end
+
+ def down
+ add_concurrent_index :clusters, :enabled
+ remove_concurrent_index :clusters, [:enabled, :provider_type, :id]
+ end
+end
diff --git a/db/migrate/20200310132654_add_instance_to_services.rb b/db/migrate/20200310132654_add_instance_to_services.rb
new file mode 100644
index 00000000000..85b16a4094c
--- /dev/null
+++ b/db/migrate/20200310132654_add_instance_to_services.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class AddInstanceToServices < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_column_with_default(:services, :instance, :boolean, default: false)
+ end
+
+ def down
+ remove_column(:services, :instance)
+ end
+end
diff --git a/db/migrate/20200310133822_add_index_on_author_id_and_id_and_created_at_to_notes.rb b/db/migrate/20200310133822_add_index_on_author_id_and_id_and_created_at_to_notes.rb
new file mode 100644
index 00000000000..8005da5fbae
--- /dev/null
+++ b/db/migrate/20200310133822_add_index_on_author_id_and_id_and_created_at_to_notes.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class AddIndexOnAuthorIdAndIdAndCreatedAtToNotes < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :notes, [:author_id, :created_at]
+ remove_concurrent_index :notes, [:author_id]
+ end
+
+ def down
+ add_concurrent_index :notes, [:author_id]
+ remove_concurrent_index :notes, [:author_id, :created_at]
+ end
+end
diff --git a/db/migrate/20200310135823_add_index_to_service_unique_instance_per_type.rb b/db/migrate/20200310135823_add_index_to_service_unique_instance_per_type.rb
new file mode 100644
index 00000000000..1a60c521b71
--- /dev/null
+++ b/db/migrate/20200310135823_add_index_to_service_unique_instance_per_type.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class AddIndexToServiceUniqueInstancePerType < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index(:services, [:type, :instance], unique: true, where: 'instance IS TRUE')
+ end
+
+ def down
+ remove_concurrent_index(:services, [:type, :instance])
+ end
+end
diff --git a/db/migrate/20200310145304_add_runtime_created_to_ci_job_variables.rb b/db/migrate/20200310145304_add_runtime_created_to_ci_job_variables.rb
new file mode 100644
index 00000000000..d5ec8854bfa
--- /dev/null
+++ b/db/migrate/20200310145304_add_runtime_created_to_ci_job_variables.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class AddRuntimeCreatedToCiJobVariables < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ DEFAULT_SOURCE = 0 # Equivalent to Ci::JobVariable.internal_source
+
+ def up
+ add_column_with_default(:ci_job_variables, :source, :integer, limit: 2, default: DEFAULT_SOURCE, allow_null: false)
+ end
+
+ def down
+ remove_column(:ci_job_variables, :source)
+ end
+end
diff --git a/db/migrate/20200311084025_add_index_on_user_id_status_created_at_to_deployments.rb b/db/migrate/20200311084025_add_index_on_user_id_status_created_at_to_deployments.rb
new file mode 100644
index 00000000000..1744b701f0a
--- /dev/null
+++ b/db/migrate/20200311084025_add_index_on_user_id_status_created_at_to_deployments.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class AddIndexOnUserIdStatusCreatedAtToDeployments < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :deployments, [:user_id, :status, :created_at]
+ end
+
+ def down
+ remove_concurrent_index :deployments, [:user_id, :status, :created_at]
+ end
+end
diff --git a/db/migrate/20200311093210_create_user_highest_roles.rb b/db/migrate/20200311093210_create_user_highest_roles.rb
new file mode 100644
index 00000000000..36007f196d1
--- /dev/null
+++ b/db/migrate/20200311093210_create_user_highest_roles.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+class CreateUserHighestRoles < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def up
+ with_lock_retries do
+ create_table :user_highest_roles, id: false do |t|
+ t.datetime_with_timezone :updated_at, null: false
+ t.references :user, primary_key: true, default: nil, index: false, foreign_key: { on_delete: :cascade }
+ t.integer :highest_access_level
+
+ t.index [:user_id, :highest_access_level]
+ end
+ end
+ end
+
+ def down
+ with_lock_retries do
+ drop_table :user_highest_roles
+ end
+ end
+end
diff --git a/db/migrate/20200311094020_add_index_on_id_and_status_to_deployments.rb b/db/migrate/20200311094020_add_index_on_id_and_status_to_deployments.rb
new file mode 100644
index 00000000000..77c746ed88b
--- /dev/null
+++ b/db/migrate/20200311094020_add_index_on_id_and_status_to_deployments.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class AddIndexOnIdAndStatusToDeployments < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :deployments, [:id, :status]
+ end
+
+ def down
+ remove_concurrent_index :deployments, [:id, :status]
+ end
+end
diff --git a/db/migrate/20200311141053_add_ci_pipeline_schedules_to_plan_limits.rb b/db/migrate/20200311141053_add_ci_pipeline_schedules_to_plan_limits.rb
new file mode 100644
index 00000000000..2fc7785fe9c
--- /dev/null
+++ b/db/migrate/20200311141053_add_ci_pipeline_schedules_to_plan_limits.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class AddCiPipelineSchedulesToPlanLimits < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ disable_ddl_transaction!
+
+ DOWNTIME = false
+
+ def up
+ add_column_with_default(:plan_limits, :ci_pipeline_schedules, :integer, default: 0, allow_null: false)
+ end
+
+ def down
+ remove_column(:plan_limits, :ci_pipeline_schedules)
+ end
+end
diff --git a/db/migrate/20200311141943_insert_ci_pipeline_schedules_plan_limits.rb b/db/migrate/20200311141943_insert_ci_pipeline_schedules_plan_limits.rb
new file mode 100644
index 00000000000..849d95667a7
--- /dev/null
+++ b/db/migrate/20200311141943_insert_ci_pipeline_schedules_plan_limits.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+class InsertCiPipelineSchedulesPlanLimits < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def up
+ return unless Gitlab.com?
+
+ create_or_update_plan_limit('ci_pipeline_schedules', 'free', 10)
+ create_or_update_plan_limit('ci_pipeline_schedules', 'bronze', 50)
+ create_or_update_plan_limit('ci_pipeline_schedules', 'silver', 50)
+ create_or_update_plan_limit('ci_pipeline_schedules', 'gold', 50)
+ end
+
+ def down
+ return unless Gitlab.com?
+
+ create_or_update_plan_limit('ci_pipeline_schedules', 'free', 0)
+ create_or_update_plan_limit('ci_pipeline_schedules', 'bronze', 0)
+ create_or_update_plan_limit('ci_pipeline_schedules', 'silver', 0)
+ create_or_update_plan_limit('ci_pipeline_schedules', 'gold', 0)
+ end
+end
diff --git a/db/migrate/20200311165635_create_project_export_jobs.rb b/db/migrate/20200311165635_create_project_export_jobs.rb
new file mode 100644
index 00000000000..026ad2cd771
--- /dev/null
+++ b/db/migrate/20200311165635_create_project_export_jobs.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class CreateProjectExportJobs < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def change
+ create_table :project_export_jobs do |t|
+ t.references :project, index: false, null: false, foreign_key: { on_delete: :cascade }
+ t.timestamps_with_timezone null: false
+ t.integer :status, limit: 2, null: false, default: 0
+ t.string :jid, limit: 100, null: false, unique: true
+
+ t.index [:project_id, :jid]
+ t.index [:jid], unique: true
+ t.index [:status]
+ t.index [:project_id, :status]
+ end
+ end
+end
diff --git a/db/migrate/20200312163407_add_index_on_id_and_service_desk_enabled_to_projects.rb b/db/migrate/20200312163407_add_index_on_id_and_service_desk_enabled_to_projects.rb
new file mode 100644
index 00000000000..903bd12ddf4
--- /dev/null
+++ b/db/migrate/20200312163407_add_index_on_id_and_service_desk_enabled_to_projects.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+class AddIndexOnIdAndServiceDeskEnabledToProjects < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+ INDEX_NAME = 'index_projects_on_id_service_desk_enabled'
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :projects, :id, where: 'service_desk_enabled = true', name: INDEX_NAME
+ end
+
+ def down
+ remove_concurrent_index_by_name :projects, INDEX_NAME
+ end
+end
diff --git a/db/migrate/20200313123934_add_index_on_user_id_type_source_type_ldap_and_created_at_to_members.rb b/db/migrate/20200313123934_add_index_on_user_id_type_source_type_ldap_and_created_at_to_members.rb
new file mode 100644
index 00000000000..0215d102529
--- /dev/null
+++ b/db/migrate/20200313123934_add_index_on_user_id_type_source_type_ldap_and_created_at_to_members.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+class AddIndexOnUserIdTypeSourceTypeLdapAndCreatedAtToMembers < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+ INDEX_NAME = 'index_members_on_user_id_created_at'
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :members, [:user_id, :created_at], where: "ldap = TRUE AND type = 'GroupMember' AND source_type = 'Namespace'", name: INDEX_NAME
+ end
+
+ def down
+ remove_concurrent_index_by_name :members, INDEX_NAME
+ end
+end
diff --git a/db/post_migrate/20200120083607_remove_storage_version_column_from_snippets.rb b/db/post_migrate/20200120083607_remove_storage_version_column_from_snippets.rb
index 62bb3f46cae..e94dc75e65c 100644
--- a/db/post_migrate/20200120083607_remove_storage_version_column_from_snippets.rb
+++ b/db/post_migrate/20200120083607_remove_storage_version_column_from_snippets.rb
@@ -19,7 +19,7 @@ class RemoveStorageVersionColumnFromSnippets < ActiveRecord::Migration[5.2]
def down
return if column_exists?(:snippets, :storage_version)
- add_column_with_default( # rubocop:disable Migration/AddColumnWithDefault
+ add_column_with_default(
:snippets,
:storage_version,
:integer,
diff --git a/db/post_migrate/20200124110831_migrate_design_notes_mentions_to_db.rb b/db/post_migrate/20200124110831_migrate_design_notes_mentions_to_db.rb
new file mode 100644
index 00000000000..f1e4ee9807b
--- /dev/null
+++ b/db/post_migrate/20200124110831_migrate_design_notes_mentions_to_db.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+class MigrateDesignNotesMentionsToDb < ActiveRecord::Migration[5.2]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ DELAY = 2.minutes.to_i
+ BATCH_SIZE = 10000
+ MIGRATION = 'UserMentions::CreateResourceUserMention'
+
+ INDEX_NAME = 'design_mentions_temp_index'
+ INDEX_CONDITION = "note LIKE '%@%'::text AND notes.noteable_type = 'DesignManagement::Design'"
+ QUERY_CONDITIONS = "#{INDEX_CONDITION} AND design_user_mentions.design_id IS NULL"
+ JOIN = 'INNER JOIN design_management_designs ON design_management_designs.id = notes.noteable_id LEFT JOIN design_user_mentions ON notes.id = design_user_mentions.note_id'
+
+ class DesignUserMention < ActiveRecord::Base
+ include EachBatch
+
+ self.table_name = 'design_user_mentions'
+ end
+
+ class Note < ActiveRecord::Base
+ include EachBatch
+
+ self.table_name = 'notes'
+ end
+
+ def up
+ return unless Gitlab.ee?
+
+ # cleanup design user mentions with no actual mentions,
+ # re https://gitlab.com/gitlab-org/gitlab/-/merge_requests/24586#note_285982468
+ DesignUserMention
+ .where(mentioned_users_ids: nil)
+ .where(mentioned_groups_ids: nil)
+ .where(mentioned_projects_ids: nil)
+ .each_batch(of: BATCH_SIZE) do |batch|
+ batch.delete_all
+ end
+
+ # create temporary index for notes with mentions, may take well over 1h
+ add_concurrent_index(:notes, :id, where: INDEX_CONDITION, name: INDEX_NAME)
+
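+ # One UserMentions::CreateResourceUserMention background job is queued per batch of up to
+ # BATCH_SIZE note ids, spaced DELAY (two minutes) apart, so the actual mention extraction
+ # runs asynchronously in Sidekiq.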
+ Note
+ .joins(JOIN)
+ .where(QUERY_CONDITIONS)
+ .each_batch(of: BATCH_SIZE) do |batch, index|
+ range = batch.pluck(Arel.sql('MIN(notes.id)'), Arel.sql('MAX(notes.id)')).first
+ BackgroundMigrationWorker.perform_in(index * DELAY, MIGRATION, ['DesignManagement::Design', JOIN, QUERY_CONDITIONS, true, *range])
+ end
+ end
+
+ def down
+ # no-op
+ # temporary index is to be dropped in a different migration in an upcoming release:
+ # https://gitlab.com/gitlab-org/gitlab/issues/196842
+ end
+end
diff --git a/db/post_migrate/20200128132510_add_temporary_index_for_notes_with_mentions.rb b/db/post_migrate/20200128132510_add_temporary_index_for_notes_with_mentions.rb
new file mode 100644
index 00000000000..bd55485f871
--- /dev/null
+++ b/db/post_migrate/20200128132510_add_temporary_index_for_notes_with_mentions.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+class AddTemporaryIndexForNotesWithMentions < ActiveRecord::Migration[5.2]
+ include Gitlab::Database::MigrationHelpers
+
+ disable_ddl_transaction!
+
+ INDEX_CONDITION = "note LIKE '%@%'::text"
+ INDEX_NAME = 'note_mentions_temp_index'
+
+ EPIC_MENTIONS_INDEX_NAME = 'epic_mentions_temp_index'
+ DESIGN_MENTIONS_INDEX_NAME = 'design_mentions_temp_index'
+
+ def up
+ # create temporary index for notes with mentions, may take well over 1h
+ add_concurrent_index(:notes, [:id, :noteable_type], where: INDEX_CONDITION, name: INDEX_NAME)
+
+ # cleanup previous temporary indexes, as we'll be using the single one
+ remove_concurrent_index(:notes, :id, name: EPIC_MENTIONS_INDEX_NAME)
+ remove_concurrent_index(:notes, :id, name: DESIGN_MENTIONS_INDEX_NAME)
+ end
+
+ def down
+ remove_concurrent_index(:notes, :id, name: INDEX_NAME)
+
+ add_concurrent_index(:notes, :id, where: "#{INDEX_CONDITION} AND noteable_type='Epic'", name: EPIC_MENTIONS_INDEX_NAME)
+ add_concurrent_index(:notes, :id, where: "#{INDEX_CONDITION} AND noteable_type='DesignManagement::Design'", name: DESIGN_MENTIONS_INDEX_NAME)
+ end
+end
diff --git a/db/post_migrate/20200128133510_cleanup_empty_commit_user_mentions.rb b/db/post_migrate/20200128133510_cleanup_empty_commit_user_mentions.rb
new file mode 100644
index 00000000000..362aa3a60f7
--- /dev/null
+++ b/db/post_migrate/20200128133510_cleanup_empty_commit_user_mentions.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+class CleanupEmptyCommitUserMentions < ActiveRecord::Migration[5.2]
+ DOWNTIME = false
+ BATCH_SIZE = 10_000
+
+ class CommitUserMention < ActiveRecord::Base
+ include EachBatch
+
+ self.table_name = 'commit_user_mentions'
+ end
+
+ def up
+ # cleanup commit user mentions with no actual mentions,
+ # re https://gitlab.com/gitlab-org/gitlab/-/merge_requests/24586#note_285982468
+ CommitUserMention
+ .where(mentioned_users_ids: nil)
+ .where(mentioned_groups_ids: nil)
+ .where(mentioned_projects_ids: nil)
+ .each_batch(of: BATCH_SIZE) do |batch|
+ batch.delete_all
+ end
+ end
+
+ def down
+ # no-op
+ end
+end
diff --git a/db/post_migrate/20200128134110_migrate_commit_notes_mentions_to_db.rb b/db/post_migrate/20200128134110_migrate_commit_notes_mentions_to_db.rb
new file mode 100644
index 00000000000..5b8ed99fb7a
--- /dev/null
+++ b/db/post_migrate/20200128134110_migrate_commit_notes_mentions_to_db.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+class MigrateCommitNotesMentionsToDb < ActiveRecord::Migration[5.2]
+ include Gitlab::Database::MigrationHelpers
+
+ disable_ddl_transaction!
+
+ DOWNTIME = false
+ DELAY = 3.minutes.to_i
+ BATCH_SIZE = 1_000
+ MIGRATION = 'UserMentions::CreateResourceUserMention'
+
+ QUERY_CONDITIONS = "note LIKE '%@%'::text AND notes.noteable_type = 'Commit' AND commit_user_mentions.commit_id IS NULL"
+ JOIN = 'LEFT JOIN commit_user_mentions ON notes.id = commit_user_mentions.note_id'
+
+ class Note < ActiveRecord::Base
+ include EachBatch
+
+ self.table_name = 'notes'
+ end
+
+ def up
+ Note
+ .joins(JOIN)
+ .where(QUERY_CONDITIONS)
+ .each_batch(of: BATCH_SIZE) do |batch, index|
+ range = batch.pluck(Arel.sql('MIN(notes.id)'), Arel.sql('MAX(notes.id)')).first
+ migrate_in(index * DELAY, MIGRATION, ['Commit', JOIN, QUERY_CONDITIONS, true, *range])
+ end
+ end
+
+ def down
+    # no-op
+    # The scheduled background jobs are not reversed; the temporary index they rely on
+    # is dropped in a separate migration in an upcoming release:
+    # https://gitlab.com/gitlab-org/gitlab/issues/196842
+ end
+end
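
Here migrate_in plays the same role as the direct BackgroundMigrationWorker.perform_in call used by
the first mentions migration in this patch: it enqueues the named background migration with a delay
(the helper is presumably provided by the included MigrationHelpers; it is not shown in this patch).
A sketch of the equivalent call for one batch, with start_id/stop_id taken from the plucked range:

  BackgroundMigrationWorker.perform_in(
    index * DELAY,                             # stagger batches three minutes apart
    'UserMentions::CreateResourceUserMention', # MIGRATION
    ['Commit', JOIN, QUERY_CONDITIONS, true, start_id, stop_id]
  )
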
diff --git a/db/post_migrate/20200128210353_cleanup_optimistic_locking_nulls.rb b/db/post_migrate/20200128210353_cleanup_optimistic_locking_nulls.rb
new file mode 100644
index 00000000000..8bc037c7333
--- /dev/null
+++ b/db/post_migrate/20200128210353_cleanup_optimistic_locking_nulls.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class CleanupOptimisticLockingNulls < ActiveRecord::Migration[5.2]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ TABLES = %w(epics merge_requests issues).freeze
+ BATCH_SIZE = 10_000
+
+ def declare_class(table)
+ Class.new(ActiveRecord::Base) do
+ include EachBatch
+
+ self.table_name = table
+ self.inheritance_column = :_type_disabled # Disable STI
+ end
+ end
+
+ def up
+ TABLES.each do |table|
+ add_concurrent_index table.to_sym, :lock_version, where: "lock_version IS NULL"
+
+ queue_background_migration_jobs_by_range_at_intervals(
+ declare_class(table).where(lock_version: nil),
+ 'CleanupOptimisticLockingNulls',
+ 2.minutes,
+ batch_size: BATCH_SIZE,
+ other_arguments: [table]
+ )
+ end
+ end
+
+ def down
+ TABLES.each do |table|
+ remove_concurrent_index table.to_sym, :lock_version, where: "lock_version IS NULL"
+ end
+ end
+end
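
The string 'CleanupOptimisticLockingNulls' names a job class under Gitlab::BackgroundMigration that
is not part of this hunk. A hypothetical sketch of the shape such a job takes, with the signature
assumed from the scheduling call above (range start, range end, then the extra table argument passed
via other_arguments):

  module Gitlab
    module BackgroundMigration
      class CleanupOptimisticLockingNulls
        def perform(start_id, stop_id, table)
          model = Class.new(ActiveRecord::Base) { self.table_name = table }

          # Assumed intent: backfill NULL lock_version values for the given id range.
          model.where(id: start_id..stop_id, lock_version: nil).update_all(lock_version: 0)
        end
      end
    end
  end
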
diff --git a/db/post_migrate/20200211155000_cleanup_empty_merge_request_mentions.rb b/db/post_migrate/20200211155000_cleanup_empty_merge_request_mentions.rb
new file mode 100644
index 00000000000..e90d192388b
--- /dev/null
+++ b/db/post_migrate/20200211155000_cleanup_empty_merge_request_mentions.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+class CleanupEmptyMergeRequestMentions < ActiveRecord::Migration[5.2]
+ DOWNTIME = false
+ BATCH_SIZE = 1_000
+
+ class MergeRequestUserMention < ActiveRecord::Base
+ include EachBatch
+
+ self.table_name = 'merge_request_user_mentions'
+ end
+
+ def up
+    # Clean up merge request user mentions that have no actual mentions recorded;
+    # see https://gitlab.com/gitlab-org/gitlab/-/merge_requests/24586#note_285982468
+ MergeRequestUserMention
+ .where(mentioned_users_ids: nil)
+ .where(mentioned_groups_ids: nil)
+ .where(mentioned_projects_ids: nil).each_batch(of: BATCH_SIZE) do |batch|
+ batch.delete_all
+ end
+ end
+
+ def down
+ # no-op
+ end
+end
diff --git a/db/post_migrate/20200211155100_add_temporary_merge_request_with_mentions_index.rb b/db/post_migrate/20200211155100_add_temporary_merge_request_with_mentions_index.rb
new file mode 100644
index 00000000000..5b25f29d5f7
--- /dev/null
+++ b/db/post_migrate/20200211155100_add_temporary_merge_request_with_mentions_index.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+class AddTemporaryMergeRequestWithMentionsIndex < ActiveRecord::Migration[5.2]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+ INDEX_CONDITION = "description like '%@%' OR title like '%@%'"
+ INDEX_NAME = 'merge_request_mentions_temp_index'
+
+ disable_ddl_transaction!
+
+ def up
+    # Creating the temporary index for merge requests with mentions may take well over 1 hour.
+ add_concurrent_index(:merge_requests, :id, where: INDEX_CONDITION, name: INDEX_NAME)
+ end
+
+ def down
+ remove_concurrent_index(:merge_requests, :id, where: INDEX_CONDITION, name: INDEX_NAME)
+ end
+end
diff --git a/db/post_migrate/20200211155539_migrate_merge_request_mentions_to_db.rb b/db/post_migrate/20200211155539_migrate_merge_request_mentions_to_db.rb
new file mode 100644
index 00000000000..b622badb561
--- /dev/null
+++ b/db/post_migrate/20200211155539_migrate_merge_request_mentions_to_db.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+class MigrateMergeRequestMentionsToDb < ActiveRecord::Migration[5.2]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+ DELAY = 3.minutes.to_i
+ BATCH_SIZE = 1_000
+ MIGRATION = 'UserMentions::CreateResourceUserMention'
+
+ JOIN = "LEFT JOIN merge_request_user_mentions on merge_requests.id = merge_request_user_mentions.merge_request_id"
+ QUERY_CONDITIONS = "(description like '%@%' OR title like '%@%') AND merge_request_user_mentions.merge_request_id IS NULL"
+
+ disable_ddl_transaction!
+
+ class MergeRequest < ActiveRecord::Base
+ include EachBatch
+
+ self.table_name = 'merge_requests'
+ end
+
+ def up
+ MergeRequest
+ .joins(JOIN)
+ .where(QUERY_CONDITIONS)
+ .each_batch(of: BATCH_SIZE) do |batch, index|
+ range = batch.pluck(Arel.sql('MIN(merge_requests.id)'), Arel.sql('MAX(merge_requests.id)')).first
+ migrate_in(index * DELAY, MIGRATION, ['MergeRequest', JOIN, QUERY_CONDITIONS, false, *range])
+ end
+ end
+
+ def down
+ # no-op
+ end
+end
diff --git a/db/post_migrate/20200214034836_remove_security_dashboard_feature_flag.rb b/db/post_migrate/20200214034836_remove_security_dashboard_feature_flag.rb
new file mode 100644
index 00000000000..79723619533
--- /dev/null
+++ b/db/post_migrate/20200214034836_remove_security_dashboard_feature_flag.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+class RemoveSecurityDashboardFeatureFlag < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ class FeatureGate < ApplicationRecord
+ self.table_name = 'feature_gates'
+ end
+
+ def up
+ FeatureGate.find_by(feature_key: :security_dashboard, key: :boolean)&.delete
+ end
+
+ def down
+ instance_security_dashboard_feature = FeatureGate.find_by(feature_key: :instance_security_dashboard, key: :boolean)
+
+ if instance_security_dashboard_feature.present?
+ FeatureGate.safe_find_or_create_by!(
+ feature_key: :security_dashboard,
+ key: instance_security_dashboard_feature.key,
+ value: instance_security_dashboard_feature.value
+ )
+ end
+ end
+end
diff --git a/db/post_migrate/20200214214934_create_environment_for_self_monitoring_project.rb b/db/post_migrate/20200214214934_create_environment_for_self_monitoring_project.rb
new file mode 100644
index 00000000000..a44efa3c460
--- /dev/null
+++ b/db/post_migrate/20200214214934_create_environment_for_self_monitoring_project.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+class CreateEnvironmentForSelfMonitoringProject < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def up
+ execute <<~SQL
+ INSERT INTO environments (project_id, name, slug, created_at, updated_at)
+ SELECT instance_administration_project_id, 'production', 'production', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP
+ FROM application_settings
+ WHERE instance_administration_project_id IS NOT NULL
+ AND NOT EXISTS (
+ SELECT 1
+ FROM environments
+ INNER JOIN application_settings
+ ON application_settings.instance_administration_project_id = environments.project_id
+ )
+ SQL
+ end
+
+ def down
+    # no-op
+
+    # This migration cannot be reversed because we cannot be sure the environment for the
+    # Self Monitoring Project did not already exist before the migration ran. In that case the
+    # migration does nothing, and it would be unexpected for reversing it to delete that environment.
+ end
+end
diff --git a/db/post_migrate/20200217223651_add_index_to_job_artifact_secure_reports.rb b/db/post_migrate/20200217223651_add_index_to_job_artifact_secure_reports.rb
new file mode 100644
index 00000000000..ca297272f8e
--- /dev/null
+++ b/db/post_migrate/20200217223651_add_index_to_job_artifact_secure_reports.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+class AddIndexToJobArtifactSecureReports < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+ INDEX_NAME = 'job_artifacts_secure_reports_temp_index'
+ PARTIAL_FILTER = "file_type BETWEEN 5 AND 8"
+
+ disable_ddl_transaction!
+
+ def up
+ # This is a temporary index used for the migration of Security Reports to Security Scans
+ add_concurrent_index(:ci_job_artifacts,
+ [:id, :file_type, :job_id, :created_at, :updated_at],
+ name: INDEX_NAME,
+ where: PARTIAL_FILTER)
+ end
+
+ def down
+ remove_concurrent_index(:ci_job_artifacts,
+ [:id, :file_type, :job_id, :created_at, :updated_at])
+ end
+end
diff --git a/db/post_migrate/20200217225719_schedule_migrate_security_scans.rb b/db/post_migrate/20200217225719_schedule_migrate_security_scans.rb
new file mode 100644
index 00000000000..7ef204ed9de
--- /dev/null
+++ b/db/post_migrate/20200217225719_schedule_migrate_security_scans.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+class ScheduleMigrateSecurityScans < ActiveRecord::Migration[5.2]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+ INTERVAL = 2.minutes.to_i
+ BATCH_SIZE = 10_000
+ MIGRATION = 'MigrateSecurityScans'.freeze
+
+ disable_ddl_transaction!
+
+ class JobArtifact < ActiveRecord::Base
+ include ::EachBatch
+
+ self.table_name = 'ci_job_artifacts'
+
+ scope :security_reports, -> { where('file_type BETWEEN 5 and 8') }
+ end
+
+ def up
+ queue_background_migration_jobs_by_range_at_intervals(JobArtifact.security_reports,
+ MIGRATION,
+ INTERVAL,
+ batch_size: BATCH_SIZE)
+ end
+
+ def down
+ # intentionally blank
+ end
+end
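
queue_background_migration_jobs_by_range_at_intervals (from the included MigrationHelpers, not shown
in this patch) batches the relation and enqueues one delayed job per id range. A simplified sketch of
that behaviour, mirroring the manual pattern the mentions migrations above use:

  JobArtifact.security_reports.each_batch(of: BATCH_SIZE) do |batch, index|
    start_id, stop_id = batch.pluck(Arel.sql('MIN(id)'), Arel.sql('MAX(id)')).first

    # One job per range, spaced INTERVAL seconds apart so the queue is not flooded.
    BackgroundMigrationWorker.perform_in(index * INTERVAL, MIGRATION, [start_id, stop_id])
  end
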
diff --git a/db/post_migrate/20200219183456_remove_issue_state_indexes.rb b/db/post_migrate/20200219183456_remove_issue_state_indexes.rb
new file mode 100644
index 00000000000..cdf10b8172e
--- /dev/null
+++ b/db/post_migrate/20200219183456_remove_issue_state_indexes.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+class RemoveIssueStateIndexes < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+  # The issues `state` column has been ignored since 12.6 and will be removed in a following migration.
+ def up
+ remove_concurrent_index_by_name :issues, 'index_issues_on_state'
+ remove_concurrent_index_by_name :issues, 'index_issues_on_project_id_and_created_at_and_id_and_state'
+ remove_concurrent_index_by_name :issues, 'idx_issues_on_project_id_and_due_date_and_id_and_state_partial'
+ remove_concurrent_index_by_name :issues, 'index_issues_on_project_id_and_rel_position_and_state_and_id'
+ remove_concurrent_index_by_name :issues, 'index_issues_on_project_id_and_updated_at_and_id_and_state'
+ end
+
+ def down
+ add_concurrent_index :issues, :state, name: 'index_issues_on_state'
+
+ add_concurrent_index :issues,
+ [:project_id, :created_at, :id, :state],
+ name: 'index_issues_on_project_id_and_created_at_and_id_and_state'
+
+ add_concurrent_index :issues,
+ [:project_id, :due_date, :id, :state],
+ where: 'due_date IS NOT NULL',
+ name: 'idx_issues_on_project_id_and_due_date_and_id_and_state_partial'
+
+ add_concurrent_index :issues,
+ [:project_id, :relative_position, :state, :id],
+ order: { id: :desc },
+ name: 'index_issues_on_project_id_and_rel_position_and_state_and_id'
+
+ add_concurrent_index :issues,
+ [:project_id, :updated_at, :id, :state],
+ name: 'index_issues_on_project_id_and_updated_at_and_id_and_state'
+ end
+end
diff --git a/db/post_migrate/20200219184219_remove_merge_request_state_indexes.rb b/db/post_migrate/20200219184219_remove_merge_request_state_indexes.rb
new file mode 100644
index 00000000000..deb95acc1cf
--- /dev/null
+++ b/db/post_migrate/20200219184219_remove_merge_request_state_indexes.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+class RemoveMergeRequestStateIndexes < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+  # The merge_requests `state` column has been ignored since 12.6 and will be removed in a following migration.
+ def up
+ remove_concurrent_index_by_name :merge_requests, 'index_merge_requests_on_id_and_merge_jid'
+ remove_concurrent_index_by_name :merge_requests, 'index_merge_requests_on_source_project_and_branch_state_opened'
+ remove_concurrent_index_by_name :merge_requests, 'index_merge_requests_on_state_and_merge_status'
+ remove_concurrent_index_by_name :merge_requests, 'index_merge_requests_on_target_project_id_and_iid_opened'
+ end
+
+ def down
+ add_concurrent_index :merge_requests,
+ [:id, :merge_jid],
+ where: "merge_jid IS NOT NULL and state = 'locked'",
+ name: 'index_merge_requests_on_id_and_merge_jid'
+
+ add_concurrent_index :merge_requests,
+ [:source_project_id, :source_branch],
+ where: "state = 'opened'",
+ name: 'index_merge_requests_on_source_project_and_branch_state_opened'
+
+ add_concurrent_index :merge_requests,
+ [:state, :merge_status],
+ where: "state = 'opened' AND merge_status = 'can_be_merged'",
+ name: 'index_merge_requests_on_state_and_merge_status'
+
+ add_concurrent_index :merge_requests,
+ [:target_project_id, :iid],
+ where: "state = 'opened'",
+ name: 'index_merge_requests_on_target_project_id_and_iid_opened'
+ end
+end
diff --git a/db/post_migrate/20200219193058_remove_state_from_issues.rb b/db/post_migrate/20200219193058_remove_state_from_issues.rb
new file mode 100644
index 00000000000..ac27a9a9b69
--- /dev/null
+++ b/db/post_migrate/20200219193058_remove_state_from_issues.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+class RemoveStateFromIssues < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def up
+ return unless issue_state_column_exists?
+
+ # Ignored in 12.6 - https://gitlab.com/gitlab-org/gitlab/-/merge_requests/19574
+ with_lock_retries do
+ remove_column :issues, :state, :string
+ end
+ end
+
+ def down
+ return if issue_state_column_exists?
+
+ with_lock_retries do
+ add_column :issues, :state, :string # rubocop:disable Migration/AddLimitToStringColumns
+ end
+ end
+
+ private
+
+ def issue_state_column_exists?
+ column_exists?(:issues, :state)
+ end
+end
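
Dropping the column inside with_lock_retries is safe only because the application stopped reading it
beforehand. In Rails that is typically expressed on the model, roughly as below (illustrative only,
not part of this patch):

  class Issue < ApplicationRecord
    # `state` (string) was replaced by `state_id`; ignoring it keeps the attribute out of the
    # schema cache until the column is physically dropped by the migration above.
    self.ignored_columns += %w[state]
  end
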
diff --git a/db/post_migrate/20200219193117_remove_state_from_merge_requests.rb b/db/post_migrate/20200219193117_remove_state_from_merge_requests.rb
new file mode 100644
index 00000000000..c99a732f37b
--- /dev/null
+++ b/db/post_migrate/20200219193117_remove_state_from_merge_requests.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+class RemoveStateFromMergeRequests < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def up
+ return unless merge_requests_state_column_exists?
+
+ # Ignored in 12.6 - https://gitlab.com/gitlab-org/gitlab/-/merge_requests/19574
+ with_lock_retries do
+ remove_column :merge_requests, :state, :string
+ end
+ end
+
+ def down
+ return if merge_requests_state_column_exists?
+
+ with_lock_retries do
+ add_column :merge_requests, :state, :string # rubocop:disable Migration/AddLimitToStringColumns
+ end
+ end
+
+ private
+
+ def merge_requests_state_column_exists?
+ column_exists?(:merge_requests, :state)
+ end
+end
diff --git a/db/post_migrate/20200221142216_remove_repository_storage_from_snippets.rb b/db/post_migrate/20200221142216_remove_repository_storage_from_snippets.rb
new file mode 100644
index 00000000000..fb8721a45b8
--- /dev/null
+++ b/db/post_migrate/20200221142216_remove_repository_storage_from_snippets.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+class RemoveRepositoryStorageFromSnippets < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ return unless column_exists?(:snippets, :repository_storage)
+
+ remove_column :snippets, :repository_storage
+ end
+
+ def down
+ return if column_exists?(:snippets, :repository_storage)
+
+ add_column_with_default(
+ :snippets,
+ :repository_storage,
+ :string,
+ default: 'default',
+ limit: 255,
+ allow_null: false
+ )
+ end
+end
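
add_column_with_default (another MigrationHelpers helper) re-creates the column in a write-safe way.
Paraphrased, the down method above amounts to roughly these steps, which the helper performs with the
appropriate batching and retries:

  # add_column            :snippets, :repository_storage, :string, limit: 255
  # change_column_default :snippets, :repository_storage, 'default'
  # ...backfill existing rows in batches...
  # change_column_null    :snippets, :repository_storage, false
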
diff --git a/db/post_migrate/20200226162239_cleanup_closed_at_rename_in_vulnerabilities.rb b/db/post_migrate/20200226162239_cleanup_closed_at_rename_in_vulnerabilities.rb
new file mode 100644
index 00000000000..eb7df0b8d22
--- /dev/null
+++ b/db/post_migrate/20200226162239_cleanup_closed_at_rename_in_vulnerabilities.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class CleanupClosedAtRenameInVulnerabilities < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ cleanup_concurrent_column_rename :vulnerabilities, :closed_at, :dismissed_at
+ end
+
+ def down
+ undo_cleanup_concurrent_column_rename :vulnerabilities, :closed_at, :dismissed_at
+ end
+end
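
cleanup_concurrent_column_rename is the last step of a rename that was started in an earlier release.
In outline (the helper's internals are not shown in this patch):

  # 1. An earlier migration added dismissed_at, installed a trigger keeping it in sync with
  #    closed_at, and backfilled existing rows in the background.
  # 2. This migration drops that trigger and removes the old closed_at column.
  # 3. down re-creates closed_at and the sync machinery so the rename can be redone.
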
diff --git a/db/post_migrate/20200226162723_cleanup_closed_by_rename_in_vulnerabilities.rb b/db/post_migrate/20200226162723_cleanup_closed_by_rename_in_vulnerabilities.rb
new file mode 100644
index 00000000000..4aa3568db14
--- /dev/null
+++ b/db/post_migrate/20200226162723_cleanup_closed_by_rename_in_vulnerabilities.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class CleanupClosedByRenameInVulnerabilities < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ cleanup_concurrent_column_rename :vulnerabilities, :closed_by_id, :dismissed_by_id
+ end
+
+ def down
+ undo_cleanup_concurrent_column_rename :vulnerabilities, :closed_by_id, :dismissed_by_id
+ end
+end
diff --git a/db/post_migrate/20200227140242_update_occurrence_severity_column.rb b/db/post_migrate/20200227140242_update_occurrence_severity_column.rb
new file mode 100644
index 00000000000..6d250532383
--- /dev/null
+++ b/db/post_migrate/20200227140242_update_occurrence_severity_column.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+class UpdateOccurrenceSeverityColumn < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+ BATCH_SIZE = 1_000
+ INTERVAL = 5.minutes
+
+ # 23_044 records to be updated on GitLab.com,
+ def up
+ # create temporary index for undefined vulnerabilities
+ add_concurrent_index(:vulnerability_occurrences, :id, where: 'severity = 0', name: 'undefined_vulnerabilities')
+
+ return unless Gitlab.ee?
+
+ migration = Gitlab::BackgroundMigration::RemoveUndefinedOccurrenceSeverityLevel
+ migration_name = migration.to_s.demodulize
+ relation = migration::Occurrence.undefined_severity
+ queue_background_migration_jobs_by_range_at_intervals(relation,
+ migration_name,
+ INTERVAL,
+ batch_size: BATCH_SIZE)
+ end
+
+  def down
+    # The data change cannot be reversed because we cannot know which records had an undefined severity.
+    # Here we only drop the temporary index that was added in `up`.
+    remove_concurrent_index(:vulnerability_occurrences, :id, where: 'severity = 0', name: 'undefined_vulnerabilities')
+  end
+end
diff --git a/db/post_migrate/20200302142052_update_vulnerability_severity_column.rb b/db/post_migrate/20200302142052_update_vulnerability_severity_column.rb
new file mode 100644
index 00000000000..fa38569f35d
--- /dev/null
+++ b/db/post_migrate/20200302142052_update_vulnerability_severity_column.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+class UpdateVulnerabilitySeverityColumn < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+ BATCH_SIZE = 1_000
+ INTERVAL = 2.minutes
+
+ def up
+ # create temporary index for undefined vulnerabilities
+ add_concurrent_index(:vulnerabilities, :id, where: 'severity = 0', name: 'undefined_vulnerability')
+
+ return unless Gitlab.ee?
+
+ migration = Gitlab::BackgroundMigration::RemoveUndefinedVulnerabilitySeverityLevel
+ migration_name = migration.to_s.demodulize
+ relation = migration::Vulnerability.undefined_severity
+ queue_background_migration_jobs_by_range_at_intervals(relation,
+ migration_name,
+ INTERVAL,
+ batch_size: BATCH_SIZE)
+ end
+
+ def down
+    # no-op
+    # This migration cannot be reversed because we cannot know which records had an undefined severity.
+ end
+end
diff --git a/db/post_migrate/20200304211738_remove_file_type_from_packages_package_files.rb b/db/post_migrate/20200304211738_remove_file_type_from_packages_package_files.rb
new file mode 100644
index 00000000000..98bce8845ab
--- /dev/null
+++ b/db/post_migrate/20200304211738_remove_file_type_from_packages_package_files.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+class RemoveFileTypeFromPackagesPackageFiles < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def change
+ remove_column :packages_package_files, :file_type, :integer
+ end
+end
diff --git a/db/post_migrate/20200310075115_schedule_link_lfs_objects_projects.rb b/db/post_migrate/20200310075115_schedule_link_lfs_objects_projects.rb
new file mode 100644
index 00000000000..d1ed53d8e70
--- /dev/null
+++ b/db/post_migrate/20200310075115_schedule_link_lfs_objects_projects.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+class ScheduleLinkLfsObjectsProjects < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+ MIGRATION = 'LinkLfsObjectsProjects'
+ BATCH_SIZE = 1000
+
+ disable_ddl_transaction!
+
+ def up
+ lfs_objects_projects = Gitlab::BackgroundMigration::LinkLfsObjectsProjects::LfsObjectsProject.linkable
+
+ queue_background_migration_jobs_by_range_at_intervals(
+ lfs_objects_projects,
+ MIGRATION,
+ BackgroundMigrationWorker.minimum_interval,
+ batch_size: BATCH_SIZE
+ )
+ end
+
+ def down
+    # No-op. There is no need to make this reversible. If the enqueued jobs run and
+    # fail at some point, some records will already have been created; when the jobs
+    # are rescheduled, those records won't be re-created. It is also hard to track
+    # which records to clean up, if we ever needed to.
+ end
+end
diff --git a/db/post_migrate/20200310135818_remove_temporary_promoted_notes_index.rb b/db/post_migrate/20200310135818_remove_temporary_promoted_notes_index.rb
new file mode 100644
index 00000000000..0b9bbf1e17b
--- /dev/null
+++ b/db/post_migrate/20200310135818_remove_temporary_promoted_notes_index.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+# Removes temporary index to fix orphan promoted issues.
+# For more information check: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/23916
+class RemoveTemporaryPromotedNotesIndex < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ remove_concurrent_index_by_name :notes, 'tmp_idx_on_promoted_notes'
+ end
+
+ def down
+ add_concurrent_index :notes,
+ :note,
+ where: "noteable_type = 'Issue' AND system IS TRUE AND note LIKE 'promoted to epic%'",
+ name: 'tmp_idx_on_promoted_notes'
+ end
+end
diff --git a/db/schema.rb b/db/schema.rb
index 517177e82b9..55c99cb1027 100644
--- a/db/schema.rb
+++ b/db/schema.rb
@@ -10,7 +10,7 @@
#
# It's strongly recommended that you check this file into your version control system.
-ActiveRecord::Schema.define(version: 2020_02_14_085940) do
+ActiveRecord::Schema.define(version: 2020_03_13_123934) do
# These are extensions that must be enabled in order to support this database
enable_extension "pg_trgm"
@@ -349,6 +349,9 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.boolean "disable_overriding_approvers_per_merge_request", default: false, null: false
t.boolean "prevent_merge_requests_author_approval", default: false, null: false
t.boolean "prevent_merge_requests_committers_approval", default: false, null: false
+ t.boolean "email_restrictions_enabled", default: false, null: false
+ t.text "email_restrictions"
+ t.boolean "npm_package_requests_forwarding", default: true, null: false
t.index ["custom_project_templates_group_id"], name: "index_application_settings_on_custom_project_templates_group_id"
t.index ["file_template_project_id"], name: "index_application_settings_on_file_template_project_id"
t.index ["instance_administration_project_id"], name: "index_applicationsettings_on_instance_administration_project_id"
@@ -570,6 +573,7 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.integer "cached_markdown_version"
t.string "target_path", limit: 255
t.integer "broadcast_type", limit: 2, default: 1, null: false
+ t.boolean "dismissable"
t.index ["ends_at", "broadcast_type", "id"], name: "index_broadcast_message_on_ends_at_and_broadcast_type_and_id"
end
@@ -719,6 +723,7 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.jsonb "config_variables"
t.boolean "has_exposed_artifacts"
t.string "environment_auto_stop_in", limit: 255
+ t.string "expanded_environment_name", limit: 255
t.index ["build_id"], name: "index_ci_builds_metadata_on_build_id", unique: true
t.index ["build_id"], name: "index_ci_builds_metadata_on_build_id_and_has_exposed_artifacts", where: "(has_exposed_artifacts IS TRUE)"
t.index ["build_id"], name: "index_ci_builds_metadata_on_build_id_and_interruptible", where: "(interruptible = true)"
@@ -763,6 +768,7 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.integer "file_location", limit: 2
t.index ["expire_at", "job_id"], name: "index_ci_job_artifacts_on_expire_at_and_job_id"
t.index ["file_store"], name: "index_ci_job_artifacts_on_file_store"
+ t.index ["id", "file_type", "job_id", "created_at", "updated_at"], name: "job_artifacts_secure_reports_temp_index", where: "((file_type >= 5) AND (file_type <= 8))"
t.index ["job_id", "file_type"], name: "index_ci_job_artifacts_on_job_id_and_file_type", unique: true
t.index ["project_id"], name: "index_ci_job_artifacts_on_project_id"
t.index ["project_id"], name: "index_ci_job_artifacts_on_project_id_for_security_reports", where: "(file_type = ANY (ARRAY[5, 6, 7, 8]))"
@@ -774,6 +780,7 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.string "encrypted_value_iv"
t.bigint "job_id", null: false
t.integer "variable_type", limit: 2, default: 1, null: false
+ t.integer "source", limit: 2, default: 0, null: false
t.index ["job_id"], name: "index_ci_job_variables_on_job_id"
t.index ["key", "job_id"], name: "index_ci_job_variables_on_key_and_job_id", unique: true
end
@@ -867,7 +874,7 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.index ["project_id", "status", "config_source"], name: "index_ci_pipelines_on_project_id_and_status_and_config_source"
t.index ["project_id", "status", "updated_at"], name: "index_ci_pipelines_on_project_id_and_status_and_updated_at"
t.index ["status"], name: "index_ci_pipelines_on_status"
- t.index ["user_id"], name: "index_ci_pipelines_on_user_id"
+ t.index ["user_id", "created_at"], name: "index_ci_pipelines_on_user_id_and_created_at"
end
create_table "ci_pipelines_config", primary_key: "pipeline_id", force: :cascade do |t|
@@ -875,6 +882,17 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.index ["pipeline_id"], name: "index_ci_pipelines_config_on_pipeline_id"
end
+ create_table "ci_refs", force: :cascade do |t|
+ t.integer "project_id", null: false
+ t.integer "lock_version", default: 0
+ t.integer "last_updated_by_pipeline_id"
+ t.boolean "tag", default: false, null: false
+ t.string "ref", limit: 255, null: false
+ t.string "status", limit: 255, null: false
+ t.index ["last_updated_by_pipeline_id"], name: "index_ci_refs_on_last_updated_by_pipeline_id"
+ t.index ["project_id", "ref", "tag"], name: "index_ci_refs_on_project_id_and_ref_and_tag", unique: true
+ end
+
create_table "ci_resource_groups", force: :cascade do |t|
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
@@ -949,6 +967,13 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.index ["source_project_id"], name: "index_ci_sources_pipelines_on_source_project_id"
end
+ create_table "ci_sources_projects", force: :cascade do |t|
+ t.bigint "pipeline_id", null: false
+ t.bigint "source_project_id", null: false
+ t.index ["pipeline_id"], name: "index_ci_sources_projects_on_pipeline_id"
+ t.index ["source_project_id", "pipeline_id"], name: "index_ci_sources_projects_on_source_project_id_and_pipeline_id", unique: true
+ end
+
create_table "ci_stages", id: :serial, force: :cascade do |t|
t.integer "project_id"
t.integer "pipeline_id"
@@ -1100,7 +1125,7 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.integer "management_project_id"
t.integer "cleanup_status", limit: 2, default: 1, null: false
t.text "cleanup_status_reason"
- t.index ["enabled"], name: "index_clusters_on_enabled"
+ t.index ["enabled", "provider_type", "id"], name: "index_clusters_on_enabled_and_provider_type_and_id"
t.index ["management_project_id"], name: "index_clusters_on_management_project_id", where: "(management_project_id IS NOT NULL)"
t.index ["user_id"], name: "index_clusters_on_user_id"
end
@@ -1357,7 +1382,9 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
create_table "deployment_merge_requests", id: false, force: :cascade do |t|
t.integer "deployment_id", null: false
t.integer "merge_request_id", null: false
+ t.integer "environment_id"
t.index ["deployment_id", "merge_request_id"], name: "idx_deployment_merge_requests_unique_index", unique: true
+ t.index ["environment_id", "merge_request_id"], name: "idx_environment_merge_requests_unique_index", unique: true
t.index ["merge_request_id"], name: "index_deployment_merge_requests_on_merge_request_id"
end
@@ -1383,6 +1410,7 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.index ["environment_id", "id"], name: "index_deployments_on_environment_id_and_id"
t.index ["environment_id", "iid", "project_id"], name: "index_deployments_on_environment_id_and_iid_and_project_id"
t.index ["environment_id", "status"], name: "index_deployments_on_environment_id_and_status"
+ t.index ["id", "status"], name: "index_deployments_on_id_and_status"
t.index ["id"], name: "partial_index_deployments_for_legacy_successful_deployments", where: "((finished_at IS NULL) AND (status = 2))"
t.index ["project_id", "id"], name: "index_deployments_on_project_id_and_id", order: { id: :desc }
t.index ["project_id", "iid"], name: "index_deployments_on_project_id_and_iid", unique: true
@@ -1391,6 +1419,7 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.index ["project_id", "status"], name: "index_deployments_on_project_id_and_status"
t.index ["project_id", "updated_at", "id"], name: "index_deployments_on_project_id_and_updated_at_and_id", order: { updated_at: :desc, id: :desc }
t.index ["project_id"], name: "partial_index_deployments_for_project_id_and_tag", where: "(tag IS TRUE)"
+ t.index ["user_id", "status", "created_at"], name: "index_deployments_on_user_id_and_status_and_created_at"
end
create_table "description_versions", force: :cascade do |t|
@@ -1414,10 +1443,11 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.index ["project_id"], name: "index_design_management_designs_on_project_id"
end
- create_table "design_management_designs_versions", id: false, force: :cascade do |t|
+ create_table "design_management_designs_versions", force: :cascade do |t|
t.bigint "design_id", null: false
t.bigint "version_id", null: false
t.integer "event", limit: 2, default: 0, null: false
+ t.string "image_v432x230", limit: 255
t.index ["design_id", "version_id"], name: "design_management_designs_versions_uniqueness", unique: true
t.index ["design_id"], name: "index_design_management_designs_versions_on_design_id"
t.index ["event"], name: "index_design_management_designs_versions_on_event"
@@ -1427,7 +1457,7 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
create_table "design_management_versions", force: :cascade do |t|
t.binary "sha", null: false
t.bigint "issue_id"
- t.datetime_with_timezone "created_at"
+ t.datetime_with_timezone "created_at", null: false
t.integer "author_id"
t.index ["author_id"], name: "index_design_management_versions_on_author_id", where: "(author_id IS NOT NULL)"
t.index ["issue_id"], name: "index_design_management_versions_on_issue_id"
@@ -1563,6 +1593,7 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.integer "start_date_sourcing_epic_id"
t.integer "due_date_sourcing_epic_id"
t.integer "health_status", limit: 2
+ t.index "group_id, ((iid)::character varying) varchar_pattern_ops", name: "index_epics_on_group_id_and_iid_varchar_pattern"
t.index ["assignee_id"], name: "index_epics_on_assignee_id"
t.index ["author_id"], name: "index_epics_on_author_id"
t.index ["closed_by_id"], name: "index_epics_on_closed_by_id"
@@ -1571,6 +1602,7 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.index ["end_date"], name: "index_epics_on_end_date"
t.index ["group_id"], name: "index_epics_on_group_id"
t.index ["iid"], name: "index_epics_on_iid"
+ t.index ["lock_version"], name: "index_epics_on_lock_version", where: "(lock_version IS NULL)"
t.index ["parent_id"], name: "index_epics_on_parent_id"
t.index ["start_date"], name: "index_epics_on_start_date"
t.index ["start_date_sourcing_epic_id"], name: "index_epics_on_start_date_sourcing_epic_id", where: "(start_date_sourcing_epic_id IS NOT NULL)"
@@ -1587,6 +1619,7 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.string "target_type"
t.bigint "group_id"
t.index ["action"], name: "index_events_on_action"
+ t.index ["author_id", "created_at"], name: "index_events_on_author_id_and_created_at"
t.index ["author_id", "project_id"], name: "index_events_on_author_id_and_project_id"
t.index ["created_at", "author_id"], name: "analytics_index_events_on_created_at_and_author_id"
t.index ["group_id"], name: "index_events_on_group_id_partial", where: "(group_id IS NOT NULL)"
@@ -1650,14 +1683,6 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.index ["root_project_id"], name: "index_fork_networks_on_root_project_id", unique: true
end
- create_table "forked_project_links", id: :serial, force: :cascade do |t|
- t.integer "forked_to_project_id", null: false
- t.integer "forked_from_project_id", null: false
- t.datetime "created_at"
- t.datetime "updated_at"
- t.index ["forked_to_project_id"], name: "index_forked_project_links_on_forked_to_project_id", unique: true
- end
-
create_table "geo_cache_invalidation_events", force: :cascade do |t|
t.string "key", null: false
end
@@ -1909,6 +1934,7 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.bigint "gitlab_subscription_id", null: false
t.datetime_with_timezone "created_at"
t.date "trial_starts_on"
+ t.boolean "auto_renew"
t.index ["gitlab_subscription_id"], name: "index_gitlab_subscription_histories_on_gitlab_subscription_id"
end
@@ -1924,6 +1950,7 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.integer "seats", default: 0
t.boolean "trial", default: false
t.date "trial_starts_on"
+ t.boolean "auto_renew"
t.index ["hosted_plan_id"], name: "index_gitlab_subscriptions_on_hosted_plan_id"
t.index ["namespace_id"], name: "index_gitlab_subscriptions_on_namespace_id", unique: true
end
@@ -2160,7 +2187,6 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.datetime "updated_at"
t.text "description"
t.integer "milestone_id"
- t.string "state"
t.integer "iid"
t.integer "updated_by_id"
t.integer "weight"
@@ -2183,25 +2209,22 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.integer "duplicated_to_id"
t.integer "promoted_to_epic_id"
t.integer "health_status", limit: 2
+ t.index ["author_id", "id", "created_at"], name: "index_issues_on_author_id_and_id_and_created_at"
t.index ["author_id"], name: "index_issues_on_author_id"
t.index ["closed_by_id"], name: "index_issues_on_closed_by_id"
t.index ["confidential"], name: "index_issues_on_confidential"
t.index ["description"], name: "index_issues_on_description_trigram", opclass: :gin_trgm_ops, using: :gin
t.index ["duplicated_to_id"], name: "index_issues_on_duplicated_to_id", where: "(duplicated_to_id IS NOT NULL)"
+ t.index ["lock_version"], name: "index_issues_on_lock_version", where: "(lock_version IS NULL)"
t.index ["milestone_id"], name: "index_issues_on_milestone_id"
t.index ["moved_to_id"], name: "index_issues_on_moved_to_id", where: "(moved_to_id IS NOT NULL)"
- t.index ["project_id", "created_at", "id", "state"], name: "index_issues_on_project_id_and_created_at_and_id_and_state"
t.index ["project_id", "created_at", "id", "state_id"], name: "idx_issues_on_project_id_and_created_at_and_id_and_state_id"
- t.index ["project_id", "due_date", "id", "state"], name: "idx_issues_on_project_id_and_due_date_and_id_and_state_partial", where: "(due_date IS NOT NULL)"
t.index ["project_id", "due_date", "id", "state_id"], name: "idx_issues_on_project_id_and_due_date_and_id_and_state_id", where: "(due_date IS NOT NULL)"
t.index ["project_id", "iid"], name: "index_issues_on_project_id_and_iid", unique: true
- t.index ["project_id", "relative_position", "state", "id"], name: "index_issues_on_project_id_and_rel_position_and_state_and_id", order: { id: :desc }
t.index ["project_id", "relative_position", "state_id", "id"], name: "idx_issues_on_project_id_and_rel_position_and_state_id_and_id", order: { id: :desc }
- t.index ["project_id", "updated_at", "id", "state"], name: "index_issues_on_project_id_and_updated_at_and_id_and_state"
t.index ["project_id", "updated_at", "id", "state_id"], name: "idx_issues_on_project_id_and_updated_at_and_id_and_state_id"
t.index ["promoted_to_epic_id"], name: "index_issues_on_promoted_to_epic_id", where: "(promoted_to_epic_id IS NOT NULL)"
t.index ["relative_position"], name: "index_issues_on_relative_position"
- t.index ["state"], name: "index_issues_on_state"
t.index ["state_id"], name: "idx_issues_on_state_id"
t.index ["title"], name: "index_issues_on_title_trigram", opclass: :gin_trgm_ops, using: :gin
t.index ["updated_at"], name: "index_issues_on_updated_at"
@@ -2271,6 +2294,7 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.boolean "public", default: false, null: false
t.datetime "last_used_at"
t.binary "fingerprint_sha256"
+ t.datetime_with_timezone "expires_at"
t.index ["fingerprint"], name: "index_keys_on_fingerprint", unique: true
t.index ["fingerprint_sha256"], name: "index_keys_on_fingerprint_sha256"
t.index ["id", "type"], name: "index_on_deploy_keys_id_and_type_and_public", unique: true, where: "(public = true)"
@@ -2418,12 +2442,14 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.index ["invite_token"], name: "index_members_on_invite_token", unique: true
t.index ["requested_at"], name: "index_members_on_requested_at"
t.index ["source_id", "source_type"], name: "index_members_on_source_id_and_source_type"
+ t.index ["user_id", "created_at"], name: "index_members_on_user_id_created_at", where: "((ldap = true) AND ((type)::text = 'GroupMember'::text) AND ((source_type)::text = 'Namespace'::text))"
t.index ["user_id"], name: "index_members_on_user_id"
end
create_table "merge_request_assignees", force: :cascade do |t|
t.integer "user_id", null: false
t.integer "merge_request_id", null: false
+ t.datetime_with_timezone "created_at"
t.index ["merge_request_id", "user_id"], name: "index_merge_request_assignees_on_merge_request_id_and_user_id", unique: true
t.index ["merge_request_id"], name: "index_merge_request_assignees_on_merge_request_id"
t.index ["user_id"], name: "index_merge_request_assignees_on_user_id"
@@ -2537,6 +2563,8 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.integer "diff_size"
t.integer "modified_paths_size"
t.integer "commits_count"
+ t.datetime_with_timezone "first_approved_at"
+ t.datetime_with_timezone "first_reassigned_at"
t.index ["first_deployed_to_production_at"], name: "index_merge_request_metrics_on_first_deployed_to_production_at"
t.index ["latest_closed_at"], name: "index_merge_request_metrics_on_latest_closed_at", where: "(latest_closed_at IS NOT NULL)"
t.index ["latest_closed_by_id"], name: "index_merge_request_metrics_on_latest_closed_by_id"
@@ -2568,7 +2596,6 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.datetime "created_at"
t.datetime "updated_at"
t.integer "milestone_id"
- t.string "state", default: "opened", null: false
t.string "merge_status", default: "unchecked", null: false
t.integer "target_project_id", null: false
t.integer "iid"
@@ -2604,21 +2631,19 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.index ["description"], name: "index_merge_requests_on_description_trigram", opclass: :gin_trgm_ops, using: :gin
t.index ["head_pipeline_id"], name: "index_merge_requests_on_head_pipeline_id"
t.index ["id", "merge_jid"], name: "idx_merge_requests_on_id_and_merge_jid", where: "((merge_jid IS NOT NULL) AND (state_id = 4))"
- t.index ["id", "merge_jid"], name: "index_merge_requests_on_id_and_merge_jid", where: "((merge_jid IS NOT NULL) AND ((state)::text = 'locked'::text))"
+ t.index ["id"], name: "merge_request_mentions_temp_index", where: "((description ~~ '%@%'::text) OR ((title)::text ~~ '%@%'::text))"
t.index ["latest_merge_request_diff_id"], name: "index_merge_requests_on_latest_merge_request_diff_id"
+ t.index ["lock_version"], name: "index_merge_requests_on_lock_version", where: "(lock_version IS NULL)"
t.index ["merge_user_id"], name: "index_merge_requests_on_merge_user_id", where: "(merge_user_id IS NOT NULL)"
t.index ["milestone_id"], name: "index_merge_requests_on_milestone_id"
t.index ["source_branch"], name: "index_merge_requests_on_source_branch"
t.index ["source_project_id", "source_branch"], name: "idx_merge_requests_on_source_project_and_branch_state_opened", where: "(state_id = 1)"
- t.index ["source_project_id", "source_branch"], name: "index_merge_requests_on_source_project_and_branch_state_opened", where: "((state)::text = 'opened'::text)"
t.index ["source_project_id", "source_branch"], name: "index_merge_requests_on_source_project_id_and_source_branch"
- t.index ["state", "merge_status"], name: "index_merge_requests_on_state_and_merge_status", where: "(((state)::text = 'opened'::text) AND ((merge_status)::text = 'can_be_merged'::text))"
t.index ["state_id", "merge_status"], name: "idx_merge_requests_on_state_id_and_merge_status", where: "((state_id = 1) AND ((merge_status)::text = 'can_be_merged'::text))"
t.index ["target_branch"], name: "index_merge_requests_on_target_branch"
t.index ["target_project_id", "created_at"], name: "index_merge_requests_target_project_id_created_at"
t.index ["target_project_id", "iid"], name: "idx_merge_requests_on_target_project_id_and_iid_opened", where: "(state_id = 1)"
t.index ["target_project_id", "iid"], name: "index_merge_requests_on_target_project_id_and_iid", unique: true
- t.index ["target_project_id", "iid"], name: "index_merge_requests_on_target_project_id_and_iid_opened", where: "((state)::text = 'opened'::text)"
t.index ["target_project_id", "merge_commit_sha", "id"], name: "index_merge_requests_on_tp_id_and_merge_commit_sha_and_id"
t.index ["target_project_id", "target_branch"], name: "index_merge_requests_on_target_project_id_and_target_branch", where: "((state_id = 1) AND (merge_when_pipeline_succeeds = true))"
t.index ["title"], name: "index_merge_requests_on_title"
@@ -2746,6 +2771,7 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.integer "max_pages_size"
t.integer "max_artifacts_size"
t.boolean "mentions_disabled"
+ t.integer "default_branch_protection", limit: 2
t.index ["created_at"], name: "index_namespaces_on_created_at"
t.index ["custom_project_templates_group_id", "type"], name: "index_namespaces_on_custom_project_templates_group_id_and_type", where: "(custom_project_templates_group_id IS NOT NULL)"
t.index ["file_template_project_id"], name: "index_namespaces_on_file_template_project_id"
@@ -2804,14 +2830,14 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.text "change_position"
t.boolean "resolved_by_push"
t.bigint "review_id"
- t.index ["author_id"], name: "index_notes_on_author_id"
+ t.boolean "confidential"
+ t.index ["author_id", "created_at"], name: "index_notes_on_author_id_and_created_at"
t.index ["commit_id"], name: "index_notes_on_commit_id"
t.index ["created_at"], name: "index_notes_on_created_at"
t.index ["discussion_id"], name: "index_notes_on_discussion_id"
- t.index ["id"], name: "epic_mentions_temp_index", where: "((note ~~ '%@%'::text) AND ((noteable_type)::text = 'Epic'::text))"
+ t.index ["id", "noteable_type"], name: "note_mentions_temp_index", where: "(note ~~ '%@%'::text)"
t.index ["line_code"], name: "index_notes_on_line_code"
t.index ["note"], name: "index_notes_on_note_trigram", opclass: :gin_trgm_ops, using: :gin
- t.index ["note"], name: "tmp_idx_on_promoted_notes", where: "(((noteable_type)::text = 'Issue'::text) AND (system IS TRUE) AND (note ~~ 'promoted to epic%'::text))"
t.index ["noteable_id", "noteable_type"], name: "index_notes_on_noteable_id_and_noteable_type"
t.index ["project_id", "id"], name: "index_notes_on_project_id_and_id_and_system_false", where: "(NOT system)"
t.index ["project_id", "noteable_type"], name: "index_notes_on_project_id_and_noteable_type"
@@ -2841,6 +2867,7 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.boolean "issue_due"
t.boolean "new_epic"
t.string "notification_email"
+ t.boolean "fixed_pipeline"
t.boolean "new_release"
t.index ["source_id", "source_type"], name: "index_notification_settings_on_source_id_and_source_type"
t.index ["user_id", "source_id", "source_type"], name: "index_notifications_on_user_id_and_source_id_and_source_type", unique: true
@@ -2896,6 +2923,21 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.index ["access_grant_id"], name: "index_oauth_openid_requests_on_access_grant_id"
end
+ create_table "open_project_tracker_data", force: :cascade do |t|
+ t.integer "service_id", null: false
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.string "encrypted_url", limit: 255
+ t.string "encrypted_url_iv", limit: 255
+ t.string "encrypted_api_url", limit: 255
+ t.string "encrypted_api_url_iv", limit: 255
+ t.string "encrypted_token", limit: 255
+ t.string "encrypted_token_iv", limit: 255
+ t.string "closed_status_id", limit: 5
+ t.string "project_identifier_code", limit: 100
+ t.index ["service_id"], name: "index_open_project_tracker_data_on_service_id"
+ end
+
create_table "operations_feature_flag_scopes", force: :cascade do |t|
t.bigint "feature_flag_id", null: false
t.datetime_with_timezone "created_at", null: false
@@ -2914,6 +2956,7 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.string "name", null: false
t.text "description"
t.integer "iid", null: false
+ t.integer "version", limit: 2, default: 1, null: false
t.index ["project_id", "iid"], name: "index_operations_feature_flags_on_project_id_and_iid", unique: true
t.index ["project_id", "name"], name: "index_operations_feature_flags_on_project_id_and_name", unique: true
end
@@ -2994,7 +3037,6 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
t.bigint "size"
- t.integer "file_type"
t.integer "file_store"
t.binary "file_md5"
t.binary "file_sha1"
@@ -3059,6 +3101,7 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.boolean "wildcard", default: false, null: false
t.integer "usage", limit: 2, default: 0, null: false
t.integer "scope", limit: 2, default: 2, null: false
+ t.index "lower((domain)::text)", name: "index_pages_domains_on_domain_lowercase"
t.index ["certificate_source", "certificate_valid_not_after"], name: "index_pages_domains_need_auto_ssl_renewal", where: "(auto_ssl_enabled = true)"
t.index ["domain", "wildcard"], name: "index_pages_domains_on_domain_and_wildcard", unique: true
t.index ["project_id", "enabled_until"], name: "index_pages_domains_on_project_id_and_enabled_until"
@@ -3104,6 +3147,9 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.integer "ci_pipeline_size", default: 0, null: false
t.integer "ci_active_jobs", default: 0, null: false
t.integer "project_hooks", default: 0, null: false
+ t.integer "group_hooks", default: 0, null: false
+ t.integer "ci_project_subscriptions", default: 0, null: false
+ t.integer "ci_pipeline_schedules", default: 0, null: false
t.index ["plan_id"], name: "index_plan_limits_on_plan_id", unique: true
end
@@ -3206,6 +3252,18 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.string "organization_name"
end
+ create_table "project_export_jobs", force: :cascade do |t|
+ t.bigint "project_id", null: false
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.integer "status", limit: 2, default: 0, null: false
+ t.string "jid", limit: 100, null: false
+ t.index ["jid"], name: "index_project_export_jobs_on_jid", unique: true
+ t.index ["project_id", "jid"], name: "index_project_export_jobs_on_project_id_and_jid"
+ t.index ["project_id", "status"], name: "index_project_export_jobs_on_project_id_and_status"
+ t.index ["status"], name: "index_project_export_jobs_on_status"
+ end
+
create_table "project_feature_usages", primary_key: "project_id", id: :integer, default: nil, force: :cascade do |t|
t.datetime "jira_dvcs_cloud_last_sync_at"
t.datetime "jira_dvcs_server_last_sync_at"
@@ -3227,6 +3285,8 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.integer "pages_access_level", null: false
t.integer "forking_access_level"
t.index ["project_id"], name: "index_project_features_on_project_id", unique: true
+ t.index ["project_id"], name: "index_project_features_on_project_id_bal_20", where: "(builds_access_level = 20)"
+ t.index ["project_id"], name: "index_project_features_on_project_id_ral_20", where: "(repository_access_level = 20)"
end
create_table "project_group_links", id: :serial, force: :cascade do |t|
@@ -3428,11 +3488,13 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.index ["created_at", "id"], name: "index_projects_api_created_at_id_desc", order: { id: :desc }
t.index ["created_at", "id"], name: "index_projects_api_vis20_created_at", where: "(visibility_level = 20)"
t.index ["created_at", "id"], name: "index_projects_on_created_at_and_id"
- t.index ["creator_id"], name: "index_projects_on_creator_id"
+ t.index ["creator_id", "created_at"], name: "index_projects_on_creator_id_and_created_at"
+ t.index ["creator_id", "created_at"], name: "index_projects_on_mirror_creator_id_created_at", where: "((mirror = true) AND (mirror_trigger_builds = true))"
t.index ["description"], name: "index_projects_on_description_trigram", opclass: :gin_trgm_ops, using: :gin
t.index ["id", "repository_storage", "last_repository_updated_at"], name: "idx_projects_on_repository_storage_last_repository_updated_at"
t.index ["id"], name: "index_on_id_partial_with_legacy_storage", where: "((storage_version < 2) OR (storage_version IS NULL))"
t.index ["id"], name: "index_projects_on_id_partial_for_visibility", unique: true, where: "(visibility_level = ANY (ARRAY[10, 20]))"
+ t.index ["id"], name: "index_projects_on_id_service_desk_enabled", where: "(service_desk_enabled = true)"
t.index ["id"], name: "index_projects_on_mirror_and_mirror_trigger_builds_both_true", where: "((mirror IS TRUE) AND (mirror_trigger_builds IS TRUE))"
t.index ["last_activity_at", "id"], name: "index_projects_api_last_activity_at_id_desc", order: { id: :desc }
t.index ["last_activity_at", "id"], name: "index_projects_api_vis20_last_activity_at", where: "(visibility_level = 20)"
@@ -3646,6 +3708,7 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.string "name", null: false
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
+ t.string "filepath", limit: 128
t.index ["release_id", "name"], name: "index_release_links_on_release_id_and_name", unique: true
t.index ["release_id", "url"], name: "index_release_links_on_release_id_and_url", unique: true
end
@@ -3683,6 +3746,7 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.boolean "only_protected_branches", default: false, null: false
t.string "remote_name"
t.boolean "error_notification_sent"
+ t.boolean "keep_divergent_refs"
t.index ["last_successful_update_at"], name: "index_remote_mirrors_on_last_successful_update_at"
t.index ["project_id"], name: "index_remote_mirrors_on_project_id"
end
@@ -3694,6 +3758,25 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.index ["project_id", "programming_language_id"], name: "index_repository_languages_on_project_and_languages_id", unique: true
end
+ create_table "requirements", force: :cascade do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.integer "project_id", null: false
+ t.integer "author_id"
+ t.integer "iid", null: false
+ t.integer "cached_markdown_version"
+ t.integer "state", limit: 2, default: 1, null: false
+ t.string "title", limit: 255, null: false
+ t.text "title_html"
+ t.index ["author_id"], name: "index_requirements_on_author_id"
+ t.index ["created_at"], name: "index_requirements_on_created_at"
+ t.index ["project_id", "iid"], name: "index_requirements_on_project_id_and_iid", unique: true, where: "(project_id IS NOT NULL)"
+ t.index ["project_id"], name: "index_requirements_on_project_id"
+ t.index ["state"], name: "index_requirements_on_state"
+ t.index ["title"], name: "index_requirements_on_title_trigram", opclass: :gin_trgm_ops, using: :gin
+ t.index ["updated_at"], name: "index_requirements_on_updated_at"
+ end
+
create_table "resource_label_events", force: :cascade do |t|
t.integer "action", null: false
t.integer "issue_id"
@@ -3767,10 +3850,22 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.string "sso_url", null: false
t.boolean "enforced_sso", default: false, null: false
t.boolean "enforced_group_managed_accounts", default: false, null: false
- t.boolean "prohibited_outer_forks", default: false
+ t.boolean "prohibited_outer_forks", default: true, null: false
t.index ["group_id"], name: "index_saml_providers_on_group_id"
end
+ create_table "scim_identities", force: :cascade do |t|
+ t.bigint "group_id", null: false
+ t.bigint "user_id", null: false
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.boolean "active", default: false
+ t.string "extern_uid", limit: 255, null: false
+ t.index "lower((extern_uid)::text), group_id", name: "index_scim_identities_on_lower_extern_uid_and_group_id", unique: true
+ t.index ["group_id"], name: "index_scim_identities_on_group_id"
+ t.index ["user_id", "group_id"], name: "index_scim_identities_on_user_id_and_group_id", unique: true
+ end
+
create_table "scim_oauth_access_tokens", id: :serial, force: :cascade do |t|
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
@@ -3867,8 +3962,11 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.string "description", limit: 500
t.boolean "comment_on_event_enabled", default: true, null: false
t.boolean "template", default: false
- t.index ["project_id"], name: "index_services_on_project_id"
+ t.boolean "instance", default: false, null: false
+ t.index ["project_id", "type"], name: "index_services_on_project_id_and_type"
t.index ["template"], name: "index_services_on_template"
+ t.index ["type", "instance"], name: "index_services_on_type_and_instance", unique: true, where: "(instance IS TRUE)"
+ t.index ["type", "template"], name: "index_services_on_type_and_template", unique: true, where: "(template IS TRUE)"
t.index ["type"], name: "index_services_on_type"
end
@@ -3933,10 +4031,10 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.string "encrypted_secret_token", limit: 255
t.string "encrypted_secret_token_iv", limit: 255
t.boolean "secret", default: false, null: false
- t.string "repository_storage", limit: 255, default: "default", null: false
t.index ["author_id"], name: "index_snippets_on_author_id"
t.index ["content"], name: "index_snippets_on_content_trigram", opclass: :gin_trgm_ops, using: :gin
t.index ["created_at"], name: "index_snippets_on_created_at"
+ t.index ["description"], name: "index_snippets_on_description_trigram", opclass: :gin_trgm_ops, using: :gin
t.index ["file_name"], name: "index_snippets_on_file_name_trigram", opclass: :gin_trgm_ops, using: :gin
t.index ["project_id", "visibility_level"], name: "index_snippets_on_project_id_and_visibility_level"
t.index ["title"], name: "index_snippets_on_title_trigram", opclass: :gin_trgm_ops, using: :gin
@@ -3975,6 +4073,18 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.boolean "recaptcha_verified", default: false, null: false
end
+ create_table "status_page_settings", primary_key: "project_id", force: :cascade do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.boolean "enabled", default: false, null: false
+ t.string "aws_s3_bucket_name", limit: 63, null: false
+ t.string "aws_region", limit: 255, null: false
+ t.string "aws_access_key", limit: 255, null: false
+ t.string "encrypted_aws_secret_key", limit: 255, null: false
+ t.string "encrypted_aws_secret_key_iv", limit: 255, null: false
+ t.index ["project_id"], name: "index_status_page_settings_on_project_id"
+ end
+
create_table "subscriptions", id: :serial, force: :cascade do |t|
t.integer "user_id"
t.integer "subscribable_id"
@@ -4070,6 +4180,7 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.integer "note_id"
t.string "commit_id"
t.integer "group_id"
+ t.index ["author_id", "created_at"], name: "index_todos_on_author_id_and_created_at"
t.index ["author_id"], name: "index_todos_on_author_id"
t.index ["commit_id"], name: "index_todos_on_commit_id"
t.index ["group_id"], name: "index_todos_on_group_id"
@@ -4145,6 +4256,17 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.index ["user_id", "key"], name: "index_user_custom_attributes_on_user_id_and_key", unique: true
end
+ create_table "user_details", primary_key: "user_id", force: :cascade do |t|
+ t.string "job_title", limit: 200, default: "", null: false
+ t.index ["user_id"], name: "index_user_details_on_user_id", unique: true
+ end
+
+ create_table "user_highest_roles", primary_key: "user_id", id: :bigint, default: nil, force: :cascade do |t|
+ t.datetime_with_timezone "updated_at", null: false
+ t.integer "highest_access_level"
+ t.index ["user_id", "highest_access_level"], name: "index_user_highest_roles_on_user_id_and_highest_access_level"
+ end
+
create_table "user_interacted_projects", id: false, force: :cascade do |t|
t.integer "user_id", null: false
t.integer "project_id", null: false
@@ -4279,6 +4401,7 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.string "last_name", limit: 255
t.string "static_object_token", limit: 255
t.integer "role", limit: 2
+ t.integer "user_type", limit: 2
t.index "lower((name)::text)", name: "index_on_users_name_lower"
t.index ["accepted_term_id"], name: "index_users_on_accepted_term_id"
t.index ["admin"], name: "index_users_on_admin"
@@ -4301,6 +4424,7 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.index ["state"], name: "index_users_on_state_and_internal_ee", where: "((ghost IS NOT TRUE) AND (bot_type IS NULL))"
t.index ["static_object_token"], name: "index_users_on_static_object_token", unique: true
t.index ["unconfirmed_email"], name: "index_users_on_unconfirmed_email", where: "(unconfirmed_email IS NOT NULL)"
+ t.index ["user_type"], name: "index_users_on_user_type"
t.index ["username"], name: "index_users_on_username"
t.index ["username"], name: "index_users_on_username_trigram", opclass: :gin_trgm_ops, using: :gin
end
@@ -4330,6 +4454,19 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.index ["user_id", "project_id"], name: "index_users_star_projects_on_user_id_and_project_id", unique: true
end
+ create_table "users_statistics", force: :cascade do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.integer "without_groups_and_projects", default: 0, null: false
+ t.integer "with_highest_role_guest", default: 0, null: false
+ t.integer "with_highest_role_reporter", default: 0, null: false
+ t.integer "with_highest_role_developer", default: 0, null: false
+ t.integer "with_highest_role_maintainer", default: 0, null: false
+ t.integer "with_highest_role_owner", default: 0, null: false
+ t.integer "bots", default: 0, null: false
+ t.integer "blocked", default: 0, null: false
+ end
+
create_table "vulnerabilities", force: :cascade do |t|
t.bigint "milestone_id"
t.bigint "epic_id"
@@ -4348,8 +4485,6 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.text "description_html"
t.bigint "start_date_sourcing_milestone_id"
t.bigint "due_date_sourcing_milestone_id"
- t.bigint "closed_by_id"
- t.datetime_with_timezone "closed_at"
t.integer "state", limit: 2, default: 1, null: false
t.integer "severity", limit: 2, null: false
t.boolean "severity_overridden", default: false
@@ -4361,11 +4496,14 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.integer "cached_markdown_version"
t.bigint "confirmed_by_id"
t.datetime_with_timezone "confirmed_at"
+ t.datetime_with_timezone "dismissed_at"
+ t.bigint "dismissed_by_id"
t.index ["author_id"], name: "index_vulnerabilities_on_author_id"
- t.index ["closed_by_id"], name: "index_vulnerabilities_on_closed_by_id"
t.index ["confirmed_by_id"], name: "index_vulnerabilities_on_confirmed_by_id"
+ t.index ["dismissed_by_id"], name: "index_vulnerabilities_on_dismissed_by_id"
t.index ["due_date_sourcing_milestone_id"], name: "index_vulnerabilities_on_due_date_sourcing_milestone_id"
t.index ["epic_id"], name: "index_vulnerabilities_on_epic_id"
+ t.index ["id"], name: "undefined_vulnerability", where: "(severity = 0)"
t.index ["last_edited_by_id"], name: "index_vulnerabilities_on_last_edited_by_id"
t.index ["milestone_id"], name: "index_vulnerabilities_on_milestone_id"
t.index ["project_id"], name: "index_vulnerabilities_on_project_id"
@@ -4453,6 +4591,7 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
t.string "metadata_version", null: false
t.text "raw_metadata", null: false
t.bigint "vulnerability_id"
+ t.index ["id"], name: "undefined_vulnerabilities", where: "(severity = 0)"
t.index ["primary_identifier_id"], name: "index_vulnerability_occurrences_on_primary_identifier_id"
t.index ["project_id", "primary_identifier_id", "location_fingerprint", "scanner_id"], name: "index_vulnerability_occurrences_on_unique_keys", unique: true
t.index ["scanner_id"], name: "index_vulnerability_occurrences_on_scanner_id"
@@ -4642,6 +4781,8 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
add_foreign_key "ci_pipelines", "merge_requests", name: "fk_a23be95014", on_delete: :cascade
add_foreign_key "ci_pipelines", "projects", name: "fk_86635dbd80", on_delete: :cascade
add_foreign_key "ci_pipelines_config", "ci_pipelines", column: "pipeline_id", on_delete: :cascade
+ add_foreign_key "ci_refs", "ci_pipelines", column: "last_updated_by_pipeline_id", on_delete: :nullify
+ add_foreign_key "ci_refs", "projects", on_delete: :cascade
add_foreign_key "ci_resource_groups", "projects", name: "fk_774722d144", on_delete: :cascade
add_foreign_key "ci_resources", "ci_builds", column: "build_id", name: "fk_e169a8e3d5", on_delete: :nullify
add_foreign_key "ci_resources", "ci_resource_groups", column: "resource_group_id", on_delete: :cascade
@@ -4653,6 +4794,8 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
add_foreign_key "ci_sources_pipelines", "ci_pipelines", column: "source_pipeline_id", name: "fk_d4e29af7d7", on_delete: :cascade
add_foreign_key "ci_sources_pipelines", "projects", column: "source_project_id", name: "fk_acd9737679", on_delete: :cascade
add_foreign_key "ci_sources_pipelines", "projects", name: "fk_1e53c97c0a", on_delete: :cascade
+ add_foreign_key "ci_sources_projects", "ci_pipelines", column: "pipeline_id", on_delete: :cascade
+ add_foreign_key "ci_sources_projects", "projects", column: "source_project_id", on_delete: :cascade
add_foreign_key "ci_stages", "ci_pipelines", column: "pipeline_id", name: "fk_fb57e6cc56", on_delete: :cascade
add_foreign_key "ci_stages", "projects", name: "fk_2360681d1d", on_delete: :cascade
add_foreign_key "ci_subscriptions_projects", "projects", column: "downstream_project_id", on_delete: :cascade
@@ -4695,6 +4838,7 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
add_foreign_key "deployment_clusters", "clusters", on_delete: :cascade
add_foreign_key "deployment_clusters", "deployments", on_delete: :cascade
add_foreign_key "deployment_merge_requests", "deployments", on_delete: :cascade
+ add_foreign_key "deployment_merge_requests", "environments", name: "fk_a064ff4453", on_delete: :cascade
add_foreign_key "deployment_merge_requests", "merge_requests", on_delete: :cascade
add_foreign_key "deployments", "clusters", name: "fk_289bba3222", on_delete: :nullify
add_foreign_key "deployments", "projects", name: "fk_b9a3851b82", on_delete: :cascade
@@ -4737,7 +4881,6 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
add_foreign_key "fork_network_members", "projects", column: "forked_from_project_id", name: "fk_b01280dae4", on_delete: :nullify
add_foreign_key "fork_network_members", "projects", on_delete: :cascade
add_foreign_key "fork_networks", "projects", column: "root_project_id", name: "fk_e7b436b2b5", on_delete: :nullify
- add_foreign_key "forked_project_links", "projects", column: "forked_to_project_id", name: "fk_434510edb0", on_delete: :cascade
add_foreign_key "geo_container_repository_updated_events", "container_repositories", name: "fk_212c89c706", on_delete: :cascade
add_foreign_key "geo_event_log", "geo_cache_invalidation_events", column: "cache_invalidation_event_id", name: "fk_42c3b54bed", on_delete: :cascade
add_foreign_key "geo_event_log", "geo_container_repository_updated_events", column: "container_repository_updated_event_id", name: "fk_6ada82d42a", on_delete: :cascade
@@ -4869,6 +5012,7 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
add_foreign_key "notes", "reviews", name: "fk_2e82291620", on_delete: :nullify
add_foreign_key "notification_settings", "users", name: "fk_0c95e91db7", on_delete: :cascade
add_foreign_key "oauth_openid_requests", "oauth_access_grants", column: "access_grant_id", name: "fk_77114b3b09", on_delete: :cascade
+ add_foreign_key "open_project_tracker_data", "services", on_delete: :cascade
add_foreign_key "operations_feature_flag_scopes", "operations_feature_flags", column: "feature_flag_id", on_delete: :cascade
add_foreign_key "operations_feature_flags", "projects", on_delete: :cascade
add_foreign_key "operations_feature_flags_clients", "projects", on_delete: :cascade
@@ -4903,6 +5047,7 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
add_foreign_key "project_deploy_tokens", "deploy_tokens", on_delete: :cascade
add_foreign_key "project_deploy_tokens", "projects", on_delete: :cascade
add_foreign_key "project_error_tracking_settings", "projects", on_delete: :cascade
+ add_foreign_key "project_export_jobs", "projects", on_delete: :cascade
add_foreign_key "project_feature_usages", "projects", on_delete: :cascade
add_foreign_key "project_features", "projects", name: "fk_18513d9b92", on_delete: :cascade
add_foreign_key "project_group_links", "projects", name: "fk_daa8cee94c", on_delete: :cascade
@@ -4950,6 +5095,8 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
add_foreign_key "releases", "users", column: "author_id", name: "fk_8e4456f90f", on_delete: :nullify
add_foreign_key "remote_mirrors", "projects", name: "fk_43a9aa4ca8", on_delete: :cascade
add_foreign_key "repository_languages", "projects", on_delete: :cascade
+ add_foreign_key "requirements", "projects", on_delete: :cascade
+ add_foreign_key "requirements", "users", column: "author_id", on_delete: :nullify
add_foreign_key "resource_label_events", "epics", on_delete: :cascade
add_foreign_key "resource_label_events", "issues", on_delete: :cascade
add_foreign_key "resource_label_events", "labels", on_delete: :nullify
@@ -4965,6 +5112,8 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
add_foreign_key "reviews", "projects", on_delete: :cascade
add_foreign_key "reviews", "users", column: "author_id", on_delete: :nullify
add_foreign_key "saml_providers", "namespaces", column: "group_id", on_delete: :cascade
+ add_foreign_key "scim_identities", "namespaces", column: "group_id", on_delete: :cascade
+ add_foreign_key "scim_identities", "users", on_delete: :cascade
add_foreign_key "scim_oauth_access_tokens", "namespaces", column: "group_id", on_delete: :cascade
add_foreign_key "security_scans", "ci_builds", column: "build_id", on_delete: :cascade
add_foreign_key "self_managed_prometheus_alert_events", "environments", on_delete: :cascade
@@ -4984,6 +5133,7 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
add_foreign_key "snippets", "projects", name: "fk_be41fd4bb7", on_delete: :cascade
add_foreign_key "software_license_policies", "projects", on_delete: :cascade
add_foreign_key "software_license_policies", "software_licenses", on_delete: :cascade
+ add_foreign_key "status_page_settings", "projects", on_delete: :cascade
add_foreign_key "subscriptions", "projects", on_delete: :cascade
add_foreign_key "suggestions", "notes", on_delete: :cascade
add_foreign_key "system_note_metadata", "description_versions", name: "fk_fbd87415c9", on_delete: :nullify
@@ -5001,6 +5151,8 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
add_foreign_key "u2f_registrations", "users"
add_foreign_key "user_callouts", "users", on_delete: :cascade
add_foreign_key "user_custom_attributes", "users", on_delete: :cascade
+ add_foreign_key "user_details", "users", on_delete: :cascade
+ add_foreign_key "user_highest_roles", "users", on_delete: :cascade
add_foreign_key "user_interacted_projects", "projects", name: "fk_722ceba4f7", on_delete: :cascade
add_foreign_key "user_interacted_projects", "users", name: "fk_0894651f08", on_delete: :cascade
add_foreign_key "user_preferences", "users", on_delete: :cascade
@@ -5019,8 +5171,8 @@ ActiveRecord::Schema.define(version: 2020_02_14_085940) do
add_foreign_key "vulnerabilities", "milestones", name: "fk_131d289c65", on_delete: :nullify
add_foreign_key "vulnerabilities", "projects", name: "fk_efb96ab1e2", on_delete: :cascade
add_foreign_key "vulnerabilities", "users", column: "author_id", name: "fk_b1de915a15", on_delete: :nullify
- add_foreign_key "vulnerabilities", "users", column: "closed_by_id", name: "fk_cf5c60acbf", on_delete: :nullify
add_foreign_key "vulnerabilities", "users", column: "confirmed_by_id", name: "fk_959d40ad0a", on_delete: :nullify
+ add_foreign_key "vulnerabilities", "users", column: "dismissed_by_id", name: "fk_725465b774", on_delete: :nullify
add_foreign_key "vulnerabilities", "users", column: "last_edited_by_id", name: "fk_1302949740", on_delete: :nullify
add_foreign_key "vulnerabilities", "users", column: "resolved_by_id", name: "fk_76bc5f5455", on_delete: :nullify
add_foreign_key "vulnerabilities", "users", column: "updated_by_id", name: "fk_7ac31eacb9", on_delete: :nullify
diff --git a/doc/.linting/vale/styles/gitlab/Contractions.yml b/doc/.linting/vale/styles/gitlab/Contractions.yml
deleted file mode 100644
index 0f31f6b6aa9..00000000000
--- a/doc/.linting/vale/styles/gitlab/Contractions.yml
+++ /dev/null
@@ -1,76 +0,0 @@
----
-# `extends` indicates the Vale extension point being used.
-# Full list of styles: https://errata-ai.github.io/vale/styles/
-extends: substitution
-
-# Substitution rules can display the matched and suggested strings in the
-# message shown to the user. The first use of %s prints the suggested option,
-# and the second use of %s displays what was found in the text.
-message: Use "%s" instead of "%s" in most cases.
-
-# Should a result be flagged as a suggestion, warning, or error?
-# Results that fall below the MinAlertLevel set in
-# https://gitlab.com/gitlab-org/gitlab/blob/master/.vale.ini won't be shown.
-level: suggestion
-
-# Should a match be case-insensitive or case-sensitive?
-# Acceptable values are 'true' or 'false'
-ignorecase: true
-
-# Should this rule be limited to a specific scope? If yes, uncomment the line.
-# Possible scopes: https://errata-ai.github.io/vale/formats/#available-scopes
-# scope: heading
-
-# Should this rule ignore normal word boundaries, such as \b ?
-# Acceptable values are 'true' or 'false'
-nonword: false
-
-# What is the source for this rule?
-link: https://docs.gitlab.com/ee/development/documentation/styleguide.html#language
-
-# The 'swap' section provides a list of values, one per line, in the form of
-# $bad: $good
-swap:
-
- # Common contractions are ok
- it is: it's
- can not: can't
- cannot: can't
- do not: don't
- have not: haven't
- that is: that's
- we are: we're
- will not: won't
- would not: wouldn't
- you are: you're
- you have: you've
-
- # Uncommon contractions are not ok
- aren't: are not
- couldn't: could not
- didn't: did not
- doesn't: does not
- hasn't: has not
- how'll: how will
- how's: how is
- isn't: is not
- it'll: it will
- shouldn't: should not
- that'll: that will
- they'll: they will
- they're: they are
- wasn't: was not
- weren't: were not
- we'll: we will
- we've: we have
- what's: what is
- what'll: what will
- when's: when is
- when'll: when will
- where's: where is
- where'll: where will
- who's: who is
- who'll: who will
- why's: why is
- why'll: why will
-
diff --git a/doc/.linting/vale/styles/gitlab/LatinTerms.yml b/doc/.linting/vale/styles/gitlab/LatinTerms.yml
deleted file mode 100644
index 082b56147a4..00000000000
--- a/doc/.linting/vale/styles/gitlab/LatinTerms.yml
+++ /dev/null
@@ -1,39 +0,0 @@
----
-# `extends` indicates the Vale extension point being used.
-# Full list of styles: https://errata-ai.github.io/vale/styles/
-extends: substitution
-
-# Substitution rules can display the matched and suggested strings in the
-# message shown to the user. The first use of %s prints the suggested option,
-# and the second use of %s displays what was found in the text.
-message: Use "%s" instead of "%s," but consider rewriting the sentence.
-
-# Should a result be flagged as a suggestion, warning, or error?
-# Results that fall below the MinAlertLevel set in
-# https://gitlab.com/gitlab-org/gitlab/blob/master/.vale.ini won't be shown.
-level: warning
-
-# Should a match be case-insensitive or case-sensitive?
-# Acceptable values are 'true' or 'false'
-ignorecase: true
-
-# Should this rule be limited to a specific scope? If yes, uncomment the line.
-# Possible scopes: https://errata-ai.github.io/vale/formats/#available-scopes
-# scope: heading
-
-# Should this rule ignore normal word boundaries, such as \b ?
-# Acceptable values are 'true' or 'false'
-nonword: true
-
-# What is the source for this rule?
-link: https://docs.gitlab.com/ee/development/documentation/styleguide.html#language
-
-# The 'swap' section provides a list of values, one per line, in the form of
-# $bad: $good
-swap:
- e\.g\.: for example
- e\. g\.: for example
- i\.e\.: that is
- i\. e\.: that is
- etc\.: and so on
- et cetera: and so on
diff --git a/doc/.linting/vale/styles/gitlab/OxfordComma.yml b/doc/.linting/vale/styles/gitlab/OxfordComma.yml
deleted file mode 100644
index 76a8d5c3f3e..00000000000
--- a/doc/.linting/vale/styles/gitlab/OxfordComma.yml
+++ /dev/null
@@ -1,6 +0,0 @@
-extends: existence
-message: Use a comma before the last "and" in a list of three or more items.
-link: https://docs.gitlab.com/ee/development/documentation/styleguide.html#punctuation
-level: warning
-tokens:
- - '(?:[^,]+,){1,}\s\w+\sand'
diff --git a/doc/.linting/vale/styles/gitlab/SentenceSpacing.yml b/doc/.linting/vale/styles/gitlab/SentenceSpacing.yml
deleted file mode 100644
index 5efc6ceeef5..00000000000
--- a/doc/.linting/vale/styles/gitlab/SentenceSpacing.yml
+++ /dev/null
@@ -1,32 +0,0 @@
----
-# `extends` indicates the Vale extension point being used.
-# Full list of styles: https://errata-ai.github.io/vale/styles/
-extends: existence
-
-# Existence rules can display the matched strings in the user message.
-message: "'%s' should have one space between sentences."
-
-# Should a result be flagged as a suggestion, warning, or error?
-# Results that fall below the MinAlertLevel set in
-# https://gitlab.com/gitlab-org/gitlab/blob/master/.vale.ini won't be shown.
-level: suggestion
-
-# Should a match be case-insensitive or case-sensitive?
-# Acceptable values are 'true' or 'false'
-# This value is irrelevant when testing non-alphabetical characters
-#ignorecase: true
-
-# Should this rule be limited to a specific scope? If yes, uncomment the line.
-# Possible scopes: https://errata-ai.github.io/vale/formats/#available-scopes
-# scope: heading
-
-# Should this rule ignore normal word boundaries, such as \b ?
-# Acceptable values are 'true' or 'false'
-nonword: true
-
-# What is the source for this rule?
-link: https://docs.gitlab.com/ee/development/documentation/styleguide.html#punctuation
-
-tokens:
- - '[a-z][.?!][A-Z]'
- - '[.?!] {2,}[A-Z]'
diff --git a/doc/.linting/vale/styles/gitlab/Substitutions.yml b/doc/.linting/vale/styles/gitlab/Substitutions.yml
deleted file mode 100644
index d9ae1dfcb07..00000000000
--- a/doc/.linting/vale/styles/gitlab/Substitutions.yml
+++ /dev/null
@@ -1,36 +0,0 @@
----
-# `extends` indicates the Vale extension point being used.
-# Full list of styles: https://errata-ai.github.io/vale/styles/
-extends: substitution
-
-# Substitution rules can display the matched and suggested strings in the
-# message shown to the user. The first use of %s prints the suggested option,
-# and the second use of %s displays what was found in the text.
-message: Use "%s" instead of "%s."
-
-# Should a result be flagged as a suggestion, warning, or error?
-# Results that fall below the MinAlertLevel set in
-# https://gitlab.com/gitlab-org/gitlab/blob/master/.vale.ini won't be shown.
-level: warning
-
-# Should a match be case-insensitive or case-sensitive?
-# Acceptable values are 'true' or 'false'
-ignorecase: true
-
-# Should this rule be limited to a specific scope? If yes, uncomment the line.
-# Possible scopes: https://errata-ai.github.io/vale/formats/#available-scopes
-# scope: heading
-
-# Should this rule ignore normal word boundaries, such as \b ?
-# Acceptable values are 'true' or 'false'
-nonword: true
-
-# What is the source for this rule?
-link: https://about.gitlab.com/handbook/communication/#top-misused-terms
-
-# The 'swap' section provides a list of values, one per line, in the form of
-# $bad: $good
-swap:
- GitLabber: GitLab team member
- self hosted: self-managed
- self-hosted: self-managed
diff --git a/doc/.vale/gitlab/Contractions.yml b/doc/.vale/gitlab/Contractions.yml
new file mode 100644
index 00000000000..5f389bd1ea4
--- /dev/null
+++ b/doc/.vale/gitlab/Contractions.yml
@@ -0,0 +1,53 @@
+---
+# Checks for use of common and uncommon contractions.
+#
+# For a list of all options, see https://errata-ai.github.io/vale/styles/
+extends: substitution
+message: Use "%s" instead of "%s" in most cases.
+link: https://docs.gitlab.com/ee/development/documentation/styleguide.html#language
+level: suggestion
+nonword: false
+ignorecase: true
+swap:
+
+ # Common contractions are ok
+ it is: it's
+ can not: can't
+ cannot: can't
+ do not: don't
+ have not: haven't
+ that is: that's
+ we are: we're
+ will not: won't
+ would not: wouldn't
+ you are: you're
+ you have: you've
+
+ # Uncommon contractions are not ok
+ aren't: are not
+ couldn't: could not
+ didn't: did not
+ doesn't: does not
+ hasn't: has not
+ how'll: how will
+ how's: how is
+ isn't: is not
+ it'll: it will
+ shouldn't: should not
+ that'll: that will
+ they'll: they will
+ they're: they are
+ wasn't: was not
+ weren't: were not
+ we'll: we will
+ we've: we have
+ what's: what is
+ what'll: what will
+ when's: when is
+ when'll: when will
+ where's: where is
+ where'll: where will
+ who's: who is
+ who'll: who will
+ why's: why is
+ why'll: why will
diff --git a/doc/.vale/gitlab/FirstPerson.yml b/doc/.vale/gitlab/FirstPerson.yml
new file mode 100644
index 00000000000..18c5265b0a6
--- /dev/null
+++ b/doc/.vale/gitlab/FirstPerson.yml
@@ -0,0 +1,13 @@
+# Checks for use of first-person pronouns.
+#
+# For a list of all options, see https://errata-ai.github.io/vale/styles/
+extends: existence
+message: '`%s` is a singular first-person pronoun. Use second-person or plural first-person pronouns (like you, we, or us) instead.'
+level: warning
+ignorecase: true
+link: https://docs.gitlab.com/ee/development/documentation/styleguide.html#language
+tokens:
+ - '\bI[ ,;:?!"]|\bI\x27.{1,2}'
+ - me
+ - myself
+ - mine
diff --git a/doc/.vale/gitlab/InternalLinkExtension.yml b/doc/.vale/gitlab/InternalLinkExtension.yml
new file mode 100644
index 00000000000..d07a2600798
--- /dev/null
+++ b/doc/.vale/gitlab/InternalLinkExtension.yml
@@ -0,0 +1,11 @@
+---
+# Checks that internal links use the .md extension and not the .html extension.
+#
+# For a list of all options, see https://errata-ai.github.io/vale/styles/
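+#
+# For example, a relative link written as `[Audit Events](../administration/audit_events.html)`
+# would be flagged, while the same link ending in `.md` would pass. (The path is a
+# hypothetical illustration of the pattern below, not a real document.)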
+extends: existence
+message: Link %s must use the .md file extension.
+link: https://docs.gitlab.com/ee/development/documentation/styleguide.html#links-to-internal-documentation
+level: error
+scope: raw
+raw:
+ - '\[.+\]\((https?:){0}[\w\/\.-]+(\.html).*\)'
diff --git a/doc/.vale/gitlab/LatinTerms.yml b/doc/.vale/gitlab/LatinTerms.yml
new file mode 100644
index 00000000000..8412631f8fe
--- /dev/null
+++ b/doc/.vale/gitlab/LatinTerms.yml
@@ -0,0 +1,17 @@
+---
+# Checks for use of Latin terms.
+#
+# For a list of all options, see https://errata-ai.github.io/vale/styles/
+extends: substitution
+message: Use "%s" instead of "%s," but consider rewriting the sentence.
+link: https://docs.gitlab.com/ee/development/documentation/styleguide.html#language
+level: warning
+nonword: true
+ignorecase: true
+swap:
+ e\.g\.: for example
+ e\. g\.: for example
+ i\.e\.: that is
+ i\. e\.: that is
+ etc\.: and so on
+ et cetera: and so on
diff --git a/doc/.vale/gitlab/OxfordComma.yml b/doc/.vale/gitlab/OxfordComma.yml
new file mode 100644
index 00000000000..4b37ba8c2b9
--- /dev/null
+++ b/doc/.vale/gitlab/OxfordComma.yml
@@ -0,0 +1,11 @@
+---
+# Checks for the lack of an Oxford comma. In some cases, it will also catch overly
+# complex sentence structures with lots of commas.
+#
+# For a list of all options, see https://errata-ai.github.io/vale/styles/
+extends: existence
+message: Use a comma before the last "and" or "or" in a list of four or more items.
+link: https://docs.gitlab.com/ee/development/documentation/styleguide.html#punctuation
+level: warning
+raw:
+ - '(?:[\w-_` ]+,){2,}(?:[\w-_` ]+) (and |or )'
diff --git a/doc/.vale/gitlab/RelativeLinks.yml b/doc/.vale/gitlab/RelativeLinks.yml
new file mode 100644
index 00000000000..95bd60dd6e4
--- /dev/null
+++ b/doc/.vale/gitlab/RelativeLinks.yml
@@ -0,0 +1,11 @@
+---
+# Checks for the presence of absolute hyperlinks that should be relative.
+#
+# For a list of all options, see https://errata-ai.github.io/vale/styles/
+extends: existence
+message: Link %s must be relative.
+link: https://docs.gitlab.com/ee/development/documentation/styleguide.html#links-to-internal-documentation
+level: error
+scope: raw
+raw:
+ - '\[.+\]\(https?:\/\/docs\.gitlab\.com\/ee.*\)'
diff --git a/doc/.vale/gitlab/SentenceSpacing.yml b/doc/.vale/gitlab/SentenceSpacing.yml
new file mode 100644
index 00000000000..b061f7f6f9e
--- /dev/null
+++ b/doc/.vale/gitlab/SentenceSpacing.yml
@@ -0,0 +1,15 @@
+---
+# Checks for the following in common content scenarios:
+#
+# - No spaces.
+# - More than one space.
+#
+# For a list of all options, see https://errata-ai.github.io/vale/styles/
+extends: existence
+message: '"%s" must contain one and only one space.'
+link: https://docs.gitlab.com/ee/development/documentation/styleguide.html#punctuation
+level: error
+nonword: true
+tokens:
+ - '[a-z][.?!,][A-Z]'
+ - '[\w.?!,\(\)\-":] {2,}[\w.?!,\(\)\-":]'
diff --git a/doc/.vale/gitlab/Substitutions.yml b/doc/.vale/gitlab/Substitutions.yml
new file mode 100644
index 00000000000..b32a03e17d5
--- /dev/null
+++ b/doc/.vale/gitlab/Substitutions.yml
@@ -0,0 +1,13 @@
+---
+# Checks for use of some of the top misused terms at GitLab.
+#
+# For a list of all options, see https://errata-ai.github.io/vale/styles/
+extends: substitution
+message: Use "%s" instead of "%s".
+link: https://about.gitlab.com/handbook/communication/#top-misused-terms
+level: error
+ignorecase: true
+swap:
+ GitLabber: GitLab team member
+ self hosted: self-managed
+ self-hosted: self-managed
diff --git a/doc/README.md b/doc/README.md
index 132351f5353..95a8b09dd07 100644
--- a/doc/README.md
+++ b/doc/README.md
@@ -109,10 +109,10 @@ The following documentation relates to the DevOps **Plan** stage:
| Plan Topics | Description |
|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------|
| [Burndown Charts](user/project/milestones/burndown_charts.md) **(STARTER)** | Watch your project's progress throughout a specific milestone. |
-| [Discussions](user/discussions/index.md) | Threads, comments, and resolvable threads in issues, commits, and merge requests. |
+| [Discussions](user/discussions/index.md) | Threads, comments, and resolvable threads in issues, commits, and merge requests. |
| [Due Dates](user/project/issues/due_dates.md) | Keep track of issue deadlines. |
| [Epics](user/group/epics/index.md) **(ULTIMATE)** | Tracking groups of issues that share a theme. |
-| [Issues](user/project/issues/index.md), including [confidential issues](user/project/issues/confidential_issues.md),<br/>[issue and merge request templates](user/project/description_templates.md),<br/>and [moving issues](user/project/issues/managing_issues.md#moving-issues) | Project issues, restricting access to issues, create templates for submitting new issues and merge requests, and moving issues between projects. |
+| [Issues](user/project/issues/index.md), including [confidential issues](user/project/issues/confidential_issues.md),<br/>[issue and merge request templates](user/project/description_templates.md),<br/>and [moving issues](user/project/issues/managing_issues.md#moving-issues) | Project issues, restricting access to issues, creating templates for submitting new issues and merge requests, and moving issues between projects. |
| [Labels](user/project/labels.md) | Categorize issues or merge requests with descriptive labels. |
| [Milestones](user/project/milestones/index.md) | Set milestones for delivery of issues and merge requests, with optional due date. |
| [Project Issue Board](user/project/issue_board.md) | Display issues on a Scrum or Kanban board. |
@@ -238,7 +238,7 @@ The following documentation relates to the DevOps **Verify** stage:
| [GitLab CI/CD](ci/README.md) | Explore the features and capabilities of Continuous Integration with GitLab. |
| [JUnit test reports](ci/junit_test_reports.md) | Display JUnit test reports on merge requests. |
| [Multi-project pipelines](ci/multi_project_pipelines.md) **(PREMIUM)** | Visualize entire pipelines that span multiple projects, including all cross-project inter-dependencies. |
-| [Pipeline Graphs](ci/pipelines.md#visualizing-pipelines) | Visualize builds. |
+| [Pipeline Graphs](ci/pipelines/index.md#visualizing-pipelines) | Visualize builds. |
| [Review Apps](ci/review_apps/index.md) | Preview changes to your application right from a merge request. |
<div align="right">
@@ -287,7 +287,7 @@ The following documentation relates to the DevOps **Release** stage:
| [GitLab CI/CD](ci/README.md) | Explore the features and capabilities of Continuous Deployment and Delivery with GitLab. |
| [GitLab Pages](user/project/pages/index.md) | Build, test, and deploy a static site directly from GitLab. |
| [Protected Runners](ci/runners/README.md#protected-runners) | Select Runners to only pick jobs for protected branches and tags. |
-| [Scheduled Pipelines](user/project/pipelines/schedules.md) | Execute pipelines on a schedule. |
+| [Scheduled Pipelines](ci/pipelines/schedules.md) | Execute pipelines on a schedule. |
<div align="right">
<a type="button" class="btn btn-default" href="#overview">
@@ -359,14 +359,14 @@ The following documentation relates to the DevOps **Secure** stage:
| Secure Topics | Description |
|:------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------|
-| [Compliance Dashboard](user/application_security/compliance_dashboard/index.md) **(ULTIMATE)** | View the most recent Merge Request activity in a group. |
+| [Compliance Dashboard](user/compliance/compliance_dashboard/index.md) **(ULTIMATE)** | View the most recent Merge Request activity in a group. |
| [Container Scanning](user/application_security/container_scanning/index.md) **(ULTIMATE)** | Use Clair to scan docker images for known vulnerabilities. |
| [Dependency List](user/application_security/dependency_list/index.md) **(ULTIMATE)** | View your project's dependencies and their known vulnerabilities. |
| [Dependency Scanning](user/application_security/dependency_scanning/index.md) **(ULTIMATE)** | Analyze your dependencies for known vulnerabilities. |
| [Dynamic Application Security Testing (DAST)](user/application_security/dast/index.md) **(ULTIMATE)** | Analyze running web applications for known vulnerabilities. |
| [Group Security Dashboard](user/application_security/security_dashboard/index.md#group-security-dashboard) **(ULTIMATE)** | View vulnerabilities in all the projects in a group and its subgroups. |
| [Instance Security Dashboard](user/application_security/security_dashboard/index.md#instance-security-dashboard) **(ULTIMATE)** | View vulnerabilities in all the projects you're interested in. |
-| [License Compliance](user/application_security/license_compliance/index.md) **(ULTIMATE)** | Search your project's dependencies for their licenses. |
+| [License Compliance](user/compliance/license_compliance/index.md) **(ULTIMATE)** | Search your project's dependencies for their licenses. |
| [Pipeline Security Dashboard](user/application_security/security_dashboard/index.md#pipeline-security-dashboard) **(ULTIMATE)** | View the security reports for your project's pipelines. |
| [Project Security Dashboard](user/application_security/security_dashboard/index.md#project-security-dashboard) **(ULTIMATE)** | View the latest security reports for your project. |
| [Static Application Security Testing (SAST)](user/application_security/sast/index.md) **(ULTIMATE)** | Analyze source code for known vulnerabilities. |
diff --git a/doc/administration/audit_events.md b/doc/administration/audit_events.md
index cf017c0167b..2b068e64901 100644
--- a/doc/administration/audit_events.md
+++ b/doc/administration/audit_events.md
@@ -3,28 +3,28 @@
GitLab offers a way to view the changes made within the GitLab server for owners and administrators on a [paid plan][ee].
GitLab system administrators can also take advantage of the logs located on the
-filesystem, see [the logs system documentation](logs.md) for more details.
+filesystem. See [the logs system documentation](logs.md) for more details.
## Overview
-**Audit Events** is a tool for GitLab owners and administrators to be
-able to track important events such as who performed certain actions and the
-time they happened. These actions could be, for example, change a user
+**Audit Events** is a tool for GitLab owners and administrators
+to track important events such as who performed certain actions and the
+time they happened. For example, these actions could be a change to a user
permission level, who added a new user, or who removed a user.
-## Use-cases
+## Use cases
-- Check who the person was that changed the permission level of a particular
- user for a project in GitLab.
-- Use it to track which users have access to a certain group of projects
- in GitLab, and who gave them that permission level.
+- Check who changed the permission level of a particular
+ user for a GitLab project.
+- Track which users have access to a certain group of projects
+ in GitLab, and who gave them that permission level.
## List of events
There are two kinds of events logged:
-- Events scoped to the group or project, used by group / project managers
- to look up who made what change.
+- Events scoped to the group or project, used by group and project managers
+ to look up who made a change.
- Instance events scoped to the whole GitLab instance, used by your Compliance team to
perform formal audits.
@@ -36,9 +36,9 @@ You need Owner [permissions] to view the group Audit Events page.
To view a group's audit events, navigate to **Group > Settings > Audit Events**.
From there, you can see the following actions:
-- Group name/path changed
+- Group name or path changed
- Group repository size limit changed
-- Group created/deleted
+- Group created or deleted
- Group changed visibility
- User was added to group and with which [permissions]
- Permissions changes of a user assigned to a group
@@ -48,11 +48,11 @@ From there, you can see the following actions:
- [Project shared with group](../user/project/members/share_project_with_groups.md)
and with which [permissions]
- Removal of a previously shared group with a project
-- LFS enabled/disabled
+- LFS enabled or disabled
- Shared runners minutes limit changed
-- Membership lock enabled/disabled
-- Request access enabled/disabled
-- 2FA enforcement/grace period changed
+- Membership lock enabled or disabled
+- Request access enabled or disabled
+- 2FA enforcement or grace period changed
- Roles allowed to create project changed
Group events can also be accessed via the [Group Audit Events API](../api/audit_events.md#group-audit-events-starter)
@@ -65,8 +65,8 @@ You need Maintainer [permissions] or higher to view the project Audit Events pag
To view a project's audit events, navigate to **Project > Settings > Audit Events**.
From there, you can see the following actions:
-- Added/removed deploy keys
-- Project created/deleted/renamed/moved(transferred)/changed path
+- Added or removed deploy keys
+- Project created, deleted, renamed, moved (transferred), or changed path
- Project changed visibility level
- User was added to project and with which [permissions]
- Permission changes of a user assigned to a project
@@ -75,10 +75,13 @@ From there, you can see the following actions:
- Project repository was downloaded
- Project was archived
- Project was unarchived
-- Added/removed/updated protected branches
+- Added, removed, or updated protected branches
- Release was added to a project
- Release was updated
- Release milestone associations changed
+- Permission to approve merge requests by committers was updated ([introduced](https://gitlab.com/gitlab-org/gitlab/issues/7531) in GitLab 12.9)
+- Permission to approve merge requests by authors was updated ([introduced](https://gitlab.com/gitlab-org/gitlab/issues/7531) in GitLab 12.9)
+- Number of required approvals was updated ([introduced](https://gitlab.com/gitlab-org/gitlab/issues/7531) in GitLab 12.9)
### Instance events **(PREMIUM ONLY)**
@@ -94,20 +97,21 @@ In addition to the group and project events, the following user actions are also
recorded:
- Failed Logins
-- Sign-in events and the authentication type (standard, LDAP, OmniAuth, etc.)
+- Sign-in events and the authentication type (such as standard, LDAP, or OmniAuth)
- Added SSH key
-- Added/removed email
+- Added or removed email
- Changed password
- Ask for password reset
- Grant OAuth access
-- Started/stopped user impersonation
+- Started or stopped user impersonation
- Changed username ([introduced](https://gitlab.com/gitlab-org/gitlab/issues/7797) in GitLab 12.8)
- User was deleted ([introduced](https://gitlab.com/gitlab-org/gitlab/issues/251) in GitLab 12.8)
- User was added ([introduced](https://gitlab.com/gitlab-org/gitlab/issues/251) in GitLab 12.8)
- User was blocked via Admin Area ([introduced](https://gitlab.com/gitlab-org/gitlab/issues/251) in GitLab 12.8)
+- User was blocked via API ([introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/25872) in GitLab 12.9)
-It is possible to filter particular actions by choosing an audit data type from
-the filter dropdown box. You can further filter by specific group, project or user
+It's possible to filter particular actions by choosing an audit data type from
+the filter dropdown box. You can further filter by specific group, project, or user
(for authentication events).
![audit log](img/audit_log.png)
@@ -116,8 +120,8 @@ Instance events can also be accessed via the [Instance Audit Events API](../api/
### Missing events
-Some events are not being tracked in Audit Events. Please see the following
-epics for more detail on which events are not being tracked and our progress
+Some events are not tracked in Audit Events. See the following
+epics for more detail on which events are not being tracked, and our progress
on adding these events into GitLab:
- [Project settings and activity](https://gitlab.com/groups/gitlab-org/-/epics/474)
@@ -129,8 +133,8 @@ on adding these events into GitLab:
#### Repository push
The current architecture of audit events is not prepared to receive a very high amount of records.
-It may make your project/admin audit logs UI very busy and the disk space consumed by the
-`audit_events` Postgres table will increase considerably. Thus, it's disabled by default
+It may make the user interface for your project or audit logs very busy, and the disk space consumed by the
+`audit_events` Postgres table will increase considerably. It's disabled by default
to prevent performance degradations on GitLab instances with very high Git write traffic.
In an upcoming release, Audit Logs for Git push events will be enabled
diff --git a/doc/administration/auth/README.md b/doc/administration/auth/README.md
index 2fc9db0632e..f30d6be1775 100644
--- a/doc/administration/auth/README.md
+++ b/doc/administration/auth/README.md
@@ -10,6 +10,7 @@ providers:
- [Auth0](../../integration/auth0.md)
- [Authentiq](authentiq.md)
+- [AWS Cognito](cognito.md)
- [Azure](../../integration/azure.md)
- [Bitbucket Cloud](../../integration/bitbucket.md)
- [CAS](../../integration/cas.md)
diff --git a/doc/administration/auth/authentiq.md b/doc/administration/auth/authentiq.md
index d15beb4f6fc..e9b32b64160 100644
--- a/doc/administration/auth/authentiq.md
+++ b/doc/administration/auth/authentiq.md
@@ -66,7 +66,10 @@ Authentiq will generate a Client ID and the accompanying Client Secret for you t
On the sign in page there should now be an Authentiq icon below the regular sign in form. Click the icon to begin the authentication process.
-- If the user has the Authentiq ID app installed in their iOS or Android device, they can scan the QR code, decide what personal details to share and sign in to your GitLab installation.
+- If the user has the Authentiq ID app installed in their iOS or Android device, they can:
+ 1. Scan the QR code.
+ 1. Decide what personal details to share.
+ 1. Sign in to your GitLab installation.
- If not, they will be prompted to download the app and then follow the procedure above.
If everything goes right, the user will be returned to GitLab and will be signed in.
diff --git a/doc/administration/auth/cognito.md b/doc/administration/auth/cognito.md
new file mode 100644
index 00000000000..84923952131
--- /dev/null
+++ b/doc/administration/auth/cognito.md
@@ -0,0 +1,79 @@
+# Amazon Web Services Cognito
+
+Amazon Cognito lets you add user sign-up, sign-in, and access control to your GitLab instance.
+The following documentation describes how to enable Cognito as an OAuth2 provider.
+
+## Configure AWS Cognito
+
+To enable the [AWS Cognito](https://aws.amazon.com/cognito/) OAuth2 OmniAuth provider, register your application with Cognito,
+where it will generate a Client ID and Client Secret for your application.
+Any settings you configure in the following procedure can be modified later.
+The following steps enable AWS Cognito as an authentication provider:
+
+1. Sign in to the [AWS console](https://console.aws.amazon.com/console/home).
+1. Select **Cognito** from the **Services** menu.
+1. Select **Manage User Pools**, and click the **Create a user pool** button in the top right corner.
+1. Enter the pool name and then click the **Step through settings** button.
+1. Under **How do you want your end users to sign in?**, select **Email address or phone number** and **Allow email addresses**.
+1. Under **Which standard attributes do you want to require?**, select **email**.
+1. Go through the remaining configuration steps and set the rest of the settings to suit your needs; in a basic setup, they are not related to the GitLab configuration.
+1. In the **App clients** settings, click **Add an app client**, add **App client name** and select the **Enable username password based authentication** check box.
+1. Click **Create app client**.
+1. In the next step, you can set up AWS Lambda functions for sending emails. You can then finish creating the pool.
+1. After creating the user pool, go to **App client settings** and provide the required information:
+
+ - **Enabled Identity Providers** - select all
+ - **Callback URL** - `https://gitlab.example.com/users/auth/cognito/callback`
+ - Substitute the URL of your GitLab instance for `gitlab.example.com`
+ - **Allowed OAuth Flows** - Authorization code grant
+ - **Allowed OAuth Scopes** - `email` and `openid`
+
+1. Save changes for the app client settings.
+1. Under **Domain name**, include the AWS domain name for your AWS Cognito application.
+1. Under **App Clients**, find your **App client id** and **App client secret**. These values correspond to the OAuth2 Client ID and Client Secret. Save these values.
+
+## Configure GitLab
+
+1. See [Initial OmniAuth Configuration](../../integration/omniauth.md#initial-omniauth-configuration) for initial settings.
+1. On your GitLab server, open the configuration file.
+
+ **For Omnibus installations**
+
+ ```shell
+ sudo editor /etc/gitlab/gitlab.rb
+ ```
+
+1. In the following code block, substitute the Client ID (`app_id`), Client Secret (`app_secret`), and the Amazon domain name (`site`) for your AWS Cognito application.
+Include the code block in the `/etc/gitlab/gitlab.rb` file:
+
+ ```ruby
+ gitlab_rails['omniauth_allow_single_sign_on'] = ['cognito']
+ gitlab_rails['omniauth_providers'] = [
+ {
+ "name" => "cognito",
+ "app_id" => "CLIENT ID",
+ "app_secret" => "CLIENT SECRET",
+ "args" => {
+ client_options: {
+ 'site' => 'https://your_domain.auth.your_region.amazoncognito.com',
+ 'authorize_url' => '/login',
+ 'token_url' => '/oauth2/token',
+ 'user_info_url' => '/oauth2/userInfo'
+ },
+ user_response_structure: {
+ root_path: [],
+ attributes: { nickname: 'email'}
+ },
+ name: 'cognito',
+ strategy_class: "OmniAuth::Strategies::OAuth2Generic"
+ }
+ }
+ ]
+ ```
+
+1. Save the configuration file.
+1. [Reconfigure](../restart_gitlab.md#omnibus-gitlab-reconfigure) GitLab for the changes to take effect.
+
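+For installations from source, a roughly equivalent provider entry can be added to the
+`omniauth` providers section of `config/gitlab.yml`, following the same pattern used for
+other OmniAuth providers. The block below is only a sketch that mirrors the Omnibus
+settings above; verify the placeholders and layout against your own `gitlab.yml` before
+using it:
+
+```yaml
+# Sketch only: mirrors the Omnibus example above. Replace CLIENT ID, CLIENT SECRET,
+# and the Amazon domain name with the values from your Cognito app client.
+- { name: 'cognito',
+    app_id: 'CLIENT ID',
+    app_secret: 'CLIENT SECRET',
+    args: {
+      client_options: {
+        site: 'https://your_domain.auth.your_region.amazoncognito.com',
+        authorize_url: '/login',
+        token_url: '/oauth2/token',
+        user_info_url: '/oauth2/userInfo'
+      },
+      user_response_structure: {
+        root_path: [],
+        attributes: { nickname: 'email' }
+      },
+      name: 'cognito',
+      strategy_class: 'OmniAuth::Strategies::OAuth2Generic'
+    }
+  }
+```
+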
+Your sign-in page should now display a Cognito button below the regular sign-in form.
+To begin the authentication process, click the button. AWS Cognito then asks the user to sign in and authorize the GitLab application.
+If successful, the user is redirected and signed in to your GitLab instance.
diff --git a/doc/administration/auth/crowd.md b/doc/administration/auth/crowd.md
index da6c01ec382..6c2e4edac31 100644
--- a/doc/administration/auth/crowd.md
+++ b/doc/administration/auth/crowd.md
@@ -54,7 +54,7 @@ Authenticate to GitLab using the Atlassian Crowd OmniAuth provider.
**Source:**
- ```
+ ```yaml
- { name: 'crowd',
args: {
crowd_server_url: 'CROWD_SERVER_URL',
diff --git a/doc/administration/auth/how_to_configure_ldap_gitlab_ce/index.md b/doc/administration/auth/how_to_configure_ldap_gitlab_ce/index.md
index 35620be7d7e..acb39ae0f78 100644
--- a/doc/administration/auth/how_to_configure_ldap_gitlab_ce/index.md
+++ b/doc/administration/auth/how_to_configure_ldap_gitlab_ce/index.md
@@ -14,7 +14,7 @@ GitLab has supported LDAP integration since [version 2.2](https://about.gitlab.c
### Choosing an LDAP Server
-The main reason organizations choose to utilize a LDAP server is to keep the entire organization's user base consolidated into a central repository. Users can access multiple applications and systems across the IT environment using a single login. Because LDAP is an open, vendor-neutral, industry standard application protocol, the number of applications using LDAP authentication continues to increase.
+The main reason organizations choose to utilize an LDAP server is to keep the entire organization's user base consolidated into a central repository. Users can access multiple applications and systems across the IT environment using a single login. Because LDAP is an open, vendor-neutral, industry-standard application protocol, the number of applications using LDAP authentication continues to increase.
There are many commercial and open source [directory servers](https://en.wikipedia.org/wiki/Directory_service#LDAP_implementations) that support the LDAP protocol. Deciding on the right directory server highly depends on the existing IT environment in which the server will be integrated with.
@@ -32,9 +32,9 @@ For example, [Active Directory](https://docs.microsoft.com/en-us/previous-versio
We won't cover the installation and configuration of Windows Server or Active Directory Domain Services in this tutorial. There are a number of resources online to guide you through this process:
-- Install Windows Server 2012 - (`technet.microsoft.com`) - [Installing Windows Server 2012](https://docs.microsoft.com/en-us/previous-versions/windows/it-pro/windows-server-2012-R2-and-2012/jj134246(v=ws.11))
+- Install Windows Server 2012 - (`technet.microsoft.com`) - [Installing Windows Server 2012](https://docs.microsoft.com/en-us/previous-versions/windows/it-pro/windows-server-2012-R2-and-2012/jj134246(v=ws.11))
-- Install Active Directory Domain Services (AD DS) (`technet.microsoft.com`)- [Install Active Directory Domain Services](https://docs.microsoft.com/en-us/windows-server/identity/ad-ds/deploy/install-active-directory-domain-services--level-100-#BKMK_PS)
+- Install Active Directory Domain Services (AD DS) (`technet.microsoft.com`) - [Install Active Directory Domain Services](https://docs.microsoft.com/en-us/windows-server/identity/ad-ds/deploy/install-active-directory-domain-services--level-100-#BKMK_PS)
> **Shortcut:** You can quickly install AD DS via PowerShell using
`Install-WindowsFeature AD-Domain-Services -IncludeManagementTools`
diff --git a/doc/administration/auth/ldap-ee.md b/doc/administration/auth/ldap-ee.md
index 5217cd5114a..c5e4bee3c58 100644
--- a/doc/administration/auth/ldap-ee.md
+++ b/doc/administration/auth/ldap-ee.md
@@ -392,7 +392,7 @@ Group sync was written to be as performant as possible. Data is cached, database
queries are optimized, and LDAP queries are minimized. The last benchmark run
revealed the following metrics:
-For 20,000 LDAP users, 11,000 LDAP groups and 1,000 GitLab groups with 10
+For 20000 LDAP users, 11000 LDAP groups and 1000 GitLab groups with 10
LDAP group links each:
- Initial sync (no existing members assigned in GitLab) took 1.8 hours
@@ -470,7 +470,7 @@ step of the sync.
sudo gitlab-rails console
# For installations from source
- sudo -u git -H bundle exec rails console production
+ sudo -u git -H bundle exec rails console -e production
```
1. Set the log level to debug (only for this session):
@@ -493,7 +493,7 @@ step of the sync.
1. Run a group sync for this particular group.
```ruby
- EE::Gitlab::Auth::LDAP::Sync::Group.execute_all_providers(group)
+ EE::Gitlab::Auth::Ldap::Sync::Group.execute_all_providers(group)
```
1. Look through the output of the sync. See [example log output](#example-log-output)
@@ -503,11 +503,11 @@ step of the sync.
run the following query:
```ruby
- adapter = Gitlab::Auth::LDAP::Adapter.new('ldapmain') # If `main` is the LDAP provider
- ldap_group = EE::Gitlab::Auth::LDAP::Group.find_by_cn('group_cn_here', adapter)
+ adapter = Gitlab::Auth::Ldap::Adapter.new('ldapmain') # If `main` is the LDAP provider
+ ldap_group = EE::Gitlab::Auth::Ldap::Group.find_by_cn('group_cn_here', adapter)
# Output
- => #<EE::Gitlab::Auth::LDAP::Group:0x007fcbdd0bb6d8
+ => #<EE::Gitlab::Auth::Ldap::Group:0x007fcbdd0bb6d8
```
1. Query the LDAP group's member DNs and see if the user's DN is in the list.
diff --git a/doc/administration/auth/ldap.md b/doc/administration/auth/ldap.md
index 339710624fa..067fdfd0018 100644
--- a/doc/administration/auth/ldap.md
+++ b/doc/administration/auth/ldap.md
@@ -405,7 +405,7 @@ production:
Tip: If you want to limit access to the nested members of an Active Directory
group, you can use the following syntax:
-```text
+```plaintext
(memberOf:1.2.840.113556.1.4.1941:=CN=My Group,DC=Example,DC=com)
```
@@ -423,13 +423,13 @@ The `user_filter` DN can contain special characters. For example:
- A comma:
- ```text
+ ```plaintext
OU=GitLab, Inc,DC=gitlab,DC=com
```
- Open and close brackets:
- ```text
+ ```plaintext
OU=Gitlab (Inc),DC=gitlab,DC=com
```
@@ -438,13 +438,13 @@ The `user_filter` DN can contain special characters. For example:
- Escape commas with `\2C`. For example:
- ```text
+ ```plaintext
OU=GitLab\2C Inc,DC=gitlab,DC=com
```
- Escape open and close brackets with `\28` and `\29`, respectively. For example:
- ```text
+ ```plaintext
OU=Gitlab \28Inc\29,DC=gitlab,DC=com
```
@@ -461,7 +461,8 @@ LDAP email address, and then sign into GitLab via their LDAP credentials.
## Enabling LDAP username lowercase
-Some LDAP servers, depending on their configurations, can return uppercase usernames. This can lead to several confusing issues like, for example, creating links or namespaces with uppercase names.
+Some LDAP servers, depending on their configurations, can return uppercase usernames.
+This can lead to several confusing issues such as creating links or namespaces with uppercase names.
GitLab can automatically lowercase usernames provided by the LDAP server by enabling
the configuration option `lowercase_usernames`. By default, this configuration option is `false`.
@@ -602,7 +603,7 @@ GitLab. Common combinations are `encryption: 'plain'` and `port: 389`, OR
If GitLab cannot reach your LDAP endpoint, you will see a message like this:
-```
+```plaintext
Could not authenticate you from Ldapmain because "Connection timed out - user specified timeout".
```
diff --git a/doc/administration/auth/oidc.md b/doc/administration/auth/oidc.md
index 6f59cffc3cc..728be699772 100644
--- a/doc/administration/auth/oidc.md
+++ b/doc/administration/auth/oidc.md
@@ -34,6 +34,7 @@ The OpenID Connect will provide you with a client details and secret for you to
gitlab_rails['omniauth_providers'] = [
{ 'name' => 'openid_connect',
'label' => '<your_oidc_label>',
+ 'icon' => '<custom_provider_icon>',
'args' => {
'name' => 'openid_connect',
'scope' => ['openid','profile'],
@@ -42,6 +43,7 @@ The OpenID Connect will provide you with a client details and secret for you to
'discovery' => true,
'client_auth_method' => 'query',
'uid_field' => '<uid_field>',
+ 'send_scope_to_token_endpoint' => 'false',
'client_options' => {
'identifier' => '<your_oidc_client_id>',
'secret' => '<your_oidc_client_secret>',
@@ -57,6 +59,7 @@ The OpenID Connect will provide you with a client details and secret for you to
```yaml
- { name: 'openid_connect',
label: '<your_oidc_label>',
+ icon: '<custom_provider_icon>',
args: {
name: 'openid_connect',
scope: ['openid','profile'],
@@ -65,6 +68,7 @@ The OpenID Connect will provide you with a client details and secret for you to
discovery: true,
client_auth_method: 'query',
uid_field: '<uid_field>',
+ send_scope_to_token_endpoint: false,
client_options: {
identifier: '<your_oidc_client_id>',
secret: '<your_oidc_client_secret>',
@@ -80,6 +84,8 @@ The OpenID Connect will provide you with a client details and secret for you to
1. For the configuration above, change the values for the provider to match your OpenID Connect client setup. Use the following as a guide:
- `<your_oidc_label>` is the label that will be displayed on the login page.
+  - `<custom_provider_icon>` (optional) is the icon that will be displayed on the login page. Icons for the major social login platforms are built into GitLab,
+ but can be overridden by specifying this parameter. Both local paths and absolute URLs are accepted.
- `<your_oidc_url>` (optional) is the URL that points to the OpenID Connect provider. For example, `https://example.com/auth/realms/your-realm`.
If this value is not provided, the URL is constructed from the `client_options` in the following format: `<client_options.scheme>://<client_options.host>:<client_options.port>`.
- If `discovery` is set to `true`, the OpenID Connect provider will try to auto discover the client options using `<your_oidc_url>/.well-known/openid-configuration`. Defaults to `false`.
@@ -92,6 +98,8 @@ The OpenID Connect will provide you with a client details and secret for you to
- If not specified, defaults to `basic`.
- `<uid_field>` (optional) is the field name from the `user_info` details that will be used as `uid` value. For example, `preferred_username`.
If this value is not provided or the field with the configured value is missing from the `user_info` details, the `uid` will use the `sub` field.
+ - `send_scope_to_token_endpoint` is `true` by default. In other words, the `scope` parameter is normally included in requests to the token endpoint.
+ However, if your OpenID Connect provider does not accept the `scope` parameter in such requests, set this to `false`.
- `client_options` are the OpenID Connect client-specific options. Specifically:
- `identifier` is the client identifier as configured in the OpenID Connect service provider.
- `secret` is the client secret as configured in the OpenID Connect service provider.
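As a worked illustration of the fallback above (all values hypothetical): if `discovery` is enabled and no provider URL is given, GitLab derives the URL from `client_options` as `scheme://host:port`.

```ruby
# Sketch only -- hypothetical client_options, not a complete provider entry.
client_options = {
  'scheme' => 'https',
  'host'   => 'keycloak.example.com',   # hypothetical OpenID Connect provider host
  'port'   => 8443,
  'identifier' => '<your_oidc_client_id>',
  'secret'     => '<your_oidc_client_secret>',
  'redirect_uri' => 'https://gitlab.example.com/users/auth/openid_connect/callback'
}
# Derived provider URL:       https://keycloak.example.com:8443
# Discovery document fetched: https://keycloak.example.com:8443/.well-known/openid-configuration
```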
diff --git a/doc/administration/auth/okta.md b/doc/administration/auth/okta.md
index 7b5effe3d77..4fd37b51f24 100644
--- a/doc/administration/auth/okta.md
+++ b/doc/administration/auth/okta.md
@@ -42,21 +42,6 @@ Now that the Okta app is configured, it's time to enable it in GitLab.
## Configure GitLab
-1. On your GitLab server, open the configuration file:
-
- **For Omnibus GitLab installations**
-
- ```shell
- sudo editor /etc/gitlab/gitlab.rb
- ```
-
- **For installations from source**
-
- ```shell
- cd /home/git/gitlab
- sudo -u git -H editor config/gitlab.yml
- ```
-
1. See [Initial OmniAuth Configuration](../../integration/omniauth.md#initial-omniauth-configuration)
for initial settings.
@@ -66,13 +51,19 @@ Now that the Okta app is configured, it's time to enable it in GitLab.
**For Omnibus GitLab installations**
+ Edit `/etc/gitlab/gitlab.rb`:
+
```ruby
gitlab_rails['omniauth_allow_single_sign_on'] = ['saml']
gitlab_rails['omniauth_block_auto_created_users'] = false
```
+ ---
+
**For installations from source**
+ Edit `config/gitlab.yml`:
+
```yaml
allow_single_sign_on: ["saml"]
block_auto_created_users: false
@@ -83,12 +74,18 @@ Now that the Okta app is configured, it's time to enable it in GitLab.
**For Omnibus GitLab installations**
+ Edit `/etc/gitlab/gitlab.rb`:
+
```ruby
gitlab_rails['omniauth_auto_link_saml_user'] = true
```
+ ---
+
**For installations from source**
+ Edit `config/gitlab.yml`:
+
```yaml
auto_link_saml_user: true
```
diff --git a/doc/administration/compliance.md b/doc/administration/compliance.md
index 44e1cc8059a..8cd10f5ea4e 100644
--- a/doc/administration/compliance.md
+++ b/doc/administration/compliance.md
@@ -14,6 +14,7 @@ GitLab’s [security features](../security/README.md) may also help you meet rel
|**[Lock project membership to group](../user/group/index.md#member-lock-starter)**<br>Group owners can prevent new members from being added to projects within a group.|Starter+|✓|
|**[LDAP group sync](auth/ldap-ee.md#group-sync)**<br>GitLab Enterprise Edition gives admins the ability to automatically sync groups and manage SSH keys, permissions, and authentication, so you can focus on building your product, not configuring your tools.|Starter+||
|**[LDAP group sync filters](auth/ldap-ee.md#group-sync)**<br>GitLab Enterprise Edition Premium gives more flexibility to synchronize with LDAP based on filters, meaning you can leverage LDAP attributes to map GitLab permissions.|Premium+||
-|**[Audit logs](audit_events.md)**<br>To maintain the integrity of your code, GitLab Enterprise Edition Premium gives admins the ability to view any modifications made within the GitLab server in an advanced audit log system, so you can control, analyze and track every change.|Premium+||
+|**[Audit logs](audit_events.md)**<br>To maintain the integrity of your code, GitLab Enterprise Edition Premium gives admins the ability to view any modifications made within the GitLab server in an advanced audit log system, so you can control, analyze, and track every change.|Premium+||
|**[Auditor users](auditor_users.md)**<br>Auditor users are users who are given read-only access to all projects, groups, and other resources on the GitLab instance.|Premium+||
|**[Credentials inventory](../user/admin_area/credentials_inventory.md)**<br>With a credentials inventory, GitLab administrators can keep track of the credentials used by all of the users in their GitLab instance. |Ultimate||
+|**Separation of Duties using [Protected branches](../user/project/protected_branches.md#protected-branches-approval-by-code-owners-premium) and [custom CI Configuration Paths](../ci/pipelines/settings.md#custom-ci-configuration-path)**<br>GitLab Silver and Premium users can leverage GitLab's cross-project YAML configurations to define deployers of code and developers of code. View the [Separation of Duties Deploy Project](https://gitlab.com/guided-explorations/separation-of-duties-deploy/blob/master/README.md) and [Separation of Duties Project](https://gitlab.com/guided-explorations/separation-of-duties/blob/master/README.md) to see how to use this setup to define these roles.|Premium+||
diff --git a/doc/administration/geo/disaster_recovery/background_verification.md b/doc/administration/geo/disaster_recovery/background_verification.md
index c042e3e9872..6852d08cc07 100644
--- a/doc/administration/geo/disaster_recovery/background_verification.md
+++ b/doc/administration/geo/disaster_recovery/background_verification.md
@@ -17,7 +17,7 @@ You can restore it from backup or remove it from the **primary** node to resolve
If verification succeeds on the **primary** node but fails on the **secondary** node,
this indicates that the object was corrupted during the replication process.
Geo actively tries to correct verification failures, marking the repository to
-be resynced with a backoff period. If you want to reset the verification for
+be resynced with a back-off period. If you want to reset the verification for
these failures, you should follow [these instructions][reset-verification].
If verification is lagging significantly behind replication, consider giving
@@ -51,14 +51,14 @@ Feature.enable('geo_repository_verification')
## Repository verification
-Navigate to the **Admin Area > Geo** dashboard on the **primary** node and expand
+Navigate to the **{admin}** **Admin Area >** **{location-dot}** **Geo** dashboard on the **primary** node and expand
the **Verification information** tab for that node to view automatic checksumming
status for repositories and wikis. Successes are shown in green, pending work
in grey, and failures in red.
![Verification status](img/verification-status-primary.png)
-Navigate to the **Admin Area > Geo** dashboard on the **secondary** node and expand
+Navigate to the **{admin}** **Admin Area >** **{location-dot}** **Geo** dashboard on the **secondary** node and expand
the **Verification information** tab for that node to view automatic verification
status for repositories and wikis. As with checksumming, successes are shown in
green, pending work in grey, and failures in red.
@@ -85,7 +85,7 @@ data. The default and recommended re-verification interval is 7 days, though
an interval as short as 1 day can be set. Shorter intervals reduce risk but
increase load and vice versa.
-Navigate to the **Admin Area > Geo** dashboard on the **primary** node, and
+Navigate to the **{admin}** **Admin Area >** **{location-dot}** **Geo** dashboard on the **primary** node, and
click the **Edit** button for the **primary** node to customize the minimum
re-verification interval:
@@ -114,9 +114,9 @@ Feature.enable('geo_repository_reverification')
## Reset verification for projects where verification has failed
Geo actively tries to correct verification failures, marking the repository to
-be resynced with a backoff period. If you want to reset them manually, this
+be resynced with a back-off period. If you want to reset them manually, this
Rake task marks projects where verification has failed or the checksum does not match
-to be resynced without the backoff period:
+to be resynced without the back-off period:
For repositories:
@@ -134,7 +134,7 @@ sudo gitlab-rake geo:verification:wiki:reset
If the **primary** and **secondary** nodes have a checksum verification mismatch, the cause may not be apparent. To find the cause of a checksum mismatch:
-1. Navigate to the **Admin Area > Projects** dashboard on the **primary** node, find the
+1. Navigate to the **{admin}** **Admin Area >** **{overview}** **Overview > Projects** dashboard on the **primary** node, find the
   project whose checksum differences you want to check, and click on the
**Edit** button:
![Projects dashboard](img/checksum-differences-admin-projects.png)
@@ -176,7 +176,7 @@ progress to include them in [ee-1430]. For now, you can verify their integrity
manually by following [these instructions][foreground-verification] on both
nodes, and comparing the output between them.
-In GitLab EE 12.1, Geo calculates checksums for attachments, LFS objects and
+In GitLab EE 12.1, Geo calculates checksums for attachments, LFS objects, and
archived traces on secondary nodes after the transfer, compares them with the
stored checksums, and rejects transfers if mismatched. Please note that Geo
currently does not support an automatic way to verify these data if they have
diff --git a/doc/administration/geo/disaster_recovery/index.md b/doc/administration/geo/disaster_recovery/index.md
index 5455e5914e1..7ecb4893c88 100644
--- a/doc/administration/geo/disaster_recovery/index.md
+++ b/doc/administration/geo/disaster_recovery/index.md
@@ -75,7 +75,7 @@ must disable the **primary** node.
single recommendation. You may need to:
- Reconfigure the load balancers.
- - Change DNS records (e.g., point the primary DNS record to the **secondary**
+ - Change DNS records (for example, point the primary DNS record to the **secondary**
node in order to stop usage of the **primary** node).
- Stop the virtual servers.
- Block traffic through a firewall.
@@ -139,6 +139,8 @@ do this manually.
sudo gitlab-pg-ctl promote
```
+ In GitLab 12.8 and earlier, see [Message: `sudo: gitlab-pg-ctl: command not found`](../replication/troubleshooting.md#message-sudo-gitlab-pg-ctl-command-not-found).
+
1. Edit `/etc/gitlab/gitlab.rb` on every machine in the **secondary** to
reflect its new status as **primary** by removing any lines that enabled the
`geo_secondary_role`:
@@ -205,20 +207,20 @@ secondary domain, like changing Git remotes and API URLs.
This command will use the changed `external_url` configuration defined
in `/etc/gitlab/gitlab.rb`.
-1. For GitLab 11.11 through 12.7 only, you may need to update the primary
+1. For GitLab 11.11 through 12.7 only, you may need to update the **primary**
node's name in the database. This bug has been fixed in GitLab 12.8.
To determine if you need to do this, search for the
`gitlab_rails["geo_node_name"]` setting in your `/etc/gitlab/gitlab.rb`
file. If it is commented out with `#` or not found at all, then you will
- need to update the primary node's name in the database. You can search for it
+ need to update the **primary** node's name in the database. You can search for it
like so:
```shell
grep "geo_node_name" /etc/gitlab/gitlab.rb
```
- To update the primary node's name in the database:
+ To update the **primary** node's name in the database:
```shell
gitlab-rails runner 'Gitlab::Geo.primary_node.update!(name: GeoNode.current_node_name)'
diff --git a/doc/administration/geo/disaster_recovery/planned_failover.md b/doc/administration/geo/disaster_recovery/planned_failover.md
index cd3d5a88de7..8af60a42fbb 100644
--- a/doc/administration/geo/disaster_recovery/planned_failover.md
+++ b/doc/administration/geo/disaster_recovery/planned_failover.md
@@ -36,7 +36,7 @@ Repository-centric strategies for using `rsync` effectively can be found in the
be adapted for use with any other file-based data, such as GitLab Pages (to
be found in `/var/opt/gitlab/gitlab-rails/shared/pages` if using Omnibus).
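For illustration only, a hedged `rsync` sketch for that Pages data; the hostname is hypothetical, and you should verify paths and test the command against your own environment before relying on it:

```shell
# Sketch: mirror Omnibus GitLab Pages data to another node over SSH.
# secondary.example.com is a hypothetical hostname; --delete removes files
# on the target that no longer exist on the source, so use with care.
rsync -aP --delete /var/opt/gitlab/gitlab-rails/shared/pages/ \
  root@secondary.example.com:/var/opt/gitlab/gitlab-rails/shared/pages/
```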
-## Pre-flight checks
+## Preflight checks
Follow these steps before scheduling a planned failover to ensure the process
will go smoothly.
@@ -92,7 +92,7 @@ The maintenance window won't end until Geo replication and verification is
completely finished. To keep the window as short as possible, you should
ensure these processes are as close to 100% as possible during active use.
-Navigate to the **Admin Area > Geo** dashboard on the **secondary** node to
+Navigate to the **{admin}** **Admin Area >** **{location-dot}** **Geo** dashboard on the **secondary** node to
review status. Replicated objects (shown in green) should be close to 100%,
and there should be no failures (shown in red). If a large proportion of
objects aren't yet replicated (shown in grey), consider giving the node more
@@ -117,8 +117,8 @@ This [content was moved to another location][background-verification].
### Notify users of scheduled maintenance
-On the **primary** node, navigate to **Admin Area > Messages**, add a broadcast
-message. You can check under **Admin Area > Geo** to estimate how long it
+On the **primary** node, navigate to **{admin}** **Admin Area >** **{bullhorn}** **Messages**, add a broadcast
+message. You can check under **{admin}** **Admin Area >** **{location-dot}** **Geo** to estimate how long it
will take to finish syncing. An example message would be:
> A scheduled maintenance will take place at XX:XX UTC. We expect it to take
@@ -162,8 +162,8 @@ access to the **primary** node during the maintenance window.
existing Git repository with an SSH remote URL. The server should refuse
connection.
-1. Disable non-Geo periodic background jobs on the primary node by navigating
- to **Admin Area > Monitoring > Background Jobs > Cron** , pressing `Disable All`,
+1. Disable non-Geo periodic background jobs on the **primary** node by navigating
+ to **{admin}** **Admin Area >** **{monitor}** **Monitoring > Background Jobs > Cron**, pressing `Disable All`,
and then pressing `Enable` for the `geo_sidekiq_cron_config_worker` cron job.
This job will re-enable several other cron jobs that are essential for planned
failover to complete successfully.
@@ -172,11 +172,11 @@ access to the **primary** node during the maintenance window.
1. If you are manually replicating any data not managed by Geo, trigger the
final replication process now.
-1. On the **primary** node, navigate to **Admin Area > Monitoring > Background Jobs > Queues**
+1. On the **primary** node, navigate to **{admin}** **Admin Area >** **{monitor}** **Monitoring > Background Jobs > Queues**
and wait for all queues except those with `geo` in the name to drop to 0.
These queues contain work that has been submitted by your users; failing over
before it is completed will cause the work to be lost.
-1. On the **primary** node, navigate to **Admin Area > Geo** and wait for the
+1. On the **primary** node, navigate to **{admin}** **Admin Area >** **{location-dot}** **Geo** and wait for the
following conditions to be true of the **secondary** node you are failing over to:
   - All replication meters reach 100% replicated, 0% failures.
@@ -184,10 +184,10 @@ access to the **primary** node during the maintenance window.
- Database replication lag is 0ms.
- The Geo log cursor is up to date (0 events behind).
-1. On the **secondary** node, navigate to **Admin Area > Monitoring > Background Jobs > Queues**
+1. On the **secondary** node, navigate to **{admin}** **Admin Area >** **{monitor}** **Monitoring > Background Jobs > Queues**
and wait for all the `geo` queues to drop to 0 queued and 0 running jobs.
1. On the **secondary** node, use [these instructions][foreground-verification]
- to verify the integrity of CI artifacts, LFS objects and uploads in file
+ to verify the integrity of CI artifacts, LFS objects, and uploads in file
storage.
At this point, your **secondary** node will contain an up-to-date copy of everything the
@@ -201,7 +201,7 @@ Finally, follow the [Disaster Recovery docs][disaster-recovery] to promote the
Once it is completed, the maintenance window is over! Your new **primary** node will now
begin to diverge from the old one. If problems do arise at this point, failing
back to the old **primary** node [is possible][bring-primary-back], but likely to result
-in the loss of any data uploaded to the new primary in the meantime.
+in the loss of any data uploaded to the new **primary** in the meantime.
Don't forget to remove the broadcast message after failover is complete.
diff --git a/doc/administration/geo/replication/configuration.md b/doc/administration/geo/replication/configuration.md
index 5c8ad18a4df..1434eeb61af 100644
--- a/doc/administration/geo/replication/configuration.md
+++ b/doc/administration/geo/replication/configuration.md
@@ -107,7 +107,7 @@ keys must be manually replicated to the **secondary** node.
scp root@<primary_node_fqdn>:/etc/ssh/ssh_host_*_key* /etc/ssh
```
- If you only have access through a user with **sudo** privileges:
+ If you only have access through a user with `sudo` privileges:
```shell
# Run this from your primary node:
@@ -153,7 +153,7 @@ keys must be manually replicated to the **secondary** node.
NOTE: **Note:**
The output of the private key and public key commands should show the same fingerprint.
-1. Restart sshd on your **secondary** node:
+1. Restart `sshd` on your **secondary** node:
```shell
# Debian or Ubuntu installations
@@ -184,7 +184,7 @@ keys must be manually replicated to the **secondary** node.
gitlab-ctl reconfigure
```
-1. Visit the **primary** node's **Admin Area > Geo**
+1. Visit the **primary** node's **{admin}** **Admin Area >** **{location-dot}** **Geo**
(`/admin/geo/nodes`) in your browser.
1. Click the **New node** button.
![Add secondary node](img/adding_a_secondary_node.png)
@@ -231,7 +231,7 @@ You can login to the **secondary** node with the same credentials as used for th
Using Hashed Storage significantly improves Geo replication. Project and group
renames no longer require synchronization between nodes.
-1. Visit the **primary** node's **Admin Area > Settings > Repository**
+1. Visit the **primary** node's **{admin}** **Admin Area >** **{settings}** **Settings > Repository**
(`/admin/application_settings/repository`) in your browser.
1. In the **Repository storage** section, check **Use hashed storage paths for newly created and renamed projects**.
@@ -248,7 +248,7 @@ on the **secondary** node.
### Step 6. Enable Git access over HTTP/HTTPS
Geo synchronizes repositories over HTTP/HTTPS, and therefore requires this clone
-method to be enabled. Navigate to **Admin Area > Settings**
+method to be enabled. Navigate to **{admin}** **Admin Area >** **{settings}** **Settings**
(`/admin/application_settings/general`) on the **primary** node, and set
`Enabled Git access protocols` to `Both SSH and HTTP(S)` or `Only HTTP(S)`.
@@ -257,13 +257,13 @@ method to be enabled. Navigate to **Admin Area > Settings**
Your **secondary** node is now configured!
You can log in to the **secondary** node with the same credentials you used for the
-**primary** node. Visit the **secondary** node's **Admin Area > Geo**
+**primary** node. Visit the **secondary** node's **{admin}** **Admin Area >** **{location-dot}** **Geo**
(`/admin/geo/nodes`) in your browser to check if it's correctly identified as a
**secondary** Geo node and if Geo is enabled.
The initial replication, or 'backfill', will probably still be in progress. You
can monitor the synchronization process on each Geo node from the **primary**
-node's Geo Nodes dashboard in your browser.
+node's **Geo Nodes** dashboard in your browser.
![Geo dashboard](img/geo_node_dashboard.png)
diff --git a/doc/administration/geo/replication/database.md b/doc/administration/geo/replication/database.md
index 0e6583741bc..f25aa0e5da8 100644
--- a/doc/administration/geo/replication/database.md
+++ b/doc/administration/geo/replication/database.md
@@ -469,7 +469,7 @@ work:
1. On the **primary** Geo database, enter the PostgreSQL console as an
admin user. If you are using an Omnibus-managed database, log onto the **primary**
- node that is running the PostgreSQL database (the default Omnibus database name is gitlabhq_production):
+ node that is running the PostgreSQL database (the default Omnibus database name is `gitlabhq_production`):
```shell
sudo \
@@ -495,7 +495,7 @@ work:
1. On the **secondary** nodes, change `/etc/gitlab/gitlab.rb`:
- ```
+ ```ruby
geo_postgresql['fdw_external_user'] = 'gitlab_geo_fdw'
```
diff --git a/doc/administration/geo/replication/datatypes.md b/doc/administration/geo/replication/datatypes.md
index 75ce7503c34..f8c3076a38c 100644
--- a/doc/administration/geo/replication/datatypes.md
+++ b/doc/administration/geo/replication/datatypes.md
@@ -13,7 +13,7 @@ We currently distinguish between three different data types:
- [Blobs](#blobs)
- [Database](#database)
-See the list below of each feature or component we replicate, its corresponding data type, replication and
+See the list below of each feature or component we replicate, its corresponding data type, replication, and
verification methods:
| Type | Feature / component | Replication method | Verification method |
@@ -75,7 +75,7 @@ GitLab stores files and blobs such as Issue attachments or LFS objects into eith
- The filesystem in a specific location.
- An Object Storage solution. Object Storage solutions can be:
  - Cloud-based, like Amazon S3 or Google Cloud Storage.
- - Self hosted (like MinIO).
+ - Hosted by you (like MinIO).
- A Storage Appliance that exposes an Object Storage-compatible API.
When using the filesystem store instead of Object Storage, you need to use network mounted filesystems
@@ -97,7 +97,7 @@ as well as permissions and credentials.
PostgreSQL can also hold some level of cached data like HTML rendered Markdown, cached merge-requests diff (this can
also be configured to be offloaded to object storage).
-We use PostgreSQL's own replication functionality to replicate data from the primary to secondary nodes.
+We use PostgreSQL's own replication functionality to replicate data from the **primary** to **secondary** nodes.
We use Redis both as a cache store and to hold persistent data for our background jobs system. Because both
use cases have data that are exclusive to the same Geo node, we don't replicate it between nodes.
@@ -124,31 +124,35 @@ replicating data from those features will cause the data to be **lost**.
If you wish to use those features on a **secondary** node, or to execute a failover
successfully, you must replicate their data using some other means.
-| Feature | Replicated | Verified | Notes |
-|-----------------------------------------------------|--------------------------|-----------------------------|---------------------------------------------|
-| Application data in PostgreSQL | **Yes** | **Yes** | |
-| Project repository | **Yes** | **Yes** | |
-| Project wiki repository | **Yes** | **Yes** | |
-| Project designs repository | **Yes** | [No][design-verification] | |
-| Uploads | **Yes** | [No][upload-verification] | Verified only on transfer, or manually (*1*)|
-| LFS objects | **Yes** | [No][lfs-verification] | Verified only on transfer, or manually (*1*)|
-| CI job artifacts (other than traces) | **Yes** | [No][artifact-verification] | Verified only manually (*1*) |
-| Archived traces | **Yes** | [No][artifact-verification] | Verified only on transfer, or manually (*1*)|
-| Personal snippets | **Yes** | **Yes** | |
-| Project snippets | **Yes** | **Yes** | |
-| Object pools for forked project deduplication | **Yes** | No | |
-| [Server-side Git Hooks][custom-hooks] | No | No | |
-| [Elasticsearch integration][elasticsearch] | No | No | |
-| [GitLab Pages][gitlab-pages] | [No][pages-replication] | No | |
-| [Container Registry][container-registry] | **Yes** | No | |
-| [NPM Registry][npm-registry] | No | No | |
-| [Maven Repository][maven-repository] | No | No | |
-| [Conan Repository][conan-repository] | No | No | |
-| [External merge request diffs][merge-request-diffs] | [No][diffs-replication] | No | |
-| Content in object storage | **Yes** | No | |
+| Feature | Replicated | Verified | Notes |
+|-----------------------------------------------------|-------------------------- |-----------------------------|---------------------------------------------|
+| Application data in PostgreSQL | **Yes** | **Yes** | |
+| Project repository | **Yes** | **Yes** | |
+| Project wiki repository | **Yes** | **Yes** | |
+| Project designs repository | **Yes** | [No][design-verification] | |
+| Uploads | **Yes** | [No][upload-verification] | Verified only on transfer, or manually (*1*)|
+| LFS objects | **Yes** | [No][lfs-verification] | Verified only on transfer, or manually (*1*). Unavailable for new LFS objects in 11.11.x and 12.0.x (*2*). |
+| CI job artifacts (other than traces) | **Yes** | [No][artifact-verification] | Verified only manually (*1*) |
+| Archived traces | **Yes** | [No][artifact-verification] | Verified only on transfer, or manually (*1*)|
+| Personal snippets | **Yes** | **Yes** | |
+| Project snippets | **Yes** | **Yes** | |
+| Object pools for forked project deduplication | **Yes** | No | |
+| [Server-side Git Hooks][custom-hooks] | No | No | |
+| [Elasticsearch integration][elasticsearch] | [No][elasticsearch-replication] | No | |
+| [GitLab Pages][gitlab-pages] | [No][pages-replication] | No | |
+| [Container Registry][container-registry] | **Yes** | No | |
+| [NPM Registry][npm-registry] | [No][packages-replication] | No | |
+| [Maven Repository][maven-repository] | [No][packages-replication] | No | |
+| [Conan Repository][conan-repository] | [No][packages-replication] | No | |
+| [NuGet Repository][nuget-repository] | [No][packages-replication] | No | |
+| [External merge request diffs][merge-request-diffs] | [No][diffs-replication] | No | |
+| Content in object storage | **Yes** | No | |
- (*1*): The integrity can be verified manually using
- [Integrity Check Rake Task](../../raketasks/check.md) on both nodes and comparing the output between them.
+ [Integrity Check Rake Task](../../raketasks/check.md) on both nodes and comparing
+ the output between them.
+- (*2*): GitLab versions 11.11.x and 12.0.x are affected by [a bug that prevents any new
+ LFS objects from replicating](https://gitlab.com/gitlab-org/gitlab/issues/32696).
[design-replication]: https://gitlab.com/groups/gitlab-org/-/epics/1633
[design-verification]: https://gitlab.com/gitlab-org/gitlab/issues/32467
@@ -157,6 +161,8 @@ successfully, you must replicate their data using some other means.
[artifact-verification]: https://gitlab.com/gitlab-org/gitlab/issues/8923
[diffs-replication]: https://gitlab.com/gitlab-org/gitlab/issues/33817
[pages-replication]: https://gitlab.com/groups/gitlab-org/-/epics/589
+[packages-replication]: https://gitlab.com/groups/gitlab-org/-/epics/2346
+[elasticsearch-replication]: https://gitlab.com/gitlab-org/gitlab/-/issues/1186
[custom-hooks]: ../../custom_hooks.md
[elasticsearch]: ../../../integration/elasticsearch.md
@@ -165,4 +171,5 @@ successfully, you must replicate their data using some other means.
[npm-registry]: ../../../user/packages/npm_registry/index.md
[maven-repository]: ../../../user/packages/maven_repository/index.md
[conan-repository]: ../../../user/packages/conan_repository/index.md
+[nuget-repository]: ../../../user/packages/nuget_repository/index.md
[merge-request-diffs]: ../../merge_request_diffs.md
diff --git a/doc/administration/geo/replication/docker_registry.md b/doc/administration/geo/replication/docker_registry.md
index 7d041d97ed2..1d57fece0e4 100644
--- a/doc/administration/geo/replication/docker_registry.md
+++ b/doc/administration/geo/replication/docker_registry.md
@@ -17,7 +17,7 @@ integrated [Container Registry](../../packages/container_registry.md#container-r
You can enable storage-agnostic replication so it
can be used for cloud or local storage. Whenever a new image is pushed to the
-primary node, each **secondary** node will pull it to its own container
+**primary** node, each **secondary** node will pull it to its own container
repository.
To configure Docker Registry replication:
@@ -111,6 +111,7 @@ generate a short-lived JWT that is pull-only-capable to access the
### Verify replication
-To verify Container Registry replication is working, go to **Admin Area > Geo** (`/admin/geo/nodes`) on the **secondary** node.
+To verify Container Registry replication is working, go to **{admin}** **Admin Area >** **{location-dot}** **Geo**
+(`/admin/geo/nodes`) on the **secondary** node.
The initial replication, or "backfill", will probably still be in progress.
You can monitor the synchronization process on each Geo node from the **primary** node's **Geo Nodes** dashboard in your browser.
diff --git a/doc/administration/geo/replication/high_availability.md b/doc/administration/geo/replication/high_availability.md
index 5c124e9c6dc..3e7102b96da 100644
--- a/doc/administration/geo/replication/high_availability.md
+++ b/doc/administration/geo/replication/high_availability.md
@@ -25,10 +25,17 @@ The **primary** and **secondary** Geo deployments must be able to communicate to
## Redis and PostgreSQL High Availability
-The **primary** and **secondary** Redis and PostgreSQL should be configured
-for high availability. Because of the additional complexity involved
-in setting up this configuration for PostgreSQL and Redis,
-it is not covered by this Geo HA documentation.
+Geo supports:
+
+- Redis and PostgreSQL on the **primary** node configured for high availability
+- Redis on **secondary** nodes configured for high availability.
+
+NOTE: **Note:**
+Support for PostgreSQL on **secondary** nodes in high availability configuration
+[is planned](https://gitlab.com/groups/gitlab-org/-/epics/2536).
+
+Because of the additional complexity involved in setting up this configuration
+for PostgreSQL and Redis, it is not covered by this Geo HA documentation.
For more information about setting up a highly available PostgreSQL cluster and Redis cluster using the Omnibus package, see the high availability documentation for
[PostgreSQL](../../high_availability/database.md) and
@@ -37,10 +44,17 @@ For more information about setting up a highly available PostgreSQL cluster and
NOTE: **Note:**
It is possible to use cloud hosted services for PostgreSQL and Redis, but this is beyond the scope of this document.
-## Prerequisites: A working GitLab HA cluster
+## Prerequisites: Two working GitLab HA clusters
+
+One cluster will serve as the **primary** node. Use the
+[GitLab HA documentation](../../high_availability/README.md) to set this up. If
+you already have a working GitLab instance that is in use, it can be used as a
+**primary**.
-This cluster will serve as the **primary** node. Use the
+The second cluster will serve as the **secondary** node. Again, use the
[GitLab HA documentation](../../high_availability/README.md) to set this up.
+It's a good idea to log in and test it. However, note that its data will be
+wiped out as part of the process of replicating from the **primary**.
## Configure the GitLab cluster to be the **primary** node
@@ -99,7 +113,11 @@ major differences:
various resources.
Therefore, we will set up the HA components one-by-one, and include deviations
-from the normal HA setup.
+from the normal HA setup. However, we highly recommend first configuring a
+brand-new cluster as if it were not part of a Geo setup so that it can be
+tested and verified as a working cluster, and only then modifying it for use
+as a Geo **secondary**. This helps to separate problems that are related to
+Geo setup from those that are not.
### Step 1: Configure the Redis and Gitaly services on the **secondary** node
@@ -118,7 +136,8 @@ recommended.
### Step 2: Configure the main read-only replica PostgreSQL database on the **secondary** node
NOTE: **Note:** The following documentation assumes the database will be run on
-a single node only, rather than as a PostgreSQL cluster.
+a single node only. PostgreSQL HA on **secondary** nodes is
+[not currently supported](https://gitlab.com/groups/gitlab-org/-/epics/2536).
Configure the [**secondary** database](database.md) as a read-only replica of
the **primary** database. Use the following as a guide.
@@ -167,6 +186,11 @@ the **primary** database. Use the following as a guide.
## the tracking database IP is in postgresql['md5_auth_cidr_addresses'] above.
##
geo_postgresql['enable'] = false
+
+ ##
+ ## Disable `geo_logcursor` service so Rails doesn't get configured here
+ ##
+ geo_logcursor['enable'] = false
```
After making these changes, [reconfigure GitLab][gitlab-reconfigure] so the changes take effect.
@@ -343,5 +367,97 @@ route traffic to the application servers.
See [Load Balancer for GitLab HA](../../high_availability/load_balancer.md) for
more information.
+### Step 6: Configure the backend application servers on the **secondary** node
+
+The minimal reference architecture diagram above shows all application services
+running together on the same machines. However, for high availability we
+[strongly recommend running all services separately](../../high_availability/README.md).
+
+For example, a Sidekiq server could be configured similarly to the frontend
+application servers above, with some changes to run only the `sidekiq` service:
+
+1. Edit `/etc/gitlab/gitlab.rb` on each Sidekiq server in the **secondary**
+ cluster, and add the following:
+
+ ```ruby
+ ##
+ ## Enable the Geo secondary role
+ ##
+ roles ['geo_secondary_role']
+
+ ##
+ ## Enable the Sidekiq service
+ ##
+ sidekiq['enable'] = true
+
+ ##
+ ## Ensure unnecessary services are disabled
+ ##
+ alertmanager['enable'] = false
+ consul['enable'] = false
+ geo_logcursor['enable'] = false
+ gitaly['enable'] = false
+ gitlab_exporter['enable'] = false
+ gitlab_workhorse['enable'] = false
+ nginx['enable'] = false
+ node_exporter['enable'] = false
+ pgbouncer_exporter['enable'] = false
+ postgresql['enable'] = false
+ prometheus['enable'] = false
+ redis['enable'] = false
+ redis_exporter['enable'] = false
+ repmgr['enable'] = false
+ unicorn['enable'] = false
+
+ ##
+ ## The unique identifier for the Geo node.
+ ##
+ gitlab_rails['geo_node_name'] = '<node_name_here>'
+
+ ##
+ ## Disable automatic migrations
+ ##
+ gitlab_rails['auto_migrate'] = false
+
+ ##
+ ## Configure the connection to the tracking DB. And disable application
+ ## servers from running tracking databases.
+ ##
+ geo_secondary['db_host'] = '<geo_tracking_db_host>'
+ geo_secondary['db_password'] = '<geo_tracking_db_password>'
+ geo_postgresql['enable'] = false
+
+ ##
+ ## Configure connection to the streaming replica database, if you haven't
+ ## already
+ ##
+ gitlab_rails['db_host'] = '<replica_database_host>'
+ gitlab_rails['db_password'] = '<replica_database_password>'
+
+ ##
+ ## Configure connection to Redis, if you haven't already
+ ##
+ gitlab_rails['redis_host'] = '<redis_host>'
+ gitlab_rails['redis_password'] = '<redis_password>'
+
+ ##
+ ## If you are using custom users not managed by Omnibus, you need to specify
+ ## UIDs and GIDs like below, and ensure they match between servers in a
+ ## cluster to avoid permissions issues
+ ##
+ user['uid'] = 9000
+ user['gid'] = 9000
+ web_server['uid'] = 9001
+ web_server['gid'] = 9001
+ registry['uid'] = 9002
+ registry['gid'] = 9002
+ ```
+
+   You can similarly configure a server to run only the `geo-logcursor` service
+   with `geo_logcursor['enable'] = true` and disable Sidekiq with
+   `sidekiq['enable'] = false`, as in the sketch below.
+
+ These servers do not need to be attached to the load balancer.
+
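A minimal sketch of that variant, showing only the lines that differ from the Sidekiq example above (everything else in `/etc/gitlab/gitlab.rb` stays the same):

```ruby
# /etc/gitlab/gitlab.rb on a dedicated geo-logcursor server (sketch only)
roles ['geo_secondary_role']

## Run the Geo log cursor on this server instead of Sidekiq
geo_logcursor['enable'] = true
sidekiq['enable'] = false
```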
[diagram-source]: https://docs.google.com/drawings/d/1z0VlizKiLNXVVVaERFwgsIOuEgjcUqDTWPdQYsE7Z4c/edit
[gitlab-reconfigure]: ../../restart_gitlab.md#omnibus-gitlab-reconfigure
diff --git a/doc/administration/geo/replication/index.md b/doc/administration/geo/replication/index.md
index e3699f1544b..4f598162a63 100644
--- a/doc/administration/geo/replication/index.md
+++ b/doc/administration/geo/replication/index.md
@@ -12,7 +12,8 @@ Replication with Geo is the solution for widely distributed development teams.
Fetching large repositories can take a long time for teams located far from a single GitLab instance.
-Geo provides local, read-only instances of your GitLab instances, reducing the time it takes to clone and fetch large repositories and speeding up development.
+Geo provides local, read-only instances of your GitLab instances. This can reduce the time it takes
+to clone and fetch large repositories, speeding up development.
NOTE: **Note:**
Check the [requirements](#requirements-for-running-geo) carefully before setting up Geo.
@@ -30,7 +31,7 @@ Implementing Geo provides the following benefits:
- Reduce from minutes to seconds the time taken for your distributed developers to clone and fetch large repositories and projects.
- Enable all of your developers to contribute ideas and work in parallel, no matter where they are.
-- Balance the load between your **primary** and **secondary** nodes, or offload your automated tests to a **secondary** node.
+- Balance the read-only load between your **primary** and **secondary** nodes.
In addition, it:
@@ -249,6 +250,7 @@ This list of limitations only reflects the latest version of GitLab. If you are
- [Selective synchronization](configuration.md#selective-synchronization) applies only to files and repositories. Other datasets are replicated to the **secondary** node in full, making it inappropriate for use as an access control mechanism.
- Object pools for forked project deduplication work only on the **primary** node, and are duplicated on the **secondary** node.
- [External merge request diffs](../../merge_request_diffs.md) will not be replicated if they are on-disk, and viewing merge requests will fail. However, external MR diffs in object storage **are** supported. The default configuration (in-database) does work.
+- GitLab Runners cannot register with a **secondary** node. Support for this is [planned for the future](https://gitlab.com/gitlab-org/gitlab/issues/3294).
### Limitations on replication/verification
@@ -268,7 +270,7 @@ For answers to common questions, see the [Geo FAQ](faq.md).
Since GitLab 9.5, Geo stores structured log messages in a `geo.log` file. For Omnibus installations, this file is at `/var/log/gitlab/gitlab-rails/geo.log`.
-This file contains information about when Geo attempts to sync repositories and files. Each line in the file contains a separate JSON entry that can be ingested into Elasticsearch, Splunk, etc.
+This file contains information about when Geo attempts to sync repositories and files. Each line in the file contains a separate JSON entry that can be ingested into services such as Elasticsearch or Splunk.
For example:
diff --git a/doc/administration/geo/replication/location_aware_git_url.md b/doc/administration/geo/replication/location_aware_git_url.md
index 6183a0ad119..f1f1edd4a9b 100644
--- a/doc/administration/geo/replication/location_aware_git_url.md
+++ b/doc/administration/geo/replication/location_aware_git_url.md
@@ -37,7 +37,7 @@ In any case, you require:
- A Route53 Hosted Zone managing your domain.
If you have not yet set up a Geo **primary** node and **secondary** node, please consult
-[the Geo setup instructions](https://docs.gitlab.com/ee/administration/geo/replication/#setup-instructions).
+[the Geo setup instructions](index.md#setup-instructions).
## Create a traffic policy
diff --git a/doc/administration/geo/replication/object_storage.md b/doc/administration/geo/replication/object_storage.md
index 3251a673e4e..0c1bec5d4ae 100644
--- a/doc/administration/geo/replication/object_storage.md
+++ b/doc/administration/geo/replication/object_storage.md
@@ -24,7 +24,7 @@ whether they are stored on the local filesystem or in object storage.
To enable GitLab replication, you must:
-1. Go to **Admin Area > Geo**.
+1. Go to **{admin}** **Admin Area >** **{location-dot}** **Geo**.
1. Press **Edit** on the **secondary** node.
1. Enable the **Allow this secondary node to replicate content on Object Storage**
checkbox.
diff --git a/doc/administration/geo/replication/remove_geo_node.md b/doc/administration/geo/replication/remove_geo_node.md
index c3ff0ef47c1..c04c7aec858 100644
--- a/doc/administration/geo/replication/remove_geo_node.md
+++ b/doc/administration/geo/replication/remove_geo_node.md
@@ -2,7 +2,7 @@
**Secondary** nodes can be removed from the Geo cluster using the Geo admin page of the **primary** node. To remove a **secondary** node:
-1. Navigate to **Admin Area > Geo** (`/admin/geo/nodes`).
+1. Navigate to **{admin}** **Admin Area >** **{location-dot}** **Geo** (`/admin/geo/nodes`).
1. Click the **Remove** button for the **secondary** node you want to remove.
1. Confirm by clicking **Remove** when the prompt appears.
diff --git a/doc/administration/geo/replication/troubleshooting.md b/doc/administration/geo/replication/troubleshooting.md
index cbb01c41002..3a8219bdbc2 100644
--- a/doc/administration/geo/replication/troubleshooting.md
+++ b/doc/administration/geo/replication/troubleshooting.md
@@ -19,7 +19,7 @@ Before attempting more advanced troubleshooting:
### Check the health of the **secondary** node
-Visit the **primary** node's **Admin Area > Geo** (`/admin/geo/nodes`) in
+Visit the **primary** node's **{admin}** **Admin Area >** **{location-dot}** **Geo** (`/admin/geo/nodes`) in
your browser. We perform the following health checks on each **secondary** node
to help identify if something is wrong:
@@ -46,7 +46,7 @@ sudo gitlab-rake gitlab:geo:check
Example output:
-```text
+```plaintext
Checking Geo ...
GitLab Geo is available ... yes
@@ -79,7 +79,7 @@ sudo gitlab-rake geo:status
Example output:
-```text
+```plaintext
http://secondary.example.com/
-----------------------------------------------------
GitLab Version: 11.10.4-ee
@@ -122,7 +122,7 @@ Geo finds the current machine's Geo node name in `/etc/gitlab/gitlab.rb` by:
- If that is not defined, using the `external_url` setting.
This name is used to look up the node with the same **Name** in
-**Admin Area > Geo**.
+**{admin}** **Admin Area >** **{location-dot}** **Geo**.
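For reference, a hedged sketch of pinning the node name explicitly in `/etc/gitlab/gitlab.rb`; the value is hypothetical, must match the node record's **Name** field exactly, and requires `sudo gitlab-ctl reconfigure` afterwards:

```ruby
# Sketch only -- hypothetical node name; must match the Geo node record's Name exactly.
gitlab_rails['geo_node_name'] = 'paris-secondary'
```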
To check if the current machine has a node name that matches a node in the
database, run the check task:
@@ -134,11 +134,11 @@ sudo gitlab-rake gitlab:geo:check
It displays the current machine's node name and whether the matching database
record is a **primary** or **secondary** node.
-```
+```plaintext
This machine's Geo node name matches a database record ... yes, found a secondary node named "Shanghai"
```
-```
+```plaintext
This machine's Geo node name matches a database record ... no
Try fixing it:
You could add or update a Geo node database record, setting the name to "https://example.com/".
@@ -157,102 +157,102 @@ sudo gitlab-rake gitlab:geo:check
1. Rails did not provide a password when connecting to the database
- ```text
- Checking Geo ...
+ ```plaintext
+ Checking Geo ...
- GitLab Geo is available ... Exception: fe_sendauth: no password supplied
- GitLab Geo is enabled ... Exception: fe_sendauth: no password supplied
- ...
- Checking Geo ... Finished
- ```
+ GitLab Geo is available ... Exception: fe_sendauth: no password supplied
+ GitLab Geo is enabled ... Exception: fe_sendauth: no password supplied
+ ...
+ Checking Geo ... Finished
+ ```
- - Ensure that you have the `gitlab_rails['db_password']` set to the plain text-password used when creating the hash for `postgresql['sql_user_password']`.
+   - Ensure that you have the `gitlab_rails['db_password']` set to the plain-text password used when creating the hash for `postgresql['sql_user_password']`.
1. Rails is unable to connect to the database
- ```text
- Checking Geo ...
+ ```plaintext
+ Checking Geo ...
- GitLab Geo is available ... Exception: FATAL: no pg_hba.conf entry for host "1.1.1.1", user "gitlab", database "gitlabhq_production", SSL on
- FATAL: no pg_hba.conf entry for host "1.1.1.1", user "gitlab", database "gitlabhq_production", SSL off
- GitLab Geo is enabled ... Exception: FATAL: no pg_hba.conf entry for host "1.1.1.1", user "gitlab", database "gitlabhq_production", SSL on
- FATAL: no pg_hba.conf entry for host "1.1.1.1", user "gitlab", database "gitlabhq_production", SSL off
- ...
- Checking Geo ... Finished
- ```
+ GitLab Geo is available ... Exception: FATAL: no pg_hba.conf entry for host "1.1.1.1", user "gitlab", database "gitlabhq_production", SSL on
+ FATAL: no pg_hba.conf entry for host "1.1.1.1", user "gitlab", database "gitlabhq_production", SSL off
+ GitLab Geo is enabled ... Exception: FATAL: no pg_hba.conf entry for host "1.1.1.1", user "gitlab", database "gitlabhq_production", SSL on
+ FATAL: no pg_hba.conf entry for host "1.1.1.1", user "gitlab", database "gitlabhq_production", SSL off
+ ...
+ Checking Geo ... Finished
+ ```
- - Ensure that you have the IP address of the rails node included in `postgresql['md5_auth_cidr_addresses']`.
- - Ensure that you have included the subnet mask on the IP address: `postgresql['md5_auth_cidr_addresses'] = ['1.1.1.1/32']`.
+ - Ensure that you have the IP address of the rails node included in `postgresql['md5_auth_cidr_addresses']`.
+ - Ensure that you have included the subnet mask on the IP address: `postgresql['md5_auth_cidr_addresses'] = ['1.1.1.1/32']`.
1. Rails has supplied the incorrect password
- ```text
- Checking Geo ...
- GitLab Geo is available ... Exception: FATAL: password authentication failed for user "gitlab"
- FATAL: password authentication failed for user "gitlab"
- GitLab Geo is enabled ... Exception: FATAL: password authentication failed for user "gitlab"
- FATAL: password authentication failed for user "gitlab"
- ...
- Checking Geo ... Finished
- ```
+ ```plaintext
+ Checking Geo ...
+ GitLab Geo is available ... Exception: FATAL: password authentication failed for user "gitlab"
+ FATAL: password authentication failed for user "gitlab"
+ GitLab Geo is enabled ... Exception: FATAL: password authentication failed for user "gitlab"
+ FATAL: password authentication failed for user "gitlab"
+ ...
+ Checking Geo ... Finished
+ ```
- - Verify the correct password is set for `gitlab_rails['db_password']` that was used when creating the hash in `postgresql['sql_user_password']` by running `gitlab-ctl pg-password-md5 gitlab` and entering the password.
+ - Verify the correct password is set for `gitlab_rails['db_password']` that was used when creating the hash in `postgresql['sql_user_password']` by running `gitlab-ctl pg-password-md5 gitlab` and entering the password.
1. Check returns not a secondary node
- ```text
- Checking Geo ...
-
- GitLab Geo is available ... yes
- GitLab Geo is enabled ... yes
- GitLab Geo secondary database is correctly configured ... not a secondary node
- Database replication enabled? ... not a secondary node
- ...
- Checking Geo ... Finished
- ```
-
- - Ensure that you have added the secondary node in the Admin Area of the primary node.
- - Ensure that you entered the `external_url` or `gitlab_rails['geo_node_name']` when adding the secondary node in the admin are of the primary node.
- - Prior to GitLab 12.4, edit the secondary node in the Admin Area of the primary node and ensure that there is a trailing `/` in the `Name` field.
-
-1. Check returns Exception: PG::UndefinedTable: ERROR: relation "geo_nodes" does not exist
-
- ```text
- Checking Geo ...
-
- GitLab Geo is available ... no
- Try fixing it:
- Upload a new license that includes the GitLab Geo feature
- For more information see:
- https://about.gitlab.com/features/gitlab-geo/
- GitLab Geo is enabled ... Exception: PG::UndefinedTable: ERROR: relation "geo_nodes" does not exist
- LINE 8: WHERE a.attrelid = '"geo_nodes"'::regclass
- ^
- : SELECT a.attname, format_type(a.atttypid, a.atttypmod),
- pg_get_expr(d.adbin, d.adrelid), a.attnotnull, a.atttypid, a.atttypmod,
- c.collname, col_description(a.attrelid, a.attnum) AS comment
- FROM pg_attribute a
- LEFT JOIN pg_attrdef d ON a.attrelid = d.adrelid AND a.attnum = d.adnum
- LEFT JOIN pg_type t ON a.atttypid = t.oid
- LEFT JOIN pg_collation c ON a.attcollation = c.oid AND a.attcollation <> t.typcollation
- WHERE a.attrelid = '"geo_nodes"'::regclass
- AND a.attnum > 0 AND NOT a.attisdropped
- ORDER BY a.attnum
- ...
- Checking Geo ... Finished
- ```
-
- When performing a Postgres major version (9 > 10) update this is expected. Follow:
-
- - [initiate-the-replication-process](https://docs.gitlab.com/ee/administration/geo/replication/database.html#step-3-initiate-the-replication-process)
- - [Geo database has an outdated FDW remote schema](https://docs.gitlab.com/ee/administration/geo/replication/troubleshooting.html#geo-database-has-an-outdated-fdw-remote-schema-error)
+ ```plaintext
+ Checking Geo ...
+
+ GitLab Geo is available ... yes
+ GitLab Geo is enabled ... yes
+ GitLab Geo secondary database is correctly configured ... not a secondary node
+ Database replication enabled? ... not a secondary node
+ ...
+ Checking Geo ... Finished
+ ```
+
+ - Ensure that you have added the secondary node in the Admin Area of the **primary** node.
+   - Ensure that you entered the `external_url` or `gitlab_rails['geo_node_name']` when adding the secondary node in the Admin Area of the **primary** node.
+ - Prior to GitLab 12.4, edit the secondary node in the Admin Area of the **primary** node and ensure that there is a trailing `/` in the `Name` field.
+
+1. Check returns `Exception: PG::UndefinedTable: ERROR: relation "geo_nodes" does not exist`
+
+ ```plaintext
+ Checking Geo ...
+
+ GitLab Geo is available ... no
+ Try fixing it:
+ Upload a new license that includes the GitLab Geo feature
+ For more information see:
+ https://about.gitlab.com/features/gitlab-geo/
+ GitLab Geo is enabled ... Exception: PG::UndefinedTable: ERROR: relation "geo_nodes" does not exist
+ LINE 8: WHERE a.attrelid = '"geo_nodes"'::regclass
+ ^
+ : SELECT a.attname, format_type(a.atttypid, a.atttypmod),
+ pg_get_expr(d.adbin, d.adrelid), a.attnotnull, a.atttypid, a.atttypmod,
+ c.collname, col_description(a.attrelid, a.attnum) AS comment
+ FROM pg_attribute a
+ LEFT JOIN pg_attrdef d ON a.attrelid = d.adrelid AND a.attnum = d.adnum
+ LEFT JOIN pg_type t ON a.atttypid = t.oid
+ LEFT JOIN pg_collation c ON a.attcollation = c.oid AND a.attcollation <> t.typcollation
+ WHERE a.attrelid = '"geo_nodes"'::regclass
+ AND a.attnum > 0 AND NOT a.attisdropped
+ ORDER BY a.attnum
+ ...
+ Checking Geo ... Finished
+ ```
+
+   When performing a PostgreSQL major version update (from 9 to 10), this is expected. Follow:
+
+ - [initiate-the-replication-process](database.md#step-3-initiate-the-replication-process)
+ - [Geo database has an outdated FDW remote schema](troubleshooting.md#geo-database-has-an-outdated-fdw-remote-schema-error)
## Fixing replication errors
The following sections outline troubleshooting steps for fixing replication
errors.
-### Message: "ERROR: replication slots can only be used if max_replication_slots > 0"?
+### Message: `ERROR: replication slots can only be used if max_replication_slots > 0`?
This means that the `max_replication_slots` PostgreSQL variable needs to
be set on the **primary** database. In GitLab 9.4, we have made this setting
@@ -263,7 +263,7 @@ Be sure to restart PostgreSQL for this to take
effect. See the [PostgreSQL replication
setup][database-pg-replication] guide for more details.
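A hedged Omnibus sketch of setting this on the **primary** database node; the value is an assumption and should be at least the number of **secondary** nodes you run:

```ruby
# /etc/gitlab/gitlab.rb on the primary database node (sketch only).
# One slot per secondary node; 1 is a hypothetical example value.
postgresql['max_replication_slots'] = 1
```

Run `sudo gitlab-ctl reconfigure` and restart PostgreSQL afterwards, as noted above.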
-### Message: "FATAL: could not start WAL streaming: ERROR: replication slot "geo_secondary_my_domain_com" does not exist"?
+### Message: `FATAL: could not start WAL streaming: ERROR: replication slot "geo_secondary_my_domain_com" does not exist`?
This occurs when PostgreSQL does not have a replication slot for the
**secondary** node by that name.
@@ -290,7 +290,7 @@ sudo gitlab-ctl \
This will give the initial replication up to six hours to complete, rather than
the default thirty minutes. Adjust as required for your installation.
-### Message: "PANIC: could not write to file 'pg_xlog/xlogtemp.123': No space left on device"
+### Message: `PANIC: could not write to file 'pg_xlog/xlogtemp.123': No space left on device`
Determine if you have any unused replication slots in the **primary** database. This can cause large amounts of
log data to build up in `pg_xlog`. Removing the unused slots can reduce the amount of space used in the `pg_xlog`.
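A hedged sketch of inspecting and removing slots from the bundled PostgreSQL; the slot name is hypothetical, and you should only drop slots you are certain are unused:

```shell
# List replication slots and whether each is active (sketch only)
sudo gitlab-psql -d gitlabhq_production -c 'SELECT slot_name, active FROM pg_replication_slots;'

# Drop an unused slot -- 'geo_secondary_example_com' is a hypothetical slot name
sudo gitlab-psql -d gitlabhq_production -c "SELECT pg_drop_replication_slot('geo_secondary_example_com');"
```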
@@ -359,7 +359,7 @@ To help us resolve this problem, consider commenting on
GitLab places a timeout on all repository clones, including project imports
and Geo synchronization operations. If a fresh `git clone` of a repository
-on the primary takes more than a few minutes, you may be affected by this.
+on the **primary** takes more than a few minutes, you may be affected by this.
To increase the timeout, add the following line to `/etc/gitlab/gitlab.rb`
on the **secondary** node:
@@ -377,6 +377,14 @@ sudo gitlab-ctl reconfigure
This will increase the timeout to three hours (10800 seconds). Choose a time
long enough to accommodate a full clone of your largest repositories.
+### New LFS objects are never replicated
+
+If new LFS objects are never replicated to secondary Geo nodes, check the version of
+GitLab you are running. GitLab versions 11.11.x or 12.0.x are affected by
+[a bug that results in new LFS objects not being replicated to Geo secondary nodes](https://gitlab.com/gitlab-org/gitlab/issues/32696).
+
+To resolve the issue, upgrade to GitLab 12.1 or newer.
+
### Resetting Geo **secondary** node replication
If you get a **secondary** node in a broken state and want to reset the replication state,
@@ -475,7 +483,7 @@ when promoting a secondary to a primary node with strategies to resolve them.
When [promoting a **secondary** node](../disaster_recovery/index.md#step-3-promoting-a-secondary-node),
you might encounter the following error:
-```text
+```plaintext
Running gitlab-rake geo:set_secondary_as_primary...
rake aborted!
@@ -494,7 +502,7 @@ If you encounter this message when running `gitlab-rake geo:set_secondary_as_pri
or `gitlab-ctl promote-to-primary-node`, either:
- Enter a Rails console and run:
-
+
```ruby
Rails.application.load_tasks; nil
Gitlab::Geo.expire_cache_keys!([:primary_node, :current_node])
@@ -506,6 +514,27 @@ or `gitlab-ctl promote-to-primary-node`, either:
bug](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/22021) was
fixed.
+### Message: `sudo: gitlab-pg-ctl: command not found`
+
+When
+[promoting a **secondary** node with HA](../disaster_recovery/index.md#promoting-a-secondary-node-with-ha),
+you need to run the `gitlab-pg-ctl` command to promote the PostgreSQL
+read-replica database.
+
+In GitLab 12.8 and earlier, this command will fail with the message:
+
+```plaintext
+sudo: gitlab-pg-ctl: command not found
+```
+
+In this case, the workaround is to use the full path to the binary, for example:
+
+```shell
+sudo /opt/gitlab/embedded/bin/gitlab-pg-ctl promote
+```
+
+GitLab 12.9 and later are [unaffected by this error](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5147).
+
## Fixing Foreign Data Wrapper errors
This section documents ways to fix potential Foreign Data Wrapper errors.
@@ -515,7 +544,7 @@ This section documents ways to fix potential Foreign Data Wrapper errors.
When setting up Geo, you might see this warning in the `gitlab-rake
gitlab:geo:check` output:
-```text
+```plaintext
GitLab Geo tracking database Foreign Data Wrapper schema is up-to-date? ... foreign data wrapper is not configured
```
@@ -599,7 +628,7 @@ To check the configuration:
1. Check that the foreign server mapping is correct via `\des+`. The
results should look something like this:
- ```
+ ```plaintext
gitlabhq_geo_production=# \des+
List of foreign servers
-[ RECORD 1 ]--------+------------------------------------------------------------
@@ -635,7 +664,7 @@ To check the configuration:
1. Check that the user mapping is configured properly via `\deu+`:
- ```
+ ```plaintext
gitlabhq_geo_production=# \deu+
List of user mappings
Server | User name | FDW Options
@@ -723,7 +752,7 @@ GitLab can error with a `Geo database has an outdated FDW remote schema` message
For example:
-```text
+```plaintext
Geo database has an outdated FDW remote schema. It contains 229 of 236 expected tables. Please refer to Geo Troubleshooting.
```
@@ -750,7 +779,7 @@ If you are able to log in to the **primary** node, but you receive this error
when attempting to log into a **secondary**, you should check that the Geo
node's URL matches its external URL.
-1. On the primary, visit **Admin Area > Geo**.
+1. On the **primary** node, visit **{admin}** **Admin Area >** **{location-dot}** **Geo**.
1. Find the affected **secondary** and click **Edit**.
1. Ensure the **URL** field matches the value found in `/etc/gitlab/gitlab.rb`
in `external_url "https://gitlab.example.com"` on the frontend server(s) of
@@ -833,4 +862,4 @@ To resolve this issue:
- Check `/var/log/gitlab/gitlab-rails/geo.log` to see if the **secondary** node is
using IPv6 to send its status to the **primary** node. If it is, add an entry to
the **primary** node using IPv4 in the `/etc/hosts` file. Alternatively, you should
- [enable IPv6 on the primary node](https://docs.gitlab.com/omnibus/settings/nginx.html#setting-the-nginx-listen-address-or-addresses).
+ [enable IPv6 on the **primary** node](https://docs.gitlab.com/omnibus/settings/nginx.html#setting-the-nginx-listen-address-or-addresses).
diff --git a/doc/administration/geo/replication/tuning.md b/doc/administration/geo/replication/tuning.md
index 3ee9937774a..972bf002935 100644
--- a/doc/administration/geo/replication/tuning.md
+++ b/doc/administration/geo/replication/tuning.md
@@ -2,8 +2,8 @@
## Changing the sync capacity values
-In the Geo admin page (`/admin/geo/nodes`), there are several variables that
-can be tuned to improve performance of Geo:
+In the Geo admin page at **{admin}** **Admin Area >** **{location-dot}** **Geo** (`/admin/geo/nodes`),
+there are several variables that can be tuned to improve performance of Geo:
- Repository sync capacity.
- File sync capacity.
diff --git a/doc/administration/geo/replication/updating_the_geo_nodes.md b/doc/administration/geo/replication/updating_the_geo_nodes.md
index d094ccf3e54..df66b1b36ec 100644
--- a/doc/administration/geo/replication/updating_the_geo_nodes.md
+++ b/doc/administration/geo/replication/updating_the_geo_nodes.md
@@ -14,6 +14,8 @@ different steps.
- [Updating to GitLab 12.7](version_specific_updates.md#updating-to-gitlab-127)
- [Updating to GitLab 12.2](version_specific_updates.md#updating-to-gitlab-122)
- [Updating to GitLab 12.1](version_specific_updates.md#updating-to-gitlab-121)
+- [Updating to GitLab 12.0](version_specific_updates.md#updating-to-gitlab-120)
+- [Updating to GitLab 11.11](version_specific_updates.md#updating-to-gitlab-1111)
- [Updating to GitLab 10.8](version_specific_updates.md#updating-to-gitlab-108)
- [Updating to GitLab 10.6](version_specific_updates.md#updating-to-gitlab-106)
- [Updating to GitLab 10.5](version_specific_updates.md#updating-to-gitlab-105)
diff --git a/doc/administration/geo/replication/version_specific_updates.md b/doc/administration/geo/replication/version_specific_updates.md
index 89e1fc9eaa3..a697d07ded4 100644
--- a/doc/administration/geo/replication/version_specific_updates.md
+++ b/doc/administration/geo/replication/version_specific_updates.md
@@ -45,6 +45,20 @@ This can be temporarily disabled by running the following before updating:
sudo touch /etc/gitlab/disable-postgresql-upgrade
```
+## Updating to GitLab 12.0
+
+WARNING: **Warning:**
+This version is affected by [a bug that results in new LFS objects not being replicated to
+Geo secondary nodes](https://gitlab.com/gitlab-org/gitlab/issues/32696). The issue is fixed
+in GitLab 12.1. Please upgrade to GitLab 12.1 or newer.
+
+## Updating to GitLab 11.11
+
+WARNING: **Warning:**
+This version is affected by [a bug that results in new LFS objects not being replicated to
+Geo secondary nodes](https://gitlab.com/gitlab-org/gitlab/issues/32696). The issue is fixed
+in GitLab 12.1. Please upgrade to GitLab 12.1 or newer.
+
## Updating to GitLab 10.8
Before 10.8, broadcast messages would not propagate without flushing
@@ -186,7 +200,7 @@ Replicating over SSH has been deprecated, and support for this option will be
removed in a future release.
To switch to HTTP/HTTPS replication, log into the **primary** node as an admin and visit
-**Admin Area > Geo** (`/admin/geo/nodes`). For each **secondary** node listed,
+**{admin}** **Admin Area >** **{location-dot}** **Geo** (`/admin/geo/nodes`). For each **secondary** node listed,
press the "Edit" button, change the "Repository cloning" setting from
"SSH (deprecated)" to "HTTP/HTTPS", and press "Save changes". This should take
effect immediately.
diff --git a/doc/administration/git_annex.md b/doc/administration/git_annex.md
index 87e1d3b1e8e..8f285cd9bcb 100644
--- a/doc/administration/git_annex.md
+++ b/doc/administration/git_annex.md
@@ -18,7 +18,7 @@ you can.
Not being able to version control large binaries is a big problem for many
larger organizations.
-Videos, photos, audio, compiled binaries and many other types of files are too
+Videos, photos, audio, compiled binaries, and many other types of files are too
large. As a workaround, people keep artwork-in-progress in a Dropbox folder and
only check in the final result. This results in using outdated files, not
having a complete history and increases the risk of losing work.
@@ -41,15 +41,15 @@ configuration options required to enable it.
`git-annex` needs to be installed both on the server and the client side.
-For Debian-like systems (e.g., Debian, Ubuntu) this can be achieved by running:
+For Debian-like systems (for example, Debian and Ubuntu) this can be achieved by running:
-```
+```shell
sudo apt-get update && sudo apt-get install git-annex
```
-For RedHat-like systems (e.g., CentOS, RHEL) this can be achieved by running:
+For RedHat-like systems (for example, CentOS and RHEL) this can be achieved by running:
-```
+```shell
sudo yum install epel-release && sudo yum install git-annex
```
@@ -108,7 +108,7 @@ git annex sync --content # sync the Git repo and large file to the GitLa
The output should look like this:
-```
+```plaintext
commit
On branch master
Your branch is ahead of 'origin/master' by 1 commit.
@@ -154,7 +154,7 @@ are turned into symbolic links that point to data in `.git/annex/objects/`.
The `debian.iso` file in the example will contain the symbolic link:
-```
+```plaintext
.git/annex/objects/ZW/1k/SHA256E-s82701--6384039733b5035b559efd5a2e25a493ab6e09aabfd5162cc03f6f0ec238429d.png/SHA256E-s82701--6384039733b5035b559efd5a2e25a493ab6e09aabfd5162cc03f6f0ec238429d.iso
```
@@ -216,14 +216,14 @@ and the files are pushed to the GitLab repository.
If you get hit by this, you can run the following command inside the repository
that the warning was raised:
-```
+```shell
git config remote.origin.annex-ignore false
```
Consecutive runs of `git annex sync --content` **should not** produce this
warning and the output should look like this:
-```
+```plaintext
commit ok
pull origin
ok
diff --git a/doc/administration/git_protocol.md b/doc/administration/git_protocol.md
index 2e5c362a3ab..a8e785f9344 100644
--- a/doc/administration/git_protocol.md
+++ b/doc/administration/git_protocol.md
@@ -4,17 +4,9 @@ description: "Set and configure Git protocol v2"
# Configuring Git Protocol v2
-> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/46555) in GitLab 11.4.
-> Temporarily disabled (see [confidential issue](../user/project/issues/confidential_issues.md)
-> `https://gitlab.com/gitlab-org/gitlab-foss/issues/55769`) in GitLab 11.5.8, 11.6.6, 11.7.1, and 11.8+.
-
-NOTE: **Note:**
-Git protocol v2 support has been temporarily disabled
-because a feature used to hide certain internal references does not function when it
-is enabled, and this has a security impact. Once this problem has been resolved,
-protocol v2 support will be re-enabled. For more information, see the
-[confidential issue](../user/project/issues/confidential_issues.md)
-`https://gitlab.com/gitlab-org/gitlab-foss/issues/55769`.
+> - [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/46555) in GitLab 11.4.
+> - [Temporarily disabled](https://gitlab.com/gitlab-org/gitlab-foss/issues/55769) in GitLab 11.5.8, 11.6.6, 11.7.1, and 11.8+.
+> - [Re-enabled](https://gitlab.com/gitlab-org/gitlab/issues/27828) in GitLab 12.8.
Git protocol v2 improves the v1 wire protocol in several ways and is
enabled by default in GitLab for HTTP requests. In order to enable SSH,
@@ -43,10 +35,14 @@ the SSH configuration of your server by adding the line below to the `/etc/ssh/s
AcceptEnv GIT_PROTOCOL
```
-Once configured, restart the SSH daemon. In Ubuntu, run:
+Once configured, restart the SSH daemon for the change to take effect:
```shell
-sudo service ssh restart
+# CentOS 6 / RHEL 6
+sudo service sshd restart
+
+# CentOS 7 / RHEL 7 and other distributions using the sshd unit
+sudo systemctl restart sshd
+
+# Debian / Ubuntu
+sudo systemctl restart ssh
```
## Instructions
@@ -110,3 +106,15 @@ debug1: Sending env GIT_PROTOCOL = version=2
For the server side, you can use the [same examples from HTTP](#http-connections), changing the
URL to use SSH.
+
+### Observe Git protocol version of connections
+
+To observe what Git protocol versions are being used in a
+production environment, you can use the following Prometheus query:
+
+```prometheus
+sum(rate(gitaly_git_protocol_requests_total[1m])) by (grpc_method,git_protocol,grpc_service)
+```
+
+You can view what Git protocol versions are being used on GitLab.com at
+<https://dashboards.gitlab.com/d/pqlQq0xik/git-protocol-versions>.
diff --git a/doc/administration/gitaly/img/praefect_architecture_v12_9.png b/doc/administration/gitaly/img/praefect_architecture_v12_9.png
new file mode 100644
index 00000000000..b68e495cb17
--- /dev/null
+++ b/doc/administration/gitaly/img/praefect_architecture_v12_9.png
Binary files differ
diff --git a/doc/administration/gitaly/index.md b/doc/administration/gitaly/index.md
index 390e0ae05af..4630b6d8259 100644
--- a/doc/administration/gitaly/index.md
+++ b/doc/administration/gitaly/index.md
@@ -6,9 +6,9 @@ components can read or write Git data. GitLab components that access Git
repositories (GitLab Rails, GitLab Shell, GitLab Workhorse, etc.) act as clients
to Gitaly. End users do not have direct access to Gitaly.
-In the rest of this page, Gitaly server is referred to the standalone node that
-only runs Gitaly, and Gitaly client to the GitLab Rails node that runs all other
-processes except Gitaly.
+On this page, *Gitaly server* refers to a standalone node that only runs Gitaly
+and *Gitaly client* refers to a GitLab Rails app node that runs all other processes
+except Gitaly.
## Architecture
@@ -20,7 +20,7 @@ Here's a high-level architecture overview of how Gitaly is used.
The Gitaly service itself is configured via a [TOML configuration file](reference.md).
-In case you want to change some of its settings:
+If you want to change any of its settings:
**For Omnibus GitLab**
@@ -54,10 +54,6 @@ scenario, the [new repository indexer](../../integration/elasticsearch.md#elasti
needs to be enabled in your GitLab configuration. [Since GitLab v12.3](https://gitlab.com/gitlab-org/gitlab/issues/6481),
the new indexer becomes the default and no configuration is required.
-NOTE: **Note:** While Gitaly can be used as a replacement for NFS, it's not recommended
-to use EFS as it may impact GitLab's performance. Review the [relevant documentation](../high_availability/nfs.md#avoid-using-awss-elastic-file-system-efs)
-for more details.
-
### Network architecture
The following list depicts what the network architecture of Gitaly is:
@@ -167,17 +163,21 @@ Git operations in GitLab will result in an API error.
unicorn['enable'] = false
sidekiq['enable'] = false
gitlab_workhorse['enable'] = false
+ grafana['enable'] = false
+
+  # If you run a separate monitoring node, you can disable these services
+ alertmanager['enable'] = false
+ prometheus['enable'] = false
+
+  # If you don't run a separate monitoring node, you can
+  # enable Prometheus access and disable these extra services.
+ # This makes Prometheus listen on all interfaces. You must use firewalls to restrict access to this address/port.
+ # prometheus['listen_address'] = '0.0.0.0:9090'
+ # prometheus['monitor_kubernetes'] = false
# If you don't want to run monitoring services uncomment the following (not recommended)
- # alertmanager['enable'] = false
# gitlab_exporter['enable'] = false
- # grafana['enable'] = false
# node_exporter['enable'] = false
- # prometheus['enable'] = false
-
- # Enable prometheus monitoring - comment out if you disable monitoring services above.
- # This makes Prometheus listen on all interfaces. You must use firewalls to restrict access to this address/port.
- prometheus['listen_address'] = '0.0.0.0:9090'
# Prevent database connections during 'gitlab-ctl reconfigure'
gitlab_rails['rake_cache_clear'] = false
@@ -568,30 +568,6 @@ server with the following settings.
1. Save the file and [restart GitLab](../restart_gitlab.md#installations-from-source).
-## Eliminating NFS altogether
-
-If you are planning to use Gitaly without NFS for your storage needs
-and want to eliminate NFS from your environment altogether, there are
-a few things that you need to do:
-
-1. Make sure the [`git` user home directory](https://docs.gitlab.com/omnibus/settings/configuration.html#moving-the-home-directory-for-a-user) is on local disk.
-1. Configure [database lookup of SSH keys](../operations/fast_ssh_key_lookup.md)
- to eliminate the need for a shared `authorized_keys` file.
-1. Configure [object storage for job artifacts](../job_artifacts.md#using-object-storage)
- including [incremental logging](../job_logs.md#new-incremental-logging-architecture).
-1. Configure [object storage for LFS objects](../lfs/lfs_administration.md#storing-lfs-objects-in-remote-object-storage).
-1. Configure [object storage for uploads](../uploads.md#using-object-storage-core-only).
-1. Configure [object storage for merge request diffs](../merge_request_diffs.md#using-object-storage).
-1. Configure [object storage for packages](../packages/index.md#using-object-storage) (optional feature).
-1. Configure [object storage for dependency proxy](../packages/dependency_proxy.md#using-object-storage) (optional feature).
-1. Configure [object storage for Mattermost](https://docs.mattermost.com/administration/config-settings.html#file-storage) (optional feature).
-
-NOTE: **Note:**
-One current feature of GitLab that still requires a shared directory (NFS) is
-[GitLab Pages](../../user/project/pages/index.md).
-There is [work in progress](https://gitlab.com/gitlab-org/gitlab-pages/issues/196)
-to eliminate the need for NFS to support GitLab Pages.
-
## Limiting RPC concurrency
It can happen that CI clone traffic puts a large strain on your Gitaly
@@ -805,7 +781,7 @@ two checks. The result of both of these checks is cached.
see if we can access filesystem underneath the Gitaly server
directly. If so, use the Rugged patch.
-To see if GitLab Rails can access the repo filesystem directly, we use
+To see if GitLab Rails can access the repo filesystem directly, we use
the following heuristic:
- Gitaly ensures that the filesystem has a metadata file in its root
@@ -886,10 +862,10 @@ you are seeing Gitaly errors. You can control the log level of the
gRPC client with the `GRPC_LOG_LEVEL` environment variable. The
default level is `WARN`.
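+
+For example, to raise the gRPC client log level for a single command (a minimal
+sketch; any Rake task that talks to Gitaly could be used here):
+
+```shell
+# Example only: run the Gitaly check with a more verbose gRPC client log level
+sudo GRPC_LOG_LEVEL=DEBUG gitlab-rake gitlab:gitaly:check
+```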
-You can run a GRPC trace with:
+You can run a gRPC trace with:
```shell
-GRPC_TRACE=all GRPC_VERBOSITY=DEBUG sudo gitlab-rake gitlab:gitaly:check
+sudo GRPC_TRACE=all GRPC_VERBOSITY=DEBUG gitlab-rake gitlab:gitaly:check
```
### Observing `gitaly-ruby` traffic
@@ -1034,6 +1010,12 @@ unset http_proxy
unset https_proxy
```
+### Gitaly not listening on new address after reconfiguring
+
+When updating the `gitaly['listen_addr']` or `gitaly['prometheus_listen_addr']` values, Gitaly may continue to listen on the old address after a `sudo gitlab-ctl reconfigure`.
+
+When this occurs, performing a `sudo gitlab-ctl restart` will resolve the issue. This will no longer be necessary after [this issue](https://gitlab.com/gitlab-org/gitaly/issues/2521) is resolved.
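+
+For example, after changing either listen address, the full sequence looks like
+this (a short sketch of the commands already described above):
+
+```shell
+sudo gitlab-ctl reconfigure
+
+# Work around the issue above by also restarting the services
+sudo gitlab-ctl restart
+```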
+
### Praefect
Praefect is an experimental daemon that allows for replication of the Git data.
diff --git a/doc/administration/gitaly/praefect.md b/doc/administration/gitaly/praefect.md
index 7f314bc5e31..4e8dc533f44 100644
--- a/doc/administration/gitaly/praefect.md
+++ b/doc/administration/gitaly/praefect.md
@@ -1,359 +1,594 @@
-# Praefect
+# Praefect: High Availability
-NOTE: **Note:** Praefect is an experimental service, and for testing purposes only at
-this time.
+NOTE: **Note:** Praefect is an experimental service, and data loss is likely.
Praefect is an optional reverse-proxy for [Gitaly](../index.md) to manage a
-cluster of Gitaly nodes for high availability through replication.
-If a Gitaly node becomes unavailable, it will be possible to fail over to a
-warm Gitaly replica.
+cluster of Gitaly nodes for high availability. Initially, high availability
+will be implemented through asynchronous replication. If a Gitaly node becomes
+unavailable, it will be possible to fail over to a warm Gitaly replica.
The first minimal version will support:
- Eventual consistency of the secondary replicas.
-- Manual fail over from the primary to the secondary.
+- Automatic failover from the primary to the secondary.
+- Reporting of possible data loss if the replication queue is non-empty.
Follow the [HA Gitaly epic](https://gitlab.com/groups/gitlab-org/-/epics/1489)
for updates and roadmap.
-## Omnibus
+## Requirements for configuring Gitaly for High Availability
-### Architecture
+NOTE: **Note:** this reference architecture is not highly available because
+Praefect is a single point of failure.
-The most common architecture for Praefect is simplified in the diagram below:
+The minimal [alpha](https://about.gitlab.com/handbook/product/#alpha-beta-ga)
+reference architecture additionally requires:
-```mermaid
-graph TB
- GitLab --> Praefect;
- Praefect --- PostgreSQL;
- Praefect --> Gitaly1;
- Praefect --> Gitaly2;
- Praefect --> Gitaly3;
-```
+- 1 Praefect node
+- 1 PostgreSQL server (PostgreSQL 9.6 or newer)
+- 3 Gitaly nodes (1 primary, 2 secondary)
+
+![Alpha architecture diagram](img/praefect_architecture_v12_9.png)
+
+See the [design
+document](https://gitlab.com/gitlab-org/gitaly/-/blob/master/doc/design_ha.md)
+for implementation details.
+
+## Setup Instructions
+
+If you [installed](https://about.gitlab.com/install/) GitLab using the Omnibus
+package (highly recommended), follow the steps below:
-Where `GitLab` is the collection of clients that can request Git operations.
-The Praefect node has three storage nodes attached. Praefect itself doesn't
-store data, but connects to three Gitaly nodes, `Gitaly-1`, `Gitaly-2`, and `Gitaly-3`.
+1. [Preparation](#preparation)
+1. [Configuring the Praefect database](#postgresql)
+1. [Configuring the Praefect proxy/router](#praefect)
+1. [Configuring each Gitaly node](#gitaly) (once for each Gitaly node)
+1. [Updating the GitLab server configuration](#gitlab)
-In order to keep track of replication state, Praefect relies on a
-PostgreSQL database. This database is a single point of failure so you
-should use a highly available PostgreSQL server for this. GitLab
-itself needs a HA PostgreSQL server too, so you could optionally co-locate the Praefect
-SQL database on the PostgreSQL server you use for the rest of GitLab.
+### Preparation
-Praefect may be enabled on its own node or can be run on the GitLab server.
-In the example below we will use a separate server, but the optimal configuration
-for Praefect is still being determined.
+Before beginning, you should already have a working GitLab instance. [Learn how
+to install GitLab](https://about.gitlab.com/install/).
-Praefect will handle all Gitaly RPC requests to its child nodes. However, the child nodes
-will still need to communicate with the GitLab server via its internal API for authentication
-purposes.
+Provision a PostgreSQL server (PostgreSQL 9.6 or newer). Configuration through
+the GitLab Omnibus distribution is not yet supported. Follow this
+[issue](https://gitlab.com/gitlab-org/gitaly/issues/2476) for updates.
-### Setup
+Prepare all your new nodes by [installing
+GitLab](https://about.gitlab.com/install/).
-In this setup guide we will start by configuring Praefect, then its child
-Gitaly nodes, and lastly the GitLab server configuration.
+- 1 Praefect node (minimal storage required)
+- 3 Gitaly nodes (high CPU, high memory, fast storage)
+
+You will need the IP/host address for each node.
+
+1. `POSTGRESQL_SERVER_ADDRESS`: the IP/host address of the PostgreSQL server
+1. `PRAEFECT_SERVER_ADDRESS`: the IP/host address of the Praefect server
+1. `GITALY_SERVER_ADDRESS`: the IP/host address of each Gitaly node
#### Secrets
-We need to manage the following secrets and make them match across hosts:
-
-1. `GITLAB_SHELL_SECRET_TOKEN`: this is used by Git hooks to make
- callback HTTP API requests to GitLab when accepting a Git push. This
- secret is shared with GitLab Shell for legacy reasons.
-1. `PRAEFECT_EXTERNAL_TOKEN`: repositories hosted on your Praefect
- cluster can only be accessed by Gitaly clients that carry this
- token.
-1. `PRAEFECT_INTERNAL_TOKEN`: this token is used for replication
- traffic inside your Praefect cluster. This is distinct from
- `PRAEFECT_EXTERNAL_TOKEN` because Gitaly clients must not be able to
- access internal nodes of the Praefect cluster directly; that could
- lead to data loss.
+The communication between components is secured with different secrets, which
+are described below. Before you begin, generate a unique secret for each, and
+make note of it. This will make it easy to replace these placeholder tokens
+with secure tokens as you complete the setup process.
+
+1. `GITLAB_SHELL_SECRET_TOKEN`: this is used by Git hooks to make callback HTTP
+ API requests to GitLab when accepting a Git push. This secret is shared with
+ GitLab Shell for legacy reasons.
+1. `PRAEFECT_EXTERNAL_TOKEN`: repositories hosted on your Praefect cluster can
+ only be accessed by Gitaly clients that carry this token.
+1. `PRAEFECT_INTERNAL_TOKEN`: this token is used for replication traffic inside
+ your Praefect cluster. This is distinct from `PRAEFECT_EXTERNAL_TOKEN`
+ because Gitaly clients must not be able to access internal nodes of the
+ Praefect cluster directly; that could lead to data loss.
1. `PRAEFECT_SQL_PASSWORD`: this password is used by Praefect to connect to
- PostgreSQL.
+ PostgreSQL.
+1. `GRAFANA_PASSWORD`: this password is used to access the `admin`
+ account in the Grafana dashboards.
We will note in the instructions below where these secrets are required.
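+
+For example, one way to generate a random value for each of these placeholders
+(a sketch only, assuming `openssl` is available on the node; any source of
+strong random strings works equally well):
+
+```shell
+# Run once per secret and record each value somewhere safe
+openssl rand -hex 32
+```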
-#### Network addresses
+### PostgreSQL
-1. `POSTGRESQL_SERVER_ADDRESS`: the host name or IP address of your PostgreSQL server
+NOTE: **Note:** don't reuse the GitLab application database for the Praefect
+database.
-#### PostgreSQL
+To complete this section you will need:
-To set up a Praefect cluster you need a highly available PostgreSQL
-server. You need PostgreSQL 9.6 or newer. Praefect needs to have a SQL
-user with the right to create databases.
+- 1 Praefect node
+- 1 PostgreSQL server (PostgreSQL 9.6 or newer)
+ - An SQL user with permissions to create databases
-In the instructions below we assume you have administrative access to
-your PostgreSQL server via `psql`. Depending on your environment, you
-may also be able to do this via the web interface of your cloud
-platform, or via your configuration management system, etc.
+In this section, we will configure the PostgreSQL server from the Praefect
+node, using `psql`, which is installed by GitLab Omnibus.
-Below we assume that you have administrative access as the `postgres`
-user. First open a `psql` session as the `postgres` user:
+1. SSH into the **Praefect** node and log in as root:
-```shell
-/opt/gitlab/embedded/bin/psql -h POSTGRESQL_SERVER_ADDRESS -U postgres -d template1
-```
+ ```shell
+ sudo -i
+ ```
-Once you are connected, run the following command. Replace
-`PRAEFECT_SQL_PASSWORD` with the actual (random) password you
-generated for the `praefect` SQL user:
+1. Connect to the PostgreSQL server with administrative access. This is likely
+ the `postgres` user. The database `template1` is used because it is created
+ by default on all PostgreSQL servers.
-```sql
-CREATE ROLE praefect WITH LOGIN CREATEDB PASSWORD 'PRAEFECT_SQL_PASSWORD';
-\q
-```
+ ```shell
+ /opt/gitlab/embedded/bin/psql -U postgres -d template1 -h POSTGRESQL_SERVER_ADDRESS
+ ```
-Now connect as the `praefect` user to create the database. This has
-the side effect of verifying that you have access:
+ Create a new user `praefect` which will be used by Praefect. Replace
+ `PRAEFECT_SQL_PASSWORD` with the strong password you generated in the
+ preparation step.
-```shell
-/opt/gitlab/embedded/bin/psql -h POSTGRESQL_SERVER_ADDRESS -U praefect -d template1
-```
+ ```sql
+ CREATE ROLE praefect WITH LOGIN CREATEDB PASSWORD 'PRAEFECT_SQL_PASSWORD';
+ ```
-Once you have connected as the `praefect` user, run:
+1. Reconnect to the PostgreSQL server, this time as the `praefect` user:
-```sql
-CREATE DATABASE praefect_production WITH ENCODING=UTF8;
-\q
-```
+ ```shell
+ /opt/gitlab/embedded/bin/psql -U praefect -d template1 -h POSTGRESQL_SERVER_ADDRESS
+ ```
-#### Praefect
-
-On the Praefect node we disable all other services, including Gitaly. We list each
-Gitaly node that will be connected to Praefect as members of the `praefect` hash in `praefect['virtual_storages']`.
-
-In the example below, the Gitaly nodes are named `gitaly-N`. Note that one
-node is designated as primary by setting the primary to `true`.
-
-If you are using an uncrypted connection to Postgres, set `praefect['database_sslmode']` to false.
-
-If you are using an encrypted connection with a client certificate,
-`praefect['database_sslcert']` and `praefect['database_sslkey']` will need to be set.
-If you are using a custom CA, also set `praefect['database_sslrootcert']`:
-
-```ruby
-# /etc/gitlab/gitlab.rb on praefect server
-
-# Avoid running unnecessary services on the Gitaly server
-postgresql['enable'] = false
-redis['enable'] = false
-nginx['enable'] = false
-prometheus['enable'] = false
-grafana['enable'] = false
-unicorn['enable'] = false
-sidekiq['enable'] = false
-gitlab_workhorse['enable'] = false
-gitaly['enable'] = false
-
-# Prevent database connections during 'gitlab-ctl reconfigure'
-gitlab_rails['rake_cache_clear'] = false
-gitlab_rails['auto_migrate'] = false
-
-praefect['enable'] = true
-
-# Make Praefect accept connections on all network interfaces. You must use
-# firewalls to restrict access to this address/port.
-praefect['listen_addr'] = '0.0.0.0:2305'
-
-# Replace PRAEFECT_EXTERNAL_TOKEN with a real secret
-praefect['auth_token'] = 'PRAEFECT_EXTERNAL_TOKEN'
-
-# Replace each instance of PRAEFECT_INTERNAL_TOKEN below with a real
-# secret, distinct from PRAEFECT_EXTERNAL_TOKEN.
-# Name of storage hash must match storage name in git_data_dirs on GitLab server.
-praefect['virtual_storages'] = {
- 'praefect' => {
- 'gitaly-1' => {
- # Replace GITALY_URL_OR_IP below with the real address to connect to.
- 'address' => 'tcp://GITALY_URL_OR_IP:8075',
- 'token' => 'PRAEFECT_INTERNAL_TOKEN',
- 'primary' => true
- },
- 'gitaly-2' => {
- # Replace GITALY_URL_OR_IP below with the real address to connect to.
- 'address' => 'tcp://GITALY_URL_OR_IP:8075',
- 'token' => 'PRAEFECT_INTERNAL_TOKEN'
- },
- 'gitaly-3' => {
- # Replace GITALY_URL_OR_IP below with the real address to connect to.
- 'address' => 'tcp://GITALY_URL_OR_IP:8075',
- 'token' => 'PRAEFECT_INTERNAL_TOKEN'
- }
- }
-}
-
-# Replace POSTGRESQL_SERVER below with a real IP/host address of the database.
-praefect['database_host'] = 'POSTGRESQL_SERVER_ADDRESS'
-praefect['database_port'] = 5432
-praefect['database_user'] = 'praefect'
-# Replace PRAEFECT_SQL_PASSWORD below with a real password of the database.
-praefect['database_password'] = 'PRAEFECT_SQL_PASSWORD'
-praefect['database_dbname'] = 'praefect_production'
-
-# Uncomment the line below if you do not want to use an encrypted
-# connection to PostgreSQL
-# praefect['database_sslmode'] = 'disable'
-
-# Uncomment and modify these lines if you are using a TLS client
-# certificate to connect to PostgreSQL
-# praefect['database_sslcert'] = '/path/to/client-cert'
-# praefect['database_sslkey'] = '/path/to/client-key'
-
-# Uncomment and modify this line if your PostgreSQL server uses a custom
-# CA
-# praefect['database_sslrootcert'] = '/path/to/rootcert'
-```
+   Create a new database `praefect_production`. By creating the database while
+   connected as the `praefect` user, we are confident that the user has access.
-Replace `POSTGRESQL_SERVER_ADDRESS`, `PRAEFECT_EXTERNAL_TOKEN`, `PRAEFECT_INTERNAL_TOKEN`,
-and `PRAEFECT_SQL_PASSWORD` with their respective values.
+ ```sql
+ CREATE DATABASE praefect_production WITH ENCODING=UTF8;
+ ```
-Save the file and reconfigure Praefect:
+The database used by Praefect is now configured.
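+
+Optionally, you can confirm that the new database is reachable with the
+`praefect` credentials before moving on. This check is not part of the steps
+above, but it can save a round of debugging later:
+
+```shell
+# Optional check: print connection details for the new Praefect database
+/opt/gitlab/embedded/bin/psql -U praefect -d praefect_production -h POSTGRESQL_SERVER_ADDRESS -c '\conninfo'
+```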
-```shell
-sudo gitlab-ctl reconfigure
-```
+### Praefect
-After you reconfigure, verify that Praefect can reach PostgreSQL:
+To complete this section you will need:
-```shell
-sudo -u git /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml sql-ping
-```
+- [Configured PostgreSQL server](#postgresql), including:
+ - IP/host address (`POSTGRESQL_SERVER_ADDRESS`)
+ - password (`PRAEFECT_SQL_PASSWORD`)
-If the check fails, make sure you have followed the steps correctly. If you edit `/etc/gitlab/gitlab.rb`,
-remember to run `sudo gitlab-ctl reconfigure` again before trying the
-`sql-ping` command.
-
-#### Gitaly
-
-Next we will configure each Gitaly server assigned to Praefect. Configuration for these
-is the same as a normal standalone Gitaly server, except that we use storage names and
-auth tokens from Praefect instead of GitLab.
-
-Below is an example configuration for `gitaly-1`, the only difference for the
-other Gitaly nodes is the storage name under `git_data_dirs`.
-
-Note that `gitaly['auth_token']` matches the `token` value listed under `praefect['virtual_storages']`
-on the Praefect node.
-
-```ruby
-# /etc/gitlab/gitlab.rb on gitaly node inside praefect cluster
-
-# Avoid running unnecessary services on the Gitaly server
-postgresql['enable'] = false
-redis['enable'] = false
-nginx['enable'] = false
-prometheus['enable'] = false
-grafana['enable'] = false
-unicorn['enable'] = false
-sidekiq['enable'] = false
-gitlab_workhorse['enable'] = false
-prometheus_monitoring['enable'] = false
-
-# Prevent database connections during 'gitlab-ctl reconfigure'
-gitlab_rails['rake_cache_clear'] = false
-gitlab_rails['auto_migrate'] = false
-
-# Replace GITLAB_SHELL_SECRET_TOKEN below with real secret
-gitlab_shell['secret_token'] = 'GITLAB_SHELL_SECRET_TOKEN'
-
-# Configure the gitlab-shell API callback URL. Without this, `git push` will
-# fail. This can be your 'front door' GitLab URL or an internal load
-# balancer.
-# Possible values could be: 'http://10.23.101.53', 'https://gitlab.example.com',
-# etc. Please replace GITLAB_SERVER_ADDRESS with proper value and change schema
-# to 'https' in case you use encrypted connection.
-gitlab_rails['internal_api_url'] = 'http://GITLAB_SERVER_ADDRESS'
-
-# Replace PRAEFECT_INTERNAL_TOKEN below with a real secret.
-gitaly['auth_token'] = 'PRAEFECT_INTERNAL_TOKEN'
-
-# Make Gitaly accept connections on all network interfaces. You must use
-# firewalls to restrict access to this address/port.
-# Comment out following line if you only want to support TLS connections
-gitaly['listen_addr'] = "0.0.0.0:8075"
-
-git_data_dirs({
- # Update this to the name of this Gitaly server which will be later
- # exposed in the UI under "Admin area > Gitaly"
- "gitaly-1" => {
- "path" => "/var/opt/gitlab/git-data"
- }
-})
-```
+Praefect should be run on a dedicated node. Do not run Praefect on the
+application server, or a Gitaly node.
-Replace `GITLAB_SHELL_SECRET_TOKEN` and `PRAEFECT_INTERNAL_TOKEN`
-with their respective values.
+1. SSH into the **Praefect** node and log in as root:
-For more information on Gitaly server configuration, see our [Gitaly documentation](index.md#3-gitaly-server-configuration).
+ ```shell
+ sudo -i
+ ```
-When finished editing the configuration file for each Gitaly server, run the
-reconfigure command to put changes into effect:
+1. Disable all other services by editing `/etc/gitlab/gitlab.rb`:
-```shell
-sudo gitlab-ctl reconfigure
-```
+ ```ruby
+ # Disable all other services on the Praefect node
+ postgresql['enable'] = false
+ redis['enable'] = false
+ nginx['enable'] = false
+ prometheus['enable'] = false
+ grafana['enable'] = false
+ unicorn['enable'] = false
+ sidekiq['enable'] = false
+ gitlab_workhorse['enable'] = false
+ gitaly['enable'] = false
+
+ # Enable only the Praefect service
+ praefect['enable'] = true
+
+ # Prevent database connections during 'gitlab-ctl reconfigure'
+ gitlab_rails['rake_cache_clear'] = false
+ gitlab_rails['auto_migrate'] = false
+ ```
+
+1. Configure **Praefect** to listen on network interfaces by editing
+ `/etc/gitlab/gitlab.rb`:
+
+ ```ruby
+ # Make Praefect accept connections on all network interfaces.
+ # Use firewalls to restrict access to this address/port.
+ praefect['listen_addr'] = '0.0.0.0:2305'
+
+ # Enable Prometheus metrics access to Praefect. You must use firewalls
+ # to restrict access to this address/port.
+ praefect['prometheus_listen_addr'] = '0.0.0.0:9652'
+ ```
+
+1. Configure a strong `auth_token` for **Praefect** by editing
+ `/etc/gitlab/gitlab.rb`. This will be needed by clients outside the cluster
+   (like GitLab Shell) to communicate with the Praefect cluster:
+
+ ```ruby
+ praefect['auth_token'] = 'PRAEFECT_EXTERNAL_TOKEN'
+ ```
+
+1. Configure **Praefect** to connect to the PostgreSQL database by editing
+ `/etc/gitlab/gitlab.rb`.
+
+ You will need to replace `POSTGRESQL_SERVER_ADDRESS` with the IP/host address
+ of the database, and `PRAEFECT_SQL_PASSWORD` with the strong password set
+ above.
+
+ ```ruby
+ praefect['database_host'] = 'POSTGRESQL_SERVER_ADDRESS'
+ praefect['database_port'] = 5432
+ praefect['database_user'] = 'praefect'
+ praefect['database_password'] = 'PRAEFECT_SQL_PASSWORD'
+ praefect['database_dbname'] = 'praefect_production'
+ ```
+
+ If you want to use a TLS client certificate, the options below can be used:
+
+ ```ruby
+   # Connect to PostgreSQL using a TLS client certificate
+ # praefect['database_sslcert'] = '/path/to/client-cert'
+ # praefect['database_sslkey'] = '/path/to/client-key'
+
+ # Trust a custom certificate authority
+ # praefect['database_sslrootcert'] = '/path/to/rootcert'
+ ```
+
+ By default Praefect will refuse to make an unencrypted connection to
+ PostgreSQL. You can override this by uncommenting the following line:
+
+ ```ruby
+ # praefect['database_sslmode'] = 'disable'
+ ```
+
+1. Configure the **Praefect** cluster to connect to each Gitaly node in the
+ cluster by editing `/etc/gitlab/gitlab.rb`.
+
+ In the example below we have configured one cluster named `praefect`. This
+ cluster has three Gitaly nodes `gitaly-1`, `gitaly-2`, and `gitaly-3`, which
+ will be replicas of each other.
+
+ Replace `PRAEFECT_INTERNAL_TOKEN` with a strong secret, which will be used by
+ Praefect when communicating with Gitaly nodes in the cluster. This token is
+ distinct from the `PRAEFECT_EXTERNAL_TOKEN`.
+
+   Replace `GITALY_HOST` with the IP/host address of each Gitaly node.
+
+ More Gitaly nodes can be added to the cluster to increase the number of
+ replicas. More clusters can also be added for very large GitLab instances.
+
+ NOTE: **Note:** The `gitaly-1` node is currently denoted the primary. This
+   can be used to manually fail over from one node to another. This will be removed
+ in the future to allow for automatic failover.
+
+ ```ruby
+ # Name of storage hash must match storage name in git_data_dirs on GitLab
+ # server ('praefect') and in git_data_dirs on Gitaly nodes ('gitaly-1')
+ praefect['virtual_storages'] = {
+ 'praefect' => {
+ 'gitaly-1' => {
+ 'address' => 'tcp://GITALY_HOST:8075',
+ 'token' => 'PRAEFECT_INTERNAL_TOKEN',
+ 'primary' => true
+ },
+ 'gitaly-2' => {
+ 'address' => 'tcp://GITALY_HOST:8075',
+ 'token' => 'PRAEFECT_INTERNAL_TOKEN'
+ },
+ 'gitaly-3' => {
+ 'address' => 'tcp://GITALY_HOST:8075',
+ 'token' => 'PRAEFECT_INTERNAL_TOKEN'
+ }
+ }
+ }
+ ```
+
+1. Save the changes to `/etc/gitlab/gitlab.rb` and [reconfigure Praefect](../restart_gitlab.md#omnibus-gitlab-reconfigure):
+
+ ```shell
+ gitlab-ctl reconfigure
+ ```
+
+1. Verify that Praefect can reach PostgreSQL:
+
+ ```shell
+ sudo -u git /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml sql-ping
+ ```
+
+ If the check fails, make sure you have followed the steps correctly. If you
+ edit `/etc/gitlab/gitlab.rb`, remember to run `sudo gitlab-ctl reconfigure`
+ again before trying the `sql-ping` command.
+
+### Gitaly
+
+NOTE: **Note:** Complete these steps for **each** Gitaly node.
+
+To complete this section you will need:
+
+- [Configured Praefect node](#praefect)
+- 3 (or more) servers, with GitLab installed, to be configured as Gitaly nodes.
+  These should be dedicated nodes; do not run other services on these nodes.
+
+Every Gitaly server assigned to the Praefect cluster needs to be configured. The
+configuration is the same as a normal [standalone Gitaly server](../index.md),
+except:
+
+- the storage names are exposed to Praefect, not GitLab
+- the secret token is shared with Praefect, not GitLab
+
+The configuration of all Gitaly nodes in the Praefect cluster can be identical,
+because we rely on Praefect to route operations correctly.
+
+Particular attention should be paid to:
+
+- the `gitaly['auth_token']` configured in this section must match the `token`
+ value under `praefect['virtual_storages']` on the Praefect node. This was set
+ in the [previous section](#praefect). This document uses the placeholder
+ `PRAEFECT_INTERNAL_TOKEN` throughout.
+- the storage names in `git_data_dirs` configured in this section must match the
+ storage names under `praefect['virtual_storages']` on the Praefect node. This
+ was set in the [previous section](#praefect). This document uses `gitaly-1`,
+ `gitaly-2`, and `gitaly-3` as Gitaly storage names.
+
+For more information on Gitaly server configuration, see our [Gitaly
+documentation](index.md#3-gitaly-server-configuration).
+
+1. SSH into the **Gitaly** node and log in as root:
+
+ ```shell
+ sudo -i
+ ```
+
+1. Disable all other services by editing `/etc/gitlab/gitlab.rb`:
+
+ ```ruby
+    # Disable all other services on the Gitaly node
+ postgresql['enable'] = false
+ redis['enable'] = false
+ nginx['enable'] = false
+ prometheus['enable'] = false
+ grafana['enable'] = false
+ unicorn['enable'] = false
+ sidekiq['enable'] = false
+ gitlab_workhorse['enable'] = false
+ prometheus_monitoring['enable'] = false
+
+    # Enable only the Gitaly service
+ gitaly['enable'] = true
+
+ # Prevent database connections during 'gitlab-ctl reconfigure'
+ gitlab_rails['rake_cache_clear'] = false
+ gitlab_rails['auto_migrate'] = false
+ ```
+
+1. Configure **Gitaly** to listen on network interfaces by editing
+ `/etc/gitlab/gitlab.rb`:
+
+ ```ruby
+ # Make Gitaly accept connections on all network interfaces.
+ # Use firewalls to restrict access to this address/port.
+ gitaly['listen_addr'] = '0.0.0.0:8075'
+
+ # Enable Prometheus metrics access to Gitaly. You must use firewalls
+ # to restrict access to this address/port.
+ gitaly['prometheus_listen_addr'] = '0.0.0.0:9236'
+ ```
+
+1. Configure a strong `auth_token` for **Gitaly** by editing
+ `/etc/gitlab/gitlab.rb`. This will be needed by clients to communicate with
+   this Gitaly node. Typically, this token will be the same for all Gitaly
+ nodes.
+
+ ```ruby
+ gitaly['auth_token'] = 'PRAEFECT_INTERNAL_TOKEN'
+ ```
-When all Gitaly servers are configured, you can run the Praefect connection
+1. Configure the GitLab Shell `secret_token` and `internal_api_url`, which are
+   needed for `git push` operations.
+
+   If you have already configured [Gitaly on its own server](../index.md), these
+   values may already be set.
+
+ ```ruby
+ gitlab_shell['secret_token'] = 'GITLAB_SHELL_SECRET_TOKEN'
+
+ # Configure the gitlab-shell API callback URL. Without this, `git push` will
+ # fail. This can be your front door GitLab URL or an internal load balancer.
+ # Examples: 'https://example.gitlab.com', 'http://1.2.3.4'
+ gitlab_rails['internal_api_url'] = 'GITLAB_SERVER_URL'
+ ```
+
+1. Configure the storage location for Git data by setting `git_data_dirs` in
+ `/etc/gitlab/gitlab.rb`. Each Gitaly node should have a unique storage name
+   (for example, `gitaly-1`).
+
+ Instead of configuring `git_data_dirs` uniquely for each Gitaly node, it is
+   often easier to include the configuration for all Gitaly nodes on every
+   Gitaly node. This is supported because the Praefect `virtual_storages`
+   configuration maps each storage name (for example, `gitaly-1`) to a specific node, and
+ requests are routed accordingly. This means every Gitaly node in your fleet
+ can share the same configuration.
+
+ ```ruby
+ # You can include the data dirs for all nodes in the same config, because
+ # Praefect will only route requests according to the addresses provided in the
+ # prior step.
+ git_data_dirs({
+ "gitaly-1" => {
+ "path" => "/var/opt/gitlab/git-data"
+ },
+ "gitaly-2" => {
+ "path" => "/var/opt/gitlab/git-data"
+ },
+ "gitaly-3" => {
+ "path" => "/var/opt/gitlab/git-data"
+ }
+ })
+ ```
+
+1. Save the changes to `/etc/gitlab/gitlab.rb` and [reconfigure Gitaly](../restart_gitlab.md#omnibus-gitlab-reconfigure):
+
+ ```shell
+ gitlab-ctl reconfigure
+ ```
+
+1. To ensure that Gitaly [has updated its Prometheus listen address](https://gitlab.com/gitlab-org/gitaly/-/issues/2521), [restart Gitaly](../restart_gitlab.md#omnibus-gitlab-restart):
+
+ ```shell
+ gitlab-ctl restart gitaly
+ ```
+
+**Complete these steps for each Gitaly node!**
+
+After all Gitaly nodes are configured, you can run the Praefect connection
checker to verify Praefect can connect to all Gitaly servers in the Praefect
-config. This can be done by running the following command on the Praefect
-server:
+config.
-```shell
-sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml dial-nodes
-```
+1. SSH into the **Praefect** node and run the Praefect connection checker:
-#### GitLab
-
-When Praefect is running, it should be exposed as a storage to GitLab. This
-is done through setting the `git_data_dirs`. Assuming the default storage
-is present, there should be two storages available to GitLab:
-
-```ruby
-# /etc/gitlab/gitlab.rb on gitlab server
-
-# Replace PRAEFECT_URL_OR_IP below with real address Praefect can be accessed at.
-# Replace PRAEFECT_EXTERNAL_TOKEN below with real secret.
-git_data_dirs({
- "default" => {
- "path" => "/var/opt/gitlab/git-data"
- },
- "praefect" => {
- "gitaly_address" => "tcp://PRAEFECT_URL_OR_IP:2305",
- "gitaly_token" => 'PRAEFECT_EXTERNAL_TOKEN'
- }
-})
-
-# Replace GITLAB_SHELL_SECRET_TOKEN below with real secret
-gitlab_shell['secret_token'] = 'GITLAB_SHELL_SECRET_TOKEN'
-
-# Possible values could be: 'http://10.23.101.53', 'https://gitlab.example.com',
-# etc. Please replace GITLAB_SERVER_ADDRESS with proper value and change schema
-# to 'https' in case you use encrypted connection. For more info please refer
-# to https://docs.gitlab.com/omnibus/settings/configuration.html#configuring-the-external-url-for-gitlab
-external_url "http://<GITLAB_SERVER_ADDRESS>"
-```
+ ```shell
+ sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml dial-nodes
+ ```
-Replace `GITLAB_SHELL_SECRET_TOKEN` and `PRAEFECT_EXTERNAL_TOKEN`
-with their respective values.
+### GitLab
-Note that the storage name used is the same as the `praefect['virtual_storage_name']` set
-on the Praefect node.
+To complete this section you will need:
-Save your changes and reconfigure GitLab:
+- [Configured Praefect node](#praefect)
+- [Configured Gitaly nodes](#gitaly)
-```shell
-sudo gitlab-ctl reconfigure
-```
+The Praefect cluster needs to be exposed as a storage location to the GitLab
+application. This is done by updating the `git_data_dirs`.
+
+Particular attention should be paid to:
+
+- the storage name added to `git_data_dirs` in this section must match the
+ storage name under `praefect['virtual_storages']` on the Praefect node. This
+ was set in the [Praefect](#praefect) section of this guide. This document uses
+ `praefect` as the Praefect storage name.
+
+1. SSH into the **GitLab** node and log in as root:
+
+ ```shell
+ sudo -i
+ ```
+
+1. Add the Praefect cluster as a storage location by editing
+ `/etc/gitlab/gitlab.rb`.
+
+ You will need to replace:
+
+ - `PRAEFECT_HOST` with the IP address or hostname of the Praefect node
+ - `PRAEFECT_EXTERNAL_TOKEN` with the real secret
+
+ ```ruby
+ git_data_dirs({
+ "default" => {
+ "path" => "/var/opt/gitlab/git-data"
+ },
+ "praefect" => {
+ "gitaly_address" => "tcp://PRAEFECT_HOST:2305",
+ "gitaly_token" => 'PRAEFECT_EXTERNAL_TOKEN'
+ }
+ })
+ ```
+
+1. Configure the `gitlab_shell['secret_token']` so that callbacks from Gitaly
+ nodes during a `git push` are properly authenticated by editing
+ `/etc/gitlab/gitlab.rb`:
+
+ You will need to replace `GITLAB_SHELL_SECRET_TOKEN` with the real secret.
+
+ ```ruby
+ gitlab_shell['secret_token'] = 'GITLAB_SHELL_SECRET_TOKEN'
+ ```
+
+1. Configure the `external_url` so that files can be served by GitLab from the
+   correct endpoint by editing `/etc/gitlab/gitlab.rb`:
+
+   You will need to replace `GITLAB_SERVER_URL` with the real URL that your
+   current GitLab instance is served on:
+
+ ```ruby
+ external_url 'GITLAB_SERVER_URL'
+ ```
-Run `sudo gitlab-rake gitlab:gitaly:check` to confirm that GitLab can reach Praefect.
+1. Add Prometheus monitoring settings by editing `/etc/gitlab/gitlab.rb`.
-### Testing Praefect
+ You will need to replace:
+
+ - `PRAEFECT_HOST` with the IP address or hostname of the Praefect node
+ - `GITALY_HOST` with the IP address or hostname of each Gitaly node
+
+ ```ruby
+ prometheus['scrape_configs'] = [
+ {
+ 'job_name' => 'praefect',
+ 'static_configs' => [
+ 'targets' => [
+ 'PRAEFECT_HOST:9652' # praefect
+ ]
+ ]
+ },
+ {
+ 'job_name' => 'praefect-gitaly',
+ 'static_configs' => [
+ 'targets' => [
+ 'GITALY_HOST:9236', # gitaly-1
+ 'GITALY_HOST:9236', # gitaly-2
+ 'GITALY_HOST:9236', # gitaly-3
+ ]
+ ]
+ }
+ ]
+
+ grafana['disable_login_form'] = false
+ ```
+
+1. Save the changes to `/etc/gitlab/gitlab.rb` and [reconfigure GitLab](../restart_gitlab.md#omnibus-gitlab-reconfigure):
+
+ ```shell
+ gitlab-ctl reconfigure
+ ```
+
+1. Verify that GitLab can reach Praefect:
+
+ ```shell
+ gitlab-rake gitlab:gitaly:check
+ ```
+
+1. Set the Grafana admin password. This command will prompt you to enter a new password:
+
+ ```shell
+ gitlab-ctl set-grafana-password
+ ```
+
+1. Update the **Repository storage** settings from **Admin Area > Settings >
+ Repository > Repository storage** to make the newly configured Praefect
+ cluster the storage location for new Git repositories.
+
+ - Deselect the **default** storage location
+ - Select the **praefect** storage location
+
+1. Verify everything is still working by creating a new project. Check the
+   "Initialize repository with a README" box so that there is content in the
+   repository that can be viewed. If the project is created and you can see the
+ README file, it works!
+
+1. Inspect metrics by browsing to `/-/grafana` on your GitLab server.
+ Log in with `admin` / `GRAFANA_PASSWORD`. Go to 'Explore' and query
+ `gitlab_build_info` to verify that you are getting metrics from all your
+ machines.
+
+Congratulations! You have configured your Praefect cluster.
+
+## Migrating existing repositories to Praefect
+
+If your GitLab instance already has repositories, these won't be migrated
+automatically.
+
+Repositories may be moved from one storage location to another using the
+[Projects API](../../api/projects.html#edit-project):
+
+```shell
+curl --request PUT \
+ --header "PRIVATE-TOKEN: <your_access_token>" \
+ --data "repository_storage=praefect" \
+ https://example.gitlab.com/api/v4/projects/123
+```
-To test Praefect, first set it as the default storage node for new projects
-using **Admin Area > Settings > Repository > Repository storage**. Next,
-create a new project and check the "Initialize repository with a README" box.
+## Debugging Praefect
If you receive an error, check `/var/log/gitlab/gitlab-rails/production.log`.
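+
+For example, you can tail the log while reproducing the error (a simple sketch
+using the path mentioned above):
+
+```shell
+sudo tail -f /var/log/gitlab/gitlab-rails/production.log
+```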
diff --git a/doc/administration/gitaly/reference.md b/doc/administration/gitaly/reference.md
index 2c5e54743c3..6b6919247fe 100644
--- a/doc/administration/gitaly/reference.md
+++ b/doc/administration/gitaly/reference.md
@@ -233,7 +233,7 @@ The following values configure logging in Gitaly under the `[logging]` section.
| `sentry_environment` | string | no | [Sentry Environment](https://docs.sentry.io/enriching-error-data/environments/) for exception monitoring. |
| `ruby_sentry_dsn` | string | no | Sentry DSN for `gitaly-ruby` exception monitoring. |
-While the main Gitaly application logs go to stdout, there are some extra log
+While the main Gitaly application logs go to `stdout`, there are some extra log
files that go to a configured directory, like the GitLab Shell logs.
GitLab Shell does not support `panic` or `trace` level logs. `panic` will fall
back to `error`, while `trace` will fall back to `debug`. Any other invalid log
diff --git a/doc/administration/high_availability/README.md b/doc/administration/high_availability/README.md
index 2c2fc075dbe..c74beb11241 100644
--- a/doc/administration/high_availability/README.md
+++ b/doc/administration/high_availability/README.md
@@ -4,210 +4,124 @@ type: reference, concepts
# Scaling and High Availability
-GitLab supports a number of options for scaling your self-managed instance and configuring high availability (HA).
-The solution you choose will be based on the level of scalability and
-availability you require. The easiest solutions are scalable, but not necessarily
-highly available.
-
-GitLab provides a service that is essential to most organizations: it
-enables people to collaborate on code in a timely fashion. Any downtime should
-therefore be short and planned. Due to the distributed nature
-of Git, developers can continue to commit code locally even when GitLab is not
-available. However, some GitLab features such as the issue tracker and
-continuous integration are not available when GitLab is down.
-If you require all GitLab functionality to be highly available,
-consider the options outlined below.
-
-**Keep in mind that all highly-available solutions come with a trade-off between
-cost/complexity and uptime**. The more uptime you want, the more complex the
-solution. And the more complex the solution, the more work is involved in
-setting up and maintaining it. High availability is not free and every HA
-solution should balance the costs against the benefits.
-
-There are many options when choosing a highly-available GitLab architecture. We
-recommend engaging with GitLab Support to choose the best architecture for your
-use case. This page contains recommendations based on
-experience with GitLab.com and internal scale testing.
+GitLab supports a number of options for larger self-managed instances to
+ensure that they are scalable and highly available. While these needs can be tackled
+individually, they typically go hand in hand: a performant scalable environment
+will have availability by default, as its components are separated and pooled.
+
+On this page, we present a maturity model for a progression from simple to complex
+GitLab installations as your GitLab usage evolves. For larger setups we give several recommended
+architectures based on experience with GitLab.com and internal scale
+testing that aim to achieve the right balance between both scalability
+and availability.
For detailed insight into how GitLab scales and configures GitLab.com, you can
watch [this 1 hour Q&A](https://www.youtube.com/watch?v=uCU8jdYzpac)
-with [John Northrup](https://gitlab.com/northrup), and live questions coming in from some of our customers.
-
-## GitLab Components
-
-The following components need to be considered for a scaled or highly-available
-environment. In many cases, components can be combined on the same nodes to reduce
-complexity.
-
-- GitLab application nodes (Unicorn / Puma, Workhorse) - Web-requests (UI, API, Git over HTTP)
-- Sidekiq - Asynchronous/Background jobs
-- PostgreSQL - Database
- - Consul - Database service discovery and health checks/failover
- - PgBouncer - Database pool manager
-- Redis - Key/Value store (User sessions, cache, queue for Sidekiq)
- - Sentinel - Redis health check/failover manager
-- Gitaly - Provides high-level storage and RPC access to Git repositories
-- S3 Object Storage service[^4] and / or NFS storage servers[^5] for entities such as Uploads, Artifacts, LFS Objects, etc...
-- Load Balancer[^6] - Main entry point and handles load balancing for the GitLab application nodes.
-- Monitor - Prometheus and Grafana monitoring with auto discovery.
-
-## Scalable Architecture Examples
-
-When an organization reaches a certain threshold it will be necessary to scale
-the GitLab instance. Still, true high availability may not be necessary. There
-are options for scaling GitLab instances relatively easily without incurring the
-infrastructure and maintenance costs of full high availability.
-
-### Basic Scaling
-
-This is the simplest form of scaling and will work for the majority of
-cases. Backend components such as PostgreSQL, Redis, and storage are offloaded
-to their own nodes while the remaining GitLab components all run on 2 or more
-application nodes.
-
-This form of scaling also works well in a cloud environment when it is more
-cost effective to deploy several small nodes rather than a single
-larger one.
-
-- 1 PostgreSQL node
-- 1 Redis node
-- 1 Gitaly node
-- 1 or more Object Storage services[^4] and / or NFS storage server[^5]
-- 2 or more GitLab application nodes (Unicorn / Puma, Workhorse, Sidekiq)
-- 1 or more Load Balancer nodes[^6]
-- 1 Monitoring node (Prometheus, Grafana)
-
-#### Installation Instructions
-
-Complete the following installation steps in order. A link at the end of each
-section will bring you back to the Scalable Architecture Examples section so
-you can continue with the next step.
-
-1. [Load Balancer(s)](load_balancer.md)[^6]
-1. [Consul](consul.md)
-1. [PostgreSQL](database.md#postgresql-in-a-scaled-environment) with [PgBouncer](pgbouncer.md)
-1. [Redis](redis.md#redis-in-a-scaled-environment)
-1. [Gitaly](gitaly.md) (recommended) and / or [NFS](nfs.md)[^5]
-1. [GitLab application nodes](gitlab.md)
- - With [Object Storage service enabled](../gitaly/index.md#eliminating-nfs-altogether)[^4]
-1. [Monitoring node (Prometheus and Grafana)](monitoring_node.md)
-
-### Full Scaling
-
-For very large installations, it might be necessary to further split components
-for maximum scalability. In a fully-scaled architecture, the application node
-is split into separate Sidekiq and Unicorn/Workhorse nodes. One indication that
-this architecture is required is if Sidekiq queues begin to periodically increase
-in size, indicating that there is contention or there are not enough resources.
-
-- 1 or more PostgreSQL nodes
-- 1 or more Redis nodes
-- 1 or more Gitaly storage servers
-- 1 or more Object Storage services[^4] and / or NFS storage server[^5]
-- 2 or more Sidekiq nodes
-- 2 or more GitLab application nodes (Unicorn / Puma, Workhorse, Sidekiq)
-- 1 or more Load Balancer nodes[^6]
-- 1 Monitoring node (Prometheus, Grafana)
-
-## High Availability Architecture Examples
-
-When organizations require scaling *and* high availability, the following
-architectures can be utilized. As the introduction section at the top of this
-page mentions, there is a tradeoff between cost/complexity and uptime. Be sure
-this complexity is absolutely required before taking the step into full
-high availability.
-
-For all examples below, we recommend running Consul and Redis Sentinel separately
-from the services they monitor. If Consul is running on PostgreSQL nodes or Sentinel on
-Redis nodes, there is a potential that high resource usage by PostgreSQL or
-Redis could prevent communication between the other Consul and Sentinel nodes.
-This may lead to the other nodes believing a failure has occurred and initiating
-automated failover. Isolating Consul and Redis Sentinel from the services they monitor
-reduces the chances of a false positive that a failure has occurred.
-
-The examples below do not address high availability of NFS for objects. We recommend a
-S3 Object Storage service[^4] is used where possible over NFS but it's still required in
-certain cases[^5]. Where NFS is to be used some enterprises have access to NFS appliances
-that manage availability and this would be best case scenario.
-
-There are many options in between each of these examples. Work with GitLab Support
-to understand the best starting point for your workload and adapt from there.
-
-### Horizontal
-
-This is the simplest form of high availability and scaling. It requires the
-fewest number of individual servers (virtual or physical) but does have some
-trade-offs and limits.
-
-This architecture will work well for many GitLab customers. Larger customers
-may begin to notice certain events cause contention/high load - for example,
-cloning many large repositories with binary files, high API usage, a large
-number of enqueued Sidekiq jobs, and so on. If this happens, you should consider
-moving to a hybrid or fully distributed architecture depending on what is causing
-the contention.
-
-- 3 PostgreSQL nodes
-- 3 Redis nodes
-- 3 Consul / Sentinel nodes
-- 2 or more GitLab application nodes (Unicorn / Puma, Workhorse, Sidekiq)
-- 1 Gitaly storage servers
-- 1 Object Storage service[^4] and / or NFS storage server[^5]
-- 1 or more Load Balancer nodes[^6]
-- 1 Monitoring node (Prometheus, Grafana)
-
-![Horizontal architecture diagram](img/horizontal.png)
-
-### Hybrid
-
-In this architecture, certain components are split on dedicated nodes so high
-resource usage of one component does not interfere with others. In larger
-environments this is a good architecture to consider if you foresee or do have
-contention due to certain workloads.
-
-- 3 PostgreSQL nodes
-- 1 PgBouncer node
-- 3 Redis nodes
-- 3 Consul / Sentinel nodes
-- 2 or more Sidekiq nodes
-- 2 or more GitLab application nodes (Unicorn / Puma, Workhorse, Sidekiq)
-- 1 Gitaly storage servers
-- 1 Object Storage service[^4] and / or NFS storage server[^5]
-- 1 or more Load Balancer nodes[^6]
-- 1 Monitoring node (Prometheus, Grafana)
-
-![Hybrid architecture diagram](img/hybrid.png)
-
-### Fully Distributed
-
-This architecture scales to hundreds of thousands of users and projects and is
-the basis of the GitLab.com architecture. While this scales well it also comes
-with the added complexity of many more nodes to configure, manage, and monitor.
-
-- 3 PostgreSQL nodes
-- 1 or more PgBouncer nodes (with associated internal load balancers)
-- 4 or more Redis nodes (2 separate clusters for persistent and cache data)
-- 3 Consul nodes
-- 3 Sentinel nodes
-- Multiple dedicated Sidekiq nodes (Split into real-time, best effort, ASAP,
- CI Pipeline and Pull Mirror sets)
-- 2 or more Git nodes (Git over SSH/Git over HTTP)
-- 2 or more API nodes (All requests to `/api`)
-- 2 or more Web nodes (All other web requests)
-- 2 or more Gitaly storage servers
-- 1 or more Object Storage services[^4] and / or NFS storage servers[^5]
-- 1 or more Load Balancer nodes[^6]
-- 1 Monitoring node (Prometheus, Grafana)
-
-![Fully Distributed architecture diagram](img/fully-distributed.png)
-
-## Reference Architecture Recommendations
-
-The Support and Quality teams build, performance test, and validate Reference
-Architectures that support large numbers of users. The specifications below are
-a representation of this work so far and may be adjusted in the future based on
-additional testing and iteration.
-
-The architectures have been tested with specific coded workloads, and the
+with [John Northrup](https://gitlab.com/northrup), and live questions coming
+in from some of our customers.
+
+## Maturity levels
+
+### Level 1: Single-node Omnibus installation
+
+This solution is appropriate for many teams that have a single server at their disposal. With automatic backup of the GitLab repositories, configuration, and the database, this can be an optimal solution if you don't have strict availability requirements.
+
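+As a hedged sketch, backup retention and optional remote upload of backup archives can be tuned in `/etc/gitlab/gitlab.rb`. The values and bucket name below are illustrative, and the backup itself is usually scheduled with cron (for example, running `gitlab-rake gitlab:backup:create`), as described in the backup documentation linked below:
+
+```ruby
+# Keep local backup archives for seven days (value is in seconds)
+gitlab_rails['backup_keep_time'] = 604800
+
+# Optionally also upload backup archives to remote/object storage
+# gitlab_rails['backup_upload_connection'] = { 'provider' => 'AWS', 'region' => 'eu-central-1' }
+# gitlab_rails['backup_upload_remote_directory'] = 'gitlab-backups'   # placeholder bucket name
+```
+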
+This configuration is supported in [GitLab Starter, Premium and Ultimate](https://about.gitlab.com/pricing/).
+
+References:
+
+- [Installation Docs](../../install/README.md)
+- [Backup/Restore Docs](https://docs.gitlab.com/omnibus/settings/backups.html#backup-and-restore-omnibus-gitlab-configuration)
+
+### Level 2: Multiple application servers
+
+By separating components, you gain a number of advantages compared to a single-node setup. Namely, you can:
+
+- Increase the number of users
+- Enable zero-downtime upgrades
+- Increase availability
+
+Additional application nodes will handle frontend traffic, with a load balancer in front to distribute traffic across those nodes. Meanwhile, each application node connects to a shared file server and database systems on the back end. This way, if one of the application servers fails, the workflow is not interrupted.
+
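+As a rough illustration, a minimal `/etc/gitlab/gitlab.rb` sketch for one such application node might look like the following. The hostnames and password are placeholders, and it assumes PostgreSQL and Redis run on separate back-end nodes:
+
+```ruby
+external_url 'http://gitlab.example.com'
+
+# Use the shared back-end services instead of the bundled ones
+postgresql['enable'] = false
+redis['enable'] = false
+
+gitlab_rails['db_host'] = 'db.internal.example.com'        # placeholder hostname
+gitlab_rails['db_password'] = 'DB_PASSWORD'                # placeholder
+gitlab_rails['redis_host'] = 'redis.internal.example.com'  # placeholder hostname
+```
+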
+This configuration is supported in [GitLab Starter, Premium and Ultimate](https://about.gitlab.com/pricing/).
+
+References:
+
+- [High Availability Reference Architectures](#reference-architectures), without HA components
+
+### Level 3: Highly Available
+
+By adding automatic failover for database systems, we can enable higher uptime, at the cost of an additional layer of complexity.
+
+This configuration is supported in [GitLab Premium and Ultimate](https://about.gitlab.com/pricing/).
+
+References:
+
+- [High Availability Reference Architectures](#reference-architectures)
+
+### Level 4: GitLab Geo
+
+GitLab Geo allows you to replicate your GitLab instance to other geographical locations as read-only, fully operational instances that can also be promoted in case of disaster.
+
+This configuration is supported in [GitLab Premium and Ultimate](https://about.gitlab.com/pricing/).
+
+References:
+
+- [Geo Documentation](../geo/replication/index.md)
+- [GitLab Geo with a highly available configuration](../geo/replication/high_availability.md)
+
+## Recommended setups based on number of users
+
+- 1 - 1000 Users: A single-node [Omnibus](https://docs.gitlab.com/omnibus/) setup with frequent backups. Refer to the [requirements page](../../install/requirements.md) for further details of the specs you will require.
+- 1000 - 10000 Users: A scaled environment based on one of our [Reference Architectures](#reference-architectures), without the HA components applied. This can be a reasonable step towards a fully HA environment.
+- 2000 - 50000+ Users: A scaled HA environment based on one of our [Reference Architectures](#reference-architectures) below.
+
+## GitLab components and configuration instructions
+
+The GitLab application depends on the following [components](../../development/architecture.md#component-diagram).
+It can also depend on several third party services, depending on
+your environment setup. Here we'll detail both, in the order in which
+you would typically configure them, along with our recommendations for
+their use and configuration.
+
+### Third party services
+
+Here are details of several third party services that a typical environment
+will depend on. The services can be provided by numerous applications
+or providers, and further advice can be given on how best to select them.
+These should be configured first, before the [GitLab components](#gitlab-components).
+
+| Component | Description | Configuration instructions |
+|--------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------|
+| [Load Balancer(s)](load_balancer.md)[^6] | Handles load balancing for the GitLab nodes where required | [Load balancer HA configuration](load_balancer.md) |
+| [Cloud Object Storage service](object_storage.md)[^4] | Recommended store for shared data objects | [Cloud Object Storage configuration](object_storage.md) |
+| [NFS](nfs.md)[^5] [^7] | Shared disk storage service. Can be used as an alternative for Gitaly or Object Storage. Required for GitLab Pages | [NFS configuration](nfs.md) |
+
+### GitLab components
+
+Next are all of the components provided directly by GitLab. As mentioned
+earlier, they are presented in the typical order you would configure
+them.
+
+| Component | Description | Configuration instructions |
+|---------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------------|---------------------------------------------------------------|
+| [Consul](../../development/architecture.md#consul)[^3] | Service discovery and health checks/failover | [Consul HA configuration](consul.md) **(PREMIUM ONLY)** |
+| [PostgreSQL](../../development/architecture.md#postgresql) | Database | [Database HA configuration](database.md) |
+| [PgBouncer](../../development/architecture.md#pgbouncer) | Database Pool Manager | [PgBouncer HA configuration](pgbouncer.md) **(PREMIUM ONLY)** |
+| [Redis](../../development/architecture.md#redis)[^3] with Redis Sentinel | Key/Value store for shared data with HA watcher service | [Redis HA configuration](redis.md) |
+| [Gitaly](../../development/architecture.md#gitaly)[^2] [^5] [^7] | Recommended high-level storage for Git repository data | [Gitaly HA configuration](gitaly.md) |
+| [Sidekiq](../../development/architecture.md#sidekiq) | Asynchronous/Background jobs | |
+| [GitLab application nodes](../../development/architecture.md#unicorn)[^1] | (Unicorn / Puma, Workhorse) - Web-requests (UI, API, Git over HTTP) | [GitLab app HA/scaling configuration](gitlab.md) |
+| [Prometheus](../../development/architecture.md#prometheus) and [Grafana](../../development/architecture.md#grafana) | GitLab environment monitoring | [Monitoring node for scaling/HA](monitoring_node.md) |
+
+In some cases, components can also be combined on the same nodes to reduce complexity.
+
+## Reference architectures
+
+In this section we'll detail the Reference Architectures that can support large numbers
+of users. These were built, tested and verified by our Quality and Support teams.
+
+Testing was done with our GitLab Performance Tool at specific coded workloads, and the
throughputs used for testing were calculated based on sample customer data. We
test each endpoint type with the following number of requests per second (RPS)
per 1000 users:
@@ -223,113 +137,108 @@ how much automation you use, mirroring, and repo/change size. Additionally the
shown memory values are given directly by [GCP machine types](https://cloud.google.com/compute/docs/machine-types).
On different cloud vendors a best effort like for like can be used.
-### 2,000 User Configuration
+### 2,000 user configuration
-- **Supported Users (approximate):** 2,000
-- **Test RPS Rates:** API: 40 RPS, Web: 4 RPS, Git: 4 RPS
-- **Known Issues:** For the latest list of known performance issues head
-[here](https://gitlab.com/gitlab-org/gitlab/issues?label_name%5B%5D=Quality%3Aperformance-issues).
+- **Supported users (approximate):** 2,000
+- **Test RPS rates:** API: 40 RPS, Web: 4 RPS, Git: 4 RPS
+- **Known issues:** [List of known performance issues](https://gitlab.com/gitlab-org/gitlab/issues?label_name%5B%5D=Quality%3Aperformance-issues)
-| Service | Nodes | Configuration | GCP type |
+| Service | Nodes | Configuration[^8] | GCP type |
| ----------------------------|-------|-----------------------|---------------|
| GitLab Rails[^1] | 3 | 8 vCPU, 7.2GB Memory | n1-highcpu-8 |
| PostgreSQL | 3 | 2 vCPU, 7.5GB Memory | n1-standard-2 |
| PgBouncer | 3 | 2 vCPU, 1.8GB Memory | n1-highcpu-2 |
-| Gitaly[^2] [^7] | X | 4 vCPU, 15GB Memory | n1-standard-4 |
+| Gitaly[^2] [^5] [^7] | X | 4 vCPU, 15GB Memory | n1-standard-4 |
| Redis[^3] | 3 | 2 vCPU, 7.5GB Memory | n1-standard-2 |
| Consul + Sentinel[^3] | 3 | 2 vCPU, 1.8GB Memory | n1-highcpu-2 |
| Sidekiq | 4 | 2 vCPU, 7.5GB Memory | n1-standard-2 |
-| S3 Object Storage[^4] | - | - | - |
+| Cloud Object Storage[^4] | - | - | - |
| NFS Server[^5] [^7] | 1 | 4 vCPU, 3.6GB Memory | n1-highcpu-4 |
| Monitoring node | 1 | 2 vCPU, 1.8GB Memory | n1-highcpu-2 |
| External load balancing node[^6] | 1 | 2 vCPU, 1.8GB Memory | n1-highcpu-2 |
| Internal load balancing node[^6] | 1 | 2 vCPU, 1.8GB Memory | n1-highcpu-2 |
-### 5,000 User Configuration
+### 5,000 user configuration
-- **Supported Users (approximate):** 5,000
-- **Test RPS Rates:** API: 100 RPS, Web: 10 RPS, Git: 10 RPS
-- **Known Issues:** For the latest list of known performance issues head
-[here](https://gitlab.com/gitlab-org/gitlab/issues?label_name%5B%5D=Quality%3Aperformance-issues).
+- **Supported users (approximate):** 5,000
+- **Test RPS rates:** API: 100 RPS, Web: 10 RPS, Git: 10 RPS
+- **Known issues:** [List of known performance issues](https://gitlab.com/gitlab-org/gitlab/issues?label_name%5B%5D=Quality%3Aperformance-issues)
-| Service | Nodes | Configuration | GCP type |
+| Service | Nodes | Configuration[^8] | GCP type |
| ----------------------------|-------|-----------------------|---------------|
| GitLab Rails[^1] | 3 | 16 vCPU, 14.4GB Memory | n1-highcpu-16 |
| PostgreSQL | 3 | 2 vCPU, 7.5GB Memory | n1-standard-2 |
| PgBouncer | 3 | 2 vCPU, 1.8GB Memory | n1-highcpu-2 |
-| Gitaly[^2] [^7] | X | 8 vCPU, 30GB Memory | n1-standard-8 |
+| Gitaly[^2] [^5] [^7] | X | 8 vCPU, 30GB Memory | n1-standard-8 |
| Redis[^3] | 3 | 2 vCPU, 7.5GB Memory | n1-standard-2 |
| Consul + Sentinel[^3] | 3 | 2 vCPU, 1.8GB Memory | n1-highcpu-2 |
| Sidekiq | 4 | 2 vCPU, 7.5GB Memory | n1-standard-2 |
-| S3 Object Storage[^4] | - | - | - |
+| Cloud Object Storage[^4] | - | - | - |
| NFS Server[^5] [^7] | 1 | 4 vCPU, 3.6GB Memory | n1-highcpu-4 |
| Monitoring node | 1 | 2 vCPU, 1.8GB Memory | n1-highcpu-2 |
| External load balancing node[^6] | 1 | 2 vCPU, 1.8GB Memory | n1-highcpu-2 |
| Internal load balancing node[^6] | 1 | 2 vCPU, 1.8GB Memory | n1-highcpu-2 |
-### 10,000 User Configuration
+### 10,000 user configuration
-- **Supported Users (approximate):** 10,000
-- **Test RPS Rates:** API: 200 RPS, Web: 20 RPS, Git: 20 RPS
-- **Known Issues:** For the latest list of known performance issues head
-[here](https://gitlab.com/gitlab-org/gitlab/issues?label_name%5B%5D=Quality%3Aperformance-issues).
+- **Supported users (approximate):** 10,000
+- **Test RPS rates:** API: 200 RPS, Web: 20 RPS, Git: 20 RPS
+- **Known issues:** [List of known performance issues](https://gitlab.com/gitlab-org/gitlab/issues?label_name%5B%5D=Quality%3Aperformance-issues)
-| Service | Nodes | Configuration | GCP type |
+| Service | Nodes | Configuration[^8] | GCP type |
| ----------------------------|-------|-----------------------|---------------|
| GitLab Rails[^1] | 3 | 32 vCPU, 28.8GB Memory | n1-highcpu-32 |
| PostgreSQL | 3 | 4 vCPU, 15GB Memory | n1-standard-4 |
| PgBouncer | 3 | 2 vCPU, 1.8GB Memory | n1-highcpu-2 |
-| Gitaly[^2] [^7] | X | 16 vCPU, 60GB Memory | n1-standard-16 |
+| Gitaly[^2] [^5] [^7] | X | 16 vCPU, 60GB Memory | n1-standard-16 |
| Redis[^3] - Cache | 3 | 4 vCPU, 15GB Memory | n1-standard-4 |
| Redis[^3] - Queues / Shared State | 3 | 4 vCPU, 15GB Memory | n1-standard-4 |
| Redis Sentinel[^3] - Cache | 3 | 1 vCPU, 1.7GB Memory | g1-small |
| Redis Sentinel[^3] - Queues / Shared State | 3 | 1 vCPU, 1.7GB Memory | g1-small |
| Consul | 3 | 2 vCPU, 1.8GB Memory | n1-highcpu-2 |
| Sidekiq | 4 | 4 vCPU, 15GB Memory | n1-standard-4 |
-| S3 Object Storage[^4] | - | - | - |
+| Cloud Object Storage[^4] | - | - | - |
| NFS Server[^5] [^7] | 1 | 4 vCPU, 3.6GB Memory | n1-highcpu-4 |
| Monitoring node | 1 | 4 vCPU, 3.6GB Memory | n1-highcpu-4 |
| External load balancing node[^6] | 1 | 2 vCPU, 1.8GB Memory | n1-highcpu-2 |
| Internal load balancing node[^6] | 1 | 2 vCPU, 1.8GB Memory | n1-highcpu-2 |
-### 25,000 User Configuration
+### 25,000 user configuration
-- **Supported Users (approximate):** 25,000
-- **Test RPS Rates:** API: 500 RPS, Web: 50 RPS, Git: 50 RPS
-- **Known Issues:** For the latest list of known performance issues head
-[here](https://gitlab.com/gitlab-org/gitlab/issues?label_name%5B%5D=Quality%3Aperformance-issues).
+- **Supported users (approximate):** 25,000
+- **Test RPS rates:** API: 500 RPS, Web: 50 RPS, Git: 50 RPS
+- **Known issues:** [List of known performance issues](https://gitlab.com/gitlab-org/gitlab/issues?label_name%5B%5D=Quality%3Aperformance-issues)
-| Service | Nodes | Configuration | GCP type |
+| Service | Nodes | Configuration[^8] | GCP type |
| ----------------------------|-------|-----------------------|---------------|
-| GitLab Rails[^1] | 7 | 32 vCPU, 28.8GB Memory | n1-highcpu-32 |
+| GitLab Rails[^1] | 5 | 32 vCPU, 28.8GB Memory | n1-highcpu-32 |
| PostgreSQL | 3 | 8 vCPU, 30GB Memory | n1-standard-8 |
| PgBouncer | 3 | 2 vCPU, 1.8GB Memory | n1-highcpu-2 |
-| Gitaly[^2] [^7] | X | 32 vCPU, 120GB Memory | n1-standard-32 |
+| Gitaly[^2] [^5] [^7] | X | 32 vCPU, 120GB Memory | n1-standard-32 |
| Redis[^3] - Cache | 3 | 4 vCPU, 15GB Memory | n1-standard-4 |
| Redis[^3] - Queues / Shared State | 3 | 4 vCPU, 15GB Memory | n1-standard-4 |
| Redis Sentinel[^3] - Cache | 3 | 1 vCPU, 1.7GB Memory | g1-small |
| Redis Sentinel[^3] - Queues / Shared State | 3 | 1 vCPU, 1.7GB Memory | g1-small |
| Consul | 3 | 2 vCPU, 1.8GB Memory | n1-highcpu-2 |
| Sidekiq | 4 | 4 vCPU, 15GB Memory | n1-standard-4 |
-| S3 Object Storage[^4] | - | - | - |
+| Cloud Object Storage[^4] | - | - | - |
| NFS Server[^5] [^7] | 1 | 4 vCPU, 3.6GB Memory | n1-highcpu-4 |
| Monitoring node | 1 | 4 vCPU, 3.6GB Memory | n1-highcpu-4 |
| External load balancing node[^6] | 1 | 2 vCPU, 1.8GB Memory | n1-highcpu-2 |
| Internal load balancing node[^6] | 1 | 4 vCPU, 3.6GB Memory | n1-highcpu-4 |
-### 50,000 User Configuration
+### 50,000 user configuration
-- **Supported Users (approximate):** 50,000
-- **Test RPS Rates:** API: 1000 RPS, Web: 100 RPS, Git: 100 RPS
-- **Known Issues:** For the latest list of known performance issues head
-[here](https://gitlab.com/gitlab-org/gitlab/issues?label_name%5B%5D=Quality%3Aperformance-issues).
+- **Supported users (approximate):** 50,000
+- **Test RPS rates:** API: 1000 RPS, Web: 100 RPS, Git: 100 RPS
+- **Known issues:** [List of known performance issues](https://gitlab.com/gitlab-org/gitlab/issues?label_name%5B%5D=Quality%3Aperformance-issues)
-| Service | Nodes | Configuration | GCP type |
+| Service | Nodes | Configuration[^8] | GCP type |
| ----------------------------|-------|-----------------------|---------------|
-| GitLab Rails[^1] | 15 | 32 vCPU, 28.8GB Memory | n1-highcpu-32 |
+| GitLab Rails[^1] | 12 | 32 vCPU, 28.8GB Memory | n1-highcpu-32 |
| PostgreSQL | 3 | 16 vCPU, 60GB Memory | n1-standard-16 |
| PgBouncer | 3 | 2 vCPU, 1.8GB Memory | n1-highcpu-2 |
-| Gitaly[^2] [^7] | X | 64 vCPU, 240GB Memory | n1-standard-64 |
+| Gitaly[^2] [^5] [^7] | X | 64 vCPU, 240GB Memory | n1-standard-64 |
| Redis[^3] - Cache | 3 | 4 vCPU, 15GB Memory | n1-standard-4 |
| Redis[^3] - Queues / Shared State | 3 | 4 vCPU, 15GB Memory | n1-standard-4 |
| Redis Sentinel[^3] - Cache | 3 | 1 vCPU, 1.7GB Memory | g1-small |
@@ -337,7 +246,7 @@ On different cloud vendors a best effort like for like can be used.
| Consul | 3 | 2 vCPU, 1.8GB Memory | n1-highcpu-2 |
| Sidekiq | 4 | 4 vCPU, 15GB Memory | n1-standard-4 |
| NFS Server[^5] [^7] | 1 | 4 vCPU, 3.6GB Memory | n1-highcpu-4 |
-| S3 Object Storage[^4] | - | - | - |
+| Cloud Object Storage[^4] | - | - | - |
| Monitoring node | 1 | 4 vCPU, 3.6GB Memory | n1-highcpu-4 |
| External load balancing node[^6] | 1 | 2 vCPU, 1.8GB Memory | n1-highcpu-2 |
| Internal load balancing node[^6] | 1 | 8 vCPU, 7.2GB Memory | n1-highcpu-8 |
@@ -361,26 +270,26 @@ On different cloud vendors a best effort like for like can be used.
and another for the Queues and Shared State classes respectively. We also recommend
that you run the Redis Sentinel clusters separately as well for each Redis Cluster.
-[^4]: For data objects such as LFS, Uploads, Artifacts, etc... We recommend a S3 Object Storage
- where possible over NFS due to better performance and availability. Several types of objects
- are supported for S3 storage - [Job artifacts](../job_artifacts.md#using-object-storage),
- [LFS](../lfs/lfs_administration.md#storing-lfs-objects-in-remote-object-storage),
- [Uploads](../uploads.md#using-object-storage-core-only),
- [Merge Request Diffs](../merge_request_diffs.md#using-object-storage),
- [Packages](../packages/index.md#using-object-storage) (Optional Feature),
- [Dependency Proxy](../packages/dependency_proxy.md#using-object-storage) (Optional Feature).
+[^4]: For data objects such as LFS, Uploads, and Artifacts, we recommend a [Cloud Object Storage service](object_storage.md)
+    over NFS where possible, due to better performance and availability.
-[^5]: NFS storage server is still required for [GitLab Pages](https://gitlab.com/gitlab-org/gitlab-pages/issues/196)
- and optionally for CI Job Incremental Logging
- ([can be switched to use Redis instead](../job_logs.md#new-incremental-logging-architecture)).
+[^5]: NFS can be used as an alternative for both repository data (replacing Gitaly) and
+    object storage, but this isn't typically recommended for performance reasons. Note, however,
+    that it is still required for [GitLab Pages](https://gitlab.com/gitlab-org/gitlab-pages/issues/196).
[^6]: Our architectures have been tested and validated with [HAProxy](https://www.haproxy.org/)
as the load balancer. However other reputable load balancers with similar feature sets
should also work instead but be aware these aren't validated.
-[^7]: We strongly recommend that the Gitaly and / or NFS nodes are set up with SSD disks over
+[^7]: We strongly recommend that any Gitaly and / or NFS nodes are set up with SSD disks over
HDD with a throughput of at least 8,000 IOPS for read operations and 2,000 IOPS for write
as these components have heavy I/O. These IOPS values are recommended only as a starter
as with time they may be adjusted higher or lower depending on the scale of your
environment's workload. If you're running the environment on a Cloud provider
you may need to refer to their documentation on how configure IOPS correctly.
+
+[^8]: The architectures were built and tested with the [Intel Xeon E5 v3 (Haswell)](https://cloud.google.com/compute/docs/cpu-platforms)
+    CPU platform on GCP. On different hardware you may find that adjustments, either lower
+    or higher, are required for your CPU or node counts. For reference, a
+    [Sysbench](https://github.com/akopytov/sysbench) benchmark of the CPU is available
+    [here](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Reference-Architectures/GCP-CPU-Benchmarks).
diff --git a/doc/administration/high_availability/consul.md b/doc/administration/high_availability/consul.md
index 0ea5e55cc35..6762a81f671 100644
--- a/doc/administration/high_availability/consul.md
+++ b/doc/administration/high_availability/consul.md
@@ -94,7 +94,8 @@ Ideally all nodes will have a `Status` of `alive`.
### Restarting the server cluster
-**Note**: This section only applies to server agents. It is safe to restart client agents whenever needed.
+NOTE: **Note:**
+This section only applies to server agents. It is safe to restart client agents whenever needed.
If it is necessary to restart the server cluster, it is important to do this in a controlled fashion in order to maintain quorum. If quorum is lost, you will need to follow the Consul [outage recovery](#outage-recovery) process to recover the cluster.
diff --git a/doc/administration/high_availability/database.md b/doc/administration/high_availability/database.md
index daeb0f9baf5..bb07fdbbb6e 100644
--- a/doc/administration/high_availability/database.md
+++ b/doc/administration/high_availability/database.md
@@ -22,11 +22,9 @@ If you use a cloud-managed service, or provide your own PostgreSQL:
1. Configure the GitLab application servers with the appropriate details.
This step is covered in [Configuring GitLab for HA](gitlab.md).
-## PostgreSQL in a Scaled Environment
+## PostgreSQL in a Scaled and Highly Available Environment
-This section is relevant for [Scaled Architecture](README.md#scalable-architecture-examples)
-environments including [Basic Scaling](README.md#basic-scaling) and
-[Full Scaling](README.md#full-scaling).
+This section is relevant for [Scalable and Highly Available Setups](README.md).
### Provide your own PostgreSQL instance **(CORE ONLY)**
@@ -94,23 +92,6 @@ deploy the bundled PostgreSQL.
Advanced configuration options are supported and can be added if
needed.
-Continue configuration of other components by going
-[back to Scaled Architectures](README.md#scalable-architecture-examples)
-
-## PostgreSQL with High Availability
-
-This section is relevant for [High Availability Architecture](README.md#high-availability-architecture-examples)
-environments including [Horizontal](README.md#horizontal),
-[Hybrid](README.md#hybrid), and
-[Fully Distributed](README.md#fully-distributed).
-
-### Provide your own PostgreSQL instance **(CORE ONLY)**
-
-If you want to use your own deployed PostgreSQL instance(s),
-see [Provide your own PostgreSQL instance](#provide-your-own-postgresql-instance-core-only)
-for more details. However, you can use the GitLab Omnibus package to easily
-deploy the bundled PostgreSQL.
-
### High Availability with GitLab Omnibus **(PREMIUM ONLY)**
> Important notes:
@@ -182,7 +163,7 @@ Similarly, PostgreSQL access is controlled based on the network source.
This is why you will need:
- IP address of each nodes network interface. This can be set to `0.0.0.0` to
- listen on all interfaces. It cannot be set to the loopack address `127.0.0.1`.
+ listen on all interfaces. It cannot be set to the loopback address `127.0.0.1`.
- Network Address. This can be in subnet (i.e. `192.168.0.0/255.255.255.0`)
or CIDR (i.e. `192.168.0.0/24`) form.
@@ -402,7 +383,7 @@ Select one node as a primary node.
* master | HOSTNAME | | host=HOSTNAME user=gitlab_repmgr dbname=gitlab_repmgr
```
-1. Note down the hostname/ip in the connection string: `host=HOSTNAME`. We will
+1. Note down the hostname or IP address in the connection string: `host=HOSTNAME`. We will
refer to the hostname in the next section as `MASTER_NODE_NAME`. If the value
is not an IP address, it will need to be a resolvable name (via DNS or
`/etc/hosts`)
@@ -554,7 +535,7 @@ Here is a list and description of each machine and the assigned IP:
- `10.6.0.33`: PostgreSQL secondary
- `10.6.0.41`: GitLab application
-All passwords are set to `toomanysecrets`, please do not use this password or derived hashes and the external_url for GitLab is `http://gitlab.example.com`.
+All passwords are set to `toomanysecrets`; please do not use this password or derived hashes. The `external_url` for GitLab is `http://gitlab.example.com`.
Please note that after the initial configuration, if a failover occurs, the PostgresSQL master will change to one of the available secondaries until it is failed back.
@@ -758,7 +739,7 @@ Here is a list and description of each machine and the assigned IP:
All passwords are set to `toomanysecrets`, please do not use this password or derived hashes.
-The external_url for GitLab is `http://gitlab.example.com`
+The `external_url` for GitLab is `http://gitlab.example.com`.
Please note that after the initial configuration, if a failover occurs, the PostgresSQL master will change to one of the available secondaries until it is failed back.
@@ -963,7 +944,7 @@ repmgr['trust_auth_cidr_addresses'] = %w(192.168.1.44/32 db2.example.com)
##### MD5 Authentication
If you are running on an untrusted network, repmgr can use md5 authentication
-with a [.pgpass file](https://www.postgresql.org/docs/9.6/libpq-pgpass.html)
+with a [`.pgpass` file](https://www.postgresql.org/docs/9.6/libpq-pgpass.html)
to authenticate.
You can specify by IP address, FQDN, or by subnet, using the same format as in
diff --git a/doc/administration/high_availability/gitaly.md b/doc/administration/high_availability/gitaly.md
index 739d1ae35fb..bb40747b24c 100644
--- a/doc/administration/high_availability/gitaly.md
+++ b/doc/administration/high_availability/gitaly.md
@@ -11,18 +11,15 @@ should consider using Gitaly on a separate node.
See the [Gitaly HA Epic](https://gitlab.com/groups/gitlab-org/-/epics/289) to
track plans and progress toward high availability support.
-This document is relevant for [Scaled Architecture](README.md#scalable-architecture-examples)
-environments and [High Availability Architecture](README.md#high-availability-architecture-examples).
+This document is relevant for [Scalable and Highly Available Setups](README.md).
## Running Gitaly on its own server
See [Running Gitaly on its own server](../gitaly/index.md#running-gitaly-on-its-own-server)
in Gitaly documentation.
-Continue configuration of other components by going back to:
-
-- [Scaled Architectures](README.md#scalable-architecture-examples)
-- [High Availability Architectures](README.md#high-availability-architecture-examples)
+Continue configuration of other components by going back to the
+[Scaling and High Availability](README.md#gitlab-components-and-configuration-instructions) page.
## Enable Monitoring
diff --git a/doc/administration/high_availability/gitlab.md b/doc/administration/high_availability/gitlab.md
index ad00cb8df9f..cef9f9c5761 100644
--- a/doc/administration/high_availability/gitlab.md
+++ b/doc/administration/high_availability/gitlab.md
@@ -8,6 +8,9 @@ NOTE: **Note:** There is some additional configuration near the bottom for
additional GitLab application servers. It's important to read and understand
these additional steps before proceeding with GitLab installation.
+NOTE: **Note:** [Cloud Object Storage service](object_storage.md) with [Gitaly](gitaly.md)
+is recommended over [NFS](nfs.md) wherever possible for improved performance.
+
1. If necessary, install the NFS client utility packages using the following
commands:
diff --git a/doc/administration/high_availability/monitoring_node.md b/doc/administration/high_availability/monitoring_node.md
index d293fc350fa..a12b865cb91 100644
--- a/doc/administration/high_availability/monitoring_node.md
+++ b/doc/administration/high_availability/monitoring_node.md
@@ -74,8 +74,8 @@ Omnibus:
## Migrating to Service Discovery
Once monitoring using Service Discovery is enabled with `consul['monitoring_service_discovery'] = true`,
-ensure that `prometheus['scrape_configs']` is not set in `/etc/gitlab/gitlab.rb`. Setting both
-`consul['monitoring_service_discovery'] = true` and `prometheus['scrape_configs']` in `/etc/gitlab/gitlab.rb`
+ensure that `prometheus['scrape_configs']` is not set in `/etc/gitlab/gitlab.rb`. Setting both
+`consul['monitoring_service_discovery'] = true` and `prometheus['scrape_configs']` in `/etc/gitlab/gitlab.rb`
will result in errors.
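
For example, under service discovery a minimal `/etc/gitlab/gitlab.rb` sketch for the monitoring node would contain only the Consul flag, with any static scrape configuration removed or commented out (illustrative only):

```ruby
# Let Prometheus discover scrape targets through Consul
consul['monitoring_service_discovery'] = true

# Do not also define static scrape targets; setting both results in errors
# prometheus['scrape_configs'] = [ ... ]
```
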
<!-- ## Troubleshooting
diff --git a/doc/administration/high_availability/nfs.md b/doc/administration/high_availability/nfs.md
index 0d88191151a..8e018b7a6fe 100644
--- a/doc/administration/high_availability/nfs.md
+++ b/doc/administration/high_availability/nfs.md
@@ -12,6 +12,9 @@ performance, especially for actions that read or write to Git repositories. See
[Filesystem Performance Benchmarking](../operations/filesystem_benchmarking.md)
for steps to test filesystem performance.
+NOTE: **Note:** [Cloud Object Storage service](object_storage.md) with [Gitaly](gitaly.md)
+is recommended over NFS wherever possible for improved performance.
+
## NFS Server features
### Required features
@@ -51,6 +54,8 @@ management between systems:
### Improving NFS performance with GitLab
+#### Improving NFS performance with Unicorn
+
NOTE: **Note:** From GitLab 12.1, it will automatically be detected if Rugged can and should be used per storage.
If you previously enabled Rugged using the feature flag, you will need to unset the feature flag by using:
@@ -61,6 +66,16 @@ sudo gitlab-rake gitlab:features:unset_rugged
If the Rugged feature flag is explicitly set to either true or false, GitLab will use the value explicitly set.
+#### Improving NFS performance with Puma
+
+NOTE: **Note:** From GitLab 12.7, Rugged auto-detection is disabled if Puma thread count is greater than 1.
+
+If you want to use Rugged with Puma, it is recommended to [set Puma thread count to 1](https://docs.gitlab.com/omnibus/settings/puma.html#puma-settings).
+
+If you want to use Rugged with a Puma thread count greater than 1, Rugged can be enabled using the [feature flag](../../development/gitaly.md#legacy-rugged-code).
+
+If the Rugged feature flag is explicitly set to either true or false, GitLab will use the value explicitly set.
+
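+A minimal `gitlab.rb` sketch of the single-thread setup described above, assuming the `puma['min_threads']` and `puma['max_threads']` keys from the Omnibus Puma settings page linked earlier:
+
+```ruby
+# Run Puma with one thread per worker so Rugged auto-detection remains in effect
+puma['min_threads'] = 1
+puma['max_threads'] = 1
+```
+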
### Known issues
On some customer systems, we have seen NFS clients slow precipitously due to
@@ -146,7 +161,7 @@ Note there are several options that you should consider using:
## A single NFS mount
-It's recommended to nest all GitLab data dirs within a mount, that allows automatic
+It's recommended to nest all GitLab data directories within a mount, which allows automatic
restore of backups without manually moving existing data.
```plaintext
diff --git a/doc/administration/high_availability/nfs_host_client_setup.md b/doc/administration/high_availability/nfs_host_client_setup.md
index 75dec1eef29..ddc58fc0db7 100644
--- a/doc/administration/high_availability/nfs_host_client_setup.md
+++ b/doc/administration/high_availability/nfs_host_client_setup.md
@@ -25,7 +25,7 @@ Using EFS may negatively impact performance. Please review the [relevant documen
### Step 1 - Install NFS Server on Host
-Installing the nfs-kernel-server package allows you to share directories with the clients running the GitLab application.
+Installing the `nfs-kernel-server` package allows you to share directories with the clients running the GitLab application.
```shell
apt-get update
@@ -61,7 +61,7 @@ inside your HA environment to the NFS server configured above.
### Step 1 - Install NFS Common on Client
-The nfs-common provides NFS functionality without installing server components which
+The `nfs-common` package provides NFS functionality without installing server components which
we don't need running on the application nodes.
```shell
@@ -126,7 +126,7 @@ by a firewall, then you will need to reconfigure that firewall to allow NFS comm
[This guide from TDLP](http://tldp.org/HOWTO/NFS-HOWTO/security.html#FIREWALLS)
covers the basics of using NFS in a firewalled environment. Additionally, we encourage you to
-search for and review the specific documentation for your OS/distro and your firewall software.
+search for and review the specific documentation for your operating system or distribution and your firewall software.
Example for Ubuntu:
diff --git a/doc/administration/high_availability/object_storage.md b/doc/administration/high_availability/object_storage.md
new file mode 100644
index 00000000000..dc451757a1c
--- /dev/null
+++ b/doc/administration/high_availability/object_storage.md
@@ -0,0 +1,32 @@
+---
+type: reference
+---
+
+# Cloud Object Storage
+
+GitLab supports using a Cloud Object Storage service rather than [NFS](nfs.md) for holding
+numerous types of data. This is recommended in larger setups, as object storage is
+typically much more performant, reliable, and scalable.
+
+For configuring GitLab to use Object Storage, refer to the following guides (a brief example sketch follows this list):
+
+1. Make sure the [`git` user home directory](https://docs.gitlab.com/omnibus/settings/configuration.html#moving-the-home-directory-for-a-user) is on local disk.
+1. Configure [database lookup of SSH keys](../operations/fast_ssh_key_lookup.md)
+ to eliminate the need for a shared `authorized_keys` file.
+1. Configure [object storage for backups](../../raketasks/backup_restore.md#uploading-backups-to-a-remote-cloud-storage).
+1. Configure [object storage for job artifacts](../job_artifacts.md#using-object-storage)
+ including [incremental logging](../job_logs.md#new-incremental-logging-architecture).
+1. Configure [object storage for LFS objects](../lfs/lfs_administration.md#storing-lfs-objects-in-remote-object-storage).
+1. Configure [object storage for uploads](../uploads.md#using-object-storage-core-only).
+1. Configure [object storage for merge request diffs](../merge_request_diffs.md#using-object-storage).
+1. Configure [object storage for container registry](../packages/container_registry.md#container-registry-storage-driver) (optional feature).
+1. Configure [object storage for Mattermost](https://docs.mattermost.com/administration/config-settings.html#file-storage) (optional feature).
+1. Configure [object storage for packages](../packages/index.md#using-object-storage) (optional feature). **(PREMIUM ONLY)**
+1. Configure [object storage for dependency proxy](../packages/dependency_proxy.md#using-object-storage) (optional feature). **(ULTIMATE ONLY)**
+1. Configure [object storage for Pseudonymizer](../pseudonymizer.md#configuration) (optional feature). **(ULTIMATE ONLY)**
+
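+As an illustration only, a hedged `gitlab.rb` sketch for one of the guides above (LFS objects on AWS S3). The bucket name, region, and credentials are placeholders, and the full set of options is covered in the linked LFS administration guide:
+
+```ruby
+gitlab_rails['lfs_object_store_enabled'] = true
+gitlab_rails['lfs_object_store_remote_directory'] = 'lfs-objects'   # placeholder bucket name
+gitlab_rails['lfs_object_store_connection'] = {
+  'provider' => 'AWS',
+  'region' => 'eu-central-1',                                       # placeholder region
+  'aws_access_key_id' => 'AWS_ACCESS_KEY_ID',
+  'aws_secret_access_key' => 'AWS_SECRET_ACCESS_KEY'
+}
+```
+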
+NOTE: **Note:**
+One current feature of GitLab that still requires a shared directory (NFS) is
+[GitLab Pages](../../user/project/pages/index.md).
+There is [work in progress](https://gitlab.com/gitlab-org/gitlab-pages/issues/196)
+to eliminate the need for NFS to support GitLab Pages.
diff --git a/doc/administration/high_availability/pgbouncer.md b/doc/administration/high_availability/pgbouncer.md
index cea55e6c9b4..c820a01da8a 100644
--- a/doc/administration/high_availability/pgbouncer.md
+++ b/doc/administration/high_availability/pgbouncer.md
@@ -2,7 +2,7 @@
type: reference
---
-# Working with the bundle PgBouncer service
+# Working with the bundled PgBouncer service **(PREMIUM ONLY)**
As part of its High Availability stack, GitLab Premium includes a bundled version of [PgBouncer](https://pgbouncer.github.io/) that can be managed through `/etc/gitlab/gitlab.rb`. PgBouncer is used to seamlessly migrate database connections between servers in a failover scenario. Additionally, it can be used in a non-HA setup to pool connections, speeding up response time while reducing resource usage.
diff --git a/doc/administration/high_availability/redis.md b/doc/administration/high_availability/redis.md
index 56c2bafea76..8fb25f958ce 100644
--- a/doc/administration/high_availability/redis.md
+++ b/doc/administration/high_availability/redis.md
@@ -14,17 +14,15 @@ The following are the requirements for providing your own Redis instance:
[Merge Trains](../../ci/merge_request_pipelines/pipelines_for_merged_results/merge_trains/index.md).
- Standalone Redis or Redis high availability with Sentinel are supported. Redis
Cluster is not supported.
-- Managed Redis from cloud providers such as AWS Elasticache will work. If these
+- Managed Redis from cloud providers such as AWS ElastiCache will work. If these
services support high availability, be sure it is not the Redis Cluster type.
Note the Redis node's IP address or hostname, port, and password (if required).
These will be necessary when configuring the GitLab application servers later.
-## Redis in a Scaled Environment
+## Redis in a Scaled and Highly Available Environment
-This section is relevant for [Scaled Architecture](README.md#scalable-architecture-examples)
-environments including [Basic Scaling](README.md#basic-scaling) and
-[Full Scaling](README.md#full-scaling).
+This section is relevant for [Scalable and Highly Available Setups](README.md).
### Provide your own Redis instance **(CORE ONLY)**
@@ -85,22 +83,8 @@ Omnibus:
Advanced configuration options are supported and can be added if
needed.
-Continue configuration of other components by going
-[back to Scaled Architectures](README.md#scalable-architecture-examples)
-
-## Redis with High Availability
-
-This section is relevant for [High Availability Architecture](README.md#high-availability-architecture-examples)
-environments including [Horizontal](README.md#horizontal),
-[Hybrid](README.md#hybrid), and
-[Fully Distributed](README.md#fully-distributed).
-
-### Provide your own Redis instance **(CORE ONLY)**
-
-If you want to use your own deployed Redis instance(s),
-see [Provide your own Redis instance](#provide-your-own-redis-instance-core-only)
-for more details. However, you can use the GitLab Omnibus package to easily
-deploy the bundled Redis.
+Continue configuration of other components by going back to the
+[Scaling and High Availability](README.md#gitlab-components-and-configuration-instructions) page.
### High Availability with GitLab Omnibus **(PREMIUM ONLY)**
@@ -878,7 +862,7 @@ mailroom['enable'] = false
redis['master'] = false
```
-You can find the relevant attributes defined in [gitlab_rails.rb][omnifile].
+You can find the relevant attributes defined in [`gitlab_rails.rb`][omnifile].
## Troubleshooting
@@ -952,7 +936,7 @@ and `redis['master_pasword']` as you defined for your sentinel node.
The way the Redis connector `redis-rb` works with sentinel is a bit
non-intuitive. We try to hide the complexity in omnibus, but it still requires
-a few extra configs.
+a few extra configurations.
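+
+As a minimal sketch, the application-side settings typically look like the following, assuming a Sentinel master named `gitlab-redis` and illustrative Sentinel addresses:
+
+```ruby
+redis['master_name'] = 'gitlab-redis'                  # must match the name used by Sentinel
+redis['master_password'] = 'redis-password-goes-here'  # placeholder
+gitlab_rails['redis_sentinels'] = [
+  { 'host' => '10.0.0.1', 'port' => 26379 },           # illustrative addresses
+  { 'host' => '10.0.0.2', 'port' => 26379 },
+  { 'host' => '10.0.0.3', 'port' => 26379 }
+]
+```
+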
---
@@ -966,7 +950,7 @@ To make sure your configuration is correct:
sudo gitlab-rails console
# For source installations
- sudo -u git rails console production
+ sudo -u git rails console -e production
```
1. Run in the console:
diff --git a/doc/administration/housekeeping.md b/doc/administration/housekeeping.md
index ca3480f1146..4a2e2b9aac9 100644
--- a/doc/administration/housekeeping.md
+++ b/doc/administration/housekeeping.md
@@ -15,7 +15,7 @@ The housekeeping function runs `repack` or `gc` depending on the
For example in the following scenario a `git repack -d` will be executed:
-- Project: pushes since gc counter (`pushes_since_gc`) = `10`
+- Project: pushes since GC counter (`pushes_since_gc`) = `10`
- Git GC period = `200`
- Full repack period = `50`
@@ -23,7 +23,7 @@ When the `pushes_since_gc` value is 50 a `repack -A -d --pack-kept-objects` will
the `pushes_since_gc` value is 200 a `git gc` will be run.
- `git gc` ([man page](https://mirrors.edge.kernel.org/pub/software/scm/git/docs/git-gc.html)) runs a number of housekeeping tasks,
- such as compressing filerevisions (to reduce disk space and increase performance)
+ such as compressing file revisions (to reduce disk space and increase performance)
and removing unreachable objects which may have been created from prior invocations of
`git add`.
- `git repack` ([man page](https://mirrors.edge.kernel.org/pub/software/scm/git/docs/git-repack.html)) re-organize existing packs into a single, more efficient pack.
diff --git a/doc/administration/incoming_email.md b/doc/administration/incoming_email.md
index 07b6f79a0fa..dcc590bea9c 100644
--- a/doc/administration/incoming_email.md
+++ b/doc/administration/incoming_email.md
@@ -81,7 +81,7 @@ incoming email domain to `hooli.com`, an attacker could abuse the "Create new
issue by email" or
"[Create new merge request by email](../user/project/merge_requests/creating_merge_requests.md#new-merge-request-by-email-core-only)"
features by using a project's unique address as the email when signing up for
-Slack, which would send a confirmation email, which would create a new issue or
+Slack. This would send a confirmation email, which would create a new issue or
merge request on the project owned by the attacker, allowing them to click the
confirmation link and validate their account on your company's private Slack
instance.
@@ -283,10 +283,17 @@ incoming_email:
idle_timeout: 60
```
-#### MS Exchange
+#### Microsoft Exchange Server
-Example configuration for Microsoft Exchange mail server with IMAP enabled. Assumes the
-catch-all mailbox incoming@exchange.example.com.
+Example configurations for Microsoft Exchange Server with IMAP enabled. Since
+Exchange does not support sub-addressing, only two options exist:
+
+- Catch-all mailbox (recommended for Exchange-only)
+- Dedicated email address (supports Reply by Email only)
+
+##### Catch-all mailbox
+
+Assumes the catch-all mailbox `incoming@exchange.example.com`.
Example for Omnibus installs:
@@ -335,11 +342,53 @@ incoming_email:
port: 993
# Whether the IMAP server uses SSL
ssl: true
- # Whether the IMAP server uses StartTLS
- start_tls: false
+```
- # The mailbox where incoming mail will end up. Usually "inbox".
- mailbox: "inbox"
- # The IDLE command timeout.
- idle_timeout: 60
+##### Dedicated email address
+
+Assumes the dedicated email address `incoming@exchange.example.com`.
+
+Example for Omnibus installs:
+
+```ruby
+gitlab_rails['incoming_email_enabled'] = true
+
+# Exchange does not support sub-addressing, and we're not using a catch-all mailbox so %{key} is not used here
+gitlab_rails['incoming_email_address'] = "incoming@exchange.example.com"
+
+# Email account username
+# Typically this is the userPrincipalName (UPN)
+gitlab_rails['incoming_email_email'] = "incoming@ad-domain.example.com"
+# Email account password
+gitlab_rails['incoming_email_password'] = "[REDACTED]"
+
+# IMAP server host
+gitlab_rails['incoming_email_host'] = "exchange.example.com"
+# IMAP server port
+gitlab_rails['incoming_email_port'] = 993
+# Whether the IMAP server uses SSL
+gitlab_rails['incoming_email_ssl'] = true
+```
+
+Example for source installs:
+
+```yaml
+incoming_email:
+ enabled: true
+
+ # Exchange does not support sub-addressing, and we're not using a catch-all mailbox so %{key} is not used here
+ address: "incoming@exchange.example.com"
+
+ # Email account username
+ # Typically this is the userPrincipalName (UPN)
+ user: "incoming@ad-domain.example.com"
+ # Email account password
+ password: "[REDACTED]"
+
+ # IMAP server host
+ host: "exchange.example.com"
+ # IMAP server port
+ port: 993
+ # Whether the IMAP server uses SSL
+ ssl: true
```
diff --git a/doc/administration/index.md b/doc/administration/index.md
index fcfdcfdf6c8..4ca03fa2669 100644
--- a/doc/administration/index.md
+++ b/doc/administration/index.md
@@ -38,7 +38,7 @@ Learn how to install, configure, update, and maintain your GitLab instance.
- [Installing GitLab HA on Amazon Web Services (AWS)](../install/aws/index.md): Set up GitLab High Availability on Amazon AWS.
- [Geo](geo/replication/index.md): Replicate your GitLab instance to other geographic locations as a read-only fully operational version. **(PREMIUM ONLY)**
- [Disaster Recovery](geo/disaster_recovery/index.md): Quickly fail-over to a different site with minimal effort in a disaster situation. **(PREMIUM ONLY)**
-- [Pivotal Tile](../install/pivotal/index.md): Deploy GitLab as a pre-configured appliance using Ops Manager (BOSH) for Pivotal Cloud Foundry. **(PREMIUM ONLY)**
+- [Pivotal Tile](../install/pivotal/index.md): Deploy GitLab as a preconfigured appliance using Ops Manager (BOSH) for Pivotal Cloud Foundry. **(PREMIUM ONLY)**
- [Add License](../user/admin_area/license.md): Upload a license at install time to unlock features that are in paid tiers of GitLab. **(STARTER ONLY)**
### Configuring GitLab
@@ -76,7 +76,7 @@ Learn how to install, configure, update, and maintain your GitLab instance.
### Maintaining GitLab
-- [Raketasks](../raketasks/README.md): Perform various tasks for maintenance, backups, automatic webhooks setup, etc.
+- [Raketasks](../raketasks/README.md): Perform various tasks for maintenance, backups, automatic webhooks setup, and more.
- [Backup and restore](../raketasks/backup_restore.md): Backup and restore your GitLab instance.
- [Operations](operations/index.md): Keeping GitLab up and running (clean up Redis sessions, moving repositories, Sidekiq MemoryKiller, Unicorn).
- [Restart GitLab](restart_gitlab.md): Learn how to restart GitLab and its components.
@@ -86,7 +86,7 @@ Learn how to install, configure, update, and maintain your GitLab instance.
- [GitLab versions and maintenance policy](../policy/maintenance.md): Understand GitLab versions and releases (Major, Minor, Patch, Security), as well as update recommendations.
- [Update GitLab](../update/README.md): Update guides to upgrade your installation to a new version.
-- [Downtimeless updates](../update/README.md#upgrading-without-downtime): Upgrade to a newer major, minor, or patch version of GitLab without taking your GitLab instance offline.
+- [Upgrading without downtime](../update/README.md#upgrading-without-downtime): Upgrade to a newer major, minor, or patch version of GitLab without taking your GitLab instance offline.
- [Migrate your GitLab CI/CD data to another version of GitLab](../migrate_ci_to_ce/README.md): If you have an old GitLab installation (older than 8.0), follow this guide to migrate your existing GitLab CI/CD data to another version of GitLab.
### Upgrading or downgrading GitLab
@@ -107,7 +107,7 @@ Learn how to install, configure, update, and maintain your GitLab instance.
- [Libravatar](libravatar.md): Use Libravatar instead of Gravatar for user avatars.
- [Sign-up restrictions](../user/admin_area/settings/sign_up_restrictions.md): block email addresses of specific domains, or whitelist only specific domains.
- [Access restrictions](../user/admin_area/settings/visibility_and_access_controls.md#enabled-git-access-protocols): Define which Git access protocols can be used to talk to GitLab (SSH, HTTP, HTTPS).
-- [Authentication and Authorization](auth/README.md): Configure external authentication with LDAP, SAML, CAS and additional providers.
+- [Authentication and Authorization](auth/README.md): Configure external authentication with LDAP, SAML, CAS, and additional providers.
- [Sync LDAP](auth/ldap-ee.md) **(STARTER ONLY)**
- [Kerberos authentication](../integration/kerberos.md) **(STARTER ONLY)**
- See also other [authentication](../topics/authentication/index.md#gitlab-administrators) topics (for example, enforcing 2FA).
@@ -227,7 +227,6 @@ who are aware of the risks.
- [GitLab Developer Docs](../development/README.md)
- [Repairing and recovering broken Git repositories](https://git.seveas.net/repairing-and-recovering-broken-git-repositories.html)
- [Testing with OpenSSL](https://www.feistyduck.com/library/openssl-cookbook/online/ch-testing-with-openssl.html)
- - [Strace zine](https://wizardzines.com/zines/strace/)
+ - [`Strace` zine](https://wizardzines.com/zines/strace/)
- GitLab.com-specific resources:
- [Group SAML/SCIM setup](troubleshooting/group_saml_scim.md)
- \ No newline at end of file
diff --git a/doc/administration/instance_limits.md b/doc/administration/instance_limits.md
index 6928ea8ec22..56a407490be 100644
--- a/doc/administration/instance_limits.md
+++ b/doc/administration/instance_limits.md
@@ -35,22 +35,22 @@ Read more in the [CI documentation](../ci/yaml/README.md#processing-git-pushes).
Activity history for projects and individuals' profiles was limited to one year until [GitLab 11.4](https://gitlab.com/gitlab-org/gitlab-foss/issues/52246) when it was extended to two years, and in [GitLab 12.4](https://gitlab.com/gitlab-org/gitlab/issues/33840) to three years.
-## Number of project webhooks
+## Number of webhooks
-> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/20730) in GitLab 12.6.
+On GitLab.com, the [maximum number of webhooks](../user/gitlab_com/index.md#maximum-number-of-webhooks) per project and per group is limited.
-A maximum number of project webhooks applies to each GitLab.com tier. Check the
-[Maximum number of webhooks (per tier)](../user/project/integrations/webhooks.md#maximum-number-of-webhooks-per-tier)
-section in the Webhooks page.
-
-To set this limit on a self-hosted installation, run the following in the
+To set this limit on a self-managed installation, run the following in the
[GitLab Rails console](https://docs.gitlab.com/omnibus/maintenance/#starting-a-rails-console-session):
```ruby
# If limits don't exist for the default plan, you can create one with:
# Plan.default.create_limits!
+# For project webhooks
Plan.default.limits.update!(project_hooks: 100)
+
+# For group webhooks
+Plan.default.limits.update!(group_hooks: 100)
```
NOTE: **Note:** Set the limit to `0` to disable it.
@@ -72,10 +72,10 @@ If a new pipeline would cause the total number of jobs to exceed the limit, the
will fail with a `job_activity_limit_exceeded` error.
- On GitLab.com different [limits are defined per plan](../user/gitlab_com/index.md#gitlab-cicd) and they affect all projects under that plan.
-- On [GitLab Starter](https://about.gitlab.com/pricing/#self-managed) tier or higher self-hosted installations, this limit is defined for the `default` plan that affects all projects.
+- On [GitLab Starter](https://about.gitlab.com/pricing/#self-managed) tier or higher self-managed installations, this limit is defined for the `default` plan that affects all projects.
This limit is disabled by default.
-To set this limit on a self-hosted installation, run the following in the
+To set this limit on a self-managed installation, run the following in the
[GitLab Rails console](https://docs.gitlab.com/omnibus/maintenance/#starting-a-rails-console-session):
```ruby
@@ -87,6 +87,51 @@ Plan.default.limits.update!(ci_active_jobs: 500)
NOTE: **Note:** Set the limit to `0` to disable it.
+### Number of CI/CD subscriptions to a project
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/9045) in GitLab 12.9.
+
+The total number of subscriptions can be limited per project. This limit is
+checked each time a new subscription is created.
+
+If a new subscription would cause the total number of subscriptions to exceed the
+limit, the subscription will be considered invalid.
+
+- On GitLab.com different [limits are defined per plan](../user/gitlab_com/index.md#gitlab-cicd) and they affect all projects under that plan.
+- On [GitLab Starter](https://about.gitlab.com/pricing/#self-managed) tier or higher self-managed installations, this limit is defined for the `default` plan that affects all projects.
+
+To set this limit on a self-managed installation, run the following in the
+[GitLab Rails console](https://docs.gitlab.com/omnibus/maintenance/#starting-a-rails-console-session):
+
+```ruby
+Plan.default.limits.update!(ci_project_subscriptions: 500)
+```
+
+NOTE: **Note:** Set the limit to `0` to disable it.
+
+### Number of pipeline schedules
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/29566) in GitLab 12.10.
+
+The total number of pipeline schedules can be limited per project. This limit is
+checked each time a new pipeline schedule is created. If a new pipeline schedule
+would cause the total number of pipeline schedules to exceed the limit, the
+pipeline schedule will not be created.
+
+On GitLab.com, different limits are [defined per plan](../user/gitlab_com/index.md#gitlab-cicd),
+and they affect all projects under that plan.
+
+On self-managed instances ([GitLab Starter](https://about.gitlab.com/pricing/#self-managed)
+or higher tiers), this limit is defined for the `default` plan that affects all
+projects. By default, there is no limit.
+
+To set this limit on a self-managed installation, run the following in the
+[GitLab Rails console](https://docs.gitlab.com/omnibus/maintenance/#starting-a-rails-console-session):
+
+```ruby
+Plan.default.limits.update!(ci_pipeline_schedules: 100)
+```
+
## Environment data on Deploy Boards
[Deploy Boards](../user/project/deploy_boards.md) load information from Kubernetes about
@@ -113,9 +158,9 @@ text field exceeds this limit then the text will be truncated to this number of
characters and the rest will not be indexed and hence will not be searchable.
- On GitLab.com this is limited to 20000 characters
-- For self-hosted installations it is unlimited by default
+- For self-managed installations it is unlimited by default
-This limit can be configured for self hosted installations when [enabling
+This limit can be configured for self-managed installations when [enabling
Elasticsearch](../integration/elasticsearch.md#enabling-elasticsearch).
NOTE: **Note:** Set the limit to `0` to disable it.
diff --git a/doc/administration/integration/plantuml.md b/doc/administration/integration/plantuml.md
index 9b8d3ac2efe..009a1a247c0 100644
--- a/doc/administration/integration/plantuml.md
+++ b/doc/administration/integration/plantuml.md
@@ -122,12 +122,12 @@ our AsciiDoc snippets, wikis and repos using delimited blocks:
- **Markdown**
- ~~~markdown
+ ````markdown
```plantuml
Bob -> Alice : hello
Alice -> Bob : hi
```
- ~~~
+ ````
- **AsciiDoc**
@@ -149,9 +149,9 @@ our AsciiDoc snippets, wikis and repos using delimited blocks:
Alice -> Bob: hi
```
- You can also use the `uml::` directive for compatibility with [sphinxcontrib-plantuml](https://pypi.org/project/sphinxcontrib-plantuml/), but please note that we currently only support the `caption` option.
+ You can also use the `uml::` directive for compatibility with [`sphinxcontrib-plantuml`](https://pypi.org/project/sphinxcontrib-plantuml/), but please note that we currently only support the `caption` option.
-The above blocks will be converted to an HTML img tag with source pointing to the
+The above blocks will be converted to an HTML image tag with source pointing to the
PlantUML instance. If the PlantUML server is correctly configured, this should
render a nice diagram instead of the block:
@@ -172,7 +172,7 @@ Some parameters can be added to the AsciiDoc block definition:
- *format*: Can be either `png` or `svg`. Note that `svg` is not supported by
all browsers so use with care. The default is `png`.
- *id*: A CSS id added to the diagram HTML tag.
-- *width*: Width attribute added to the img tag.
-- *height*: Height attribute added to the img tag.
+- *width*: Width attribute added to the image tag.
+- *height*: Height attribute added to the image tag.
Markdown does not support any parameters and will always use PNG format.
diff --git a/doc/administration/job_artifacts.md b/doc/administration/job_artifacts.md
index 441ad2186f6..6f927d8f920 100644
--- a/doc/administration/job_artifacts.md
+++ b/doc/administration/job_artifacts.md
@@ -3,7 +3,7 @@
> - Introduced in GitLab 8.2 and GitLab Runner 0.7.0.
> - Starting with GitLab 8.4 and GitLab Runner 1.0, the artifacts archive format changed to `ZIP`.
> - Starting with GitLab 8.17, builds are renamed to jobs.
-> - This is the administration documentation. For the user guide see [pipelines/job_artifacts](../user/project/pipelines/job_artifacts.md).
+> - This is the administration documentation. For the user guide see [pipelines/job_artifacts](../ci/pipelines/job_artifacts.md).
Artifacts is a list of files and directories which are attached to a job after it
finishes. This feature is enabled by default in all GitLab installations. Keep reading
@@ -79,7 +79,7 @@ _The artifacts are stored by default in
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/1762) in
> [GitLab Premium](https://about.gitlab.com/pricing/) 9.4.
-> - Since version 9.5, artifacts are [browsable](../user/project/pipelines/job_artifacts.md#browsing-artifacts),
+> - Since version 9.5, artifacts are [browsable](../ci/pipelines/job_artifacts.md#browsing-artifacts),
> when object storage is enabled. 9.4 lacks this feature.
> - Since version 10.6, available in [GitLab Core](https://about.gitlab.com/pricing/)
> - Since version 11.0, we support `direct_upload` to S3.
@@ -160,6 +160,11 @@ _The artifacts are stored by default in
gitlab-rake gitlab:artifacts:migrate
```
+CAUTION: **CAUTION:**
+JUnit test report artifact (`junit.xml.gz`) migration
+[is not supported](https://gitlab.com/gitlab-org/gitlab/issues/27698)
+by the `gitlab:artifacts:migrate` script.
+
**In installations from source:**
_The artifacts are stored by default in
@@ -188,13 +193,25 @@ _The artifacts are stored by default in
sudo -u git -H bundle exec rake gitlab:artifacts:migrate RAILS_ENV=production
```
+CAUTION: **CAUTION:**
+JUnit test report artifact (`junit.xml.gz`) migration
+[is not supported](https://gitlab.com/gitlab-org/gitlab/issues/27698)
+by the `gitlab:artifacts:migrate` script.
+
### Migrating from object storage to local storage
+**In Omnibus installations:**
+
In order to migrate back to local storage:
-1. Set both `direct_upload` and `background_upload` to false under the artifacts object storage settings. Don't forget to restart GitLab.
-1. Run `rake gitlab:artifacts:migrate_to_local` on your console.
-1. Disable `object_storage` for artifacts in `gitlab.rb`. Remember to restart GitLab afterwards.
+1. Set both `direct_upload` and `background_upload` to `false` in `gitlab.rb`, under the artifacts object storage settings.
+1. [Reconfigure GitLab][].
+1. Run `gitlab-rake gitlab:artifacts:migrate_to_local`.
+1. Disable `object_storage` for artifacts in `gitlab.rb`, as shown in the example after this list:
+   - Set `gitlab_rails['artifacts_object_store_enabled'] = false`.
+   - Comment out all other `artifacts_object_store` settings, including the entire
+     `artifacts_object_store_connection` section and its closing `}`.
+1. [Reconfigure GitLab][].
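+
+For example, the relevant `gitlab.rb` settings for steps 1 and 4 might look like
+the following (a sketch only; match the key names against your existing artifacts
+object storage configuration):
+
+```ruby
+# Step 1: stop sending new artifacts to object storage
+gitlab_rails['artifacts_object_store_direct_upload'] = false
+gitlab_rails['artifacts_object_store_background_upload'] = false
+
+# Step 4, after the migration has finished: disable object storage for artifacts
+gitlab_rails['artifacts_object_store_enabled'] = false
+# gitlab_rails['artifacts_object_store_remote_directory'] = "artifacts"
+# gitlab_rails['artifacts_object_store_connection'] = {
+#   ...
+# }
+```
+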
## Expiring artifacts
@@ -369,7 +386,7 @@ If you need to manually remove job artifacts associated with multiple jobs while
NOTE: **NOTE:**
This step will also erase artifacts that users have chosen to
- ["keep"](../user/project/pipelines/job_artifacts.md#browsing-artifacts).
+ ["keep"](../ci/pipelines/job_artifacts.md#browsing-artifacts).
```ruby
builds_to_clear = builds_with_artifacts.where("finished_at < ?", 1.week.ago)
diff --git a/doc/administration/lfs/lfs_administration.md b/doc/administration/lfs/lfs_administration.md
index 7900ec13c02..d8631b05c14 100644
--- a/doc/administration/lfs/lfs_administration.md
+++ b/doc/administration/lfs/lfs_administration.md
@@ -63,13 +63,13 @@ GitLab provides two different options for the uploading mechanism: "Direct uploa
**Option 1. Direct upload**
-1. User pushes an lfs file to the GitLab instance
+1. User pushes an `lfs` file to the GitLab instance
1. GitLab-workhorse uploads the file directly to the external object storage
1. GitLab-workhorse notifies GitLab-rails that the upload process is complete
**Option 2. Background upload**
-1. User pushes an lfs file to the GitLab instance
+1. User pushes an `lfs` file to the GitLab instance
1. GitLab-rails stores the file in the local file storage
1. GitLab-rails then uploads the file to the external object storage asynchronously
@@ -127,7 +127,7 @@ Here is a configuration example with Rackspace Cloud Files.
NOTE: **Note:**
Regardless of whether the container has public access enabled or disabled, Fog will
use the TempURL method to grant access to LFS objects. If you see errors in logs referencing
-instantiating storage with a temp-url-key, ensure that you have set they key properly
+instantiating storage with a temp-url-key, ensure that you have set the key properly
on the Rackspace API and in `gitlab.rb`. You can verify the value of the key Rackspace
has set by sending a GET request with token header to the service access endpoint URL
and comparing the output of the returned headers.
@@ -261,7 +261,7 @@ See more information in [!19581](https://gitlab.com/gitlab-org/gitlab-foss/-/mer
## Known limitations
-- Support for removing unreferenced LFS objects was added in 8.14 onwards.
+- Support for removing unreferenced LFS objects was added in 8.14 onward.
- LFS authentications via SSH was added with GitLab 8.12.
- Only compatible with the Git LFS client versions 1.1.0 and up, or 1.0.2.
- The storage statistics currently count each LFS object multiple times for
diff --git a/doc/administration/lfs/manage_large_binaries_with_git_lfs.md b/doc/administration/lfs/manage_large_binaries_with_git_lfs.md
index 025b547c37e..61b8f28293f 100644
--- a/doc/administration/lfs/manage_large_binaries_with_git_lfs.md
+++ b/doc/administration/lfs/manage_large_binaries_with_git_lfs.md
@@ -113,7 +113,7 @@ created or updated with the following content:
```
You can also register a file type as lockable without using LFS
-(In order to be able to lock/unlock a file you need a remote server that implements the LFS File Locking API),
+(In order to be able to lock/unlock a file you need a remote server that implements the LFS File Locking API),
in order to do that you can edit the `.gitattributes` file manually:
```shell
@@ -252,7 +252,7 @@ on [Git Credential Storage documentation](https://git-scm.com/book/en/v2/Git-Too
GitLab checks files to detect LFS pointers on push. If LFS pointers are detected, GitLab tries to verify that those files already exist in LFS on GitLab.
-Verify that LFS in installed locally and consider a manual push with `git lfs push --all`.
+Verify that LFS is installed locally and consider a manual push with `git lfs push --all`.
If you are storing LFS files outside of GitLab you can disable LFS on the project by setting `lfs_enabled: false` with the [projects API](../../api/projects.md#edit-project).
diff --git a/doc/administration/lfs/migrate_from_git_annex_to_git_lfs.md b/doc/administration/lfs/migrate_from_git_annex_to_git_lfs.md
index 3f983bebf27..bfe18f697bf 100644
--- a/doc/administration/lfs/migrate_from_git_annex_to_git_lfs.md
+++ b/doc/administration/lfs/migrate_from_git_annex_to_git_lfs.md
@@ -57,7 +57,7 @@ Fire up a terminal, navigate to your Git repository and:
1. Enable `git-lfs`:
- ```
+ ```shell
git lfs install
git lfs track <files>
git add .
diff --git a/doc/administration/libravatar.md b/doc/administration/libravatar.md
index 43a6b8f0d34..c28e701dc25 100644
--- a/doc/administration/libravatar.md
+++ b/doc/administration/libravatar.md
@@ -9,7 +9,7 @@ GitLab by default supports the [Gravatar](https://gravatar.com) avatar service.
Libravatar is another service that delivers your avatar (profile picture) to
other websites. The Libravatar API is
[heavily based on gravatar](https://wiki.libravatar.org/api/), so you can
-easily switch to the Libravatar avatar service or even a self-hosted Libravatar
+easily switch to the Libravatar avatar service or even your own Libravatar
server.
## Configuration
@@ -35,7 +35,7 @@ the configuration options as follows:
ssl_url: "https://seccdn.libravatar.org/avatar/%{hash}?s=%{size}&d=identicon"
```
-### Self-hosted Libravatar server
+### Your own Libravatar server
If you are [running your own libravatar service](https://wiki.libravatar.org/running_your_own/),
the URL will be different in the configuration, but you must provide the same
diff --git a/doc/administration/logs.md b/doc/administration/logs.md
index b98f02ccc98..222d79ddc35 100644
--- a/doc/administration/logs.md
+++ b/doc/administration/logs.md
@@ -1,10 +1,9 @@
# Log system
-GitLab has an advanced log system where everything is logged so that you
+GitLab has an advanced log system where everything is logged, so you
can analyze your instance using various system log files. In addition to
-system log files, GitLab Enterprise Edition comes with Audit Events.
-Find more about them [in Audit Events
-documentation](audit_events.md)
+system log files, GitLab Enterprise Edition provides Audit Events.
+Find more about them in the [Audit Events documentation](audit_events.md).
System log files are typically plain text in a standard log file format.
This guide talks about how to read and use these system log files.
@@ -13,21 +12,40 @@ This guide talks about how to read and use these system log files.
This file lives in `/var/log/gitlab/gitlab-rails/production_json.log` for
Omnibus GitLab packages or in `/home/git/gitlab/log/production_json.log` for
-installations from source. (When GitLab is running in an environment
-other than production, the corresponding logfile is shown here.)
+installations from source. When GitLab is running in an environment
+other than production, the corresponding logfile is shown here.
It contains a structured log for Rails controller requests received from
GitLab, thanks to [Lograge](https://github.com/roidrage/lograge/). Note that
requests from the API are logged to a separate file in `api_json.log`.
-Each line contains a JSON line that can be ingested by Elasticsearch, Splunk, etc. For example:
+Each line contains a JSON line that can be ingested by services like Elasticsearch and Splunk.
+Line breaks have been added to this example for legibility:
```json
-{"method":"GET","path":"/gitlab/gitlab-foss/issues/1234","format":"html","controller":"Projects::IssuesController","action":"show","status":200,"duration":229.03,"view":174.07,"db":13.24,"time":"2017-08-08T20:15:54.821Z","params":[{"key":"param_key","value":"param_value"}],"remote_ip":"18.245.0.1","user_id":1,"username":"admin","gitaly_calls":76,"gitaly_duration":7.41,"queue_duration": 112.47}
+{
+ "method":"GET",
+ "path":"/gitlab/gitlab-foss/issues/1234",
+ "format":"html",
+ "controller":"Projects::IssuesController",
+ "action":"show",
+ "status":200,
+ "duration":229.03,
+ "view":174.07,
+ "db":13.24,
+ "time":"2017-08-08T20:15:54.821Z",
+ "params":[{"key":"param_key","value":"param_value"}],
+ "remote_ip":"18.245.0.1",
+ "user_id":1,
+ "username":"admin",
+ "gitaly_calls":76,
+ "gitaly_duration":7.41,
+ "queue_duration": 112.47
+}
```
-In this example, you can see this was a GET request for a specific
-issue. Notice each line also contains performance data. All times are in
+This example shows a GET request for a specific
+issue. Each line also contains performance data, with times in
milliseconds:
1. `duration`: total time taken to retrieve the request
@@ -37,10 +55,10 @@ milliseconds:
1. `gitaly_calls`: total number of calls made to Gitaly
1. `gitaly_duration`: total time taken by Gitaly calls
-User clone/fetch activity using http transport appears in this log as `action: git_upload_pack`.
+User clone and fetch activity using HTTP transport appears in this log as `action: git_upload_pack`.
-In addition, the log contains the IP address from which the request originated
-(`remote_ip`) as well as the user's ID (`user_id`), and username (`username`).
+In addition, the log contains the originating IP address
+(`remote_ip`), the user's ID (`user_id`), and username (`username`).
NOTE: **Note:** Starting with GitLab 12.5, if an error occurs, an
`exception` field is included with `class`, `message`, and
@@ -90,11 +108,11 @@ installations from source. (When GitLab is running in an environment
other than production, the corresponding logfile is shown here.)
It contains information about all performed requests. You can see the
-URL and type of request, IP address and what exactly parts of code were
-involved to service this particular request. Also you can see all SQL
-request that have been performed and how much time it took. This task is
+URL and type of request, IP address, and which parts of the code were
+involved in servicing this particular request. Also, you can see all SQL
+requests performed, and how much time each took. This information is
more useful for GitLab contributors and developers. Use part of this log
-file when you are going to report bug. For example:
+file when you're reporting bugs. For example:
```plaintext
Started GET "/gitlabhq/yaml_db/tree/master" for 168.111.56.1 at 2015-02-12 19:34:53 +0200
@@ -114,26 +132,44 @@ Processing by Projects::TreeController#show as HTML
Completed 200 OK in 166ms (Views: 117.4ms | ActiveRecord: 27.2ms)
```
-In this example we can see that server processed an HTTP request with URL
-`/gitlabhq/yaml_db/tree/master` from IP 168.111.56.1 at 2015-02-12
-19:34:53 +0200. Also we can see that request was processed by
-`Projects::TreeController`.
+In this example, the server processed an HTTP request with URL
+`/gitlabhq/yaml_db/tree/master` from IP `168.111.56.1` at `2015-02-12 19:34:53 +0200`.
+The request was processed by `Projects::TreeController`.
## `api_json.log`
> Introduced in GitLab 10.0.
This file lives in
-`/var/log/gitlab/gitlab-rails/api_json.log` for Omnibus GitLab packages or in
+`/var/log/gitlab/gitlab-rails/api_json.log` for Omnibus GitLab packages, or in
`/home/git/gitlab/log/api_json.log` for installations from source.
It helps you see requests made directly to the API. For example:
```json
-{"time":"2018-10-29T12:49:42.123Z","severity":"INFO","duration":709.08,"db":14.59,"view":694.49,"status":200,"method":"GET","path":"/api/v4/projects","params":[{"key":"action","value":"git-upload-pack"},{"key":"changes","value":"_any"},{"key":"key_id","value":"secret"},{"key":"secret_token","value":"[FILTERED]"}],"host":"localhost","remote_ip":"::1","ua":"Ruby","route":"/api/:version/projects","user_id":1,"username":"root","queue_duration":100.31,"gitaly_calls":30,"gitaly_duration":5.36}
+{
+ "time":"2018-10-29T12:49:42.123Z",
+ "severity":"INFO",
+ "duration":709.08,
+ "db":14.59,
+ "view":694.49,
+ "status":200,
+ "method":"GET",
+ "path":"/api/v4/projects",
+ "params":[{"key":"action","value":"git-upload-pack"},{"key":"changes","value":"_any"},{"key":"key_id","value":"secret"},{"key":"secret_token","value":"[FILTERED]"}],
+ "host":"localhost",
+ "remote_ip":"::1",
+ "ua":"Ruby",
+ "route":"/api/:version/projects",
+ "user_id":1,
+ "username":"root",
+ "queue_duration":100.31,
+ "gitaly_calls":30,
+ "gitaly_duration":5.36
+}
```
-This entry above shows an access to an internal endpoint to check whether an
+This entry shows access to an internal endpoint to check whether an
associated SSH key can download the project in question via a `git fetch` or
`git clone`. In this example, we see:
@@ -141,7 +177,7 @@ associated SSH key can download the project in question via a `git fetch` or
1. `queue_duration`: total time in milliseconds that the request was queued inside GitLab Workhorse
1. `method`: The HTTP method used to make the request
1. `path`: The relative path of the query
-1. `params`: Key-value pairs passed in a query string or HTTP body. Sensitive parameters (e.g. passwords, tokens, etc.) are filtered out.
+1. `params`: Key-value pairs passed in a query string or HTTP body. Sensitive parameters (such as passwords and tokens) are filtered out.
1. `ua`: The User-Agent of the requester
## `application.log`
@@ -172,8 +208,18 @@ installations from source.
It contains the JSON version of the logs in `application.log` like the example below:
``` json
-{"severity":"INFO","time":"2020-01-14T13:35:15.466Z","correlation_id":"3823a1550b64417f9c9ed8ee0f48087e","message":"User \"Administrator\" (admin@example.com) was created"}
-{"severity":"INFO","time":"2020-01-14T13:35:15.466Z","correlation_id":"78e3df10c9a18745243d524540bd5be4","message":"Project \"project133\" was removed"}
+{
+ "severity":"INFO",
+ "time":"2020-01-14T13:35:15.466Z",
+ "correlation_id":"3823a1550b64417f9c9ed8ee0f48087e",
+ "message":"User \"Administrator\" (admin@example.com) was created"
+}
+{
+ "severity":"INFO",
+ "time":"2020-01-14T13:35:15.466Z",
+ "correlation_id":"78e3df10c9a18745243d524540bd5be4",
+ "message":"Project \"project133\" was removed"
+}
```
## `integrations_json.log`
@@ -182,11 +228,28 @@ This file lives in `/var/log/gitlab/gitlab-rails/integrations_json.log` for
Omnibus GitLab packages or in `/home/git/gitlab/log/integrations_json.log` for
installations from source.
-It contains information about [integrations](../user/project/integrations/project_services.md) activities such as Jira, Asana and Irker services. It uses JSON format like the example below:
+It contains information about the activities of [integrations](../user/project/integrations/project_services.md), such as the Jira, Asana, and Irker services. It uses JSON format like the example below:
```json
-{"severity":"ERROR","time":"2018-09-06T14:56:20.439Z","service_class":"JiraService","project_id":8,"project_path":"h5bp/html5-boilerplate","message":"Error sending message","client_url":"http://jira.gitlap.com:8080","error":"execution expired"}
-{"severity":"INFO","time":"2018-09-06T17:15:16.365Z","service_class":"JiraService","project_id":3,"project_path":"namespace2/project2","message":"Successfully posted","client_url":"http://jira.example.com"}
+{
+ "severity":"ERROR",
+ "time":"2018-09-06T14:56:20.439Z",
+ "service_class":"JiraService",
+ "project_id":8,
+ "project_path":"h5bp/html5-boilerplate",
+ "message":"Error sending message",
+ "client_url":"http://jira.gitlap.com:8080",
+ "error":"execution expired"
+}
+{
+ "severity":"INFO",
+ "time":"2018-09-06T17:15:16.365Z",
+ "service_class":"JiraService",
+ "project_id":3,
+ "project_path":"namespace2/project2",
+ "message":"Successfully posted",
+ "client_url":"http://jira.example.com"
+}
```
## `kubernetes.log`
@@ -202,12 +265,32 @@ It logs information related to the Kubernetes Integration including errors
during installing cluster applications on your GitLab managed Kubernetes
clusters.
-Each line contains a JSON line that can be ingested by Elasticsearch, Splunk,
-etc. For example:
+Each line contains a JSON line that can be ingested by services like Elasticsearch and Splunk.
+Line breaks have been added to the following example for clarity:
```json
-{"severity":"ERROR","time":"2018-11-23T15:14:54.652Z","exception":"Kubeclient::HttpError","error_code":401,"service":"Clusters::Applications::CheckInstallationProgressService","app_id":14,"project_ids":[1],"group_ids":[],"message":"Unauthorized"}
-{"severity":"ERROR","time":"2018-11-23T15:42:11.647Z","exception":"Kubeclient::HttpError","error_code":null,"service":"Clusters::Applications::InstallService","app_id":2,"project_ids":[19],"group_ids":[],"message":"SSL_connect returned=1 errno=0 state=error: certificate verify failed (unable to get local issuer certificate)"}
+{
+ "severity":"ERROR",
+ "time":"2018-11-23T15:14:54.652Z",
+ "exception":"Kubeclient::HttpError",
+ "error_code":401,
+ "service":"Clusters::Applications::CheckInstallationProgressService",
+ "app_id":14,
+ "project_ids":[1],
+ "group_ids":[],
+ "message":"Unauthorized"
+}
+{
+ "severity":"ERROR",
+ "time":"2018-11-23T15:42:11.647Z",
+ "exception":"Kubeclient::HttpError",
+ "error_code":null,
+ "service":"Clusters::Applications::InstallService",
+ "app_id":2,
+ "project_ids":[19],
+ "group_ids":[],
+ "message":"SSL_connect returned=1 errno=0 state=error: certificate verify failed (unable to get local issuer certificate)"
+}
```
## `git_json.log`
@@ -220,8 +303,8 @@ NOTE: **Note:**
After 12.2, this file was renamed from `githost.log` to
`git_json.log` and stored in JSON format.
-GitLab has to interact with Git repositories but in some rare cases
-something can go wrong and in this case you will know what exactly
+GitLab has to interact with Git repositories, but in some rare cases
+something can go wrong, and in this case you will know exactly what
happened. This log file contains all failed requests from GitLab to Git
repositories. In the majority of cases this file will be useful for developers
only. For example:
@@ -244,9 +327,20 @@ installations from source.
Changes to group or project settings are logged to this file. For example:
```json
-{"severity":"INFO","time":"2018-10-17T17:38:22.523Z","author_id":3,"entity_id":2,"entity_type":"Project","change":"visibility","from":"Private","to":"Public","author_name":"John Doe4","target_id":2,"target_type":"Project","target_details":"namespace2/project2"}
-{"severity":"INFO","time":"2018-10-17T17:38:22.830Z","author_id":5,"entity_id":3,"entity_type":"Project","change":"name","from":"John Doe7 / project3","to":"John Doe7 / new name","author_name":"John Doe6","target_id":3,"target_type":"Project","target_details":"namespace3/project3"}
-{"severity":"INFO","time":"2018-10-17T17:38:23.175Z","author_id":7,"entity_id":4,"entity_type":"Project","change":"path","from":"","to":"namespace4/newpath","author_name":"John Doe8","target_id":4,"target_type":"Project","target_details":"namespace4/newpath"}
+{
+ "severity":"INFO",
+ "time":"2018-10-17T17:38:22.523Z",
+ "author_id":3,
+ "entity_id":2,
+ "entity_type":"Project",
+ "change":"visibility",
+ "from":"Private",
+ "to":"Public",
+ "author_name":"John Doe4",
+ "target_id":2,
+ "target_type":"Project",
+ "target_details":"namespace2/project2"
+}
```
## `sidekiq.log`
@@ -259,7 +353,7 @@ GitLab uses background jobs for processing tasks which can take a long
time. All information about processing these jobs are written down to
this file. For example:
-```
+```plaintext
2014-06-10T07:55:20Z 2037 TID-tm504 ERROR: /opt/bitnami/apps/discourse/htdocs/vendor/bundle/ruby/1.9.1/gems/redis-3.0.7/lib/redis/client.rb:228:in `read'
2014-06-10T18:18:26Z 14299 TID-55uqo INFO: Booting Sidekiq 3.0.0 with redis options {:url=>"redis://localhost:6379/0", :namespace=>"sidekiq"}
```
@@ -268,7 +362,27 @@ Instead of the format above, you can opt to generate JSON logs for
Sidekiq. For example:
```json
-{"severity":"INFO","time":"2018-04-03T22:57:22.071Z","queue":"cronjob:update_all_mirrors","args":[],"class":"UpdateAllMirrorsWorker","retry":false,"queue_namespace":"cronjob","jid":"06aeaa3b0aadacf9981f368e","created_at":"2018-04-03T22:57:21.930Z","enqueued_at":"2018-04-03T22:57:21.931Z","pid":10077,"message":"UpdateAllMirrorsWorker JID-06aeaa3b0aadacf9981f368e: done: 0.139 sec","job_status":"done","duration":0.139,"completed_at":"2018-04-03T22:57:22.071Z","db_duration":0.05,"db_duration_s":0.0005,"gitaly_duration":0,"gitaly_calls":0}
+{
+ "severity":"INFO",
+ "time":"2018-04-03T22:57:22.071Z",
+ "queue":"cronjob:update_all_mirrors",
+ "args":[],
+ "class":"UpdateAllMirrorsWorker",
+ "retry":false,
+ "queue_namespace":"cronjob",
+ "jid":"06aeaa3b0aadacf9981f368e",
+ "created_at":"2018-04-03T22:57:21.930Z",
+ "enqueued_at":"2018-04-03T22:57:21.931Z",
+ "pid":10077,
+ "message":"UpdateAllMirrorsWorker JID-06aeaa3b0aadacf9981f368e: done: 0.139 sec",
+ "job_status":"done",
+ "duration":0.139,
+ "completed_at":"2018-04-03T22:57:22.071Z",
+ "db_duration":0.05,
+ "db_duration_s":0.0005,
+ "gitaly_duration":0,
+ "gitaly_calls":0
+}
```
For Omnibus GitLab installations, add the configuration option:
@@ -286,14 +400,29 @@ For source installations, edit the `gitlab.yml` and set the Sidekiq
log_format: json
```
+## `sidekiq_client.log`
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/26586) in GitLab 12.9.
+
+This file lives in `/var/log/gitlab/gitlab-rails/sidekiq_client.log` for
+Omnibus GitLab packages or in `/home/git/gitlab/log/sidekiq_client.log` for
+installations from source.
+
+This file contains logging information about jobs before they start
+being processed by Sidekiq, for example before being enqueued.
+
+This logfile follows the same structure as
+[`sidekiq.log`](#sidekiqlog), so it will be structured as JSON if
+you've configured this for Sidekiq as mentioned above.
+
## `gitlab-shell.log`
This file lives in `/var/log/gitlab/gitaly/gitlab-shell.log` for
Omnibus GitLab packages or in `/home/git/gitaly/gitlab-shell.log` for
installations from source.
-NOTE: **Note**
-For GitLab 12.5 and earlier the file lives in `/var/log/gitlab/gitlab-shell/gitlab-shell.log`.
+NOTE: **Note:**
+For GitLab 12.5 and earlier, the file lives in `/var/log/gitlab/gitlab-shell/gitlab-shell.log`.
GitLab Shell is used by GitLab for executing Git commands and provide
SSH access to Git repositories. For example:
@@ -363,7 +492,7 @@ This log records:
- [Protected paths] abusive requests.
NOTE: **Note:**
-From [%12.1](https://gitlab.com/gitlab-org/gitlab-foss/issues/62756), user id and username are also
+From [%12.1](https://gitlab.com/gitlab-org/gitlab-foss/issues/62756), user ID and username are also
recorded on this log, if available.
## `graphql_json.log`
@@ -411,13 +540,13 @@ was initiated, such as `1509705644.log`
## `sidekiq_exporter.log` and `web_exporter.log`
If Prometheus metrics and the Sidekiq Exporter are both enabled, Sidekiq will
-start a Web server and listen to the defined port (default: 8082). Access logs
+start a Web server and listen to the defined port (default: `8082`). Access logs
will be generated in `/var/log/gitlab/gitlab-rails/sidekiq_exporter.log` for
Omnibus GitLab packages or in `/home/git/gitlab/log/sidekiq_exporter.log` for
installations from source.
If Prometheus metrics and the Web Exporter are both enabled, Unicorn/Puma will
-start a Web server and listen to the defined port (default: 8083). Access logs
+start a Web server and listen to the defined port (default: `8083`). Access logs
will be generated in `/var/log/gitlab/gitlab-rails/web_exporter.log` for
Omnibus GitLab packages or in `/home/git/gitlab/log/web_exporter.log` for
installations from source.
@@ -427,7 +556,7 @@ installations from source.
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/15442) in GitLab 12.3.
Contains details of GitLab's [Database Load Balancing](database_load_balancing.md).
-It is stored at:
+It's stored at:
- `/var/log/gitlab/gitlab-rails/database_load_balancing.log` for Omnibus GitLab packages.
- `/home/git/gitlab/log/database_load_balancing.log` for installations from source.
@@ -444,11 +573,21 @@ from source.
It logs information related to the Elasticsearch Integration including
errors during indexing or searching Elasticsearch.
-Each line contains a JSON line that can be ingested by Elasticsearch, Splunk,
-etc. For example:
+Each line contains a JSON line that can be ingested by services like Elasticsearch and Splunk.
+Line breaks have been added to the following example line for clarity:
```json
-{"severity":"DEBUG","time":"2019-10-17T06:23:13.227Z","correlation_id":null,"message":"redacted_search_result","class_name":"Milestone","id":2,"ability":"read_milestone","current_user_id":2,"query":"project"}
+{
+ "severity":"DEBUG",
+ "time":"2019-10-17T06:23:13.227Z",
+ "correlation_id":null,
+ "message":"redacted_search_result",
+ "class_name":"Milestone",
+ "id":2,
+ "ability":"read_milestone",
+ "current_user_id":2,
+ "query":"project"
+}
```
## `exceptions_json.log`
@@ -460,7 +599,7 @@ This file lives in
packages or in `/home/git/gitlab/log/exceptions_json.log` for installations
from source.
-It logs the information about exceptions being tracked by `Gitlab::ErrorTracking` which provides standard and consistent way of [processing rescued exceptions](https://gitlab.com/gitlab-org/gitlab/blob/master/doc/development/logging.md#exception-handling).
+It logs information about exceptions being tracked by `Gitlab::ErrorTracking`, which provides a standard and consistent way of [processing rescued exceptions](https://gitlab.com/gitlab-org/gitlab/blob/master/doc/development/logging.md#exception-handling).
Each line contains a JSON line that can be ingested by Elasticsearch. For example:
diff --git a/doc/administration/monitoring/gitlab_self_monitoring_project/index.md b/doc/administration/monitoring/gitlab_self_monitoring_project/index.md
index da4dd09bc67..2058aa4f01c 100644
--- a/doc/administration/monitoring/gitlab_self_monitoring_project/index.md
+++ b/doc/administration/monitoring/gitlab_self_monitoring_project/index.md
@@ -1,9 +1,7 @@
# GitLab self monitoring project
-NOTE: **Note:**
-This feature is available behind a feature flag called `self_monitoring_project`
-since [12.7](https://gitlab.com/gitlab-org/gitlab/issues/32351). The feature flag
-will be removed once we [add dashboards to display metrics](https://gitlab.com/groups/gitlab-org/-/epics/2367).
+> - [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/32351) in GitLab 12.7, behind a disabled feature flag (`self_monitoring_project`).
+> - The feature flag was removed and the Self Monitoring Project was [made generally available](https://gitlab.com/gitlab-org/gitlab/issues/198511) in GitLab 12.8.
GitLab has been adding the ability for administrators to see insights into the health of
their GitLab instance. In order to surface this experience in a native way, similar to how
@@ -19,7 +17,7 @@ members to the group in order to give them maintainer access to the project.
This project will be used for self monitoring your GitLab instance.
-## Activating the self monitoring project
+## Creating the self monitoring project
1. Navigate to **Admin Area > Settings > Metrics and profiling**, and expand the **Self monitoring** section.
1. Toggle the **Create Project** button on.
@@ -28,10 +26,11 @@ created, GitLab displays a message with a link to the project. The project
will also be linked in the help text above the **Create Project** button. You can also
find the project under **Projects > Your projects**.
-## Deactivating the self monitoring project
+## Deleting the self monitoring project
CAUTION: **Warning:**
-If you deactivate the self monitoring project, it will be permanently deleted.
+If you delete the self monitoring project, you will lose any changes made to the
+project. If you create the project again, it will be created in its default state.
1. Navigate to **Admin Area > Settings > Metrics and profiling**, and expand the **Self monitoring** section.
1. Toggle the **Create Project** button off.
@@ -64,3 +63,29 @@ You can add custom metrics in the self monitoring project by:
1. [Duplicating](../../../user/project/integrations/prometheus.md#duplicating-a-gitlab-defined-dashboard) the default dashboard.
1. [Editing](../../../user/project/integrations/prometheus.md#view-and-edit-the-source-file-of-a-custom-dashboard) the newly created dashboard file and configuring it with [dashboard YAML properties](../../../user/project/integrations/prometheus.md#dashboard-yaml-properties).
+
+## Troubleshooting
+
+### Getting error message in logs: `Could not create instance administrators group. Errors: ["You don’t have permission to create groups."]`
+
+There is [a bug](https://gitlab.com/gitlab-org/gitlab/issues/208676) which causes
+project creation to fail with the following error (which appears in the log file)
+when the first admin user is an
+[external user](../../../user/permissions.md#external-users-core-only):
+
+```plaintext
+Could not create instance administrators group. Errors: ["You don’t have permission to create groups."]
+```
+
+Run the following in a Rails console to check if the first admin user is an external user:
+
+```ruby
+User.admins.active.first.external?
+```
+
+If this returns `true`, the first admin user is an external user.
+
+If you face this issue, you can temporarily
+[make the admin user a non-external user](../../../user/permissions.md#external-users-core-only)
+and then try to create the project.
+Once the project is created, the admin user can be changed back to an external user.
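+
+If you prefer to make that change from the same Rails console session, a minimal
+sketch (assuming the default `User` model attributes) is:
+
+```ruby
+user = User.admins.active.first
+user.update!(external: false) # make the first admin a non-external user
+
+# Create the self monitoring project from the Admin Area, then optionally
+# change the user back:
+user.update!(external: true)
+```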
diff --git a/doc/administration/monitoring/performance/grafana_configuration.md b/doc/administration/monitoring/performance/grafana_configuration.md
index 6d0fc8ad6d4..61b570aa42e 100644
--- a/doc/administration/monitoring/performance/grafana_configuration.md
+++ b/doc/administration/monitoring/performance/grafana_configuration.md
@@ -149,7 +149,14 @@ However, you should **not** reinstate your old data _except_ under one of the fo
1. If you are certain that you changed your default admin password when you enabled Grafana
1. If you run GitLab in a private network, accessed only by trusted users, and your Grafana login page has not been exposed to the internet
-If you require access to your old Grafana data but do not meet one of these criteria, you may consider reinstating it temporarily, [exporting the dashboards](https://grafana.com/docs/reference/export_import/#exporting-a-dashboard) you need, then refreshing the data and [re-importing your dashboards](https://grafana.com/docs/reference/export_import/#importing-a-dashboard). Note that this poses a temporary vulnerability while your old Grafana data is in use, and the decision to do so should be weighed carefully with your need to access existing data and dashboards.
+If you require access to your old Grafana data but do not meet one of these criteria, you may consider:
+
+1. Reinstating it temporarily.
+1. [Exporting the dashboards](https://grafana.com/docs/reference/export_import/#exporting-a-dashboard) you need.
+1. Refreshing the data and [re-importing your dashboards](https://grafana.com/docs/reference/export_import/#importing-a-dashboard).
+
+DANGER: **Danger:**
+This poses a temporary vulnerability while your old Grafana data is in use, and the decision to do so should be weighed carefully against your need to access existing data and dashboards.
For more information and further mitigation details, please refer to our [blog post on the security release](https://about.gitlab.com/blog/2019/08/12/critical-security-release-gitlab-12-dot-1-dot-6-released/).
diff --git a/doc/administration/monitoring/performance/influxdb_configuration.md b/doc/administration/monitoring/performance/influxdb_configuration.md
index 234d0dc2e88..b81f0ce1506 100644
--- a/doc/administration/monitoring/performance/influxdb_configuration.md
+++ b/doc/administration/monitoring/performance/influxdb_configuration.md
@@ -150,7 +150,7 @@ before creating a database.
_**Note:** If you [created an admin user](#create-a-new-admin-user) and enabled
[HTTP authentication](#http), remember to append the username (`-username <username>`)
-and password (`-password <password>`) you set earlier to the commands below._
+and password (`-password <password>`) you set earlier to the commands below._
Run the following command to create a database named `gitlab`:
diff --git a/doc/administration/monitoring/prometheus/gitlab_metrics.md b/doc/administration/monitoring/prometheus/gitlab_metrics.md
index 1e0efc6f531..b789f2ddd02 100644
--- a/doc/administration/monitoring/prometheus/gitlab_metrics.md
+++ b/doc/administration/monitoring/prometheus/gitlab_metrics.md
@@ -30,6 +30,8 @@ The following metrics are available:
| `gitlab_cache_misses_total` | Counter | 10.2 | Cache read miss | controller, action |
| `gitlab_cache_operation_duration_seconds` | Histogram | 10.2 | Cache access time | |
| `gitlab_cache_operations_total` | Counter | 12.2 | Cache operations by controller/action | controller, action, operation |
+| `job_waiter_started_total` | Counter | 12.9 | Number of batches of jobs started where a web request is waiting for the jobs to complete | worker |
+| `job_waiter_timeouts_total` | Counter | 12.9 | Number of batches of jobs that timed out where a web request is waiting for the jobs to complete | worker |
| `gitlab_database_transaction_seconds` | Histogram | 12.1 | Time spent in database transactions, in seconds | |
| `gitlab_method_call_duration_seconds` | Histogram | 10.2 | Method calls real duration | controller, action, module, method |
| `gitlab_page_out_of_bounds` | Counter | 12.8 | Counter for the PageLimiter pagination limit being hit | controller, action, bot |
@@ -84,13 +86,15 @@ The following metrics are available:
| `failed_login_captcha_total` | Gauge | 11.0 | Counter of failed CAPTCHA attempts during login | |
| `successful_login_captcha_total` | Gauge | 11.0 | Counter of successful CAPTCHA attempts during login | |
| `auto_devops_pipelines_completed_total` | Counter | 12.7 | Counter of completed Auto DevOps pipelines, labeled by status | |
-| `sidekiq_jobs_cpu_seconds` | Histogram | 12.4 | Seconds of cpu time to run Sidekiq job | |
-| `sidekiq_jobs_completion_seconds` | Histogram | 12.2 | Seconds to complete Sidekiq job | |
-| `sidekiq_jobs_queue_duration_seconds` | Histogram | 12.5 | Duration in seconds that a Sidekiq job was queued before being executed | |
-| `sidekiq_jobs_failed_total` | Counter | 12.2 | Sidekiq jobs failed | |
-| `sidekiq_jobs_retried_total` | Counter | 12.2 | Sidekiq jobs retried | |
-| `sidekiq_running_jobs` | Gauge | 12.2 | Number of Sidekiq jobs running | |
-| `sidekiq_concurrency` | Gauge | 12.5 | Maximum number of Sidekiq jobs | |
+| `sidekiq_jobs_cpu_seconds` | Histogram | 12.4 | Seconds of cpu time to run Sidekiq job | queue, boundary, external_dependencies, feature_category, job_status, urgency |
+| `sidekiq_jobs_completion_seconds` | Histogram | 12.2 | Seconds to complete Sidekiq job | queue, boundary, external_dependencies, feature_category, job_status, urgency |
+| `sidekiq_jobs_db_seconds` | Histogram | 12.9 | Seconds of DB time to run Sidekiq job | queue, boundary, external_dependencies, feature_category, job_status, urgency |
+| `sidekiq_jobs_gitaly_seconds` | Histogram | 12.9 | Seconds of Gitaly time to run Sidekiq job | queue, boundary, external_dependencies, feature_category, job_status, urgency |
+| `sidekiq_jobs_queue_duration_seconds` | Histogram | 12.5 | Duration in seconds that a Sidekiq job was queued before being executed | queue, boundary, external_dependencies, feature_category, urgency |
+| `sidekiq_jobs_failed_total` | Counter | 12.2 | Sidekiq jobs failed | queue, boundary, external_dependencies, feature_category, urgency |
+| `sidekiq_jobs_retried_total` | Counter | 12.2 | Sidekiq jobs retried | queue, boundary, external_dependencies, feature_category, urgency |
+| `sidekiq_running_jobs` | Gauge | 12.2 | Number of Sidekiq jobs running | queue, boundary, external_dependencies, feature_category, urgency |
+| `sidekiq_concurrency` | Gauge | 12.5 | Maximum number of Sidekiq jobs | |
## Metrics controlled by a feature flag
@@ -178,7 +182,7 @@ Unicorn specific metrics, when Unicorn is used.
| `unicorn_queued_connections` | Gauge | 11.0 | The number of queued Unicorn connections |
| `unicorn_workers` | Gauge | 12.0 | The number of Unicorn workers |
-## Puma Metrics **(EXPERIMENTAL)**
+## Puma Metrics
When Puma is used instead of Unicorn, the following metrics are available:
diff --git a/doc/administration/monitoring/prometheus/index.md b/doc/administration/monitoring/prometheus/index.md
index 03ea7656d83..d29eb266431 100644
--- a/doc/administration/monitoring/prometheus/index.md
+++ b/doc/administration/monitoring/prometheus/index.md
@@ -7,7 +7,7 @@
> they got added. For installations from source you will have to install them
> yourself. Over subsequent releases additional GitLab metrics will be captured.
> - Prometheus services are on by default with GitLab 9.0.
-> - Prometheus and its exporters do not authenticate users, and will be available
+> - Prometheus and its exporters don't authenticate users, and will be available
> to anyone who can access them.
[Prometheus] is a powerful time-series monitoring service, providing a flexible
@@ -18,7 +18,7 @@ access to high quality time-series monitoring of GitLab services.
## Overview
Prometheus works by periodically connecting to data sources and collecting their
-performance metrics via the [various exporters](#bundled-software-metrics). To view
+performance metrics through the [various exporters](#bundled-software-metrics). To view
and work with the monitoring data, you can either
[connect directly to Prometheus](#viewing-performance-metrics) or utilize a
dashboard tool like [Grafana](https://grafana.com).
@@ -26,11 +26,11 @@ dashboard tool like [Grafana](https://grafana.com).
## Configuring Prometheus
NOTE: **Note:**
-For installations from source you'll have to install and configure it yourself.
+For installations from source, you'll have to install and configure it yourself.
Prometheus and its exporters are on by default, starting with GitLab 9.0.
Prometheus will run as the `gitlab-prometheus` user and listen on
-`http://localhost:9090`. By default Prometheus is only accessible from the GitLab server itself.
+`http://localhost:9090`. By default, Prometheus is only accessible from the GitLab server itself.
Each exporter will be automatically set up as a
monitoring target for Prometheus, unless individually disabled.
@@ -51,7 +51,7 @@ To disable Prometheus and all of its exporters, as well as any added in the futu
NOTE: **Note:**
The following change was added in [GitLab Omnibus 8.17][1261]. Although possible,
it's not recommended to change the port Prometheus listens
-on as this might affect or conflict with other services running on the GitLab
+on, as this might affect or conflict with other services running on the GitLab
server. Proceed at your own risk.
In order to access Prometheus from outside the GitLab server you will need to
@@ -65,7 +65,7 @@ To change the address/port that Prometheus listens on:
prometheus['listen_address'] = 'localhost:9090'
```
- Replace `localhost:9090` with the address/port you want Prometheus to
+ Replace `localhost:9090` with the address or port you want Prometheus to
listen on. If you would like to allow access to Prometheus to hosts other
than `localhost`, leave out the host, or use `0.0.0.0` to allow public access:
@@ -106,7 +106,7 @@ prometheus['scrape_configs'] = [
### Using an external Prometheus server
NOTE: **Note:**
-Prometheus and most exporters do not support authentication. We do not recommend exposing them outside the local network.
+Prometheus and most exporters don't support authentication. We don't recommend exposing them outside the local network.
A few configuration changes are required to allow GitLab to be monitored by an external Prometheus server. External servers are recommended for highly available deployments of GitLab with multiple nodes.
@@ -135,9 +135,9 @@ To use an external Prometheus server:
1. Install and set up a dedicated Prometheus instance, if necessary, using the [official installation instructions](https://prometheus.io/docs/prometheus/latest/installation/).
1. Add the Prometheus server IP address to the [monitoring IP whitelist](../ip_whitelist.md). For example:
- ```ruby
- gitlab_rails['monitoring_whitelist'] = ['127.0.0.0/8', '192.168.0.1']
- ```
+ ```ruby
+ gitlab_rails['monitoring_whitelist'] = ['127.0.0.0/8', '192.168.0.1']
+ ```
1. To scrape NGINX metrics, you'll also need to configure NGINX to allow the Prometheus server
IP. For example:
@@ -151,7 +151,7 @@ To use an external Prometheus server:
}
```
-1. [Reconfigure GitLab][reconfigure] to apply the changes
+1. [Reconfigure GitLab][reconfigure] to apply the changes.
1. Edit the Prometheus server's configuration file.
1. Add each node's exporters to the Prometheus server's
[scrape target configuration](https://prometheus.io/docs/prometheus/latest/configuration/configuration/#%3Cscrape_config%3E).
@@ -223,9 +223,9 @@ some workarounds: using a separate FQDN, using server IP, using a separate brows
having [NGINX proxy it][nginx-custom-config].
The performance data collected by Prometheus can be viewed directly in the
-Prometheus console or through a compatible dashboard tool.
+Prometheus console, or through a compatible dashboard tool.
The Prometheus interface provides a [flexible query language](https://prometheus.io/docs/prometheus/latest/querying/basics/)
-to work with the collected data where you can visualize their output.
+to work with the collected data where you can visualize the output.
For a more fully featured dashboard, Grafana can be used and has
[official support for Prometheus][prom-grafana].
@@ -238,23 +238,23 @@ Sample Prometheus queries:
## Prometheus as a Grafana data source
-Grafana allows you to import Prometheus performance metrics as a data source
-and render the metrics as graphs and dashboards which is helpful with visualisation.
+Grafana allows you to import Prometheus performance metrics as a data source,
+and render the metrics as graphs and dashboards, which is helpful with visualization.
To add a Prometheus dashboard for a single server GitLab setup:
1. Create a new data source in Grafana.
-1. Name your data source i.e GitLab.
+1. Name your data source (such as GitLab).
1. Select `Prometheus` in the type dropdown box.
-1. Add your Prometheus listen address as the URL and set access to `Browser`.
+1. Add your Prometheus listen address as the URL, and set access to `Browser`.
1. Set the HTTP method to `GET`.
-1. Save & Test your configuration to verify that it works.
+1. Save and test your configuration to verify that it works.
## GitLab metrics
> Introduced in GitLab 9.3.
-GitLab monitors its own internal service metrics, and makes them available at the `/-/metrics` endpoint. Unlike other exporters, this endpoint requires authentication as it is available on the same URL and port as user traffic.
+GitLab monitors its own internal service metrics, and makes them available at the `/-/metrics` endpoint. Unlike other exporters, this endpoint requires authentication as it's available on the same URL and port as user traffic.
[➔ Read more about the GitLab Metrics.](gitlab_metrics.md)
@@ -265,8 +265,8 @@ export Prometheus metrics.
### Node exporter
-The node exporter allows you to measure various machine resources such as
-memory, disk and CPU utilization.
+The node exporter allows you to measure various machine resources, such as
+memory, disk, and CPU utilization.
[➔ Read more about the node exporter.](node_exporter.md)
@@ -310,7 +310,7 @@ If your GitLab server is running within Kubernetes, Prometheus will collect metr
To disable the monitoring of Kubernetes:
1. Edit `/etc/gitlab/gitlab.rb`.
-1. Add or find and uncomment the following line and set it to `false`:
+1. Add (or find and uncomment) the following line and set it to `false`:
```ruby
prometheus['monitor_kubernetes'] = false
diff --git a/doc/administration/operations/extra_sidekiq_processes.md b/doc/administration/operations/extra_sidekiq_processes.md
index d70e9d1baa5..df2e4508eb8 100644
--- a/doc/administration/operations/extra_sidekiq_processes.md
+++ b/doc/administration/operations/extra_sidekiq_processes.md
@@ -53,6 +53,20 @@ To start extra Sidekiq processes, you must enable `sidekiq-cluster`:
]
```
+ [In GitLab 12.9](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/26594) and
+ later, the special queue name `*` means all queues. This starts two
+ processes, each handling all queues:
+
+ ```ruby
+ sidekiq_cluster['queue_groups'] = [
+ "*",
+ "*"
+ ]
+ ```
+
+ `*` cannot be combined with concrete queue names - `*, mailers` will
+ just handle the `mailers` queue.
+
1. Save the file and reconfigure GitLab for the changes to take effect:
```shell
@@ -115,10 +129,10 @@ following attributes:
`source_code_management` category.
- `has_external_dependencies` - whether or not the queue connects to external
services. For example, all importers have this set to `true`.
-- `latency_sensitive` - whether or not the queue is particularly sensitive to
- latency, which also means that its jobs should run quickly. For example, the
- `authorized_projects` queue is used to refresh user permissions, and is
- latency sensitive.
+- `urgency` - how important it is that this queue's jobs run
+ quickly. Can be `high`, `low`, or `throttled`. For example, the
+ `authorized_projects` queue is used to refresh user permissions, and
+ is high urgency.
- `name` - the queue name. The other attributes are typically more useful as
they are more general, but this is available in case a particular queue needs
to be selected.
@@ -126,9 +140,9 @@ following attributes:
`unknown`. For example, the `project_export` queue is memory bound as it has
to load data in memory before saving it for export.
-Both `has_external_dependencies` and `latency_sensitive` are boolean attributes:
-only the exact string `true` is considered true, and everything else is
-considered false.
+`has_external_dependencies` is a boolean attribute: only the exact
+string `true` is considered true, and everything else is considered
+false.
### Available operators
@@ -154,6 +168,10 @@ from highest to lowest precedence:
The operator precedence for this syntax is fixed: it's not possible to make AND
have higher precedence than OR.
+[In GitLab 12.9](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/26594) and
+later, as with the standard queue group syntax above, a single `*` as the
+entire queue group selects all queues.
+
### Example queries
In `/etc/gitlab/gitlab.rb`:
@@ -162,10 +180,12 @@ In `/etc/gitlab/gitlab.rb`:
sidekiq_cluster['enable'] = true
sidekiq_cluster['experimental_queue_selector'] = true
sidekiq_cluster['queue_groups'] = [
- # Run all non-CPU-bound queues that are latency sensitive
- 'resource_boundary!=cpu&latency_sensitive=true',
- # Run all continuous integration and pages queues that are not latency sensitive
- 'feature_category=continuous_integration,pages&latency_sensitive=false'
+ # Run all non-CPU-bound queues that are high urgency
+ 'resource_boundary!=cpu&urgency=high',
+ # Run all continuous integration and pages queues that are not high urgency
+ 'feature_category=continuous_integration,pages&urgency!=high',
+ # Run all queues
+ '*'
]
```
@@ -291,11 +311,11 @@ If you experience a problem, you should contact GitLab support. Use the command
line at your own risk.
For debugging purposes, you can start extra Sidekiq processes by using the command
-`/opt/gitlab/embedded/service/gitlab-rails/ee/bin/sidekiq-cluster`. This command
+`/opt/gitlab/embedded/service/gitlab-rails/bin/sidekiq-cluster`. This command
takes arguments using the following syntax:
```shell
-/opt/gitlab/embedded/service/gitlab-rails/ee/bin/sidekiq-cluster [QUEUE,QUEUE,...] [QUEUE, ...]
+/opt/gitlab/embedded/service/gitlab-rails/bin/sidekiq-cluster [QUEUE,QUEUE,...] [QUEUE, ...]
```
Each separate argument denotes a group of queues that have to be processed by a
@@ -313,14 +333,14 @@ For example, say you want to start 2 extra processes: one to process the
done as follows:
```shell
-/opt/gitlab/embedded/service/gitlab-rails/ee/bin/sidekiq-cluster process_commit post_receive
+/opt/gitlab/embedded/service/gitlab-rails/bin/sidekiq-cluster process_commit post_receive
```
If you instead want to start one process processing both queues, you'd use the
following syntax:
```shell
-/opt/gitlab/embedded/service/gitlab-rails/ee/bin/sidekiq-cluster process_commit,post_receive
+/opt/gitlab/embedded/service/gitlab-rails/bin/sidekiq-cluster process_commit,post_receive
```
If you want to have one Sidekiq process dealing with the `process_commit` and
@@ -328,7 +348,7 @@ If you want to have one Sidekiq process dealing with the `process_commit` and
you'd use the following:
```shell
-/opt/gitlab/embedded/service/gitlab-rails/ee/bin/sidekiq-cluster process_commit,post_receive gitlab_shell
+/opt/gitlab/embedded/service/gitlab-rails/bin/sidekiq-cluster process_commit,post_receive gitlab_shell
```
### Monitoring the `sidekiq-cluster` command
@@ -360,7 +380,7 @@ file is written, but this can be changed by passing the `--pidfile` option to
`sidekiq-cluster`. For example:
```shell
-/opt/gitlab/embedded/service/gitlab-rails/ee/bin/sidekiq-cluster --pidfile /var/run/gitlab/sidekiq_cluster.pid process_commit
+/opt/gitlab/embedded/service/gitlab-rails/bin/sidekiq-cluster --pidfile /var/run/gitlab/sidekiq_cluster.pid process_commit
```
Keep in mind that the PID file will contain the PID of the `sidekiq-cluster`
diff --git a/doc/administration/operations/fast_ssh_key_lookup.md b/doc/administration/operations/fast_ssh_key_lookup.md
index 7d0fc43f810..0ee8f26b97c 100644
--- a/doc/administration/operations/fast_ssh_key_lookup.md
+++ b/doc/administration/operations/fast_ssh_key_lookup.md
@@ -54,8 +54,10 @@ Add the following to your `sshd_config` file. This is usually located at
Omnibus Docker:
```plaintext
-AuthorizedKeysCommand /opt/gitlab/embedded/service/gitlab-shell/bin/gitlab-shell-authorized-keys-check git %u %k
-AuthorizedKeysCommandUser git
+Match User git # Apply the AuthorizedKeysCommands to the git user only
+ AuthorizedKeysCommand /opt/gitlab/embedded/service/gitlab-shell/bin/gitlab-shell-authorized-keys-check git %u %k
+ AuthorizedKeysCommandUser git
+Match all # End match, settings apply to all users again
```
Reload OpenSSH:
diff --git a/doc/administration/operations/puma.md b/doc/administration/operations/puma.md
index 2490cf1f0ae..30fea36fac6 100644
--- a/doc/administration/operations/puma.md
+++ b/doc/administration/operations/puma.md
@@ -2,8 +2,8 @@
## Puma
-GitLab plans to use [Puma](https://github.com/puma/puma) to replace
-[Unicorn](https://bogomips.org/unicorn/).
+As of GitLab 12.9, [Puma](https://github.com/puma/puma) has replaced [Unicorn](https://bogomips.org/unicorn/)
+as the default web server.
## Why switch to Puma?
diff --git a/doc/administration/operations/ssh_certificates.md b/doc/administration/operations/ssh_certificates.md
index 1fa3555e604..5a9caa36cf8 100644
--- a/doc/administration/operations/ssh_certificates.md
+++ b/doc/administration/operations/ssh_certificates.md
@@ -40,7 +40,7 @@ it](https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/6/htm
We assume that you already have SSH certificates set up, and have
added the `TrustedUserCAKeys` of your CA to your `sshd_config`, e.g.:
-```
+```plaintext
TrustedUserCAKeys /etc/security/mycompany_user_ca.pub
```
@@ -87,7 +87,7 @@ Then, in your `sshd_config` set up `AuthorizedPrincipalsCommand` for
the `git` user. Hopefully you can use the default one shipped with
GitLab:
-```
+```plaintext
Match User git
AuthorizedPrincipalsCommandUser root
AuthorizedPrincipalsCommand /opt/gitlab/embedded/service/gitlab-shell/bin/gitlab-shell-authorized-principals-check %i sshUsers
@@ -95,7 +95,7 @@ Match User git
This command will emit output that looks something like:
-```
+```shell
command="/opt/gitlab/embedded/service/gitlab-shell/bin/gitlab-shell username-{KEY_ID}",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty {PRINCIPAL}
```
@@ -108,7 +108,7 @@ some principal that's guaranteed to be part of the key for all users
who can log in to GitLab, or you must provide a list of principals,
one of which is going to be present for the user, e.g.:
-```
+```plaintext
[...]
AuthorizedPrincipalsCommand /opt/gitlab/embedded/service/gitlab-shell/bin/gitlab-shell-authorized-principals-check %i sshUsers windowsUsers
```
diff --git a/doc/administration/operations/unicorn.md b/doc/administration/operations/unicorn.md
index edd580d10c7..bab20a76546 100644
--- a/doc/administration/operations/unicorn.md
+++ b/doc/administration/operations/unicorn.md
@@ -64,13 +64,19 @@ between requests_, so no user requests are affected. You can set the minimum and
maximum memory threshold (in bytes) for the Unicorn worker killer by
setting the following values `/etc/gitlab/gitlab.rb`:
-```ruby
-unicorn['worker_memory_limit_min'] = "1024 * 1 << 20"
-unicorn['worker_memory_limit_max'] = "1280 * 1 << 20"
-```
+- For GitLab **12.7** and newer:
+
+ ```ruby
+ unicorn['worker_memory_limit_min'] = "1024 * 1 << 20"
+ unicorn['worker_memory_limit_max'] = "1280 * 1 << 20"
+ ```
+
+- For GitLab **12.6** and older:
-NOTE: **Note:**
-These values apply to GitLab 12.7.0 or newer versions. For older GitLab versions please consult [previous worker memory limits](https://gitlab.com/gitlab-org/omnibus-gitlab/-/blob/12.6.0+ee.0/files/gitlab-cookbooks/gitlab/attributes/default.rb#L422-423).
+ ```ruby
+ unicorn['worker_memory_limit_min'] = "400 * 1 << 20"
+ unicorn['worker_memory_limit_max'] = "650 * 1 << 20"
+ ```
Otherwise, you can set the `GITLAB_UNICORN_MEMORY_MIN` and `GITLAB_UNICORN_MEMORY_MAX`
[environment variables](../environment_variables.md).
diff --git a/doc/administration/packages/container_registry.md b/doc/administration/packages/container_registry.md
index 87be9d500fb..2b029859447 100644
--- a/doc/administration/packages/container_registry.md
+++ b/doc/administration/packages/container_registry.md
@@ -619,7 +619,7 @@ provided by `gitlab-ctl`.
Consider the following example, where you first build the image:
-```bash
+```shell
# This builds a image with content of sha256:111111
docker build -t my.registry.com/my.group/my.project:latest .
docker push my.registry.com/my.group/my.project:latest
@@ -627,7 +627,7 @@ docker push my.registry.com/my.group/my.project:latest
Now, you do overwrite `:latest` with a new version:
-```bash
+```shell
# This builds a image with content of sha256:222222
docker build -t my.registry.com/my.group/my.project:latest .
docker push my.registry.com/my.group/my.project:latest
@@ -736,10 +736,14 @@ To enable the read-only mode:
This will set the Container Registry into the read only mode.
-1. Next, trigger the garbage collect command:
+1. Next, trigger one of the garbage collect commands:
```sh
+ # Recycling unused tags
sudo /opt/gitlab/embedded/bin/registry garbage-collect /var/opt/gitlab/registry/config.yml
+
+ # Removing unused layers not referenced by manifests
+ sudo /opt/gitlab/embedded/bin/registry garbage-collect -m /var/opt/gitlab/registry/config.yml
```
This will start the garbage collection, which might take some time to complete.
@@ -774,7 +778,7 @@ once a week.
Create a file under `/etc/cron.d/registry-garbage-collect`:
-```bash
+```shell
SHELL=/bin/sh
PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin
diff --git a/doc/administration/packages/dependency_proxy.md b/doc/administration/packages/dependency_proxy.md
index 4cbb22668d9..b489d829ba7 100644
--- a/doc/administration/packages/dependency_proxy.md
+++ b/doc/administration/packages/dependency_proxy.md
@@ -11,9 +11,8 @@ dependency proxies, see the [user guide](../../user/group/dependency_proxy/index
NOTE: **Note:**
Dependency proxy requires the Puma web server to be enabled.
-Puma support is EXPERIMENTAL at this time.
-To enable the Dependency proxy feature:
+To enable the dependency proxy feature:
**Omnibus GitLab installations**
@@ -37,7 +36,9 @@ To enable the Dependency proxy feature:
```
1. [Restart GitLab](../restart_gitlab.md#installations-from-source "How to restart GitLab") for the changes to take effect.
-1. Enable the [Puma web server](../../install/installation.md#using-puma).
+
+Since Puma is already the default web server for installations from source as of GitLab 12.9,
+no further changes are needed.
## Changing the storage path
diff --git a/doc/administration/packages/index.md b/doc/administration/packages/index.md
index 421b70709b5..40867fc15b6 100644
--- a/doc/administration/packages/index.md
+++ b/doc/administration/packages/index.md
@@ -119,6 +119,9 @@ upload packages:
}
```
+ NOTE: **Note:**
+   Some build tools, such as Gradle, must make `HEAD` requests to Amazon S3 to pull a dependency's metadata. For these to work, the `gitlab_rails['packages_object_store_proxy_download']` property must be set to `true`. Without this setting, GitLab won't act as a proxy to the Amazon S3 service and will instead return the signed URL. This causes an `HTTP 403 Forbidden` response, since Amazon S3 expects a signed URL.
+
1. Save the file and [reconfigure GitLab](../restart_gitlab.md#omnibus-gitlab-reconfigure)
for the changes to take effect.
diff --git a/doc/administration/pages/index.md b/doc/administration/pages/index.md
index 8515cd909db..ce7d2fa3e73 100644
--- a/doc/administration/pages/index.md
+++ b/doc/administration/pages/index.md
@@ -345,7 +345,7 @@ pages:
### Using a custom Certificate Authority (CA)
When using certificates issued by a custom CA, [Access Control](../../user/project/pages/pages_access_control.md#gitlab-pages-access-control) and
-the [online view of HTML job artifacts](../../user/project/pipelines/job_artifacts.md#browsing-artifacts)
+the [online view of HTML job artifacts](../../ci/pipelines/job_artifacts.md#browsing-artifacts)
will fail to work if the custom CA is not recognized.
This usually results in this error:
@@ -360,16 +360,16 @@ that method from working. Use the following workaround:
1. Append your GitLab server TLS/SSL certificate to `/opt/gitlab/embedded/ssl/certs/cacert.pem` where `gitlab-domain-example.com` is your GitLab application URL.
- ```shell
- printf "\ngitlab-domain-example.com\n===========================\n" | sudo tee --append /opt/gitlab/embedded/ssl/certs/cacert.pem
- echo -n | openssl s_client -connect gitlab-domain-example.com:443 | sed -ne '/-BEGIN CERTIFICATE-/,/-END CERTIFICATE-/p' | sudo tee --append /opt/gitlab/embedded/ssl/certs/cacert.pem
- ```
+ ```shell
+ printf "\ngitlab-domain-example.com\n===========================\n" | sudo tee --append /opt/gitlab/embedded/ssl/certs/cacert.pem
+ echo -n | openssl s_client -connect gitlab-domain-example.com:443 | sed -ne '/-BEGIN CERTIFICATE-/,/-END CERTIFICATE-/p' | sudo tee --append /opt/gitlab/embedded/ssl/certs/cacert.pem
+ ```
1. [Restart](../restart_gitlab.md) the GitLab Pages Daemon. For GitLab Omnibus instances:
- ```shell
- sudo gitlab-ctl restart gitlab-pages
- ```
+ ```shell
+ sudo gitlab-ctl restart gitlab-pages
+ ```
CAUTION: **Caution:**
Some GitLab Omnibus upgrades will revert this workaround and you'll need to apply it again.
@@ -566,6 +566,51 @@ GitLab Pages are part of the [regular backup][backup], so there is no separate b
You should strongly consider running GitLab Pages under a different hostname
than GitLab to prevent XSS attacks.
+<!-- ## Troubleshooting
+
+Include any troubleshooting steps that you can foresee. If you know beforehand what issues
+one might have when setting this up, or when something is changed, or on upgrading, it's
+important to describe those, too. Think of things that may go wrong and include them here.
+This is important to minimize requests for support, and to avoid doc comments with
+questions that you know someone might ask.
+
+Each scenario can be a third-level heading, e.g. `### Getting error message X`.
+If you have none to add when creating a doc, leave this section in place
+but commented out to help encourage others to add to it in the future. -->
+
+## Troubleshooting
+
+### `open /etc/ssl/ca-bundle.pem: permission denied`
+
+GitLab Pages runs inside a `chroot` jail, usually in a uniquely numbered directory like
+`/tmp/gitlab-pages-*`.
+
+Within the jail, a bundle of trusted certificates is
+provided at `/etc/ssl/ca-bundle.pem`. It's
+[copied there](https://gitlab.com/gitlab-org/gitlab-pages/-/merge_requests/51)
+from `/opt/gitlab/embedded/ssl/certs/cacert.pem`
+as part of starting up Pages.
+
+If the permissions on the source file are incorrect (they should be `0644`) then
+the file inside the `chroot` jail will also be wrong.
+
+Pages will log errors in `/var/log/gitlab/gitlab-pages/current` like:
+
+```plaintext
+x509: failed to load system roots and no roots provided
+open /etc/ssl/ca-bundle.pem: permission denied
+```
+
+The use of a `chroot` jail makes this error misleading, as it is not
+referring to `/etc/ssl` on the root filesystem.
+
+The fix is to correct the source file permissions and restart Pages:
+
+```shell
+sudo chmod 644 /opt/gitlab/embedded/ssl/certs/cacert.pem
+sudo gitlab-ctl restart gitlab-pages
+```
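+
+To confirm the fix took effect, you can check the source file's mode (expect `-rw-r--r--`, that is `0644`):
+
+```shell
+# A quick check of the certificate bundle's permissions.
+ls -l /opt/gitlab/embedded/ssl/certs/cacert.pem
+```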
+
[backup]: ../../raketasks/backup_restore.md
[ce-14605]: https://gitlab.com/gitlab-org/gitlab-foss/issues/14605
[ee-80]: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/80
diff --git a/doc/administration/raketasks/maintenance.md b/doc/administration/raketasks/maintenance.md
index 6dc5542466f..b45f1ae529d 100644
--- a/doc/administration/raketasks/maintenance.md
+++ b/doc/administration/raketasks/maintenance.md
@@ -58,6 +58,7 @@ Runs the following rake tasks:
- `gitlab:app:check`
It will check that each component was set up according to the installation guide and suggest fixes for issues found.
+This command must be run from your app server and will not work correctly on component servers like [Gitaly](../gitaly/index.md#running-gitaly-on-its-own-server).
You may also have a look at our Troubleshooting Guides:
diff --git a/doc/administration/raketasks/uploads/migrate.md b/doc/administration/raketasks/uploads/migrate.md
index adef6251a27..2dc07bc09d6 100644
--- a/doc/administration/raketasks/uploads/migrate.md
+++ b/doc/administration/raketasks/uploads/migrate.md
@@ -73,6 +73,9 @@ gitlab-rake "gitlab:uploads:migrate[FileUploader, Project]"
gitlab-rake "gitlab:uploads:migrate[PersonalFileUploader, Snippet]"
gitlab-rake "gitlab:uploads:migrate[NamespaceFileUploader, Snippet]"
gitlab-rake "gitlab:uploads:migrate[FileUploader, MergeRequest]"
+
+# Design Management design thumbnails (EE)
+gitlab-rake "gitlab:uploads:migrate[DesignManagement::DesignV432x230Uploader, DesignManagement::Action, :image_v432x230]"
```
**Source Installation**
@@ -102,6 +105,8 @@ sudo -u git -H bundle exec rake "gitlab:uploads:migrate[PersonalFileUploader, Sn
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[NamespaceFileUploader, Snippet]"
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[FileUploader, MergeRequest]"
+# Design Management design thumbnails (EE)
+sudo -u git -H bundle exec rake "gitlab:uploads:migrate[DesignManagement::DesignV432x230Uploader, DesignManagement::Action]"
```
## Migrate legacy uploads out of deprecated paths
diff --git a/doc/administration/reply_by_email_postfix_setup.md b/doc/administration/reply_by_email_postfix_setup.md
index 3a28e37cfc0..c6da88a0eec 100644
--- a/doc/administration/reply_by_email_postfix_setup.md
+++ b/doc/administration/reply_by_email_postfix_setup.md
@@ -73,7 +73,7 @@ The instructions make the assumption that you will be using the email address `i
1. Send the new `incoming` user a dummy email to test SMTP, by entering the following into the SMTP prompt:
- ```
+ ```plaintext
ehlo localhost
mail from: root@localhost
rcpt to: incoming@localhost
@@ -101,7 +101,7 @@ The instructions make the assumption that you will be using the email address `i
You should see output like this:
- ```
+ ```plaintext
"/var/mail/incoming": 1 message 1 unread
>U 1 root@localhost 59/2842 Re: Some issue
```
@@ -147,7 +147,7 @@ Courier, which we will install later to add IMAP authentication, requires mailbo
You should see output like this:
- ```
+ ```plaintext
"/home/incoming/Maildir": 1 message 1 unread
>U 1 root@localhost 59/2842 Re: Some issue
```
@@ -253,7 +253,7 @@ Courier, which we will install later to add IMAP authentication, requires mailbo
1. Send the `incoming` user a dummy email to test SMTP, by entering the following into the SMTP prompt:
- ```
+ ```plaintext
ehlo gitlab.example.com
mail from: root@gitlab.example.com
rcpt to: incoming@gitlab.example.com
@@ -277,7 +277,7 @@ Courier, which we will install later to add IMAP authentication, requires mailbo
You should see output like this:
- ```
+ ```plaintext
"/home/incoming/Maildir": 1 message 1 unread
>U 1 root@gitlab.example.com 59/2842 Re: Some issue
```
@@ -313,7 +313,7 @@ Courier, which we will install later to add IMAP authentication, requires mailbo
1. Sign in as the `incoming` user to test IMAP, by entering the following into the IMAP prompt:
- ```
+ ```plaintext
a login incoming PASSWORD
```
@@ -321,7 +321,7 @@ Courier, which we will install later to add IMAP authentication, requires mailbo
You should see output like this:
- ```
+ ```plaintext
a OK LOGIN Ok.
```
diff --git a/doc/administration/repository_storage_paths.md b/doc/administration/repository_storage_paths.md
index c73539cbdb4..5ca1dd18da3 100644
--- a/doc/administration/repository_storage_paths.md
+++ b/doc/administration/repository_storage_paths.md
@@ -15,7 +15,7 @@ storage shards) to distribute the storage load between several mount points.
Example: this is OK:
-```
+```plaintext
default:
path: /mnt/git-storage-1
storage2:
@@ -24,7 +24,7 @@ storage2:
This is not OK because it nests storage paths:
-```
+```plaintext
default:
path: /mnt/git-storage-1
storage2:
diff --git a/doc/administration/restart_gitlab.md b/doc/administration/restart_gitlab.md
index bd3a52d487a..176ff5c1b1b 100644
--- a/doc/administration/restart_gitlab.md
+++ b/doc/administration/restart_gitlab.md
@@ -87,8 +87,8 @@ Reconfiguring GitLab should occur in the event that something in its
configuration (`/etc/gitlab/gitlab.rb`) has changed.
When you run this command, [Chef], the underlying configuration management
-application that powers Omnibus GitLab, will make sure that all directories,
-permissions, services, etc., are in place and in the same shape that they were
+application that powers Omnibus GitLab, will make sure that directories,
+permissions, and services are in place and in the same shape that they were
initially shipped.
It will also restart GitLab components where needed, if any of their
@@ -128,7 +128,7 @@ The GitLab MailRoom email processor with pid 28114 is running.
GitLab and all its components are up and running.
```
-This should restart Unicorn, Sidekiq, GitLab Workhorse and [Mailroom][]
+This should restart Unicorn, Sidekiq, GitLab Workhorse, and [Mailroom][]
(if enabled). The init service file that does all the magic can be found on
your server in `/etc/init.d/gitlab`.
@@ -149,8 +149,8 @@ If you are using other init systems, like systemd, you can check the
There is no single command to restart the entire GitLab application installed via
the [cloud native Helm Chart](https://docs.gitlab.com/charts/). Usually, it should be
-enough to restart a specific component separately (`gitaly`, `unicorn`,
-`workhorse`, `gitlab-shell`, etc.) by deleting all the pods related to it:
+enough to restart a specific component separately (for example, `gitaly`, `unicorn`,
+`workhorse`, or `gitlab-shell`) by deleting all the pods related to it:
```shell
kubectl delete pods -l release=<helm release name>,app=<component name>
diff --git a/doc/administration/server_hooks.md b/doc/administration/server_hooks.md
index 4c72634d4ff..37fffcdce22 100644
--- a/doc/administration/server_hooks.md
+++ b/doc/administration/server_hooks.md
@@ -88,11 +88,10 @@ pattern (`*~`).
The hooks are searched and executed in this order:
-1. `gitlab-shell/hooks` directory as known to Gitaly.
-1. `<project>.git/hooks/<hook_name>` - executed by `git` itself, this is symlinked to `gitlab-shell/hooks/<hook_name>`.
+1. Built-in GitLab server hooks (not user-customizable).
1. `<project>.git/custom_hooks/<hook_name>` - per-project hook (this was kept as the already existing behavior).
1. `<project>.git/custom_hooks/<hook_name>.d/*` - per-project hooks.
-1. `<project>.git/hooks/<hook_name>.d/*` OR `<custom_hooks_dir>/<hook_name.d>/*` - global hooks: all executable files (except editor backup files).
+1. `<custom_hooks_dir>/<hook_name>.d/*` - global hooks: all executable files (except editor backup files).
The hooks of the same type are executed in order and execution stops on the
first script exiting with a non-zero value.
diff --git a/doc/administration/snippets/index.md b/doc/administration/snippets/index.md
index 7632d685dc0..e6bbfa8cf00 100644
--- a/doc/administration/snippets/index.md
+++ b/doc/administration/snippets/index.md
@@ -40,7 +40,7 @@ The steps to configure this setting through the Rails console are:
sudo gitlab-rails console
# For installations from source
- sudo -u git -H bundle exec rails console production
+ sudo -u git -H bundle exec rails console -e production
```
1. Update the snippets maximum file size:
diff --git a/doc/administration/static_objects_external_storage.md b/doc/administration/static_objects_external_storage.md
index ab4abb7716a..f649a1ebcd2 100644
--- a/doc/administration/static_objects_external_storage.md
+++ b/doc/administration/static_objects_external_storage.md
@@ -2,8 +2,8 @@
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/31025) in GitLab 12.3.
-GitLab can be configured to serve repository static objects (for example, archives) from an external
-storage, such as a CDN.
+GitLab can be configured to serve repository static objects (for example, archives or raw blobs) from an external
+storage, such as a Content Delivery Network (CDN).
## Configuring
@@ -11,7 +11,8 @@ To configure external storage for static objects:
1. Navigate to **Admin Area > Settings > Repository**.
1. Expand the **Repository static objects** section.
-1. Enter the base URL and an arbitrary token.
+1. Enter the base URL and an arbitrary token. When you [set up external storage](#set-up-external-storage),
+   you'll use a script that uses these values as `ORIGIN_HOSTNAME` and `STORAGE_TOKEN`.
The token is required to distinguish requests coming from the external storage, so users don't
circumvent the external storage and go for the application directly. The token is expected to be
@@ -48,3 +49,173 @@ sequenceDiagram
CDN->>User: master.zip
end
```
+
+## Set up external storage
+
+While this procedure uses [CloudFlare Workers](https://workers.cloudflare.com) for external storage,
+other CDNs or Function as a Service (FaaS) systems should work using the same principles.
+
+1. Choose a CloudFlare Worker domain if you haven't done so already.
+1. In the following script, set the following values for the first two constants:
+
+ - `ORIGIN_HOSTNAME`: the hostname of your GitLab installation.
+ - `STORAGE_TOKEN`: any arbitrary secure token (e.g. you can get one by running
+ `pwgen -cn1 64` on a UNIX machine). Save this token for the admin panel, as
+ described in the [configuring](#configuring) section.
+
+ ```js
+ const ORIGIN_HOSTNAME = 'gitlab.installation.com' // FIXME: SET CORRECT VALUE
+ const STORAGE_TOKEN = 'very-secure-token' // FIXME: SET CORRECT VALUE
+ const CACHE_PRIVATE_OBJECTS = false
+
+ const CORS_HEADERS = {
+ 'Access-Control-Allow-Origin': '*',
+ 'Access-Control-Allow-Methods': 'GET, HEAD, OPTIONS',
+ 'Access-Control-Allow-Headers': 'X-Csrf-Token, X-Requested-With',
+ }
+
+ self.addEventListener('fetch', event => event.respondWith(handle(event)))
+
+ async function handle(event) {
+ try {
+ let response = await verifyAndHandle(event);
+
+ // responses returned from cache are immutable, so we recreate them
+ // to set CORS headers
+ response = new Response(response.body, response)
+ response.headers.set('Access-Control-Allow-Origin', '*')
+
+ return response
+ } catch (e) {
+ return new Response('An error occurred!', {status: e.statusCode || 500})
+ }
+ }
+
+ async function verifyAndHandle(event) {
+ if (!validRequest(event.request)) {
+ return new Response(null, {status: 400})
+ }
+
+ if (event.request.method === 'OPTIONS') {
+ return handleOptions(event.request)
+ }
+
+ return handleRequest(event)
+ }
+
+ function handleOptions(request) {
+ // Make sure the necessary headers are present
+ // for this to be a valid pre-flight request
+ if (
+ request.headers.get('Origin') !== null &&
+ request.headers.get('Access-Control-Request-Method') !== null &&
+ request.headers.get('Access-Control-Request-Headers') !== null
+ ) {
+ // Handle CORS pre-flight request
+ return new Response(null, {
+ headers: CORS_HEADERS,
+ })
+ } else {
+ // Handle standard OPTIONS request
+ return new Response(null, {
+ headers: {
+ Allow: 'GET, HEAD, OPTIONS',
+ },
+ })
+ }
+ }
+
+ async function handleRequest(event) {
+ let cache = caches.default
+ let url = new URL(event.request.url)
+ let static_object_token = url.searchParams.get('token')
+ let headers = new Headers(event.request.headers)
+
+ url.host = ORIGIN_HOSTNAME
+ url = normalizeQuery(url)
+
+ headers.set('X-Gitlab-External-Storage-Token', STORAGE_TOKEN)
+ if (static_object_token !== null) {
+ headers.set('X-Gitlab-Static-Object-Token', static_object_token)
+ }
+
+ let request = new Request(url, { headers: headers })
+ let cached_response = await cache.match(request)
+ let is_conditional_header_set = headers.has('If-None-Match')
+
+ if (cached_response) {
+ return cached_response
+ }
+
+ // We don't want to override If-None-Match that is set on the original request
+ if (cached_response && !is_conditional_header_set) {
+ headers.set('If-None-Match', cached_response.headers.get('ETag'))
+ }
+
+ let response = await fetch(request, {
+ headers: headers,
+ redirect: 'manual'
+ })
+
+ if (response.status == 304) {
+ if (is_conditional_header_set) {
+ return response
+ } else {
+ return cached_response
+ }
+ } else if (response.ok) {
+ response = new Response(response.body, response)
+
+ // cache.put will never cache any response with a Set-Cookie header
+ response.headers.delete('Set-Cookie')
+
+ if (CACHE_PRIVATE_OBJECTS) {
+ response.headers.delete('Cache-Control')
+ }
+
+ event.waitUntil(cache.put(request, response.clone()))
+ }
+
+ return response
+ }
+
+ function normalizeQuery(url) {
+ let searchParams = url.searchParams
+ url = new URL(url.toString().split('?')[0])
+
+ if (url.pathname.includes('/raw/')) {
+ let inline = searchParams.get('inline')
+
+ if (inline == 'false' || inline == 'true') {
+ url.searchParams.set('inline', inline)
+ }
+ } else if (url.pathname.includes('/-/archive/')) {
+ let append_sha = searchParams.get('append_sha')
+ let path = searchParams.get('path')
+
+ if (append_sha == 'false' || append_sha == 'true') {
+ url.searchParams.set('append_sha', append_sha)
+ }
+ if (path) {
+ url.searchParams.set('path', path)
+ }
+ }
+
+ return url
+ }
+
+ function validRequest(request) {
+ let url = new URL(request.url)
+ let path = url.pathname
+
+ if (/^(.+)(\/raw\/|\/-\/archive\/)/.test(path)) {
+ return true
+ }
+
+ return false
+ }
+ ```
+
+1. Create a new worker with this script.
+1. Copy your values for `ORIGIN_HOSTNAME` and `STORAGE_TOKEN`.
+ Use those values [to configure external storage for static objects](#configuring).
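+
+To verify the setup end to end, you can request an archive through the worker and
+check that it is served successfully. The hostname, group, and project below are
+placeholders for your own values:
+
+```shell
+# A hypothetical smoke test against the deployed CloudFlare Worker.
+curl --head "https://<worker-hostname>/<group>/<project>/-/archive/master/<project>-master.zip"
+```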
diff --git a/doc/administration/troubleshooting/debug.md b/doc/administration/troubleshooting/debug.md
index 01d143d045e..c1f2a5c92a3 100644
--- a/doc/administration/troubleshooting/debug.md
+++ b/doc/administration/troubleshooting/debug.md
@@ -11,13 +11,13 @@ an SMTP server, but you're not seeing mail delivered. Here's how to check the se
1. Run a Rails console:
```shell
- sudo gitlab-rails console production
+ sudo gitlab-rails console -e production
```
or for source installs:
```shell
- bundle exec rails console production
+ bundle exec rails console -e production
```
1. Look at the ActionMailer `delivery_method` to make sure it matches what you
@@ -33,7 +33,7 @@ an SMTP server, but you're not seeing mail delivered. Here's how to check the se
```ruby
irb(main):002:0> ActionMailer::Base.smtp_settings
- => {:address=>"localhost", :port=>25, :domain=>"localhost.localdomain", :user_name=>nil, :password=>nil, :authentication=>nil, :enable_starttls_auto=>true}```
+ => {:address=>"localhost", :port=>25, :domain=>"localhost.localdomain", :user_name=>nil, :password=>nil, :authentication=>nil, :enable_starttls_auto=>true}
```
In the example above, the SMTP server is configured for the local machine. If this is intended, you may need to check your local mail
@@ -56,13 +56,13 @@ For more advanced issues, `gdb` is a must-have tool for debugging issues.
To install on Ubuntu/Debian:
-```
+```shell
sudo apt-get install gdb
```
On CentOS:
-```
+```shell
sudo yum install gdb
```
@@ -103,14 +103,14 @@ downtime. Otherwise skip to the next section.
1. Run `sudo gdb -p <PID>` to attach to the Unicorn process.
1. In the gdb window, type:
- ```
+ ```plaintext
call (void) rb_backtrace()
```
1. This forces the process to generate a Ruby backtrace. Check
`/var/log/gitlab/unicorn/unicorn_stderr.log` for the backtrace. For example, you may see:
- ```ruby
+ ```plaintext
from /opt/gitlab/embedded/service/gitlab-rails/lib/gitlab/metrics/sampler.rb:33:in `block in start'
from /opt/gitlab/embedded/service/gitlab-rails/lib/gitlab/metrics/sampler.rb:33:in `loop'
from /opt/gitlab/embedded/service/gitlab-rails/lib/gitlab/metrics/sampler.rb:36:in `block (2 levels) in start'
@@ -124,13 +124,13 @@ downtime. Otherwise skip to the next section.
1. To see the current threads, run:
- ```
+ ```plaintext
thread apply all bt
```
1. Once you're done debugging with `gdb`, be sure to detach from the process and exit:
- ```
+ ```plaintext
detach
exit
```
@@ -162,7 +162,7 @@ separate Rails process to debug the issue:
1. Create a Personal Access Token for your user (Profile Settings -> Access Tokens).
1. Bring up the GitLab Rails console. For omnibus users, run:
- ```
+ ```shell
sudo gitlab-rails console
```
diff --git a/doc/administration/troubleshooting/gdb-stuck-ruby.txt b/doc/administration/troubleshooting/gdb-stuck-ruby.txt
index 13d5dfcffa4..de8d704f9e3 100644
--- a/doc/administration/troubleshooting/gdb-stuck-ruby.txt
+++ b/doc/administration/troubleshooting/gdb-stuck-ruby.txt
@@ -1,6 +1,6 @@
# Here's the script I'll use to demonstrate - it just loops forever:
-$ cat test.rb
+$ cat test.rb
#!/usr/bin/env ruby
loop do
@@ -75,7 +75,7 @@ Thread 1 (Thread 0xb74d76c0 (LWP 1343)):
at eval.c:5778
#5 rb_call0 (klass=3075304600, recv=3075299660, id=9393, oid=9393, argc=1, argv=0xbf85f490, body=0xb74c85a8, flags=2)
at eval.c:5928
-#6 0x0805e35d in rb_call (klass=3075304600, recv=3075299660, mid=9393, argc=1, argv=0xbf85f490, scope=1,
+#6 0x0805e35d in rb_call (klass=3075304600, recv=3075299660, mid=9393, argc=1, argv=0xbf85f490, scope=1,
self=<optimized out>) at eval.c:6176
#7 0x080651ec in rb_eval (self=3075299660, n=0xb74c4e1c) at eval.c:3521
#8 0x0805c31c in rb_yield_0 (val=6, self=3075299660, klass=<optimized out>, flags=0, avalue=0) at eval.c:5095
@@ -139,4 +139,4 @@ A debugging session is active.
Quit anyway? (y or n) y
Detaching from program: /opt/vagrant_ruby/bin/ruby, process 1343
-$
+$
diff --git a/doc/administration/troubleshooting/gitlab_rails_cheat_sheet.md b/doc/administration/troubleshooting/gitlab_rails_cheat_sheet.md
index a4180eb638a..2bb2e13511c 100644
--- a/doc/administration/troubleshooting/gitlab_rails_cheat_sheet.md
+++ b/doc/administration/troubleshooting/gitlab_rails_cheat_sheet.md
@@ -262,14 +262,6 @@ p.each do |project|
end
```
-### Identify un-indexed projects
-
-```ruby
-Project.find_each do |project|
- puts "id #{project.id}: #{project.namespace.name.to_s}/#{project.name.to_s}" if project.index_status.nil?
-end
-```
-
## Wikis
### Recreate
@@ -634,7 +626,7 @@ EE::Gitlab::LDAP::Sync::Group.execute_all_providers(group)
# Run a GroupSync for a single group (10.6+)
group = Group.find_by(name: 'my_gitlab_group')
-EE::Gitlab::Auth::LDAP::Sync::Group.execute_all_providers(group)
+EE::Gitlab::Auth::Ldap::Sync::Group.execute_all_providers(group)
# Query an LDAP group directly (10.6-)
adapter = Gitlab::LDAP::Adapter.new('ldapmain') # If `main` is the LDAP provider
@@ -643,20 +635,20 @@ ldap_group.member_dns
ldap_group.member_uids
# Query an LDAP group directly (10.6+)
-adapter = Gitlab::Auth::LDAP::Adapter.new('ldapmain') # If `main` is the LDAP provider
-ldap_group = EE::Gitlab::Auth::LDAP::Group.find_by_cn('group_cn_here', adapter)
+adapter = Gitlab::Auth::Ldap::Adapter.new('ldapmain') # If `main` is the LDAP provider
+ldap_group = EE::Gitlab::Auth::Ldap::Group.find_by_cn('group_cn_here', adapter)
ldap_group.member_dns
ldap_group.member_uids
# Lookup a particular user (10.6+)
# This could expose potential errors connecting to and/or querying LDAP that may seem to
# fail silently in the GitLab UI
-adapter = Gitlab::Auth::LDAP::Adapter.new('ldapmain') # If `main` is the LDAP provider
-user = Gitlab::Auth::LDAP::Person.find_by_uid('<username>',adapter)
+adapter = Gitlab::Auth::Ldap::Adapter.new('ldapmain') # If `main` is the LDAP provider
+user = Gitlab::Auth::Ldap::Person.find_by_uid('<username>',adapter)
# Query the LDAP server directly (10.6+)
## For an example, see https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/ee/gitlab/auth/ldap/adapter.rb
-adapter = Gitlab::Auth::LDAP::Adapter.new('ldapmain')
+adapter = Gitlab::Auth::Ldap::Adapter.new('ldapmain')
options = {
# the :base is required
# use adapter.config.base for the base or .group_base for the group_base
@@ -761,12 +753,6 @@ Ci::Pipeline.where(project_id: p.id).where(status: 'pending').each {|p| p.cancel
Ci::Pipeline.where(project_id: p.id).where(status: 'pending').count
```
-### Manually modify runner minutes
-
-```ruby
-Namespace.find_by_full_path("user/proj").namespace_statistics.update(shared_runners_seconds: 27360)
-```
-
### Remove artifacts more than a week old
The latest version of these steps can be found in the [job artifacts documentation](../job_artifacts.md)
@@ -806,21 +792,6 @@ build.dependencies.each do |d| { puts "status: #{d.status}, finished at: #{d.fin
completed: #{d.complete?}, artifacts_expired: #{d.artifacts_expired?}, erased: #{d.erased?}" }
```
-### Disable strict artifact checking (Introduced in GitLab 10.3.0)
-
-See [job artifacts documentation](../job_artifacts.md#validation-for-dependencies).
-
-```ruby
-Feature.enable('ci_disable_validates_dependencies')
-```
-
-### Remove CI traces older than 6 months
-
-```ruby
-current_user = User.find_by_email('cindy@gitlap.com')
-Ci::Build.where("finished_at < ?", 6.months.ago.to_date).each {|b| puts b.id; b.erase(erased_by: current_user) if b.erasable?};nil
-```
-
### Try CI service
```ruby
@@ -838,6 +809,12 @@ Project.all.each do |p|
end
```
+### Obtain runners registration token
+
+```ruby
+Gitlab::CurrentSettings.current_application_settings.runners_registration_token
+```
+
## License
### See license plan name (since v9.3.0-ee)
@@ -959,12 +936,6 @@ end
## Sidekiq
-### Size of a queue
-
-```ruby
-Sidekiq::Queue.new('background_migration').size
-```
-
### Kill a worker's Sidekiq jobs
```ruby
@@ -1011,12 +982,6 @@ See <https://github.com/mperham/sidekiq/wiki/Signals#ttin>.
/opt/gitlab/embedded/bin/redis-cli -s /var/opt/gitlab/redis/redis.socket
```
-### Connect to Redis (HA)
-
-```shell
-/opt/gitlab/embedded/bin/redis-cli -h <host ip> -a <password>
-```
-
## LFS
### Get info about LFS objects and associated project
diff --git a/doc/administration/troubleshooting/img/AzureAD-basic_SAML.png b/doc/administration/troubleshooting/img/AzureAD-basic_SAML.png
index be420b1a3de..e86ad7572e8 100644
--- a/doc/administration/troubleshooting/img/AzureAD-basic_SAML.png
+++ b/doc/administration/troubleshooting/img/AzureAD-basic_SAML.png
Binary files differ
diff --git a/doc/administration/troubleshooting/img/AzureAD-claims.png b/doc/administration/troubleshooting/img/AzureAD-claims.png
index ef594390ce0..aab92288704 100644
--- a/doc/administration/troubleshooting/img/AzureAD-claims.png
+++ b/doc/administration/troubleshooting/img/AzureAD-claims.png
Binary files differ
diff --git a/doc/administration/troubleshooting/img/OneLogin-SSOsettings.png b/doc/administration/troubleshooting/img/OneLogin-SSOsettings.png
index 72737b9a017..58f936d8567 100644
--- a/doc/administration/troubleshooting/img/OneLogin-SSOsettings.png
+++ b/doc/administration/troubleshooting/img/OneLogin-SSOsettings.png
Binary files differ
diff --git a/doc/administration/troubleshooting/img/OneLogin-app_details.png b/doc/administration/troubleshooting/img/OneLogin-app_details.png
index 3e36a001d1b..77618960897 100644
--- a/doc/administration/troubleshooting/img/OneLogin-app_details.png
+++ b/doc/administration/troubleshooting/img/OneLogin-app_details.png
Binary files differ
diff --git a/doc/administration/troubleshooting/img/OneLogin-encryption.png b/doc/administration/troubleshooting/img/OneLogin-encryption.png
index a1b90873a5a..2b811409bd0 100644
--- a/doc/administration/troubleshooting/img/OneLogin-encryption.png
+++ b/doc/administration/troubleshooting/img/OneLogin-encryption.png
Binary files differ
diff --git a/doc/administration/troubleshooting/img/OneLogin-parameters.png b/doc/administration/troubleshooting/img/OneLogin-parameters.png
index c9ff4f8f018..a2fa734152c 100644
--- a/doc/administration/troubleshooting/img/OneLogin-parameters.png
+++ b/doc/administration/troubleshooting/img/OneLogin-parameters.png
Binary files differ
diff --git a/doc/administration/troubleshooting/img/OneLogin-userAdd.png b/doc/administration/troubleshooting/img/OneLogin-userAdd.png
index c7187fe5dd6..54c1ecd2e68 100644
--- a/doc/administration/troubleshooting/img/OneLogin-userAdd.png
+++ b/doc/administration/troubleshooting/img/OneLogin-userAdd.png
Binary files differ
diff --git a/doc/administration/troubleshooting/kubernetes_cheat_sheet.md b/doc/administration/troubleshooting/kubernetes_cheat_sheet.md
index 48d415c6bdf..ec59705ca99 100644
--- a/doc/administration/troubleshooting/kubernetes_cheat_sheet.md
+++ b/doc/administration/troubleshooting/kubernetes_cheat_sheet.md
@@ -74,14 +74,14 @@ and they will assist you with any issues you are having.
- How to get cronjobs configured on a cluster
- ```bash
+ ```shell
kubectl get cronjobs
```
-
+
When one configures [cron-based backups](https://docs.gitlab.com/charts/backup-restore/backup.html#cron-based-backup),
you will be able to see the new schedule here. Some details about the schedules can be found
in [Running Automated Tasks with a CronJob](https://kubernetes.io/docs/tasks/job/automated-tasks-with-cron-jobs/#creating-a-cron-job)
-
+
## GitLab-specific Kubernetes information
- Minimal config that can be used to test a Kubernetes Helm chart can be found
@@ -167,7 +167,7 @@ and they will assist you with any issues you are having.
```shell
kubectl exec -it <task-runner-pod-name> -- /srv/gitlab/bin/rails dbconsole -p
```
-
+
- How to get info about Helm installation status:
```shell
@@ -202,9 +202,9 @@ and they will assist you with any issues you are having.
- How to get the manifest for a release. It can be useful because it contains the info about
all Kubernetes resources and dependent charts:
- ```shell
- helm get manifest <release name>
- ```
+ ```shell
+ helm get manifest <release name>
+ ```
## Installation of minimal GitLab config via Minikube on macOS
diff --git a/doc/administration/troubleshooting/postgresql.md b/doc/administration/troubleshooting/postgresql.md
index ab302c919b2..b793f0a2ebc 100644
--- a/doc/administration/troubleshooting/postgresql.md
+++ b/doc/administration/troubleshooting/postgresql.md
@@ -99,13 +99,13 @@ References:
- [Customer ticket (internal) GitLab 12.1.6](https://gitlab.zendesk.com/agent/tickets/134307) and [Google doc (internal)](https://docs.google.com/document/d/19xw2d_D1ChLiU-MO1QzWab-4-QXgsIUcN5e_04WTKy4)
- [Issue #2 deadlocks can occur if an instance is flooded with pushes](https://gitlab.com/gitlab-org/gitlab/issues/33650). Provided for context about how GitLab code can have this sort of unanticipated effect in unusual situations.
-```
+```plaintext
ERROR: deadlock detected
```
Three applicable timeouts are identified in the issue [#1](https://gitlab.com/gitlab-org/gitlab/issues/30528); our recommended settings are as follows:
-```
+```ini
deadlock_timeout = 5s
statement_timeout = 15s
idle_in_transaction_session_timeout = 60s
@@ -128,7 +128,7 @@ Comments in issue [#1](https://gitlab.com/gitlab-org/gitlab/issues/30528) indica
See current settings with:
-```
+```shell
sudo gitlab-rails runner "c = ApplicationRecord.connection ; puts c.execute('SHOW statement_timeout').to_a ;
puts c.execute('SHOW lock_timeout').to_a ;
puts c.execute('SHOW idle_in_transaction_session_timeout').to_a ;"
diff --git a/doc/administration/troubleshooting/sidekiq.md b/doc/administration/troubleshooting/sidekiq.md
index 91361dddf02..73598bb9441 100644
--- a/doc/administration/troubleshooting/sidekiq.md
+++ b/doc/administration/troubleshooting/sidekiq.md
@@ -18,6 +18,26 @@ troubleshooting steps that will help you diagnose the bottleneck.
> may be using all available CPU, or have a Ruby Global Interpreter Lock,
> preventing other threads from continuing.
+## Log arguments to Sidekiq jobs
+
+If you want to see what arguments are being passed to Sidekiq jobs you can set
+the `SIDEKIQ_LOG_ARGUMENTS` [environment variable](https://docs.gitlab.com/omnibus/settings/environment-variables.html) to `1` (true).
+
+Example:
+
+```ruby
+gitlab_rails['env'] = {"SIDEKIQ_LOG_ARGUMENTS" => "1"}
+```
+
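+For Omnibus installations, a sketch of applying and verifying the change
+(adjust for your setup):
+
+```shell
+sudo gitlab-ctl reconfigure
+sudo gitlab-ctl restart sidekiq
+# Follow the Sidekiq log to confirm job arguments now appear in the entries.
+sudo gitlab-ctl tail sidekiq
+```
+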
+Please note: It is not recommended to enable this setting in production because some
+Sidekiq jobs (such as sending a password reset email) take secret arguments (for
+example, the password reset token).
+
+When using [Sidekiq JSON logging](../logs.md#sidekiqlog),
+arguments logs are limited to a maximum size of 10 kilobytes of text;
+any arguments after this limit will be discarded and replaced with a
+single argument containing the string `"..."`.
+
## Thread dump
Send the Sidekiq process ID the `TTIN` signal and it will output thread
@@ -31,7 +51,7 @@ Check in `/var/log/gitlab/sidekiq/current` or `$GITLAB_HOME/log/sidekiq.log` for
the backtrace output. The backtraces will be lengthy and generally start with
several `WARN` level messages. Here's an example of a single thread's backtrace:
-```
+```plaintext
2016-04-13T06:21:20.022Z 31517 TID-orn4urby0 WARN: ActiveRecord::RecordNotFound: Couldn't find Note with 'id'=3375386
2016-04-13T06:21:20.022Z 31517 TID-orn4urby0 WARN: /opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/activerecord-4.2.5.2/lib/active_record/core.rb:155:in `find'
/opt/gitlab/embedded/service/gitlab-rails/app/workers/new_note_worker.rb:7:in `perform'
@@ -55,7 +75,7 @@ respond to the `TTIN` signal, this is a good next step.
If `perf` is not installed on your system, install it with `apt-get` or `yum`:
-```
+```shell
# Debian
sudo apt-get install linux-tools
@@ -68,13 +88,13 @@ sudo yum install perf
Run perf against the Sidekiq PID:
-```
+```shell
sudo perf record -p <sidekiq_pid>
```
Let this run for 30-60 seconds and then press Ctrl-C. Then view the perf report:
-```
+```shell
sudo perf report
# Sample output
@@ -102,13 +122,13 @@ of the process (Sidekiq will not process jobs while `gdb` is attached).
Start by attaching to the Sidekiq PID:
-```
+```shell
gdb -p <sidekiq_pid>
```
Then gather information on all the threads:
-```
+```plaintext
info threads
# Example output
@@ -129,7 +149,7 @@ from /opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/nokogiri-1.6.7.2/lib/nokog
If you see a suspicious thread, like the Nokogiri one above, you may want
to get more information:
-```
+```plaintext
thread 21
bt
@@ -147,7 +167,7 @@ bt
To output a backtrace from all threads at once:
-```
+```plaintext
set pagination off
thread apply all bt
```
@@ -155,7 +175,7 @@ thread apply all bt
Once you're done debugging with `gdb`, be sure to detach from the process and
exit:
-```
+```plaintext
detach
exit
```
@@ -287,4 +307,4 @@ has number of drawbacks, as mentioned in [Why Ruby’s Timeout is dangerous (and
> - while creating an object to save to the database afterwards
> - in any of your code, regardless of whether it could have possibly raised an exception before
>
-> Nobody writes code to defend against an exception being raised on literally any line. That’s not even possible. So Thread.raise is basically like a sneak attack on your code that could result in almost anything. It would probably be okay if it were pure-functional code that did not modify any state. But this is Ruby, so that’s unlikely :)
+> Nobody writes code to defend against an exception being raised on literally any line. That’s not even possible. So Thread.raise is basically like a sneak attack on your code that could result in almost anything. It would probably be okay if it were pure-functional code that did not modify any state. But this is Ruby, so that’s unlikely :)
diff --git a/doc/administration/uploads.md b/doc/administration/uploads.md
index 8bc84d4848a..f53c4e63bcb 100644
--- a/doc/administration/uploads.md
+++ b/doc/administration/uploads.md
@@ -63,7 +63,7 @@ For source installations the following settings are nested under `uploads:` and
|---------|-------------|---------|
| `enabled` | Enable/disable object storage | `false` |
| `remote_directory` | The bucket name where Uploads will be stored| |
-| `direct_upload` | Set to true to remove Unicorn from the Upload path. Workhorse handles the actual Artifact Upload to Object Storage while Unicorn does minimal processing to keep track of the upload. There is no need for local shared storage. The option may be removed if support for a single storage type for all files is introduced. Read more on [what the direct_upload setting means](https://docs.gitlab.com/ee/development/uploads.html#what-does-the-direct_upload-setting-mean). | `false` |
+| `direct_upload` | Set to true to remove Unicorn from the Upload path. Workhorse handles the actual Artifact Upload to Object Storage while Unicorn does minimal processing to keep track of the upload. There is no need for local shared storage. The option may be removed if support for a single storage type for all files is introduced. Read more on [direct upload](../development/uploads.md#direct-upload). | `false` |
| `background_upload` | Set to false to disable automatic upload. Option may be removed once upload is direct to S3 (if `direct_upload` is set to `true` it will override `background_upload`) | `true` |
| `proxy_download` | Set to true to enable proxying all files served. Option allows to reduce egress traffic as this allows clients to download directly from remote storage instead of proxying all data | `false` |
| `connection` | Various connection options described below | |
@@ -153,7 +153,7 @@ Note that Oracle Cloud S3 must be sure to use the following settings:
If `enable_signature_v4_streaming` is set to `true`, you may see the
following error:
-```
+```plaintext
STREAMING-AWS4-HMAC-SHA256-PAYLOAD is not supported
```
diff --git a/doc/api/README.md b/doc/api/README.md
index a261e1e7dc6..7a9d86ee718 100644
--- a/doc/api/README.md
+++ b/doc/api/README.md
@@ -260,7 +260,7 @@ returned with status code `404`:
Example of a valid API call and a request using cURL with sudo request,
providing a username:
-```
+```plaintext
GET /projects?private_token=<your_access_token>&sudo=username
```
@@ -271,7 +271,7 @@ curl --header "Private-Token: <your_access_token>" --header "Sudo: username" "ht
Example of a valid API call and a request using cURL with sudo request,
providing an ID:
-```
+```plaintext
GET /projects?private_token=<your_access_token>&sudo=23
```
@@ -355,7 +355,7 @@ curl --head --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example
The response will then be:
-```
+```http
HTTP/1.1 200 OK
Cache-Control: no-cache
Content-Length: 1103
@@ -415,7 +415,7 @@ curl --request GET --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab
The response header includes a link to the next page. For example:
-```
+```http
HTTP/1.1 200 OK
...
Link: <https://gitlab.example.com/api/v4/projects?pagination=keyset&per_page=50&order_by=id&sort=asc&id_after=42>; rel="next"
@@ -444,7 +444,7 @@ URL-encoded.
For example, `/` is represented by `%2F`:
-```
+```plaintext
GET /api/v4/projects/diaspora%2Fdiaspora
```
@@ -460,7 +460,7 @@ URL-encoded.
For example, `/` is represented by `%2F`:
-```
+```plaintext
GET /api/v4/projects/1/branches/my%2Fbranch/commits
```
@@ -540,7 +540,7 @@ Such errors appear in two cases:
When an attribute is missing, you will get something like:
-```
+```http
HTTP/1.1 400 Bad Request
Content-Type: application/json
{
@@ -551,7 +551,7 @@ Content-Type: application/json
When a validation error occurs, error messages will be different. They will
hold all details of validation errors:
-```
+```http
HTTP/1.1 400 Bad Request
Content-Type: application/json
{
@@ -589,7 +589,7 @@ follows:
When you try to access an API URL that does not exist you will receive 404 Not Found.
-```
+```http
HTTP/1.1 404 Not Found
Content-Type: application/json
{
@@ -604,13 +604,13 @@ to a [W3 recommendation](http://www.w3.org/Addressing/URL/4_URI_Recommentations.
causes a `+` to be interpreted as a space. For example, in an ISO 8601 date, you may want to pass
a time in Mountain Standard Time, such as:
-```
+```plaintext
2017-10-17T23:11:13.000+05:30
```
The correct encoding for the query parameter would be:
-```
+```plaintext
2017-10-17T23:11:13.000%2B05:30
```
diff --git a/doc/api/admin_sidekiq_queues.md b/doc/api/admin_sidekiq_queues.md
new file mode 100644
index 00000000000..9d053714b54
--- /dev/null
+++ b/doc/api/admin_sidekiq_queues.md
@@ -0,0 +1,47 @@
+# Admin Sidekiq queues API
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/25998) in GitLab 12.9.
+
+Delete jobs from a Sidekiq queue that match the given
+[metadata](../development/logging.md#logging-context-metadata-through-rails-or-grape-requests).
+
+The response has three fields:
+
+1. `deleted_jobs` - the number of jobs deleted by the request.
+1. `queue_size` - the remaining size of the queue after processing the
+ request.
+1. `completed` - whether or not the request was able to process the
+ entire queue in time. If not, retrying with the same parameters may
+ delete further jobs (including those added after the first request
+ was issued).
+
+This API endpoint is only available to admin users.
+
+```plaintext
+DELETE /admin/sidekiq/queues/:queue_name
+```
+
+| Attribute | Type | Required | Description |
+| --------- | -------------- | -------- | ----------- |
+| `queue_name` | string | yes | The name of the queue to delete jobs from |
+| `user` | string | no | The username of the user who scheduled the jobs |
+| `project` | string | no | The full path of the project where the jobs were scheduled from |
+| `root_namespace` | string | no | The root namespace of the project |
+| `subscription_plan` | string | no | The subscription plan of the root namespace (GitLab.com only) |
+| `caller_id` | string | no | The endpoint or background job that scheduled the job (for example: `ProjectsController#create`, `/api/:version/projects/:id`, `PostReceive`) |
+
+At least one attribute, other than `queue_name`, is required.
+
+```shell
+curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/admin/sidekiq/queues/authorized_projects?user=root
+```
+
+Example response:
+
+```json
+{
+ "completed": true,
+ "deleted_jobs": 7,
+ "queue_size": 14
+}
+```
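+
+Another hypothetical request, combining several filters (the project path is a
+placeholder and must be URL-encoded):
+
+```shell
+curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" \
+  "https://gitlab.example.com/api/v4/admin/sidekiq/queues/authorized_projects?user=root&project=group%2Fproject"
+```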
diff --git a/doc/api/api_resources.md b/doc/api/api_resources.md
index a20b903d534..0ce4efa7d9f 100644
--- a/doc/api/api_resources.md
+++ b/doc/api/api_resources.md
@@ -61,6 +61,7 @@ The following API resources are available in the project context:
| [Protected tags](protected_tags.md) | `/projects/:id/protected_tags` |
| [Releases](releases/index.md) | `/projects/:id/releases` |
| [Release links](releases/links.md) | `/projects/:id/releases/.../assets/links` |
+| [Remote mirrors](remote_mirrors.md) | `/projects/:id/remote_mirrors` |
| [Repositories](repositories.md) | `/projects/:id/repository` |
| [Repository files](repository_files.md) | `/projects/:id/repository/files` |
| [Repository submodules](repository_submodules.md) | `/projects/:id/repository/submodules` |
@@ -105,41 +106,43 @@ The following API resources are available in the group context:
The following API resources are available outside of project and group contexts (including `/users`):
-| Resource | Available endpoints |
-|:--------------------------------------------------|:------------------------------------------------------------------------|
-| [Appearance](appearance.md) **(CORE ONLY)** | `/application/appearance` |
-| [Applications](applications.md) | `/applications` |
+| Resource | Available endpoints |
+|:---------------------------------------------------|:------------------------------------------------------------------------|
+| [Admin Sidekiq queues](admin_sidekiq_queues.md) | `/admin/sidekiq/queues/:queue_name` |
+| [Appearance](appearance.md) **(CORE ONLY)** | `/application/appearance` |
+| [Applications](applications.md) | `/applications` |
| [Audit Events](audit_events.md) **(PREMIUM ONLY)** | `/audit_events` |
-| [Avatar](avatar.md) | `/avatar` |
-| [Broadcast messages](broadcast_messages.md) | `/broadcast_messages` |
-| [Code snippets](snippets.md) | `/snippets` |
-| [Custom attributes](custom_attributes.md) | `/users/:id/custom_attributes` (also available for groups and projects) |
-| [Deploy keys](deploy_keys.md) | `/deploy_keys` (also available for projects) |
-| [Events](events.md) | `/events`, `/users/:id/events` (also available for projects) |
-| [Feature flags](features.md) | `/features` |
-| [Geo Nodes](geo_nodes.md) **(PREMIUM ONLY)** | `/geo_nodes` |
-| [Import repository from GitHub](import.md) | `/import/github` |
-| [Issues](issues.md) | `/issues` (also available for groups and projects) |
-| [Issues Statistics](issues_statistics.md) | `/issues_statistics` (also available for groups and projects) |
-| [Keys](keys.md) | `/keys` |
-| [License](license.md) **(CORE ONLY)** | `/license` |
-| [Markdown](markdown.md) | `/markdown` |
-| [Merge requests](merge_requests.md) | `/merge_requests` (also available for groups and projects) |
-| [Namespaces](namespaces.md) | `/namespaces` |
-| [Notification settings](notification_settings.md) | `/notification_settings` (also available for groups and projects) |
-| [Pages domains](pages_domains.md) | `/pages/domains` (also available for projects) |
-| [Projects](projects.md) | `/users/:id/projects` (also available for projects) |
-| [Runners](runners.md) | `/runners` (also available for projects) |
-| [Search](search.md) | `/search` (also available for groups and projects) |
-| [Settings](settings.md) **(CORE ONLY)** | `/application/settings` |
-| [Statistics](statistics.md) | `/application/statistics` |
-| [Sidekiq metrics](sidekiq_metrics.md) | `/sidekiq` |
-| [Suggestions](suggestions.md) | `/suggestions` |
-| [System hooks](system_hooks.md) | `/hooks` |
-| [Todos](todos.md) | `/todos` |
-| [Users](users.md) | `/users` |
-| [Validate `.gitlab-ci.yml` file](lint.md) | `/lint` |
-| [Version](version.md) | `/version` |
+| [Avatar](avatar.md) | `/avatar` |
+| [Broadcast messages](broadcast_messages.md) | `/broadcast_messages` |
+| [Code snippets](snippets.md) | `/snippets` |
+| [Custom attributes](custom_attributes.md) | `/users/:id/custom_attributes` (also available for groups and projects) |
+| [Deploy keys](deploy_keys.md) | `/deploy_keys` (also available for projects) |
+| [Events](events.md) | `/events`, `/users/:id/events` (also available for projects) |
+| [Feature flags](features.md) | `/features` |
+| [Geo Nodes](geo_nodes.md) **(PREMIUM ONLY)** | `/geo_nodes` |
+| [Group Activity Analytics](group_activity_analytics.md) **(STARTER)** | `/analytics/group_activity/{issues_count \| merge_requests_count}` |
+| [Import repository from GitHub](import.md) | `/import/github` |
+| [Issues](issues.md) | `/issues` (also available for groups and projects) |
+| [Issues Statistics](issues_statistics.md) | `/issues_statistics` (also available for groups and projects) |
+| [Keys](keys.md) | `/keys` |
+| [License](license.md) **(CORE ONLY)** | `/license` |
+| [Markdown](markdown.md) | `/markdown` |
+| [Merge requests](merge_requests.md) | `/merge_requests` (also available for groups and projects) |
+| [Namespaces](namespaces.md) | `/namespaces` |
+| [Notification settings](notification_settings.md) | `/notification_settings` (also available for groups and projects) |
+| [Pages domains](pages_domains.md) | `/pages/domains` (also available for projects) |
+| [Projects](projects.md) | `/users/:id/projects` (also available for projects) |
+| [Runners](runners.md) | `/runners` (also available for projects) |
+| [Search](search.md) | `/search` (also available for groups and projects) |
+| [Settings](settings.md) **(CORE ONLY)** | `/application/settings` |
+| [Statistics](statistics.md) | `/application/statistics` |
+| [Sidekiq metrics](sidekiq_metrics.md) | `/sidekiq` |
+| [Suggestions](suggestions.md) | `/suggestions` |
+| [System hooks](system_hooks.md) | `/hooks` |
+| [Todos](todos.md) | `/todos` |
+| [Users](users.md) | `/users` |
+| [Validate `.gitlab-ci.yml` file](lint.md) | `/lint` |
+| [Version](version.md) | `/version` |
## Templates API resources
diff --git a/doc/api/appearance.md b/doc/api/appearance.md
index 354a69cb604..f9ca6aed01a 100644
--- a/doc/api/appearance.md
+++ b/doc/api/appearance.md
@@ -9,7 +9,7 @@ Appearance API allows you to maintain GitLab's appearance as if using the GitLab
List the current appearance configuration of the GitLab instance.
-```
+```plaintext
GET /application/appearance
```
@@ -39,7 +39,7 @@ Example response:
Use an API call to modify GitLab instance appearance configuration.
-```
+```plaintext
PUT /application/appearance
```
diff --git a/doc/api/audit_events.md b/doc/api/audit_events.md
index 5af2bdc1d29..7754f431110 100644
--- a/doc/api/audit_events.md
+++ b/doc/api/audit_events.md
@@ -4,11 +4,11 @@
The Audit Events API allows you to retrieve [instance audit events](../administration/audit_events.md#instance-events-premium-only).
-To retrieve audit events using the API, you must [authenticate yourself](README.html#authentication) as an Administrator.
+To retrieve audit events using the API, you must [authenticate yourself](README.md#authentication) as an Administrator.
### Retrieve all instance audit events
-```
+```plaintext
GET /audit_events
```
@@ -87,10 +87,14 @@ Example response:
### Retrieve single instance audit event
-```
+```plaintext
GET /audit_events/:id
```
+| Attribute | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `id` | integer | yes | The ID of the audit event |
+
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" https://primary.example.com/api/v4/audit_events/1
```
@@ -122,11 +126,11 @@ Example response:
The Group Audit Events API allows you to retrieve [group audit events](../administration/audit_events.md#group-events-starter).
-To retrieve group audit events using the API, you must [authenticate yourself](README.html#authentication) as an Administrator or an owner of the group.
+To retrieve group audit events using the API, you must [authenticate yourself](README.md#authentication) as an Administrator or an owner of the group.
### Retrieve all group audit events
-```
+```plaintext
GET /groups/:id/audit_events
```
@@ -188,14 +192,14 @@ Example response:
Only available to group owners and administrators.
-```
+```plaintext
GET /groups/:id/audit_events/:audit_event_id
```
| Attribute | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `id` | integer/string | yes | The ID or [URL-encoded path of the group](README.md#namespaced-path-encoding) |
-| `audit_event_id` | integer | yes | ID of the audit event |
+| `audit_event_id` | integer | yes | The ID of the audit event |
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" https://primary.example.com/api/v4/groups/60/audit_events/2
diff --git a/doc/api/boards.md b/doc/api/boards.md
index 54b92a7511c..b99f249cab1 100644
--- a/doc/api/boards.md
+++ b/doc/api/boards.md
@@ -9,7 +9,7 @@ request on that project will result to a `404` status code.
Lists Issue Boards in the given project.
-```
+```plaintext
GET /projects/:id/boards
```
@@ -83,7 +83,7 @@ Example response:
Get a single board.
-```
+```plaintext
GET /projects/:id/boards/:board_id
```
@@ -157,7 +157,7 @@ Example response:
Creates a board.
-```
+```plaintext
POST /projects/:id/boards
```
@@ -233,7 +233,7 @@ Example response:
Updates a board.
-```
+```plaintext
PUT /projects/:id/boards/:board_id
```
@@ -312,7 +312,7 @@ Example response:
Deletes a board.
-```
+```plaintext
DELETE /projects/:id/boards/:board_id
```
@@ -330,7 +330,7 @@ curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" https://gitl
Get a list of the board's lists.
Does not include `open` and `closed` lists
-```
+```plaintext
GET /projects/:id/boards/:board_id/lists
```
@@ -387,7 +387,7 @@ Example response:
Get a single board list.
-```
+```plaintext
GET /projects/:id/boards/:board_id/lists/:list_id
```
@@ -421,7 +421,7 @@ Example response:
Creates a new Issue Board list.
-```
+```plaintext
POST /projects/:id/boards/:board_id/lists
```
@@ -463,7 +463,7 @@ Example response:
Updates an existing Issue Board list. This call is used to change list position.
-```
+```plaintext
PUT /projects/:id/boards/:board_id/lists/:list_id
```
@@ -498,7 +498,7 @@ Example response:
Only for admins and project owners. Deletes the board list in question.
-```
+```plaintext
DELETE /projects/:id/boards/:board_id/lists/:list_id
```
diff --git a/doc/api/broadcast_messages.md b/doc/api/broadcast_messages.md
index b9f9621e1f9..1ff40103750 100644
--- a/doc/api/broadcast_messages.md
+++ b/doc/api/broadcast_messages.md
@@ -36,7 +36,8 @@ Example response:
"id":1,
"active": false,
"target_path": "*/welcome",
- "broadcast_type": "banner"
+ "broadcast_type": "banner",
+ "dismissable": false
}
]
```
@@ -73,7 +74,8 @@ Example response:
"id":1,
"active":false,
"target_path": "*/welcome",
- "broadcast_type": "banner"
+ "broadcast_type": "banner",
+ "dismissable": false
}
```
@@ -87,15 +89,16 @@ POST /broadcast_messages
Parameters:
-| Attribute | Type | Required | Description |
-|:------------|:---------|:---------|:------------------------------------------------------|
-| `message` | string | yes | Message to display. |
-| `starts_at` | datetime | no | Starting time (defaults to current time). |
-| `ends_at` | datetime | no | Ending time (defaults to one hour from current time). |
-| `color` | string | no | Background color hex code. |
-| `font` | string | no | Foreground color hex code. |
-| `target_path`| string | no | Target path of the broadcast message. |
-| `broadcast_type`| string | no | Appearance type (defaults to banner) |
+| Attribute | Type | Required | Description |
+|:----------------|:---------|:---------|:------------------------------------------------------|
+| `message` | string | yes | Message to display. |
+| `starts_at` | datetime | no | Starting time (defaults to current time). |
+| `ends_at` | datetime | no | Ending time (defaults to one hour from current time). |
+| `color` | string | no | Background color hex code. |
+| `font` | string | no | Foreground color hex code. |
+| `target_path` | string | no | Target path of the broadcast message. |
+| `broadcast_type`| string | no | Appearance type (defaults to banner) |
+| `dismissable` | boolean | no | Can the user dismiss the message? |
Example request:
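
For illustration only, a request that also sets the new `dismissable` flag might look like the following (the message text and token are placeholders):

```shell
curl --request POST --data "message=Deploy in progress&dismissable=true" --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/broadcast_messages"
```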
@@ -116,6 +119,7 @@ Example response:
"active": true,
"target_path": "*/welcome",
"broadcast_type": "notification",
+ "dismissable": false
}
```
@@ -129,16 +133,17 @@ PUT /broadcast_messages/:id
Parameters:
-| Attribute | Type | Required | Description |
-|:------------|:---------|:---------|:-----------------------------------|
-| `id` | integer | yes | ID of broadcast message to update. |
-| `message` | string | no | Message to display. |
-| `starts_at` | datetime | no | Starting time. |
-| `ends_at` | datetime | no | Ending time. |
-| `color` | string | no | Background color hex code. |
-| `font` | string | no | Foreground color hex code. |
-| `target_path`| string | no | Target path of the broadcast message. |
-| `broadcast_type`| string | no | Appearance type (defaults to banner) |
+| Attribute | Type | Required | Description |
+|:----------------|:---------|:---------|:--------------------------------------|
+| `id` | integer | yes | ID of broadcast message to update. |
+| `message` | string | no | Message to display. |
+| `starts_at` | datetime | no | Starting time. |
+| `ends_at` | datetime | no | Ending time. |
+| `color` | string | no | Background color hex code. |
+| `font` | string | no | Foreground color hex code. |
+| `target_path` | string | no | Target path of the broadcast message. |
+| `broadcast_type`| string | no | Appearance type (defaults to banner) |
+| `dismissable` | boolean | no | Can the user dismiss the message? |
Example request:
@@ -159,6 +164,7 @@ Example response:
"active": true,
"target_path": "*/welcome",
"broadcast_type": "notification",
+ "dismissable": false
}
```
diff --git a/doc/api/commits.md b/doc/api/commits.md
index ee635a009bf..02fb260d010 100644
--- a/doc/api/commits.md
+++ b/doc/api/commits.md
@@ -4,7 +4,7 @@
Get a list of repository commits in a project.
-```
+```plaintext
GET /projects/:id/repository/commits
```
@@ -32,8 +32,8 @@ Example response:
"id": "ed899a2f4b50b4370feeea94676502b42383c746",
"short_id": "ed899a2f4b5",
"title": "Replace sanitize with escape once",
- "author_name": "Dmitriy Zaporozhets",
- "author_email": "dzaporozhets@sphereconsultinginc.com",
+ "author_name": "Example User",
+ "author_email": "user@example.com",
"authored_date": "2012-09-20T11:50:22+03:00",
"committer_name": "Administrator",
"committer_email": "admin@example.com",
@@ -42,21 +42,23 @@ Example response:
"message": "Replace sanitize with escape once",
"parent_ids": [
"6104942438c14ec7bd21c6cd5bd995272b3faff6"
- ]
+ ],
+ "web_url": "https://gitlab.example.com/thedude/gitlab-foss/-/commit/ed899a2f4b50b4370feeea94676502b42383c746"
},
{
"id": "6104942438c14ec7bd21c6cd5bd995272b3faff6",
"short_id": "6104942438c",
"title": "Sanitize for network graph",
"author_name": "randx",
- "author_email": "dmitriy.zaporozhets@gmail.com",
- "committer_name": "Dmitriy",
- "committer_email": "dmitriy.zaporozhets@gmail.com",
+ "author_email": "user@example.com",
+ "committer_name": "ExampleName",
+ "committer_email": "user@example.com",
"created_at": "2012-09-20T09:06:12+03:00",
"message": "Sanitize for network graph",
"parent_ids": [
"ae1d9fb46aa2b07ee9836d49862ec4e2c46fbbba"
- ]
+ ],
+ "web_url": "https://gitlab.example.com/thedude/gitlab-foss/-/commit/ed899a2f4b50b4370feeea94676502b42383c746"
}
]
```
@@ -67,7 +69,7 @@ Example response:
Create a commit by posting a JSON payload
-```
+```plaintext
POST /projects/:id/repository/commits
```
@@ -140,10 +142,10 @@ Example response:
"id": "ed899a2f4b50b4370feeea94676502b42383c746",
"short_id": "ed899a2f4b5",
"title": "some commit message",
- "author_name": "Dmitriy Zaporozhets",
- "author_email": "dzaporozhets@sphereconsultinginc.com",
- "committer_name": "Dmitriy Zaporozhets",
- "committer_email": "dzaporozhets@sphereconsultinginc.com",
+ "author_name": "Example User",
+ "author_email": "user@example.com",
+ "committer_name": "Example User",
+ "committer_email": "user@example.com",
"created_at": "2016-09-20T09:26:24.000-07:00",
"message": "some commit message",
"parent_ids": [
@@ -156,7 +158,8 @@ Example response:
"deletions": 2,
"total": 4
},
- "status": null
+ "status": null,
+ "web_url": "https://gitlab.example.com/thedude/gitlab-foss/-/commit/ed899a2f4b50b4370feeea94676502b42383c746"
}
```
@@ -190,7 +193,7 @@ curl --request POST \
Get a specific commit identified by the commit hash or name of a branch or tag.
-```
+```plaintext
GET /projects/:id/repository/commits/:sha
```
@@ -214,9 +217,9 @@ Example response:
"short_id": "6104942438c",
"title": "Sanitize for network graph",
"author_name": "randx",
- "author_email": "dmitriy.zaporozhets@gmail.com",
+ "author_email": "user@example.com",
"committer_name": "Dmitriy",
- "committer_email": "dmitriy.zaporozhets@gmail.com",
+ "committer_email": "user@example.com",
"created_at": "2012-09-20T09:06:12+03:00",
"message": "Sanitize for network graph",
"committed_date": "2012-09-20T09:06:12+03:00",
@@ -235,7 +238,8 @@ Example response:
"deletions": 10,
"total": 25
},
- "status": "running"
+ "status": "running",
+ "web_url": "https://gitlab.example.com/thedude/gitlab-foss/-/commit/6104942438c14ec7bd21c6cd5bd995272b3faff6"
}
```
@@ -246,7 +250,7 @@ Example response:
Get all references (from branches or tags) a commit is pushed to.
The pagination parameters `page` and `per_page` can be used to restrict the list of references.
-```
+```plaintext
GET /projects/:id/repository/commits/:sha/refs
```
@@ -280,7 +284,7 @@ Example response:
Cherry picks a commit to a given branch.
-```
+```plaintext
POST /projects/:id/repository/commits/:sha/cherry_pick
```
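
As a sketch of how this endpoint is typically called, assuming the `branch` form parameter documented for it (project, commit, and branch values are placeholders):

```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" --form "branch=master" "https://gitlab.example.com/api/v4/projects/5/repository/commits/master/cherry_pick"
```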
@@ -303,18 +307,19 @@ Example response:
"id": "8b090c1b79a14f2bd9e8a738f717824ff53aebad",
"short_id": "8b090c1b",
"title": "Feature added",
- "author_name": "Dmitriy Zaporozhets",
- "author_email": "dmitriy.zaporozhets@gmail.com",
+ "author_name": "Example User",
+ "author_email": "user@example.com",
"authored_date": "2016-12-12T20:10:39.000+01:00",
"created_at": "2016-12-12T20:10:39.000+01:00",
"committer_name": "Administrator",
"committer_email": "admin@example.com",
"committed_date": "2016-12-12T20:10:39.000+01:00",
"title": "Feature added",
- "message": "Feature added\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n",
+ "message": "Feature added\n\nSigned-off-by: Example User <user@example.com>\n",
"parent_ids": [
"a738f717824ff53aebad8b090c1b79a14f2bd9e8"
- ]
+ ],
+ "web_url": "https://gitlab.example.com/thedude/gitlab-foss/-/commit/8b090c1b79a14f2bd9e8a738f717824ff53aebad"
}
```
@@ -339,7 +344,7 @@ conflict.
Reverts a commit in a given branch.
-```
+```plaintext
POST /projects/:id/repository/commits/:sha/revert
```
@@ -370,7 +375,8 @@ Example response:
"authored_date":"2018-11-08T15:55:26.000Z",
"committer_name":"Administrator",
"committer_email":"admin@example.com",
- "committed_date":"2018-11-08T15:55:26.000Z"
+ "committed_date":"2018-11-08T15:55:26.000Z",
+ "web_url": "https://gitlab.example.com/thedude/gitlab-foss/-/commit/8b090c1b79a14f2bd9e8a738f717824ff53aebad"
}
```
@@ -391,7 +397,7 @@ changeset was empty, likely due to the change having already been reverted.
Get the diff of a commit in a project.
-```
+```plaintext
GET /projects/:id/repository/commits/:sha/diff
```
@@ -427,7 +433,7 @@ Example response:
Get the comments of a commit in a project.
-```
+```plaintext
GET /projects/:id/repository/commits/:sha/comments
```
@@ -478,7 +484,7 @@ cases below is valid:
In any of the above cases, the response of `line`, `line_type` and `path` is
set to `null`.
-```
+```plaintext
POST /projects/:id/repository/commits/:sha/comments
```
@@ -524,7 +530,7 @@ Since GitLab 8.1, this is the new commit status API.
List the statuses of a commit in a project.
The pagination parameters `page` and `per_page` can be used to restrict the list of references.
-```
+```plaintext
GET /projects/:id/repository/commits/:sha/statuses
```
@@ -598,7 +604,7 @@ Example response:
Adds or updates a build status of a commit.
-```
+```plaintext
POST /projects/:id/statuses/:sha
```
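
A minimal sketch of a request, assuming the `state` parameter described for this endpoint (project ID, commit SHA, and state value are placeholders):

```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/17/statuses/18f3e63d05582537db6d183d9d557be09e1f90c8?state=success"
```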
@@ -651,7 +657,7 @@ Example response:
Get a list of Merge Requests related to the specified commit.
-```
+```plaintext
GET /projects/:id/repository/commits/:sha/merge_requests
```
@@ -720,7 +726,7 @@ Example response:
Get the [GPG signature from a commit](../user/project/repository/gpg_signed_commits/index.md),
if it is signed. For unsigned commits, it results in a 404 response.
-```
+```plaintext
GET /projects/:id/repository/commits/:sha/signature
```
diff --git a/doc/api/container_registry.md b/doc/api/container_registry.md
index f9c6ec4e572..eb46bcfc457 100644
--- a/doc/api/container_registry.md
+++ b/doc/api/container_registry.md
@@ -10,7 +10,7 @@ This is the API docs of the [GitLab Container Registry](../user/packages/contain
Get a list of registry repositories in a project.
-```
+```plaintext
GET /projects/:id/registry/repositories
```
@@ -50,7 +50,7 @@ Example response:
Get a list of registry repositories in a group.
-```
+```plaintext
GET /groups/:id/registry/repositories
```
@@ -116,7 +116,7 @@ Delete a repository in registry.
This operation is executed asynchronously and might take some time to complete.
-```
+```plaintext
DELETE /projects/:id/registry/repositories/:repository_id
```
@@ -135,7 +135,7 @@ curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" "https://git
Get a list of tags for given registry repository.
-```
+```plaintext
GET /projects/:id/registry/repositories/:repository_id/tags
```
@@ -169,7 +169,7 @@ Example response:
Get details of a registry repository tag.
-```
+```plaintext
GET /projects/:id/registry/repositories/:repository_id/tags/:tag_name
```
@@ -202,7 +202,7 @@ Example response:
Delete a registry repository tag.
-```
+```plaintext
DELETE /projects/:id/registry/repositories/:repository_id/tags/:tag_name
```
@@ -223,7 +223,7 @@ This action does not delete blobs. In order to delete them and recycle disk spac
Delete registry repository tags in bulk based on given criteria.
-```
+```plaintext
DELETE /projects/:id/registry/repositories/:repository_id/tags
```
@@ -231,7 +231,9 @@ DELETE /projects/:id/registry/repositories/:repository_id/tags
| --------- | ---- | -------- | ----------- |
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user. |
| `repository_id` | integer | yes | The ID of registry repository. |
-| `name_regex` | string | yes | The [re2](https://github.com/google/re2/wiki/Syntax) regex of the name to delete. To delete all tags specify `.*`.|
+| `name_regex` | string | no | The [re2](https://github.com/google/re2/wiki/Syntax) regex of the name to delete. To delete all tags specify `.*`. **Note:** `name_regex` is deprecated in favor of `name_regex_delete`.|
+| `name_regex_delete` | string | yes | The [re2](https://github.com/google/re2/wiki/Syntax) regex of the name to delete. To delete all tags specify `.*`.|
+| `name_regex_keep` | string | no | The [re2](https://github.com/google/re2/wiki/Syntax) regex of the name to keep. This value will override any matches from `name_regex_delete`. Note: setting to `.*` will result in a no-op. |
| `keep_n` | integer | no | The amount of latest tags of given name to keep. |
| `older_than` | string | no | Tags to delete that are older than the given time, written in human readable form `1h`, `1d`, `1month`. |
@@ -239,7 +241,7 @@ This API call performs the following operations:
1. It orders all tags by creation date. The creation date is the time of the
manifest creation, not the time of tag push.
-1. It removes only the tags matching the given `name_regex`.
+1. It removes only the tags matching the given `name_regex_delete` (or deprecated `name_regex`), keeping any that match `name_regex_keep`.
1. It never removes the tag named `latest`.
1. It keeps N latest matching tags (if `keep_n` is specified).
1. It only removes tags that are older than X amount of time (if `older_than` is specified).
@@ -261,17 +263,23 @@ Examples:
and remove ones that are older than 2 days:
```shell
- curl --request DELETE --data 'name_regex=[0-9a-z]{40}' --data 'keep_n=5' --data 'older_than=2d' --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/registry/repositories/2/tags"
+ curl --request DELETE --data 'name_regex_delete=[0-9a-z]{40}' --data 'keep_n=5' --data 'older_than=2d' --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/registry/repositories/2/tags"
```
1. Remove all tags, but always keep the latest 5:
```shell
- curl --request DELETE --data 'name_regex=.*' --data 'keep_n=5' --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/registry/repositories/2/tags"
+ curl --request DELETE --data 'name_regex_delete=.*' --data 'keep_n=5' --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/registry/repositories/2/tags"
+ ```
+
+1. Remove all tags, but always keep tags beginning with `stable`:
+
+ ```shell
+ curl --request DELETE --data 'name_regex_delete=.*' --data 'name_regex_keep=stable.*' --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/registry/repositories/2/tags"
```
1. Remove all tags that are older than 1 month:
```shell
- curl --request DELETE --data 'name_regex=.*' --data 'older_than=1month' --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/registry/repositories/2/tags"
+ curl --request DELETE --data 'name_regex_delete=.*' --data 'older_than=1month' --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/registry/repositories/2/tags"
```
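
1. For illustration, these filters can be combined. A sketch that removes tags matching `[0-9a-z]{40}` older than 1 month while keeping any tag beginning with `stable` (adjust the values to your repository):

   ```shell
   curl --request DELETE --data 'name_regex_delete=[0-9a-z]{40}' --data 'name_regex_keep=stable.*' --data 'older_than=1month' --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/registry/repositories/2/tags"
   ```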
diff --git a/doc/api/custom_attributes.md b/doc/api/custom_attributes.md
index 983b2381ae0..20b364993ae 100644
--- a/doc/api/custom_attributes.md
+++ b/doc/api/custom_attributes.md
@@ -9,7 +9,7 @@ which will be referred to as "resource" in this documentation.
Get all custom attributes on a resource.
-```
+```plaintext
GET /users/:id/custom_attributes
GET /groups/:id/custom_attributes
GET /projects/:id/custom_attributes
@@ -42,7 +42,7 @@ Example response:
Get a single custom attribute on a resource.
-```
+```plaintext
GET /users/:id/custom_attributes/:key
GET /groups/:id/custom_attributes/:key
GET /projects/:id/custom_attributes/:key
@@ -71,7 +71,7 @@ Example response:
Set a custom attribute on a resource. The attribute will be updated if it already exists,
or newly created otherwise.
-```
+```plaintext
PUT /users/:id/custom_attributes/:key
PUT /groups/:id/custom_attributes/:key
PUT /projects/:id/custom_attributes/:key
@@ -100,7 +100,7 @@ Example response:
Delete a custom attribute on a resource.
-```
+```plaintext
DELETE /users/:id/custom_attributes/:key
DELETE /groups/:id/custom_attributes/:key
DELETE /projects/:id/custom_attributes/:key
diff --git a/doc/api/dependencies.md b/doc/api/dependencies.md
index 561645d7697..bb7e5ae238d 100644
--- a/doc/api/dependencies.md
+++ b/doc/api/dependencies.md
@@ -16,7 +16,7 @@ Get a list of project dependencies. This API partially mirroring
This list can be generated only for [languages and package managers](../user/application_security/dependency_scanning/index.md#supported-languages-and-package-managers)
supported by Gemnasium.
-```
+```plaintext
GET /projects/:id/dependencies
GET /projects/:id/dependencies?package_manager=maven
GET /projects/:id/dependencies?package_manager=yarn,bundler
diff --git a/doc/api/deploy_keys.md b/doc/api/deploy_keys.md
index 492777fb785..f6d00988c56 100644
--- a/doc/api/deploy_keys.md
+++ b/doc/api/deploy_keys.md
@@ -4,7 +4,7 @@
Get a list of all deploy keys across all projects of the GitLab instance. This endpoint requires admin access.
-```
+```plaintext
GET /deploy_keys
```
@@ -35,7 +35,7 @@ Example response:
Get a list of a project's deploy keys.
-```
+```plaintext
GET /projects/:id/deploy_keys
```
@@ -72,7 +72,7 @@ Example response:
Get a single key.
-```
+```plaintext
GET /projects/:id/deploy_keys/:key_id
```
@@ -106,7 +106,7 @@ Creates a new deploy key for a project.
If the deploy key already exists in another project, it will be joined to the current
project only if the original one is accessible by the same user.
-```
+```plaintext
POST /projects/:id/deploy_keys
```
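
A hedged sketch of a create request, assuming the `title`, `key`, and `can_push` parameters documented for this endpoint (the key value is a placeholder):

```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" --header "Content-Type: application/json" --data '{"title": "My deploy key", "key": "ssh-rsa AAAA...", "can_push": true}' "https://gitlab.example.com/api/v4/projects/5/deploy_keys/"
```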
@@ -137,7 +137,7 @@ Example response:
Updates a deploy key for a project.
-```
+```plaintext
PUT /projects/:id/deploy_keys/:key_id
```
@@ -167,7 +167,7 @@ Example response:
Removes a deploy key from the project. If the deploy key is used only for this project, it will be deleted from the system.
-```
+```plaintext
DELETE /projects/:id/deploy_keys/:key_id
```
diff --git a/doc/api/deploy_tokens.md b/doc/api/deploy_tokens.md
new file mode 100644
index 00000000000..8000629ba78
--- /dev/null
+++ b/doc/api/deploy_tokens.md
@@ -0,0 +1,238 @@
+# Deploy Tokens API
+
+## List all deploy tokens
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/21811) in GitLab 12.9.
+
+Get a list of all deploy tokens across the GitLab instance. This endpoint requires admin access.
+
+```plaintext
+GET /deploy_tokens
+```
+
+Example request:
+
+```shell
+curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/deploy_tokens"
+```
+
+Example response:
+
+```json
+[
+ {
+ "id": 1,
+ "name": "MyToken",
+ "username": "gitlab+deploy-token-1",
+ "expires_at": "2020-02-14T00:00:00.000Z",
+ "scopes": [
+ "read_repository",
+ "read_registry"
+ ]
+ }
+]
+```
+
+## Project deploy tokens
+
+Project deploy token API endpoints require project maintainer access or higher.
+
+### List project deploy tokens
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/21811) in GitLab 12.9.
+
+Get a list of a project's deploy tokens.
+
+```plaintext
+GET /projects/:id/deploy_tokens
+```
+
+Parameters:
+
+| Attribute | Type | Required | Description |
+|:---------------|:---------------|:---------|:-----------------------------------------------------------------------------|
+| `id` | integer/string | yes | ID or [URL-encoded path of the project](README.md#namespaced-path-encoding). |
+
+Example request:
+
+```shell
+curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/deploy_tokens"
+```
+
+Example response:
+
+```json
+[
+ {
+ "id": 1,
+ "name": "MyToken",
+ "username": "gitlab+deploy-token-1",
+ "expires_at": "2020-02-14T00:00:00.000Z",
+ "scopes": [
+ "read_repository",
+ "read_registry"
+ ]
+ }
+]
+```
+
+### Create a project deploy token
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/21811) in GitLab 12.9.
+
+Creates a new deploy token for a project.
+
+```plaintext
+POST /projects/:id/deploy_tokens
+```
+
+| Attribute | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user |
+| `name` | string | yes | New deploy token's name |
+| `expires_at` | datetime | no | Expiration date for the deploy token. Does not expire if no value is provided. |
+| `username` | string | no | Username for deploy token. Default is `gitlab+deploy-token-{n}` |
+| `scopes` | array of strings | yes | Indicates the deploy token scopes. Must be at least one of `read_repository` or `read_registry`. |
+
+Example request:
+
+```shell
+curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" --header "Content-Type: application/json" --data '{"name": "My deploy token", "expires_at": "2021-01-01", "username": "custom-user", "scopes": ["read_repository"]}' "https://gitlab.example.com/api/v4/projects/5/deploy_tokens/"
+```
+
+Example response:
+
+```json
+{
+ "id": 1,
+ "name": "My deploy token",
+ "username": "custom-user",
+ "expires_at": "2021-01-01T00:00:00.000Z",
+ "token": "jMRvtPNxrn3crTAGukpZ",
+ "scopes": [
+ "read_repository"
+ ]
+}
+```
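+
+For illustration only (this is not part of the API): a token created with the `read_repository` scope can typically be used as HTTP Basic credentials when cloning over HTTPS. The repository path below is a placeholder:
+
+```shell
+git clone https://custom-user:jMRvtPNxrn3crTAGukpZ@gitlab.example.com/group/project.git
+```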
+
+### Delete a project deploy token
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/21811) in GitLab 12.9.
+
+Removes a deploy token from the project.
+
+```plaintext
+DELETE /projects/:id/deploy_tokens/:token_id
+```
+
+| Attribute | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user |
+| `token_id` | integer | yes | The ID of the deploy token |
+
+Example request:
+
+```shell
+curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/deploy_tokens/13"
+```
+
+## Group deploy tokens
+
+These endpoints require group maintainer access or higher.
+
+### List group deploy tokens
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/21811) in GitLab 12.9.
+
+Get a list of a group's deploy tokens.
+
+```plaintext
+GET /groups/:id/deploy_tokens
+```
+
+Parameters:
+
+| Attribute | Type | Required | Description |
+|:---------------|:---------------|:---------|:-----------------------------------------------------------------------------|
+| `id` | integer/string | yes | ID or [URL-encoded path of the group](README.md#namespaced-path-encoding). |
+
+Example request:
+
+```shell
+curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/groups/1/deploy_tokens"
+```
+
+Example response:
+
+```json
+[
+ {
+ "id": 1,
+ "name": "MyToken",
+ "username": "gitlab+deploy-token-1",
+ "expires_at": "2020-02-14T00:00:00.000Z",
+ "scopes": [
+ "read_repository",
+ "read_registry"
+ ]
+ }
+]
+```
+
+### Create a group deploy token
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/21811) in GitLab 12.9.
+
+Creates a new deploy token for a group.
+
+```plaintext
+POST /groups/:id/deploy_tokens
+```
+
+| Attribute | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `id` | integer/string | yes | The ID or [URL-encoded path of the group](README.md#namespaced-path-encoding) owned by the authenticated user |
+| `name` | string | yes | New deploy token's name |
+| `expires_at` | datetime | no | Expiration date for the deploy token. Does not expire if no value is provided. |
+| `username` | string | no | Username for deploy token. Default is `gitlab+deploy-token-{n}` |
+| `scopes` | array of strings | yes | Indicates the deploy token scopes. Must be at least one of `read_repository` or `read_registry`. |
+
+Example request:
+
+```shell
+curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" --header "Content-Type: application/json" --data '{"name": "My deploy token", "expires_at": "2021-01-01", "username": "custom-user", "scopes": ["read_repository"]}' "https://gitlab.example.com/api/v4/groups/5/deploy_tokens/"
+```
+
+Example response:
+
+```json
+{
+ "id": 1,
+ "name": "My deploy token",
+ "username": "custom-user",
+ "expires_at": "2021-01-01T00:00:00.000Z",
+ "token": "jMRvtPNxrn3crTAGukpZ",
+ "scopes": [
+ "read_registry"
+ ]
+}
+```
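+
+For illustration only (not part of the API): a token with the `read_registry` scope can typically be used to authenticate to the group's Container Registry. The registry host below is a placeholder:
+
+```shell
+docker login registry.gitlab.example.com -u custom-user -p jMRvtPNxrn3crTAGukpZ
+```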
+
+### Delete a group deploy token
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/21811) in GitLab 12.9.
+
+Removes a deploy token from the group.
+
+```plaintext
+DELETE /groups/:id/deploy_tokens/:token_id
+```
+
+| Attribute | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `id` | integer/string | yes | The ID or [URL-encoded path of the group](README.md#namespaced-path-encoding) owned by the authenticated user |
+| `token_id` | integer | yes | The ID of the deploy token |
+
+Example request:
+
+```shell
+curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/groups/5/deploy_tokens/13"
+```
diff --git a/doc/api/deployments.md b/doc/api/deployments.md
index 02c7e178aaf..f34c8026ac4 100644
--- a/doc/api/deployments.md
+++ b/doc/api/deployments.md
@@ -4,7 +4,7 @@
Get a list of deployments in a project.
-```
+```plaintext
GET /projects/:id/deployments
```
@@ -177,7 +177,7 @@ Example of response
## Get a specific deployment
-```
+```plaintext
GET /projects/:id/deployments/:deployment_id
```
@@ -265,7 +265,7 @@ Example of response
## Create a deployment
-```
+```plaintext
POST /projects/:id/deployments
```
@@ -319,7 +319,7 @@ Example of a response:
## Updating a deployment
-```
+```plaintext
PUT /projects/:id/deployments/:deployment_id
```
@@ -366,7 +366,7 @@ Example of a response:
This API retrieves the list of merge requests shipped with a given deployment:
-```
+```plaintext
GET /projects/:id/deployments/:deployment_id/merge_requests
```
diff --git a/doc/api/discussions.md b/doc/api/discussions.md
index 22d615eba28..936fcae0b72 100644
--- a/doc/api/discussions.md
+++ b/doc/api/discussions.md
@@ -22,7 +22,7 @@ Read more on [pagination](README.md#pagination).
Gets a list of all discussion items for a single issue.
-```
+```plaintext
GET /projects/:id/issues/:issue_iid/discussions
```
@@ -118,7 +118,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/ap
Returns a single discussion item for a specific project issue
-```
+```plaintext
GET /projects/:id/issues/:issue_iid/discussions/:discussion_id
```
@@ -138,7 +138,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/ap
Creates a new thread to a single project issue. This is similar to creating a note but other comments (replies) can be added to it later.
-```
+```plaintext
POST /projects/:id/issues/:issue_iid/discussions
```
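
A minimal sketch of a request, assuming the `body` parameter documented for this endpoint (project, issue, and comment text are placeholders):

```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/issues/11/discussions?body=comment"
```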
@@ -162,7 +162,7 @@ Adds a new note to the thread. This can also [create a thread from a single comm
**WARNING**
Notes can be added to items other than comments (system notes, etc.), making them threads.
-```
+```plaintext
POST /projects/:id/issues/:issue_iid/discussions/:discussion_id/notes
```
@@ -185,7 +185,7 @@ curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab
Modify existing thread note of an issue.
-```
+```plaintext
PUT /projects/:id/issues/:issue_iid/discussions/:discussion_id/notes/:note_id
```
@@ -207,7 +207,7 @@ curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.
Deletes an existing thread note of an issue.
-```
+```plaintext
DELETE /projects/:id/issues/:issue_iid/discussions/:discussion_id/notes/:note_id
```
@@ -230,7 +230,7 @@ curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" https://gitl
Gets a list of all discussion items for a single snippet.
-```
+```plaintext
GET /projects/:id/snippets/:snippet_id/discussions
```
@@ -326,7 +326,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/ap
Returns a single discussion item for a specific project snippet
-```
+```plaintext
GET /projects/:id/snippets/:snippet_id/discussions/:discussion_id
```
@@ -347,7 +347,7 @@ curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab
Creates a new thread to a single project snippet. This is similar to creating
a note but other comments (replies) can be added to it later.
-```
+```plaintext
POST /projects/:id/snippets/:snippet_id/discussions
```
@@ -368,7 +368,7 @@ curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab
Adds a new note to the thread.
-```
+```plaintext
POST /projects/:id/snippets/:snippet_id/discussions/:discussion_id/notes
```
@@ -391,7 +391,7 @@ curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab
Modify existing thread note of a snippet.
-```
+```plaintext
PUT /projects/:id/snippets/:snippet_id/discussions/:discussion_id/notes/:note_id
```
@@ -413,7 +413,7 @@ curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.
Deletes an existing thread note of a snippet.
-```
+```plaintext
DELETE /projects/:id/snippets/:snippet_id/discussions/:discussion_id/notes/:note_id
```
@@ -436,7 +436,7 @@ curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" https://gitl
Gets a list of all discussion items for a single epic.
-```
+```plaintext
GET /groups/:id/epics/:epic_id/discussions
```
@@ -533,7 +533,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/ap
Returns a single discussion item for a specific group epic
-```
+```plaintext
GET /groups/:id/epics/:epic_id/discussions/:discussion_id
```
@@ -554,7 +554,7 @@ curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab
Creates a new thread to a single group epic. This is similar to creating
a note but other comments (replies) can be added to it later.
-```
+```plaintext
POST /groups/:id/epics/:epic_id/discussions
```
@@ -576,7 +576,7 @@ curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab
Adds a new note to the thread. This can also
[create a thread from a single comment](../user/discussions/#start-a-thread-by-replying-to-a-standard-comment).
-```
+```plaintext
POST /groups/:id/epics/:epic_id/discussions/:discussion_id/notes
```
@@ -599,7 +599,7 @@ curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab
Modify existing thread note of an epic.
-```
+```plaintext
PUT /groups/:id/epics/:epic_id/discussions/:discussion_id/notes/:note_id
```
@@ -621,7 +621,7 @@ curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.
Deletes an existing thread note of an epic.
-```
+```plaintext
DELETE /groups/:id/epics/:epic_id/discussions/:discussion_id/notes/:note_id
```
@@ -644,7 +644,7 @@ curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" https://gitl
Gets a list of all discussion items for a single merge request.
-```
+```plaintext
GET /projects/:id/merge_requests/:merge_request_iid/discussions
```
@@ -793,7 +793,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/ap
Returns a single discussion item for a specific project merge request
-```
+```plaintext
GET /projects/:id/merge_requests/:merge_request_iid/discussions/:discussion_id
```
@@ -814,7 +814,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/ap
Creates a new thread to a single project merge request. This is similar to creating
a note but other comments (replies) can be added to it later.
-```
+```plaintext
POST /projects/:id/merge_requests/:merge_request_iid/discussions
```
@@ -848,7 +848,7 @@ curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab
Resolve/unresolve whole thread of a merge request.
-```
+```plaintext
PUT /projects/:id/merge_requests/:merge_request_iid/discussions/:discussion_id
```
@@ -870,7 +870,7 @@ curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.
Adds a new note to the thread. This can also
[create a thread from a single comment](../user/discussions/#start-a-thread-by-replying-to-a-standard-comment).
-```
+```plaintext
POST /projects/:id/merge_requests/:merge_request_iid/discussions/:discussion_id/notes
```
@@ -893,7 +893,7 @@ curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab
Modify or resolve an existing thread note of a merge request.
-```
+```plaintext
PUT /projects/:id/merge_requests/:merge_request_iid/discussions/:discussion_id/notes/:note_id
```
@@ -922,7 +922,7 @@ curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.
Deletes an existing thread note of a merge request.
-```
+```plaintext
DELETE /projects/:id/merge_requests/:merge_request_iid/discussions/:discussion_id/notes/:note_id
```
@@ -945,7 +945,7 @@ curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" https://gitl
Gets a list of all discussion items for a single commit.
-```
+```plaintext
GET /projects/:id/commits/:commit_id/discussions
```
@@ -1086,7 +1086,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/ap
Returns a single discussion item for a specific project commit
-```
+```plaintext
GET /projects/:id/commits/:commit_id/discussions/:discussion_id
```
@@ -1107,7 +1107,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/ap
Creates a new thread to a single project commit. This is similar to creating
a note but other comments (replies) can be added to it later.
-```
+```plaintext
POST /projects/:id/commits/:commit_id/discussions
```
@@ -1141,7 +1141,7 @@ curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab
Adds a new note to the thread.
-```
+```plaintext
POST /projects/:id/commits/:commit_id/discussions/:discussion_id/notes
```
@@ -1164,7 +1164,7 @@ curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab
Modify or resolve an existing thread note of a commit.
-```
+```plaintext
PUT /projects/:id/commits/:commit_id/discussions/:discussion_id/notes/:note_id
```
@@ -1192,7 +1192,7 @@ curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.
Deletes an existing thread note of a commit.
-```
+```plaintext
DELETE /projects/:id/commits/:commit_id/discussions/:discussion_id/notes/:note_id
```
diff --git a/doc/api/environments.md b/doc/api/environments.md
index 6817b904618..ffaff5f4f1e 100644
--- a/doc/api/environments.md
+++ b/doc/api/environments.md
@@ -4,7 +4,7 @@
Get all environments for a given project.
-```
+```plaintext
GET /projects/:id/environments
```
@@ -34,7 +34,7 @@ Example response:
## Get a specific environment
-```
+```plaintext
GET /projects/:id/environments/:environment_id
```
@@ -145,7 +145,7 @@ Creates a new environment with the given name and external_url.
It returns `201` if the environment was successfully created, `400` for wrong parameters.
-```
+```plaintext
POST /projects/:id/environments
```
@@ -177,7 +177,7 @@ Updates an existing environment's name and/or external_url.
It returns `200` if the environment was successfully updated. In case of an error, a status code `400` is returned.
-```
+```plaintext
PUT /projects/:id/environments/:environments_id
```
@@ -208,7 +208,7 @@ Example response:
It returns `204` if the environment was successfully deleted, and `404` if the environment does not exist.
-```
+```plaintext
DELETE /projects/:id/environments/:environment_id
```
@@ -225,7 +225,7 @@ curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" "https://git
It returns `200` if the environment was successfully stopped, and `404` if the environment does not exist.
-```
+```plaintext
POST /projects/:id/environments/:environment_id/stop
```
diff --git a/doc/api/epic_issues.md b/doc/api/epic_issues.md
index cea6da43add..b001749ff5b 100644
--- a/doc/api/epic_issues.md
+++ b/doc/api/epic_issues.md
@@ -8,16 +8,16 @@ Epics are available only in Ultimate. If epics feature is not available a `403`
## List issues for an epic
-Gets all issues that are assigned to an epic and the authenticated user has access to.
+Gets all issues that are assigned to an epic and that the authenticated user has access to.
-```
+```plaintext
GET /groups/:id/epics/:epic_iid/issues
```
| Attribute | Type | Required | Description |
| ------------------- | ---------------- | ---------- | ---------------------------------------------------------------------------------------|
| `id` | integer/string | yes | The ID or [URL-encoded path of the group](README.md#namespaced-path-encoding) owned by the authenticated user |
-| `epic_iid` | integer/string | yes | The internal ID of the epic. |
+| `epic_iid` | integer/string | yes | The internal ID of the epic. |
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/groups/1/epics/5/issues/
@@ -106,15 +106,15 @@ Example response:
Creates an epic - issue association. If the issue in question belongs to another epic it is unassigned from that epic.
-```
+```plaintext
POST /groups/:id/epics/:epic_iid/issues/:issue_id
```
| Attribute | Type | Required | Description |
| ------------------- | ---------------- | ---------- | ---------------------------------------------------------------------------------------|
| `id` | integer/string | yes | The ID or [URL-encoded path of the group](README.md#namespaced-path-encoding) owned by the authenticated user |
-| `epic_iid` | integer/string | yes | The internal ID of the epic. |
-| `issue_id` | integer/string | yes | The ID of the issue. |
+| `epic_iid` | integer/string | yes | The internal ID of the epic. |
+| `issue_id` | integer/string | yes | The ID of the issue. |
```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/groups/1/epics/5/issues/55
@@ -212,15 +212,15 @@ Example response:
Removes an epic - issue association.
-```
+```plaintext
DELETE /groups/:id/epics/:epic_iid/issues/:epic_issue_id
```
| Attribute | Type | Required | Description |
| ------------------- | ---------------- | ---------- | -----------------------------------------------------------------------------------------------------|
| `id` | integer/string | yes | The ID or [URL-encoded path of the group](README.md#namespaced-path-encoding) owned by the authenticated user |
-| `epic_iid` | integer/string | yes | The internal ID of the epic. |
-| `epic_issue_id` | integer/string | yes | The ID of the issue - epic association. |
+| `epic_iid` | integer/string | yes | The internal ID of the epic. |
+| `epic_issue_id` | integer/string | yes | The ID of the issue - epic association. |
```shell
curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/groups/1/epics/5/issues/11
@@ -318,14 +318,14 @@ Example response:
Updates an epic - issue association.
-```
+```plaintext
PUT /groups/:id/epics/:epic_iid/issues/:epic_issue_id
```
| Attribute | Type | Required | Description |
| ------------------- | ---------------- | ---------- | -----------------------------------------------------------------------------------------------------|
| `id` | integer/string | yes | The ID or [URL-encoded path of the group](README.md#namespaced-path-encoding) owned by the authenticated user |
-| `epic_iid` | integer/string | yes | The internal ID of the epic. |
+| `epic_iid` | integer/string | yes | The internal ID of the epic. |
| `epic_issue_id` | integer/string | yes | The ID of the issue - epic association. |
| `move_before_id` | integer/string | no | The ID of the issue - epic association that should be placed before the link in the question. |
| `move_after_id` | integer/string | no | The ID of the issue - epic association that should be placed after the link in the question. |
diff --git a/doc/api/epic_links.md b/doc/api/epic_links.md
index 0e2fb2653c4..5df91e106eb 100644
--- a/doc/api/epic_links.md
+++ b/doc/api/epic_links.md
@@ -15,7 +15,7 @@ Epics are available only in the [Ultimate/Gold tier](https://about.gitlab.com/pr
Gets all child epics of an epic.
-```
+```plaintext
GET /groups/:id/epics/:epic_iid/epics
```
@@ -69,7 +69,7 @@ Example response:
Creates an association between two epics, designating one as the parent epic and the other as the child epic. A parent epic can have multiple child epics. If the new child epic already belonged to another epic, it is unassigned from that previous parent.
-```
+```plaintext
POST /groups/:id/epics/:epic_iid/epics
```
@@ -122,7 +122,7 @@ Example response:
Creates a new epic and associates it with the provided parent epic. The response is a LinkedEpic object.
-```
+```plaintext
POST /groups/:id/epics/:epic_iid/epics
```
@@ -155,7 +155,7 @@ Example response:
## Re-order a child epic
-```
+```plaintext
PUT /groups/:id/epics/:epic_iid/epics/:child_epic_id
```
@@ -212,7 +212,7 @@ Example response:
Unassigns a child epic from a parent epic.
-```
+```plaintext
DELETE /groups/:id/epics/:epic_iid/epics/:child_epic_id
```
diff --git a/doc/api/epics.md b/doc/api/epics.md
index b8eb1ab9f9a..0a99ac6262b 100644
--- a/doc/api/epics.md
+++ b/doc/api/epics.md
@@ -41,7 +41,7 @@ NOTE: **Note**
Gets all epics of the requested group and its subgroups.
-```
+```plaintext
GET /groups/:id/epics
GET /groups/:id/epics?author_id=5
GET /groups/:id/epics?labels=bug,reproduced
@@ -53,7 +53,7 @@ GET /groups/:id/epics?state=opened
| `id` | integer/string | yes | The ID or [URL-encoded path of the group](README.md#namespaced-path-encoding) owned by the authenticated user |
| `author_id` | integer | no | Return epics created by the given user `id` |
| `labels` | string | no | Return epics matching a comma separated list of labels names. Label names from the epic group or a parent group can be used |
-| `with_labels_details` | Boolean | no | If `true`, response will return more details for each label in labels field: `:name`, `:color`, `:description`, `:description_html`, `:text_color`. Default is `false`. Introduced in [GitLab 12.7](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/21413)|
+| `with_labels_details` | boolean | no | If `true`, response will return more details for each label in labels field: `:name`, `:color`, `:description`, `:description_html`, `:text_color`. Default is `false`. Introduced in [GitLab 12.7](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/21413)|
| `order_by` | string | no | Return epics ordered by `created_at` or `updated_at` fields. Default is `created_at` |
| `sort` | string | no | Return epics sorted in `asc` or `desc` order. Default is `desc` |
| `search` | string | no | Search epics against their `title` and `description` |
@@ -160,14 +160,14 @@ Example response:
Gets a single epic
-```
+```plaintext
GET /groups/:id/epics/:epic_iid
```
| Attribute | Type | Required | Description |
| ------------------- | ---------------- | ---------- | ---------------------------------------------------------------------------------------|
| `id` | integer/string | yes | The ID or [URL-encoded path of the group](README.md#namespaced-path-encoding) owned by the authenticated user |
-| `epic_iid` | integer/string | yes | The internal ID of the epic. |
+| `epic_iid` | integer/string | yes | The internal ID of the epic. |
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/groups/1/epics/5
@@ -228,7 +228,7 @@ Starting with GitLab [11.3][ee-6448], `start_date` and `end_date` should no long
directly, as they now represent composite values. You can configure it via the `*_is_fixed` and
`*_fixed` fields instead.
-```
+```plaintext
POST /groups/:id/epics
```
@@ -302,14 +302,14 @@ Starting with GitLab [11.3][ee-6448], `start_date` and `end_date` should no long
directly, as they now represent composite values. You can configure it via the `*_is_fixed` and
`*_fixed` fields instead.
-```
+```plaintext
PUT /groups/:id/epics/:epic_iid
```
| Attribute | Type | Required | Description |
| ------------------- | ---------------- | ---------- | ---------------------------------------------------------------------------------------|
| `id` | integer/string | yes | The ID or [URL-encoded path of the group](README.md#namespaced-path-encoding) owned by the authenticated user |
-| `epic_iid` | integer/string | yes | The internal ID of the epic |
+| `epic_iid` | integer/string | yes | The internal ID of the epic |
| `title` | string | no | The title of an epic |
| `description` | string | no | The description of an epic. Limited to 1,048,576 characters. |
| `labels` | string | no | The comma separated list of labels |
@@ -372,14 +372,14 @@ Example response:
Deletes an epic
-```
+```plaintext
DELETE /groups/:id/epics/:epic_iid
```
| Attribute | Type | Required | Description |
| ------------------- | ---------------- | ---------- | ---------------------------------------------------------------------------------------|
| `id` | integer/string | yes | The ID or [URL-encoded path of the group](README.md#namespaced-path-encoding) owned by the authenticated user |
-| `epic_iid` | integer/string | yes | The internal ID of the epic. |
+| `epic_iid` | integer/string | yes | The internal ID of the epic. |
```shell
curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/groups/1/epics/5
@@ -391,7 +391,7 @@ Manually creates a todo for the current user on an epic. If
there already exists a todo for the user on that epic, status code `304` is
returned.
-```
+```plaintext
POST /groups/:id/epics/:epic_iid/todo
```
diff --git a/doc/api/error_tracking.md b/doc/api/error_tracking.md
index e20b74d764b..f44266aa552 100644
--- a/doc/api/error_tracking.md
+++ b/doc/api/error_tracking.md
@@ -8,7 +8,7 @@ The project settings API allows you to retrieve the Error Tracking settings for
### Get Error Tracking settings
-```
+```plaintext
GET /projects/:id/error_tracking/settings
```
@@ -35,7 +35,7 @@ Example response:
The API allows you to enable or disable the Error Tracking settings for a project. Only for project maintainers.
-```
+```plaintext
PATCH /projects/:id/error_tracking/settings
```
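
As an illustrative sketch, assuming the `active` parameter this endpoint accepts (project ID and value are placeholders):

```shell
curl --request PATCH --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/error_tracking/settings?active=true"
```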
diff --git a/doc/api/events.md b/doc/api/events.md
index f3963bcf970..431e96b2804 100644
--- a/doc/api/events.md
+++ b/doc/api/events.md
@@ -38,7 +38,7 @@ Note that these options are downcased.
Dates for the `before` and `after` parameters should be supplied in the following format:
-```
+```plaintext
YYYY-MM-DD
```
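
For example, the `after` and `before` parameters might be combined into a request like the following (dates are placeholders):

```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/events?after=2017-01-31&before=2017-03-01"
```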
@@ -54,7 +54,7 @@ GitLab removes events older than 2 years from the events table for performance r
Get a list of events for the authenticated user. Scope `read_user` or `api` is required.
-```
+```plaintext
GET /events
```
@@ -128,7 +128,7 @@ Example response:
Get the contribution events for the specified user, sorted from newest to oldest. Scope `read_user` or `api` is required.
-```
+```plaintext
GET /users/:id/events
```
@@ -259,7 +259,7 @@ Example response:
Get a list of visible events for a particular project.
-```
+```plaintext
GET /projects/:project_id/events
```
diff --git a/doc/api/feature_flag_specs.md b/doc/api/feature_flag_specs.md
index e41bbea0e66..442d2c2c2d7 100644
--- a/doc/api/feature_flag_specs.md
+++ b/doc/api/feature_flag_specs.md
@@ -16,7 +16,7 @@ For instance, there are two specs, `staging` and `production`, for a feature fla
When you pass `production` as a parameter to this endpoint, the system returns
the `production` feature flag spec only.
-```
+```plaintext
GET /projects/:id/feature_flag_scopes
```
@@ -84,7 +84,7 @@ Example response:
Get all specs of a feature flag.
-```
+```plaintext
GET /projects/:id/feature_flags/:name/scopes
```
@@ -147,7 +147,7 @@ Example response:
Creates a new feature flag spec.
-```
+```plaintext
POST /projects/:id/feature_flags/:name/scopes
```
@@ -194,7 +194,7 @@ Example response:
Gets a single feature flag spec.
-```
+```plaintext
GET /projects/:id/feature_flags/:name/scopes/:environment_scope
```
@@ -230,7 +230,7 @@ Example response:
Updates an existing feature flag spec.
-```
+```plaintext
PUT /projects/:id/feature_flags/:name/scopes/:environment_scope
```
@@ -276,7 +276,7 @@ Example response:
Deletes a feature flag spec.
-```
+```plaintext
DELETE /projects/:id/feature_flags/:name/scopes/:environment_scope
```
diff --git a/doc/api/feature_flags.md b/doc/api/feature_flags.md
index 384708be5df..f95eb31c84c 100644
--- a/doc/api/feature_flags.md
+++ b/doc/api/feature_flags.md
@@ -15,7 +15,7 @@ are [paginated](README.md#pagination).
Gets all feature flags of the requested project.
-```
+```plaintext
GET /projects/:id/feature_flags
```
@@ -145,7 +145,7 @@ Example response:
Creates a new feature flag.
-```
+```plaintext
POST /projects/:id/feature_flags
```
@@ -219,7 +219,7 @@ Example response:
Gets a single feature flag.
-```
+```plaintext
GET /projects/:id/feature_flags/:name
```
@@ -294,7 +294,7 @@ Example response:
Deletes a feature flag.
-```
+```plaintext
DELETE /projects/:id/feature_flags/:name
```
diff --git a/doc/api/features.md b/doc/api/features.md
index d3c585f67c9..a43f2daa93f 100644
--- a/doc/api/features.md
+++ b/doc/api/features.md
@@ -9,7 +9,7 @@ values.
Get a list of all persisted features, with its gate values.
-```
+```plaintext
GET /features
```
@@ -50,7 +50,7 @@ Set a feature's gate value. If a feature with the given name doesn't exist yet
it will be created. The value can be a boolean, or an integer to indicate
percentage of time.
-```
+```plaintext
POST /features/:name
```
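
A minimal sketch of setting a gate value (the feature name and percentage are placeholders):

```shell
curl --request POST --data "value=30" --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/features/new_library"
```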
@@ -93,6 +93,6 @@ Example response:
Removes a feature gate. Response is equal when the gate exists, or doesn't.
-```
+```plaintext
DELETE /features/:name
```
diff --git a/doc/api/geo_nodes.md b/doc/api/geo_nodes.md
index 83a00cf3f10..baaa2e2f09f 100644
--- a/doc/api/geo_nodes.md
+++ b/doc/api/geo_nodes.md
@@ -7,7 +7,7 @@ as an admin.
Creates a new Geo node.
-```
+```plaintext
POST /geo_nodes
```
@@ -68,7 +68,7 @@ Example response:
## Retrieve configuration about all Geo nodes
-```
+```plaintext
GET /geo_nodes
```
@@ -135,7 +135,7 @@ Example response:
## Retrieve configuration about a specific Geo node
-```
+```plaintext
GET /geo_nodes/:id
```
@@ -178,7 +178,7 @@ Updates settings of an existing Geo node.
_This can only be run against a primary Geo node._
-```
+```plaintext
PUT /geo_nodes/:id
```
@@ -237,7 +237,7 @@ Removes the Geo node.
NOTE: **Note:**
Only a Geo primary node will accept this request.
-```
+```plaintext
DELETE /geo_nodes/:id
```
@@ -251,7 +251,7 @@ To repair the OAuth authentication of a Geo node.
_This can only be run against a primary Geo node._
-```
+```plaintext
POST /geo_nodes/:id/repair
```
@@ -282,7 +282,7 @@ Example response:
## Retrieve status about all Geo nodes
-```
+```plaintext
GET /geo_nodes/status
```
@@ -441,7 +441,7 @@ In GitLab 12.0, deprecated fields `wikis_count` and `repositories_count` were re
## Retrieve status about a specific Geo node
-```
+```plaintext
GET /geo_nodes/:id/status
```
@@ -512,7 +512,7 @@ In GitLab 12.0, deprecated fields `wikis_count` and `repositories_count` were re
This only works on a secondary node.
-```
+```plaintext
GET /geo_nodes/current/failures
```
diff --git a/doc/api/graphql/reference/gitlab_schema.graphql b/doc/api/graphql/reference/gitlab_schema.graphql
index 605eba981aa..e3f988016fe 100644
--- a/doc/api/graphql/reference/gitlab_schema.graphql
+++ b/doc/api/graphql/reference/gitlab_schema.graphql
@@ -39,6 +39,66 @@ type AddAwardEmojiPayload {
}
"""
+Autogenerated input type of AdminSidekiqQueuesDeleteJobs
+"""
+input AdminSidekiqQueuesDeleteJobsInput {
+ """
+ Delete jobs matching caller_id in the context metadata
+ """
+ callerId: String
+
+ """
+ A unique identifier for the client performing the mutation.
+ """
+ clientMutationId: String
+
+ """
+ Delete jobs matching project in the context metadata
+ """
+ project: String
+
+ """
+ The name of the queue to delete jobs from
+ """
+ queueName: String!
+
+ """
+ Delete jobs matching root_namespace in the context metadata
+ """
+ rootNamespace: String
+
+ """
+ Delete jobs matching subscription_plan in the context metadata
+ """
+ subscriptionPlan: String
+
+ """
+ Delete jobs matching user in the context metadata
+ """
+ user: String
+}
+
+"""
+Autogenerated return type of AdminSidekiqQueuesDeleteJobs
+"""
+type AdminSidekiqQueuesDeleteJobsPayload {
+ """
+ A unique identifier for the client performing the mutation.
+ """
+ clientMutationId: String
+
+ """
+ Reasons why the mutation failed.
+ """
+ errors: [String!]!
+
+ """
+ Information about the status of the deletion request
+ """
+ result: DeleteJobsResponse
+}
+
+"""
An emoji awarded by a user.
"""
type AwardEmoji {
@@ -159,6 +219,61 @@ enum BlobViewersType {
simple
}
+"""
+Represents a project or group board
+"""
+type Board {
+ """
+ ID (global ID) of the board
+ """
+ id: ID!
+
+ """
+ Name of the board
+ """
+ name: String
+
+ """
+ Weight of the board
+ """
+ weight: Int
+}
+
+"""
+The connection type for Board.
+"""
+type BoardConnection {
+ """
+ A list of edges.
+ """
+ edges: [BoardEdge]
+
+ """
+ A list of nodes.
+ """
+ nodes: [Board]
+
+ """
+ Information to aid in pagination.
+ """
+ pageInfo: PageInfo!
+}
+
+"""
+An edge in a connection.
+"""
+type BoardEdge {
+ """
+ A cursor for use in pagination.
+ """
+ cursor: String!
+
+ """
+ The item at the end of the edge.
+ """
+ node: Board
+}
+
type Commit {
"""
Author of the commit
@@ -487,6 +602,46 @@ type CreateNotePayload {
}
"""
+Autogenerated input type of CreateRequirement
+"""
+input CreateRequirementInput {
+ """
+ A unique identifier for the client performing the mutation.
+ """
+ clientMutationId: String
+
+ """
+ The project full path the requirement is associated with
+ """
+ projectPath: ID!
+
+ """
+ Title of the requirement
+ """
+ title: String!
+}
+
+"""
+Autogenerated return type of CreateRequirement
+"""
+type CreateRequirementPayload {
+ """
+ A unique identifier for the client performing the mutation.
+ """
+ clientMutationId: String
+
+ """
+ Reasons why the mutation failed.
+ """
+ errors: [String!]!
+
+ """
+ The requirement after mutation
+ """
+ requirement: Requirement
+}
+
+"""
Autogenerated input type of CreateSnippet
"""
input CreateSnippetInput {
@@ -547,6 +702,26 @@ type CreateSnippetPayload {
}
"""
+The response from the AdminSidekiqQueuesDeleteJobs mutation.
+"""
+type DeleteJobsResponse {
+ """
+ Whether or not the entire queue was processed in time; if not, retrying the same request is safe
+ """
+ completed: Boolean
+
+ """
+ The number of matching jobs deleted
+ """
+ deletedJobs: Int
+
+ """
+ The queue size after processing
+ """
+ queueSize: Int
+}
+
+"""
A single design
"""
type Design implements DesignFields & Noteable {
@@ -1756,6 +1931,11 @@ type Epic implements Noteable {
iid: ID
"""
+ Filter epics by iid for autocomplete
+ """
+ iidStartsWith: String
+
+ """
List of IIDs of epics, e.g., [1, 2]
"""
iids: [ID!]
@@ -1808,6 +1988,11 @@ type Epic implements Noteable {
descendantCounts: EpicDescendantCount
"""
+ Total weight of open and closed issues in the epic and its descendants
+ """
+ descendantWeightSum: EpicDescendantWeights
+
+ """
Description of the epic
"""
description: String
@@ -1878,9 +2063,9 @@ type Epic implements Noteable {
hasIssues: Boolean!
"""
- Current health status. Available only when feature flag save_issuable_health_status is enabled.
+ Current health status of the epic
"""
- healthStatus: HealthStatus
+ healthStatus: EpicHealthStatus
"""
ID of the epic
@@ -2079,6 +2264,61 @@ type Epic implements Noteable {
}
"""
+Autogenerated input type of EpicAddIssue
+"""
+input EpicAddIssueInput {
+ """
+ A unique identifier for the client performing the mutation.
+ """
+ clientMutationId: String
+
+ """
+ The group the epic to mutate belongs to
+ """
+ groupPath: ID!
+
+ """
+ The iid of the epic to mutate
+ """
+ iid: ID!
+
+ """
+ The iid of the issue to be added
+ """
+ issueIid: String!
+
+ """
+ The project the issue belongs to
+ """
+ projectPath: ID!
+}
+
+"""
+Autogenerated return type of EpicAddIssue
+"""
+type EpicAddIssuePayload {
+ """
+ A unique identifier for the client performing the mutation.
+ """
+ clientMutationId: String
+
+ """
+ The epic after mutation
+ """
+ epic: Epic
+
+ """
+ The epic-issue relation
+ """
+ epicIssue: EpicIssue
+
+ """
+ Reasons why the mutation failed.
+ """
+ errors: [String!]!
+}
+
+"""
The connection type for Epic.
"""
type EpicConnection {
@@ -2124,6 +2364,21 @@ type EpicDescendantCount {
}
"""
+Total weight of open and closed descendant issues
+"""
+type EpicDescendantWeights {
+ """
+ Total weight of completed (closed) issues in this epic, including epic descendants
+ """
+ closedIssues: Int
+
+ """
+ Total weight of opened issues in this epic, including epic descendants
+ """
+ openedIssues: Int
+}
+
+"""
An edge in a connection.
"""
type EpicEdge {
@@ -2139,6 +2394,26 @@ type EpicEdge {
}
"""
+Health status of child issues
+"""
+type EpicHealthStatus {
+ """
+ Number of issues at risk
+ """
+ issuesAtRisk: Int
+
+ """
+ Number of issues that need attention
+ """
+ issuesNeedingAttention: Int
+
+ """
+ Number of issues on track
+ """
+ issuesOnTrack: Int
+}
+
+"""
Relationship between an epic and an issue
"""
type EpicIssue implements Noteable {
@@ -2258,7 +2533,7 @@ type EpicIssue implements Noteable {
epicIssueId: ID!
"""
- Current health status. Available only when feature flag save_issuable_health_status is enabled.
+ Current health status. Available only when feature flag `save_issuable_health_status` is enabled.
"""
healthStatus: HealthStatus
@@ -2533,12 +2808,12 @@ input EpicSetSubscriptionInput {
clientMutationId: String
"""
- The group the epic to (un)subscribe is in
+ The group the epic to mutate belongs to
"""
groupPath: ID!
"""
- The iid of the epic to (un)subscribe
+ The iid of the epic to mutate
"""
iid: ID!
@@ -2716,6 +2991,46 @@ type Group {
avatarUrl: String
"""
+ A single board of the group
+ """
+ board(
+ """
+ Find a board by its ID
+ """
+ id: ID
+ ): Board
+
+ """
+ Boards of the group
+ """
+ boards(
+ """
+ Returns the elements in the list that come after the specified cursor.
+ """
+ after: String
+
+ """
+ Returns the elements in the list that come before the specified cursor.
+ """
+ before: String
+
+ """
+ Returns the first _n_ elements from the list.
+ """
+ first: Int
+
+ """
+ Find a board by its ID
+ """
+ id: ID
+
+ """
+ Returns the last _n_ elements from the list.
+ """
+ last: Int
+ ): BoardConnection
+
+ """
Description of the namespace
"""
description: String
@@ -2751,6 +3066,11 @@ type Group {
iid: ID
"""
+ Filter epics by iid for autocomplete
+ """
+ iidStartsWith: String
+
+ """
List of IIDs of epics, e.g., [1, 2]
"""
iids: [ID!]
@@ -2818,6 +3138,11 @@ type Group {
iid: ID
"""
+ Filter epics by iid for autocomplete
+ """
+ iidStartsWith: String
+
+ """
List of IIDs of epics, e.g., [1, 2]
"""
iids: [ID!]
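For example, the new iidStartsWith argument supports IID autocomplete on the epic lists; the group path and prefix are hypothetical values:

# Sketch only: find epics whose IID starts with "1"
query {
  group(fullPath: "my-group") {
    epics(iidStartsWith: "1") {
      nodes {
        iid
      }
    }
  }
}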
@@ -3199,7 +3524,7 @@ type Issue implements Noteable {
epic: Epic
"""
- Current health status. Available only when feature flag save_issuable_health_status is enabled.
+ Current health status. Available only when feature flag `save_issuable_health_status` is enabled.
"""
healthStatus: HealthStatus
@@ -3648,6 +3973,11 @@ enum IssueState {
opened
}
+"""
+Represents untyped JSON
+"""
+scalar JSON
+
type Label {
"""
Background color of the label
@@ -4640,31 +4970,34 @@ enum MilestoneStateEnum {
}
"""
-The position the adjacent object should be moved.
+The position to which the adjacent object should be moved
"""
enum MoveType {
"""
- The adjacent object will be moved after the object that is being moved.
+ The adjacent object will be moved after the object that is being moved
"""
after
"""
- The adjacent object will be moved before the object that is being moved.
+ The adjacent object will be moved before the object that is being moved
"""
before
}
type Mutation {
addAwardEmoji(input: AddAwardEmojiInput!): AddAwardEmojiPayload
+ adminSidekiqQueuesDeleteJobs(input: AdminSidekiqQueuesDeleteJobsInput!): AdminSidekiqQueuesDeleteJobsPayload
createDiffNote(input: CreateDiffNoteInput!): CreateDiffNotePayload
createEpic(input: CreateEpicInput!): CreateEpicPayload
createImageDiffNote(input: CreateImageDiffNoteInput!): CreateImageDiffNotePayload
createNote(input: CreateNoteInput!): CreateNotePayload
+ createRequirement(input: CreateRequirementInput!): CreateRequirementPayload
createSnippet(input: CreateSnippetInput!): CreateSnippetPayload
designManagementDelete(input: DesignManagementDeleteInput!): DesignManagementDeletePayload
designManagementUpload(input: DesignManagementUploadInput!): DesignManagementUploadPayload
destroyNote(input: DestroyNoteInput!): DestroyNotePayload
destroySnippet(input: DestroySnippetInput!): DestroySnippetPayload
+ epicAddIssue(input: EpicAddIssueInput!): EpicAddIssuePayload
epicSetSubscription(input: EpicSetSubscriptionInput!): EpicSetSubscriptionPayload
epicTreeReorder(input: EpicTreeReorderInput!): EpicTreeReorderPayload
issueSetConfidential(input: IssueSetConfidentialInput!): IssueSetConfidentialPayload
@@ -5175,6 +5508,46 @@ type Project {
avatarUrl: String
"""
+ A single board of the project
+ """
+ board(
+ """
+ Find a board by its ID
+ """
+ id: ID
+ ): Board
+
+ """
+ Boards of the project
+ """
+ boards(
+ """
+ Returns the elements in the list that come after the specified cursor.
+ """
+ after: String
+
+ """
+ Returns the elements in the list that come before the specified cursor.
+ """
+ before: String
+
+ """
+ Returns the first _n_ elements from the list.
+ """
+ first: Int
+
+ """
+ Find a board by its ID
+ """
+ id: ID
+
+ """
+ Returns the last _n_ elements from the list.
+ """
+ last: Int
+ ): BoardConnection
+
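A query sketch for the new board connection (the same board and boards fields are also added to Group above); the project path is a hypothetical value:

# Sketch only: list the first few boards of a project
query {
  project(fullPath: "my-group/my-project") {
    boards(first: 5) {
      nodes {
        id
        name
        weight
      }
    }
  }
}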
+ """
Indicates if the project stores Docker container images in a container registry
"""
containerRegistryEnabled: Boolean
@@ -5269,6 +5642,16 @@ type Project {
"""
issue(
"""
+ ID of a user assigned to the issues, "none" and "any" values supported
+ """
+ assigneeId: String
+
+ """
+ Username of a user assigned to the issues
+ """
+ assigneeUsername: String
+
+ """
Issues closed after this date
"""
closedAfter: Time
@@ -5304,6 +5687,11 @@ type Project {
labelName: [String]
"""
+ Milestones applied to this issue
+ """
+ milestoneTitle: [String]
+
+ """
Search query for finding issues by title or description
"""
search: String
@@ -5339,6 +5727,16 @@ type Project {
after: String
"""
+ ID of a user assigned to the issues, "none" and "any" values supported
+ """
+ assigneeId: String
+
+ """
+ Username of a user assigned to the issues
+ """
+ assigneeUsername: String
+
+ """
Returns the elements in the list that come before the specified cursor.
"""
before: String
@@ -5389,6 +5787,11 @@ type Project {
last: Int
"""
+ Milestones applied to this issue
+ """
+ milestoneTitle: [String]
+
+ """
Search query for finding issues by title or description
"""
search: String
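A sketch using the new assigneeUsername and milestoneTitle issue filters, with hypothetical project path, username, and milestone values:

# Sketch only: issues assigned to one user in a given milestone
query {
  project(fullPath: "my-group/my-project") {
    issues(assigneeUsername: "some-user", milestoneTitle: ["12.9"]) {
      nodes {
        iid
        title
      }
    }
  }
}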
@@ -5598,6 +6001,76 @@ type Project {
requestAccessEnabled: Boolean
"""
+ Find a single requirement. Available only when feature flag `requirements_management` is enabled.
+ """
+ requirement(
+ """
+ IID of the requirement, e.g., "1"
+ """
+ iid: ID
+
+ """
+ List of IIDs of requirements, e.g., [1, 2]
+ """
+ iids: [ID!]
+
+ """
+ List requirements by sort order
+ """
+ sort: Sort
+
+ """
+ Filter requirements by state
+ """
+ state: RequirementState
+ ): Requirement
+
+ """
+ Find requirements. Available only when feature flag `requirements_management` is enabled.
+ """
+ requirements(
+ """
+ Returns the elements in the list that come after the specified cursor.
+ """
+ after: String
+
+ """
+ Returns the elements in the list that come before the specified cursor.
+ """
+ before: String
+
+ """
+ Returns the first _n_ elements from the list.
+ """
+ first: Int
+
+ """
+ IID of the requirement, e.g., "1"
+ """
+ iid: ID
+
+ """
+ List of IIDs of requirements, e.g., [1, 2]
+ """
+ iids: [ID!]
+
+ """
+ Returns the last _n_ elements from the list.
+ """
+ last: Int
+
+ """
+ List requirements by sort order
+ """
+ sort: Sort
+
+ """
+ Filter requirements by state
+ """
+ state: RequirementState
+ ): RequirementConnection
+
+ """
Detailed version of a Sentry error on the project
"""
sentryDetailedError(
@@ -5703,6 +6176,31 @@ type Project {
visibility: String
"""
+ Vulnerabilities reported on the project. Available only when feature flag `first_class_vulnerabilities` is enabled.
+ """
+ vulnerabilities(
+ """
+ Returns the elements in the list that come after the specified cursor.
+ """
+ after: String
+
+ """
+ Returns the elements in the list that come before the specified cursor.
+ """
+ before: String
+
+ """
+ Returns the first _n_ elements from the list.
+ """
+ first: Int
+
+ """
+ Returns the last _n_ elements from the list.
+ """
+ last: Int
+ ): VulnerabilityConnection
+
+ """
Web URL of the project
"""
webUrl: String
@@ -6186,6 +6684,129 @@ type Repository {
): Tree
}
+"""
+Represents a requirement.
+"""
+type Requirement {
+ """
+ Author of the requirement
+ """
+ author: User!
+
+ """
+ Timestamp of when the requirement was created
+ """
+ createdAt: Time!
+
+ """
+ ID of the requirement
+ """
+ id: ID!
+
+ """
+ Internal ID of the requirement
+ """
+ iid: ID!
+
+ """
+ Project to which the requirement belongs
+ """
+ project: Project!
+
+ """
+ State of the requirement
+ """
+ state: RequirementState!
+
+ """
+ Title of the requirement
+ """
+ title: String
+
+ """
+ Timestamp of when the requirement was last updated
+ """
+ updatedAt: Time!
+
+ """
+ Permissions for the current user on the resource
+ """
+ userPermissions: RequirementPermissions!
+}
+
+"""
+The connection type for Requirement.
+"""
+type RequirementConnection {
+ """
+ A list of edges.
+ """
+ edges: [RequirementEdge]
+
+ """
+ A list of nodes.
+ """
+ nodes: [Requirement]
+
+ """
+ Information to aid in pagination.
+ """
+ pageInfo: PageInfo!
+}
+
+"""
+An edge in a connection.
+"""
+type RequirementEdge {
+ """
+ A cursor for use in pagination.
+ """
+ cursor: String!
+
+ """
+ The item at the end of the edge.
+ """
+ node: Requirement
+}
+
+"""
+Check permissions for the current user on a requirement
+"""
+type RequirementPermissions {
+ """
+ Indicates the user can perform `admin_requirement` on this resource
+ """
+ adminRequirement: Boolean!
+
+ """
+ Indicates the user can perform `create_requirement` on this resource
+ """
+ createRequirement: Boolean!
+
+ """
+ Indicates the user can perform `destroy_requirement` on this resource
+ """
+ destroyRequirement: Boolean!
+
+ """
+ Indicates the user can perform `read_requirement` on this resource
+ """
+ readRequirement: Boolean!
+
+ """
+ Indicates the user can perform `update_requirement` on this resource
+ """
+ updateRequirement: Boolean!
+}
+
+"""
+State of a requirement
+"""
+enum RequirementState {
+ ARCHIVED
+ OPENED
+}
+
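A query sketch for the new requirements connection, which per the field descriptions above is available only when the requirements_management feature flag is enabled; the project path is a hypothetical value:

# Sketch only: newest open requirements of a project
query {
  project(fullPath: "my-group/my-project") {
    requirements(first: 10, state: OPENED, sort: created_desc) {
      nodes {
        iid
        title
        state
      }
    }
  }
}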
type RootStorageStatistics {
"""
The CI artifacts size in bytes
@@ -6947,6 +7568,31 @@ type SnippetPermissions {
updateSnippet: Boolean!
}
+"""
+Common sort values
+"""
+enum Sort {
+ """
+ Created at ascending order
+ """
+ created_asc
+
+ """
+ Created at descending order
+ """
+ created_desc
+
+ """
+ Updated at ascending order
+ """
+ updated_asc
+
+ """
+ Updated at descending order
+ """
+ updated_desc
+}
+
type Submodule implements Entry {
"""
Flat path of the entry
@@ -7627,14 +8273,9 @@ input UpdateEpicInput {
groupPath: ID!
"""
- The desired health status
- """
- healthStatus: HealthStatus
-
- """
The iid of the epic to mutate
"""
- iid: String!
+ iid: ID!
"""
The IDs of labels to be removed from the epic.
@@ -7737,6 +8378,21 @@ input UpdateIssueInput {
clientMutationId: String
"""
+ Indicates the issue is confidential
+ """
+ confidential: Boolean
+
+ """
+ Description of the issue
+ """
+ description: String
+
+ """
+ Due date of the issue
+ """
+ dueDate: Time
+
+ """
The desired health status
"""
healthStatus: HealthStatus
@@ -7750,6 +8406,11 @@ input UpdateIssueInput {
The project the issue to mutate is in
"""
projectPath: ID!
+
+ """
+ Title of the issue
+ """
+ title: String
}
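A mutation sketch for the extended UpdateIssueInput. The updateIssue mutation name, the iid argument, and the errors selection are assumptions based on the naming conventions of the other mutations and payloads; they are not shown in this hunk. The path, IID, and field values are hypothetical:

# Sketch only: assumes the conventional updateIssue(input: UpdateIssueInput!) mutation field
mutation {
  updateIssue(
    input: {
      projectPath: "my-group/my-project"
      iid: "7"
      title: "New title"
      confidential: true
    }
  ) {
    errors
  }
}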
"""
@@ -8048,4 +8709,117 @@ enum VisibilityScopesEnum {
internal
private
public
+}
+
+"""
+Represents a vulnerability.
+"""
+type Vulnerability {
+ """
+ Description of the vulnerability
+ """
+ description: String
+
+ """
+ GraphQL ID of the vulnerability
+ """
+ id: ID!
+
+ """
+ The JSON location metadata for the vulnerability. Its format depends on the
+ type of the security scan that found the vulnerability
+ """
+ location: JSON
+
+ """
+ Type of the security report that found the vulnerability (SAST, DEPENDENCY_SCANNING, CONTAINER_SCANNING, DAST)
+ """
+ reportType: VulnerabilityReportType
+
+ """
+ Severity of the vulnerability (INFO, UNKNOWN, LOW, MEDIUM, HIGH, CRITICAL)
+ """
+ severity: VulnerabilitySeverity
+
+ """
+ State of the vulnerability (DETECTED, DISMISSED, RESOLVED, CONFIRMED)
+ """
+ state: VulnerabilityState
+
+ """
+ Title of the vulnerability
+ """
+ title: String
+
+ """
+ URL to the vulnerability's details page
+ """
+ vulnerabilityPath: String
+}
+
+"""
+The connection type for Vulnerability.
+"""
+type VulnerabilityConnection {
+ """
+ A list of edges.
+ """
+ edges: [VulnerabilityEdge]
+
+ """
+ A list of nodes.
+ """
+ nodes: [Vulnerability]
+
+ """
+ Information to aid in pagination.
+ """
+ pageInfo: PageInfo!
+}
+
+"""
+An edge in a connection.
+"""
+type VulnerabilityEdge {
+ """
+ A cursor for use in pagination.
+ """
+ cursor: String!
+
+ """
+ The item at the end of the edge.
+ """
+ node: Vulnerability
+}
+
+"""
+The type of the security scan that found the vulnerability.
+"""
+enum VulnerabilityReportType {
+ CONTAINER_SCANNING
+ DAST
+ DEPENDENCY_SCANNING
+ SAST
+}
+
+"""
+The severity of the vulnerability.
+"""
+enum VulnerabilitySeverity {
+ CRITICAL
+ HIGH
+ INFO
+ LOW
+ MEDIUM
+ UNKNOWN
+}
+
+"""
+The state of the vulnerability.
+"""
+enum VulnerabilityState {
+ CONFIRMED
+ DETECTED
+ DISMISSED
+ RESOLVED
}
\ No newline at end of file
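A query sketch for the new vulnerabilities connection, which per the field description is available only when the first_class_vulnerabilities feature flag is enabled; the project path is a hypothetical value:

# Sketch only: list reported vulnerabilities of a project
query {
  project(fullPath: "my-group/my-project") {
    vulnerabilities(first: 20) {
      nodes {
        title
        severity
        state
        reportType
        vulnerabilityPath
      }
    }
  }
}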
diff --git a/doc/api/graphql/reference/gitlab_schema.json b/doc/api/graphql/reference/gitlab_schema.json
index 615ae88077f..3d941d9cc69 100644
--- a/doc/api/graphql/reference/gitlab_schema.json
+++ b/doc/api/graphql/reference/gitlab_schema.json
@@ -10,70 +10,100 @@
"subscriptionType": null,
"types": [
{
- "kind": "SCALAR",
- "name": "Boolean",
- "description": "Represents `true` or `false` values.",
- "fields": null,
- "inputFields": null,
- "interfaces": null,
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "SCALAR",
- "name": "String",
- "description": "Represents textual data as UTF-8 character sequences. This type is most often used by GraphQL to represent free-form human-readable text.",
+ "kind": "INPUT_OBJECT",
+ "name": "AddAwardEmojiInput",
+ "description": "Autogenerated input type of AddAwardEmoji",
"fields": null,
- "inputFields": null,
+ "inputFields": [
+ {
+ "name": "awardableId",
+ "description": "The global id of the awardable resource",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "name",
+ "description": "The emoji name",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
"interfaces": null,
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
- "name": "Query",
- "description": null,
+ "name": "AddAwardEmojiPayload",
+ "description": "Autogenerated return type of AddAwardEmoji",
"fields": [
{
- "name": "currentUser",
- "description": "Get information about current user",
+ "name": "awardEmoji",
+ "description": "The award emoji after mutation",
"args": [
],
"type": {
"kind": "OBJECT",
- "name": "User",
+ "name": "AwardEmoji",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "designManagement",
- "description": "Fields related to design management",
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "DesignManagement",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "echo",
- "description": "Text to echo back",
+ "name": "errors",
+ "description": "Reasons why the mutation failed.",
"args": [
- {
- "name": "text",
- "description": "Text to echo back",
- "type": {
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": {
@@ -81,10 +111,79 @@
"name": "String",
"ofType": null
}
- },
- "defaultValue": null
+ }
}
- ],
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "INPUT_OBJECT",
+ "name": "AdminSidekiqQueuesDeleteJobsInput",
+ "description": "Autogenerated input type of AdminSidekiqQueuesDeleteJobs",
+ "fields": null,
+ "inputFields": [
+ {
+ "name": "user",
+ "description": "Delete jobs matching user in the context metadata",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "project",
+ "description": "Delete jobs matching project in the context metadata",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "rootNamespace",
+ "description": "Delete jobs matching root_namespace in the context metadata",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "subscriptionPlan",
+ "description": "Delete jobs matching subscription_plan in the context metadata",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "callerId",
+ "description": "Delete jobs matching caller_id in the context metadata",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "queueName",
+ "description": "The name of the queue to delete jobs from",
"type": {
"kind": "NON_NULL",
"name": null,
@@ -94,220 +193,77 @@
"ofType": null
}
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "group",
- "description": "Find a group",
- "args": [
- {
- "name": "fullPath",
- "description": "The full path of the project, group or namespace, e.g., \"gitlab-org/gitlab-foss\"",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- },
- "defaultValue": null
- }
- ],
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
"type": {
- "kind": "OBJECT",
- "name": "Group",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
},
- "isDeprecated": false,
- "deprecationReason": null
- },
+ "defaultValue": null
+ }
+ ],
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "AdminSidekiqQueuesDeleteJobsPayload",
+ "description": "Autogenerated return type of AdminSidekiqQueuesDeleteJobs",
+ "fields": [
{
- "name": "metadata",
- "description": "Metadata about GitLab",
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "Metadata",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "namespace",
- "description": "Find a namespace",
+ "name": "errors",
+ "description": "Reasons why the mutation failed.",
"args": [
- {
- "name": "fullPath",
- "description": "The full path of the project, group or namespace, e.g., \"gitlab-org/gitlab-foss\"",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- },
- "defaultValue": null
- }
+
],
"type": {
- "kind": "OBJECT",
- "name": "Namespace",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "project",
- "description": "Find a project",
- "args": [
- {
- "name": "fullPath",
- "description": "The full path of the project, group or namespace, e.g., \"gitlab-org/gitlab-foss\"",
- "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "ID",
+ "name": "String",
"ofType": null
}
- },
- "defaultValue": null
+ }
}
- ],
- "type": {
- "kind": "OBJECT",
- "name": "Project",
- "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "snippets",
- "description": "Find Snippets visible to the current user",
+ "name": "result",
+ "description": "Information about the status of the deletion request",
"args": [
- {
- "name": "ids",
- "description": "Array of global snippet ids, e.g., \"gid://gitlab/ProjectSnippet/1\"",
- "type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- }
- },
- "defaultValue": null
- },
- {
- "name": "visibility",
- "description": "The visibility of the snippet",
- "type": {
- "kind": "ENUM",
- "name": "VisibilityScopesEnum",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "authorId",
- "description": "The ID of an author",
- "type": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "projectId",
- "description": "The ID of a project",
- "type": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "type",
- "description": "The type of snippet",
- "type": {
- "kind": "ENUM",
- "name": "TypeEnum",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "explore",
- "description": "Explore personal snippets",
- "type": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "after",
- "description": "Returns the elements in the list that come after the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "before",
- "description": "Returns the elements in the list that come before the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "first",
- "description": "Returns the first _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "last",
- "description": "Returns the last _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- }
+
],
"type": {
"kind": "OBJECT",
- "name": "SnippetConnection",
+ "name": "DeleteJobsResponse",
"ofType": null
},
"isDeprecated": false,
@@ -323,183 +279,183 @@
},
{
"kind": "OBJECT",
- "name": "Project",
- "description": null,
+ "name": "AwardEmoji",
+ "description": "An emoji awarded by a user.",
"fields": [
{
- "name": "archived",
- "description": "Indicates the archived status of the project",
+ "name": "description",
+ "description": "The emoji description",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "autocloseReferencedIssues",
- "description": "Indicates if issues referenced by merge requests and commits within the default branch are closed automatically",
+ "name": "emoji",
+ "description": "The emoji as an icon",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "avatarUrl",
- "description": "URL to avatar image file of the project",
+ "name": "name",
+ "description": "The emoji name",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "containerRegistryEnabled",
- "description": "Indicates if the project stores Docker container images in a container registry",
+ "name": "unicode",
+ "description": "The emoji in unicode",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "createdAt",
- "description": "Timestamp of the project creation",
+ "name": "unicodeVersion",
+ "description": "The unicode version for this emoji",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "Time",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "description",
- "description": "Short description of the project",
+ "name": "user",
+ "description": "The user who awarded the emoji",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "User",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
- },
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "Blob",
+ "description": null,
+ "fields": [
{
- "name": "descriptionHtml",
- "description": "The GitLab Flavored Markdown rendering of `description`",
+ "name": "flatPath",
+ "description": "Flat path of the entry",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "environments",
- "description": "Environments of the project",
+ "name": "id",
+ "description": "ID of the entry",
"args": [
- {
- "name": "name",
- "description": "Name of the environment",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "search",
- "description": "Search query",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "after",
- "description": "Returns the elements in the list that come after the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "before",
- "description": "Returns the elements in the list that come before the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "first",
- "description": "Returns the first _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "last",
- "description": "Returns the last _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
}
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "lfsOid",
+ "description": "LFS ID of the blob",
+ "args": [
+
],
"type": {
- "kind": "OBJECT",
- "name": "EnvironmentConnection",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "forksCount",
- "description": "Number of times the project has been forked",
+ "name": "name",
+ "description": "Name of the entry",
"args": [
],
@@ -508,7 +464,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Int",
+ "name": "String",
"ofType": null
}
},
@@ -516,8 +472,8 @@
"deprecationReason": null
},
{
- "name": "fullPath",
- "description": "Full path of the project",
+ "name": "path",
+ "description": "Path of the entry",
"args": [
],
@@ -526,7 +482,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "ID",
+ "name": "String",
"ofType": null
}
},
@@ -534,36 +490,44 @@
"deprecationReason": null
},
{
- "name": "grafanaIntegration",
- "description": "Grafana integration details for the project",
+ "name": "sha",
+ "description": "Last commit sha for the entry",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "GrafanaIntegration",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "group",
- "description": "Group of the project",
+ "name": "type",
+ "description": "Type of tree entry",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "Group",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "ENUM",
+ "name": "EntryType",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "httpUrlToRepo",
- "description": "URL to connect to the project via HTTPS",
+ "name": "webUrl",
+ "description": "Web URL of the blob",
"args": [
],
@@ -574,19 +538,36 @@
},
"isDeprecated": false,
"deprecationReason": null
- },
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
{
- "name": "id",
- "description": "ID of the project",
+ "kind": "INTERFACE",
+ "name": "Entry",
+ "ofType": null
+ }
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "BlobConnection",
+ "description": "The connection type for Blob.",
+ "fields": [
+ {
+ "name": "edges",
+ "description": "A list of edges.",
"args": [
],
"type": {
- "kind": "NON_NULL",
+ "kind": "LIST",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "ID",
+ "kind": "OBJECT",
+ "name": "BlobEdge",
"ofType": null
}
},
@@ -594,558 +575,257 @@
"deprecationReason": null
},
{
- "name": "importStatus",
- "description": "Status of project import background job of the project",
+ "name": "nodes",
+ "description": "A list of nodes.",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "Blob",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "issue",
- "description": "A single issue of the project",
+ "name": "pageInfo",
+ "description": "Information to aid in pagination.",
"args": [
- {
- "name": "iid",
- "description": "IID of the issue. For example, \"1\"",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "iids",
- "description": "List of IIDs of issues. For example, [1, 2]",
- "type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- }
- },
- "defaultValue": null
- },
- {
- "name": "state",
- "description": "Current state of this issue",
- "type": {
- "kind": "ENUM",
- "name": "IssuableState",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "labelName",
- "description": "Labels applied to this issue",
- "type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- },
- "defaultValue": null
- },
- {
- "name": "createdBefore",
- "description": "Issues created before this date",
- "type": {
- "kind": "SCALAR",
- "name": "Time",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "createdAfter",
- "description": "Issues created after this date",
- "type": {
- "kind": "SCALAR",
- "name": "Time",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "updatedBefore",
- "description": "Issues updated before this date",
- "type": {
- "kind": "SCALAR",
- "name": "Time",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "updatedAfter",
- "description": "Issues updated after this date",
- "type": {
- "kind": "SCALAR",
- "name": "Time",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "closedBefore",
- "description": "Issues closed before this date",
- "type": {
- "kind": "SCALAR",
- "name": "Time",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "closedAfter",
- "description": "Issues closed after this date",
- "type": {
- "kind": "SCALAR",
- "name": "Time",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "search",
- "description": "Search query for finding issues by title or description",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "sort",
- "description": "Sort issues by this criteria",
- "type": {
- "kind": "ENUM",
- "name": "IssueSort",
- "ofType": null
- },
- "defaultValue": "created_desc"
- }
+
],
"type": {
- "kind": "OBJECT",
- "name": "Issue",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "PageInfo",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
- },
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "BlobEdge",
+ "description": "An edge in a connection.",
+ "fields": [
{
- "name": "issues",
- "description": "Issues of the project",
+ "name": "cursor",
+ "description": "A cursor for use in pagination.",
"args": [
- {
- "name": "iid",
- "description": "IID of the issue. For example, \"1\"",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "iids",
- "description": "List of IIDs of issues. For example, [1, 2]",
- "type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- }
- },
- "defaultValue": null
- },
- {
- "name": "state",
- "description": "Current state of this issue",
- "type": {
- "kind": "ENUM",
- "name": "IssuableState",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "labelName",
- "description": "Labels applied to this issue",
- "type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- },
- "defaultValue": null
- },
- {
- "name": "createdBefore",
- "description": "Issues created before this date",
- "type": {
- "kind": "SCALAR",
- "name": "Time",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "createdAfter",
- "description": "Issues created after this date",
- "type": {
- "kind": "SCALAR",
- "name": "Time",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "updatedBefore",
- "description": "Issues updated before this date",
- "type": {
- "kind": "SCALAR",
- "name": "Time",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "updatedAfter",
- "description": "Issues updated after this date",
- "type": {
- "kind": "SCALAR",
- "name": "Time",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "closedBefore",
- "description": "Issues closed before this date",
- "type": {
- "kind": "SCALAR",
- "name": "Time",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "closedAfter",
- "description": "Issues closed after this date",
- "type": {
- "kind": "SCALAR",
- "name": "Time",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "search",
- "description": "Search query for finding issues by title or description",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "sort",
- "description": "Sort issues by this criteria",
- "type": {
- "kind": "ENUM",
- "name": "IssueSort",
- "ofType": null
- },
- "defaultValue": "created_desc"
- },
- {
- "name": "after",
- "description": "Returns the elements in the list that come after the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "before",
- "description": "Returns the elements in the list that come before the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "first",
- "description": "Returns the first _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "last",
- "description": "Returns the last _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- }
+
],
"type": {
- "kind": "OBJECT",
- "name": "IssueConnection",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "issuesEnabled",
- "description": "(deprecated) Does this project have issues enabled?. Use `issues_access_level` instead",
+ "name": "node",
+ "description": "The item at the end of the edge.",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "Boolean",
+ "kind": "OBJECT",
+ "name": "Blob",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "ENUM",
+ "name": "BlobViewersType",
+ "description": "Types of blob viewers",
+ "fields": null,
+ "inputFields": null,
+ "interfaces": null,
+ "enumValues": [
+ {
+ "name": "rich",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "jobsEnabled",
- "description": "(deprecated) Enable jobs for this project. Use `builds_access_level` instead",
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- },
+ "name": "simple",
+ "description": null,
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "lastActivityAt",
- "description": "Timestamp of the project last activity",
+ "name": "auxiliary",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "Board",
+ "description": "Represents a project or group board",
+ "fields": [
+ {
+ "name": "id",
+ "description": "ID (global ID) of the board",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "Time",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "lfsEnabled",
- "description": "Indicates if the project has Large File Storage (LFS) enabled",
+ "name": "name",
+ "description": "Name of the board",
"args": [
],
"type": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "mergeRequest",
- "description": "A single merge request of the project",
+ "name": "weight",
+ "description": "Weight of the board",
"args": [
- {
- "name": "iid",
- "description": "The IID of the merge request, e.g., \"1\"",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "iids",
- "description": "The list of IIDs of issues, e.g., [1, 2]",
- "type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- }
- },
- "defaultValue": null
- }
+
],
"type": {
- "kind": "OBJECT",
- "name": "MergeRequest",
+ "kind": "SCALAR",
+ "name": "Int",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
- },
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "BoardConnection",
+ "description": "The connection type for Board.",
+ "fields": [
{
- "name": "mergeRequests",
- "description": "Merge requests of the project",
+ "name": "edges",
+ "description": "A list of edges.",
"args": [
- {
- "name": "iid",
- "description": "The IID of the merge request, e.g., \"1\"",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "iids",
- "description": "The list of IIDs of issues, e.g., [1, 2]",
- "type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- }
- },
- "defaultValue": null
- },
- {
- "name": "after",
- "description": "Returns the elements in the list that come after the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "before",
- "description": "Returns the elements in the list that come before the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "first",
- "description": "Returns the first _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "last",
- "description": "Returns the last _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- }
+
],
"type": {
- "kind": "OBJECT",
- "name": "MergeRequestConnection",
- "ofType": null
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "BoardEdge",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "mergeRequestsEnabled",
- "description": "(deprecated) Does this project have merge_requests enabled?. Use `merge_requests_access_level` instead",
+ "name": "nodes",
+ "description": "A list of nodes.",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "Board",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "mergeRequestsFfOnlyEnabled",
- "description": "Indicates if no merge commits should be created and all merges should instead be fast-forwarded, which means that merging is only allowed if the branch could be fast-forwarded.",
+ "name": "pageInfo",
+ "description": "Information to aid in pagination.",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "PageInfo",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
- },
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "BoardEdge",
+ "description": "An edge in a connection.",
+ "fields": [
{
- "name": "name",
- "description": "Name of the project (without namespace)",
+ "name": "cursor",
+ "description": "A cursor for use in pagination.",
"args": [
],
@@ -1162,82 +842,115 @@
"deprecationReason": null
},
{
- "name": "nameWithNamespace",
- "description": "Full name of the project with its namespace",
+ "name": "node",
+ "description": "The item at the end of the edge.",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
+ "kind": "OBJECT",
+ "name": "Board",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
- },
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "description": "Represents `true` or `false` values.",
+ "fields": null,
+ "inputFields": null,
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "Commit",
+ "description": null,
+ "fields": [
{
- "name": "namespace",
- "description": "Namespace of the project",
+ "name": "author",
+ "description": "Author of the commit",
"args": [
],
"type": {
"kind": "OBJECT",
- "name": "Namespace",
+ "name": "User",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "onlyAllowMergeIfAllDiscussionsAreResolved",
- "description": "Indicates if merge requests of the project can only be merged when all the discussions are resolved",
+ "name": "authorGravatar",
+ "description": "Commit authors gravatar",
"args": [
],
"type": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "onlyAllowMergeIfPipelineSucceeds",
- "description": "Indicates if merge requests of the project can only be merged with successful jobs",
+ "name": "authorName",
+ "description": "Commit authors name",
"args": [
],
"type": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "openIssuesCount",
- "description": "Number of open issues for the project",
+ "name": "authoredDate",
+ "description": "Timestamp of when the commit was authored",
"args": [
],
"type": {
"kind": "SCALAR",
- "name": "Int",
+ "name": "Time",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "path",
- "description": "Path of the project",
+ "name": "description",
+ "description": "Description of the commit message",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "id",
+ "description": "ID (global ID) of the commit",
"args": [
],
@@ -1246,7 +959,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "ID",
"ofType": null
}
},
@@ -1254,8 +967,65 @@
"deprecationReason": null
},
{
+ "name": "latestPipeline",
+ "description": "Latest pipeline of the commit",
+ "args": [
+ {
+ "name": "status",
+ "description": "Filter pipelines by their status",
+ "type": {
+ "kind": "ENUM",
+ "name": "PipelineStatusEnum",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "ref",
+ "description": "Filter pipelines by the ref they are run for",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "sha",
+ "description": "Filter pipelines by the sha of the commit they are run for",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "Pipeline",
+ "ofType": null
+ },
+ "isDeprecated": true,
+ "deprecationReason": "Use pipelines"
+ },
+ {
+ "name": "message",
+ "description": "Raw commit message",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
"name": "pipelines",
- "description": "Build pipelines of the project",
+ "description": "Pipelines of the commit ordered latest first",
"args": [
{
"name": "status",
@@ -1337,302 +1107,346 @@
"deprecationReason": null
},
{
- "name": "printingMergeRequestLinkEnabled",
- "description": "Indicates if a link to create or view a merge request should display after a push to Git repositories of the project from the command line",
+ "name": "sha",
+ "description": "SHA1 ID of the commit",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "publicJobs",
- "description": "Indicates if there is public access to pipelines and job details of the project, including output logs and artifacts",
+ "name": "signatureHtml",
+ "description": "Rendered HTML of the commit signature",
"args": [
],
"type": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "removeSourceBranchAfterMerge",
- "description": "Indicates if `Delete source branch` option should be enabled by default for all new merge requests of the project",
+ "name": "title",
+ "description": "Title of the commit message",
"args": [
],
"type": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "repository",
- "description": "Git repository of the project",
+ "name": "webUrl",
+ "description": "Web URL of the commit",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "Repository",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "INPUT_OBJECT",
+ "name": "CreateDiffNoteInput",
+ "description": "Autogenerated input type of CreateDiffNote",
+ "fields": null,
+ "inputFields": [
+ {
+ "name": "noteableId",
+ "description": "The global id of the resource to add a note to",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
},
{
- "name": "requestAccessEnabled",
- "description": "Indicates if users can request member access to the project",
+ "name": "body",
+ "description": "Content of the note",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "position",
+ "description": "The position of this note on a diff",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "DiffPositionInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "CreateDiffNotePayload",
+ "description": "Autogenerated return type of CreateDiffNote",
+ "fields": [
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
"args": [
],
"type": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "sentryDetailedError",
- "description": "Detailed version of a Sentry error on the project",
+ "name": "errors",
+ "description": "Reasons why the mutation failed.",
"args": [
- {
- "name": "id",
- "description": "ID of the Sentry issue",
- "type": {
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "ID",
+ "name": "String",
"ofType": null
}
- },
- "defaultValue": null
+ }
}
- ],
- "type": {
- "kind": "OBJECT",
- "name": "SentryDetailedError",
- "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "sentryErrors",
- "description": "Paginated collection of Sentry errors on the project",
+ "name": "note",
+ "description": "The note after mutation",
"args": [
],
"type": {
"kind": "OBJECT",
- "name": "SentryErrorCollection",
+ "name": "Note",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "INPUT_OBJECT",
+ "name": "CreateEpicInput",
+ "description": "Autogenerated input type of CreateEpic",
+ "fields": null,
+ "inputFields": [
+ {
+ "name": "groupPath",
+ "description": "The group the epic to mutate is in",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
},
{
- "name": "serviceDeskAddress",
- "description": "E-mail address of the service desk.",
- "args": [
-
- ],
+ "name": "title",
+ "description": "The title of the epic",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "serviceDeskEnabled",
- "description": "Indicates if the project has service desk enabled.",
- "args": [
-
- ],
+ "name": "description",
+ "description": "The description of the epic",
"type": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "String",
"ofType": null
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "sharedRunnersEnabled",
- "description": "Indicates if shared runners are enabled on the project",
- "args": [
-
- ],
+ "name": "startDateFixed",
+ "description": "The start date of the epic",
"type": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "String",
"ofType": null
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "snippets",
- "description": "Snippets of the project",
- "args": [
- {
- "name": "ids",
- "description": "Array of global snippet ids, e.g., \"gid://gitlab/ProjectSnippet/1\"",
- "type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- }
- },
- "defaultValue": null
- },
- {
- "name": "visibility",
- "description": "The visibility of the snippet",
- "type": {
- "kind": "ENUM",
- "name": "VisibilityScopesEnum",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "after",
- "description": "Returns the elements in the list that come after the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "before",
- "description": "Returns the elements in the list that come before the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "first",
- "description": "Returns the first _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "last",
- "description": "Returns the last _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- }
- ],
+ "name": "dueDateFixed",
+ "description": "The end date of the epic",
"type": {
- "kind": "OBJECT",
- "name": "SnippetConnection",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "snippetsEnabled",
- "description": "(deprecated) Does this project have snippets enabled?. Use `snippets_access_level` instead",
- "args": [
-
- ],
+ "name": "startDateIsFixed",
+ "description": "Indicates start date should be sourced from start_date_fixed field not the issue milestones",
"type": {
"kind": "SCALAR",
"name": "Boolean",
"ofType": null
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "sshUrlToRepo",
- "description": "URL to connect to the project via SSH",
- "args": [
-
- ],
+ "name": "dueDateIsFixed",
+ "description": "Indicates end date should be sourced from due_date_fixed field not the issue milestones",
"type": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "starCount",
- "description": "Number of times the project has been starred",
- "args": [
-
- ],
+ "name": "addLabelIds",
+ "description": "The IDs of labels to be added to the epic.",
"type": {
- "kind": "NON_NULL",
+ "kind": "LIST",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
}
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "statistics",
- "description": "Statistics of the project",
- "args": [
-
- ],
+ "name": "removeLabelIds",
+ "description": "The IDs of labels to be removed from the epic.",
"type": {
- "kind": "OBJECT",
- "name": "ProjectStatistics",
- "ofType": null
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ }
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "suggestionCommitMessage",
- "description": "The commit message used to apply merge request suggestions",
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "CreateEpicPayload",
+ "description": "Autogenerated return type of CreateEpic",
+ "fields": [
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
"args": [
],
@@ -1645,22 +1459,22 @@
"deprecationReason": null
},
{
- "name": "tagList",
- "description": "List of project tags",
+ "name": "epic",
+ "description": "The created epic",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "OBJECT",
+ "name": "Epic",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "userPermissions",
- "description": "Permissions for the current user on the resource",
+ "name": "errors",
+ "description": "Reasons why the mutation failed.",
"args": [
],
@@ -1668,17 +1482,101 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "ProjectPermissions",
- "ofType": null
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
}
},
"isDeprecated": false,
"deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
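The additions above define CreateEpicInput's label and fixed-date arguments together with the CreateEpicPayload return type. A minimal, illustrative call, assuming the conventional `createEpic` mutation field; the group path, title, and label IDs are placeholders, and any other required arguments defined earlier in the schema are omitted:

    # Illustrative sketch only -- not part of the diff above.
    mutation {
      createEpic(input: {
        groupPath: "my-group"              # assumed required argument (defined earlier in the schema)
        title: "Collect user feedback"     # assumed required argument
        addLabelIds: ["1"]
        removeLabelIds: ["2"]
        startDateIsFixed: true
        dueDateIsFixed: false
        clientMutationId: "example-1"
      }) {
        clientMutationId
        epic {
          id                               # Epic fields are defined elsewhere in the schema
        }
        errors
      }
    }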
+ {
+ "kind": "INPUT_OBJECT",
+ "name": "CreateImageDiffNoteInput",
+ "description": "Autogenerated input type of CreateImageDiffNote",
+ "fields": null,
+ "inputFields": [
+ {
+ "name": "noteableId",
+ "description": "The global id of the resource to add a note to",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
},
{
- "name": "visibility",
- "description": "Visibility of the project",
+ "name": "body",
+ "description": "Content of the note",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "position",
+ "description": "The position of this note on a diff",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "DiffImagePositionInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "CreateImageDiffNotePayload",
+ "description": "Autogenerated return type of CreateImageDiffNote",
+ "fields": [
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
"args": [
],
@@ -1691,28 +1589,40 @@
"deprecationReason": null
},
{
- "name": "webUrl",
- "description": "Web URL of the project",
+ "name": "errors",
+ "description": "Reasons why the mutation failed.",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "wikiEnabled",
- "description": "(deprecated) Does this project have wiki enabled?. Use `wiki_access_level` instead",
+ "name": "note",
+ "description": "The note after mutation",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "Boolean",
+ "kind": "OBJECT",
+ "name": "Note",
"ofType": null
},
"isDeprecated": false,
@@ -1727,67 +1637,86 @@
"possibleTypes": null
},
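CreateImageDiffNoteInput above takes a noteable global ID, a note body, and a DiffImagePositionInput, and the payload returns the created note plus errors. An illustrative call, assuming the conventional `createImageDiffNote` field name; the global ID is a placeholder and the position fields are defined elsewhere in the schema, so the literal is left empty here:

    # Illustrative sketch only.
    mutation {
      createImageDiffNote(input: {
        noteableId: "gid://gitlab/MergeRequest/1"   # placeholder global ID
        body: "This icon looks off-centre."
        position: {
          # DiffImagePositionInput fields go here (defined elsewhere in the schema)
        }
      }) {
        note {
          id      # Note fields are defined elsewhere in the schema
        }
        errors
      }
    }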
{
- "kind": "OBJECT",
- "name": "ProjectPermissions",
- "description": null,
- "fields": [
+ "kind": "INPUT_OBJECT",
+ "name": "CreateNoteInput",
+ "description": "Autogenerated input type of CreateNote",
+ "fields": null,
+ "inputFields": [
{
- "name": "adminOperations",
- "description": "Indicates the user can perform `admin_operations` on this resource",
- "args": [
-
- ],
+ "name": "noteableId",
+ "description": "The global id of the resource to add a note to",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "ID",
"ofType": null
}
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "adminProject",
- "description": "Indicates the user can perform `admin_project` on this resource",
- "args": [
-
- ],
+ "name": "body",
+ "description": "Content of the note",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "String",
"ofType": null
}
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "adminRemoteMirror",
- "description": "Indicates the user can perform `admin_remote_mirror` on this resource",
+ "name": "discussionId",
+ "description": "The global id of the discussion this note is in reply to",
+ "type": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "CreateNotePayload",
+ "description": "Autogenerated return type of CreateNote",
+ "fields": [
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "adminWiki",
- "description": "Indicates the user can perform `admin_wiki` on this resource",
+ "name": "errors",
+ "description": "Reasons why the mutation failed.",
"args": [
],
@@ -1795,71 +1724,115 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "archiveProject",
- "description": "Indicates the user can perform `archive_project` on this resource",
+ "name": "note",
+ "description": "The note after mutation",
"args": [
],
"type": {
+ "kind": "OBJECT",
+ "name": "Note",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
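CreateNoteInput and CreateNotePayload above cover plain comments, including replies via the optional discussionId. An illustrative call, assuming the conventional `createNote` field name; the global IDs are placeholders:

    # Illustrative sketch only.
    mutation {
      createNote(input: {
        noteableId: "gid://gitlab/Issue/10"            # placeholder global ID
        body: "Thanks -- this reproduces for me as well."
        discussionId: "gid://gitlab/Discussion/abc123" # optional reply target, placeholder
      }) {
        note {
          id
          body      # assumed Note field; Note is defined elsewhere in the schema
        }
        errors
        clientMutationId
      }
    }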
+ {
+ "kind": "INPUT_OBJECT",
+ "name": "CreateRequirementInput",
+ "description": "Autogenerated input type of CreateRequirement",
+ "fields": null,
+ "inputFields": [
+ {
+ "name": "title",
+ "description": "Title of the requirement",
+ "type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "String",
"ofType": null
}
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "changeNamespace",
- "description": "Indicates the user can perform `change_namespace` on this resource",
- "args": [
-
- ],
+ "name": "projectPath",
+ "description": "The project full path the requirement is associated with",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "ID",
"ofType": null
}
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "changeVisibilityLevel",
- "description": "Indicates the user can perform `change_visibility_level` on this resource",
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "CreateRequirementPayload",
+ "description": "Autogenerated return type of CreateRequirement",
+ "fields": [
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "createDeployment",
- "description": "Indicates the user can perform `create_deployment` on this resource",
+ "name": "errors",
+ "description": "Reasons why the mutation failed.",
"args": [
],
@@ -1867,71 +1840,159 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "createDesign",
- "description": "Indicates the user can perform `create_design` on this resource",
+ "name": "requirement",
+ "description": "The requirement after mutation",
"args": [
],
"type": {
+ "kind": "OBJECT",
+ "name": "Requirement",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
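CreateRequirementInput above needs only a title and the full path of the owning project. An illustrative call, assuming the conventional `createRequirement` field name; the project path is a placeholder and the Requirement selection is an assumption, since that type is defined elsewhere in the schema:

    # Illustrative sketch only.
    mutation {
      createRequirement(input: {
        projectPath: "my-group/my-project"
        title: "The API responds within 200 ms"
      }) {
        requirement {
          id        # Requirement fields are defined elsewhere in the schema
        }
        errors
      }
    }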
+ {
+ "kind": "INPUT_OBJECT",
+ "name": "CreateSnippetInput",
+ "description": "Autogenerated input type of CreateSnippet",
+ "fields": null,
+ "inputFields": [
+ {
+ "name": "title",
+ "description": "Title of the snippet",
+ "type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "String",
"ofType": null
}
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "createIssue",
- "description": "Indicates the user can perform `create_issue` on this resource",
- "args": [
-
- ],
+ "name": "fileName",
+ "description": "File name of the snippet",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "content",
+ "description": "Content of the snippet",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "String",
"ofType": null
}
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "createLabel",
- "description": "Indicates the user can perform `create_label` on this resource",
- "args": [
-
- ],
+ "name": "description",
+ "description": "Description of the snippet",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "visibilityLevel",
+ "description": "The visibility level of the snippet",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
+ "kind": "ENUM",
+ "name": "VisibilityLevelsEnum",
"ofType": null
}
},
+ "defaultValue": null
+ },
+ {
+ "name": "projectPath",
+ "description": "The project full path the snippet is associated with",
+ "type": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "CreateSnippetPayload",
+ "description": "Autogenerated return type of CreateSnippet",
+ "fields": [
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "createMergeRequestFrom",
- "description": "Indicates the user can perform `create_merge_request_from` on this resource",
+ "name": "errors",
+ "description": "Reasons why the mutation failed.",
"args": [
],
@@ -1939,89 +2000,107 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "createMergeRequestIn",
- "description": "Indicates the user can perform `create_merge_request_in` on this resource",
+ "name": "snippet",
+ "description": "The snippet after mutation",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- }
+ "kind": "OBJECT",
+ "name": "Snippet",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
- },
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
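CreateSnippetInput above mixes required arguments (title, content, visibilityLevel) with optional ones (fileName, description, projectPath). An illustrative call, assuming the conventional `createSnippet` field name; the enum value, paths, and file content are placeholders, and treating an omitted projectPath as "personal snippet" is an assumption:

    # Illustrative sketch only; VisibilityLevelsEnum values are defined
    # elsewhere in the schema.
    mutation {
      createSnippet(input: {
        title: "Backup helper"
        fileName: "backup.sh"
        content: "#!/bin/sh\necho backing up"
        description: "Nightly backup helper script"
        visibilityLevel: private               # assumed enum value
        projectPath: "my-group/my-project"     # omit for a personal snippet (assumption)
      }) {
        snippet {
          id       # Snippet fields are defined elsewhere in the schema
        }
        errors
      }
    }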
+ {
+ "kind": "OBJECT",
+ "name": "DeleteJobsResponse",
+ "description": "The response from the AdminSidekiqQueuesDeleteJobs mutation.",
+ "fields": [
{
- "name": "createPages",
- "description": "Indicates the user can perform `create_pages` on this resource",
+ "name": "completed",
+ "description": "Whether or not the entire queue was processed in time; if not, retrying the same request is safe",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "createPipeline",
- "description": "Indicates the user can perform `create_pipeline` on this resource",
+ "name": "deletedJobs",
+ "description": "The number of matching jobs deleted",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "createPipelineSchedule",
- "description": "Indicates the user can perform `create_pipeline_schedule` on this resource",
+ "name": "queueSize",
+ "description": "The queue size after processing",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
- },
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
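DeleteJobsResponse above only describes the result object; the AdminSidekiqQueuesDeleteJobs mutation and its arguments are defined elsewhere in the schema. A selection a client might request wherever that result is exposed, written as a fragment to avoid guessing the surrounding payload shape:

    # Illustrative fragment on the result type added above.
    fragment deleteJobsResult on DeleteJobsResponse {
      completed      # false means the queue was not fully processed; retrying is safe
      deletedJobs
      queueSize
    }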
+ {
+ "kind": "OBJECT",
+ "name": "Design",
+ "description": "A single design",
+ "fields": [
{
- "name": "createSnippet",
- "description": "Indicates the user can perform `create_snippet` on this resource",
+ "name": "diffRefs",
+ "description": "The diff refs for this design",
"args": [
],
@@ -2029,8 +2108,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
+ "kind": "OBJECT",
+ "name": "DiffRefs",
"ofType": null
}
},
@@ -2038,17 +2117,56 @@
"deprecationReason": null
},
{
- "name": "createWiki",
- "description": "Indicates the user can perform `create_wiki` on this resource",
+ "name": "discussions",
+ "description": "All discussions on this noteable",
"args": [
-
+ {
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "first",
+ "description": "Returns the first _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "last",
+ "description": "Returns the last _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
+ "kind": "OBJECT",
+ "name": "DiscussionConnection",
"ofType": null
}
},
@@ -2056,8 +2174,8 @@
"deprecationReason": null
},
{
- "name": "destroyDesign",
- "description": "Indicates the user can perform `destroy_design` on this resource",
+ "name": "event",
+ "description": "How this design was changed in the current version",
"args": [
],
@@ -2065,8 +2183,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
+ "kind": "ENUM",
+ "name": "DesignVersionEvent",
"ofType": null
}
},
@@ -2074,8 +2192,8 @@
"deprecationReason": null
},
{
- "name": "destroyPages",
- "description": "Indicates the user can perform `destroy_pages` on this resource",
+ "name": "filename",
+ "description": "The filename of the design",
"args": [
],
@@ -2084,7 +2202,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "String",
"ofType": null
}
},
@@ -2092,8 +2210,8 @@
"deprecationReason": null
},
{
- "name": "destroyWiki",
- "description": "Indicates the user can perform `destroy_wiki` on this resource",
+ "name": "fullPath",
+ "description": "The full path to the design file",
"args": [
],
@@ -2102,7 +2220,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "String",
"ofType": null
}
},
@@ -2110,8 +2228,8 @@
"deprecationReason": null
},
{
- "name": "downloadCode",
- "description": "Indicates the user can perform `download_code` on this resource",
+ "name": "id",
+ "description": "The ID of this design",
"args": [
],
@@ -2120,7 +2238,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "ID",
"ofType": null
}
},
@@ -2128,8 +2246,8 @@
"deprecationReason": null
},
{
- "name": "downloadWikiCode",
- "description": "Indicates the user can perform `download_wiki_code` on this resource",
+ "name": "image",
+ "description": "The URL of the image",
"args": [
],
@@ -2138,7 +2256,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "String",
"ofType": null
}
},
@@ -2146,8 +2264,8 @@
"deprecationReason": null
},
{
- "name": "forkProject",
- "description": "Indicates the user can perform `fork_project` on this resource",
+ "name": "issue",
+ "description": "The issue the design belongs to",
"args": [
],
@@ -2155,8 +2273,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
+ "kind": "OBJECT",
+ "name": "Issue",
"ofType": null
}
},
@@ -2164,17 +2282,56 @@
"deprecationReason": null
},
{
- "name": "pushCode",
- "description": "Indicates the user can perform `push_code` on this resource",
+ "name": "notes",
+ "description": "All notes on this noteable",
"args": [
-
+ {
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "first",
+ "description": "Returns the first _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "last",
+ "description": "Returns the last _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
+ "kind": "OBJECT",
+ "name": "NoteConnection",
"ofType": null
}
},
@@ -2182,8 +2339,8 @@
"deprecationReason": null
},
{
- "name": "pushToDeleteProtectedBranch",
- "description": "Indicates the user can perform `push_to_delete_protected_branch` on this resource",
+ "name": "notesCount",
+ "description": "The total count of user-created notes for this design",
"args": [
],
@@ -2192,7 +2349,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "Int",
"ofType": null
}
},
@@ -2200,8 +2357,8 @@
"deprecationReason": null
},
{
- "name": "readCommitStatus",
- "description": "Indicates the user can perform `read_commit_status` on this resource",
+ "name": "project",
+ "description": "The project the design belongs to",
"args": [
],
@@ -2209,8 +2366,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
+ "kind": "OBJECT",
+ "name": "Project",
"ofType": null
}
},
@@ -2218,26 +2375,107 @@
"deprecationReason": null
},
{
- "name": "readCycleAnalytics",
- "description": "Indicates the user can perform `read_cycle_analytics` on this resource",
+ "name": "versions",
+ "description": "All versions related to this design ordered newest first",
"args": [
-
+ {
+ "name": "earlierOrEqualToSha",
+ "description": "The SHA256 of the most recent acceptable version",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "earlierOrEqualToId",
+ "description": "The Global ID of the most recent acceptable version",
+ "type": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "first",
+ "description": "Returns the first _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "last",
+ "description": "Returns the last _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
+ "kind": "OBJECT",
+ "name": "DesignVersionConnection",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+ {
+ "kind": "INTERFACE",
+ "name": "Noteable",
+ "ofType": null
},
{
- "name": "readDesign",
- "description": "Indicates the user can perform `read_design` on this resource",
+ "kind": "INTERFACE",
+ "name": "DesignFields",
+ "ofType": null
+ }
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
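The Design object above exposes Relay-style connections (discussions, notes, versions) that all take the same after/before/first/last arguments. An illustrative selection, written as a fragment because the query path to a Design (through an issue's design collection) is defined elsewhere in the schema:

    # Illustrative fragment; the nested Note selection is an assumption, since
    # Note and NoteConnection are defined elsewhere in the schema.
    fragment designSummary on Design {
      id
      filename
      fullPath
      image
      event
      notesCount
      notes(first: 5) {
        nodes {
          id
        }
      }
      versions(first: 3) {
        nodes {
          id
          sha
        }
      }
    }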
+ {
+ "kind": "OBJECT",
+ "name": "DesignAtVersion",
+ "description": "A design pinned to a specific version. The image field reflects the design as of the associated version.",
+ "fields": [
+ {
+ "name": "design",
+ "description": "The underlying design.",
"args": [
],
@@ -2245,8 +2483,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
+ "kind": "OBJECT",
+ "name": "Design",
"ofType": null
}
},
@@ -2254,8 +2492,8 @@
"deprecationReason": null
},
{
- "name": "readMergeRequest",
- "description": "Indicates the user can perform `read_merge_request` on this resource",
+ "name": "diffRefs",
+ "description": "The diff refs for this design",
"args": [
],
@@ -2263,8 +2501,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
+ "kind": "OBJECT",
+ "name": "DiffRefs",
"ofType": null
}
},
@@ -2272,8 +2510,8 @@
"deprecationReason": null
},
{
- "name": "readPagesContent",
- "description": "Indicates the user can perform `read_pages_content` on this resource",
+ "name": "event",
+ "description": "How this design was changed in the current version",
"args": [
],
@@ -2281,8 +2519,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
+ "kind": "ENUM",
+ "name": "DesignVersionEvent",
"ofType": null
}
},
@@ -2290,8 +2528,8 @@
"deprecationReason": null
},
{
- "name": "readProject",
- "description": "Indicates the user can perform `read_project` on this resource",
+ "name": "filename",
+ "description": "The filename of the design",
"args": [
],
@@ -2300,7 +2538,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "String",
"ofType": null
}
},
@@ -2308,8 +2546,8 @@
"deprecationReason": null
},
{
- "name": "readProjectMember",
- "description": "Indicates the user can perform `read_project_member` on this resource",
+ "name": "fullPath",
+ "description": "The full path to the design file",
"args": [
],
@@ -2318,7 +2556,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "String",
"ofType": null
}
},
@@ -2326,8 +2564,8 @@
"deprecationReason": null
},
{
- "name": "readWiki",
- "description": "Indicates the user can perform `read_wiki` on this resource",
+ "name": "id",
+ "description": "The ID of this design",
"args": [
],
@@ -2336,7 +2574,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "ID",
"ofType": null
}
},
@@ -2344,8 +2582,8 @@
"deprecationReason": null
},
{
- "name": "removeForkProject",
- "description": "Indicates the user can perform `remove_fork_project` on this resource",
+ "name": "image",
+ "description": "The URL of the image",
"args": [
],
@@ -2354,7 +2592,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "String",
"ofType": null
}
},
@@ -2362,8 +2600,8 @@
"deprecationReason": null
},
{
- "name": "removePages",
- "description": "Indicates the user can perform `remove_pages` on this resource",
+ "name": "issue",
+ "description": "The issue the design belongs to",
"args": [
],
@@ -2371,8 +2609,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
+ "kind": "OBJECT",
+ "name": "Issue",
"ofType": null
}
},
@@ -2380,8 +2618,8 @@
"deprecationReason": null
},
{
- "name": "removeProject",
- "description": "Indicates the user can perform `remove_project` on this resource",
+ "name": "notesCount",
+ "description": "The total count of user-created notes for this design",
"args": [
],
@@ -2390,7 +2628,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "Int",
"ofType": null
}
},
@@ -2398,8 +2636,8 @@
"deprecationReason": null
},
{
- "name": "renameProject",
- "description": "Indicates the user can perform `rename_project` on this resource",
+ "name": "project",
+ "description": "The project the design belongs to",
"args": [
],
@@ -2407,8 +2645,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
+ "kind": "OBJECT",
+ "name": "Project",
"ofType": null
}
},
@@ -2416,8 +2654,8 @@
"deprecationReason": null
},
{
- "name": "requestAccess",
- "description": "Indicates the user can perform `request_access` on this resource",
+ "name": "version",
+ "description": "The version this design-at-versions is pinned to",
"args": [
],
@@ -2425,26 +2663,43 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
+ "kind": "OBJECT",
+ "name": "DesignVersion",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
- },
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
{
- "name": "updatePages",
- "description": "Indicates the user can perform `update_pages` on this resource",
+ "kind": "INTERFACE",
+ "name": "DesignFields",
+ "ofType": null
+ }
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "DesignAtVersionConnection",
+ "description": "The connection type for DesignAtVersion.",
+ "fields": [
+ {
+ "name": "edges",
+ "description": "A list of edges.",
"args": [
],
"type": {
- "kind": "NON_NULL",
+ "kind": "LIST",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
+ "kind": "OBJECT",
+ "name": "DesignAtVersionEdge",
"ofType": null
}
},
@@ -2452,17 +2707,17 @@
"deprecationReason": null
},
{
- "name": "updateWiki",
- "description": "Indicates the user can perform `update_wiki` on this resource",
+ "name": "nodes",
+ "description": "A list of nodes.",
"args": [
],
"type": {
- "kind": "NON_NULL",
+ "kind": "LIST",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
+ "kind": "OBJECT",
+ "name": "DesignAtVersion",
"ofType": null
}
},
@@ -2470,8 +2725,8 @@
"deprecationReason": null
},
{
- "name": "uploadFile",
- "description": "Indicates the user can perform `upload_file` on this resource",
+ "name": "pageInfo",
+ "description": "Information to aid in pagination.",
"args": [
],
@@ -2479,8 +2734,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
+ "kind": "OBJECT",
+ "name": "PageInfo",
"ofType": null
}
},
@@ -2496,116 +2751,212 @@
"possibleTypes": null
},
{
- "kind": "SCALAR",
- "name": "ID",
- "description": "Represents a unique identifier that is Base64 obfuscated. It is often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as `\"VXNlci0xMA==\"`) or integer (such as `4`) input value will be accepted as an ID.",
- "fields": null,
- "inputFields": null,
- "interfaces": null,
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "SCALAR",
- "name": "Int",
- "description": "Represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1.",
- "fields": null,
- "inputFields": null,
- "interfaces": null,
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "SCALAR",
- "name": "Time",
- "description": "Time represented in ISO 8601",
- "fields": null,
- "inputFields": null,
- "interfaces": null,
- "enumValues": null,
- "possibleTypes": null
- },
- {
"kind": "OBJECT",
- "name": "Namespace",
- "description": null,
+ "name": "DesignAtVersionEdge",
+ "description": "An edge in a connection.",
"fields": [
{
- "name": "description",
- "description": "Description of the namespace",
+ "name": "cursor",
+ "description": "A cursor for use in pagination.",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "descriptionHtml",
- "description": "The GitLab Flavored Markdown rendering of `description`",
+ "name": "node",
+ "description": "The item at the end of the edge.",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "OBJECT",
+ "name": "DesignAtVersion",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
- },
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "DesignCollection",
+ "description": "A collection of designs.",
+ "fields": [
{
- "name": "fullName",
- "description": "Full name of the namespace",
+ "name": "design",
+ "description": "Find a specific design",
"args": [
-
+ {
+ "name": "id",
+ "description": "Find a design by its ID",
+ "type": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "filename",
+ "description": "Find a design by its filename",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
+ "kind": "OBJECT",
+ "name": "Design",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "fullPath",
- "description": "Full path of the namespace",
+ "name": "designAtVersion",
+ "description": "Find a design as of a version",
"args": [
-
+ {
+ "name": "id",
+ "description": "The Global ID of the design at this version",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ }
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
+ "kind": "OBJECT",
+ "name": "DesignAtVersion",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "id",
- "description": "ID of the namespace",
+ "name": "designs",
+ "description": "All designs for the design collection",
"args": [
-
+ {
+ "name": "ids",
+ "description": "Filters designs by their ID",
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "filenames",
+ "description": "Filters designs by their filename",
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "atVersion",
+ "description": "Filters designs to only those that existed at the version. If argument is omitted or nil then all designs will reflect the latest version",
+ "type": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "first",
+ "description": "Returns the first _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "last",
+ "description": "Returns the last _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "ID",
+ "kind": "OBJECT",
+ "name": "DesignConnection",
"ofType": null
}
},
@@ -2613,22 +2964,26 @@
"deprecationReason": null
},
{
- "name": "lfsEnabled",
- "description": "Indicates if Large File Storage (LFS) is enabled for namespace",
+ "name": "issue",
+ "description": "Issue associated with the design collection",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "Issue",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "name",
- "description": "Name of the namespace",
+ "name": "project",
+ "description": "Project associated with the design collection",
"args": [
],
@@ -2636,8 +2991,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "OBJECT",
+ "name": "Project",
"ofType": null
}
},
@@ -2645,36 +3000,61 @@
"deprecationReason": null
},
{
- "name": "path",
- "description": "Path of the namespace",
+ "name": "version",
+ "description": "A specific version",
"args": [
-
+ {
+ "name": "sha",
+ "description": "The SHA256 of a specific version",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "id",
+ "description": "The Global ID of the version",
+ "type": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
+ "kind": "OBJECT",
+ "name": "DesignVersion",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "projects",
- "description": "Projects within this namespace",
+ "name": "versions",
+ "description": "All versions related to all designs, ordered newest first",
"args": [
{
- "name": "includeSubgroups",
- "description": "Include also subgroup projects",
+ "name": "earlierOrEqualToSha",
+ "description": "The SHA256 of the most recent acceptable version",
"type": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "String",
"ofType": null
},
- "defaultValue": "false"
+ "defaultValue": null
+ },
+ {
+ "name": "earlierOrEqualToId",
+ "description": "The Global ID of the most recent acceptable version",
+ "type": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ },
+ "defaultValue": null
},
{
"name": "after",
@@ -2722,7 +3102,38 @@
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "ProjectConnection",
+ "name": "DesignVersionConnection",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
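DesignCollection above bundles lookups for single designs, designs-at-version, filtered design lists, and version history. An illustrative fragment using only the arguments shown in this hunk; in practice the collection is reached through an issue, which is defined elsewhere in the schema, and the filename and SHA values are placeholders:

    # Illustrative fragment on the collection type added above.
    fragment collectionOverview on DesignCollection {
      designs(first: 10, filenames: ["homepage.png"]) {
        nodes {
          filename
          image
          event
        }
      }
      version(sha: "1a2b3c4d") {     # placeholder SHA
        id
        sha
      }
    }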
+ {
+ "kind": "OBJECT",
+ "name": "DesignConnection",
+ "description": "The connection type for Design.",
+ "fields": [
+ {
+ "name": "edges",
+ "description": "A list of edges.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "DesignEdge",
"ofType": null
}
},
@@ -2730,42 +3141,81 @@
"deprecationReason": null
},
{
- "name": "requestAccessEnabled",
- "description": "Indicates if users can request access to namespace",
+ "name": "nodes",
+ "description": "A list of nodes.",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "Design",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "rootStorageStatistics",
- "description": "Aggregated storage statistics of the namespace. Only available for root namespaces",
+ "name": "pageInfo",
+ "description": "Information to aid in pagination.",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "RootStorageStatistics",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "PageInfo",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "DesignEdge",
+ "description": "An edge in a connection.",
+ "fields": [
+ {
+ "name": "cursor",
+ "description": "A cursor for use in pagination.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "visibility",
- "description": "Visibility of the namespace",
+ "name": "node",
+ "description": "The item at the end of the edge.",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "OBJECT",
+ "name": "Design",
"ofType": null
},
"isDeprecated": false,
@@ -2780,13 +3230,13 @@
"possibleTypes": null
},
{
- "kind": "OBJECT",
- "name": "RootStorageStatistics",
+ "kind": "INTERFACE",
+ "name": "DesignFields",
"description": null,
"fields": [
{
- "name": "buildArtifactsSize",
- "description": "The CI artifacts size in bytes",
+ "name": "diffRefs",
+ "description": "The diff refs for this design",
"args": [
],
@@ -2794,8 +3244,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Int",
+ "kind": "OBJECT",
+ "name": "DiffRefs",
"ofType": null
}
},
@@ -2803,8 +3253,8 @@
"deprecationReason": null
},
{
- "name": "lfsObjectsSize",
- "description": "The LFS objects size in bytes",
+ "name": "event",
+ "description": "How this design was changed in the current version",
"args": [
],
@@ -2812,8 +3262,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Int",
+ "kind": "ENUM",
+ "name": "DesignVersionEvent",
"ofType": null
}
},
@@ -2821,8 +3271,8 @@
"deprecationReason": null
},
{
- "name": "packagesSize",
- "description": "The packages size in bytes",
+ "name": "filename",
+ "description": "The filename of the design",
"args": [
],
@@ -2831,7 +3281,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Int",
+ "name": "String",
"ofType": null
}
},
@@ -2839,8 +3289,8 @@
"deprecationReason": null
},
{
- "name": "repositorySize",
- "description": "The Git repository size in bytes",
+ "name": "fullPath",
+ "description": "The full path to the design file",
"args": [
],
@@ -2849,7 +3299,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Int",
+ "name": "String",
"ofType": null
}
},
@@ -2857,8 +3307,8 @@
"deprecationReason": null
},
{
- "name": "storageSize",
- "description": "The total storage in bytes",
+ "name": "id",
+ "description": "The ID of this design",
"args": [
],
@@ -2867,7 +3317,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Int",
+ "name": "ID",
"ofType": null
}
},
@@ -2875,8 +3325,8 @@
"deprecationReason": null
},
{
- "name": "wikiSize",
- "description": "The wiki size in bytes",
+ "name": "image",
+ "description": "The URL of the image",
"args": [
],
@@ -2885,38 +3335,25 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Int",
+ "name": "String",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "ProjectConnection",
- "description": "The connection type for Project.",
- "fields": [
+ },
{
- "name": "edges",
- "description": "A list of edges.",
+ "name": "issue",
+ "description": "The issue the design belongs to",
"args": [
],
"type": {
- "kind": "LIST",
+ "kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "ProjectEdge",
+ "name": "Issue",
"ofType": null
}
},
@@ -2924,17 +3361,17 @@
"deprecationReason": null
},
{
- "name": "nodes",
- "description": "A list of nodes.",
+ "name": "notesCount",
+ "description": "The total count of user-created notes for this design",
"args": [
],
"type": {
- "kind": "LIST",
+ "kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "Project",
+ "kind": "SCALAR",
+ "name": "Int",
"ofType": null
}
},
@@ -2942,8 +3379,8 @@
"deprecationReason": null
},
{
- "name": "pageInfo",
- "description": "Information to aid in pagination.",
+ "name": "project",
+ "description": "The project the design belongs to",
"args": [
],
@@ -2952,7 +3389,7 @@
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "PageInfo",
+ "name": "Project",
"ofType": null
}
},
@@ -2961,97 +3398,181 @@
}
],
"inputFields": null,
- "interfaces": [
-
- ],
+ "interfaces": null,
"enumValues": null,
- "possibleTypes": null
+ "possibleTypes": [
+ {
+ "kind": "OBJECT",
+ "name": "Design",
+ "ofType": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "DesignAtVersion",
+ "ofType": null
+ }
+ ]
},
{
"kind": "OBJECT",
- "name": "PageInfo",
- "description": "Information about pagination in a connection.",
+ "name": "DesignManagement",
+ "description": null,
"fields": [
{
- "name": "endCursor",
- "description": "When paginating forwards, the cursor to continue.",
+ "name": "designAtVersion",
+ "description": "Find a design as of a version",
"args": [
-
+ {
+ "name": "id",
+ "description": "The Global ID of the design at this version",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ }
],
"type": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "OBJECT",
+ "name": "DesignAtVersion",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "hasNextPage",
- "description": "When paginating forwards, are there more items?",
+ "name": "version",
+ "description": "Find a version",
"args": [
-
+ {
+ "name": "id",
+ "description": "The Global ID of the version",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ }
],
"type": {
+ "kind": "OBJECT",
+ "name": "DesignVersion",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "INPUT_OBJECT",
+ "name": "DesignManagementDeleteInput",
+ "description": "Autogenerated input type of DesignManagementDelete",
+ "fields": null,
+ "inputFields": [
+ {
+ "name": "projectPath",
+ "description": "The project where the issue is to upload designs for",
+ "type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "ID",
"ofType": null
}
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "hasPreviousPage",
- "description": "When paginating backwards, are there more items?",
- "args": [
-
- ],
+ "name": "iid",
+ "description": "The iid of the issue to modify designs for",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "ID",
"ofType": null
}
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "startCursor",
- "description": "When paginating backwards, the cursor to continue.",
- "args": [
-
- ],
+ "name": "filenames",
+ "description": "The filenames of the designs to delete",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
}
],
- "inputFields": null,
- "interfaces": [
-
- ],
+ "interfaces": null,
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
- "name": "ProjectEdge",
- "description": "An edge in a connection.",
+ "name": "DesignManagementDeletePayload",
+ "description": "Autogenerated return type of DesignManagementDelete",
"fields": [
{
- "name": "cursor",
- "description": "A cursor for use in pagination.",
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "errors",
+ "description": "Reasons why the mutation failed.",
"args": [
],
@@ -3059,23 +3580,31 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "node",
- "description": "The item at the end of the edge.",
+ "name": "version",
+ "description": "The new version in which the designs are deleted",
"args": [
],
"type": {
"kind": "OBJECT",
- "name": "Project",
+ "name": "DesignVersion",
"ofType": null
},
"isDeprecated": false,
@@ -3090,27 +3619,84 @@
"possibleTypes": null
},
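DesignManagementDeleteInput above takes the project path, issue iid, and the filenames to delete, and the payload returns the new version created by the deletion. An illustrative call, assuming the conventional `designManagementDelete` field name; the path, iid, and filenames are placeholders:

    # Illustrative sketch only.
    mutation {
      designManagementDelete(input: {
        projectPath: "my-group/my-project"
        iid: "42"
        filenames: ["homepage.png", "settings.png"]
      }) {
        version {
          id
          sha
        }
        errors
      }
    }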
{
- "kind": "OBJECT",
- "name": "Group",
- "description": null,
- "fields": [
+ "kind": "INPUT_OBJECT",
+ "name": "DesignManagementUploadInput",
+ "description": "Autogenerated input type of DesignManagementUpload",
+ "fields": null,
+ "inputFields": [
{
- "name": "autoDevopsEnabled",
- "description": "Indicates whether Auto DevOps is enabled for all projects within this group",
- "args": [
-
- ],
+ "name": "projectPath",
+ "description": "The project where the issue is to upload designs for",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "iid",
+ "description": "The iid of the issue to modify designs for",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "files",
+ "description": "The files to upload",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Upload",
+ "ofType": null
+ }
+ }
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
"type": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "String",
"ofType": null
},
- "isDeprecated": false,
- "deprecationReason": null
- },
+ "defaultValue": null
+ }
+ ],
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "DesignManagementUploadPayload",
+ "description": "Autogenerated return type of DesignManagementUpload",
+ "fields": [
{
- "name": "avatarUrl",
- "description": "Avatar URL of the group",
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
"args": [
],
@@ -3123,112 +3709,160 @@
"deprecationReason": null
},
{
- "name": "description",
- "description": "Description of the namespace",
+ "name": "designs",
+ "description": "The designs that were uploaded by the mutation",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "Design",
+ "ofType": null
+ }
+ }
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "descriptionHtml",
- "description": "The GitLab Flavored Markdown rendering of `description`",
+ "name": "errors",
+ "description": "Reasons why the mutation failed.",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "emailsDisabled",
- "description": "Indicates if a group has email notifications disabled",
+ "name": "skippedDesigns",
+ "description": "Any designs that were skipped from the upload due to there being no change to their content since their last version",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "Design",
+ "ofType": null
+ }
+ }
+ }
},
"isDeprecated": false,
"deprecationReason": null
- },
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
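DesignManagementUploadInput above is the upload counterpart: the same projectPath/iid pair plus a list of Upload scalars, with the payload reporting uploaded and skipped designs. An illustrative operation using a variable for the files; how the Upload values are transported (typically a multipart request) is outside this schema and is an assumption, as are the placeholder path and iid:

    # Illustrative sketch only; $files must be bound to actual file uploads by
    # the client (the Upload scalar is defined elsewhere in the schema).
    mutation uploadDesigns($files: [Upload!]!) {
      designManagementUpload(input: {
        projectPath: "my-group/my-project"
        iid: "42"
        files: $files
      }) {
        designs {
          filename
        }
        skippedDesigns {
          filename
        }
        errors
      }
    }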
+ {
+ "kind": "OBJECT",
+ "name": "DesignVersion",
+ "description": "A specific version in which designs were added, modified or deleted",
+ "fields": [
{
- "name": "epic",
- "description": "Find a single epic",
+ "name": "designAtVersion",
+ "description": "A particular design as of this version, provided it is visible at this version",
"args": [
{
- "name": "startDate",
- "description": "List items within a time frame where items.start_date is between startDate and endDate parameters (endDate parameter must be present)",
+ "name": "id",
+ "description": "The ID of the DesignAtVersion",
"type": {
"kind": "SCALAR",
- "name": "Time",
+ "name": "ID",
"ofType": null
},
"defaultValue": null
},
{
- "name": "endDate",
- "description": "List items within a time frame where items.end_date is between startDate and endDate parameters (startDate parameter must be present)",
+ "name": "designId",
+ "description": "The ID of a specific design",
"type": {
"kind": "SCALAR",
- "name": "Time",
+ "name": "ID",
"ofType": null
},
"defaultValue": null
},
{
- "name": "iid",
- "description": "IID of the epic, e.g., \"1\"",
+ "name": "filename",
+ "description": "The filename of a specific design",
"type": {
"kind": "SCALAR",
- "name": "ID",
+ "name": "String",
"ofType": null
},
"defaultValue": null
- },
- {
- "name": "iids",
- "description": "List of IIDs of epics, e.g., [1, 2]",
- "type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- }
- },
- "defaultValue": null
- },
+ }
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "DesignAtVersion",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "designs",
+ "description": "All designs that were changed in the version",
+ "args": [
{
- "name": "state",
- "description": "Filter epics by state",
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.",
"type": {
- "kind": "ENUM",
- "name": "EpicState",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
},
"defaultValue": null
},
{
- "name": "search",
- "description": "Filter epics by title and description",
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.",
"type": {
"kind": "SCALAR",
"name": "String",
@@ -3237,89 +3871,45 @@
"defaultValue": null
},
{
- "name": "sort",
- "description": "List epics by sort order",
+ "name": "first",
+ "description": "Returns the first _n_ elements from the list.",
"type": {
- "kind": "ENUM",
- "name": "EpicSort",
+ "kind": "SCALAR",
+ "name": "Int",
"ofType": null
},
"defaultValue": null
},
{
- "name": "authorUsername",
- "description": "Filter epics by author",
+ "name": "last",
+ "description": "Returns the last _n_ elements from the list.",
"type": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Int",
"ofType": null
},
"defaultValue": null
- },
- {
- "name": "labelName",
- "description": "Filter epics by labels",
- "type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- }
- },
- "defaultValue": null
}
],
"type": {
- "kind": "OBJECT",
- "name": "Epic",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "DesignConnection",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "epics",
- "description": "Find epics",
+ "name": "designsAtVersion",
+ "description": "All designs that are visible at this version, as of this version",
"args": [
{
- "name": "startDate",
- "description": "List items within a time frame where items.start_date is between startDate and endDate parameters (endDate parameter must be present)",
- "type": {
- "kind": "SCALAR",
- "name": "Time",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "endDate",
- "description": "List items within a time frame where items.end_date is between startDate and endDate parameters (startDate parameter must be present)",
- "type": {
- "kind": "SCALAR",
- "name": "Time",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "iid",
- "description": "IID of the epic, e.g., \"1\"",
- "type": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "iids",
- "description": "List of IIDs of epics, e.g., [1, 2]",
+ "name": "ids",
+ "description": "Filters designs by their ID",
"type": {
"kind": "LIST",
"name": null,
@@ -3336,48 +3926,8 @@
"defaultValue": null
},
{
- "name": "state",
- "description": "Filter epics by state",
- "type": {
- "kind": "ENUM",
- "name": "EpicState",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "search",
- "description": "Filter epics by title and description",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "sort",
- "description": "List epics by sort order",
- "type": {
- "kind": "ENUM",
- "name": "EpicSort",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "authorUsername",
- "description": "Filter epics by author",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "labelName",
- "description": "Filter epics by labels",
+ "name": "filenames",
+ "description": "Filters designs by their filename",
"type": {
"kind": "LIST",
"name": null,
@@ -3435,30 +3985,38 @@
}
],
"type": {
- "kind": "OBJECT",
- "name": "EpicConnection",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "DesignAtVersionConnection",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "epicsEnabled",
- "description": "Indicates if Epics are enabled for namespace",
+ "name": "id",
+ "description": "ID of the design version",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "fullName",
- "description": "Full name of the namespace",
+ "name": "sha",
+ "description": "SHA of the design version",
"args": [
],
@@ -3467,7 +4025,38 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "ID",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "DesignVersionConnection",
+ "description": "The connection type for DesignVersion.",
+ "fields": [
+ {
+ "name": "edges",
+ "description": "A list of edges.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "DesignVersionEdge",
"ofType": null
}
},
@@ -3475,8 +4064,57 @@
"deprecationReason": null
},
{
- "name": "fullPath",
- "description": "Full path of the namespace",
+ "name": "nodes",
+ "description": "A list of nodes.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "DesignVersion",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "pageInfo",
+ "description": "Information to aid in pagination.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "PageInfo",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "DesignVersionEdge",
+ "description": "An edge in a connection.",
+ "fields": [
+ {
+ "name": "cursor",
+ "description": "A cursor for use in pagination.",
"args": [
],
@@ -3485,7 +4123,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "ID",
+ "name": "String",
"ofType": null
}
},
@@ -3493,25 +4131,71 @@
"deprecationReason": null
},
{
- "name": "groupTimelogsEnabled",
- "description": "Indicates if Group timelogs are enabled for namespace",
+ "name": "node",
+ "description": "The item at the end of the edge.",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "Boolean",
+ "kind": "OBJECT",
+ "name": "DesignVersion",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "ENUM",
+ "name": "DesignVersionEvent",
+ "description": "Mutation event of a design within a version",
+ "fields": null,
+ "inputFields": null,
+ "interfaces": null,
+ "enumValues": [
+ {
+ "name": "NONE",
+ "description": "No change",
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
+ "name": "CREATION",
+ "description": "A creation event",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "MODIFICATION",
+ "description": "A modification event",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "DELETION",
+ "description": "A deletion event",
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "possibleTypes": null
+ },
+ {
+ "kind": "INPUT_OBJECT",
+ "name": "DestroyNoteInput",
+ "description": "Autogenerated input type of DestroyNote",
+ "fields": null,
+ "inputFields": [
+ {
"name": "id",
- "description": "ID of the namespace",
- "args": [
-
- ],
+ "description": "The global id of the note to destroy",
"type": {
"kind": "NON_NULL",
"name": null,
@@ -3521,123 +4205,218 @@
"ofType": null
}
},
+ "defaultValue": null
+ },
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "DestroyNotePayload",
+ "description": "Autogenerated return type of DestroyNote",
+ "fields": [
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "errors",
+ "description": "Reasons why the mutation failed.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
+ }
+ },
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "lfsEnabled",
- "description": "Indicates if Large File Storage (LFS) is enabled for namespace",
+ "name": "note",
+ "description": "The note after mutation",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "Boolean",
+ "kind": "OBJECT",
+ "name": "Note",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "INPUT_OBJECT",
+ "name": "DestroySnippetInput",
+ "description": "Autogenerated input type of DestroySnippet",
+ "fields": null,
+ "inputFields": [
+ {
+ "name": "id",
+ "description": "The global id of the snippet to destroy",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
},
{
- "name": "mentionsDisabled",
- "description": "Indicates if a group is disabled from getting mentioned",
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "DestroySnippetPayload",
+ "description": "Autogenerated return type of DestroySnippet",
+ "fields": [
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
"args": [
],
"type": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "milestones",
- "description": "Find milestones",
+ "name": "errors",
+ "description": "Reasons why the mutation failed.",
"args": [
- {
- "name": "startDate",
- "description": "List items within a time frame where items.start_date is between startDate and endDate parameters (endDate parameter must be present)",
- "type": {
- "kind": "SCALAR",
- "name": "Time",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "endDate",
- "description": "List items within a time frame where items.end_date is between startDate and endDate parameters (startDate parameter must be present)",
- "type": {
- "kind": "SCALAR",
- "name": "Time",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "state",
- "description": "Filter milestones by state",
- "type": {
- "kind": "ENUM",
- "name": "MilestoneStateEnum",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "after",
- "description": "Returns the elements in the list that come after the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "before",
- "description": "Returns the elements in the list that come before the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "first",
- "description": "Returns the first _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "last",
- "description": "Returns the last _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
}
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "snippet",
+ "description": "The snippet after mutation",
+ "args": [
+
],
"type": {
"kind": "OBJECT",
- "name": "MilestoneConnection",
+ "name": "Snippet",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "DetailedStatus",
+ "description": null,
+ "fields": [
+ {
+ "name": "detailsPath",
+ "description": "Path of the details for the pipeline status",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "name",
- "description": "Name of the namespace",
+ "name": "favicon",
+ "description": "Favicon of the pipeline status",
"args": [
],
@@ -3654,22 +4433,44 @@
"deprecationReason": null
},
{
- "name": "parent",
- "description": "Parent group",
+ "name": "group",
+ "description": "Group of the pipeline status",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "Group",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "path",
- "description": "Path of the namespace",
+ "name": "hasDetails",
+ "description": "Indicates if the pipeline status has further details",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "icon",
+ "description": "Icon of the pipeline status",
"args": [
],
@@ -3686,80 +4487,252 @@
"deprecationReason": null
},
{
- "name": "projectCreationLevel",
- "description": "The permission level required to create projects in the group",
+ "name": "label",
+ "description": "Label of the pipeline status",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "text",
+ "description": "Text of the pipeline status",
"args": [
],
"type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "tooltip",
+ "description": "Tooltip associated with the pipeline status",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "INPUT_OBJECT",
+ "name": "DiffImagePositionInput",
+ "description": null,
+ "fields": null,
+ "inputFields": [
+ {
+ "name": "headSha",
+ "description": "SHA of the HEAD at the time the comment was made",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "baseSha",
+ "description": "Merge base of the branch the comment was made on",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "startSha",
+ "description": "SHA of the branch being compared against",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "paths",
+ "description": "The paths of the file that was changed. Both of the properties of this input are optional, but at least one of them is required",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "DiffPathsInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "x",
+ "description": "X position of the note",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "y",
+ "description": "Y position of the note",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "width",
+ "description": "Total width of the image",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "height",
+ "description": "Total height of the image",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ }
+ ],
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "INPUT_OBJECT",
+ "name": "DiffPathsInput",
+ "description": null,
+ "fields": null,
+ "inputFields": [
+ {
+ "name": "oldPath",
+ "description": "The path of the file on the start sha",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "newPath",
+ "description": "The path of the file on the head sha",
+ "type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
+ "defaultValue": null
+ }
+ ],
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "DiffPosition",
+ "description": null,
+ "fields": [
+ {
+ "name": "diffRefs",
+ "description": "Information about the branch, HEAD, and base at the time of commenting",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "DiffRefs",
+ "ofType": null
+ }
+ },
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "projects",
- "description": "Projects within this namespace",
+ "name": "filePath",
+ "description": "Path of the file that was changed",
"args": [
- {
- "name": "includeSubgroups",
- "description": "Include also subgroup projects",
- "type": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- },
- "defaultValue": "false"
- },
- {
- "name": "after",
- "description": "Returns the elements in the list that come after the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "before",
- "description": "Returns the elements in the list that come before the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "first",
- "description": "Returns the first _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "last",
- "description": "Returns the last _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- }
+
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "ProjectConnection",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
}
},
@@ -3767,64 +4740,64 @@
"deprecationReason": null
},
{
- "name": "requestAccessEnabled",
- "description": "Indicates if users can request access to namespace",
+ "name": "height",
+ "description": "Total height of the image",
"args": [
],
"type": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "Int",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "requireTwoFactorAuthentication",
- "description": "Indicates if all users in this group are required to set up two-factor authentication",
+ "name": "newLine",
+ "description": "Line on HEAD SHA that was changed",
"args": [
],
"type": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "Int",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "rootStorageStatistics",
- "description": "Aggregated storage statistics of the namespace. Only available for root namespaces",
+ "name": "newPath",
+ "description": "Path of the file on the HEAD SHA",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "RootStorageStatistics",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "shareWithGroupLock",
- "description": "Indicates if sharing a project with another group within this group is prevented",
+ "name": "oldLine",
+ "description": "Line on start SHA that was changed",
"args": [
],
"type": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "Int",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "subgroupCreationLevel",
- "description": "The permission level required to create subgroups within the group",
+ "name": "oldPath",
+ "description": "Path of the file on the start SHA",
"args": [
],
@@ -3837,84 +4810,17 @@
"deprecationReason": null
},
{
- "name": "timelogs",
- "description": "Time logged in issues by group members",
+ "name": "positionType",
+ "description": "Type of file the position refers to",
"args": [
- {
- "name": "startDate",
- "description": "List time logs within a time range where the logged date is after start_date parameter.",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Time",
- "ofType": null
- }
- },
- "defaultValue": null
- },
- {
- "name": "endDate",
- "description": "List time logs within a time range where the logged date is before end_date parameter.",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Time",
- "ofType": null
- }
- },
- "defaultValue": null
- },
- {
- "name": "after",
- "description": "Returns the elements in the list that come after the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "before",
- "description": "Returns the elements in the list that come before the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "first",
- "description": "Returns the first _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "last",
- "description": "Returns the last _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- }
+
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "TimelogConnection",
+ "kind": "ENUM",
+ "name": "DiffPositionType",
"ofType": null
}
},
@@ -3922,8 +4828,8 @@
"deprecationReason": null
},
{
- "name": "twoFactorGracePeriod",
- "description": "Time before two-factor authentication is enforced",
+ "name": "width",
+ "description": "Total width of the image",
"args": [
],
@@ -3936,26 +4842,159 @@
"deprecationReason": null
},
{
- "name": "userPermissions",
- "description": "Permissions for the current user on the resource",
+ "name": "x",
+ "description": "X position of the note",
"args": [
],
"type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "y",
+ "description": "Y position of the note",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "INPUT_OBJECT",
+ "name": "DiffPositionInput",
+ "description": null,
+ "fields": null,
+ "inputFields": [
+ {
+ "name": "headSha",
+ "description": "SHA of the HEAD at the time the comment was made",
+ "type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "GroupPermissions",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
}
},
+ "defaultValue": null
+ },
+ {
+ "name": "baseSha",
+ "description": "Merge base of the branch the comment was made on",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "startSha",
+ "description": "SHA of the branch being compared against",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "paths",
+ "description": "The paths of the file that was changed. Both of the properties of this input are optional, but at least one of them is required",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "DiffPathsInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "oldLine",
+ "description": "Line on start SHA that was changed",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "newLine",
+ "description": "Line on HEAD SHA that was changed",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ }
+ ],
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "ENUM",
+ "name": "DiffPositionType",
+ "description": "Type of file the position refers to",
+ "fields": null,
+ "inputFields": null,
+ "interfaces": null,
+ "enumValues": [
+ {
+ "name": "text",
+ "description": null,
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "visibility",
- "description": "Visibility of the namespace",
+ "name": "image",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "DiffRefs",
+ "description": null,
+ "fields": [
+ {
+ "name": "baseSha",
+ "description": "Merge base of the branch the comment was made on",
"args": [
],
@@ -3968,8 +5007,26 @@
"deprecationReason": null
},
{
- "name": "webUrl",
- "description": "Web URL of the group",
+ "name": "headSha",
+ "description": "SHA of the HEAD at the time the comment was made",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "startSha",
+ "description": "SHA of the branch being compared against",
"args": [
],
@@ -3995,12 +5052,12 @@
},
{
"kind": "OBJECT",
- "name": "GroupPermissions",
+ "name": "Discussion",
"description": null,
"fields": [
{
- "name": "readGroup",
- "description": "Indicates the user can perform `read_group` on this resource",
+ "name": "createdAt",
+ "description": "Timestamp of the discussion's creation",
"args": [
],
@@ -4009,7 +5066,100 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "Time",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "id",
+ "description": "ID of this discussion",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "notes",
+ "description": "All notes in the discussion",
+ "args": [
+ {
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "first",
+ "description": "Returns the first _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "last",
+ "description": "Returns the last _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "NoteConnection",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "replyId",
+ "description": "ID used to reply to this discussion",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
"ofType": null
}
},
@@ -4026,8 +5176,8 @@
},
{
"kind": "OBJECT",
- "name": "MilestoneConnection",
- "description": "The connection type for Milestone.",
+ "name": "DiscussionConnection",
+ "description": "The connection type for Discussion.",
"fields": [
{
"name": "edges",
@@ -4040,7 +5190,7 @@
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "MilestoneEdge",
+ "name": "DiscussionEdge",
"ofType": null
}
},
@@ -4058,7 +5208,7 @@
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "Milestone",
+ "name": "Discussion",
"ofType": null
}
},
@@ -4093,7 +5243,7 @@
},
{
"kind": "OBJECT",
- "name": "MilestoneEdge",
+ "name": "DiscussionEdge",
"description": "An edge in a connection.",
"fields": [
{
@@ -4122,7 +5272,7 @@
],
"type": {
"kind": "OBJECT",
- "name": "Milestone",
+ "name": "Discussion",
"ofType": null
},
"isDeprecated": false,
@@ -4137,13 +5287,13 @@
"possibleTypes": null
},
{
- "kind": "OBJECT",
- "name": "Milestone",
- "description": "Represents a milestone.",
+ "kind": "INTERFACE",
+ "name": "Entry",
+ "description": null,
"fields": [
{
- "name": "createdAt",
- "description": "Timestamp of milestone creation",
+ "name": "flatPath",
+ "description": "Flat path of the entry",
"args": [
],
@@ -4152,7 +5302,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Time",
+ "name": "String",
"ofType": null
}
},
@@ -4160,36 +5310,44 @@
"deprecationReason": null
},
{
- "name": "description",
- "description": "Description of the milestone",
+ "name": "id",
+ "description": "ID of the entry",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "dueDate",
- "description": "Timestamp of the milestone due date",
+ "name": "name",
+ "description": "Name of the entry",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "Time",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "id",
- "description": "ID of the milestone",
+ "name": "path",
+ "description": "Path of the entry",
"args": [
],
@@ -4198,7 +5356,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "ID",
+ "name": "String",
"ofType": null
}
},
@@ -4206,22 +5364,26 @@
"deprecationReason": null
},
{
- "name": "startDate",
- "description": "Timestamp of the milestone start date",
+ "name": "sha",
+ "description": "Last commit sha for the entry",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "Time",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "state",
- "description": "State of the milestone",
+ "name": "type",
+ "description": "Type of tree entry",
"args": [
],
@@ -4230,16 +5392,72 @@
"name": null,
"ofType": {
"kind": "ENUM",
- "name": "MilestoneStateEnum",
+ "name": "EntryType",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": [
+ {
+ "kind": "OBJECT",
+ "name": "Blob",
+ "ofType": null
},
{
- "name": "title",
- "description": "Title of the milestone",
+ "kind": "OBJECT",
+ "name": "Submodule",
+ "ofType": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "TreeEntry",
+ "ofType": null
+ }
+ ]
+ },
+ {
+ "kind": "ENUM",
+ "name": "EntryType",
+ "description": "Type of a tree entry",
+ "fields": null,
+ "inputFields": null,
+ "interfaces": null,
+ "enumValues": [
+ {
+ "name": "tree",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "blob",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "commit",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "Environment",
+ "description": "Describes where code is deployed for a project",
+ "fields": [
+ {
+ "name": "id",
+ "description": "ID of the environment",
"args": [
],
@@ -4248,7 +5466,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "ID",
"ofType": null
}
},
@@ -4256,8 +5474,8 @@
"deprecationReason": null
},
{
- "name": "updatedAt",
- "description": "Timestamp of last milestone update",
+ "name": "name",
+ "description": "Human-readable name of the environment",
"args": [
],
@@ -4266,7 +5484,38 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Time",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "EnvironmentConnection",
+ "description": "The connection type for Environment.",
+ "fields": [
+ {
+ "name": "edges",
+ "description": "A list of edges.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "EnvironmentEdge",
"ofType": null
}
},
@@ -4274,8 +5523,26 @@
"deprecationReason": null
},
{
- "name": "webPath",
- "description": "Web path of the milestone",
+ "name": "nodes",
+ "description": "A list of nodes.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "Environment",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "pageInfo",
+ "description": "Information to aid in pagination.",
"args": [
],
@@ -4283,8 +5550,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "OBJECT",
+ "name": "PageInfo",
"ofType": null
}
},
@@ -4300,26 +5567,48 @@
"possibleTypes": null
},
{
- "kind": "ENUM",
- "name": "MilestoneStateEnum",
- "description": null,
- "fields": null,
- "inputFields": null,
- "interfaces": null,
- "enumValues": [
+ "kind": "OBJECT",
+ "name": "EnvironmentEdge",
+ "description": "An edge in a connection.",
+ "fields": [
{
- "name": "active",
- "description": null,
+ "name": "cursor",
+ "description": "A cursor for use in pagination.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "closed",
- "description": null,
+ "name": "node",
+ "description": "The item at the end of the edge.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "Environment",
+ "ofType": null
+ },
"isDeprecated": false,
"deprecationReason": null
}
],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
"possibleTypes": null
},
{
@@ -4456,6 +5745,16 @@
"defaultValue": null
},
{
+ "name": "iidStartsWith",
+ "description": "Filter epics by iid for autocomplete",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
"name": "after",
"description": "Returns the elements in the list that come after the specified cursor.",
"type": {
@@ -4547,6 +5846,20 @@
"deprecationReason": null
},
{
+ "name": "descendantWeightSum",
+ "description": "Total weight of open and closed issues in the epic and its descendants",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "EpicDescendantWeights",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
"name": "description",
"description": "Description of the epic",
"args": [
@@ -4746,6 +6059,20 @@
"deprecationReason": null
},
{
+ "name": "healthStatus",
+ "description": "Current health status of the epic",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "EpicHealthStatus",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
"name": "id",
"description": "ID of the epic",
"args": [
@@ -5271,165 +6598,167 @@
"possibleTypes": null
},
{
- "kind": "INTERFACE",
- "name": "Noteable",
- "description": null,
- "fields": [
+ "kind": "INPUT_OBJECT",
+ "name": "EpicAddIssueInput",
+ "description": "Autogenerated input type of EpicAddIssue",
+ "fields": null,
+ "inputFields": [
{
- "name": "discussions",
- "description": "All discussions on this noteable",
- "args": [
- {
- "name": "after",
- "description": "Returns the elements in the list that come after the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "before",
- "description": "Returns the elements in the list that come before the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "first",
- "description": "Returns the first _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "last",
- "description": "Returns the last _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
+ "name": "iid",
+ "description": "The iid of the epic to mutate",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
}
- ],
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "groupPath",
+ "description": "The group the epic to mutate belongs to",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "DiscussionConnection",
+ "kind": "SCALAR",
+ "name": "ID",
"ofType": null
}
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "notes",
- "description": "All notes on this noteable",
- "args": [
- {
- "name": "after",
- "description": "Returns the elements in the list that come after the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "before",
- "description": "Returns the elements in the list that come before the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "first",
- "description": "Returns the first _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "last",
- "description": "Returns the last _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
+ "name": "projectPath",
+ "description": "The project the issue belongs to",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
}
- ],
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "issueIid",
+ "description": "The iid of the issue to be added",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "NoteConnection",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
}
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
+ },
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
}
],
- "inputFields": null,
"interfaces": null,
"enumValues": null,
- "possibleTypes": [
- {
- "kind": "OBJECT",
- "name": "Design",
- "ofType": null
- },
- {
- "kind": "OBJECT",
- "name": "Epic",
- "ofType": null
- },
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "EpicAddIssuePayload",
+ "description": "Autogenerated return type of EpicAddIssue",
+ "fields": [
{
- "kind": "OBJECT",
- "name": "EpicIssue",
- "ofType": null
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "kind": "OBJECT",
- "name": "Issue",
- "ofType": null
+ "name": "epic",
+ "description": "The epic after mutation",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "Epic",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "kind": "OBJECT",
- "name": "MergeRequest",
- "ofType": null
+ "name": "epicIssue",
+ "description": "The epic-issue relation",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "EpicIssue",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "kind": "OBJECT",
- "name": "Snippet",
- "ofType": null
+ "name": "errors",
+ "description": "Reasons why the mutation failed.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
}
- ]
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
},
{
"kind": "OBJECT",
- "name": "NoteConnection",
- "description": "The connection type for Note.",
+ "name": "EpicConnection",
+ "description": "The connection type for Epic.",
"fields": [
{
"name": "edges",
@@ -5442,7 +6771,7 @@
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "NoteEdge",
+ "name": "EpicEdge",
"ofType": null
}
},
@@ -5460,7 +6789,7 @@
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "Note",
+ "name": "Epic",
"ofType": null
}
},
@@ -5495,36 +6824,60 @@
},
{
"kind": "OBJECT",
- "name": "NoteEdge",
- "description": "An edge in a connection.",
+ "name": "EpicDescendantCount",
+      "description": "Counts of descendant epics.",
"fields": [
{
- "name": "cursor",
- "description": "A cursor for use in pagination.",
+ "name": "closedEpics",
+ "description": "Number of closed sub-epics",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "node",
- "description": "The item at the end of the edge.",
+ "name": "closedIssues",
+ "description": "Number of closed epic issues",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "Note",
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "openedEpics",
+ "description": "Number of opened sub-epics",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "openedIssues",
+ "description": "Number of opened epic issues",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
"ofType": null
},
"isDeprecated": false,
@@ -5540,30 +6893,53 @@
},
{
"kind": "OBJECT",
- "name": "Note",
- "description": null,
+ "name": "EpicDescendantWeights",
+ "description": "Total weight of open and closed descendant issues",
"fields": [
{
- "name": "author",
- "description": "User who wrote this note",
+ "name": "closedIssues",
+ "description": "Total weight of completed (closed) issues in this epic, including epic descendants",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "User",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "body",
- "description": "Content of the note",
+ "name": "openedIssues",
+ "description": "Total weight of opened issues in this epic, including epic descendants",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "EpicEdge",
+ "description": "An edge in a connection.",
+ "fields": [
+ {
+ "name": "cursor",
+ "description": "A cursor for use in pagination.",
"args": [
],
@@ -5580,54 +6956,143 @@
"deprecationReason": null
},
{
- "name": "bodyHtml",
- "description": "The GitLab Flavored Markdown rendering of `note`",
+ "name": "node",
+ "description": "The item at the end of the edge.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "Epic",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "EpicHealthStatus",
+ "description": "Health status of child issues",
+ "fields": [
+ {
+ "name": "issuesAtRisk",
+ "description": "Number of issues at risk",
"args": [
],
"type": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Int",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "createdAt",
- "description": "Timestamp of the note creation",
+ "name": "issuesNeedingAttention",
+ "description": "Number of issues that need attention",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Time",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "discussion",
- "description": "The discussion this note is a part of",
+ "name": "issuesOnTrack",
+ "description": "Number of issues on track",
"args": [
],
"type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "EpicIssue",
+ "description": "Relationship between an epic and an issue",
+ "fields": [
+ {
+ "name": "assignees",
+ "description": "Assignees of the issue",
+ "args": [
+ {
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "first",
+ "description": "Returns the first _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "last",
+ "description": "Returns the last _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
+ "type": {
"kind": "OBJECT",
- "name": "Discussion",
+ "name": "UserConnection",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "id",
- "description": "ID of the note",
+ "name": "author",
+ "description": "User that created the issue",
"args": [
],
@@ -5635,8 +7100,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "ID",
+ "kind": "OBJECT",
+ "name": "User",
"ofType": null
}
},
@@ -5644,36 +7109,40 @@
"deprecationReason": null
},
{
- "name": "position",
- "description": "The position of this note on a diff",
+ "name": "closedAt",
+ "description": "Timestamp of when the issue was closed",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "DiffPosition",
+ "kind": "SCALAR",
+ "name": "Time",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "project",
- "description": "Project associated with the note",
+ "name": "confidential",
+ "description": "Indicates the issue is confidential",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "Project",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "resolvable",
- "description": "Indicates if this note can be resolved. That is, if it is a resolvable discussion or simply a standalone note",
+ "name": "createdAt",
+ "description": "Timestamp of when the issue was created",
"args": [
],
@@ -5682,7 +7151,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "Time",
"ofType": null
}
},
@@ -5690,54 +7159,64 @@
"deprecationReason": null
},
{
- "name": "resolvedAt",
- "description": "Timestamp of the note's resolution",
+ "name": "description",
+ "description": "Description of the issue",
"args": [
],
"type": {
"kind": "SCALAR",
- "name": "Time",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "resolvedBy",
- "description": "User that resolved the discussion",
+ "name": "descriptionHtml",
+ "description": "The GitLab Flavored Markdown rendering of `description`",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "User",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "system",
- "description": "Indicates whether this note was created by the system or by a user",
+ "name": "designCollection",
+ "description": "Collection of design images associated with this issue",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- }
+ "kind": "OBJECT",
+ "name": "DesignCollection",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "updatedAt",
- "description": "Timestamp of the note's last activity",
+ "name": "designs",
+ "description": "Deprecated. Use `designCollection`",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "DesignCollection",
+ "ofType": null
+ },
+ "isDeprecated": true,
+ "deprecationReason": "Use designCollection"
+ },
+ {
+ "name": "discussionLocked",
+ "description": "Indicates discussion is locked on the issue",
"args": [
],
@@ -5746,7 +7225,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Time",
+ "name": "Boolean",
"ofType": null
}
},
@@ -5754,39 +7233,65 @@
"deprecationReason": null
},
{
- "name": "userPermissions",
- "description": "Permissions for the current user on the resource",
+ "name": "discussions",
+ "description": "All discussions on this noteable",
"args": [
-
+ {
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "first",
+ "description": "Returns the first _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "last",
+ "description": "Returns the last _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "NotePermissions",
+ "name": "DiscussionConnection",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "NotePermissions",
- "description": null,
- "fields": [
+ },
{
- "name": "adminNote",
- "description": "Indicates the user can perform `admin_note` on this resource",
+ "name": "downvotes",
+ "description": "Number of downvotes the issue has received",
"args": [
],
@@ -5795,7 +7300,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "Int",
"ofType": null
}
},
@@ -5803,44 +7308,36 @@
"deprecationReason": null
},
{
- "name": "awardEmoji",
- "description": "Indicates the user can perform `award_emoji` on this resource",
+ "name": "dueDate",
+ "description": "Due date of the issue",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "createNote",
- "description": "Indicates the user can perform `create_note` on this resource",
+ "name": "epic",
+ "description": "Epic to which this issue belongs",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- }
+ "kind": "OBJECT",
+ "name": "Epic",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "readNote",
- "description": "Indicates the user can perform `read_note` on this resource",
+ "name": "epicIssueId",
+ "description": "ID of the epic-issue relation",
"args": [
],
@@ -5849,7 +7346,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "ID",
"ofType": null
}
},
@@ -5857,53 +7354,36 @@
"deprecationReason": null
},
{
- "name": "resolveNote",
- "description": "Indicates the user can perform `resolve_note` on this resource",
+ "name": "healthStatus",
+ "description": "Current health status. Available only when feature flag `save_issuable_health_status` is enabled.",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- }
+ "kind": "ENUM",
+ "name": "HealthStatus",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "User",
- "description": null,
- "fields": [
+ },
{
- "name": "avatarUrl",
- "description": "URL of the user's avatar",
+ "name": "id",
+ "description": "Global ID of the epic-issue relation",
"args": [
],
"type": {
"kind": "SCALAR",
- "name": "String",
+ "name": "ID",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "name",
- "description": "Human-readable name of the user",
+ "name": "iid",
+ "description": "Internal ID of the issue",
"args": [
],
@@ -5912,7 +7392,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "ID",
"ofType": null
}
},
@@ -5920,48 +7400,10 @@
"deprecationReason": null
},
{
- "name": "snippets",
- "description": "Snippets authored by the user",
+ "name": "labels",
+ "description": "Labels of the issue",
"args": [
{
- "name": "ids",
- "description": "Array of global snippet ids, e.g., \"gid://gitlab/ProjectSnippet/1\"",
- "type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- }
- },
- "defaultValue": null
- },
- {
- "name": "visibility",
- "description": "The visibility of the snippet",
- "type": {
- "kind": "ENUM",
- "name": "VisibilityScopesEnum",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "type",
- "description": "The type of snippet",
- "type": {
- "kind": "ENUM",
- "name": "TypeEnum",
- "ofType": null
- },
- "defaultValue": null
- },
- {
"name": "after",
"description": "Returns the elements in the list that come after the specified cursor.",
"type": {
@@ -6004,124 +7446,87 @@
],
"type": {
"kind": "OBJECT",
- "name": "SnippetConnection",
+ "name": "LabelConnection",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "todos",
- "description": "Todos of the user",
+ "name": "milestone",
+ "description": "Milestone of the issue",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "Milestone",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "notes",
+ "description": "All notes on this noteable",
"args": [
{
- "name": "action",
- "description": "The action to be filtered",
- "type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "ENUM",
- "name": "TodoActionEnum",
- "ofType": null
- }
- }
- },
- "defaultValue": null
- },
- {
- "name": "authorId",
- "description": "The ID of an author",
- "type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- }
- },
- "defaultValue": null
- },
- {
- "name": "projectId",
- "description": "The ID of a project",
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.",
"type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
},
"defaultValue": null
},
{
- "name": "groupId",
- "description": "The ID of a group",
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.",
"type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
},
"defaultValue": null
},
{
- "name": "state",
- "description": "The state of the todo",
+ "name": "first",
+ "description": "Returns the first _n_ elements from the list.",
"type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "ENUM",
- "name": "TodoStateEnum",
- "ofType": null
- }
- }
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
},
"defaultValue": null
},
{
- "name": "type",
- "description": "The type of the todo",
+ "name": "last",
+ "description": "Returns the last _n_ elements from the list.",
"type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "ENUM",
- "name": "TodoTargetEnum",
- "ofType": null
- }
- }
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
},
"defaultValue": null
- },
+ }
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "NoteConnection",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "participants",
+ "description": "List of participants in the issue",
+ "args": [
{
"name": "after",
"description": "Returns the elements in the list that come after the specified cursor.",
@@ -6164,11 +7569,34 @@
}
],
"type": {
+ "kind": "OBJECT",
+ "name": "UserConnection",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "reference",
+ "description": "Internal reference of the issue. Returned in shortened format by default",
+ "args": [
+ {
+ "name": "full",
+ "description": "Boolean option specifying whether the reference should be returned in full",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ },
+ "defaultValue": "false"
+ }
+ ],
+ "type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "TodoConnection",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
}
},
@@ -6176,8 +7604,72 @@
"deprecationReason": null
},
{
- "name": "userPermissions",
- "description": "Permissions for the current user on the resource",
+ "name": "relationPath",
+ "description": "URI path of the epic-issue relation",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "relativePosition",
+ "description": "Relative position of the issue (used for positioning in epic tree and issue boards)",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "state",
+ "description": "State of the issue",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "ENUM",
+ "name": "IssueState",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "subscribed",
+ "description": "Indicates the currently logged in user is subscribed to the issue",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "taskCompletionStatus",
+ "description": "Task completion status of the issue",
"args": [
],
@@ -6186,7 +7678,7 @@
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "UserPermissions",
+ "name": "TaskCompletionStatus",
"ofType": null
}
},
@@ -6194,8 +7686,8 @@
"deprecationReason": null
},
{
- "name": "username",
- "description": "Username of the user. Unique within this instance of GitLab",
+ "name": "timeEstimate",
+ "description": "Time estimate of the issue",
"args": [
],
@@ -6204,7 +7696,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Int",
"ofType": null
}
},
@@ -6212,8 +7704,8 @@
"deprecationReason": null
},
{
- "name": "webUrl",
- "description": "Web URL of the user",
+ "name": "title",
+ "description": "Title of the issue",
"args": [
],
@@ -6228,23 +7720,24 @@
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
+ },
+ {
+ "name": "titleHtml",
+ "description": "The GitLab Flavored Markdown rendering of `title`",
+ "args": [
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "UserPermissions",
- "description": null,
- "fields": [
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
{
- "name": "createSnippet",
- "description": "Indicates the user can perform `create_snippet` on this resource",
+ "name": "totalTimeSpent",
+ "description": "Total time reported as spent on the issue",
"args": [
],
@@ -6253,25 +7746,151 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "Int",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "updatedAt",
+ "description": "Timestamp of when the issue was last updated",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "upvotes",
+ "description": "Number of upvotes the issue has received",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "userNotesCount",
+ "description": "Number of user notes of the issue",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "userPermissions",
+ "description": "Permissions for the current user on the resource",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "IssuePermissions",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "webPath",
+ "description": "Web path of the issue",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
+ },
+ {
+ "name": "webUrl",
+ "description": "Web URL of the issue",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "weight",
+ "description": "Weight of the issue",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
}
],
"inputFields": null,
"interfaces": [
-
+ {
+ "kind": "INTERFACE",
+ "name": "Noteable",
+ "ofType": null
+ }
],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
- "name": "TodoConnection",
- "description": "The connection type for Todo.",
+ "name": "EpicIssueConnection",
+ "description": "The connection type for EpicIssue.",
"fields": [
{
"name": "edges",
@@ -6284,7 +7903,7 @@
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "TodoEdge",
+ "name": "EpicIssueEdge",
"ofType": null
}
},
@@ -6302,7 +7921,7 @@
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "Todo",
+ "name": "EpicIssue",
"ofType": null
}
},
@@ -6337,7 +7956,7 @@
},
{
"kind": "OBJECT",
- "name": "TodoEdge",
+ "name": "EpicIssueEdge",
"description": "An edge in a connection.",
"fields": [
{
@@ -6366,7 +7985,7 @@
],
"type": {
"kind": "OBJECT",
- "name": "Todo",
+ "name": "EpicIssue",
"ofType": null
},
"isDeprecated": false,
@@ -6382,12 +8001,12 @@
},
{
"kind": "OBJECT",
- "name": "Todo",
- "description": "Representing a todo entry",
+ "name": "EpicPermissions",
+ "description": "Check permissions for the current user on an epic",
"fields": [
{
- "name": "action",
- "description": "Action of the todo",
+ "name": "adminEpic",
+ "description": "Indicates the user can perform `admin_epic` on this resource",
"args": [
],
@@ -6395,8 +8014,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "ENUM",
- "name": "TodoActionEnum",
+ "kind": "SCALAR",
+ "name": "Boolean",
"ofType": null
}
},
@@ -6404,8 +8023,8 @@
"deprecationReason": null
},
{
- "name": "author",
- "description": "The owner of this todo",
+ "name": "awardEmoji",
+ "description": "Indicates the user can perform `award_emoji` on this resource",
"args": [
],
@@ -6413,8 +8032,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "User",
+ "kind": "SCALAR",
+ "name": "Boolean",
"ofType": null
}
},
@@ -6422,8 +8041,8 @@
"deprecationReason": null
},
{
- "name": "body",
- "description": "Body of the todo",
+ "name": "createEpic",
+ "description": "Indicates the user can perform `create_epic` on this resource",
"args": [
],
@@ -6432,7 +8051,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
}
},
@@ -6440,8 +8059,8 @@
"deprecationReason": null
},
{
- "name": "createdAt",
- "description": "Timestamp this todo was created",
+ "name": "createNote",
+ "description": "Indicates the user can perform `create_note` on this resource",
"args": [
],
@@ -6450,7 +8069,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Time",
+ "name": "Boolean",
"ofType": null
}
},
@@ -6458,22 +8077,26 @@
"deprecationReason": null
},
{
- "name": "group",
- "description": "Group this todo is associated with",
+ "name": "destroyEpic",
+ "description": "Indicates the user can perform `destroy_epic` on this resource",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "Group",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "id",
- "description": "Id of the todo",
+ "name": "readEpic",
+ "description": "Indicates the user can perform `read_epic` on this resource",
"args": [
],
@@ -6482,7 +8105,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "ID",
+ "name": "Boolean",
"ofType": null
}
},
@@ -6490,22 +8113,26 @@
"deprecationReason": null
},
{
- "name": "project",
- "description": "The project this todo is associated with",
+ "name": "readEpicIid",
+ "description": "Indicates the user can perform `read_epic_iid` on this resource",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "Project",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "state",
- "description": "State of the todo",
+ "name": "updateEpic",
+ "description": "Indicates the user can perform `update_epic` on this resource",
"args": [
],
@@ -6513,17 +8140,121 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "ENUM",
- "name": "TodoStateEnum",
+ "kind": "SCALAR",
+ "name": "Boolean",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "INPUT_OBJECT",
+ "name": "EpicSetSubscriptionInput",
+ "description": "Autogenerated input type of EpicSetSubscription",
+ "fields": null,
+ "inputFields": [
+ {
+ "name": "iid",
+ "description": "The iid of the epic to mutate",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
},
{
- "name": "targetType",
- "description": "Target type of the todo",
+ "name": "groupPath",
+ "description": "The group the epic to mutate belongs to",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "subscribedState",
+ "description": "The desired state of the subscription",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "EpicSetSubscriptionPayload",
+ "description": "Autogenerated return type of EpicSetSubscription",
+ "fields": [
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "epic",
+ "description": "The epic after mutation",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "Epic",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "errors",
+ "description": "Reasons why the mutation failed.",
"args": [
],
@@ -6531,9 +8262,17 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "ENUM",
- "name": "TodoTargetEnum",
- "ofType": null
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
}
},
"isDeprecated": false,
@@ -6549,50 +8288,61 @@
},
{
"kind": "ENUM",
- "name": "TodoActionEnum",
- "description": null,
+ "name": "EpicSort",
+ "description": "Roadmap sort values",
"fields": null,
"inputFields": null,
"interfaces": null,
"enumValues": [
{
- "name": "assigned",
- "description": null,
+ "name": "start_date_desc",
+ "description": "Start date at descending order",
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "mentioned",
- "description": null,
+ "name": "start_date_asc",
+ "description": "Start date at ascending order",
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "build_failed",
- "description": null,
+ "name": "end_date_desc",
+ "description": "End date at descending order",
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "marked",
- "description": null,
+ "name": "end_date_asc",
+ "description": "End date at ascending order",
"isDeprecated": false,
"deprecationReason": null
- },
+ }
+ ],
+ "possibleTypes": null
+ },
+ {
+ "kind": "ENUM",
+ "name": "EpicState",
+ "description": "State of an epic.",
+ "fields": null,
+ "inputFields": null,
+ "interfaces": null,
+ "enumValues": [
{
- "name": "approval_required",
+ "name": "all",
"description": null,
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "unmergeable",
+ "name": "opened",
"description": null,
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "directly_addressed",
+ "name": "closed",
"description": null,
"isDeprecated": false,
"deprecationReason": null
@@ -6602,39 +8352,21 @@
},
{
"kind": "ENUM",
- "name": "TodoTargetEnum",
- "description": null,
+ "name": "EpicStateEvent",
+ "description": "State event of an epic",
"fields": null,
"inputFields": null,
"interfaces": null,
"enumValues": [
{
- "name": "COMMIT",
- "description": "A Commit",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "ISSUE",
- "description": "An Issue",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "MERGEREQUEST",
- "description": "A MergeRequest",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "DESIGN",
- "description": "A Design",
+ "name": "REOPEN",
+ "description": "Reopen the epic",
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "EPIC",
- "description": "An Epic",
+ "name": "CLOSE",
+ "description": "Close the epic",
"isDeprecated": false,
"deprecationReason": null
}
@@ -6642,72 +8374,129 @@
"possibleTypes": null
},
{
- "kind": "ENUM",
- "name": "TodoStateEnum",
- "description": null,
+ "kind": "INPUT_OBJECT",
+ "name": "EpicTreeNodeFieldsInputType",
+ "description": "A node of an epic tree.",
"fields": null,
- "inputFields": null,
- "interfaces": null,
- "enumValues": [
+ "inputFields": [
{
- "name": "pending",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
+ "name": "id",
+ "description": "The id of the epic_issue or epic that is being moved",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
},
{
- "name": "done",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
+ "name": "adjacentReferenceId",
+ "description": "The id of the epic_issue or issue that the actual epic or issue is switched with",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "relativePosition",
+ "description": "The type of the switch, after or before allowed",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "ENUM",
+ "name": "MoveType",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
}
],
+ "interfaces": null,
+ "enumValues": null,
"possibleTypes": null
},
{
- "kind": "OBJECT",
- "name": "SnippetConnection",
- "description": "The connection type for Snippet.",
- "fields": [
+ "kind": "INPUT_OBJECT",
+ "name": "EpicTreeReorderInput",
+ "description": "Autogenerated input type of EpicTreeReorder",
+ "fields": null,
+ "inputFields": [
{
- "name": "edges",
- "description": "A list of edges.",
- "args": [
-
- ],
+ "name": "baseEpicId",
+ "description": "The id of the base epic of the tree",
"type": {
- "kind": "LIST",
+ "kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "SnippetEdge",
+ "kind": "SCALAR",
+ "name": "ID",
"ofType": null
}
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "nodes",
- "description": "A list of nodes.",
- "args": [
-
- ],
+ "name": "moved",
+ "description": "Parameters for updating the tree positions",
"type": {
- "kind": "LIST",
+ "kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "Snippet",
+ "kind": "INPUT_OBJECT",
+ "name": "EpicTreeNodeFieldsInputType",
"ofType": null
}
},
+ "defaultValue": null
+ },
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "EpicTreeReorderPayload",
+ "description": "Autogenerated return type of EpicTreeReorder",
+ "fields": [
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "pageInfo",
- "description": "Information to aid in pagination.",
+ "name": "errors",
+ "description": "Reasons why the mutation failed.",
"args": [
],
@@ -6715,9 +8504,17 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "PageInfo",
- "ofType": null
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
}
},
"isDeprecated": false,
@@ -6732,13 +8529,23 @@
"possibleTypes": null
},
{
+ "kind": "SCALAR",
+ "name": "Float",
+ "description": "Represents signed double-precision fractional values as specified by [IEEE 754](https://en.wikipedia.org/wiki/IEEE_floating_point).",
+ "fields": null,
+ "inputFields": null,
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
"kind": "OBJECT",
- "name": "SnippetEdge",
- "description": "An edge in a connection.",
+ "name": "GrafanaIntegration",
+ "description": null,
"fields": [
{
- "name": "cursor",
- "description": "A cursor for use in pagination.",
+ "name": "createdAt",
+ "description": "Timestamp of the issue's creation",
"args": [
],
@@ -6747,7 +8554,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Time",
"ofType": null
}
},
@@ -6755,35 +8562,26 @@
"deprecationReason": null
},
{
- "name": "node",
- "description": "The item at the end of the edge.",
+ "name": "enabled",
+ "description": "Indicates whether Grafana integration is enabled",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "Snippet",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "Snippet",
- "description": "Represents a snippet entry",
- "fields": [
+ },
{
- "name": "author",
- "description": "The owner of the snippet",
+ "name": "grafanaUrl",
+ "description": "Url for the Grafana host for the Grafana integration",
"args": [
],
@@ -6791,8 +8589,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "User",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
}
},
@@ -6800,8 +8598,8 @@
"deprecationReason": null
},
{
- "name": "blob",
- "description": "Snippet blob",
+ "name": "id",
+ "description": "Internal ID of the Grafana integration",
"args": [
],
@@ -6809,8 +8607,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "SnippetBlob",
+ "kind": "SCALAR",
+ "name": "ID",
"ofType": null
}
},
@@ -6818,8 +8616,26 @@
"deprecationReason": null
},
{
- "name": "createdAt",
- "description": "Timestamp this snippet was created",
+ "name": "token",
+ "description": "API token for the Grafana integration. Field is permanently masked.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "isDeprecated": true,
+ "deprecationReason": "Plain text token has been masked for security reasons"
+ },
+ {
+ "name": "updatedAt",
+ "description": "Timestamp of the issue's last activity",
"args": [
],
@@ -6834,24 +8650,37 @@
},
"isDeprecated": false,
"deprecationReason": null
- },
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "Group",
+ "description": null,
+ "fields": [
{
- "name": "description",
- "description": "Description of the snippet",
+ "name": "autoDevopsEnabled",
+ "description": "Indicates whether Auto DevOps is enabled for all projects within this group",
"args": [
],
"type": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "descriptionHtml",
- "description": "The GitLab Flavored Markdown rendering of `description`",
+ "name": "avatarUrl",
+ "description": "Avatar URL of the group",
"args": [
],
@@ -6864,10 +8693,43 @@
"deprecationReason": null
},
{
- "name": "discussions",
- "description": "All discussions on this noteable",
+ "name": "board",
+ "description": "A single board of the group",
+ "args": [
+ {
+ "name": "id",
+ "description": "Find a board by its ID",
+ "type": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "Board",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "boards",
+ "description": "Boards of the group",
"args": [
{
+ "name": "id",
+ "description": "Find a board by its ID",
+ "type": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
"name": "after",
"description": "Returns the elements in the list that come after the specified cursor.",
"type": {
@@ -6909,20 +8771,16 @@
}
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "DiscussionConnection",
- "ofType": null
- }
+ "kind": "OBJECT",
+ "name": "BoardConnection",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "fileName",
- "description": "File Name of the snippet",
+ "name": "description",
+ "description": "Description of the namespace",
"args": [
],
@@ -6935,28 +8793,283 @@
"deprecationReason": null
},
{
- "name": "id",
- "description": "Id of the snippet",
+ "name": "descriptionHtml",
+ "description": "The GitLab Flavored Markdown rendering of `description`",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "emailsDisabled",
+ "description": "Indicates if a group has email notifications disabled",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "epic",
+ "description": "Find a single epic",
+ "args": [
+ {
+ "name": "startDate",
+ "description": "List items within a time frame where items.start_date is between startDate and endDate parameters (endDate parameter must be present)",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "endDate",
+ "description": "List items within a time frame where items.end_date is between startDate and endDate parameters (startDate parameter must be present)",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "iid",
+ "description": "IID of the epic, e.g., \"1\"",
+ "type": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "iids",
+ "description": "List of IIDs of epics, e.g., [1, 2]",
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "state",
+ "description": "Filter epics by state",
+ "type": {
+ "kind": "ENUM",
+ "name": "EpicState",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "search",
+ "description": "Filter epics by title and description",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "sort",
+ "description": "List epics by sort order",
+ "type": {
+ "kind": "ENUM",
+ "name": "EpicSort",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "authorUsername",
+ "description": "Filter epics by author",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "labelName",
+ "description": "Filter epics by labels",
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "iidStartsWith",
+ "description": "Filter epics by iid for autocomplete",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
}
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "Epic",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "notes",
- "description": "All notes on this noteable",
+ "name": "epics",
+ "description": "Find epics",
"args": [
{
+ "name": "startDate",
+ "description": "List items within a time frame where items.start_date is between startDate and endDate parameters (endDate parameter must be present)",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "endDate",
+ "description": "List items within a time frame where items.end_date is between startDate and endDate parameters (startDate parameter must be present)",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "iid",
+ "description": "IID of the epic, e.g., \"1\"",
+ "type": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "iids",
+ "description": "List of IIDs of epics, e.g., [1, 2]",
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "state",
+ "description": "Filter epics by state",
+ "type": {
+ "kind": "ENUM",
+ "name": "EpicState",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "search",
+ "description": "Filter epics by title and description",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "sort",
+ "description": "List epics by sort order",
+ "type": {
+ "kind": "ENUM",
+ "name": "EpicSort",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "authorUsername",
+ "description": "Filter epics by author",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "labelName",
+ "description": "Filter epics by labels",
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "iidStartsWith",
+ "description": "Filter epics by iid for autocomplete",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
"name": "after",
"description": "Returns the elements in the list that come after the specified cursor.",
"type": {
@@ -6998,34 +9111,30 @@
}
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "NoteConnection",
- "ofType": null
- }
+ "kind": "OBJECT",
+ "name": "EpicConnection",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "project",
- "description": "The project the snippet is associated with",
+ "name": "epicsEnabled",
+ "description": "Indicates if Epics are enabled for namespace",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "Project",
+ "kind": "SCALAR",
+ "name": "Boolean",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "rawUrl",
- "description": "Raw URL of the snippet",
+ "name": "fullName",
+ "description": "Full name of the namespace",
"args": [
],
@@ -7042,8 +9151,8 @@
"deprecationReason": null
},
{
- "name": "title",
- "description": "Title of the snippet",
+ "name": "fullPath",
+ "description": "Full path of the namespace",
"args": [
],
@@ -7052,7 +9161,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "ID",
"ofType": null
}
},
@@ -7060,8 +9169,22 @@
"deprecationReason": null
},
{
- "name": "updatedAt",
- "description": "Timestamp this snippet was updated",
+ "name": "groupTimelogsEnabled",
+ "description": "Indicates if Group timelogs are enabled for namespace",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "id",
+ "description": "ID of the namespace",
"args": [
],
@@ -7070,7 +9193,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Time",
+ "name": "ID",
"ofType": null
}
},
@@ -7078,44 +9201,119 @@
"deprecationReason": null
},
{
- "name": "userPermissions",
- "description": "Permissions for the current user on the resource",
+ "name": "lfsEnabled",
+ "description": "Indicates if Large File Storage (LFS) is enabled for namespace",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "SnippetPermissions",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "visibilityLevel",
- "description": "Visibility Level of the snippet",
+ "name": "mentionsDisabled",
+ "description": "Indicates if a group is disabled from getting mentioned",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "ENUM",
- "name": "VisibilityLevelsEnum",
- "ofType": null
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "milestones",
+ "description": "Find milestones",
+ "args": [
+ {
+ "name": "startDate",
+ "description": "List items within a time frame where items.start_date is between startDate and endDate parameters (endDate parameter must be present)",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "endDate",
+ "description": "List items within a time frame where items.end_date is between startDate and endDate parameters (startDate parameter must be present)",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "state",
+ "description": "Filter milestones by state",
+ "type": {
+ "kind": "ENUM",
+ "name": "MilestoneStateEnum",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "first",
+ "description": "Returns the first _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "last",
+ "description": "Returns the last _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
}
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "MilestoneConnection",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "webUrl",
- "description": "Web URL of the snippet",
+ "name": "name",
+ "description": "Name of the namespace",
"args": [
],
@@ -7130,54 +9328,33 @@
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
- {
- "kind": "INTERFACE",
- "name": "Noteable",
- "ofType": null
- }
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "DiscussionConnection",
- "description": "The connection type for Discussion.",
- "fields": [
+ },
{
- "name": "edges",
- "description": "A list of edges.",
+ "name": "parent",
+ "description": "Parent group",
"args": [
],
"type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "DiscussionEdge",
- "ofType": null
- }
+ "kind": "OBJECT",
+ "name": "Group",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "nodes",
- "description": "A list of nodes.",
+ "name": "path",
+ "description": "Path of the namespace",
"args": [
],
"type": {
- "kind": "LIST",
+ "kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "Discussion",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
}
},
@@ -7185,122 +9362,189 @@
"deprecationReason": null
},
{
- "name": "pageInfo",
- "description": "Information to aid in pagination.",
+ "name": "projectCreationLevel",
+ "description": "The permission level required to create projects in the group",
"args": [
],
"type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "projects",
+ "description": "Projects within this namespace",
+ "args": [
+ {
+ "name": "includeSubgroups",
+ "description": "Include also subgroup projects",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ },
+ "defaultValue": "false"
+ },
+ {
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "first",
+ "description": "Returns the first _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "last",
+ "description": "Returns the last _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
+ "type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "PageInfo",
+ "name": "ProjectConnection",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
+ },
+ {
+ "name": "requestAccessEnabled",
+ "description": "Indicates if users can request access to namespace",
+ "args": [
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "DiscussionEdge",
- "description": "An edge in a connection.",
- "fields": [
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
{
- "name": "cursor",
- "description": "A cursor for use in pagination.",
+ "name": "requireTwoFactorAuthentication",
+ "description": "Indicates if all users in this group are required to set up two-factor authentication",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "node",
- "description": "The item at the end of the edge.",
+ "name": "rootStorageStatistics",
+ "description": "Aggregated storage statistics of the namespace. Only available for root namespaces",
"args": [
],
"type": {
"kind": "OBJECT",
- "name": "Discussion",
+ "name": "RootStorageStatistics",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "Discussion",
- "description": null,
- "fields": [
+ },
{
- "name": "createdAt",
- "description": "Timestamp of the discussion's creation",
+ "name": "shareWithGroupLock",
+ "description": "Indicates if sharing a project with another group within this group is prevented",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Time",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "id",
- "description": "ID of this discussion",
+ "name": "subgroupCreationLevel",
+ "description": "The permission level required to create subgroups within the group",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "notes",
- "description": "All notes in the discussion",
+ "name": "timelogs",
+ "description": "Time logged in issues by group members",
"args": [
{
+ "name": "startDate",
+ "description": "List time logs within a time range where the logged date is after start_date parameter.",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "endDate",
+ "description": "List time logs within a time range where the logged date is before end_date parameter.",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
"name": "after",
"description": "Returns the elements in the list that come after the specified cursor.",
"type": {
@@ -7346,7 +9590,7 @@
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "NoteConnection",
+ "name": "TimelogConnection",
"ofType": null
}
},
@@ -7354,39 +9598,22 @@
"deprecationReason": null
},
{
- "name": "replyId",
- "description": "ID used to reply to this discussion",
+ "name": "twoFactorGracePeriod",
+ "description": "Time before two-factor authentication is enforced",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "SnippetPermissions",
- "description": null,
- "fields": [
+ },
{
- "name": "adminSnippet",
- "description": "Indicates the user can perform `admin_snippet` on this resource",
+ "name": "userPermissions",
+ "description": "Permissions for the current user on the resource",
"args": [
],
@@ -7394,8 +9621,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
+ "kind": "OBJECT",
+ "name": "GroupPermissions",
"ofType": null
}
},
@@ -7403,26 +9630,22 @@
"deprecationReason": null
},
{
- "name": "awardEmoji",
- "description": "Indicates the user can perform `award_emoji` on this resource",
+ "name": "visibility",
+ "description": "Visibility of the namespace",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "createNote",
- "description": "Indicates the user can perform `create_note` on this resource",
+ "name": "webUrl",
+ "description": "Web URL of the group",
"args": [
],
@@ -7431,16 +9654,29 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "String",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
- },
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "GroupPermissions",
+ "description": null,
+ "fields": [
{
- "name": "readSnippet",
- "description": "Indicates the user can perform `read_snippet` on this resource",
+ "name": "readGroup",
+ "description": "Indicates the user can perform `read_group` on this resource",
"args": [
],
@@ -7455,73 +9691,86 @@
},
"isDeprecated": false,
"deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "ENUM",
+ "name": "HealthStatus",
+ "description": "Health status of an issue or epic",
+ "fields": null,
+ "inputFields": null,
+ "interfaces": null,
+ "enumValues": [
+ {
+ "name": "onTrack",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "reportSnippet",
- "description": "Indicates the user can perform `report_snippet` on this resource",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- }
- },
+ "name": "needsAttention",
+ "description": null,
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "updateSnippet",
- "description": "Indicates the user can perform `update_snippet` on this resource",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- }
- },
+ "name": "atRisk",
+ "description": null,
"isDeprecated": false,
"deprecationReason": null
}
],
+ "possibleTypes": null
+ },
+ {
+ "kind": "SCALAR",
+ "name": "ID",
+ "description": "Represents a unique identifier that is Base64 obfuscated. It is often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as `\"VXNlci0xMA==\"`) or integer (such as `4`) input value will be accepted as an ID.",
+ "fields": null,
"inputFields": null,
- "interfaces": [
-
- ],
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "SCALAR",
+ "name": "Int",
+ "description": "Represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1.",
+ "fields": null,
+ "inputFields": null,
+ "interfaces": null,
"enumValues": null,
"possibleTypes": null
},
{
"kind": "ENUM",
- "name": "VisibilityLevelsEnum",
- "description": null,
+ "name": "IssuableState",
+ "description": "State of a GitLab issue or merge request",
"fields": null,
"inputFields": null,
"interfaces": null,
"enumValues": [
{
- "name": "private",
+ "name": "opened",
"description": null,
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "internal",
+ "name": "closed",
"description": null,
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "public",
+ "name": "locked",
"description": null,
"isDeprecated": false,
"deprecationReason": null
@@ -7531,12 +9780,65 @@
},
{
"kind": "OBJECT",
- "name": "SnippetBlob",
- "description": "Represents the snippet blob",
+ "name": "Issue",
+ "description": null,
"fields": [
{
- "name": "binary",
- "description": "Shows whether the blob is binary",
+ "name": "assignees",
+ "description": "Assignees of the issue",
+ "args": [
+ {
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "first",
+ "description": "Returns the first _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "last",
+ "description": "Returns the last _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "UserConnection",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "author",
+ "description": "User that created the issue",
"args": [
],
@@ -7544,8 +9846,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
+ "kind": "OBJECT",
+ "name": "User",
"ofType": null
}
},
@@ -7553,50 +9855,58 @@
"deprecationReason": null
},
{
- "name": "mode",
- "description": "Blob mode",
+ "name": "closedAt",
+ "description": "Timestamp of when the issue was closed",
"args": [
],
"type": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Time",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "name",
- "description": "Blob name",
+ "name": "confidential",
+ "description": "Indicates the issue is confidential",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "path",
- "description": "Blob path",
+ "name": "createdAt",
+ "description": "Timestamp of when the issue was created",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "plainData",
- "description": "Blob plain highlighted data",
+ "name": "description",
+ "description": "Description of the issue",
"args": [
],
@@ -7609,54 +9919,50 @@
"deprecationReason": null
},
{
- "name": "rawPath",
- "description": "Blob raw content endpoint path",
+ "name": "descriptionHtml",
+ "description": "The GitLab Flavored Markdown rendering of `description`",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "richData",
- "description": "Blob highlighted data",
+ "name": "designCollection",
+ "description": "Collection of design images associated with this issue",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "OBJECT",
+ "name": "DesignCollection",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "richViewer",
- "description": "Blob content rich viewer",
+ "name": "designs",
+ "description": "Deprecated. Use `designCollection`",
"args": [
],
"type": {
"kind": "OBJECT",
- "name": "SnippetBlobViewer",
+ "name": "DesignCollection",
"ofType": null
},
- "isDeprecated": false,
- "deprecationReason": null
+ "isDeprecated": true,
+ "deprecationReason": "Use designCollection"
},
{
- "name": "simpleViewer",
- "description": "Blob content simple viewer",
+ "name": "discussionLocked",
+ "description": "Indicates discussion is locked on the issue",
"args": [
],
@@ -7664,8 +9970,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "SnippetBlobViewer",
+ "kind": "SCALAR",
+ "name": "Boolean",
"ofType": null
}
},
@@ -7673,39 +9979,65 @@
"deprecationReason": null
},
{
- "name": "size",
- "description": "Blob size",
+ "name": "discussions",
+ "description": "All discussions on this noteable",
"args": [
-
+ {
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "first",
+ "description": "Returns the first _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "last",
+ "description": "Returns the last _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Int",
+ "kind": "OBJECT",
+ "name": "DiscussionConnection",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "SnippetBlobViewer",
- "description": "Represents how the blob content should be displayed",
- "fields": [
+ },
{
- "name": "collapsed",
- "description": "Shows whether the blob should be displayed collapsed",
+ "name": "downvotes",
+ "description": "Number of downvotes the issue has received",
"args": [
],
@@ -7714,7 +10046,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "Int",
"ofType": null
}
},
@@ -7722,8 +10054,50 @@
"deprecationReason": null
},
{
- "name": "fileType",
- "description": "Content file type",
+ "name": "dueDate",
+ "description": "Due date of the issue",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "epic",
+ "description": "Epic to which this issue belongs",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "Epic",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "healthStatus",
+ "description": "Current health status. Available only when feature flag `save_issuable_health_status` is enabled.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "ENUM",
+ "name": "HealthStatus",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "iid",
+ "description": "Internal ID of the issue",
"args": [
],
@@ -7732,7 +10106,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "ID",
"ofType": null
}
},
@@ -7740,17 +10114,123 @@
"deprecationReason": null
},
{
- "name": "loadAsync",
- "description": "Shows whether the blob content is loaded async",
+ "name": "labels",
+ "description": "Labels of the issue",
+ "args": [
+ {
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "first",
+ "description": "Returns the first _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "last",
+ "description": "Returns the last _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "LabelConnection",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "milestone",
+ "description": "Milestone of the issue",
"args": [
],
"type": {
+ "kind": "OBJECT",
+ "name": "Milestone",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "notes",
+ "description": "All notes on this noteable",
+ "args": [
+ {
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "first",
+ "description": "Returns the first _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "last",
+ "description": "Returns the last _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
+ "type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
+ "kind": "OBJECT",
+ "name": "NoteConnection",
"ofType": null
}
},
@@ -7758,10 +10238,72 @@
"deprecationReason": null
},
{
- "name": "loadingPartialName",
- "description": "Loading partial name",
+ "name": "participants",
+ "description": "List of participants in the issue",
"args": [
-
+ {
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "first",
+ "description": "Returns the first _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "last",
+ "description": "Returns the last _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "UserConnection",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "reference",
+ "description": "Internal reference of the issue. Returned in shortened format by default",
+ "args": [
+ {
+ "name": "full",
+ "description": "Boolean option specifying whether the reference should be returned in full",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ },
+ "defaultValue": "false"
+ }
],
"type": {
"kind": "NON_NULL",
@@ -7776,22 +10318,22 @@
"deprecationReason": null
},
{
- "name": "renderError",
- "description": "Error rendering the blob content",
+ "name": "relativePosition",
+ "description": "Relative position of the issue (used for positioning in epic tree and issue boards)",
"args": [
],
"type": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Int",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "tooLarge",
- "description": "Shows whether the blob too large to be displayed",
+ "name": "state",
+ "description": "State of the issue",
"args": [
],
@@ -7799,8 +10341,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
+ "kind": "ENUM",
+ "name": "IssueState",
"ofType": null
}
},
@@ -7808,8 +10350,8 @@
"deprecationReason": null
},
{
- "name": "type",
- "description": "Type of blob viewer",
+ "name": "subscribed",
+ "description": "Indicates the currently logged in user is subscribed to the issue",
"args": [
],
@@ -7817,111 +10359,35 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "ENUM",
- "name": "BlobViewersType",
+ "kind": "SCALAR",
+ "name": "Boolean",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "ENUM",
- "name": "BlobViewersType",
- "description": "Types of blob viewers",
- "fields": null,
- "inputFields": null,
- "interfaces": null,
- "enumValues": [
- {
- "name": "rich",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
},
{
- "name": "simple",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "auxiliary",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
- }
- ],
- "possibleTypes": null
- },
- {
- "kind": "ENUM",
- "name": "VisibilityScopesEnum",
- "description": null,
- "fields": null,
- "inputFields": null,
- "interfaces": null,
- "enumValues": [
- {
- "name": "private",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "internal",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "public",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
- }
- ],
- "possibleTypes": null
- },
- {
- "kind": "ENUM",
- "name": "TypeEnum",
- "description": null,
- "fields": null,
- "inputFields": null,
- "interfaces": null,
- "enumValues": [
- {
- "name": "personal",
- "description": null,
+ "name": "taskCompletionStatus",
+ "description": "Task completion status of the issue",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "TaskCompletionStatus",
+ "ofType": null
+ }
+ },
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "project",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
- }
- ],
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "DiffPosition",
- "description": null,
- "fields": [
- {
- "name": "diffRefs",
- "description": "Information about the branch, HEAD, and base at the time of commenting",
+ "name": "timeEstimate",
+ "description": "Time estimate of the issue",
"args": [
],
@@ -7929,8 +10395,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "DiffRefs",
+ "kind": "SCALAR",
+ "name": "Int",
"ofType": null
}
},
@@ -7938,8 +10404,8 @@
"deprecationReason": null
},
{
- "name": "filePath",
- "description": "Path of the file that was changed",
+ "name": "title",
+ "description": "Title of the issue",
"args": [
],
@@ -7956,78 +10422,94 @@
"deprecationReason": null
},
{
- "name": "height",
- "description": "Total height of the image",
+ "name": "titleHtml",
+ "description": "The GitLab Flavored Markdown rendering of `title`",
"args": [
],
"type": {
"kind": "SCALAR",
- "name": "Int",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "newLine",
- "description": "Line on HEAD SHA that was changed",
+ "name": "totalTimeSpent",
+ "description": "Total time reported as spent on the issue",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "newPath",
- "description": "Path of the file on the HEAD SHA",
+ "name": "updatedAt",
+ "description": "Timestamp of when the issue was last updated",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "oldLine",
- "description": "Line on start SHA that was changed",
+ "name": "upvotes",
+ "description": "Number of upvotes the issue has received",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "oldPath",
- "description": "Path of the file on the start SHA",
+ "name": "userNotesCount",
+ "description": "Number of user notes of the issue",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "positionType",
- "description": "Type of file the position refers to",
+ "name": "userPermissions",
+ "description": "Permissions for the current user on the resource",
"args": [
],
@@ -8035,8 +10517,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "ENUM",
- "name": "DiffPositionType",
+ "kind": "OBJECT",
+ "name": "IssuePermissions",
"ofType": null
}
},
@@ -8044,36 +10526,44 @@
"deprecationReason": null
},
{
- "name": "width",
- "description": "Total width of the image",
+ "name": "webPath",
+ "description": "Web path of the issue",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "x",
- "description": "X position of the note",
+ "name": "webUrl",
+ "description": "Web URL of the issue",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "y",
- "description": "Y position of the note",
+ "name": "weight",
+ "description": "Weight of the issue",
"args": [
],
@@ -8088,42 +10578,50 @@
],
"inputFields": null,
"interfaces": [
-
+ {
+ "kind": "INTERFACE",
+ "name": "Noteable",
+ "ofType": null
+ }
],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
- "name": "DiffRefs",
- "description": null,
+ "name": "IssueConnection",
+ "description": "The connection type for Issue.",
"fields": [
{
- "name": "baseSha",
- "description": "Merge base of the branch the comment was made on",
+ "name": "edges",
+ "description": "A list of edges.",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "IssueEdge",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "headSha",
- "description": "SHA of the HEAD at the time the comment was made",
+ "name": "nodes",
+ "description": "A list of nodes.",
"args": [
],
"type": {
- "kind": "NON_NULL",
+ "kind": "LIST",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "OBJECT",
+ "name": "Issue",
"ofType": null
}
},
@@ -8131,8 +10629,8 @@
"deprecationReason": null
},
{
- "name": "startSha",
- "description": "SHA of the branch being compared against",
+ "name": "pageInfo",
+ "description": "Information to aid in pagination.",
"args": [
],
@@ -8140,8 +10638,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "OBJECT",
+ "name": "PageInfo",
"ofType": null
}
},
@@ -8157,36 +10655,58 @@
"possibleTypes": null
},
{
- "kind": "ENUM",
- "name": "DiffPositionType",
- "description": "Type of file the position refers to",
- "fields": null,
- "inputFields": null,
- "interfaces": null,
- "enumValues": [
+ "kind": "OBJECT",
+ "name": "IssueEdge",
+ "description": "An edge in a connection.",
+ "fields": [
{
- "name": "text",
- "description": null,
+ "name": "cursor",
+ "description": "A cursor for use in pagination.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "image",
- "description": null,
+ "name": "node",
+ "description": "The item at the end of the edge.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "Issue",
+ "ofType": null
+ },
"isDeprecated": false,
"deprecationReason": null
}
],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
- "name": "EpicPermissions",
- "description": "Check permissions for the current user on an epic",
+ "name": "IssuePermissions",
+      "description": "Check permissions for the current user on an issue",
"fields": [
{
- "name": "adminEpic",
- "description": "Indicates the user can perform `admin_epic` on this resource",
+ "name": "adminIssue",
+ "description": "Indicates the user can perform `admin_issue` on this resource",
"args": [
],
@@ -8203,8 +10723,8 @@
"deprecationReason": null
},
{
- "name": "awardEmoji",
- "description": "Indicates the user can perform `award_emoji` on this resource",
+ "name": "createDesign",
+ "description": "Indicates the user can perform `create_design` on this resource",
"args": [
],
@@ -8221,8 +10741,8 @@
"deprecationReason": null
},
{
- "name": "createEpic",
- "description": "Indicates the user can perform `create_epic` on this resource",
+ "name": "createNote",
+ "description": "Indicates the user can perform `create_note` on this resource",
"args": [
],
@@ -8239,8 +10759,8 @@
"deprecationReason": null
},
{
- "name": "createNote",
- "description": "Indicates the user can perform `create_note` on this resource",
+ "name": "destroyDesign",
+ "description": "Indicates the user can perform `destroy_design` on this resource",
"args": [
],
@@ -8257,8 +10777,8 @@
"deprecationReason": null
},
{
- "name": "destroyEpic",
- "description": "Indicates the user can perform `destroy_epic` on this resource",
+ "name": "readDesign",
+ "description": "Indicates the user can perform `read_design` on this resource",
"args": [
],
@@ -8275,8 +10795,8 @@
"deprecationReason": null
},
{
- "name": "readEpic",
- "description": "Indicates the user can perform `read_epic` on this resource",
+ "name": "readIssue",
+ "description": "Indicates the user can perform `read_issue` on this resource",
"args": [
],
@@ -8293,8 +10813,8 @@
"deprecationReason": null
},
{
- "name": "readEpicIid",
- "description": "Indicates the user can perform `read_epic_iid` on this resource",
+ "name": "reopenIssue",
+ "description": "Indicates the user can perform `reopen_issue` on this resource",
"args": [
],
@@ -8311,8 +10831,8 @@
"deprecationReason": null
},
{
- "name": "updateEpic",
- "description": "Indicates the user can perform `update_epic` on this resource",
+ "name": "updateIssue",
+ "description": "Indicates the user can perform `update_issue` on this resource",
"args": [
],
@@ -8337,89 +10857,123 @@
"possibleTypes": null
},
{
- "kind": "ENUM",
- "name": "EpicState",
- "description": "State of an epic.",
+ "kind": "INPUT_OBJECT",
+ "name": "IssueSetConfidentialInput",
+ "description": "Autogenerated input type of IssueSetConfidential",
"fields": null,
- "inputFields": null,
- "interfaces": null,
- "enumValues": [
+ "inputFields": [
{
- "name": "all",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
+ "name": "projectPath",
+ "description": "The project the issue to mutate is in",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
},
{
- "name": "opened",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
+ "name": "iid",
+ "description": "The iid of the issue to mutate",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
},
{
- "name": "closed",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
+ "name": "confidential",
+        "description": "Whether or not to set the issue as confidential.",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
}
],
+ "interfaces": null,
+ "enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
- "name": "EpicConnection",
- "description": "The connection type for Epic.",
+ "name": "IssueSetConfidentialPayload",
+ "description": "Autogenerated return type of IssueSetConfidential",
"fields": [
{
- "name": "edges",
- "description": "A list of edges.",
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
"args": [
],
"type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "EpicEdge",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "nodes",
- "description": "A list of nodes.",
+ "name": "errors",
+ "description": "Reasons why the mutation failed.",
"args": [
],
"type": {
- "kind": "LIST",
+ "kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "Epic",
- "ofType": null
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "pageInfo",
- "description": "Information to aid in pagination.",
+ "name": "issue",
+ "description": "The issue after mutation",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "PageInfo",
- "ofType": null
- }
+ "kind": "OBJECT",
+ "name": "Issue",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
@@ -8433,13 +10987,90 @@
"possibleTypes": null
},
{
+ "kind": "INPUT_OBJECT",
+ "name": "IssueSetDueDateInput",
+ "description": "Autogenerated input type of IssueSetDueDate",
+ "fields": null,
+ "inputFields": [
+ {
+ "name": "projectPath",
+ "description": "The project the issue to mutate is in",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "iid",
+ "description": "The iid of the issue to mutate",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "dueDate",
+ "description": "The desired due date for the issue",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
"kind": "OBJECT",
- "name": "EpicEdge",
- "description": "An edge in a connection.",
+ "name": "IssueSetDueDatePayload",
+ "description": "Autogenerated return type of IssueSetDueDate",
"fields": [
{
- "name": "cursor",
- "description": "A cursor for use in pagination.",
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "errors",
+ "description": "Reasons why the mutation failed.",
"args": [
],
@@ -8447,23 +11078,31 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "node",
- "description": "The item at the end of the edge.",
+ "name": "issue",
+ "description": "The issue after mutation",
"args": [
],
"type": {
"kind": "OBJECT",
- "name": "Epic",
+ "name": "Issue",
"ofType": null
},
"isDeprecated": false,
@@ -8478,95 +11117,123 @@
"possibleTypes": null
},
{
- "kind": "ENUM",
- "name": "EpicSort",
- "description": "Roadmap sort values",
+ "kind": "INPUT_OBJECT",
+ "name": "IssueSetWeightInput",
+ "description": "Autogenerated input type of IssueSetWeight",
"fields": null,
- "inputFields": null,
- "interfaces": null,
- "enumValues": [
+ "inputFields": [
{
- "name": "start_date_desc",
- "description": "Start date at descending order",
- "isDeprecated": false,
- "deprecationReason": null
+ "name": "projectPath",
+ "description": "The project the issue to mutate is in",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
},
{
- "name": "start_date_asc",
- "description": "Start date at ascending order",
- "isDeprecated": false,
- "deprecationReason": null
+ "name": "iid",
+ "description": "The iid of the issue to mutate",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
},
{
- "name": "end_date_desc",
- "description": "End date at descending order",
- "isDeprecated": false,
- "deprecationReason": null
+ "name": "weight",
+ "description": "The desired weight for the issue",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
},
{
- "name": "end_date_asc",
- "description": "End date at ascending order",
- "isDeprecated": false,
- "deprecationReason": null
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
}
],
+ "interfaces": null,
+ "enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
- "name": "LabelConnection",
- "description": "The connection type for Label.",
+ "name": "IssueSetWeightPayload",
+ "description": "Autogenerated return type of IssueSetWeight",
"fields": [
{
- "name": "edges",
- "description": "A list of edges.",
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
"args": [
],
"type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "LabelEdge",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "nodes",
- "description": "A list of nodes.",
+ "name": "errors",
+ "description": "Reasons why the mutation failed.",
"args": [
],
"type": {
- "kind": "LIST",
+ "kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "Label",
- "ofType": null
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "pageInfo",
- "description": "Information to aid in pagination.",
+ "name": "issue",
+ "description": "The issue after mutation",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "PageInfo",
- "ofType": null
- }
+ "kind": "OBJECT",
+ "name": "Issue",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
@@ -8580,47 +11247,106 @@
"possibleTypes": null
},
{
- "kind": "OBJECT",
- "name": "LabelEdge",
- "description": "An edge in a connection.",
- "fields": [
+ "kind": "ENUM",
+ "name": "IssueSort",
+ "description": "Values for sorting issues",
+ "fields": null,
+ "inputFields": null,
+ "interfaces": null,
+ "enumValues": [
{
- "name": "cursor",
- "description": "A cursor for use in pagination.",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- },
+ "name": "updated_desc",
+ "description": "Updated at descending order",
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "node",
- "description": "The item at the end of the edge.",
- "args": [
-
- ],
- "type": {
- "kind": "OBJECT",
- "name": "Label",
- "ofType": null
- },
+ "name": "updated_asc",
+ "description": "Updated at ascending order",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "created_desc",
+ "description": "Created at descending order",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "created_asc",
+ "description": "Created at ascending order",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "DUE_DATE_ASC",
+ "description": "Due date by ascending order",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "DUE_DATE_DESC",
+ "description": "Due date by descending order",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "RELATIVE_POSITION_ASC",
+ "description": "Relative position by ascending order",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "WEIGHT_ASC",
+ "description": "Weight by ascending order",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "WEIGHT_DESC",
+ "description": "Weight by descending order",
"isDeprecated": false,
"deprecationReason": null
}
],
+ "possibleTypes": null
+ },
+ {
+ "kind": "ENUM",
+ "name": "IssueState",
+ "description": "State of a GitLab issue",
+ "fields": null,
"inputFields": null,
- "interfaces": [
-
+ "interfaces": null,
+ "enumValues": [
+ {
+ "name": "opened",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "closed",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "locked",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
],
+ "possibleTypes": null
+ },
+ {
+ "kind": "SCALAR",
+ "name": "JSON",
+ "description": "Represents untyped JSON",
+ "fields": null,
+ "inputFields": null,
+ "interfaces": null,
"enumValues": null,
"possibleTypes": null
},
@@ -8739,8 +11465,8 @@
},
{
"kind": "OBJECT",
- "name": "UserConnection",
- "description": "The connection type for User.",
+ "name": "LabelConnection",
+ "description": "The connection type for Label.",
"fields": [
{
"name": "edges",
@@ -8753,7 +11479,7 @@
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "UserEdge",
+ "name": "LabelEdge",
"ofType": null
}
},
@@ -8771,7 +11497,7 @@
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "User",
+ "name": "Label",
"ofType": null
}
},
@@ -8806,7 +11532,7 @@
},
{
"kind": "OBJECT",
- "name": "UserEdge",
+ "name": "LabelEdge",
"description": "An edge in a connection.",
"fields": [
{
@@ -8835,7 +11561,7 @@
],
"type": {
"kind": "OBJECT",
- "name": "User",
+ "name": "Label",
"ofType": null
},
"isDeprecated": false,
@@ -8850,80 +11576,62 @@
"possibleTypes": null
},
{
- "kind": "OBJECT",
- "name": "EpicIssueConnection",
- "description": "The connection type for EpicIssue.",
- "fields": [
- {
- "name": "edges",
- "description": "A list of edges.",
- "args": [
-
- ],
- "type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "EpicIssueEdge",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
+ "kind": "INPUT_OBJECT",
+ "name": "MarkAsSpamSnippetInput",
+ "description": "Autogenerated input type of MarkAsSpamSnippet",
+ "fields": null,
+ "inputFields": [
{
- "name": "nodes",
- "description": "A list of nodes.",
- "args": [
-
- ],
+ "name": "id",
+ "description": "The global id of the snippet to update",
"type": {
- "kind": "LIST",
+ "kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "EpicIssue",
+ "kind": "SCALAR",
+ "name": "ID",
"ofType": null
}
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "pageInfo",
- "description": "Information to aid in pagination.",
- "args": [
-
- ],
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "PageInfo",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
}
],
- "inputFields": null,
- "interfaces": [
-
- ],
+ "interfaces": null,
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
- "name": "EpicIssueEdge",
- "description": "An edge in a connection.",
+ "name": "MarkAsSpamSnippetPayload",
+ "description": "Autogenerated return type of MarkAsSpamSnippet",
"fields": [
{
- "name": "cursor",
- "description": "A cursor for use in pagination.",
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "errors",
+ "description": "Reasons why the mutation failed.",
"args": [
],
@@ -8931,23 +11639,31 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "node",
- "description": "The item at the end of the edge.",
+ "name": "snippet",
+ "description": "The snippet after mutation",
"args": [
],
"type": {
"kind": "OBJECT",
- "name": "EpicIssue",
+ "name": "Snippet",
"ofType": null
},
"isDeprecated": false,
@@ -8963,12 +11679,26 @@
},
{
"kind": "OBJECT",
- "name": "EpicIssue",
- "description": "Relationship between an epic and an issue",
+ "name": "MergeRequest",
+ "description": null,
"fields": [
{
+ "name": "allowCollaboration",
+ "description": "Indicates if members of the target project can push to the fork",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
"name": "assignees",
- "description": "Assignees of the issue",
+ "description": "Assignees of the merge request",
"args": [
{
"name": "after",
@@ -9020,8 +11750,8 @@
"deprecationReason": null
},
{
- "name": "author",
- "description": "User that created the issue",
+ "name": "createdAt",
+ "description": "Timestamp of when the merge request was created",
"args": [
],
@@ -9029,8 +11759,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "User",
+ "kind": "SCALAR",
+ "name": "Time",
"ofType": null
}
},
@@ -9038,58 +11768,22 @@
"deprecationReason": null
},
{
- "name": "closedAt",
- "description": "Timestamp of when the issue was closed",
+ "name": "defaultMergeCommitMessage",
+ "description": "Default merge commit message of the merge request",
"args": [
],
"type": {
"kind": "SCALAR",
- "name": "Time",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "confidential",
- "description": "Indicates the issue is confidential",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "createdAt",
- "description": "Timestamp of when the issue was created",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Time",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
"name": "description",
- "description": "Description of the issue",
+ "description": "Description of the merge request (Markdown rendered as HTML for caching)",
"args": [
],
@@ -9116,36 +11810,36 @@
"deprecationReason": null
},
{
- "name": "designCollection",
- "description": "Collection of design images associated with this issue",
+ "name": "diffHeadSha",
+ "description": "Diff head SHA of the merge request",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "DesignCollection",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "designs",
- "description": "Deprecated. Use `designCollection`",
+ "name": "diffRefs",
+ "description": "References of the base SHA, the head SHA, and the start SHA for this merge request",
"args": [
],
"type": {
"kind": "OBJECT",
- "name": "DesignCollection",
+ "name": "DiffRefs",
"ofType": null
},
- "isDeprecated": true,
- "deprecationReason": "Use designCollection"
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
"name": "discussionLocked",
- "description": "Indicates discussion is locked on the issue",
+ "description": "Indicates if comments on the merge request are locked to members only",
"args": [
],
@@ -9220,7 +11914,7 @@
},
{
"name": "downvotes",
- "description": "Number of downvotes the issue has received",
+ "description": "Number of downvotes for the merge request",
"args": [
],
@@ -9237,36 +11931,36 @@
"deprecationReason": null
},
{
- "name": "dueDate",
- "description": "Due date of the issue",
+ "name": "forceRemoveSourceBranch",
+ "description": "Indicates if the project settings will lead to source branch deletion after merge",
"args": [
],
"type": {
"kind": "SCALAR",
- "name": "Time",
+ "name": "Boolean",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "epic",
- "description": "Epic to which this issue belongs",
+ "name": "headPipeline",
+ "description": "The pipeline running on the branch HEAD of the merge request",
"args": [
],
"type": {
"kind": "OBJECT",
- "name": "Epic",
+ "name": "Pipeline",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "epicIssueId",
- "description": "ID of the epic-issue relation",
+ "name": "id",
+ "description": "ID of the merge request",
"args": [
],
@@ -9283,40 +11977,40 @@
"deprecationReason": null
},
{
- "name": "id",
- "description": "Global ID of the epic-issue relation",
+ "name": "iid",
+ "description": "Internal ID of the merge request",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "iid",
- "description": "Internal ID of the issue",
+ "name": "inProgressMergeCommitSha",
+ "description": "Commit SHA of the merge request if merge is in progress",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "labels",
- "description": "Labels of the issue",
+ "description": "Labels of the merge request",
"args": [
{
"name": "after",
@@ -9368,8 +12062,110 @@
"deprecationReason": null
},
{
+ "name": "mergeCommitMessage",
+ "description": "Deprecated - renamed to defaultMergeCommitMessage",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "isDeprecated": true,
+ "deprecationReason": "Renamed to defaultMergeCommitMessage"
+ },
+ {
+ "name": "mergeCommitSha",
+ "description": "SHA of the merge request commit (set once merged)",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "mergeError",
+ "description": "Error message due to a merge error",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "mergeOngoing",
+ "description": "Indicates if a merge is currently occurring",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "mergeStatus",
+ "description": "Status of the merge request",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "mergeWhenPipelineSucceeds",
+ "description": "Indicates if the merge has been set to be merged when its pipeline succeeds (MWPS)",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "mergeableDiscussionsState",
+ "description": "Indicates if all discussions in the merge request have been resolved, allowing the merge request to be merged",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
"name": "milestone",
- "description": "Milestone of the issue",
+ "description": "The milestone of the merge request",
"args": [
],
@@ -9440,7 +12236,7 @@
},
{
"name": "participants",
- "description": "List of participants in the issue",
+ "description": "Participants in the merge request",
"args": [
{
"name": "after",
@@ -9492,8 +12288,163 @@
"deprecationReason": null
},
{
+ "name": "pipelines",
+ "description": "Pipelines for the merge request",
+ "args": [
+ {
+ "name": "status",
+ "description": "Filter pipelines by their status",
+ "type": {
+ "kind": "ENUM",
+ "name": "PipelineStatusEnum",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "ref",
+ "description": "Filter pipelines by the ref they are run for",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "sha",
+ "description": "Filter pipelines by the sha of the commit they are run for",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "first",
+ "description": "Returns the first _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "last",
+ "description": "Returns the last _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "PipelineConnection",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "project",
+ "description": "Alias for target_project",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "Project",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "projectId",
+ "description": "ID of the merge request project",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "rebaseCommitSha",
+ "description": "Rebase commit SHA of the merge request",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "rebaseInProgress",
+ "description": "Indicates if there is a rebase currently in progress for the merge request",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
"name": "reference",
- "description": "Internal reference of the issue. Returned in shortened format by default",
+ "description": "Internal reference of the merge request. Returned in shortened format by default",
"args": [
{
"name": "full",
@@ -9519,22 +12470,90 @@
"deprecationReason": null
},
{
- "name": "relationPath",
- "description": "URI path of the epic-issue relation",
+ "name": "shouldBeRebased",
+ "description": "Indicates if the merge request will be rebased",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "shouldRemoveSourceBranch",
+ "description": "Indicates if the source branch of the merge request will be deleted after merge",
"args": [
],
"type": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "relativePosition",
- "description": "Relative position of the issue (used for positioning in epic tree and issue boards)",
+ "name": "sourceBranch",
+ "description": "Source branch of the merge request",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "sourceBranchExists",
+ "description": "Indicates if the source branch of the merge request exists",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "sourceProject",
+ "description": "Source project of the merge request",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "Project",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "sourceProjectId",
+ "description": "ID of the merge request source project",
"args": [
],
@@ -9548,7 +12567,7 @@
},
{
"name": "state",
- "description": "State of the issue",
+ "description": "State of the merge request",
"args": [
],
@@ -9557,7 +12576,7 @@
"name": null,
"ofType": {
"kind": "ENUM",
- "name": "IssueState",
+ "name": "MergeRequestState",
"ofType": null
}
},
@@ -9566,7 +12585,7 @@
},
{
"name": "subscribed",
- "description": "Indicates the currently logged in user is subscribed to the issue",
+ "description": "Indicates if the currently logged in user is subscribed to this merge request",
"args": [
],
@@ -9583,8 +12602,62 @@
"deprecationReason": null
},
{
+ "name": "targetBranch",
+ "description": "Target branch of the merge request",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "targetProject",
+ "description": "Target project of the merge request",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "Project",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "targetProjectId",
+ "description": "ID of the merge request target project",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
"name": "taskCompletionStatus",
- "description": "Task completion status of the issue",
+ "description": "Completion status of tasks",
"args": [
],
@@ -9602,7 +12675,7 @@
},
{
"name": "timeEstimate",
- "description": "Time estimate of the issue",
+ "description": "Time estimate of the merge request",
"args": [
],
@@ -9620,7 +12693,7 @@
},
{
"name": "title",
- "description": "Title of the issue",
+ "description": "Title of the merge request",
"args": [
],
@@ -9652,7 +12725,7 @@
},
{
"name": "totalTimeSpent",
- "description": "Total time reported as spent on the issue",
+ "description": "Total time reported as spent on the merge request",
"args": [
],
@@ -9670,7 +12743,7 @@
},
{
"name": "updatedAt",
- "description": "Timestamp of when the issue was last updated",
+ "description": "Timestamp of when the merge request was last updated",
"args": [
],
@@ -9688,7 +12761,7 @@
},
{
"name": "upvotes",
- "description": "Number of upvotes the issue has received",
+ "description": "Number of upvotes for the merge request",
"args": [
],
@@ -9706,7 +12779,21 @@
},
{
"name": "userNotesCount",
- "description": "Number of user notes of the issue",
+ "description": "User notes count of the merge request",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "userPermissions",
+ "description": "Permissions for the current user on the resource",
"args": [
],
@@ -9714,8 +12801,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Int",
+ "kind": "OBJECT",
+ "name": "MergeRequestPermissions",
"ofType": null
}
},
@@ -9723,8 +12810,22 @@
"deprecationReason": null
},
{
- "name": "userPermissions",
- "description": "Permissions for the current user on the resource",
+ "name": "webUrl",
+ "description": "Web URL of the merge request",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "workInProgress",
+ "description": "Indicates if the merge request is a work in progress (WIP)",
"args": [
],
@@ -9732,8 +12833,43 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+ {
+ "kind": "INTERFACE",
+ "name": "Noteable",
+ "ofType": null
+ }
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "MergeRequestConnection",
+ "description": "The connection type for MergeRequest.",
+ "fields": [
+ {
+ "name": "edges",
+ "description": "A list of edges.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
"kind": "OBJECT",
- "name": "IssuePermissions",
+ "name": "MergeRequestEdge",
"ofType": null
}
},
@@ -9741,17 +12877,17 @@
"deprecationReason": null
},
{
- "name": "webPath",
- "description": "Web path of the issue",
+ "name": "nodes",
+ "description": "A list of nodes.",
"args": [
],
"type": {
- "kind": "NON_NULL",
+ "kind": "LIST",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "OBJECT",
+ "name": "MergeRequest",
"ofType": null
}
},
@@ -9759,8 +12895,39 @@
"deprecationReason": null
},
{
- "name": "webUrl",
- "description": "Web URL of the issue",
+ "name": "pageInfo",
+ "description": "Information to aid in pagination.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "PageInfo",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "MergeRequestEdge",
+ "description": "An edge in a connection.",
+ "fields": [
+ {
+ "name": "cursor",
+ "description": "A cursor for use in pagination.",
"args": [
],
@@ -9777,14 +12944,14 @@
"deprecationReason": null
},
{
- "name": "weight",
- "description": "Weight of the issue",
+ "name": "node",
+ "description": "The item at the end of the edge.",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "Int",
+ "kind": "OBJECT",
+ "name": "MergeRequest",
"ofType": null
},
"isDeprecated": false,
@@ -9793,23 +12960,19 @@
],
"inputFields": null,
"interfaces": [
- {
- "kind": "INTERFACE",
- "name": "Noteable",
- "ofType": null
- }
+
],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
- "name": "IssuePermissions",
- "description": "Check permissions for the current user on a issue",
+ "name": "MergeRequestPermissions",
+ "description": "Check permissions for the current user on a merge request",
"fields": [
{
- "name": "adminIssue",
- "description": "Indicates the user can perform `admin_issue` on this resource",
+ "name": "adminMergeRequest",
+ "description": "Indicates the user can perform `admin_merge_request` on this resource",
"args": [
],
@@ -9826,8 +12989,8 @@
"deprecationReason": null
},
{
- "name": "createDesign",
- "description": "Indicates the user can perform `create_design` on this resource",
+ "name": "cherryPickOnCurrentMergeRequest",
+ "description": "Indicates the user can perform `cherry_pick_on_current_merge_request` on this resource",
"args": [
],
@@ -9862,8 +13025,8 @@
"deprecationReason": null
},
{
- "name": "destroyDesign",
- "description": "Indicates the user can perform `destroy_design` on this resource",
+ "name": "pushToSourceBranch",
+ "description": "Indicates the user can perform `push_to_source_branch` on this resource",
"args": [
],
@@ -9880,8 +13043,8 @@
"deprecationReason": null
},
{
- "name": "readDesign",
- "description": "Indicates the user can perform `read_design` on this resource",
+ "name": "readMergeRequest",
+ "description": "Indicates the user can perform `read_merge_request` on this resource",
"args": [
],
@@ -9898,8 +13061,8 @@
"deprecationReason": null
},
{
- "name": "readIssue",
- "description": "Indicates the user can perform `read_issue` on this resource",
+ "name": "removeSourceBranch",
+ "description": "Indicates the user can perform `remove_source_branch` on this resource",
"args": [
],
@@ -9916,8 +13079,8 @@
"deprecationReason": null
},
{
- "name": "reopenIssue",
- "description": "Indicates the user can perform `reopen_issue` on this resource",
+ "name": "revertOnCurrentMergeRequest",
+ "description": "Indicates the user can perform `revert_on_current_merge_request` on this resource",
"args": [
],
@@ -9934,8 +13097,8 @@
"deprecationReason": null
},
{
- "name": "updateIssue",
- "description": "Indicates the user can perform `update_issue` on this resource",
+ "name": "updateMergeRequest",
+ "description": "Indicates the user can perform `update_merge_request` on this resource",
"args": [
],
@@ -9960,60 +13123,108 @@
"possibleTypes": null
},
{
- "kind": "ENUM",
- "name": "IssueState",
- "description": "State of a GitLab issue",
+ "kind": "INPUT_OBJECT",
+ "name": "MergeRequestSetAssigneesInput",
+ "description": "Autogenerated input type of MergeRequestSetAssignees",
"fields": null,
- "inputFields": null,
- "interfaces": null,
- "enumValues": [
+ "inputFields": [
{
- "name": "opened",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
+ "name": "projectPath",
+ "description": "The project the merge request to mutate is in",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
},
{
- "name": "closed",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
+ "name": "iid",
+ "description": "The iid of the merge request to mutate",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
},
{
- "name": "locked",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
+ "name": "assigneeUsernames",
+ "description": "The usernames to assign to the merge request. Replaces existing assignees by default.\n",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "operationMode",
+ "description": "The operation to perform. Defaults to REPLACE.\n",
+ "type": {
+ "kind": "ENUM",
+ "name": "MutationOperationMode",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
}
],
+ "interfaces": null,
+ "enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
- "name": "TaskCompletionStatus",
- "description": "Completion status of tasks",
+ "name": "MergeRequestSetAssigneesPayload",
+ "description": "Autogenerated return type of MergeRequestSetAssignees",
"fields": [
{
- "name": "completedCount",
- "description": "Number of completed tasks",
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "count",
- "description": "Number of total tasks",
+ "name": "errors",
+ "description": "Reasons why the mutation failed.",
"args": [
],
@@ -10021,13 +13232,35 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
}
},
"isDeprecated": false,
"deprecationReason": null
+ },
+ {
+ "name": "mergeRequest",
+ "description": "The merge request after mutation",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "MergeRequest",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
}
],
"inputFields": null,
@@ -10038,51 +13271,49 @@
"possibleTypes": null
},
{
- "kind": "OBJECT",
- "name": "DesignCollection",
- "description": "A collection of designs.",
- "fields": [
+ "kind": "INPUT_OBJECT",
+ "name": "MergeRequestSetLabelsInput",
+ "description": "Autogenerated input type of MergeRequestSetLabels",
+ "fields": null,
+ "inputFields": [
{
- "name": "design",
- "description": "Find a specific design",
- "args": [
- {
- "name": "id",
- "description": "Find a design by its ID",
- "type": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "filename",
- "description": "Find a design by its filename",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
+ "name": "projectPath",
+ "description": "The project the merge request to mutate is in",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
}
- ],
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "iid",
+ "description": "The iid of the merge request to mutate",
"type": {
- "kind": "OBJECT",
- "name": "Design",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "designAtVersion",
- "description": "Find a design as of a version",
- "args": [
- {
- "name": "id",
- "description": "The Global ID of the design at this version",
- "type": {
+ "name": "labelIds",
+ "description": "The Label IDs to set. Replaces existing labels by default.\n",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": {
@@ -10090,124 +13321,58 @@
"name": "ID",
"ofType": null
}
- },
- "defaultValue": null
+ }
}
- ],
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "operationMode",
+ "description": "Changes the operation mode. Defaults to REPLACE.\n",
"type": {
- "kind": "OBJECT",
- "name": "DesignAtVersion",
+ "kind": "ENUM",
+ "name": "MutationOperationMode",
"ofType": null
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "designs",
- "description": "All designs for the design collection",
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "MergeRequestSetLabelsPayload",
+ "description": "Autogenerated return type of MergeRequestSetLabels",
+ "fields": [
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
"args": [
- {
- "name": "ids",
- "description": "Filters designs by their ID",
- "type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- }
- },
- "defaultValue": null
- },
- {
- "name": "filenames",
- "description": "Filters designs by their filename",
- "type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- }
- },
- "defaultValue": null
- },
- {
- "name": "atVersion",
- "description": "Filters designs to only those that existed at the version. If argument is omitted or nil then all designs will reflect the latest version",
- "type": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "after",
- "description": "Returns the elements in the list that come after the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "before",
- "description": "Returns the elements in the list that come before the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "first",
- "description": "Returns the first _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "last",
- "description": "Returns the last _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- }
+
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "DesignConnection",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "issue",
- "description": "Issue associated with the design collection",
+ "name": "errors",
+ "description": "Reasons why the mutation failed.",
"args": [
],
@@ -10215,211 +13380,129 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "Issue",
- "ofType": null
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "project",
- "description": "Project associated with the design collection",
+ "name": "mergeRequest",
+ "description": "The merge request after mutation",
"args": [
],
"type": {
+ "kind": "OBJECT",
+ "name": "MergeRequest",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "INPUT_OBJECT",
+ "name": "MergeRequestSetLockedInput",
+ "description": "Autogenerated input type of MergeRequestSetLocked",
+ "fields": null,
+ "inputFields": [
+ {
+ "name": "projectPath",
+ "description": "The project the merge request to mutate is in",
+ "type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "Project",
+ "kind": "SCALAR",
+ "name": "ID",
"ofType": null
}
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "version",
- "description": "A specific version",
- "args": [
- {
- "name": "sha",
- "description": "The SHA256 of a specific version",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "id",
- "description": "The Global ID of the version",
- "type": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- },
- "defaultValue": null
- }
- ],
+ "name": "iid",
+ "description": "The iid of the merge request to mutate",
"type": {
- "kind": "OBJECT",
- "name": "DesignVersion",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "versions",
- "description": "All versions related to all designs, ordered newest first",
- "args": [
- {
- "name": "earlierOrEqualToSha",
- "description": "The SHA256 of the most recent acceptable version",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "earlierOrEqualToId",
- "description": "The Global ID of the most recent acceptable version",
- "type": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "after",
- "description": "Returns the elements in the list that come after the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "before",
- "description": "Returns the elements in the list that come before the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "first",
- "description": "Returns the first _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "last",
- "description": "Returns the last _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- }
- ],
+ "name": "locked",
+ "description": "Whether or not to lock the merge request.\n",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "DesignVersionConnection",
+ "kind": "SCALAR",
+ "name": "Boolean",
"ofType": null
}
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
+ },
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
}
],
- "inputFields": null,
- "interfaces": [
-
- ],
+ "interfaces": null,
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
- "name": "Issue",
- "description": null,
+ "name": "MergeRequestSetLockedPayload",
+ "description": "Autogenerated return type of MergeRequestSetLocked",
"fields": [
{
- "name": "assignees",
- "description": "Assignees of the issue",
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
"args": [
- {
- "name": "after",
- "description": "Returns the elements in the list that come after the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "before",
- "description": "Returns the elements in the list that come before the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "first",
- "description": "Returns the first _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "last",
- "description": "Returns the last _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- }
+
],
"type": {
- "kind": "OBJECT",
- "name": "UserConnection",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "author",
- "description": "User that created the issue",
+ "name": "errors",
+ "description": "Reasons why the mutation failed.",
"args": [
],
@@ -10427,81 +13510,111 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "User",
- "ofType": null
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "closedAt",
- "description": "Timestamp of when the issue was closed",
+ "name": "mergeRequest",
+ "description": "The merge request after mutation",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "Time",
+ "kind": "OBJECT",
+ "name": "MergeRequest",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
- },
- {
- "name": "confidential",
- "description": "Indicates the issue is confidential",
- "args": [
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [

- ],
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "INPUT_OBJECT",
+ "name": "MergeRequestSetMilestoneInput",
+ "description": "Autogenerated input type of MergeRequestSetMilestone",
+ "fields": null,
+ "inputFields": [
+ {
+ "name": "projectPath",
+ "description": "The project the merge request to mutate is in",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "ID",
"ofType": null
}
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "createdAt",
- "description": "Timestamp of when the issue was created",
- "args": [
-
- ],
+ "name": "iid",
+ "description": "The iid of the merge request to mutate",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Time",
+ "name": "String",
"ofType": null
}
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "description",
- "description": "Description of the issue",
- "args": [
-
- ],
+ "name": "milestoneId",
+ "description": "The milestone to assign to the merge request.\n",
"type": {
"kind": "SCALAR",
- "name": "String",
+ "name": "ID",
"ofType": null
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "descriptionHtml",
- "description": "The GitLab Flavored Markdown rendering of `description`",
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "MergeRequestSetMilestonePayload",
+ "description": "Autogenerated return type of MergeRequestSetMilestone",
+ "fields": [
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
"args": [
],
@@ -10514,157 +13627,138 @@
"deprecationReason": null
},
{
- "name": "designCollection",
- "description": "Collection of design images associated with this issue",
+ "name": "errors",
+ "description": "Reasons why the mutation failed.",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "DesignCollection",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "designs",
- "description": "Deprecated. Use `designCollection`",
+ "name": "mergeRequest",
+ "description": "The merge request after mutation",
"args": [
],
"type": {
"kind": "OBJECT",
- "name": "DesignCollection",
+ "name": "MergeRequest",
"ofType": null
},
- "isDeprecated": true,
- "deprecationReason": "Use designCollection"
- },
- {
- "name": "discussionLocked",
- "description": "Indicates discussion is locked on the issue",
- "args": [
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [

- ],
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "INPUT_OBJECT",
+ "name": "MergeRequestSetSubscriptionInput",
+ "description": "Autogenerated input type of MergeRequestSetSubscription",
+ "fields": null,
+ "inputFields": [
+ {
+ "name": "projectPath",
+ "description": "The project the merge request to mutate is in",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "ID",
"ofType": null
}
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "discussions",
- "description": "All discussions on this noteable",
- "args": [
- {
- "name": "after",
- "description": "Returns the elements in the list that come after the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "before",
- "description": "Returns the elements in the list that come before the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "first",
- "description": "Returns the first _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "last",
- "description": "Returns the last _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- }
- ],
+ "name": "iid",
+ "description": "The iid of the merge request to mutate",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "DiscussionConnection",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
}
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "downvotes",
- "description": "Number of downvotes the issue has received",
- "args": [
-
- ],
+ "name": "subscribedState",
+ "description": "The desired state of the subscription",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Int",
+ "name": "Boolean",
"ofType": null
}
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "dueDate",
- "description": "Due date of the issue",
- "args": [
-
- ],
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
"type": {
"kind": "SCALAR",
- "name": "Time",
+ "name": "String",
"ofType": null
},
- "isDeprecated": false,
- "deprecationReason": null
- },
+ "defaultValue": null
+ }
+ ],
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "MergeRequestSetSubscriptionPayload",
+ "description": "Autogenerated return type of MergeRequestSetSubscription",
+ "fields": [
{
- "name": "epic",
- "description": "Epic to which this issue belongs",
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "Epic",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "iid",
- "description": "Internal ID of the issue",
+ "name": "errors",
+ "description": "Reasons why the mutation failed.",
"args": [
],
@@ -10672,235 +13766,129 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "labels",
- "description": "Labels of the issue",
- "args": [
- {
- "name": "after",
- "description": "Returns the elements in the list that come after the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "before",
- "description": "Returns the elements in the list that come before the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "first",
- "description": "Returns the first _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "last",
- "description": "Returns the last _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
}
- ],
- "type": {
- "kind": "OBJECT",
- "name": "LabelConnection",
- "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "milestone",
- "description": "Milestone of the issue",
+ "name": "mergeRequest",
+ "description": "The merge request after mutation",
"args": [
],
"type": {
"kind": "OBJECT",
- "name": "Milestone",
+ "name": "MergeRequest",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
- },
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "INPUT_OBJECT",
+ "name": "MergeRequestSetWipInput",
+ "description": "Autogenerated input type of MergeRequestSetWip",
+ "fields": null,
+ "inputFields": [
{
- "name": "notes",
- "description": "All notes on this noteable",
- "args": [
- {
- "name": "after",
- "description": "Returns the elements in the list that come after the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "before",
- "description": "Returns the elements in the list that come before the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "first",
- "description": "Returns the first _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "last",
- "description": "Returns the last _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- }
- ],
+ "name": "projectPath",
+ "description": "The project the merge request to mutate is in",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "NoteConnection",
+ "kind": "SCALAR",
+ "name": "ID",
"ofType": null
}
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "participants",
- "description": "List of participants in the issue",
- "args": [
- {
- "name": "after",
- "description": "Returns the elements in the list that come after the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "before",
- "description": "Returns the elements in the list that come before the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "first",
- "description": "Returns the first _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "last",
- "description": "Returns the last _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- }
- ],
+ "name": "iid",
+ "description": "The iid of the merge request to mutate",
"type": {
- "kind": "OBJECT",
- "name": "UserConnection",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "reference",
- "description": "Internal reference of the issue. Returned in shortened format by default",
- "args": [
- {
- "name": "full",
- "description": "Boolean option specifying whether the reference should be returned in full",
- "type": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- },
- "defaultValue": "false"
- }
- ],
+ "name": "wip",
+ "description": "Whether or not to set the merge request as a WIP.\n",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
}
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "relativePosition",
- "description": "Relative position of the issue (used for positioning in epic tree and issue boards)",
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "MergeRequestSetWipPayload",
+ "description": "Autogenerated return type of MergeRequestSetWip",
+ "fields": [
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
"args": [
],
"type": {
"kind": "SCALAR",
- "name": "Int",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "state",
- "description": "State of the issue",
+ "name": "errors",
+ "description": "Reasons why the mutation failed.",
"args": [
],
@@ -10908,35 +13896,87 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "ENUM",
- "name": "IssueState",
- "ofType": null
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "subscribed",
- "description": "Indicates the currently logged in user is subscribed to the issue",
+ "name": "mergeRequest",
+ "description": "The merge request after mutation",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- }
+ "kind": "OBJECT",
+ "name": "MergeRequest",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "ENUM",
+ "name": "MergeRequestState",
+ "description": "State of a GitLab merge request",
+ "fields": null,
+ "inputFields": null,
+ "interfaces": null,
+ "enumValues": [
+ {
+ "name": "opened",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "taskCompletionStatus",
- "description": "Task completion status of the issue",
+ "name": "closed",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "locked",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "merged",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "Metadata",
+ "description": null,
+ "fields": [
+ {
+ "name": "revision",
+ "description": "Revision",
"args": [
],
@@ -10944,8 +13984,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "TaskCompletionStatus",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
}
},
@@ -10953,8 +13993,8 @@
"deprecationReason": null
},
{
- "name": "timeEstimate",
- "description": "Time estimate of the issue",
+ "name": "version",
+ "description": "Version",
"args": [
],
@@ -10963,16 +14003,29 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Int",
+ "name": "String",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
- },
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "Milestone",
+ "description": "Represents a milestone.",
+ "fields": [
{
- "name": "title",
- "description": "Title of the issue",
+ "name": "createdAt",
+ "description": "Timestamp of milestone creation",
"args": [
],
@@ -10981,7 +14034,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Time",
"ofType": null
}
},
@@ -10989,8 +14042,8 @@
"deprecationReason": null
},
{
- "name": "titleHtml",
- "description": "The GitLab Flavored Markdown rendering of `title`",
+ "name": "description",
+ "description": "Description of the milestone",
"args": [
],
@@ -11003,26 +14056,22 @@
"deprecationReason": null
},
{
- "name": "totalTimeSpent",
- "description": "Total time reported as spent on the issue",
+ "name": "dueDate",
+ "description": "Timestamp of the milestone due date",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "updatedAt",
- "description": "Timestamp of when the issue was last updated",
+ "name": "id",
+ "description": "ID of the milestone",
"args": [
],
@@ -11031,7 +14080,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Time",
+ "name": "ID",
"ofType": null
}
},
@@ -11039,26 +14088,22 @@
"deprecationReason": null
},
{
- "name": "upvotes",
- "description": "Number of upvotes the issue has received",
+ "name": "startDate",
+ "description": "Timestamp of the milestone start date",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "userNotesCount",
- "description": "Number of user notes of the issue",
+ "name": "state",
+ "description": "State of the milestone",
"args": [
],
@@ -11066,8 +14111,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Int",
+ "kind": "ENUM",
+ "name": "MilestoneStateEnum",
"ofType": null
}
},
@@ -11075,8 +14120,8 @@
"deprecationReason": null
},
{
- "name": "userPermissions",
- "description": "Permissions for the current user on the resource",
+ "name": "title",
+ "description": "Title of the milestone",
"args": [
],
@@ -11084,8 +14129,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "IssuePermissions",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
}
},
@@ -11093,8 +14138,8 @@
"deprecationReason": null
},
{
- "name": "webPath",
- "description": "Web path of the issue",
+ "name": "updatedAt",
+ "description": "Timestamp of last milestone update",
"args": [
],
@@ -11103,7 +14148,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Time",
"ofType": null
}
},
@@ -11111,8 +14156,8 @@
"deprecationReason": null
},
{
- "name": "webUrl",
- "description": "Web URL of the issue",
+ "name": "webPath",
+ "description": "Web path of the milestone",
"args": [
],
@@ -11127,66 +14172,19 @@
},
"isDeprecated": false,
"deprecationReason": null
- },
- {
- "name": "weight",
- "description": "Weight of the issue",
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
}
],
"inputFields": null,
"interfaces": [
- {
- "kind": "INTERFACE",
- "name": "Noteable",
- "ofType": null
- }
+
],
"enumValues": null,
"possibleTypes": null
},
{
- "kind": "ENUM",
- "name": "HealthStatus",
- "description": "Health status of an issue or epic",
- "fields": null,
- "inputFields": null,
- "interfaces": null,
- "enumValues": [
- {
- "name": "onTrack",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "needsAttention",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "atRisk",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
- }
- ],
- "possibleTypes": null
- },
- {
"kind": "OBJECT",
- "name": "DesignConnection",
- "description": "The connection type for Design.",
+ "name": "MilestoneConnection",
+ "description": "The connection type for Milestone.",
"fields": [
{
"name": "edges",
@@ -11199,7 +14197,7 @@
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "DesignEdge",
+ "name": "MilestoneEdge",
"ofType": null
}
},
@@ -11217,7 +14215,7 @@
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "Design",
+ "name": "Milestone",
"ofType": null
}
},
@@ -11252,7 +14250,7 @@
},
{
"kind": "OBJECT",
- "name": "DesignEdge",
+ "name": "MilestoneEdge",
"description": "An edge in a connection.",
"fields": [
{
@@ -11281,7 +14279,7 @@
],
"type": {
"kind": "OBJECT",
- "name": "Design",
+ "name": "Milestone",
"ofType": null
},
"isDeprecated": false,
@@ -11296,244 +14294,1110 @@
"possibleTypes": null
},
{
+ "kind": "ENUM",
+ "name": "MilestoneStateEnum",
+ "description": null,
+ "fields": null,
+ "inputFields": null,
+ "interfaces": null,
+ "enumValues": [
+ {
+ "name": "active",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "closed",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "possibleTypes": null
+ },
+ {
+ "kind": "ENUM",
+ "name": "MoveType",
+ "description": "The position to which the adjacent object should be moved",
+ "fields": null,
+ "inputFields": null,
+ "interfaces": null,
+ "enumValues": [
+ {
+ "name": "before",
+ "description": "The adjacent object will be moved before the object that is being moved",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "after",
+ "description": "The adjacent object will be moved after the object that is being moved",
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "possibleTypes": null
+ },
+ {
"kind": "OBJECT",
- "name": "Design",
- "description": "A single design",
+ "name": "Mutation",
+ "description": null,
"fields": [
{
- "name": "diffRefs",
- "description": "The diff refs for this design",
+ "name": "addAwardEmoji",
+ "description": null,
"args": [
-
+ {
+ "name": "input",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "AddAwardEmojiInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ }
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "DiffRefs",
- "ofType": null
+ "kind": "OBJECT",
+ "name": "AddAwardEmojiPayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "adminSidekiqQueuesDeleteJobs",
+ "description": null,
+ "args": [
+ {
+ "name": "input",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "AdminSidekiqQueuesDeleteJobsInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
}
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "AdminSidekiqQueuesDeleteJobsPayload",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "discussions",
- "description": "All discussions on this noteable",
+ "name": "createDiffNote",
+ "description": null,
"args": [
{
- "name": "after",
- "description": "Returns the elements in the list that come after the specified cursor.",
+ "name": "input",
+ "description": null,
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "CreateDiffNoteInput",
+ "ofType": null
+ }
},
"defaultValue": null
- },
+ }
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "CreateDiffNotePayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "createEpic",
+ "description": null,
+ "args": [
{
- "name": "before",
- "description": "Returns the elements in the list that come before the specified cursor.",
+ "name": "input",
+ "description": null,
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "CreateEpicInput",
+ "ofType": null
+ }
},
"defaultValue": null
- },
+ }
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "CreateEpicPayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "createImageDiffNote",
+ "description": null,
+ "args": [
{
- "name": "first",
- "description": "Returns the first _n_ elements from the list.",
+ "name": "input",
+ "description": null,
"type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "CreateImageDiffNoteInput",
+ "ofType": null
+ }
},
"defaultValue": null
- },
+ }
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "CreateImageDiffNotePayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "createNote",
+ "description": null,
+ "args": [
{
- "name": "last",
- "description": "Returns the last _n_ elements from the list.",
+ "name": "input",
+ "description": null,
"type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "CreateNoteInput",
+ "ofType": null
+ }
},
"defaultValue": null
}
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "DiscussionConnection",
- "ofType": null
+ "kind": "OBJECT",
+ "name": "CreateNotePayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "createRequirement",
+ "description": null,
+ "args": [
+ {
+ "name": "input",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "CreateRequirementInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
}
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "CreateRequirementPayload",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "event",
- "description": "How this design was changed in the current version",
+ "name": "createSnippet",
+ "description": null,
"args": [
-
+ {
+ "name": "input",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "CreateSnippetInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ }
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "ENUM",
- "name": "DesignVersionEvent",
- "ofType": null
+ "kind": "OBJECT",
+ "name": "CreateSnippetPayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "designManagementDelete",
+ "description": null,
+ "args": [
+ {
+ "name": "input",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "DesignManagementDeleteInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
}
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "DesignManagementDeletePayload",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "filename",
- "description": "The filename of the design",
+ "name": "designManagementUpload",
+ "description": null,
"args": [
-
+ {
+ "name": "input",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "DesignManagementUploadInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ }
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "OBJECT",
+ "name": "DesignManagementUploadPayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "destroyNote",
+ "description": null,
+ "args": [
+ {
+ "name": "input",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "DestroyNoteInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
}
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "DestroyNotePayload",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "fullPath",
- "description": "The full path to the design file",
+ "name": "destroySnippet",
+ "description": null,
"args": [
-
+ {
+ "name": "input",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "DestroySnippetInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ }
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "OBJECT",
+ "name": "DestroySnippetPayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "epicAddIssue",
+ "description": null,
+ "args": [
+ {
+ "name": "input",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "EpicAddIssueInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
}
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "EpicAddIssuePayload",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "id",
- "description": "The ID of this design",
+ "name": "epicSetSubscription",
+ "description": null,
"args": [
-
+ {
+ "name": "input",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "EpicSetSubscriptionInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ }
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
+ "kind": "OBJECT",
+ "name": "EpicSetSubscriptionPayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "epicTreeReorder",
+ "description": null,
+ "args": [
+ {
+ "name": "input",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "EpicTreeReorderInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
}
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "EpicTreeReorderPayload",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "image",
- "description": "The URL of the image",
+ "name": "issueSetConfidential",
+ "description": null,
"args": [
-
+ {
+ "name": "input",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "IssueSetConfidentialInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ }
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "OBJECT",
+ "name": "IssueSetConfidentialPayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "issueSetDueDate",
+ "description": null,
+ "args": [
+ {
+ "name": "input",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "IssueSetDueDateInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
}
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "IssueSetDueDatePayload",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "issue",
- "description": "The issue the design belongs to",
+ "name": "issueSetWeight",
+ "description": null,
"args": [
-
+ {
+ "name": "input",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "IssueSetWeightInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ }
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "Issue",
- "ofType": null
+ "kind": "OBJECT",
+ "name": "IssueSetWeightPayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "markAsSpamSnippet",
+ "description": null,
+ "args": [
+ {
+ "name": "input",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "MarkAsSpamSnippetInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
}
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "MarkAsSpamSnippetPayload",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "notes",
- "description": "All notes on this noteable",
+ "name": "mergeRequestSetAssignees",
+ "description": null,
"args": [
{
- "name": "after",
- "description": "Returns the elements in the list that come after the specified cursor.",
+ "name": "input",
+ "description": null,
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "MergeRequestSetAssigneesInput",
+ "ofType": null
+ }
},
"defaultValue": null
- },
+ }
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "MergeRequestSetAssigneesPayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "mergeRequestSetLabels",
+ "description": null,
+ "args": [
{
- "name": "before",
- "description": "Returns the elements in the list that come before the specified cursor.",
+ "name": "input",
+ "description": null,
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "MergeRequestSetLabelsInput",
+ "ofType": null
+ }
},
"defaultValue": null
- },
+ }
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "MergeRequestSetLabelsPayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "mergeRequestSetLocked",
+ "description": null,
+ "args": [
{
- "name": "first",
- "description": "Returns the first _n_ elements from the list.",
+ "name": "input",
+ "description": null,
"type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "MergeRequestSetLockedInput",
+ "ofType": null
+ }
},
"defaultValue": null
- },
+ }
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "MergeRequestSetLockedPayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "mergeRequestSetMilestone",
+ "description": null,
+ "args": [
{
- "name": "last",
- "description": "Returns the last _n_ elements from the list.",
+ "name": "input",
+ "description": null,
"type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "MergeRequestSetMilestoneInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ }
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "MergeRequestSetMilestonePayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "mergeRequestSetSubscription",
+ "description": null,
+ "args": [
+ {
+ "name": "input",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "MergeRequestSetSubscriptionInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ }
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "MergeRequestSetSubscriptionPayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "mergeRequestSetWip",
+ "description": null,
+ "args": [
+ {
+ "name": "input",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "MergeRequestSetWipInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ }
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "MergeRequestSetWipPayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "removeAwardEmoji",
+ "description": null,
+ "args": [
+ {
+ "name": "input",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "RemoveAwardEmojiInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ }
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "RemoveAwardEmojiPayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "todoMarkDone",
+ "description": null,
+ "args": [
+ {
+ "name": "input",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "TodoMarkDoneInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ }
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "TodoMarkDonePayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "todoRestore",
+ "description": null,
+ "args": [
+ {
+ "name": "input",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "TodoRestoreInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ }
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "TodoRestorePayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "todoRestoreMany",
+ "description": null,
+ "args": [
+ {
+ "name": "input",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "TodoRestoreManyInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ }
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "TodoRestoreManyPayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "todosMarkAllDone",
+ "description": null,
+ "args": [
+ {
+ "name": "input",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "TodosMarkAllDoneInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ }
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "TodosMarkAllDonePayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "toggleAwardEmoji",
+ "description": null,
+ "args": [
+ {
+ "name": "input",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "ToggleAwardEmojiInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ }
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "ToggleAwardEmojiPayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "updateEpic",
+ "description": null,
+ "args": [
+ {
+ "name": "input",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "UpdateEpicInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ }
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "UpdateEpicPayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "updateImageDiffNote",
+ "description": "Updates a DiffNote on an image (a `Note` where the `position.positionType` is `\"image\"`). If the body of the Note contains only quick actions, the Note will be destroyed during the update, and no Note will be returned",
+ "args": [
+ {
+ "name": "input",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "UpdateImageDiffNoteInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ }
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "UpdateImageDiffNotePayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "updateIssue",
+ "description": null,
+ "args": [
+ {
+ "name": "input",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "UpdateIssueInput",
+ "ofType": null
+ }
},
"defaultValue": null
}
],
"type": {
+ "kind": "OBJECT",
+ "name": "UpdateIssuePayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "updateNote",
+ "description": "Updates a Note. If the body of the Note contains only quick actions, the Note will be destroyed during the update, and no Note will be returned",
+ "args": [
+ {
+ "name": "input",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "UpdateNoteInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ }
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "UpdateNotePayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "updateSnippet",
+ "description": null,
+ "args": [
+ {
+ "name": "input",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "UpdateSnippetInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ }
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "UpdateSnippetPayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "ENUM",
+ "name": "MutationOperationMode",
+ "description": "Different toggles for changing mutator behavior.",
+ "fields": null,
+ "inputFields": null,
+ "interfaces": null,
+ "enumValues": [
+ {
+ "name": "REPLACE",
+ "description": "Performs a replace operation",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "APPEND",
+ "description": "Performs an append operation",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "REMOVE",
+ "description": "Performs a removal operation",
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "Namespace",
+ "description": null,
+ "fields": [
+ {
+ "name": "description",
+ "description": "Description of the namespace",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "descriptionHtml",
+ "description": "The GitLab Flavored Markdown rendering of `description`",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "fullName",
+ "description": "Full name of the namespace",
+ "args": [
+
+ ],
+ "type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "NoteConnection",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
}
},
@@ -11541,8 +15405,8 @@
"deprecationReason": null
},
{
- "name": "notesCount",
- "description": "The total count of user-created notes for this design",
+ "name": "fullPath",
+ "description": "Full path of the namespace",
"args": [
],
@@ -11551,7 +15415,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Int",
+ "name": "ID",
"ofType": null
}
},
@@ -11559,8 +15423,8 @@
"deprecationReason": null
},
{
- "name": "project",
- "description": "The project the design belongs to",
+ "name": "id",
+ "description": "ID of the namespace",
"args": [
],
@@ -11568,8 +15432,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "Project",
+ "kind": "SCALAR",
+ "name": "ID",
"ofType": null
}
},
@@ -11577,28 +15441,68 @@
"deprecationReason": null
},
{
- "name": "versions",
- "description": "All versions related to this design ordered newest first",
+ "name": "lfsEnabled",
+ "description": "Indicates if Large File Storage (LFS) is enabled for namespace",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "name",
+ "description": "Name of the namespace",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "path",
+ "description": "Path of the namespace",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "projects",
+ "description": "Projects within this namespace",
"args": [
{
- "name": "earlierOrEqualToSha",
- "description": "The SHA256 of the most recent acceptable version",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "earlierOrEqualToId",
- "description": "The Global ID of the most recent acceptable version",
+ "name": "includeSubgroups",
+ "description": "Include also subgroup projects",
"type": {
"kind": "SCALAR",
- "name": "ID",
+ "name": "Boolean",
"ofType": null
},
- "defaultValue": null
+ "defaultValue": "false"
},
{
"name": "after",
@@ -11646,38 +15550,71 @@
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "DesignVersionConnection",
+ "name": "ProjectConnection",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
+ },
{
- "kind": "INTERFACE",
- "name": "Noteable",
- "ofType": null
+ "name": "requestAccessEnabled",
+ "description": "Indicates if users can request access to namespace",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "kind": "INTERFACE",
- "name": "DesignFields",
- "ofType": null
+ "name": "rootStorageStatistics",
+ "description": "Aggregated storage statistics of the namespace. Only available for root namespaces",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "RootStorageStatistics",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "visibility",
+ "description": "Visibility of the namespace",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
}
],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
"enumValues": null,
"possibleTypes": null
},
{
- "kind": "INTERFACE",
- "name": "DesignFields",
+ "kind": "OBJECT",
+ "name": "Note",
"description": null,
"fields": [
{
- "name": "diffRefs",
- "description": "The diff refs for this design",
+ "name": "author",
+ "description": "User who wrote this note",
"args": [
],
@@ -11686,7 +15623,7 @@
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "DiffRefs",
+ "name": "User",
"ofType": null
}
},
@@ -11694,8 +15631,8 @@
"deprecationReason": null
},
{
- "name": "event",
- "description": "How this design was changed in the current version",
+ "name": "body",
+ "description": "Content of the note",
"args": [
],
@@ -11703,8 +15640,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "ENUM",
- "name": "DesignVersionEvent",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
}
},
@@ -11712,26 +15649,22 @@
"deprecationReason": null
},
{
- "name": "filename",
- "description": "The filename of the design",
+ "name": "bodyHtml",
+ "description": "The GitLab Flavored Markdown rendering of `note`",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "fullPath",
- "description": "The full path to the design file",
+ "name": "createdAt",
+ "description": "Timestamp of the note creation",
"args": [
],
@@ -11740,7 +15673,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Time",
"ofType": null
}
},
@@ -11748,8 +15681,22 @@
"deprecationReason": null
},
{
+ "name": "discussion",
+ "description": "The discussion this note is a part of",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "Discussion",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
"name": "id",
- "description": "The ID of this design",
+ "description": "ID of the note",
"args": [
],
@@ -11766,8 +15713,36 @@
"deprecationReason": null
},
{
- "name": "image",
- "description": "The URL of the image",
+ "name": "position",
+ "description": "The position of this note on a diff",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "DiffPosition",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "project",
+ "description": "Project associated with the note",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "Project",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "resolvable",
+ "description": "Indicates if this note can be resolved. That is, if it is a resolvable discussion or simply a standalone note",
"args": [
],
@@ -11776,7 +15751,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
}
},
@@ -11784,8 +15759,36 @@
"deprecationReason": null
},
{
- "name": "issue",
- "description": "The issue the design belongs to",
+ "name": "resolvedAt",
+ "description": "Timestamp of the note's resolution",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "resolvedBy",
+ "description": "User that resolved the discussion",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "User",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "system",
+ "description": "Indicates whether this note was created by the system or by a user",
"args": [
],
@@ -11793,8 +15796,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "Issue",
+ "kind": "SCALAR",
+ "name": "Boolean",
"ofType": null
}
},
@@ -11802,8 +15805,8 @@
"deprecationReason": null
},
{
- "name": "notesCount",
- "description": "The total count of user-created notes for this design",
+ "name": "updatedAt",
+ "description": "Timestamp of the note's last activity",
"args": [
],
@@ -11812,7 +15815,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Int",
+ "name": "Time",
"ofType": null
}
},
@@ -11820,8 +15823,8 @@
"deprecationReason": null
},
{
- "name": "project",
- "description": "The project the design belongs to",
+ "name": "userPermissions",
+ "description": "Permissions for the current user on the resource",
"args": [
],
@@ -11830,7 +15833,7 @@
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "Project",
+ "name": "NotePermissions",
"ofType": null
}
},
@@ -11839,60 +15842,16 @@
}
],
"inputFields": null,
- "interfaces": null,
- "enumValues": null,
- "possibleTypes": [
- {
- "kind": "OBJECT",
- "name": "Design",
- "ofType": null
- },
- {
- "kind": "OBJECT",
- "name": "DesignAtVersion",
- "ofType": null
- }
- ]
- },
- {
- "kind": "ENUM",
- "name": "DesignVersionEvent",
- "description": "Mutation event of a design within a version",
- "fields": null,
- "inputFields": null,
- "interfaces": null,
- "enumValues": [
- {
- "name": "NONE",
- "description": "No change",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "CREATION",
- "description": "A creation event",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "MODIFICATION",
- "description": "A modification event",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "DELETION",
- "description": "A deletion event",
- "isDeprecated": false,
- "deprecationReason": null
- }
+ "interfaces": [
+
],
+ "enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
- "name": "DesignVersionConnection",
- "description": "The connection type for DesignVersion.",
+ "name": "NoteConnection",
+ "description": "The connection type for Note.",
"fields": [
{
"name": "edges",
@@ -11905,7 +15864,7 @@
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "DesignVersionEdge",
+ "name": "NoteEdge",
"ofType": null
}
},
@@ -11923,7 +15882,7 @@
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "DesignVersion",
+ "name": "Note",
"ofType": null
}
},
@@ -11958,7 +15917,7 @@
},
{
"kind": "OBJECT",
- "name": "DesignVersionEdge",
+ "name": "NoteEdge",
"description": "An edge in a connection.",
"fields": [
{
@@ -11987,7 +15946,7 @@
],
"type": {
"kind": "OBJECT",
- "name": "DesignVersion",
+ "name": "Note",
"ofType": null
},
"isDeprecated": false,
@@ -12003,50 +15962,39 @@
},
{
"kind": "OBJECT",
- "name": "DesignVersion",
- "description": "A specific version in which designs were added, modified or deleted",
+ "name": "NotePermissions",
+ "description": null,
"fields": [
{
- "name": "designAtVersion",
- "description": "A particular design as of this version, provided it is visible at this version",
+ "name": "adminNote",
+ "description": "Indicates the user can perform `admin_note` on this resource",
"args": [
- {
- "name": "id",
- "description": "The ID of the DesignAtVersion",
- "type": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "designId",
- "description": "The ID of a specific design",
- "type": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "filename",
- "description": "The filename of a specific design",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
}
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "awardEmoji",
+ "description": "Indicates the user can perform `award_emoji` on this resource",
+ "args": [
+
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "DesignAtVersion",
+ "kind": "SCALAR",
+ "name": "Boolean",
"ofType": null
}
},
@@ -12054,8 +16002,75 @@
"deprecationReason": null
},
{
- "name": "designs",
- "description": "All designs that were changed in the version",
+ "name": "createNote",
+ "description": "Indicates the user can perform `create_note` on this resource",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "readNote",
+ "description": "Indicates the user can perform `read_note` on this resource",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "resolveNote",
+ "description": "Indicates the user can perform `resolve_note` on this resource",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "INTERFACE",
+ "name": "Noteable",
+ "description": null,
+ "fields": [
+ {
+ "name": "discussions",
+ "description": "All discussions on this noteable",
"args": [
{
"name": "after",
@@ -12103,7 +16118,7 @@
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "DesignConnection",
+ "name": "DiscussionConnection",
"ofType": null
}
},
@@ -12111,46 +16126,10 @@
"deprecationReason": null
},
{
- "name": "designsAtVersion",
- "description": "All designs that are visible at this version, as of this version",
+ "name": "notes",
+ "description": "All notes on this noteable",
"args": [
{
- "name": "ids",
- "description": "Filters designs by their ID",
- "type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- }
- },
- "defaultValue": null
- },
- {
- "name": "filenames",
- "description": "Filters designs by their filename",
- "type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- }
- },
- "defaultValue": null
- },
- {
"name": "after",
"description": "Returns the elements in the list that come after the specified cursor.",
"type": {
@@ -12196,34 +16175,72 @@
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "DesignAtVersionConnection",
+ "name": "NoteConnection",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": [
+ {
+ "kind": "OBJECT",
+ "name": "Design",
+ "ofType": null
},
{
- "name": "id",
- "description": "ID of the design version",
+ "kind": "OBJECT",
+ "name": "Epic",
+ "ofType": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "EpicIssue",
+ "ofType": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "Issue",
+ "ofType": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "MergeRequest",
+ "ofType": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "Snippet",
+ "ofType": null
+ }
+ ]
+ },
+ {
+ "kind": "OBJECT",
+ "name": "PageInfo",
+ "description": "Information about pagination in a connection.",
+ "fields": [
+ {
+ "name": "endCursor",
+ "description": "When paginating forwards, the cursor to continue.",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "sha",
- "description": "SHA of the design version",
+ "name": "hasNextPage",
+ "description": "When paginating forwards, are there more items?",
"args": [
],
@@ -12232,38 +16249,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "DesignAtVersionConnection",
- "description": "The connection type for DesignAtVersion.",
- "fields": [
- {
- "name": "edges",
- "description": "A list of edges.",
- "args": [
-
- ],
- "type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "DesignAtVersionEdge",
+ "name": "Boolean",
"ofType": null
}
},
@@ -12271,17 +16257,17 @@
"deprecationReason": null
},
{
- "name": "nodes",
- "description": "A list of nodes.",
+ "name": "hasPreviousPage",
+ "description": "When paginating backwards, are there more items?",
"args": [
],
"type": {
- "kind": "LIST",
+ "kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "DesignAtVersion",
+ "kind": "SCALAR",
+ "name": "Boolean",
"ofType": null
}
},
@@ -12289,19 +16275,15 @@
"deprecationReason": null
},
{
- "name": "pageInfo",
- "description": "Information to aid in pagination.",
+ "name": "startCursor",
+ "description": "When paginating backwards, the cursor to continue.",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "PageInfo",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
@@ -12316,75 +16298,54 @@
},
{
"kind": "OBJECT",
- "name": "DesignAtVersionEdge",
- "description": "An edge in a connection.",
+ "name": "Pipeline",
+ "description": null,
"fields": [
{
- "name": "cursor",
- "description": "A cursor for use in pagination.",
+ "name": "beforeSha",
+ "description": "Base SHA of the source branch",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "node",
- "description": "The item at the end of the edge.",
+ "name": "committedAt",
+ "description": "Timestamp of the pipeline's commit",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "DesignAtVersion",
+ "kind": "SCALAR",
+ "name": "Time",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "DesignAtVersion",
- "description": "A design pinned to a specific version. The image field reflects the design as of the associated version.",
- "fields": [
+ },
{
- "name": "design",
- "description": "The underlying design.",
+ "name": "coverage",
+ "description": "Coverage percentage",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "Design",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Float",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "diffRefs",
- "description": "The diff refs for this design",
+ "name": "createdAt",
+ "description": "Timestamp of the pipeline's creation",
"args": [
],
@@ -12392,8 +16353,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "DiffRefs",
+ "kind": "SCALAR",
+ "name": "Time",
"ofType": null
}
},
@@ -12401,8 +16362,8 @@
"deprecationReason": null
},
{
- "name": "event",
- "description": "How this design was changed in the current version",
+ "name": "detailedStatus",
+ "description": "Detailed status of the pipeline",
"args": [
],
@@ -12410,8 +16371,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "ENUM",
- "name": "DesignVersionEvent",
+ "kind": "OBJECT",
+ "name": "DetailedStatus",
"ofType": null
}
},
@@ -12419,44 +16380,36 @@
"deprecationReason": null
},
{
- "name": "filename",
- "description": "The filename of the design",
+ "name": "duration",
+ "description": "Duration of the pipeline in seconds",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "fullPath",
- "description": "The full path to the design file",
+ "name": "finishedAt",
+ "description": "Timestamp of the pipeline's completion",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "id",
- "description": "The ID of this design",
+ "description": "ID of the pipeline",
"args": [
],
@@ -12473,8 +16426,8 @@
"deprecationReason": null
},
{
- "name": "image",
- "description": "The URL of the image",
+ "name": "iid",
+ "description": "Internal ID of the pipeline",
"args": [
],
@@ -12491,8 +16444,8 @@
"deprecationReason": null
},
{
- "name": "issue",
- "description": "The issue the design belongs to",
+ "name": "sha",
+ "description": "SHA of the pipeline's commit",
"args": [
],
@@ -12500,8 +16453,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "Issue",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
}
},
@@ -12509,26 +16462,22 @@
"deprecationReason": null
},
{
- "name": "notesCount",
- "description": "The total count of user-created notes for this design",
+ "name": "startedAt",
+ "description": "Timestamp when the pipeline was started",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "project",
- "description": "The project the design belongs to",
+ "name": "status",
+ "description": "Status of the pipeline (CREATED, WAITING_FOR_RESOURCE, PREPARING, PENDING, RUNNING, FAILED, SUCCESS, CANCELED, SKIPPED, MANUAL, SCHEDULED)",
"args": [
],
@@ -12536,8 +16485,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "Project",
+ "kind": "ENUM",
+ "name": "PipelineStatusEnum",
"ofType": null
}
},
@@ -12545,8 +16494,8 @@
"deprecationReason": null
},
{
- "name": "version",
- "description": "The version this design-at-versions is pinned to",
+ "name": "updatedAt",
+ "description": "Timestamp of the pipeline's last activity",
"args": [
],
@@ -12554,83 +16503,28 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "DesignVersion",
+ "kind": "SCALAR",
+ "name": "Time",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
- {
- "kind": "INTERFACE",
- "name": "DesignFields",
- "ofType": null
- }
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "EpicDescendantCount",
- "description": "Counts of descendent epics.",
- "fields": [
- {
- "name": "closedEpics",
- "description": "Number of closed sub-epics",
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "closedIssues",
- "description": "Number of closed epic issues",
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
},
{
- "name": "openedEpics",
- "description": "Number of opened sub-epics",
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "openedIssues",
- "description": "Number of opened epic issues",
+ "name": "userPermissions",
+ "description": "Permissions for the current user on the resource",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "PipelinePermissions",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
@@ -12645,8 +16539,8 @@
},
{
"kind": "OBJECT",
- "name": "TimelogConnection",
- "description": "The connection type for Timelog.",
+ "name": "PipelineConnection",
+ "description": "The connection type for Pipeline.",
"fields": [
{
"name": "edges",
@@ -12659,7 +16553,7 @@
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "TimelogEdge",
+ "name": "PipelineEdge",
"ofType": null
}
},
@@ -12677,7 +16571,7 @@
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "Timelog",
+ "name": "Pipeline",
"ofType": null
}
},
@@ -12712,7 +16606,7 @@
},
{
"kind": "OBJECT",
- "name": "TimelogEdge",
+ "name": "PipelineEdge",
"description": "An edge in a connection.",
"fields": [
{
@@ -12741,7 +16635,7 @@
],
"type": {
"kind": "OBJECT",
- "name": "Timelog",
+ "name": "Pipeline",
"ofType": null
},
"isDeprecated": false,
@@ -12757,12 +16651,12 @@
},
{
"kind": "OBJECT",
- "name": "Timelog",
+ "name": "PipelinePermissions",
"description": null,
"fields": [
{
- "name": "date",
- "description": "The date when the time tracked was spent at",
+ "name": "adminPipeline",
+ "description": "Indicates the user can perform `admin_pipeline` on this resource",
"args": [
],
@@ -12771,7 +16665,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Time",
+ "name": "Boolean",
"ofType": null
}
},
@@ -12779,22 +16673,8 @@
"deprecationReason": null
},
{
- "name": "issue",
- "description": "The issue that logged time was added to",
- "args": [
-
- ],
- "type": {
- "kind": "OBJECT",
- "name": "Issue",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "timeSpent",
- "description": "The time spent displayed in seconds",
+ "name": "destroyPipeline",
+ "description": "Indicates the user can perform `destroy_pipeline` on this resource",
"args": [
],
@@ -12803,7 +16683,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Int",
+ "name": "Boolean",
"ofType": null
}
},
@@ -12811,8 +16691,8 @@
"deprecationReason": null
},
{
- "name": "user",
- "description": "The user that logged the time",
+ "name": "updatePipeline",
+ "description": "Indicates the user can perform `update_pipeline` on this resource",
"args": [
],
@@ -12820,8 +16700,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "User",
+ "kind": "SCALAR",
+ "name": "Boolean",
"ofType": null
}
},
@@ -12837,184 +16717,118 @@
"possibleTypes": null
},
{
- "kind": "OBJECT",
- "name": "ProjectStatistics",
+ "kind": "ENUM",
+ "name": "PipelineStatusEnum",
"description": null,
- "fields": [
+ "fields": null,
+ "inputFields": null,
+ "interfaces": null,
+ "enumValues": [
{
- "name": "buildArtifactsSize",
- "description": "Build artifacts size of the project",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- }
- },
+ "name": "CREATED",
+ "description": null,
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "commitCount",
- "description": "Commit count of the project",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- }
- },
+ "name": "WAITING_FOR_RESOURCE",
+ "description": null,
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "lfsObjectsSize",
- "description": "Large File Storage (LFS) object size of the project",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- }
- },
+ "name": "PREPARING",
+ "description": null,
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "packagesSize",
- "description": "Packages size of the project",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- }
- },
+ "name": "PENDING",
+ "description": null,
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "repositorySize",
- "description": "Repository size of the project",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- }
- },
+ "name": "RUNNING",
+ "description": null,
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "storageSize",
- "description": "Storage size of the project",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- }
- },
+ "name": "FAILED",
+ "description": null,
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "wikiSize",
- "description": "Wiki size of the project",
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
+ "name": "SUCCESS",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "CANCELED",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "SKIPPED",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "MANUAL",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "SCHEDULED",
+ "description": null,
"isDeprecated": false,
"deprecationReason": null
}
],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
- "name": "Repository",
+ "name": "Project",
"description": null,
"fields": [
{
- "name": "empty",
- "description": "Indicates repository has no visible content",
+ "name": "archived",
+ "description": "Indicates the archived status of the project",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "exists",
- "description": "Indicates a corresponding Git repository exists on disk",
+ "name": "autocloseReferencedIssues",
+ "description": "Indicates if issues referenced by merge requests and commits within the default branch are closed automatically",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "rootRef",
- "description": "Default branch of the repository",
+ "name": "avatarUrl",
+ "description": "URL to avatar image file of the project",
"args": [
],
@@ -13027,66 +16841,43 @@
"deprecationReason": null
},
{
- "name": "tree",
- "description": "Tree of the repository",
+ "name": "board",
+ "description": "A single board of the project",
"args": [
{
- "name": "path",
- "description": "The path to get the tree for. Default value is the root of the repository",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": "\"\""
- },
- {
- "name": "ref",
- "description": "The commit ref to get the tree for. Default value is HEAD",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": "\"head\""
- },
- {
- "name": "recursive",
- "description": "Used to get a recursive tree. Default is false",
+ "name": "id",
+ "description": "Find a board by its ID",
"type": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "ID",
"ofType": null
},
- "defaultValue": "false"
+ "defaultValue": null
}
],
"type": {
"kind": "OBJECT",
- "name": "Tree",
+ "name": "Board",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "Tree",
- "description": null,
- "fields": [
+ },
{
- "name": "blobs",
- "description": "Blobs of the tree",
+ "name": "boards",
+ "description": "Boards of the project",
"args": [
{
+ "name": "id",
+ "description": "Find a board by its ID",
+ "type": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
"name": "after",
"description": "Returns the elements in the list that come after the specified cursor.",
"type": {
@@ -13128,38 +16919,76 @@
}
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "BlobConnection",
- "ofType": null
- }
+ "kind": "OBJECT",
+ "name": "BoardConnection",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "lastCommit",
- "description": "Last commit for the tree",
+ "name": "containerRegistryEnabled",
+ "description": "Indicates if the project stores Docker container images in a container registry",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "Commit",
+ "kind": "SCALAR",
+ "name": "Boolean",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "submodules",
- "description": "Sub-modules of the tree",
+ "name": "createdAt",
+ "description": "Timestamp of the project creation",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "description",
+ "description": "Short description of the project",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "descriptionHtml",
+ "description": "The GitLab Flavored Markdown rendering of `description`",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "environments",
+ "description": "Environments of the project",
"args": [
{
- "name": "after",
- "description": "Returns the elements in the list that come after the specified cursor.",
+ "name": "name",
+ "description": "Name of the environment",
"type": {
"kind": "SCALAR",
"name": "String",
@@ -13168,8 +16997,8 @@
"defaultValue": null
},
{
- "name": "before",
- "description": "Returns the elements in the list that come before the specified cursor.",
+ "name": "search",
+ "description": "Search query",
"type": {
"kind": "SCALAR",
"name": "String",
@@ -13178,43 +17007,6 @@
"defaultValue": null
},
{
- "name": "first",
- "description": "Returns the first _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "last",
- "description": "Returns the last _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- }
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "SubmoduleConnection",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "trees",
- "description": "Trees of the tree",
- "args": [
- {
"name": "after",
"description": "Returns the elements in the list that come after the specified cursor.",
"type": {
@@ -13256,89 +17048,80 @@
}
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "TreeEntryConnection",
- "ofType": null
- }
+ "kind": "OBJECT",
+ "name": "EnvironmentConnection",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "Commit",
- "description": null,
- "fields": [
+ },
{
- "name": "author",
- "description": "Author of the commit",
+ "name": "forksCount",
+ "description": "Number of times the project has been forked",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "User",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "authorGravatar",
- "description": "Commit authors gravatar",
+ "name": "fullPath",
+ "description": "Full path of the project",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "authorName",
- "description": "Commit authors name",
+ "name": "grafanaIntegration",
+ "description": "Grafana integration details for the project",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "OBJECT",
+ "name": "GrafanaIntegration",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "authoredDate",
- "description": "Timestamp of when the commit was authored",
+ "name": "group",
+ "description": "Group of the project",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "Time",
+ "kind": "OBJECT",
+ "name": "Group",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "description",
- "description": "Description of the commit message",
+ "name": "httpUrlToRepo",
+ "description": "URL to connect to the project via HTTPS",
"args": [
],
@@ -13352,7 +17135,7 @@
},
{
"name": "id",
- "description": "ID (global ID) of the commit",
+ "description": "ID of the project",
"args": [
],
@@ -13369,22 +17152,92 @@
"deprecationReason": null
},
{
- "name": "latestPipeline",
- "description": "Latest pipeline of the commit",
+ "name": "importStatus",
+ "description": "Status of project import background job of the project",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "issue",
+ "description": "A single issue of the project",
"args": [
{
- "name": "status",
- "description": "Filter pipelines by their status",
+ "name": "iid",
+ "description": "IID of the issue. For example, \"1\"",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "iids",
+ "description": "List of IIDs of issues. For example, [1, 2]",
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "state",
+ "description": "Current state of this issue",
"type": {
"kind": "ENUM",
- "name": "PipelineStatusEnum",
+ "name": "IssuableState",
"ofType": null
},
"defaultValue": null
},
{
- "name": "ref",
- "description": "Filter pipelines by the ref they are run for",
+ "name": "labelName",
+ "description": "Labels applied to this issue",
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "milestoneTitle",
+ "description": "Milestones applied to this issue",
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "assigneeUsername",
+ "description": "Username of a user assigned to the issues",
"type": {
"kind": "SCALAR",
"name": "String",
@@ -13393,55 +17246,177 @@
"defaultValue": null
},
{
- "name": "sha",
- "description": "Filter pipelines by the sha of the commit they are run for",
+ "name": "assigneeId",
+ "description": "ID of a user assigned to the issues, \"none\" and \"any\" values supported",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
+ },
+ {
+ "name": "createdBefore",
+ "description": "Issues created before this date",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "createdAfter",
+ "description": "Issues created after this date",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "updatedBefore",
+ "description": "Issues updated before this date",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "updatedAfter",
+ "description": "Issues updated after this date",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "closedBefore",
+ "description": "Issues closed before this date",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "closedAfter",
+ "description": "Issues closed after this date",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "search",
+ "description": "Search query for finding issues by title or description",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "sort",
+ "description": "Sort issues by this criteria",
+ "type": {
+ "kind": "ENUM",
+ "name": "IssueSort",
+ "ofType": null
+ },
+ "defaultValue": "created_desc"
}
],
"type": {
"kind": "OBJECT",
- "name": "Pipeline",
- "ofType": null
- },
- "isDeprecated": true,
- "deprecationReason": "Use pipelines"
- },
- {
- "name": "message",
- "description": "Raw commit message",
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "String",
+ "name": "Issue",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "pipelines",
- "description": "Pipelines of the commit ordered latest first",
+ "name": "issues",
+ "description": "Issues of the project",
"args": [
{
- "name": "status",
- "description": "Filter pipelines by their status",
+ "name": "iid",
+ "description": "IID of the issue. For example, \"1\"",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "iids",
+ "description": "List of IIDs of issues. For example, [1, 2]",
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "state",
+ "description": "Current state of this issue",
"type": {
"kind": "ENUM",
- "name": "PipelineStatusEnum",
+ "name": "IssuableState",
"ofType": null
},
"defaultValue": null
},
{
- "name": "ref",
- "description": "Filter pipelines by the ref they are run for",
+ "name": "labelName",
+ "description": "Labels applied to this issue",
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "milestoneTitle",
+ "description": "Milestones applied to this issue",
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "assigneeUsername",
+ "description": "Username of a user assigned to the issues",
"type": {
"kind": "SCALAR",
"name": "String",
@@ -13450,8 +17425,8 @@
"defaultValue": null
},
{
- "name": "sha",
- "description": "Filter pipelines by the sha of the commit they are run for",
+ "name": "assigneeId",
+ "description": "ID of a user assigned to the issues, \"none\" and \"any\" values supported",
"type": {
"kind": "SCALAR",
"name": "String",
@@ -13460,6 +17435,86 @@
"defaultValue": null
},
{
+ "name": "createdBefore",
+ "description": "Issues created before this date",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "createdAfter",
+ "description": "Issues created after this date",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "updatedBefore",
+ "description": "Issues updated before this date",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "updatedAfter",
+ "description": "Issues updated after this date",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "closedBefore",
+ "description": "Issues closed before this date",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "closedAfter",
+ "description": "Issues closed after this date",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "search",
+ "description": "Search query for finding issues by title or description",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "sort",
+ "description": "Sort issues by this criteria",
+ "type": {
+ "kind": "ENUM",
+ "name": "IssueSort",
+ "ofType": null
+ },
+ "defaultValue": "created_desc"
+ },
+ {
"name": "after",
"description": "Returns the elements in the list that come after the specified cursor.",
"type": {
@@ -13502,128 +17557,221 @@
],
"type": {
"kind": "OBJECT",
- "name": "PipelineConnection",
+ "name": "IssueConnection",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "sha",
- "description": "SHA1 ID of the commit",
+ "name": "issuesEnabled",
+ "description": "(deprecated) Does this project have issues enabled?. Use `issues_access_level` instead",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "signatureHtml",
- "description": "Rendered HTML of the commit signature",
+ "name": "jobsEnabled",
+ "description": "(deprecated) Enable jobs for this project. Use `builds_access_level` instead",
"args": [
],
"type": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "title",
- "description": "Title of the commit message",
+ "name": "lastActivityAt",
+ "description": "Timestamp of the project last activity",
"args": [
],
"type": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Time",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "webUrl",
- "description": "Web URL of the commit",
+ "name": "lfsEnabled",
+ "description": "Indicates if the project has Large File Storage (LFS) enabled",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "mergeRequest",
+ "description": "A single merge request of the project",
+ "args": [
+ {
+ "name": "iid",
+ "description": "The IID of the merge request, e.g., \"1\"",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "iids",
+ "description": "The list of IIDs of issues, e.g., [1, 2]",
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
+ },
+ "defaultValue": null
}
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "MergeRequest",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "PipelineConnection",
- "description": "The connection type for Pipeline.",
- "fields": [
+ },
{
- "name": "edges",
- "description": "A list of edges.",
+ "name": "mergeRequests",
+ "description": "Merge requests of the project",
+ "args": [
+ {
+ "name": "iid",
+ "description": "The IID of the merge request, e.g., \"1\"",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "iids",
+ "description": "The list of IIDs of issues, e.g., [1, 2]",
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "first",
+ "description": "Returns the first _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "last",
+ "description": "Returns the last _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "MergeRequestConnection",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "mergeRequestsEnabled",
+ "description": "(deprecated) Does this project have merge_requests enabled?. Use `merge_requests_access_level` instead",
"args": [
],
"type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "PipelineEdge",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "nodes",
- "description": "A list of nodes.",
+ "name": "mergeRequestsFfOnlyEnabled",
+ "description": "Indicates if no merge commits should be created and all merges should instead be fast-forwarded, which means that merging is only allowed if the branch could be fast-forwarded.",
"args": [
],
"type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "Pipeline",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "pageInfo",
- "description": "Information to aid in pagination.",
+ "name": "name",
+ "description": "Name of the project (without namespace)",
"args": [
],
@@ -13631,30 +17779,17 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "PageInfo",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "PipelineEdge",
- "description": "An edge in a connection.",
- "fields": [
+ },
{
- "name": "cursor",
- "description": "A cursor for use in pagination.",
+ "name": "nameWithNamespace",
+ "description": "Full name of the project with its namespace",
"args": [
],
@@ -13671,77 +17806,64 @@
"deprecationReason": null
},
{
- "name": "node",
- "description": "The item at the end of the edge.",
+ "name": "namespace",
+ "description": "Namespace of the project",
"args": [
],
"type": {
"kind": "OBJECT",
- "name": "Pipeline",
+ "name": "Namespace",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "Pipeline",
- "description": null,
- "fields": [
+ },
{
- "name": "beforeSha",
- "description": "Base SHA of the source branch",
+ "name": "onlyAllowMergeIfAllDiscussionsAreResolved",
+ "description": "Indicates if merge requests of the project can only be merged when all the discussions are resolved",
"args": [
],
"type": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "committedAt",
- "description": "Timestamp of the pipeline's commit",
+ "name": "onlyAllowMergeIfPipelineSucceeds",
+ "description": "Indicates if merge requests of the project can only be merged with successful jobs",
"args": [
],
"type": {
"kind": "SCALAR",
- "name": "Time",
+ "name": "Boolean",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "coverage",
- "description": "Coverage percentage",
+ "name": "openIssuesCount",
+ "description": "Number of open issues for the project",
"args": [
],
"type": {
"kind": "SCALAR",
- "name": "Float",
+ "name": "Int",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "createdAt",
- "description": "Timestamp of the pipeline's creation",
+ "name": "path",
+ "description": "Path of the project",
"args": [
],
@@ -13750,7 +17872,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Time",
+ "name": "String",
"ofType": null
}
},
@@ -13758,333 +17880,515 @@
"deprecationReason": null
},
{
- "name": "detailedStatus",
- "description": "Detailed status of the pipeline",
+ "name": "pipelines",
+ "description": "Build pipelines of the project",
"args": [
-
+ {
+ "name": "status",
+ "description": "Filter pipelines by their status",
+ "type": {
+ "kind": "ENUM",
+ "name": "PipelineStatusEnum",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "ref",
+ "description": "Filter pipelines by the ref they are run for",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "sha",
+ "description": "Filter pipelines by the sha of the commit they are run for",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "first",
+ "description": "Returns the first _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "last",
+ "description": "Returns the last _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "DetailedStatus",
- "ofType": null
- }
+ "kind": "OBJECT",
+ "name": "PipelineConnection",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "duration",
- "description": "Duration of the pipeline in seconds",
+ "name": "printingMergeRequestLinkEnabled",
+ "description": "Indicates if a link to create or view a merge request should display after a push to Git repositories of the project from the command line",
"args": [
],
"type": {
"kind": "SCALAR",
- "name": "Int",
+ "name": "Boolean",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "finishedAt",
- "description": "Timestamp of the pipeline's completion",
+ "name": "publicJobs",
+ "description": "Indicates if there is public access to pipelines and job details of the project, including output logs and artifacts",
"args": [
],
"type": {
"kind": "SCALAR",
- "name": "Time",
+ "name": "Boolean",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "id",
- "description": "ID of the pipeline",
+ "name": "removeSourceBranchAfterMerge",
+ "description": "Indicates if `Delete source branch` option should be enabled by default for all new merge requests of the project",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "iid",
- "description": "Internal ID of the pipeline",
+ "name": "repository",
+ "description": "Git repository of the project",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
+ "kind": "OBJECT",
+ "name": "Repository",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "sha",
- "description": "SHA of the pipeline's commit",
+ "name": "requestAccessEnabled",
+ "description": "Indicates if users can request member access to the project",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "startedAt",
- "description": "Timestamp when the pipeline was started",
+ "name": "requirement",
+ "description": "Find a single requirement. Available only when feature flag `requirements_management` is enabled.",
"args": [
-
+ {
+ "name": "iid",
+ "description": "IID of the requirement, e.g., \"1\"",
+ "type": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "iids",
+ "description": "List of IIDs of requirements, e.g., [1, 2]",
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "sort",
+ "description": "List requirements by sort order",
+ "type": {
+ "kind": "ENUM",
+ "name": "Sort",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "state",
+ "description": "Filter requirements by state",
+ "type": {
+ "kind": "ENUM",
+ "name": "RequirementState",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
],
"type": {
- "kind": "SCALAR",
- "name": "Time",
+ "kind": "OBJECT",
+ "name": "Requirement",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "status",
- "description": "Status of the pipeline (CREATED, WAITING_FOR_RESOURCE, PREPARING, PENDING, RUNNING, FAILED, SUCCESS, CANCELED, SKIPPED, MANUAL, SCHEDULED)",
+ "name": "requirements",
+ "description": "Find requirements. Available only when feature flag `requirements_management` is enabled.",
"args": [
-
+ {
+ "name": "iid",
+ "description": "IID of the requirement, e.g., \"1\"",
+ "type": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "iids",
+ "description": "List of IIDs of requirements, e.g., [1, 2]",
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "sort",
+ "description": "List requirements by sort order",
+ "type": {
+ "kind": "ENUM",
+ "name": "Sort",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "state",
+ "description": "Filter requirements by state",
+ "type": {
+ "kind": "ENUM",
+ "name": "RequirementState",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "first",
+ "description": "Returns the first _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "last",
+ "description": "Returns the last _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "ENUM",
- "name": "PipelineStatusEnum",
- "ofType": null
- }
+ "kind": "OBJECT",
+ "name": "RequirementConnection",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "updatedAt",
- "description": "Timestamp of the pipeline's last activity",
+ "name": "sentryDetailedError",
+ "description": "Detailed version of a Sentry error on the project",
"args": [
-
+ {
+ "name": "id",
+ "description": "ID of the Sentry issue",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ }
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Time",
- "ofType": null
- }
+ "kind": "OBJECT",
+ "name": "SentryDetailedError",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "userPermissions",
- "description": "Permissions for the current user on the resource",
+ "name": "sentryErrors",
+ "description": "Paginated collection of Sentry errors on the project",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "PipelinePermissions",
- "ofType": null
- }
+ "kind": "OBJECT",
+ "name": "SentryErrorCollection",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "PipelinePermissions",
- "description": null,
- "fields": [
+ },
{
- "name": "adminPipeline",
- "description": "Indicates the user can perform `admin_pipeline` on this resource",
+ "name": "serviceDeskAddress",
+ "description": "E-mail address of the service desk.",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "destroyPipeline",
- "description": "Indicates the user can perform `destroy_pipeline` on this resource",
+ "name": "serviceDeskEnabled",
+ "description": "Indicates if the project has service desk enabled.",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "updatePipeline",
- "description": "Indicates the user can perform `update_pipeline` on this resource",
+ "name": "sharedRunnersEnabled",
+ "description": "Indicates if shared runners are enabled on the project",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "ENUM",
- "name": "PipelineStatusEnum",
- "description": null,
- "fields": null,
- "inputFields": null,
- "interfaces": null,
- "enumValues": [
- {
- "name": "CREATED",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
},
{
- "name": "WAITING_FOR_RESOURCE",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "PREPARING",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "PENDING",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "RUNNING",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "FAILED",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "SUCCESS",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "CANCELED",
- "description": null,
+ "name": "snippets",
+ "description": "Snippets of the project",
+ "args": [
+ {
+ "name": "ids",
+ "description": "Array of global snippet ids, e.g., \"gid://gitlab/ProjectSnippet/1\"",
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "visibility",
+ "description": "The visibility of the snippet",
+ "type": {
+ "kind": "ENUM",
+ "name": "VisibilityScopesEnum",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "first",
+ "description": "Returns the first _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "last",
+ "description": "Returns the last _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "SnippetConnection",
+ "ofType": null
+ },
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "SKIPPED",
- "description": null,
+ "name": "snippetsEnabled",
+ "description": "(deprecated) Does this project have snippets enabled?. Use `snippets_access_level` instead",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ },
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "MANUAL",
- "description": null,
+ "name": "sshUrlToRepo",
+ "description": "URL to connect to the project via SSH",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "SCHEDULED",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
- }
- ],
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "DetailedStatus",
- "description": null,
- "fields": [
- {
- "name": "detailsPath",
- "description": "Path of the details for the pipeline status",
+ "name": "starCount",
+ "description": "Number of times the project has been starred",
"args": [
],
@@ -14093,7 +18397,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Int",
"ofType": null
}
},
@@ -14101,62 +18405,50 @@
"deprecationReason": null
},
{
- "name": "favicon",
- "description": "Favicon of the pipeline status",
+ "name": "statistics",
+ "description": "Statistics of the project",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
+ "kind": "OBJECT",
+ "name": "ProjectStatistics",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "group",
- "description": "Group of the pipeline status",
+ "name": "suggestionCommitMessage",
+ "description": "The commit message used to apply merge request suggestions",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "hasDetails",
- "description": "Indicates if the pipeline status has further details",
+ "name": "tagList",
+ "description": "List of project tags",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "icon",
- "description": "Icon of the pipeline status",
+ "name": "userPermissions",
+ "description": "Permissions for the current user on the resource",
"args": [
],
@@ -14164,8 +18456,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "OBJECT",
+ "name": "ProjectPermissions",
"ofType": null
}
},
@@ -14173,55 +18465,96 @@
"deprecationReason": null
},
{
- "name": "label",
- "description": "Label of the pipeline status",
+ "name": "visibility",
+ "description": "Visibility of the project",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "vulnerabilities",
+ "description": "Vulnerabilities reported on the project. Available only when feature flag `first_class_vulnerabilities` is enabled.",
+ "args": [
+ {
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "first",
+ "description": "Returns the first _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "last",
+ "description": "Returns the last _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
}
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "VulnerabilityConnection",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "text",
- "description": "Text of the pipeline status",
+ "name": "webUrl",
+ "description": "Web URL of the project",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "tooltip",
- "description": "Tooltip associated with the pipeline status",
+ "name": "wikiEnabled",
+ "description": "(deprecated) Does this project have wiki enabled?. Use `wiki_access_level` instead",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
@@ -14235,19 +18568,9 @@
"possibleTypes": null
},
{
- "kind": "SCALAR",
- "name": "Float",
- "description": "Represents signed double-precision fractional values as specified by [IEEE 754](https://en.wikipedia.org/wiki/IEEE_floating_point).",
- "fields": null,
- "inputFields": null,
- "interfaces": null,
- "enumValues": null,
- "possibleTypes": null
- },
- {
"kind": "OBJECT",
- "name": "TreeEntryConnection",
- "description": "The connection type for TreeEntry.",
+ "name": "ProjectConnection",
+ "description": "The connection type for Project.",
"fields": [
{
"name": "edges",
@@ -14260,7 +18583,7 @@
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "TreeEntryEdge",
+ "name": "ProjectEdge",
"ofType": null
}
},
@@ -14278,7 +18601,7 @@
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "TreeEntry",
+ "name": "Project",
"ofType": null
}
},
@@ -14313,7 +18636,7 @@
},
{
"kind": "OBJECT",
- "name": "TreeEntryEdge",
+ "name": "ProjectEdge",
"description": "An edge in a connection.",
"fields": [
{
@@ -14342,7 +18665,7 @@
],
"type": {
"kind": "OBJECT",
- "name": "TreeEntry",
+ "name": "Project",
"ofType": null
},
"isDeprecated": false,
@@ -14358,12 +18681,12 @@
},
{
"kind": "OBJECT",
- "name": "TreeEntry",
- "description": "Represents a directory",
+ "name": "ProjectPermissions",
+ "description": null,
"fields": [
{
- "name": "flatPath",
- "description": "Flat path of the entry",
+ "name": "adminOperations",
+ "description": "Indicates the user can perform `admin_operations` on this resource",
"args": [
],
@@ -14372,7 +18695,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
}
},
@@ -14380,8 +18703,8 @@
"deprecationReason": null
},
{
- "name": "id",
- "description": "ID of the entry",
+ "name": "adminProject",
+ "description": "Indicates the user can perform `admin_project` on this resource",
"args": [
],
@@ -14390,7 +18713,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "ID",
+ "name": "Boolean",
"ofType": null
}
},
@@ -14398,8 +18721,8 @@
"deprecationReason": null
},
{
- "name": "name",
- "description": "Name of the entry",
+ "name": "adminRemoteMirror",
+ "description": "Indicates the user can perform `admin_remote_mirror` on this resource",
"args": [
],
@@ -14408,7 +18731,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
}
},
@@ -14416,8 +18739,8 @@
"deprecationReason": null
},
{
- "name": "path",
- "description": "Path of the entry",
+ "name": "adminWiki",
+ "description": "Indicates the user can perform `admin_wiki` on this resource",
"args": [
],
@@ -14426,7 +18749,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
}
},
@@ -14434,8 +18757,8 @@
"deprecationReason": null
},
{
- "name": "sha",
- "description": "Last commit sha for the entry",
+ "name": "archiveProject",
+ "description": "Indicates the user can perform `archive_project` on this resource",
"args": [
],
@@ -14444,7 +18767,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
}
},
@@ -14452,8 +18775,8 @@
"deprecationReason": null
},
{
- "name": "type",
- "description": "Type of tree entry",
+ "name": "changeNamespace",
+ "description": "Indicates the user can perform `change_namespace` on this resource",
"args": [
],
@@ -14461,8 +18784,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "ENUM",
- "name": "EntryType",
+ "kind": "SCALAR",
+ "name": "Boolean",
"ofType": null
}
},
@@ -14470,39 +18793,8 @@
"deprecationReason": null
},
{
- "name": "webUrl",
- "description": "Web URL for the tree entry (directory)",
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
- {
- "kind": "INTERFACE",
- "name": "Entry",
- "ofType": null
- }
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "INTERFACE",
- "name": "Entry",
- "description": null,
- "fields": [
- {
- "name": "flatPath",
- "description": "Flat path of the entry",
+ "name": "changeVisibilityLevel",
+ "description": "Indicates the user can perform `change_visibility_level` on this resource",
"args": [
],
@@ -14511,7 +18803,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
}
},
@@ -14519,8 +18811,8 @@
"deprecationReason": null
},
{
- "name": "id",
- "description": "ID of the entry",
+ "name": "createDeployment",
+ "description": "Indicates the user can perform `create_deployment` on this resource",
"args": [
],
@@ -14529,7 +18821,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "ID",
+ "name": "Boolean",
"ofType": null
}
},
@@ -14537,8 +18829,8 @@
"deprecationReason": null
},
{
- "name": "name",
- "description": "Name of the entry",
+ "name": "createDesign",
+ "description": "Indicates the user can perform `create_design` on this resource",
"args": [
],
@@ -14547,7 +18839,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
}
},
@@ -14555,8 +18847,8 @@
"deprecationReason": null
},
{
- "name": "path",
- "description": "Path of the entry",
+ "name": "createIssue",
+ "description": "Indicates the user can perform `create_issue` on this resource",
"args": [
],
@@ -14565,7 +18857,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
}
},
@@ -14573,8 +18865,8 @@
"deprecationReason": null
},
{
- "name": "sha",
- "description": "Last commit sha for the entry",
+ "name": "createLabel",
+ "description": "Indicates the user can perform `create_label` on this resource",
"args": [
],
@@ -14583,7 +18875,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
}
},
@@ -14591,8 +18883,8 @@
"deprecationReason": null
},
{
- "name": "type",
- "description": "Type of tree entry",
+ "name": "createMergeRequestFrom",
+ "description": "Indicates the user can perform `create_merge_request_from` on this resource",
"args": [
],
@@ -14600,82 +18892,26 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "ENUM",
- "name": "EntryType",
+ "kind": "SCALAR",
+ "name": "Boolean",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": null,
- "enumValues": null,
- "possibleTypes": [
- {
- "kind": "OBJECT",
- "name": "Blob",
- "ofType": null
- },
- {
- "kind": "OBJECT",
- "name": "Submodule",
- "ofType": null
- },
- {
- "kind": "OBJECT",
- "name": "TreeEntry",
- "ofType": null
- }
- ]
- },
- {
- "kind": "ENUM",
- "name": "EntryType",
- "description": "Type of a tree entry",
- "fields": null,
- "inputFields": null,
- "interfaces": null,
- "enumValues": [
- {
- "name": "tree",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
},
{
- "name": "blob",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "commit",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
- }
- ],
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "SubmoduleConnection",
- "description": "The connection type for Submodule.",
- "fields": [
- {
- "name": "edges",
- "description": "A list of edges.",
+ "name": "createMergeRequestIn",
+ "description": "Indicates the user can perform `create_merge_request_in` on this resource",
"args": [
],
"type": {
- "kind": "LIST",
+ "kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "SubmoduleEdge",
+ "kind": "SCALAR",
+ "name": "Boolean",
"ofType": null
}
},
@@ -14683,17 +18919,17 @@
"deprecationReason": null
},
{
- "name": "nodes",
- "description": "A list of nodes.",
+ "name": "createPages",
+ "description": "Indicates the user can perform `create_pages` on this resource",
"args": [
],
"type": {
- "kind": "LIST",
+ "kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "Submodule",
+ "kind": "SCALAR",
+ "name": "Boolean",
"ofType": null
}
},
@@ -14701,8 +18937,8 @@
"deprecationReason": null
},
{
- "name": "pageInfo",
- "description": "Information to aid in pagination.",
+ "name": "createPipeline",
+ "description": "Indicates the user can perform `create_pipeline` on this resource",
"args": [
],
@@ -14710,30 +18946,17 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "PageInfo",
+ "kind": "SCALAR",
+ "name": "Boolean",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "SubmoduleEdge",
- "description": "An edge in a connection.",
- "fields": [
+ },
{
- "name": "cursor",
- "description": "A cursor for use in pagination.",
+ "name": "createPipelineSchedule",
+ "description": "Indicates the user can perform `create_pipeline_schedule` on this resource",
"args": [
],
@@ -14742,7 +18965,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
}
},
@@ -14750,35 +18973,8 @@
"deprecationReason": null
},
{
- "name": "node",
- "description": "The item at the end of the edge.",
- "args": [
-
- ],
- "type": {
- "kind": "OBJECT",
- "name": "Submodule",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "Submodule",
- "description": null,
- "fields": [
- {
- "name": "flatPath",
- "description": "Flat path of the entry",
+ "name": "createSnippet",
+ "description": "Indicates the user can perform `create_snippet` on this resource",
"args": [
],
@@ -14787,7 +18983,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
}
},
@@ -14795,8 +18991,8 @@
"deprecationReason": null
},
{
- "name": "id",
- "description": "ID of the entry",
+ "name": "createWiki",
+ "description": "Indicates the user can perform `create_wiki` on this resource",
"args": [
],
@@ -14805,7 +19001,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "ID",
+ "name": "Boolean",
"ofType": null
}
},
@@ -14813,8 +19009,8 @@
"deprecationReason": null
},
{
- "name": "name",
- "description": "Name of the entry",
+ "name": "destroyDesign",
+ "description": "Indicates the user can perform `destroy_design` on this resource",
"args": [
],
@@ -14823,7 +19019,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
}
},
@@ -14831,8 +19027,8 @@
"deprecationReason": null
},
{
- "name": "path",
- "description": "Path of the entry",
+ "name": "destroyPages",
+ "description": "Indicates the user can perform `destroy_pages` on this resource",
"args": [
],
@@ -14841,7 +19037,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
}
},
@@ -14849,8 +19045,8 @@
"deprecationReason": null
},
{
- "name": "sha",
- "description": "Last commit sha for the entry",
+ "name": "destroyWiki",
+ "description": "Indicates the user can perform `destroy_wiki` on this resource",
"args": [
],
@@ -14859,7 +19055,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
}
},
@@ -14867,22 +19063,26 @@
"deprecationReason": null
},
{
- "name": "treeUrl",
- "description": "Tree URL for the sub-module",
+ "name": "downloadCode",
+ "description": "Indicates the user can perform `download_code` on this resource",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "type",
- "description": "Type of tree entry",
+ "name": "downloadWikiCode",
+ "description": "Indicates the user can perform `download_wiki_code` on this resource",
"args": [
],
@@ -14890,8 +19090,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "ENUM",
- "name": "EntryType",
+ "kind": "SCALAR",
+ "name": "Boolean",
"ofType": null
}
},
@@ -14899,48 +19099,35 @@
"deprecationReason": null
},
{
- "name": "webUrl",
- "description": "Web URL for the sub-module",
+ "name": "forkProject",
+ "description": "Indicates the user can perform `fork_project` on this resource",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
- {
- "kind": "INTERFACE",
- "name": "Entry",
- "ofType": null
- }
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "BlobConnection",
- "description": "The connection type for Blob.",
- "fields": [
+ },
{
- "name": "edges",
- "description": "A list of edges.",
+ "name": "pushCode",
+ "description": "Indicates the user can perform `push_code` on this resource",
"args": [
],
"type": {
- "kind": "LIST",
+ "kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "BlobEdge",
+ "kind": "SCALAR",
+ "name": "Boolean",
"ofType": null
}
},
@@ -14948,17 +19135,17 @@
"deprecationReason": null
},
{
- "name": "nodes",
- "description": "A list of nodes.",
+ "name": "pushToDeleteProtectedBranch",
+ "description": "Indicates the user can perform `push_to_delete_protected_branch` on this resource",
"args": [
],
"type": {
- "kind": "LIST",
+ "kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "Blob",
+ "kind": "SCALAR",
+ "name": "Boolean",
"ofType": null
}
},
@@ -14966,8 +19153,8 @@
"deprecationReason": null
},
{
- "name": "pageInfo",
- "description": "Information to aid in pagination.",
+ "name": "readCommitStatus",
+ "description": "Indicates the user can perform `read_commit_status` on this resource",
"args": [
],
@@ -14975,30 +19162,17 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "PageInfo",
+ "kind": "SCALAR",
+ "name": "Boolean",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "BlobEdge",
- "description": "An edge in a connection.",
- "fields": [
+ },
{
- "name": "cursor",
- "description": "A cursor for use in pagination.",
+ "name": "readCycleAnalytics",
+ "description": "Indicates the user can perform `read_cycle_analytics` on this resource",
"args": [
],
@@ -15007,7 +19181,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
}
},
@@ -15015,35 +19189,26 @@
"deprecationReason": null
},
{
- "name": "node",
- "description": "The item at the end of the edge.",
+ "name": "readDesign",
+ "description": "Indicates the user can perform `read_design` on this resource",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "Blob",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "Blob",
- "description": null,
- "fields": [
+ },
{
- "name": "flatPath",
- "description": "Flat path of the entry",
+ "name": "readMergeRequest",
+ "description": "Indicates the user can perform `read_merge_request` on this resource",
"args": [
],
@@ -15052,7 +19217,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
}
},
@@ -15060,8 +19225,8 @@
"deprecationReason": null
},
{
- "name": "id",
- "description": "ID of the entry",
+ "name": "readPagesContent",
+ "description": "Indicates the user can perform `read_pages_content` on this resource",
"args": [
],
@@ -15070,7 +19235,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "ID",
+ "name": "Boolean",
"ofType": null
}
},
@@ -15078,22 +19243,26 @@
"deprecationReason": null
},
{
- "name": "lfsOid",
- "description": "LFS ID of the blob",
+ "name": "readProject",
+ "description": "Indicates the user can perform `read_project` on this resource",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "name",
- "description": "Name of the entry",
+ "name": "readProjectMember",
+ "description": "Indicates the user can perform `read_project_member` on this resource",
"args": [
],
@@ -15102,7 +19271,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
}
},
@@ -15110,8 +19279,8 @@
"deprecationReason": null
},
{
- "name": "path",
- "description": "Path of the entry",
+ "name": "readWiki",
+ "description": "Indicates the user can perform `read_wiki` on this resource",
"args": [
],
@@ -15120,7 +19289,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
}
},
@@ -15128,8 +19297,8 @@
"deprecationReason": null
},
{
- "name": "sha",
- "description": "Last commit sha for the entry",
+ "name": "removeForkProject",
+ "description": "Indicates the user can perform `remove_fork_project` on this resource",
"args": [
],
@@ -15138,7 +19307,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
}
},
@@ -15146,8 +19315,8 @@
"deprecationReason": null
},
{
- "name": "type",
- "description": "Type of tree entry",
+ "name": "removePages",
+ "description": "Indicates the user can perform `remove_pages` on this resource",
"args": [
],
@@ -15155,8 +19324,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "ENUM",
- "name": "EntryType",
+ "kind": "SCALAR",
+ "name": "Boolean",
"ofType": null
}
},
@@ -15164,48 +19333,35 @@
"deprecationReason": null
},
{
- "name": "webUrl",
- "description": "Web URL of the blob",
+ "name": "removeProject",
+ "description": "Indicates the user can perform `remove_project` on this resource",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
- {
- "kind": "INTERFACE",
- "name": "Entry",
- "ofType": null
- }
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "MergeRequestConnection",
- "description": "The connection type for MergeRequest.",
- "fields": [
+ },
{
- "name": "edges",
- "description": "A list of edges.",
+ "name": "renameProject",
+ "description": "Indicates the user can perform `rename_project` on this resource",
"args": [
],
"type": {
- "kind": "LIST",
+ "kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "MergeRequestEdge",
+ "kind": "SCALAR",
+ "name": "Boolean",
"ofType": null
}
},
@@ -15213,17 +19369,17 @@
"deprecationReason": null
},
{
- "name": "nodes",
- "description": "A list of nodes.",
+ "name": "requestAccess",
+ "description": "Indicates the user can perform `request_access` on this resource",
"args": [
],
"type": {
- "kind": "LIST",
+ "kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "MergeRequest",
+ "kind": "SCALAR",
+ "name": "Boolean",
"ofType": null
}
},
@@ -15231,8 +19387,8 @@
"deprecationReason": null
},
{
- "name": "pageInfo",
- "description": "Information to aid in pagination.",
+ "name": "updatePages",
+ "description": "Indicates the user can perform `update_pages` on this resource",
"args": [
],
@@ -15240,30 +19396,17 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "PageInfo",
+ "kind": "SCALAR",
+ "name": "Boolean",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "MergeRequestEdge",
- "description": "An edge in a connection.",
- "fields": [
+ },
{
- "name": "cursor",
- "description": "A cursor for use in pagination.",
+ "name": "updateWiki",
+ "description": "Indicates the user can perform `update_wiki` on this resource",
"args": [
],
@@ -15272,7 +19415,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
}
},
@@ -15280,15 +19423,19 @@
"deprecationReason": null
},
{
- "name": "node",
- "description": "The item at the end of the edge.",
+ "name": "uploadFile",
+ "description": "Indicates the user can perform `upload_file` on this resource",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "MergeRequest",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
@@ -15303,79 +19450,30 @@
},
{
"kind": "OBJECT",
- "name": "MergeRequest",
+ "name": "ProjectStatistics",
"description": null,
"fields": [
{
- "name": "allowCollaboration",
- "description": "Indicates if members of the target project can push to the fork",
+ "name": "buildArtifactsSize",
+ "description": "Build artifacts size of the project",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "assignees",
- "description": "Assignees of the merge request",
- "args": [
- {
- "name": "after",
- "description": "Returns the elements in the list that come after the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "before",
- "description": "Returns the elements in the list that come before the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "first",
- "description": "Returns the first _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "last",
- "description": "Returns the last _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
}
- ],
- "type": {
- "kind": "OBJECT",
- "name": "UserConnection",
- "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "createdAt",
- "description": "Timestamp of when the merge request was created",
+ "name": "commitCount",
+ "description": "Commit count of the project",
"args": [
],
@@ -15384,7 +19482,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Time",
+ "name": "Int",
"ofType": null
}
},
@@ -15392,78 +19490,26 @@
"deprecationReason": null
},
{
- "name": "defaultMergeCommitMessage",
- "description": "Default merge commit message of the merge request",
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "description",
- "description": "Description of the merge request (Markdown rendered as HTML for caching)",
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "descriptionHtml",
- "description": "The GitLab Flavored Markdown rendering of `description`",
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "diffHeadSha",
- "description": "Diff head SHA of the merge request",
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "diffRefs",
- "description": "References of the base SHA, the head SHA, and the start SHA for this merge request",
+ "name": "lfsObjectsSize",
+ "description": "Large File Storage (LFS) object size of the project",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "DiffRefs",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "discussionLocked",
- "description": "Indicates if comments on the merge request are locked to members only",
+ "name": "packagesSize",
+ "description": "Packages size of the project",
"args": [
],
@@ -15472,7 +19518,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "Int",
"ofType": null
}
},
@@ -15480,56 +19526,17 @@
"deprecationReason": null
},
{
- "name": "discussions",
- "description": "All discussions on this noteable",
+ "name": "repositorySize",
+ "description": "Repository size of the project",
"args": [
- {
- "name": "after",
- "description": "Returns the elements in the list that come after the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "before",
- "description": "Returns the elements in the list that come before the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "first",
- "description": "Returns the first _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "last",
- "description": "Returns the last _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- }
+
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "DiscussionConnection",
+ "kind": "SCALAR",
+ "name": "Int",
"ofType": null
}
},
@@ -15537,8 +19544,8 @@
"deprecationReason": null
},
{
- "name": "downvotes",
- "description": "Number of downvotes for the merge request",
+ "name": "storageSize",
+ "description": "Storage size of the project",
"args": [
],
@@ -15555,36 +19562,49 @@
"deprecationReason": null
},
{
- "name": "forceRemoveSourceBranch",
- "description": "Indicates if the project settings will lead to source branch deletion after merge",
+ "name": "wikiSize",
+ "description": "Wiki size of the project",
"args": [
],
"type": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "Int",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
- },
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "Query",
+ "description": null,
+ "fields": [
{
- "name": "headPipeline",
- "description": "The pipeline running on the branch HEAD of the merge request",
+ "name": "currentUser",
+ "description": "Get information about current user",
"args": [
],
"type": {
"kind": "OBJECT",
- "name": "Pipeline",
+ "name": "User",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "id",
- "description": "ID of the merge request",
+ "name": "designManagement",
+ "description": "Fields related to design management",
"args": [
],
@@ -15592,8 +19612,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "ID",
+ "kind": "OBJECT",
+ "name": "DesignManagement",
"ofType": null
}
},
@@ -15601,10 +19621,23 @@
"deprecationReason": null
},
{
- "name": "iid",
- "description": "Internal ID of the merge request",
+ "name": "echo",
+ "description": "Text to echo back",
"args": [
-
+ {
+ "name": "text",
+ "description": "Text to echo back",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ }
],
"type": {
"kind": "NON_NULL",
@@ -15619,328 +19652,168 @@
"deprecationReason": null
},
{
- "name": "inProgressMergeCommitSha",
- "description": "Commit SHA of the merge request if merge is in progress",
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "labels",
- "description": "Labels of the merge request",
+ "name": "group",
+ "description": "Find a group",
"args": [
{
- "name": "after",
- "description": "Returns the elements in the list that come after the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "before",
- "description": "Returns the elements in the list that come before the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "first",
- "description": "Returns the first _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "last",
- "description": "Returns the last _n_ elements from the list.",
+ "name": "fullPath",
+ "description": "The full path of the project, group or namespace, e.g., \"gitlab-org/gitlab-foss\"",
"type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
},
"defaultValue": null
}
],
"type": {
"kind": "OBJECT",
- "name": "LabelConnection",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "mergeCommitMessage",
- "description": "Deprecated - renamed to defaultMergeCommitMessage",
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "isDeprecated": true,
- "deprecationReason": "Renamed to defaultMergeCommitMessage"
- },
- {
- "name": "mergeCommitSha",
- "description": "SHA of the merge request commit (set once merged)",
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "String",
+ "name": "Group",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "mergeError",
- "description": "Error message due to a merge error",
+ "name": "metadata",
+ "description": "Metadata about GitLab",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "OBJECT",
+ "name": "Metadata",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "mergeOngoing",
- "description": "Indicates if a merge is currently occurring",
+ "name": "namespace",
+ "description": "Find a namespace",
"args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
+ {
+ "name": "fullPath",
+ "description": "The full path of the project, group or namespace, e.g., \"gitlab-org/gitlab-foss\"",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
}
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "mergeStatus",
- "description": "Status of the merge request",
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "mergeWhenPipelineSucceeds",
- "description": "Indicates if the merge has been set to be merged when its pipeline succeeds (MWPS)",
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "mergeableDiscussionsState",
- "description": "Indicates if all discussions in the merge request have been resolved, allowing the merge request to be merged",
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "milestone",
- "description": "The milestone of the merge request",
- "args": [
-
],
"type": {
"kind": "OBJECT",
- "name": "Milestone",
+ "name": "Namespace",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "notes",
- "description": "All notes on this noteable",
+ "name": "project",
+ "description": "Find a project",
"args": [
{
- "name": "after",
- "description": "Returns the elements in the list that come after the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "before",
- "description": "Returns the elements in the list that come before the specified cursor.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "first",
- "description": "Returns the first _n_ elements from the list.",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "last",
- "description": "Returns the last _n_ elements from the list.",
+ "name": "fullPath",
+ "description": "The full path of the project, group or namespace, e.g., \"gitlab-org/gitlab-foss\"",
"type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
},
"defaultValue": null
}
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "NoteConnection",
- "ofType": null
- }
+ "kind": "OBJECT",
+ "name": "Project",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "participants",
- "description": "Participants in the merge request",
+ "name": "snippets",
+ "description": "Find Snippets visible to the current user",
"args": [
{
- "name": "after",
- "description": "Returns the elements in the list that come after the specified cursor.",
+ "name": "ids",
+ "description": "Array of global snippet ids, e.g., \"gid://gitlab/ProjectSnippet/1\"",
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ }
},
"defaultValue": null
},
{
- "name": "before",
- "description": "Returns the elements in the list that come before the specified cursor.",
+ "name": "visibility",
+ "description": "The visibility of the snippet",
"type": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "ENUM",
+ "name": "VisibilityScopesEnum",
"ofType": null
},
"defaultValue": null
},
{
- "name": "first",
- "description": "Returns the first _n_ elements from the list.",
+ "name": "authorId",
+ "description": "The ID of an author",
"type": {
"kind": "SCALAR",
- "name": "Int",
+ "name": "ID",
"ofType": null
},
"defaultValue": null
},
{
- "name": "last",
- "description": "Returns the last _n_ elements from the list.",
+ "name": "projectId",
+ "description": "The ID of a project",
"type": {
"kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- }
- ],
- "type": {
- "kind": "OBJECT",
- "name": "UserConnection",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "pipelines",
- "description": "Pipelines for the merge request",
- "args": [
- {
- "name": "status",
- "description": "Filter pipelines by their status",
- "type": {
- "kind": "ENUM",
- "name": "PipelineStatusEnum",
+ "name": "ID",
"ofType": null
},
"defaultValue": null
},
{
- "name": "ref",
- "description": "Filter pipelines by the ref they are run for",
+ "name": "type",
+ "description": "The type of snippet",
"type": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "ENUM",
+ "name": "TypeEnum",
"ofType": null
},
"defaultValue": null
},
{
- "name": "sha",
- "description": "Filter pipelines by the sha of the commit they are run for",
+ "name": "explore",
+ "description": "Explore personal snippets",
"type": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
},
"defaultValue": null
@@ -15987,115 +19860,106 @@
}
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "PipelineConnection",
- "ofType": null
- }
+ "kind": "OBJECT",
+ "name": "SnippetConnection",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
- },
- {
- "name": "project",
- "description": "Alias for target_project",
- "args": [
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
- ],
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "INPUT_OBJECT",
+ "name": "RemoveAwardEmojiInput",
+ "description": "Autogenerated input type of RemoveAwardEmoji",
+ "fields": null,
+ "inputFields": [
+ {
+ "name": "awardableId",
+ "description": "The global id of the awardable resource",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "Project",
+ "kind": "SCALAR",
+ "name": "ID",
"ofType": null
}
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "projectId",
- "description": "ID of the merge request project",
- "args": [
-
- ],
+ "name": "name",
+ "description": "The emoji name",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Int",
+ "name": "String",
"ofType": null
}
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "rebaseCommitSha",
- "description": "Rebase commit SHA of the merge request",
- "args": [
-
- ],
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
- "isDeprecated": false,
- "deprecationReason": null
- },
+ "defaultValue": null
+ }
+ ],
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "RemoveAwardEmojiPayload",
+ "description": "Autogenerated return type of RemoveAwardEmoji",
+ "fields": [
{
- "name": "rebaseInProgress",
- "description": "Indicates if there is a rebase currently in progress for the merge request",
+ "name": "awardEmoji",
+ "description": "The award emoji after mutation",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- }
+ "kind": "OBJECT",
+ "name": "AwardEmoji",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "reference",
- "description": "Internal reference of the merge request. Returned in shortened format by default",
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
"args": [
- {
- "name": "full",
- "description": "Boolean option specifying whether the reference should be returned in full",
- "type": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- },
- "defaultValue": "false"
- }
+
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "shouldBeRebased",
- "description": "Indicates if the merge request will be rebased",
+ "name": "errors",
+ "description": "Reasons why the mutation failed.",
"args": [
],
@@ -16103,31 +19967,38 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
}
},
"isDeprecated": false,
"deprecationReason": null
- },
- {
- "name": "shouldRemoveSourceBranch",
- "description": "Indicates if the source branch of the merge request will be deleted after merge",
- "args": [
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
- ],
- "type": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "Repository",
+ "description": null,
+ "fields": [
{
- "name": "sourceBranch",
- "description": "Source branch of the merge request",
+ "name": "empty",
+ "description": "Indicates repository has no visible content",
"args": [
],
@@ -16136,7 +20007,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
}
},
@@ -16144,8 +20015,8 @@
"deprecationReason": null
},
{
- "name": "sourceBranchExists",
- "description": "Indicates if the source branch of the merge request exists",
+ "name": "exists",
+ "description": "Indicates a corresponding Git repository exists on disk",
"args": [
],
@@ -16162,54 +20033,78 @@
"deprecationReason": null
},
{
- "name": "sourceProject",
- "description": "Source project of the merge request",
+ "name": "rootRef",
+ "description": "Default branch of the repository",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "Project",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "sourceProjectId",
- "description": "ID of the merge request source project",
+ "name": "tree",
+ "description": "Tree of the repository",
"args": [
-
+ {
+ "name": "path",
+ "description": "The path to get the tree for. Default value is the root of the repository",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": "\"\""
+ },
+ {
+ "name": "ref",
+ "description": "The commit ref to get the tree for. Default value is HEAD",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": "\"head\""
+ },
+ {
+ "name": "recursive",
+ "description": "Used to get a recursive tree. Default is false",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ },
+ "defaultValue": "false"
+ }
],
"type": {
- "kind": "SCALAR",
- "name": "Int",
+ "kind": "OBJECT",
+ "name": "Tree",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
- },
- {
- "name": "state",
- "description": "State of the merge request",
- "args": [
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "ENUM",
- "name": "MergeRequestState",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "Requirement",
+ "description": "Represents a requirement.",
+ "fields": [
{
- "name": "subscribed",
- "description": "Indicates if the currently logged in user is subscribed to this merge request",
+ "name": "author",
+ "description": "Author of the requirement",
"args": [
],
@@ -16217,8 +20112,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
+ "kind": "OBJECT",
+ "name": "User",
"ofType": null
}
},
@@ -16226,8 +20121,8 @@
"deprecationReason": null
},
{
- "name": "targetBranch",
- "description": "Target branch of the merge request",
+ "name": "createdAt",
+ "description": "Timestamp of when the requirement was created",
"args": [
],
@@ -16236,7 +20131,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Time",
"ofType": null
}
},
@@ -16244,8 +20139,8 @@
"deprecationReason": null
},
{
- "name": "targetProject",
- "description": "Target project of the merge request",
+ "name": "id",
+ "description": "ID of the requirement",
"args": [
],
@@ -16253,8 +20148,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "Project",
+ "kind": "SCALAR",
+ "name": "ID",
"ofType": null
}
},
@@ -16262,8 +20157,8 @@
"deprecationReason": null
},
{
- "name": "targetProjectId",
- "description": "ID of the merge request target project",
+ "name": "iid",
+ "description": "Internal ID of the requirement",
"args": [
],
@@ -16272,7 +20167,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Int",
+ "name": "ID",
"ofType": null
}
},
@@ -16280,8 +20175,8 @@
"deprecationReason": null
},
{
- "name": "taskCompletionStatus",
- "description": "Completion status of tasks",
+ "name": "project",
+ "description": "Project to which the requirement belongs",
"args": [
],
@@ -16290,7 +20185,7 @@
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "TaskCompletionStatus",
+ "name": "Project",
"ofType": null
}
},
@@ -16298,8 +20193,8 @@
"deprecationReason": null
},
{
- "name": "timeEstimate",
- "description": "Time estimate of the merge request",
+ "name": "state",
+ "description": "State of the requirement",
"args": [
],
@@ -16307,8 +20202,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Int",
+ "kind": "ENUM",
+ "name": "RequirementState",
"ofType": null
}
},
@@ -16317,25 +20212,7 @@
},
{
"name": "title",
- "description": "Title of the merge request",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "titleHtml",
- "description": "The GitLab Flavored Markdown rendering of `title`",
+ "description": "Title of the requirement",
"args": [
],
@@ -16348,8 +20225,8 @@
"deprecationReason": null
},
{
- "name": "totalTimeSpent",
- "description": "Total time reported as spent on the merge request",
+ "name": "updatedAt",
+ "description": "Timestamp of when the requirement was last updated",
"args": [
],
@@ -16358,7 +20235,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Int",
+ "name": "Time",
"ofType": null
}
},
@@ -16366,8 +20243,8 @@
"deprecationReason": null
},
{
- "name": "updatedAt",
- "description": "Timestamp of when the merge request was last updated",
+ "name": "userPermissions",
+ "description": "Permissions for the current user on the resource",
"args": [
],
@@ -16375,26 +20252,39 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Time",
+ "kind": "OBJECT",
+ "name": "RequirementPermissions",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
- },
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "RequirementConnection",
+ "description": "The connection type for Requirement.",
+ "fields": [
{
- "name": "upvotes",
- "description": "Number of upvotes for the merge request",
+ "name": "edges",
+ "description": "A list of edges.",
"args": [
],
"type": {
- "kind": "NON_NULL",
+ "kind": "LIST",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Int",
+ "kind": "OBJECT",
+ "name": "RequirementEdge",
"ofType": null
}
},
@@ -16402,31 +20292,17 @@
"deprecationReason": null
},
{
- "name": "userNotesCount",
- "description": "User notes count of the merge request",
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "userPermissions",
- "description": "Permissions for the current user on the resource",
+ "name": "nodes",
+ "description": "A list of nodes.",
"args": [
],
"type": {
- "kind": "NON_NULL",
+ "kind": "LIST",
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "MergeRequestPermissions",
+ "name": "Requirement",
"ofType": null
}
},
@@ -16434,22 +20310,8 @@
"deprecationReason": null
},
{
- "name": "webUrl",
- "description": "Web URL of the merge request",
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "workInProgress",
- "description": "Indicates if the merge request is a work in progress (WIP)",
+ "name": "pageInfo",
+ "description": "Information to aid in pagination.",
"args": [
],
@@ -16457,8 +20319,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
+ "kind": "OBJECT",
+ "name": "PageInfo",
"ofType": null
}
},
@@ -16468,23 +20330,19 @@
],
"inputFields": null,
"interfaces": [
- {
- "kind": "INTERFACE",
- "name": "Noteable",
- "ofType": null
- }
+
],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
- "name": "MergeRequestPermissions",
- "description": "Check permissions for the current user on a merge request",
+ "name": "RequirementEdge",
+ "description": "An edge in a connection.",
"fields": [
{
- "name": "adminMergeRequest",
- "description": "Indicates the user can perform `admin_merge_request` on this resource",
+ "name": "cursor",
+ "description": "A cursor for use in pagination.",
"args": [
],
@@ -16493,7 +20351,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "String",
"ofType": null
}
},
@@ -16501,44 +20359,35 @@
"deprecationReason": null
},
{
- "name": "cherryPickOnCurrentMergeRequest",
- "description": "Indicates the user can perform `cherry_pick_on_current_merge_request` on this resource",
+ "name": "node",
+ "description": "The item at the end of the edge.",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- }
+ "kind": "OBJECT",
+ "name": "Requirement",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
- },
- {
- "name": "createNote",
- "description": "Indicates the user can perform `create_note` on this resource",
- "args": [
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "RequirementPermissions",
+ "description": "Check permissions for the current user on a requirement",
+ "fields": [
{
- "name": "pushToSourceBranch",
- "description": "Indicates the user can perform `push_to_source_branch` on this resource",
+ "name": "adminRequirement",
+ "description": "Indicates the user can perform `admin_requirement` on this resource",
"args": [
],
@@ -16555,8 +20404,8 @@
"deprecationReason": null
},
{
- "name": "readMergeRequest",
- "description": "Indicates the user can perform `read_merge_request` on this resource",
+ "name": "createRequirement",
+ "description": "Indicates the user can perform `create_requirement` on this resource",
"args": [
],
@@ -16573,8 +20422,8 @@
"deprecationReason": null
},
{
- "name": "removeSourceBranch",
- "description": "Indicates the user can perform `remove_source_branch` on this resource",
+ "name": "destroyRequirement",
+ "description": "Indicates the user can perform `destroy_requirement` on this resource",
"args": [
],
@@ -16591,8 +20440,8 @@
"deprecationReason": null
},
{
- "name": "revertOnCurrentMergeRequest",
- "description": "Indicates the user can perform `revert_on_current_merge_request` on this resource",
+ "name": "readRequirement",
+ "description": "Indicates the user can perform `read_requirement` on this resource",
"args": [
],
@@ -16609,8 +20458,8 @@
"deprecationReason": null
},
{
- "name": "updateMergeRequest",
- "description": "Indicates the user can perform `update_merge_request` on this resource",
+ "name": "updateRequirement",
+ "description": "Indicates the user can perform `update_requirement` on this resource",
"args": [
],
@@ -16636,32 +20485,20 @@
},
{
"kind": "ENUM",
- "name": "MergeRequestState",
- "description": "State of a GitLab merge request",
+ "name": "RequirementState",
+ "description": "State of a requirement",
"fields": null,
"inputFields": null,
"interfaces": null,
"enumValues": [
{
- "name": "opened",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "closed",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "locked",
+ "name": "OPENED",
"description": null,
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "merged",
+ "name": "ARCHIVED",
"description": null,
"isDeprecated": false,
"deprecationReason": null
@@ -16671,79 +20508,12 @@
},
{
"kind": "OBJECT",
- "name": "IssueConnection",
- "description": "The connection type for Issue.",
- "fields": [
- {
- "name": "edges",
- "description": "A list of edges.",
- "args": [
-
- ],
- "type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "IssueEdge",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "nodes",
- "description": "A list of nodes.",
- "args": [
-
- ],
- "type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "Issue",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "pageInfo",
- "description": "Information to aid in pagination.",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "PageInfo",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "IssueEdge",
- "description": "An edge in a connection.",
+ "name": "RootStorageStatistics",
+ "description": null,
"fields": [
{
- "name": "cursor",
- "description": "A cursor for use in pagination.",
+ "name": "buildArtifactsSize",
+ "description": "The CI artifacts size in bytes",
"args": [
],
@@ -16752,146 +20522,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "node",
- "description": "The item at the end of the edge.",
- "args": [
-
- ],
- "type": {
- "kind": "OBJECT",
- "name": "Issue",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "ENUM",
- "name": "IssuableState",
- "description": "State of a GitLab issue or merge request",
- "fields": null,
- "inputFields": null,
- "interfaces": null,
- "enumValues": [
- {
- "name": "opened",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "closed",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "locked",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
- }
- ],
- "possibleTypes": null
- },
- {
- "kind": "ENUM",
- "name": "IssueSort",
- "description": "Values for sorting issues",
- "fields": null,
- "inputFields": null,
- "interfaces": null,
- "enumValues": [
- {
- "name": "updated_desc",
- "description": "Updated at descending order",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "updated_asc",
- "description": "Updated at ascending order",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "created_desc",
- "description": "Created at descending order",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "created_asc",
- "description": "Created at ascending order",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "DUE_DATE_ASC",
- "description": "Due date by ascending order",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "DUE_DATE_DESC",
- "description": "Due date by descending order",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "RELATIVE_POSITION_ASC",
- "description": "Relative position by ascending order",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "WEIGHT_ASC",
- "description": "Weight by ascending order",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "WEIGHT_DESC",
- "description": "Weight by descending order",
- "isDeprecated": false,
- "deprecationReason": null
- }
- ],
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "EnvironmentConnection",
- "description": "The connection type for Environment.",
- "fields": [
- {
- "name": "edges",
- "description": "A list of edges.",
- "args": [
-
- ],
- "type": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "EnvironmentEdge",
+ "name": "Int",
"ofType": null
}
},
@@ -16899,17 +20530,17 @@
"deprecationReason": null
},
{
- "name": "nodes",
- "description": "A list of nodes.",
+ "name": "lfsObjectsSize",
+ "description": "The LFS objects size in bytes",
"args": [
],
"type": {
- "kind": "LIST",
+ "kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "Environment",
+ "kind": "SCALAR",
+ "name": "Int",
"ofType": null
}
},
@@ -16917,8 +20548,8 @@
"deprecationReason": null
},
{
- "name": "pageInfo",
- "description": "Information to aid in pagination.",
+ "name": "packagesSize",
+ "description": "The packages size in bytes",
"args": [
],
@@ -16926,30 +20557,17 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "PageInfo",
+ "kind": "SCALAR",
+ "name": "Int",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "EnvironmentEdge",
- "description": "An edge in a connection.",
- "fields": [
+ },
{
- "name": "cursor",
- "description": "A cursor for use in pagination.",
+ "name": "repositorySize",
+ "description": "The Git repository size in bytes",
"args": [
],
@@ -16958,7 +20576,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Int",
"ofType": null
}
},
@@ -16966,35 +20584,8 @@
"deprecationReason": null
},
{
- "name": "node",
- "description": "The item at the end of the edge.",
- "args": [
-
- ],
- "type": {
- "kind": "OBJECT",
- "name": "Environment",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "Environment",
- "description": "Describes where code is deployed for a project",
- "fields": [
- {
- "name": "id",
- "description": "ID of the environment",
+ "name": "storageSize",
+ "description": "The total storage in bytes",
"args": [
],
@@ -17003,7 +20594,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "ID",
+ "name": "Int",
"ofType": null
}
},
@@ -17011,8 +20602,8 @@
"deprecationReason": null
},
{
- "name": "name",
- "description": "Human-readable name of the environment",
+ "name": "wikiSize",
+ "description": "The wiki size in bytes",
"args": [
],
@@ -17021,7 +20612,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Int",
"ofType": null
}
},
@@ -17494,48 +21085,111 @@
"possibleTypes": null
},
{
- "kind": "ENUM",
- "name": "SentryErrorStatus",
- "description": "State of a Sentry error",
- "fields": null,
- "inputFields": null,
- "interfaces": null,
- "enumValues": [
+ "kind": "OBJECT",
+ "name": "SentryError",
+ "description": "A Sentry error. A simplified version of SentryDetailedError.",
+ "fields": [
{
- "name": "RESOLVED",
- "description": "Error has been resolved",
+ "name": "count",
+ "description": "Count of occurrences",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ }
+ },
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "RESOLVED_IN_NEXT_RELEASE",
- "description": "Error has been ignored until next release",
+ "name": "culprit",
+ "description": "Culprit of the error",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "UNRESOLVED",
- "description": "Error is unresolved",
+ "name": "externalUrl",
+ "description": "External URL of the error",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "IGNORED",
- "description": "Error has been ignored",
+ "name": "firstSeen",
+ "description": "Timestamp when the error was first seen",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ }
+ },
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "SentryErrorFrequency",
- "description": null,
- "fields": [
+ },
{
- "name": "count",
- "description": "Count of errors received since the previously recorded time",
+ "name": "frequency",
+ "description": "Last 24hr stats of the error",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "SentryErrorFrequency",
+ "ofType": null
+ }
+ }
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "id",
+ "description": "ID (global ID) of the error",
"args": [
],
@@ -17544,7 +21198,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Int",
+ "name": "ID",
"ofType": null
}
},
@@ -17552,8 +21206,8 @@
"deprecationReason": null
},
{
- "name": "time",
- "description": "Time the error frequency stats were recorded",
+ "name": "lastSeen",
+ "description": "Timestamp when the error was last seen",
"args": [
],
@@ -17568,23 +21222,10 @@
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "SentryErrorTags",
- "description": "State of a Sentry error",
- "fields": [
+ },
{
- "name": "level",
- "description": "Severity level of the Sentry Error",
+ "name": "message",
+ "description": "Sentry metadata message of the error",
"args": [
],
@@ -17597,35 +21238,44 @@
"deprecationReason": null
},
{
- "name": "logger",
- "description": "Logger of the Sentry Error",
+ "name": "sentryId",
+ "description": "ID (Sentry ID) of the error",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
+ },
+ {
+ "name": "sentryProjectId",
+ "description": "ID of the project (Sentry project)",
+ "args": [
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "GrafanaIntegration",
- "description": null,
- "fields": [
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
{
- "name": "createdAt",
- "description": "Timestamp of the issue's creation",
+ "name": "sentryProjectName",
+ "description": "Name of the project affected by the error",
"args": [
],
@@ -17634,7 +21284,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Time",
+ "name": "String",
"ofType": null
}
},
@@ -17642,8 +21292,8 @@
"deprecationReason": null
},
{
- "name": "enabled",
- "description": "Indicates whether Grafana integration is enabled",
+ "name": "sentryProjectSlug",
+ "description": "Slug of the project affected by the error",
"args": [
],
@@ -17652,7 +21302,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "String",
"ofType": null
}
},
@@ -17660,8 +21310,8 @@
"deprecationReason": null
},
{
- "name": "grafanaUrl",
- "description": "Url for the Grafana host for the Grafana integration",
+ "name": "shortId",
+ "description": "Short ID (Sentry ID) of the error",
"args": [
],
@@ -17678,8 +21328,26 @@
"deprecationReason": null
},
{
- "name": "id",
- "description": "Internal ID of the Grafana integration",
+ "name": "status",
+ "description": "Status of the error",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "ENUM",
+ "name": "SentryErrorStatus",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "title",
+ "description": "Title of the error",
"args": [
],
@@ -17688,7 +21356,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "ID",
+ "name": "String",
"ofType": null
}
},
@@ -17696,8 +21364,8 @@
"deprecationReason": null
},
{
- "name": "token",
- "description": "API token for the Grafana integration. Field is permanently masked.",
+ "name": "type",
+ "description": "Type of the error",
"args": [
],
@@ -17710,12 +21378,12 @@
"ofType": null
}
},
- "isDeprecated": true,
- "deprecationReason": "Plain text token has been masked for security reasons"
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "updatedAt",
- "description": "Timestamp of the issue's last activity",
+ "name": "userCount",
+ "description": "Count of users affected by the error",
"args": [
],
@@ -17724,7 +21392,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Time",
+ "name": "Int",
"ofType": null
}
},
@@ -18007,12 +21675,12 @@
},
{
"kind": "OBJECT",
- "name": "SentryError",
- "description": "A Sentry error. A simplified version of SentryDetailedError.",
+ "name": "SentryErrorFrequency",
+ "description": null,
"fields": [
{
"name": "count",
- "description": "Count of occurrences",
+ "description": "Count of errors received since the previously recorded time",
"args": [
],
@@ -18029,8 +21697,8 @@
"deprecationReason": null
},
{
- "name": "culprit",
- "description": "Culprit of the error",
+ "name": "time",
+ "description": "Time the error frequency stats were recorded",
"args": [
],
@@ -18039,16 +21707,29 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Time",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
- },
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "SentryErrorStackTrace",
+ "description": "An object containing a stack trace entry for a Sentry error.",
+ "fields": [
{
- "name": "externalUrl",
- "description": "External URL of the error",
+ "name": "dateReceived",
+ "description": "Time the stack trace was received by Sentry",
"args": [
],
@@ -18065,8 +21746,8 @@
"deprecationReason": null
},
{
- "name": "firstSeen",
- "description": "Timestamp when the error was first seen",
+ "name": "issueId",
+ "description": "ID of the Sentry error",
"args": [
],
@@ -18075,7 +21756,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Time",
+ "name": "String",
"ofType": null
}
},
@@ -18083,8 +21764,8 @@
"deprecationReason": null
},
{
- "name": "frequency",
- "description": "Last 24hr stats of the error",
+ "name": "stackTraceEntries",
+ "description": "Stack trace entries for the Sentry error",
"args": [
],
@@ -18099,7 +21780,7 @@
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "SentryErrorFrequency",
+ "name": "SentryErrorStackTraceEntry",
"ofType": null
}
}
@@ -18107,218 +21788,6 @@
},
"isDeprecated": false,
"deprecationReason": null
- },
- {
- "name": "id",
- "description": "ID (global ID) of the error",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "lastSeen",
- "description": "Timestamp when the error was last seen",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Time",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "message",
- "description": "Sentry metadata message of the error",
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "sentryId",
- "description": "ID (Sentry ID) of the error",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "sentryProjectId",
- "description": "ID of the project (Sentry project)",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "sentryProjectName",
- "description": "Name of the project affected by the error",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "sentryProjectSlug",
- "description": "Slug of the project affected by the error",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "shortId",
- "description": "Short ID (Sentry ID) of the error",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "status",
- "description": "Status of the error",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "ENUM",
- "name": "SentryErrorStatus",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "title",
- "description": "Title of the error",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "type",
- "description": "Type of the error",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "userCount",
- "description": "Count of users affected by the error",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
}
],
"inputFields": null,
@@ -18330,12 +21799,12 @@
},
{
"kind": "OBJECT",
- "name": "SentryErrorStackTrace",
- "description": "An object containing a stack trace entry for a Sentry error.",
+ "name": "SentryErrorStackTraceContext",
+ "description": "An object context for a Sentry error stack trace",
"fields": [
{
- "name": "dateReceived",
- "description": "Time the stack trace was received by Sentry",
+ "name": "code",
+ "description": "Code number of the context",
"args": [
],
@@ -18352,8 +21821,8 @@
"deprecationReason": null
},
{
- "name": "issueId",
- "description": "ID of the Sentry error",
+ "name": "line",
+ "description": "Line number of the context",
"args": [
],
@@ -18362,38 +21831,12 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Int",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
- },
- {
- "name": "stackTraceEntries",
- "description": "Stack trace entries for the Sentry error",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "SentryErrorStackTraceEntry",
- "ofType": null
- }
- }
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
}
],
"inputFields": null,
@@ -18495,157 +21938,68 @@
"possibleTypes": null
},
{
- "kind": "OBJECT",
- "name": "SentryErrorStackTraceContext",
- "description": "An object context for a Sentry error stack trace",
- "fields": [
+ "kind": "ENUM",
+ "name": "SentryErrorStatus",
+ "description": "State of a Sentry error",
+ "fields": null,
+ "inputFields": null,
+ "interfaces": null,
+ "enumValues": [
{
- "name": "code",
- "description": "Code number of the context",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- },
+ "name": "RESOLVED",
+ "description": "Error has been resolved",
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "line",
- "description": "Line number of the context",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- }
- },
+ "name": "RESOLVED_IN_NEXT_RELEASE",
+ "description": "Error has been ignored until next release",
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "Metadata",
- "description": null,
- "fields": [
+ },
{
- "name": "revision",
- "description": "Revision",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- },
+ "name": "UNRESOLVED",
+ "description": "Error is unresolved",
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "version",
- "description": "Version",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- },
+ "name": "IGNORED",
+ "description": "Error has been ignored",
"isDeprecated": false,
"deprecationReason": null
}
],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
- "name": "DesignManagement",
- "description": null,
+ "name": "SentryErrorTags",
+ "description": "State of a Sentry error",
"fields": [
{
- "name": "designAtVersion",
- "description": "Find a design as of a version",
+ "name": "level",
+ "description": "Severity level of the Sentry Error",
"args": [
- {
- "name": "id",
- "description": "The Global ID of the design at this version",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- },
- "defaultValue": null
- }
+
],
"type": {
- "kind": "OBJECT",
- "name": "DesignAtVersion",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "version",
- "description": "Find a version",
+ "name": "logger",
+ "description": "Logger of the Sentry Error",
"args": [
- {
- "name": "id",
- "description": "The Global ID of the version",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- },
- "defaultValue": null
- }
+
],
"type": {
- "kind": "OBJECT",
- "name": "DesignVersion",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
@@ -18661,944 +22015,517 @@
},
{
"kind": "OBJECT",
- "name": "Mutation",
- "description": null,
+ "name": "Snippet",
+ "description": "Represents a snippet entry",
"fields": [
{
- "name": "addAwardEmoji",
- "description": null,
+ "name": "author",
+ "description": "The owner of the snippet",
"args": [
- {
- "name": "input",
- "description": null,
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "AddAwardEmojiInput",
- "ofType": null
- }
- },
- "defaultValue": null
- }
+
],
"type": {
- "kind": "OBJECT",
- "name": "AddAwardEmojiPayload",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "createDiffNote",
- "description": null,
- "args": [
- {
- "name": "input",
- "description": null,
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "CreateDiffNoteInput",
- "ofType": null
- }
- },
- "defaultValue": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "User",
+ "ofType": null
}
- ],
- "type": {
- "kind": "OBJECT",
- "name": "CreateDiffNotePayload",
- "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "createEpic",
- "description": null,
+ "name": "blob",
+ "description": "Snippet blob",
"args": [
- {
- "name": "input",
- "description": null,
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "CreateEpicInput",
- "ofType": null
- }
- },
- "defaultValue": null
- }
+
],
"type": {
- "kind": "OBJECT",
- "name": "CreateEpicPayload",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "createImageDiffNote",
- "description": null,
- "args": [
- {
- "name": "input",
- "description": null,
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "CreateImageDiffNoteInput",
- "ofType": null
- }
- },
- "defaultValue": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "SnippetBlob",
+ "ofType": null
}
- ],
- "type": {
- "kind": "OBJECT",
- "name": "CreateImageDiffNotePayload",
- "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "createNote",
- "description": null,
+ "name": "createdAt",
+ "description": "Timestamp this snippet was created",
"args": [
- {
- "name": "input",
- "description": null,
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "CreateNoteInput",
- "ofType": null
- }
- },
- "defaultValue": null
- }
+
],
"type": {
- "kind": "OBJECT",
- "name": "CreateNotePayload",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "createSnippet",
- "description": null,
- "args": [
- {
- "name": "input",
- "description": null,
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "CreateSnippetInput",
- "ofType": null
- }
- },
- "defaultValue": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
}
- ],
- "type": {
- "kind": "OBJECT",
- "name": "CreateSnippetPayload",
- "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "designManagementDelete",
- "description": null,
+ "name": "description",
+ "description": "Description of the snippet",
"args": [
- {
- "name": "input",
- "description": null,
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "DesignManagementDeleteInput",
- "ofType": null
- }
- },
- "defaultValue": null
- }
+
],
"type": {
- "kind": "OBJECT",
- "name": "DesignManagementDeletePayload",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "designManagementUpload",
- "description": null,
+ "name": "descriptionHtml",
+ "description": "The GitLab Flavored Markdown rendering of `description`",
"args": [
- {
- "name": "input",
- "description": null,
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "DesignManagementUploadInput",
- "ofType": null
- }
- },
- "defaultValue": null
- }
+
],
"type": {
- "kind": "OBJECT",
- "name": "DesignManagementUploadPayload",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "destroyNote",
- "description": null,
+ "name": "discussions",
+ "description": "All discussions on this noteable",
"args": [
{
- "name": "input",
- "description": null,
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.",
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "DestroyNoteInput",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
},
"defaultValue": null
- }
- ],
- "type": {
- "kind": "OBJECT",
- "name": "DestroyNotePayload",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "destroySnippet",
- "description": null,
- "args": [
+ },
{
- "name": "input",
- "description": null,
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.",
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "DestroySnippetInput",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
},
"defaultValue": null
- }
- ],
- "type": {
- "kind": "OBJECT",
- "name": "DestroySnippetPayload",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "epicSetSubscription",
- "description": null,
- "args": [
+ },
{
- "name": "input",
- "description": null,
+ "name": "first",
+ "description": "Returns the first _n_ elements from the list.",
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "EpicSetSubscriptionInput",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
},
"defaultValue": null
- }
- ],
- "type": {
- "kind": "OBJECT",
- "name": "EpicSetSubscriptionPayload",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "epicTreeReorder",
- "description": null,
- "args": [
+ },
{
- "name": "input",
- "description": null,
+ "name": "last",
+ "description": "Returns the last _n_ elements from the list.",
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "EpicTreeReorderInput",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
},
"defaultValue": null
}
],
"type": {
- "kind": "OBJECT",
- "name": "EpicTreeReorderPayload",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "issueSetConfidential",
- "description": null,
- "args": [
- {
- "name": "input",
- "description": null,
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "IssueSetConfidentialInput",
- "ofType": null
- }
- },
- "defaultValue": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "DiscussionConnection",
+ "ofType": null
}
- ],
- "type": {
- "kind": "OBJECT",
- "name": "IssueSetConfidentialPayload",
- "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "issueSetDueDate",
- "description": null,
+ "name": "fileName",
+ "description": "File Name of the snippet",
"args": [
- {
- "name": "input",
- "description": null,
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "IssueSetDueDateInput",
- "ofType": null
- }
- },
- "defaultValue": null
- }
+
],
"type": {
- "kind": "OBJECT",
- "name": "IssueSetDueDatePayload",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "issueSetWeight",
- "description": null,
+ "name": "id",
+ "description": "Id of the snippet",
"args": [
- {
- "name": "input",
- "description": null,
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "IssueSetWeightInput",
- "ofType": null
- }
- },
- "defaultValue": null
- }
+
],
"type": {
- "kind": "OBJECT",
- "name": "IssueSetWeightPayload",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "markAsSpamSnippet",
- "description": null,
+ "name": "notes",
+ "description": "All notes on this noteable",
"args": [
{
- "name": "input",
- "description": null,
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.",
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "MarkAsSpamSnippetInput",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
},
"defaultValue": null
- }
- ],
- "type": {
- "kind": "OBJECT",
- "name": "MarkAsSpamSnippetPayload",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "mergeRequestSetAssignees",
- "description": null,
- "args": [
+ },
{
- "name": "input",
- "description": null,
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.",
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "MergeRequestSetAssigneesInput",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
},
"defaultValue": null
- }
- ],
- "type": {
- "kind": "OBJECT",
- "name": "MergeRequestSetAssigneesPayload",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "mergeRequestSetLabels",
- "description": null,
- "args": [
+ },
{
- "name": "input",
- "description": null,
+ "name": "first",
+ "description": "Returns the first _n_ elements from the list.",
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "MergeRequestSetLabelsInput",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
},
"defaultValue": null
- }
- ],
- "type": {
- "kind": "OBJECT",
- "name": "MergeRequestSetLabelsPayload",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "mergeRequestSetLocked",
- "description": null,
- "args": [
+ },
{
- "name": "input",
- "description": null,
+ "name": "last",
+ "description": "Returns the last _n_ elements from the list.",
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "MergeRequestSetLockedInput",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
},
"defaultValue": null
}
],
"type": {
- "kind": "OBJECT",
- "name": "MergeRequestSetLockedPayload",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "NoteConnection",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "mergeRequestSetMilestone",
- "description": null,
+ "name": "project",
+ "description": "The project the snippet is associated with",
"args": [
- {
- "name": "input",
- "description": null,
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "MergeRequestSetMilestoneInput",
- "ofType": null
- }
- },
- "defaultValue": null
- }
+
],
"type": {
"kind": "OBJECT",
- "name": "MergeRequestSetMilestonePayload",
+ "name": "Project",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "mergeRequestSetSubscription",
- "description": null,
+ "name": "rawUrl",
+ "description": "Raw URL of the snippet",
"args": [
- {
- "name": "input",
- "description": null,
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "MergeRequestSetSubscriptionInput",
- "ofType": null
- }
- },
- "defaultValue": null
- }
+
],
"type": {
- "kind": "OBJECT",
- "name": "MergeRequestSetSubscriptionPayload",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "mergeRequestSetWip",
- "description": null,
+ "name": "title",
+ "description": "Title of the snippet",
"args": [
- {
- "name": "input",
- "description": null,
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "MergeRequestSetWipInput",
- "ofType": null
- }
- },
- "defaultValue": null
- }
+
],
"type": {
- "kind": "OBJECT",
- "name": "MergeRequestSetWipPayload",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "removeAwardEmoji",
- "description": null,
+ "name": "updatedAt",
+ "description": "Timestamp this snippet was updated",
"args": [
- {
- "name": "input",
- "description": null,
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "RemoveAwardEmojiInput",
- "ofType": null
- }
- },
- "defaultValue": null
- }
+
],
"type": {
- "kind": "OBJECT",
- "name": "RemoveAwardEmojiPayload",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "todoMarkDone",
- "description": null,
+ "name": "userPermissions",
+ "description": "Permissions for the current user on the resource",
"args": [
- {
- "name": "input",
- "description": null,
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "TodoMarkDoneInput",
- "ofType": null
- }
- },
- "defaultValue": null
- }
+
],
"type": {
- "kind": "OBJECT",
- "name": "TodoMarkDonePayload",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "SnippetPermissions",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "todoRestore",
- "description": null,
+ "name": "visibilityLevel",
+ "description": "Visibility Level of the snippet",
"args": [
- {
- "name": "input",
- "description": null,
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "TodoRestoreInput",
- "ofType": null
- }
- },
- "defaultValue": null
- }
+
],
"type": {
- "kind": "OBJECT",
- "name": "TodoRestorePayload",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "ENUM",
+ "name": "VisibilityLevelsEnum",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "todoRestoreMany",
- "description": null,
+ "name": "webUrl",
+ "description": "Web URL of the snippet",
"args": [
- {
- "name": "input",
- "description": null,
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "TodoRestoreManyInput",
- "ofType": null
- }
- },
- "defaultValue": null
- }
+
],
"type": {
- "kind": "OBJECT",
- "name": "TodoRestoreManyPayload",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
- },
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
{
- "name": "todosMarkAllDone",
- "description": null,
+ "kind": "INTERFACE",
+ "name": "Noteable",
+ "ofType": null
+ }
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "SnippetBlob",
+ "description": "Represents the snippet blob",
+ "fields": [
+ {
+ "name": "binary",
+ "description": "Shows whether the blob is binary",
"args": [
- {
- "name": "input",
- "description": null,
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "TodosMarkAllDoneInput",
- "ofType": null
- }
- },
- "defaultValue": null
- }
+
],
"type": {
- "kind": "OBJECT",
- "name": "TodosMarkAllDonePayload",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "toggleAwardEmoji",
- "description": null,
+ "name": "mode",
+ "description": "Blob mode",
"args": [
- {
- "name": "input",
- "description": null,
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "ToggleAwardEmojiInput",
- "ofType": null
- }
- },
- "defaultValue": null
- }
+
],
"type": {
- "kind": "OBJECT",
- "name": "ToggleAwardEmojiPayload",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "updateEpic",
- "description": null,
+ "name": "name",
+ "description": "Blob name",
"args": [
- {
- "name": "input",
- "description": null,
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "UpdateEpicInput",
- "ofType": null
- }
- },
- "defaultValue": null
- }
+
],
"type": {
- "kind": "OBJECT",
- "name": "UpdateEpicPayload",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "updateImageDiffNote",
- "description": "Updates a DiffNote on an image (a `Note` where the `position.positionType` is `\"image\"`). If the body of the Note contains only quick actions, the Note will be destroyed during the update, and no Note will be returned",
+ "name": "path",
+ "description": "Blob path",
"args": [
- {
- "name": "input",
- "description": null,
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "UpdateImageDiffNoteInput",
- "ofType": null
- }
- },
- "defaultValue": null
- }
+
],
"type": {
- "kind": "OBJECT",
- "name": "UpdateImageDiffNotePayload",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "updateIssue",
- "description": null,
+ "name": "plainData",
+ "description": "Blob plain highlighted data",
"args": [
- {
- "name": "input",
- "description": null,
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "UpdateIssueInput",
- "ofType": null
- }
- },
- "defaultValue": null
- }
+
],
"type": {
- "kind": "OBJECT",
- "name": "UpdateIssuePayload",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "updateNote",
- "description": "Updates a Note. If the body of the Note contains only quick actions, the Note will be destroyed during the update, and no Note will be returned",
+ "name": "rawPath",
+ "description": "Blob raw content endpoint path",
"args": [
- {
- "name": "input",
- "description": null,
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "UpdateNoteInput",
- "ofType": null
- }
- },
- "defaultValue": null
- }
+
],
"type": {
- "kind": "OBJECT",
- "name": "UpdateNotePayload",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "updateSnippet",
- "description": null,
+ "name": "richData",
+ "description": "Blob highlighted data",
"args": [
- {
- "name": "input",
- "description": null,
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "UpdateSnippetInput",
- "ofType": null
- }
- },
- "defaultValue": null
- }
+
],
"type": {
- "kind": "OBJECT",
- "name": "UpdateSnippetPayload",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "AddAwardEmojiPayload",
- "description": "Autogenerated return type of AddAwardEmoji",
- "fields": [
+ },
{
- "name": "awardEmoji",
- "description": "The award emoji after mutation",
+ "name": "richViewer",
+ "description": "Blob content rich viewer",
"args": [
],
"type": {
"kind": "OBJECT",
- "name": "AwardEmoji",
+ "name": "SnippetBlobViewer",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
+ "name": "simpleViewer",
+ "description": "Blob content simple viewer",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "SnippetBlobViewer",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "errors",
- "description": "Reasons why the mutation failed.",
+ "name": "size",
+ "description": "Blob size",
"args": [
],
@@ -19606,17 +22533,9 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- }
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
}
},
"isDeprecated": false,
@@ -19632,12 +22551,12 @@
},
{
"kind": "OBJECT",
- "name": "AwardEmoji",
- "description": "An emoji awarded by a user.",
+ "name": "SnippetBlobViewer",
+ "description": "Represents how the blob content should be displayed",
"fields": [
{
- "name": "description",
- "description": "The emoji description",
+ "name": "collapsed",
+ "description": "Shows whether the blob should be displayed collapsed",
"args": [
],
@@ -19646,7 +22565,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
}
},
@@ -19654,8 +22573,8 @@
"deprecationReason": null
},
{
- "name": "emoji",
- "description": "The emoji as an icon",
+ "name": "fileType",
+ "description": "Content file type",
"args": [
],
@@ -19672,8 +22591,8 @@
"deprecationReason": null
},
{
- "name": "name",
- "description": "The emoji name",
+ "name": "loadAsync",
+ "description": "Shows whether the blob content is loaded async",
"args": [
],
@@ -19682,7 +22601,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
}
},
@@ -19690,8 +22609,8 @@
"deprecationReason": null
},
{
- "name": "unicode",
- "description": "The emoji in unicode",
+ "name": "loadingPartialName",
+ "description": "Loading partial name",
"args": [
],
@@ -19708,8 +22627,22 @@
"deprecationReason": null
},
{
- "name": "unicodeVersion",
- "description": "The unicode version for this emoji",
+ "name": "renderError",
+ "description": "Error rendering the blob content",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "tooLarge",
+        "description": "Shows whether the blob is too large to be displayed",
"args": [
],
@@ -19718,7 +22651,7 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
}
},
@@ -19726,8 +22659,8 @@
"deprecationReason": null
},
{
- "name": "user",
- "description": "The user who awarded the emoji",
+ "name": "type",
+ "description": "Type of blob viewer",
"args": [
],
@@ -19735,8 +22668,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "User",
+ "kind": "ENUM",
+ "name": "BlobViewersType",
"ofType": null
}
},
@@ -19752,112 +22685,108 @@
"possibleTypes": null
},
{
- "kind": "INPUT_OBJECT",
- "name": "AddAwardEmojiInput",
- "description": "Autogenerated input type of AddAwardEmoji",
- "fields": null,
- "inputFields": [
+ "kind": "OBJECT",
+ "name": "SnippetConnection",
+ "description": "The connection type for Snippet.",
+ "fields": [
{
- "name": "awardableId",
- "description": "The global id of the awardable resource",
+ "name": "edges",
+ "description": "A list of edges.",
+ "args": [
+
+ ],
"type": {
- "kind": "NON_NULL",
+ "kind": "LIST",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "ID",
+ "kind": "OBJECT",
+ "name": "SnippetEdge",
"ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "name",
- "description": "The emoji name",
+ "name": "nodes",
+ "description": "A list of nodes.",
+ "args": [
+
+ ],
"type": {
- "kind": "NON_NULL",
+ "kind": "LIST",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "OBJECT",
+ "name": "Snippet",
"ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
+ "name": "pageInfo",
+ "description": "Information to aid in pagination.",
+ "args": [
+
+ ],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "PageInfo",
+ "ofType": null
+ }
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
}
],
- "interfaces": null,
+ "inputFields": null,
+ "interfaces": [
+
+ ],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
- "name": "RemoveAwardEmojiPayload",
- "description": "Autogenerated return type of RemoveAwardEmoji",
+ "name": "SnippetEdge",
+ "description": "An edge in a connection.",
"fields": [
{
- "name": "awardEmoji",
- "description": "The award emoji after mutation",
+ "name": "cursor",
+ "description": "A cursor for use in pagination.",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "AwardEmoji",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
+ "name": "node",
+ "description": "The item at the end of the edge.",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "OBJECT",
+ "name": "Snippet",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
- },
- {
- "name": "errors",
- "description": "Reasons why the mutation failed.",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- }
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
}
],
"inputFields": null,
@@ -19868,90 +22797,85 @@
"possibleTypes": null
},
{
- "kind": "INPUT_OBJECT",
- "name": "RemoveAwardEmojiInput",
- "description": "Autogenerated input type of RemoveAwardEmoji",
- "fields": null,
- "inputFields": [
+ "kind": "OBJECT",
+ "name": "SnippetPermissions",
+ "description": null,
+ "fields": [
{
- "name": "awardableId",
- "description": "The global id of the awardable resource",
+ "name": "adminSnippet",
+ "description": "Indicates the user can perform `admin_snippet` on this resource",
+ "args": [
+
+ ],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "ID",
+ "name": "Boolean",
"ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "name",
- "description": "The emoji name",
+ "name": "awardEmoji",
+ "description": "Indicates the user can perform `award_emoji` on this resource",
+ "args": [
+
+ ],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- }
- ],
- "interfaces": null,
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "ToggleAwardEmojiPayload",
- "description": "Autogenerated return type of ToggleAwardEmoji",
- "fields": [
- {
- "name": "awardEmoji",
- "description": "The award emoji after mutation",
+ "name": "createNote",
+ "description": "Indicates the user can perform `create_note` on this resource",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "AwardEmoji",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
+ "name": "readSnippet",
+ "description": "Indicates the user can perform `read_snippet` on this resource",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "errors",
- "description": "Reasons why the mutation failed.",
+ "name": "reportSnippet",
+ "description": "Indicates the user can perform `report_snippet` on this resource",
"args": [
],
@@ -19959,25 +22883,17 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- }
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "toggledOn",
- "description": "Indicates the status of the emoji. True if the toggle awarded the emoji, and false if the toggle removed the emoji.",
+ "name": "updateSnippet",
+ "description": "Indicates the user can perform `update_snippet` on this resource",
"args": [
],
@@ -20002,76 +22918,76 @@
"possibleTypes": null
},
{
- "kind": "INPUT_OBJECT",
- "name": "ToggleAwardEmojiInput",
- "description": "Autogenerated input type of ToggleAwardEmoji",
+ "kind": "ENUM",
+ "name": "Sort",
+ "description": "Common sort values",
"fields": null,
- "inputFields": [
+ "inputFields": null,
+ "interfaces": null,
+ "enumValues": [
{
- "name": "awardableId",
- "description": "The global id of the awardable resource",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- },
- "defaultValue": null
+ "name": "updated_desc",
+ "description": "Updated at descending order",
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "name",
- "description": "The emoji name",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- },
- "defaultValue": null
+ "name": "updated_asc",
+ "description": "Updated at ascending order",
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
+ "name": "created_desc",
+ "description": "Created at descending order",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "created_asc",
+ "description": "Created at ascending order",
+ "isDeprecated": false,
+ "deprecationReason": null
}
],
+ "possibleTypes": null
+ },
+ {
+ "kind": "SCALAR",
+ "name": "String",
+ "description": "Represents textual data as UTF-8 character sequences. This type is most often used by GraphQL to represent free-form human-readable text.",
+ "fields": null,
+ "inputFields": null,
"interfaces": null,
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
- "name": "IssueSetConfidentialPayload",
- "description": "Autogenerated return type of IssueSetConfidential",
+ "name": "Submodule",
+ "description": null,
"fields": [
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
+ "name": "flatPath",
+ "description": "Flat path of the entry",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "errors",
- "description": "Reasons why the mutation failed.",
+ "name": "id",
+ "description": "ID of the entry",
"args": [
],
@@ -20079,67 +22995,38 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- }
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "issue",
- "description": "The issue after mutation",
+ "name": "name",
+ "description": "Name of the entry",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "Issue",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "INPUT_OBJECT",
- "name": "IssueSetConfidentialInput",
- "description": "Autogenerated input type of IssueSetConfidential",
- "fields": null,
- "inputFields": [
- {
- "name": "projectPath",
- "description": "The project the issue to mutate is in",
- "type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "ID",
+ "name": "String",
"ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "iid",
- "description": "The iid of the issue to mutate",
+ "name": "path",
+ "description": "Path of the entry",
+ "args": [
+
+ ],
"type": {
"kind": "NON_NULL",
"name": null,
@@ -20149,45 +23036,30 @@
"ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "confidential",
- "description": "Whether or not to set the issue as a confidential.",
+ "name": "sha",
+ "description": "Last commit sha for the entry",
+ "args": [
+
+ ],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "String",
"ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- }
- ],
- "interfaces": null,
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "IssueSetDueDatePayload",
- "description": "Autogenerated return type of IssueSetDueDate",
- "fields": [
- {
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
+ "name": "treeUrl",
+ "description": "Tree URL for the sub-module",
"args": [
],
@@ -20200,8 +23072,8 @@
"deprecationReason": null
},
{
- "name": "errors",
- "description": "Reasons why the mutation failed.",
+ "name": "type",
+ "description": "Type of tree entry",
"args": [
],
@@ -20209,31 +23081,23 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- }
+ "kind": "ENUM",
+ "name": "EntryType",
+ "ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "issue",
- "description": "The issue after mutation",
+ "name": "webUrl",
+ "description": "Web URL for the sub-module",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "Issue",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
@@ -20242,96 +23106,90 @@
],
"inputFields": null,
"interfaces": [
-
+ {
+ "kind": "INTERFACE",
+ "name": "Entry",
+ "ofType": null
+ }
],
"enumValues": null,
"possibleTypes": null
},
{
- "kind": "INPUT_OBJECT",
- "name": "IssueSetDueDateInput",
- "description": "Autogenerated input type of IssueSetDueDate",
- "fields": null,
- "inputFields": [
+ "kind": "OBJECT",
+ "name": "SubmoduleConnection",
+ "description": "The connection type for Submodule.",
+ "fields": [
{
- "name": "projectPath",
- "description": "The project the issue to mutate is in",
+ "name": "edges",
+ "description": "A list of edges.",
+ "args": [
+
+ ],
"type": {
- "kind": "NON_NULL",
+ "kind": "LIST",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "ID",
+ "kind": "OBJECT",
+ "name": "SubmoduleEdge",
"ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "iid",
- "description": "The iid of the issue to mutate",
+ "name": "nodes",
+ "description": "A list of nodes.",
+ "args": [
+
+ ],
"type": {
- "kind": "NON_NULL",
+ "kind": "LIST",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "OBJECT",
+ "name": "Submodule",
"ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "dueDate",
- "description": "The desired due date for the issue",
+ "name": "pageInfo",
+ "description": "Information to aid in pagination.",
+ "args": [
+
+ ],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Time",
+ "kind": "OBJECT",
+ "name": "PageInfo",
"ofType": null
}
},
- "defaultValue": null
- },
- {
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
}
],
- "interfaces": null,
+ "inputFields": null,
+ "interfaces": [
+
+ ],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
- "name": "UpdateIssuePayload",
- "description": "Autogenerated return type of UpdateIssue",
+ "name": "SubmoduleEdge",
+ "description": "An edge in a connection.",
"fields": [
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "errors",
- "description": "Reasons why the mutation failed.",
+ "name": "cursor",
+ "description": "A cursor for use in pagination.",
"args": [
],
@@ -20339,31 +23197,23 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "issue",
- "description": "The issue after mutation",
+ "name": "node",
+ "description": "The item at the end of the edge.",
"args": [
],
"type": {
"kind": "OBJECT",
- "name": "Issue",
+ "name": "Submodule",
"ofType": null
},
"isDeprecated": false,
@@ -20378,86 +23228,104 @@
"possibleTypes": null
},
{
- "kind": "INPUT_OBJECT",
- "name": "UpdateIssueInput",
- "description": "Autogenerated input type of UpdateIssue",
- "fields": null,
- "inputFields": [
+ "kind": "OBJECT",
+ "name": "TaskCompletionStatus",
+ "description": "Completion status of tasks",
+ "fields": [
{
- "name": "projectPath",
- "description": "The project the issue to mutate is in",
+ "name": "completedCount",
+ "description": "Number of completed tasks",
+ "args": [
+
+ ],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "ID",
+ "name": "Int",
"ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "iid",
- "description": "The iid of the issue to mutate",
+ "name": "count",
+ "description": "Number of total tasks",
+ "args": [
+
+ ],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Int",
"ofType": null
}
},
- "defaultValue": null
- },
- {
- "name": "healthStatus",
- "description": "The desired health status",
- "type": {
- "kind": "ENUM",
- "name": "HealthStatus",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
}
],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "SCALAR",
+ "name": "Time",
+ "description": "Time represented in ISO 8601",
+ "fields": null,
+ "inputFields": null,
"interfaces": null,
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
- "name": "MergeRequestSetLabelsPayload",
- "description": "Autogenerated return type of MergeRequestSetLabels",
+ "name": "Timelog",
+ "description": null,
"fields": [
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
+ "name": "date",
+ "description": "The date when the time tracked was spent at",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "issue",
+ "description": "The issue that logged time was added to",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "Issue",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "errors",
- "description": "Reasons why the mutation failed.",
+ "name": "timeSpent",
+ "description": "The time spent displayed in seconds",
"args": [
],
@@ -20465,32 +23333,28 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- }
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "mergeRequest",
- "description": "The merge request after mutation",
+ "name": "user",
+ "description": "The user that logged the time",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "MergeRequest",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "User",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
@@ -20504,137 +23368,80 @@
"possibleTypes": null
},
{
- "kind": "INPUT_OBJECT",
- "name": "MergeRequestSetLabelsInput",
- "description": "Autogenerated input type of MergeRequestSetLabels",
- "fields": null,
- "inputFields": [
+ "kind": "OBJECT",
+ "name": "TimelogConnection",
+ "description": "The connection type for Timelog.",
+ "fields": [
{
- "name": "projectPath",
- "description": "The project the merge request to mutate is in",
+ "name": "edges",
+ "description": "A list of edges.",
+ "args": [
+
+ ],
"type": {
- "kind": "NON_NULL",
+ "kind": "LIST",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "ID",
+ "kind": "OBJECT",
+ "name": "TimelogEdge",
"ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "iid",
- "description": "The iid of the merge request to mutate",
+ "name": "nodes",
+ "description": "A list of nodes.",
+ "args": [
+
+ ],
"type": {
- "kind": "NON_NULL",
+ "kind": "LIST",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "OBJECT",
+ "name": "Timelog",
"ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "labelIds",
- "description": "The Label IDs to set. Replaces existing labels by default.\n",
+ "name": "pageInfo",
+ "description": "Information to aid in pagination.",
+ "args": [
+
+ ],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- }
+ "kind": "OBJECT",
+ "name": "PageInfo",
+ "ofType": null
}
},
- "defaultValue": null
- },
- {
- "name": "operationMode",
- "description": "Changes the operation mode. Defaults to REPLACE.\n",
- "type": {
- "kind": "ENUM",
- "name": "MutationOperationMode",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- }
- ],
- "interfaces": null,
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "ENUM",
- "name": "MutationOperationMode",
- "description": "Different toggles for changing mutator behavior.",
- "fields": null,
- "inputFields": null,
- "interfaces": null,
- "enumValues": [
- {
- "name": "REPLACE",
- "description": "Performs a replace operation",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "APPEND",
- "description": "Performs an append operation",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "REMOVE",
- "description": "Performs a removal operation",
"isDeprecated": false,
"deprecationReason": null
}
],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
- "name": "MergeRequestSetLockedPayload",
- "description": "Autogenerated return type of MergeRequestSetLocked",
+ "name": "TimelogEdge",
+ "description": "An edge in a connection.",
"fields": [
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "errors",
- "description": "Reasons why the mutation failed.",
+ "name": "cursor",
+ "description": "A cursor for use in pagination.",
"args": [
],
@@ -20642,31 +23449,23 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "mergeRequest",
- "description": "The merge request after mutation",
+ "name": "node",
+ "description": "The item at the end of the edge.",
"args": [
],
"type": {
"kind": "OBJECT",
- "name": "MergeRequest",
+ "name": "Timelog",
"ofType": null
},
"isDeprecated": false,
@@ -20681,90 +23480,99 @@
"possibleTypes": null
},
{
- "kind": "INPUT_OBJECT",
- "name": "MergeRequestSetLockedInput",
- "description": "Autogenerated input type of MergeRequestSetLocked",
- "fields": null,
- "inputFields": [
+ "kind": "OBJECT",
+ "name": "Todo",
+ "description": "Representing a todo entry",
+ "fields": [
{
- "name": "projectPath",
- "description": "The project the merge request to mutate is in",
+ "name": "action",
+ "description": "Action of the todo",
+ "args": [
+
+ ],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "ID",
+ "kind": "ENUM",
+ "name": "TodoActionEnum",
"ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "iid",
- "description": "The iid of the merge request to mutate",
+ "name": "author",
+ "description": "The owner of this todo",
+ "args": [
+
+ ],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "OBJECT",
+ "name": "User",
"ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "locked",
- "description": "Whether or not to lock the merge request.\n",
+ "name": "body",
+ "description": "Body of the todo",
+ "args": [
+
+ ],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "String",
"ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
+ "name": "createdAt",
+ "description": "Timestamp this todo was created",
+ "args": [
+
+ ],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ }
},
- "defaultValue": null
- }
- ],
- "interfaces": null,
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "MergeRequestSetMilestonePayload",
- "description": "Autogenerated return type of MergeRequestSetMilestone",
- "fields": [
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
+ "name": "group",
+ "description": "Group this todo is associated with",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "OBJECT",
+ "name": "Group",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "errors",
- "description": "Reasons why the mutation failed.",
+ "name": "id",
+ "description": "Id of the todo",
"args": [
],
@@ -20772,158 +23580,180 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- }
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "mergeRequest",
- "description": "The merge request after mutation",
+ "name": "project",
+ "description": "The project this todo is associated with",
"args": [
],
"type": {
"kind": "OBJECT",
- "name": "MergeRequest",
+ "name": "Project",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "INPUT_OBJECT",
- "name": "MergeRequestSetMilestoneInput",
- "description": "Autogenerated input type of MergeRequestSetMilestone",
- "fields": null,
- "inputFields": [
+ },
{
- "name": "projectPath",
- "description": "The project the merge request to mutate is in",
+ "name": "state",
+ "description": "State of the todo",
+ "args": [
+
+ ],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "ID",
+ "kind": "ENUM",
+ "name": "TodoStateEnum",
"ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "iid",
- "description": "The iid of the merge request to mutate",
+ "name": "targetType",
+ "description": "Target type of the todo",
+ "args": [
+
+ ],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "ENUM",
+ "name": "TodoTargetEnum",
"ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "ENUM",
+ "name": "TodoActionEnum",
+ "description": null,
+ "fields": null,
+ "inputFields": null,
+ "interfaces": null,
+ "enumValues": [
+ {
+ "name": "assigned",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "milestoneId",
- "description": "The milestone to assign to the merge request.\n",
- "type": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- },
- "defaultValue": null
+ "name": "mentioned",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
+ "name": "build_failed",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "marked",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "approval_required",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "unmergeable",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "directly_addressed",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
}
],
- "interfaces": null,
- "enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
- "name": "MergeRequestSetSubscriptionPayload",
- "description": "Autogenerated return type of MergeRequestSetSubscription",
+ "name": "TodoConnection",
+ "description": "The connection type for Todo.",
"fields": [
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
+ "name": "edges",
+ "description": "A list of edges.",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "TodoEdge",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "errors",
- "description": "Reasons why the mutation failed.",
+ "name": "nodes",
+ "description": "A list of nodes.",
"args": [
],
"type": {
- "kind": "NON_NULL",
+ "kind": "LIST",
"name": null,
"ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- }
+ "kind": "OBJECT",
+ "name": "Todo",
+ "ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "mergeRequest",
- "description": "The merge request after mutation",
+ "name": "pageInfo",
+ "description": "Information to aid in pagination.",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "MergeRequest",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "PageInfo",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
@@ -20937,48 +23767,65 @@
"possibleTypes": null
},
{
- "kind": "INPUT_OBJECT",
- "name": "MergeRequestSetSubscriptionInput",
- "description": "Autogenerated input type of MergeRequestSetSubscription",
- "fields": null,
- "inputFields": [
+ "kind": "OBJECT",
+ "name": "TodoEdge",
+ "description": "An edge in a connection.",
+ "fields": [
{
- "name": "projectPath",
- "description": "The project the merge request to mutate is in",
+ "name": "cursor",
+ "description": "A cursor for use in pagination.",
+ "args": [
+
+ ],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "ID",
+ "name": "String",
"ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "iid",
- "description": "The iid of the merge request to mutate",
+ "name": "node",
+ "description": "The item at the end of the edge.",
+ "args": [
+
+ ],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
+ "kind": "OBJECT",
+ "name": "Todo",
+ "ofType": null
},
- "defaultValue": null
- },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "INPUT_OBJECT",
+ "name": "TodoMarkDoneInput",
+ "description": "Autogenerated input type of TodoMarkDone",
+ "fields": null,
+ "inputFields": [
{
- "name": "subscribedState",
- "description": "The desired state of the subscription",
+ "name": "id",
+ "description": "The global id of the todo to mark as done",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Boolean",
+ "name": "ID",
"ofType": null
}
},
@@ -21001,8 +23848,8 @@
},
{
"kind": "OBJECT",
- "name": "MergeRequestSetWipPayload",
- "description": "Autogenerated return type of MergeRequestSetWip",
+ "name": "TodoMarkDonePayload",
+ "description": "Autogenerated return type of TodoMarkDone",
"fields": [
{
"name": "clientMutationId",
@@ -21045,15 +23892,19 @@
"deprecationReason": null
},
{
- "name": "mergeRequest",
- "description": "The merge request after mutation",
+ "name": "todo",
+ "description": "The requested todo",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "MergeRequest",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "Todo",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
@@ -21068,13 +23919,13 @@
},
{
"kind": "INPUT_OBJECT",
- "name": "MergeRequestSetWipInput",
- "description": "Autogenerated input type of MergeRequestSetWip",
+ "name": "TodoRestoreInput",
+ "description": "Autogenerated input type of TodoRestore",
"fields": null,
"inputFields": [
{
- "name": "projectPath",
- "description": "The project the merge request to mutate is in",
+ "name": "id",
+ "description": "The global id of the todo to restore",
"type": {
"kind": "NON_NULL",
"name": null,
@@ -21087,29 +23938,44 @@
"defaultValue": null
},
{
- "name": "iid",
- "description": "The iid of the merge request to mutate",
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
},
"defaultValue": null
- },
+ }
+ ],
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "INPUT_OBJECT",
+ "name": "TodoRestoreManyInput",
+ "description": "Autogenerated input type of TodoRestoreMany",
+ "fields": null,
+ "inputFields": [
{
- "name": "wip",
- "description": "Whether or not to set the merge request as a WIP.\n",
+ "name": "ids",
+ "description": "The global ids of the todos to restore (a maximum of 50 is supported at once)",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ }
}
},
"defaultValue": null
@@ -21131,8 +23997,8 @@
},
{
"kind": "OBJECT",
- "name": "MergeRequestSetAssigneesPayload",
- "description": "Autogenerated return type of MergeRequestSetAssignees",
+ "name": "TodoRestoreManyPayload",
+ "description": "Autogenerated return type of TodoRestoreMany",
"fields": [
{
"name": "clientMutationId",
@@ -21175,65 +24041,12 @@
"deprecationReason": null
},
{
- "name": "mergeRequest",
- "description": "The merge request after mutation",
+ "name": "updatedIds",
+ "description": "The ids of the updated todo items",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "MergeRequest",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "INPUT_OBJECT",
- "name": "MergeRequestSetAssigneesInput",
- "description": "Autogenerated input type of MergeRequestSetAssignees",
- "fields": null,
- "inputFields": [
- {
- "name": "projectPath",
- "description": "The project the merge request to mutate is in",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- },
- "defaultValue": null
- },
- {
- "name": "iid",
- "description": "The iid of the merge request to mutate",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- },
- "defaultValue": null
- },
- {
- "name": "assigneeUsernames",
- "description": "The usernames to assign to the merge request. Replaces existing assignees by default.\n",
- "type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
@@ -21244,43 +24057,27 @@
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "ID",
"ofType": null
}
}
}
},
- "defaultValue": null
- },
- {
- "name": "operationMode",
- "description": "The operation to perform. Defaults to REPLACE.\n",
- "type": {
- "kind": "ENUM",
- "name": "MutationOperationMode",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
}
],
- "interfaces": null,
+ "inputFields": null,
+ "interfaces": [
+
+ ],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
- "name": "CreateNotePayload",
- "description": "Autogenerated return type of CreateNote",
+ "name": "TodoRestorePayload",
+ "description": "Autogenerated return type of TodoRestore",
"fields": [
{
"name": "clientMutationId",
@@ -21323,15 +24120,19 @@
"deprecationReason": null
},
{
- "name": "note",
- "description": "The note after mutation",
+ "name": "todo",
+ "description": "The requested todo",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "Note",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "Todo",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
@@ -21345,50 +24146,76 @@
"possibleTypes": null
},
{
- "kind": "INPUT_OBJECT",
- "name": "CreateNoteInput",
- "description": "Autogenerated input type of CreateNote",
+ "kind": "ENUM",
+ "name": "TodoStateEnum",
+ "description": null,
"fields": null,
- "inputFields": [
+ "inputFields": null,
+ "interfaces": null,
+ "enumValues": [
{
- "name": "noteableId",
- "description": "The global id of the resource to add a note to",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- },
- "defaultValue": null
+ "name": "pending",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "body",
- "description": "Content of the note",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- },
- "defaultValue": null
+ "name": "done",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "possibleTypes": null
+ },
+ {
+ "kind": "ENUM",
+ "name": "TodoTargetEnum",
+ "description": null,
+ "fields": null,
+ "inputFields": null,
+ "interfaces": null,
+ "enumValues": [
+ {
+ "name": "COMMIT",
+ "description": "A Commit",
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "discussionId",
- "description": "The global id of the discussion this note is in reply to",
- "type": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- },
- "defaultValue": null
+ "name": "ISSUE",
+ "description": "An Issue",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "MERGEREQUEST",
+ "description": "A MergeRequest",
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
+ "name": "DESIGN",
+ "description": "A Design",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "EPIC",
+ "description": "An Epic",
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "possibleTypes": null
+ },
+ {
+ "kind": "INPUT_OBJECT",
+ "name": "TodosMarkAllDoneInput",
+ "description": "Autogenerated input type of TodosMarkAllDone",
+ "fields": null,
+ "inputFields": [
+ {
"name": "clientMutationId",
"description": "A unique identifier for the client performing the mutation.",
"type": {
@@ -21405,8 +24232,8 @@
},
{
"kind": "OBJECT",
- "name": "CreateDiffNotePayload",
- "description": "Autogenerated return type of CreateDiffNote",
+ "name": "TodosMarkAllDonePayload",
+ "description": "Autogenerated return type of TodosMarkAllDone",
"fields": [
{
"name": "clientMutationId",
@@ -21449,15 +24276,27 @@
"deprecationReason": null
},
{
- "name": "note",
- "description": "The note after mutation",
+ "name": "updatedIds",
+ "description": "Ids of the updated todos",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "Note",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ }
+ }
},
"isDeprecated": false,
"deprecationReason": null
@@ -21472,13 +24311,13 @@
},
{
"kind": "INPUT_OBJECT",
- "name": "CreateDiffNoteInput",
- "description": "Autogenerated input type of CreateDiffNote",
+ "name": "ToggleAwardEmojiInput",
+ "description": "Autogenerated input type of ToggleAwardEmoji",
"fields": null,
"inputFields": [
{
- "name": "noteableId",
- "description": "The global id of the resource to add a note to",
+ "name": "awardableId",
+ "description": "The global id of the awardable resource",
"type": {
"kind": "NON_NULL",
"name": null,
@@ -21491,8 +24330,8 @@
"defaultValue": null
},
{
- "name": "body",
- "description": "Content of the note",
+ "name": "name",
+ "description": "The emoji name",
"type": {
"kind": "NON_NULL",
"name": null,
@@ -21505,20 +24344,6 @@
"defaultValue": null
},
{
- "name": "position",
- "description": "The position of this note on a diff",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "DiffPositionInput",
- "ofType": null
- }
- },
- "defaultValue": null
- },
- {
"name": "clientMutationId",
"description": "A unique identifier for the client performing the mutation.",
"type": {
@@ -21534,129 +24359,25 @@
"possibleTypes": null
},
{
- "kind": "INPUT_OBJECT",
- "name": "DiffPositionInput",
- "description": null,
- "fields": null,
- "inputFields": [
- {
- "name": "headSha",
- "description": "SHA of the HEAD at the time the comment was made",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- },
- "defaultValue": null
- },
- {
- "name": "baseSha",
- "description": "Merge base of the branch the comment was made on",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "startSha",
- "description": "SHA of the branch being compared against",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- },
- "defaultValue": null
- },
- {
- "name": "paths",
- "description": "The paths of the file that was changed. Both of the properties of this input are optional, but at least one of them is required",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "DiffPathsInput",
- "ofType": null
- }
- },
- "defaultValue": null
- },
- {
- "name": "oldLine",
- "description": "Line on start SHA that was changed",
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "newLine",
- "description": "Line on HEAD SHA that was changed",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- }
- },
- "defaultValue": null
- }
- ],
- "interfaces": null,
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "INPUT_OBJECT",
- "name": "DiffPathsInput",
- "description": null,
- "fields": null,
- "inputFields": [
+ "kind": "OBJECT",
+ "name": "ToggleAwardEmojiPayload",
+ "description": "Autogenerated return type of ToggleAwardEmoji",
+ "fields": [
{
- "name": "oldPath",
- "description": "The path of the file on the start sha",
+ "name": "awardEmoji",
+ "description": "The award emoji after mutation",
+ "args": [
+
+ ],
"type": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "OBJECT",
+ "name": "AwardEmoji",
"ofType": null
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "newPath",
- "description": "The path of the file on the head sha",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- }
- ],
- "interfaces": null,
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "CreateImageDiffNotePayload",
- "description": "Autogenerated return type of CreateImageDiffNote",
- "fields": [
- {
"name": "clientMutationId",
"description": "A unique identifier for the client performing the mutation.",
"args": [
@@ -21697,15 +24418,19 @@
"deprecationReason": null
},
{
- "name": "note",
- "description": "The note after mutation",
+ "name": "toggledOn",
+ "description": "Indicates the status of the emoji. True if the toggle awarded the emoji, and false if the toggle removed the emoji.",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "Note",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
@@ -21719,101 +24444,214 @@
"possibleTypes": null
},
{
- "kind": "INPUT_OBJECT",
- "name": "CreateImageDiffNoteInput",
- "description": "Autogenerated input type of CreateImageDiffNote",
- "fields": null,
- "inputFields": [
+ "kind": "OBJECT",
+ "name": "Tree",
+ "description": null,
+ "fields": [
{
- "name": "noteableId",
- "description": "The global id of the resource to add a note to",
+ "name": "blobs",
+ "description": "Blobs of the tree",
+ "args": [
+ {
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "first",
+ "description": "Returns the first _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "last",
+ "description": "Returns the last _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "ID",
+ "kind": "OBJECT",
+ "name": "BlobConnection",
"ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "body",
- "description": "Content of the note",
+ "name": "lastCommit",
+ "description": "Last commit for the tree",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "Commit",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "submodules",
+ "description": "Sub-modules of the tree",
+ "args": [
+ {
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "first",
+ "description": "Returns the first _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "last",
+ "description": "Returns the last _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "OBJECT",
+ "name": "SubmoduleConnection",
"ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "position",
- "description": "The position of this note on a diff",
+ "name": "trees",
+ "description": "Trees of the tree",
+ "args": [
+ {
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "first",
+ "description": "Returns the first _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "last",
+ "description": "Returns the last _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "INPUT_OBJECT",
- "name": "DiffImagePositionInput",
+ "kind": "OBJECT",
+ "name": "TreeEntryConnection",
"ofType": null
}
},
- "defaultValue": null
- },
- {
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
}
],
- "interfaces": null,
+ "inputFields": null,
+ "interfaces": [
+
+ ],
"enumValues": null,
"possibleTypes": null
},
{
- "kind": "INPUT_OBJECT",
- "name": "DiffImagePositionInput",
- "description": null,
- "fields": null,
- "inputFields": [
- {
- "name": "headSha",
- "description": "SHA of the HEAD at the time the comment was made",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- },
- "defaultValue": null
- },
- {
- "name": "baseSha",
- "description": "Merge base of the branch the comment was made on",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
+ "kind": "OBJECT",
+ "name": "TreeEntry",
+ "description": "Represents a directory",
+ "fields": [
{
- "name": "startSha",
- "description": "SHA of the branch being compared against",
+ "name": "flatPath",
+ "description": "Flat path of the entry",
+ "args": [
+
+ ],
"type": {
"kind": "NON_NULL",
"name": null,
@@ -21823,105 +24661,84 @@
"ofType": null
}
},
- "defaultValue": null
- },
- {
- "name": "paths",
- "description": "The paths of the file that was changed. Both of the properties of this input are optional, but at least one of them is required",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "DiffPathsInput",
- "ofType": null
- }
- },
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "x",
- "description": "X position of the note",
+ "name": "id",
+ "description": "ID of the entry",
+ "args": [
+
+ ],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Int",
+ "name": "ID",
"ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "y",
- "description": "Y position of the note",
+ "name": "name",
+ "description": "Name of the entry",
+ "args": [
+
+ ],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Int",
+ "name": "String",
"ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "width",
- "description": "Total width of the image",
+ "name": "path",
+ "description": "Path of the entry",
+ "args": [
+
+ ],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Int",
+ "name": "String",
"ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "height",
- "description": "Total height of the image",
+ "name": "sha",
+ "description": "Last commit sha for the entry",
+ "args": [
+
+ ],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "Int",
+ "name": "String",
"ofType": null
}
},
- "defaultValue": null
- }
- ],
- "interfaces": null,
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "UpdateNotePayload",
- "description": "Autogenerated return type of UpdateNote",
- "fields": [
- {
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "errors",
- "description": "Reasons why the mutation failed.",
+ "name": "type",
+ "description": "Type of tree entry",
"args": [
],
@@ -21929,31 +24746,23 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- }
+ "kind": "ENUM",
+ "name": "EntryType",
+ "ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "note",
- "description": "The note after mutation",
+ "name": "webUrl",
+ "description": "Web URL for the tree entry (directory)",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "Note",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
@@ -21962,82 +24771,90 @@
],
"inputFields": null,
"interfaces": [
-
+ {
+ "kind": "INTERFACE",
+ "name": "Entry",
+ "ofType": null
+ }
],
"enumValues": null,
"possibleTypes": null
},
{
- "kind": "INPUT_OBJECT",
- "name": "UpdateNoteInput",
- "description": "Autogenerated input type of UpdateNote",
- "fields": null,
- "inputFields": [
+ "kind": "OBJECT",
+ "name": "TreeEntryConnection",
+ "description": "The connection type for TreeEntry.",
+ "fields": [
{
- "name": "id",
- "description": "The global id of the note to update",
+ "name": "edges",
+ "description": "A list of edges.",
+ "args": [
+
+ ],
"type": {
- "kind": "NON_NULL",
+ "kind": "LIST",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "ID",
+ "kind": "OBJECT",
+ "name": "TreeEntryEdge",
"ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "body",
- "description": "Content of the note",
+ "name": "nodes",
+ "description": "A list of nodes.",
+ "args": [
+
+ ],
"type": {
- "kind": "NON_NULL",
+ "kind": "LIST",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "OBJECT",
+ "name": "TreeEntry",
"ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
+ "name": "pageInfo",
+ "description": "Information to aid in pagination.",
+ "args": [
+
+ ],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "PageInfo",
+ "ofType": null
+ }
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
}
],
- "interfaces": null,
+ "inputFields": null,
+ "interfaces": [
+
+ ],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
- "name": "UpdateImageDiffNotePayload",
- "description": "Autogenerated return type of UpdateImageDiffNote",
+ "name": "TreeEntryEdge",
+ "description": "An edge in a connection.",
"fields": [
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "errors",
- "description": "Reasons why the mutation failed.",
+ "name": "cursor",
+ "description": "A cursor for use in pagination.",
"args": [
],
@@ -22045,31 +24862,23 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "note",
- "description": "The note after mutation",
+ "name": "node",
+ "description": "The item at the end of the edge.",
"args": [
],
"type": {
"kind": "OBJECT",
- "name": "Note",
+ "name": "TreeEntry",
"ofType": null
},
"isDeprecated": false,
@@ -22084,58 +24893,26 @@
"possibleTypes": null
},
{
- "kind": "INPUT_OBJECT",
- "name": "UpdateImageDiffNoteInput",
- "description": "Autogenerated input type of UpdateImageDiffNote",
+ "kind": "ENUM",
+ "name": "TypeEnum",
+ "description": null,
"fields": null,
- "inputFields": [
- {
- "name": "id",
- "description": "The global id of the note to update",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- },
- "defaultValue": null
- },
- {
- "name": "body",
- "description": "Content of the note",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
+ "inputFields": null,
+ "interfaces": null,
+ "enumValues": [
{
- "name": "position",
- "description": "The position of this note on a diff",
- "type": {
- "kind": "INPUT_OBJECT",
- "name": "UpdateDiffImagePositionInput",
- "ofType": null
- },
- "defaultValue": null
+ "name": "personal",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
+ "name": "project",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
}
],
- "interfaces": null,
- "enumValues": null,
"possibleTypes": null
},
{
@@ -22190,195 +24967,142 @@
"possibleTypes": null
},
{
- "kind": "OBJECT",
- "name": "DestroyNotePayload",
- "description": "Autogenerated return type of DestroyNote",
- "fields": [
+ "kind": "INPUT_OBJECT",
+ "name": "UpdateEpicInput",
+ "description": "Autogenerated input type of UpdateEpic",
+ "fields": null,
+ "inputFields": [
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
- "args": [
-
- ],
+ "name": "iid",
+ "description": "The iid of the epic to mutate",
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "errors",
- "description": "Reasons why the mutation failed.",
- "args": [
-
- ],
+ "name": "groupPath",
+ "description": "The group the epic to mutate is in",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- }
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
}
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "note",
- "description": "The note after mutation",
- "args": [
-
- ],
+ "name": "title",
+ "description": "The title of the epic",
"type": {
- "kind": "OBJECT",
- "name": "Note",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
},
- "isDeprecated": false,
- "deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "INPUT_OBJECT",
- "name": "DestroyNoteInput",
- "description": "Autogenerated input type of DestroyNote",
- "fields": null,
- "inputFields": [
+ "defaultValue": null
+ },
{
- "name": "id",
- "description": "The global id of the note to destroy",
+ "name": "description",
+ "description": "The description of the epic",
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
},
"defaultValue": null
},
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
+ "name": "startDateFixed",
+ "description": "The start date of the epic",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
- }
- ],
- "interfaces": null,
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "TodoMarkDonePayload",
- "description": "Autogenerated return type of TodoMarkDone",
- "fields": [
+ },
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
- "args": [
-
- ],
+ "name": "dueDateFixed",
+ "description": "The end date of the epic",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "errors",
- "description": "Reasons why the mutation failed.",
- "args": [
-
- ],
+ "name": "startDateIsFixed",
+ "description": "Indicates start date should be sourced from start_date_fixed field not the issue milestones",
"type": {
- "kind": "NON_NULL",
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "dueDateIsFixed",
+ "description": "Indicates end date should be sourced from due_date_fixed field not the issue milestones",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "addLabelIds",
+ "description": "The IDs of labels to be added to the epic.",
+ "type": {
+ "kind": "LIST",
"name": null,
"ofType": {
- "kind": "LIST",
+ "kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
}
}
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "todo",
- "description": "The requested todo",
- "args": [
-
- ],
+ "name": "removeLabelIds",
+ "description": "The IDs of labels to be removed from the epic.",
"type": {
- "kind": "NON_NULL",
+ "kind": "LIST",
"name": null,
"ofType": {
- "kind": "OBJECT",
- "name": "Todo",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
}
},
- "isDeprecated": false,
- "deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "INPUT_OBJECT",
- "name": "TodoMarkDoneInput",
- "description": "Autogenerated input type of TodoMarkDone",
- "fields": null,
- "inputFields": [
+ "defaultValue": null
+ },
{
- "name": "id",
- "description": "The global id of the todo to mark as done",
+ "name": "stateEvent",
+ "description": "State event for the epic",
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
+ "kind": "ENUM",
+ "name": "EpicStateEvent",
+ "ofType": null
},
"defaultValue": null
},
@@ -22399,8 +25123,8 @@
},
{
"kind": "OBJECT",
- "name": "TodoRestorePayload",
- "description": "Autogenerated return type of TodoRestore",
+ "name": "UpdateEpicPayload",
+ "description": "Autogenerated return type of UpdateEpic",
"fields": [
{
"name": "clientMutationId",
@@ -22417,6 +25141,20 @@
"deprecationReason": null
},
{
+ "name": "epic",
+ "description": "The epic after mutation",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "Epic",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
"name": "errors",
"description": "Reasons why the mutation failed.",
"args": [
@@ -22441,24 +25179,6 @@
},
"isDeprecated": false,
"deprecationReason": null
- },
- {
- "name": "todo",
- "description": "The requested todo",
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "Todo",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
}
],
"inputFields": null,
@@ -22470,13 +25190,13 @@
},
{
"kind": "INPUT_OBJECT",
- "name": "TodoRestoreInput",
- "description": "Autogenerated input type of TodoRestore",
+ "name": "UpdateImageDiffNoteInput",
+ "description": "Autogenerated input type of UpdateImageDiffNote",
"fields": null,
"inputFields": [
{
"name": "id",
- "description": "The global id of the todo to restore",
+ "description": "The global id of the note to update",
"type": {
"kind": "NON_NULL",
"name": null,
@@ -22489,6 +25209,26 @@
"defaultValue": null
},
{
+ "name": "body",
+ "description": "Content of the note",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "position",
+ "description": "The position of this note on a diff",
+ "type": {
+ "kind": "INPUT_OBJECT",
+ "name": "UpdateDiffImagePositionInput",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
"name": "clientMutationId",
"description": "A unique identifier for the client performing the mutation.",
"type": {
@@ -22505,8 +25245,8 @@
},
{
"kind": "OBJECT",
- "name": "TodosMarkAllDonePayload",
- "description": "Autogenerated return type of TodosMarkAllDone",
+ "name": "UpdateImageDiffNotePayload",
+ "description": "Autogenerated return type of UpdateImageDiffNote",
"fields": [
{
"name": "clientMutationId",
@@ -22549,27 +25289,15 @@
"deprecationReason": null
},
{
- "name": "updatedIds",
- "description": "Ids of the updated todos",
+ "name": "note",
+ "description": "The note after mutation",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- }
- }
+ "kind": "OBJECT",
+ "name": "Note",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
@@ -22584,129 +25312,85 @@
},
{
"kind": "INPUT_OBJECT",
- "name": "TodosMarkAllDoneInput",
- "description": "Autogenerated input type of TodosMarkAllDone",
+ "name": "UpdateIssueInput",
+ "description": "Autogenerated input type of UpdateIssue",
"fields": null,
"inputFields": [
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
+ "name": "projectPath",
+ "description": "The project the issue to mutate is in",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "iid",
+ "description": "The iid of the issue to mutate",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "title",
+ "description": "Title of the issue",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
- }
- ],
- "interfaces": null,
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "TodoRestoreManyPayload",
- "description": "Autogenerated return type of TodoRestoreMany",
- "fields": [
+ },
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
- "args": [
-
- ],
+ "name": "description",
+ "description": "Description of the issue",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "errors",
- "description": "Reasons why the mutation failed.",
- "args": [
-
- ],
+ "name": "dueDate",
+ "description": "Due date of the issue",
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- }
- }
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
},
- "isDeprecated": false,
- "deprecationReason": null
+ "defaultValue": null
},
{
- "name": "updatedIds",
- "description": "The ids of the updated todo items",
- "args": [
-
- ],
+ "name": "confidential",
+ "description": "Indicates the issue is confidential",
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- }
- }
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
},
- "isDeprecated": false,
- "deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "INPUT_OBJECT",
- "name": "TodoRestoreManyInput",
- "description": "Autogenerated input type of TodoRestoreMany",
- "fields": null,
- "inputFields": [
+ "defaultValue": null
+ },
{
- "name": "ids",
- "description": "The global ids of the todos to restore (a maximum of 50 is supported at once)",
+ "name": "healthStatus",
+ "description": "The desired health status",
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- }
- }
+ "kind": "ENUM",
+ "name": "HealthStatus",
+ "ofType": null
},
"defaultValue": null
},
@@ -22727,8 +25411,8 @@
},
{
"kind": "OBJECT",
- "name": "DestroySnippetPayload",
- "description": "Autogenerated return type of DestroySnippet",
+ "name": "UpdateIssuePayload",
+ "description": "Autogenerated return type of UpdateIssue",
"fields": [
{
"name": "clientMutationId",
@@ -22771,14 +25455,14 @@
"deprecationReason": null
},
{
- "name": "snippet",
- "description": "The snippet after mutation",
+ "name": "issue",
+ "description": "The issue after mutation",
"args": [
],
"type": {
"kind": "OBJECT",
- "name": "Snippet",
+ "name": "Issue",
"ofType": null
},
"isDeprecated": false,
@@ -22794,13 +25478,13 @@
},
{
"kind": "INPUT_OBJECT",
- "name": "DestroySnippetInput",
- "description": "Autogenerated input type of DestroySnippet",
+ "name": "UpdateNoteInput",
+ "description": "Autogenerated input type of UpdateNote",
"fields": null,
"inputFields": [
{
"name": "id",
- "description": "The global id of the snippet to destroy",
+ "description": "The global id of the note to update",
"type": {
"kind": "NON_NULL",
"name": null,
@@ -22813,6 +25497,20 @@
"defaultValue": null
},
{
+ "name": "body",
+ "description": "Content of the note",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
"name": "clientMutationId",
"description": "A unique identifier for the client performing the mutation.",
"type": {
@@ -22829,8 +25527,8 @@
},
{
"kind": "OBJECT",
- "name": "UpdateSnippetPayload",
- "description": "Autogenerated return type of UpdateSnippet",
+ "name": "UpdateNotePayload",
+ "description": "Autogenerated return type of UpdateNote",
"fields": [
{
"name": "clientMutationId",
@@ -22873,14 +25571,14 @@
"deprecationReason": null
},
{
- "name": "snippet",
- "description": "The snippet after mutation",
+ "name": "note",
+ "description": "The note after mutation",
"args": [
],
"type": {
"kind": "OBJECT",
- "name": "Snippet",
+ "name": "Note",
"ofType": null
},
"isDeprecated": false,
@@ -22981,8 +25679,8 @@
},
{
"kind": "OBJECT",
- "name": "CreateSnippetPayload",
- "description": "Autogenerated return type of CreateSnippet",
+ "name": "UpdateSnippetPayload",
+ "description": "Autogenerated return type of UpdateSnippet",
"fields": [
{
"name": "clientMutationId",
@@ -23047,38 +25745,40 @@
"possibleTypes": null
},
{
- "kind": "INPUT_OBJECT",
- "name": "CreateSnippetInput",
- "description": "Autogenerated input type of CreateSnippet",
+ "kind": "SCALAR",
+ "name": "Upload",
+ "description": null,
"fields": null,
- "inputFields": [
- {
- "name": "title",
- "description": "Title of the snippet",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- },
- "defaultValue": null
- },
+ "inputFields": null,
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "User",
+ "description": null,
+ "fields": [
{
- "name": "fileName",
- "description": "File name of the snippet",
+ "name": "avatarUrl",
+ "description": "URL of the user's avatar",
+ "args": [
+
+ ],
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "content",
- "description": "Content of the snippet",
+ "name": "name",
+ "description": "Human-readable name of the user",
+ "args": [
+
+ ],
"type": {
"kind": "NON_NULL",
"name": null,
@@ -23088,79 +25788,286 @@
"ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "description",
- "description": "Description of the snippet",
+ "name": "snippets",
+ "description": "Snippets authored by the user",
+ "args": [
+ {
+ "name": "ids",
+ "description": "Array of global snippet ids, e.g., \"gid://gitlab/ProjectSnippet/1\"",
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "visibility",
+ "description": "The visibility of the snippet",
+ "type": {
+ "kind": "ENUM",
+ "name": "VisibilityScopesEnum",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "type",
+ "description": "The type of snippet",
+ "type": {
+ "kind": "ENUM",
+ "name": "TypeEnum",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "first",
+ "description": "Returns the first _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "last",
+ "description": "Returns the last _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
"type": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "OBJECT",
+ "name": "SnippetConnection",
"ofType": null
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "visibilityLevel",
- "description": "The visibility level of the snippet",
+ "name": "todos",
+ "description": "Todos of the user",
+ "args": [
+ {
+ "name": "action",
+ "description": "The action to be filtered",
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "ENUM",
+ "name": "TodoActionEnum",
+ "ofType": null
+ }
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "authorId",
+ "description": "The ID of an author",
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "projectId",
+ "description": "The ID of a project",
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "groupId",
+ "description": "The ID of a group",
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "state",
+ "description": "The state of the todo",
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "ENUM",
+ "name": "TodoStateEnum",
+ "ofType": null
+ }
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "type",
+ "description": "The type of the todo",
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "ENUM",
+ "name": "TodoTargetEnum",
+ "ofType": null
+ }
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "first",
+ "description": "Returns the first _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "last",
+ "description": "Returns the last _n_ elements from the list.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "ENUM",
- "name": "VisibilityLevelsEnum",
+ "kind": "OBJECT",
+ "name": "TodoConnection",
"ofType": null
}
},
- "defaultValue": null
- },
- {
- "name": "projectPath",
- "description": "The project full path the snippet is associated with",
- "type": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- },
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- }
- ],
- "interfaces": null,
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "MarkAsSpamSnippetPayload",
- "description": "Autogenerated return type of MarkAsSpamSnippet",
- "fields": [
- {
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
+ "name": "userPermissions",
+ "description": "Permissions for the current user on the resource",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "UserPermissions",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "errors",
- "description": "Reasons why the mutation failed.",
+ "name": "username",
+ "description": "Username of the user. Unique within this instance of GitLab",
"args": [
],
@@ -23168,32 +26075,28 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "snippet",
- "description": "The snippet after mutation",
+ "name": "webUrl",
+ "description": "Web URL of the user",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "Snippet",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
@@ -23207,62 +26110,49 @@
"possibleTypes": null
},
{
- "kind": "INPUT_OBJECT",
- "name": "MarkAsSpamSnippetInput",
- "description": "Autogenerated input type of MarkAsSpamSnippet",
- "fields": null,
- "inputFields": [
+ "kind": "OBJECT",
+ "name": "UserConnection",
+ "description": "The connection type for User.",
+ "fields": [
{
- "name": "id",
- "description": "The global id of the snippet to update",
+ "name": "edges",
+ "description": "A list of edges.",
+ "args": [
+
+ ],
"type": {
- "kind": "NON_NULL",
+ "kind": "LIST",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "ID",
+ "kind": "OBJECT",
+ "name": "UserEdge",
"ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- }
- ],
- "interfaces": null,
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "DesignManagementUploadPayload",
- "description": "Autogenerated return type of DesignManagementUpload",
- "fields": [
- {
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
+ "name": "nodes",
+ "description": "A list of nodes.",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "User",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "designs",
- "description": "The designs that were uploaded by the mutation",
+ "name": "pageInfo",
+ "description": "Information to aid in pagination.",
"args": [
],
@@ -23270,25 +26160,30 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "Design",
- "ofType": null
- }
- }
+ "kind": "OBJECT",
+ "name": "PageInfo",
+ "ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
- },
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "UserEdge",
+ "description": "An edge in a connection.",
+ "fields": [
{
- "name": "errors",
- "description": "Reasons why the mutation failed.",
+ "name": "cursor",
+ "description": "A cursor for use in pagination.",
"args": [
],
@@ -23296,44 +26191,24 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "skippedDesigns",
- "description": "Any designs that were skipped from the upload due to there being no change to their content since their last version",
+ "name": "node",
+ "description": "The item at the end of the edge.",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "Design",
- "ofType": null
- }
- }
- }
+ "kind": "OBJECT",
+ "name": "User",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
@@ -23347,94 +26222,102 @@
"possibleTypes": null
},
{
- "kind": "INPUT_OBJECT",
- "name": "DesignManagementUploadInput",
- "description": "Autogenerated input type of DesignManagementUpload",
- "fields": null,
- "inputFields": [
+ "kind": "OBJECT",
+ "name": "UserPermissions",
+ "description": null,
+ "fields": [
{
- "name": "projectPath",
- "description": "The project where the issue is to upload designs for",
+ "name": "createSnippet",
+ "description": "Indicates the user can perform `create_snippet` on this resource",
+ "args": [
+
+ ],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "ID",
+ "name": "Boolean",
"ofType": null
}
},
- "defaultValue": null
- },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "ENUM",
+ "name": "VisibilityLevelsEnum",
+ "description": null,
+ "fields": null,
+ "inputFields": null,
+ "interfaces": null,
+ "enumValues": [
{
- "name": "iid",
- "description": "The iid of the issue to modify designs for",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- },
- "defaultValue": null
+ "name": "private",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "files",
- "description": "The files to upload",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Upload",
- "ofType": null
- }
- }
- }
- },
- "defaultValue": null
+ "name": "internal",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
+ "name": "public",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
}
],
- "interfaces": null,
- "enumValues": null,
"possibleTypes": null
},
{
- "kind": "SCALAR",
- "name": "Upload",
+ "kind": "ENUM",
+ "name": "VisibilityScopesEnum",
"description": null,
"fields": null,
"inputFields": null,
"interfaces": null,
- "enumValues": null,
+ "enumValues": [
+ {
+ "name": "private",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "internal",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "public",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
"possibleTypes": null
},
{
"kind": "OBJECT",
- "name": "DesignManagementDeletePayload",
- "description": "Autogenerated return type of DesignManagementDelete",
+ "name": "Vulnerability",
+ "description": "Represents a vulnerability.",
"fields": [
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
+ "name": "description",
+ "description": "Description of the vulnerability",
"args": [
],
@@ -23447,8 +26330,8 @@
"deprecationReason": null
},
{
- "name": "errors",
- "description": "Reasons why the mutation failed.",
+ "name": "id",
+ "description": "GraphQL ID of the vulnerability",
"args": [
],
@@ -23456,169 +26339,93 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- }
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "version",
- "description": "The new version in which the designs are deleted",
+ "name": "location",
+ "description": "The JSON location metadata for the vulnerability. Its format depends on the type of the security scan that found the vulnerability",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "DesignVersion",
+ "kind": "SCALAR",
+ "name": "JSON",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "INPUT_OBJECT",
- "name": "DesignManagementDeleteInput",
- "description": "Autogenerated input type of DesignManagementDelete",
- "fields": null,
- "inputFields": [
- {
- "name": "projectPath",
- "description": "The project where the issue is to upload designs for",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- },
- "defaultValue": null
- },
- {
- "name": "iid",
- "description": "The iid of the issue to modify designs for",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- },
- "defaultValue": null
},
{
- "name": "filenames",
- "description": "The filenames of the designs to delete",
+ "name": "reportType",
+ "description": "Type of the security report that found the vulnerability (SAST, DEPENDENCY_SCANNING, CONTAINER_SCANNING, DAST)",
+ "args": [
+
+ ],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- }
- }
+ "kind": "ENUM",
+ "name": "VulnerabilityReportType",
+ "ofType": null
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
+ "name": "severity",
+ "description": "Severity of the vulnerability (INFO, UNKNOWN, LOW, MEDIUM, HIGH, CRITICAL)",
+ "args": [
+
+ ],
"type": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "ENUM",
+ "name": "VulnerabilitySeverity",
"ofType": null
},
- "defaultValue": null
- }
- ],
- "interfaces": null,
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "IssueSetWeightPayload",
- "description": "Autogenerated return type of IssueSetWeight",
- "fields": [
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
+ "name": "state",
+ "description": "State of the vulnerability (DETECTED, DISMISSED, RESOLVED, CONFIRMED)",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "ENUM",
+ "name": "VulnerabilityState",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "errors",
- "description": "Reasons why the mutation failed.",
+ "name": "title",
+ "description": "Title of the vulnerability",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- }
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "issue",
- "description": "The issue after mutation",
+ "name": "vulnerabilityPath",
+ "description": "URL to the vulnerability's details page",
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "Issue",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
@@ -23633,109 +26440,105 @@
"possibleTypes": null
},
{
- "kind": "INPUT_OBJECT",
- "name": "IssueSetWeightInput",
- "description": "Autogenerated input type of IssueSetWeight",
- "fields": null,
- "inputFields": [
+ "kind": "OBJECT",
+ "name": "VulnerabilityConnection",
+ "description": "The connection type for Vulnerability.",
+ "fields": [
{
- "name": "projectPath",
- "description": "The project the issue to mutate is in",
+ "name": "edges",
+ "description": "A list of edges.",
+ "args": [
+
+ ],
"type": {
- "kind": "NON_NULL",
+ "kind": "LIST",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "ID",
+ "kind": "OBJECT",
+ "name": "VulnerabilityEdge",
"ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "iid",
- "description": "The iid of the issue to mutate",
+ "name": "nodes",
+ "description": "A list of nodes.",
+ "args": [
+
+ ],
"type": {
- "kind": "NON_NULL",
+ "kind": "LIST",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "OBJECT",
+ "name": "Vulnerability",
"ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "weight",
- "description": "The desired weight for the issue",
+ "name": "pageInfo",
+ "description": "Information to aid in pagination.",
+ "args": [
+
+ ],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Int",
+ "kind": "OBJECT",
+ "name": "PageInfo",
"ofType": null
}
},
- "defaultValue": null
- },
- {
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
}
],
- "interfaces": null,
+ "inputFields": null,
+ "interfaces": [
+
+ ],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
- "name": "EpicTreeReorderPayload",
- "description": "Autogenerated return type of EpicTreeReorder",
+ "name": "VulnerabilityEdge",
+ "description": "An edge in a connection.",
"fields": [
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
+ "name": "cursor",
+ "description": "A cursor for use in pagination.",
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "errors",
- "description": "Reasons why the mutation failed.",
+ "name": "node",
+ "description": "The item at the end of the edge.",
"args": [
],
"type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- }
- }
+ "kind": "OBJECT",
+ "name": "Vulnerability",
+ "ofType": null
},
"isDeprecated": false,
"deprecationReason": null
@@ -23749,124 +26552,116 @@
"possibleTypes": null
},
{
- "kind": "INPUT_OBJECT",
- "name": "EpicTreeReorderInput",
- "description": "Autogenerated input type of EpicTreeReorder",
+ "kind": "ENUM",
+ "name": "VulnerabilityReportType",
+ "description": "The type of the security scan that found the vulnerability.",
"fields": null,
- "inputFields": [
+ "inputFields": null,
+ "interfaces": null,
+ "enumValues": [
{
- "name": "baseEpicId",
- "description": "The id of the base epic of the tree",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- },
- "defaultValue": null
+ "name": "SAST",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "moved",
- "description": "Parameters for updating the tree positions",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "INPUT_OBJECT",
- "name": "EpicTreeNodeFieldsInputType",
- "ofType": null
- }
- },
- "defaultValue": null
+ "name": "DEPENDENCY_SCANNING",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
+ "name": "CONTAINER_SCANNING",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "DAST",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
}
],
- "interfaces": null,
- "enumValues": null,
"possibleTypes": null
},
{
- "kind": "INPUT_OBJECT",
- "name": "EpicTreeNodeFieldsInputType",
- "description": "A node of an epic tree.",
+ "kind": "ENUM",
+ "name": "VulnerabilitySeverity",
+ "description": "The severity of the vulnerability.",
"fields": null,
- "inputFields": [
+ "inputFields": null,
+ "interfaces": null,
+ "enumValues": [
{
- "name": "id",
- "description": "The id of the epic_issue or epic that is being moved",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- },
- "defaultValue": null
+ "name": "INFO",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "adjacentReferenceId",
- "description": "The id of the epic_issue or issue that the actual epic or issue is switched with",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- },
- "defaultValue": null
+ "name": "UNKNOWN",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "relativePosition",
- "description": "The type of the switch, after or before allowed",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "ENUM",
- "name": "MoveType",
- "ofType": null
- }
- },
- "defaultValue": null
+ "name": "LOW",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "MEDIUM",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "HIGH",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "CRITICAL",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
}
],
- "interfaces": null,
- "enumValues": null,
"possibleTypes": null
},
{
"kind": "ENUM",
- "name": "MoveType",
- "description": "The position the adjacent object should be moved.",
+ "name": "VulnerabilityState",
+ "description": "The state of the vulnerability.",
"fields": null,
"inputFields": null,
"interfaces": null,
"enumValues": [
{
- "name": "before",
- "description": "The adjacent object will be moved before the object that is being moved.",
+ "name": "DETECTED",
+ "description": null,
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "after",
- "description": "The adjacent object will be moved after the object that is being moved.",
+ "name": "DISMISSED",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "RESOLVED",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "CONFIRMED",
+ "description": null,
"isDeprecated": false,
"deprecationReason": null
}
@@ -23875,40 +26670,52 @@
},
{
"kind": "OBJECT",
- "name": "UpdateEpicPayload",
- "description": "Autogenerated return type of UpdateEpic",
+ "name": "__Directive",
+ "description": "A Directive provides a way to describe alternate runtime execution and type validation behavior in a GraphQL document.\n\nIn some cases, you need to provide options to alter GraphQL's execution behavior in ways field arguments will not suffice, such as conditionally including or skipping a field. Directives provide this by describing additional information to the executor.",
"fields": [
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
+ "name": "args",
+ "description": null,
"args": [
],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "__InputValue",
+ "ofType": null
+ }
+ }
+ }
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "epic",
- "description": "The epic after mutation",
+ "name": "description",
+ "description": null,
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "Epic",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "errors",
- "description": "Reasons why the mutation failed.",
+ "name": "locations",
+ "description": null,
"args": [
],
@@ -23922,8 +26729,8 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "String",
+ "kind": "ENUM",
+ "name": "__DirectiveLocation",
"ofType": null
}
}
@@ -23931,197 +26738,200 @@
},
"isDeprecated": false,
"deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "INPUT_OBJECT",
- "name": "UpdateEpicInput",
- "description": "Autogenerated input type of UpdateEpic",
- "fields": null,
- "inputFields": [
+ },
{
- "name": "groupPath",
- "description": "The group the epic to mutate is in",
+ "name": "name",
+ "description": null,
+ "args": [
+
+ ],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "ID",
+ "name": "String",
"ofType": null
}
},
- "defaultValue": null
- },
- {
- "name": "title",
- "description": "The title of the epic",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "description",
- "description": "The description of the epic",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "startDateFixed",
- "description": "The start date of the epic",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "dueDateFixed",
- "description": "The end date of the epic",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "startDateIsFixed",
- "description": "Indicates start date should be sourced from start_date_fixed field not the issue milestones",
- "type": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "dueDateIsFixed",
- "description": "Indicates end date should be sourced from due_date_fixed field not the issue milestones",
- "type": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- },
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "addLabelIds",
- "description": "The IDs of labels to be added to the epic.",
+ "name": "onField",
+ "description": null,
+ "args": [
+
+ ],
"type": {
- "kind": "LIST",
+ "kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": true,
+ "deprecationReason": "Use `locations`."
},
{
- "name": "removeLabelIds",
- "description": "The IDs of labels to be removed from the epic.",
+ "name": "onFragment",
+ "description": null,
+ "args": [
+
+ ],
"type": {
- "kind": "LIST",
+ "kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": true,
+ "deprecationReason": "Use `locations`."
},
{
- "name": "iid",
- "description": "The iid of the epic to mutate",
+ "name": "onOperation",
+ "description": null,
+ "args": [
+
+ ],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "String",
+ "name": "Boolean",
"ofType": null
}
},
- "defaultValue": null
- },
- {
- "name": "stateEvent",
- "description": "State event for the epic",
- "type": {
- "kind": "ENUM",
- "name": "EpicStateEvent",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "healthStatus",
- "description": "The desired health status",
- "type": {
- "kind": "ENUM",
- "name": "HealthStatus",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
+ "isDeprecated": true,
+ "deprecationReason": "Use `locations`."
}
],
- "interfaces": null,
+ "inputFields": null,
+ "interfaces": [
+
+ ],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "ENUM",
- "name": "EpicStateEvent",
- "description": "State event of an epic",
+ "name": "__DirectiveLocation",
+ "description": "A Directive can be adjacent to many parts of the GraphQL language, a __DirectiveLocation describes one such possible adjacencies.",
"fields": null,
"inputFields": null,
"interfaces": null,
"enumValues": [
{
- "name": "REOPEN",
- "description": "Reopen the epic",
+ "name": "QUERY",
+ "description": "Location adjacent to a query operation.",
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "CLOSE",
- "description": "Close the epic",
+ "name": "MUTATION",
+ "description": "Location adjacent to a mutation operation.",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "SUBSCRIPTION",
+ "description": "Location adjacent to a subscription operation.",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "FIELD",
+ "description": "Location adjacent to a field.",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "FRAGMENT_DEFINITION",
+ "description": "Location adjacent to a fragment definition.",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "FRAGMENT_SPREAD",
+ "description": "Location adjacent to a fragment spread.",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "INLINE_FRAGMENT",
+ "description": "Location adjacent to an inline fragment.",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "SCHEMA",
+ "description": "Location adjacent to a schema definition.",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "SCALAR",
+ "description": "Location adjacent to a scalar definition.",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "OBJECT",
+ "description": "Location adjacent to an object type definition.",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "FIELD_DEFINITION",
+ "description": "Location adjacent to a field definition.",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "ARGUMENT_DEFINITION",
+ "description": "Location adjacent to an argument definition.",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "INTERFACE",
+ "description": "Location adjacent to an interface definition.",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "UNION",
+ "description": "Location adjacent to a union definition.",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "ENUM",
+ "description": "Location adjacent to an enum definition.",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "ENUM_VALUE",
+ "description": "Location adjacent to an enum value definition.",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "INPUT_OBJECT",
+ "description": "Location adjacent to an input object type definition.",
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "INPUT_FIELD_DEFINITION",
+ "description": "Location adjacent to an input object field definition.",
"isDeprecated": false,
"deprecationReason": null
}
@@ -24130,12 +26940,12 @@
},
{
"kind": "OBJECT",
- "name": "CreateEpicPayload",
- "description": "Autogenerated return type of CreateEpic",
+ "name": "__EnumValue",
+ "description": "One possible value for a given Enum. Enum values are unique values, not a placeholder for a string or numeric value. However an Enum value is returned in a JSON response as a string.",
"fields": [
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
+ "name": "deprecationReason",
+ "description": null,
"args": [
],
@@ -24148,22 +26958,22 @@
"deprecationReason": null
},
{
- "name": "epic",
- "description": "The created epic",
+ "name": "description",
+ "description": null,
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "Epic",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "errors",
- "description": "Reasons why the mutation failed.",
+ "name": "isDeprecated",
+ "description": null,
"args": [
],
@@ -24171,17 +26981,27 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- }
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "name",
+ "description": null,
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
}
},
"isDeprecated": false,
@@ -24196,144 +27016,134 @@
"possibleTypes": null
},
{
- "kind": "INPUT_OBJECT",
- "name": "CreateEpicInput",
- "description": "Autogenerated input type of CreateEpic",
- "fields": null,
- "inputFields": [
+ "kind": "OBJECT",
+ "name": "__Field",
+ "description": "Object and Interface types are described by a list of Fields, each of which has a name, potentially a list of arguments, and a return type.",
+ "fields": [
{
- "name": "groupPath",
- "description": "The group the epic to mutate is in",
+ "name": "args",
+ "description": null,
+ "args": [
+
+ ],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "__InputValue",
+ "ofType": null
+ }
+ }
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "title",
- "description": "The title of the epic",
+ "name": "deprecationReason",
+ "description": null,
+ "args": [
+
+ ],
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
"name": "description",
- "description": "The description of the epic",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "startDateFixed",
- "description": "The start date of the epic",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "dueDateFixed",
- "description": "The end date of the epic",
+ "description": null,
+ "args": [
+
+ ],
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
- "defaultValue": null
- },
- {
- "name": "startDateIsFixed",
- "description": "Indicates start date should be sourced from start_date_fixed field not the issue milestones",
- "type": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- },
- "defaultValue": null
- },
- {
- "name": "dueDateIsFixed",
- "description": "Indicates end date should be sourced from due_date_fixed field not the issue milestones",
- "type": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- },
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "addLabelIds",
- "description": "The IDs of labels to be added to the epic.",
+ "name": "isDeprecated",
+ "description": null,
+ "args": [
+
+ ],
"type": {
- "kind": "LIST",
+ "kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "removeLabelIds",
- "description": "The IDs of labels to be removed from the epic.",
+ "name": "name",
+ "description": null,
+ "args": [
+
+ ],
"type": {
- "kind": "LIST",
+ "kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
+ "name": "type",
+ "description": null,
+ "args": [
+
+ ],
"type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "__Type",
+ "ofType": null
+ }
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
}
],
- "interfaces": null,
+ "inputFields": null,
+ "interfaces": [
+
+ ],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
- "name": "EpicSetSubscriptionPayload",
- "description": "Autogenerated return type of EpicSetSubscription",
+ "name": "__InputValue",
+ "description": "Arguments provided to Fields or Directives and the input fields of an InputObject are represented as Input Values which describe their type and optionally a default value.",
"fields": [
{
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
+ "name": "defaultValue",
+ "description": "A GraphQL-formatted string representing the default value for this input value.",
"args": [
],
@@ -24346,22 +27156,22 @@
"deprecationReason": null
},
{
- "name": "epic",
- "description": "The epic after mutation",
+ "name": "description",
+ "description": null,
"args": [
],
"type": {
- "kind": "OBJECT",
- "name": "Epic",
+ "kind": "SCALAR",
+ "name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
- "name": "errors",
- "description": "Reasons why the mutation failed.",
+ "name": "name",
+ "description": null,
"args": [
],
@@ -24369,90 +27179,37 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- }
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "INPUT_OBJECT",
- "name": "EpicSetSubscriptionInput",
- "description": "Autogenerated input type of EpicSetSubscription",
- "fields": null,
- "inputFields": [
- {
- "name": "groupPath",
- "description": "The group the epic to (un)subscribe is in",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
"kind": "SCALAR",
- "name": "ID",
- "ofType": null
- }
- },
- "defaultValue": null
- },
- {
- "name": "iid",
- "description": "The iid of the epic to (un)subscribe",
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "ID",
+ "name": "String",
"ofType": null
}
},
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
},
{
- "name": "subscribedState",
- "description": "The desired state of the subscription",
+ "name": "type",
+ "description": null,
+ "args": [
+
+ ],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
+ "kind": "OBJECT",
+ "name": "__Type",
"ofType": null
}
},
- "defaultValue": null
- },
- {
- "name": "clientMutationId",
- "description": "A unique identifier for the client performing the mutation.",
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "defaultValue": null
+ "isDeprecated": false,
+ "deprecationReason": null
}
],
- "interfaces": null,
+ "inputFields": null,
+ "interfaces": [
+
+ ],
"enumValues": null,
"possibleTypes": null
},
@@ -24769,432 +27526,6 @@
"possibleTypes": null
},
{
- "kind": "OBJECT",
- "name": "__Field",
- "description": "Object and Interface types are described by a list of Fields, each of which has a name, potentially a list of arguments, and a return type.",
- "fields": [
- {
- "name": "args",
- "description": null,
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "__InputValue",
- "ofType": null
- }
- }
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "deprecationReason",
- "description": null,
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "description",
- "description": null,
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "isDeprecated",
- "description": null,
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "name",
- "description": null,
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "type",
- "description": null,
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "__Type",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "__Directive",
- "description": "A Directive provides a way to describe alternate runtime execution and type validation behavior in a GraphQL document.\n\nIn some cases, you need to provide options to alter GraphQL's execution behavior in ways field arguments will not suffice, such as conditionally including or skipping a field. Directives provide this by describing additional information to the executor.",
- "fields": [
- {
- "name": "args",
- "description": null,
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "__InputValue",
- "ofType": null
- }
- }
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "description",
- "description": null,
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "locations",
- "description": null,
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "LIST",
- "name": null,
- "ofType": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "ENUM",
- "name": "__DirectiveLocation",
- "ofType": null
- }
- }
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "name",
- "description": null,
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "onField",
- "description": null,
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- }
- },
- "isDeprecated": true,
- "deprecationReason": "Use `locations`."
- },
- {
- "name": "onFragment",
- "description": null,
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- }
- },
- "isDeprecated": true,
- "deprecationReason": "Use `locations`."
- },
- {
- "name": "onOperation",
- "description": null,
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- }
- },
- "isDeprecated": true,
- "deprecationReason": "Use `locations`."
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "__EnumValue",
- "description": "One possible value for a given Enum. Enum values are unique values, not a placeholder for a string or numeric value. However an Enum value is returned in a JSON response as a string.",
- "fields": [
- {
- "name": "deprecationReason",
- "description": null,
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "description",
- "description": null,
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "isDeprecated",
- "description": null,
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Boolean",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "name",
- "description": null,
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "__InputValue",
- "description": "Arguments provided to Fields or Directives and the input fields of an InputObject are represented as Input Values which describe their type and optionally a default value.",
- "fields": [
- {
- "name": "defaultValue",
- "description": "A GraphQL-formatted string representing the default value for this input value.",
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "description",
- "description": null,
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "name",
- "description": null,
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "String",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "type",
- "description": null,
- "args": [
-
- ],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "OBJECT",
- "name": "__Type",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [
-
- ],
- "enumValues": null,
- "possibleTypes": null
- },
- {
"kind": "ENUM",
"name": "__TypeKind",
"description": "An enum describing what kind of type a given `__Type` is.",
@@ -25252,125 +27583,6 @@
}
],
"possibleTypes": null
- },
- {
- "kind": "ENUM",
- "name": "__DirectiveLocation",
- "description": "A Directive can be adjacent to many parts of the GraphQL language, a __DirectiveLocation describes one such possible adjacencies.",
- "fields": null,
- "inputFields": null,
- "interfaces": null,
- "enumValues": [
- {
- "name": "QUERY",
- "description": "Location adjacent to a query operation.",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "MUTATION",
- "description": "Location adjacent to a mutation operation.",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "SUBSCRIPTION",
- "description": "Location adjacent to a subscription operation.",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "FIELD",
- "description": "Location adjacent to a field.",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "FRAGMENT_DEFINITION",
- "description": "Location adjacent to a fragment definition.",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "FRAGMENT_SPREAD",
- "description": "Location adjacent to a fragment spread.",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "INLINE_FRAGMENT",
- "description": "Location adjacent to an inline fragment.",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "SCHEMA",
- "description": "Location adjacent to a schema definition.",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "SCALAR",
- "description": "Location adjacent to a scalar definition.",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "OBJECT",
- "description": "Location adjacent to an object type definition.",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "FIELD_DEFINITION",
- "description": "Location adjacent to a field definition.",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "ARGUMENT_DEFINITION",
- "description": "Location adjacent to an argument definition.",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "INTERFACE",
- "description": "Location adjacent to an interface definition.",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "UNION",
- "description": "Location adjacent to a union definition.",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "ENUM",
- "description": "Location adjacent to an enum definition.",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "ENUM_VALUE",
- "description": "Location adjacent to an enum value definition.",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "INPUT_OBJECT",
- "description": "Location adjacent to an input object type definition.",
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "INPUT_FIELD_DEFINITION",
- "description": "Location adjacent to an input object field definition.",
- "isDeprecated": false,
- "deprecationReason": null
- }
- ],
- "possibleTypes": null
}
],
"directives": [
diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md
index e5e37339457..38067b275d5 100644
--- a/doc/api/graphql/reference/index.md
+++ b/doc/api/graphql/reference/index.md
@@ -13,6 +13,9 @@ The API can be explored interactively using the [GraphiQL IDE](../index.md#graph
Each table below documents a GraphQL type. Types match loosely to models, but not all
fields and methods on a model are available via GraphQL.
+CAUTION: **Caution:**
+Fields that are deprecated are marked with **{warning-solid}**.
+
## AddAwardEmojiPayload
Autogenerated return type of AddAwardEmoji
@@ -23,6 +26,16 @@ Autogenerated return type of AddAwardEmoji
| `clientMutationId` | String | A unique identifier for the client performing the mutation. |
| `errors` | String! => Array | Reasons why the mutation failed. |
+## AdminSidekiqQueuesDeleteJobsPayload
+
+Autogenerated return type of AdminSidekiqQueuesDeleteJobs
+
+| Name | Type | Description |
+| --- | ---- | ---------- |
+| `clientMutationId` | String | A unique identifier for the client performing the mutation. |
+| `errors` | String! => Array | Reasons why the mutation failed. |
+| `result` | DeleteJobsResponse | Information about the status of the deletion request |
+
## AwardEmoji
An emoji awarded by a user.
@@ -49,6 +62,16 @@ An emoji awarded by a user.
| `type` | EntryType! | Type of tree entry |
| `webUrl` | String | Web URL of the blob |
+## Board
+
+Represents a project or group board
+
+| Name | Type | Description |
+| --- | ---- | ---------- |
+| `id` | ID! | ID (global ID) of the board |
+| `name` | String | Name of the board |
+| `weight` | Int | Weight of the board |
+
## Commit
| Name | Type | Description |
@@ -59,7 +82,7 @@ An emoji awarded by a user.
| `authoredDate` | Time | Timestamp of when the commit was authored |
| `description` | String | Description of the commit message |
| `id` | ID! | ID (global ID) of the commit |
-| `latestPipeline` | Pipeline | Latest pipeline of the commit |
+| `latestPipeline` **{warning-solid}** | Pipeline | **Deprecated:** Use pipelines |
| `message` | String | Raw commit message |
| `sha` | String! | SHA1 ID of the commit |
| `signatureHtml` | String | Rendered HTML of the commit signature |
@@ -106,6 +129,16 @@ Autogenerated return type of CreateNote
| `errors` | String! => Array | Reasons why the mutation failed. |
| `note` | Note | The note after mutation |
+## CreateRequirementPayload
+
+Autogenerated return type of CreateRequirement
+
+| Name | Type | Description |
+| --- | ---- | ---------- |
+| `clientMutationId` | String | A unique identifier for the client performing the mutation. |
+| `errors` | String! => Array | Reasons why the mutation failed. |
+| `requirement` | Requirement | The requirement after mutation |
+
## CreateSnippetPayload
Autogenerated return type of CreateSnippet
@@ -116,6 +149,16 @@ Autogenerated return type of CreateSnippet
| `errors` | String! => Array | Reasons why the mutation failed. |
| `snippet` | Snippet | The snippet after mutation |
+## DeleteJobsResponse
+
+The response from the AdminSidekiqQueuesDeleteJobs mutation.
+
+| Name | Type | Description |
+| --- | ---- | ---------- |
+| `completed` | Boolean | Whether or not the entire queue was processed in time; if not, retrying the same request is safe |
+| `deletedJobs` | Int | The number of matching jobs deleted |
+| `queueSize` | Int | The queue size after processing |
+
## Design
A single design
@@ -284,6 +327,7 @@ Represents an epic.
| `closedAt` | Time | Timestamp of the epic's closure |
| `createdAt` | Time | Timestamp of the epic's creation |
| `descendantCounts` | EpicDescendantCount | Number of open and closed descendant epics and issues |
+| `descendantWeightSum` | EpicDescendantWeights | Total weight of open and closed issues in the epic and its descendants |
| `description` | String | Description of the epic |
| `downvotes` | Int! | Number of downvotes the epic has received |
| `dueDate` | Time | Due date of the epic |
@@ -293,7 +337,7 @@ Represents an epic.
| `group` | Group! | Group to which the epic belongs |
| `hasChildren` | Boolean! | Indicates if the epic has children |
| `hasIssues` | Boolean! | Indicates if the epic has direct issues |
-| `healthStatus` | HealthStatus | Current health status. Available only when feature flag save_issuable_health_status is enabled. |
+| `healthStatus` | EpicHealthStatus | Current health status of the epic |
| `id` | ID! | ID of the epic |
| `iid` | ID! | Internal ID of the epic |
| `parent` | Epic | Parent epic of the epic |
@@ -313,6 +357,17 @@ Represents an epic.
| `webPath` | String! | Web path of the epic |
| `webUrl` | String! | Web URL of the epic |
+## EpicAddIssuePayload
+
+Autogenerated return type of EpicAddIssue
+
+| Name | Type | Description |
+| --- | ---- | ---------- |
+| `clientMutationId` | String | A unique identifier for the client performing the mutation. |
+| `epic` | Epic | The epic after mutation |
+| `epicIssue` | EpicIssue | The epic-issue relation |
+| `errors` | String! => Array | Reasons why the mutation failed. |
+
## EpicDescendantCount
Counts of descendent epics.
@@ -324,6 +379,25 @@ Counts of descendent epics.
| `openedEpics` | Int | Number of opened sub-epics |
| `openedIssues` | Int | Number of opened epic issues |
+## EpicDescendantWeights
+
+Total weight of open and closed descendant issues
+
+| Name | Type | Description |
+| --- | ---- | ---------- |
+| `closedIssues` | Int | Total weight of completed (closed) issues in this epic, including epic descendants |
+| `openedIssues` | Int | Total weight of opened issues in this epic, including epic descendants |
+
+## EpicHealthStatus
+
+Health status of child issues
+
+| Name | Type | Description |
+| --- | ---- | ---------- |
+| `issuesAtRisk` | Int | Number of issues at risk |
+| `issuesNeedingAttention` | Int | Number of issues that need attention |
+| `issuesOnTrack` | Int | Number of issues on track |
+
## EpicIssue
Relationship between an epic and an issue
@@ -337,13 +411,13 @@ Relationship between an epic and an issue
| `description` | String | Description of the issue |
| `descriptionHtml` | String | The GitLab Flavored Markdown rendering of `description` |
| `designCollection` | DesignCollection | Collection of design images associated with this issue |
-| `designs` | DesignCollection | Deprecated. Use `designCollection` |
+| `designs` **{warning-solid}** | DesignCollection | **Deprecated:** Use designCollection |
| `discussionLocked` | Boolean! | Indicates discussion is locked on the issue |
| `downvotes` | Int! | Number of downvotes the issue has received |
| `dueDate` | Time | Due date of the issue |
| `epic` | Epic | Epic to which this issue belongs |
| `epicIssueId` | ID! | ID of the epic-issue relation |
-| `healthStatus` | HealthStatus | Current health status. Available only when feature flag save_issuable_health_status is enabled. |
+| `healthStatus` | HealthStatus | Current health status. Available only when feature flag `save_issuable_health_status` is enabled. |
| `id` | ID | Global ID of the epic-issue relation |
| `iid` | ID! | Internal ID of the issue |
| `milestone` | Milestone | Milestone of the issue |
@@ -407,7 +481,7 @@ Autogenerated return type of EpicTreeReorder
| `enabled` | Boolean! | Indicates whether Grafana integration is enabled |
| `grafanaUrl` | String! | Url for the Grafana host for the Grafana integration |
| `id` | ID! | Internal ID of the Grafana integration |
-| `token` | String! | API token for the Grafana integration. Field is permanently masked. |
+| `token` **{warning-solid}** | String! | **Deprecated:** Plain text token has been masked for security reasons |
| `updatedAt` | Time! | Timestamp of the issue's last activity |
## Group
@@ -416,6 +490,7 @@ Autogenerated return type of EpicTreeReorder
| --- | ---- | ---------- |
| `autoDevopsEnabled` | Boolean | Indicates whether Auto DevOps is enabled for all projects within this group |
| `avatarUrl` | String | Avatar URL of the group |
+| `board` | Board | A single board of the group |
| `description` | String | Description of the namespace |
| `descriptionHtml` | String | The GitLab Flavored Markdown rendering of `description` |
| `emailsDisabled` | Boolean | Indicates if a group has email notifications disabled |
@@ -458,12 +533,12 @@ Autogenerated return type of EpicTreeReorder
| `description` | String | Description of the issue |
| `descriptionHtml` | String | The GitLab Flavored Markdown rendering of `description` |
| `designCollection` | DesignCollection | Collection of design images associated with this issue |
-| `designs` | DesignCollection | Deprecated. Use `designCollection` |
+| `designs` **{warning-solid}** | DesignCollection | **Deprecated:** Use designCollection |
| `discussionLocked` | Boolean! | Indicates discussion is locked on the issue |
| `downvotes` | Int! | Number of downvotes the issue has received |
| `dueDate` | Time | Due date of the issue |
| `epic` | Epic | Epic to which this issue belongs |
-| `healthStatus` | HealthStatus | Current health status. Available only when feature flag save_issuable_health_status is enabled. |
+| `healthStatus` | HealthStatus | Current health status. Available only when feature flag `save_issuable_health_status` is enabled. |
| `iid` | ID! | Internal ID of the issue |
| `milestone` | Milestone | Milestone of the issue |
| `reference` | String! | Internal reference of the issue. Returned in shortened format by default |
@@ -567,7 +642,7 @@ Autogenerated return type of MarkAsSpamSnippet
| `id` | ID! | ID of the merge request |
| `iid` | String! | Internal ID of the merge request |
| `inProgressMergeCommitSha` | String | Commit SHA of the merge request if merge is in progress |
-| `mergeCommitMessage` | String | Deprecated - renamed to defaultMergeCommitMessage |
+| `mergeCommitMessage` **{warning-solid}** | String | **Deprecated:** Renamed to defaultMergeCommitMessage |
| `mergeCommitSha` | String | SHA of the merge request commit (set once merged) |
| `mergeError` | String | Error message due to a merge error |
| `mergeOngoing` | Boolean! | Indicates if a merge is currently occurring |
@@ -791,6 +866,7 @@ Information about pagination in a connection.
| `archived` | Boolean | Indicates the archived status of the project |
| `autocloseReferencedIssues` | Boolean | Indicates if issues referenced by merge requests and commits within the default branch are closed automatically |
| `avatarUrl` | String | URL to avatar image file of the project |
+| `board` | Board | A single board of the project |
| `containerRegistryEnabled` | Boolean | Indicates if the project stores Docker container images in a container registry |
| `createdAt` | Time | Timestamp of the project creation |
| `description` | String | Short description of the project |
@@ -822,6 +898,7 @@ Information about pagination in a connection.
| `removeSourceBranchAfterMerge` | Boolean | Indicates if `Delete source branch` option should be enabled by default for all new merge requests of the project |
| `repository` | Repository | Git repository of the project |
| `requestAccessEnabled` | Boolean | Indicates if users can request member access to the project |
+| `requirement` | Requirement | Find a single requirement. Available only when feature flag `requirements_management` is enabled. |
| `sentryDetailedError` | SentryDetailedError | Detailed version of a Sentry error on the project |
| `sentryErrors` | SentryErrorCollection | Paginated collection of Sentry errors on the project |
| `serviceDeskAddress` | String | E-mail address of the service desk. |
@@ -916,6 +993,34 @@ Autogenerated return type of RemoveAwardEmoji
| `rootRef` | String | Default branch of the repository |
| `tree` | Tree | Tree of the repository |
+## Requirement
+
+Represents a requirement.
+
+| Name | Type | Description |
+| --- | ---- | ---------- |
+| `author` | User! | Author of the requirement |
+| `createdAt` | Time! | Timestamp of when the requirement was created |
+| `id` | ID! | ID of the requirement |
+| `iid` | ID! | Internal ID of the requirement |
+| `project` | Project! | Project to which the requirement belongs |
+| `state` | RequirementState! | State of the requirement |
+| `title` | String | Title of the requirement |
+| `updatedAt` | Time! | Timestamp of when the requirement was last updated |
+| `userPermissions` | RequirementPermissions! | Permissions for the current user on the resource |
+
+## RequirementPermissions
+
+Check permissions for the current user on a requirement
+
+| Name | Type | Description |
+| --- | ---- | ---------- |
+| `adminRequirement` | Boolean! | Indicates the user can perform `admin_requirement` on this resource |
+| `createRequirement` | Boolean! | Indicates the user can perform `create_requirement` on this resource |
+| `destroyRequirement` | Boolean! | Indicates the user can perform `destroy_requirement` on this resource |
+| `readRequirement` | Boolean! | Indicates the user can perform `read_requirement` on this resource |
+| `updateRequirement` | Boolean! | Indicates the user can perform `update_requirement` on this resource |
+
## RootStorageStatistics
| Name | Type | Description |
@@ -1288,3 +1393,18 @@ Autogenerated return type of UpdateSnippet
| Name | Type | Description |
| --- | ---- | ---------- |
| `createSnippet` | Boolean! | Indicates the user can perform `create_snippet` on this resource |
+
+## Vulnerability
+
+Represents a vulnerability.
+
+| Name | Type | Description |
+| --- | ---- | ---------- |
+| `description` | String | Description of the vulnerability |
+| `id` | ID! | GraphQL ID of the vulnerability |
+| `location` | JSON | The JSON location metadata for the vulnerability. Its format depends on the type of the security scan that found the vulnerability |
+| `reportType` | VulnerabilityReportType | Type of the security report that found the vulnerability (SAST, DEPENDENCY_SCANNING, CONTAINER_SCANNING, DAST) |
+| `severity` | VulnerabilitySeverity | Severity of the vulnerability (INFO, UNKNOWN, LOW, MEDIUM, HIGH, CRITICAL) |
+| `state` | VulnerabilityState | State of the vulnerability (DETECTED, DISMISSED, RESOLVED, CONFIRMED) |
+| `title` | String | Title of the vulnerability |
+| `vulnerabilityPath` | String | URL to the vulnerability's details page |
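The tables above only describe types. As a minimal, hedged sketch of how such a query can be issued over HTTP (the project path and token are placeholders; `name` and `description` are ordinary `Project` fields):

```shell
# Minimal sketch: POST a GraphQL query to the API endpoint.
# <your_access_token> and the project path are placeholders.
curl --request POST \
  --header "PRIVATE-TOKEN: <your_access_token>" \
  --header "Content-Type: application/json" \
  --data '{"query": "query { project(fullPath: \"gitlab-org/gitlab\") { name description } }"}' \
  "https://gitlab.example.com/api/graphql"
```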
diff --git a/doc/api/group_activity_analytics.md b/doc/api/group_activity_analytics.md
new file mode 100644
index 00000000000..2e93967fe64
--- /dev/null
+++ b/doc/api/group_activity_analytics.md
@@ -0,0 +1,55 @@
+# Group Activity Analytics API
+
+> **Note:** This feature was [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/26460) in GitLab 12.9.
+
+## Get count of recently created issues for group
+
+```plaintext
+GET /analytics/group_activity/issues_count
+```
+
+Parameters:
+
+| Attribute | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `group_path` | string | yes | Group path |
+
+Example request:
+
+```shell
+curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/analytics/group_activity/issues_count?group_path=gitlab-org
+```
+
+Example response:
+
+```json
+[
+ { "issues_count" : 10 }
+]
+```
+
+## Get count of recently created merge requests for group
+
+```plaintext
+GET /analytics/group_activity/merge_requests_count
+```
+
+Parameters:
+
+| Attribute | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `group_path` | string | yes | Group path |
+
+Example request:
+
+```shell
+curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/analytics/group_activity/merge_requests_count?group_path=gitlab-org
+```
+
+Example response:
+
+```json
+[
+ { "merge_requests_count" : 10 }
+]
+```
diff --git a/doc/api/group_badges.md b/doc/api/group_badges.md
index 4fa2fa2b577..0d1a333e318 100644
--- a/doc/api/group_badges.md
+++ b/doc/api/group_badges.md
@@ -19,7 +19,7 @@ from the first group's project by creation date. If the group hasn't got any pro
Gets a list of a group's badges.
-```
+```plaintext
GET /groups/:id/badges
```
@@ -52,7 +52,7 @@ Example response:
Gets a badge of a group.
-```
+```plaintext
GET /groups/:id/badges/:badge_id
```
@@ -82,7 +82,7 @@ Example response:
Adds a badge to a group.
-```
+```plaintext
POST /groups/:id/badges
```
@@ -113,7 +113,7 @@ Example response:
Updates a badge of a group.
-```
+```plaintext
PUT /groups/:id/badges/:badge_id
```
@@ -145,7 +145,7 @@ Example response:
Removes a badge from a group.
-```
+```plaintext
DELETE /groups/:id/badges/:badge_id
```
@@ -162,7 +162,7 @@ curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" https://gitl
Returns how the `link_url` and `image_url` final URLs would be after resolving the placeholder interpolation.
-```
+```plaintext
GET /groups/:id/badges/render
```
diff --git a/doc/api/group_boards.md b/doc/api/group_boards.md
index 186e00022cf..adfcd6e65cb 100644
--- a/doc/api/group_boards.md
+++ b/doc/api/group_boards.md
@@ -9,7 +9,7 @@ request will result in `404` status code.
Lists Issue Boards in the given group.
-```
+```plaintext
GET /groups/:id/boards
```
@@ -126,7 +126,7 @@ Example response:
Gets a single group issue board.
-```
+```plaintext
GET /groups/:id/boards/:board_id
```
@@ -240,7 +240,7 @@ Example response:
Creates a Group Issue Board.
-```
+```plaintext
POST /groups/:id/boards
```
@@ -306,7 +306,7 @@ Example response:
Updates a Group Issue Board.
-```
+```plaintext
PUT /groups/:id/boards/:board_id
```
@@ -372,7 +372,7 @@ Example response:
Deletes a Group Issue Board.
-```
+```plaintext
DELETE /groups/:id/boards/:board_id
```
@@ -390,7 +390,7 @@ curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" https://gitl
Get a list of the board's lists.
Does not include `open` and `closed` lists
-```
+```plaintext
GET /groups/:id/boards/:board_id/lists
```
@@ -441,7 +441,7 @@ Example response:
Get a single board list.
-```
+```plaintext
GET /groups/:id/boards/:board_id/lists/:list_id
```
@@ -473,7 +473,7 @@ Example response:
Creates a new Issue Board list.
-```
+```plaintext
POST /groups/:id/boards/:board_id/lists
```
@@ -514,7 +514,7 @@ Example response:
Updates an existing Issue Board list. This call is used to change list position.
-```
+```plaintext
PUT /groups/:id/boards/:board_id/lists/:list_id
```
@@ -547,7 +547,7 @@ Example response:
Only for admins and group owners. Deletes the board list in question.
-```
+```plaintext
DELETE /groups/:id/boards/:board_id/lists/:list_id
```
diff --git a/doc/api/group_clusters.md b/doc/api/group_clusters.md
index b94b4773a1d..be07c5232ab 100644
--- a/doc/api/group_clusters.md
+++ b/doc/api/group_clusters.md
@@ -10,7 +10,7 @@ User will need at least maintainer access for the group to use these endpoints.
Returns a list of group clusters.
-```
+```plaintext
GET /groups/:id/clusters
```
@@ -77,7 +77,7 @@ Example response:
Gets a single group cluster.
-```
+```plaintext
GET /groups/:id/clusters/:cluster_id
```
@@ -144,7 +144,7 @@ Example response:
Adds an existing Kubernetes cluster to the group.
-```
+```plaintext
POST /groups/:id/clusters/user
```
@@ -153,16 +153,16 @@ Parameters:
| Attribute | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `id` | integer/string | yes | The ID or [URL-encoded path of the group](README.md#namespaced-path-encoding) |
-| `name` | String | yes | The name of the cluster |
-| `domain` | String | no | The [base domain](../user/group/clusters/index.md#base-domain) of the cluster |
+| `name` | string | yes | The name of the cluster |
+| `domain` | string | no | The [base domain](../user/group/clusters/index.md#base-domain) of the cluster |
| `management_project_id` | integer | no | The ID of the [management project](../user/clusters/management_project.md) for the cluster |
-| `enabled` | Boolean | no | Determines if cluster is active or not, defaults to true |
-| `managed` | Boolean | no | Determines if GitLab will manage namespaces and service accounts for this cluster, defaults to true |
-| `platform_kubernetes_attributes[api_url]` | String | yes | The URL to access the Kubernetes API |
-| `platform_kubernetes_attributes[token]` | String | yes | The token to authenticate against Kubernetes |
-| `platform_kubernetes_attributes[ca_cert]` | String | no | TLS certificate (needed if API is using a self-signed TLS certificate |
-| `platform_kubernetes_attributes[authorization_type]` | String | no | The cluster authorization type: `rbac`, `abac` or `unknown_authorization`. Defaults to `rbac`. |
-| `environment_scope` | String | no | The associated environment to the cluster. Defaults to `*` **(PREMIUM)** |
+| `enabled` | boolean | no | Determines if cluster is active or not, defaults to true |
+| `managed` | boolean | no | Determines if GitLab will manage namespaces and service accounts for this cluster, defaults to true |
+| `platform_kubernetes_attributes[api_url]` | string | yes | The URL to access the Kubernetes API |
+| `platform_kubernetes_attributes[token]` | string | yes | The token to authenticate against Kubernetes |
+| `platform_kubernetes_attributes[ca_cert]` | string | no | TLS certificate. Required if API is using a self-signed TLS certificate. |
+| `platform_kubernetes_attributes[authorization_type]` | string | no | The cluster authorization type: `rbac`, `abac` or `unknown_authorization`. Defaults to `rbac`. |
+| `environment_scope` | string | no | The associated environment to the cluster. Defaults to `*` **(PREMIUM)** |
Example request:
@@ -213,7 +213,7 @@ Example response:
Updates an existing group cluster.
-```
+```plaintext
PUT /groups/:id/clusters/:cluster_id
```
@@ -223,12 +223,12 @@ Parameters:
| --------- | ---- | -------- | ----------- |
| `id` | integer/string | yes | The ID or [URL-encoded path of the group](README.md#namespaced-path-encoding) |
| `cluster_id` | integer | yes | The ID of the cluster |
-| `name` | String | no | The name of the cluster |
-| `domain` | String | no | The [base domain](../user/group/clusters/index.md#base-domain) of the cluster |
-| `platform_kubernetes_attributes[api_url]` | String | no | The URL to access the Kubernetes API |
-| `platform_kubernetes_attributes[token]` | String | no | The token to authenticate against Kubernetes |
-| `platform_kubernetes_attributes[ca_cert]` | String | no | TLS certificate (needed if API is using a self-signed TLS certificate |
-| `environment_scope` | String | no | The associated environment to the cluster **(PREMIUM)** |
+| `name` | string | no | The name of the cluster |
+| `domain` | string | no | The [base domain](../user/group/clusters/index.md#base-domain) of the cluster |
+| `platform_kubernetes_attributes[api_url]` | string | no | The URL to access the Kubernetes API |
+| `platform_kubernetes_attributes[token]` | string | no | The token to authenticate against Kubernetes |
+| `platform_kubernetes_attributes[ca_cert]` | string | no | TLS certificate. Required if API is using a self-signed TLS certificate. |
+| `environment_scope` | string | no | The associated environment to the cluster **(PREMIUM)** |
NOTE: **Note:**
`name`, `api_url`, `ca_cert` and `token` can only be updated if the cluster was added
@@ -287,14 +287,13 @@ Example response:
"web_url":"https://gitlab.example.com/group-with-clusters-api"
}
}
-
```
## Delete group cluster
Deletes an existing group cluster.
-```
+```plaintext
DELETE /groups/:id/clusters/:cluster_id
```
diff --git a/doc/api/group_import_export.md b/doc/api/group_import_export.md
index c97a753d298..039f81a18d0 100644
--- a/doc/api/group_import_export.md
+++ b/doc/api/group_import_export.md
@@ -1,25 +1,25 @@
# Group Import/Export API
-> Introduced in GitLab 12.8 as an experimental feature. May change in future releases.
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/20353) in GitLab 12.8 as an experimental feature. May change in future releases.
-Group Import/Export functionality allows to export group structure and import it at a new location.
-Used in combination with [Project Import/Export](project_import_export.md) it allows you to preserve connections with group level relations
-(e.g. a connection between a project issue and group epic).
+Group Import/Export allows you to export group structure and import it to a new location.
+When used with [Project Import/Export](project_import_export.md), you can preserve connections with
+group-level relationships, such as connections between project issues and group epics.
-Group Export includes:
+Group exports include the following:
-1. Group Milestones
-1. Group Boards
-1. Group Labels
-1. Group Badges
-1. Group Members
-1. Sub-groups (each sub-group includes all data above)
+- Group milestones
+- Group boards
+- Group labels
+- Group badges
+- Group members
+- Sub-groups. Each sub-group includes all data above
## Schedule new export
Start a new group export.
-```text
+```plaintext
POST /groups/:id/export
```
@@ -58,7 +58,11 @@ ls *export.tar.gz
2020-12-05_22-11-148_namespace_export.tar.gz
```
-Time spent on exporting a group may vary depending on a size of the group. Export download endpoint will return exported archive once it is available. 404 is returned otherwise.
+Time spent on exporting a group may vary depending on the size of the group. This endpoint
+returns either:
+
+- The exported archive (when available)
+- A 404 message
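As a hedged sketch of the schedule-then-download flow described above (the group ID and the download path are assumptions used only for illustration):

```shell
# Illustrative only: group ID 42 is a placeholder.
# 1. Schedule the export.
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/groups/42/export"

# 2. Poll the download endpoint; it returns 404 until the archive is ready.
curl --header "PRIVATE-TOKEN: <your_access_token>" --output group_export.tar.gz "https://gitlab.example.com/api/v4/groups/42/export/download"
```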
## Import a file
@@ -81,3 +85,12 @@ by `@`. For example:
```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" --form "name=imported-group" --form "path=imported-group" --form "file=@/path/to/file" https://gitlab.example.com/api/v4/groups/import
```
+
+## Important notes
+
+Note the following:
+
+- To preserve group-level relationships from imported projects, run Group Import/Export first,
+ to allow project imports into the desired group structure.
+- Imported groups are given a `private` visibility level, unless imported into a parent group.
+- If imported into a parent group, subgroups will inherit a similar level of visibility, unless otherwise restricted.
diff --git a/doc/api/group_labels.md b/doc/api/group_labels.md
index c9c084c6d11..37ab2fe48f1 100644
--- a/doc/api/group_labels.md
+++ b/doc/api/group_labels.md
@@ -11,7 +11,7 @@ The `description_html` - was added to response JSON in [GitLab 12.7](https://git
Get all labels for a given group.
-```
+```plaintext
GET /groups/:id/labels
```
@@ -60,7 +60,7 @@ Example response:
Get a single label for a given group.
-```
+```plaintext
GET /groups/:id/labels/:label_id
```
@@ -95,7 +95,7 @@ Example response:
Create a new group label for a given group.
-```
+```plaintext
POST /groups/:id/labels
```
@@ -131,7 +131,7 @@ Example response:
Updates an existing group label. At least one parameter is required, to update the group label.
-```
+```plaintext
PUT /groups/:id/labels/:label_id
```
@@ -170,7 +170,7 @@ NOTE: **Note:** An older endpoint `PUT /groups/:id/labels` with `name` in the pa
Deletes a group label with a given name.
-```
+```plaintext
DELETE /groups/:id/labels/:label_id
```
@@ -190,7 +190,7 @@ NOTE: **Note:** An older endpoint `DELETE /groups/:id/labels` with `name` in the
Subscribes the authenticated user to a group label to receive notifications. If
the user is already subscribed to the label, the status code `304` is returned.
-```
+```plaintext
POST /groups/:id/labels/:label_id/subscribe
```
@@ -226,7 +226,7 @@ Unsubscribes the authenticated user from a group label to not receive
notifications from it. If the user is not subscribed to the label, the status
code `304` is returned.
-```
+```plaintext
POST /groups/:id/labels/:label_id/unsubscribe
```
diff --git a/doc/api/group_level_variables.md b/doc/api/group_level_variables.md
index f4d98bf454f..1812aaa3bff 100644
--- a/doc/api/group_level_variables.md
+++ b/doc/api/group_level_variables.md
@@ -1,4 +1,4 @@
-# Group-level Variables API
+# Group-level Variables API
> [Introduced][ce-34519] in GitLab 9.5
@@ -6,7 +6,7 @@
Get list of a group's variables.
-```
+```plaintext
GET /groups/:id/variables
```
@@ -14,7 +14,7 @@ GET /groups/:id/variables
|-----------|---------|----------|---------------------|
| `id` | integer/string | yes | The ID of a group or [URL-encoded path of the group](README.md#namespaced-path-encoding) owned by the authenticated user |
-```
+```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/groups/1/variables"
```
@@ -23,12 +23,16 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/a
{
"key": "TEST_VARIABLE_1",
"variable_type": "env_var",
- "value": "TEST_1"
+ "value": "TEST_1",
+ "protected": false,
+ "masked": false
},
{
"key": "TEST_VARIABLE_2",
"variable_type": "env_var",
- "value": "TEST_2"
+ "value": "TEST_2",
+ "protected": false,
+ "masked": false
}
]
```
@@ -37,7 +41,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/a
Get the details of a group's specific variable.
-```
+```plaintext
GET /groups/:id/variables/:key
```
@@ -46,7 +50,7 @@ GET /groups/:id/variables/:key
| `id` | integer/string | yes | The ID of a group or [URL-encoded path of the group](README.md#namespaced-path-encoding) owned by the authenticated user |
| `key` | string | yes | The `key` of a variable |
-```
+```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/groups/1/variables/TEST_VARIABLE_1"
```
@@ -54,7 +58,9 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/a
{
"key": "TEST_VARIABLE_1",
"variable_type": "env_var",
- "value": "TEST_1"
+ "value": "TEST_1",
+ "protected": false,
+ "masked": false
}
```
@@ -62,7 +68,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/a
Create a new variable.
-```
+```plaintext
POST /groups/:id/variables
```
@@ -73,8 +79,9 @@ POST /groups/:id/variables
| `value` | string | yes | The `value` of a variable |
| `variable_type` | string | no | The type of a variable. Available types are: `env_var` (default) and `file` |
| `protected` | boolean | no | Whether the variable is protected |
+| `masked` | boolean | no | Whether the variable is masked |
-```
+```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/groups/1/variables" --form "key=NEW_VARIABLE" --form "value=new value"
```
@@ -83,7 +90,8 @@ curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" "https://gitla
"key": "NEW_VARIABLE",
"value": "new value",
"variable_type": "env_var",
- "protected": false
+ "protected": false,
+ "masked": false
}
```
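With the new `masked` attribute, a hedged sketch of creating a variable that is both protected and masked (key and value are placeholders; masked values must meet the masking requirements):

```shell
# Illustrative only: create a group variable that is protected and masked.
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" \
  "https://gitlab.example.com/api/v4/groups/1/variables" \
  --form "key=DEPLOY_TOKEN" --form "value=s3cr3tvalue123" \
  --form "protected=true" --form "masked=true"
```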
@@ -91,7 +99,7 @@ curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" "https://gitla
Update a group's variable.
-```
+```plaintext
PUT /groups/:id/variables/:key
```
@@ -102,8 +110,9 @@ PUT /groups/:id/variables/:key
| `value` | string | yes | The `value` of a variable |
| `variable_type` | string | no | The type of a variable. Available types are: `env_var` (default) and `file` |
| `protected` | boolean | no | Whether the variable is protected |
+| `masked` | boolean | no | Whether the variable is masked |
-```
+```shell
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/groups/1/variables/NEW_VARIABLE" --form "value=updated value"
```
@@ -112,7 +121,8 @@ curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab
"key": "NEW_VARIABLE",
"value": "updated value",
"variable_type": "env_var",
- "protected": true
+ "protected": true,
+ "masked": true
}
```
@@ -120,7 +130,7 @@ curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab
Remove a group's variable.
-```
+```plaintext
DELETE /groups/:id/variables/:key
```
@@ -129,7 +139,7 @@ DELETE /groups/:id/variables/:key
| `id` | integer/string | yes | The ID of a group or [URL-encoded path of the group](README.md#namespaced-path-encoding) owned by the authenticated user |
| `key` | string | yes | The `key` of a variable |
-```
+```shell
curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/groups/1/variables/VARIABLE_1"
```
diff --git a/doc/api/group_milestones.md b/doc/api/group_milestones.md
index c86ddab5793..7e2a6987208 100644
--- a/doc/api/group_milestones.md
+++ b/doc/api/group_milestones.md
@@ -6,7 +6,7 @@
Returns a list of group milestones.
-```
+```plaintext
GET /groups/:id/milestones
GET /groups/:id/milestones?iids[]=42
GET /groups/:id/milestones?iids[]=42&iids[]=43
@@ -53,7 +53,7 @@ Example Response:
Gets a single group milestone.
-```
+```plaintext
GET /groups/:id/milestones/:milestone_id
```
@@ -66,7 +66,7 @@ Parameters:
Creates a new group milestone.
-```
+```plaintext
POST /groups/:id/milestones
```
@@ -82,7 +82,7 @@ Parameters:
Updates an existing group milestone.
-```
+```plaintext
PUT /groups/:id/milestones/:milestone_id
```
@@ -100,7 +100,7 @@ Parameters:
Only for users with Developer access to the group.
-```
+```plaintext
DELETE /groups/:id/milestones/:milestone_id
```
@@ -113,7 +113,7 @@ Parameters:
Gets all issues assigned to a single group milestone.
-```
+```plaintext
GET /groups/:id/milestones/:milestone_id/issues
```
@@ -126,7 +126,7 @@ Parameters:
Gets all merge requests assigned to a single group milestone.
-```
+```plaintext
GET /groups/:id/milestones/:milestone_id/merge_requests
```
@@ -143,7 +143,7 @@ Parameters:
Get all burndown chart events for a single milestone.
-```
+```plaintext
GET /groups/:id/milestones/:milestone_id/burndown_events
```
diff --git a/doc/api/groups.md b/doc/api/groups.md
index de0b2543645..235f7f4081a 100644
--- a/doc/api/groups.md
+++ b/doc/api/groups.md
@@ -21,7 +21,7 @@ Parameters:
| `owned` | boolean | no | Limit to groups explicitly owned by the current user |
| `min_access_level` | integer | no | Limit to groups where current user has at least this [access level](members.md) |
-```
+```plaintext
GET /groups
```
@@ -42,6 +42,7 @@ GET /groups
"emails_disabled": null,
"mentions_disabled": null,
"lfs_enabled": true,
+ "default_branch_protection": 2,
"avatar_url": "http://localhost:3000/uploads/group/avatar/1/foo.jpg",
"web_url": "http://localhost:3000/groups/foo-bar",
"request_access_enabled": false,
@@ -55,7 +56,7 @@ GET /groups
When adding the parameter `statistics=true` and the authenticated user is an admin, additional group statistics are returned.
-```
+```plaintext
GET /groups?statistics=true
```
@@ -76,6 +77,7 @@ GET /groups?statistics=true
"emails_disabled": null,
"mentions_disabled": null,
"lfs_enabled": true,
+ "default_branch_protection": 2,
"avatar_url": "http://localhost:3000/uploads/group/avatar/1/foo.jpg",
"web_url": "http://localhost:3000/groups/foo-bar",
"request_access_enabled": false,
@@ -99,7 +101,7 @@ You can search for groups by name or path, see below.
You can filter by [custom attributes](custom_attributes.md) with:
-```
+```plaintext
GET /groups?custom_attributes[key]=value&custom_attributes[other_key]=other_value
```
@@ -127,7 +129,7 @@ Parameters:
| `owned` | boolean | no | Limit to groups explicitly owned by the current user |
| `min_access_level` | integer | no | Limit to groups where current user has at least this [access level](members.md) |
-```
+```plaintext
GET /groups/:id/subgroups
```
@@ -148,6 +150,7 @@ GET /groups/:id/subgroups
"emails_disabled": null,
"mentions_disabled": null,
"lfs_enabled": true,
+ "default_branch_protection": 2,
"avatar_url": "http://gitlab.example.com/uploads/group/avatar/1/foo.jpg",
"web_url": "http://gitlab.example.com/groups/foo-bar",
"request_access_enabled": false,
@@ -165,7 +168,7 @@ Get a list of projects in this group. When accessed without authentication, only
By default, this request returns 20 results at a time because the API results [are paginated](README.md#pagination).
-```
+```plaintext
GET /groups/:id/projects
```
@@ -239,7 +242,7 @@ Example response:
Get all details of a group. This endpoint can be accessed without authentication
if the group is publicly accessible. In case the user that requests is admin of the group, it will return the `runners_token` for the group too.
-```
+```plaintext
GET /groups/:id
```
@@ -470,7 +473,7 @@ Feature.disable(:limit_projects_in_groups_api)
Creates a new project group. Available only for users who can create groups.
-```
+```plaintext
POST /groups
```
@@ -481,6 +484,7 @@ Parameters:
| `name` | string | yes | The name of the group. |
| `path` | string | yes | The path of the group. |
| `description` | string | no | The group's description. |
+| `membership_lock` | boolean | no | **(STARTER)** Prevent adding new members to project membership within this group. |
| `visibility` | string | no | The group's visibility. Can be `private`, `internal`, or `public`. |
| `share_with_group_lock` | boolean | no | Prevent sharing a project with another group within this group. |
| `require_two_factor_authentication` | boolean | no | Require all users in this group to setup Two-factor authentication. |
@@ -489,18 +493,30 @@ Parameters:
| `auto_devops_enabled` | boolean | no | Default to Auto DevOps pipeline for all projects within this group. |
| `subgroup_creation_level` | string | no | Allowed to create subgroups. Can be `owner` (Owners), or `maintainer` (Maintainers). |
| `emails_disabled` | boolean | no | Disable email notifications |
+| `avatar` | mixed | no | Image file for avatar of the group. [Introduced in GitLab 12.9](https://gitlab.com/gitlab-org/gitlab/issues/36681) |
| `mentions_disabled` | boolean | no | Disable the capability of a group from getting mentioned |
| `lfs_enabled` | boolean | no | Enable/disable Large File Storage (LFS) for the projects in this group. |
| `request_access_enabled` | boolean | no | Allow users to request member access. |
| `parent_id` | integer | no | The parent group ID for creating nested group. |
+| `default_branch_protection` | integer | no | See [Options for `default_branch_protection`](#options-for-default_branch_protection). Defaults to the global-level default branch protection setting. |
| `shared_runners_minutes_limit` | integer | no | **(STARTER ONLY)** Pipeline minutes quota for this group. |
| `extra_shared_runners_minutes_limit` | integer | no | **(STARTER ONLY)** Extra pipeline minutes quota for this group. |
+### Options for `default_branch_protection`
+
+The `default_branch_protection` attribute determines whether developers and maintainers can push to the applicable master branch, as described in the following table:
+
+| Value | Description |
+|-------|-------------------------------------------------------------------------------------------------------------|
+| `0` | No protection. Developers and maintainers can: <br>- Push new commits<br>- Force push changes<br>- Delete the branch |
+| `1` | Partial protection. Developers and maintainers can: <br>- Push new commits |
+| `2` | Full protection. Only maintainers can: <br>- Push new commits |
+
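For example, a hedged sketch of creating a group whose default branch only maintainers can push to (the group name and path are placeholders):

```shell
# Illustrative sketch: create a group with full default branch protection (value 2).
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" \
  --data "name=protected-group&path=protected-group&default_branch_protection=2" \
  "https://gitlab.example.com/api/v4/groups"
```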
## Transfer project to group
-Transfer a project to the Group namespace. Available only for admin
+Transfer a project to the Group namespace. Available only to instance administrators, although an [alternative API endpoint](projects.md#transfer-a-project-to-a-new-namespace) exists that does not require instance administrator access. Transferring projects may fail when tagged packages exist in the project's repository.
-```
+```plaintext
POST /groups/:id/projects/:project_id
```
@@ -508,14 +524,18 @@ Parameters:
| Attribute | Type | Required | Description |
| ------------ | -------------- | -------- | ----------- |
-| `id` | integer/string | yes | The ID or [URL-encoded path of the group](README.md#namespaced-path-encoding) owned by the authenticated user |
+| `id` | integer/string | yes | The ID or [URL-encoded path of the target group](README.md#namespaced-path-encoding) |
| `project_id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) |
+```shell
+curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/groups/4/projects/56
+```
+
## Update group
Updates the project group. Only available to group owners and administrators.
-```
+```plaintext
PUT /groups/:id
```
@@ -535,16 +555,17 @@ PUT /groups/:id
| `auto_devops_enabled` | boolean | no | Default to Auto DevOps pipeline for all projects within this group. |
| `subgroup_creation_level` | string | no | Allowed to create subgroups. Can be `owner` (Owners), or `maintainer` (Maintainers). |
| `emails_disabled` | boolean | no | Disable email notifications |
+| `avatar` | mixed | no | Image file for avatar of the group. [Introduced in GitLab 12.9](https://gitlab.com/gitlab-org/gitlab/issues/36681) |
| `mentions_disabled` | boolean | no | Disable the capability of a group from getting mentioned |
| `lfs_enabled` (optional) | boolean | no | Enable/disable Large File Storage (LFS) for the projects in this group. |
| `request_access_enabled` | boolean | no | Allow users to request member access. |
+| `default_branch_protection` | integer | no | See [Options for `default_branch_protection`](#options-for-default_branch_protection). |
| `file_template_project_id` | integer | no | **(PREMIUM)** The ID of a project to load custom file templates from. |
| `shared_runners_minutes_limit` | integer | no | **(STARTER ONLY)** Pipeline minutes quota for this group. |
| `extra_shared_runners_minutes_limit` | integer | no | **(STARTER ONLY)** Extra pipeline minutes quota for this group. |
```shell
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/groups/5?name=Experimental"
-
```
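Because the newly documented `avatar` attribute is an image file, it is passed as a multipart form field rather than as a query parameter. A hedged sketch (the file path is a placeholder):

```shell
# Illustrative only: update the group avatar with a multipart form upload.
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" \
  --form "avatar=@/path/to/avatar.png" \
  "https://gitlab.example.com/api/v4/groups/5"
```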
This endpoint returns:
@@ -635,7 +656,7 @@ This endpoint either:
- Removes group, and queues a background job to delete all projects in the group as well.
- Since [GitLab 12.8](https://gitlab.com/gitlab-org/gitlab/issues/33257), on [Premium or Silver](https://about.gitlab.com/pricing/) or higher tiers, marks a group for deletion. The deletion will happen 7 days later by default, but this can be changed in the [instance settings](../user/admin_area/settings/visibility_and_access_controls.md#default-deletion-adjourned-period-premium-only).
-```
+```plaintext
DELETE /groups/:id
```
@@ -667,7 +688,7 @@ Parameters:
Get all groups that match your string in their name or path.
-```
+```plaintext
GET /groups?search=foobar
```
@@ -691,7 +712,7 @@ These are different from [System Hooks](system_hooks.md) that are system wide an
Get a list of group hooks
-```
+```plaintext
GET /groups/:id/hooks
```
@@ -708,7 +729,7 @@ Get a specific hook for a group.
| `id` | integer/string | yes | The ID or [URL-encoded path of the group](README.md#namespaced-path-encoding) |
| `hook_id` | integer | yes | The ID of a group hook |
-```
+```plaintext
GET /groups/:id/hooks/:hook_id
```
@@ -735,7 +756,7 @@ GET /groups/:id/hooks/:hook_id
Adds a hook to a specified group.
-```
+```plaintext
POST /groups/:id/hooks
```
@@ -759,7 +780,7 @@ POST /groups/:id/hooks
Edits a hook for a specified group.
-```
+```plaintext
PUT /groups/:id/hooks/:hook_id
```
@@ -785,7 +806,7 @@ PUT /groups/:id/hooks/:hook_id
Removes a hook from a group. This is an idempotent method and can be called multiple times.
Either the hook is available or not.
-```
+```plaintext
DELETE /groups/:id/hooks/:hook_id
```
@@ -802,7 +823,7 @@ Group audit events can be accessed via the [Group Audit Events API](audit_events
Syncs the group with its linked LDAP group. Only available to group owners and administrators.
-```
+```plaintext
POST /groups/:id/ldap_sync
```
@@ -822,7 +843,7 @@ List, add, and delete LDAP group links.
Lists LDAP group links.
-```
+```plaintext
GET /groups/:id/ldap_group_links
```
@@ -834,7 +855,7 @@ Parameters:
Adds an LDAP group link.
-```
+```plaintext
POST /groups/:id/ldap_group_links
```
@@ -849,7 +870,7 @@ Parameters:
Deletes an LDAP group link.
-```
+```plaintext
DELETE /groups/:id/ldap_group_links/:cn
```
@@ -860,7 +881,7 @@ Parameters:
Deletes a LDAP group link for a specific LDAP provider
-```
+```plaintext
DELETE /groups/:id/ldap_group_links/:provider/:cn
```
@@ -876,13 +897,13 @@ By default, groups only get 20 namespaces at a time because the API results are
To get more (up to 100), pass the following as an argument to the API call:
-```
+```plaintext
/groups?per_page=100
```
And to switch pages add:
-```
+```plaintext
/groups?per_page=100&page=2
```
diff --git a/doc/api/import.md b/doc/api/import.md
index 0f8a9bc71d1..8db8dc7eea4 100644
--- a/doc/api/import.md
+++ b/doc/api/import.md
@@ -4,7 +4,7 @@
Import your projects from GitHub to GitLab via the API.
-```
+```plaintext
POST /import/github
```
diff --git a/doc/api/issue_links.md b/doc/api/issue_links.md
index 2a64c2370fc..b6502bf099c 100644
--- a/doc/api/issue_links.md
+++ b/doc/api/issue_links.md
@@ -5,7 +5,7 @@
Get a list of related issues of a given issue, sorted by the relationship creation datetime (ascending).
Issues will be filtered according to the user authorizations.
-```
+```plaintext
GET /projects/:id/issues/:issue_iid/links
```
@@ -57,7 +57,7 @@ Parameters:
Creates a two-way relation between two issues. User must be allowed to update both issues in order to succeed.
-```
+```plaintext
POST /projects/:id/issues/:issue_iid/links
```
@@ -145,7 +145,7 @@ Example response:
Deletes an issue link, thus removes the two-way relationship.
-```
+```plaintext
DELETE /projects/:id/issues/:issue_iid/links/:issue_link_id
```
diff --git a/doc/api/issues.md b/doc/api/issues.md
index b688a75ad67..d1404efd265 100644
--- a/doc/api/issues.md
+++ b/doc/api/issues.md
@@ -26,7 +26,7 @@ Get all issues the authenticated user has access to. By default it
returns only issues created by the current user. To get all issues,
use parameter `scope=all`.
-```
+```plaintext
GET /issues
GET /issues?state=opened
GET /issues?state=closed
@@ -47,7 +47,7 @@ GET /issues?confidential=true
| ------------------- | ---------------- | ---------- | --------------------------------------------------------------------------------------------------------------------------------------------------- |
| `state` | string | no | Return `all` issues or just those that are `opened` or `closed` |
| `labels` | string | no | Comma-separated list of label names, issues must have all labels to be returned. `None` lists all issues with no labels. `Any` lists all issues with at least one label. `No+Label` (Deprecated) lists all issues with no labels. Predefined names are case-insensitive. |
-| `with_labels_details` | Boolean | no | If `true`, response will return more details for each label in labels field: `:name`, `:color`, `:description`, `:description_html`, `:text_color`. Default is `false`. The `description_html` attribute was introduced in [GitLab 12.7](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/21413)|
+| `with_labels_details` | boolean | no | If `true`, response will return more details for each label in labels field: `:name`, `:color`, `:description`, `:description_html`, `:text_color`. Default is `false`. The `description_html` attribute was introduced in [GitLab 12.7](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/21413)|
| `milestone` | string | no | The milestone title. `None` lists all issues with no milestone. `Any` lists all issues that have an assigned milestone. |
| `scope` | string | no | Return issues for the given scope: `created_by_me`, `assigned_to_me` or `all`. Defaults to `created_by_me`<br> For versions before 11.0, use the now deprecated `created-by-me` or `assigned-to-me` scopes instead.<br> _([Introduced][ce-13004] in GitLab 9.5. [Changed to snake_case][ce-18935] in GitLab 11.0)_ |
| `author_id` | integer | no | Return issues created by the given user `id`. Mutually exclusive with `author_username`. Combine with `scope=all` or `scope=assigned_to_me`. _([Introduced][ce-13004] in GitLab 9.5)_ |
@@ -65,7 +65,7 @@ GET /issues?confidential=true
| `created_before` | datetime | no | Return issues created on or before the given time |
| `updated_after` | datetime | no | Return issues updated on or after the given time |
| `updated_before` | datetime | no | Return issues updated on or before the given time |
-| `confidential` | Boolean | no | Filter confidential or public issues. |
+| `confidential` | boolean | no | Filter confidential or public issues. |
| `not` | Hash | no | Return issues that do not match the parameters supplied. Accepts: `labels`, `milestone`, `author_id`, `author_username`, `assignee_id`, `assignee_username`, `my_reaction_emoji`, `search`, `in` |
```shell
@@ -181,7 +181,7 @@ the `weight` parameter:
Get a list of a group's issues.
-```
+```plaintext
GET /groups/:id/issues
GET /groups/:id/issues?state=opened
GET /groups/:id/issues?state=closed
@@ -203,7 +203,7 @@ GET /groups/:id/issues?confidential=true
| `id` | integer/string | yes | The ID or [URL-encoded path of the group](README.md#namespaced-path-encoding) owned by the authenticated user |
| `state` | string | no | Return all issues or just those that are `opened` or `closed` |
| `labels` | string | no | Comma-separated list of label names, issues must have all labels to be returned. `None` lists all issues with no labels. `Any` lists all issues with at least one label. `No+Label` (Deprecated) lists all issues with no labels. Predefined names are case-insensitive. |
-| `with_labels_details` | Boolean | no | If `true`, response will return more details for each label in labels field: `:name`, `:color`, `:description`, `:description_html`, `:text_color`. Default is `false`. The `description_html` attribute was introduced in [GitLab 12.7](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/21413) |
+| `with_labels_details` | boolean | no | If `true`, response will return more details for each label in labels field: `:name`, `:color`, `:description`, `:description_html`, `:text_color`. Default is `false`. The `description_html` attribute was introduced in [GitLab 12.7](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/21413) |
| `iids[]` | integer array | no | Return only the issues having the given `iid` |
| `milestone` | string | no | The milestone title. `None` lists all issues with no milestone. `Any` lists all issues that have an assigned milestone. |
| `scope` | string | no | Return issues for the given scope: `created_by_me`, `assigned_to_me` or `all`.<br> For versions before 11.0, use the now deprecated `created-by-me` or `assigned-to-me` scopes instead.<br> _([Introduced][ce-13004] in GitLab 9.5. [Changed to snake_case][ce-18935] in GitLab 11.0)_ |
@@ -220,9 +220,9 @@ GET /groups/:id/issues?confidential=true
| `created_before` | datetime | no | Return issues created on or before the given time |
| `updated_after` | datetime | no | Return issues updated on or after the given time |
| `updated_before` | datetime | no | Return issues updated on or before the given time |
-| `confidential` | Boolean | no | Filter confidential or public issues. |
+| `confidential` | boolean | no | Filter confidential or public issues. |
| `not` | Hash | no | Return issues that do not match the parameters supplied. Accepts: `labels`, `milestone`, `author_id`, `author_username`, `assignee_id`, `assignee_username`, `my_reaction_emoji`, `search`, `in` |
-| `non_archived` | Boolean | no | Return issues from non archived projects. Default is true. _(Introduced in [GitLab 12.8](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/23785))_ |
+| `non_archived` | boolean | no | Return issues from non archived projects. Default is true. _(Introduced in [GitLab 12.8](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/23785))_ |
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/groups/4/issues
@@ -336,7 +336,7 @@ the `weight` parameter:
Get a list of a project's issues.
-```
+```plaintext
GET /projects/:id/issues
GET /projects/:id/issues?state=opened
GET /projects/:id/issues?state=closed
@@ -359,7 +359,7 @@ GET /projects/:id/issues?confidential=true
| `iids[]` | integer array | no | Return only the milestone having the given `iid` |
| `state` | string | no | Return all issues or just those that are `opened` or `closed` |
| `labels` | string | no | Comma-separated list of label names, issues must have all labels to be returned. `None` lists all issues with no labels. `Any` lists all issues with at least one label. `No+Label` (Deprecated) lists all issues with no labels. Predefined names are case-insensitive. |
-| `with_labels_details` | Boolean | no | If `true`, response will return more details for each label in labels field: `:name`, `:color`, `:description`, `:description_html`, `:text_color`. Default is `false`. `description_html` Introduced in [GitLab 12.7](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/21413) |
+| `with_labels_details` | boolean | no | If `true`, response will return more details for each label in labels field: `:name`, `:color`, `:description`, `:description_html`, `:text_color`. Default is `false`. `description_html` Introduced in [GitLab 12.7](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/21413) |
| `milestone` | string | no | The milestone title. `None` lists all issues with no milestone. `Any` lists all issues that have an assigned milestone. |
| `scope` | string | no | Return issues for the given scope: `created_by_me`, `assigned_to_me` or `all`.<br> For versions before 11.0, use the now deprecated `created-by-me` or `assigned-to-me` scopes instead.<br> _([Introduced][ce-13004] in GitLab 9.5. [Changed to snake_case][ce-18935] in GitLab 11.0)_ |
| `author_id` | integer | no | Return issues created by the given user `id`. Mutually exclusive with `author_username`. Combine with `scope=all` or `scope=assigned_to_me`. _([Introduced][ce-13004] in GitLab 9.5)_ |
@@ -375,7 +375,7 @@ GET /projects/:id/issues?confidential=true
| `created_before` | datetime | no | Return issues created on or before the given time |
| `updated_after` | datetime | no | Return issues updated on or after the given time |
| `updated_before` | datetime | no | Return issues updated on or before the given time |
-| `confidential` | Boolean | no | Filter confidential or public issues. |
+| `confidential` | boolean | no | Filter confidential or public issues. |
| `not` | Hash | no | Return issues that do not match the parameters supplied. Accepts: `labels`, `milestone`, `author_id`, `author_username`, `assignee_id`, `assignee_username`, `my_reaction_emoji`, `search`, `in` |
```shell
@@ -497,7 +497,7 @@ the `weight` parameter:
Get a single project issue.
-```
+```plaintext
GET /projects/:id/issues/:issue_iid
```
@@ -637,7 +637,7 @@ Please use `iid` of the `epic` attribute instead.
Creates a new project issue.
-```
+```plaintext
POST /projects/:id/issues
```
@@ -745,7 +745,7 @@ the `weight` parameter:
Updates an existing project issue. This call is also used to mark an issue as
closed.
-```
+```plaintext
PUT /projects/:id/issues/:issue_iid
```
@@ -760,7 +760,7 @@ PUT /projects/:id/issues/:issue_iid
| `milestone_id` | integer | no | The global ID of a milestone to assign the issue to. Set to `0` or provide an empty value to unassign a milestone.|
| `labels` | string | no | Comma-separated label names for an issue. Set to an empty string to unassign all labels. |
| `state_event` | string | no | The state event of an issue. Set `close` to close the issue and `reopen` to reopen it |
-| `updated_at` | string | no | Date time string, ISO 8601 formatted, e.g. `2016-03-11T03:45:40Z` (requires admin or project owner rights) |
+| `updated_at` | string | no | Date time string, ISO 8601 formatted, e.g. `2016-03-11T03:45:40Z` (requires admin or project owner rights). Empty string or null values are not accepted.|
| `due_date` | string | no | Date time string in the format YEAR-MONTH-DAY, e.g. `2016-03-11` |
| `weight` **(STARTER)** | integer | no | The weight of the issue. Valid values are greater than or equal to 0. 0 |
| `discussion_locked` | boolean | no | Flag indicating if the issue's discussion is locked. If the discussion is locked only project members can add or edit comments. |
@@ -851,15 +851,20 @@ the `weight` parameter:
}
```
-**Note**: `assignee` column is deprecated, now we show it as a single-sized array `assignees` to conform to the GitLab EE API.
+NOTE: **Note:**
+At least one of the following parameters must be passed for the request to succeed: `:assignee_id`, `:assignee_ids`, `:confidential`, `:created_at`, `:description`, `:discussion_locked`, `:due_date`, `:labels`, `:milestone_id`, `:state_event`, or `:title`.
-**Note**: The `closed_by` attribute was [introduced in GitLab 10.6][ce-17042]. This value will only be present for issues which were closed after GitLab 10.6 and when the user account that closed the issue still exists.
+NOTE: **Note:**
+`assignee` column is deprecated. We now show it as a single-sized array `assignees` to conform to the GitLab EE API.
+
+NOTE: **Note:**
+The `closed_by` attribute was [introduced in GitLab 10.6][ce-17042]. This value will only be present for issues which were closed after GitLab 10.6 and when the user account that closed the issue still exists.
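Since a single parameter satisfies the requirement above, a hedged example of the simplest valid update, closing an issue by passing only `state_event` (project and issue IDs are placeholders):

```shell
# Illustrative only: close issue 85 in project 4 by sending a single parameter.
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" \
  "https://gitlab.example.com/api/v4/projects/4/issues/85?state_event=close"
```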
## Delete an issue
Only for admins and project owners. Deletes the issue in question.
-```
+```plaintext
DELETE /projects/:id/issues/:issue_iid
```
@@ -881,7 +886,7 @@ issue, error `400` together with an explaining error message is returned.
If a given label and/or milestone with the same name also exists in the target
project, it will then be assigned to the issue that is being moved.
-```
+```plaintext
POST /projects/:id/issues/:issue_iid/move
```
@@ -988,7 +993,7 @@ Subscribes the authenticated user to an issue to receive notifications.
If the user is already subscribed to the issue, the status code `304`
is returned.
-```
+```plaintext
POST /projects/:id/issues/:issue_iid/subscribe
```
@@ -1094,7 +1099,7 @@ Unsubscribes the authenticated user from the issue to not receive notifications
from it. If the user is not subscribed to the issue, the
status code `304` is returned.
-```
+```plaintext
POST /projects/:id/issues/:issue_iid/unsubscribe
```
@@ -1165,7 +1170,7 @@ Manually creates a todo for the current user on an issue. If
there already exists a todo for the user on that issue, status code `304` is
returned.
-```
+```plaintext
POST /projects/:id/issues/:issue_iid/todo
```
@@ -1279,7 +1284,7 @@ Example response:
Sets an estimated time of work for this issue.
-```
+```plaintext
POST /projects/:id/issues/:issue_iid/time_estimate
```
@@ -1308,7 +1313,7 @@ Example response:
Resets the estimated time for this issue to 0 seconds.
-```
+```plaintext
POST /projects/:id/issues/:issue_iid/reset_time_estimate
```
@@ -1336,7 +1341,7 @@ Example response:
Adds spent time for this issue
-```
+```plaintext
POST /projects/:id/issues/:issue_iid/add_spent_time
```
@@ -1365,7 +1370,7 @@ Example response:
Resets the total spent time for this issue to 0 seconds.
-```
+```plaintext
POST /projects/:id/issues/:issue_iid/reset_spent_time
```
@@ -1391,7 +1396,7 @@ Example response:
## Get time tracking stats
-```
+```plaintext
GET /projects/:id/issues/:issue_iid/time_stats
```
@@ -1419,7 +1424,7 @@ Example response:
Get all the merge requests that are related to the issue.
-```
+```plaintext
GET /projects/:id/issues/:issue_id/related_merge_requests
```
@@ -1575,7 +1580,7 @@ Example response:
Get all the merge requests that will close issue when merged.
-```
+```plaintext
GET /projects/:id/issues/:issue_iid/closed_by
```
@@ -1648,7 +1653,7 @@ Example response:
## Participants on issues
-```
+```plaintext
GET /projects/:id/issues/:issue_iid/participants
```
@@ -1692,7 +1697,7 @@ Comments are done via the [notes](notes.md) resource.
Available only for admins.
-```
+```plaintext
GET /projects/:id/issues/:issue_iid/user_agent_detail
```
diff --git a/doc/api/issues_statistics.md b/doc/api/issues_statistics.md
index 2c391166088..511078f3028 100644
--- a/doc/api/issues_statistics.md
+++ b/doc/api/issues_statistics.md
@@ -11,7 +11,7 @@ Gets issues count statistics on all issues the authenticated user has access to.
returns only issues created by the current user. To get all issues,
use parameter `scope=all`.
-```
+```plaintext
GET /issues_statistics
GET /issues_statistics?labels=foo
GET /issues_statistics?labels=foo,bar
@@ -43,7 +43,7 @@ GET /issues_statistics?confidential=true
| `created_before` | datetime | no | Return issues created on or before the given time |
| `updated_after` | datetime | no | Return issues updated on or after the given time |
| `updated_before` | datetime | no | Return issues updated on or before the given time |
-| `confidential` | Boolean | no | Filter confidential or public issues. |
+| `confidential` | boolean | no | Filter confidential or public issues. |
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/issues_statistics
@@ -67,7 +67,7 @@ Example response:
Gets issues count statistics for given group.
-```
+```plaintext
GET /groups/:id/issues_statistics
GET /groups/:id/issues_statistics?labels=foo
GET /groups/:id/issues_statistics?labels=foo,bar
@@ -99,7 +99,7 @@ GET /groups/:id/issues_statistics?confidential=true
| `created_before` | datetime | no | Return issues created on or before the given time |
| `updated_after` | datetime | no | Return issues updated on or after the given time |
| `updated_before` | datetime | no | Return issues updated on or before the given time |
-| `confidential` | Boolean | no | Filter confidential or public issues. |
+| `confidential` | boolean | no | Filter confidential or public issues. |
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/groups/4/issues_statistics
@@ -123,7 +123,7 @@ Example response:
Gets issues count statistics for given project.
-```
+```plaintext
GET /projects/:id/issues_statistics
GET /projects/:id/issues_statistics?labels=foo
GET /projects/:id/issues_statistics?labels=foo,bar
@@ -155,7 +155,7 @@ GET /projects/:id/issues_statistics?confidential=true
| `created_before` | datetime | no | Return issues created on or before the given time |
| `updated_after` | datetime | no | Return issues updated on or after the given time |
| `updated_before` | datetime | no | Return issues updated on or before the given time |
-| `confidential` | Boolean | no | Filter confidential or public issues. |
+| `confidential` | boolean | no | Filter confidential or public issues. |
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/projects/4/issues_statistics
diff --git a/doc/api/jobs.md b/doc/api/jobs.md
index 00920f94610..6cd2d0a04f5 100644
--- a/doc/api/jobs.md
+++ b/doc/api/jobs.md
@@ -4,7 +4,7 @@
Get a list of jobs in a project. Jobs are sorted in descending order of their IDs.
-```
+```plaintext
GET /projects/:id/jobs
```
@@ -138,7 +138,7 @@ Example of response
Get a list of jobs for a pipeline.
-```
+```plaintext
GET /projects/:id/pipelines/:pipeline_id/jobs
```
@@ -273,7 +273,7 @@ Example of response
Get a single job of a project
-```
+```plaintext
GET /projects/:id/jobs/:job_id
```
@@ -351,7 +351,7 @@ Example of response
Get the job's artifacts zipped archive of a project.
-```
+```plaintext
GET /projects/:id/jobs/:job_id/artifacts
```
@@ -414,7 +414,7 @@ the given reference name and job, provided the job finished successfully. This
is the same as [getting the job's artifacts](#get-job-artifacts), but by
defining the job's name instead of its ID.
-```
+```plaintext
GET /projects/:id/jobs/artifacts/:ref_name/download?job=name
```
@@ -476,7 +476,7 @@ Download a single artifact file from a job with a specified ID from within
the job's artifacts zipped archive. The file is extracted from the archive and
streamed to the client.
-```
+```plaintext
GET /projects/:id/jobs/:job_id/artifacts/*artifact_path
```
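
As an illustrative sketch only, downloading one file from a job's artifacts archive could look like this (project ID, job ID, and the artifact path are hypothetical):

```shell
curl --location --header "PRIVATE-TOKEN: <your_access_token>" --output file.pdf "https://gitlab.example.com/api/v4/projects/1/jobs/8/artifacts/some/release/file.pdf"
```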
@@ -510,7 +510,7 @@ Download a single artifact file for a specific job of the latest successful
pipeline for the given reference name from within the job's artifacts archive.
The file is extracted from the archive and streamed to the client.
-```
+```plaintext
GET /projects/:id/jobs/artifacts/:ref_name/raw/*artifact_path?job=name
```
@@ -541,7 +541,7 @@ Possible response status codes:
Get a log (trace) of a specific job of a project:
-```
+```plaintext
GET /projects/:id/jobs/:job_id/trace
```
@@ -565,7 +565,7 @@ Possible response status codes:
Cancel a single job of a project
-```
+```plaintext
POST /projects/:id/jobs/:job_id/cancel
```
@@ -614,7 +614,7 @@ Example of response
Retry a single job of a project
-```
+```plaintext
POST /projects/:id/jobs/:job_id/retry
```
@@ -663,7 +663,7 @@ Example of response
Erase a single job of a project (remove job artifacts and a job log)
-```
+```plaintext
POST /projects/:id/jobs/:job_id/erase
```
@@ -717,7 +717,7 @@ Example of response
Prevents artifacts from being deleted when expiration is set.
-```
+```plaintext
POST /projects/:id/jobs/:job_id/artifacts/keep
```
@@ -773,7 +773,7 @@ Example response:
Delete artifacts of a job.
-```
+```plaintext
DELETE /projects/:id/jobs/:job_id/artifacts
```
@@ -797,7 +797,7 @@ If the artifacts were deleted successfully, a response with status `204 No Conte
Triggers a manual action to start a job.
-```
+```plaintext
POST /projects/:id/jobs/:job_id/play
```
diff --git a/doc/api/keys.md b/doc/api/keys.md
index 05933e5a1d1..d4eb1161a97 100644
--- a/doc/api/keys.md
+++ b/doc/api/keys.md
@@ -24,6 +24,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" 'https://gitlab.example.com/a
"title": "Sample key 25",
"key": "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAIEAiPWx6WM4lhHNedGfBpPJNPpZ7yKu+dnn1SJejgt1256k6YjzGGphH2TUxwKzxcKDKKezwkpfnxPkSMkuEspGRt/aZZ9wa++Oi7Qkr8prgHc4soW6NUlfDzpvZK2H5E7eQaSeP3SAwGmQKUFHCddNaP0L+hM7zhFNzjFvpaMgJw0=",
"created_at": "2015-09-03T07:24:44.627Z",
+ "expires_at": "2020-05-05T00:00:00.000Z"
"user": {
"name": "John Smith",
"username": "john_smith",
@@ -92,6 +93,7 @@ Example response:
"title": "Sample key 1",
"key": "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAIEAiPWx6WM4lhHNedGfBpPJNPpZ7yKu+dnn1SJejgt1016k6YjzGGphH2TUxwKzxcKDKKezwkpfnxPkSMkuEspGRt/aZZ9wa++Oi7Qkr8prgHc4soW6NUlfDzpvZK2H5E7eQaSeP3SAwGmQKUFHCddNaP0L+hM7zhFNzjFvpaMgJw0=",
"created_at": "2019-11-14T15:11:13.222Z",
+ "expires_at": "2020-05-05T00:00:00.000Z"
"user": {
"id": 1,
"name": "Administrator",
diff --git a/doc/api/labels.md b/doc/api/labels.md
index 4ee7484d301..54af6c093b9 100644
--- a/doc/api/labels.md
+++ b/doc/api/labels.md
@@ -7,7 +7,7 @@ The `description_html` - was added to response JSON in [GitLab 12.7](https://git
Get all labels for a given project.
-```
+```plaintext
GET /projects/:id/labels
```
@@ -102,7 +102,7 @@ Example response:
Get a single label for a given project.
-```
+```plaintext
GET /projects/:id/labels/:label_id
```
@@ -139,7 +139,7 @@ Example response:
Creates a new label for the given repository with the given name and color.
-```
+```plaintext
POST /projects/:id/labels
```
@@ -178,7 +178,7 @@ Example response:
Deletes a label with a given name.
-```
+```plaintext
DELETE /projects/:id/labels/:label_id
```
@@ -198,7 +198,7 @@ NOTE: **Note:** An older endpoint `DELETE /projects/:id/labels` with `name` in t
Updates an existing label with a new name or new color. At least one parameter
is required to update the label.
-```
+```plaintext
PUT /projects/:id/labels/:label_id
```
@@ -242,7 +242,7 @@ NOTE: **Note:** An older endpoint `PUT /projects/:id/labels` with `name` or `lab
Promotes a project label to a group label.
-```
+```plaintext
PUT /projects/:id/labels/:label_id/promote
```
@@ -279,7 +279,7 @@ Subscribes the authenticated user to a label to receive notifications.
If the user is already subscribed to the label, the status code `304`
is returned.
-```
+```plaintext
POST /projects/:id/labels/:label_id/subscribe
```
@@ -317,7 +317,7 @@ Unsubscribes the authenticated user from a label to not receive notifications
from it. If the user is not subscribed to the label, the
status code `304` is returned.
-```
+```plaintext
POST /projects/:id/labels/:label_id/unsubscribe
```
diff --git a/doc/api/license.md b/doc/api/license.md
index 2be26e1bddf..71e95fc3202 100644
--- a/doc/api/license.md
+++ b/doc/api/license.md
@@ -5,7 +5,7 @@ as an admin.
## Retrieve information about the current license
-```
+```plaintext
GET /license
```
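
For example, the current license information can be requested with a standard authenticated call:

```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/license"
```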
@@ -34,7 +34,7 @@ GET /license
## Retrieve information about all licenses
-```
+```plaintext
GET /licenses
```
@@ -93,7 +93,7 @@ Returns:
## Add a new license
-```
+```plaintext
POST /license
```
@@ -137,7 +137,7 @@ Returns:
## Delete a license
-```
+```plaintext
DELETE /license/:id
```
diff --git a/doc/api/lint.md b/doc/api/lint.md
index c495bfaa7cb..4ecce92df26 100644
--- a/doc/api/lint.md
+++ b/doc/api/lint.md
@@ -4,7 +4,7 @@
Checks if your `.gitlab-ci.yml` file is valid.
-```
+```plaintext
POST /ci/lint
```
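
A non-authoritative sketch of calling this endpoint follows; it assumes the YAML to validate is sent as a `content` string in a JSON body, which is an assumption not shown in this excerpt:

```shell
# Assumes the endpoint accepts a JSON body with a `content` string holding the CI YAML.
curl --header "Content-Type: application/json" --data '{"content": "test_job:\n  script: echo ok"}' "https://gitlab.example.com/api/v4/ci/lint"
```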
diff --git a/doc/api/managed_licenses.md b/doc/api/managed_licenses.md
index 34b1e5dc6dc..66125d23a82 100644
--- a/doc/api/managed_licenses.md
+++ b/doc/api/managed_licenses.md
@@ -4,7 +4,7 @@
Get all managed licenses for a given project.
-```
+```plaintext
GET /projects/:id/managed_licenses
```
@@ -37,7 +37,7 @@ Example response:
Shows an existing managed license.
-```
+```plaintext
GET /projects/:id/managed_licenses/:managed_license_id
```
@@ -64,7 +64,7 @@ Example response:
Creates a new managed license for the given project with the given name and approval status.
-```
+```plaintext
POST /projects/:id/managed_licenses
```
@@ -92,7 +92,7 @@ Example response:
Deletes a managed license with a given id.
-```
+```plaintext
DELETE /projects/:id/managed_licenses/:managed_license_id
```
@@ -111,7 +111,7 @@ When successful, it replies with an HTTP 204 response.
Updates an existing managed license with a new approval status.
-```
+```plaintext
PATCH /projects/:id/managed_licenses/:managed_license_id
```
diff --git a/doc/api/markdown.md b/doc/api/markdown.md
index b9207b7ba15..3b4b6323022 100644
--- a/doc/api/markdown.md
+++ b/doc/api/markdown.md
@@ -6,7 +6,7 @@ Available only in APIv4.
## Render an arbitrary Markdown document
-```
+```plaintext
POST /api/v4/markdown
```
@@ -14,7 +14,7 @@ POST /api/v4/markdown
| --------- | ------- | ------------- | ------------------------------------------ |
| `text` | string | yes | The Markdown text to render |
| `gfm` | boolean | no (optional) | Render text using GitLab Flavored Markdown. Default is `false` |
-| `project` | string | no (optional) | Use `project` as a context when creating references using GitLab Flavored Markdown. [Authentication](README.html#authentication) is required if a project is not public. |
+| `project` | string | no (optional) | Use `project` as a context when creating references using GitLab Flavored Markdown. [Authentication](README.md#authentication) is required if a project is not public. |
```shell
curl --header Content-Type:application/json --data '{"text":"Hello world! :tada:", "gfm":true, "project":"group_example/project_example"}' https://gitlab.example.com/api/v4/markdown
diff --git a/doc/api/members.md b/doc/api/members.md
index ed6fb60f86c..e9131e2d4c3 100644
--- a/doc/api/members.md
+++ b/doc/api/members.md
@@ -4,7 +4,7 @@
The access levels are defined in the `Gitlab::Access` module. Currently, these levels are recognized:
-```
+```plaintext
10 => Guest access
20 => Reporter access
30 => Developer access
@@ -17,7 +17,9 @@ The access levels are defined in the `Gitlab::Access` module. Currently, these l
Gets a list of group or project members viewable by the authenticated user.
Returns only direct members and not inherited members through ancestor groups.
-```
+This endpoint takes pagination parameters `page` and `per_page` to restrict the list of users.
+
+```plaintext
GET /groups/:id/members
GET /projects/:id/members
```
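
A hypothetical request for the second page of 50 group members (the group ID is a placeholder):

```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/groups/7/members?per_page=50&page=2"
```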
@@ -45,7 +47,8 @@ Example response:
"avatar_url": "https://www.gravatar.com/avatar/c2525a7f58ae3776070e44c106c48e15?s=80&d=identicon",
"web_url": "http://192.168.1.8:3000/root",
"expires_at": "2012-10-22T14:13:35Z",
- "access_level": 30
+ "access_level": 30,
+ "group_saml_identity": null
},
{
"id": 2,
@@ -55,7 +58,12 @@ Example response:
"avatar_url": "https://www.gravatar.com/avatar/c2525a7f58ae3776070e44c106c48e15?s=80&d=identicon",
"web_url": "http://192.168.1.8:3000/root",
"expires_at": "2012-10-22T14:13:35Z",
- "access_level": 30
+ "access_level": 30,
+ "group_saml_identity": {
+ "extern_uid":"ABC-1234567890",
+ "provider": "group_saml",
+ "saml_provider_id": 10
+ }
}
]
```
@@ -66,7 +74,9 @@ Gets a list of group or project members viewable by the authenticated user, incl
When a user is a member of the project/group and of one or more ancestor groups, the user is returned only once, with the project `access_level` (if it exists)
or the `access_level` for the user in the first group they belong to in the project's ancestor groups chain.
-```
+This endpoint takes pagination parameters `page` and `per_page` to restrict the list of users.
+
+```plaintext
GET /groups/:id/members/all
GET /projects/:id/members/all
```
@@ -94,7 +104,8 @@ Example response:
"avatar_url": "https://www.gravatar.com/avatar/c2525a7f58ae3776070e44c106c48e15?s=80&d=identicon",
"web_url": "http://192.168.1.8:3000/root",
"expires_at": "2012-10-22T14:13:35Z",
- "access_level": 30
+ "access_level": 30,
+ "group_saml_identity": null
},
{
"id": 2,
@@ -105,6 +116,11 @@ Example response:
"web_url": "http://192.168.1.8:3000/root",
"expires_at": "2012-10-22T14:13:35Z",
"access_level": 30
+ "group_saml_identity": {
+ "extern_uid":"ABC-1234567890",
+ "provider": "group_saml",
+ "saml_provider_id": 10
+ }
},
{
"id": 3,
@@ -114,7 +130,8 @@ Example response:
"avatar_url": "https://www.gravatar.com/avatar/c2525a7f58ae3776070e44c106c48e15?s=80&d=identicon",
"web_url": "http://192.168.1.8:3000/root",
"expires_at": "2012-11-22T14:13:35Z",
- "access_level": 30
+ "access_level": 30,
+ "group_saml_identity": null
}
]
```
@@ -123,7 +140,7 @@ Example response:
Gets a member of a group or project. Returns only direct members and not inherited members through ancestor groups.
-```
+```plaintext
GET /groups/:id/members/:user_id
GET /projects/:id/members/:user_id
```
@@ -149,7 +166,8 @@ Example response:
"avatar_url": "https://www.gravatar.com/avatar/c2525a7f58ae3776070e44c106c48e15?s=80&d=identicon",
"web_url": "http://192.168.1.8:3000/root",
"access_level": 30,
- "expires_at": null
+ "expires_at": null,
+ "group_saml_identity": null
}
```
@@ -159,7 +177,7 @@ Example response:
Gets a member of a group or project, including members inherited through ancestor groups. See the corresponding [endpoint to list all inherited members](#list-all-members-of-a-group-or-project-including-inherited-members) for details.
-```
+```plaintext
GET /groups/:id/members/all/:user_id
GET /projects/:id/members/all/:user_id
```
@@ -185,7 +203,8 @@ Example response:
"avatar_url": "https://www.gravatar.com/avatar/c2525a7f58ae3776070e44c106c48e15?s=80&d=identicon",
"web_url": "http://192.168.1.8:3000/root",
"access_level": 30,
- "expires_at": null
+ "expires_at": null,
+ "group_saml_identity": null
}
```
@@ -193,7 +212,7 @@ Example response:
Adds a member to a group or project.
-```
+```plaintext
POST /groups/:id/members
POST /projects/:id/members
```
@@ -221,7 +240,8 @@ Example response:
"avatar_url": "https://www.gravatar.com/avatar/c2525a7f58ae3776070e44c106c48e15?s=80&d=identicon",
"web_url": "http://192.168.1.8:3000/root",
"expires_at": "2012-10-22T14:13:35Z",
- "access_level": 30
+ "access_level": 30,
+ "group_saml_identity": null
}
```
@@ -229,7 +249,7 @@ Example response:
Updates a member of a group or project.
-```
+```plaintext
PUT /groups/:id/members/:user_id
PUT /projects/:id/members/:user_id
```
@@ -257,7 +277,8 @@ Example response:
"avatar_url": "https://www.gravatar.com/avatar/c2525a7f58ae3776070e44c106c48e15?s=80&d=identicon",
"web_url": "http://192.168.1.8:3000/root",
"expires_at": "2012-10-22T14:13:35Z",
- "access_level": 40
+ "access_level": 40,
+ "group_saml_identity": null
}
```
@@ -265,7 +286,7 @@ Example response:
Removes a user from a group or project.
-```
+```plaintext
DELETE /groups/:id/members/:user_id
DELETE /projects/:id/members/:user_id
```
diff --git a/doc/api/merge_request_approvals.md b/doc/api/merge_request_approvals.md
index a96b9aadc41..004c10fa70a 100644
--- a/doc/api/merge_request_approvals.md
+++ b/doc/api/merge_request_approvals.md
@@ -11,7 +11,7 @@ Configuration for approvals on all Merge Requests (MR) in the project. Must be a
You can request information about a project's approval configuration using the
following endpoint:
-```
+```plaintext
GET /projects/:id/approvals
```
@@ -39,7 +39,7 @@ GET /projects/:id/approvals
If you are allowed to, you can change approval configuration using the following
endpoint:
-```
+```plaintext
POST /projects/:id/approvals
```
@@ -73,7 +73,7 @@ POST /projects/:id/approvals
You can request information about a project's approval rules using the following endpoint:
-```
+```plaintext
GET /projects/:id/approval_rules
```
@@ -172,7 +172,7 @@ GET /projects/:id/approval_rules
You can create project approval rules using the following endpoint:
-```
+```plaintext
POST /projects/:id/approval_rules
```
@@ -274,7 +274,7 @@ POST /projects/:id/approval_rules
You can update project approval rules using the following endpoint:
-```
+```plaintext
PUT /projects/:id/approval_rules/:approval_rule_id
```
@@ -379,7 +379,7 @@ PUT /projects/:id/approval_rules/:approval_rule_id
You can delete project approval rules using the following endpoint:
-```
+```plaintext
DELETE /projects/:id/approval_rules/:approval_rule_id
```
@@ -398,7 +398,7 @@ DELETE /projects/:id/approval_rules/:approval_rule_id
If you are allowed to, you can change approvers and approver groups using
the following endpoint:
-```
+```plaintext
PUT /projects/:id/approvers
```
@@ -464,7 +464,7 @@ Configuration for approvals on a specific Merge Request. Must be authenticated f
You can request information about a merge request's approval status using the
following endpoint:
-```
+```plaintext
GET /projects/:id/merge_requests/:merge_request_iid/approvals
```
@@ -510,7 +510,7 @@ GET /projects/:id/merge_requests/:merge_request_iid/approvals
If you are allowed to, you can change `approvals_required` using the following
endpoint:
-```
+```plaintext
POST /projects/:id/merge_requests/:merge_request_iid/approvals
```
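
As an illustration only, requiring two approvals on a merge request might look like this (IDs are placeholders):

```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/merge_requests/5/approvals?approvals_required=2"
```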
@@ -547,7 +547,7 @@ POST /projects/:id/merge_requests/:merge_request_iid/approvals
If you are allowed to, you can change approvers and approver groups using
the following endpoint:
-```
+```plaintext
PUT /projects/:id/merge_requests/:merge_request_iid/approvers
```
@@ -617,7 +617,7 @@ PUT /projects/:id/merge_requests/:merge_request_iid/approvers
You can request information about a merge request's approval state by using the following endpoint:
-```
+```plaintext
GET /projects/:id/merge_requests/:merge_request_iid/approval_state
```
@@ -688,7 +688,7 @@ This includes additional information about the users who have already approved
You can request information about a merge request's approval rules using the following endpoint:
-```
+```plaintext
GET /projects/:id/merge_requests/:merge_request_iid/approval_rules
```
@@ -764,7 +764,7 @@ GET /projects/:id/merge_requests/:merge_request_iid/approval_rules
You can create merge request approval rules using the following endpoint:
-```
+```plaintext
POST /projects/:id/merge_requests/:merge_request_iid/approval_rules
```
@@ -847,7 +847,7 @@ will be used.
You can update merge request approval rules using the following endpoint:
-```
+```plaintext
PUT /projects/:id/merge_requests/:merge_request_iid/approval_rules/:approval_rule_id
```
@@ -931,7 +931,7 @@ These are system generated rules.
You can delete merge request approval rules using the following endpoint:
-```
+```plaintext
DELETE /projects/:id/merge_requests/:merge_request_iid/approval_rules/:approval_rule_id
```
@@ -953,7 +953,7 @@ These are system generated rules.
If you are allowed to, you can approve a merge request using the following
endpoint:
-```
+```plaintext
POST /projects/:id/merge_requests/:merge_request_iid/approve
```
@@ -1016,7 +1016,7 @@ does not match, the response code will be `409`.
If you did approve a merge request, you can unapprove it using the following
endpoint:
-```
+```plaintext
POST /projects/:id/merge_requests/:merge_request_iid/unapprove
```
diff --git a/doc/api/merge_request_context_commits.md b/doc/api/merge_request_context_commits.md
index a4659204127..4fbef3594d6 100644
--- a/doc/api/merge_request_context_commits.md
+++ b/doc/api/merge_request_context_commits.md
@@ -1,10 +1,10 @@
-# Merge request context commits API
+# Merge request context commits API
## List MR context commits
Get a list of merge request context commits.
-```
+```plaintext
GET /projects/:id/merge_requests/:merge_request_iid/context_commits
```
@@ -22,11 +22,11 @@ Parameters:
"parent_ids": null,
"title": "Update README.md to include `Usage in testing and development`",
"message": "Update README.md to include `Usage in testing and development`",
- "author_name": "Luke \"Jared\" Bennett",
- "author_email": "lbennett@gitlab.com",
+ "author_name": "Example \"Sample\" User",
+ "author_email": "user@example.com",
"authored_date": "2017-04-11T10:08:59.000Z",
- "committer_name": "Luke \"Jared\" Bennett",
- "committer_email": "lbennett@gitlab.com",
+ "committer_name": "Example \"Sample\" User",
+ "committer_email": "user@example.com",
"committed_date": "2017-04-11T10:08:59.000Z"
}
]
@@ -36,7 +36,7 @@ Parameters:
Create a list of merge request context commits.
-```
+```plaintext
POST /projects/:id/merge_requests/:merge_request_iid/context_commits
```
@@ -45,7 +45,7 @@ Parameters:
- `id` (required) - The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user
- `merge_request_iid` (required) - The internal ID of the merge request
-```
+```plaintext
POST /projects/:id/merge_requests/
```
@@ -57,16 +57,16 @@ POST /projects/:id/merge_requests/
[
{
"id": "6d394385cf567f80a8fd85055db1ab4c5295806f",
- "message": "Added contributing guide\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n",
+ "message": "Added contributing guide\n\nSigned-off-by: Example User <user@example.com>\n",
"parent_ids": [
"1a0b36b3cdad1d2ee32457c102a8c0b7056fa863"
],
"authored_date": "2014-02-27T10:05:10.000+02:00",
- "author_name": "Dmitriy Zaporozhets",
- "author_email": "dmitriy.zaporozhets@gmail.com",
+ "author_name": "Example User",
+ "author_email": "user@example.com",
"committed_date": "2014-02-27T10:05:10.000+02:00",
- "committer_name": "Dmitriy Zaporozhets",
- "committer_email": "dmitriy.zaporozhets@gmail.com"
+ "committer_name": "Example User",
+ "committer_email": "user@example.com"
}
]
```
@@ -75,7 +75,7 @@ POST /projects/:id/merge_requests/
Delete a list of merge request context commits.
-```
+```plaintext
DELETE /projects/:id/merge_requests/:merge_request_iid/context_commits
```
diff --git a/doc/api/merge_requests.md b/doc/api/merge_requests.md
index 54d140461e1..3c31ebf067b 100644
--- a/doc/api/merge_requests.md
+++ b/doc/api/merge_requests.md
@@ -23,7 +23,7 @@ given state (`opened`, `closed`, `locked`, or `merged`) or all of them (`all`).
The pagination parameters `page` and `per_page` can be used to
restrict the list of merge requests.
-```
+```plaintext
GET /merge_requests
GET /merge_requests?state=opened
GET /merge_requests?state=all
@@ -45,7 +45,7 @@ Parameters:
| `milestone` | string | no | Return merge requests for a specific milestone. `None` returns merge requests with no milestone. `Any` returns merge requests that have an assigned milestone. |
| `view` | string | no | If `simple`, returns the `iid`, URL, title, description, and basic state of merge request |
| `labels` | string | no | Return merge requests matching a comma separated list of labels. `None` lists all merge requests with no labels. `Any` lists all merge requests with at least one label. `No+Label` (Deprecated) lists all merge requests with no labels. Predefined names are case-insensitive. |
-| `with_labels_details` | Boolean | no | If `true`, response will return more details for each label in labels field: `:name`, `:color`, `:description`, `:description_html`, `:text_color`. Default is `false`. Introduced in [GitLab 12.7](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/21413) |
+| `with_labels_details` | boolean | no | If `true`, response will return more details for each label in labels field: `:name`, `:color`, `:description`, `:description_html`, `:text_color`. Default is `false`. Introduced in [GitLab 12.7](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/21413) |
| `created_after` | datetime | no | Return merge requests created on or after the given time |
| `created_before` | datetime | no | Return merge requests created on or before the given time |
| `updated_after` | datetime | no | Return merge requests updated on or after the given time |
@@ -190,7 +190,7 @@ Get all merge requests for this project.
The `state` parameter can be used to get only merge requests with a given state (`opened`, `closed`, `locked`, or `merged`) or all of them (`all`).
The pagination parameters `page` and `per_page` can be used to restrict the list of merge requests.
-```
+```plaintext
GET /projects/:id/merge_requests
GET /projects/:id/merge_requests?state=opened
GET /projects/:id/merge_requests?state=all
@@ -221,7 +221,7 @@ Parameters:
| `milestone` | string | no | Return merge requests for a specific milestone. `None` returns merge requests with no milestone. `Any` returns merge requests that have an assigned milestone. |
| `view` | string | no | If `simple`, returns the `iid`, URL, title, description, and basic state of merge request |
| `labels` | string | no | Return merge requests matching a comma separated list of labels. `None` lists all merge requests with no labels. `Any` lists all merge requests with at least one label. `No+Label` (Deprecated) lists all merge requests with no labels. Predefined names are case-insensitive. |
-| `with_labels_details` | Boolean | no | If `true`, response will return more details for each label in labels field: `:name`, `:color`, `:description`, `:description_html`, `:text_color`. Default is `false`. Introduced in [GitLab 12.7](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/21413) |
+| `with_labels_details` | boolean | no | If `true`, response will return more details for each label in labels field: `:name`, `:color`, `:description`, `:description_html`, `:text_color`. Default is `false`. Introduced in [GitLab 12.7](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/21413) |
| `created_after` | datetime | no | Return merge requests created on or after the given time |
| `created_before` | datetime | no | Return merge requests created on or before the given time |
| `updated_after` | datetime | no | Return merge requests updated on or after the given time |
@@ -361,7 +361,7 @@ Get all merge requests for this group and its subgroups.
The `state` parameter can be used to get only merge requests with a given state (`opened`, `closed`, `locked`, or `merged`) or all of them (`all`).
The pagination parameters `page` and `per_page` can be used to restrict the list of merge requests.
-```
+```plaintext
GET /groups/:id/merge_requests
GET /groups/:id/merge_requests?state=opened
GET /groups/:id/merge_requests?state=all
@@ -383,7 +383,7 @@ Parameters:
| `milestone` | string | no | Return merge requests for a specific milestone. `None` returns merge requests with no milestone. `Any` returns merge requests that have an assigned milestone. |
| `view` | string | no | If `simple`, returns the `iid`, URL, title, description, and basic state of merge request |
| `labels` | string | no | Return merge requests matching a comma separated list of labels. `None` lists all merge requests with no labels. `Any` lists all merge requests with at least one label. `No+Label` (Deprecated) lists all merge requests with no labels. Predefined names are case-insensitive. |
-| `with_labels_details` | Boolean | no | If `true`, response will return more details for each label in labels field: `:name`, `:color`, `:description`, `:description_html`, `:text_color`. Default is `false`. Introduced in [GitLab 12.7](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/21413)|
+| `with_labels_details` | boolean | no | If `true`, response will return more details for each label in labels field: `:name`, `:color`, `:description`, `:description_html`, `:text_color`. Default is `false`. Introduced in [GitLab 12.7](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/21413)|
| `created_after` | datetime | no | Return merge requests created on or after the given time |
| `created_before` | datetime | no | Return merge requests created on or before the given time |
| `updated_after` | datetime | no | Return merge requests updated on or after the given time |
@@ -397,7 +397,7 @@ Parameters:
| `source_branch` | string | no | Return merge requests with the given source branch |
| `target_branch` | string | no | Return merge requests with the given target branch |
| `search` | string | no | Search merge requests against their `title` and `description` |
-| `non_archived` | Boolean | no | Return merge requests from non archived projects only. Default is true. _(Introduced in [GitLab 12.8](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/23809))_ |
+| `non_archived` | boolean | no | Return merge requests from non-archived projects only. Default is true. _(Introduced in [GitLab 12.8](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/23809))_ |
```json
[
@@ -524,7 +524,7 @@ integer. This is because when an MR has too many changes to display and store,
it will be capped at 1,000. In that case, the API will return the string
`"1000+"` for the changes count.
-```
+```plaintext
GET /projects/:id/merge_requests/:merge_request_iid
```
@@ -683,7 +683,7 @@ the `approvals_before_merge` parameter:
Get a list of merge request participants.
-```
+```plaintext
GET /projects/:id/merge_requests/:merge_request_iid/participants
```
@@ -717,7 +717,7 @@ Parameters:
Get a list of merge request commits.
-```
+```plaintext
GET /projects/:id/merge_requests/:merge_request_iid/commits
```
@@ -732,8 +732,8 @@ Parameters:
"id": "ed899a2f4b50b4370feeea94676502b42383c746",
"short_id": "ed899a2f4b5",
"title": "Replace sanitize with escape once",
- "author_name": "Dmitriy Zaporozhets",
- "author_email": "dzaporozhets@sphereconsultinginc.com",
+ "author_name": "Example User",
+ "author_email": "user@example.com",
"created_at": "2012-09-20T11:50:22+03:00",
"message": "Replace sanitize with escape once"
},
@@ -741,8 +741,8 @@ Parameters:
"id": "6104942438c14ec7bd21c6cd5bd995272b3faff6",
"short_id": "6104942438c",
"title": "Sanitize for network graph",
- "author_name": "randx",
- "author_email": "dmitriy.zaporozhets@gmail.com",
+ "author_name": "Example User",
+ "author_email": "user@example.com",
"created_at": "2012-09-20T09:06:12+03:00",
"message": "Sanitize for network graph"
}
@@ -753,7 +753,7 @@ Parameters:
Shows information about the merge request including its files and changes.
-```
+```plaintext
GET /projects/:id/merge_requests/:merge_request_iid/changes
```
@@ -864,7 +864,7 @@ Parameters:
Get a list of merge request pipelines.
-```
+```plaintext
GET /projects/:id/merge_requests/:merge_request_iid/pipelines
```
@@ -896,7 +896,7 @@ The new pipeline can be:
- A [pipeline for merged results](../ci/merge_request_pipelines/pipelines_for_merged_results/index.md)
if the [project setting is enabled](../ci/merge_request_pipelines/pipelines_for_merged_results/index.md#enabling-pipelines-for-merged-results).
-```
+```plaintext
POST /projects/:id/merge_requests/:merge_request_iid/pipelines
```
@@ -948,7 +948,7 @@ Parameters:
Creates a new merge request.
-```
+```plaintext
POST /projects/:id/merge_requests
```
@@ -1105,7 +1105,7 @@ the `approvals_before_merge` parameter:
Updates an existing merge request. You can change the target branch, title, or even close the MR.
-```
+```plaintext
PUT /projects/:id/merge_requests/:merge_request_iid
```
@@ -1264,7 +1264,7 @@ the `approvals_before_merge` parameter:
Only for admins and project owners. Deletes the merge request in question.
-```
+```plaintext
DELETE /projects/:id/merge_requests/:merge_request_iid
```
@@ -1289,20 +1289,20 @@ If the `sha` parameter is passed and does not match the HEAD of the source - you
If you don't have permissions to accept this merge request - you'll get a `401`
-```
+```plaintext
PUT /projects/:id/merge_requests/:merge_request_iid/merge
```
Parameters:
- `id` (required) - The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user
-- `merge_request_iid` (required) - Internal ID of MR
-- `merge_commit_message` (optional) - Custom merge commit message
-- `squash_commit_message` (optional) - Custom squash commit message
-- `squash` (optional) - if `true` the commits will be squashed into a single commit on merge
-- `should_remove_source_branch` (optional) - if `true` removes the source branch
+- `merge_request_iid` (required) - Internal ID of MR
+- `merge_commit_message` (optional) - Custom merge commit message
+- `squash_commit_message` (optional) - Custom squash commit message
+- `squash` (optional) - if `true` the commits will be squashed into a single commit on merge
+- `should_remove_source_branch` (optional) - if `true` removes the source branch
- `merge_when_pipeline_succeeds` (optional) - if `true` the MR is merged when the pipeline succeeds
-- `sha` (optional) - if present, then this SHA must match the HEAD of the source branch, otherwise the merge will fail
+- `sha` (optional) - if present, then this SHA must match the HEAD of the source branch, otherwise the merge will fail
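+
+As a non-authoritative sketch, accepting a merge request with a couple of these options set might look like this (IDs are placeholders):
+
+```shell
+curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/merge_requests/5/merge?squash=true&should_remove_source_branch=true"
+```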
```json
{
@@ -1450,14 +1450,14 @@ If the merge request has conflicts, is empty or already merged, you'll get a `40
It returns the HEAD commit of `refs/merge-requests/:iid/merge` in the response body in case of `200`.
-```
+```plaintext
GET /projects/:id/merge_requests/:merge_request_iid/merge_ref
```
Parameters:
- `id` (required) - The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user
-- `merge_request_iid` (required) - Internal ID of MR
+- `merge_request_iid` (required) - Internal ID of MR
```json
{
@@ -1473,7 +1473,7 @@ If the merge request is already merged or closed - you get `405` and error messa
In case the merge request is not set to be merged when the pipeline succeeds, you'll also get a `406` error.
-```
+```plaintext
POST /projects/:id/merge_requests/:merge_request_iid/cancel_merge_when_pipeline_succeeds
```
@@ -1621,7 +1621,7 @@ Automatically rebase the `source_branch` of the merge request against its
If you don't have permissions to push to the merge request's source branch -
you'll get a `403 Forbidden` response.
-```
+```plaintext
PUT /projects/:id/merge_requests/:merge_request_iid/rebase
```
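
An illustrative rebase call (project and merge request IDs are placeholders):

```shell
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/76/merge_requests/1/rebase"
```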
@@ -1684,7 +1684,7 @@ Comments are done via the [notes](notes.md) resource.
Get all the issues that would be closed by merging the provided merge request.
-```
+```plaintext
GET /projects/:id/merge_requests/:merge_request_iid/closes_issues
```
@@ -1760,7 +1760,7 @@ Example response when an external issue tracker (e.g. Jira) is used:
Subscribes the authenticated user to a merge request to receive notification. If the user is already subscribed to the merge request, the
status code `304` is returned.
-```
+```plaintext
POST /projects/:id/merge_requests/:merge_request_iid/subscribe
```
@@ -1911,7 +1911,7 @@ Unsubscribes the authenticated user from a merge request to not receive
notifications from that merge request. If the user is
not subscribed to the merge request, the status code `304` is returned.
-```
+```plaintext
POST /projects/:id/merge_requests/:merge_request_iid/unsubscribe
```
@@ -2062,7 +2062,7 @@ Manually creates a todo for the current user on a merge request.
If there already exists a todo for the user on that merge request,
status code `304` is returned.
-```
+```plaintext
POST /projects/:id/merge_requests/:merge_request_iid/todo
```
@@ -2179,7 +2179,7 @@ Example response:
Get a list of merge request diff versions.
-```
+```plaintext
GET /projects/:id/merge_requests/:merge_request_iid/versions
```
@@ -2220,7 +2220,7 @@ Example response:
Get a single merge request diff version.
-```
+```plaintext
GET /projects/:id/merge_requests/:merge_request_iid/versions/:version_id
```
@@ -2288,7 +2288,7 @@ Example response:
Sets an estimated time of work for this merge request.
-```
+```plaintext
POST /projects/:id/merge_requests/:merge_request_iid/time_estimate
```
@@ -2317,7 +2317,7 @@ Example response:
Resets the estimated time for this merge request to 0 seconds.
-```
+```plaintext
POST /projects/:id/merge_requests/:merge_request_iid/reset_time_estimate
```
@@ -2345,7 +2345,7 @@ Example response:
Adds spent time for this merge request
-```
+```plaintext
POST /projects/:id/merge_requests/:merge_request_iid/add_spent_time
```
@@ -2374,7 +2374,7 @@ Example response:
Resets the total spent time for this merge request to 0 seconds.
-```
+```plaintext
POST /projects/:id/merge_requests/:merge_request_iid/reset_spent_time
```
@@ -2400,7 +2400,7 @@ Example response:
## Get time tracking stats
-```
+```plaintext
GET /projects/:id/merge_requests/:merge_request_iid/time_stats
```
diff --git a/doc/api/merge_trains.md b/doc/api/merge_trains.md
new file mode 100644
index 00000000000..d8446ed445f
--- /dev/null
+++ b/doc/api/merge_trains.md
@@ -0,0 +1,80 @@
+# Merge Trains API **(PREMIUM)**
+
+> - [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/36146) in GitLab 12.9.
+> - Using this API you can consume GitLab's [Merge Train](../ci/merge_request_pipelines/pipelines_for_merged_results/merge_trains/index.md) entries.
+
+Every API call to merge trains must be authenticated with Developer or higher [permissions](../user/permissions.md).
+
+If a user is not a member of a project and the project is private, a `GET` request on that project results in a `404` status code.
+
+If Merge Trains is not available for the project, a `403` status code is returned.
+
+## Merge Trains API pagination
+
+By default, `GET` requests return 20 results at a time because the API results
+are paginated.
+
+Read more on [pagination](README.md#pagination).
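+
+For example (illustrative values only), the second page of 50 results can be requested like this:
+
+```shell
+curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/merge_trains?per_page=50&page=2"
+```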
+
+## List Merge Trains for a project
+
+Get all Merge Trains of the requested project:
+
+```plaintext
+GET /projects/:id/merge_trains
+GET /projects/:id/merge_trains?scope=complete
+```
+
+| Attribute | Type | Required | Description |
+| ------------------- | ---------------- | ---------- | --------------------------------------------------------------------------------------------------------------------------- |
+| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding). |
+| `scope` | string | no | Return Merge Trains filtered by the given scope. Available scopes are `active` (to be merged) and `complete` (have been merged). |
+| `sort` | string | no | Return Merge Trains sorted in `asc` or `desc` order. Default is `desc`. |
+
+```shell
+curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/projects/1/merge_trains
+```
+
+Example response:
+
+```json
+[
+ {
+ "id": 110,
+ "merge_request": {
+ "id": 126,
+ "iid": 59,
+ "project_id": 20,
+ "title": "Test MR 1580978354",
+ "description": "",
+ "state": "merged",
+ "created_at": "2020-02-06T08:39:14.883Z",
+ "updated_at": "2020-02-06T08:40:57.038Z",
+ "web_url": "http://local.gitlab.test:8181/root/merge-train-race-condition/-/merge_requests/59"
+ },
+ "user": {
+ "id": 1,
+ "name": "Administrator",
+ "username": "root",
+ "state": "active",
+ "avatar_url": "https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
+ "web_url": "http://local.gitlab.test:8181/root"
+ },
+ "pipeline": {
+ "id": 246,
+ "sha": "bcc17a8ffd51be1afe45605e714085df28b80b13",
+ "ref": "refs/merge-requests/59/train",
+ "status": "success",
+ "created_at": "2020-02-06T08:40:42.410Z",
+ "updated_at": "2020-02-06T08:40:46.912Z",
+ "web_url": "http://local.gitlab.test:8181/root/merge-train-race-condition/pipelines/246"
+ },
+ "created_at": "2020-02-06T08:39:47.217Z",
+ "updated_at": "2020-02-06T08:40:57.720Z",
+ "target_branch": "feature-1580973432",
+ "status": "merged",
+ "merged_at": "2020-02-06T08:40:57.719Z",
+ "duration": 70
+ }
+]
+```
diff --git a/doc/api/milestones.md b/doc/api/milestones.md
index 43faf04a709..5727a4b637f 100644
--- a/doc/api/milestones.md
+++ b/doc/api/milestones.md
@@ -4,7 +4,7 @@
Returns a list of project milestones.
-```
+```plaintext
GET /projects/:id/milestones
GET /projects/:id/milestones?iids[]=42
GET /projects/:id/milestones?iids[]=42&iids[]=43
@@ -51,7 +51,7 @@ Example Response:
Gets a single project milestone.
-```
+```plaintext
GET /projects/:id/milestones/:milestone_id
```
@@ -64,7 +64,7 @@ Parameters:
Creates a new project milestone.
-```
+```plaintext
POST /projects/:id/milestones
```
@@ -80,7 +80,7 @@ Parameters:
Updates an existing project milestone.
-```
+```plaintext
PUT /projects/:id/milestones/:milestone_id
```
@@ -98,7 +98,7 @@ Parameters:
Only for users with Developer access to the project.
-```
+```plaintext
DELETE /projects/:id/milestones/:milestone_id
```
@@ -111,7 +111,7 @@ Parameters:
Gets all issues assigned to a single project milestone.
-```
+```plaintext
GET /projects/:id/milestones/:milestone_id/issues
```
@@ -124,7 +124,7 @@ Parameters:
Gets all merge requests assigned to a single project milestone.
-```
+```plaintext
GET /projects/:id/milestones/:milestone_id/merge_requests
```
@@ -139,7 +139,7 @@ Parameters:
Only for users with Developer access to the group.
-```
+```plaintext
POST /projects/:id/milestones/:milestone_id/promote
```
@@ -154,7 +154,7 @@ Parameters:
Gets all burndown chart events for a single milestone.
-```
+```plaintext
GET /projects/:id/milestones/:milestone_id/burndown_events
```
diff --git a/doc/api/namespaces.md b/doc/api/namespaces.md
index 872385b8b5e..50b5f3f19cd 100644
--- a/doc/api/namespaces.md
+++ b/doc/api/namespaces.md
@@ -12,7 +12,7 @@ For users and groups supported API calls see the [users](users.md) and
Get a list of the namespaces of the authenticated user. If the user is an
administrator, a list of all namespaces in the GitLab instance is shown.
-```
+```plaintext
GET /namespaces
```
@@ -74,7 +74,7 @@ NOTE: **Note:** Only group maintainers/owners are presented with `members_count_
Get all namespaces that match a string in their name or path.
-```
+```plaintext
GET /namespaces?search=foobar
```
@@ -108,7 +108,7 @@ Example response:
Get a namespace by ID.
-```
+```plaintext
GET /namespaces/:id
```
diff --git a/doc/api/notes.md b/doc/api/notes.md
index 302b89e5359..f7d7163b7a8 100644
--- a/doc/api/notes.md
+++ b/doc/api/notes.md
@@ -22,7 +22,7 @@ Read more on [pagination](README.md#pagination).
Gets a list of all notes for a single issue.
-```
+```plaintext
GET /projects/:id/issues/:issue_iid/notes
GET /projects/:id/issues/:issue_iid/notes?sort=asc&order_by=updated_at
```
@@ -87,7 +87,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/ap
Returns a single note for a specific project issue
-```
+```plaintext
GET /projects/:id/issues/:issue_iid/notes/:note_id
```
@@ -105,7 +105,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/ap
Creates a new note to a single project issue.
-```
+```plaintext
POST /projects/:id/issues/:issue_iid/notes
```
@@ -124,7 +124,7 @@ curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab
Modify existing note of an issue.
-```
+```plaintext
PUT /projects/:id/issues/:issue_iid/notes/:note_id
```
@@ -143,7 +143,7 @@ curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.
Deletes an existing note of an issue.
-```
+```plaintext
DELETE /projects/:id/issues/:issue_iid/notes/:note_id
```
@@ -165,7 +165,7 @@ curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" https://gitl
Gets a list of all notes for a single snippet. Snippet notes are comments users can post to a snippet.
-```
+```plaintext
GET /projects/:id/snippets/:snippet_id/notes
GET /projects/:id/snippets/:snippet_id/notes?sort=asc&order_by=updated_at
```
@@ -185,7 +185,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/ap
Returns a single note for a given snippet.
-```
+```plaintext
GET /projects/:id/snippets/:snippet_id/notes/:note_id
```
@@ -223,7 +223,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/ap
Creates a new note for a single snippet. Snippet notes are comments users can post to a snippet.
If you create a note where the body only contains an Award Emoji, you'll receive this object back.
-```
+```plaintext
POST /projects/:id/snippets/:snippet_id/notes
```
@@ -242,7 +242,7 @@ curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab
Modify existing note of a snippet.
-```
+```plaintext
PUT /projects/:id/snippets/:snippet_id/notes/:note_id
```
@@ -261,7 +261,7 @@ curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.
Deletes an existing note of a snippet.
-```
+```plaintext
DELETE /projects/:id/snippets/:snippet_id/notes/:note_id
```
@@ -283,7 +283,7 @@ curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" https://gitl
Gets a list of all notes for a single merge request.
-```
+```plaintext
GET /projects/:id/merge_requests/:merge_request_iid/notes
GET /projects/:id/merge_requests/:merge_request_iid/notes?sort=asc&order_by=updated_at
```
@@ -303,7 +303,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/ap
Returns a single note for a given merge request.
-```
+```plaintext
GET /projects/:id/merge_requests/:merge_request_iid/notes/:note_id
```
@@ -346,7 +346,7 @@ Creates a new note for a single merge request.
If you create a note where the body only contains an Award Emoji, you'll receive
this object back.
-```
+```plaintext
POST /projects/:id/merge_requests/:merge_request_iid/notes
```
@@ -361,7 +361,7 @@ Parameters:
Modify existing note of a merge request.
-```
+```plaintext
PUT /projects/:id/merge_requests/:merge_request_iid/notes/:note_id
```
@@ -380,7 +380,7 @@ curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.
Deletes an existing note of a merge request.
-```
+```plaintext
DELETE /projects/:id/merge_requests/:merge_request_iid/notes/:note_id
```
@@ -402,7 +402,7 @@ curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" https://gitl
Gets a list of all notes for a single epic. Epic notes are comments users can post to an epic.
-```
+```plaintext
GET /groups/:id/epics/:epic_id/notes
GET /groups/:id/epics/:epic_id/notes?sort=asc&order_by=updated_at
```
@@ -422,7 +422,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/ap
Returns a single note for a given epic.
-```
+```plaintext
GET /groups/:id/epics/:epic_id/notes/:note_id
```
@@ -462,7 +462,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/ap
Creates a new note for a single epic. Epic notes are comments users can post to an epic.
If you create a note where the body only contains an Award Emoji, you'll receive this object back.
-```
+```plaintext
POST /groups/:id/epics/:epic_id/notes
```
@@ -482,7 +482,7 @@ curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab
Modify existing note of an epic.
-```
+```plaintext
PUT /groups/:id/epics/:epic_id/notes/:note_id
```
@@ -503,7 +503,7 @@ curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.
Deletes an existing note of an epic.
-```
+```plaintext
DELETE /groups/:id/epics/:epic_id/notes/:note_id
```
diff --git a/doc/api/notification_settings.md b/doc/api/notification_settings.md
index 71223669d5c..596365743fa 100644
--- a/doc/api/notification_settings.md
+++ b/doc/api/notification_settings.md
@@ -6,7 +6,7 @@
The notification levels are defined in the `NotificationSetting.level` model enumeration. Currently, these levels are recognized:
-```
+```plaintext
disabled
participating
watch
@@ -30,6 +30,7 @@ If the `custom` level is used, specific email events can be controlled. Availabl
- `reassign_merge_request`
- `merge_merge_request`
- `failed_pipeline`
+- `fixed_pipeline`
- `success_pipeline`
- `new_epic` **(ULTIMATE)**
@@ -37,7 +38,7 @@ If the `custom` level is used, specific email events can be controlled. Availabl
Get current notification settings and email address.
-```
+```plaintext
GET /notification_settings
```
@@ -58,7 +59,7 @@ Example response:
Update current notification settings and email address.
-```
+```plaintext
PUT /notification_settings
```
@@ -83,6 +84,7 @@ curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.
| `reassign_merge_request` | boolean | no | Enable/disable this notification |
| `merge_merge_request` | boolean | no | Enable/disable this notification |
| `failed_pipeline` | boolean | no | Enable/disable this notification |
+| `fixed_pipeline` | boolean | no | Enable/disable this notification |
| `success_pipeline` | boolean | no | Enable/disable this notification |
| `new_epic` | boolean | no | Enable/disable this notification ([Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/6626) in 11.3) **(ULTIMATE)** |
@@ -99,7 +101,7 @@ Example response:
Get current group or project notification settings.
-```
+```plaintext
GET /groups/:id/notification_settings
GET /projects/:id/notification_settings
```
@@ -125,7 +127,7 @@ Example response:
Update current group/project notification settings.
-```
+```plaintext
PUT /groups/:id/notification_settings
PUT /projects/:id/notification_settings
```
@@ -152,6 +154,7 @@ curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.
| `reassign_merge_request` | boolean | no | Enable/disable this notification |
| `merge_merge_request` | boolean | no | Enable/disable this notification |
| `failed_pipeline` | boolean | no | Enable/disable this notification |
+| `fixed_pipeline` | boolean | no | Enable/disable this notification |
| `success_pipeline` | boolean | no | Enable/disable this notification |
| `new_epic` | boolean | no | Enable/disable this notification ([Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/6626) in 11.3) **(ULTIMATE)** |
@@ -178,6 +181,7 @@ Example responses:
"reassign_merge_request": false,
"merge_merge_request": false,
"failed_pipeline": false,
+ "fixed_pipeline": false,
"success_pipeline": false
}
}
diff --git a/doc/api/oauth2.md b/doc/api/oauth2.md
index c70ca1a74d5..e84f3509dcf 100644
--- a/doc/api/oauth2.md
+++ b/doc/api/oauth2.md
@@ -49,7 +49,7 @@ The web application flow is:
1. Request authorization code. To do that, you should redirect the user to the
`/oauth/authorize` endpoint with the following GET parameters:
- ```
+ ```plaintext
https://gitlab.example.com/oauth/authorize?client_id=APP_ID&redirect_uri=REDIRECT_URI&response_type=code&state=YOUR_UNIQUE_STATE_HASH&scope=REQUESTED_SCOPES
```
@@ -60,7 +60,7 @@ The web application flow is:
would request `read_user` and `profile` scopes). The redirect will
include the GET `code` parameter, for example:
- ```
+ ```plaintext
http://myapp.com/oauth/redirect?code=1234567890&state=YOUR_UNIQUE_STATE_HASH
```
@@ -112,7 +112,7 @@ easily accessible, therefore secrets can leak easily.
To request the access token, you should redirect the user to the
`/oauth/authorize` endpoint using `token` response type:
-```
+```plaintext
https://gitlab.example.com/oauth/authorize?client_id=APP_ID&redirect_uri=REDIRECT_URI&response_type=token&state=YOUR_UNIQUE_STATE_HASH&scope=REQUESTED_SCOPES
```
@@ -124,7 +124,7 @@ would request `read_user` and `profile` scopes). The redirect
will include a fragment with `access_token` as well as token details in GET
parameters, for example:
-```
+```plaintext
http://myapp.com/oauth/redirect#access_token=ABCDExyz123&state=YOUR_UNIQUE_STATE_HASH&token_type=bearer&expires_in=3600
```
@@ -182,7 +182,7 @@ curl --data "@auth.txt" --request POST https://gitlab.example.com/oauth/token
Then, you'll receive the access token back in the response:
-```
+```json
{
"access_token": "1f0af717251950dbd4d73154fdf0a474a5c5119adad999683f5b450c460726aa",
"token_type": "bearer",
@@ -192,7 +192,7 @@ Then, you'll receive the access token back in the response:
For testing, you can use the `oauth2` Ruby gem:
-```
+```ruby
client = OAuth2::Client.new('the_client_id', 'the_client_secret', :site => "http://example.com")
access_token = client.password.get_token('user@example.com', 'secret')
puts access_token.token
@@ -203,33 +203,36 @@ puts access_token.token
The `access token` allows you to make requests to the API on behalf of a user.
You can pass the token either as a GET parameter:
-```
+```plaintext
GET https://gitlab.example.com/api/v4/user?access_token=OAUTH-TOKEN
```
or you can put the token in the Authorization header:
-```
+```shell
curl --header "Authorization: Bearer OAUTH-TOKEN" https://gitlab.example.com/api/v4/user
```
## Retrieving the Token Info
-To verify the details of a token you can call the `token/info` endpoint. This is provided from the doorkeeper gem (see [`/oauth/token/info`](https://github.com/doorkeeper-gem/doorkeeper/wiki/API-endpoint-descriptions-and-examples#get----oauthtokeninfo)).
+To verify the details of a token, use the `token/info` endpoint provided by the Doorkeeper gem.
+For more information, see [`/oauth/token/info`](https://github.com/doorkeeper-gem/doorkeeper/wiki/API-endpoint-descriptions-and-examples#get----oauthtokeninfo).
-You will need to supply the access token, either as a parameter
+You must supply the access token, either:
-```
-GET https://gitlab.example.com/oauth/token/info?access_token=OAUTH-TOKEN
-```
+- As a parameter:
-Or in the Authorization header:
+ ```plaintext
+ GET https://gitlab.example.com/oauth/token/info?access_token=<OAUTH-TOKEN>
+ ```
-```
-curl --header "Authorization: Bearer OAUTH-TOKEN" https://gitlab.example.com/oauth/token/info
-```
+- In the Authorization header:
-You will receive the following in response:
+ ```shell
+ curl --header "Authorization: Bearer <OAUTH-TOKEN>" https://gitlab.example.com/oauth/token/info
+ ```
+
+The following is an example response:
```json
{
@@ -241,5 +244,11 @@ You will receive the following in response:
}
```
-CAUTION: **Deprecated fields:**
-The fields `scopes` and `expires_in_seconds` are also included in the response. They are aliases for `scope` and `expires_in` respectively and have been included to prevent breaking changes introduced in [doorkeeper 5.0.2](https://github.com/doorkeeper-gem/doorkeeper/wiki/Migration-from-old-versions#from-4x-to-5x). Please don't rely on these fields as they will be removed in a later release.
+### Deprecated fields
+
+The fields `scopes` and `expires_in_seconds` are included in the response.
+
+These are aliases for `scope` and `expires_in` respectively, and have been included to
+prevent breaking changes introduced in [doorkeeper 5.0.2](https://github.com/doorkeeper-gem/doorkeeper/wiki/Migration-from-old-versions#from-4x-to-5x).
+
+Don't rely on these fields as they will be removed in a later release.
diff --git a/doc/api/packages.md b/doc/api/packages.md
index 1d39e19bbf3..a29e1f99fd3 100644
--- a/doc/api/packages.md
+++ b/doc/api/packages.md
@@ -8,16 +8,20 @@ This is the API docs of [GitLab Packages](../administration/packages/index.md).
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/9259) in GitLab 11.8.
-Get a list of project packages. Both Maven and NPM packages are included in results.
-When accessed without authentication, only packages of public projects are returned.
+Get a list of project packages. All package types are included in results. When
+accessed without authentication, only packages of public projects are returned.
-```
+```plaintext
GET /projects/:id/packages
```
| Attribute | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `id` | integer/string | yes | ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) |
+| `order_by`| string | no | The field to use as order. One of `created_at` (default), `name`, `version`, or `type`. |
+| `sort` | string | no | The direction of the order, either `asc` (default) for ascending order or `desc` for descending order. |
+| `package_type` | string | no | Filter the returned packages by type. One of `conan`, `maven`, `npm` or `nuget`. (_Introduced in GitLab 12.9_) |
+| `package_name` | string | no | Filter the project packages with a fuzzy search by name. (_Introduced in GitLab 12.9_) |
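+
+For example, an illustrative request (assuming a personal access token) that lists only NPM packages ordered by name:
+
+```shell
+curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/:id/packages?package_type=npm&order_by=name&sort=asc"
+```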
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/projects/:id/packages
@@ -53,7 +57,7 @@ By default, the `GET` request will return 20 results, since the API is [paginate
Get a list of project packages at the group level.
When accessed without authentication, only packages of public projects are returned.
-```
+```plaintext
GET /groups/:id/packages
```
@@ -61,6 +65,9 @@ GET /groups/:id/packages
| --------- | ---- | -------- | ----------- |
| `id` | integer/string | yes | ID or [URL-encoded path of the group](README.md#namespaced-path-encoding). |
| `exclude_subgroups` | boolean | false | If the param is included as true, packages from projects from subgroups are not listed. Default is `false`. |
+| `order_by`| string | no | The field to use as order. One of `created_at` (default), `name`, `version`, `type`, or `project_path`. |
+| `sort` | string | no | The direction of the order, either `asc` (default) for ascending order or `desc` for descending order. |
+| `package_type` | string | no | Filter the returned packages by type. One of `conan`, `maven`, `npm` or `nuget`. (_Introduced in GitLab 12.9_) |
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/groups/:id/packages?exclude_subgroups=true
@@ -130,7 +137,7 @@ The `_links` object contains the following properties:
Get a single project package.
-```
+```plaintext
GET /projects/:id/packages/:package_id
```
@@ -181,7 +188,7 @@ The `_links` object contains the following properties:
Get a list of package files of a single package.
-```
+```plaintext
GET /projects/:id/packages/:package_id/package_files
```
@@ -236,7 +243,7 @@ By default, the `GET` request will return 20 results, since the API is [paginate
Deletes a project package.
-```
+```plaintext
DELETE /projects/:id/packages/:package_id
```
diff --git a/doc/api/pipeline_schedules.md b/doc/api/pipeline_schedules.md
index 3624921fde7..859e88d0945 100644
--- a/doc/api/pipeline_schedules.md
+++ b/doc/api/pipeline_schedules.md
@@ -1,12 +1,12 @@
# Pipeline schedules API
-You can read more about [pipeline schedules](../user/project/pipelines/schedules.md).
+You can read more about [pipeline schedules](../ci/pipelines/schedules.md).
## Get all pipeline schedules
Get a list of the pipeline schedules of a project.
-```
+```plaintext
GET /projects/:id/pipeline_schedules
```
@@ -47,7 +47,7 @@ curl --header "PRIVATE-TOKEN: k5ESFgWY2Qf5xEvDcFxZ" "https://gitlab.example.com/
Get the pipeline schedule of a project.
-```
+```plaintext
GET /projects/:id/pipeline_schedules/:pipeline_schedule_id
```
@@ -99,7 +99,7 @@ curl --header "PRIVATE-TOKEN: k5ESFgWY2Qf5xEvDcFxZ" "https://gitlab.example.com/
Create a new pipeline schedule of a project.
-```
+```plaintext
POST /projects/:id/pipeline_schedules
```
@@ -141,9 +141,9 @@ curl --request POST --header "PRIVATE-TOKEN: k5ESFgWY2Qf5xEvDcFxZ" --form descri
## Edit a pipeline schedule
-Updates the pipeline schedule of a project. Once the update is done, it will be rescheduled automatically.
+Updates the pipeline schedule of a project. Once the update is done, it will be rescheduled automatically.
-```
+```plaintext
PUT /projects/:id/pipeline_schedules/:pipeline_schedule_id
```
@@ -193,7 +193,7 @@ curl --request PUT --header "PRIVATE-TOKEN: k5ESFgWY2Qf5xEvDcFxZ" --form cron="0
Update the owner of the pipeline schedule of a project.
-```
+```plaintext
POST /projects/:id/pipeline_schedules/:pipeline_schedule_id/take_ownership
```
@@ -238,7 +238,7 @@ curl --request POST --header "PRIVATE-TOKEN: hf2CvZXB9w8Uc5pZKpSB" "https://gitl
Delete the pipeline schedule of a project.
-```
+```plaintext
DELETE /projects/:id/pipeline_schedules/:pipeline_schedule_id
```
@@ -317,7 +317,7 @@ Example response:
Create a new variable of a pipeline schedule.
-```
+```plaintext
POST /projects/:id/pipeline_schedules/:pipeline_schedule_id/variables
```
@@ -345,7 +345,7 @@ curl --request POST --header "PRIVATE-TOKEN: k5ESFgWY2Qf5xEvDcFxZ" --form "key=N
Updates the variable of a pipeline schedule.
-```
+```plaintext
PUT /projects/:id/pipeline_schedules/:pipeline_schedule_id/variables/:key
```
@@ -373,7 +373,7 @@ curl --request PUT --header "PRIVATE-TOKEN: k5ESFgWY2Qf5xEvDcFxZ" --form "value=
Delete the variable of a pipeline schedule.
-```
+```plaintext
DELETE /projects/:id/pipeline_schedules/:pipeline_schedule_id/variables/:key
```
diff --git a/doc/api/pipeline_triggers.md b/doc/api/pipeline_triggers.md
index e207ff8e98a..55c6e37c164 100644
--- a/doc/api/pipeline_triggers.md
+++ b/doc/api/pipeline_triggers.md
@@ -6,7 +6,7 @@ You can read more about [triggering pipelines through the API](../ci/triggers/RE
Get a list of project's build triggers.
-```
+```plaintext
GET /projects/:id/triggers
```
@@ -14,7 +14,7 @@ GET /projects/:id/triggers
|-----------|---------|----------|---------------------|
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user |
-```
+```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/triggers"
```
@@ -36,7 +36,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/a
Get details of project's build trigger.
-```
+```plaintext
GET /projects/:id/triggers/:trigger_id
```
@@ -45,7 +45,7 @@ GET /projects/:id/triggers/:trigger_id
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user |
| `trigger_id` | integer | yes | The trigger id |
-```
+```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/triggers/5"
```
@@ -65,7 +65,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/a
Create a trigger for a project.
-```
+```plaintext
POST /projects/:id/triggers
```
@@ -74,7 +74,7 @@ POST /projects/:id/triggers
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user |
| `description` | string | yes | The trigger name |
-```
+```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" --form description="my description" "https://gitlab.example.com/api/v4/projects/1/triggers"
```
@@ -94,7 +94,7 @@ curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" --form descrip
Update a trigger for a project.
-```
+```plaintext
PUT /projects/:id/triggers/:trigger_id
```
@@ -104,7 +104,7 @@ PUT /projects/:id/triggers/:trigger_id
| `trigger_id` | integer | yes | The trigger id |
| `description` | string | no | The trigger name |
-```
+```shell
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" --form description="my description" "https://gitlab.example.com/api/v4/projects/1/triggers/10"
```
@@ -124,7 +124,7 @@ curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" --form descript
Remove a project's build trigger.
-```
+```plaintext
DELETE /projects/:id/triggers/:trigger_id
```
@@ -133,6 +133,6 @@ DELETE /projects/:id/triggers/:trigger_id
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user |
| `trigger_id` | integer | yes | The trigger id |
-```
+```shell
curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/triggers/5"
```
diff --git a/doc/api/pipelines.md b/doc/api/pipelines.md
index a3835045e5c..55894a36147 100644
--- a/doc/api/pipelines.md
+++ b/doc/api/pipelines.md
@@ -4,7 +4,7 @@
> [Introduced][ce-5837] in GitLab 8.11
-```
+```plaintext
GET /projects/:id/pipelines
```
@@ -12,7 +12,7 @@ GET /projects/:id/pipelines
|-----------|---------|----------|---------------------|
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user |
| `scope` | string | no | The scope of pipelines, one of: `running`, `pending`, `finished`, `branches`, `tags` |
-| `status` | string | no | The status of pipelines, one of: `running`, `pending`, `success`, `failed`, `canceled`, `skipped` |
+| `status` | string | no | The status of pipelines, one of: `running`, `pending`, `success`, `failed`, `canceled`, `skipped`, `created` |
| `ref` | string | no | The ref of pipelines |
| `sha` | string | no | The sha of pipelines |
| `yaml_errors`| boolean | no | Returns pipelines with invalid configurations |
@@ -23,7 +23,7 @@ GET /projects/:id/pipelines
| `order_by`| string | no | Order pipelines by `id`, `status`, `ref`, `updated_at` or `user_id` (default: `id`) |
| `sort` | string | no | Sort pipelines in `asc` or `desc` order (default: `desc`) |
-```
+```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/pipelines"
```
@@ -56,7 +56,7 @@ Example of response
> [Introduced][ce-5837] in GitLab 8.11
-```
+```plaintext
GET /projects/:id/pipelines/:pipeline_id
```
@@ -65,7 +65,7 @@ GET /projects/:id/pipelines/:pipeline_id
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user |
| `pipeline_id` | integer | yes | The ID of a pipeline |
-```
+```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/pipelines/46"
```
@@ -101,7 +101,7 @@ Example of response
### Get variables of a pipeline
-```
+```plaintext
GET /projects/:id/pipelines/:pipeline_id/variables
```
@@ -110,7 +110,7 @@ GET /projects/:id/pipelines/:pipeline_id/variables
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user |
| `pipeline_id` | integer | yes | The ID of a pipeline |
-```
+```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/pipelines/46/variables"
```
@@ -134,7 +134,7 @@ Example of response
> [Introduced][ce-7209] in GitLab 8.14
-```
+```plaintext
POST /projects/:id/pipeline
```
@@ -144,7 +144,7 @@ POST /projects/:id/pipeline
| `ref` | string | yes | Reference to commit |
| `variables` | array | no | An array containing the variables available in the pipeline, matching the structure `[{ 'key' => 'UPLOAD_TO_S3', 'variable_type' => 'file', 'value' => 'true' }]` |
-```
+```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/pipeline?ref=master"
```
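+
+For illustration, assuming project ID `1` and a branch named `master`, a request that also passes a file variable might look like:
+
+```shell
+curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" --header "Content-Type: application/json" \
+     --data '{ "ref": "master", "variables": [ { "key": "UPLOAD_TO_S3", "variable_type": "file", "value": "true" } ] }' \
+     "https://gitlab.example.com/api/v4/projects/1/pipeline"
+```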
@@ -182,7 +182,7 @@ Example of response
> [Introduced][ce-5837] in GitLab 8.11
-```
+```plaintext
POST /projects/:id/pipelines/:pipeline_id/retry
```
@@ -191,7 +191,7 @@ POST /projects/:id/pipelines/:pipeline_id/retry
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user |
| `pipeline_id` | integer | yes | The ID of a pipeline |
-```
+```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/pipelines/46/retry"
```
@@ -229,7 +229,7 @@ Response:
> [Introduced][ce-5837] in GitLab 8.11
-```
+```plaintext
POST /projects/:id/pipelines/:pipeline_id/cancel
```
@@ -238,7 +238,7 @@ POST /projects/:id/pipelines/:pipeline_id/cancel
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user |
| `pipeline_id` | integer | yes | The ID of a pipeline |
-```
+```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/pipelines/46/cancel"
```
@@ -276,7 +276,7 @@ Response:
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/22988) in GitLab 11.6.
-```
+```plaintext
DELETE /projects/:id/pipelines/:pipeline_id
```
@@ -285,7 +285,7 @@ DELETE /projects/:id/pipelines/:pipeline_id
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user |
| `pipeline_id` | integer | yes | The ID of a pipeline |
-```
+```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" --request "DELETE" "https://gitlab.example.com/api/v4/projects/1/pipelines/46"
```
diff --git a/doc/api/project_aliases.md b/doc/api/project_aliases.md
index da8d7600c7c..59c0ffee76d 100644
--- a/doc/api/project_aliases.md
+++ b/doc/api/project_aliases.md
@@ -8,11 +8,11 @@ All methods require administrator authorization.
Get a list of all project aliases:
-```
+```plaintext
GET /project_aliases
```
-```
+```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/project_aliases"
```
@@ -37,7 +37,7 @@ Example response:
Get details of a project alias:
-```
+```plaintext
GET /project_aliases/:name
```
@@ -45,7 +45,7 @@ GET /project_aliases/:name
|-----------|--------|----------|-----------------------|
| `name` | string | yes | The name of the alias |
-```
+```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/project_aliases/gitlab"
```
@@ -64,7 +64,7 @@ Example response:
Add a new alias for a project. Responds with a 201 when successful,
400 when there are validation errors (e.g. alias already exists):
-```
+```plaintext
POST /project_aliases
```
@@ -73,13 +73,13 @@ POST /project_aliases
| `project_id` | integer/string | yes | The ID or path of the project. |
| `name` | string | yes | The name of the alias. Must be unique. |
-```
+```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/project_aliases" --form "project_id=1" --form "name=gitlab"
```
or
-```
+```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/project_aliases" --form "project_id=gitlab-org/gitlab" --form "name=gitlab"
```
@@ -98,7 +98,7 @@ Example response:
Removes a project alias. Responds with a 204 when the project alias
exists, 404 when it doesn't:
-```
+```plaintext
DELETE /project_aliases/:name
```
@@ -106,6 +106,6 @@ DELETE /project_aliases/:name
|-----------|--------|----------|-----------------------|
| `name` | string | yes | The name of the alias |
-```
+```shell
curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/project_aliases/gitlab"
```
diff --git a/doc/api/project_badges.md b/doc/api/project_badges.md
index 77c5d6cf37e..2e335d80947 100644
--- a/doc/api/project_badges.md
+++ b/doc/api/project_badges.md
@@ -16,7 +16,7 @@ Badges support placeholders that will be replaced in real time in both the link
Gets a list of a project's badges and its group badges.
-```
+```plaintext
GET /projects/:id/badges
```
@@ -58,7 +58,7 @@ Example response:
Gets a badge of a project.
-```
+```plaintext
GET /projects/:id/badges/:badge_id
```
@@ -88,7 +88,7 @@ Example response:
Adds a badge to a project.
-```
+```plaintext
POST /projects/:id/badges
```
@@ -119,7 +119,7 @@ Example response:
Updates a badge of a project.
-```
+```plaintext
PUT /projects/:id/badges/:badge_id
```
@@ -151,7 +151,7 @@ Example response:
Removes a badge from a project. Only project's badges will be removed by using this endpoint.
-```
+```plaintext
DELETE /projects/:id/badges/:badge_id
```
@@ -168,7 +168,7 @@ curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" https://gitl
Returns how the `link_url` and `image_url` final URLs would be after resolving the placeholder interpolation.
-```
+```plaintext
GET /projects/:id/badges/render
```
diff --git a/doc/api/project_clusters.md b/doc/api/project_clusters.md
index 78d32acd0d2..2ed57eceb85 100644
--- a/doc/api/project_clusters.md
+++ b/doc/api/project_clusters.md
@@ -9,7 +9,7 @@ User will need at least maintainer access to use these endpoints.
Returns a list of project clusters.
-```
+```plaintext
GET /projects/:id/clusters
```
@@ -177,16 +177,16 @@ Parameters:
| Attribute | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `id` | integer | yes | The ID of the project owned by the authenticated user |
-| `name` | String | yes | The name of the cluster |
-| `domain` | String | no | The [base domain](../user/project/clusters/index.md#base-domain) of the cluster |
-| `enabled` | Boolean | no | Determines if cluster is active or not, defaults to true |
-| `managed` | Boolean | no | Determines if GitLab will manage namespaces and service accounts for this cluster, defaults to true |
-| `platform_kubernetes_attributes[api_url]` | String | yes | The URL to access the Kubernetes API |
-| `platform_kubernetes_attributes[token]` | String | yes | The token to authenticate against Kubernetes |
-| `platform_kubernetes_attributes[ca_cert]` | String | no | TLS certificate (needed if API is using a self-signed TLS certificate |
-| `platform_kubernetes_attributes[namespace]` | String | no | The unique namespace related to the project |
-| `platform_kubernetes_attributes[authorization_type]` | String | no | The cluster authorization type: `rbac`, `abac` or `unknown_authorization`. Defaults to `rbac`. |
-| `environment_scope` | String | no | The associated environment to the cluster. Defaults to `*` **(PREMIUM)** |
+| `name` | string | yes | The name of the cluster |
+| `domain` | string | no | The [base domain](../user/project/clusters/index.md#base-domain) of the cluster |
+| `enabled` | boolean | no | Determines if cluster is active or not, defaults to true |
+| `managed` | boolean | no | Determines if GitLab will manage namespaces and service accounts for this cluster, defaults to true |
+| `platform_kubernetes_attributes[api_url]` | string | yes | The URL to access the Kubernetes API |
+| `platform_kubernetes_attributes[token]` | string | yes | The token to authenticate against Kubernetes |
+| `platform_kubernetes_attributes[ca_cert]` | string | no | TLS certificate. Required if API is using a self-signed TLS certificate. |
+| `platform_kubernetes_attributes[namespace]` | string | no | The unique namespace related to the project |
+| `platform_kubernetes_attributes[authorization_type]` | string | no | The cluster authorization type: `rbac`, `abac` or `unknown_authorization`. Defaults to `rbac`. |
+| `environment_scope` | string | no | The associated environment to the cluster. Defaults to `*` **(PREMIUM)** |
Example request:
@@ -271,14 +271,14 @@ Parameters:
| --------- | ---- | -------- | ----------- |
| `id` | integer | yes | The ID of the project owned by the authenticated user |
| `cluster_id` | integer | yes | The ID of the cluster |
-| `name` | String | no | The name of the cluster |
-| `domain` | String | no | The [base domain](../user/project/clusters/index.md#base-domain) of the cluster |
+| `name` | string | no | The name of the cluster |
+| `domain` | string | no | The [base domain](../user/project/clusters/index.md#base-domain) of the cluster |
| `management_project_id` | integer | no | The ID of the [management project](../user/clusters/management_project.md) for the cluster |
-| `platform_kubernetes_attributes[api_url]` | String | no | The URL to access the Kubernetes API |
-| `platform_kubernetes_attributes[token]` | String | no | The token to authenticate against Kubernetes |
-| `platform_kubernetes_attributes[ca_cert]` | String | no | TLS certificate (needed if API is using a self-signed TLS certificate |
-| `platform_kubernetes_attributes[namespace]` | String | no | The unique namespace related to the project |
-| `environment_scope` | String | no | The associated environment to the cluster **(PREMIUM)** |
+| `platform_kubernetes_attributes[api_url]` | string | no | The URL to access the Kubernetes API |
+| `platform_kubernetes_attributes[token]` | string | no | The token to authenticate against Kubernetes |
+| `platform_kubernetes_attributes[ca_cert]` | string | no | TLS certificate. Required if API is using a self-signed TLS certificate. |
+| `platform_kubernetes_attributes[namespace]` | string | no | The unique namespace related to the project |
+| `environment_scope` | string | no | The associated environment to the cluster **(PREMIUM)** |
NOTE: **Note:**
`name`, `api_url`, `ca_cert` and `token` can only be updated if the cluster was added
@@ -368,7 +368,7 @@ Example response:
Deletes an existing project cluster.
-```
+```plaintext
DELETE /projects/:id/clusters/:cluster_id
```
diff --git a/doc/api/project_import_export.md b/doc/api/project_import_export.md
index d1aaa01d37c..476abc18835 100644
--- a/doc/api/project_import_export.md
+++ b/doc/api/project_import_export.md
@@ -61,14 +61,20 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/ap
Status can be one of:
- `none`
+- `queued`
- `started`
-- `after_export_action`
- `finished`
+- `regeneration_in_progress`
-The `after_export_action` state represents that the export process has been completed successfully and
-the platform is performing some actions on the resulted file. For example, sending
-an email notifying the user to download the file, uploading the exported file
-to a web server, etc.
+The `queued` state represents that the export request has been received and is currently in the queue to be processed.
+
+The `started` state represents that the export process has started and is in progress. This
+includes exporting the project and actions performed on the resultant file, such as sending
+an email notifying the user to download the file or uploading the exported file to a web server.
+
+The `finished` state means the export process has completed and the user has been notified.
+
+The `regeneration_in_progress` state means an export file is available to download, and a request to generate a new export is in process.
`_links` are only present when export has finished.
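+
+For illustration, assuming project ID `1`, the current state can be read from the `export_status` field returned by the export status endpoint:
+
+```shell
+curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/export"
+```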
diff --git a/doc/api/project_level_variables.md b/doc/api/project_level_variables.md
index d4bda992f7c..fbeba9d6c7d 100644
--- a/doc/api/project_level_variables.md
+++ b/doc/api/project_level_variables.md
@@ -4,7 +4,7 @@
Get list of a project's variables.
-```
+```plaintext
GET /projects/:id/variables
```
@@ -12,7 +12,7 @@ GET /projects/:id/variables
|-----------|---------|----------|---------------------|
| `id` | integer/string | yes | The ID of a project or [urlencoded NAMESPACE/PROJECT_NAME of the project](README.md#namespaced-path-encoding) owned by the authenticated user |
-```
+```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/variables"
```
@@ -35,7 +35,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/a
Get the details of a project's specific variable.
-```
+```plaintext
GET /projects/:id/variables/:key
```
@@ -44,7 +44,7 @@ GET /projects/:id/variables/:key
| `id` | integer/string | yes | The ID of a project or [urlencoded NAMESPACE/PROJECT_NAME of the project](README.md#namespaced-path-encoding) owned by the authenticated user |
| `key` | string | yes | The `key` of a variable |
-```
+```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/variables/TEST_VARIABLE_1"
```
@@ -62,7 +62,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/a
Create a new variable.
-```
+```plaintext
POST /projects/:id/variables
```
@@ -76,7 +76,7 @@ POST /projects/:id/variables
| `masked` | boolean | no | Whether the variable is masked |
| `environment_scope` | string | no | The `environment_scope` of the variable |
-```
+```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/variables" --form "key=NEW_VARIABLE" --form "value=new value"
```
@@ -95,7 +95,7 @@ curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" "https://gitla
Update a project's variable.
-```
+```plaintext
PUT /projects/:id/variables/:key
```
@@ -109,7 +109,7 @@ PUT /projects/:id/variables/:key
| `masked` | boolean | no | Whether the variable is masked |
| `environment_scope` | string | no | The `environment_scope` of the variable |
-```
+```shell
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/variables/NEW_VARIABLE" --form "value=updated value"
```
@@ -128,7 +128,7 @@ curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab
Remove a project's variable.
-```
+```plaintext
DELETE /projects/:id/variables/:key
```
@@ -137,6 +137,6 @@ DELETE /projects/:id/variables/:key
| `id` | integer/string | yes | The ID of a project or [urlencoded NAMESPACE/PROJECT_NAME of the project](README.md#namespaced-path-encoding) owned by the authenticated user |
| `key` | string | yes | The `key` of a variable |
-```
+```shell
curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/variables/VARIABLE_1"
```
diff --git a/doc/api/project_snippets.md b/doc/api/project_snippets.md
index ffdbd82adba..39df2925a1e 100644
--- a/doc/api/project_snippets.md
+++ b/doc/api/project_snippets.md
@@ -23,7 +23,7 @@ visibility setting keep this setting. You can read more about the change in the
Get a list of project snippets.
-```
+```plaintext
GET /projects/:id/snippets
```
@@ -35,7 +35,7 @@ Parameters:
Get a single project snippet.
-```
+```plaintext
GET /projects/:id/snippets/:snippet_id
```
@@ -68,7 +68,7 @@ Parameters:
Creates a new project snippet. The user must have permission to create new snippets.
-```
+```plaintext
POST /projects/:id/snippets
```
@@ -106,7 +106,7 @@ curl --request POST https://gitlab.com/api/v4/projects/:id/snippets \
Updates an existing project snippet. The user must have permission to change an existing snippet.
-```
+```plaintext
PUT /projects/:id/snippets/:snippet_id
```
@@ -123,7 +123,7 @@ Parameters:
Example request:
```shell
-curl --request PUT https://gitlab.com/api/v4/projects/:id/snippets \
+curl --request PUT https://gitlab.com/api/v4/projects/:id/snippets/:snippet_id \
--header "PRIVATE-TOKEN: <your_access_token>" \
--header "Content-Type: application/json" \
-d @snippet.json
@@ -145,7 +145,7 @@ curl --request PUT https://gitlab.com/api/v4/projects/:id/snippets \
Deletes an existing project snippet. This returns a `204 No Content` status code if the operation was successful or `404` if the resource was not found.
-```
+```plaintext
DELETE /projects/:id/snippets/:snippet_id
```
@@ -157,7 +157,7 @@ Parameters:
Example request:
```shell
-curl --request DELETE https://gitlab.com/api/v4/projects/:id/snippets \
+curl --request DELETE https://gitlab.com/api/v4/projects/:id/snippets/:snippet_id \
--header "PRIVATE-TOKEN: <your_access_token>"
```
@@ -165,7 +165,7 @@ curl --request DELETE https://gitlab.com/api/v4/projects/:id/snippets \
Returns the raw project snippet as plain text.
-```
+```plaintext
GET /projects/:id/snippets/:snippet_id/raw
```
@@ -187,7 +187,7 @@ curl https://gitlab.com/api/v4/projects/:id/snippets/:snippet_id/raw \
Available only for admins.
-```
+```plaintext
GET /projects/:id/snippets/:snippet_id/user_agent_detail
```
diff --git a/doc/api/project_statistics.md b/doc/api/project_statistics.md
index 2732fa47fa0..d96d3de6a73 100644
--- a/doc/api/project_statistics.md
+++ b/doc/api/project_statistics.md
@@ -8,7 +8,7 @@ Retrieving the statistics requires write access to the repository.
Currently only HTTP fetch statistics are returned.
Fetch statistics include both clone and pull counts and are HTTP only; SSH fetches are not included.
-```
+```plaintext
GET /projects/:id/statistics
```
diff --git a/doc/api/project_templates.md b/doc/api/project_templates.md
index d6ad77de429..4062df24525 100644
--- a/doc/api/project_templates.md
+++ b/doc/api/project_templates.md
@@ -21,7 +21,7 @@ in GitLab 11.5
## Get all templates of a particular type
-```
+```plaintext
GET /projects/:id/templates/:type
```
@@ -87,7 +87,7 @@ Example response (licenses):
## Get one template of a particular type
-```
+```plaintext
GET /projects/:id/templates/:type/:key
```
@@ -106,7 +106,6 @@ Example response (Dockerfile):
"name": "Binary",
"content": "# This file is a template, and might need editing before it works on your project.\n# This Dockerfile installs a compiled binary into a bare system.\n# You must either commit your compiled binary into source control (not recommended)\n# or build the binary first as part of a CI/CD pipeline.\n\nFROM buildpack-deps:jessie\n\nWORKDIR /usr/local/bin\n\n# Change `app` to whatever your binary is called\nAdd app .\nCMD [\"./app\"]\n"
}
-
```
Example response (license):
diff --git a/doc/api/projects.md b/doc/api/projects.md
index a0243be1907..a00bd442872 100644
--- a/doc/api/projects.md
+++ b/doc/api/projects.md
@@ -35,7 +35,7 @@ There are currently three options for `merge_method` to choose from:
Get a list of all visible projects across GitLab for the authenticated user.
When accessed without authentication, only public projects with "simple" fields are returned.
-```
+```plaintext
GET /projects
```
@@ -298,7 +298,7 @@ the `approvals_before_merge` parameter:
You can filter by [custom attributes](custom_attributes.md) with:
-```
+```plaintext
GET /projects?custom_attributes[key]=value&custom_attributes[other_key]=other_value
```
@@ -315,7 +315,7 @@ Note that keyset pagination only supports `order_by=id`. Other sorting options a
Get a list of visible projects owned by the given user. When accessed without authentication, only public projects are returned.
-```
+```plaintext
GET /users/:user_id/projects
```
@@ -530,7 +530,7 @@ This endpoint supports [keyset pagination](README.md#keyset-based-pagination) fo
Get a list of visible projects owned by the given user. When accessed without authentication, only public projects are returned.
-```
+```plaintext
GET /users/:user_id/starred_projects
```
@@ -740,7 +740,7 @@ Example response:
Get a specific project. This endpoint can be accessed without authentication if
the project is publicly accessible.
-```
+```plaintext
GET /projects/:id
```
@@ -955,7 +955,7 @@ If the project is a fork, and you provide a valid token to authenticate, the
Get the users list of a project.
-```
+```plaintext
GET /projects/:id/users
```
@@ -993,7 +993,7 @@ Please refer to the [Events API documentation](events.md#list-a-projects-visible
Creates a new project owned by the authenticated user.
-```
+```plaintext
POST /projects
```
@@ -1041,7 +1041,7 @@ POST /projects
| `ci_config_path` | string | no | The path to CI config file |
| `auto_devops_enabled` | boolean | no | Enable Auto DevOps for this project |
| `auto_devops_deploy_strategy` | string | no | Auto Deploy strategy (`continuous`, `manual` or `timed_incremental`) |
-| `repository_storage` | string | no | **(STARTER ONLY)** Which storage shard the repository is on. Available only to admins |
+| `repository_storage` | string | no | Which storage shard the repository is on. Available only to admins |
| `approvals_before_merge` | integer | no | **(STARTER)** How many approvers should approve merge requests by default |
| `external_authorization_classification_label` | string | no | **(PREMIUM)** The classification label for the project |
| `mirror` | boolean | no | **(STARTER)** Enables pull mirroring in a project |
@@ -1061,7 +1061,7 @@ where `password` is a public access key with the `api` scope enabled.
Creates a new project owned by the specified user. Available only for admins.
-```
+```plaintext
POST /projects/user/:user_id
```
@@ -1109,7 +1109,7 @@ POST /projects/user/:user_id
| `ci_config_path` | string | no | The path to CI config file |
| `auto_devops_enabled` | boolean | no | Enable Auto DevOps for this project |
| `auto_devops_deploy_strategy` | string | no | Auto Deploy strategy (`continuous`, `manual` or `timed_incremental`) |
-| `repository_storage` | string | no | **(STARTER ONLY)** Which storage shard the repository is on. Available only to admins |
+| `repository_storage` | string | no | Which storage shard the repository is on. Available only to admins |
| `approvals_before_merge` | integer | no | **(STARTER)** How many approvers should approve merge requests by default |
| `external_authorization_classification_label` | string | no | **(PREMIUM)** The classification label for the project |
| `mirror` | boolean | no | **(STARTER)** Enables pull mirroring in a project |
@@ -1128,7 +1128,7 @@ where `password` is a public access key with the `api` scope enabled.
Updates an existing project.
-```
+```plaintext
PUT /projects/:id
```
@@ -1174,10 +1174,10 @@ PUT /projects/:id
| `auto_cancel_pending_pipelines` | string | no | Auto-cancel pending pipelines (Note: this is not a boolean, but enabled/disabled) |
| `build_coverage_regex` | string | no | Test coverage parsing |
| `ci_config_path` | string | no | The path to CI config file |
-| `ci_default_git_depth` | integer | no | Default number of revisions for [shallow cloning](../user/project/pipelines/settings.md#git-shallow-clone) |
+| `ci_default_git_depth` | integer | no | Default number of revisions for [shallow cloning](../ci/pipelines/settings.md#git-shallow-clone) |
| `auto_devops_enabled` | boolean | no | Enable Auto DevOps for this project |
| `auto_devops_deploy_strategy` | string | no | Auto Deploy strategy (`continuous`, `manual` or `timed_incremental`) |
-| `repository_storage` | string | no | **(STARTER ONLY)** Which storage shard the repository is on. Available only to admins |
+| `repository_storage` | string | no | Which storage shard the repository is on. Available only to admins |
| `approvals_before_merge` | integer | no | **(STARTER)** How many approvers should approve merge request by default |
| `external_authorization_classification_label` | string | no | **(PREMIUM)** The classification label for the project |
| `mirror` | boolean | no | **(STARTER)** Enables pull mirroring in a project |
@@ -1200,14 +1200,16 @@ The forking operation for a project is asynchronous and is completed in a
background job. The request will return immediately. To determine whether the
fork of the project has completed, query the `import_status` for the new project.
-```
+```plaintext
POST /projects/:id/fork
```
| Attribute | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) |
-| `namespace` | integer/string | yes | The ID or path of the namespace that the project will be forked to |
+| `namespace` | integer/string | no | (deprecated) The ID or path of the namespace that the project will be forked to |
+| `namespace_id` | integer | no | The ID of the namespace that the project will be forked to |
+| `namespace_path` | string | no | The path of the namespace that the project will be forked to |
| `path` | string | no | The path that will be assigned to the resultant project after forking |
| `name` | string | no | The name that will be assigned to the resultant project after forking |
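+
+For example, an illustrative request (assuming the target namespace ID is `123`) that forks a project into that namespace:
+
+```shell
+curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/:id/fork?namespace_id=123"
+```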
@@ -1217,7 +1219,7 @@ POST /projects/:id/fork
List the projects accessible to the calling user that have an established, forked relationship with the specified project
-```
+```plaintext
GET /projects/:id/forks
```
@@ -1315,7 +1317,7 @@ Example responses:
Stars a given project. Returns status code `304` if the project is already starred.
-```
+```plaintext
POST /projects/:id/star
```
@@ -1405,7 +1407,7 @@ Example response:
Unstars a given project. Returns status code `304` if the project is not starred.
-```
+```plaintext
POST /projects/:id/unstar
```
@@ -1495,7 +1497,7 @@ Example response:
List the users who starred the specified project.
-```
+```plaintext
GET /projects/:id/starrers
```
@@ -1540,7 +1542,7 @@ Example responses:
Get languages used in a project with percentage value.
-```
+```plaintext
GET /projects/:id/languages
```
@@ -1564,7 +1566,7 @@ Example response:
Archives the project if the user is either admin or the project owner of this project. This action is
idempotent, thus archiving an already archived project will not change the project.
-```
+```plaintext
POST /projects/:id/archive
```
@@ -1673,7 +1675,7 @@ Example response:
Unarchives the project if the user is either admin or the project owner of this project. This action is
idempotent, thus unarchiving a non-archived project will not change the project.
-```
+```plaintext
POST /projects/:id/unarchive
```
@@ -1786,7 +1788,7 @@ This endpoint either:
deletion happens after number of days specified in
[instance settings](../user/admin_area/settings/visibility_and_access_controls.md#default-deletion-adjourned-period-premium-only).
-```
+```plaintext
DELETE /projects/:id
```
@@ -1800,7 +1802,7 @@ DELETE /projects/:id
Restores project marked for deletion.
-```
+```plaintext
POST /projects/:id/restore
```
@@ -1812,7 +1814,7 @@ POST /projects/:id/restore
Uploads a file to the specified project to be used in an issue or merge request description, or a comment.
-```
+```plaintext
POST /projects/:id/uploads
```
@@ -1836,11 +1838,12 @@ Returned object:
{
"alt": "dk",
"url": "/uploads/66dbcd21ec5d24ed6ea225176098d52b/dk.png",
+ "full_path": "/namespace1/project1/uploads/66dbcd21ec5d24ed6ea225176098d52b/dk.png",
"markdown": "![dk](/uploads/66dbcd21ec5d24ed6ea225176098d52b/dk.png)"
}
```
->**Note**: The returned `url` is relative to the project path.
+>**Note**: The returned `url` is relative to the project path. The returned `full_path` is the absolute path to the file.
In Markdown contexts, the link is automatically expanded when the format in
`markdown` is used.
@@ -1848,7 +1851,7 @@ In Markdown contexts, the link is automatically expanded when the format in
Allow to share project with group.
-```
+```plaintext
POST /projects/:id/share
```
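+
+For example, an illustrative request (assuming group ID `99` and Developer access, passed as `group_access=30`):
+
+```shell
+curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/:id/share?group_id=99&group_access=30"
+```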
@@ -1863,7 +1866,7 @@ POST /projects/:id/share
Unshare the project from the group. Returns `204` and no content on success.
-```
+```plaintext
DELETE /projects/:id/share/:group_id
```
@@ -1885,7 +1888,7 @@ These are different for [System Hooks](system_hooks.md) that are system wide.
Get a list of project hooks.
-```
+```plaintext
GET /projects/:id/hooks
```
@@ -1897,7 +1900,7 @@ GET /projects/:id/hooks
Get a specific hook for a project.
-```
+```plaintext
GET /projects/:id/hooks/:hook_id
```
@@ -1930,7 +1933,7 @@ GET /projects/:id/hooks/:hook_id
Adds a hook to a specified project.
-```
+```plaintext
POST /projects/:id/hooks
```
@@ -1955,7 +1958,7 @@ POST /projects/:id/hooks
Edits a hook for a specified project.
-```
+```plaintext
PUT /projects/:id/hooks/:hook_id
```
@@ -1982,7 +1985,7 @@ PUT /projects/:id/hooks/:hook_id
Removes a hook from a project. This is an idempotent method and can be called multiple times.
Either the hook is available or not.
-```
+```plaintext
DELETE /projects/:id/hooks/:hook_id
```
@@ -2000,7 +2003,7 @@ Allows modification of the forked relationship between existing projects. Availa
### Create a forked from/to relation between existing projects
-```
+```plaintext
POST /projects/:id/fork/:forked_from_id
```
@@ -2011,7 +2014,7 @@ POST /projects/:id/fork/:forked_from_id
### Delete an existing forked from relationship
-```
+```plaintext
DELETE /projects/:id/fork
```
@@ -2025,7 +2028,7 @@ Search for projects by name which are accessible to the authenticated user. This
endpoint can be accessed without authentication if the project is publicly
accessible.
-```
+```plaintext
GET /projects
```
@@ -2043,7 +2046,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/ap
> Introduced in GitLab 9.0.
-```
+```plaintext
POST /projects/:id/housekeeping
```
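+
+For example, an illustrative request triggering housekeeping for a project:
+
+```shell
+curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/:id/housekeeping"
+```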
@@ -2057,7 +2060,7 @@ POST /projects/:id/housekeeping
Get the push rules of a project.
-```
+```plaintext
GET /projects/:id/push_rule
```
@@ -2101,7 +2104,7 @@ the `commit_committer_check` and `reject_unsigned_commits` parameters:
Adds a push rule to a specified project.
-```
+```plaintext
POST /projects/:id/push_rule
```
@@ -2124,7 +2127,7 @@ POST /projects/:id/push_rule
Edits a push rule for a specified project.
-```
+```plaintext
PUT /projects/:id/push_rule
```
@@ -2150,7 +2153,7 @@ PUT /projects/:id/push_rule
Removes a push rule from a project. This is an idempotent method and can be called multiple times.
Either the push rule is available or not.
-```
+```plaintext
DELETE /projects/:id/push_rule
```
@@ -2162,7 +2165,7 @@ DELETE /projects/:id/push_rule
> Introduced in GitLab 11.1.
-```
+```plaintext
PUT /projects/:id/transfer
```
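+
+For example, an illustrative request (assuming the `namespace` attribute accepts the ID or path of the target namespace, here `14`):
+
+```shell
+curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/:id/transfer?namespace=14"
+```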
@@ -2186,7 +2189,7 @@ Read more in the [Project members](members.md) documentation.
> Introduced in [GitLab Starter](https://about.gitlab.com/pricing/) 10.3.
-```
+```plaintext
POST /projects/:id/mirror/pull
```
@@ -2219,7 +2222,7 @@ format.
If a repository is corrupted to the point where `git clone` does not work, the
snapshot may allow some of the data to be retrieved.
-```
+```plaintext
GET /projects/:id/snapshot
```
diff --git a/doc/api/protected_branches.md b/doc/api/protected_branches.md
index e59d7130356..de862109055 100644
--- a/doc/api/protected_branches.md
+++ b/doc/api/protected_branches.md
@@ -6,7 +6,7 @@
The access levels are defined in the `ProtectedRefAccess.allowed_access_levels` method. Currently, these levels are recognized:
-```
+```plaintext
0 => No access
30 => Developer access
40 => Maintainer access
@@ -17,7 +17,7 @@ The access levels are defined in the `ProtectedRefAccess.allowed_access_levels`
Gets a list of protected branches from a project.
-```
+```plaintext
GET /projects/:id/protected_branches
```
@@ -91,7 +91,7 @@ Example response:
Gets a single protected branch or wildcard protected branch.
-```
+```plaintext
GET /projects/:id/protected_branches/:name
```
@@ -160,7 +160,7 @@ Example response:
Protects a single repository branch or several project repository
branches using a wildcard protected branch.
-```
+```plaintext
POST /projects/:id/protected_branches
```
@@ -292,7 +292,7 @@ Example response:
Unprotects the given protected branch or wildcard protected branch.
-```
+```plaintext
DELETE /projects/:id/protected_branches/:name
```
@@ -309,7 +309,7 @@ curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" 'https://git
Update the "code owner approval required" option for the given protected branch protected branch.
-```
+```plaintext
PATCH /projects/:id/protected_branches/:name
```
diff --git a/doc/api/protected_environments.md b/doc/api/protected_environments.md
index 7d4e62a8ff5..dea1382af29 100644
--- a/doc/api/protected_environments.md
+++ b/doc/api/protected_environments.md
@@ -7,7 +7,7 @@
The access levels are defined in the `ProtectedEnvironment::DeployAccessLevel::ALLOWED_ACCESS_LEVELS` method.
Currently, these levels are recognized:
-```
+```plaintext
30 => Developer access
40 => Maintainer access
60 => Admin access
@@ -17,7 +17,7 @@ Currently, these levels are recognized:
Gets a list of protected environments from a project:
-```bash
+```shell
GET /projects/:id/protected_environments
```
@@ -25,7 +25,7 @@ GET /projects/:id/protected_environments
| --------- | ---- | -------- | ----------- |
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user. |
-```bash
+```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" 'https://gitlab.example.com/api/v4/projects/5/protected_environments/'
```
@@ -51,7 +51,7 @@ Example response:
Gets a single protected environment:
-```bash
+```shell
GET /projects/:id/protected_environments/:name
```
@@ -60,7 +60,7 @@ GET /projects/:id/protected_environments/:name
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user |
| `name` | string | yes | The name of the protected environment |
-```bash
+```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" 'https://gitlab.example.com/api/v4/projects/5/protected_environments/production'
```
@@ -84,11 +84,11 @@ Example response:
Protects a single environment:
-```bash
+```shell
POST /projects/:id/protected_environments
```
-```bash
+```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" 'https://gitlab.example.com/api/v4/projects/5/protected_environments?name=staging&deploy_access_levels%5B%5D%5Buser_id%5D=1'
```
@@ -122,11 +122,11 @@ Example response:
Unprotects the given protected environment:
-```bash
+```shell
DELETE /projects/:id/protected_environments/:name
```
-```bash
+```shell
curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" 'https://gitlab.example.com/api/v4/projects/5/protected_environments/staging'
```
diff --git a/doc/api/protected_tags.md b/doc/api/protected_tags.md
index a5490094a44..1d844a2c5c4 100644
--- a/doc/api/protected_tags.md
+++ b/doc/api/protected_tags.md
@@ -6,7 +6,7 @@
Currently, these levels are recognized:
-```
+```plaintext
0 => No access
30 => Developer access
40 => Maintainer access
@@ -17,7 +17,7 @@ Currently, these levels are recognized:
Gets a list of protected tags from a project.
This function takes pagination parameters `page` and `per_page` to restrict the list of protected tags.
-```
+```plaintext
GET /projects/:id/protected_tags
```
@@ -51,7 +51,7 @@ Example response:
Gets a single protected tag or wildcard protected tag.
The pagination parameters `page` and `per_page` can be used to restrict the list of protected tags.
-```
+```plaintext
GET /projects/:id/protected_tags/:name
```
@@ -83,7 +83,7 @@ Example response:
Protects a single repository tag or several project repository
tags using a wildcard protected tag.
-```
+```plaintext
POST /projects/:id/protected_tags
```
@@ -115,7 +115,7 @@ Example response:
Unprotects the given protected tag or wildcard protected tag.
-```
+```plaintext
DELETE /projects/:id/protected_tags/:name
```
diff --git a/doc/api/releases/index.md b/doc/api/releases/index.md
index e24dea3c278..c2dd9108364 100644
--- a/doc/api/releases/index.md
+++ b/doc/api/releases/index.md
@@ -9,7 +9,7 @@
Paginated list of Releases, sorted by `released_at`.
-```
+```plaintext
GET /projects/:id/releases
```
@@ -70,7 +70,11 @@ Example response:
"updated_at":"2019-07-12T19:45:44.256Z",
"due_date":"2019-08-16T11:00:00.256Z",
"start_date":"2019-07-30T12:00:00.256Z",
- "web_url":"https://gitlab.example.com/root/awesome-app/-/milestones/1"
+ "web_url":"https://gitlab.example.com/root/awesome-app/-/milestones/1",
+ "issue_stats": {
+ "total": 98,
+ "closed": 76
+ }
},
{
"id":52,
@@ -83,7 +87,11 @@ Example response:
"updated_at":"2019-07-16T14:00:12.256Z",
"due_date":"2019-08-16T11:00:00.256Z",
"start_date":"2019-07-30T12:00:00.256Z",
- "web_url":"https://gitlab.example.com/root/awesome-app/-/milestones/2"
+ "web_url":"https://gitlab.example.com/root/awesome-app/-/milestones/2",
+ "issue_stats": {
+ "total": 24,
+ "closed": 21
+ }
}
],
"commit_path":"/root/awesome-app/commit/588440f66559714280628a4f9799f0c4eb880a4a",
@@ -191,7 +199,7 @@ Example response:
Get a Release for the given tag.
-```
+```plaintext
GET /projects/:id/releases/:tag_name
```
@@ -252,7 +260,11 @@ Example response:
"updated_at":"2019-07-12T19:45:44.256Z",
"due_date":"2019-08-16T11:00:00.256Z",
"start_date":"2019-07-30T12:00:00.256Z",
- "web_url":"https://gitlab.example.com/root/awesome-app/-/milestones/1"
+ "web_url":"https://gitlab.example.com/root/awesome-app/-/milestones/1",
+ "issue_stats": {
+ "total": 98,
+ "closed": 76
+ }
},
{
"id":52,
@@ -265,7 +277,11 @@ Example response:
"updated_at":"2019-07-16T14:00:12.256Z",
"due_date":"2019-08-16T11:00:00.256Z",
"start_date":"2019-07-30T12:00:00.256Z",
- "web_url":"https://gitlab.example.com/root/awesome-app/-/milestones/2"
+ "web_url":"https://gitlab.example.com/root/awesome-app/-/milestones/2",
+ "issue_stats": {
+ "total": 24,
+ "closed": 21
+ }
}
],
"commit_path":"/root/awesome-app/commit/588440f66559714280628a4f9799f0c4eb880a4a",
@@ -292,7 +308,12 @@ Example response:
}
],
"links":[
-
+ {
+ "id":3,
+ "name":"hoge",
+ "url":"https://gitlab.example.com/root/awesome-app/-/tags/v0.11.1/binaries/linux-amd64",
+ "external":true
+ }
],
"evidence_url":"https://gitlab.example.com/root/awesome-app/-/releases/v0.1/evidence.json"
},
@@ -303,7 +324,7 @@ Example response:
Create a Release. You need push access to the repository to create a Release.
-```
+```plaintext
POST /projects/:id/releases
```
@@ -312,19 +333,20 @@ POST /projects/:id/releases
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](../README.md#namespaced-path-encoding). |
| `name` | string | no | The release name. |
| `tag_name` | string | yes | The tag where the release will be created from. |
-| `description` | string | yes | The description of the release. You can use [Markdown](../../user/markdown.md). |
+| `description` | string | no | The description of the release. You can use [Markdown](../../user/markdown.md). |
| `ref` | string | yes, if `tag_name` doesn't exist | If `tag_name` doesn't exist, the release will be created from `ref`. It can be a commit SHA, another tag name, or a branch name. |
| `milestones` | array of string | no | The title of each milestone the release is associated with. |
| `assets:links` | array of hash | no | An array of assets links. |
| `assets:links:name`| string | required by: `assets:links` | The name of the link. |
| `assets:links:url` | string | required by: `assets:links` | The url of the link. |
+| `assets:links:filepath` | string | no | Optional path for a [Direct Asset link](../../user/project/releases.md). |
| `released_at` | datetime | no | The date when the release will be/was ready. Defaults to the current time. Expected in ISO 8601 format (`2019-03-15T08:00:00Z`). |
Example request:
```shell
curl --header 'Content-Type: application/json' --header "PRIVATE-TOKEN: gDybLx3yrUK_HLp3qPjS" \
- --data '{ "name": "New release", "tag_name": "v0.3", "description": "Super nice release", "milestones": ["v1.0", "v1.0-rc"], "assets": { "links": [{ "name": "hoge", "url": "https://google.com" }] } }' \
+ --data '{ "name": "New release", "tag_name": "v0.3", "description": "Super nice release", "milestones": ["v1.0", "v1.0-rc"], "assets": { "links": [{ "name": "hoge", "url": "https://google.com", "filepath": "/binaries/linux-amd64" }] } }' \
--request POST https://gitlab.example.com/api/v4/projects/24/releases
```
@@ -374,7 +396,11 @@ Example response:
"updated_at":"2019-07-12T19:45:44.256Z",
"due_date":"2019-08-16T11:00:00.256Z",
"start_date":"2019-07-30T12:00:00.256Z",
- "web_url":"https://gitlab.example.com/root/awesome-app/-/milestones/1"
+ "web_url":"https://gitlab.example.com/root/awesome-app/-/milestones/1",
+ "issue_stats": {
+ "total": 99,
+ "closed": 76
+ }
},
{
"id":52,
@@ -387,7 +413,11 @@ Example response:
"updated_at":"2019-07-16T14:00:12.256Z",
"due_date":"2019-08-16T11:00:00.256Z",
"start_date":"2019-07-30T12:00:00.256Z",
- "web_url":"https://gitlab.example.com/root/awesome-app/-/milestones/2"
+ "web_url":"https://gitlab.example.com/root/awesome-app/-/milestones/2",
+ "issue_stats": {
+ "total": 24,
+ "closed": 21
+ }
}
],
"commit_path":"/root/awesome-app/commit/588440f66559714280628a4f9799f0c4eb880a4a",
@@ -417,7 +447,7 @@ Example response:
{
"id":3,
"name":"hoge",
- "url":"https://google.com",
+ "url":"https://gitlab.example.com/root/awesome-app/-/tags/v0.11.1/binaries/linux-amd64",
"external":true
}
],
@@ -430,7 +460,7 @@ Example response:
Update a Release.
-```
+```plaintext
PUT /projects/:id/releases/:tag_name
```
@@ -495,7 +525,11 @@ Example response:
"updated_at":"2019-09-01T13:00:00.256Z",
"due_date":"2019-09-20T13:00:00.256Z",
"start_date":"2019-09-05T12:00:00.256Z",
- "web_url":"https://gitlab.example.com/root/awesome-app/-/milestones/3"
+ "web_url":"https://gitlab.example.com/root/awesome-app/-/milestones/3",
+ "issue_stats": {
+ "opened": 11,
+ "closed": 78
+ }
}
],
"commit_path":"/root/awesome-app/commit/588440f66559714280628a4f9799f0c4eb880a4a",
@@ -533,7 +567,7 @@ Example response:
Delete a Release. Deleting a Release will not delete the associated tag.
-```
+```plaintext
DELETE /projects/:id/releases/:tag_name
```
diff --git a/doc/api/releases/links.md b/doc/api/releases/links.md
index 2a9e0ccb664..ed428b0fe75 100644
--- a/doc/api/releases/links.md
+++ b/doc/api/releases/links.md
@@ -9,7 +9,7 @@ GitLab supports links links to `http`, `https`, and `ftp` assets.
Get assets as links from a Release.
-```
+```plaintext
GET /projects/:id/releases/:tag_name/assets/links
```
@@ -47,7 +47,7 @@ Example response:
Get an asset as a link from a Release.
-```
+```plaintext
GET /projects/:id/releases/:tag_name/assets/links/:link_id
```
@@ -78,7 +78,7 @@ Example response:
Create an asset as a link from a Release.
-```
+```plaintext
POST /projects/:id/releases/:tag_name/assets/links
```
@@ -114,7 +114,7 @@ Example response:
Update an asset as a link from a Release.
-```
+```plaintext
PUT /projects/:id/releases/:tag_name/assets/links/:link_id
```
@@ -150,7 +150,7 @@ Example response:
Delete an asset as a link from a Release.
-```
+```plaintext
DELETE /projects/:id/releases/:tag_name/assets/links/:link_id
```
diff --git a/doc/api/remote_mirrors.md b/doc/api/remote_mirrors.md
new file mode 100644
index 00000000000..0ffff194976
--- /dev/null
+++ b/doc/api/remote_mirrors.md
@@ -0,0 +1,121 @@
+# Project remote mirrors API
+
+[Push mirrors](../user/project/repository/repository_mirroring.md#pushing-to-a-remote-repository-core)
+defined on a project's repository settings are called "remote mirrors", and the
+state of these mirrors can be queried and modified via the remote mirror API
+outlined below.
+
+## List a project's remote mirrors
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/38121) in GitLab 12.9.
+
+Returns an Array of remote mirrors and their statuses:
+
+```plaintext
+GET /projects/:id/remote_mirrors
+```
+
+Example request:
+
+```shell
+curl --header "PRIVATE-TOKEN: <your_access_token>" 'https://gitlab.example.com/api/v4/projects/42/remote_mirrors'
+```
+
+Example response:
+
+```json
+[
+ {
+ "enabled": true,
+ "id": 101486,
+ "last_error": null,
+ "last_successful_update_at": "2020-01-06T17:32:02.823Z",
+ "last_update_at": "2020-01-06T17:32:02.823Z",
+ "last_update_started_at": "2020-01-06T17:31:55.864Z",
+ "only_protected_branches": true,
+ "update_status": "finished",
+ "url": "https://*****:*****@gitlab.com/gitlab-org/security/gitlab.git"
+ }
+]
+```
+
+NOTE: **Note:**
+For security reasons, the `url` attribute will always be scrubbed of username
+and password information.
+
+## Create a remote mirror
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/24189) in GitLab 12.9.
+
+Create a remote mirror for a project. The mirror will be disabled by default. You can enable it by including the optional parameter `enabled` when creating it:
+
+```plaintext
+POST /projects/:id/remote_mirrors
+```
+
+| Attribute | Type | Required | Description |
+| :---------- | :----- | :--------- | :------------ |
+| `url` | string | yes | The URL of the remote repository to be mirrored. |
+| `enabled` | boolean | no | Determines if the mirror is enabled. |
+| `only_protected_branches` | boolean | no | Determines if only protected branches are mirrored. |
+
+Example request:
+
+```shell
+curl --request POST --data "url=https://username:token@example.com/gitlab/example.git" --header "PRIVATE-TOKEN: <your_access_token>" 'https://gitlab.example.com/api/v4/projects/42/remote_mirrors'
+```
+
+Example response:
+
+```json
+{
+ "enabled": false,
+ "id": 101486,
+ "last_error": null,
+ "last_successful_update_at": null,
+ "last_update_at": null,
+ "last_update_started_at": null,
+ "only_protected_branches": false,
+ "update_status": "none",
+ "url": "https://*****:*****@example.com/gitlab/example.git"
+}
+```
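+
+To create the mirror in an enabled state, the same request can include the `enabled` parameter. A sketch, with placeholder URL, project ID, and token:
+
+```shell
+# Assumed example: create the push mirror and enable it in one request
+curl --request POST --data "url=https://username:token@example.com/gitlab/example.git" --data "enabled=true" --header "PRIVATE-TOKEN: <your_access_token>" 'https://gitlab.example.com/api/v4/projects/42/remote_mirrors'
+```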
+
+## Update a remote mirror's attributes
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/38121) in GitLab 12.9.
+
+Toggle a remote mirror on or off, or change which types of branches are
+mirrored:
+
+```plaintext
+PUT /projects/:id/remote_mirrors/:mirror_id
+```
+
+| Attribute | Type | Required | Description |
+| :---------- | :----- | :--------- | :------------ |
+| `mirror_id` | Integer | yes | The remote mirror ID. |
+| `enabled` | Boolean | no | Determines if the mirror is enabled. |
+| `only_protected_branches` | Boolean | no | Determines if only protected branches are mirrored. |
+
+Example request:
+
+```shell
+curl --request PUT --data "enabled=false" --header "PRIVATE-TOKEN: <your_access_token>" 'https://gitlab.example.com/api/v4/projects/42/remote_mirrors/101486'
+```
+
+Example response:
+
+```json
+{
+ "enabled": false,
+ "id": 101486,
+ "last_error": null,
+ "last_successful_update_at": "2020-01-06T17:32:02.823Z",
+ "last_update_at": "2020-01-06T17:32:02.823Z",
+ "last_update_started_at": "2020-01-06T17:31:55.864Z",
+ "only_protected_branches": true,
+ "update_status": "finished",
+ "url": "https://*****:*****@gitlab.com/gitlab-org/security/gitlab.git"
+}
+```
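+
+To mirror all branches instead of only protected ones, `only_protected_branches` can be toggled in the same way. A sketch, with placeholder mirror ID and token:
+
+```shell
+# Assumed example: stop restricting the mirror to protected branches
+curl --request PUT --data "only_protected_branches=false" --header "PRIVATE-TOKEN: <your_access_token>" 'https://gitlab.example.com/api/v4/projects/42/remote_mirrors/101486'
+```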
diff --git a/doc/api/repositories.md b/doc/api/repositories.md
index 3e34e1522e4..f261c9ab9f7 100644
--- a/doc/api/repositories.md
+++ b/doc/api/repositories.md
@@ -7,7 +7,7 @@ be accessed without authentication if the repository is publicly accessible.
This command provides essentially the same functionality as the `git ls-tree` command. For more information, see the section _Tree Objects_ in the [Git internals documentation](https://git-scm.com/book/en/v2/Git-Internals-Git-Objects/#_tree_objects).
-```
+```plaintext
GET /projects/:id/repository/tree
```
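
For example, listing the tree of a project could look like this (a sketch; the project ID and token are placeholders):

```shell
# Assumed example: list the top-level tree of project 5
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/repository/tree"
```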
@@ -79,7 +79,7 @@ Allows you to receive information about a blob in the repository, such as size and
content. Note that blob content is Base64 encoded. This endpoint can be accessed
without authentication if the repository is publicly accessible.
-```
+```plaintext
GET /projects/:id/repository/blobs/:sha
```
@@ -93,7 +93,7 @@ Parameters:
Get the raw file contents for a blob by blob SHA. This endpoint can be accessed
without authentication if the repository is publicly accessible.
-```
+```plaintext
GET /projects/:id/repository/blobs/:sha/raw
```
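
A hedged example request (the blob SHA, project ID, and token are placeholders):

```shell
# Assumed example: fetch the raw contents of a blob by its SHA
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/repository/blobs/<blob_sha>/raw"
```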
@@ -107,7 +107,7 @@ Parameters:
Get an archive of the repository. This endpoint can be accessed without
authentication if the repository is publicly accessible.
-```
+```plaintext
GET /projects/:id/repository/archive[.format]
```
@@ -130,7 +130,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.com/api/v4/pro
This endpoint can be accessed without authentication if the repository is
publicly accessible. Note that diffs could have an empty diff string if [diff limits](../development/diffs.md#diff-limits) are reached.
-```
+```plaintext
GET /projects/:id/repository/compare
```
@@ -141,7 +141,7 @@ Parameters:
- `to` (required) - the commit SHA or branch name
- `straight` (optional) - comparison method, `true` for direct comparison between `from` and `to` (`from`..`to`), `false` to compare using merge base (`from`...`to`). Default is `false`.
-```
+```plaintext
GET /projects/:id/repository/compare?from=master&to=feature
```
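
For instance, that comparison could be requested with (a sketch; the project ID and token are placeholders):

```shell
# Assumed example: compare the feature branch against master
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/repository/compare?from=master&to=feature"
```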
@@ -154,16 +154,16 @@ Response:
"id": "12d65c8dd2b2676fa3ac47d955accc085a37a9c1",
"short_id": "12d65c8dd2b",
"title": "JS fix",
- "author_name": "Dmitriy Zaporozhets",
- "author_email": "dmitriy.zaporozhets@gmail.com",
+ "author_name": "Example User",
+ "author_email": "user@example.com",
"created_at": "2014-02-27T10:27:00+02:00"
},
"commits": [{
"id": "12d65c8dd2b2676fa3ac47d955accc085a37a9c1",
"short_id": "12d65c8dd2b",
"title": "JS fix",
- "author_name": "Dmitriy Zaporozhets",
- "author_email": "dmitriy.zaporozhets@gmail.com",
+ "author_name": "Example User",
+ "author_email": "user@example.com",
"created_at": "2014-02-27T10:27:00+02:00"
}],
"diffs": [{
@@ -186,7 +186,7 @@ Response:
Get repository contributors list. This endpoint can be accessed without
authentication if the repository is publicly accessible.
-```
+```plaintext
GET /projects/:id/repository/contributors
```
@@ -198,16 +198,16 @@ Parameters:
Response:
-```
+```json
[{
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com",
+ "name": "Example User",
+ "email": "example@example.com",
"commits": 117,
"additions": 2097,
"deletions": 517
}, {
- "name": "Jacob Vosmaer",
- "email": "contact@jacobvosmaer.nl",
+ "name": "Sample User",
+ "email": "sample@example.com",
"commits": 33,
"additions": 338,
"deletions": 244
@@ -218,7 +218,7 @@ Response:
Get the common ancestor for 2 or more refs (commit SHAs, branch names, or tags).
-```
+```plaintext
GET /projects/:id/repository/merge_base
```
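
A sketch of a request, assuming the refs are passed as a `refs[]` array (the SHAs, project ID, and token are placeholders):

```shell
# Assumed example: find the common ancestor of two commits
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/repository/merge_base?refs[]=<sha_1>&refs[]=<sha_2>"
```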
@@ -241,11 +241,11 @@ Example response:
"created_at": "2014-02-27T08:03:18.000Z",
"parent_ids": [],
"message": "Initial commit\n",
- "author_name": "Dmitriy Zaporozhets",
- "author_email": "dmitriy.zaporozhets@gmail.com",
+ "author_name": "Example User",
+ "author_email": "user@example.com",
"authored_date": "2014-02-27T08:03:18.000Z",
- "committer_name": "Dmitriy Zaporozhets",
- "committer_email": "dmitriy.zaporozhets@gmail.com",
+ "committer_name": "Example User",
+ "committer_email": "user@example.com",
"committed_date": "2014-02-27T08:03:18.000Z"
}
```
diff --git a/doc/api/repository_files.md b/doc/api/repository_files.md
index c556f1e8108..845a1187a94 100644
--- a/doc/api/repository_files.md
+++ b/doc/api/repository_files.md
@@ -2,7 +2,7 @@
**CRUD for repository files**
-**Create, read, update and delete repository files using this API**
+**Create, read, update, and delete repository files using this API**
The different scopes available using [personal access tokens](../user/profile/personal_access_tokens.md) are depicted
in the following table.
@@ -20,7 +20,7 @@ Allows you to receive information about a file in the repository, such as name, size,
content. Note that file content is Base64 encoded. This endpoint can be accessed
without authentication if the repository is publicly accessible.
-```
+```plaintext
GET /projects/:id/repository/files/:file_path
```
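
A hedged example request, assuming the file path is URL-encoded and a `ref` is supplied (values are placeholders):

```shell
# Assumed example: read app/models/key.rb on the master branch
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/13083/repository/files/app%2Fmodels%2Fkey%2Erb?ref=master"
```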
@@ -55,7 +55,7 @@ NOTE: **Note:**
In addition to the `GET` method, you can also use `HEAD` to get just file metadata.
-```
+```plaintext
HEAD /projects/:id/repository/files/:file_path
```
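
With `curl`, the metadata-only variant could look like this (a sketch; values are placeholders):

```shell
# Assumed example: fetch only the X-Gitlab-* headers for the file
curl --head --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/13083/repository/files/app%2Fmodels%2Fkey%2Erb?ref=master"
```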
@@ -84,7 +84,7 @@ X-Gitlab-Size: 1476
Allows you to receive blame information. Each blame range contains lines and corresponding commit info.
-```
+```plaintext
GET /projects/:id/repository/files/:file_path/blame
```
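
A hedged example request, assuming a `ref` query parameter alongside the URL-encoded file path (values are placeholders):

```shell
# Assumed example: get blame ranges for path/to/file.rb on master
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/13083/repository/files/path%2Fto%2Ffile%2Erb/blame?ref=master"
```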
@@ -151,7 +151,7 @@ X-Gitlab-Size: 1476
## Get raw file from repository
-```
+```plaintext
GET /projects/:id/repository/files/:file_path/raw
```
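
A hedged example request, assuming a `ref` query parameter (values are placeholders):

```shell
# Assumed example: download the raw contents of path/to/file.rb from master
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/13083/repository/files/path%2Fto%2Ffile%2Erb/raw?ref=master"
```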
@@ -169,9 +169,9 @@ Like [Get file from repository](repository_files.md#get-file-from-repository) yo
## Create new file in repository
-This allows you to create a single file. For creating multiple files with a single request see the [commits API](commits.html#create-a-commit-with-multiple-files-and-actions).
+This allows you to create a single file. For creating multiple files with a single request see the [commits API](commits.md#create-a-commit-with-multiple-files-and-actions).
-```
+```plaintext
POST /projects/:id/repository/files/:file_path
```
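
A sketch of a create request, assuming `branch`, `content`, and `commit_message` are passed as form data (values are placeholders):

```shell
# Assumed example: create a new file on the master branch
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" \
     --data-urlencode "branch=master" \
     --data-urlencode "content=some content" \
     --data-urlencode "commit_message=Create a new file" \
     "https://gitlab.example.com/api/v4/projects/13083/repository/files/app%2Fproject%2Erb"
```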
@@ -204,9 +204,9 @@ Parameters:
## Update existing file in repository
-This allows you to update a single file. For updating multiple files with a single request see the [commits API](commits.html#create-a-commit-with-multiple-files-and-actions).
+This allows you to update a single file. For updating multiple files with a single request see the [commits API](commits.md#create-a-commit-with-multiple-files-and-actions).
-```
+```plaintext
PUT /projects/:id/repository/files/:file_path
```
@@ -242,7 +242,7 @@ If the commit fails for any reason we return a 400 error with a non-specific
error message. Possible causes for a failed commit include:
- the `file_path` contained `/../` (attempted directory traversal);
-- the new file contents were identical to the current file contents, i.e. the
+- the new file contents were identical to the current file contents. That is, the
user tried to make an empty commit;
- the branch was updated by a Git push while the file edit was in progress.
@@ -250,9 +250,9 @@ Currently GitLab Shell has a boolean return code, preventing GitLab from specify
## Delete existing file in repository
-This allows you to delete a single file. For deleting multiple files with a single request, see the [commits API](commits.html#create-a-commit-with-multiple-files-and-actions).
+This allows you to delete a single file. For deleting multiple files with a single request, see the [commits API](commits.md#create-a-commit-with-multiple-files-and-actions).
-```
+```plaintext
DELETE /projects/:id/repository/files/:file_path
```
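
A sketch of a delete request, assuming `branch` and `commit_message` are passed as form data (values are placeholders):

```shell
# Assumed example: delete the file on the master branch
curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" \
     --data-urlencode "branch=master" \
     --data-urlencode "commit_message=Delete the file" \
     "https://gitlab.example.com/api/v4/projects/13083/repository/files/app%2Fproject%2Erb"
```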
diff --git a/doc/api/repository_submodules.md b/doc/api/repository_submodules.md
index 59f6131fc96..2ae7afa35b2 100644
--- a/doc/api/repository_submodules.md
+++ b/doc/api/repository_submodules.md
@@ -9,7 +9,7 @@ submodule's reference to keep other projects that use it up to date.
This endpoint allows you to update a [Git submodule](https://git-scm.com/book/en/v2/Git-Tools-Submodules) reference in a
specific branch.
-```
+```plaintext
PUT /projects/:id/repository/submodules/:submodule
```
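
A hedged example, assuming the submodule path is URL-encoded and `branch`, `commit_sha`, and `commit_message` are accepted parameters (values are placeholders):

```shell
# Assumed example: point the submodule at a new commit on master
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" \
     --data-urlencode "branch=master" \
     --data-urlencode "commit_sha=3ddec28ea23acc5caa5d8331a6ecb2a65fc03e88" \
     --data-urlencode "commit_message=Update submodule reference" \
     "https://gitlab.example.com/api/v4/projects/5/repository/submodules/lib%2Fmodules%2Fexample"
```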
diff --git a/doc/api/resource_label_events.md b/doc/api/resource_label_events.md
index 6532e2d4231..20f48674932 100644
--- a/doc/api/resource_label_events.md
+++ b/doc/api/resource_label_events.md
@@ -1,6 +1,6 @@
# Resource label events API
-Resource label events keep track about who, when, and which label was added or removed to an issuable.
+Resource label events keep track of who, when, and which label was added to, or removed from, an issuable.
## Issues
@@ -8,7 +8,7 @@ Resource label events keep track about who, when, and which label was added or r
Gets a list of all label events for a single issue.
-```
+```plaintext
GET /projects/:id/issues/:issue_iid/resource_label_events
```
@@ -72,7 +72,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/ap
Returns a single label event for a specific project issue.
-```
+```plaintext
GET /projects/:id/issues/:issue_iid/resource_label_events/:resource_label_event_id
```
@@ -94,7 +94,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/ap
Gets a list of all label events for a single epic.
-```
+```plaintext
GET /groups/:id/epics/:epic_id/resource_label_events
```
@@ -158,7 +158,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/ap
Returns a single label event for a specific group epic.
-```
+```plaintext
GET /groups/:id/epics/:epic_id/resource_label_events/:resource_label_event_id
```
@@ -180,7 +180,7 @@ curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab
Gets a list of all label events for a single merge request.
-```
+```plaintext
GET /projects/:id/merge_requests/:merge_request_iid/resource_label_events
```
@@ -244,7 +244,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/ap
Returns a single label event for a specific project merge request.
-```
+```plaintext
GET /projects/:id/merge_requests/:merge_request_iid/resource_label_events/:resource_label_event_id
```
diff --git a/doc/api/runners.md b/doc/api/runners.md
index 53ca97b9e41..551a7580e54 100644
--- a/doc/api/runners.md
+++ b/doc/api/runners.md
@@ -31,7 +31,7 @@ GitLab and Runner are then connected.
Get a list of specific runners available to the user.
-```
+```plaintext
GET /runners
GET /runners?scope=active
GET /runners?type=project_type
@@ -46,7 +46,7 @@ GET /runners?tag_list=tag1,tag2
| `status` | string | no | The status of runners to show, one of: `active`, `paused`, `online`, `offline` |
| `tag_list` | string array | no | List of the runner's tags |
-```
+```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/runners"
```
@@ -82,7 +82,7 @@ Example response:
Get a list of all runners in the GitLab instance (specific and shared). Access
is restricted to users with `admin` privileges.
-```
+```plaintext
GET /runners/all
GET /runners/all?scope=online
GET /runners/all?type=project_type
@@ -97,7 +97,7 @@ GET /runners/all?tag_list=tag1,tag2
| `status` | string | no | The status of runners to show, one of: `active`, `paused`, `online`, `offline` |
| `tag_list` | string array | no | List of the runner's tags |
-```
+```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/runners/all"
```
@@ -152,7 +152,7 @@ Example response:
Get details of a runner.
-```
+```plaintext
GET /runners/:id
```
@@ -160,7 +160,7 @@ GET /runners/:id
|-----------|---------|----------|---------------------|
| `id` | integer | yes | The ID of a runner |
-```
+```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/runners/6"
```
@@ -204,7 +204,7 @@ Example response:
Update details of a runner.
-```
+```plaintext
PUT /runners/:id
```
@@ -219,7 +219,7 @@ PUT /runners/:id
| `access_level` | string | no | The access_level of the runner; `not_protected` or `ref_protected` |
| `maximum_timeout` | integer | no | Maximum timeout set when this Runner will handle the job |
-```
+```shell
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/runners/6" --form "description=test-1-20150125-test" --form "tag_list=ruby,mysql,tag1,tag2"
```
@@ -265,7 +265,7 @@ Example response:
Remove a runner.
-```
+```plaintext
DELETE /runners/:id
```
@@ -273,7 +273,7 @@ DELETE /runners/:id
|-----------|---------|----------|---------------------|
| `id` | integer | yes | The ID of a runner |
-```
+```shell
curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/runners/6"
```
@@ -283,7 +283,7 @@ curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" "https://git
List jobs that are being processed or were processed by specified Runner.
-```
+```plaintext
GET /runners/:id/jobs
```
@@ -294,7 +294,7 @@ GET /runners/:id/jobs
| `order_by` | string | no | Order jobs by `id`. |
| `sort` | string | no | Sort jobs in `asc` or `desc` order (default: `desc`) |
-```
+```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/runners/1/jobs?status=running"
```
@@ -373,7 +373,7 @@ Example response:
List all runners (specific and shared) available in the project. Shared runners
are listed if at least one shared runner is defined.
-```
+```plaintext
GET /projects/:id/runners
GET /projects/:id/runners?scope=active
GET /projects/:id/runners?type=project_type
@@ -389,7 +389,7 @@ GET /projects/:id/runners?tag_list=tag1,tag2
| `status` | string | no | The status of runners to show, one of: `active`, `paused`, `online`, `offline` |
| `tag_list` | string array | no | List of the runner's tags |
-```
+```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/9/runners"
```
@@ -424,7 +424,7 @@ Example response:
Enable an available specific runner in the project.
-```
+```plaintext
POST /projects/:id/runners
```
@@ -433,7 +433,7 @@ POST /projects/:id/runners
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user |
| `runner_id` | integer | yes | The ID of a runner |
-```
+```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/9/runners" --form "runner_id=9"
```
@@ -458,7 +458,7 @@ Disable a specific runner from the project. It works only if the project isn't
the only project associated with the specified runner. If it is, an error is
returned. Use the [Remove a runner](#remove-a-runner) call instead.
-```
+```plaintext
DELETE /projects/:id/runners/:runner_id
```
@@ -467,7 +467,7 @@ DELETE /projects/:id/runners/:runner_id
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user |
| `runner_id` | integer | yes | The ID of a runner |
-```
+```shell
curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/9/runners/9"
```
@@ -475,7 +475,7 @@ curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" "https://git
Register a new Runner for the instance.
-```
+```plaintext
POST /runners
```
@@ -491,7 +491,7 @@ POST /runners
| `access_level` | string | no | The access_level of the runner; `not_protected` or `ref_protected` |
| `maximum_timeout` | integer | no | Maximum timeout set when this Runner will handle the job |
-```
+```shell
curl --request POST "https://gitlab.example.com/api/v4/runners" --form "token=<registration_token>" --form "description=test-1-20150125-test" --form "tag_list=ruby,mysql,tag1,tag2"
```
@@ -514,7 +514,7 @@ Example response:
Deletes a registered Runner.
-```
+```plaintext
DELETE /runners
```
@@ -522,7 +522,7 @@ DELETE /runners
|-------------|---------|----------|---------------------|
| `token` | string | yes | Runner's [authentication token](#registration-and-authentication-tokens). |
-```
+```shell
curl --request DELETE "https://gitlab.example.com/api/v4/runners" --form "token=<authentication_token>"
```
@@ -536,7 +536,7 @@ Response:
Validates authentication credentials for a registered Runner.
-```
+```plaintext
POST /runners/verify
```
@@ -544,7 +544,7 @@ POST /runners/verify
|-------------|---------|----------|---------------------|
| `token` | string | yes | Runner's [authentication token](#registration-and-authentication-tokens). |
-```
+```shell
curl --request POST "https://gitlab.example.com/api/v4/runners/verify" --form "token=<authentication_token>"
```
diff --git a/doc/api/scim.md b/doc/api/scim.md
index 11721517cbb..eaa56b0d0dd 100644
--- a/doc/api/scim.md
+++ b/doc/api/scim.md
@@ -122,7 +122,7 @@ Parameters:
| `userName` | string | yes | Username of the user. |
| `emails` | JSON string | yes | Work email. |
| `name` | JSON string | yes | Name of the user. |
-| `meta` | string | no | Resource type (`User'). |
+| `meta` | string | no | Resource type (`User`). |
Example request:
@@ -219,7 +219,7 @@ They match an expression as specified in [the RFC7644 filtering section](https:/
Example:
-```
+```plaintext
id eq a-b-c-d
```
diff --git a/doc/api/search.md b/doc/api/search.md
index 8203fbecbb3..9ca9cc5c2a4 100644
--- a/doc/api/search.md
+++ b/doc/api/search.md
@@ -8,7 +8,7 @@ Every API call to search must be authenticated.
Search globally across the GitLab instance.
-```
+```plaintext
GET /search
```
@@ -255,6 +255,8 @@ Example response:
### Scope: snippet_blobs
+This scope will be disabled after GitLab 13.0.
+
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/search?scope=snippet_blobs&search=test
```
@@ -410,7 +412,7 @@ Search within the specified group.
If a user is not a member of a group and the group is private, a `GET` request on that group will result in a `404` status code.
-```
+```plaintext
GET /groups/:id/search
```
@@ -751,7 +753,7 @@ Search within the specified project.
If a user is not a member of a project and the project is private, a `GET` request on that project will result in a `404` status code.
-```
+```plaintext
GET /projects/:id/search
```
diff --git a/doc/api/services.md b/doc/api/services.md
index 2f70cb21964..8c70033d71d 100644
--- a/doc/api/services.md
+++ b/doc/api/services.md
@@ -8,7 +8,7 @@
Get a list of all active project services.
-```
+```plaintext
GET /projects/:id/services
```
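
A hedged example request (the project ID and token are placeholders):

```shell
# Assumed example: list all active services for project 5
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/services"
```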
@@ -69,7 +69,7 @@ Set Asana service for a project.
> This service adds commit messages as comments to Asana tasks. Once enabled, commit messages are checked for Asana task URLs (for example, `https://app.asana.com/0/123456/987654`) or task IDs starting with # (for example, `#987654`). Every task ID found will get the commit comment added to it. You can also close a task with a message containing: `fix #123456`. You can find your API Keys here: <https://asana.com/developers/documentation/getting-started/auth#api-key>.
-```
+```plaintext
PUT /projects/:id/services/asana
```
@@ -85,7 +85,7 @@ Parameters:
Delete Asana service for a project.
-```
+```plaintext
DELETE /projects/:id/services/asana
```
@@ -93,7 +93,7 @@ DELETE /projects/:id/services/asana
Get Asana service settings for a project.
-```
+```plaintext
GET /projects/:id/services/asana
```
@@ -105,7 +105,7 @@ Project Management Software (Source Commits Endpoint)
Set Assembla service for a project.
-```
+```plaintext
PUT /projects/:id/services/assembla
```
@@ -121,7 +121,7 @@ Parameters:
Delete Assembla service for a project.
-```
+```plaintext
DELETE /projects/:id/services/assembla
```
@@ -129,7 +129,7 @@ DELETE /projects/:id/services/assembla
Get Assembla service settings for a project.
-```
+```plaintext
GET /projects/:id/services/assembla
```
@@ -143,7 +143,7 @@ Set Atlassian Bamboo CI service for a project.
> You must set up automatic revision labeling and a repository trigger in Bamboo.
-```
+```plaintext
PUT /projects/:id/services/bamboo
```
@@ -161,7 +161,7 @@ Parameters:
Delete Atlassian Bamboo CI service for a project.
-```
+```plaintext
DELETE /projects/:id/services/bamboo
```
@@ -169,7 +169,7 @@ DELETE /projects/:id/services/bamboo
Get Atlassian Bamboo CI service settings for a project.
-```
+```plaintext
GET /projects/:id/services/bamboo
```
@@ -181,7 +181,7 @@ Bugzilla Issue Tracker
Set Bugzilla service for a project.
-```
+```plaintext
PUT /projects/:id/services/bugzilla
```
@@ -200,7 +200,7 @@ Parameters:
Delete Bugzilla service for a project.
-```
+```plaintext
DELETE /projects/:id/services/bugzilla
```
@@ -208,7 +208,7 @@ DELETE /projects/:id/services/bugzilla
Get Bugzilla service settings for a project.
-```
+```plaintext
GET /projects/:id/services/bugzilla
```
@@ -220,7 +220,7 @@ Continuous integration and deployments
Set Buildkite service for a project.
-```
+```plaintext
PUT /projects/:id/services/buildkite
```
@@ -237,7 +237,7 @@ Parameters:
Delete Buildkite service for a project.
-```
+```plaintext
DELETE /projects/:id/services/buildkite
```
@@ -245,7 +245,7 @@ DELETE /projects/:id/services/buildkite
Get Buildkite service settings for a project.
-```
+```plaintext
GET /projects/:id/services/buildkite
```
@@ -257,7 +257,7 @@ Simple web-based real-time group chat
Set Campfire service for a project.
-```
+```plaintext
PUT /projects/:id/services/campfire
```
@@ -274,7 +274,7 @@ Parameters:
Delete Campfire service for a project.
-```
+```plaintext
DELETE /projects/:id/services/campfire
```
@@ -282,7 +282,7 @@ DELETE /projects/:id/services/campfire
Get Campfire service settings for a project.
-```
+```plaintext
GET /projects/:id/services/campfire
```
@@ -294,7 +294,7 @@ Unify Circuit RTC and collaboration tool.
Set Unify Circuit service for a project.
-```
+```plaintext
PUT /projects/:id/services/unify-circuit
```
@@ -319,7 +319,7 @@ Parameters:
Delete Unify Circuit service for a project.
-```
+```plaintext
DELETE /projects/:id/services/unify-circuit
```
@@ -327,7 +327,7 @@ DELETE /projects/:id/services/unify-circuit
Get Unify Circuit service settings for a project.
-```
+```plaintext
GET /projects/:id/services/unify-circuit
```
@@ -339,7 +339,7 @@ Custom issue tracker
Set Custom Issue Tracker service for a project.
-```
+```plaintext
PUT /projects/:id/services/custom-issue-tracker
```
@@ -358,7 +358,7 @@ Parameters:
Delete Custom Issue Tracker service for a project.
-```
+```plaintext
DELETE /projects/:id/services/custom-issue-tracker
```
@@ -366,7 +366,7 @@ DELETE /projects/:id/services/custom-issue-tracker
Get Custom Issue Tracker service settings for a project.
-```
+```plaintext
GET /projects/:id/services/custom-issue-tracker
```
@@ -378,7 +378,7 @@ Drone is a Continuous Integration platform built on Docker, written in Go
Set Drone CI service for a project.
-```
+```plaintext
PUT /projects/:id/services/drone-ci
```
@@ -397,7 +397,7 @@ Parameters:
Delete Drone CI service for a project.
-```
+```plaintext
DELETE /projects/:id/services/drone-ci
```
@@ -405,7 +405,7 @@ DELETE /projects/:id/services/drone-ci
Get Drone CI service settings for a project.
-```
+```plaintext
GET /projects/:id/services/drone-ci
```
@@ -417,7 +417,7 @@ Email the commits and diff of each push to a list of recipients.
Set Emails on push service for a project.
-```
+```plaintext
PUT /projects/:id/services/emails-on-push
```
@@ -436,7 +436,7 @@ Parameters:
Delete Emails on push service for a project.
-```
+```plaintext
DELETE /projects/:id/services/emails-on-push
```
@@ -444,7 +444,7 @@ DELETE /projects/:id/services/emails-on-push
Get Emails on push service settings for a project.
-```
+```plaintext
GET /projects/:id/services/emails-on-push
```
@@ -456,7 +456,7 @@ Replaces the link to the internal wiki with a link to an external wiki.
Set External Wiki service for a project.
-```
+```plaintext
PUT /projects/:id/services/external-wiki
```
@@ -470,7 +470,7 @@ Parameters:
Delete External Wiki service for a project.
-```
+```plaintext
DELETE /projects/:id/services/external-wiki
```
@@ -478,7 +478,7 @@ DELETE /projects/:id/services/external-wiki
Get External Wiki service settings for a project.
-```
+```plaintext
GET /projects/:id/services/external-wiki
```
@@ -490,7 +490,7 @@ Flowdock is a collaboration web app for technical teams.
Set Flowdock service for a project.
-```
+```plaintext
PUT /projects/:id/services/flowdock
```
@@ -505,7 +505,7 @@ Parameters:
Delete Flowdock service for a project.
-```
+```plaintext
DELETE /projects/:id/services/flowdock
```
@@ -513,7 +513,7 @@ DELETE /projects/:id/services/flowdock
Get Flowdock service settings for a project.
-```
+```plaintext
GET /projects/:id/services/flowdock
```
@@ -525,7 +525,7 @@ Code collaboration software.
Set GitHub service for a project.
-```
+```plaintext
PUT /projects/:id/services/github
```
@@ -541,7 +541,7 @@ Parameters:
Delete GitHub service for a project.
-```
+```plaintext
DELETE /projects/:id/services/github
```
@@ -549,7 +549,7 @@ DELETE /projects/:id/services/github
Get GitHub service settings for a project.
-```
+```plaintext
GET /projects/:id/services/github
```
@@ -563,11 +563,11 @@ Google GSuite team collaboration tool.
Set Hangouts Chat service for a project.
-```
+```plaintext
PUT /projects/:id/services/hangouts-chat
```
->**Note:** Specific event parameters (e.g. `push_events` flag) were [introduced in v10.4][11435]
+>**Note:** Specific event parameters (for example, `push_events` flag) were [introduced in v10.4][11435]
Parameters:
@@ -591,7 +591,7 @@ Parameters:
Delete Hangouts Chat service for a project.
-```
+```plaintext
DELETE /projects/:id/services/hangouts-chat
```
@@ -599,7 +599,7 @@ DELETE /projects/:id/services/hangouts-chat
Get Hangouts Chat service settings for a project.
-```
+```plaintext
GET /projects/:id/services/hangouts-chat
```
@@ -611,7 +611,7 @@ Private group chat and IM
Set HipChat service for a project.
-```
+```plaintext
PUT /projects/:id/services/hipchat
```
@@ -638,7 +638,7 @@ Parameters:
Delete HipChat service for a project.
-```
+```plaintext
DELETE /projects/:id/services/hipchat
```
@@ -646,7 +646,7 @@ DELETE /projects/:id/services/hipchat
Get HipChat service settings for a project.
-```
+```plaintext
GET /projects/:id/services/hipchat
```
@@ -658,9 +658,9 @@ Send IRC messages, on update, to a list of recipients through an Irker gateway.
Set Irker (IRC gateway) service for a project.
-> NOTE: Irker does NOT have built-in authentication, which makes it vulnerable to spamming IRC channels if it is hosted outside of a firewall. Please make sure you run the daemon within a secured network to prevent abuse. For more details, read: <http://www.catb.org/~esr/irker/security.html>.
+> NOTE: Irker does NOT have built-in authentication, which makes it vulnerable to spamming IRC channels if it is hosted outside of a firewall. Please make sure you run the daemon within a secured network to prevent abuse. For more details, read: <http://www.catb.org/~esr/irker/security.html>.
-```
+```plaintext
PUT /projects/:id/services/irker
```
@@ -679,7 +679,7 @@ Parameters:
Delete Irker (IRC gateway) service for a project.
-```
+```plaintext
DELETE /projects/:id/services/irker
```
@@ -687,7 +687,7 @@ DELETE /projects/:id/services/irker
Get Irker (IRC gateway) service settings for a project.
-```
+```plaintext
GET /projects/:id/services/irker
```
@@ -699,7 +699,7 @@ Jira issue tracker.
Get Jira service settings for a project.
-```
+```plaintext
GET /projects/:id/services/jira
```
@@ -711,7 +711,7 @@ Set Jira service for a project.
> `project_url` are replaced by `url`. If you are using an
> older version, [follow this documentation](https://gitlab.com/gitlab-org/gitlab/blob/8-13-stable-ee/doc/api/services.md#jira).
-```
+```plaintext
PUT /projects/:id/services/jira
```
@@ -733,7 +733,7 @@ Parameters:
Remove all previously Jira settings from a project.
-```
+```plaintext
DELETE /projects/:id/services/jira
```
@@ -745,7 +745,7 @@ Ability to receive slash commands from a Slack chat instance.
Get Slack slash command service settings for a project.
-```
+```plaintext
GET /projects/:id/services/slack-slash-commands
```
@@ -778,7 +778,7 @@ Example response:
Set Slack slash command for a project.
-```
+```plaintext
PUT /projects/:id/services/slack-slash-commands
```
@@ -792,7 +792,7 @@ Parameters:
Delete Slack slash command service for a project.
-```
+```plaintext
DELETE /projects/:id/services/slack-slash-commands
```
@@ -804,7 +804,7 @@ Ability to receive slash commands from a Mattermost chat instance.
Get Mattermost slash command service settings for a project.
-```
+```plaintext
GET /projects/:id/services/mattermost-slash-commands
```
@@ -812,7 +812,7 @@ GET /projects/:id/services/mattermost-slash-commands
Set Mattermost slash command for a project.
-```
+```plaintext
PUT /projects/:id/services/mattermost-slash-commands
```
@@ -827,19 +827,19 @@ Parameters:
Delete Mattermost slash command service for a project.
-```
+```plaintext
DELETE /projects/:id/services/mattermost-slash-commands
```
## Packagist
-Update your project on Packagist, the main Composer repository, when commits or tags are pushed to GitLab.
+Update your project on Packagist (the main Composer repository) when commits or tags are pushed to GitLab.
### Create/Edit Packagist service
Set Packagist service for a project.
-```
+```plaintext
PUT /projects/:id/services/packagist
```
@@ -858,7 +858,7 @@ Parameters:
Delete Packagist service for a project.
-```
+```plaintext
DELETE /projects/:id/services/packagist
```
@@ -866,7 +866,7 @@ DELETE /projects/:id/services/packagist
Get Packagist service settings for a project.
-```
+```plaintext
GET /projects/:id/services/packagist
```
@@ -878,7 +878,7 @@ Get emails for GitLab CI pipelines.
Set Pipeline-Emails service for a project.
-```
+```plaintext
PUT /projects/:id/services/pipelines-email
```
@@ -897,7 +897,7 @@ Parameters:
Delete Pipeline-Emails service for a project.
-```
+```plaintext
DELETE /projects/:id/services/pipelines-email
```
@@ -905,7 +905,7 @@ DELETE /projects/:id/services/pipelines-email
Get Pipeline-Emails service settings for a project.
-```
+```plaintext
GET /projects/:id/services/pipelines-email
```
@@ -917,7 +917,7 @@ Project Management Software (Source Commits Endpoint)
Set PivotalTracker service for a project.
-```
+```plaintext
PUT /projects/:id/services/pivotaltracker
```
@@ -933,7 +933,7 @@ Parameters:
Delete PivotalTracker service for a project.
-```
+```plaintext
DELETE /projects/:id/services/pivotaltracker
```
@@ -941,7 +941,7 @@ DELETE /projects/:id/services/pivotaltracker
Get PivotalTracker service settings for a project.
-```
+```plaintext
GET /projects/:id/services/pivotaltracker
```
@@ -953,7 +953,7 @@ Prometheus is a powerful time-series monitoring service.
Set Prometheus service for a project.
-```
+```plaintext
PUT /projects/:id/services/prometheus
```
@@ -967,7 +967,7 @@ Parameters:
Delete Prometheus service for a project.
-```
+```plaintext
DELETE /projects/:id/services/prometheus
```
@@ -975,7 +975,7 @@ DELETE /projects/:id/services/prometheus
Get Prometheus service settings for a project.
-```
+```plaintext
GET /projects/:id/services/prometheus
```
@@ -987,7 +987,7 @@ Pushover makes it easy to get real-time notifications on your Android device, iP
Set Pushover service for a project.
-```
+```plaintext
PUT /projects/:id/services/pushover
```
@@ -1006,7 +1006,7 @@ Parameters:
Delete Pushover service for a project.
-```
+```plaintext
DELETE /projects/:id/services/pushover
```
@@ -1014,7 +1014,7 @@ DELETE /projects/:id/services/pushover
Get Pushover service settings for a project.
-```
+```plaintext
GET /projects/:id/services/pushover
```
@@ -1026,7 +1026,7 @@ Redmine issue tracker
Set Redmine service for a project.
-```
+```plaintext
PUT /projects/:id/services/redmine
```
@@ -1044,7 +1044,7 @@ Parameters:
Delete Redmine service for a project.
-```
+```plaintext
DELETE /projects/:id/services/redmine
```
@@ -1052,7 +1052,7 @@ DELETE /projects/:id/services/redmine
Get Redmine service settings for a project.
-```
+```plaintext
GET /projects/:id/services/redmine
```
@@ -1064,11 +1064,11 @@ Receive event notifications in Slack
Set Slack service for a project.
-```
+```plaintext
PUT /projects/:id/services/slack
```
->**Note:** Specific event parameters (e.g. `push_events` flag and `push_channel`) were [introduced in v10.4][11435]
+>**Note:** Specific event parameters (for example, `push_events` flag and `push_channel`) were [introduced in v10.4][11435]
Parameters:
@@ -1107,7 +1107,7 @@ Parameters:
Delete Slack service for a project.
-```
+```plaintext
DELETE /projects/:id/services/slack
```
@@ -1115,7 +1115,7 @@ DELETE /projects/:id/services/slack
Get Slack service settings for a project.
-```
+```plaintext
GET /projects/:id/services/slack
```
@@ -1127,7 +1127,7 @@ Group Chat Software
Set Microsoft Teams service for a project.
-```
+```plaintext
PUT /projects/:id/services/microsoft-teams
```
@@ -1145,7 +1145,7 @@ Parameters:
| `merge_requests_events` | boolean | false | Enable notifications for merge request events |
| `tag_push_events` | boolean | false | Enable notifications for tag push events |
| `note_events` | boolean | false | Enable notifications for note events |
-| `confidental_note_events` | boolean | false | Enable notifications for confidential note events |
+| `confidential_note_events` | boolean | false | Enable notifications for confidential note events |
| `pipeline_events` | boolean | false | Enable notifications for pipeline events |
| `wiki_page_events` | boolean | false | Enable notifications for wiki page events |
@@ -1153,7 +1153,7 @@ Parameters:
Delete Microsoft Teams service for a project.
-```
+```plaintext
DELETE /projects/:id/services/microsoft-teams
```
@@ -1161,7 +1161,7 @@ DELETE /projects/:id/services/microsoft-teams
Get Microsoft Teams service settings for a project.
-```
+```plaintext
GET /projects/:id/services/microsoft-teams
```
@@ -1173,11 +1173,11 @@ Receive event notifications in Mattermost
Set Mattermost service for a project.
-```
+```plaintext
PUT /projects/:id/services/mattermost
```
->**Note:** Specific event parameters (e.g. `push_events` flag and `push_channel`) were [introduced in v10.4][11435]
+>**Note:** Specific event parameters (for example, `push_events` flag and `push_channel`) were [introduced in v10.4][11435]
Parameters:
@@ -1212,7 +1212,7 @@ Parameters:
Delete Mattermost Notifications service for a project.
-```
+```plaintext
DELETE /projects/:id/services/mattermost
```
@@ -1220,7 +1220,7 @@ DELETE /projects/:id/services/mattermost
Get Mattermost notifications service settings for a project.
-```
+```plaintext
GET /projects/:id/services/mattermost
```
@@ -1234,7 +1234,7 @@ Set JetBrains TeamCity CI service for a project.
> The build configuration in TeamCity must use the build format number `%build.vcs.number%`. You will also want to configure monitoring of all branches so merge requests build; that setting is in the VCS root advanced settings.
-```
+```plaintext
PUT /projects/:id/services/teamcity
```
@@ -1252,7 +1252,7 @@ Parameters:
Delete JetBrains TeamCity CI service for a project.
-```
+```plaintext
DELETE /projects/:id/services/teamcity
```
@@ -1260,7 +1260,7 @@ DELETE /projects/:id/services/teamcity
Get JetBrains TeamCity CI service settings for a project.
-```
+```plaintext
GET /projects/:id/services/teamcity
```
@@ -1272,7 +1272,7 @@ A continuous integration and build server
Set Jenkins CI service for a project.
-```
+```plaintext
PUT /projects/:id/services/jenkins
```
@@ -1287,7 +1287,7 @@ Parameters:
Delete Jenkins CI service for a project.
-```
+```plaintext
DELETE /projects/:id/services/jenkins
```
@@ -1295,7 +1295,7 @@ DELETE /projects/:id/services/jenkins
Get Jenkins CI service settings for a project.
-```
+```plaintext
GET /projects/:id/services/jenkins
```
@@ -1307,7 +1307,7 @@ A continuous integration and build server
Set Jenkins CI (Deprecated) service for a project.
-```
+```plaintext
PUT /projects/:id/services/jenkins-deprecated
```
@@ -1321,7 +1321,7 @@ Parameters:
Delete Jenkins CI (Deprecated) service for a project.
-```
+```plaintext
DELETE /projects/:id/services/jenkins-deprecated
```
@@ -1329,7 +1329,7 @@ DELETE /projects/:id/services/jenkins-deprecated
Get Jenkins CI (Deprecated) service settings for a project.
-```
+```plaintext
GET /projects/:id/services/jenkins-deprecated
```
@@ -1343,7 +1343,7 @@ This service is only available when your environment is set to development.
Set MockCI service for a project.
-```
+```plaintext
PUT /projects/:id/services/mock-ci
```
@@ -1357,7 +1357,7 @@ Parameters:
Delete MockCI service for a project.
-```
+```plaintext
DELETE /projects/:id/services/mock-ci
```
@@ -1365,7 +1365,7 @@ DELETE /projects/:id/services/mock-ci
Get MockCI service settings for a project.
-```
+```plaintext
GET /projects/:id/services/mock-ci
```
@@ -1379,7 +1379,7 @@ YouTrack issue tracker
Set YouTrack service for a project.
-```
+```plaintext
PUT /projects/:id/services/youtrack
```
@@ -1396,7 +1396,7 @@ Parameters:
Delete YouTrack service for a project.
-```
+```plaintext
DELETE /projects/:id/services/youtrack
```
@@ -1404,6 +1404,6 @@ DELETE /projects/:id/services/youtrack
Get YouTrack service settings for a project.
-```
+```plaintext
GET /projects/:id/services/youtrack
```
diff --git a/doc/api/settings.md b/doc/api/settings.md
index cae51579afb..2c75c175fdd 100644
--- a/doc/api/settings.md
+++ b/doc/api/settings.md
@@ -10,7 +10,7 @@ administrator in order to perform this action.
List the current [application settings](#list-of-settings-that-can-be-accessed-via-api-calls)
of the GitLab instance.
-```
+```plaintext
GET /application/settings
```
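
A hedged example request (the token is a placeholder and must belong to an administrator):

```shell
# Assumed example: read the current instance-level settings
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/application/settings"
```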
@@ -90,7 +90,7 @@ the `file_template_project_id`, `deletion_adjourned_period`, or the `geo_node_al
Use an API call to modify GitLab instance
[application settings](#list-of-settings-that-can-be-accessed-via-api-calls).
-```
+```plaintext
PUT /application/settings
```
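
A sketch of an update request; the chosen settings are illustrative only:

```shell
# Assumed example: disable sign-ups and relax default branch protection
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/application/settings?signup_enabled=false&default_branch_protection=0"
```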
@@ -201,7 +201,7 @@ are listed in the descriptions of the relevant settings.
| `commit_email_hostname` | string | no | Custom hostname (for private commit emails). |
| `container_registry_token_expire_delay` | integer | no | Container Registry token duration in minutes. |
| `default_artifacts_expire_in` | string | no | Set the default expiration time for each job's artifacts. |
-| `default_branch_protection` | integer | no | Determine if developers can push to master. Can take: `0` _(not protected, both developers and maintainers can push new commits, force push, or delete the branch)_, `1` _(partially protected, developers and maintainers can push new commits, but cannot force push or delete the branch)_ or `2` _(fully protected, developers cannot push new commits, but maintainers can; no-one can force push or delete the branch)_ as a parameter. Default is `2`. |
+| `default_branch_protection` | integer | no | Determine if developers can push to master. Can take: `0` _(not protected, both developers and maintainers can push new commits, force push, or delete the branch)_, `1` _(partially protected, developers and maintainers can push new commits, but cannot force push, or delete, the branch)_ or `2` _(fully protected, developers cannot push new commits, but maintainers can; no-one can force push or delete the branch)_ as a parameter. Default is `2`. |
| `default_ci_config_path` | string | no | Default CI configuration path for new projects (`.gitlab-ci.yml` if not set). |
| `default_group_visibility` | string | no | What visibility level new groups receive. Can take `private`, `internal` and `public` as a parameter. Default is `private`. |
| `default_project_creation` | integer | no | Default project creation protection. Can take: `0` _(No one)_, `1` _(Maintainers)_ or `2` _(Developers + Maintainers)_|
@@ -230,7 +230,7 @@ are listed in the descriptions of the relevant settings.
| `elasticsearch_namespace_ids` | array of integers | no | **(PREMIUM)** The namespaces to index via Elasticsearch if `elasticsearch_limit_indexing` is enabled. |
| `elasticsearch_project_ids` | array of integers | no | **(PREMIUM)** The projects to index via Elasticsearch if `elasticsearch_limit_indexing` is enabled. |
| `elasticsearch_search` | boolean | no | **(PREMIUM)** Enable Elasticsearch search |
-| `elasticsearch_url` | string | no | **(PREMIUM)** The url to use for connecting to Elasticsearch. Use a comma-separated list to support cluster (e.g., `http://localhost:9200, http://localhost:9201"`). If your Elasticsearch instance is password protected, pass the `username:password` in the URL (e.g., `http://<username>:<password>@<elastic_host>:9200/`). |
+| `elasticsearch_url` | string | no | **(PREMIUM)** The URL to use for connecting to Elasticsearch. Use a comma-separated list to support a cluster (for example, `http://localhost:9200, http://localhost:9201`). If your Elasticsearch instance is password protected, pass the `username:password` in the URL (for example, `http://<username>:<password>@<elastic_host>:9200/`). |
| `email_additional_text` | string | no | **(PREMIUM)** Additional text added to the bottom of every email for legal/auditing/compliance reasons |
| `email_author_in_body` | boolean | no | Some email servers do not support overriding the email sender name. Enable this option to include the name of the author of the issue, merge request or comment in the email body instead. |
| `enabled_git_access_protocol` | string | no | Enabled protocols for Git access. Allowed values are: `ssh`, `http`, and `nil` to allow both protocols. |
@@ -239,7 +239,7 @@ are listed in the descriptions of the relevant settings.
| `external_auth_client_key_pass` | string | no | Passphrase to use for the private key when authenticating with the external service this is encrypted when stored |
| `external_auth_client_key` | string | required by: `external_auth_client_cert` | Private key for the certificate when authentication is required for the external authorization service, this is encrypted when stored |
| `external_authorization_service_default_label` | string | required by: `external_authorization_service_enabled` | The default classification label to use when requesting authorization and no classification label has been specified on the project |
-| `external_authorization_service_enabled` | boolean | no | (**If enabled, requires:** `external_authorization_service_default_label`, `external_authorization_service_timeout` and `external_authorization_service_url` ) Enable using an external authorization service for accessing projects |
+| `external_authorization_service_enabled` | boolean | no | (**If enabled, requires:** `external_authorization_service_default_label`, `external_authorization_service_timeout` and `external_authorization_service_url`) Enable using an external authorization service for accessing projects |
| `external_authorization_service_timeout` | float | required by: `external_authorization_service_enabled` | The timeout after which an authorization request is aborted, in seconds. When a request times out, access is denied to the user. (min: 0.001, max: 10, step: 0.001) |
| `external_authorization_service_url` | string | required by: `external_authorization_service_enabled` | URL to which authorization requests will be directed |
| `file_template_project_id` | integer | no | **(PREMIUM)** The ID of a project to load custom file templates from |
@@ -323,23 +323,23 @@ are listed in the descriptions of the relevant settings.
| `slack_app_id` | string | required by: `slack_app_enabled` | **(PREMIUM)** The app id of the Slack-app. |
| `slack_app_secret` | string | required by: `slack_app_enabled` | **(PREMIUM)** The app secret of the Slack-app. |
| `slack_app_verification_token` | string | required by: `slack_app_enabled` | **(PREMIUM)** The verification token of the Slack-app. |
-| `snowplow_collector_hostname` | string | required by: `snowplow_enabled` | The Snowplow collector hostname. (e.g. `snowplow.trx.gitlab.net`) |
-| `snowplow_cookie_domain` | string | no | The Snowplow cookie domain. (e.g. `.gitlab.com`) |
+| `snowplow_collector_hostname` | string | required by: `snowplow_enabled` | The Snowplow collector hostname. (for example, `snowplow.trx.gitlab.net`) |
+| `snowplow_cookie_domain` | string | no | The Snowplow cookie domain. (for example, `.gitlab.com`) |
| `snowplow_enabled` | boolean | no | Enable snowplow tracking. |
-| `snowplow_app_id` | string | no | The Snowplow site name / application id. (e.g. `gitlab`) |
+| `snowplow_app_id` | string | no | The Snowplow site name / application id. (for example, `gitlab`) |
| `snowplow_iglu_registry_url` | string | no | The Snowplow base Iglu Schema Registry URL to use for custom context and self-describing events |
| `sourcegraph_enabled` | boolean | no | Enables Sourcegraph integration. Default is `false`. **If enabled, requires** `sourcegraph_url`. |
| `sourcegraph_url` | string | required by: `sourcegraph_enabled` | The Sourcegraph instance URL for integration. |
| `sourcegraph_public_only` | boolean | no | Blocks Sourcegraph from being loaded on private and internal projects. Default is `true`. |
| `terminal_max_session_time` | integer | no | Maximum time for web terminal websocket connection (in seconds). Set to `0` for unlimited time. |
| `terms` | text | required by: `enforce_terms` | (**Required by:** `enforce_terms`) Markdown content for the ToS. |
-| `throttle_authenticated_api_enabled` | boolean | no | (**If enabled, requires:** `throttle_authenticated_api_period_in_seconds` and `throttle_authenticated_api_requests_per_period`) Enable authenticated API request rate limit. Helps reduce request volume (e.g. from crawlers or abusive bots). |
+| `throttle_authenticated_api_enabled` | boolean | no | (**If enabled, requires:** `throttle_authenticated_api_period_in_seconds` and `throttle_authenticated_api_requests_per_period`) Enable authenticated API request rate limit. Helps reduce request volume (for example, from crawlers or abusive bots). |
| `throttle_authenticated_api_period_in_seconds` | integer | required by: `throttle_authenticated_api_enabled` | Rate limit period in seconds. |
| `throttle_authenticated_api_requests_per_period` | integer | required by: `throttle_authenticated_api_enabled` | Max requests per period per user. |
-| `throttle_authenticated_web_enabled` | boolean | no | (**If enabled, requires:** `throttle_authenticated_web_period_in_seconds` and `throttle_authenticated_web_requests_per_period`) Enable authenticated web request rate limit. Helps reduce request volume (e.g. from crawlers or abusive bots). |
+| `throttle_authenticated_web_enabled` | boolean | no | (**If enabled, requires:** `throttle_authenticated_web_period_in_seconds` and `throttle_authenticated_web_requests_per_period`) Enable authenticated web request rate limit. Helps reduce request volume (for example, from crawlers or abusive bots). |
| `throttle_authenticated_web_period_in_seconds` | integer | required by: `throttle_authenticated_web_enabled` | Rate limit period in seconds. |
| `throttle_authenticated_web_requests_per_period` | integer | required by: `throttle_authenticated_web_enabled` | Max requests per period per user. |
-| `throttle_unauthenticated_enabled` | boolean | no | (**If enabled, requires:** `throttle_unauthenticated_period_in_seconds` and `throttle_unauthenticated_requests_per_period`) Enable unauthenticated request rate limit. Helps reduce request volume (e.g. from crawlers or abusive bots). |
+| `throttle_unauthenticated_enabled` | boolean | no | (**If enabled, requires:** `throttle_unauthenticated_period_in_seconds` and `throttle_unauthenticated_requests_per_period`) Enable unauthenticated request rate limit. Helps reduce request volume (for example, from crawlers or abusive bots). |
| `throttle_unauthenticated_period_in_seconds` | integer | required by: `throttle_unauthenticated_enabled` | Rate limit period in seconds. |
| `throttle_unauthenticated_requests_per_period` | integer | required by: `throttle_unauthenticated_enabled` | Max requests per period per IP. |
| `time_tracking_limit_to_hours` | boolean | no | Limit display of time tracking units to hours. Default is `false`. |
diff --git a/doc/api/sidekiq_metrics.md b/doc/api/sidekiq_metrics.md
index 76aa04077c7..5350feff4e3 100644
--- a/doc/api/sidekiq_metrics.md
+++ b/doc/api/sidekiq_metrics.md
@@ -10,7 +10,7 @@ of Sidekiq, its jobs, queues, and processes.
List information about all the registered queues, their backlog and their
latency.
-```
+```plaintext
GET /sidekiq/queue_metrics
```
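
A hedged example request (the token is a placeholder and must belong to an administrator):

```shell
# Assumed example: inspect Sidekiq queue backlog and latency
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/sidekiq/queue_metrics"
```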
@@ -35,7 +35,7 @@ Example response:
List information about all the Sidekiq workers registered to process your queues.
-```
+```plaintext
GET /sidekiq/process_metrics
```
@@ -77,7 +77,7 @@ Example response:
List information about the jobs that Sidekiq has performed.
-```
+```plaintext
GET /sidekiq/job_stats
```
@@ -102,7 +102,7 @@ Example response:
List all the currently available information about Sidekiq.
-```
+```plaintext
GET /sidekiq/compound_metrics
```
diff --git a/doc/api/statistics.md b/doc/api/statistics.md
index c7713ab2dae..883a7640cf8 100644
--- a/doc/api/statistics.md
+++ b/doc/api/statistics.md
@@ -8,7 +8,7 @@ administrator in order to perform this action.
NOTE: **Note:**
These statistics are approximate.
-```
+```plaintext
GET /application/statistics
```
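
A hedged example request (admin token placeholder):

```shell
# Assumed example: fetch the approximate instance-wide counts
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/application/statistics"
```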
diff --git a/doc/api/suggestions.md b/doc/api/suggestions.md
index f95ab82848a..84bafd3c1ea 100644
--- a/doc/api/suggestions.md
+++ b/doc/api/suggestions.md
@@ -7,7 +7,7 @@ Every API call to suggestions must be authenticated.
Applies a suggested patch in a merge request. Users must be
at least [Developer](../user/permissions.md) to perform such action.
-```
+```plaintext
PUT /suggestions/:id/apply
```
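
A hedged example request (the suggestion ID and token are placeholders):

```shell
# Assumed example: apply suggestion 5 to its merge request
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/suggestions/5/apply"
```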
diff --git a/doc/api/system_hooks.md b/doc/api/system_hooks.md
index 1e34adc5320..cd69a6a6b34 100644
--- a/doc/api/system_hooks.md
+++ b/doc/api/system_hooks.md
@@ -11,7 +11,7 @@ Read more about [system hooks](../system_hooks/system_hooks.md).
Get a list of all system hooks.
-```
+```plaintext
GET /hooks
```
@@ -42,7 +42,7 @@ Example response:
Add a new system hook.
-```
+```plaintext
POST /hooks
```
@@ -81,7 +81,7 @@ Example response:
## Test system hook
-```
+```plaintext
GET /hooks/:id
```
@@ -112,7 +112,7 @@ Example response:
Deletes a system hook.
-```
+```plaintext
DELETE /hooks/:id
```
diff --git a/doc/api/tags.md b/doc/api/tags.md
index a796b758328..0a0490e072e 100644
--- a/doc/api/tags.md
+++ b/doc/api/tags.md
@@ -6,7 +6,7 @@ Get a list of repository tags from a project, sorted by name in reverse
alphabetical order. This endpoint can be accessed without authentication if the
repository is publicly accessible.
-```
+```plaintext
GET /projects/:id/repository/tags
```
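
A hedged example request (the project ID and token are placeholders):

```shell
# Assumed example: list the repository tags of project 5
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/repository/tags"
```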
@@ -57,7 +57,7 @@ Parameters:
Get a specific repository tag determined by its name. This endpoint can be
accessed without authentication if the repository is publicly accessible.
-```
+```plaintext
GET /projects/:id/repository/tags/:tag_name
```
@@ -104,7 +104,7 @@ Example Response:
Creates a new tag in the repository that points to the supplied ref.
-```
+```plaintext
POST /projects/:id/repository/tags
```
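
A sketch of a create request, assuming `tag_name` and `ref` are the required parameters (values are placeholders):

```shell
# Assumed example: tag the tip of master as v1.0.0
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/repository/tags?tag_name=v1.0.0&ref=master"
```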
@@ -164,7 +164,7 @@ status code `405` with an explanatory error message is returned.
Deletes a tag of a repository with the given name.
-```
+```plaintext
DELETE /projects/:id/repository/tags/:tag_name
```
@@ -178,7 +178,7 @@ Parameters:
Add release notes to the existing Git tag. If a release
already exists for the given tag, status code `409` is returned.
-```
+```plaintext
POST /projects/:id/repository/tags/:tag_name/release
```
@@ -210,7 +210,7 @@ Response:
Updates the release notes of a given release.
-```
+```plaintext
PUT /projects/:id/repository/tags/:tag_name/release
```
diff --git a/doc/api/templates/dockerfiles.md b/doc/api/templates/dockerfiles.md
index 4453d3692c7..6e693a405b6 100644
--- a/doc/api/templates/dockerfiles.md
+++ b/doc/api/templates/dockerfiles.md
@@ -12,7 +12,7 @@ information on Dockerfiles, see the
Get all Dockerfile templates.
-```
+```plaintext
GET /templates/dockerfiles
```
@@ -99,7 +99,7 @@ Example response:
Get a single Dockerfile template.
-```
+```plaintext
GET /templates/dockerfiles/:key
```
diff --git a/doc/api/templates/licenses.md b/doc/api/templates/licenses.md
index 0b95e4d8065..f66fb70e108 100644
--- a/doc/api/templates/licenses.md
+++ b/doc/api/templates/licenses.md
@@ -13,7 +13,7 @@ resources available online.
Get all license templates.
-```
+```plaintext
GET /templates/licenses
```
@@ -110,7 +110,7 @@ Example response:
Get a single license template. You can pass parameters to replace the license
placeholder.
-```
+```plaintext
GET /templates/licenses/:key
```
diff --git a/doc/api/todos.md b/doc/api/todos.md
index a83b045f9a4..058009b0e3b 100644
--- a/doc/api/todos.md
+++ b/doc/api/todos.md
@@ -7,7 +7,7 @@
Returns a list of todos. When no filter is applied, it returns all pending todos
for the current user. Different filters allow the user to refine the request.
-```
+```plaintext
GET /todos
```
@@ -184,7 +184,7 @@ Example Response:
Marks a single pending todo, given by its ID, as done for the current user. The
todo marked as done is returned in the response.
-```
+```plaintext
POST /todos/:id/mark_as_done
```
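
A hedged example request (the todo ID and token are placeholders):

```shell
# Assumed example: mark todo 130 as done
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/todos/130/mark_as_done"
```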
@@ -280,7 +280,7 @@ Example Response:
Marks all pending todos for the current user as done. It returns the HTTP status code `204` with an empty response.
-```
+```plaintext
POST /todos/mark_as_done
```
diff --git a/doc/api/users.md b/doc/api/users.md
index 701929520f4..239afa38548 100644
--- a/doc/api/users.md
+++ b/doc/api/users.md
@@ -10,7 +10,7 @@ This function takes pagination parameters `page` and `per_page` to restrict the
### For normal users
-```
+```plaintext
GET /users
```
@@ -39,13 +39,13 @@ You can also search for users by name or primary email using `?search=`. For exa
In addition, you can look up users by username:
-```
+```plaintext
GET /users?username=:username
```
For example:
-```
+```plaintext
GET /users?username=jack_smith
```
@@ -53,11 +53,11 @@ In addition, you can filter users based on states eg. `blocked`, `active`
This works only to filter users who are `blocked` or `active`.
It does not support `active=false` or `blocked=false`.
-```
+```plaintext
GET /users?active=true
```
-```
+```plaintext
GET /users?blocked=true
```
@@ -66,7 +66,7 @@ Username search is case insensitive.
### For admins
-```
+```plaintext
GET /users
```
@@ -95,6 +95,7 @@ GET /users
"twitter": "",
"website_url": "",
"organization": "",
+ "job_title": "",
"last_sign_in_at": "2012-06-01T11:41:01Z",
"confirmed_at": "2012-05-23T09:05:22Z",
"theme_id": 1,
@@ -132,6 +133,7 @@ GET /users
"twitter": "",
"website_url": "",
"organization": "",
+ "job_title": "",
"last_sign_in_at": null,
"confirmed_at": "2012-05-30T16:53:06.148Z",
"theme_id": 1,
@@ -160,7 +162,7 @@ Users on GitLab [Starter, Bronze, or higher](https://about.gitlab.com/pricing/)
...
"shared_runners_minutes_limit": 133,
"extra_shared_runners_minutes_limit": 133,
- "note": "DMCA Request: 2018-11-05 | DMCA Violation | Abuse | https://gitlab.zendesk.com/agent/tickets/123"
+ "note": "DMCA Request: 2018-11-05 | DMCA Violation | Abuse | https://gitlab.zendesk.com/agent/tickets/123",
...
}
]
@@ -187,13 +189,13 @@ the `group_saml` provider option:
You can look up users by external UID and provider:
-```
+```plaintext
GET /users?extern_uid=:extern_uid&provider=:provider
```
For example:
-```
+```plaintext
GET /users?extern_uid=1234567&provider=github
```
@@ -201,19 +203,19 @@ You can search for users who are external with: `/users?external=true`
You can search users by creation date time range with:
-```
+```plaintext
GET /users?created_before=2001-01-02T00:00:00.060Z&created_after=1999-01-02T00:00:00.060
```
You can filter by [custom attributes](custom_attributes.md) with:
-```
+```plaintext
GET /users?custom_attributes[key]=value&custom_attributes[other_key]=other_value
```
You can include the users' [custom attributes](custom_attributes.md) in the response with:
-```
+```plaintext
GET /users?with_custom_attributes=true
```
@@ -223,7 +225,7 @@ Get a single user.
### For user
-```
+```plaintext
GET /users/:id
```
@@ -247,13 +249,14 @@ Parameters:
"linkedin": "",
"twitter": "",
"website_url": "",
- "organization": ""
+ "organization": "",
+ "job_title": "Operations Specialist"
}
```
### For admin
-```
+```plaintext
GET /users/:id
```
@@ -282,6 +285,7 @@ Example Responses:
"twitter": "",
"website_url": "",
"organization": "",
+ "job_title": "Operations Specialist",
"last_sign_in_at": "2012-06-01T11:41:01Z",
"confirmed_at": "2012-05-23T09:05:22Z",
"theme_id": 1,
@@ -300,10 +304,14 @@ Example Responses:
"external": false,
"private_profile": false,
"current_sign_in_ip": "196.165.1.102",
- "last_sign_in_ip": "172.127.2.22"
+ "last_sign_in_ip": "172.127.2.22",
+ "plan": "gold",
+ "trial": true
}
```
+NOTE: **Note:** The `plan` and `trial` parameters are only available on GitLab Enterprise Edition.
+
Users on GitLab [Starter, Bronze, or higher](https://about.gitlab.com/pricing/) will also see
the `shared_runners_minutes_limit`, `extra_shared_runners_minutes_limit`, and `note` parameters.
@@ -313,7 +321,7 @@ the `shared_runners_minutes_limit`, `extra_shared_runners_minutes_limit`, and `n
"username": "john_smith",
"shared_runners_minutes_limit": 133,
"extra_shared_runners_minutes_limit": 133,
- "note": "DMCA Request: 2018-11-05 | DMCA Violation | Abuse | https://gitlab.zendesk.com/agent/tickets/123"
+ "note": "DMCA Request: 2018-11-05 | DMCA Violation | Abuse | https://gitlab.zendesk.com/agent/tickets/123",
...
}
```
@@ -327,7 +335,7 @@ see the `group_saml` option:
"username": "john_smith",
"shared_runners_minutes_limit": 133,
"extra_shared_runners_minutes_limit": 133,
- "note": "DMCA Request: 2018-11-05 | DMCA Violation | Abuse | https://gitlab.zendesk.com/agent/tickets/123"
+ "note": "DMCA Request: 2018-11-05 | DMCA Violation | Abuse | https://gitlab.zendesk.com/agent/tickets/123",
"identities": [
{"provider": "github", "extern_uid": "2435223452345"},
{"provider": "bitbucket", "extern_uid": "john.smith"},
@@ -340,7 +348,7 @@ see the `group_saml` option:
You can include the user's [custom attributes](custom_attributes.md) in the response with:
-```
+```plaintext
GET /users/:id?with_custom_attributes=true
```
@@ -358,90 +366,94 @@ over `password`. In addition, `reset_password` and
NOTE: **Note:**
From [GitLab 12.1](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/29888/), `private_profile` will default to `false`.
-```
+```plaintext
POST /users
```
Parameters:
-- `email` (required) - Email
-- `password` (optional) - Password
-- `reset_password` (optional) - Send user password reset link - true or false(default)
-- `username` (required) - Username
-- `name` (required) - Name
-- `skype` (optional) - Skype ID
-- `linkedin` (optional) - LinkedIn
-- `twitter` (optional) - Twitter account
-- `website_url` (optional) - Website URL
-- `organization` (optional) - Organization name
-- `projects_limit` (optional) - Number of projects user can create
-- `extern_uid` (optional) - External UID
-- `provider` (optional) - External provider name
-- `bio` (optional) - User's biography
-- `location` (optional) - User's location
-- `public_email` (optional) - The public email of the user
-- `admin` (optional) - User is admin - true or false (default)
-- `can_create_group` (optional) - User can create groups - true or false
-- `skip_confirmation` (optional) - Skip confirmation - true or false (default)
-- `external` (optional) - Flags the user as external - true or false (default)
-- `avatar` (optional) - Image file for user's avatar
-- `theme_id` (optional) - The GitLab theme for the user (see [the user preference docs](../user/profile/preferences.md#navigation-theme) for more information)
-- `color_scheme_id` (optional) - User's color scheme for the file viewer (see [the user preference docs](../user/profile/preferences.md#syntax-highlighting-theme) for more information)
-- `private_profile` (optional) - User's profile is private - true, false (default), or null (will be converted to false)
-- `shared_runners_minutes_limit` (optional) - Pipeline minutes quota for this user **(STARTER)**
-- `extra_shared_runners_minutes_limit` (optional) - Extra pipeline minutes quota for this user **(STARTER)**
+| Attribute | Required | Description |
+|:-------------------------------------|:---------|:--------------------------------------------------------------------------------------------------------------------------------------------------------|
+| `admin` | No | User is admin - true or false (default) |
+| `avatar` | No | Image file for user's avatar |
+| `bio` | No | User's biography |
+| `can_create_group` | No | User can create groups - true or false |
+| `color_scheme_id` | No | User's color scheme for the file viewer (see [the user preference docs](../user/profile/preferences.md#syntax-highlighting-theme) for more information) |
+| `email` | Yes | Email |
+| `extern_uid` | No | External UID |
+| `external` | No | Flags the user as external - true or false (default) |
+| `extra_shared_runners_minutes_limit` | No | Extra pipeline minutes quota for this user **(STARTER)** |
+| `force_random_password` | No | Set user password to a random value - true or false (default) |
+| `group_id_for_saml` | No | ID of group where SAML has been configured |
+| `linkedin` | No | LinkedIn |
+| `location` | No | User's location |
+| `name`                                | Yes      | Name                                                                                                                                                      |
+| `organization` | No | Organization name |
+| `password` | No | Password |
+| `private_profile` | No | User's profile is private - true, false (default), or null (will be converted to false) |
+| `projects_limit` | No | Number of projects user can create |
+| `provider` | No | External provider name |
+| `public_email` | No | The public email of the user |
+| `reset_password`                      | No       | Send user password reset link - true or false (default)                                                                                                  |
+| `shared_runners_minutes_limit` | No | Pipeline minutes quota for this user **(STARTER)** |
+| `skip_confirmation` | No | Skip confirmation - true or false (default) |
+| `skype` | No | Skype ID |
+| `theme_id` | No | The GitLab theme for the user (see [the user preference docs](../user/profile/preferences.md#navigation-theme) for more information) |
+| `twitter` | No | Twitter account |
+| `username` | Yes | Username |
+| `website_url` | No | Website URL |
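Using the table above, a minimal creation request might look like the following sketch (the email, username, name, and token are invented for illustration, and `reset_password=true` is used instead of supplying a password):

```shell
# Create a user as an administrator and let GitLab send a password reset link.
curl --request POST --header "PRIVATE-TOKEN: <admin_access_token>" \
     --data "email=jane@example.com" \
     --data "username=jane_doe" \
     --data "name=Jane Doe" \
     --data "reset_password=true" \
     "https://gitlab.example.com/api/v4/users"
```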
## User modification
Modifies an existing user. Only administrators can change attributes of a user.
-```
+```plaintext
PUT /users/:id
```
Parameters:
-- `id` (required) - The ID of the user
-- `email` (optional) - Email
-- `username` (optional) - Username
-- `name` (optional) - Name
-- `password` (optional) - Password
-- `skype` (optional) - Skype ID
-- `linkedin` (optional) - LinkedIn
-- `twitter` (optional) - Twitter account
-- `website_url` (optional) - Website URL
-- `organization` (optional) - Organization name
-- `projects_limit` (optional) - Limit projects each user can create
-- `extern_uid` (optional) - External UID
-- `provider` (optional) - External provider name
-- `group_id_for_saml` (optional) - ID of group where SAML has been configured
-- `bio` (optional) - User's biography
-- `location` (optional) - User's location
-- `public_email` (optional) - The public email of the user
-- `admin` (optional) - User is admin - true or false (default)
-- `can_create_group` (optional) - User can create groups - true or false
-- `skip_reconfirmation` (optional) - Skip reconfirmation - true or false (default)
-- `external` (optional) - Flags the user as external - true or false (default)
-- `shared_runners_minutes_limit` (optional) - Pipeline minutes quota for this user
-- `extra_shared_runners_minutes_limit` (optional) - Extra pipeline minutes quota for this user
-- `avatar` (optional) - Image file for user's avatar
-- `theme_id` (optional) - The GitLab theme for the user (see [the user preference docs](../user/profile/preferences.md#navigation-theme) for more information)
-- `color_scheme_id` (optional) - User's color scheme for the file viewer (see [the user preference docs](../user/profile/preferences.md#syntax-highlighting-theme) for more information)
-- `private_profile` (optional) - User's profile is private - true, false (default), or null (will be converted to false)
-- `shared_runners_minutes_limit` (optional) - Pipeline minutes quota for this user **(STARTER)**
-- `extra_shared_runners_minutes_limit` (optional) - Extra pipeline minutes quota for this user **(STARTER)**
-- `note` (optional) - Admin notes for this user **(STARTER)**
+| Attribute | Required | Description |
+|:-------------------------------------|:---------|:--------------------------------------------------------------------------------------------------------------------------------------------------------|
+| `admin` | No | User is admin - true or false (default) |
+| `avatar` | No | Image file for user's avatar |
+| `bio` | No | User's biography |
+| `can_create_group` | No | User can create groups - true or false |
+| `color_scheme_id` | No | User's color scheme for the file viewer (see [the user preference docs](../user/profile/preferences.md#syntax-highlighting-theme) for more information) |
+| `email` | No | Email |
+| `extern_uid` | No | External UID |
+| `external` | No | Flags the user as external - true or false (default) |
+| `extra_shared_runners_minutes_limit` | No | Extra pipeline minutes quota for this user **(STARTER)** |
+| `group_id_for_saml` | No | ID of group where SAML has been configured |
+| `id` | Yes | The ID of the user |
+| `linkedin` | No | LinkedIn |
+| `location` | No | User's location |
+| `name` | No | Name |
+| `note` | No | Admin notes for this user **(STARTER)** |
+| `organization` | No | Organization name |
+| `password` | No | Password |
+| `private_profile` | No | User's profile is private - true, false (default), or null (will be converted to false) |
+| `projects_limit` | No | Limit projects each user can create |
+| `provider` | No | External provider name |
+| `public_email` | No | The public email of the user |
+| `shared_runners_minutes_limit` | No | Pipeline minutes quota for this user **(STARTER)** |
+| `skip_reconfirmation` | No | Skip reconfirmation - true or false (default) |
+| `skype` | No | Skype ID |
+| `theme_id` | No | The GitLab theme for the user (see [the user preference docs](../user/profile/preferences.md#navigation-theme) for more information) |
+| `twitter` | No | Twitter account |
+| `username` | No | Username |
+| `website_url` | No | Website URL |
On password update, user will be forced to change it upon next login.
Note, at the moment this method only returns a `404` error,
-even in cases where a `409` (Conflict) would be more appropriate,
-e.g. when renaming the email address to some existing one.
+even in cases where a `409` (Conflict) would be more appropriate.
+For example, this can happen when changing the email address to one that already exists.
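As an illustration of a modification request (the user ID, field values, and token are placeholders):

```shell
# Update the organization and admin note for user 42.
curl --request PUT --header "PRIVATE-TOKEN: <admin_access_token>" \
     --data "organization=Example Corp" \
     --data "note=Contact verified on 2020-03-01" \
     "https://gitlab.example.com/api/v4/users/42"
```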
## Delete authentication identity from user
Deletes a user's authentication identity using the provider name associated with that identity. Available only for administrators.
-```
+```plaintext
DELETE /users/:id/identities/:provider
```
@@ -455,7 +467,7 @@ Parameters:
Deletes a user. Available only for administrators.
This returns a `204 No Content` status code if the operation was successful, `404` if the resource was not found, or `409` if the user cannot be soft deleted.
-```
+```plaintext
DELETE /users/:id
```
@@ -470,7 +482,7 @@ Parameters:
Gets currently authenticated user.
-```
+```plaintext
GET /user
```
@@ -518,7 +530,7 @@ Parameters:
- `sudo` (optional) - the ID of a user to make the call in their place
-```
+```plaintext
GET /user
```
@@ -541,6 +553,7 @@ GET /user
"twitter": "",
"website_url": "",
"organization": "",
+ "job_title": "",
"last_sign_in_at": "2012-06-01T11:41:01Z",
"confirmed_at": "2012-05-23T09:05:22Z",
"theme_id": 1,
@@ -567,7 +580,7 @@ GET /user
Get the status of the currently signed in user.
-```
+```plaintext
GET /user/status
```
@@ -589,7 +602,7 @@ Example response:
Get the status of a user.
-```
+```plaintext
GET /users/:id_or_username/status
```
@@ -615,7 +628,7 @@ Example response:
Set the status of the current user.
-```
+```plaintext
PUT /user/status
```
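A hedged example of the call above; the `emoji` and `message` parameter names are assumed here for illustration, and all values are placeholders:

```shell
# Set the current user's status to a coffee cup with a short message.
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" \
     --data "emoji=coffee" \
     --data "message=Writing documentation" \
     "https://gitlab.example.com/api/v4/user/status"
```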
@@ -648,7 +661,7 @@ Get the counts (same as in top right menu) of the currently signed in user.
| --------- | ---- | ----------- |
| `merge_requests` | number | Merge requests that are active and assigned to current user. |
-```
+```plaintext
GET /user_counts
```
@@ -672,7 +685,7 @@ Please refer to the [List of user projects](projects.md#list-user-projects).
Get a list of currently authenticated user's SSH keys.
-```
+```plaintext
GET /user/keys
```
@@ -701,7 +714,7 @@ Parameters:
Get a list of a specified user's SSH keys.
-```
+```plaintext
GET /users/:id_or_username/keys
```
@@ -713,7 +726,7 @@ GET /users/:id_or_username/keys
Get a single key.
-```
+```plaintext
GET /user/keys/:key_id
```
@@ -734,14 +747,14 @@ Parameters:
Creates a new key owned by the currently authenticated user.
-```
+```plaintext
POST /user/keys
```
Parameters:
- `title` (required) - new SSH Key's title
-- `key` (required) - new SSH key
+- `key` (required) - new SSH key
```json
{
@@ -772,22 +785,22 @@ error occurs a `400 Bad Request` is returned with a message explaining the error
Create new key owned by specified user. Available only for admin
-```
+```plaintext
POST /users/:id/keys
```
Parameters:
-- `id` (required) - id of specified user
+- `id` (required) - id of specified user
- `title` (required) - new SSH Key's title
-- `key` (required) - new SSH key
+- `key` (required) - new SSH key
## Delete SSH key for current user
Deletes key owned by currently authenticated user.
This returns a `204 No Content` status code if the operation was successful or `404` if the resource was not found.
-```
+```plaintext
DELETE /user/keys/:key_id
```
@@ -799,20 +812,20 @@ Parameters:
Deletes key owned by a specified user. Available only for admin.
-```
+```plaintext
DELETE /users/:id/keys/:key_id
```
Parameters:
- `id` (required) - id of specified user
-- `key_id` (required) - SSH key ID
+- `key_id` (required) - SSH key ID
## List all GPG keys
Get a list of currently authenticated user's GPG keys.
-```
+```plaintext
GET /user/gpg_keys
```
@@ -836,7 +849,7 @@ Example response:
Get a specific GPG key of currently authenticated user.
-```
+```plaintext
GET /user/gpg_keys/:key_id
```
@@ -864,7 +877,7 @@ Example response:
Creates a new GPG key owned by the currently authenticated user.
-```
+```plaintext
POST /user/gpg_keys
```
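A sketch of adding a key; the token is a placeholder, the key body is truncated, and the `key` parameter name is an assumption made for this example:

```shell
# Add an ASCII-armored public GPG key for the authenticated user.
# The key body below is truncated; paste the full exported key in practice.
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" \
     --data-urlencode "key=-----BEGIN PGP PUBLIC KEY BLOCK-----
...
-----END PGP PUBLIC KEY BLOCK-----" \
     "https://gitlab.example.com/api/v4/user/gpg_keys"
```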
@@ -894,7 +907,7 @@ Example response:
Delete a GPG key owned by currently authenticated user.
-```
+```plaintext
DELETE /user/gpg_keys/:key_id
```
@@ -914,7 +927,7 @@ Returns `204 No Content` on success, or `404 Not found` if the key cannot be fou
Get a list of a specified user's GPG keys. Available only for admins.
-```
+```plaintext
GET /users/:id/gpg_keys
```
@@ -944,7 +957,7 @@ Example response:
Get a specific GPG key for a given user. Available only for admins.
-```
+```plaintext
GET /users/:id/gpg_keys/:key_id
```
@@ -973,7 +986,7 @@ Example response:
Create new GPG key owned by the specified user. Available only for admins.
-```
+```plaintext
POST /users/:id/gpg_keys
```
@@ -1004,7 +1017,7 @@ Example response:
Delete a GPG key owned by a specified user. Available only for admins.
-```
+```plaintext
DELETE /users/:id/gpg_keys/:key_id
```
@@ -1023,7 +1036,7 @@ curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" https://gitl
Get a list of currently authenticated user's emails.
-```
+```plaintext
GET /user/emails
```
@@ -1048,7 +1061,7 @@ Parameters:
Get a list of a specified user's emails. Available only for admin
-```
+```plaintext
GET /users/:id/emails
```
@@ -1060,7 +1073,7 @@ Parameters:
Get a single email.
-```
+```plaintext
GET /user/emails/:email_id
```
@@ -1079,7 +1092,7 @@ Parameters:
Creates a new email owned by the currently authenticated user.
-```
+```plaintext
POST /user/emails
```
@@ -1111,13 +1124,13 @@ error occurs a `400 Bad Request` is returned with a message explaining the error
Create new email owned by specified user. Available only for admin
-```
+```plaintext
POST /users/:id/emails
```
Parameters:
-- `id` (required) - id of specified user
+- `id` (required) - id of specified user
- `email` (required) - email address
- `skip_confirmation` (optional) - Skip confirmation and assume e-mail is verified - true or false (default)
@@ -1126,7 +1139,7 @@ Parameters:
Deletes email owned by currently authenticated user.
This returns a `204 No Content` status code if the operation was successful or `404` if the resource was not found.
-```
+```plaintext
DELETE /user/emails/:email_id
```
@@ -1138,20 +1151,20 @@ Parameters:
Deletes email owned by a specified user. Available only for admin.
-```
+```plaintext
DELETE /users/:id/emails/:email_id
```
Parameters:
- `id` (required) - id of specified user
-- `email_id` (required) - email ID
+- `email_id` (required) - email ID
## Block user
Blocks the specified user. Available only for admin.
-```
+```plaintext
POST /users/:id/block
```
@@ -1159,14 +1172,17 @@ Parameters:
- `id` (required) - id of specified user
-Will return `201 OK` on success, `404 User Not Found` is user cannot be found or
-`403 Forbidden` when trying to block an already blocked user by LDAP synchronization.
+Returns:
+
+- `201 OK` on success.
+- `404 User Not Found` if user cannot be found.
+- `403 Forbidden` when trying to block an already blocked user by LDAP synchronization.
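For example (the user ID and token are placeholders):

```shell
# Block user 42; expect 201 on success, 404 if the user does not exist,
# and 403 if the user is already blocked by LDAP synchronization.
curl --request POST --header "PRIVATE-TOKEN: <admin_access_token>" \
     "https://gitlab.example.com/api/v4/users/42/block"
```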
## Unblock user
Unblocks the specified user. Available only for admin.
-```
+```plaintext
POST /users/:id/unblock
```
@@ -1183,7 +1199,7 @@ Will return `201 OK` on success, `404 User Not Found` is user cannot be found or
Deactivates the specified user. Available only for admin.
-```
+```plaintext
POST /users/:id/deactivate
```
@@ -1205,7 +1221,7 @@ Returns:
Activates the specified user. Available only for admin.
-```
+```plaintext
POST /users/:id/activate
```
@@ -1230,7 +1246,7 @@ Please refer to the [Events API documentation](events.md#get-user-contribution-e
It retrieves every impersonation token of the user. Use the pagination
parameters `page` and `per_page` to restrict the list of impersonation tokens.
-```
+```plaintext
GET /users/:user_id/impersonation_tokens
```
@@ -1241,7 +1257,7 @@ Parameters:
| `user_id` | integer | yes | The ID of the user |
| `state` | string | no | filter tokens based on state (`all`, `active`, `inactive`) |
-```
+```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/users/42/impersonation_tokens
```
@@ -1282,7 +1298,7 @@ Example response:
It shows a user's impersonation token.
-```
+```plaintext
GET /users/:user_id/impersonation_tokens/:impersonation_token_id
```
@@ -1293,7 +1309,7 @@ Parameters:
| `user_id` | integer | yes | The ID of the user |
| `impersonation_token_id` | integer | yes | The ID of the impersonation token |
-```
+```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/users/42/impersonation_tokens/2
```
@@ -1324,7 +1340,7 @@ You are only able to create impersonation tokens to impersonate the user and per
both API calls and Git reads and writes. The user will not see these tokens in their profile
settings page.
-```
+```plaintext
POST /users/:user_id/impersonation_tokens
```
@@ -1335,7 +1351,7 @@ POST /users/:user_id/impersonation_tokens
| `expires_at` | date | no | The expiration date of the impersonation token in ISO format (`YYYY-MM-DD`)|
| `scopes` | array | yes | The array of scopes of the impersonation token (`api`, `read_user`) |
-```
+```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" --data "name=mytoken" --data "expires_at=2017-04-04" --data "scopes[]=api" https://gitlab.example.com/api/v4/users/42/impersonation_tokens
```
@@ -1363,11 +1379,11 @@ Example response:
It revokes an impersonation token.
-```
+```plaintext
DELETE /users/:user_id/impersonation_tokens/:impersonation_token_id
```
-```
+```shell
curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/users/42/impersonation_tokens/1
```
@@ -1388,13 +1404,13 @@ The activities that update the timestamp are:
- Git HTTP/SSH activities (such as clone, push)
- User logging into GitLab
-- User visiting pages related to Dashboards, Projects, Issues and Merge Requests ([introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/54947) in GitLab 11.8)
+- User visiting pages related to Dashboards, Projects, Issues, and Merge Requests ([introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/54947) in GitLab 11.8)
- User using the API
By default, it shows the activity for all users in the last 6 months, but this can be
amended by using the `from` parameter.
-```
+```plaintext
GET /user/activities
```
@@ -1402,7 +1418,7 @@ Parameters:
| Attribute | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
-| `from` | string | no | Date string in the format YEAR-MONTH-DAY, e.g. `2016-03-11`. Defaults to 6 months ago. |
+| `from` | string | no | Date string in the format YEAR-MONTH-DAY. For example, `2016-03-11`. Defaults to 6 months ago. |
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/user/activities
@@ -1434,14 +1450,14 @@ Please note that `last_activity_at` is deprecated, please use `last_activity_on`
## User memberships (admin only)
-> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/22518) in GitLab 12.8.
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/20532) in GitLab 12.8.
Lists all projects and groups a user is a member of. This endpoint is available for admins only.
It returns the `source_id`, `source_name`, `source_type` and `access_level` of a membership.
-Source can be of type `Namespace` (representing a group) or `Project`. The response represents only direct memberships. Inherited memberships, for example in subgroups, will not be included.
-Access levels will be represented by an integer value. Read more about the meaning of access level values [here](access_requests.md#valid-access-levels).
+Source can be of type `Namespace` (representing a group) or `Project`. The response represents only direct memberships. Inherited memberships, for example in subgroups, are not included.
+Access levels are represented by an integer value. For more details, read about the meaning of [access level values](access_requests.md#valid-access-levels).
-```
+```plaintext
GET /users/:id/memberships
```
@@ -1455,7 +1471,7 @@ Parameters:
Returns:
- `200 OK` on success.
-- `404 User Not Found` if user cannot be found.
+- `404 User Not Found` if user can't be found.
- `403 Forbidden` when not requested by an admin.
- `400 Bad Request` when requested type is not supported.
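A hedged request for the memberships endpoint above (the user ID and token are placeholders; the `type` filter is implied by the `400` response and is shown only as an assumption):

```shell
# List direct project and group memberships for user 42 as an administrator.
curl --header "PRIVATE-TOKEN: <admin_access_token>" \
     "https://gitlab.example.com/api/v4/users/42/memberships"

# Optionally restrict the result to one source type (assumed filter name).
curl --header "PRIVATE-TOKEN: <admin_access_token>" \
     "https://gitlab.example.com/api/v4/users/42/memberships?type=Project"
```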
diff --git a/doc/api/version.md b/doc/api/version.md
index a89b8878298..6c9ff6ac9e1 100644
--- a/doc/api/version.md
+++ b/doc/api/version.md
@@ -5,7 +5,7 @@
Retrieve version information for this GitLab instance. Responds `200 OK` for
authenticated users.
-```
+```plaintext
GET /version
```
diff --git a/doc/api/visual_review_discussions.md b/doc/api/visual_review_discussions.md
index 3d1c5e5c4c8..161f84f4618 100644
--- a/doc/api/visual_review_discussions.md
+++ b/doc/api/visual_review_discussions.md
@@ -10,7 +10,7 @@ feedback from [Visual Reviews](../ci/review_apps/index.md#visual-reviews-starter
Creates a new thread to a single project merge request. This is similar to creating
a note but other comments (replies) can be added to it later.
-```
+```plaintext
POST /projects/:id/merge_requests/:merge_request_iid/visual_review_discussions
```
diff --git a/doc/api/vulnerability_findings.md b/doc/api/vulnerability_findings.md
index 833a46ccce5..f5e607d6829 100644
--- a/doc/api/vulnerability_findings.md
+++ b/doc/api/vulnerability_findings.md
@@ -6,7 +6,7 @@ NOTE: **Note:**
This API resource is renamed from Vulnerabilities to Vulnerability Findings because the Vulnerabilities are reserved
for serving the upcoming [Standalone Vulnerability objects](https://gitlab.com/gitlab-org/gitlab/issues/13561).
To fix any broken integrations with the former Vulnerabilities API, change the `vulnerabilities` URL part to be
-`vulnerability_findings`.
+`vulnerability_findings`.
Every API call to vulnerability findings must be [authenticated](README.md#authentication).
@@ -34,7 +34,7 @@ Read more on [pagination](README.md#pagination).
List all of a project's vulnerability findings.
-```
+```plaintext
GET /projects/:id/vulnerability_findings
GET /projects/:id/vulnerability_findings?report_type=sast
GET /projects/:id/vulnerability_findings?report_type=container_scanning
@@ -46,6 +46,9 @@ GET /projects/:id/vulnerability_findings?confidence=unknown,experimental
GET /projects/:id/vulnerability_findings?pipeline_id=42
```
+CAUTION: **Deprecation:**
+Beginning with GitLab 12.9, the `undefined` severity level is deprecated and the `undefined` confidence level isn't reported for new vulnerabilities.
+
| Attribute | Type | Required | Description |
| ------------- | -------------- | -------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) which the authenticated user is a member of. |
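As a rough example of querying the findings endpoint above (the project ID, pipeline ID, and token are placeholders):

```shell
# List SAST findings for a specific pipeline in project 1.
curl --header "PRIVATE-TOKEN: <your_access_token>" \
     "https://gitlab.example.com/api/v4/projects/1/vulnerability_findings?report_type=sast&pipeline_id=42"
```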
diff --git a/doc/api/wikis.md b/doc/api/wikis.md
index 6cde2ebb7a7..cdaf95fc291 100644
--- a/doc/api/wikis.md
+++ b/doc/api/wikis.md
@@ -8,7 +8,7 @@ Available only in APIv4.
Get all wiki pages for a given project.
-```
+```plaintext
GET /projects/:id/wikis
```
@@ -49,7 +49,7 @@ Example response:
Get a wiki page for a given project.
-```
+```plaintext
GET /projects/:id/wikis/:slug
```
@@ -77,7 +77,7 @@ Example response:
Creates a new wiki page for the given repository with the given title, slug, and content.
-```
+```plaintext
POST /projects/:id/wikis
```
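A minimal sketch of creating a page (the title, content, project ID, and token are placeholders):

```shell
# Create a wiki page named "Deploy notes" in project 1.
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" \
     --data "title=Deploy notes" \
     --data "content=Steps for deploying to staging." \
     "https://gitlab.example.com/api/v4/projects/1/wikis"
```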
@@ -107,7 +107,7 @@ Example response:
Updates an existing wiki page. At least one parameter is required to update the wiki page.
-```
+```plaintext
PUT /projects/:id/wikis/:slug
```
@@ -138,7 +138,7 @@ Example response:
Deletes a wiki page with a given slug.
-```
+```plaintext
DELETE /projects/:id/wikis/:slug
```
@@ -160,7 +160,7 @@ On success the HTTP status code is `204` and no JSON response is expected.
Uploads a file to the attachment folder inside the wiki's repository. The
attachment folder is the `uploads` folder.
-```
+```plaintext
POST /projects/:id/wikis/attachments
```
diff --git a/doc/ci/README.md b/doc/ci/README.md
index f25a0ade42a..fe88b414702 100644
--- a/doc/ci/README.md
+++ b/doc/ci/README.md
@@ -50,6 +50,7 @@ To get started with GitLab CI/CD, we recommend you read through
the following documents:
- [How GitLab CI/CD works](introduction/index.md#how-gitlab-cicd-works).
+- [Fundamental pipeline architectures](pipelines/pipeline_architectures.md).
- [GitLab CI/CD basic workflow](introduction/index.md#basic-cicd-workflow).
- [Step-by-step guide for writing `.gitlab-ci.yml` for the first time](../user/project/pages/getting_started_part_four.md).
@@ -79,13 +80,13 @@ GitLab CI/CD supports numerous configuration options:
| Configuration | Description |
|:--------------|:-------------|
-| [Pipelines](pipelines.md) | Structure your CI/CD process through pipelines. |
+| [Pipelines](pipelines/index.md) | Structure your CI/CD process through pipelines. |
| [Environment variables](variables/README.md) | Reuse values based on a variable/value key pair. |
| [Environments](environments.md) | Deploy your application to different environments (e.g., staging, production). |
-| [Job artifacts](../user/project/pipelines/job_artifacts.md) | Output, use, and reuse job artifacts. |
+| [Job artifacts](pipelines/job_artifacts.md) | Output, use, and reuse job artifacts. |
| [Cache dependencies](caching/index.md) | Cache your dependencies for a faster execution. |
-| [Schedule pipelines](../user/project/pipelines/schedules.md) | Schedule pipelines to run as often as you need. |
-| [Custom path for `.gitlab-ci.yml`](../user/project/pipelines/settings.md#custom-ci-configuration-path) | Define a custom path for the CI/CD configuration file. |
+| [Schedule pipelines](pipelines/schedules.md) | Schedule pipelines to run as often as you need. |
+| [Custom path for `.gitlab-ci.yml`](pipelines/settings.md#custom-ci-configuration-path) | Define a custom path for the CI/CD configuration file. |
| [Git submodules for CI/CD](git_submodules.md) | Configure jobs for using Git submodules.|
| [SSH keys for CI/CD](ssh_keys/README.md) | Using SSH keys in your CI pipelines. |
| [Pipelines triggers](triggers/README.md) | Trigger pipelines through the API. |
@@ -132,7 +133,7 @@ Its feature set is listed on the table below according to DevOps stages.
| **Secure** ||
| [Container Scanning](../user/application_security/container_scanning/index.md) **(ULTIMATE)** | Check your Docker containers for known vulnerabilities.|
| [Dependency Scanning](../user/application_security/dependency_scanning/index.md) **(ULTIMATE)** | Analyze your dependencies for known vulnerabilities. |
-| [License Compliance](../user/application_security/license_compliance/index.md) **(ULTIMATE)** | Search your project dependencies for their licenses. |
+| [License Compliance](../user/compliance/license_compliance/index.md) **(ULTIMATE)** | Search your project dependencies for their licenses. |
| [Security Test reports](../user/application_security/index.md) **(ULTIMATE)** | Check for app vulnerabilities. |
## Examples
@@ -148,7 +149,7 @@ As a GitLab administrator, you can change the default behavior
of GitLab CI/CD for:
- An [entire GitLab instance](../user/admin_area/settings/continuous_integration.md).
-- Specific projects, using [pipelines settings](../user/project/pipelines/settings.md).
+- Specific projects, using [pipelines settings](pipelines/settings.md).
See also:
diff --git a/doc/ci/caching/index.md b/doc/ci/caching/index.md
index 0109d87921b..a60310076a8 100644
--- a/doc/ci/caching/index.md
+++ b/doc/ci/caching/index.md
@@ -206,10 +206,11 @@ templates](https://gitlab.com/gitlab-org/gitlab-foss/tree/master/lib/gitlab/ci/t
### Caching Node.js dependencies
-Assuming your project is using [npm](https://www.npmjs.com/) or
-[Yarn](https://classic.yarnpkg.com/en/) to install the Node.js dependencies, the
-following example defines `cache` globally so that all jobs inherit it.
-Node.js modules are installed in `node_modules/` and are cached per-branch:
+Assuming your project is using [npm](https://www.npmjs.com/) to install the Node.js
+dependencies, the following example defines `cache` globally so that all jobs inherit it.
+By default, npm stores cache data in the home folder `~/.npm` but since
+[you can't cache things outside of the project directory](../yaml/README.md#cachepaths),
+we tell npm to use `./.npm` instead, and it is cached per-branch:
```yaml
#
@@ -221,10 +222,10 @@ image: node:latest
cache:
key: ${CI_COMMIT_REF_SLUG}
paths:
- - node_modules/
+ - .npm/
before_script:
- - npm install
+ - npm ci --cache .npm --prefer-offline
test_async:
script:
diff --git a/doc/ci/cloud_deployment/index.md b/doc/ci/cloud_deployment/index.md
index ccb6b60ecc9..ccff302750c 100644
--- a/doc/ci/cloud_deployment/index.md
+++ b/doc/ci/cloud_deployment/index.md
@@ -61,3 +61,61 @@ To do so, please make sure to [push your image into your ECR
repository](https://docs.aws.amazon.com/AmazonECR/latest/userguide/docker-push-ecr-image.html)
before referencing it in your `.gitlab-ci.yml` file and replace the `image`
path to point to your ECR.
+
+### Deploy your application to AWS Elastic Container Service (ECS)
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/207962) in GitLab 12.9.
+
+GitLab provides a series of [CI templates that you can include in your project](../yaml/README.md#include).
+To automate deployments of your application to your [Amazon Elastic Container Service](https://aws.amazon.com/ecs/) (AWS ECS)
+cluster, you can `include` the `Deploy-ECS.gitlab-ci.yml` template in your `.gitlab-ci.yml` file.
+
+Before getting started with this process, you need a cluster on AWS ECS, as well as related
+components, like an ECS service, ECS task definition, a database on AWS RDS, etc.
+[Read more about AWS ECS](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/Welcome.html).
+
+After you're all set up on AWS ECS, follow these steps:
+
+1. Make sure your AWS credentials are set up as environment variables for your
+ project. You can follow [the steps above](#aws) to complete this setup.
+1. Add these variables to your project's `.gitlab-ci.yml` file:
+
+ ```yaml
+ variables:
+ CI_AWS_ECS_CLUSTER: my-cluster
+ CI_AWS_ECS_SERVICE: my-service
+ CI_AWS_ECS_TASK_DEFINITION: my-task-definition
+ ```
+
+ Three variables are defined in this snippet:
+
+ - `CI_AWS_ECS_CLUSTER`: The name of your AWS ECS cluster that you're
+ targeting for your deployments.
+ - `CI_AWS_ECS_SERVICE`: The name of the targeted service tied to
+ your AWS ECS cluster.
+ - `CI_AWS_ECS_TASK_DEFINITION`: The name of the task definition tied
+ to the service mentioned above.
+
+ You can find these names after selecting the targeted cluster on your [AWS ECS dashboard](https://console.aws.amazon.com/ecs/home):
+
+ ![AWS ECS dashboard](../img/ecs_dashboard_v12_9.png)
+
+1. Include this template in `.gitlab-ci.yml`:
+
+ ```yaml
+ include:
+ - template: Deploy-ECS.gitlab-ci.yml
+ ```
+
+ The `Deploy-ECS` template ships with GitLab and is available [on
+ GitLab.com](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Deploy-ECS.gitlab-ci.yml).
+
+1. Commit and push your updated `.gitlab-ci.yml` to your project's repository, and you're done!
+
+ Your application Docker image will be rebuilt and pushed to the GitLab registry.
+ Then the targeted task definition will be updated with the location of the new
+   Docker image, and a new revision will be created in ECS as a result.
+
+ Finally, your AWS ECS service will be updated with the new revision of the
+ task definition, making the cluster pull the newest version of your
+ application.
diff --git a/doc/ci/docker/using_docker_build.md b/doc/ci/docker/using_docker_build.md
index dca8d030820..69618cbd218 100644
--- a/doc/ci/docker/using_docker_build.md
+++ b/doc/ci/docker/using_docker_build.md
@@ -114,9 +114,9 @@ Docker-in-Docker works well, and is the recommended configuration, but it is
not without its own challenges:
- When using docker-in-docker, each job is in a clean environment without the past
- history. Concurrent jobs work fine because every build gets it's own
+ history. Concurrent jobs work fine because every build gets its own
instance of Docker engine so they won't conflict with each other. But this
- also means jobs can be slower because there's no caching of layers.
+ also means that jobs can be slower because there's no caching of layers.
- By default, Docker 17.09 and higher uses `--storage-driver overlay2` which is
the recommended storage driver. See [Using the overlayfs driver](#using-the-overlayfs-driver)
for details.
@@ -127,14 +127,14 @@ not without its own challenges:
and use it as your mount point (for a more thorough explanation, check [issue
#41227](https://gitlab.com/gitlab-org/gitlab-foss/issues/41227)):
- ```yaml
- variables:
- MOUNT_POINT: /builds/$CI_PROJECT_PATH/mnt
+ ```yaml
+ variables:
+ MOUNT_POINT: /builds/$CI_PROJECT_PATH/mnt
- script:
- - mkdir -p "$MOUNT_POINT"
- - docker run -v "$MOUNT_POINT:/mnt" my-docker-image
- ```
+ script:
+ - mkdir -p "$MOUNT_POINT"
+ - docker run -v "$MOUNT_POINT:/mnt" my-docker-image
+ ```
An example project using this approach can be found here: <https://gitlab.com/gitlab-examples/docker>.
@@ -198,7 +198,7 @@ support this.
[runners.cache]
[runners.cache.s3]
[runners.cache.gcs]
- ```
+ ```
1. You can now use `docker` in the build script (note the inclusion of the
`docker:19.03.1-dind` service):
diff --git a/doc/ci/docker/using_docker_images.md b/doc/ci/docker/using_docker_images.md
index 621b679de73..3316113d776 100644
--- a/doc/ci/docker/using_docker_images.md
+++ b/doc/ci/docker/using_docker_images.md
@@ -12,7 +12,7 @@ run applications in independent "containers" that are run within a single Linux
instance. [Docker Hub][hub] has a rich database of pre-built images that can be
used to test and build your applications.
-Docker, when used with GitLab CI, runs each job in a separate and isolated
+When used with GitLab CI, Docker runs each job in a separate and isolated
container using the predefined image that is set up in
[`.gitlab-ci.yml`](../yaml/README.md).
@@ -397,7 +397,7 @@ Before the new extended Docker configuration options, you would need to create
your own image based on the `super/sql:latest` image, add the default command,
and then use it in job's configuration, like:
-```Dockerfile
+```dockerfile
# my-super-sql:latest image's Dockerfile
FROM super/sql:latest
@@ -489,7 +489,7 @@ Look for the `[runners.docker]` section:
```toml
[runners.docker]
- image = "ruby:2.1"
+ image = "ruby:latest"
services = ["mysql:latest", "postgres:latest"]
```
@@ -588,7 +588,7 @@ There are two ways to determine the value of `DOCKER_AUTH_CONFIG`:
# Example output to copy
bXlfdXNlcm5hbWU6bXlfcGFzc3dvcmQ=
```
-
+
Create the Docker JSON configuration content as follows:
```json
@@ -693,7 +693,7 @@ To configure credentials store, follow these steps:
}
```
- - Or, if you are running self-hosted Runners, add the above JSON to
+ - Or, if you are running self-managed Runners, add the above JSON to
`${GITLAB_RUNNER_HOME}/.docker/config.json`. GitLab Runner will read this config file
and will use the needed helper for this specific repository.
@@ -726,7 +726,7 @@ To configure access for `aws_account_id.dkr.ecr.region.amazonaws.com`, follow th
}
```
- - Or, if you are running self-hosted Runners,
+ - Or, if you are running self-managed Runners,
add the above JSON to `${GITLAB_RUNNER_HOME}/.docker/config.json`.
GitLab Runner will read this config file and will use the needed helper for this
specific repository.
diff --git a/doc/ci/environments.md b/doc/ci/environments.md
index 65dc65f23f5..5bb1e221781 100644
--- a/doc/ci/environments.md
+++ b/doc/ci/environments.md
@@ -47,7 +47,7 @@ can even access a [web terminal](#web-terminals) for your environment from withi
Configuring environments involves:
-1. Understanding how [pipelines](pipelines.md) work.
+1. Understanding how [pipelines](pipelines/index.md) work.
1. Defining environments in your project's [`.gitlab-ci.yml`](yaml/README.md) file.
1. Creating a job configured to deploy your application. For example, a deploy job configured with [`environment`](yaml/README.md#environment) to deploy your application to a [Kubernetes cluster](../user/project/clusters/index.md).
@@ -118,7 +118,7 @@ NOTE: **Note:**
The `environment` keyword defines where the app is deployed.
The environment `name` and `url` is exposed in various places
within GitLab. Each time a job that has an environment specified
-succeeds, a deployment is recorded, along with the Git SHA and environment name.
+succeeds, a deployment is recorded, along with the Git SHA, and environment name.
CAUTION: **Caution**:
Some characters are not allowed in environment names. Use only letters,
@@ -156,6 +156,70 @@ Starting with GitLab 9.3, the environment URL is exposed to the Runner via
- `.gitlab-ci.yml`.
- The external URL from the environment if not defined in `.gitlab-ci.yml`.
+#### Set dynamic environment URLs after a job finishes
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/17066) in GitLab 12.9.
+
+In a job script, you can specify a static [environment URL](#using-the-environment-url).
+However, there may be times when you want a dynamic URL. For example,
+if you deploy a Review App to an external hosting
+service that generates a random URL per deployment, like `https://94dd65b.amazonaws.com/qa-lambda-1234567`,
+you don't know the URL before the deployment script finishes.
+If you want to use the environment URL in GitLab, you would have to update it manually.
+
+To address this problem, you can configure a deployment job to report back a set of
+variables, including the URL that was dynamically generated by the external service.
+GitLab supports the [dotenv](https://github.com/bkeepers/dotenv) file format,
+and expands the `environment:url` value with variables defined in the dotenv file.
+
+To use this feature, specify the
+[`artifacts:reports:dotenv`](yaml/README.md#artifactsreportsdotenv) keyword in `.gitlab-ci.yml`.
+
+##### Example of setting dynamic environment URLs
+
+The following example shows a Review App that creates a new environment
+per merge request. The `review` job is triggered by every push, and
+creates or updates an environment named `review/your-branch-name`.
+The environment URL is set to `$DYNAMIC_ENVIRONMENT_URL`:
+
+```yaml
+review:
+ script:
+ - DYNAMIC_ENVIRONMENT_URL=$(deploy-script) # In script, get the environment URL.
+ - echo "DYNAMIC_ENVIRONMENT_URL=$DYNAMIC_ENVIRONMENT_URL" >> deploy.env # Add the value to a dotenv file.
+ artifacts:
+ reports:
+ dotenv: deploy.env # Report back dotenv file to rails.
+ environment:
+ name: review/$CI_COMMIT_REF_SLUG
+ url: $DYNAMIC_ENVIRONMENT_URL # and set the variable produced in script to `environment:url`
+ on_stop: stop_review
+
+stop_review:
+ script:
+ - ./teardown-environment
+ when: manual
+ environment:
+ name: review/$CI_COMMIT_REF_SLUG
+ action: stop
+```
+
+As soon as the `review` job finishes, GitLab updates the `review/your-branch-name`
+environment's URL.
+It parses the report artifact `deploy.env`, registers the variables it contains as runtime-created,
+uses them to expand `environment:url: $DYNAMIC_ENVIRONMENT_URL`, and sets the result as the environment URL.
+You can also specify a static part of the URL at `environment:url:`, such as
+`https://$DYNAMIC_ENVIRONMENT_URL`. If the value of `DYNAMIC_ENVIRONMENT_URL` is
+`123.awesome.com`, the final result will be `https://123.awesome.com`.
+
+The assigned URL for the `review/your-branch-name` environment is visible in the UI.
+[See where the environment URL is displayed](#using-the-environment-url).
+
+> **Notes:**
+>
+> - `stop_review` doesn't generate a dotenv report artifact, so it won't recognize the `DYNAMIC_ENVIRONMENT_URL` variable. Therefore you should not set `environment:url:` in the `stop_review` job.
+> - If the environment URL is not valid (for example, the URL is malformed), the system doesn't update the environment URL.
+
### Configuring manual deployments
Adding `when: manual` to an automatically executed job's configuration converts it to
@@ -340,6 +404,12 @@ deploy:
- master
```
+When deploying to a Kubernetes cluster using GitLab's Kubernetes integration,
+information about the cluster and namespace will be displayed above the job
+trace on the deployment job page:
+
+![Deployment cluster information](img/environments_deployment_cluster_v12_8.png)
+
NOTE: **Note:**
Kubernetes configuration is not supported for Kubernetes clusters
that are [managed by GitLab](../user/project/clusters/index.md#gitlab-managed-clusters).
diff --git a/doc/ci/environments/img/incremental_rollouts_play_v12_7.png b/doc/ci/environments/img/incremental_rollouts_play_v12_7.png
index 314c4a07af0..9d3ca258b08 100644
--- a/doc/ci/environments/img/incremental_rollouts_play_v12_7.png
+++ b/doc/ci/environments/img/incremental_rollouts_play_v12_7.png
Binary files differ
diff --git a/doc/ci/environments/img/timed_rollout_v12_7.png b/doc/ci/environments/img/timed_rollout_v12_7.png
index 6b83bfc574e..94f5d50020f 100644
--- a/doc/ci/environments/img/timed_rollout_v12_7.png
+++ b/doc/ci/environments/img/timed_rollout_v12_7.png
Binary files differ
diff --git a/doc/ci/examples/deployment/composer-npm-deploy.md b/doc/ci/examples/deployment/composer-npm-deploy.md
index f780a13998c..ae0880adef2 100644
--- a/doc/ci/examples/deployment/composer-npm-deploy.md
+++ b/doc/ci/examples/deployment/composer-npm-deploy.md
@@ -23,7 +23,7 @@ before_script:
- php -r "unlink('composer-setup.php');"
```
-This will make sure we have all requirements ready. Next, we want to run `composer install` to fetch all PHP dependencies and `npm install` to load Node.js packages, then run the `npm` script. We need to append them into `before_script` section:
+This will make sure we have all requirements ready. Next, we want to run `composer install` to fetch all PHP dependencies and `npm install` to load Node.js packages, then run the `npm` script. We need to append them to the `before_script` section:
```yaml
before_script:
@@ -44,7 +44,7 @@ All these operations will put all files into a `build` folder, which is ready to
## How to transfer files to a live server
-You have multiple options: rsync, scp, sftp and so on. For now, we will use scp.
+You have multiple options: rsync, scp, sftp, and so on. For now, we will use scp.
To make this work, you need to add a GitLab CI/CD Variable (accessible on `gitlab.example/your-project-name/variables`). That variable will be called `STAGING_PRIVATE_KEY` and it's the **private** SSH key of your server.
diff --git a/doc/ci/examples/devops_and_game_dev_with_gitlab_ci_cd/index.md b/doc/ci/examples/devops_and_game_dev_with_gitlab_ci_cd/index.md
index f59401c6f87..848808f65ea 100644
--- a/doc/ci/examples/devops_and_game_dev_with_gitlab_ci_cd/index.md
+++ b/doc/ci/examples/devops_and_game_dev_with_gitlab_ci_cd/index.md
@@ -63,7 +63,7 @@ Next, we'll create a small subset of tests that exemplify most of the states I e
this `Weapon` class to go through. To get started, create a folder called `lib/tests`
and add the following code to a new file `weaponTests.ts`:
-```ts
+```typescript
import { expect } from 'chai';
import { Weapon, BulletFactory } from '../lib/weapon';
@@ -114,7 +114,7 @@ describe('Weapon', () => {
To build and run these tests using gulp, let's also add the following gulp functions
to the existing `gulpfile.js` file:
-```ts
+```typescript
gulp.task('build-test', function () {
return gulp.src('src/tests/**/*.ts', { read: false })
.pipe(tap(function (file) {
@@ -140,7 +140,7 @@ to trigger the weapon. In the `src/lib` folder create a `weapon.ts` file. We'll
to it: `Weapon` and `BulletFactory` which will encapsulate Phaser's **sprite** and
**group** objects, and the logic specific to our game.
-```ts
+```typescript
export class Weapon {
private isTriggered: boolean = false;
private currentTimer: number = 0;
@@ -210,7 +210,7 @@ export class BulletFactory {
Lastly, we'll redo our entry point, `game.ts`, to tie together both `Player` and `Weapon` objects
as well as add them to the update loop. Here is what the updated `game.ts` file looks like:
-```ts
+```typescript
import { Player } from "./player";
import { Weapon, BulletFactory } from "./weapon";
diff --git a/doc/ci/examples/end_to_end_testing_webdriverio/index.md b/doc/ci/examples/end_to_end_testing_webdriverio/index.md
index 3402190c805..8c3156ec25c 100644
--- a/doc/ci/examples/end_to_end_testing_webdriverio/index.md
+++ b/doc/ci/examples/end_to_end_testing_webdriverio/index.md
@@ -49,7 +49,7 @@ visit a specific URL or interact with elements on the page. It can be programmat
from a variety of programming languages. In this article we're going to be using the
[WebdriverIO](https://webdriver.io/) JavaScript bindings, but the general concept should carry over
pretty well to
-[other programming languages supported by Selenium](https://selenium.dev/about/platforms.jsp#programming-languages).
+[other programming languages supported by Selenium](https://www.selenium.dev/documentation/en/legacy_docs/selenium_rc/).
## Writing tests
diff --git a/doc/ci/examples/laravel_with_gitlab_and_envoy/index.md b/doc/ci/examples/laravel_with_gitlab_and_envoy/index.md
index bf7aec7107f..4284038b283 100644
--- a/doc/ci/examples/laravel_with_gitlab_and_envoy/index.md
+++ b/doc/ci/examples/laravel_with_gitlab_and_envoy/index.md
@@ -22,7 +22,7 @@ and you know how to use GitLab.
Laravel is a high quality web framework written in PHP.
It has a great community with a [fantastic documentation](https://laravel.com/docs).
-Aside from the usual routing, controllers, requests, responses, views, and (blade) templates, out of the box Laravel provides plenty of additional services such as cache, events, localization, authentication and many others.
+Aside from the usual routing, controllers, requests, responses, views, and (blade) templates, out of the box Laravel provides plenty of additional services such as cache, events, localization, authentication, and many others.
We will use [Envoy](https://laravel.com/docs/master/envoy) as an SSH task runner based on PHP.
It uses a clean, minimal [Blade syntax](https://laravel.com/docs/master/blade) to set up tasks that can run on remote servers, such as, cloning your project from the repository, installing the Composer dependencies, and running [Artisan commands](https://laravel.com/docs/master/artisan).
@@ -82,7 +82,7 @@ git push -u origin master
## Configure the production server
Before we begin setting up Envoy and GitLab CI/CD, let's quickly make sure the production server is ready for deployment.
-We have installed LEMP stack which stands for Linux, NGINX, MySQL and PHP on our Ubuntu 16.04.
+We have installed a LEMP stack, which stands for Linux, NGINX, MySQL, and PHP, on our Ubuntu 16.04 server.
### Create a new user
@@ -194,7 +194,7 @@ To start, we create an `Envoy.blade.php` in the root of our app with a simple ta
@endtask
```
-As you may expect, we have an array within `@servers` directive at the top of the file, which contains a key named `web` with a value of the server's address (e.g. `deployer@192.168.1.1`).
+As you may expect, we have an array within `@servers` directive at the top of the file, which contains a key named `web` with a value of the server's address (for example, `deployer@192.168.1.1`).
Then within our `@task` directive we define the bash commands that should be run on the server when the task is executed.
On the local machine use the `run` command to run Envoy tasks.
diff --git a/doc/ci/examples/license_management.md b/doc/ci/examples/license_management.md
index 0d12c9a20f2..df9af4db929 100644
--- a/doc/ci/examples/license_management.md
+++ b/doc/ci/examples/license_management.md
@@ -1,5 +1,5 @@
---
-redirect_to: '../../user/application_security/license_compliance/index.md'
+redirect_to: '../../user/compliance/license_compliance/index.md'
---
-This document was moved to [another location](../../user/application_security/license_compliance/index.md).
+This document was moved to [another location](../../user/compliance/license_compliance/index.md).
diff --git a/doc/ci/examples/test-scala-application.md b/doc/ci/examples/test-scala-application.md
index 7d039ab1aeb..ffdffcc88bd 100644
--- a/doc/ci/examples/test-scala-application.md
+++ b/doc/ci/examples/test-scala-application.md
@@ -60,7 +60,7 @@ You can use other versions of Scala and SBT by defining them in
Add the `Coverage was \[\d+.\d+\%\]` regular expression in the
**Settings ➔ Pipelines ➔ Coverage report** project setting to
-retrieve the [test coverage](../../user/project/pipelines/settings.md#test-coverage-report-badge)
+retrieve the [test coverage](../pipelines/settings.md#test-coverage-report-badge)
rate from the build trace and have it displayed with your jobs.
**Pipelines** must be enabled for this option to appear.
diff --git a/doc/ci/examples/test_phoenix_app_with_gitlab_ci_cd/img/select_template_v12_6.png b/doc/ci/examples/test_phoenix_app_with_gitlab_ci_cd/img/select_template_v12_6.png
index 97887db4486..c8c5e152a13 100644
--- a/doc/ci/examples/test_phoenix_app_with_gitlab_ci_cd/img/select_template_v12_6.png
+++ b/doc/ci/examples/test_phoenix_app_with_gitlab_ci_cd/img/select_template_v12_6.png
Binary files differ
diff --git a/doc/ci/examples/test_phoenix_app_with_gitlab_ci_cd/img/set_up_ci_v12_6.png b/doc/ci/examples/test_phoenix_app_with_gitlab_ci_cd/img/set_up_ci_v12_6.png
index 85fb58d4458..fafabb27bac 100644
--- a/doc/ci/examples/test_phoenix_app_with_gitlab_ci_cd/img/set_up_ci_v12_6.png
+++ b/doc/ci/examples/test_phoenix_app_with_gitlab_ci_cd/img/set_up_ci_v12_6.png
Binary files differ
diff --git a/doc/ci/img/collapsible_log_v12_6.png b/doc/ci/img/collapsible_log_v12_6.png
deleted file mode 100644
index 294421c645d..00000000000
--- a/doc/ci/img/collapsible_log_v12_6.png
+++ /dev/null
Binary files differ
diff --git a/doc/ci/img/ecs_dashboard_v12_9.png b/doc/ci/img/ecs_dashboard_v12_9.png
new file mode 100644
index 00000000000..bebd6f7903c
--- /dev/null
+++ b/doc/ci/img/ecs_dashboard_v12_9.png
Binary files differ
diff --git a/doc/ci/img/environment_auto_stop_v12_8.png b/doc/ci/img/environment_auto_stop_v12_8.png
index 3a3c54ab62d..f098938ef04 100644
--- a/doc/ci/img/environment_auto_stop_v12_8.png
+++ b/doc/ci/img/environment_auto_stop_v12_8.png
Binary files differ
diff --git a/doc/ci/img/environments_deployment_cluster_v12_8.png b/doc/ci/img/environments_deployment_cluster_v12_8.png
new file mode 100644
index 00000000000..7fa6d3515a8
--- /dev/null
+++ b/doc/ci/img/environments_deployment_cluster_v12_8.png
Binary files differ
diff --git a/doc/ci/img/parent_pipeline_graph_expanded_v12_6.png b/doc/ci/img/parent_pipeline_graph_expanded_v12_6.png
index 5c493109a54..db18cc201fc 100644
--- a/doc/ci/img/parent_pipeline_graph_expanded_v12_6.png
+++ b/doc/ci/img/parent_pipeline_graph_expanded_v12_6.png
Binary files differ
diff --git a/doc/ci/interactive_web_terminal/index.md b/doc/ci/interactive_web_terminal/index.md
index f7e8a0e412c..661e32b1571 100644
--- a/doc/ci/interactive_web_terminal/index.md
+++ b/doc/ci/interactive_web_terminal/index.md
@@ -15,7 +15,7 @@ taken to protect the users.
NOTE: **Note:**
[Shared runners on GitLab.com](../quick_start/README.md#shared-runners) do not
provide an interactive web terminal. Follow [this
-issue](https://gitlab.com/gitlab-org/gitlab-foss/issues/52611) for progress on
+issue](https://gitlab.com/gitlab-org/gitlab/-/issues/24674) for progress on
adding support. For groups and projects hosted on GitLab.com, interactive web
terminals are available when using your own group or project runner.
diff --git a/doc/ci/introduction/index.md b/doc/ci/introduction/index.md
index a07252f4803..069430579e3 100644
--- a/doc/ci/introduction/index.md
+++ b/doc/ci/introduction/index.md
@@ -206,7 +206,7 @@ With GitLab CI/CD you can also:
- Easily set up your app's entire lifecycle with [Auto DevOps](../../topics/autodevops/index.md).
- Deploy your app to different [environments](../environments.md).
- Install your own [GitLab Runner](https://docs.gitlab.com/runner/).
-- [Schedule pipelines](../../user/project/pipelines/schedules.md).
+- [Schedule pipelines](../pipelines/schedules.md).
- Check for app vulnerabilities with [Security Test reports](../../user/application_security/index.md). **(ULTIMATE)**
To see all CI/CD features, navigate back to the [CI/CD index](../README.md).
diff --git a/doc/ci/jenkins/index.md b/doc/ci/jenkins/index.md
index db97a6b4f2c..ea4ecbcca7f 100644
--- a/doc/ci/jenkins/index.md
+++ b/doc/ci/jenkins/index.md
@@ -7,28 +7,114 @@ type: index, howto
A lot of GitLab users have successfully migrated to GitLab CI/CD from Jenkins. To make this
easier if you're just getting started, we've collected several resources here that you might find useful
-before diving in.
+before diving in. Think of this page as a "GitLab CI/CD for Jenkins Users" guide.
First of all, our [Quick Start Guide](../quick_start/README.md) contains a good overview of how GitLab CI/CD works.
You may also be interested in [Auto DevOps](../../topics/autodevops/index.md) which can potentially be used to build, test,
and deploy your applications with little to no configuration needed at all.
+For advanced CI/CD teams, [templates](#templates) can enable the reuse of pipeline configurations.
+
Otherwise, read on for important information that will help you get the ball rolling. Welcome
to GitLab!
-## Important differences
+If you have questions that are not answered here, the [GitLab community forum](https://forum.gitlab.com/)
+can be a great resource.
+
+## Managing the organizational transition
+
+An important part of transitioning from Jenkins to GitLab is the cultural and organizational
+changes that come with the move, and successfully managing them. There are a few
+things we have found that help with this:
+
+- Setting and communicating a clear vision of what your migration goals are helps
+ your users understand why the effort is worth it. The value will be clear when
+ the work is done, but people need to be aware while it's in progress too.
+- Sponsorship and alignment from the relevant leadership team helps with the point above.
+- Spending time educating your users on what's different, sharing this document with them,
+ and so on will help ensure you are successful.
+- Finding ways to sequence or delay parts of the migration can help a lot, but you
+ don't want to leave things in a non-migrated (or partially-migrated) state for too
+ long. To gain all the benefits of GitLab, moving your existing Jenkins setup over
+ as-is, including any current problems, will not be enough. You need to take advantage
+ of the improvements that GitLab offers, and this requires (eventually) updating
+ your implementation as part of the transition.
+
+## Important product differences
There are some high level differences between the products worth mentioning:
- With GitLab you don't need a root `pipeline` keyword to wrap everything.
+- The way pipelines are triggered and [trigger other pipelines](../yaml/README.md#trigger)
+ is different than Jenkins. GitLab pipelines can be triggered:
+
+ - on push
+ - on [schedule](../pipelines/schedules.md)
+ - from the [GitLab UI](../pipelines/index.md#manually-executing-pipelines)
+ - by [API call](../triggers/README.md)
+ - by [webhook](../triggers/README.md#triggering-a-pipeline-from-a-webhook)
+ - by [ChatOps](../chatops/README.md)
+
+- You can control which jobs run in which cases, depending on how they are triggered,
+ with the [`rules` syntax](../yaml/README.md#rules).
+- GitLab [pipeline scheduling concepts](../pipelines/schedules.md) are also different from those in Jenkins.
+- You can reuse pipeline configurations using the [`include` keyword](../yaml/README.md#include)
+ and [templates](#templates). Your templates can be kept in a central repo (with different
+ permissions), and then any project can use them. This central project could also
+ contain scripts or other reusable code.
+- You can also use the [`extends` keyword](../yaml/README.md#extends) to reuse configuration
+ within a single pipeline configuration.
- All jobs within a single stage always run in parallel, and all stages run in sequence. We are planning
to allow certain jobs to break this sequencing as needed with our [directed acyclic graph](https://gitlab.com/gitlab-org/gitlab-foss/issues/47063)
feature.
+- The [`parallel`](../yaml/README.md#parallel) keyword can automatically parallelize tasks,
+ like tests that support parallelization.
+- Normally all jobs within a single stage run in parallel, and all stages run in sequence.
+ There are different [pipeline architectures](../pipelines/pipeline_architectures.md)
+ that allow you to change this behavior.
+- The new [`rules` syntax](../yaml/README.md#rules) is the recommended method of
+  controlling when different jobs run. It is more powerful than the `only/except` syntax
+  (see the sketch after this list).
+- One important difference is that jobs run independently of each other, each with a
+  fresh environment. Passing artifacts between jobs is controlled using the
+  [`artifacts`](../yaml/README.md#artifacts) and [`dependencies`](../yaml/README.md#dependencies)
+  keywords. Once complete, the planned [Workspaces](https://gitlab.com/gitlab-org/gitlab/issues/29265)
+  feature will make it easier to persist a common workspace between serial jobs.
- The `.gitlab-ci.yml` file is checked in to the root of your repository, much like a Jenkinsfile, but
is in the YAML format (see [complete reference](../yaml/README.md)) instead of a Groovy DSL. It's most
analogous to the declarative Jenkinsfile format.
+- Manual approvals or gates can be set up as [`when:manual` jobs](../yaml/README.md#whenmanual). These can
+ also leverage [`protected environments`](../yaml/README.md#protecting-manual-jobs-premium)
+ to control who is able to approve them.
- GitLab comes with a [container registry](../../user/packages/container_registry/index.md), and we recommend using
- container images to set up your build environment.
+  container images to set up your build environment. For example, set up one pipeline that builds your build environment
+  itself and publishes it to the container registry. Then, have your other pipelines use this image instead of each building its
+  own environment, which is slower and may be less consistent. We have extensive docs on [how to use the Container Registry](../../user/packages/container_registry/index.md).
+- A central utilities repository can be a great place to put assorted scheduled jobs
+ or other manual jobs that function like utilities. Jenkins installations tend to
+ have a few of these.
+
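+Below is a rough sketch of the `rules` behavior mentioned in the list above. The
+job names and scripts are placeholders, not part of any standard template:
+
+```yaml
+# Hypothetical jobs: run `deploy-review` only for merge request pipelines, and
+# make `deploy-prod` a manual gate that is only created for the master branch.
+deploy-review:
+  script: ./deploy-review.sh
+  rules:
+    - if: $CI_MERGE_REQUEST_ID
+
+deploy-prod:
+  script: ./deploy-prod.sh
+  rules:
+    - if: '$CI_COMMIT_REF_NAME == "master"'
+      when: manual
+```
+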
+## Agents vs. Runners
+
+Both Jenkins agents and GitLab Runners are the hosts that run jobs. To convert the
+Jenkins agent, simply uninstall it and then [install and register the runner](../runners/README.md).
+Runners do not require much overhead, so you can size them similarly to the Jenkins
+agents you were using.
+
+There are some important differences in the way Runners work in comparison to agents:
+
+- Runners can be [shared across an instance, added at the group level, or set up at the project level](../runners/README.md#shared-specific-and-group-runners).
+  They automatically pick up jobs from the scopes you've defined.
+- You can also [use tags](../runners/README.md#using-tags) for finer control, and
+ associate runners with specific jobs. For example, you can use a tag for jobs that
+ require dedicated, more powerful, or specific hardware.
+- GitLab has [autoscaling for Runners](https://docs.gitlab.com/runner/configuration/autoscale.html),
+  which lets you configure them to be provisioned as needed and scaled down when not.
+ This is similar to ephemeral agents in Jenkins.
+
+If you are using `gitlab.com`, you can take advantage of our [shared Runner fleet](../../user/gitlab_com/index.md#shared-runners)
+to run jobs without provisioning your own Runners. We are investigating making them
+[available for self-managed instances](https://gitlab.com/gitlab-org/customers-gitlab-com/issues/414)
+as well.
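+
+For illustration, a job can be pinned to runners registered with a matching tag.
+The job name, tag, and script below are arbitrary examples:
+
+```yaml
+# Only runners registered with the `gpu` tag will pick up this job.
+train-model:
+  tags:
+    - gpu
+  script:
+    - ./run-training.sh
+```
+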
## Groovy vs. YAML
@@ -42,8 +128,9 @@ and manage.
That said, we do of course still value DRY (don't repeat yourself) principles and want to ensure that
behaviors of your jobs can be codified once and applied as needed. You can use the `extends:` syntax to
-[templatize your jobs](../yaml/README.md#extends), and `include:` can be used to [bring in entire sets of behaviors](../yaml/README.md#include)
-to pipelines in different projects.
+[reuse configuration in your jobs](../yaml/README.md#extends), and `include:` can
+be used to [reuse pipeline configurations](../yaml/README.md#include) in pipelines
+in different projects:
```yaml
.in-docker:
@@ -62,7 +149,8 @@ rspec:
Artifacts may work a bit differently than you've used them with Jenkins. In GitLab, any job can define
a set of artifacts to be saved by using the `artifacts:` keyword. This can be configured to point to a file
-or set of files that can then be persisted from job to job. Read more on our detailed [artifacts documentation](../../user/project/pipelines/job_artifacts.md)
+or set of files that can then be persisted from job to job. Read more on our detailed
+[artifacts documentation](../pipelines/job_artifacts.md):
```yaml
pdf:
@@ -88,6 +176,19 @@ need to configure anything to have these appear.
If they aren't working as expected, or if you'd like to see what's available, our [CI feature index](../README.md#feature-set) has the full list
of bundled features and links to the documentation for each.
+### Templates
+
+For advanced CI/CD teams, project templates can enable the reuse of pipeline configurations,
+as well as encourage inner sourcing.
+
+In self-managed GitLab instances, you can build an [Instance Template Repository](../../user/admin_area/settings/instance_template_repository.md).
+Development teams across the whole organization can select templates from a dropdown menu.
+A group administrator is able to set a group to use as the source for the
+[custom project templates](../../user/admin_area/custom_project_templates.md), which can
+be used by all projects in the group. An instance administrator can set a group as
+the source for [instance project templates](../../user/group/custom_project_templates.md),
+which can be used by projects in that instance.
+
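+As a rough sketch, a project can pull shared configuration from such a central
+repository with `include`. The group, project, and file paths below are placeholders:
+
+```yaml
+# Reuse a job template kept in a central `ci-templates` project.
+include:
+  - project: 'my-group/ci-templates'
+    ref: master
+    file: '/templates/build.yml'
+```
+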
## Converting Declarative Jenkinsfiles
Declarative Jenkinsfiles contain "Sections" and "Directives" which are used to control the behavior of your
@@ -108,7 +209,7 @@ to different Runners (execution agents).
The `agent` section also allows you to define which Docker images should be used for execution, for which we use
the [`image`](../yaml/README.md#image) keyword. The `image` can be set on a single job or at the top level, in which
-case it will apply to all jobs in the pipeline.
+case it will apply to all jobs in the pipeline:
```yaml
my_job:
@@ -120,7 +221,7 @@ my_job:
The `post` section defines the actions that should be performed at the end of the pipeline. GitLab also supports
this through the use of stages. You can define your stages as follows, and any jobs assigned to the `before_pipeline`
-or `after_pipeline` stages will run as expected. You can call these stages anything you like.
+or `after_pipeline` stages will run as expected. You can call these stages anything you like:
```yaml
stages:
@@ -131,7 +232,8 @@ stages:
- after_pipeline
```
-Setting a step to be performed before and after any job can be done via the [`before_script` and `after_script` keywords](../yaml/README.md#before_script-and-after_script).
+Setting a step to be performed before and after any job can be done via the
+[`before_script` and `after_script` keywords](../yaml/README.md#before_script-and-after_script):
```yaml
default:
@@ -164,7 +266,7 @@ my_job:
#### `steps`
The `steps` section is equivalent to the [`script` section](../yaml/README.md#script) of an individual job. This is
-a simple YAML array with each line representing an individual command to be run.
+a simple YAML array with each line representing an individual command to be run:
```yaml
my_job:
@@ -181,7 +283,7 @@ my_job:
In GitLab, we use the [`variables` keyword](../yaml/README.md#variables) to define different variables at runtime.
These can also be set up through the GitLab UI, under CI/CD settings. See also our [general documentation on variables](../variables/README.md),
including the section on [protected variables](../variables/README.md#protected-environment-variables) which can be used
-to limit access to certain variables to certain environments or runners.
+to limit access to certain variables to certain environments or runners:
```yaml
variables:
@@ -203,7 +305,7 @@ can provide any variables they like.
#### `triggers` / `cron`
Because GitLab is integrated tightly with Git, SCM polling options for triggers are not needed. We support an easy to use
-[syntax for scheduling pipelines](../../user/project/pipelines/schedules.md).
+[syntax for scheduling pipelines](../pipelines/schedules.md).
#### `tools`
@@ -224,7 +326,8 @@ variable entry.
GitLab does support a [`when` keyword](../yaml/README.md#when) which is used to indicate when a job should be
run in case of (or despite) failure, but most of the logic for controlling pipelines can be found in
-our very powerful [`only/except` rules system](../yaml/README.md#onlyexcept-basic) (see also our [advanced syntax](../yaml/README.md#onlyexcept-basic))
+our very powerful [`only/except` rules system](../yaml/README.md#onlyexcept-basic)
+(see also our [advanced syntax](../yaml/README.md#onlyexcept-basic)):
```yaml
my_job:
diff --git a/doc/ci/junit_test_reports.md b/doc/ci/junit_test_reports.md
index c82f0ab3a42..1595f4d4427 100644
--- a/doc/ci/junit_test_reports.md
+++ b/doc/ci/junit_test_reports.md
@@ -9,7 +9,7 @@ Requires GitLab Runner 11.2 and above.
## Overview
-It is very common that a [CI/CD pipeline](pipelines.md) contains a
+It is very common that a [CI/CD pipeline](pipelines/index.md) contains a
test job that will verify your code.
If the tests fail, the pipeline fails and users get notified. The person that
works on the merge request will have to check the job logs and see where the
@@ -42,13 +42,15 @@ JUnit test reports, where:
- The base branch is the target branch (usually `master`).
- The head branch is the source branch (the latest pipeline in each merge request).
-The reports panel has a summary showing how many tests failed and how many were fixed.
-If no comparison can be done because data for the base branch is not available,
-the panel will just show the list of failed tests for head.
+The reports panel has a summary showing how many tests failed, how many had errors,
+and how many were fixed. If no comparison can be done because data for the base branch
+is not available, the panel will just show the list of failed tests for head.
-There are three types of results:
+There are four types of results:
1. **Newly failed tests:** Test cases which passed on base branch and failed on head branch
+1. **Newly encountered errors:** Test cases which passed on base branch and failed due to a
+ test error on head branch
1. **Existing failures:** Test cases which failed on base branch and failed on head branch
1. **Resolved failures:** Test cases which failed on base branch and passed on head branch
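+
+For reference, a minimal sketch of producing such a report; the job name, test
+command, and report path are placeholders for your own setup:
+
+```yaml
+rspec:
+  script:
+    # Assumes the rspec_junit_formatter gem is available in your project.
+    - bundle exec rspec --format RspecJunitFormatter --out rspec.xml
+  artifacts:
+    reports:
+      junit: rspec.xml
+```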
diff --git a/doc/ci/large_repositories/index.md b/doc/ci/large_repositories/index.md
index ec8de8d6f6c..6ac3fa2c92d 100644
--- a/doc/ci/large_repositories/index.md
+++ b/doc/ci/large_repositories/index.md
@@ -104,7 +104,7 @@ and that your repository is clean.
[`GIT_CLEAN_FLAGS`](../yaml/README.md#git-clean-flags) is disabled when set
to `none`. On very big repositories, this might be desired because `git
clean` is disk I/O intensive. Controlling that with `GIT_CLEAN_FLAGS: -ffdx
--e .build/`, for example, allows you to control and disable removal of some
+-e .build/` (for example) allows you to control and disable removal of some
directories within the worktree between subsequent runs, which can speed-up
the incremental builds. This has the biggest effect if you re-use existing
machines and have an existing worktree that you can re-use for builds.
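+
+For illustration, this is how such a value can be set in `.gitlab-ci.yml`; the
+`.build/` path is just an example of a directory you might want to preserve:
+
+```yaml
+variables:
+  # Clean the worktree, but keep .build/ between runs on the same machine.
+  GIT_CLEAN_FLAGS: -ffdx -e .build/
+```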
diff --git a/doc/ci/merge_request_pipelines/index.md b/doc/ci/merge_request_pipelines/index.md
index e03d767079f..d1749924ad3 100644
--- a/doc/ci/merge_request_pipelines/index.md
+++ b/doc/ci/merge_request_pipelines/index.md
@@ -89,7 +89,7 @@ Read the [documentation on Merge Trains](pipelines_for_merged_results/merge_trai
The behavior of the `only: [merge_requests]` parameter is such that _only_ jobs with
that parameter are run in the context of a merge request; no other jobs will be run.
-However, you may want to reverse this behavior, having all of your jobs to run _except_
+However, you can invert this behavior and have all of your jobs run _except_
for one or two.
Consider the following pipeline, with jobs `A`, `B`, and `C`. Imagine you want:
@@ -188,7 +188,7 @@ External users could steal secret variables from the parent project by modifying
We're discussing a secure solution of running pipelines for merge requests
that are submitted from forked projects,
-see [the issue about the permission extension](https://gitlab.com/gitlab-org/gitlab-foss/issues/23902).
+see [the issue about the permission extension](https://gitlab.com/gitlab-org/gitlab/-/issues/11934).
## Additional predefined variables
diff --git a/doc/ci/merge_request_pipelines/pipelines_for_merged_results/index.md b/doc/ci/merge_request_pipelines/pipelines_for_merged_results/index.md
index 379644beacd..6dbc8f98247 100644
--- a/doc/ci/merge_request_pipelines/pipelines_for_merged_results/index.md
+++ b/doc/ci/merge_request_pipelines/pipelines_for_merged_results/index.md
@@ -85,23 +85,37 @@ the following feature flags are enabled on your GitLab instance:
- `:ci_use_merge_request_ref`
- `:merge_ref_auto_sync`
-To check these feature flag values, please ask administrator to execute the following commands:
+To check and set these feature flag values, please ask an administrator to:
-```shell
-> sudo gitlab-rails console # Login to Rails console of GitLab instance.
-> Feature.enabled?(:ci_use_merge_request_ref) # Check if it's enabled or not.
-> Feature.enable(:ci_use_merge_request_ref) # Enable the feature flag.
-```
+1. Log into the Rails console of the GitLab instance:
+
+ ```shell
+ sudo gitlab-rails console
+ ```
+
+1. Check if the flags are enabled or not:
+
+ ```ruby
+ Feature.enabled?(:ci_use_merge_request_ref)
+ Feature.enabled?(:merge_ref_auto_sync)
+ ```
+
+1. If needed, enable the feature flags:
+
+ ```ruby
+ Feature.enable(:ci_use_merge_request_ref)
+ Feature.enable(:merge_ref_auto_sync)
+ ```
### Intermittently pipelines fail by `fatal: reference is not a tree:` error
Since pipelines for merged results are a run on a merge ref of a merge request
(`refs/merge-requests/<iid>/merge`), the Git reference could be overwritten at an
-unexpected timing, for example, when a source or target branch is advanced.
+unexpected timing. For example, when a source or target branch is advanced.
In this case, the pipeline fails because of `fatal: reference is not a tree:` error,
which indicates that the checkout-SHA is not found in the merge ref.
-This behavior was improved at GitLab 12.4 by introducing [Persistent pipeline refs](../../pipelines.md#persistent-pipeline-refs).
+This behavior was improved at GitLab 12.4 by introducing [Persistent pipeline refs](../../pipelines/index.md#persistent-pipeline-refs).
You should be able to create pipelines at any time without encountering this error.
## Using Merge Trains **(PREMIUM)**
diff --git a/doc/ci/merge_request_pipelines/pipelines_for_merged_results/merge_trains/index.md b/doc/ci/merge_request_pipelines/pipelines_for_merged_results/merge_trains/index.md
index 744713b0678..9348ba2f21c 100644
--- a/doc/ci/merge_request_pipelines/pipelines_for_merged_results/merge_trains/index.md
+++ b/doc/ci/merge_request_pipelines/pipelines_for_merged_results/merge_trains/index.md
@@ -21,7 +21,7 @@ just as it works in the current state. However, instead of queuing and waiting,
each item takes the completed state of the previous (pending) merge ref, adds its own changes,
and starts the pipeline immediately in parallel under the assumption that everything is going to pass.
-In this way, if all the pipelines in the train merge successfully, no pipeline time is wasted either queuing or retrying.
+This means that if all the pipelines in the train merge successfully, no pipeline time is wasted either queuing or retrying.
If the button is subsequently pressed in a different MR, instead of creating a new pipeline for the target branch,
it creates a new pipeline targeting the merge result of the previous MR plus the target branch.
Pipelines invalidated through failures are immediately canceled and requeued.
@@ -83,7 +83,7 @@ button while the latest pipeline is running.
## Immediately merge a merge request with a merge train
-In case, you have a high-priority merge request (e.g. critical patch) to be merged urgently,
+If you have a high-priority merge request (for example, a critical patch) that must be merged urgently,
you can use **Merge Immediately** option for bypassing the merge train.
This is the fastest option to get the change merged into the target branch.
@@ -140,15 +140,15 @@ workaround you'd be able to take immediately. If it's not available or acceptabl
please read through this section.
Merge train is enabled by default when you enable [Pipelines for merged results](../index.md),
-however, you can forcibly disable this feature by disabling the feature flag `:merge_trains_enabled`.
-After you disabled this feature, all the existing merge trains will be aborted and
-you will no longer see the **Start/Add Merge Train** button in merge requests.
+however, you can disable this feature by enabling the `:disable_merge_trains` feature flag.
+When you disable this feature, all existing merge trains are aborted and
+the **Start/Add Merge Train** button no longer appears in merge requests.
To check if the feature flag is enabled on your GitLab instance,
-please ask administrator to execute the following commands:
+please ask an administrator to execute the following commands **(CORE ONLY)**:
```shell
> sudo gitlab-rails console # Login to Rails console of GitLab instance.
-> Feature.enabled?(:merge_trains_enabled) # Check if it's enabled or not.
-> Feature.disable(:merge_trains_enabled) # Disable the feature flag.
+> Feature.enabled?(:disable_merge_trains) # Check if it's disabled or not.
+> Feature.enable(:disable_merge_trains) # Disable Merge Trains.
```
diff --git a/doc/ci/multi_project_pipelines.md b/doc/ci/multi_project_pipelines.md
index d9aa52eb233..6a4dff635ad 100644
--- a/doc/ci/multi_project_pipelines.md
+++ b/doc/ci/multi_project_pipelines.md
@@ -2,26 +2,18 @@
type: reference
---
-# Multi-project pipelines **(PREMIUM)**
+# Multi-project pipelines
-> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/2121) in
-[GitLab Premium 9.3](https://about.gitlab.com/releases/2017/06/22/gitlab-9-3-released/#multi-project-pipeline-graphs).
+> - [Introduced](https://about.gitlab.com/releases/2015/08/22/gitlab-7-14-released/#build-triggers-api-gitlab-ci) in GitLab 7.14, as Build Triggers.
+> - [Made available](https://gitlab.com/gitlab-org/gitlab/issues/199224) in all tiers in GitLab 12.8.
-When you set up [GitLab CI/CD](README.md) across multiple projects, you can visualize
-the entire pipeline, including all cross-project inter-dependencies.
+You can set up [GitLab CI/CD](README.md) across multiple projects, so that a pipeline
+in one project can trigger a pipeline in another project.
## Overview
-GitLab CI/CD is a powerful continuous integration tool that works not only per project, but also across projects. When you
-configure GitLab CI for your project, you can visualize the stages
-of your [jobs](pipelines.md#configuring-pipelines) on a [pipeline graph](pipelines.md#visualizing-pipelines).
-
-![Multi-project pipeline graph](img/multi_project_pipeline_graph.png)
-
-In the Merge Request Widget, multi-project pipeline mini-graphs are displayed,
-and when hovering or tapping (on touchscreen devices) they will expand and be shown adjacent to each other.
-
-![Multi-project mini graph](img/multi_pipeline_mini_graph.gif)
+GitLab CI/CD is a powerful continuous integration tool that works not only per project,
+but also across projects with multi-project pipelines.
Multi-project pipelines are useful for larger products that require cross-project inter-dependencies, such as those
adopting a [microservices architecture](https://about.gitlab.com/blog/2016/08/16/trends-in-version-control-land-microservices/).
@@ -30,6 +22,9 @@ For a demonstration of how cross-functional development teams can use cross-pipe
triggering to trigger multiple pipelines for different microservices projects, see
[Cross-project Pipeline Triggering and Visualization](https://about.gitlab.com/handbook/marketing/product-marketing/demo/#cross-project-pipeline-triggering-and-visualization-may-2019---1110).
+Additionally, it's possible to visualize the entire pipeline, including all cross-project
+inter-dependencies. **(PREMIUM)**
+
## Use cases
Let's assume you deploy your web app from different projects in GitLab:
@@ -38,10 +33,27 @@ Let's assume you deploy your web app from different projects in GitLab:
- One for the paid version add-ons, which also pass through builds and tests
- One for the documentation, which also builds, tests, and deploys with an SSG
-With Multi-Project Pipelines, you can visualize the entire pipeline, including all stages of builds and tests for the three projects.
+With Multi-Project Pipelines you can visualize the entire pipeline, including all build and test stages for the three projects.
+
+## Multi-project pipeline visualization **(PREMIUM)**
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/2121) in [GitLab Premium 9.3](https://about.gitlab.com/releases/2017/06/22/gitlab-9-3-released/#multi-project-pipeline-graphs).
+
+When you configure GitLab CI for your project, you can visualize the stages of your
+[jobs](pipelines/index.md#configuring-pipelines) on a [pipeline graph](pipelines/index.md#visualizing-pipelines).
+
+![Multi-project pipeline graph](img/multi_project_pipeline_graph.png)
+
+In the Merge Request Widget, multi-project pipeline mini-graphs are displayed,
+and when hovering or tapping (on touchscreen devices) they will expand and be shown adjacent to each other.
+
+![Multi-project mini graph](img/multi_pipeline_mini_graph.gif)
## Triggering multi-project pipelines through API
+> - Use of `CI_JOB_TOKEN` for multi-project pipelines was [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/2017) in [GitLab Premium][ee] 9.3.
+> - Use of `CI_JOB_TOKEN` for multi-project pipelines was [made available](https://gitlab.com/gitlab-org/gitlab/issues/31573) in all tiers in GitLab 12.4.
+
When you use the [`CI_JOB_TOKEN` to trigger pipelines](triggers/README.md#ci-job-token), GitLab
recognizes the source of the job token, and thus internally ties these pipelines
together, allowing you to visualize their relationships on pipeline graphs.
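+
+As a sketch, a job in the upstream project can pass its job token to the trigger
+API of a downstream project. The job name, host, and project ID below are placeholders:
+
+```yaml
+trigger-downstream:
+  stage: deploy
+  script:
+    - curl --request POST --form "token=$CI_JOB_TOKEN" --form ref=master "https://gitlab.example.com/api/v4/projects/9/trigger/pipeline"
+```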
@@ -52,7 +64,7 @@ outbound connections for upstream and downstream pipeline dependencies.
## Creating multi-project pipelines from `.gitlab-ci.yml`
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/8997) in [GitLab Premium](https://about.gitlab.com/pricing/) 11.8.
-> - [Moved](https://gitlab.com/gitlab-org/gitlab/issues/199224) to GitLab Core in 12.8.
+> - [Made available](https://gitlab.com/gitlab-org/gitlab/issues/199224) in all tiers in 12.8.
### Triggering a downstream pipeline using a bridge job
@@ -227,3 +239,19 @@ Some features are not implemented yet. For example, support for environments.
- `only` and `except`
- `when` (only with `on_success`, `on_failure`, and `always` values)
- `extends`
+
+## Trigger a pipeline when an upstream project is rebuilt
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/9045) in [GitLab Premium](https://about.gitlab.com/pricing/) 12.8.
+
+You can trigger a pipeline in your project whenever a pipeline finishes for a new
+tag in a different project:
+
+1. Go to the project's **Settings > CI / CD** page, and expand the **Pipeline subscriptions** section.
+1. Enter the path to the project you want to subscribe to.
+1. Click subscribe.
+
+Any pipelines that complete successfully for new tags in the subscribed project
+will now trigger a pipeline on the current project's default branch. The maximum
+number of upstream pipeline subscriptions is 2, for both the upstream and
+downstream projects.
diff --git a/doc/ci/parent_child_pipelines.md b/doc/ci/parent_child_pipelines.md
index 387f2e69606..2ce69a04939 100644
--- a/doc/ci/parent_child_pipelines.md
+++ b/doc/ci/parent_child_pipelines.md
@@ -80,7 +80,68 @@ microservice_a:
strategy: depend
```
+## Merge Request child pipelines
+
+To trigger a child pipeline as a [Merge Request Pipeline](merge_request_pipelines/index.md), you need to:
+
+- Set the trigger job to run on merge requests:
+
+```yaml
+# parent .gitlab-ci.yml
+microservice_a:
+ trigger:
+ include: path/to/microservice_a.yml
+ rules:
+ - if: $CI_MERGE_REQUEST_ID
+```
+
+- Configure the child pipeline by either:
+
+ - Setting all jobs in the child pipeline to evaluate in the context of a merge request:
+
+ ```yaml
+ # child path/to/microservice_a.yml
+ workflow:
+ rules:
+ - if: $CI_MERGE_REQUEST_ID
+
+ job1:
+ script: ...
+
+ job2:
+ script: ...
+ ```
+
+ - Alternatively, setting the rule per job. For example, to create only `job1` in
+ the context of merge request pipelines:
+
+ ```yaml
+ # child path/to/microservice_a.yml
+ job1:
+ script: ...
+ rules:
+ - if: $CI_MERGE_REQUEST_ID
+
+ job2:
+ script: ...
+ ```
+
+## Dynamic child pipelines
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/35632) in GitLab 12.9.
+
+Instead of running a child pipeline from a static YAML file, you can define a job that runs
+your own script to generate a YAML file, which is then [used to trigger a child pipeline](yaml/README.md#trigger-child-pipeline-with-generated-configuration-file).
+
+This technique can be very powerful for generating pipelines that target only the content that changed, or for
+building a matrix of targets and architectures.
+
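+A rough sketch of this pattern follows; the job names and the generator script are
+placeholders for whatever produces your configuration:
+
+```yaml
+generate-config:
+  stage: build
+  script: ./generate-ci-config.sh > generated-config.yml
+  artifacts:
+    paths:
+      - generated-config.yml
+
+child-pipeline:
+  stage: test
+  trigger:
+    include:
+      - artifact: generated-config.yml
+        job: generate-config
+```
+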
## Limitations
A parent pipeline can trigger many child pipelines, but a child pipeline cannot trigger
-further child pipelines. See the [related issue](https://gitlab.com/gitlab-org/gitlab/issues/29651) for discussion on possible future improvements.
+further child pipelines. See the [related issue](https://gitlab.com/gitlab-org/gitlab/issues/29651)
+for discussion on possible future improvements.
+
+When triggering dynamic child pipelines, if the job containing the CI config artifact is not a predecessor of the
+trigger job, the child pipeline fails to be created, which also causes the parent pipeline to fail.
+In the future we want to validate the trigger job's dependencies [at the time the parent pipeline is created](https://gitlab.com/gitlab-org/gitlab/-/issues/209070) rather than when the child pipeline is created.
diff --git a/doc/ci/pipelines.md b/doc/ci/pipelines.md
index 896097ccd3a..edebd12f07a 100644
--- a/doc/ci/pipelines.md
+++ b/doc/ci/pipelines.md
@@ -1,550 +1,5 @@
---
-type: reference
+redirect_to: 'pipelines/index.md'
---
-# Creating and using CI/CD pipelines
-
-> Introduced in GitLab 8.8.
-
-NOTE: **Tip:**
-Watch our
-["Mastering continuous software development"](https://about.gitlab.com/webcast/mastering-ci-cd/)
-webcast to see a comprehensive demo of GitLab CI/CD pipeline.
-
-## Introduction
-
-Pipelines are the top-level component of continuous integration, delivery, and deployment.
-
-Pipelines comprise:
-
-- Jobs that define what to run. For example, code compilation or test runs.
-- Stages that define when and how to run. For example, that tests run only after code compilation.
-
-Multiple jobs in the same stage are executed by [Runners](runners/README.md) in parallel, if there are enough concurrent [Runners](runners/README.md).
-
-If all the jobs in a stage:
-
-- Succeed, the pipeline moves on to the next stage.
-- Fail, the next stage is not (usually) executed and the pipeline ends early.
-
-NOTE: **Note:**
-If you have a [mirrored repository that GitLab pulls from](../user/project/repository/repository_mirroring.md#pulling-from-a-remote-repository-starter),
-you may need to enable pipeline triggering in your project's
-**Settings > Repository > Pull from a remote repository > Trigger pipelines for mirror updates**.
-
-### Simple pipeline example
-
-As an example, imagine a pipeline consisting of four stages, executed in the following order:
-
-- `build`, with a job called `compile`.
-- `test`, with two jobs called `test` and `test2`.
-- `staging`, with a job called `deploy-to-stage`.
-- `production`, with a job called `deploy-to-prod`.
-
-## Visualizing pipelines
-
-> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/5742) in GitLab 8.11.
-
-Pipelines can be complex structures with many sequential and parallel jobs.
-
-To make it easier to understand the flow of a pipeline, GitLab has pipeline graphs for viewing pipelines
-and their statuses.
-
-Pipeline graphs can be displayed in two different ways, depending on the page you
-access the graph from.
-
-NOTE: **Note:**
-GitLab capitalizes the stages' names when shown in the pipeline graphs (below).
-
-### Regular pipeline graphs
-
-Regular pipeline graphs show the names of the jobs of each stage. Regular pipeline graphs can
-be found when you are on a [single pipeline page](#accessing-pipelines). For example:
-
-![Pipelines example](img/pipelines.png)
-
-### Pipeline mini graphs
-
-Pipeline mini graphs take less space and can tell you at a
-quick glance if all jobs passed or something failed. The pipeline mini graph can
-be found when you navigate to:
-
-- The pipelines index page.
-- A single commit page.
-- A merge request page.
-
-Pipeline mini graphs allow you to see all related jobs for a single commit and the net result
-of each stage of your pipeline. This allows you to quickly see what failed and
-fix it.
-
-Stages in pipeline mini graphs are collapsible. Hover your mouse over them and click to expand their jobs.
-
-| Mini graph | Mini graph expanded |
-|:-------------------------------------------------------------|:---------------------------------------------------------------|
-| ![Pipelines mini graph](img/pipelines_mini_graph_simple.png) | ![Pipelines mini graph extended](img/pipelines_mini_graph.png) |
-
-### Job ordering in pipeline graphs
-
-Job ordering depends on the type of pipeline graph. For [regular pipeline graphs](#regular-pipeline-graphs), jobs are sorted by name.
-
-For [pipeline mini graphs](#pipeline-mini-graphs) ([introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/9760)
-in GitLab 9.0), jobs are sorted by severity and then by name.
-
-The order of severity is:
-
-- failed
-- warning
-- pending
-- running
-- manual
-- scheduled
-- canceled
-- success
-- skipped
-- created
-
-For example:
-
-![Pipeline mini graph sorting](img/pipelines_mini_graph_sorting.png)
-
-### How pipeline duration is calculated
-
-Total running time for a given pipeline excludes retries and pending
-(queued) time.
-
-Each job is represented as a `Period`, which consists of:
-
-- `Period#first` (when the job started).
-- `Period#last` (when the job finished).
-
-A simple example is:
-
-- A (1, 3)
-- B (2, 4)
-- C (6, 7)
-
-In the example:
-
-- A begins at 1 and ends at 3.
-- B begins at 2 and ends at 4.
-- C begins at 6 and ends at 7.
-
-Visually, it can be viewed as:
-
-```text
-0 1 2 3 4 5 6 7
- AAAAAAA
- BBBBBBB
- CCCC
-```
-
-The union of A, B, and C is (1, 4) and (6, 7). Therefore, the total running time is:
-
-```text
-(4 - 1) + (7 - 6) => 4
-```
-
-### How pipeline quotas are used
-
-Each user has a personal pipeline quota that tracks the usage of shared runners in all personal projects.
-Each group has a [usage quota](../subscriptions/index.md#ci-pipeline-minutes) that tracks the usage of shared runners for all projects created within the group.
-
-When a pipeline is triggered, regardless of who triggered it, the pipeline quota for the project owner's [namespace](../user/group/index.md#namespaces) is used. In this case, the namespace can be the user or group that owns the project.
-
-### Expanding and collapsing job log sections
-
-> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/14664) in GitLab
-> 12.0.
-
-Job logs are divided into sections that can be collapsed or expanded. Each section will display
-the duration.
-
-In the following example:
-
-- Two sections are collapsed and can be expanded.
-- Three sections are expanded and can be collapsed.
-
-![Collapsible sections](img/collapsible_log_v12_6.png)
-
-#### Custom collapsible sections
-
-You can create collapsible sections in job logs by manually outputting special codes
-that GitLab will use to determine what sections to collapse:
-
-- Section start marker: `section_start:UNIX_TIMESTAMP:SECTION_NAME\r\e[0K` + `TEXT_OF_SECTION_HEADER`
-- Section end marker: `section_end:UNIX_TIMESTAMP:SECTION_NAME\r\e[0K`
-
-You must add these codes to the script section of the CI configuration. For example,
-using `echo`:
-
-```yaml
-job1:
- script:
- - echo -e "section_start:`date +%s`:my_first_section\r\e[0KHeader of the 1st collapsible section"
- - echo 'this line should be hidden when collapsed'
- - echo -e "section_end:`date +%s`:my_first_section\r\e[0K"
-```
-
-In the example above:
-
-- `date +%s`: The Unix timestamp (for example `1560896352`).
-- `my_first_section`: The name given to the section.
-- `\r\e[0K`: Prevents the section markers from displaying in the rendered (colored)
- job log, but they are displayed in the raw job log. To see them, in the top right
- of the job log, click **{doc-text}** (**Show complete raw**).
- - `\r`: carriage return.
- - `\e[0K`: clear line ANSI escape code.
-
-Sample raw job log:
-
-```plaintext
-section_start:1560896352:my_first_section\r\e[0KHeader of the 1st collapsible section
-this line should be hidden when collapsed
-section_end:1560896353:my_first_section\r\e[0K
-```
-
-## Configuring pipelines
-
-Pipelines, and their component jobs and stages, are defined in the [`.gitlab-ci.yml`](yaml/README.md) file for each project.
-
-In particular:
-
-- Jobs are the [basic configuration](yaml/README.html#introduction) component.
-- Stages are defined using the [`stages`](yaml/README.html#stages) keyword.
-
-For all available configuration options, see the [GitLab CI/CD Pipeline Configuration Reference](yaml/README.md).
-
-### Settings and schedules
-
-In addition to configuring jobs through `.gitlab-ci.yml`, additional configuration options are available
-through the GitLab UI:
-
-- Pipeline settings for each project. For more information, see [Pipeline settings](../user/project/pipelines/settings.md).
-- Schedules for pipelines. For more information, see [Pipeline schedules](../user/project/pipelines/schedules.md).
-
-### Grouping jobs
-
-> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/6242) in GitLab 8.12.
-
-If you have many similar jobs, your [pipeline graph](#visualizing-pipelines) becomes long and hard
-to read.
-
-For that reason, similar jobs can automatically be grouped together.
-If the job names are formatted in certain ways, they will be collapsed into
-a single group in regular pipeline graphs (not the mini graphs).
-
-You'll know when a pipeline has grouped jobs if you don't see the retry or
-cancel button inside them. Hovering over them will show the number of grouped
-jobs. Click to expand them.
-
-![Grouped pipelines](img/pipelines_grouped.png)
-
-#### Configuring grouping
-
-In the pipeline [configuration file](yaml/README.md), job names must include two numbers separated with one of
-the following (you can even use them interchangeably):
-
-- A space.
-- A slash (`/`).
-- A colon (`:`).
-
-NOTE: **Note:**
-More specifically, it uses [this](https://gitlab.com/gitlab-org/gitlab/blob/2f3dc314f42dbd79813e6251792853bc231e69dd/app/models/commit_status.rb#L99) regular expression: `\d+[\s:\/\\]+\d+\s*`.
-
-#### How grouping works
-
-The jobs will be ordered by comparing those two numbers from left to right. You
-usually want the first to be the index and the second the total.
-
-For example, the following jobs will be grouped under a job named `test`:
-
-- `test 0 3`
-- `test 1 3`
-- `test 2 3`
-
-The following jobs will be grouped under a job named `test ruby`:
-
-- `test 1:2 ruby`
-- `test 2:2 ruby`
-
-The following jobs will be grouped under a job named `test ruby` as well:
-
-- `1/3 test ruby`
-- `2/3 test ruby`
-- `3/3 test ruby`
-
-### Pipelines for merge requests
-
-GitLab supports configuring pipelines that run only for merge requests. For more information, see
-[Pipelines for merge requests](merge_request_pipelines/index.md).
-
-### Badges
-
-Pipeline status and test coverage report badges are available and configurable for each project.
-
-For information on adding pipeline badges to projects, see [Pipeline badges](../user/project/pipelines/settings.md#pipeline-badges).
-
-## Multi-project pipelines **(PREMIUM)**
-
-Pipelines for different projects can be combined and visualized together.
-
-For more information, see [Multi-project pipelines](multi_project_pipelines.md).
-
-## Parent-child pipelines
-
-Complex pipelines can be broken down into one parent pipeline that can trigger
-multiple child sub-pipelines, which all run in the same project and with the same SHA.
-
-For more information, see [Parent-Child pipelines](parent_child_pipelines.md).
-
-## Working with pipelines
-
-In general, pipelines are executed automatically and require no intervention once created.
-
-However, there are instances where you'll need to interact with pipelines. These are documented below.
-
-### Manually executing pipelines
-
-Pipelines can be manually executed, with predefined or manually-specified [variables](variables/README.md).
-
-You might do this if the results of a pipeline (for example, a code build) is required outside the normal
-operation of the pipeline.
-
-To execute a pipeline manually:
-
-1. Navigate to your project's **CI/CD > Pipelines**.
-1. Click on the **Run Pipeline** button.
-1. On the **Run Pipeline** page:
- 1. Select the branch to run the pipeline for in the **Create for** field.
- 1. Enter any [environment variables](variables/README.md) required for the pipeline run.
- 1. Click the **Create pipeline** button.
-
-The pipeline will execute the jobs as configured.
-
-#### Using a query string
-
-> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/24146) in GitLab 12.5.
-
-Variables on the **Run Pipeline** page can be pre-populated by passing variable keys and values
-in a query string appended to the `pipelines/new` URL. The format is:
-
-```plaintext
-.../pipelines/new?ref=<branch>&var[<variable_key>]=<value>&file_var[<file_key>]=<value>
-```
-
-The following parameters are supported:
-
-- `ref`: specify the branch to populate the **Run for** field with.
-- `var`: specify a `Variable` variable.
-- `file_var`: specify a `File` variable.
-
-For each `var` or `file_var`, a key and value are required.
-
-For example, the query string
-`.../pipelines/new?ref=my_branch&var[foo]=bar&file_var[file_foo]=file_bar` will pre-populate the
-**Run Pipeline** page as follows:
-
-- **Run for** field: `my_branch`.
-- **Variables** section:
- - Variable:
- - Key: `foo`
- - Value: `bar`
- - File:
- - Key: `file_foo`
- - Value: `file_bar`
-
-### Accessing pipelines
-
-You can find the current and historical pipeline runs under your project's
-**CI/CD > Pipelines** page. You can also access pipelines for a merge request by navigating
-to its **Pipelines** tab.
-
-![Pipelines index page](img/pipelines_index.png)
-
-Clicking on a pipeline will bring you to the **Pipeline Details** page and show
-the jobs that were run for that pipeline. From here you can cancel a running pipeline,
-retry jobs on a failed pipeline, or [delete a pipeline](#deleting-a-single-pipeline).
-
-### Accessing individual jobs
-
-When you access a pipeline, you can see the related jobs for that pipeline.
-
-Clicking on an individual job will show you its job log, and allow you to:
-
-- Cancel the job.
-- Retry the job.
-- Erase the job log.
-
-### Seeing the failure reason for jobs
-
-> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/17782) in GitLab 10.7.
-
-When a pipeline fails or is allowed to fail, there are several places where you
-can quickly check the reason it failed:
-
-- In the pipeline graph, on the pipeline detail view.
-- In the pipeline widgets, in the merge requests and commit pages.
-- In the job views, in the global and detailed views of a job.
-
-In each place, if you hover over the failed job you can see the reason it failed.
-
-![Pipeline detail](img/job_failure_reason.png)
-
-From [GitLab 10.8](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/17814),
-you can also see the reason it failed on the Job detail page.
-
-### Manual actions from pipeline graphs
-
-> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/7931) in GitLab 8.15.
-
-Manual actions, configured using the [`when:manual`](yaml/README.md#whenmanual) parameter,
-allow you to require manual interaction before moving forward in the pipeline.
-
-You can do this straight from the pipeline graph. Just click on the play button
-to execute that particular job.
-
-For example, your pipeline start automatically, but require manual action to
-[deploy to production](environments.md#configuring-manual-deployments). In the example below, the `production`
-stage has a job with a manual action.
-
-![Pipelines example](img/pipelines.png)
-
-### Specifying variables when running manual jobs
-
-> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/30485) in GitLab 12.2.
-
-When running manual jobs you can supply additional job specific variables.
-
-You can do this from the job page of the manual job you want to run with
-additional variables.
-
-This is useful when you want to alter the execution of a job by using
-environment variables.
-
-![Manual job variables](img/manual_job_variables.png)
-
-### Delay a job in a pipeline graph
-
-> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/21767) in GitLab 11.4.
-
-When you do not want to run a job immediately, you can use the [`when:delayed`](yaml/README.md#whendelayed) parameter to
-delay a job's execution for a certain period.
-
-This is especially useful for timed incremental rollout where new code is rolled out gradually.
-
-For example, if you start rolling out new code and:
-
-- Users do not experience trouble, GitLab can automatically complete the deployment from 0% to 100%.
-- Users experience trouble with the new code, you can stop the timed incremental rollout by canceling the pipeline
- and [rolling](environments.md#retrying-and-rolling-back) back to the last stable version.
-
-![Pipelines example](img/pipeline_incremental_rollout.png)
-
-### Using the API
-
-GitLab provides API endpoints to:
-
-- Perform basic functions. For more information, see [Pipelines API](../api/pipelines.md).
-- Maintain pipeline schedules. For more information, see [Pipeline schedules API](../api/pipeline_schedules.md).
-- Trigger pipeline runs. For more information, see:
- - [Triggering pipelines through the API](triggers/README.md).
- - [Pipeline triggers API](../api/pipeline_triggers.md).
-
-### Start multiple manual actions in a stage
-
-> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/27188) in GitLab 11.11.
-
-Multiple manual actions in a single stage can be started at the same time using the "Play all manual" button.
-Once the user clicks this button, each individual manual action will be triggered and refreshed
-to an updated status.
-
-This functionality is only available:
-
-- For users with at least Developer access.
-- If the the stage contains [manual actions](#manual-actions-from-pipeline-graphs).
-
-### Deleting a single pipeline
-
-> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/24851) in GitLab 12.7.
-
-Users with [owner permissions](../user/permissions.md) in a project can delete a pipeline
-by clicking on the pipeline in the **CI/CD > Pipelines** to get to the **Pipeline Details**
-page, then using the **Delete** button.
-
-![Pipeline Delete Button](img/pipeline-delete.png)
-
-CAUTION: **Warning:**
-Deleting a pipeline will expire all pipeline caches, and delete all related objects,
-such as builds, logs, artifacts, and triggers. **This action cannot be undone.**
-
-## Most Recent Pipeline
-
-> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/50499) in GitLab 12.3.
-
-There's a link to the latest pipeline for the last commit of a given branch at `/project/pipelines/[branch]/latest`. Also, `/project/pipelines/latest` will redirect you to the latest pipeline for the last commit on the project's default branch.
-
-## Security on protected branches
-
-A strict security model is enforced when pipelines are executed on
-[protected branches](../user/project/protected_branches.md).
-
-The following actions are allowed on protected branches only if the user is
-[allowed to merge or push](../user/project/protected_branches.md#using-the-allowed-to-merge-and-allowed-to-push-settings)
-on that specific branch:
-
-- Run manual pipelines (using the [Web UI](#manually-executing-pipelines) or pipelines API).
-- Run scheduled pipelines.
-- Run pipelines using triggers.
-- Trigger manual actions on existing pipelines.
-- Retry or cancel existing jobs (using the Web UI or pipelines API).
-
-**Variables** marked as **protected** are accessible only to jobs that
-run on protected branches, preventing untrusted users getting unintended access to
-sensitive information like deployment credentials and tokens.
-
-**Runners** marked as **protected** can run jobs only on protected
-branches, avoiding untrusted code to be executed on the protected runner and
-preserving deployment keys and other credentials from being unintentionally
-accessed. In order to ensure that jobs intended to be executed on protected
-runners will not use regular runners, they must be tagged accordingly.
-
-## Persistent pipeline refs
-
-> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/17043) in GitLab 12.4.
-
-Previously, you'd have encountered unexpected pipeline failures when you force-pushed
-a branch to its remote repository. To illustrate the problem, suppose you've had the current workflow:
-
-1. A user creates a feature branch named `example` and pushes it to a remote repository.
-1. A new pipeline starts running on the `example` branch.
-1. A user rebases the `example` branch on the latest `master` branch and force-pushes it to its remote repository.
-1. A new pipeline starts running on the `example` branch again, however,
- the previous pipeline (2) fails because of `fatal: reference is not a tree:` error.
-
-This is because the previous pipeline cannot find a checkout-SHA (which associated with the pipeline record)
-from the `example` branch that the commit history has already been overwritten by the force-push.
-Similarly, [Pipelines for merged results](merge_request_pipelines/pipelines_for_merged_results/index.md)
-might have failed intermittently due to [the same reason](merge_request_pipelines/pipelines_for_merged_results/index.md#intermittently-pipelines-fail-by-fatal-reference-is-not-a-tree-error).
-
-As of GitLab 12.4, we've improved this behavior by persisting pipeline refs exclusively.
-To illustrate its life cycle:
-
-1. A pipeline is created on a feature branch named `example`.
-1. A persistent pipeline ref is created at `refs/pipelines/<pipeline-id>`,
- which retains the checkout-SHA of the associated pipeline record.
- This persistent ref stays intact during the pipeline execution,
- even if the commit history of the `example` branch has been overwritten by force-push.
-1. GitLab Runner fetches the persistent pipeline ref and gets source code from the checkout-SHA.
-1. When the pipeline finished, its persistent ref is cleaned up in a background process.
-
-NOTE: **NOTE**: At this moment, this feature is on by default and can be manually disabled
-by disabling `depend_on_persistent_pipeline_ref` feature flag. If you're interested in
-manually disabling this behavior, please ask the administrator
-to execute the following commands in rails console.
-
-```shell
-> sudo gitlab-rails console # Login to Rails console of GitLab instance.
-> project = Project.find_by_full_path('namespace/project-name') # Get the project instance.
-> Feature.disable(:depend_on_persistent_pipeline_ref, project) # Disable the feature flag for specific project
-> Feature.disable(:depend_on_persistent_pipeline_ref) # Disable the feature flag system-wide
-```
+This document was moved to [another location](pipelines/index.md).
diff --git a/doc/ci/pipelines/img/collapsible_log_v12_6.png b/doc/ci/pipelines/img/collapsible_log_v12_6.png
new file mode 100644
index 00000000000..a1e9aeb244a
--- /dev/null
+++ b/doc/ci/pipelines/img/collapsible_log_v12_6.png
Binary files differ
diff --git a/doc/user/project/pipelines/img/job_artifacts_browser.png b/doc/ci/pipelines/img/job_artifacts_browser.png
index d3d8de5ac60..d3d8de5ac60 100644
--- a/doc/user/project/pipelines/img/job_artifacts_browser.png
+++ b/doc/ci/pipelines/img/job_artifacts_browser.png
Binary files differ
diff --git a/doc/user/project/pipelines/img/job_artifacts_browser_button.png b/doc/ci/pipelines/img/job_artifacts_browser_button.png
index 21072ce1248..21072ce1248 100644
--- a/doc/user/project/pipelines/img/job_artifacts_browser_button.png
+++ b/doc/ci/pipelines/img/job_artifacts_browser_button.png
Binary files differ
diff --git a/doc/user/project/pipelines/img/job_artifacts_builds_page.png b/doc/ci/pipelines/img/job_artifacts_builds_page.png
index 13e039ba934..13e039ba934 100644
--- a/doc/user/project/pipelines/img/job_artifacts_builds_page.png
+++ b/doc/ci/pipelines/img/job_artifacts_builds_page.png
Binary files differ
diff --git a/doc/user/project/pipelines/img/job_artifacts_pipelines_page.png b/doc/ci/pipelines/img/job_artifacts_pipelines_page.png
index 983f903ca72..983f903ca72 100644
--- a/doc/user/project/pipelines/img/job_artifacts_pipelines_page.png
+++ b/doc/ci/pipelines/img/job_artifacts_pipelines_page.png
Binary files differ
diff --git a/doc/ci/img/job_failure_reason.png b/doc/ci/pipelines/img/job_failure_reason.png
index d44b8e6d1be..d44b8e6d1be 100644
--- a/doc/ci/img/job_failure_reason.png
+++ b/doc/ci/pipelines/img/job_failure_reason.png
Binary files differ
diff --git a/doc/user/project/pipelines/img/job_latest_artifacts_browser.png b/doc/ci/pipelines/img/job_latest_artifacts_browser.png
index c6d8856078b..c6d8856078b 100644
--- a/doc/user/project/pipelines/img/job_latest_artifacts_browser.png
+++ b/doc/ci/pipelines/img/job_latest_artifacts_browser.png
Binary files differ
diff --git a/doc/ci/img/manual_job_variables.png b/doc/ci/pipelines/img/manual_job_variables.png
index a5ed351fdcd..a5ed351fdcd 100644
--- a/doc/ci/img/manual_job_variables.png
+++ b/doc/ci/pipelines/img/manual_job_variables.png
Binary files differ
diff --git a/doc/ci/img/pipeline-delete.png b/doc/ci/pipelines/img/pipeline-delete.png
index d9dba1f455d..d9dba1f455d 100644
--- a/doc/ci/img/pipeline-delete.png
+++ b/doc/ci/pipelines/img/pipeline-delete.png
Binary files differ
diff --git a/doc/ci/img/pipeline_incremental_rollout.png b/doc/ci/pipelines/img/pipeline_incremental_rollout.png
index b3498e9a5a5..b3498e9a5a5 100644
--- a/doc/ci/img/pipeline_incremental_rollout.png
+++ b/doc/ci/pipelines/img/pipeline_incremental_rollout.png
Binary files differ
diff --git a/doc/user/project/pipelines/img/pipeline_schedule_play.png b/doc/ci/pipelines/img/pipeline_schedule_play.png
index ec6eb0d156b..ec6eb0d156b 100644
--- a/doc/user/project/pipelines/img/pipeline_schedule_play.png
+++ b/doc/ci/pipelines/img/pipeline_schedule_play.png
Binary files differ
diff --git a/doc/user/project/pipelines/img/pipeline_schedule_variables.png b/doc/ci/pipelines/img/pipeline_schedule_variables.png
index ce3c3dc6af1..ce3c3dc6af1 100644
--- a/doc/user/project/pipelines/img/pipeline_schedule_variables.png
+++ b/doc/ci/pipelines/img/pipeline_schedule_variables.png
Binary files differ
diff --git a/doc/user/project/pipelines/img/pipeline_schedules_list.png b/doc/ci/pipelines/img/pipeline_schedules_list.png
index 541fe4f9b1d..541fe4f9b1d 100644
--- a/doc/user/project/pipelines/img/pipeline_schedules_list.png
+++ b/doc/ci/pipelines/img/pipeline_schedules_list.png
Binary files differ
diff --git a/doc/user/project/pipelines/img/pipeline_schedules_new_form.png b/doc/ci/pipelines/img/pipeline_schedules_new_form.png
index 993fbf8ca00..993fbf8ca00 100644
--- a/doc/user/project/pipelines/img/pipeline_schedules_new_form.png
+++ b/doc/ci/pipelines/img/pipeline_schedules_new_form.png
Binary files differ
diff --git a/doc/user/project/pipelines/img/pipeline_schedules_ownership.png b/doc/ci/pipelines/img/pipeline_schedules_ownership.png
index 8fc5c5fbc82..8fc5c5fbc82 100644
--- a/doc/user/project/pipelines/img/pipeline_schedules_ownership.png
+++ b/doc/ci/pipelines/img/pipeline_schedules_ownership.png
Binary files differ
diff --git a/doc/ci/img/pipelines.png b/doc/ci/pipelines/img/pipelines.png
index a604fcb2587..a604fcb2587 100644
--- a/doc/ci/img/pipelines.png
+++ b/doc/ci/pipelines/img/pipelines.png
Binary files differ
diff --git a/doc/ci/pipelines/img/pipelines_duration_chart.png b/doc/ci/pipelines/img/pipelines_duration_chart.png
new file mode 100644
index 00000000000..12ec262dadb
--- /dev/null
+++ b/doc/ci/pipelines/img/pipelines_duration_chart.png
Binary files differ
diff --git a/doc/ci/img/pipelines_grouped.png b/doc/ci/pipelines/img/pipelines_grouped.png
index 82814754747..82814754747 100644
--- a/doc/ci/img/pipelines_grouped.png
+++ b/doc/ci/pipelines/img/pipelines_grouped.png
Binary files differ
diff --git a/doc/ci/img/pipelines_index.png b/doc/ci/pipelines/img/pipelines_index.png
index e168e7e23df..e168e7e23df 100644
--- a/doc/ci/img/pipelines_index.png
+++ b/doc/ci/pipelines/img/pipelines_index.png
Binary files differ
diff --git a/doc/ci/img/pipelines_mini_graph.png b/doc/ci/pipelines/img/pipelines_mini_graph.png
index 8656b02f60d..8656b02f60d 100644
--- a/doc/ci/img/pipelines_mini_graph.png
+++ b/doc/ci/pipelines/img/pipelines_mini_graph.png
Binary files differ
diff --git a/doc/ci/img/pipelines_mini_graph_simple.png b/doc/ci/pipelines/img/pipelines_mini_graph_simple.png
index d00a8313088..d00a8313088 100644
--- a/doc/ci/img/pipelines_mini_graph_simple.png
+++ b/doc/ci/pipelines/img/pipelines_mini_graph_simple.png
Binary files differ
diff --git a/doc/ci/img/pipelines_mini_graph_sorting.png b/doc/ci/pipelines/img/pipelines_mini_graph_sorting.png
index 3a4e5453360..3a4e5453360 100644
--- a/doc/ci/img/pipelines_mini_graph_sorting.png
+++ b/doc/ci/pipelines/img/pipelines_mini_graph_sorting.png
Binary files differ
diff --git a/doc/user/project/pipelines/img/pipelines_settings_badges.png b/doc/ci/pipelines/img/pipelines_settings_badges.png
index 3bdc6374c15..3bdc6374c15 100644
--- a/doc/user/project/pipelines/img/pipelines_settings_badges.png
+++ b/doc/ci/pipelines/img/pipelines_settings_badges.png
Binary files differ
diff --git a/doc/user/project/pipelines/img/pipelines_settings_test_coverage.png b/doc/ci/pipelines/img/pipelines_settings_test_coverage.png
index 13ed69be810..13ed69be810 100644
--- a/doc/user/project/pipelines/img/pipelines_settings_test_coverage.png
+++ b/doc/ci/pipelines/img/pipelines_settings_test_coverage.png
Binary files differ
diff --git a/doc/ci/pipelines/img/pipelines_success_chart.png b/doc/ci/pipelines/img/pipelines_success_chart.png
new file mode 100644
index 00000000000..f44dc25ff1c
--- /dev/null
+++ b/doc/ci/pipelines/img/pipelines_success_chart.png
Binary files differ
diff --git a/doc/user/project/pipelines/img/pipelines_test_coverage_build.png b/doc/ci/pipelines/img/pipelines_test_coverage_build.png
index 7eaba1a256f..7eaba1a256f 100644
--- a/doc/user/project/pipelines/img/pipelines_test_coverage_build.png
+++ b/doc/ci/pipelines/img/pipelines_test_coverage_build.png
Binary files differ
diff --git a/doc/user/project/pipelines/img/pipelines_test_coverage_mr_widget.png b/doc/ci/pipelines/img/pipelines_test_coverage_mr_widget.png
index fbcd612f3f2..fbcd612f3f2 100644
--- a/doc/user/project/pipelines/img/pipelines_test_coverage_mr_widget.png
+++ b/doc/ci/pipelines/img/pipelines_test_coverage_mr_widget.png
Binary files differ
diff --git a/doc/ci/pipelines/index.md b/doc/ci/pipelines/index.md
new file mode 100644
index 00000000000..092e7729ad3
--- /dev/null
+++ b/doc/ci/pipelines/index.md
@@ -0,0 +1,567 @@
+---
+disqus_identifier: 'https://docs.gitlab.com/ee/ci/pipelines.html'
+type: reference
+---
+
+# Creating and using CI/CD pipelines
+
+> Introduced in GitLab 8.8.
+
+NOTE: **Tip:**
+Watch our
+["Mastering continuous software development"](https://about.gitlab.com/webcast/mastering-ci-cd/)
+webcast to see a comprehensive demo of a GitLab CI/CD pipeline.
+
+## Introduction
+
+Pipelines are the top-level component of continuous integration, delivery, and deployment.
+
+Pipelines comprise:
+
+- Jobs that define what to run. For example, code compilation or test runs.
+- Stages that define when and how to run. For example, that tests run only after code compilation.
+
+Multiple jobs in the same stage are executed in parallel by [Runners](../runners/README.md), if there are enough concurrent Runners.
+
+If all the jobs in a stage:
+
+- Succeed, the pipeline moves on to the next stage.
+- Fail, the next stage is not (usually) executed and the pipeline ends early.
+
+NOTE: **Note:**
+If you have a [mirrored repository that GitLab pulls from](../../user/project/repository/repository_mirroring.md#pulling-from-a-remote-repository-starter),
+you may need to enable pipeline triggering in your project's
+**Settings > Repository > Pull from a remote repository > Trigger pipelines for mirror updates**.
+
+### Simple pipeline example
+
+As an example, imagine a pipeline consisting of four stages, executed in the following order:
+
+- `build`, with a job called `compile`.
+- `test`, with two jobs called `test` and `test2`.
+- `staging`, with a job called `deploy-to-stage`.
+- `production`, with a job called `deploy-to-prod`.
+
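+A minimal `.gitlab-ci.yml` sketch matching this example could look like the following (the `echo` commands are placeholders for real build, test, and deployment scripts):
+
+```yaml
+stages:
+  - build
+  - test
+  - staging
+  - production
+
+compile:
+  stage: build
+  script:
+    - echo "Compiling the code..."
+
+test:
+  stage: test
+  script:
+    - echo "Running the first test suite..."
+
+test2:
+  stage: test
+  script:
+    - echo "Running the second test suite..."
+
+deploy-to-stage:
+  stage: staging
+  script:
+    - echo "Deploying to the staging environment..."
+
+deploy-to-prod:
+  stage: production
+  script:
+    - echo "Deploying to production..."
+```
+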
+## Visualizing pipelines
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/5742) in GitLab 8.11.
+
+Pipelines can be complex structures with many sequential and parallel jobs.
+
+To make it easier to understand the flow of a pipeline, GitLab has pipeline graphs for viewing pipelines
+and their statuses.
+
+Pipeline graphs can be displayed in two different ways, depending on the page you
+access the graph from.
+
+NOTE: **Note:**
+GitLab capitalizes the stages' names when shown in the pipeline graphs (below).
+
+### Regular pipeline graphs
+
+Regular pipeline graphs show the names of the jobs of each stage. Regular pipeline graphs can
+be found when you are on a [single pipeline page](#accessing-pipelines). For example:
+
+![Pipelines example](img/pipelines.png)
+
+### Pipeline mini graphs
+
+Pipeline mini graphs take less space and can tell you at a
+quick glance if all jobs passed or something failed. The pipeline mini graph can
+be found when you navigate to:
+
+- The pipelines index page.
+- A single commit page.
+- A merge request page.
+
+Pipeline mini graphs allow you to see all related jobs for a single commit and the net result
+of each stage of your pipeline. This allows you to quickly see what failed and
+fix it.
+
+Stages in pipeline mini graphs are collapsible. Hover your mouse over them and click to expand their jobs.
+
+| Mini graph | Mini graph expanded |
+|:-------------------------------------------------------------|:---------------------------------------------------------------|
+| ![Pipelines mini graph](img/pipelines_mini_graph_simple.png) | ![Pipelines mini graph extended](img/pipelines_mini_graph.png) |
+
+### Job ordering in pipeline graphs
+
+Job ordering depends on the type of pipeline graph. For [regular pipeline graphs](#regular-pipeline-graphs), jobs are sorted by name.
+
+For [pipeline mini graphs](#pipeline-mini-graphs) ([introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/9760)
+in GitLab 9.0), jobs are sorted by severity and then by name.
+
+The order of severity is:
+
+- failed
+- warning
+- pending
+- running
+- manual
+- scheduled
+- canceled
+- success
+- skipped
+- created
+
+For example:
+
+![Pipeline mini graph sorting](img/pipelines_mini_graph_sorting.png)
+
+### Expanding and collapsing job log sections
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/14664) in GitLab
+> 12.0.
+
+Job logs are divided into sections that can be collapsed or expanded. Each section displays
+its duration.
+
+In the following example:
+
+- Two sections are collapsed and can be expanded.
+- Three sections are expanded and can be collapsed.
+
+![Collapsible sections](img/collapsible_log_v12_6.png)
+
+#### Custom collapsible sections
+
+You can create collapsible sections in job logs by manually outputting special codes
+that GitLab will use to determine what sections to collapse:
+
+- Section start marker: `section_start:UNIX_TIMESTAMP:SECTION_NAME\r\e[0K` + `TEXT_OF_SECTION_HEADER`
+- Section end marker: `section_end:UNIX_TIMESTAMP:SECTION_NAME\r\e[0K`
+
+You must add these codes to the script section of the CI configuration. For example,
+using `echo`:
+
+```yaml
+job1:
+ script:
+ - echo -e "section_start:`date +%s`:my_first_section\r\e[0KHeader of the 1st collapsible section"
+ - echo 'this line should be hidden when collapsed'
+ - echo -e "section_end:`date +%s`:my_first_section\r\e[0K"
+```
+
+In the example above:
+
+- `date +%s`: The Unix timestamp (for example `1560896352`).
+- `my_first_section`: The name given to the section.
+- `\r\e[0K`: Prevents the section markers from displaying in the rendered (colored)
+ job log, but they are displayed in the raw job log. To see them, in the top right
+ of the job log, click **{doc-text}** (**Show complete raw**).
+ - `\r`: carriage return.
+ - `\e[0K`: clear line ANSI escape code.
+
+Sample raw job log:
+
+```plaintext
+section_start:1560896352:my_first_section\r\e[0KHeader of the 1st collapsible section
+this line should be hidden when collapsed
+section_end:1560896353:my_first_section\r\e[0K
+```
+
+### Pipeline success and duration charts
+
+> - Introduced in GitLab 3.1.1 as Commit Stats, and later renamed to Pipeline Charts.
+> - [Renamed](https://gitlab.com/gitlab-org/gitlab/issues/38318) to CI / CD Analytics in GitLab 12.8.
+
+GitLab tracks the history of your pipeline successes and failures, as well as how long each pipeline ran. To view this information, go to **Analytics > CI / CD Analytics**.
+
+View successful pipelines:
+
+![Successful pipelines](img/pipelines_success_chart.png)
+
+View pipeline duration history:
+
+![Pipeline duration](img/pipelines_duration_chart.png)
+
+## Pipeline quotas
+
+Each user has a personal pipeline quota that tracks the usage of shared runners in all personal projects.
+Each group has a [usage quota](../../subscriptions/index.md#ci-pipeline-minutes) that tracks the usage of shared runners for all projects created within the group.
+
+When a pipeline is triggered, regardless of who triggered it, the pipeline quota for the project owner's [namespace](../../user/group/index.md#namespaces) is used. In this case, the namespace can be the user or group that owns the project.
+
+### How pipeline duration is calculated
+
+Total running time for a given pipeline excludes retries and pending
+(queued) time.
+
+Each job is represented as a `Period`, which consists of:
+
+- `Period#first` (when the job started).
+- `Period#last` (when the job finished).
+
+A simple example is:
+
+- A (1, 3)
+- B (2, 4)
+- C (6, 7)
+
+In the example:
+
+- A begins at 1 and ends at 3.
+- B begins at 2 and ends at 4.
+- C begins at 6 and ends at 7.
+
+Visually, it can be viewed as:
+
+```text
+0 1 2 3 4 5 6 7
+ AAAAAAA
+ BBBBBBB
+ CCCC
+```
+
+The union of A, B, and C is (1, 4) and (6, 7). Therefore, the total running time is:
+
+```text
+(4 - 1) + (7 - 6) => 4
+```
+
+## Configuring pipelines
+
+Pipelines, and their component jobs and stages, are defined in the [`.gitlab-ci.yml`](../yaml/README.md) file for each project.
+
+In particular:
+
+- Jobs are the [basic configuration](../yaml/README.md#introduction) component.
+- Stages are defined using the [`stages`](../yaml/README.md#stages) keyword.
+
+For all available configuration options, see the [GitLab CI/CD Pipeline Configuration Reference](../yaml/README.md).
+
+### Settings and schedules
+
+In addition to configuring jobs through `.gitlab-ci.yml`, further configuration options are available
+through the GitLab UI:
+
+- Pipeline settings for each project. For more information, see [Pipeline settings](settings.md).
+- Schedules for pipelines. For more information, see [Pipeline schedules](schedules.md).
+
+### Grouping jobs
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/6242) in GitLab 8.12.
+
+If you have many similar jobs, your [pipeline graph](#visualizing-pipelines) becomes long and hard
+to read.
+
+For that reason, similar jobs can automatically be grouped together.
+If the job names are formatted in certain ways, they will be collapsed into
+a single group in regular pipeline graphs (not the mini graphs).
+
+You'll know when a pipeline has grouped jobs if you don't see the retry or
+cancel button inside them. Hovering over them will show the number of grouped
+jobs. Click to expand them.
+
+![Grouped pipelines](img/pipelines_grouped.png)
+
+#### Configuring grouping
+
+In the pipeline [configuration file](../yaml/README.md), job names must include two numbers separated with one of
+the following (you can even use them interchangeably):
+
+- A space.
+- A slash (`/`).
+- A colon (`:`).
+
+NOTE: **Note:**
+More specifically, GitLab uses [this regular expression](https://gitlab.com/gitlab-org/gitlab/blob/2f3dc314f42dbd79813e6251792853bc231e69dd/app/models/commit_status.rb#L99): `\d+[\s:\/\\]+\d+\s*`.
+
+#### How grouping works
+
+The jobs will be ordered by comparing those two numbers from left to right. You
+usually want the first to be the index and the second the total.
+
+For example, the following jobs will be grouped under a job named `test`:
+
+- `test 0 3`
+- `test 1 3`
+- `test 2 3`
+
+The following jobs will be grouped under a job named `test ruby`:
+
+- `test 1:2 ruby`
+- `test 2:2 ruby`
+
+The following jobs will be grouped under a job named `test ruby` as well:
+
+- `1/3 test ruby`
+- `2/3 test ruby`
+- `3/3 test ruby`
+
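+As a sketch, the last grouping above could come from a configuration like this (the `echo` commands are placeholders):
+
+```yaml
+1/3 test ruby:
+  stage: test
+  script:
+    - echo "Running the first third of the Ruby tests..."
+
+2/3 test ruby:
+  stage: test
+  script:
+    - echo "Running the second third of the Ruby tests..."
+
+3/3 test ruby:
+  stage: test
+  script:
+    - echo "Running the final third of the Ruby tests..."
+```
+
+In the regular pipeline graph, these three jobs appear as a single group named `test ruby`.
+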
+### Pipelines for merge requests
+
+GitLab supports configuring pipelines that run only for merge requests. For more information, see
+[Pipelines for merge requests](../merge_request_pipelines/index.md).
+
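+For example, a minimal sketch of a job restricted to merge request pipelines, using the `only` keyword (the command is a placeholder):
+
+```yaml
+test:
+  stage: test
+  script:
+    - echo "Running tests for the merge request..."
+  only:
+    - merge_requests
+```
+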
+### Badges
+
+Pipeline status and test coverage report badges are available and configurable for each project.
+
+For information on adding pipeline badges to projects, see [Pipeline badges](settings.md#pipeline-badges).
+
+## Multi-project pipelines
+
+Pipelines for different projects can be combined together into [Multi-project pipelines](../multi_project_pipelines.md).
+
+[Multi-project pipeline graphs](../multi_project_pipelines.md#multi-project-pipeline-visualization-premium) help
+you visualize the entire pipeline, including all cross-project inter-dependencies. **(PREMIUM)**
+
+## Parent-child pipelines
+
+Complex pipelines can be broken down into one parent pipeline that can trigger
+multiple child sub-pipelines, which all run in the same project and with the same SHA.
+
+For more information, see [Parent-Child pipelines](../parent_child_pipelines.md).
+
+## Working with pipelines
+
+In general, pipelines are executed automatically and require no intervention once created.
+
+However, there are instances where you'll need to interact with pipelines. These are documented below.
+
+### Manually executing pipelines
+
+Pipelines can be manually executed, with predefined or manually-specified [variables](../variables/README.md).
+
+You might do this if the results of a pipeline (for example, a code build) are required outside the normal
+operation of the pipeline.
+
+To execute a pipeline manually:
+
+1. Navigate to your project's **CI/CD > Pipelines**.
+1. Click on the **Run Pipeline** button.
+1. On the **Run Pipeline** page:
+ 1. Select the branch to run the pipeline for in the **Create for** field.
+ 1. Enter any [environment variables](../variables/README.md) required for the pipeline run.
+ 1. Click the **Create pipeline** button.
+
+The pipeline will execute the jobs as configured.
+
+#### Using a query string
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/24146) in GitLab 12.5.
+
+Variables on the **Run Pipeline** page can be pre-populated by passing variable keys and values
+in a query string appended to the `pipelines/new` URL. The format is:
+
+```plaintext
+.../pipelines/new?ref=<branch>&var[<variable_key>]=<value>&file_var[<file_key>]=<value>
+```
+
+The following parameters are supported:
+
+- `ref`: specify the branch to populate the **Run for** field with.
+- `var`: specify a `Variable` variable.
+- `file_var`: specify a `File` variable.
+
+For each `var` or `file_var`, a key and value are required.
+
+For example, the query string
+`.../pipelines/new?ref=my_branch&var[foo]=bar&file_var[file_foo]=file_bar` will pre-populate the
+**Run Pipeline** page as follows:
+
+- **Run for** field: `my_branch`.
+- **Variables** section:
+ - Variable:
+ - Key: `foo`
+ - Value: `bar`
+ - File:
+ - Key: `file_foo`
+ - Value: `file_bar`
+
+### Accessing pipelines
+
+You can find the current and historical pipeline runs under your project's
+**CI/CD > Pipelines** page. You can also access pipelines for a merge request by navigating
+to its **Pipelines** tab.
+
+![Pipelines index page](img/pipelines_index.png)
+
+Clicking on a pipeline will bring you to the **Pipeline Details** page and show
+the jobs that were run for that pipeline. From here you can cancel a running pipeline,
+retry jobs on a failed pipeline, or [delete a pipeline](#deleting-a-single-pipeline).
+
+### Accessing individual jobs
+
+When you access a pipeline, you can see the related jobs for that pipeline.
+
+Clicking on an individual job will show you its job log, and allow you to:
+
+- Cancel the job.
+- Retry the job.
+- Erase the job log.
+
+### Seeing the failure reason for jobs
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/17782) in GitLab 10.7.
+
+When a pipeline fails or is allowed to fail, there are several places where you
+can quickly check the reason it failed:
+
+- In the pipeline graph, on the pipeline detail view.
+- In the pipeline widgets, in the merge requests and commit pages.
+- In the job views, in the global and detailed views of a job.
+
+In each place, if you hover over the failed job you can see the reason it failed.
+
+![Pipeline detail](img/job_failure_reason.png)
+
+From [GitLab 10.8](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/17814),
+you can also see the reason it failed on the Job detail page.
+
+### Manual actions from pipeline graphs
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/7931) in GitLab 8.15.
+
+Manual actions, configured using the [`when:manual`](../yaml/README.md#whenmanual) parameter,
+allow you to require manual interaction before moving forward in the pipeline.
+
+You can do this straight from the pipeline graph. Just click on the play button
+to execute that particular job.
+
+For example, your pipeline can start automatically, but require a manual action to
+[deploy to production](../environments.md#configuring-manual-deployments). In the example below, the `production`
+stage has a job with a manual action.
+
+![Pipelines example](img/pipelines.png)
+
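+As a sketch, the manual `production` job from such a pipeline could be configured with [`when: manual`](../yaml/README.md#whenmanual) (assuming a `production` stage is defined in `stages`):
+
+```yaml
+deploy-to-prod:
+  stage: production
+  script:
+    - echo "Deploying to production..."
+  when: manual
+```
+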
+### Specifying variables when running manual jobs
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/30485) in GitLab 12.2.
+
+When running manual jobs, you can supply additional job-specific variables.
+
+You can do this from the job page of the manual job you want to run with
+additional variables.
+
+This is useful when you want to alter the execution of a job by using
+environment variables.
+
+![Manual job variables](img/manual_job_variables.png)
+
+### Delay a job in a pipeline graph
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/21767) in GitLab 11.4.
+
+When you do not want to run a job immediately, you can use the [`when:delayed`](../yaml/README.md#whendelayed) parameter to
+delay a job's execution for a certain period.
+
+This is especially useful for timed incremental rollouts, where new code is rolled out gradually.
+
+For example, if you start rolling out new code and:
+
+- Users do not experience trouble, GitLab can automatically complete the deployment from 0% to 100%.
+- Users experience trouble with the new code, you can stop the timed incremental rollout by canceling the pipeline
+ and [rolling](../environments.md#retrying-and-rolling-back) back to the last stable version.
+
+![Pipelines example](img/pipeline_incremental_rollout.png)
+
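+As a sketch, a delayed rollout job could combine [`when: delayed`](../yaml/README.md#whendelayed) with `start_in` (the job name and command are placeholders):
+
+```yaml
+timed rollout 10%:
+  stage: deploy
+  script:
+    - echo "Rolling out to 10% of production..."
+  when: delayed
+  start_in: 30 minutes
+```
+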
+### Using the API
+
+GitLab provides API endpoints to:
+
+- Perform basic functions. For more information, see [Pipelines API](../../api/pipelines.md).
+- Maintain pipeline schedules. For more information, see [Pipeline schedules API](../../api/pipeline_schedules.md).
+- Trigger pipeline runs. For more information, see:
+ - [Triggering pipelines through the API](../triggers/README.md).
+ - [Pipeline triggers API](../../api/pipeline_triggers.md).
+
+### Start multiple manual actions in a stage
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/27188) in GitLab 11.11.
+
+Multiple manual actions in a single stage can be started at the same time using the **Play all manual** button.
+Once you click this button, each individual manual action is triggered and its
+status is refreshed.
+
+This functionality is only available:
+
+- For users with at least Developer access.
+- If the stage contains [manual actions](#manual-actions-from-pipeline-graphs).
+
+### Deleting a single pipeline
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/24851) in GitLab 12.7.
+
+Users with [owner permissions](../../user/permissions.md) in a project can delete a pipeline
+by clicking on the pipeline in the **CI/CD > Pipelines** page to get to the **Pipeline Details**
+page, then clicking the **Delete** button.
+
+![Pipeline Delete Button](img/pipeline-delete.png)
+
+CAUTION: **Warning:**
+Deleting a pipeline will expire all pipeline caches, and delete all related objects,
+such as builds, logs, artifacts, and triggers. **This action cannot be undone.**
+
+## Most Recent Pipeline
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/50499) in GitLab 12.3.
+
+There's a link to the latest pipeline for the last commit of a given branch at `/project/pipelines/[branch]/latest`. Also, `/project/pipelines/latest` will redirect you to the latest pipeline for the last commit on the project's default branch.
+
+## Security on protected branches
+
+A strict security model is enforced when pipelines are executed on
+[protected branches](../../user/project/protected_branches.md).
+
+The following actions are allowed on protected branches only if the user is
+[allowed to merge or push](../../user/project/protected_branches.md#using-the-allowed-to-merge-and-allowed-to-push-settings)
+on that specific branch:
+
+- Run manual pipelines (using the [Web UI](#manually-executing-pipelines) or pipelines API).
+- Run scheduled pipelines.
+- Run pipelines using triggers.
+- Trigger manual actions on existing pipelines.
+- Retry or cancel existing jobs (using the Web UI or pipelines API).
+
+**Variables** marked as **protected** are accessible only to jobs that
+run on protected branches, preventing untrusted users from getting unintended access to
+sensitive information like deployment credentials and tokens.
+
+**Runners** marked as **protected** can run jobs only on protected
+branches, preventing untrusted code from being executed on the protected runner and
+keeping deployment keys and other credentials from being unintentionally
+accessed. To ensure that jobs intended to be executed on protected
+runners do not use regular runners, they must be tagged accordingly.
+
+## Persistent pipeline refs
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/17043) in GitLab 12.4.
+
+Previously, you might have encountered unexpected pipeline failures after force-pushing
+a branch to its remote repository. To illustrate the problem, suppose you have the following workflow:
+
+1. A user creates a feature branch named `example` and pushes it to a remote repository.
+1. A new pipeline starts running on the `example` branch.
+1. A user rebases the `example` branch on the latest `master` branch and force-pushes it to its remote repository.
+1. A new pipeline starts running on the `example` branch again, however,
+   the previous pipeline (2) fails with a `fatal: reference is not a tree:` error.
+
+This happens because the previous pipeline cannot find the checkout SHA (which is associated with the pipeline record)
+in the `example` branch, since its commit history was overwritten by the force-push.
+Similarly, [Pipelines for merged results](../merge_request_pipelines/pipelines_for_merged_results/index.md)
+might have failed intermittently due to [the same reason](../merge_request_pipelines/pipelines_for_merged_results/index.md#intermittently-pipelines-fail-by-fatal-reference-is-not-a-tree-error).
+
+As of GitLab 12.4, this behavior is improved by persisting pipeline refs.
+To illustrate its life cycle:
+
+1. A pipeline is created on a feature branch named `example`.
+1. A persistent pipeline ref is created at `refs/pipelines/<pipeline-id>`,
+ which retains the checkout-SHA of the associated pipeline record.
+ This persistent ref stays intact during the pipeline execution,
+ even if the commit history of the `example` branch has been overwritten by force-push.
+1. GitLab Runner fetches the persistent pipeline ref and gets source code from the checkout-SHA.
+1. When the pipeline finishes, its persistent ref is cleaned up in a background process.
+
+NOTE: **Note:**
+At the moment, this feature is on by default and can be manually disabled
+by disabling the `depend_on_persistent_pipeline_ref` feature flag. To disable
+this behavior, ask an administrator
+to execute the following commands in a Rails console.
+
+```shell
+> sudo gitlab-rails console # Login to Rails console of GitLab instance.
+> project = Project.find_by_full_path('namespace/project-name') # Get the project instance.
+> Feature.disable(:depend_on_persistent_pipeline_ref, project) # Disable the feature flag for specific project
+> Feature.disable(:depend_on_persistent_pipeline_ref) # Disable the feature flag system-wide
+```
diff --git a/doc/ci/pipelines/job_artifacts.md b/doc/ci/pipelines/job_artifacts.md
new file mode 100644
index 00000000000..4cc6c2aa098
--- /dev/null
+++ b/doc/ci/pipelines/job_artifacts.md
@@ -0,0 +1,215 @@
+---
+disqus_identifier: 'https://docs.gitlab.com/ee/user/project/pipelines/job_artifacts.html'
+type: reference, howto
+---
+
+# Introduction to job artifacts
+
+> - Introduced in GitLab 8.2 and GitLab Runner 0.7.0.
+> - Starting with GitLab 8.4 and GitLab Runner 1.0, the artifacts archive format changed to `ZIP`, and it is now possible to browse its contents, with the added ability of downloading the files separately.
+> - In GitLab 8.17, builds were renamed to jobs.
+> - The artifacts browser will be available only for new artifacts that are sent to GitLab using GitLab Runner version 1.0 and up. It will not be possible to browse old artifacts already uploaded to GitLab.
+
+Job artifacts are a list of files and directories created by a job
+once it finishes. This feature is [enabled by default](../../administration/job_artifacts.md) in all
+GitLab installations.
+
+Job artifacts created by GitLab Runner are uploaded to GitLab and are downloadable as a single archive using the GitLab UI or the [GitLab API](../../api/jobs.md#get-job-artifacts).
+
+<i class="fa fa-youtube-play youtube" aria-hidden="true"></i>
+For an overview, watch the video [GitLab CI Pipeline, Artifacts, and Environments](https://www.youtube.com/watch?v=PCKDICEe10s).
+Watch also [GitLab CI pipeline tutorial for beginners](https://www.youtube.com/watch?v=Jav4vbUrqII).
+
+## Defining artifacts in `.gitlab-ci.yml`
+
+A simple example of using the artifacts definition in `.gitlab-ci.yml` would be
+the following:
+
+```yaml
+pdf:
+ script: xelatex mycv.tex
+ artifacts:
+ paths:
+ - mycv.pdf
+ expire_in: 1 week
+```
+
+A job named `pdf` calls the `xelatex` command to build a PDF file from
+the LaTeX source file `mycv.tex`. The paths to the artifacts are then
+defined with the `paths` keyword. All paths to files and directories
+are relative to the repository that was cloned during the build.
+
+The artifacts will be uploaded when the job succeeds by default, but can be set to upload
+when the job fails, or always, if the [`artifacts:when`](../yaml/README.md#artifactswhen)
+parameter is used. These uploaded artifacts will be kept in GitLab for 1 week as defined
+by the `expire_in` definition. You have the option to keep the artifacts from expiring
+via the [web interface](#browsing-artifacts). If the expiry time is not defined, it defaults
+to the [instance wide setting](../../user/admin_area/settings/continuous_integration.md#default-artifacts-expiration-core-only).
+
+For more examples on artifacts, follow the [artifacts reference in
+`.gitlab-ci.yml`](../yaml/README.md#artifacts).
+
+## Browsing artifacts
+
+> - From GitLab 9.2, PDFs, images, videos and other formats can be previewed directly in the job artifacts browser without the need to download them.
+> - Introduced in [GitLab 10.1][ce-14399], HTML files in a public project can be previewed directly in a new tab without the need to download them when [GitLab Pages](../../administration/pages/index.md) is enabled. The same applies for textual formats (currently supported extensions: `.txt`, `.json`, and `.log`).
+> - Introduced in [GitLab 12.4][gitlab-16675], artifacts in private projects can be previewed when [GitLab Pages access control](../../administration/pages/index.md#access-control) is enabled.
+
+After a job finishes, if you visit the job's page, there are buttons to
+download the artifacts archive or browse its contents. A
+**Keep** button also appears if you have set an [expiry date] on the
+artifacts, in case you change your mind and want to keep them.
+
+![Job artifacts browser button](img/job_artifacts_browser_button.png)
+
+The archive browser shows the name and the actual file size of each file in the
+archive. If your artifacts contained directories, then you are also able to
+browse inside them.
+
+Below you can see what browsing looks like. In this case, we have browsed inside
+the archive and at this point there is one directory, a couple of files, and
+one HTML file that you can view directly online when
+[GitLab Pages](../../administration/pages/index.md) is enabled (opens in a new tab).
+
+![Job artifacts browser](img/job_artifacts_browser.png)
+
+## Downloading artifacts
+
+If you need to download the whole archive, there are buttons in various places
+in the GitLab UI to do this:
+
+1. While on the pipelines page, you can see the download icon for each job's
+ artifacts archive in the right corner:
+
+ ![Job artifacts in Pipelines page](img/job_artifacts_pipelines_page.png)
+
+1. While on the **Jobs** page, you can see the download icon for each job's
+ artifacts archive in the right corner:
+
+ ![Job artifacts in Builds page](img/job_artifacts_builds_page.png)
+
+1. While inside a specific job, you are presented with a download button
+ along with the one that browses the archive:
+
+ ![Job artifacts browser button](img/job_artifacts_browser_button.png)
+
+1. And finally, when browsing an archive you can see the download button at
+ the top right corner:
+
+ ![Job artifacts browser](img/job_artifacts_browser.png)
+
+## Downloading the latest artifacts
+
+It is possible to download the latest artifacts of a job via a well-known URL,
+so you can use it for scripting purposes.
+
+NOTE: **Note:**
+The latest artifacts are created by jobs in the **most recent** successful pipeline
+for the specific ref. If you run two types of pipelines for the same ref, the latest
+artifact will be determined by timing. For example, if a branch pipeline created
+by merging a merge request runs at the same time as a scheduled pipeline, the
+latest artifact will be from the pipeline that completed most recently.
+
+Artifacts from other pipelines can be accessed by browsing to them directly.
+
+The structure of the URL to download the whole artifacts archive is the following:
+
+```plaintext
+https://example.com/<namespace>/<project>/-/jobs/artifacts/<ref>/download?job=<job_name>
+```
+
+To download a single file from the artifacts use the following URL:
+
+```plaintext
+https://example.com/<namespace>/<project>/-/jobs/artifacts/<ref>/raw/<path_to_file>?job=<job_name>
+```
+
+For example, to download the latest artifacts of the job named `coverage` of
+the `master` branch of the `gitlab` project that belongs to the `gitlab-org`
+namespace, the URL would be:
+
+```plaintext
+https://gitlab.com/gitlab-org/gitlab/-/jobs/artifacts/master/download?job=coverage
+```
+
+To download the file `coverage/index.html` from the same
+artifacts use the following URL:
+
+```plaintext
+https://gitlab.com/gitlab-org/gitlab/-/jobs/artifacts/master/raw/coverage/index.html?job=coverage
+```
+
+There is also a URL to browse the latest job artifacts:
+
+```plaintext
+https://example.com/<namespace>/<project>/-/jobs/artifacts/<ref>/browse?job=<job_name>
+```
+
+For example:
+
+```plaintext
+https://gitlab.com/gitlab-org/gitlab/-/jobs/artifacts/master/browse?job=coverage
+```
+
+There is also a URL to specific files, including html files that
+are shown in [GitLab Pages](../../administration/pages/index.md):
+
+```plaintext
+https://example.com/<namespace>/<project>/-/jobs/artifacts/<ref>/file/<path>?job=<job_name>
+```
+
+For example, when a job `coverage` creates the artifact `htmlcov/index.html`,
+you can access it at:
+
+```plaintext
+https://gitlab.com/gitlab-org/gitlab/-/jobs/artifacts/master/file/htmlcov/index.html?job=coverage
+```
+
+The latest builds are also exposed in the UI in various places. Specifically,
+look for the download button in:
+
+- The main project's page
+- The branches page
+- The tags page
+
+If the latest job has failed to upload the artifacts, you can see that
+information in the UI.
+
+![Latest artifacts button](img/job_latest_artifacts_browser.png)
+
+## Erasing artifacts
+
+DANGER: **Warning:**
+This is a destructive action that leads to data loss. Use with caution.
+
+You can erase a single job via the UI, which will also remove the job's
+artifacts and trace, if you are:
+
+- The owner of the job.
+- A [Maintainer](../../user/permissions.md#gitlab-cicd-permissions) of the project.
+
+To erase a job:
+
+1. Navigate to a job's page.
+1. Click the trash icon at the top right of the job's trace.
+1. Confirm the deletion.
+
+## Retrieve artifacts of private projects when using GitLab CI
+
+To retrieve a job artifact from a different project, you might need to use a private token to [authenticate and download](../../api/jobs.md#get-job-artifacts) the artifacts.
+
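+A hedged sketch of such a job, assuming a CI variable named `PRIVATE_ACCESS_TOKEN` holds a token with access to the other project, and `<project-id>` is the numeric ID of that project:
+
+```yaml
+fetch_upstream_artifacts:
+  stage: build
+  script:
+    # Download the latest artifacts of the other project's `coverage` job on `master`
+    # through the jobs API, authenticating with the (assumed) PRIVATE_ACCESS_TOKEN variable.
+    - 'curl --location --output artifacts.zip --header "PRIVATE-TOKEN: ${PRIVATE_ACCESS_TOKEN}" "https://gitlab.example.com/api/v4/projects/<project-id>/jobs/artifacts/master/download?job=coverage"'
+```
+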
+[expiry date]: ../yaml/README.md#artifactsexpire_in
+[ce-14399]: https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/14399
+[gitlab-16675]: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/16675
+
+<!-- ## Troubleshooting
+
+Include any troubleshooting steps that you can foresee. If you know beforehand what issues
+one might have when setting this up, or when something is changed, or on upgrading, it's
+important to describe those, too. Think of things that may go wrong and include them here.
+This is important to minimize requests for support, and to avoid doc comments with
+questions that you know someone might ask.
+
+Each scenario can be a third-level heading, e.g. `### Getting error message X`.
+If you have none to add when creating a doc, leave this section in place
+but commented out to help encourage others to add to it in the future. -->
diff --git a/doc/ci/pipelines/pipeline_architectures.md b/doc/ci/pipelines/pipeline_architectures.md
new file mode 100644
index 00000000000..803d0130cf0
--- /dev/null
+++ b/doc/ci/pipelines/pipeline_architectures.md
@@ -0,0 +1,269 @@
+---
+type: reference
+---
+
+# Pipeline Architecture
+
+Pipelines are the fundamental building blocks for CI/CD in GitLab. This page documents
+some of the important concepts related to them.
+
+There are three main ways to structure your pipelines, each with their
+own advantages. These methods can be mixed and matched if needed:
+
+- [Basic](#basic-pipelines): Good for straightforward projects where all the configuration is in one easy to find place.
+- [Directed Acyclic Graph](#directed-acyclic-graph-pipelines): Good for large, complex projects that need efficient execution.
+- [Child/Parent Pipelines](#child--parent-pipelines): Good for monorepos and projects with lots of independently defined components.
+
+For more details about
+any of the keywords used below, check out our [CI YAML reference](../yaml/README.md).
+
+## Basic Pipelines
+
+This is the simplest pipeline in GitLab. It will run everything in the build stage concurrently,
+and once all of those finish, it will run everything in the test stage the same way, and so on.
+It's not the most efficient, and if you have lots of steps it can grow quite complex, but it's
+easier to maintain:
+
+```mermaid
+graph LR
+ subgraph deploy stage
+ deploy --> deploy_a
+ deploy --> deploy_b
+ end
+ subgraph test stage
+ test --> test_a
+ test --> test_b
+ end
+ subgraph build stage
+ build --> build_a
+ build --> build_b
+ end
+ build_a -.-> test
+ build_b -.-> test
+ test_a -.-> deploy
+ test_b -.-> deploy
+```
+
+Example basic `/.gitlab-ci.yml` pipeline configuration matching the diagram:
+
+```yaml
+stages:
+ - build
+ - test
+ - deploy
+
+image: alpine
+
+build_a:
+ stage: build
+ script:
+ - echo "This job builds something."
+
+build_b:
+ stage: build
+ script:
+ - echo "This job builds something else."
+
+test_a:
+ stage: test
+ script:
+ - echo "This job tests something. It will only run when all jobs in the"
+ - echo "build stage are complete."
+
+test_b:
+ stage: test
+ script:
+ - echo "This job tests something else. It will only run when all jobs in the"
+ - echo "build stage are complete too. It will start at about the same time as test_a."
+
+deploy_a:
+ stage: deploy
+ script:
+ - echo "This job deploys something. It will only run when all jobs in the"
+ - echo "test stage complete."
+
+deploy_b:
+ stage: deploy
+ script:
+ - echo "This job deploys something else. It will only run when all jobs in the"
+ - echo "test stage complete. It will start at about the same time as deploy_a."
+```
+
+## Directed Acyclic Graph Pipelines
+
+If efficiency is important to you and you want everything to run as quickly as possible,
+you can use [Directed Acyclic Graphs (DAG)](../directed_acyclic_graph/index.md). Use the
+[`needs` keyword](../yaml/README.md#needs) to define dependency relationships between
+your jobs. When GitLab knows the relationships between your jobs, it can run everything
+as fast as possible, even skipping ahead into subsequent stages when possible.
+
+In the example below, if `build_a` and `test_a` are much faster than `build_b` and
+`test_b`, GitLab will start `deploy_a` even if `build_b` is still running.
+
+```mermaid
+graph LR
+ subgraph Pipeline using DAG
+ build_a --> test_a --> deploy_a
+ build_b --> test_b --> deploy_b
+ end
+```
+
+Example DAG `/.gitlab-ci.yml` configuration matching the diagram:
+
+```yaml
+stages:
+ - build
+ - test
+ - deploy
+
+image: alpine
+
+build_a:
+ stage: build
+ script:
+ - echo "This job builds something quickly."
+
+build_b:
+ stage: build
+ script:
+ - echo "This job builds something else slowly."
+
+test_a:
+ stage: test
+ needs: build_a
+ script:
+ - echo "This test job will start as soon as build_a finishes."
+ - echo "It will not wait for build_b, or other jobs in the build stage, to finish."
+
+test_b:
+ stage: test
+ needs: build_b
+ script:
+ - echo "This test job will start as soon as build_b finishes."
+ - echo "It will not wait for other jobs in the build stage to finish."
+
+deploy_a:
+ stage: deploy
+ needs: test_a
+ script:
+ - echo "Since build_a and test_a run quickly, this deploy job can run much earlier."
+ - echo "It does not need to wait for build_b or test_b."
+
+deploy_b:
+ stage: deploy
+ needs: test_b
+ script:
+ - echo "Since build_b and test_b run slowly, this deploy job will run much later."
+```
+
+## Child / Parent Pipelines
+
+In the examples above, it's clear we've got two types of things that could be built independently.
+This is an ideal case for using [Child / Parent Pipelines](../parent_child_pipelines.md) via
+the [`trigger` keyword](../yaml/README.md#trigger). It separates the configuration
+into multiple files, keeping things very simple. You can also combine this with:
+
+- The [`rules` keyword](../yaml/README.md#rules): For example, have the child pipelines triggered only
+ when there are changes to that area.
+- The [`include` keyword](../yaml/README.md#include): Bring in common behaviors, ensuring
+ you are not repeating yourself.
+- [DAG pipelines](#directed-acyclic-graph-pipelines) inside of child pipelines, achieving the benefits of both.
+
+```mermaid
+graph LR
+ subgraph Parent pipeline
+ trigger_a -.-> build_a
+ trigger_b -.-> build_b
+ subgraph child pipeline B
+ build_b --> test_b --> deploy_b
+ end
+
+ subgraph child pipeline A
+ build_a --> test_a --> deploy_a
+ end
+ end
+```
+
+Example `/.gitlab-ci.yml` configuration for the parent pipeline matching the diagram:
+
+```yaml
+stages:
+ - triggers
+
+trigger_a:
+ stage: triggers
+ trigger:
+ include: a/.gitlab-ci.yml
+ rules:
+ - changes:
+ - a/*
+
+trigger_b:
+ stage: triggers
+ trigger:
+ include: b/.gitlab-ci.yml
+ rules:
+ - changes:
+ - b/*
+```
+
+Example child `a` pipeline configuration, located in `/a/.gitlab-ci.yml`, making
+use of the DAG `needs:` keyword:
+
+```yaml
+stages:
+ - build
+ - test
+ - deploy
+
+image: alpine
+
+build_a:
+ stage: build
+ script:
+ - echo "This job builds something."
+
+test_a:
+ stage: test
+ needs: build_a
+ script:
+ - echo "This job tests something."
+
+deploy_a:
+ stage: deploy
+ needs: test_a
+ script:
+ - echo "This job deploys something."
+```
+
+Example child `b` pipeline configuration, located in `/b/.gitlab-ci.yml`, making
+use of the DAG `needs:` keyword:
+
+```yaml
+stages:
+ - build
+ - test
+ - deploy
+
+image: alpine
+
+build_b:
+ stage: build
+ script:
+ - echo "This job builds something else."
+
+test_b:
+ stage: test
+ needs: build_b
+ script:
+ - echo "This job tests something else."
+
+deploy_b:
+ stage: deploy
+ needs: test_b
+ script:
+ - echo "This job deploys something else."
+```
+
+It's also possible to set jobs to run before or after triggering child pipelines,
+for example if you have common setup steps or a unified deployment at the end.
diff --git a/doc/ci/pipelines/schedules.md b/doc/ci/pipelines/schedules.md
new file mode 100644
index 00000000000..b9a2972dc89
--- /dev/null
+++ b/doc/ci/pipelines/schedules.md
@@ -0,0 +1,142 @@
+---
+disqus_identifier: 'https://docs.gitlab.com/ee/user/project/pipelines/schedules.html'
+type: reference, howto
+---
+
+# Pipeline schedules
+
+> - Introduced in GitLab 9.1 as [Trigger Schedule](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/10533).
+> - [Renamed to Pipeline Schedule](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/10853) in GitLab 9.2.
+
+NOTE: **Note:**
+Cron notation is parsed by [Fugit](https://github.com/floraison/fugit).
+
+Pipelines are normally run based on certain conditions being met. For example, when a branch is pushed to the repository.
+
+Pipeline schedules can be used to also run [pipelines](index.md) at specific intervals. For example:
+
+- Every month on the 22nd for a certain branch.
+- Once every day.
+
+In addition to using the GitLab UI, pipeline schedules can be maintained using the
+[Pipeline schedules API](../../api/pipeline_schedules.md).
+
+## Configuring pipeline schedules
+
+To schedule a pipeline for a project:
+
+1. Navigate to the project's **CI / CD > Schedules** page.
+1. Click the **New schedule** button.
+1. Fill in the **Schedule a new pipeline** form.
+1. Click the **Save pipeline schedule** button.
+
+![New Schedule Form](img/pipeline_schedules_new_form.png)
+
+NOTE: **Note:**
+Pipeline execution [timing depends](#advanced-configuration) on Sidekiq's own schedule.
+
+On the **Schedules** index page, you can see a list of the pipelines that are
+scheduled to run. The next run is automatically calculated by the server that GitLab
+is installed on.
+
+![Schedules list](img/pipeline_schedules_list.png)
+
+### Using variables
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/12328) in GitLab 9.4.
+
+You can pass any number of arbitrary variables and they will be available in
+GitLab CI so that they can be used in your [`.gitlab-ci.yml` file](../../ci/yaml/README.md).
+
+![Scheduled pipeline variables](img/pipeline_schedule_variables.png)
+
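+For example, if a schedule defines a variable named `SCHEDULED_DEPLOY_TARGET` (a hypothetical name), a job could reference it like any other CI variable:
+
+```yaml
+deploy:
+  script:
+    # SCHEDULED_DEPLOY_TARGET is assumed to be defined in the pipeline schedule's variables.
+    - echo "Deploying to ${SCHEDULED_DEPLOY_TARGET}..."
+```
+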
+### Using only and except
+
+To configure a job to be executed only when the pipeline has been
+scheduled (or the opposite), you can use the
+[only and except](../yaml/README.md#onlyexcept-basic) configuration keywords.
+
+For example:
+
+```yaml
+job:on-schedule:
+ only:
+ - schedules
+ script:
+ - make world
+
+job:
+ except:
+ - schedules
+ script:
+ - make build
+```
+
+### Advanced configuration
+
+The pipelines won't be executed exactly on schedule because schedules are handled by
+Sidekiq, which runs according to its interval.
+
+For example, only two pipelines will be created per day if:
+
+- You set a schedule to create a pipeline every minute (`* * * * *`).
+- The Sidekiq worker runs on 00:00 and 12:00 every day (`0 */12 * * *`).
+
+To change the Sidekiq worker's frequency:
+
+1. Edit the `gitlab_rails['pipeline_schedule_worker_cron']` value in your instance's `gitlab.rb` file.
+1. [Reconfigure GitLab](../../administration/restart_gitlab.md#omnibus-gitlab-reconfigure) for the changes to take effect.
+
+For GitLab.com, refer to the [dedicated settings page](../../user/gitlab_com/index.md#gitlab-cicd).
+
+## Working with scheduled pipelines
+
+Once configured, GitLab supports many functions for working with scheduled pipelines.
+
+### Running manually
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/15700) in GitLab 10.4.
+
+To trigger a pipeline schedule manually, click the "Play" button:
+
+![Play Pipeline Schedule](img/pipeline_schedule_play.png)
+
+This will schedule a background job to run the pipeline schedule. A flash
+message will provide a link to the CI/CD Pipeline index page.
+
+NOTE: **Note:**
+To help avoid abuse, users are rate limited to triggering a pipeline once per
+minute.
+
+### Taking ownership
+
+Pipelines are executed as the user who owns the schedule. This influences which projects and other resources the pipeline has access to.
+
+If you do not own a schedule, you can take ownership of it by clicking the **Take ownership** button.
+The next time the pipeline is scheduled, your credentials will be used.
+
+![Schedules list](img/pipeline_schedules_ownership.png)
+
+If the owner of a pipeline schedule doesn't have the ability to create
+pipelines on the target branch, the schedule will stop creating new
+pipelines.
+
+This can happen if, for example:
+
+- The owner is blocked or removed from the project.
+- The target branch or tag is protected.
+
+In this case, someone with sufficient privileges must take ownership of the
+schedule.
+
+<!-- ## Troubleshooting
+
+Include any troubleshooting steps that you can foresee. If you know beforehand what issues
+one might have when setting this up, or when something is changed, or on upgrading, it's
+important to describe those, too. Think of things that may go wrong and include them here.
+This is important to minimize requests for support, and to avoid doc comments with
+questions that you know someone might ask.
+
+Each scenario can be a third-level heading, e.g. `### Getting error message X`.
+If you have none to add when creating a doc, leave this section in place
+but commented out to help encourage others to add to it in the future. -->
diff --git a/doc/ci/pipelines/settings.md b/doc/ci/pipelines/settings.md
new file mode 100644
index 00000000000..13b8f4ee307
--- /dev/null
+++ b/doc/ci/pipelines/settings.md
@@ -0,0 +1,309 @@
+---
+disqus_identifier: 'https://docs.gitlab.com/ee/user/project/pipelines/settings.html'
+type: reference, howto
+---
+
+# Pipelines settings
+
+To reach the pipelines settings, navigate to your project's
+**Settings > CI/CD**.
+
+The following settings can be configured per project.
+
+<i class="fa fa-youtube-play youtube" aria-hidden="true"></i>
+For an overview, watch the video [GitLab CI Pipeline, Artifacts, and Environments](https://www.youtube.com/watch?v=PCKDICEe10s).
+Watch also [GitLab CI pipeline tutorial for beginners](https://www.youtube.com/watch?v=Jav4vbUrqII).
+
+## Git strategy
+
+With Git strategy, you can choose the default way your repository is fetched
+from GitLab in a job.
+
+There are two options. Using:
+
+- `git clone`, which is slower since it clones the repository from scratch
+ for every job, ensuring that the local working copy is always pristine.
+- `git fetch`, which is faster as it re-uses the local working copy (falling
+ back to clone if it doesn't exist).
+
+The default Git strategy can be overridden by the [`GIT_STRATEGY` variable](../yaml/README.md#git-strategy)
+in `.gitlab-ci.yml`.
+
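+For example, a minimal sketch that forces a full clone for a single job, regardless of the project default:
+
+```yaml
+build:
+  variables:
+    GIT_STRATEGY: clone
+  script:
+    - ls -al
+```
+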
+## Git shallow clone
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/28919) in GitLab 12.0.
+
+NOTE: **Note**:
+As of GitLab 12.0, newly created projects will automatically have a default
+`git depth` value of `50`.
+
+It is possible to limit the number of changes that GitLab CI/CD fetches when cloning
+a repository. Setting a limit for `git depth` can speed up pipeline execution. The maximum
+allowed value is `1000`.
+
+To disable shallow clone and make GitLab CI/CD fetch all branches and tags each time,
+keep the value empty or set it to `0`.
+
+This value can also be [overridden by the `GIT_DEPTH` variable](../large_repositories/index.md#shallow-cloning) in the `.gitlab-ci.yml` file.
+
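+For example, a minimal sketch that overrides the shallow clone depth for a single job:
+
+```yaml
+test:
+  variables:
+    GIT_DEPTH: "3"
+  script:
+    - git log --oneline
+```
+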
+## Timeout
+
+Timeout defines the maximum amount of time in minutes that a job is able to run.
+This is configurable under your project's **Settings > CI/CD > General pipelines settings**.
+The default value is 60 minutes. Decrease the time limit if you want to impose
+a hard limit on your jobs' running time or increase it otherwise. In any case,
+if the job surpasses the threshold, it is marked as failed.
+
+### Timeout overriding on Runner level
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/17221) in GitLab 10.7.
+
+The project-defined timeout (either a specific timeout set by the user or the default
+60-minute timeout) may be [overridden at the Runner level](../runners/README.md#setting-maximum-job-timeout-for-a-runner).
+
+## Maximum artifacts size **(CORE ONLY)**
+
+For information about setting a maximum artifact size for a project, see
+[Maximum artifacts size](../../user/admin_area/settings/continuous_integration.md#maximum-artifacts-size-core-only).
+
+## Custom CI configuration path
+
+> - [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/12509) in GitLab 9.4.
+> - [Support for external `.gitlab-ci.yml` locations](https://gitlab.com/gitlab-org/gitlab/issues/14376) introduced in GitLab 12.6.
+
+By default, GitLab looks for the `.gitlab-ci.yml` file in the project's root
+directory. If needed, you can specify an alternate path and file name, including locations outside the project.
+
+To customize the path:
+
+1. Go to the project's **Settings > CI / CD**.
+1. Expand the **General pipelines** section.
+1. Provide a value in the **Custom CI configuration path** field.
+1. Click **Save changes**.
+
+If the CI configuration is stored within the repository in a non-default
+location, the path must be relative to the root directory. Examples of valid
+paths and file names include:
+
+- `.gitlab-ci.yml` (default)
+- `.my-custom-file.yml`
+- `my/path/.gitlab-ci.yml`
+- `my/path/.my-custom-file.yml`
+
+If the CI configuration is hosted on an external site, the URL must end in `.yml`:
+
+- `http://example.com/generate/ci/config.yml`
+
+If the CI configuration is hosted in a different project within GitLab, the path must be relative
+to the root directory of the other project, with the group and project name added to the end:
+
+- `.gitlab-ci.yml@mygroup/another-project`
+- `my/path/.my-custom-file.yml@mygroup/another-project`
+
+Hosting the configuration file in a separate project allows stricter control of the
+configuration file. For example:
+
+- Create a public project to host the configuration file.
+- Give write permissions on the project only to users who are allowed to edit the file.
+
+Other users and projects will be able to access the configuration file without being
+able to edit it.
+
+## Test coverage parsing
+
+If you use test coverage in your code, GitLab can capture its output in the
+job log using a regular expression. In the pipelines settings, search for the
+**Test coverage parsing** section.
+
+![Pipelines settings test coverage](img/pipelines_settings_test_coverage.png)
+
+Leave it blank to disable coverage parsing, or enter a Ruby regular expression. You
+can use <https://rubular.com> to test your regex.
+
+If the pipeline succeeds, the coverage is shown in the merge request widget and
+in the jobs table.
+
+![MR widget coverage](img/pipelines_test_coverage_mr_widget.png)
+
+![Build status coverage](img/pipelines_test_coverage_build.png)
+
+A few examples of known coverage tools for a variety of languages can be found
+in the pipelines settings page.
+
+### Removing color codes
+
+Some test coverage tools output with ANSI color codes that won't be
+parsed correctly by the regular expression and will cause coverage
+parsing to fail.
+
+If your coverage tool doesn't provide an option to disable color
+codes in the output, you can pipe the output of the coverage tool through a
+small one-line script that will strip the color codes off.
+
+For example:
+
+```shell
+lein cloverage | perl -pe 's/\e\[?.*?[\@-~]//g'
+```
+
+## Visibility of pipelines
+
+Pipeline visibility is determined by:
+
+- Your current [user access level](../../user/permissions.md).
+- The **Public pipelines** project setting under your project's **Settings > CI/CD > General pipelines**.
+
+NOTE: **Note:**
+If the project visibility is set to **Private**, the [**Public pipelines** setting will have no effect](../enable_or_disable_ci.md#per-project-user-setting).
+
+This also determines the visibility of these related features:
+
+- Job output logs
+- Job artifacts
+- The [pipeline security dashboard](../../user/application_security/security_dashboard/index.md#pipeline-security-dashboard) **(ULTIMATE)**
+
+If **Public pipelines** is enabled (default):
+
+- For **public** projects, anyone can view the pipelines and related features.
+- For **internal** projects, any logged in user can view the pipelines
+ and related features.
+- For **private** projects, any project member (guest or higher) can view the pipelines
+ and related features.
+
+If **Public pipelines** is disabled:
+
+- For **public** projects, anyone can view the pipelines, but only members
+ (reporter or higher) can access the related features.
+- For **internal** projects, any logged in user can view the pipelines.
+ However, only members (reporter or higher) can access the job related features.
+- For **private** projects, only project members (reporter or higher)
+ can view the pipelines or access the related features.
+
+## Auto-cancel pending pipelines
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/9362) in GitLab 9.1.
+
+If you want all pending non-HEAD pipelines on branches to auto-cancel each time
+a new pipeline is created, such as after a Git push or manually from the UI,
+you can enable this in the project settings:
+
+1. Go to **{settings}** **Settings > CI / CD**.
+1. Expand **General Pipelines**.
+1. Check the **Auto-cancel redundant, pending pipelines** checkbox.
+1. Click **Save changes**.
+
+## Skip older, pending deployment jobs
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/25276) in GitLab 12.9.
+
+Your project may have multiple concurrent deployment jobs that are
+scheduled to run within the same time frame.
+
+This can lead to a situation where an older deployment job runs after a
+newer one, which may not be what you want.
+
+To avoid this scenario:
+
+1. Go to **{settings}** **Settings > CI / CD**.
+1. Expand **General pipelines**.
+1. Check the **Skip older, pending deployment jobs** checkbox.
+1. Click **Save changes**.
+
+The pending deployment jobs will be skipped.
+
+## Pipeline Badges
+
+In the pipelines settings page you can find pipeline status and test coverage
+badges for your project. The latest successful pipeline will be used to read
+the pipeline status and test coverage values.
+
+Visit the pipelines settings page in your project to see the exact link to
+your badges, as well as ways to embed the badge image in your HTML or Markdown
+pages.
+
+![Pipelines badges](img/pipelines_settings_badges.png)
+
+### Pipeline status badge
+
+Depending on the status of your job, a badge can have the following values:
+
+- pending
+- running
+- passed
+- failed
+- skipped
+- canceled
+- unknown
+
+You can access a pipeline status badge image using the following link:
+
+```text
+https://example.gitlab.com/<namespace>/<project>/badges/<branch>/pipeline.svg
+```
+
+### Test coverage report badge
+
+GitLab makes it possible to define the regular expression for [coverage report](#test-coverage-parsing),
+that each job log will be matched against. This means that each job in the
+pipeline can have the test coverage percentage value defined.
+
+The test coverage badge can be accessed using the following link:
+
+```text
+https://example.gitlab.com/<namespace>/<project>/badges/<branch>/coverage.svg
+```
+
+If you would like to get the coverage report from a specific job, you can add
+the `job=coverage_job_name` parameter to the URL. For example, the following
+Markdown code will embed the test coverage report badge of the `coverage` job
+into your `README.md`:
+
+```markdown
+![coverage](https://gitlab.com/gitlab-org/gitlab-foss/badges/master/coverage.svg?job=coverage)
+```
+
+### Badge styles
+
+Pipeline badges can be rendered in different styles by adding the `style=style_name` parameter to the URL. Currently, two styles are available:
+
+#### Flat (default)
+
+```text
+https://example.gitlab.com/<namespace>/<project>/badges/<branch>/coverage.svg?style=flat
+```
+
+![Badge flat style](https://gitlab.com/gitlab-org/gitlab-foss/badges/master/coverage.svg?job=coverage&style=flat)
+
+#### Flat square
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/30120) in GitLab 11.8.
+
+```text
+https://example.gitlab.com/<namespace>/<project>/badges/<branch>/coverage.svg?style=flat-square
+```
+
+![Badge flat square style](https://gitlab.com/gitlab-org/gitlab-foss/badges/master/coverage.svg?job=coverage&style=flat-square)
+
+## Environment Variables
+
+[Environment variables](../variables/README.md#gitlab-cicd-environment-variables) can be set in an environment to be available to a runner.
+
+## Deploy Keys
+
+With Deploy Keys, GitLab allows you to import SSH public keys. You can then have
+read-only or read/write access to your project from the machines the keys were generated on.
+
+SSH keys added to your project settings will be used for continuous integration,
+staging, or production servers.
+
+<!-- ## Troubleshooting
+
+Include any troubleshooting steps that you can foresee. If you know beforehand what issues
+one might have when setting this up, or when something is changed, or on upgrading, it's
+important to describe those, too. Think of things that may go wrong and include them here.
+This is important to minimize requests for support, and to avoid doc comments with
+questions that you know someone might ask.
+
+Each scenario can be a third-level heading, e.g. `### Getting error message X`.
+If you have none to add when creating a doc, leave this section in place
+but commented out to help encourage others to add to it in the future. -->
diff --git a/doc/ci/quick_start/README.md b/doc/ci/quick_start/README.md
index 6de3eaf8831..0ea37e26224 100644
--- a/doc/ci/quick_start/README.md
+++ b/doc/ci/quick_start/README.md
@@ -17,8 +17,13 @@ NOTE: **Note:**
Coming over to GitLab from Jenkins? Check out our [reference](../jenkins/index.md)
for converting your pre-existing pipelines over to our format.
+NOTE: **Note:**
+There are a few different [basic pipeline architectures](../pipelines/pipeline_architectures.md)
+that you can consider for use in your project. You may want to familiarize
+yourself with these prior to getting started.
+
GitLab offers a [continuous integration](https://about.gitlab.com/stages-devops-lifecycle/continuous-integration/) service. For each commit or push to trigger your CI
-[pipeline](../pipelines.md), you must:
+[pipeline](../pipelines/index.md), you must:
- Add a [`.gitlab-ci.yml` file](#creating-a-gitlab-ciyml-file) to your repository's root directory.
- Ensure your project is configured to use a [Runner](#configuring-a-runner).
diff --git a/doc/ci/review_apps/img/enable_review_app_v12_8.png b/doc/ci/review_apps/img/enable_review_app_v12_8.png
index 364fe402787..264e4834e72 100644
--- a/doc/ci/review_apps/img/enable_review_app_v12_8.png
+++ b/doc/ci/review_apps/img/enable_review_app_v12_8.png
Binary files differ
diff --git a/doc/ci/review_apps/index.md b/doc/ci/review_apps/index.md
index bdef2eca1f2..860eab469dd 100644
--- a/doc/ci/review_apps/index.md
+++ b/doc/ci/review_apps/index.md
@@ -188,6 +188,9 @@ With Visual Reviews, you can provide a feedback form to your Review Apps so
that reviewers can post comments directly from the app back to the merge request
that spawned the Review App.
+NOTE: **Note:** Visual Reviews currently only work for public projects. Support for private
+and internal projects [is planned](https://gitlab.com/gitlab-org/gitlab/-/issues/42750).
+
### Configuring Visual Reviews
Ensure that the `anonymous_visual_review_feedback` feature flag is enabled.
@@ -200,7 +203,8 @@ Feature.enable(:anonymous_visual_review_feedback)
The feedback form is served through a script you add to pages in your Review App.
If you have [Developer permissions](../../user/permissions.md) to the project,
you can access it by clicking the **Review** button in the **Pipeline** section
-of the merge request.
+of the merge request. The form modal will also show a dropdown for changed pages
+if [route maps](#route-maps) are configured in the project.
![review button](img/review_button.png)
diff --git a/doc/ci/runners/README.md b/doc/ci/runners/README.md
index 1e6b8bcc4a7..7e1aae4709a 100644
--- a/doc/ci/runners/README.md
+++ b/doc/ci/runners/README.md
@@ -195,11 +195,11 @@ We have following jobs in queue:
With the fair usage algorithm jobs are assigned in following order:
-1. Job 1 is chosen first, because it has the lowest job number from projects with no running jobs (i.e. all projects)
+1. Job 1 is chosen first, because it has the lowest job number from projects with no running jobs (that is, all projects)
1. Job 4 is next, because 4 is now the lowest job number from projects with no running jobs (Project 1 has a job running)
1. Job 6 is next, because 6 is now the lowest job number from projects with no running jobs (Projects 1 and 2 have jobs running)
1. Job 2 is next, because, of projects with the lowest number of jobs running (each has 1), it is the lowest job number
-1. Job 5 is next, because Project 1 now has 2 jobs running, and between Projects 2 and 3, Job 5 is the lowest remaining job number
+1. Job 5 is next, because Project 1 now has 2 jobs running and Job 5 is the lowest remaining job number between Projects 2 and 3.
1. Lastly we choose Job 3... because it's the only job left
---
@@ -217,7 +217,7 @@ We have following jobs in queue:
With the fair usage algorithm jobs are assigned in following order:
-1. Job 1 is chosen first, because it has the lowest job number from projects with no running jobs (i.e. all projects)
+1. Job 1 is chosen first, because it has the lowest job number from projects with no running jobs (that is, all projects)
1. We finish job 1
1. Job 2 is next, because, having finished Job 1, all projects have 0 jobs running again, and 2 is the lowest available job number
1. Job 4 is next, because with Project 1 running a job, 4 is the lowest number from projects running no jobs (Projects 2 and 3)
@@ -307,7 +307,7 @@ Example 2:
For each Runner you can specify a _maximum job timeout_. Such timeout,
if smaller than [project defined timeout], will take the precedence. This
feature can be used to prevent Shared Runner from being appropriated
-by a project by setting a ridiculous big timeout (e.g. one week).
+by a project by setting a ridiculous big timeout (for example, one week).
When not configured, Runner will not override project timeout.
@@ -337,8 +337,8 @@ How this feature will work:
### Be careful with sensitive information
With some [Runner Executors](https://docs.gitlab.com/runner/executors/README.html),
-if you can run a job on the Runner, you can get access to any code it runs
-and get the token of the Runner. With shared Runners, this means that anyone
+if you can run a job on the Runner, you can get full access to the file system,
+and thus any code it runs as well as the token of the Runner. With shared Runners, this means that anyone
that runs jobs on the Runner, can access anyone else's code that runs on the
Runner.
@@ -422,4 +422,4 @@ You can find the IP address of a Runner for a specific project by:
[register]: https://docs.gitlab.com/runner/register/
[protected branches]: ../../user/project/protected_branches.md
[protected tags]: ../../user/project/protected_tags.md
-[project defined timeout]: ../../user/project/pipelines/settings.html#timeout
+[project defined timeout]: ../pipelines/settings.md#timeout
diff --git a/doc/ci/services/postgres.md b/doc/ci/services/postgres.md
index a137f10949f..4cf3e429c63 100644
--- a/doc/ci/services/postgres.md
+++ b/doc/ci/services/postgres.md
@@ -17,27 +17,28 @@ First, in your `.gitlab-ci.yml` add:
```yaml
services:
- - postgres:latest
+ - postgres:12.2-alpine
variables:
POSTGRES_DB: nice_marmot
POSTGRES_USER: runner
POSTGRES_PASSWORD: ""
+ POSTGRES_HOST_AUTH_METHOD: trust
```
NOTE: **Note:**
-The `POSTGRES_DB`, `POSTGRES_USER`, and `POSTGRES_PASSWORD` variables can't be set in
-the GitLab UI. To set them, assign them to a variable
+The `POSTGRES_DB`, `POSTGRES_USER`, `POSTGRES_PASSWORD`, and `POSTGRES_HOST_AUTH_METHOD`
+variables can't be set in the GitLab UI. To set them, assign them to a variable
[in the UI](../variables/README.md#via-the-ui), and then assign that
-variable to the `POSTGRES_DB`, `POSTGRES_USER`, and `POSTGRES_PASSWORD` variables in
-your `.gitlab-ci.yml`.
+variable to the `POSTGRES_DB`, `POSTGRES_USER`, `POSTGRES_PASSWORD`, and `POSTGRES_HOST_AUTH_METHOD`
+variables in your `.gitlab-ci.yml`.
And then configure your application to use the database, for example:
```yaml
Host: postgres
User: runner
-Password:
+Password: ''
Database: nice_marmot
```
@@ -118,7 +119,7 @@ We have set up an [Example PostgreSQL Project][postgres-example-repo] for your
convenience that runs on [GitLab.com](https://gitlab.com) using our publicly
available [shared runners](../runners/README.md).
-Want to hack on it? Simply fork it, commit and push your changes. Within a few
+Want to hack on it? Simply fork it, commit and push your changes. Within a few
moments the changes will be picked by a public runner and the job will begin.
[hub-pg]: https://hub.docker.com/_/postgres
diff --git a/doc/ci/services/redis.md b/doc/ci/services/redis.md
index b04b36f7a04..f22ee87a9d3 100644
--- a/doc/ci/services/redis.md
+++ b/doc/ci/services/redis.md
@@ -66,5 +66,5 @@ We have set up an [Example Redis Project](https://gitlab.com/gitlab-examples/red
that runs on [GitLab.com](https://gitlab.com) using our publicly available
[shared runners](../runners/README.md).
-Want to hack on it? Simply fork it, commit and push your changes. Within a few
+Want to hack on it? Simply fork it, commit and push your changes. Within a few
moments the changes will be picked by a public runner and the job will begin.
diff --git a/doc/ci/ssh_keys/README.md b/doc/ci/ssh_keys/README.md
index 804970e08f2..b3a97d34575 100644
--- a/doc/ci/ssh_keys/README.md
+++ b/doc/ci/ssh_keys/README.md
@@ -37,7 +37,7 @@ with any type of [executor](https://docs.gitlab.com/runner/executors/)
NOTE: **Note:**
The private key will not be displayed in the job log, unless you enable
[debug logging](../variables/README.md#debug-logging). You might also want to
-check the [visibility of your pipelines](../../user/project/pipelines/settings.md#visibility-of-pipelines).
+check the [visibility of your pipelines](../pipelines/settings.md#visibility-of-pipelines).
## SSH keys when using the Docker executor
diff --git a/doc/ci/triggers/README.md b/doc/ci/triggers/README.md
index aa03add0ac2..00070594ded 100644
--- a/doc/ci/triggers/README.md
+++ b/doc/ci/triggers/README.md
@@ -123,7 +123,7 @@ settings page which provides self-explanatory examples.
When a rerun of a pipeline is triggered, the information is exposed in GitLab's
UI under the **Jobs** page and the jobs are marked as triggered 'by API'.
-![Marked rebuilds as on jobs page](img/builds_page.png)
+![Marked rebuilds as on jobs page](img/builds_page.png)
---
@@ -167,7 +167,7 @@ build_docs:
- tags
```
-Now, whenever a new tag is pushed on project A, the job will run and the
+This means that whenever a new tag is pushed on project A, the job will run and the
`build_docs` job will be executed, triggering a rebuild of project B. The
`stage: deploy` ensures that this job will run only after all jobs with
`stage: test` complete successfully.
@@ -256,7 +256,7 @@ of all types of variables.
>**Note:**
The following behavior can also be achieved through GitLab's UI with
-[pipeline schedules](../../user/project/pipelines/schedules.md).
+[pipeline schedules](../pipelines/schedules.md).
Whether you craft a script or just run cURL directly, you can trigger jobs
in conjunction with cron. The example below triggers a job on the `master`
diff --git a/doc/ci/variables/README.md b/doc/ci/variables/README.md
index 643ccd45898..5a6d30a1079 100644
--- a/doc/ci/variables/README.md
+++ b/doc/ci/variables/README.md
@@ -169,7 +169,7 @@ You can either set the variable directly in the `.gitlab-ci.yml`
file or through the UI.
NOTE: **Note:**
-It is possible to [specify variables when running manual jobs](../pipelines.md#specifying-variables-when-running-manual-jobs).
+It is possible to [specify variables when running manual jobs](../pipelines/index.md#specifying-variables-when-running-manual-jobs).
#### Via `.gitlab-ci.yml`
@@ -185,14 +185,19 @@ For a deeper look into them, see [`.gitlab-ci.yml` defined variables](#gitlab-ci
#### Via the UI
-From the UI, navigate to your project's **Settings > CI/CD** and
-expand **Variables**. Create a new variable by choosing its **type**, naming
-it in the field **Input variable key**, and defining its value in the
-**Input variable value** field:
+From within the UI, you can add or update custom environment variables:
-![CI/CD settings - new variable](img/new_custom_variables_example.png)
+1. Go to your project's **Settings > CI/CD** and expand the **Variables** section.
+1. Click the **Add variable** button. In the **Add variable** modal, fill in the details:
-You'll also see the option to mask and/or protect your variables.
+ - **Key**: Must be one line, with no spaces, using only letters, numbers, `-` or `_`.
+ - **Value**: No limitations.
+ - **Type**: `File` or `Variable`.
+ - **Environment scope**: `All`, or specific environments.
+ - **Protect variable** (Optional): If selected, the variable will only be available in pipelines that run on protected branches or tags.
+ - **Mask variable** (Optional): If selected, the variable's **Value** will be masked in job logs. The variable will fail to save if the value does not meet the [masking requirements](#masked-variables).
+
+After a variable is created, you can update any of the details by clicking on the **{pencil}** **Edit** button.
Once you've set the variables, call them from the `.gitlab-ci.yml` file:
@@ -377,7 +382,7 @@ variables, depending on where they are defined.
The order of precedence for variables is (from highest to lowest):
-1. [Trigger variables](../triggers/README.md#making-use-of-trigger-variables) or [scheduled pipeline variables](../../user/project/pipelines/schedules.md#using-variables).
+1. [Trigger variables](../triggers/README.md#making-use-of-trigger-variables) or [scheduled pipeline variables](../pipelines/schedules.md#using-variables).
1. Project-level [variables](#creating-a-custom-environment-variable) or [protected variables](#protected-environment-variables).
1. Group-level [variables](#group-level-environment-variables) or [protected variables](#protected-environment-variables).
1. YAML-defined [job-level variables](../yaml/README.md#variables).
@@ -460,7 +465,7 @@ limitations with the current Auto DevOps scripting environment.
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/44059) in GitLab 10.8.
-[Manually triggered pipelines](../pipelines.md#manually-executing-pipelines) allow you to override the value of a current variable.
+[Manually triggered pipelines](../pipelines/index.md#manually-executing-pipelines) allow you to override the value of a current variable.
For instance, suppose you added a
[custom variable `$TEST`](#creating-a-custom-environment-variable)
@@ -571,9 +576,12 @@ Below you can find supported syntax reference:
- `$VARIABLE =~ /^content.*/`
- `$VARIABLE_1 !~ /^content.*/` (introduced in GitLab 11.11)
- It is possible perform pattern matching against a variable and regular
- expression. Expression like this evaluates to truth if matches are found
- when using `=~`. It evaluates to truth if matches are not found when `!~` is used.
+ Variable pattern matching with regular expressions uses the
+ [RE2 regular expression syntax](https://github.com/google/re2/wiki/Syntax).
+ Expressions evaluate as `true` if:
+
+ - Matches are found when using `=~`.
+ - Matches are *not* found when using `!~`.
Pattern matching is case-sensitive by default. Use `i` flag modifier, like
`/pattern/i` to make a pattern case-insensitive.
@@ -613,7 +621,7 @@ variables that were set, etc.
Before enabling this, you should ensure jobs are visible to
[team members only](../../user/permissions.md#project-features). You should
-also [erase](../pipelines.md#accessing-individual-jobs) all generated job logs
+also [erase](../pipelines/index.md#accessing-individual-jobs) all generated job logs
before making them visible again.
To enable debug logs (traces), set the `CI_DEBUG_TRACE` variable to `true`:
diff --git a/doc/ci/variables/img/new_custom_variables_example.png b/doc/ci/variables/img/new_custom_variables_example.png
deleted file mode 100644
index bb60e6bab21..00000000000
--- a/doc/ci/variables/img/new_custom_variables_example.png
+++ /dev/null
Binary files differ
diff --git a/doc/ci/variables/predefined_variables.md b/doc/ci/variables/predefined_variables.md
index dd15b8c37b1..a340f8b705d 100644
--- a/doc/ci/variables/predefined_variables.md
+++ b/doc/ci/variables/predefined_variables.md
@@ -33,7 +33,7 @@ future GitLab releases.**
| `CI_COMMIT_DESCRIPTION` | 10.8 | all | The description of the commit: the message without first line, if the title is shorter than 100 characters; full message in other case. |
| `CI_COMMIT_MESSAGE` | 10.8 | all | The full commit message. |
| `CI_COMMIT_REF_NAME` | 9.0 | all | The branch or tag name for which project is built |
-| `CI_COMMIT_REF_PROTECTED` | 11.11 | all | If the job is running on a protected branch |
+| `CI_COMMIT_REF_PROTECTED` | 11.11 | all | `true` if the job is running on a protected reference, `false` if not |
| `CI_COMMIT_REF_SLUG` | 9.0 | all | `$CI_COMMIT_REF_NAME` lowercased, shortened to 63 bytes, and with everything except `0-9` and `a-z` replaced with `-`. No leading / trailing `-`. Use in URLs, host names and domain names. |
| `CI_COMMIT_SHA` | 9.0 | all | The commit revision for which project is built |
| `CI_COMMIT_SHORT_SHA` | 11.7 | all | The first eight characters of `CI_COMMIT_SHA` |
@@ -57,12 +57,15 @@ future GitLab releases.**
| `CI_EXTERNAL_PULL_REQUEST_TARGET_BRANCH_NAME` | 12.3 | all | The target branch name of the pull request if [the pipelines are for external pull requests](../ci_cd_for_external_repos/index.md#pipelines-for-external-pull-requests). Available only if `only: [external_pull_requests]` is used and the pull request is open. |
| `CI_EXTERNAL_PULL_REQUEST_TARGET_BRANCH_SHA` | 12.3 | all | The HEAD SHA of the target branch of the pull request if [the pipelines are for external pull requests](../ci_cd_for_external_repos/index.md#pipelines-for-external-pull-requests). Available only if `only: [external_pull_requests]` is used and the pull request is open. |
| `CI_JOB_ID` | 9.0 | all | The unique id of the current job that GitLab CI uses internally |
+| `CI_JOB_IMAGE` | 12.9 | 12.9 | The name of the image running the CI job |
| `CI_JOB_MANUAL` | 8.12 | all | The flag to indicate that job was manually started |
| `CI_JOB_NAME` | 9.0 | 0.5 | The name of the job as defined in `.gitlab-ci.yml` |
| `CI_JOB_STAGE` | 9.0 | 0.5 | The name of the stage as defined in `.gitlab-ci.yml` |
| `CI_JOB_TOKEN` | 9.0 | 1.2 | Token used for authenticating with the [GitLab Container Registry][registry] and downloading [dependent repositories][dependent-repositories] |
| `CI_JOB_URL` | 11.1 | 0.5 | Job details URL |
| `CI_MERGE_REQUEST_ASSIGNEES` | 11.9 | all | Comma-separated list of username(s) of assignee(s) for the merge request if [the pipelines are for merge requests](../merge_request_pipelines/index.md). Available only if `only: [merge_requests]` is used and the merge request is created. |
+| `CI_MERGE_REQUEST_CHANGED_PAGE_PATHS` | 12.9 | all | Comma-separated list of paths of changed pages in a deployed [Review App](../review_apps/index.md) for a [Merge Request](../merge_request_pipelines/index.md). A [Route Map](../review_apps/index.md#route-maps) must be configured. |
+| `CI_MERGE_REQUEST_CHANGED_PAGE_URLS` | 12.9 | all | Comma-separated list of URLs of changed pages in a deployed [Review App](../review_apps/index.md) for a [Merge Request](../merge_request_pipelines/index.md). A [Route Map](../review_apps/index.md#route-maps) must be configured. |
| `CI_MERGE_REQUEST_ID` | 11.6 | all | The ID of the merge request if [the pipelines are for merge requests](../merge_request_pipelines/index.md). Available only if `only: [merge_requests]` is used and the merge request is created. |
| `CI_MERGE_REQUEST_IID` | 11.6 | all | The IID of the merge request if [the pipelines are for merge requests](../merge_request_pipelines/index.md). Available only if `only: [merge_requests]` is used and the merge request is created. |
| `CI_MERGE_REQUEST_LABELS` | 11.9 | all | Comma-separated label names of the merge request if [the pipelines are for merge requests](../merge_request_pipelines/index.md). Available only if `only: [merge_requests]` is used and the merge request is created. |
diff --git a/doc/ci/yaml/README.md b/doc/ci/yaml/README.md
index 1a301481f05..404f2e07384 100644
--- a/doc/ci/yaml/README.md
+++ b/doc/ci/yaml/README.md
@@ -4,7 +4,7 @@ type: reference
# GitLab CI/CD Pipeline Configuration Reference
-GitLab CI/CD [pipelines](../pipelines.md) are configured using a YAML file called `.gitlab-ci.yml` within each project.
+GitLab CI/CD [pipelines](../pipelines/index.md) are configured using a YAML file called `.gitlab-ci.yml` within each project.
The `.gitlab-ci.yml` file defines the structure and order of the pipelines and determines:
@@ -107,7 +107,7 @@ The following table lists available parameters for jobs:
| [`when`](#when) | When to run job. Also available: `when:manual` and `when:delayed`. |
| [`environment`](#environment) | Name of an environment to which the job deploys. Also available: `environment:name`, `environment:url`, `environment:on_stop`, `environment:auto_stop_in` and `environment:action`. |
| [`cache`](#cache) | List of files that should be cached between subsequent runs. Also available: `cache:paths`, `cache:key`, `cache:untracked`, and `cache:policy`. |
-| [`artifacts`](#artifacts) | List of files and directories to attach to a job on success. Also available: `artifacts:paths`, `artifacts:expose_as`, `artifacts:name`, `artifacts:untracked`, `artifacts:when`, `artifacts:expire_in`, `artifacts:reports`, and `artifacts:reports:junit`.<br><br>In GitLab [Enterprise Edition](https://about.gitlab.com/pricing/), these are available: `artifacts:reports:codequality`, `artifacts:reports:sast`, `artifacts:reports:dependency_scanning`, `artifacts:reports:container_scanning`, `artifacts:reports:dast`, `artifacts:reports:license_management`, `artifacts:reports:performance` and `artifacts:reports:metrics`. |
+| [`artifacts`](#artifacts) | List of files and directories to attach to a job on success. Also available: `artifacts:paths`, `artifacts:expose_as`, `artifacts:name`, `artifacts:untracked`, `artifacts:when`, `artifacts:expire_in`, `artifacts:reports`, `artifacts:reports:junit`, and `artifacts:reports:cobertura`.<br><br>In GitLab [Enterprise Edition](https://about.gitlab.com/pricing/), these are available: `artifacts:reports:codequality`, `artifacts:reports:sast`, `artifacts:reports:dependency_scanning`, `artifacts:reports:container_scanning`, `artifacts:reports:dast`, `artifacts:reports:license_management`, `artifacts:reports:performance` and `artifacts:reports:metrics`. |
| [`dependencies`](#dependencies) | Restrict which artifacts are passed to a specific job by providing a list of jobs to fetch artifacts from. |
| [`coverage`](#coverage) | Code coverage settings for a given job. |
| [`retry`](#retry) | When and how many times a job can be auto-retried in case of a failure. |
@@ -158,6 +158,88 @@ rspec 2.6:
script: bundle exec rspec
```
+### `inherit`
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/207484) in GitLab 12.9.
+
+You can disable inheritance of globally defined defaults
+and variables with the `inherit:` parameter.
+
+To enable or disable the inheritance of all `variables:` or `default:` parameters, use the following format:
+
+- `default: true` or `default: false`
+- `variables: true` or `variables: false`
+
+To inherit only a subset of `default:` parameters or `variables:`, specify what
+you wish to inherit. Anything not listed will **not** be inherited. Use
+one of the following formats:
+
+```yaml
+inherit:
+ default: [parameter1, parameter2]
+ variables: [VARIABLE1, VARIABLE2]
+```
+
+Or:
+
+```yaml
+inherit:
+ default:
+ - parameter1
+ - parameter2
+ variables:
+ - VARIABLE1
+ - VARIABLE2
+```
+
+In the example below:
+
+- `rubocop`:
+ - **will** inherit: Nothing.
+- `rspec`:
+ - **will** inherit: the default `image` and the `WEBHOOK_URL` variable.
+ - **will not** inherit: the default `before_script` and the `DOMAIN` variable.
+- `capybara`:
+ - **will** inherit: the default `before_script` and `image`.
+ - **will not** inherit: the `DOMAIN` and `WEBHOOK_URL` variables.
+- `karma`:
+ - **will** inherit: the default `image` and `before_script`, and the `DOMAIN` variable.
+  - **will not** inherit: the `WEBHOOK_URL` variable.
+
+```yaml
+default:
+ image: 'ruby:2.4'
+ before_script:
+ - echo Hello World
+
+variables:
+ DOMAIN: example.com
+ WEBHOOK_URL: https://my-webhook.example.com
+
+rubocop:
+ inherit:
+ default: false
+ variables: false
+ script: bundle exec rubocop
+
+rspec:
+ inherit:
+ default: [image]
+ variables: [WEBHOOK_URL]
+ script: bundle exec rspec
+
+capybara:
+ inherit:
+ variables: false
+ script: bundle exec capybara
+
+karma:
+ inherit:
+ default: true
+ variables: [DOMAIN]
+ script: karma
+```
+
## Parameter details
The following are detailed explanations for parameters used to configure CI/CD pipelines.
@@ -378,6 +460,9 @@ For example, the following are equivalent configuration:
- b
```
+NOTE: **Note:**
+A pipeline will not be created if it only contains jobs in `.pre` or `.post` stages.
+
### `stage`
`stage` is defined per-job and relies on [`stages`](#stages) which is defined
@@ -605,6 +690,24 @@ With `only`, individual keys are logically joined by an AND:
> (any of refs) AND (any of variables) AND (any of changes) AND (if Kubernetes is active)
+In the example below, the `test` job will `only` be created when **all** of the following are true:
+
+- The pipeline has been [scheduled](../pipelines/schedules.md) **or** runs for `master`.
+- The `variables` keyword matches.
+- The `kubernetes` service is active on the project.
+
+```yaml
+test:
+ script: npm run test
+ only:
+ refs:
+ - master
+ - schedules
+ variables:
+ - $CI_COMMIT_MESSAGE =~ /run-end-to-end-tests/
+ kubernetes: active
+```
+
`except` is implemented as a negation of this complete expression:
> NOT((any of refs) AND (any of variables) AND (any of changes) AND (if Kubernetes is active))
@@ -613,6 +716,21 @@ This means the keys are treated as if joined by an OR. This relationship could b
> (any of refs) OR (any of variables) OR (any of changes) OR (if Kubernetes is active)
+In the example below, the `test` job will **not** be created when **any** of the following are true:
+
+- The pipeline runs for the `master` branch.
+- There are changes to the `README.md` file in the root directory of the repository.
+
+```yaml
+test:
+ script: npm run test
+ except:
+ refs:
+ - master
+ changes:
+ - "README.md"
+```
+
#### `only:refs`/`except:refs`
> `refs` policy introduced in GitLab 10.0.
@@ -752,6 +870,10 @@ CAUTION: **Warning:**
There are some points to be aware of when
[using this feature with new branches or tags *without* pipelines for merge requests](#using-onlychanges-without-pipelines-for-merge-requests).
+CAUTION: **Warning:**
+There are some points to be aware of when
+[using this feature with scheduled pipelines](#using-onlychanges-with-scheduled-pipelines).
+
##### Using `only:changes` with pipelines for merge requests
With [pipelines for merge requests](../merge_request_pipelines/index.md),
@@ -816,19 +938,26 @@ This could result in some unexpected behavior, including:
- When pushing a new commit, the changed files are calculated using the previous commit
as the base SHA.
+##### Using `only:changes` with scheduled pipelines
+
+`only:changes` always evaluates as "true" in [Scheduled pipelines](../pipelines/schedules.md).
+All files are considered to have "changed" when a scheduled pipeline
+runs.
+
### `rules`
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/29011) in GitLab 12.3.
`rules` allows for a list of individual rule objects to be evaluated
*in order*, until one matches and dynamically provides attributes to the job.
+Note that `rules` cannot be used in combination with `only/except` because it is intended
+to replace that functionality. If you attempt to do this, the linter returns a
+`key may not be used with rules` error.
Available rule clauses include:
-- [`if`](#rulesif)
- (similar to [`only:variables`](#onlyvariablesexceptvariables)).
-- [`changes`](#ruleschanges)
- (same as [`only:changes`](#onlychangesexceptchanges)).
+- [`if`](#rulesif) (similar to [`only:variables`](#onlyvariablesexceptvariables))
+- [`changes`](#ruleschanges) (same as [`only:changes`](#onlychangesexceptchanges))
- [`exists`](#rulesexists)
For example, using `if`. This configuration specifies that `job` should be built
@@ -859,7 +988,10 @@ In this example, if the first rule:
`rules:if` differs slightly from `only:variables` by accepting only a single
expression string, rather than an array of them. Any set of expressions to be
-evaluated should be conjoined into a single expression using `&&` or `||`. For example:
+evaluated should be conjoined into a single expression using `&&` or `||`, and use
+the [variable matching syntax](../variables/README.md#supported-syntax).
+
+For example:
```yaml
job:
@@ -895,7 +1027,6 @@ docker build:
- if: '$VAR == "string value"'
when: manual # Will include the job and set to when:manual if the expression evaluates to true, after the `changes:` rule fails to match.
- when: on_success # If neither of the first rules match, set to on_success
-
```
In this example, a job either set to:
@@ -956,6 +1087,47 @@ job:
In this example, if the first rule matches, then the job will have `when: manual` and `allow_failure: true`.
+#### Exclude jobs with `rules:` from certain pipelines
+
+Jobs with `rules:` can cause two pipelines to be created unexpectedly:
+
+- One pipeline from pushing a commit to a branch.
+- A second ["detached" pipeline for a merge request](../merge_request_pipelines/index.md).
+
+`only` and `except` jobs do not trigger merge request pipelines by default, but this
+is not the case for jobs with `rules:`, which may be surprising if migrating from `only`
+and `except` to `rules:`.
+
+If you are using `rules:` and you see two pipelines for commits to branches that have
+a merge request, you have two options:
+
+- Individually exclude each job that uses `rules:` from merge request pipelines. The
+ example below will cause the job to **not** run in *pipelines for merge requests*,
+ but it **will** run in pipelines for *new tags and pipelines running on branch refs*:
+
+ ```yaml
+ job:
+ rules:
+ - if: $CI_MERGE_REQUEST_ID
+ when: never
+ - when: manual
+ script:
+ - echo hello
+ ```
+
+- Add a global [`workflow: rules`](#workflowrules) to allow pipelines in only certain
+  situations. The example below will only run pipelines for merge requests, new tags, and
+  changes to `master`. It will **not** run any pipelines *on any branch except `master`*, but
+  it will run **detached merge request pipelines** for any merge request targeting any branch:
+
+ ```yaml
+ workflow:
+ rules:
+ - if: $CI_MERGE_REQUEST_ID
+ - if: $CI_COMMIT_TAG
+ - if: $CI_COMMIT_BRANCH == "master"
+ ```
+
#### Complex rule clauses
To conjoin `if`, `changes`, and `exists` clauses with an AND, use them in the
@@ -1792,7 +1964,7 @@ attached to the job when it [succeeds, fails, or always](#artifactswhen).
The artifacts will be sent to GitLab after the job finishes and will
be available for download in the GitLab UI.
-[Read more about artifacts](../../user/project/pipelines/job_artifacts.md).
+[Read more about artifacts](../pipelines/job_artifacts.md).
#### `artifacts:paths`
@@ -1846,7 +2018,7 @@ release-job:
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/15018) in GitLab 12.5.
-The `expose_as` keyword can be used to expose [job artifacts](../../user/project/pipelines/job_artifacts.md)
+The `expose_as` keyword can be used to expose [job artifacts](../pipelines/job_artifacts.md)
in the [merge request](../../user/project/merge_requests/index.md) UI.
For example, to match a single file:
@@ -1876,7 +2048,7 @@ Note the following:
- A maximum of 10 job artifacts per merge request can be exposed.
- Glob patterns are unsupported.
-- If a directory is specified, the link will be to the job [artifacts browser](../../user/project/pipelines/job_artifacts.md#browsing-artifacts) if there is more than
+- If a directory is specified, the link will be to the job [artifacts browser](../pipelines/job_artifacts.md#browsing-artifacts) if there is more than
one file in the directory.
- For exposed single file artifacts with `.html`, `.htm`, `.txt`, `.json`, `.xml`,
and `.log` extensions, if [GitLab Pages](../../administration/pages/index.md) is:
@@ -2049,9 +2221,8 @@ job:
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/20390) in
GitLab 11.2. Requires GitLab Runner 11.2 and above.
-The `reports` keyword is used for collecting test reports from jobs and
-exposing them in GitLab's UI (merge requests, pipeline views). Read how to use
-this with [JUnit reports](#artifactsreportsjunit).
+The `reports` keyword is used for collecting test reports, code quality reports, and security reports from jobs.
+It also exposes these reports in GitLab's UI (merge requests, pipeline views, and security dashboards).
NOTE: **Note:**
The test reports are collected regardless of the job results (success or failure).
@@ -2096,6 +2267,37 @@ concatenated into a single file. Use a filename pattern (`junit: rspec-*.xml`),
an array of filenames (`junit: [rspec-1.xml, rspec-2.xml, rspec-3.xml]`), or a
combination thereof (`junit: [rspec.xml, test-results/TEST-*.xml]`).
+##### `artifacts:reports:dotenv`
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/17066) in GitLab 12.9. Requires GitLab Runner 11.5 and later.
+
+The `dotenv` report collects a set of environment variables as artifacts.
+
+The collected variables are registered as runtime-created variables of the job,
+which is useful to [set dynamic environment URLs after a job finishes](../environments.md#set-dynamic-environment-urls-after-a-job-finishes).
+The report itself is not available for download through the web interface.
+
+There are some limitations on top of the [original dotenv rules](https://github.com/motdotla/dotenv#rules):
+
+- The variable key can contain only letters, digits, and underscores (`_`).
+- The size of the dotenv file must be smaller than 5 kilobytes.
+- The number of variables must be less than 10.
+- It doesn't support variable substitution in the dotenv file itself.
+- It doesn't support empty lines or comments (`#`) in the dotenv file.
+- It doesn't support quote escapes, spaces in quotes, or newline expansion in quotes in the dotenv file.
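+
+A minimal sketch of collecting a dotenv report (the `build.env` filename and the exported
+variable are placeholders):
+
+```yaml
+build:
+  stage: build
+  script:
+    - echo "BUILD_VERSION=1.0.0" >> build.env   # placeholder variable
+  artifacts:
+    reports:
+      dotenv: build.env
+```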
+
+##### `artifacts:reports:cobertura`
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/3708) in GitLab 12.9.
+> Requires [GitLab Runner](https://docs.gitlab.com/runner/) 11.5 and above.
+
+The `cobertura` report collects [Cobertura coverage XML files](../../user/project/merge_requests/test_coverage_visualization.md).
+The collected Cobertura coverage reports will be uploaded to GitLab as an artifact
+and will be automatically shown in merge requests.
+
+Cobertura was originally developed for Java, but there are many
+third-party ports for other languages such as JavaScript, Python, and Ruby.
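+
+A minimal sketch of collecting a Cobertura report (the test command and the `coverage.xml`
+path are placeholders):
+
+```yaml
+test:
+  stage: test
+  script:
+    - run-tests --coverage-out=coverage.xml   # placeholder test command
+  artifacts:
+    reports:
+      cobertura: coverage.xml
+```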
+
##### `artifacts:reports:codequality` **(STARTER)**
> Introduced in GitLab 11.5. Requires GitLab Runner 11.5 and above.
@@ -2159,7 +2361,7 @@ introduced in GitLab 12.8.
> Introduced in GitLab 11.5. Requires GitLab Runner 11.5 and above.
-The `license_management` report collects [Licenses](../../user/application_security/license_compliance/index.md)
+The `license_management` report collects [Licenses](../../user/compliance/license_compliance/index.md)
as artifacts.
The collected License Compliance report will be uploaded to GitLab as an artifact and will
@@ -2170,7 +2372,7 @@ dashboards. It is not available for download through the web interface.
> Introduced in GitLab 12.8. Requires GitLab Runner 11.5 and above.
-The `license_scanning` report collects [Licenses](../../user/application_security/license_compliance/index.md)
+The `license_scanning` report collects [Licenses](../../user/compliance/license_compliance/index.md)
as artifacts.
The License Compliance report will be uploaded to GitLab as an artifact and will
@@ -2259,7 +2461,7 @@ deploy:
If the artifacts of the job that is set as a dependency have been
[expired](#artifactsexpire_in) or
-[erased](../../user/project/pipelines/job_artifacts.md#erasing-artifacts), then
+[erased](../pipelines/job_artifacts.md#erasing-artifacts), then
the dependent job will fail.
NOTE: **Note:**
@@ -2567,7 +2769,7 @@ test:
```
The job-level timeout can exceed the
-[project-level timeout](../../user/project/pipelines/settings.md#timeout) but can not
+[project-level timeout](../pipelines/settings.md#timeout) but can not
exceed the Runner-specific timeout.
### `parallel`
@@ -2712,6 +2914,31 @@ trigger_job:
strategy: depend
```
+##### Trigger child pipeline with generated configuration file
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/35632) in GitLab 12.9.
+
+You can also trigger a child pipeline from a [dynamically generated configuration file](../parent_child_pipelines.md#dynamic-child-pipelines):
+
+```yaml
+generate-config:
+ stage: build
+ script: generate-ci-config > generated-config.yml
+ artifacts:
+ paths:
+ - generated-config.yml
+
+child-pipeline:
+ stage: test
+ trigger:
+ include:
+ - artifact: generated-config.yml
+ job: generate-config
+```
+
+The `generated-config.yml` is extracted from the artifacts and used as the configuration
+for triggering the child pipeline.
+
#### Linking pipelines with `trigger:strategy`
By default, the `trigger` job completes with the `success` status
@@ -2738,7 +2965,7 @@ starting, at the cost of reduced parallelization.
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/23464) in GitLab 12.3.
`interruptible` is used to indicate that a job should be canceled if made redundant by a newer pipeline run. Defaults to `false`.
-This value will only be used if the [automatic cancellation of redundant pipelines feature](../../user/project/pipelines/settings.md#auto-cancel-pending-pipelines)
+This value will only be used if the [automatic cancellation of redundant pipelines feature](../pipelines/settings.md#auto-cancel-pending-pipelines)
is enabled.
When enabled, a pipeline on the same branch will be canceled when:
@@ -2867,7 +3094,7 @@ your configuration file is on. In other words, when using a `include:local`, mak
sure that both `.gitlab-ci.yml` and the local file are on the same branch.
All [nested includes](#nested-includes) will be executed in the scope of the same project,
-so it is possible to use local, project, remote or template includes.
+so it is possible to use local, project, remote, or template includes.
NOTE: **Note:**
Including local files through Git submodules paths is not supported.
@@ -2884,7 +3111,7 @@ include:
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/53903) in GitLab 11.7.
To include files from another private project under the same GitLab instance,
-use `include:file`. This file is referenced using full paths relative to the
+use `include:file`. This file is referenced using full paths relative to the
root directory (`/`). For example:
```yaml
@@ -3982,6 +4209,6 @@ but commented out to help encourage others to add to it in the future. -->
[ce-12909]: https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/12909
[ce-19232]: https://gitlab.com/gitlab-org/gitlab-foss/issues/19232
[environment]: ../environments.md "CI/CD environments"
-[schedules]: ../../user/project/pipelines/schedules.md "Pipelines schedules"
+[schedules]: ../pipelines/schedules.md "Pipelines schedules"
[variables]: ../variables/README.md "CI/CD variables"
[push-option]: https://git-scm.com/docs/git-push#Documentation/git-push.txt--oltoptiongt
diff --git a/doc/development/README.md b/doc/development/README.md
index 1e5e1cdce5f..bc5f50b0499 100644
--- a/doc/development/README.md
+++ b/doc/development/README.md
@@ -9,8 +9,20 @@ description: 'Learn how to contribute to GitLab.'
- Set up GitLab's development environment with [GitLab Development Kit (GDK)](https://gitlab.com/gitlab-org/gitlab-development-kit/blob/master/doc/howto/README.md)
- [GitLab contributing guide](contributing/index.md)
- - [Issues workflow](contributing/issue_workflow.md) (issue tracker guidelines, triaging, labels, feature proposals, issue weight, regression issues, technical and UX debt)
- - [Merge requests workflow](contributing/merge_request_workflow.md) (merge request guidelines, contribution acceptance criteria, definition of done, dependencies)
+ - [Issues workflow](contributing/issue_workflow.md). For information on:
+ - Issue tracker guidelines.
+ - Triaging.
+ - Labels.
+ - Feature proposals.
+ - Issue weight.
+ - Regression issues.
+ - Technical or UX debt.
+ - [Merge requests workflow](contributing/merge_request_workflow.md). For
+ information on:
+ - Merge request guidelines.
+ - Contribution acceptance criteria.
+ - Definition of done.
+ - Dependencies.
- [Style guides](contributing/style_guides.md)
- [Implement design & UI elements](contributing/design.md)
- [GitLab Architecture Overview](architecture.md)
@@ -22,6 +34,7 @@ description: 'Learn how to contribute to GitLab.'
- [Code review guidelines](code_review.md) for reviewing code and having code reviewed
- [Database review guidelines](database_review.md) for reviewing database-related changes and complex SQL queries, and having them reviewed
+- [Secure coding guidelines](https://gitlab.com/gitlab-com/gl-security/security-guidelines)
- [Pipelines for the GitLab project](pipelines.md)
Complementary reads:
@@ -31,7 +44,7 @@ Complementary reads:
- [Guidelines for implementing Enterprise Edition features](ee_features.md)
- [Danger bot](dangerbot.md)
- [Generate a changelog entry with `bin/changelog`](changelog.md)
-- [Requesting access to Chatops on GitLab.com](chatops_on_gitlabcom.md#requesting-access) (for GitLabbers)
+- [Requesting access to Chatops on GitLab.com](chatops_on_gitlabcom.md#requesting-access) (for GitLab team members)
## UX and Frontend guides
@@ -84,6 +97,7 @@ Complementary reads:
- [Issue types vs first-class types](issue_types.md)
- [Application limits](application_limits.md)
- [Redis guidelines](redis.md)
+- [Rails initializers](rails_initializers.md)
## Performance guides
@@ -131,6 +145,7 @@ Complementary reads:
- [Hash indexes](hash_indexes.md)
- [Storing SHA1 hashes as binary](sha1_as_binary.md)
- [Iterating tables in batches](iterating_tables_in_batches.md)
+- [Insert into tables in batches](insert_into_tables_in_batches.md)
- [Ordering table columns](ordering_table_columns.md)
- [Verifying database capabilities](verifying_database_capabilities.md)
- [Database Debugging and Troubleshooting](database_debugging.md)
@@ -148,6 +163,7 @@ Complementary reads:
- [Jira Connect app](integrations/jira_connect.md)
- [Security Scanners](integrations/secure.md)
+- [Secure Partner Integration](integrations/secure_partner_integration.md)
## Testing guides
@@ -166,11 +182,11 @@ Complementary reads:
- [Externalization](i18n/externalization.md)
- [Translation](i18n/translation.md)
-## Event tracking guides
+## Telemetry guides
-- [Introduction](event_tracking/index.md)
-- [Frontend tracking guide](event_tracking/frontend.md)
-- [Backend tracking guide](event_tracking/backend.md)
+- [Introduction](../telemetry/index.md)
+- [Frontend tracking guide](../telemetry/frontend.md)
+- [Backend tracking guide](../telemetry/backend.md)
## Experiment Guide
@@ -199,6 +215,7 @@ Complementary reads:
## Other Development guides
- [Defining relations between files using projections](projections.md)
+- [Reference processing](./reference_processing.md)
## Other GitLab Development Kit (GDK) guides
diff --git a/doc/development/adding_database_indexes.md b/doc/development/adding_database_indexes.md
index 3ddb15fa290..01b621b6631 100644
--- a/doc/development/adding_database_indexes.md
+++ b/doc/development/adding_database_indexes.md
@@ -73,7 +73,7 @@ especially the case for small tables.
If a table is expected to grow in size and you expect your query has to filter
out a lot of rows you may want to consider adding an index. If the table size is
-very small (e.g. only a handful of rows) or any existing indexes filter out
+very small (for example, fewer than `1,000` records) or any existing indexes filter out
enough rows you may _not_ want to add a new index.
## Maintenance Overhead
diff --git a/doc/development/api_graphql_styleguide.md b/doc/development/api_graphql_styleguide.md
index e8aa33b31a7..3002289f6a6 100644
--- a/doc/development/api_graphql_styleguide.md
+++ b/doc/development/api_graphql_styleguide.md
@@ -21,6 +21,12 @@ and in [PDF](https://gitlab.com/gitlab-org/create-stage/uploads/8e78ea7f326b2ef6
Everything covered in this deep dive was accurate as of GitLab 11.9, and while specific
details may have changed since then, it should still serve as a good introduction.
+## GraphiQL
+
+GraphiQL is an interactive GraphQL API explorer where you can play around with existing queries.
+You can access it in any GitLab environment on `https://<your-gitlab-site.com>/-/graphql-explorer`.
+For example, the one for [GitLab.com](https://gitlab.com/-/graphql-explorer).
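+
+For example, a simple query like the following (the project path is only an illustration) can
+be pasted into GraphiQL and executed:
+
+```graphql
+{
+  project(fullPath: "gitlab-org/gitlab") {
+    name
+    description
+  }
+}
+```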
+
## Authentication
Authentication happens through the `GraphqlController`, right now this
@@ -76,6 +82,28 @@ a new presenter specifically for GraphQL.
The presenter is initialized using the object resolved by a field, and
the context.
+### Nullable fields
+
+GraphQL allows fields to be "nullable" or "non-nullable". The former means
+that `null` may be returned instead of a value of the specified type. **In
+general**, you should prefer using nullable fields to non-nullable ones, for
+the following reasons:
+
+- It's common for data to switch from required to not-required, and back again
+- Even when there is no prospect of a field becoming optional, it may not be **available** at query time
+ - For instance, the `content` of a blob may need to be looked up from Gitaly
+ - If the `content` is nullable, we can return a **partial** response, instead of failing the whole query
+- Changing from a non-nullable field to a nullable field is difficult with a versionless schema
+
+Non-nullable fields should only be used when a field is required, very unlikely
+to become optional in the future, and very easy to calculate. An example would
+be `id` fields.
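+
+As an illustration only (the field name and description are hypothetical), a nullable field
+declaration might look like:
+
+```ruby
+# `null: true` lets the API return a partial response when the content
+# cannot be loaded, instead of failing the whole query.
+field :content, type: GraphQL::STRING_TYPE,
+      null: true,
+      description: 'Content of the blob, if available'
+```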
+
+Further reading:
+
+- [GraphQL Best Practices Guide](https://graphql.org/learn/best-practices/#nullability)
+- [Using nullability in GraphQL](https://blog.apollographql.com/using-nullability-in-graphql-2254f84c4ed7)
+
### Exposing Global IDs
When exposing an `ID` field on a type, we will by default try to
@@ -101,7 +129,7 @@ pagination models.
To expose a collection of resources we can use a connection type. This wraps the array with default pagination fields. For example a query for project-pipelines could look like this:
-```
+```graphql
query($project_path: ID!) {
project(fullPath: $project_path) {
pipelines(first: 2) {
@@ -159,7 +187,7 @@ look like this:
To get the next page, the cursor of the last known element could be
passed:
-```
+```graphql
query($project_path: ID!) {
project(fullPath: $project_path) {
pipelines(first: 2, after: "Njc=") {
@@ -239,7 +267,7 @@ the field depending on if the feature has been enabled or not.
GraphQL feature flags use the common
[GitLab feature flag](../development/feature_flags.md) system, and can be added to a
-field using the `feature_key` property.
+field using the `feature_flag` property.
For example:
@@ -247,11 +275,11 @@ For example:
field :test_field, type: GraphQL::STRING_TYPE,
null: false,
description: 'Some test field',
- feature_key: :some_feature_key
+ feature_flag: :some_feature_flag
```
In the above example, the `test_field` field will only be returned if
-the `some_feature_key` feature flag is enabled.
+the `some_feature_flag` feature flag is enabled.
If the feature flag is not enabled, an error will be returned saying the field does not exist.
@@ -297,7 +325,6 @@ module Types
value 'CLOSED', value: 'closed', description: 'An closed Epic'
end
end
-
```
## Descriptions
@@ -314,7 +341,7 @@ field :id, GraphQL::ID_TYPE, description: 'ID of the resource'
Descriptions of fields and arguments are viewable to users through:
-- The [GraphiQL explorer](../api/graphql/#graphiql).
+- The [GraphiQL explorer](#graphiql).
- The [static GraphQL API reference](../api/graphql/#reference).
### Description styleguide
@@ -615,6 +642,37 @@ found, we should raise a
`Gitlab::Graphql::Errors::ResourceNotAvailable` error. Which will be
correctly rendered to the clients.
+## Validating arguments
+
+For validations of single arguments, use the
+[`prepare` option](https://github.com/rmosolgo/graphql-ruby/blob/master/guides/fields/arguments.md)
+as normal.
+
+Sometimes a mutation or resolver may accept a number of optional
+arguments, but you may still want to validate that at least one of the optional
+arguments was given. In this situation, consider using the `#ready?`
+method within your mutation or resolver to provide the validation. The
+`#ready?` method will be called before any work is done within the
+`#resolve` method.
+
+Example:
+
+```ruby
+def ready?(**args)
+ if args.values_at(:body, :position).compact.blank?
+ raise Gitlab::Graphql::Errors::ArgumentError,
+ 'body or position arguments are required'
+ end
+
+  # Always remember to call `super`
+ super(args)
+end
+```
+
+In the future this may be able to be done using `InputUnions` if
+[this RFC](https://github.com/graphql/graphql-spec/blob/master/rfcs/InputUnion.md)
+is merged.
+
## GitLab's custom scalars
### `Types::TimeType`
diff --git a/doc/development/api_styleguide.md b/doc/development/api_styleguide.md
index 2510358b4d5..bd5ea6ac506 100644
--- a/doc/development/api_styleguide.md
+++ b/doc/development/api_styleguide.md
@@ -118,3 +118,11 @@ different components are making use of.
[Entity]: https://gitlab.com/gitlab-org/gitlab/blob/master/lib/api/entities.rb
[validation, and coercion of the parameters]: https://github.com/ruby-grape/grape#parameter-validation-and-coercion
[installing GitLab under a relative URL]: https://docs.gitlab.com/ee/install/relative_url.html
+
+## Testing
+
+When writing tests for new API endpoints, consider using a schema [fixture](./testing_guide/best_practices.md#fixtures) located in `/spec/fixtures/api/schemas`. You can `expect` a response to match a given schema:
+
+```ruby
+expect(response).to match_response_schema('merge_requests')
+```
diff --git a/doc/development/application_limits.md b/doc/development/application_limits.md
index c3bfe20dd87..f50730634b7 100644
--- a/doc/development/application_limits.md
+++ b/doc/development/application_limits.md
@@ -15,10 +15,6 @@ limits](https://about.gitlab.com/handbook/product/#introducing-application-limit
## Development
-The merge request to [configure maximum number of webhooks per
-project](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/20730/diffs) is a
-good example about configuring application limits.
-
### Insert database plan limits
In the `plan_limits` table, you have to create a new column and insert the
@@ -26,22 +22,38 @@ limit values. It's recommended to create separate migration script files.
1. Add new column to the `plan_limits` table with non-null default value 0, eg:
- ```ruby
- add_column(:plan_limits, :project_hooks, :integer, default: 0, null: false)
- ```
+ ```ruby
+ add_column(:plan_limits, :project_hooks, :integer, default: 0, null: false)
+ ```
- NOTE: **Note:** Plan limits entries set to `0` mean that limits are not
- enabled.
+ NOTE: **Note:** Plan limits entries set to `0` mean that limits are not
+ enabled.
1. Insert plan limits values into the database using
`create_or_update_plan_limit` migration helper, eg:
- ```ruby
- create_or_update_plan_limit('project_hooks', 'free', 10)
- create_or_update_plan_limit('project_hooks', 'bronze', 20)
- create_or_update_plan_limit('project_hooks', 'silver', 30)
- create_or_update_plan_limit('project_hooks', 'gold', 100)
- ```
+ ```ruby
+ def up
+ return unless Gitlab.com?
+
+ create_or_update_plan_limit('project_hooks', 'free', 100)
+ create_or_update_plan_limit('project_hooks', 'bronze', 100)
+ create_or_update_plan_limit('project_hooks', 'silver', 100)
+ create_or_update_plan_limit('project_hooks', 'gold', 100)
+ end
+
+ def down
+ return unless Gitlab.com?
+
+ create_or_update_plan_limit('project_hooks', 'free', 0)
+ create_or_update_plan_limit('project_hooks', 'bronze', 0)
+ create_or_update_plan_limit('project_hooks', 'silver', 0)
+ create_or_update_plan_limit('project_hooks', 'gold', 0)
+ end
+ ```
+
+NOTE: **Note:** Some plans exist only on GitLab.com. You can check if the
+migration is running on GitLab.com with `Gitlab.com?`.
### Plan limits validation
@@ -78,12 +90,37 @@ can be used to validate that a model does not exceed the limits. It ensures
that the count of the records for the current model does not exceed the defined
limit.
-NOTE: **Note:** The name (pluralized) of the plan limit introduced in the
-database (`project_hooks`) must correspond to the name of the model we are
-validating (`ProjectHook`).
+NOTE: **Note:** You must specify the limit scope of the object being validated
+and the limit name if it's different from the pluralized model name.
```ruby
class ProjectHook
include Limitable
+
+ self.limit_name = 'project_hooks' # Optional as ProjectHook corresponds with project_hooks
+ self.limit_scope = :project
end
```
+
+To test the model, you can include the shared examples.
+
+```ruby
+it_behaves_like 'includes Limitable concern' do
+ subject { build(:project_hook, project: create(:project)) }
+end
+```
+
+### Subscription Plans
+
+Self-managed:
+
+- `default` - Everyone
+
+GitLab.com:
+
+- `free` - Everyone
+- `bronze` - Namespaces with a Bronze subscription
+- `silver` - Namespaces with a Silver subscription
+- `gold` - Namespaces with a Gold subscription
+
+NOTE: **Note:** The test environment doesn't have any plans.
diff --git a/doc/development/architecture.md b/doc/development/architecture.md
index c5ac8c040f8..c75de8e8970 100644
--- a/doc/development/architecture.md
+++ b/doc/development/architecture.md
@@ -288,7 +288,7 @@ GitLab CI is the open-source continuous integration service included with GitLab
- Configuration: [Omnibus][grafana-omnibus], [Charts][grafana-charts]
- Layer: Monitoring
-Grafana is an open source, feature rich metrics dashboard and graph editor for Graphite, Elasticsearch, OpenTSDB, Prometheus and InfluxDB.
+Grafana is an open source, feature rich metrics dashboard and graph editor for Graphite, Elasticsearch, OpenTSDB, Prometheus, and InfluxDB.
#### Jaeger
@@ -321,7 +321,7 @@ Mattermost is an open source, private cloud, Slack-alternative from <https://mat
- Configuration: [Omnibus][minio-omnibus], [Charts][minio-charts], [GDK][minio-gdk]
- Layer: Core Service (Data)
-MinIO is an object storage server released under Apache License v2.0. It is compatible with Amazon S3 cloud storage service. It is best suited for storing unstructured data such as photos, videos, log files, backups and container / VM images. Size of an object can range from a few KBs to a maximum of 5TB.
+MinIO is an object storage server released under Apache License v2.0. It is compatible with Amazon S3 cloud storage service. It is best suited for storing unstructured data such as photos, videos, log files, backups, and container / VM images. Size of an object can range from a few KBs to a maximum of 5TB.
#### NGINX
@@ -542,28 +542,28 @@ See the README for more information.
The GitLab init script starts and stops Unicorn and Sidekiq:
-```
+```plaintext
/etc/init.d/gitlab
Usage: service gitlab {start|stop|restart|reload|status}
```
Redis (key-value store/non-persistent database):
-```
+```plaintext
/etc/init.d/redis
Usage: /etc/init.d/redis {start|stop|status|restart|condrestart|try-restart}
```
SSH daemon:
-```
+```plaintext
/etc/init.d/sshd
Usage: /etc/init.d/sshd {start|stop|restart|reload|force-reload|condrestart|try-restart|status}
```
Web server (one of the following):
-```
+```plaintext
/etc/init.d/httpd
Usage: httpd {start|stop|restart|condrestart|try-restart|force-reload|reload|status|fullstatus|graceful|help|configtest}
@@ -573,7 +573,7 @@ Usage: nginx {start|stop|restart|reload|force-reload|status|configtest}
Persistent database:
-```
+```plaintext
$ /etc/init.d/postgresql
Usage: /etc/init.d/postgresql {start|stop|restart|reload|force-reload|status} [version ..]
```
@@ -626,7 +626,7 @@ GitLab Shell has a configuration file at `/home/git/gitlab-shell/config.yml`.
[GitLab](https://gitlab.com/gitlab-org/gitlab/tree/master) provides rake tasks with which you see version information and run a quick check on your configuration to ensure it is configured properly within the application. See [maintenance rake tasks](https://gitlab.com/gitlab-org/gitlab/blob/master/doc/raketasks/maintenance.md).
In a nutshell, do the following:
-```
+```shell
sudo -i -u git
cd gitlab
bundle exec rake gitlab:env:info RAILS_ENV=production
diff --git a/doc/development/background_migrations.md b/doc/development/background_migrations.md
index 02d31976481..c28008c94b8 100644
--- a/doc/development/background_migrations.md
+++ b/doc/development/background_migrations.md
@@ -72,20 +72,21 @@ migration classes must be defined in the namespace
## Scheduling
-Scheduling a background migration should be done in a post-deployment migration.
+Scheduling a background migration should be done in a post-deployment
+migration that includes `Gitlab::Database::MigrationHelpers`.
To do so, simply use the following code while
replacing the class name and arguments with whatever values are necessary for
your migration:
```ruby
-BackgroundMigrationWorker.perform_async('BackgroundMigrationClassName', [arg1, arg2, ...])
+migrate_async('BackgroundMigrationClassName', [arg1, arg2, ...])
```
Usually it's better to enqueue jobs in bulk, for this you can use
-`BackgroundMigrationWorker.bulk_perform_async`:
+`bulk_migrate_async`:
```ruby
-BackgroundMigrationWorker.bulk_perform_async(
+bulk_migrate_async(
[['BackgroundMigrationClassName', [1]],
['BackgroundMigrationClassName', [2]]]
)
@@ -105,7 +106,7 @@ If you would like to schedule jobs in bulk with a delay, you can use
jobs = [['BackgroundMigrationClassName', [1]],
['BackgroundMigrationClassName', [2]]]
-BackgroundMigrationWorker.bulk_perform_in(5.minutes, jobs)
+bulk_migrate_in(5.minutes, jobs)
```
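+
+Putting this together, a minimal post-deployment migration that schedules jobs
+in bulk could look like the following sketch (the class names, job arguments,
+and delay are illustrative):
+
+```ruby
+# frozen_string_literal: true
+
+class ScheduleExampleBackgroundMigration < ActiveRecord::Migration[5.2]
+  include Gitlab::Database::MigrationHelpers
+
+  DOWNTIME = false
+
+  def up
+    jobs = [['ExampleBackgroundMigration', [1, 1000]],
+            ['ExampleBackgroundMigration', [1001, 2000]]]
+
+    bulk_migrate_in(5.minutes, jobs)
+  end
+
+  def down
+    # No-op: jobs that are already scheduled are not removed on rollback.
+  end
+end
+```
+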
### Rescheduling background migrations
@@ -300,12 +301,13 @@ It is required to write tests for:
- The background migration itself.
- A cleanup migration.
-You can use the `:migration` RSpec tag when testing the migrations.
+The `:migration` and `schema: :latest` RSpec tags are automatically set for
+background migration specs.
See the
[Testing Rails migrations](testing_guide/testing_migrations_guide.md#testing-a-non-activerecordmigration-class)
style guide.
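+
+For orientation, a background migration spec skeleton might look like the
+following sketch (the migration class name and `perform` arguments are
+illustrative):
+
+```ruby
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::ExampleBackgroundMigration do
+  it 'runs for the given ID range without raising' do
+    expect { described_class.new.perform(1, 100) }.not_to raise_error
+  end
+end
+```
+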
-When you do that, keep in mind that `before` and `after` RSpec hooks are going
+Keep in mind that `before` and `after` RSpec hooks are going
to migrate your database down and up, which can result in other background
migrations being called. That means that using `spy` test doubles with
`have_received` is encouraged, instead of using regular test doubles, because
@@ -329,6 +331,6 @@ for more details.
or ask someone to measure on production).
[migrations-readme]: https://gitlab.com/gitlab-org/gitlab/blob/master/spec/migrations/README.md
-[issue-rspec-hooks]: https://gitlab.com/gitlab-org/gitlab-foss/issues/35351
+[issue-rspec-hooks]: https://gitlab.com/gitlab-org/gitlab/issues/18839
[reliable-sidekiq]: https://gitlab.com/gitlab-org/gitlab-foss/issues/36791
[import-export]: ../user/project/settings/import_export.md
diff --git a/doc/development/chaos_endpoints.md b/doc/development/chaos_endpoints.md
index 2e55f19cd91..26ff3d2def7 100644
--- a/doc/development/chaos_endpoints.md
+++ b/doc/development/chaos_endpoints.md
@@ -47,7 +47,7 @@ To simulate a memory leak in your application, use the `/-/chaos/leakmem` endpoi
NOTE: **Note:**
The memory is not retained after the request finishes. Once the request has completed, the Ruby garbage collector will attempt to recover the memory.
-```
+```plaintext
GET /-/chaos/leakmem
GET /-/chaos/leakmem?memory_mb=1024
GET /-/chaos/leakmem?memory_mb=1024&duration_s=50
@@ -72,7 +72,7 @@ This endpoint attempts to fully utilise a single core, at 100%, for the given pe
Depending on your rack server setup, your request may timeout after a predetermined period (normally 60 seconds).
If you're using Unicorn, this is done by killing the worker process.
-```
+```plaintext
GET /-/chaos/cpu_spin
GET /-/chaos/cpu_spin?duration_s=50
GET /-/chaos/cpu_spin?duration_s=50&async=true
@@ -96,7 +96,7 @@ This endpoint can be used to model yielding execution to another threads when ru
Depending on your rack server setup, your request may timeout after a predetermined period (normally 60 seconds).
If you're using Unicorn, this is done by killing the worker process.
-```
+```plaintext
GET /-/chaos/db_spin
GET /-/chaos/db_spin?duration_s=50
GET /-/chaos/db_spin?duration_s=50&async=true
@@ -119,7 +119,7 @@ This endpoint is similar to the CPU Spin endpoint but simulates off-processor ac
As with the CPU Spin endpoint, this may lead to your request timing out if duration_s exceeds the configured limit.
-```
+```plaintext
GET /-/chaos/sleep
GET /-/chaos/sleep?duration_s=50
GET /-/chaos/sleep?duration_s=50&async=true
@@ -142,7 +142,7 @@ This endpoint will simulate the unexpected death of a worker process using a `ki
NOTE: **Note:**
Since this endpoint uses the `KILL` signal, the worker is not given a chance to cleanup or shutdown.
-```
+```plaintext
GET /-/chaos/kill
GET /-/chaos/kill?async=true
```
diff --git a/doc/development/code_comments.md b/doc/development/code_comments.md
index c1d58c1bd4b..a71e2b3c792 100644
--- a/doc/development/code_comments.md
+++ b/doc/development/code_comments.md
@@ -7,7 +7,7 @@ check if a comment is still relevant and what needs to be done to address it.
Examples:
-```rb
+```ruby
# Deprecated scope until code_owner column has been migrated to rule_type.
# To be removed with https://gitlab.com/gitlab-org/gitlab/issues/11834.
scope :code_owner, -> { where(code_owner: true).or(where(rule_type: :code_owner)) }
diff --git a/doc/development/code_review.md b/doc/development/code_review.md
index 41ebcc7f2d0..471dcba4c2a 100644
--- a/doc/development/code_review.md
+++ b/doc/development/code_review.md
@@ -17,7 +17,7 @@ uncovered edge cases. The reviewer can be from a different team, but it is
recommended to pick someone who knows the domain well. You can read more about the
importance of involving reviewer(s) in the section on the responsibility of the author below.
-If you need some guidance (e.g. it's your first merge request), feel free to ask
+If you need some guidance (for example, it's your first merge request), feel free to ask
one of the [Merge request coaches](https://about.gitlab.com/company/team/).
If you need assistance with security scans or comments, feel free to include the
@@ -89,7 +89,7 @@ that it does so in the most appropriate way, that it satisfies all requirements,
and that there are no remaining bugs, logical problems, uncovered edge cases,
or known vulnerabilities. The best way to do this, and to avoid unnecessary
back-and-forth with reviewers, is to perform a self-review of your own merge
-request, following the [Code Review](#reviewing-code) guidelines.
+request, following the [Code Review](#reviewing-a-merge-request) guidelines.
To reach the required level of confidence in their solution, an author is expected
to involve other people in the investigation and implementation processes as
@@ -129,7 +129,7 @@ This
### The responsibility of the reviewer
-[Review the merge request](#reviewing-code) thoroughly. When you are confident
+[Review the merge request](#reviewing-a-merge-request) thoroughly. When you are confident
that it meets all requirements, you should:
- Click the Approve button.
@@ -148,7 +148,7 @@ architecture, code organization, separation of concerns, tests, DRYness,
consistency, and readability.
Since a maintainer's job only depends on their knowledge of the overall GitLab
-codebase, and not that of any specific domain, they can review, approve and merge
+codebase, and not that of any specific domain, they can review, approve, and merge
merge requests from any team and in any product area.
In fact, authors are encouraged to get their merge requests merged by maintainers
@@ -211,7 +211,7 @@ Instead these should be sent to the [Release Manager](https://about.gitlab.com/c
mentioning them; this will ensure they see it if their notification level is
set to "mentioned" and other people will understand they don't have to respond.
-### Having your code reviewed
+### Having your merge request reviewed
Please keep in mind that code review is a process that can take multiple
iterations, and reviewers may spot things later that they may not have seen the
@@ -244,52 +244,17 @@ first time.
If you want to have your merge request reviewed, you can assign it to any reviewer. The list of reviewers can be found on [Engineering projects](https://about.gitlab.com/handbook/engineering/projects/) page.
-You can also use `ready for review` label. That means that your merge request is ready to be reviewed and any reviewer can pick it. It is recommended to use that label only if there isn't time pressure and make sure the merge request is assigned to a reviewer.
+You can also use the `workflow::ready for review` label. That means that your merge request is ready to be reviewed and any reviewer can pick it up. It is recommended to use that label only if there isn't time pressure, and to make sure the merge request is assigned to a reviewer.
When your merge request has been reviewed and can be passed to a maintainer, you can either pick a specific maintainer or use a label `ready for merge`.
It is the responsibility of the author of a merge request that the merge request is reviewed. If it stays in `ready for review` state too long, it is recommended to assign it to a specific reviewer.
-### List of merge requests ready for review
+#### List of merge requests ready for review
-Developers who have capacity can regularly check the list of [merge requests to review](https://gitlab.com/groups/gitlab-org/-/merge_requests?scope=all&utf8=%E2%9C%93&state=opened&label_name%5B%5D=ready%20for%20review) and assign any merge request they want to review.
+Developers who have capacity can regularly check the list of [merge requests to review](https://gitlab.com/groups/gitlab-org/-/merge_requests?state=opened&label_name%5B%5D=workflow%3A%3Aready%20for%20review) and assign any merge request they want to review.
-### Review turnaround time
-
-Since [unblocking others is always a top priority](https://about.gitlab.com/handbook/values/#global-optimization),
-reviewers are expected to review assigned merge requests in a timely manner,
-even when this may negatively impact their other tasks and priorities.
-
-Doing so allows everyone involved in the merge request to iterate faster as the
-context is fresh in memory, and improves contributors' experience significantly.
-
-#### Review-response SLO
-
-To ensure swift feedback to ready-to-review code, we maintain a `Review-response` Service-level Objective (SLO). The SLO is defined as:
-
-> - review-response SLO = (time when first review response is provided) - (time MR is assigned to reviewer) < 2 business days
-
-If you don't think you'll be able to review a merge request within the `Review-response` SLO
-time frame, let the author know as soon as possible and try to help them find
-another reviewer or maintainer who will be able to, so that they can be unblocked
-and get on with their work quickly.
-
-If you think you are at capacity and are unable to accept any more reviews until
-some have been completed, communicate this through your GitLab status by setting
-the `:red_circle:` emoji and mentioning that you are at capacity in the status
-text. This will guide contributors to pick a different reviewer, helping us to
-meet the SLO.
-
-Of course, if you are out of office and have
-[communicated](https://about.gitlab.com/handbook/paid-time-off/#communicating-your-time-off)
-this through your GitLab.com Status, authors are expected to realize this and
-find a different reviewer themselves.
-
-When a merge request author has been blocked for longer than
-the `Review-response` SLO, they are free to remind the reviewer through Slack or assign
-another reviewer.
-
-### Reviewing code
+### Reviewing a merge request
Understand why the change is necessary (fixes a bug, improves the user
experience, refactors the existing code). Then:
@@ -310,26 +275,47 @@ experience, refactors the existing code). Then:
"LGTM :thumbsup:", or "Just a couple things to address."
- Assign the merge request to the author if changes are required following your
review.
-- Set the milestone before merging a merge request.
-- Ensure the target branch is not too far behind master. If
-[master is red](https://about.gitlab.com/handbook/engineering/workflow/#broken-master),
-it should be no more than 100 commits behind.
+
+### Merging a merge request
+
+Before taking the decision to merge:
+
+- Set the milestone.
- Consider warnings and errors from danger bot, code quality, and other reports.
-Unless a strong case can be made for the violation, these should be resolved
-before merge.
-- Ensure a passing CI pipeline or if [master is broken](https://about.gitlab.com/handbook/engineering/workflow/#broken-master), post a comment mentioning the failure happens in master with a
-link to the ~"master:broken" issue.
-- Avoid accepting a merge request before the job succeeds. Of course, "Merge
- When Pipeline Succeeds" (MWPS) is fine.
-- If you set the MR to "Merge When Pipeline Succeeds", you should take over
- subsequent revisions for anything that would be spotted after that.
+ Unless a strong case can be made for the violation, these should be resolved
+  before merge. A comment must be posted if the MR is merged with any failed job.
+
+When ready to merge:
+
- Consider using the [Squash and
- merge][squash-and-merge] feature when the merge request has a lot of commits.
+ merge](../user/project/merge_requests/squash_and_merge.md#squash-and-merge)
+ feature when the merge request has a lot of commits.
When merging code a maintainer should only use the squash feature if the
author has already set this option or if the merge request clearly contains a
messy commit history that is intended to be squashed.
+- **Start a new merge request pipeline with the `Run Pipeline` button in the merge
+ request's "Pipelines" tab, and enable "Merge When Pipeline Succeeds" (MWPS).** Note that:
+ - If the **latest [Pipeline for Merged Results](../ci/merge_request_pipelines/pipelines_for_merged_results/#pipelines-for-merged-results-premium)** finished less than 2 hours ago, you
+ might merge without starting a new pipeline as the merge request is close
+ enough to `master`.
+ - If the merge request is from a fork, check how far behind `master` the
+ source branch is. If it's more than 100 commits behind, ask the author to
+ rebase it before merging.
+ - If [master is broken](https://about.gitlab.com/handbook/engineering/workflow/#broken-master),
+ in addition to the two above rules, check that any failure also happens
+ in `master` and post a link to the ~"master:broken" issue before clicking the
+ red "Merge" button.
+- When you set the MR to "Merge When Pipeline Succeeds", you should take over
+ subsequent revisions for anything that would be spotted after that.
-[squash-and-merge]: ../user/project/merge_requests/squash_and_merge.md#squash-and-merge
+NOTE: **Note:**
+Thanks to "Pipeline for Merged Results", authors won't have to rebase their
+branch as frequently anymore (only when there are conflicts) since the Merge
+Results Pipeline will already incorporate the latest changes from `master`.
+This results in faster review/merge cycles since maintainers don't have to ask
+for a final rebase: instead, they only have to start an MR pipeline and set MWPS.
+This step brings us very close to the actual Merge Trains feature by testing the
+Merge Results against the latest `master` at the time of the pipeline creation.
### The right balance
@@ -348,7 +334,7 @@ reviewee.
reviewer before doing it, but have the courage to do it when you believe it is
important.
- In the interest of [Iteration](https://about.gitlab.com/handbook/values/#iteration),
- if, as a reviewer, your suggestions are non-blocking changes or personal preference
+ if your review suggestions are non-blocking changes, or personal preference
(not a documented or agreed requirement), consider approving the merge request
before passing it back to the author. This allows them to implement your suggestions
if they agree, or allows them to pass it onto the
@@ -411,6 +397,41 @@ Enterprise Edition instance. This has some implications:
1. **Filesystem access** can be slow, so try to avoid
[shared files](shared_files.md) when an alternative solution is available.
+### Review turnaround time
+
+Since [unblocking others is always a top priority](https://about.gitlab.com/handbook/values/#global-optimization),
+reviewers are expected to review assigned merge requests in a timely manner,
+even when this may negatively impact their other tasks and priorities.
+
+Doing so allows everyone involved in the merge request to iterate faster as the
+context is fresh in memory, and improves contributors' experience significantly.
+
+#### Review-response SLO
+
+To ensure swift feedback to ready-to-review code, we maintain a `Review-response` Service-level Objective (SLO). The SLO is defined as:
+
+> - review-response SLO = (time when first review response is provided) - (time MR is assigned to reviewer) < 2 business days
+
+If you don't think you'll be able to review a merge request within the `Review-response` SLO
+time frame, let the author know as soon as possible and try to help them find
+another reviewer or maintainer who will be able to, so that they can be unblocked
+and get on with their work quickly.
+
+If you think you are at capacity and are unable to accept any more reviews until
+some have been completed, communicate this through your GitLab status by setting
+the `:red_circle:` emoji and mentioning that you are at capacity in the status
+text. This will guide contributors to pick a different reviewer, helping us to
+meet the SLO.
+
+Of course, if you are out of office and have
+[communicated](https://about.gitlab.com/handbook/paid-time-off/#communicating-your-time-off)
+this through your GitLab.com Status, authors are expected to realize this and
+find a different reviewer themselves.
+
+When a merge request author has been blocked for longer than
+the `Review-response` SLO, they are free to remind the reviewer through Slack or assign
+another reviewer.
+
## Examples
How code reviews are conducted can surprise new contributors. Here are some examples of code reviews that should help to orient you as to what to expect.
@@ -434,7 +455,7 @@ helped us with overall code quality (using delegation, `&.` those
types of things), and making the code more robust.
**["Support multiple assignees for merge requests"](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/10161)**:
-A good example of collaboration on an MR touching multiple parts of the codebase. Nick pointed out interesting edge cases, James Lopes also joined in raising concerns on import/export feature.
+A good example of collaboration on an MR touching multiple parts of the codebase. Nick pointed out interesting edge cases, James Lopes also joined in raising concerns on import/export feature.
### Credits
diff --git a/doc/development/contributing/design.md b/doc/development/contributing/design.md
index 8426db84aa4..352392931c0 100644
--- a/doc/development/contributing/design.md
+++ b/doc/development/contributing/design.md
@@ -9,7 +9,11 @@ To better understand the priority by which UX tackles issues, see the [UX sectio
Once an issue has been worked on and is ready for development, a UXer removes the ~"UX" label and applies the ~"UX ready" label to that issue.
-There is a special type label called ~"product discovery". It represents a discovery issue intended for UX, PM, FE, and BE to discuss the problem and potential solutions. The final output for this issue could be a doc of requirements, a design artifact, or even a prototype. The solution will be developed in a subsequent milestone.
+There is a special type label called ~"product discovery" intended for UX,
+PM, FE, and BE. It represents a discovery issue to discuss the problem and
+potential solutions. The final output for this issue could be a doc of
+requirements, a design artifact, or even a prototype. The solution will be
+developed in a subsequent milestone.
~"product discovery" issues are like any other issue and should contain a milestone label, ~"Deliverable" or ~"Stretch", when scheduled in the current milestone.
@@ -17,7 +21,7 @@ The initial issue should be about the problem we are solving. If a separate [pro
is needed for additional research and design work, it will be created by a PM or UX person.
Assign the ~UX, ~"product discovery" and ~"Deliverable" labels, add a milestone and
use a title that makes it clear that the scheduled issue is product discovery
-(e.g. `Product discovery for XYZ`).
+(for example, `Product discovery for XYZ`).
In order to complete a product discovery issue in a release, you must complete the following:
diff --git a/doc/development/contributing/issue_workflow.md b/doc/development/contributing/issue_workflow.md
index 7c2a7a6560e..94cf1c223dd 100644
--- a/doc/development/contributing/issue_workflow.md
+++ b/doc/development/contributing/issue_workflow.md
@@ -2,7 +2,7 @@
## Issue tracker guidelines
-**[Search the issue tracker](https://gitlab.com/gitlab-org/gitlab-foss/issues)** for similar entries before
+**[Search the issue tracker](https://gitlab.com/gitlab-org/gitlab/issues)** for similar entries before
submitting your own, there's a good chance somebody else had the same issue or
feature proposal. Show your support with an award emoji and/or join the
discussion.
@@ -35,12 +35,12 @@ project.
## Labels
To allow for asynchronous issue handling, we use [milestones](https://gitlab.com/groups/gitlab-org/-/milestones)
-and [labels](https://gitlab.com/gitlab-org/gitlab-foss/-/labels). Leads and product managers handle most of the
+and [labels](https://gitlab.com/gitlab-org/gitlab/-/labels). Leads and product managers handle most of the
scheduling into milestones. Labelling is a task for everyone.
Most issues will have labels for at least one of the following:
-- Type: `~feature`, `~bug`, `~backstage`, etc.
+- Type: `~feature`, `~bug`, `~backstage`, `~documentation`, etc.
- Stage: `~"devops::plan"`, `~"devops::create"`, etc.
- Group: `~"group::source code"`, `~"group::knowledge"`, `~"group::editor"`, etc.
- Category: `~"Category:Code Analytics"`, `~"Category:DevOps Score"`, `~"Category:Templates"`, etc.
@@ -53,10 +53,10 @@ Most issues will have labels for at least one of the following:
- Severity: ~`S1`, `~S2`, `~S3`, `~S4`
All labels, their meaning and priority are defined on the
-[labels page](https://gitlab.com/gitlab-org/gitlab-foss/-/labels).
+[labels page](https://gitlab.com/gitlab-org/gitlab/-/labels).
If you come across an issue that has none of these, and you're allowed to set
-labels, you can _always_ add the team and type, and often also the subject.
+labels, you can _always_ add the type, stage, group, and often the category/feature labels.
### Type labels
@@ -70,16 +70,19 @@ The current type labels are:
- ~backstage
- ~"support request"
- ~meta
+- ~documentation
A number of type labels have a priority assigned to them, which automatically
makes them float to the top, depending on their importance.
Type labels are always lowercase, and can have any color, besides blue (which is
-already reserved for subject labels).
+already reserved for category labels).
The descriptions on the [labels page](https://gitlab.com/groups/gitlab-org/-/labels)
explain what falls under each type label.
+The GitLab handbook documents [when something is a bug and when it is a feature request](https://about.gitlab.com/handbook/product/product-management/process/feature-or-bug.html).
+
### Facet labels
Sometimes it's useful to refine the type of an issue. In those cases, you can
@@ -89,7 +92,7 @@ Following is a non-exhaustive list of facet labels:
- ~enhancement: This label can refine an issue that has the ~feature label.
- ~"master:broken": This label can refine an issue that has the ~bug label.
-- ~"master:flaky": This label can refine an issue that has the ~bug label.
+- ~"failure::flaky-test": This label can refine an issue that has the ~bug label.
- ~"technical debt": This label can refine an issue that has the ~backstage label.
- ~"static analysis": This label can refine an issue that has the ~backstage label.
- ~"ci-build": This label can refine an issue that has the ~backstage label.
@@ -137,7 +140,7 @@ their color is `#A8D695`.
with `_` replaced with a space.
For instance, the "Continuous Integration" group is represented by the
-~"group::continuous integration" label in the `gitlab-org` group since its key
+~"group::continuous integration" label in the `gitlab-org` group since its key
under `stages.manage.groups` is `continuous_integration`.
The current group labels can be found by [searching the labels list for `group::`](https://gitlab.com/groups/gitlab-org/-/labels?search=group::).
@@ -286,12 +289,12 @@ This label documents the planned timeline & urgency which is used to measure aga
Severity labels help us clearly communicate the impact of a ~bug on users.
There can be multiple facets of the impact. The below is a guideline.
-| Label | Meaning | Functionality | Affected Users | GitLab.com Availability | Performance Degradation |
-|-------|-------------------|-------------------------------------------------------|----------------------------------|----------------------------------------------------|------------------------------|
-| ~S1 | Blocker | Unusable feature with no workaround, user is blocked | Impacts 50% or more of users | Outage, Significant impact on all of GitLab.com | |
-| ~S2 | Critical Severity | Broken Feature, workaround too complex & unacceptable | Impacts between 25%-50% of users | Significant impact on large portions of GitLab.com | Degradation is guaranteed to occur in the near future |
-| ~S3 | Major Severity | Broken feature with an acceptable workaround | Impacts up to 25% of users | Limited impact on important portions of GitLab.com | Degradation is likely to occur in the near future |
-| ~S4 | Low Severity | Functionality inconvenience or cosmetic issue | Impacts less than 5% of users | Minor impact on GitLab.com | Degradation _may_ occur but it's not likely |
+| Label | Meaning | Functionality | Affected Users | GitLab.com Availability | Performance Degradation | API/Web Response time[^1] |
+|-------|-------------------|-------------------------------------------------------|----------------------------------|----------------------------------------------------|-------------------------------------------------------|----------------------------|
+| ~S1 | Blocker | Unusable feature with no workaround, user is blocked | Impacts 50% or more of users | Outage, Significant impact on all of GitLab.com | | Above 9000ms to timing out |
+| ~S2 | Critical Severity | Broken Feature, workaround too complex & unacceptable | Impacts between 25%-50% of users | Significant impact on large portions of GitLab.com | Degradation is guaranteed to occur in the near future | Between 2000ms and 9000ms |
+| ~S3 | Major Severity | Broken feature with an acceptable workaround | Impacts up to 25% of users | Limited impact on important portions of GitLab.com | Degradation is likely to occur in the near future | Between 1000ms and 2000ms |
+| ~S4 | Low Severity | Functionality inconvenience or cosmetic issue | Impacts less than 5% of users | Minor impact on GitLab.com | Degradation _may_ occur but it's not likely | Between 500ms and 1000ms |
If a bug seems to fall between two severity labels, assign it to the higher-severity label.
@@ -372,14 +375,11 @@ A recent example of this was the issue for
## Feature proposals
-To create a feature proposal for CE, open an issue on the
-[issue tracker of CE](https://gitlab.com/gitlab-org/gitlab-foss/issues).
-
-For feature proposals for EE, open an issue on the
-[issue tracker of EE](https://gitlab.com/gitlab-org/gitlab/issues).
+To create a feature proposal, open an issue on the
+[issue tracker](https://gitlab.com/gitlab-org/gitlab/issues).
In order to help track the feature proposals, we have created a
-[`feature`](https://gitlab.com/gitlab-org/gitlab-foss/issues?label_name=feature) label. For the time being, users that are not members
+[`feature`](https://gitlab.com/gitlab-org/gitlab/issues?label_name=feature) label. For the time being, users that are not members
of the project cannot add labels. You can instead ask one of the [core team](https://about.gitlab.com/community/core-team/)
members to add the label ~feature to the issue or add the following
code snippet right after your description in a new line: `~feature`.
@@ -441,7 +441,7 @@ addressed.
## Technical and UX debt
In order to track things that can be improved in GitLab's codebase,
-we use the ~"technical debt" label in [GitLab's issue tracker](https://gitlab.com/gitlab-org/gitlab-foss/issues).
+we use the ~"technical debt" label in [GitLab's issue tracker](https://gitlab.com/gitlab-org/gitlab/issues).
For missed user experience requirements, we use the ~"UX debt" label.
These labels should be added to issues that describe things that can be improved,
@@ -505,3 +505,8 @@ to be involved in some capacity when work begins on the follow-up issue.
---
[Return to Contributing documentation](index.md)
+
+[^1]: Our current response time standard is based on the TTFB P90 results of the
+ GitLab Performance Tool (GPT) being run against the 10k-user reference
+   environment. This run happens nightly and the results are output to the
+   [wiki on the GPT project](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest/10k).
diff --git a/doc/development/contributing/merge_request_workflow.md b/doc/development/contributing/merge_request_workflow.md
index cf71d436a15..460bb6d25df 100644
--- a/doc/development/contributing/merge_request_workflow.md
+++ b/doc/development/contributing/merge_request_workflow.md
@@ -222,6 +222,7 @@ requirements.
1. Regressions and bugs are covered with tests that reduce the risk of the issue happening
again.
1. [Performance guidelines](../merge_request_performance_guidelines.md) have been followed.
+1. [Secure coding guidelines](https://gitlab.com/gitlab-com/gl-security/security-guidelines) have been followed.
1. [Documented](../documentation/index.md) in the `/doc` directory.
1. [Changelog entry added](../changelog.md), if necessary.
1. Reviewed by relevant (UX/FE/BE/tech writing) reviewers and all concerns are addressed.
diff --git a/doc/development/contributing/style_guides.md b/doc/development/contributing/style_guides.md
index 873d90dcf79..770726f4563 100644
--- a/doc/development/contributing/style_guides.md
+++ b/doc/development/contributing/style_guides.md
@@ -1,5 +1,13 @@
# Style guides
+## Editor/IDE styling standardization
+
+We use [EditorConfig](https://editorconfig.org/) to automatically apply certain styling
+standards before files are saved locally. Most editors/IDEs will honor the `.editorconfig`
+settings automatically by default. If your editor/IDE does not automatically support `.editorconfig`,
+we suggest checking whether a plugin exists. For instance, here is the
+[plugin for vim](https://github.com/editorconfig/editorconfig-vim).
+
## Pre-commit static analysis
You're strongly advised to install
diff --git a/doc/development/creating_enums.md b/doc/development/creating_enums.md
index 64385a2ea79..79ed465b121 100644
--- a/doc/development/creating_enums.md
+++ b/doc/development/creating_enums.md
@@ -8,7 +8,7 @@ To use this type, add `limit: 2` to the migration that creates the column.
Example:
-```rb
+```ruby
def change
add_column :ci_job_artifacts, :file_format, :integer, limit: 2
end
diff --git a/doc/development/dangerbot.md b/doc/development/dangerbot.md
index cd884a023ca..b6362f04311 100644
--- a/doc/development/dangerbot.md
+++ b/doc/development/dangerbot.md
@@ -13,6 +13,22 @@ If Danger is asking you to change something about your merge request, it's best
just to make the change. If you want to learn how Danger works, or make changes
to the existing rules, then this is the document for you.
+## Danger comments in merge requests
+
+Danger only posts one comment and updates its content on subsequent
+`danger-review` runs. Given this, it's usually one of the first few comments
+in a merge request if not the first. If you didn't see it, try to look
+from the start of the merge request.
+
+### Advantages
+
+- You don't get email notifications each time `danger-review` runs.
+
+### Disadvantages
+
+- It's not obvious that Danger will update its old comment, so you need to
+  check whether it has been updated.
+
## Run Danger locally
A subset of the current checks can be run locally with the following rake task:
@@ -71,12 +87,6 @@ the need as part of the product in a future version of GitLab!
Implement each task as an isolated piece of functionality and place it in its
own directory under `danger` as `danger/<task-name>/Dangerfile`.
-Add a line to the top-level `Dangerfile` to ensure it is loaded like:
-
-```ruby
-danger.import_dangerfile('danger/<task-name>')
-```
-
Each task should be isolated from the others, and able to function in isolation.
If there is code that should be shared between multiple tasks, add a plugin to
`danger/plugins/...` and require it in each task that needs it. You can also
diff --git a/doc/development/database/add_foreign_key_to_existing_column.md b/doc/development/database/add_foreign_key_to_existing_column.md
new file mode 100644
index 00000000000..e08f0a3bd1e
--- /dev/null
+++ b/doc/development/database/add_foreign_key_to_existing_column.md
@@ -0,0 +1,136 @@
+# Adding foreign key constraint to an existing column
+
+Foreign keys help ensure consistency between related database tables. The current database review process **always** encourages you to add [foreign keys](../foreign_keys.md) when creating tables that reference records from other tables.
+
+Starting with version 4, Rails includes migration helpers to add foreign key constraints to database tables. Before Rails 4, the only way to ensure some level of consistency was the [`dependent`](https://guides.rubyonrails.org/association_basics.html#options-for-belongs-to-dependent) option within the association definition. Ensuring data consistency on the application level can fail in some unfortunate cases, so we might end up with inconsistent data in the table. This mostly affects older tables, where we simply didn't have the framework support to ensure consistency on the database level. These data inconsistencies can easily cause unexpected application behavior or bugs.
+
+Adding a foreign key to an existing database column requires database structure changes and potential data changes. If the table is in use, we should always assume that there is inconsistent data.
+
+To add a foreign key constraint to an existing column:
+
+1. GitLab version `N.M`: Add a `NOT VALID` foreign key constraint to the column to ensure GitLab doesn't create inconsistent records.
+1. GitLab version `N.M`: Add a data migration, to fix or clean up existing records.
+1. GitLab version `N.M+1`: Validate the whole table by making the foreign key `VALID`.
+
+## Example
+
+Consider the following table structures:
+
+`users` table:
+
+- `id` (integer, primary key)
+- `name` (string)
+
+`emails` table:
+
+- `id` (integer, primary key)
+- `user_id` (integer)
+- `email` (string)
+
+Express the relationship in `ActiveRecord`:
+
+```ruby
+class User < ActiveRecord::Base
+ has_many :emails
+end
+
+class Email < ActiveRecord::Base
+ belongs_to :user
+end
+```
+
+Problem: when the user is removed, the email records related to the removed user will stay in the `emails` table:
+
+```ruby
+user = User.find(1)
+user.destroy
+
+emails = Email.where(user_id: 1) # returns emails for the deleted user
+```
+
+### Prevent invalid records
+
+Add a `NOT VALID` foreign key constraint to the table, which enforces consistency on the record changes.
+
+In the example above, you'd still be able to update records in the `emails` table. However, when you try to update the `user_id` with a non-existent value, the constraint causes a database error.
+
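+As a hedged illustration of what the constraint does on the console (the record
+IDs are illustrative):
+
+```ruby
+email = Email.find(1)
+
+# Pointing to an existing user still works:
+email.update!(user_id: User.first.id)
+
+# Pointing to a non-existent user is now rejected by the database:
+email.update!(user_id: 0) # raises ActiveRecord::InvalidForeignKey
+```
+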
+Migration file for adding `NOT VALID` foreign key:
+
+```ruby
+class AddNotValidForeignKeyToEmailsUser < ActiveRecord::Migration[5.2]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def up
+ # safe to use: it requires short lock on the table since we don't validate the foreign key
+ add_foreign_key :emails, :users, on_delete: :cascade, validate: false # rubocop:disable Migration/AddConcurrentForeignKey
+ end
+
+ def down
+ remove_foreign_key_if_exists :emails, column: :user_id
+ end
+end
+```
+
+CAUTION: **Caution:**
+Avoid using the `add_foreign_key` constraint more than once per migration file, unless the source and target tables are identical.
+
+#### Data migration to fix existing records
+
+The approach here depends on the data volume and the cleanup strategy. If we can easily find "invalid" records by doing a simple database query and the record count is not that high, then the data migration can be executed within a Rails migration.
+
+If the data volume is higher (more than 1,000 records), it's better to create a background migration. If unsure, please contact the database team for advice.
+
+Example for cleaning up records in the `emails` table within a database migration:
+
+```ruby
+class RemoveRecordsWithoutUserFromEmailsTable < ActiveRecord::Migration[5.2]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ class Email < ActiveRecord::Base
+ include EachBatch
+ end
+
+ def up
+ Email.where('user_id NOT IN (SELECT id FROM users)').each_batch do |relation|
+ relation.delete_all
+ end
+ end
+
+ def down
+    # Can be a no-op when the data inconsistency does not affect the pre- and post-deployment versions of the application.
+ # In this case we might have records in the `emails` table where the associated record in the `users` table is not there anymore.
+ end
+end
+```
+
+### Validate the foreign key
+
+Validating the foreign key will scan the whole table and make sure that each relation is correct.
+
+NOTE: **Note:** When using [background migrations](../background_migrations.md), foreign key validation should happen in the next GitLab release.
+
+Migration file for validating the foreign key:
+
+```ruby
+# frozen_string_literal: true
+
+class ValidateForeignKeyOnEmailUsers < ActiveRecord::Migration[5.2]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def up
+ validate_foreign_key :emails, :user_id
+ end
+
+ def down
+ # Can be safely a no-op if we don't roll back the inconsistent data.
+ end
+end
+```
diff --git a/doc/development/database_debugging.md b/doc/development/database_debugging.md
index d91edba92db..e577ba6ec8f 100644
--- a/doc/development/database_debugging.md
+++ b/doc/development/database_debugging.md
@@ -39,7 +39,7 @@ If your test DB is giving you problems, it is safe to nuke it because it doesn't
Access the database via one of these commands (they all get you to the same place)
-```
+```shell
gdk psql -d gitlabhq_development
bundle exec rails dbconsole RAILS_ENV=development
bundle exec rails db RAILS_ENV=development
diff --git a/doc/development/database_review.md b/doc/development/database_review.md
index 113314884d5..0cd7e083e2a 100644
--- a/doc/development/database_review.md
+++ b/doc/development/database_review.md
@@ -12,7 +12,7 @@ A database review is required for:
including files in:
- `db/`
- `lib/gitlab/background_migration/`
-- Changes to the database tooling, e.g.:
+- Changes to the database tooling. For example:
- migration or ActiveRecord helpers in `lib/gitlab/database/`
- load balancing
- Changes that produce SQL queries that are beyond the obvious. It is
@@ -50,7 +50,7 @@ A database **reviewer**'s role is to:
Currently we have a [critical shortage of database maintainers](https://gitlab.com/gitlab-org/gitlab/issues/29717). Until we are able to increase the number of database maintainers to support the volume of reviews, we have implemented this temporary solution. If the database **reviewer** cannot find an available database **maintainer** then:
1. Assign the MR for a second review by a **database trainee maintainer** for further review.
-1. Once satisfied with the review process, and if the database **maintainer** is still not available, skip the database maintainer approval step and assign the merge request to a backend maintainer for final review and approval.
+1. Once satisfied with the review process and if the database **maintainer** is still not available, skip the database maintainer approval step and assign the merge request to a backend maintainer for final review and approval.
A database **maintainer**'s role is to:
@@ -85,7 +85,7 @@ the following preparations into account.
- Make migrations reversible by using the `change` method or include a `down` method when using `up`.
- Include either a rollback procedure or describe how to rollback changes.
- Add the output of the migration(s) to the MR description.
-- Add tests for the migration in `spec/migrations` if necessary. See [Testing Rails migrations at GitLab](testing_guide/testing_migrations_guide.html) for more details.
+- Add tests for the migration in `spec/migrations` if necessary. See [Testing Rails migrations at GitLab](testing_guide/testing_migrations_guide.md) for more details.
#### Preparation when adding or modifying queries
@@ -119,11 +119,13 @@ the following preparations into account.
- Add foreign keys to any columns pointing to data in other tables, including [an index](migration_style_guide.md#adding-foreign-key-constraints).
- Add indexes for fields that are used in statements such as `WHERE`, `ORDER BY`, `GROUP BY`, and `JOIN`s.
-#### Preparation when removing columns, tables, indexes or other structures
+#### Preparation when removing columns, tables, indexes, or other structures
- Follow the [guidelines on dropping columns](what_requires_downtime.md#dropping-columns).
-- Generally it's best practice, but not a hard rule, to remove indexes and foreign keys in a post-deployment migration.
+- Generally it's best practice (but not a hard rule) to remove indexes and foreign keys in a post-deployment migration.
- Exceptions include removing indexes and foreign keys for small tables.
+- If you're adding a composite index, another index might become redundant, so remove that in the same migration (see the sketch after this list).
+  For example, adding `index(column_A, column_B, column_C)` makes the indexes `index(column_A, column_B)` and `index(column_A)` redundant.
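+
+A minimal sketch of such a migration, assuming the standard
+`Gitlab::Database::MigrationHelpers` (table and column names are illustrative):
+
+```ruby
+class SwapRedundantIndexOnExamples < ActiveRecord::Migration[5.2]
+  include Gitlab::Database::MigrationHelpers
+
+  DOWNTIME = false
+
+  disable_ddl_transaction!
+
+  def up
+    add_concurrent_index :examples, [:column_a, :column_b, :column_c]
+    remove_concurrent_index :examples, [:column_a, :column_b]
+  end
+
+  def down
+    add_concurrent_index :examples, [:column_a, :column_b]
+    remove_concurrent_index :examples, [:column_a, :column_b, :column_c]
+  end
+end
+```
+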
### How to review for database
@@ -156,14 +158,14 @@ the following preparations into account.
- Check migrations are reversible and implement a `#down` method
- Check data migrations:
- Establish a time estimate for execution on GitLab.com.
- - Depending on timing, data migrations can be placed on regular, post-deploy or background migrations.
+ - Depending on timing, data migrations can be placed on regular, post-deploy, or background migrations.
- Data migrations should be reversible too or come with a description of how to reverse, when possible.
This applies to all types of migrations (regular, post-deploy, background).
- Query performance
- Check for any obviously complex queries and queries the author specifically
points out for review (if any)
- If not present yet, ask the author to provide SQL queries and query plans
- (e.g. by using [chatops](understanding_explain_plans.md#chatops) or direct
+ (for example, by using [chatops](understanding_explain_plans.md#chatops) or direct
database access)
- For given queries, review parameters regarding data distribution
- [Check query plans](understanding_explain_plans.md) and suggest improvements
@@ -172,7 +174,7 @@ the following preparations into account.
- If queries rely on prior migrations that are not present yet on production
(eg indexes, columns), you can use a [one-off instance from the restore
pipeline](https://ops.gitlab.net/gitlab-com/gl-infra/gitlab-restore/postgres-gprd)
- in order to establish a proper testing environment.
+ in order to establish a proper testing environment. If you don't have access to this project, reach out to #database on Slack to get advice on how to proceed.
- Avoid N+1 problems and minimalize the [query count](merge_request_performance_guidelines.md#query-counts).
### Timing guidelines for migrations
@@ -185,6 +187,6 @@ NOTE: **Note:** Keep in mind that all runtimes should be measured against GitLab
| Migration Type | Execution Time Recommended | Notes |
|----|----|---|
-| Regular migrations on `db/migrate` | `3 minutes` | A valid exception are index creation as this can take a long time. |
+| Regular migrations on `db/migrate` | `3 minutes` | A valid exception is index creation, as this can take a long time. |
| Post migrations on `db/post_migrate` | `10 minutes` | |
-| Background migrations | --- | Since these are suitable for larger tables, it's not possible to set a precise timing guideline, however, any query must stay well below `10s` of execution time. |
+| Background migrations | --- | Since these are suitable for larger tables, it's not possible to set a precise timing guideline. However, any single query must stay below `1 second` execution time with cold caches. |
diff --git a/doc/development/db_dump.md b/doc/development/db_dump.md
index 97762a62a80..bb740d12f7b 100644
--- a/doc/development/db_dump.md
+++ b/doc/development/db_dump.md
@@ -10,7 +10,7 @@ data leaks.
On the staging VM, add the following line to `/etc/gitlab/gitlab.rb` to speed up
large database imports.
-```
+```shell
# On STAGING
echo "postgresql['checkpoint_segments'] = 64" | sudo tee -a /etc/gitlab/gitlab.rb
sudo touch /etc/gitlab/skip-auto-reconfigure
@@ -23,7 +23,7 @@ Next, we let the production environment stream a compressed SQL dump to our
local machine via SSH, and redirect this stream to a psql client on the staging
VM.
-```
+```shell
# On LOCAL MACHINE
ssh -C gitlab.example.com sudo -u gitlab-psql /opt/gitlab/embedded/bin/pg_dump -Cc gitlabhq_production |\
ssh -C staging-vm sudo -u gitlab-psql /opt/gitlab/embedded/bin/psql -d template1
@@ -37,14 +37,14 @@ use this procedure.
First, on the production server, create a list of directories you want to
re-create.
-```
+```shell
# On PRODUCTION
(umask 077; sudo find /var/opt/gitlab/git-data/repositories -maxdepth 1 -type d -print0 > directories.txt)
```
Copy `directories.txt` to the staging server and create the directories there.
-```
+```shell
# On STAGING
sudo -u git xargs -0 mkdir -p < directories.txt
```
diff --git a/doc/development/deleting_migrations.md b/doc/development/deleting_migrations.md
index 46f4b840134..3ac039a1692 100644
--- a/doc/development/deleting_migrations.md
+++ b/doc/development/deleting_migrations.md
@@ -1,7 +1,7 @@
# Delete existing migrations
When removing existing migrations from the GitLab project, you have to take into account
-the possibility of the migration already been included in past releases or in the current release, and thus already executed on GitLab.com and/or in self-hosted instances.
+the possibility that the migration has already been included in past releases or in the current release, and thus already executed on GitLab.com and/or in self-managed instances.
Because of it, it's not possible to delete existing migrations, as that could lead to:
diff --git a/doc/development/documentation/index.md b/doc/development/documentation/index.md
index 68495178511..8300413a0a6 100644
--- a/doc/development/documentation/index.md
+++ b/doc/development/documentation/index.md
@@ -8,7 +8,7 @@ GitLab's documentation is [intended as the single source of truth (SSOT)](https:
In addition to this page, the following resources can help you craft and contribute documentation:
-- [Style Guide](styleguide.md) - What belongs in the docs, language guidelines, Markdown standards to follow, and more.
+- [Style Guide](styleguide.md) - What belongs in the docs, language guidelines, Markdown standards to follow, links, and more.
- [Structure and template](structure.md) - Learn the typical parts of a doc page and how to write each one.
- [Documentation process](workflow.md).
- [Markdown Guide](../../user/markdown.md) - A reference for all Markdown syntax supported by GitLab.
@@ -16,7 +16,7 @@ In addition to this page, the following resources can help you craft and contrib
## Source files and rendered web locations
-Documentation for GitLab, GitLab Runner, Omnibus GitLab and Charts is published to <https://docs.gitlab.com>. Documentation for GitLab is also published within the application at `/help` on the domain of the GitLab instance.
+Documentation for GitLab, GitLab Runner, Omnibus GitLab, and Charts is published to <https://docs.gitlab.com>. Documentation for GitLab is also published within the application at `/help` on the domain of the GitLab instance.
At `/help`, only help for your current edition and version is included. Help for other versions is available at <https://docs.gitlab.com/archives/>.
The source of the documentation exists within the codebase of each GitLab application in the following repository locations:
@@ -116,8 +116,9 @@ Things to note:
- The above `git grep` command will search recursively in the directory you run
it in for `workflow/lfs/lfs_administration` and `lfs/lfs_administration`
and will print the file and the line where this file is mentioned.
- You may ask why the two greps. Since we use relative paths to link to
- documentation, sometimes it might be useful to search a path deeper.
+ You may ask why the two greps. Since [we use relative paths to link to
+  documentation](styleguide.md#links), sometimes it might be useful to search
+  a path deeper.
- The `*.md` extension is not used when a document is linked to GitLab's
built-in help page, that's why we omit it in `git grep`.
- Use the checklist on the "Change documentation location" MR description template.
@@ -393,15 +394,14 @@ merge request with new or changed docs is submitted, are:
- [`docs lint`](https://gitlab.com/gitlab-org/gitlab/blob/master/.gitlab/ci/docs.gitlab-ci.yml#L48):
Runs several tests on the content of the docs themselves:
- [`lint-doc.sh` script](https://gitlab.com/gitlab-org/gitlab/blob/master/scripts/lint-doc.sh)
- checks that:
+ runs the following checks and linters:
- All cURL examples use the long flags (ex: `--header`, not `-H`).
- The `CHANGELOG.md` does not contain duplicate versions.
- No files in `doc/` are executable.
- No new `README.md` was added.
- - [markdownlint](#markdownlint).
- - Nanoc tests, which you can [run locally](#previewing-the-changes-live) before
- pushing to GitLab by executing the command `bundle exec nanoc check internal_links`
- (or `internal_anchors`) on your local [`gitlab-docs`](https://gitlab.com/gitlab-org/gitlab-docs) directory:
+ - [markdownlint](#markdownlint).
+ - [Vale](#vale).
+ - Nanoc tests:
- [`internal_links`](https://gitlab.com/gitlab-org/gitlab/blob/master/.gitlab/ci/docs.gitlab-ci.yml#L67)
checks that all internal links (ex: `[link](../index.md)`) are valid.
- [`internal_anchors`](https://gitlab.com/gitlab-org/gitlab/blob/master/.gitlab/ci/docs.gitlab-ci.yml#L69)
@@ -410,6 +410,60 @@ merge request with new or changed docs is submitted, are:
- If any code or the `doc/README.md` file is changed, a full pipeline will run, which
runs tests for [`/help`](#gitlab-help-tests).
+### Running tests & lint checks locally
+
+Apart from [previewing your changes locally](#previewing-the-changes-live), you can also run all lint checks
+and Nanoc tests locally.
+
+#### Nanoc tests
+
+To execute Nanoc tests locally:
+
+1. Navigate to the [`gitlab-docs`](https://gitlab.com/gitlab-org/gitlab-docs) directory.
+1. Run:
+
+ ```shell
+ # Check for broken internal links
+ bundle exec nanoc check internal_links
+
+ # Check for broken external links (might take a lot of time to complete).
+ # This test is set to be allowed to fail and is run only in the gitlab-docs project CI
+ bundle exec nanoc check internal_anchors
+ ```
+
+#### Lint checks
+
+Lint checks are performed by the [`lint-doc.sh`](https://gitlab.com/gitlab-org/gitlab/blob/master/scripts/lint-doc.sh)
+script and can be executed as follows:
+
+1. Navigate to the `gitlab` directory.
+1. Run:
+
+ ```shell
+ MD_DOC_PATH=path/to/my_doc.md scripts/lint-doc.sh
+ ```
+
+Where `MD_DOC_PATH` points to the file or directory you would like to run lint checks for.
+If you omit it completely, it will default to the `doc/` directory.
+The output should be similar to:
+
+```plaintext
+=> Linting documents at path /path/to/gitlab as <user>...
+=> Checking for cURL short options...
+=> Checking for CHANGELOG.md duplicate entries...
+=> Checking /path/to/gitlab/doc for executable permissions...
+=> Checking for new README.md files...
+=> Linting markdown style...
+=> Linting prose...
+✔ 0 errors, 0 warnings and 0 suggestions in 1 file.
+✔ Linting passed
+```
+
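+To lint a whole directory instead of a single file, point `MD_DOC_PATH` at the
+directory (the path below is only an example):
+
+```shell
+MD_DOC_PATH=doc/development scripts/lint-doc.sh
+```
+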
+Note that this requires you either to have the required lint tools installed on your machine,
+or to have a working Docker installation, in which case an image with these tools pre-installed is used.
+
+For more information on the available linters, refer to the [linting](#linting) section.
+
### Linting
To help adhere to the [documentation style guidelines](styleguide.md), and improve the content
diff --git a/doc/development/documentation/site_architecture/index.md b/doc/development/documentation/site_architecture/index.md
index 57a3dde55e6..c91a9882bb0 100644
--- a/doc/development/documentation/site_architecture/index.md
+++ b/doc/development/documentation/site_architecture/index.md
@@ -96,6 +96,50 @@ Read through [the global navigation documentation](global_nav.md) to understand:
TBA
-->
+## Pipelines
+
+The pipeline in the `gitlab-docs` project:
+
+- Tests changes to the docs site code.
+- Builds the Docker images used in various pipeline jobs.
+- Builds and deploys the docs site itself.
+- Generates the review apps when the `review-docs-deploy` job is triggered.
+
+### Rebuild the docs site Docker images
+
+Once a week on Mondays, a scheduled pipeline runs and rebuilds the Docker images
+used in various pipeline jobs, like `docs-lint`. The Docker image configuration files are
+located at <https://gitlab.com/gitlab-org/gitlab-docs/-/tree/master/dockerfiles>.
+
+If you need to rebuild the Docker images immediately (must have maintainer level permissions):
+
+CAUTION: **Caution:**
+If you change the dockerfile configuration and rebuild the images, you can break the master
+pipeline in the main `gitlab` repo as well as in `gitlab-docs`. Create an image with
+a different name first and test it to ensure you do not break the pipelines.
+
+1. In [`gitlab-docs`](https://gitlab.com/gitlab-org/gitlab-docs), go to **{rocket}** **CI / CD > Pipelines**.
+1. Click the **Run Pipeline** button.
+1. See that a new pipeline is running. The jobs that build the images are in the first
+ stage, `build-images`. You can click the pipeline number to see the larger pipeline
+ graph, or click the first (`build-images`) stage in the mini pipeline graph to
+ expose the jobs that build the images.
+1. Click the **play** (**{play}**) button next to the images you want to rebuild.
+ - Normally, you do not need to rebuild the `image:gitlab-docs-base` image, as it
+ rarely changes. If it does need to be rebuilt, be sure to only run `image:docs-lint`
+ after it is finished rebuilding.
+
+### Deploy the docs site
+
+Every four hours a scheduled pipeline builds and deploys the docs site. The pipeline
+fetches the current docs from the main project's master branch, builds them with Nanoc,
+and deploys them to <https://docs.gitlab.com>.
+
+If you need to build and deploy the site immediately (you must have maintainer-level permissions):
+
+1. In [`gitlab-docs`](https://gitlab.com/gitlab-org/gitlab-docs), go to **{rocket}** **CI / CD > Schedules**.
+1. For the `Build docs.gitlab.com every 4 hours` scheduled pipeline, click the **play** (**{play}**) button.
+
## Using YAML data files
The easiest way to achieve something similar to
@@ -186,7 +230,7 @@ for its search function. This is how it works:
NOTE: **For GitLab employees:**
The credentials to access the Algolia dashboard are stored in 1Password. If you
want to receive weekly reports of the search usage, search the Google doc with
-title "Email, Slack, and GitLab Groups and Aliases", search for `docsearch`,
+title `Email, Slack, and GitLab Groups and Aliases`, search for `docsearch`,
and add a comment with your email to be added to the alias that gets the weekly
reports.
diff --git a/doc/development/documentation/site_architecture/release_process.md b/doc/development/documentation/site_architecture/release_process.md
index 24608cc1a5e..59a8d3cff01 100644
--- a/doc/development/documentation/site_architecture/release_process.md
+++ b/doc/development/documentation/site_architecture/release_process.md
@@ -70,9 +70,9 @@ this needs to happen when the stable branches for all products have been created
1. Run the raketask to create the single version:
- ```shell
- ./bin/rake "release:single[12.0]"
- ```
+ ```shell
+ ./bin/rake "release:single[12.0]"
+ ```
A new `Dockerfile.12.0` should have been created and committed to a new branch.
@@ -128,16 +128,6 @@ version and rotates the old one:
- `online`: The 3 latest stable versions.
- `offline`: All the previous versions offered as an offline archive.
-1. **Add the new offline version in the 404 page redirect script:**
-
- Since we're deprecating the oldest version each month, we need to redirect
- those URLs in order not to create [404 entries](https://gitlab.com/gitlab-org/gitlab-docs/issues/221).
- There's a temporary hack for now:
-
- 1. Edit `content/404.html`, making sure all offline versions under
- `content/_data/versions.yaml` are in the JavaScript snippet at the end of
- the document.
-
1. **Update the `:latest` and `:archives` Docker images:**
The following two Dockerfiles need to be updated:
diff --git a/doc/development/documentation/structure.md b/doc/development/documentation/structure.md
index 1bb3dd3521b..e7a43f769b3 100644
--- a/doc/development/documentation/structure.md
+++ b/doc/development/documentation/structure.md
@@ -99,7 +99,7 @@ Larger instruction sets may have subsections covering specific phases of the pro
Where appropriate, provide examples of code or configuration files to better clarify intended usage.
- Write a step-by-step guide, with no gaps between the steps.
-- Include example code or configurations as part of the relevant step. Use appropriate markdown to [wrap code blocks with syntax highlighting](../../user/markdown.html#colored-code-and-syntax-highlighting).
+- Include example code or configurations as part of the relevant step, as shown in the example after this list. Use appropriate markdown to [wrap code blocks with syntax highlighting](../../user/markdown.md#colored-code-and-syntax-highlighting).
- Start with an h2 (`##`), break complex steps into small steps using
subheadings h3 > h4 > h5 > h6. _Never skip a hierarchy level, such
as h2 > h4_, as it will break the TOC and may affect the breadcrumbs.
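+
+For example, a step that embeds a highlighted code block might look like this (the command shown is only illustrative):
+
+````markdown
+1. Install the dependencies:
+
+   ```shell
+   bundle install
+   ```
+````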
diff --git a/doc/development/documentation/styleguide.md b/doc/development/documentation/styleguide.md
index e2f84e1200e..d031364964d 100644
--- a/doc/development/documentation/styleguide.md
+++ b/doc/development/documentation/styleguide.md
@@ -17,14 +17,12 @@ that apply to all GitLab content, not just documentation.
### Why a single source of truth
-The documentation is the SSOT for all information related to the implementation, usage, and troubleshooting of GitLab products and features. It evolves continually, in keeping with new products and features, and with improvements for clarity, accuracy, and completeness.
+The documentation of GitLab products and features is the SSOT for all information related to implementation, usage, and troubleshooting. It evolves continually, in keeping with new products and features, and with improvements for clarity, accuracy, and completeness.
This policy prevents information silos, ensuring that it remains easy to find information about GitLab products.
It also informs decisions about the kinds of content we include in our documentation.
-The documentation is a continually evolving SSOT for all information related to the implementation, usage, and troubleshooting of GitLab products and features.
-
### All information
Include problem-solving actions that may address rare cases or be considered 'risky', so long as proper context is provided in the form of fully detailed warnings and caveats. This kind of content should be included as it could be helpful to others and, when properly explained, its benefits outweigh the risks. If you think you have found an exception to this rule, contact the Technical Writing team.
@@ -34,7 +32,7 @@ For the Troubleshooting sections, people in GitLab Support can merge additions t
### All media types
-Include any media types/sources if the content is relevant to readers. You can freely include or link presentations, diagrams, videos, etc.; no matter who it was originally composed for, if it is helpful to any of our audiences, we can include it.
+Include any media types/sources if the content is relevant to readers. You can freely include or link presentations, diagrams, videos, and so on; no matter who it was originally composed for, if it is helpful to any of our audiences, we can include it.
- If you use an image that has a separate source file (for example, a vector or diagram format), link the image to the source file so that it may be reused or updated by anyone.
- Do not copy and paste content from other sources unless it is a limited quotation with the source cited. Typically it is better to either rephrase relevant information in your own words or link out to the other source.
@@ -63,13 +61,17 @@ Instead, link to the SSOT and explain why it is important to consume the informa
### Organize by topic, not by type
-Beyond top-level audience-type folders (e.g. `administration`), we organize content by topic, not by type, so that it can be located as easily as possible within the single-source-of-truth (SSOT) section for the subject matter.
+Beyond top-level audience-type folders (for example, `administration`), we organize content by topic, not by type, so that it can be located as easily as possible within the single-source-of-truth (SSOT) section for the subject matter.
+
+For example, do not create groupings of similar media types, such as:
-For example, do not create groupings of similar media types (e.g. glossaries, FAQs, or sets of all articles or videos).
+- Glossaries.
+- FAQs.
+- Sets of all articles or videos.
Such grouping of content by type makes
it difficult to browse for the information you need and difficult to maintain up-to-date content.
-Instead, organize content by its subject (e.g. everything related to CI goes together)
+Instead, organize content by its subject (for example, everything related to CI goes together)
and cross-link between any related content.
### Docs-first methodology
@@ -79,7 +81,10 @@ We employ a **docs-first methodology** to help ensure that the docs remain a com
- If the answer to a question exists in documentation, share the link to the docs instead of rephrasing the information.
- When you encounter new information not available in GitLab’s documentation (for example, when working on a support case or testing a feature), your first step should be to create a merge request (MR) to add this information to the docs. You can then share the MR in order to communicate this information.
-New information that would be useful toward the future usage or troubleshooting of GitLab should not be written directly in a forum or other messaging system, but added to a docs MR and then referenced, as described above. Note that among any other doc changes, you can always add a Troubleshooting section to a doc if none exists, or un-comment and use the placeholder Troubleshooting section included as part of our [doc template](structure.md#template-for-new-docs), if present.
+New information that would be useful for future usage or troubleshooting of GitLab should not be written directly in a forum or other messaging system, but added to a docs MR and then referenced, as described above. Note that, among any other doc changes, you can either:
+
+- Add a Troubleshooting section to a doc if none exists.
+- Un-comment and use the placeholder Troubleshooting section included as part of our [doc template](structure.md#template-for-new-docs), if present.
The more we reflexively add useful information to the docs, the more (and more successfully) the docs will be used to efficiently accomplish tasks and solve problems.
@@ -98,7 +103,7 @@ Ruby gem will support all [GFM markup](../../user/markdown.md) in the future. Th
all markup that is supported for display in the GitLab application itself. For now,
use regular Markdown markup, following the rules in the linked style guide.
-Note that Kramdown-specific markup (e.g., `{:.class}`) will not render properly on GitLab instances under [`/help`](index.md#gitlab-help).
+Note that Kramdown-specific markup (for example, `{:.class}`) will not render properly on GitLab instances under [`/help`](index.md#gitlab-help).
Hard-coded HTML is valid, although it's discouraged to be used while we have `/help`. HTML is permitted as long as:
@@ -256,15 +261,6 @@ Do not include the same information in multiple places. [Link to a SSOT instead.
Some features are also objects. For example, "GitLab's Merge Requests support X" and
"Create a new merge request for Z."
-- Use common contractions when it helps create a friendly and informal tone, especially in tutorials and [UIs](https://design.gitlab.com/content/punctuation/#contractions).
- - Do use contractions like: _it's_, _can't_, _wouldn't_, _you're_, _you've_, _haven't_, don't, _we're_, _that's_, and _won't_. Contractions in instructional documentation such as tutorials can help create a friendly and informal tone.
- - Avoid less common contractions such as: _he'd_, _it'll_, _should've_, and _there'd_.
- - Do not use contractions in reference documentation. Examples:
- - You cannot set a limit higher than 1000.
- - For `parameter1`, the default is 10.
- - Do not use contractions with a proper noun and a verb, such as _GitLab's creating X_.
- - Avoid using contractions when you need to emphasize a negative, such as "Do **not** install X with Y."
-
- Avoid use of the future tense:
- Instead of "after you execute this command, GitLab will display the result", use "after you execute this command, GitLab displays the result".
- Only use the future tense to convey when the action or result will actually occur at a future time.
@@ -281,6 +277,58 @@ as even native users of English might misunderstand them.
- Instead of "e.g.", use "for example," "such as," "for instance," or "like."
- Instead of "etc.", either use "and so on" or consider editing it out, since it can be vague.
+### Contractions
+
+- Use common contractions when they help create a friendly and informal tone, especially in tutorials, instructional documentation, and [UIs](https://design.gitlab.com/content/punctuation/#contractions).
+
+| Do | Don't |
+|----------|-----------|
+| it's | it is |
+| can't | cannot |
+| wouldn't | would not |
+| you're | you are |
+| you've | you have |
+| haven't | have not |
+| don't | do not |
+| we're | we are |
+| that's | that is |
+| won't | will not |
+
+- Avoid less common contractions:
+
+| Do | Don't |
+|--------------|-------------|
+| he would | he'd |
+| it will | it'll |
+| should have | should've |
+| there would | there'd |
+
+- Do not use contractions with a proper noun and a verb. For example:
+
+| Do | Don't |
+|----------------------|---------------------|
+| GitLab is creating X | GitLab's creating X |
+
+- Do not use contractions when you need to emphasize a negative. For example:
+
+| Do | Don't |
+|-----------------------------|----------------------------|
+| Do **not** install X with Y | **Don't** install X with Y |
+
+- Do not use contractions in reference documentation. For example:
+
+| Do | Don't |
+|------------------------------------------|----------------------------|
+| Do **not** set a limit greater than 1000 | **Don't** set a limit greater than 1000 |
+| For `parameter1`, the default is 10 | For `parameter1`, the default's 10 |
+
+- Avoid contractions in error messages. For example:
+
+| Do | Don't |
+|------------------------------------------|----------------------------|
+| Requests to localhost are not allowed | Requests to localhost aren't allowed |
+| Specified URL cannot be used | Specified URL can't be used |
+
## Text
- [Write in Markdown](#markdown).
@@ -428,7 +476,7 @@ as the list item. This can be done with:
Items nested in lists should always align with the first character of the list item.
In unordered lists (using `-`), this means two spaces for each level of indentation:
-~~~md
+````markdown
- Unordered list item 1
A line nested using 2 spaces to align with the `U` above.
@@ -447,11 +495,11 @@ In unordered lists (using `-`), this means two spaces for each level of indentat
- Unordered list item 4
![an image that will nest inside list item 4](image.png)
-~~~
+````
For ordered lists, use three spaces for each level of indentation:
-~~~md
+````markdown
1. Ordered list item 1
A line nested using 3 spaces to align with the `O` above.
@@ -470,7 +518,7 @@ For ordered lists, use three spaces for each level of indentation:
1. Ordered list item 4
![an image that will nest inside list item 4](image.png)
-~~~
+````
You can nest full lists inside other lists using the same rules as above. If you wish
to mix types, that is also possible, as long as you don't mix items at the same level:
@@ -540,6 +588,10 @@ Keep heading titles clear and direct. Make every word count. To accommodate sear
| GitLab Release and Maintenance Policy | This section covers GitLab's Release and Maintenance Policy |
| Backport to older releases | Backporting to older releases |
+NOTE: **Note:**
+Be careful if you change an existing title, because such changes may affect not only [anchor links](#anchor-links)
+within the page, but also links to the GitLab documentation from GitLab itself and from external sites.
+
### Anchor links
Headings generate anchor links automatically when rendered. `## This is an example`
@@ -576,42 +628,40 @@ do not use this option until further notice.
### Links to internal documentation
-- To link to internal documentation, use relative links, not full URLs.
- Use `../` to navigate to high-level directories. Links should not refer to root.
+NOTE: **Note:**
+_Internal_ refers to documentation in the same project. When linking to documentation in
+separate projects (for example, linking to Omnibus docs from GitLab docs), you must use absolute
+URLs.
- Don't:
+To link to internal documentation:
- ```md
- [Geo Troubleshooting](https://docs.gitlab.com/ee/administration/geo/replication/troubleshooting.html)
- [Geo Troubleshooting](/ee/administration/geo/replication/troubleshooting.md)
- ```
+- Use relative links to Markdown files in the same repository.
+- Do not use absolute URLs or URLs from `docs.gitlab.com`.
+- Use `../` to navigate to higher-level directories.
+- Do not link relative to root. For example, `/ee/user/gitlab_com/index.md`.
- Do:
+ Don't:
- ```md
- [Geo Troubleshooting](../../geo/replication/troubleshooting.md)
- ```
+ - `https://docs.gitlab.com/ee/administration/geo/replication/troubleshooting.html`
+ - `/ee/administration/geo/replication/troubleshooting.md`
+
+ Do: `../../geo/replication/troubleshooting.md`
- Always add the file name `file.md` at the end of the link with the `.md` extension, not `.html`.
Don't:
- ```md
- [merge requests](../../merge_requests/)
- [issues](../../issues/tags.html)
- [issue tags](../../issues/tags.html#stages)
- ```
+ - `../../merge_requests/`
+ - `../../issues/tags.html`
+ - `../../issues/tags.html#stages`
Do:
- ```md
- [merge requests](../../merge_requests/index.md)
- [issues](../../issues/tags.md)
- [issue tags](../../issues/tags.md#stages)
- ```
+ - `../../merge_requests/index.md`
+ - `../../issues/tags.md`
+ - `../../issues/tags.md#stages`
-- Using the Markdown extension is necessary for the [`/help`](index.md#gitlab-help)
- section of GitLab.
+- Use the Markdown extension for the [`/help`](index.md#gitlab-help) section of GitLab.
### Links requiring permissions
@@ -783,33 +833,64 @@ nicely on different mobile devices.
- When providing a shell command and its output, prefix the shell command with `$` and
leave a blank line between the command and the output.
- When providing a command without output, don't prefix the shell command with `$`.
+- If you need to include triple backticks inside a code block, use four backticks
+  for the code block fences instead of three.
- For regular code blocks, always use a highlighting class corresponding to the
language for better readability. Examples:
- ~~~md
+ ````markdown
```ruby
Ruby code
```
- ```js
+ ```javascript
JavaScript code
```
- ```md
+ ```markdown
[Markdown code example](example.md)
```
- ```text
+ ```plaintext
Code or text for which no specific highlighting class is available.
```
- ~~~
-
-- To display raw Markdown instead of rendered Markdown, you can use triple backticks
- with `md`, like the `Markdown code` example above, unless you want to include triple
- backticks in the code block as well. In that case, use triple tildes (`~~~`) instead.
-- [Syntax highlighting for code blocks](https://github.com/rouge-ruby/rouge/wiki/List-of-supported-languages-and-lexers)
- is available for many languages. Use `shell` instead of `bash` or `sh` for shell output.
-- For a complete reference on code blocks, check the [Kramdown guide](https://about.gitlab.com/handbook/product/technical-writing/markdown-guide/#code-blocks).
+ ````
+
+Syntax highlighting is required for code blocks added to the GitLab documentation.
+Refer to the table below for the most common language classes, or check the
+[complete list](https://github.com/rouge-ruby/rouge/wiki/List-of-supported-languages-and-lexers)
+of language classes available.
+
+| Preferred language tags | Language aliases and notes |
+|-------------------------|------------------------------------------------------------------------------|
+| `asciidoc` | |
+| `dockerfile` | Alias: `docker`. |
+| `elixir` | |
+| `erb` | |
+| `golang` | Alias: `go`. |
+| `graphql` | |
+| `haml` | |
+| `html` | |
+| `ini` | For some simple config files that are not in TOML format. |
+| `javascript` | Alias: `js`. |
+| `json` | |
+| `markdown` | Alias: `md`. |
+| `mermaid` | |
+| `nginx` | |
+| `perl` | |
+| `php` | |
+| `plaintext` | Examples with no defined language, such as output from shell commands or API calls. If a code block has no language, it defaults to `plaintext`. Alias: `text`. |
+| `prometheus` | Prometheus configuration examples. |
+| `python` | |
+| `ruby` | Alias: `rb`. |
+| `shell` | Aliases: `bash` or `sh`. |
+| `sql` | |
+| `toml` | Runner configuration examples, and other TOML-formatted configuration files. |
+| `typescript` | Alias: `ts`. |
+| `xml` | |
+| `yaml` | Alias: `yml`. |
+
+For a complete reference on code blocks, check the [Kramdown guide](https://about.gitlab.com/handbook/product/technical-writing/markdown-guide/#code-blocks).
## GitLab SVG icons
@@ -1120,7 +1201,7 @@ keyword "only":
- For GitLab Premium: `**(PREMIUM ONLY)**`.
- For GitLab Ultimate: `**(ULTIMATE ONLY)**`.
-For GitLab.com only tiers (when the feature is not available for self-hosted instances):
+For GitLab.com only tiers (when the feature is not available for self-managed instances):
- For GitLab Free and higher tiers: `**(FREE ONLY)**`.
- For GitLab Bronze and higher tiers: `**(BRONZE ONLY)**`.
@@ -1291,7 +1372,7 @@ on this document. Further explanation is given below.
The following can be used as a template to get started:
-~~~md
+````markdown
## Descriptive title
One or two sentence description of what endpoint does.
@@ -1319,7 +1400,7 @@ Example response:
}
]
```
-~~~
+````
### Fake tokens
diff --git a/doc/development/documentation/workflow.md b/doc/development/documentation/workflow.md
index 7c97f6628c9..520cf3c60d3 100644
--- a/doc/development/documentation/workflow.md
+++ b/doc/development/documentation/workflow.md
@@ -20,8 +20,7 @@ directly affect the way that any user or administrator interacts with GitLab.
Regardless of the type of issue or merge request, certain labels are required when documentation
is added or updated. The following are added by the issue or merge request author:
-- An appropriate [type label](../contributing/issue_workflow.md#type-labels). For example,
- `~backstage`.
+- An appropriate [type label](../contributing/issue_workflow.md#type-labels).
- The [stage label](../contributing/issue_workflow.md#stage-labels) and
[group label](../contributing/issue_workflow.md#group-labels). For example, `~devops::create` and
`~group::source code`.
@@ -118,7 +117,7 @@ Reviewers help ensure:
Prior to merging, documentation changes committed by the developer must be reviewed by:
- The code reviewer for the merge request. This is known as a technical review.
-- Optionally, others involved in the work, such as other developers or the Product Manager.
+- Optionally, others involved in the work such as other developers or the Product Manager.
- The Technical Writer for the DevOps stage group, except in exceptional circumstances where a
[post-merge review](#post-merge-reviews) can be requested.
- A maintainer of the project.
@@ -137,11 +136,11 @@ For issues requiring any new or updated documentation, the Product Manager must:
- Confirm or add the [documentation requirements](#documentation-requirements).
- Ensure the issue contains:
- Any new or updated feature name.
- - Overview, description, and use cases, as required by the
- [documentation structure and template](structure.md), when applicable.
+ - Overview, description, and use cases when applicable (as required by the
+ [documentation structure and template](structure.md)).
-Everyone is encouraged to draft the documentation requirements in the issue, but a Product Manager
-will do the following:
+Everyone is encouraged to draft the documentation requirements in the issue. However, a Product
+Manager will:
- When the issue is assigned a release milestone, review and update the Documentation details.
- By the kickoff, finalize the documentation details.
@@ -238,7 +237,7 @@ The following details should be included:
- What concepts and procedures should the documentation guide and enable the user to understand or
accomplish?
- To this end, what new page(s) are needed, if any? What pages or subsections need updates?
- Consider user, admin, and API documentation changes and additions.
+ Consider changes and additions to user, admin, and API documentation.
- For any guide or instruction set, should it help address a single use case, or be flexible to
address a certain range of use cases?
- Do we need to update a previously recommended workflow? Should we link the new feature from
@@ -248,10 +247,139 @@ The following details should be included:
- Include suggested titles of any pages or subsection headings, if applicable.
- List any documentation that should be cross-linked, if applicable.
+### Including docs with code
+
+Currently, the Technical Writing team strongly encourages including documentation in
+the same merge request as the code that it relates to, but this is not strictly mandatory.
+It's still common for documentation to be added in an MR separate from the feature MR.
+
+Engineering teams may elect to adopt a workflow where it is **mandatory** that docs
+are included in the code MR, as part of their [definition of done](../contributing/merge_request_workflow.md#definition-of-done).
+When a team adopts this workflow, that team's engineers must include their docs in the **same**
+MR as their feature code, at all times.
+
+#### Downsides of separate docs MRs
+
+A workflow that has documentation separated into its own MR has many downsides.
+
+If the documentation merges **before** the feature:
+
+- GitLab.com users might try to use the feature before it's released, driving support tickets.
+- If the feature is delayed, the documentation might not be pulled/reverted in time and could be
+ accidentally included in the self-managed package for that release.
+
+If the documentation merges **after** the feature:
+
+- The feature might be included in the self-managed package, but without any documentation
+ if the docs MR misses the cutoff.
+- A feature might show up in the GitLab.com UI before any documentation exists for it.
+ Users surprised by this feature will search for documentation and won't find it, possibly driving
+ support tickets.
+
+Having two separate MRs means:
+
+- Two different people might be responsible for merging one feature, which is not workable
+ with an asynchronous work style. The feature might merge while the technical writer is asleep,
+ creating a potentially lengthy delay between the two merges.
+- If the docs MR is assigned to the same maintainer who is responsible for the feature
+  code MR, they have to review and juggle two MRs instead of dealing with just one.
+
+Documentation quality might be lower, because:
+
+- Having docs in a separate MR means far fewer people will see and verify them,
+ increasing the likelihood that issues will be missed.
+- In a "split" workflow, engineers might only create the documentation MR once the
+ feature MR is ready, or almost ready. This gives the technical writer little time
+ to learn about the feature in order to do a good review. It also increases pressure
+ on them to review and merge faster than desired, letting problems slip in due to haste.
+
+#### Benefits of always including docs with code
+
+Including docs with code (and doing it early in the development process) has many benefits:
+
+- There are no timing issues connected to releases:
+ - If a feature slips to the next release, the documentation slips too.
+ - If the feature *just* makes it into a release, the docs *just* make it in too.
+ - If a feature makes it to GitLab.com early, the documentation will be ready for
+ our early adopters.
+- Only a single person will be responsible for merging the feature (the code maintainer).
+- The technical writer will have more time to gain an understanding of the feature
+ and will be better able to verify the content of the docs in the Review App or GDK.
+ They will also be able to offer advice for improving the UI text or offer additional use cases.
+- The documentation will have increased visibility:
+ - Everyone involved in the merge request will see the docs. This could include product
+ managers, multiple engineers with deep domain knowledge, as well as the code reviewers
+ and maintainer. They will be more likely to catch issues with examples, as well
+ as background or concepts that the technical writer may not be aware of.
+ - Increasing visibility of the documentation also has the side effect of improving
+ *other* engineers' documentation. By reviewing each other's MRs, each engineer's
+ own documentation skills will improve.
+- Thinking about the documentation early can help engineers generate better examples,
+ as they will need to think about what examples a user will want, and will need to
+ make sure the code they write implements that example properly.
+
+#### Docs with code as a workflow
+
+In order to have docs included with code as a mandatory workflow, some changes might
+need to happen to a team's current workflow:
+
+- The engineers must strive to include the docs early in the development process,
+ to give ample time for review, not just from the technical writer, but also the
+ code reviewer and maintainer.
+- Reviewers and maintainers must also review the docs during code reviews, to make
+ sure the described processes match the expected use of the feature, and that examples
+ are correct. They do *not* need to worry about style, grammar, and so on.
+- The technical writer must be assigned the MR directly and not only pinged. Thanks
+ to the ability to have [multiple assignees for any MR](../../user/project/merge_requests/getting_started.md#multiple-assignees-starter),
+ this can be done at any time, but must be before the code maintainer review. It's
+ common to have both the docs and code reviews happening at the same time, with the
+  author, reviewer, and technical writer discussing the docs together.
+- When the docs are ready, the technical writer will click **Approve** and usually
+ will no longer be involved in the MR. If the feature changes during code review and
+ the docs are updated, the technical writer must be reassigned the MR to verify the
+ update.
+- Maintainers are allowed to merge features with the docs "as-is", even if the technical
+  writer has not given final approval yet, as long as the merge request has all other
+  required approvals. The **docs reviews must not be blockers**. Therefore, it's important
+  to get the docs included and assigned to the technical writers early.
+  If the feature is merged before final docs approval, the maintainer must create
+  a [post-merge follow-up issue](#post-merge-reviews) and assign it to both the engineer
+  and technical writer.
+
+You can visualize the parallel workflow for code and docs reviews as:
+
+```mermaid
+graph TD
+ A("Feature MR Created (Engineer)") --> |Assign| B("Code Review (reviewer)")
+ B --> |"Approve / Reassign"| C("Code Review (maintainer)")
+ C --> |Approve| F("Merge (maintainer)")
+ A --> D("Docs Added (Engineer)")
+ D --> |Assign| E("Docs Review (Tech Writer)")
+ E --> |Approve| F
+```
+
+For complex features split over multiple merge requests:
+
+- If a merge request is implementing components for a future feature, but the components
+ are not accessible to users yet, then no documentation should be included.
+- If a merge request will expose a feature to users in any way, such as an enabled
+ UI element, an API endpoint, or anything similar, then that MR **must** have docs.
+ Note that this may mean multiple docs additions could happen in the buildup to the
+ implementation of a single large feature, for example API docs and feature usage docs.
+- If it's unclear which engineer should add the feature documentation into their
+ MR, the engineering manager should decide during planning, and tie the documentation
+ to the last MR that must be merged before a feature is considered released.
+ This is often, but not always, a frontend MR.
+
## For all other documentation
-These documentation changes are not associated with the release of a new or updated feature, and are
-therefore labeled `backstage` in GitLab, rather than `feature`. They may include:
+Documentation changes that are not associated with the release of a new or updated feature
+do not take the `~feature` label, but still need the `~documentation` label.
+
+They may include:
- Documentation created or updated to improve accuracy, completeness, ease of use, or any reason
other than a [feature change](#for-a-product-change).
diff --git a/doc/development/elasticsearch.md b/doc/development/elasticsearch.md
index b8d2a873d8b..16a86c06317 100644
--- a/doc/development/elasticsearch.md
+++ b/doc/development/elasticsearch.md
@@ -36,7 +36,7 @@ Additionally, if you need large repos or multiple forks for testing, please cons
The Elasticsearch integration depends on an external indexer. We ship an [indexer written in Go](https://gitlab.com/gitlab-org/gitlab-elasticsearch-indexer). The user must trigger the initial indexing via a rake task but, after this is done, GitLab itself will trigger reindexing when required via `after_` callbacks on create, update, and destroy that are inherited from [/ee/app/models/concerns/elastic/application_versioned_search.rb](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/app/models/concerns/elastic/application_versioned_search.rb).
-All indexing after the initial one is done via `ElasticIndexerWorker` (Sidekiq jobs).
+After initial indexing is complete, create, update, and delete operations for all models except projects (see [#207494](https://gitlab.com/gitlab-org/gitlab/issues/207494)) are tracked in a Redis [`ZSET`](https://redis.io/topics/data-types#sorted-sets). A regular `sidekiq-cron` `ElasticIndexBulkCronWorker` processes this queue, updating many Elasticsearch documents at a time with the [Bulk Request API](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html).
Search queries are generated by the concerns found in [ee/app/models/concerns/elastic](https://gitlab.com/gitlab-org/gitlab/tree/master/ee/app/models/concerns/elastic). These concerns are also in charge of access control, and have been a historic source of security bugs so please pay close attention to them!
diff --git a/doc/development/event_tracking/backend.md b/doc/development/event_tracking/backend.md
index c571439af8a..dc4d7279671 100644
--- a/doc/development/event_tracking/backend.md
+++ b/doc/development/event_tracking/backend.md
@@ -1,34 +1,5 @@
-# Backend tracking guide
+---
+redirect_to: '../../telemetry/backend.md'
+---
-GitLab provides `Gitlab::Tracking`, an interface that wraps the [Snowplow Ruby Tracker](https://github.com/snowplow/snowplow/wiki/ruby-tracker) for tracking custom events.
-
-## Tracking in Ruby
-
-Custom event tracking and instrumentation can be added by directly calling the `GitLab::Tracking.event` class method, which accepts the following arguments:
-
-| argument | type | default value | description |
-|:-----------|:-------|:---------------------------|:------------|
-| `category` | string | 'application' | Area or aspect of the application. This could be `HealthCheckController` or `Lfs::FileTransformer` for instance. |
-| `action` | string | 'generic' | The action being taken, which can be anything from a controller action like `create` to something like an Active Record callback. |
-| `data` | object | {} | Additional data such as `label`, `property`, `value`, and `context` as described [in our Feature Instrumentation taxonomy](https://about.gitlab.com/handbook/product/feature-instrumentation/#taxonomy). These will be set as empty strings if you don't provide them. |
-
-Tracking can be viewed as either tracking user behavior, or can be utilized for instrumentation to monitor and visual performance over time in an area or aspect of code.
-
-For example:
-
-```ruby
-class Projects::CreateService < BaseService
- def execute
- project = Project.create(params)
-
- Gitlab::Tracking.event('Projects::CreateService', 'create_project',
- label: project.errors.full_messages.to_sentence,
- value: project.valid?
- )
- end
-end
-```
-
-### Performance
-
-We use the [AsyncEmitter](https://github.com/snowplow/snowplow/wiki/Ruby-Tracker#52-the-asyncemitter-class) when tracking events, which allows for instrumentation calls to be run in a background thread. This is still an active area of development.
+This document was moved to [another location](../../telemetry/backend.md).
diff --git a/doc/development/event_tracking/frontend.md b/doc/development/event_tracking/frontend.md
index c767efc65b2..0e98daf15bb 100644
--- a/doc/development/event_tracking/frontend.md
+++ b/doc/development/event_tracking/frontend.md
@@ -1,143 +1,5 @@
-# Frontend tracking guide
+---
+redirect_to: '../../telemetry/frontend.md'
+---
-GitLab provides `Tracking`, an interface that wraps the [Snowplow JavaScript Tracker](https://github.com/snowplow/snowplow/wiki/javascript-tracker) for tracking custom events. There are a few ways to utilize tracking, but each generally requires at minimum, a `category` and an `action`. Additional data can be provided that adheres to our [Feature instrumentation taxonomy](https://about.gitlab.com/handbook/product/feature-instrumentation/#taxonomy).
-
-| field | type | default value | description |
-|:-----------|:-------|:---------------------------|:------------|
-| `category` | string | document.body.dataset.page | Page or subsection of a page that events are being captured within. |
-| `action` | string | 'generic' | Action the user is taking. Clicks should be `click` and activations should be `activate`, so for example, focusing a form field would be `activate_form_input`, and clicking a button would be `click_button`. |
-| `data` | object | {} | Additional data such as `label`, `property`, `value`, and `context` as described [in our Feature Instrumentation taxonomy](https://about.gitlab.com/handbook/product/feature-instrumentation/#taxonomy). |
-
-## Tracking in HAML (or Vue Templates)
-
-When working within HAML (or Vue templates) we can add `data-track-*` attributes to elements of interest. All elements that have a `data-track-event` attribute will automatically have event tracking bound on clicks.
-
-Below is an example of `data-track-*` attributes assigned to a button:
-
-```haml
-%button.btn{ data: { track: { event: "click_button", label: "template_preview", property: "my-template" } } }
-```
-
-```html
-<button class="btn"
- data-track-event="click_button"
- data-track-label="template_preview"
- data-track-property="my-template"
-/>
-```
-
-Event listeners are bound at the document level to handle click events on or within elements with these data attributes. This allows for them to be properly handled on rerendering and changes to the DOM, but it's important to know that because of the way these events are bound, click events shouldn't be stopped from propagating up the DOM tree. If for any reason click events are being stopped from propagating, you'll need to implement your own listeners and follow the instructions in [Tracking in raw JavaScript](#tracking-in-raw-javascript).
-
-Below is a list of supported `data-track-*` attributes:
-
-| attribute | required | description |
-|:----------------------|:---------|:------------|
-| `data-track-event` | true | Action the user is taking. Clicks must be prepended with `click` and activations must be prepended with `activate`. For example, focusing a form field would be `activate_form_input` and clicking a button would be `click_button`. |
-| `data-track-label` | false | The `label` as described [in our Feature Instrumentation taxonomy](https://about.gitlab.com/handbook/product/feature-instrumentation/#taxonomy). |
-| `data-track-property` | false | The `property` as described [in our Feature Instrumentation taxonomy](https://about.gitlab.com/handbook/product/feature-instrumentation/#taxonomy). |
-| `data-track-value` | false | The `value` as described [in our Feature Instrumentation taxonomy](https://about.gitlab.com/handbook/product/feature-instrumentation/#taxonomy). If omitted, this will be the elements `value` property or an empty string. For checkboxes, the default value will be the element's checked attribute or `false` when unchecked. |
-| `data-track-context` | false | The `context` as described [in our Feature Instrumentation taxonomy](https://about.gitlab.com/handbook/product/feature-instrumentation/#taxonomy). |
-
-## Tracking within Vue components
-
-There's a tracking Vue mixin that can be used in components if more complex tracking is required. To use it, first import the `Tracking` library and request a mixin.
-
-```javascript
-import Tracking from '~/tracking';
-const trackingMixin = Tracking.mixin({ label: 'right_sidebar' });
-```
-
-You can provide default options that will be passed along whenever an event is tracked from within your component. For instance, if all events within a component should be tracked with a given `label`, you can provide one at this time. Available defaults are `category`, `label`, `property`, and `value`. If no category is specified, `document.body.dataset.page` is used as the default.
-
-You can then use the mixin normally in your component with the `mixin`, Vue declaration. The mixin also provides the ability to specify tracking options in `data` or `computed`. These will override any defaults and allows the values to be dynamic from props, or based on state.
-
-```javascript
-export default {
- mixins: [trackingMixin],
- // ...[component implementation]...
- data() {
- return {
- expanded: false,
- tracking: {
- label: 'left_sidebar'
- }
- };
- },
-}
-```
-
-The mixin provides a `track` method that can be called within the template, or from component methods. An example of the whole implementation might look like the following.
-
-```javascript
-export default {
- mixins: [Tracking.mixin({ label: 'right_sidebar' })],
- data() {
- return {
- expanded: false,
- };
- },
- methods: {
- toggle() {
- this.expanded = !this.expanded;
- this.track('click_toggle', { value: this.expanded })
- }
- }
-};
-```
-
-And if needed within the template, you can use the `track` method directly as well.
-
-```html
-<template>
- <div>
- <a class="toggle" @click.prevent="toggle">Toggle</a>
- <div v-if="expanded">
- <p>Hello world!</p>
- <a @click.prevent="track('click_action')">Track an event</a>
- </div>
- </div>
-</template>
-```
-
-## Tracking in raw JavaScript
-
-Custom event tracking and instrumentation can be added by directly calling the `Tracking.event` static function. The following example demonstrates tracking a click on a button by calling `Tracking.event` manually.
-
-```javascript
-import Tracking from `~/tracking`;
-
-const button = document.getElementById('create_from_template_button');
-button.addEventListener('click', () => {
- Tracking.event('dashboard:projects:index', 'click_button', {
- label: 'create_from_template',
- property: 'template_preview',
- value: 'rails',
- });
-})
-```
-
-## Tests and test helpers
-
-In Karma tests, you can use the following:
-
-```javascript
-import { mockTracking, triggerEvent } from 'spec/helpers/tracking_helper';
-
-describe('my component', () => {
- let trackingSpy;
-
- beforeEach(() => {
- const vm = mountComponent(MyComponent);
- trackingSpy = mockTracking('_category_', vm.$el, spyOn);
- });
-
- it('tracks an event when toggled', () => {
- triggerEvent('a.toggle');
-
- expect(trackingSpy).toHaveBeenCalledWith('_category_', 'click_edit_button', {
- label: 'right_sidebar',
- property: 'confidentiality',
- });
- });
-});
-```
+This document was moved to [another location](../../telemetry/frontend.md).
diff --git a/doc/development/event_tracking/index.md b/doc/development/event_tracking/index.md
index 39714cf4af3..ae555e99c6b 100644
--- a/doc/development/event_tracking/index.md
+++ b/doc/development/event_tracking/index.md
@@ -1,72 +1,5 @@
-# Product Analytics
+---
+redirect_to: '../../telemetry/index.md'
+---
-At GitLab, we encourage event tracking so we can iterate on and improve the project and user experience.
-
-We do this by running experiments, and collecting analytics for features and feature variations. This is:
-
-- So we generally know engagement.
-- A way to approach A/B testing.
-
-As developers, we should attempt to add tracking and instrumentation where possible. This enables the Product team to better understand:
-
-- User engagement.
-- Usage patterns.
-- Other metrics that can potentially be improved on.
-
-To maintain consistency, and not adversely effect performance, we have some basic tracking functionality exposed at both the frontend and backend layers that can be utilized while building new features or updating existing features.
-
-We also encourage users to enable tracking, and we embrace full transparency with our tracking approach so it can be easily understood and trusted. By enabling tracking, users can:
-
-- Contribute back to the wider community.
-- Help GitLab improve on the product.
-
-## Implementing tracking
-
-Event tracking can be implemented on either the frontend or the backend layers, and each can be approached slightly differently since they have slightly different concerns.
-
-In GitLab, many actions can be initiated via the web interface, but they can also be initiated via an API client (an iOS applications is a good example of this), or via `git` directly. Crucially, this means that tracking should be considered holistically for the feature that's being instrumented.
-
-The data team should be involved when defining analytics and can be consulted when coming up with ways of presenting data that's being tracked. This allows our event data to be considered carefully and presented in ways that may reveal details about user engagement that may not be fully understood or interactions where we can make improvements. You can [contact the data team](https://about.gitlab.com/handbook/business-ops/data-team/#contact-us) and consult with them when defining tracking strategies.
-
-### Frontend
-
-Generally speaking, the frontend can track user actions and events, like:
-
-- Clicking links or buttons.
-- Submitting forms.
-- Other typically interface-driven actions.
-
-See [Frontend tracking guide](frontend.md).
-
-### Backend
-
-From the backend, the events that are tracked will likely consist of things like the creation or deletion of records and other events that might be triggered from layers that aren't necessarily only available in the interface.
-
-See [Backend tracking guide](backend.md).
-
-Also, see [Application performance metrics](../instrumentation.md) if you are after instrumenting application performance metrics.
-
-## Enabling tracking
-
-Tracking can be enabled at:
-
-- The instance level, which will enable tracking on both the frontend and backend layers.
-- User level, though user tracking can be disabled on a per-user basis. GitLab tracking respects the [Do Not Track](https://www.eff.org/issues/do-not-track) standard, so any user who has enabled the Do Not Track option in their browser will also not be tracked from a user level.
-
-We utilize Snowplow for the majority of our tracking strategy, and it can be enabled by navigating to:
-
-- **Admin Area > Settings > Integrations** in the UI.
-- `admin/application_settings/integrations` in your browser.
-
-The following configuration is required:
-
-| Name | Value |
-| ------------- | ------------------------- |
-| Collector | `snowplow.trx.gitlab.net` |
-| Site ID | `gitlab` |
-| Cookie domain | `.gitlab.com` |
-
-Once enabled, tracking events can be inspected locally by either:
-
-- Looking at the network panel of the browser's development tools
-- Using the [Snowplow Chrome Extension](https://chrome.google.com/webstore/detail/snowplow-inspector/maplkdomeamdlngconidoefjpogkmljm).
+This document was moved to [another location](../../telemetry/index.md).
diff --git a/doc/development/experiment_guide/index.md b/doc/development/experiment_guide/index.md
index 98086fc63ea..66b7bd8ed0f 100644
--- a/doc/development/experiment_guide/index.md
+++ b/doc/development/experiment_guide/index.md
@@ -2,7 +2,7 @@
Experiments will be conducted by teams from the [Growth Section](https://about.gitlab.com/handbook/engineering/development/growth/) and are not tied to releases, because they will primarily target GitLab.com.
-Experiments will be run as an A/B test and will be behind a feature flag to turn the test on or off. Based on the data the experiment generates, the team decides if the experiment had a positive impact and will be the new default or rolled back.
+Experiments will be run as an A/B test and will be behind a feature flag to turn the test on or off. Based on the data the experiment generates, the team will decide if the experiment had a positive impact and will be the new default or rolled back.
## Follow-up issue
@@ -55,7 +55,7 @@ The author then adds a comment to this piece of code and adds a link to the issu
end
```
-- Track necessary events. See the [event tracking guide](../event_tracking/index.md) for details.
+- Track necessary events. See the [telemetry guide](../../telemetry/index.md) for details.
- After the merge request is merged, use [`chatops`](../../ci/chatops/README.md) to enable the feature flag and start the experiment. For visibility, please run the command in the `#s_growth` channel:
```
diff --git a/doc/development/fe_guide/dependencies.md b/doc/development/fe_guide/dependencies.md
new file mode 100644
index 00000000000..0f5825992e9
--- /dev/null
+++ b/doc/development/fe_guide/dependencies.md
@@ -0,0 +1,40 @@
+# Frontend dependencies
+
+## Package manager
+
+We use [Yarn](https://yarnpkg.com/) to manage frontend dependencies. There are a few exceptions:
+
+- [FontAwesome](https://fontawesome.com/), installed via the `font-awesome-rails` gem: we are working to replace it with
+ [GitLab SVGs](https://gitlab-org.gitlab.io/gitlab-svgs/) icons library.
+- [ACE](https://ace.c9.io/) editor, installed via the `ace-rails-ap` gem.
+- Other dependencies found under `vendor/assets/`.
+
+## Updating dependencies
+
+### Renovate GitLab Bot
+
+We use the [Renovate GitLab Bot](https://gitlab.com/gitlab-org/frontend/renovate-gitlab-bot) to
+automatically create merge requests for updating dependencies of several projects. You can find the
+up-to-date list of projects managed by the Renovate bot in the project’s README. Some key dependencies
+updated using Renovate are:
+
+- [`@gitlab/ui`](https://gitlab.com/gitlab-org/gitlab-ui/)
+- [`@gitlab/svgs`](https://gitlab.com/gitlab-org/gitlab-svgs/)
+- [`@gitlab/eslint-config`](https://gitlab.com/gitlab-org/gitlab-eslint-config)
+
+### Blocked dependencies
+
+We discourage installing some dependencies in [GitLab repository](https://gitlab.com/gitlab-org/gitlab)
+because they can create conflicts in the dependency tree. Blocked dependencies are declared in the
+`blockDependencies` property of GitLab’s [`package.json` file](https://gitlab.com/gitlab-org/gitlab/-/blob/master/package.json).
+
+## Dependency notes
+
+### BootstrapVue
+
+[BootstrapVue](https://bootstrap-vue.js.org/) is a component library built with Vue.js and Bootstrap.
+We wrap BootstrapVue components in [GitLab UI](https://gitlab.com/gitlab-org/gitlab-ui/) with the
+purpose of applying visual styles and usage guidelines specified in the
+[Pajamas Design System](https://design.gitlab.com/). For this reason, we recommend not installing
+BootstrapVue directly in the GitLab repository. Instead, first create a wrapper in GitLab UI
+for the BootstrapVue component you want to use.
diff --git a/doc/development/fe_guide/development_process.md b/doc/development/fe_guide/development_process.md
index 5b02098f020..92f3c82a6ea 100644
--- a/doc/development/fe_guide/development_process.md
+++ b/doc/development/fe_guide/development_process.md
@@ -4,7 +4,7 @@ You can find more about the organization of the frontend team in the [handbook](
## Development Checklist
-The idea is to remind us about specific topics during the time we build a new feature or start something. This is a common practice in other industries (like pilots) that also use standardised checklists to reduce problems early on.
+The idea is to remind us about specific topics during the time we build a new feature or start something. This is a common practice in other industries (such as aviation) that also use standardized checklists to reduce problems early on.
Copy the content over to your issue or merge request and if something doesn't apply simply remove it from your current list.
@@ -71,7 +71,7 @@ With the purpose of being [respectful of others' time](https://about.gitlab.com/
- includes tests
- includes a changelog entry (when necessary)
- Before assigning to a maintainer, assign to a reviewer.
-- If you assigned a merge request, or pinged someone directly, keep in mind that we work in different timezones and asynchronously, so be patient. Unless the merge request is urgent (like fixing a broken master), please don't DM or reassign the merge request before waiting for a 24-hour window.
+- If you assigned a merge request or pinged someone directly, be patient because we work in different timezones and asynchronously. Unless the merge request is urgent (like fixing a broken master), please don't DM or reassign the merge request before waiting for a 24-hour window.
- If you have a question regarding your merge request/issue, make it on the merge request/issue. When we DM each other, we no longer have a SSOT and [no one else is able to contribute](https://about.gitlab.com/handbook/values/#public-by-default).
- When you have a big WIP merge request with many changes, you're advised to get the review started before adding/removing significant code. Make sure it is assigned well before the release cut-off, as the reviewer(s)/maintainer(s) would always prioritize reviewing finished MRs before WIP ones.
- Make sure to remove the WIP title before the last round of review.
diff --git a/doc/development/fe_guide/event_tracking.md b/doc/development/fe_guide/event_tracking.md
index 13f107eebb1..ae555e99c6b 100644
--- a/doc/development/fe_guide/event_tracking.md
+++ b/doc/development/fe_guide/event_tracking.md
@@ -1,5 +1,5 @@
---
-redirect_to: '../event_tracking/index.md'
+redirect_to: '../../telemetry/index.md'
---
-This document was moved to [another location](../event_tracking/frontend.md).
+This document was moved to [another location](../../telemetry/index.md).
diff --git a/doc/development/fe_guide/graphql.md b/doc/development/fe_guide/graphql.md
index a9821edff0b..4a8fca3075b 100644
--- a/doc/development/fe_guide/graphql.md
+++ b/doc/development/fe_guide/graphql.md
@@ -53,7 +53,7 @@ fragment DesignListItem on Design {
}
```
-Fragments can be stored in separate files, imported and used in queries, mutations or other fragments.
+Fragments can be stored in separate files, imported and used in queries, mutations, or other fragments.
```javascript
#import "./designList.fragment.graphql"
@@ -258,6 +258,134 @@ export default {
};
```
+### Working with pagination
+
+GitLab's GraphQL API uses [Relay-style cursor pagination](https://www.apollographql.com/docs/react/data/pagination/#cursor-based)
+for connection types. This means a "cursor" is used to keep track of where in the data
+set the next items should be fetched from.
+
+Every connection type (for example, `DesignConnection` and `DiscussionConnection`) has a field `pageInfo` that contains the information required for pagination:
+
+```javascript
+pageInfo {
+ endCursor
+ hasNextPage
+ hasPreviousPage
+ startCursor
+}
+```
+
+Here:
+
+- `startCursor` and `endCursor` display the cursors of the first and last items,
+  respectively.
+- `hasPreviousPage` and `hasNextPage` allow us to check if there are more pages
+ available before or after the current page.
+
+When we fetch data with a connection type, we can pass a cursor as the `after` or `before`
+parameter, indicating a starting or ending point of our pagination. It should be
+followed by the `first` or `last` parameter, respectively, to indicate _how many_ items
+we want to fetch after or before a given point.
+
+For example, here we're fetching 10 designs after a cursor:
+
+```javascript
+query {
+ project(fullPath: "root/my-project") {
+ id
+ issue(iid: "42") {
+ designCollection {
+ designs(atVersion: null, after: "Ihwffmde0i", first: 10) {
+ edges {
+ node {
+ id
+ }
+ }
+ }
+ }
+ }
+ }
+}
+```
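+
+Similarly, to fetch the 10 designs _before_ a given cursor, pass `before` together with `last` (the cursor value here is illustrative, mirroring the example above):
+
+```javascript
+query {
+  project(fullPath: "root/my-project") {
+    id
+    issue(iid: "42") {
+      designCollection {
+        designs(atVersion: null, before: "Ihwffmde0i", last: 10) {
+          edges {
+            node {
+              id
+            }
+          }
+        }
+      }
+    }
+  }
+}
+```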
+
+#### Using `fetchMore` method in components
+
+When making an initial fetch, we usually want to start a pagination from the beginning.
+In this case, we can either:
+
+- Skip passing a cursor.
+- Pass `null` explicitly to `after`.
+
+After data is fetched, we should save the `pageInfo` object. Let's assume we're storing
+it in the Vue component's `data`:
+
+```javascript
+data() {
+ return {
+ pageInfo: null,
+ }
+},
+apollo: {
+ designs: {
+ query: projectQuery,
+ variables() {
+ return {
+ // rest of design variables
+ ...
+ first: 10,
+ };
+ },
+ result(res) {
+ this.pageInfo = res.data?.project?.issue?.designCollection?.designs?.pageInfo;
+ },
+ },
+},
+```
+
+When we want to move to the next page, we use the Apollo `fetchMore` method, passing a
+new cursor (and, optionally, new variables). In the `updateQuery` hook, we have
+to return the result we want to see in the Apollo cache after fetching the next page.
+
+```javascript
+fetchNextPage() {
+ // as a first step, we're checking if we have more pages to move forward
+ if (this.pageInfo?.hasNextPage) {
+ this.$apollo.queries.designs.fetchMore({
+ variables: {
+ // rest of design variables
+ ...
+ first: 10,
+ after: this.pageInfo?.endCursor,
+ },
+ updateQuery(previousResult, { fetchMoreResult }) {
+        // here we can implement the logic of adding the newly fetched designs to the existing ones (for example, if we use infinite scroll)
+        // or replacing the old result with the new one if we use numbered pages
+
+        const newDesigns = fetchMoreResult.project.issue.designCollection.designs;
+        previousResult.project.issue.designCollection.designs.edges.push(...newDesigns.edges);
+
+ return previousResult;
+ },
+ });
+ }
+}
+```
+
+Please note we don't have to save `pageInfo` again; `fetchMore` triggers the query's
+`result` hook as well.
+
+#### Limitations
+
+Currently, bidirectional pagination doesn't work:
+
+- `hasNextPage` returns a correct value only when we paginate forward using `endCursor`
+ and `first` parameters.
+- `hasPreviousPage` returns a correct value only when we paginate backward using
+ `startCursor` and `last` parameters.
+
+This should be resolved in the scope of the issue
+[Bi-directional Pagination in GraphQL doesn't work as expected](https://gitlab.com/gitlab-org/gitlab/-/issues/208301).
+
### Testing
#### Mocking response as component data
diff --git a/doc/development/fe_guide/icons.md b/doc/development/fe_guide/icons.md
index 36537a22e67..ea321330c41 100644
--- a/doc/development/fe_guide/icons.md
+++ b/doc/development/fe_guide/icons.md
@@ -65,7 +65,7 @@ export default {
</template>
```
-- **name** Name of the Icon in the SVG Sprite ([Overview is available here][svg-preview]).
+- **name** Name of the Icon in the SVG Sprite ([Overview is available here][svg-preview]).
- **size (optional)** Number value for the size which is then mapped to a specific CSS class
(Available Sizes: 8, 12, 16, 18, 24, 32, 48, 72 are mapped to `sXX` css classes)
- **css-classes (optional)** Additional CSS Classes to add to the svg tag.
diff --git a/doc/development/fe_guide/index.md b/doc/development/fe_guide/index.md
index ba77a51a089..4e599049470 100644
--- a/doc/development/fe_guide/index.md
+++ b/doc/development/fe_guide/index.md
@@ -9,7 +9,7 @@ GitLab is built on top of [Ruby on Rails](https://rubyonrails.org) using [Haml][
Be wary of [the limitations that come with using Hamlit][hamlit-limits]. We also use [SCSS](https://sass-lang.com) and plain JavaScript with
modern ECMAScript standards supported through [Babel][babel] and ES module support through [webpack][webpack].
-Working with our frontend assets requires Node (v8.10.0 or greater) and Yarn
+Working with our frontend assets requires Node (v10.13.0 or greater) and Yarn
(v1.10.0 or greater). You can find information on how to install these on our
[installation guide][install].
@@ -23,22 +23,22 @@ Use [BrowserStack](https://www.browserstack.com/) to test with our supported bro
Current high-level frontend goals are listed on [Frontend Epics](https://gitlab.com/groups/gitlab-org/-/epics?label_name%5B%5D=frontend).
-## [Principles](principles.md)
+## Principles
-High-level guidelines for contributing to GitLab.
+[High-level guidelines](principles.md) for contributing to GitLab.
-## [Development Process](development_process.md)
+## Development Process
-How we plan and execute the work on the frontend.
+How we [plan and execute](development_process.md) the work on the frontend.
-## [Architecture](architecture.md)
+## Architecture
-How we go about making fundamental design decisions in GitLab's frontend team
+How we go about [making fundamental design decisions](architecture.md) in GitLab's frontend team
or make changes to our frontend development guidelines.
-## [Testing](../testing_guide/frontend_testing.md)
+## Testing
-How we write frontend tests, run the GitLab test suite, and debug test related
+How we write [frontend tests](../testing_guide/frontend_testing.md), run the GitLab test suite, and debug test related
issues.
## Pajamas Design System
@@ -46,29 +46,33 @@ issues.
Reusable components with technical and usage guidelines can be found in our
[Pajamas Design System](https://design.gitlab.com/).
-## [Design Patterns](design_patterns.md)
+## Design Patterns
-Common JavaScript design patterns in GitLab's codebase.
+Common JavaScript [design patterns](design_patterns.md) in GitLab's codebase.
-## [Vue.js Best Practices](vue.md)
+## Vue.js Best Practices
-Vue specific design patterns and practices.
+Vue specific [design patterns and practices](vue.md).
-## [Vuex](vuex.md)
+## Vuex
-Vuex specific design patterns and practices.
+[Vuex](vuex.md) specific design patterns and practices.
-## [Axios](axios.md)
+## Axios
-Axios specific practices and gotchas.
+[Axios](axios.md) specific practices and gotchas.
-## [GraphQL](graphql.md)
+## GraphQL
-How to use GraphQL
+How to use [GraphQL](graphql.md).
-## [Icons and Illustrations](icons.md)
+## Icons and Illustrations
-How we use SVG for our Icons and Illustrations.
+How we use SVG for our [Icons and Illustrations](icons.md).
+
+## Dependencies
+
+General information about frontend [dependencies](dependencies.md) and how we manage them.
## Frontend FAQ
diff --git a/doc/development/fe_guide/principles.md b/doc/development/fe_guide/principles.md
index 6fb3456222f..2bef48fddcf 100644
--- a/doc/development/fe_guide/principles.md
+++ b/doc/development/fe_guide/principles.md
@@ -8,8 +8,8 @@ Discuss your architecture design in an issue before writing code. This helps dec
## Be consistent
-There are multiple ways of writing code to accomplish the same results. We should be as consistent as possible in how we write code across our codebases. This will make it more easier us to maintain our code across GitLab.
+There are multiple ways of writing code to accomplish the same results. We should be as consistent as possible in how we write code across our codebases. This will make it easier for us to maintain our code across GitLab.
## Improve code [iteratively](https://about.gitlab.com/handbook/values/#iteration)
-Whenever you see with existing code that does not follow our current style guide, update it proactively. You don't need to fix everything, but each merge request should iteratively improve our codebase, and reduce technical debt where possible.
+Whenever you see existing code that does not follow our current style guide, update it proactively. You don’t need to fix everything, but each merge request should iteratively improve our codebase, and reduce technical debt where possible.
diff --git a/doc/development/fe_guide/style/javascript.md b/doc/development/fe_guide/style/javascript.md
index f40e8c7b5df..7951c702601 100644
--- a/doc/development/fe_guide/style/javascript.md
+++ b/doc/development/fe_guide/style/javascript.md
@@ -175,6 +175,21 @@ are loaded dynamically with webpack.
Do not use `innerHTML`, `append()` or `html()` to set content. It opens up too many
vulnerabilities.
+## Avoid single-line conditional statements
+
+Indentation is important when scanning code as it gives a quick indication of the existence of branches, loops, and return points.
+This can help to quickly understand the control flow.
+
+```javascript
+// bad
+if (isThingNull) return '';
+
+// good
+if (isThingNull) {
+ return '';
+}
+```
+
## ESLint
ESLint behaviour can be found in our [tooling guide](../tooling.md).
diff --git a/doc/development/fe_guide/vuex.md b/doc/development/fe_guide/vuex.md
index 0bb9e3b7d50..675f30feba6 100644
--- a/doc/development/fe_guide/vuex.md
+++ b/doc/development/fe_guide/vuex.md
@@ -1,12 +1,19 @@
# Vuex
-To manage the state of an application you should use [Vuex][vuex-docs].
+When there's a clear benefit to separating state management from components (e.g. due to state complexity) we recommend using [Vuex][vuex-docs] over any other Flux pattern. Otherwise, feel free to manage state within the components.
+
+Vuex should be strongly considered when:
+
+- You expect multiple parts of the application to react to state changes
+- There's a need to share data between multiple components
+- There are complex interactions with the backend, e.g. multiple API calls
+- The app involves interacting with the backend via both a traditional REST API and GraphQL (especially when moving the REST API over to GraphQL is a pending backend task)
_Note:_ All of the below is explained in more detail in the official [Vuex documentation][vuex-docs].
## Separation of concerns
-Vuex is composed of State, Getters, Mutations, Actions and Modules.
+Vuex is composed of State, Getters, Mutations, Actions, and Modules.
When a user clicks on an action, we need to `dispatch` it. This action will `commit` a mutation that will change the state.
_Note:_ The action itself will not update the state, only a mutation should update the state.
diff --git a/doc/development/feature_flags/controls.md b/doc/development/feature_flags/controls.md
index 922995cb915..a9a75791db7 100644
--- a/doc/development/feature_flags/controls.md
+++ b/doc/development/feature_flags/controls.md
@@ -147,7 +147,7 @@ is always on or off to the users.
## Cleaning up
Once the change is deemed stable, submit a new merge request to remove the
-feature flag. This ensures the change is available to all users and self-hosted
+feature flag. This ensures the change is available to all users and self-managed
instances. Make sure to add the ~"feature flag" label to this merge request so
release managers are aware the changes are hidden behind a feature flag. If the
merge request has to be picked into a stable branch, make sure to also add the
diff --git a/doc/development/feature_flags/process.md b/doc/development/feature_flags/process.md
index 4b44c8dadca..0cca4117f1f 100644
--- a/doc/development/feature_flags/process.md
+++ b/doc/development/feature_flags/process.md
@@ -53,7 +53,7 @@ absolutely no way to use the feature until it is enabled.
### Including a feature behind feature flag in the final release
-In order to build a final release and present the feature for self-hosted
+In order to build a final release and present the feature for self-managed
users, the feature flag should be at least defaulted to **on**. If the feature
is deemed stable and there is confidence that removing the feature flag is safe,
consider removing the feature flag altogether.
@@ -126,8 +126,11 @@ need to revert a release, and because feature flags are disabled by default we
don't need to revert and pick any Git commits. In fact, all we have to do is
disable the feature, and in the worst case, perform cleanup. Let's say that
the cost of this is 2. In this case, our best case cost is 11: 10 to build the
-feature, and 1 to add the feature flag. The worst case cost is now 13: 10 to
-build the feature, 1 to add the feature flag, and 2 to disable and clean up.
+feature, and 1 to add the feature flag. The worst case cost is now 13:
+
+- 10 to build the feature.
+- 1 to add the feature flag.
+- 2 to disable and clean up.
Here we can see that in the best case scenario the work necessary is only a tiny
bit more compared to not using a feature flag. Meanwhile, the process of
diff --git a/doc/development/file_storage.md b/doc/development/file_storage.md
index 47a6babe8bc..bc3c16bd45d 100644
--- a/doc/development/file_storage.md
+++ b/doc/development/file_storage.md
@@ -21,6 +21,7 @@ There are many places where file uploading is used, according to contexts:
- CI Artifacts (archive, metadata, trace)
- LFS Objects
- Merge request diffs
+ - Design Management design thumbnails (EE)
## Disk storage
@@ -37,8 +38,9 @@ they are still not 100% standardized. You can see them below:
| Project avatars | yes | uploads/-/system/project/avatar/:id/:filename | `AvatarUploader` | Project |
| Issues/MR/Notes Markdown attachments | yes | uploads/:project_path_with_namespace/:random_hex/:filename | `FileUploader` | Project |
| Issues/MR/Notes Legacy Markdown attachments | no | uploads/-/system/note/attachment/:id/:filename | `AttachmentUploader` | Note |
+| Design Management design thumbnails (EE) | yes | uploads/-/system/design_management/action/image_v432x230/:id/:filename | `DesignManagement::DesignV432x230Uploader` | DesignManagement::Action |
| CI Artifacts (CE) | yes | `shared/artifacts/:disk_hash[0..1]/:disk_hash[2..3]/:disk_hash/:year_:month_:date/:job_id/:job_artifact_id` (:disk_hash is SHA256 digest of project_id) | `JobArtifactUploader` | Ci::JobArtifact |
-| LFS Objects (CE) | yes | shared/lfs-objects/:hex/:hex/:object_hash | `LfsObjectUploader` | LfsObject |
+| LFS Objects (CE) | yes | shared/lfs-objects/:hex/:hex/:object_hash | `LfsObjectUploader` | LfsObject |
| External merge request diffs | yes | shared/external-diffs/merge_request_diffs/mr-:parent_id/diff-:id | `ExternalDiffUploader` | MergeRequestDiff |
CI Artifacts and LFS Objects behave differently in CE and EE. In CE they inherit the `GitlabUploader`
diff --git a/doc/development/geo.md b/doc/development/geo.md
index a7ce09f822f..c4ce9fed283 100644
--- a/doc/development/geo.md
+++ b/doc/development/geo.md
@@ -509,6 +509,11 @@ that need to be taken care of:
- Health Check. If we can perform some pre-cheсks and make node unhealthy if something is wrong, we should do that.
The `rake gitlab:geo:check` command has to be updated too.
+### Geo self-service framework (alpha)
+
+We started developing a new [Geo self-service framework (alpha)](geo/framework.md)
+which makes it a lot easier to add a new data type.
+
## History of communication channel
The communication channel has changed since first iteration, you can
diff --git a/doc/development/geo/framework.md b/doc/development/geo/framework.md
index ada04cf2281..e58daacae13 100644
--- a/doc/development/geo/framework.md
+++ b/doc/development/geo/framework.md
@@ -142,3 +142,212 @@ ActiveRecord hooks:
The framework behind all this is located in
[`ee/lib/gitlab/geo/`](https://gitlab.com/gitlab-org/gitlab/-/tree/master/ee/lib/gitlab/geo).
+
+## Existing Replicator Strategies
+
+Before writing a new kind of Replicator Strategy, check below to see if your
+resource can already be handled by one of the existing strategies. Consult with
+the Geo team if you are unsure.
+
+### Blob Replicator Strategy
+
+Models that use
+[CarrierWave's](https://github.com/carrierwaveuploader/carrierwave) `Uploader::Base`
+can be easily supported by Geo with the `Geo::BlobReplicatorStrategy` module.
+
+First, each file should have its own primary ID and model. Geo strongly
+recommends treating *every single file* as a first-class citizen, because in
+our experience this greatly simplifies tracking replication and verification
+state.
+
+For example, to add support for files referenced by a `Widget` model with a
+`widgets` table, you would perform the following steps:
+
+1. Add verification state fields to the `widgets` table so the Geo primary can
+ track verification state:
+
+ ```ruby
+ # frozen_string_literal: true
+
+ class AddVerificationStateToWidgets < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def change
+ add_column :widgets, :verification_retry_at, :datetime_with_timezone
+ add_column :widgets, :last_verification_ran_at, :datetime_with_timezone
+ add_column :widgets, :verification_checksum, :string
+ add_column :widgets, :verification_failure, :string
+ add_column :widgets, :verification_retry_count, :integer
+ end
+ end
+ ```
+
+1. Add a partial index on `verification_failure` to ensure re-verification can
+ be performed efficiently:
+
+ ```ruby
+ # frozen_string_literal: true
+
+ class AddVerificationFailureIndexToWidgets < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :widgets, :verification_failure, where: "(verification_failure IS NOT NULL)", name: "widgets_verification_failure_partial"
+ end
+
+ def down
+ remove_concurrent_index :widgets, :verification_failure
+ end
+ end
+ ```
+
+1. Include `Gitlab::Geo::ReplicableModel` in the `Widget` class, and specify
+ the Replicator class `with_replicator Geo::WidgetReplicator`.
+
+ At this point the `Widget` class should look like this:
+
+ ```ruby
+ # frozen_string_literal: true
+
+ class Widget < ApplicationRecord
+ include ::Gitlab::Geo::ReplicableModel
+
+ with_replicator Geo::WidgetReplicator
+
+ mount_uploader :file, WidgetUploader
+
+ ...
+ end
+ ```
+
+1. Create `ee/app/replicators/geo/widget_replicator.rb`. Implement the
+   `#carrierwave_uploader` method, which should return a `CarrierWave::Uploader`,
+   and implement the private `#model` method to return the `Widget` class.
+
+ ```ruby
+ # frozen_string_literal: true
+
+ module Geo
+ class WidgetReplicator < Gitlab::Geo::Replicator
+ include ::Geo::BlobReplicatorStrategy
+
+ def carrierwave_uploader
+ model_record.file
+ end
+
+ private
+
+ def model
+ ::Widget
+ end
+ end
+ end
+ ```
+
+1. Create `ee/spec/replicators/geo/widget_replicator_spec.rb` and perform
+ the setup necessary to define the `model_record` variable for the shared
+ examples.
+
+ ```ruby
+ # frozen_string_literal: true
+
+ require 'spec_helper'
+
+ describe Geo::WidgetReplicator do
+ let(:model_record) { build(:widget) }
+
+ it_behaves_like 'a blob replicator'
+ end
+ ```
+
+1. Create the `widget_registry` table so Geo secondaries can track the sync and
+ verification state of each Widget's file:
+
+ ```ruby
+ # frozen_string_literal: true
+
+ class CreateWidgetRegistry < ActiveRecord::Migration[5.2]
+ DOWNTIME = false
+
+ def change
+ create_table :widget_registry, id: :serial, force: :cascade do |t|
+ t.integer :widget_id, null: false
+ t.integer :state, default: 0, null: false
+ t.integer :retry_count, default: 0
+ t.string :last_sync_failure, limit: 255
+ t.datetime_with_timezone :retry_at
+ t.datetime_with_timezone :last_synced_at
+ t.datetime_with_timezone :created_at, null: false
+
+      t.index :widget_id, name: :index_widget_registry_on_widget_id, using: :btree
+ t.index :retry_at, name: :index_widget_registry_on_retry_at, using: :btree
+ t.index :state, name: :index_widget_registry_on_state, using: :btree
+ end
+ end
+ end
+ ```
+
+1. Create `ee/app/models/geo/widget_registry.rb`:
+
+ ```ruby
+ # frozen_string_literal: true
+
+ class Geo::WidgetRegistry < Geo::BaseRegistry
+ include Geo::StateMachineRegistry
+
+ belongs_to :widget, class_name: 'Widget'
+ end
+ ```
+
+1. Create `ee/spec/factories/geo/widget_registry.rb`:
+
+ ```ruby
+ # frozen_string_literal: true
+
+ FactoryBot.define do
+ factory :widget_registry, class: 'Geo::WidgetRegistry' do
+ widget
+ state { Geo::WidgetRegistry.state_value(:pending) }
+
+ trait :synced do
+ state { Geo::WidgetRegistry.state_value(:synced) }
+ last_synced_at { 5.days.ago }
+ end
+
+ trait :failed do
+ state { Geo::WidgetRegistry.state_value(:failed) }
+ last_synced_at { 1.day.ago }
+ retry_count { 2 }
+ last_sync_failure { 'Random error' }
+ end
+
+ trait :started do
+ state { Geo::WidgetRegistry.state_value(:started) }
+ last_synced_at { 1.day.ago }
+ retry_count { 0 }
+ end
+ end
+ end
+ ```
+
+1. Create `ee/spec/models/geo/widget_registry_spec.rb`:
+
+ ```ruby
+ # frozen_string_literal: true
+
+ require 'spec_helper'
+
+ describe Geo::WidgetRegistry, :geo, type: :model do
+ let_it_be(:registry) { create(:widget_registry) }
+
+ specify 'factory is valid' do
+ expect(registry).to be_valid
+ end
+ end
+ ```
+
+Widget files should now be replicated and verified by Geo!
diff --git a/doc/development/gitaly.md b/doc/development/gitaly.md
index 32017a284d5..b275a265cc6 100644
--- a/doc/development/gitaly.md
+++ b/doc/development/gitaly.md
@@ -240,13 +240,13 @@ Here are the steps to gate a new feature in Gitaly behind a feature flag.
1. Create a package scoped flag name:
- ```go
+ ```golang
var findAllTagsFeatureFlag = "go-find-all-tags"
```
1. Create a switch in the code using the `featureflag` package:
- ```go
+ ```golang
if featureflag.IsEnabled(ctx, findAllTagsFeatureFlag) {
// go implementation
} else {
@@ -256,7 +256,7 @@ Here are the steps to gate a new feature in Gitaly behind a feature flag.
1. Create Prometheus metrics:
- ```go
+ ```golang
var findAllTagsRequests = prometheus.NewCounterVec(
prometheus.CounterOpts{
Name: "gitaly_find_all_tags_requests_total",
@@ -280,7 +280,7 @@ Here are the steps to gate a new feature in Gitaly behind a feature flag.
1. Set headers in tests:
- ```go
+ ```golang
import (
"google.golang.org/grpc/metadata"
diff --git a/doc/development/github_importer.md b/doc/development/github_importer.md
index 6b8c083d55f..5d37d2f119f 100644
--- a/doc/development/github_importer.md
+++ b/doc/development/github_importer.md
@@ -9,7 +9,7 @@ importer and a parallel importer. The Rake task `import:github` uses the
sequential importer, while everything else uses the parallel importer. The
difference between these two importers is quite simple: the sequential importer
does all work in a single thread, making it more useful for debugging purposes
-or Rake tasks. The parallel importer on the other hand uses Sidekiq.
+or Rake tasks. The parallel importer, on the other hand, uses Sidekiq.
## Requirements
@@ -31,9 +31,9 @@ The importer's codebase is broken up into the following directories:
## Architecture overview
-When a GitHub project is imported we schedule and execute a job for the
-`RepositoryImportworker` worker as all other importers. However, unlike other
-importers we don't immediately perform the work necessary. Instead work is
+When a GitHub project is imported, we schedule and execute a job for the
+`RepositoryImportWorker` worker as all other importers. However, unlike other
+importers, we don't immediately perform the work necessary. Instead work is
divided into separate stages, with each stage consisting out of a set of Sidekiq
jobs that are executed. Between every stage a job is scheduled that periodically
checks if all work of the current stage is completed, advancing the import
@@ -65,9 +65,9 @@ This worker will import all pull requests. For every pull request a job for the
### 5. Stage::ImportIssuesAndDiffNotesWorker
-This worker will import all issues and pull request comments. For every issue we
+This worker will import all issues and pull request comments. For every issue, we
schedule a job for the `Gitlab::GithubImport::ImportIssueWorker` worker. For
-pull request comments we instead schedule jobs for the
+pull request comments, we instead schedule jobs for the
`Gitlab::GithubImport::DiffNoteImporter` worker.
This worker processes both issues and diff notes in parallel so we don't need to
@@ -82,7 +82,7 @@ project.
### 6. Stage::ImportNotesWorker
This worker imports regular comments for both issues and pull requests. For
-every comment we schedule a job for the
+every comment, we schedule a job for the
`Gitlab::GithubImport::ImportNoteWorker` worker.
Regular comments have to be imported at the end since the GitHub API used
@@ -116,14 +116,14 @@ schedule the worker of the next stage.
To reduce the number of `AdvanceStageWorker` jobs scheduled this worker will
briefly wait for jobs to complete before deciding what the next action should
-be. For small projects this may slow down the import process a bit, but it will
+be. For small projects, this may slow down the import process a bit, but it will
also reduce pressure on the system as a whole.
## Refreshing import JIDs
GitLab includes a worker called `StuckImportJobsWorker` that will periodically
run and mark project imports as failed if they have been running for more than
-15 hours. For GitHub projects this poses a bit of a problem: importing large
+15 hours. For GitHub projects, this poses a bit of a problem: importing large
projects could take several hours depending on how often we hit the GitHub rate
limit (more on this below), but we don't want `StuckImportJobsWorker` to mark
our import as failed because of this.
@@ -137,7 +137,7 @@ long we're still performing work.
## GitHub rate limit
-GitHub has a rate limit of 5 000 API calls per hour. The number of requests
+GitHub has a rate limit of 5,000 API calls per hour. The number of requests
necessary to import a project is largely dominated by the number of unique users
involved in a project (e.g. issue authors). Other data such as issue pages
and comments typically only requires a few dozen requests to import. This is
@@ -176,11 +176,11 @@ There are two types of lookups we cache:
in our GitLab database.
The expiration time of these keys is 24 hours. When retrieving the cache of a
-positive lookups we refresh the TTL automatically. The TTL of false lookups is
+positive lookup, we refresh the TTL automatically. The TTL of false lookups is
never refreshed.
-Because of this caching layer it's possible newly registered GitLab accounts
-won't be linked to their corresponding GitHub accounts. This however will sort
+Because of this caching layer, it's possible newly registered GitLab accounts
+won't be linked to their corresponding GitHub accounts. This, however, will sort
itself out once the cached keys expire.
The user cache lookup is shared across projects. This means that the more
@@ -194,12 +194,12 @@ The code for this resides in:
## Mapping labels and milestones
To reduce pressure on the database we do not query it when setting labels and
-milestones on issues and merge requests. Instead we cache this data when we
+milestones on issues and merge requests. Instead, we cache this data when we
import labels and milestones, then we reuse this cache when assigning them to
issues/merge requests. Similar to the user lookups these cache keys are expired
automatically after 24 hours of not being used.
-Unlike the user lookup caches these label and milestone caches are scoped to the
+Unlike the user lookup caches, these label and milestone caches are scoped to the
project that is being imported.
The code for this resides in:
diff --git a/doc/development/go_guide/index.md b/doc/development/go_guide/index.md
index 73a1dd8ad8a..95167c4adf1 100644
--- a/doc/development/go_guide/index.md
+++ b/doc/development/go_guide/index.md
@@ -109,7 +109,7 @@ become available, you will be able to share job templates like this
Dependencies should be kept to the minimum. The introduction of a new
dependency should be argued in the merge request, as per our [Approval
Guidelines](../code_review.md#approval-guidelines). Both [License
-Management](../../user/application_security/license_compliance/index.md)
+Management](../../user/compliance/license_compliance/index.md)
**(ULTIMATE)** and [Dependency
Scanning](../../user/application_security/dependency_scanning/index.md)
**(ULTIMATE)** should be activated on all projects to ensure new dependencies
@@ -195,7 +195,7 @@ When comparing expected and actual values in tests, use
and others to improve readability when comparing structs, errors,
large portions of text, or JSON documents:
-```go
+```golang
type TestData struct {
// ...
}
diff --git a/doc/development/i18n/proofreader.md b/doc/development/i18n/proofreader.md
index 3cd8bf20e13..5535011d8c1 100644
--- a/doc/development/i18n/proofreader.md
+++ b/doc/development/i18n/proofreader.md
@@ -56,6 +56,7 @@ are very appreciative of the work done by translators and proofreaders!
- Adi Ferdian - [GitLab](https://gitlab.com/adiferd), [Crowdin](https://crowdin.com/profile/adiferd)
- Ahmad Naufal Mukhtar - [GitLab](https://gitlab.com/anaufalm), [Crowdin](https://crowdin.com/profile/anaufalm)
- Italian
+ - Massimiliano Cuttini - [GitLab](https://gitlab.com/maxcuttins), [Crowdin](https://crowdin.com/profile/maxcuttins)
- Paolo Falomo - [GitLab](https://gitlab.com/paolofalomo), [Crowdin](https://crowdin.com/profile/paolo.falomo)
- Japanese
- Hiroyuki Sato - [GitLab](https://gitlab.com/hiroponz), [Crowdin](https://crowdin.com/profile/hiroponz)
diff --git a/doc/development/img/reference_architecture.png b/doc/development/img/reference_architecture.png
index 1414200d076..107135b626e 100644
--- a/doc/development/img/reference_architecture.png
+++ b/doc/development/img/reference_architecture.png
Binary files differ
diff --git a/doc/development/import_export.md b/doc/development/import_export.md
index 323ed48aaf9..9dde5422642 100644
--- a/doc/development/import_export.md
+++ b/doc/development/import_export.md
@@ -79,11 +79,11 @@ Marked stuck import jobs as failed. JIDs: xyz
| Problem | Possible solutions |
| -------- | -------- |
-| [Slow JSON](https://gitlab.com/gitlab-org/gitlab-foss/issues/54084) loading/dumping models from the database | [split the worker](https://gitlab.com/gitlab-org/gitlab-foss/issues/54085) |
+| [Slow JSON](https://gitlab.com/gitlab-org/gitlab/-/issues/25251) loading/dumping models from the database | [split the worker](https://gitlab.com/gitlab-org/gitlab/-/issues/25252) |
| | Batch export
| | Optimize SQL
| | Move away from `ActiveRecord` callbacks (difficult)
-| High memory usage (see also some [analysis](https://gitlab.com/gitlab-org/gitlab-foss/issues/35389) | DB Commit sweet spot that uses less memory |
+| High memory usage (see also some [analysis](https://gitlab.com/gitlab-org/gitlab/-/issues/18857)) | DB Commit sweet spot that uses less memory |
| | [Netflix Fast JSON API](https://github.com/Netflix/fast_jsonapi) may help |
| | Batch reading/writing to disk and any SQL
@@ -195,16 +195,17 @@ module Gitlab
The [current version history](../user/project/settings/import_export.md) also displays the equivalent GitLab version
and it is useful for knowing which versions won't be compatible between them.
-| GitLab version | Import/Export version |
-| ---------------- | --------------------- |
-| 11.1 to current | 0.2.4 |
-| 10.8 | 0.2.3 |
-| 10.4 | 0.2.2 |
-| ... | ... |
-| 8.10.3 | 0.1.3 |
-| 8.10.0 | 0.1.2 |
-| 8.9.5 | 0.1.1 |
-| 8.9.0 | 0.1.0 |
+| Exporting GitLab version | Importing GitLab version |
+| -------------------------- | -------------------------- |
+| 11.7 to current | 11.7 to current |
+| 11.1 to 11.6 | 11.1 to 11.6 |
+| 10.8 to 11.0 | 10.8 to 11.0 |
+| 10.4 to 10.7 | 10.4 to 10.7 |
+| ... | ... |
+| 8.10.3 to 8.11 | 8.10.3 to 8.11 |
+| 8.10.0 to 8.10.2 | 8.10.0 to 8.10.2 |
+| 8.9.5 to 8.9.11 | 8.9.5 to 8.9.11 |
+| 8.9.0 to 8.9.4 | 8.9.0 to 8.9.4 |
### When to bump the version up
@@ -223,28 +224,6 @@ Every time we bump the version, the integration specs will fail and can be fixed
bundle exec rake gitlab:import_export:bump_version
```
-### Renaming columns or models
-
-This is a relatively common occurrence that will require a version bump.
-
-There is also the _RC problem_ - GitLab.com runs an RC, prior to any customers,
-meaning that we want to bump the version up in the next version (or patch release).
-
-For example:
-
-1. Add rename to `RelationRenameService` in X.Y
-1. Remove it from `RelationRenameService` in X.Y + 1
-1. Bump Import/Export version in X.Y + 1
-
-```ruby
-module Gitlab
- module ImportExport
- class RelationRenameService
- RENAMES = {
- 'pipelines' => 'ci_pipelines' # Added in 11.6, remove in 11.7
- }.freeze
-```
-
## A quick dive into the code
### Import/Export configuration (`import_export.yml`)
diff --git a/doc/development/import_project.md b/doc/development/import_project.md
index b969cb5f1c4..3cdf2b8977a 100644
--- a/doc/development/import_project.md
+++ b/doc/development/import_project.md
@@ -81,7 +81,7 @@ The last option is to import a project using a Rails console:
sudo -u git -H bundle exec rails console RAILS_ENV=production
```
-1. Create a project and run `ProjectTreeRestorer`:
+1. Create a project and run `Project::TreeRestorer`:
```ruby
shared_class = Struct.new(:export_path) do
@@ -98,7 +98,7 @@ The last option is to import a project using a Rails console:
begin
#Enable Request store
RequestStore.begin!
- Gitlab::ImportExport::ProjectTreeRestorer.new(user: user, shared: shared, project: project).restore
+ Gitlab::ImportExport::Project::TreeRestorer.new(user: user, shared: shared, project: project).restore
ensure
RequestStore.end!
RequestStore.clear!
@@ -115,24 +115,24 @@ The last option is to import a project using a Rails console:
project: project).restore
```
- We are storing all import failures in the `import_failures` data table.
+ We are storing all import failures in the `import_failures` data table.
- To make sure that the project import finished without any issues, check:
+ To make sure that the project import finished without any issues, check:
- ```ruby
- project.import_failures.all
- ```
+ ```ruby
+ project.import_failures.all
+ ```
## Performance testing
For Performance testing, we should:
- Import a quite large project, [`gitlabhq`](https://gitlab.com/gitlab-org/quality/performance-data#gitlab-performance-test-framework-data) should be a good example.
-- Measure the execution time of `ProjectTreeRestorer`.
+- Measure the execution time of `Project::TreeRestorer`.
- Count the number of executed SQL queries during the restore.
- Observe the number of GC cycles happening.
-You can use this [snippet](https://gitlab.com/gitlab-org/gitlab/snippets/1924954), which will restore the project, and measure the execution time of `ProjectTreeRestorer`, number of SQL queries and number of GC cycles happening.
+You can use this [snippet](https://gitlab.com/gitlab-org/gitlab/snippets/1924954), which will restore the project and measure the execution time of `Project::TreeRestorer`, the number of SQL queries, and the number of GC cycles.
You can execute the script from the `gdk/gitlab` directory like this:
diff --git a/doc/development/insert_into_tables_in_batches.md b/doc/development/insert_into_tables_in_batches.md
new file mode 100644
index 00000000000..de62d2cca52
--- /dev/null
+++ b/doc/development/insert_into_tables_in_batches.md
@@ -0,0 +1,196 @@
+---
+description: "Sometimes it is necessary to store large amounts of records at once, which can be inefficient
+when iterating collections and performing individual `save`s. With the arrival of `insert_all`
+in Rails 6, which operates at the row level (that is, using `Hash`es), GitLab has added a set
+of APIs that make it safe and simple to insert ActiveRecord objects in bulk."
+---
+
+# Insert into tables in batches
+
+Sometimes it is necessary to store large amounts of records at once, which can be inefficient
+when iterating collections and saving each record individually. With the arrival of
+[`insert_all`](https://apidock.com/rails/ActiveRecord/Persistence/ClassMethods/insert_all)
+in Rails 6, which operates at the row level (that is, using `Hash` objects), GitLab has added a set
+of APIs that make it safe and simple to insert `ActiveRecord` objects in bulk.
+
+## Prepare `ApplicationRecord`s for bulk insertion
+
+In order for a model class to take advantage of the bulk insertion API, it has to include the
+`BulkInsertSafe` concern first:
+
+```ruby
+class MyModel < ApplicationRecord
+ # other includes here
+ # ...
+ include BulkInsertSafe # include this last
+
+ # ...
+end
+```
+
+The `BulkInsertSafe` concern has two functions:
+
+- It performs checks against your model class to ensure that it does not use ActiveRecord
+ APIs that are not safe to use with respect to bulk insertions (more on that below).
+- It adds new class methods `bulk_insert!` and `bulk_upsert!`, which you can use to insert many records at once.
+
+## Insert records with `bulk_insert!` and `bulk_upsert!`
+
+If the target class passes the checks performed by `BulkInsertSafe`, you can insert an array of
+ActiveRecord model objects as follows:
+
+```ruby
+records = [MyModel.new, ...]
+
+MyModel.bulk_insert!(records)
+```
+
+Note that calls to `bulk_insert!` will always attempt to insert _new records_. If instead
+you would like to replace existing records with new values, while still inserting those
+that do not already exist, then you can use `bulk_upsert!`:
+
+```ruby
+records = [MyModel.new, existing_model, ...]
+
+MyModel.bulk_upsert!(records, unique_by: [:name])
+```
+
+In this example, `unique_by` specifies the columns by which records are considered to be
+unique and as such will be updated if they existed prior to insertion. For example, if
+`existing_model` has a `name` attribute, and if a record with the same `name` value already
+exists, its fields will be updated with those of `existing_model`.
+
+The `unique_by` parameter can also be passed as a `Symbol`, in which case it specifies
+a database index by which a column is considered unique:
+
+```ruby
+MyModel.bulk_insert!(records, unique_by: :index_on_name)
+```
+
+### Record validation
+
+The `bulk_insert!` method guarantees that `records` will be inserted transactionally, and
+will run validations on each record prior to insertion. If any record fails to validate,
+an error is raised and the transaction is rolled back. You can turn off validations via
+the `:validate` option:
+
+```ruby
+MyModel.bulk_insert!(records, validate: false)
+```
+
+### Batch size configuration
+
+In those cases where the number of `records` is above a given threshold, insertions will
+occur in multiple batches. The default batch size is defined in
+[`BulkInsertSafe::DEFAULT_BATCH_SIZE`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/app/models/concerns/bulk_insert_safe.rb).
+Assuming a default threshold of 500, inserting 950 records
+would result in two batches being written sequentially (of size 500 and 450, respectively).
+You can override the default batch size via the `:batch_size` option:
+
+```ruby
+MyModel.bulk_insert!(records, batch_size: 100)
+```
+
+Assuming the same number of 950 records, this would result in 10 batches being written instead.
+Since this will also affect the number of `INSERT`s that occur, make sure you measure the
+performance impact this might have on your code. There is a trade-off between the number of
+`INSERT` statements the database has to process and the size and cost of each `INSERT`.
+
+### Handling duplicate records
+
+NOTE: **Note:**
+This parameter applies only to `bulk_insert!`. If you intend to update existing
+records, use `bulk_upsert!` instead.
+
+It may happen that some records you are trying to insert already exist, which would result in
+primary key conflicts. There are two ways to address this problem: failing fast by raising an
+error or skipping duplicate records. The default behavior of `bulk_insert!` is to fail fast
+and raise an `ActiveRecord::RecordNotUnique` error.
+
+If this is undesirable, you can instead skip duplicate records with the `skip_duplicates` flag:
+
+```ruby
+MyModel.bulk_insert!(records, skip_duplicates: true)
+```
+
+### Requirements for safe bulk insertions
+
+Large parts of ActiveRecord's persistence API are built around the notion of callbacks. Many
+of these callbacks fire in response to model life cycle events such as `save` or `create`.
+These callbacks cannot be used with bulk insertions, since they are meant to be called for
+every instance that is saved or created. Since these events do not fire when
+records are inserted in bulk, we currently disallow their use.
+
+The specifics around which callbacks are disallowed are defined in
+[`BulkInsertSafe`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/app/models/concerns/bulk_insert_safe.rb).
+Consult the module source code for details. If your class uses any of the blacklisted
+functionality, and you `include BulkInsertSafe`, the application will fail with an error.
+
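+For illustration, here is a minimal sketch of a model that would fail these checks,
+assuming a life cycle callback such as `after_save` is among the disallowed ones
+(the module source remains the authoritative list):
+
+```ruby
+# Hypothetical model, for illustration only.
+class MyCallbackModel < ApplicationRecord
+  # A per-instance life cycle callback; it cannot fire during bulk insertion.
+  after_save :notify_subscribers
+
+  # If the callback above is disallowed, including the concern fails with an error.
+  include BulkInsertSafe
+
+  def notify_subscribers
+    # ...
+  end
+end
+```
+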
+### `BulkInsertSafe` versus `InsertAll`
+
+Internally, `BulkInsertSafe` is based on `InsertAll`, and you may wonder when to choose
+the former over the latter. To help you make the decision,
+the key differences between these classes are listed in the table below.
+
+| | Input type | Validates input | Specify batch size | Can bypass callbacks | Transactional |
+|--------------- | -------------------- | --------------- | ------------------ | --------------------------------- | ------------- |
+| `bulk_insert!` | ActiveRecord objects | Yes (optional) | Yes (optional) | No (prevents unsafe callback use) | Yes |
+| `insert_all!` | Attribute hashes | No | No | Yes | Yes |
+
+To summarize, `BulkInsertSafe` moves bulk inserts closer to how ActiveRecord objects
+normally behave when saved individually. However, if all you need is to insert raw data in bulk,
+`insert_all` is more efficient.
+
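+As a rough sketch of the difference in input types (assuming the hypothetical `MyModel`
+from the earlier examples has a `name` column):
+
+```ruby
+# bulk_insert! takes ActiveRecord objects and, by default, validates each one.
+MyModel.bulk_insert!([MyModel.new(name: 'foo'), MyModel.new(name: 'bar')])
+
+# insert_all! takes raw attribute hashes and bypasses validations and callbacks.
+MyModel.insert_all!([{ name: 'foo' }, { name: 'bar' }])
+```
+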
+## Insert `has_many` associations in bulk
+
+A common use case is to save collections of associated relations through the owner side of the relation,
+where the owned relation is associated to the owner through the `has_many` class method:
+
+```ruby
+owner = OwnerModel.new(owned_relations: array_of_owned_relations)
+# saves all `owned_relations` one-by-one
+owner.save!
+```
+
+This will issue a single `INSERT` statement and transaction for every record in `owned_relations`, which is inefficient if
+`array_of_owned_relations` is large. To remedy this, the `BulkInsertableAssociations` concern can be
+used to declare that the owner defines associations that are safe for bulk insertion:
+
+```ruby
+class OwnerModel < ApplicationRecord
+ # other includes here
+ # ...
+ include BulkInsertableAssociations # include this last
+
+ has_many :my_models
+end
+```
+
+Here `my_models` must be declared `BulkInsertSafe` (as described previously) for bulk insertions
+to happen. You can now insert any yet unsaved records as follows:
+
+```ruby
+BulkInsertableAssociations.with_bulk_insert do
+ owner = OwnerModel.new(my_models: array_of_my_model_instances)
+ # saves `my_models` using a single bulk insert (possibly via multiple batches)
+ owner.save!
+end
+```
+
+Note that you can still save relations that are not `BulkInsertSafe` in this block; they will
+simply be treated as if you had invoked `save` from outside the block.
+
+## Known limitations
+
+There are a few restrictions to how these APIs can be used:
+
+- `ON CONFLICT` behavior cannot currently be configured; an error will be raised on primary key conflicts.
+- `BulkInsertableAssociations` has the following additional restrictions:
+  - It is only compatible with `has_many` relations.
+  - It does not support `has_many through: ...` relations.
+- Writing [`jsonb`](https://www.postgresql.org/docs/current/datatype-json.html) content is
+  [not currently supported](https://gitlab.com/gitlab-org/gitlab/-/issues/210560).
+
+Moreover, input data should either be limited to around 1000 records at most,
+or already batched prior to calling bulk insert. The `INSERT` statement will run in a single
+transaction, so for large amounts of records it may negatively affect database stability.
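+
+For example, a large collection could be pre-batched before calling the API; a minimal
+sketch, assuming `records` is an array of `MyModel` instances:
+
+```ruby
+# Split the input into chunks of at most 1,000 records so that each
+# bulk insert runs in its own, smaller transaction.
+records.each_slice(1_000) do |slice|
+  MyModel.bulk_insert!(slice)
+end
+```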
diff --git a/doc/development/instrumentation.md b/doc/development/instrumentation.md
index 3ce6fda441a..55b42bb6ef9 100644
--- a/doc/development/instrumentation.md
+++ b/doc/development/instrumentation.md
@@ -1,11 +1,11 @@
-# Application Performance Metrics for Ruby Code
+# Instrumenting Ruby code
[GitLab Performance Monitoring](../administration/monitoring/performance/index.md) allows instrumenting of both methods and custom
blocks of Ruby code. Method instrumentation is the primary form of
instrumentation with block-based instrumentation only being used when we want to
drill down to specific regions of code within a method.
-Please refer to [Product analytics](event_tracking/index.md) if you are after tracking product usage patterns.
+Please refer to [Telemetry](../telemetry/index.md) if you are tracking product usage patterns.
## Instrumenting Methods
diff --git a/doc/development/integrations/jira_connect.md b/doc/development/integrations/jira_connect.md
index 9ac87a17232..838358cb540 100644
--- a/doc/development/integrations/jira_connect.md
+++ b/doc/development/integrations/jira_connect.md
@@ -15,8 +15,6 @@ The following are required to install and test the app:
or [ngrok](https://ngrok.com). These also take care of SSL for you because Jira
requires all connections to the app host to be over SSL.
-> This feature is currently behind the `:jira_connect_app` feature flag
-
## Installing the app in Jira
1. Enable Jira development mode to install apps that are not from the Atlassian Marketplace
diff --git a/doc/development/integrations/secure.md b/doc/development/integrations/secure.md
index b230927a7de..5792ce303e1 100644
--- a/doc/development/integrations/secure.md
+++ b/doc/development/integrations/secure.md
@@ -191,6 +191,15 @@ then `artifacts:reports:dependency_scanning` must be set to `depscan.json`.
Following the POSIX exit code standard, the scanner will exit with 0 for success and any number from 1 to 255 for anything else.
Success also includes the case when vulnerabilities are found.
+When executing a scanning job using the [Docker-in-Docker privileged mode](../../user/application_security/sast/index.md#requirements),
+we reserve the following standard exit codes.
+
+| Orchestrator Exit Code | Description |
+|------------------------|----------------------------------|
+| 3 | No match, no compatible analyzer |
+| 4 | Project directory empty |
+| 5 | No compatible Docker image |
+
### Logging
The scanner should log error messages and warnings so that users can easily investigate
@@ -269,7 +278,7 @@ and where the `message` repeats the `location` field:
It takes around 50k characters to block for 2 seconds making this a low severity issue."
}
```
-
+
The `description` might explain how the vulnerability works or give context about the exploit.
It should not repeat the other fields of the vulnerability object.
In particular, the `description` should not repeat the `location` (what is affected)
diff --git a/doc/development/integrations/secure_partner_integration.md b/doc/development/integrations/secure_partner_integration.md
new file mode 100644
index 00000000000..9991ee57257
--- /dev/null
+++ b/doc/development/integrations/secure_partner_integration.md
@@ -0,0 +1,103 @@
+# Secure Partner Integration - Onboarding Process
+
+If you want to integrate your product with the [Secure Stage](https://about.gitlab.com/direction/secure),
+this page will help you understand the developer workflow GitLab intends for
+our users to follow with regards to security results. These should be used as
+guidelines so you can build an integration that fits with the workflow GitLab
+users are already familiar with.
+
+This page also provides resources for the technical work associated
+with [onboarding as a partner](https://about.gitlab.com/partners/integrate/).
+The steps below are a high-level view of what needs to be done to complete an
+integration as well as linking to more detailed resources for how to do so.
+
+## What is the GitLab Developer Workflow?
+
+This workflow is how GitLab users interact with our product and expect it to
+function. Understanding how users use GitLab today will help you choose the
+best place to integrate your own product and its results into GitLab.
+
+- Developers want to write code without using a new tool to consume results
+ or address feedback about the item they are working on. Staying inside a
+ single tool, GitLab, helps them to stay focused on finishing the code and
+ projects they are working on.
+- Developers commit code to a Git branch. The developer creates a merge request (MR)
+ inside GitLab where these changes can be reviewed. The MR triggers a GitLab
+ pipeline to run associated jobs, including security checks, on the code.
+- Pipeline jobs serve a variety of purposes. Jobs can scan for issues that have
+  implications for app security, corporate policy, or compliance. When complete,
+ the job reports back on its status and creates a
+ [job artifact](../../user/project/pipelines/job_artifacts.md) as a result.
+- The [Merge Request Security Widget](../../user/project/merge_requests/index.md#security-reports-ultimate)
+ displays the results of the pipeline's security checks and the developer can
+ review them. The developer can review both a summary and a detailed version
+ of the results.
+- If certain policies (such as [merge request approvals](../../user/project/merge_requests/merge_request_approvals.md))
+ are in place for a project, developers must resolve specific findings or get
+ an approval from a specific list of people.
+- The [security dashboard](../../user/application_security/security_dashboard/index.md#gitlab-security-dashboard-ultimate)
+  also shows results, which developers can use to quickly see all the
+ vulnerabilities that need to be addressed in the code.
+- When the developer reads the details about a vulnerability, they are
+ presented with additional information and choices on next steps:
+ 1. Create Issue (Confirm finding): Creates a new issue to be prioritized.
+ 1. Add Comment and Dismiss Vulnerability: When dismissing a finding, users
+ can comment to note items that they
+ have mitigated, that they accept the vulnerability, or that the
+ vulnerability is a false positive.
+ 1. Auto-Remediation / Create Merge Request: A fix for the vulnerability can
+ be offered, allowing an easy solution that does not require extra effort
+ from users. This should be offered whenever possible.
+   1. Links: Vulnerabilities can link out to external sites or sources for users
+ to get more data around the vulnerability.
+
+## How to onboard
+
+This section describes the steps you need to complete to onboard as a partner
+and complete an integration with the Secure stage.
+
+1. Read about our [partnerships](https://about.gitlab.com/partners/integrate/index.md).
+1. [Create an issue](https://gitlab.com/gitlab-com/alliances/alliances/issues/new?issuable_template=new_partner)
+ using our new partner issue template to begin the discussion.
+1. Get a test account to begin developing your integration. You can
+ request a [GitLab.com Gold Subscription Sandbox](https://about.gitlab.com/partners/integrate/index.md#gitlabcom-gold-subscription-sandbox-request)
+ or an [EE Developer License](https://about.gitlab.com/partners/integrate/index.md#requesting-ee-dev-license-for-rd).
+1. Provide a [pipeline job](../../development/pipelines.md)
+ template that users could integrate into their own GitLab pipelines.
+1. Create a report artifact with your pipeline jobs.
+1. Ensure your pipeline jobs create a report artifact that GitLab can process
+ to successfully display your own product's results with the rest of GitLab.
+ - See detailed [technical directions](secure.md) for this step.
+ - Read more about [job report artifacts](../../ci/yaml/README.md#artifactsreports).
+ - Read about [job artifacts](../../user/project/pipelines/job_artifacts.md).
+ - Your report artifact must be in one of our currently supported formats.
+ For more information, see the [documentation on reports](secure.md#report).
+ - Documentation for [SAST reports](../../user/application_security/sast/index.md#reports-json-format).
+ - Documentation for [Dependency Scanning reports](../../user/application_security/dependency_scanning/index.md#reports-json-format).
+ - Documentation for [Container Scanning reports](../../user/application_security/container_scanning/index.md#reports-json-format).
+ - See this [example secure job definition that also defines the artifact created](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Security/Container-Scanning.gitlab-ci.yml).
+ - If you need a new kind of scan or report, [create an issue](https://gitlab.com/gitlab-org/gitlab/issues/new#)
+ and add the label `devops::secure`.
+ - Once the job is completed, the data can be seen:
+ - In the [Merge Request Security Report](../../user/project/merge_requests/index.md#security-reports-ultimate) ([MR Security Report data flow](https://gitlab.com/snippets/1910005#merge-request-view)).
+ - While [browsing a Job Artifact](../../user/project/pipelines/job_artifacts.md).
+ - In the [Security Dashboard](../../user/application_security/security_dashboard/index.md) ([Dashboard data flow](https://gitlab.com/snippets/1910005#project-and-group-dashboards)).
+1. Optional: Provide a way to interact with results as Vulnerabilities:
+ - Users can interact with the findings from your artifact within their workflow. They can dismiss the findings or accept them and create a backlog issue.
+ - To automatically create issues without user interaction, use the [issue API](../../api/issues.md). This will be replaced by [Standalone Vulnerabilities](https://gitlab.com/groups/gitlab-org/-/epics/634) in the future.
+1. Optional: Provide auto-remediation steps:
+ - If you specified `remediations` in your artifact, it is proposed through our [auto-remediation](../../user/application_security/index.md#solutions-for-vulnerabilities-auto-remediation)
+ interface.
+1. Demo the integration to GitLab:
+ - After you have tested and are ready to demo your integration please
+ [reach out](https://about.gitlab.com/partners/integrate/index.md) to us. If you
+ skip this step you won’t be able to do supported marketing.
+1. Begin doing supported marketing of your GitLab integration.
+ - Work with our [partner team](https://about.gitlab.com/partners/integrate/index.md)
+ to support your go-to-market as appropriate.
+ - Examples of supported marketing could include being listed on our [Security Partner page](https://about.gitlab.com/partners/index.md#security),
+ doing an [Unfiltered blog post](https://about.gitlab.com/handbook/marketing/blog/unfiltered/index.md),
+ doing a co-branded webinar, or producing a co-branded whitepaper.
+
+If you have any issues while working through your integration or the steps
+above, please create an issue to discuss with us further.
diff --git a/doc/development/issuable-like-models.md b/doc/development/issuable-like-models.md
index 3892f56156e..d252735dbd8 100644
--- a/doc/development/issuable-like-models.md
+++ b/doc/development/issuable-like-models.md
@@ -1,9 +1,9 @@
# Issuable-like Rails models utilities
GitLab Rails codebase contains several models that hold common functionality and behave similarly to
-[Issues](https://docs.gitlab.com/ee/user/project/issues/). Other examples of "issuables"
-are [Merge Requests](https://docs.gitlab.com/ee/user/project/merge_requests/) and
-[Epics](https://docs.gitlab.com/ee/user/group/epics/).
+[Issues](../user/project/issues/index.md). Other examples of "issuables"
+are [Merge Requests](../user/project/merge_requests/index.md) and
+[Epics](../user/group/epics/index.md).
This guide accumulates guidelines on working with such Rails models.
diff --git a/doc/development/kubernetes.md b/doc/development/kubernetes.md
index 1a8aa7647af..9e0e686f447 100644
--- a/doc/development/kubernetes.md
+++ b/doc/development/kubernetes.md
@@ -159,7 +159,7 @@ rescues `StandardError` which can make it harder to debug issues in an
development environment. The current workaround is to temporarily
comment out the `rescue` in your local development source.
-You can also follow the installation pod logs to debug issues related to
+You can also follow the installation logs to debug issues related to
installation. Once the installation/upgrade is underway, wait for the
pod to be created. Then run the following to obtain the pods logs as
they are written:
diff --git a/doc/development/licensing.md b/doc/development/licensing.md
index 2dc77b2eec8..c7676cc2596 100644
--- a/doc/development/licensing.md
+++ b/doc/development/licensing.md
@@ -46,8 +46,8 @@ More detailed information on how the gem and its commands work is available in t
Libraries with the following licenses are acceptable for use:
-- [The MIT License](https://choosealicense.com/licenses/mit/) (the MIT Expat License specifically): The MIT License requires that the license itself is included with all copies of the source. It is a permissive (non-copyleft) license as defined by the Open Source Initiative.
-- [LGPL](https://choosealicense.com/licenses/lgpl-3.0/) (version 2, version 3): GPL constraints regarding modification and redistribution under the same license are not required of projects using an LGPL library, only upon modification of the LGPL-licensed library itself.
+- [MIT License](https://choosealicense.com/licenses/mit/) (the MIT Expat License specifically): The MIT License requires that the license itself is included with all copies of the source. It is a permissive (non-copyleft) license as defined by the Open Source Initiative.
+- [GNU Lesser General Public License (GNU LGPL)](https://choosealicense.com/licenses/lgpl-3.0/) (version 2, version 3): GPL constraints regarding modification and redistribution under the same license are not required of projects using an LGPL library, only upon modification of the LGPL-licensed library itself.
- [Apache 2.0 License](https://choosealicense.com/licenses/apache-2.0/): A permissive license that also provides an express grant of patent rights from contributors to users.
- [Ruby 1.8 License][ruby-1.8]: Dual-licensed under either itself or the GPLv2, defer to the Ruby License itself. Acceptable because of point 3b: "You may distribute the software in object code or binary form, provided that you do at least ONE of the following: b) accompany the distribution with the machine-readable source of the software."
- [Ruby 1.9 License][ruby-1.9]: Dual-licensed under either itself or the BSD 2-Clause License, defer to BSD 2-Clause.
@@ -57,6 +57,7 @@ Libraries with the following licenses are acceptable for use:
- [Creative Commons Zero (CC0)][CC0]: A public domain dedication, recommended as a way to disclaim copyright on your work to the maximum extent possible.
- [Unlicense][UNLICENSE]: Another public domain dedication.
- [OWFa 1.0][OWFa1]: An open-source license and patent grant designed for specifications.
+- [JSON License](https://www.json.org/license.html): Equivalent to the MIT license plus the statement, "The Software shall be used for Good, not Evil."
## Unacceptable Licenses
diff --git a/doc/development/logging.md b/doc/development/logging.md
index a90d78ba8d9..f10737da766 100644
--- a/doc/development/logging.md
+++ b/doc/development/logging.md
@@ -112,20 +112,57 @@ importer progresses. Here's what to do:
all messages might have `current_user_id` and `project_id` to make it easier
to search for activities by user for a given time.
-1. Do NOT mix and match types. Elasticsearch won't be able to index your
- logs properly if you [mix integer and string
- types](https://www.elastic.co/guide/en/elasticsearch/guide/current/mapping.html#_avoiding_type_gotchas):
+#### Implicit schema for JSON logging
+
+When using something like Elasticsearch to index structured logs, there is a
+schema for the types of each log field (even if that schema is implicit /
+inferred). It's important to be consistent with the types of your field values,
+otherwise this might break the ability to search/filter on these fields, or even
+cause whole log events to be dropped. While much of this section is phrased in
+an Elasticsearch-specific way, the concepts should translate to many systems you
+might use to index structured logs. GitLab.com uses Elasticsearch to index log
+data.
+
+Unless a field type is explicitly mapped, Elasticsearch will infer the type from
+the first instance of that field value it sees. Subsequent instances of that
+field value with different types will either fail to be indexed, or in some
+cases (scalar/object conflict), the whole log line will be dropped.
+
+GitLab.com's logging Elasticsearch sets
+[`ignore_malformed`](https://www.elastic.co/guide/en/elasticsearch/reference/current/ignore-malformed.html),
+which allows documents to be indexed even when there are simpler sorts of
+mapping conflicts (for example, number / string), although indexing on the affected fields
+will break.
+
+Examples:
- ```ruby
- # BAD
- logger.info(message: "Import error", error: 1)
- logger.info(message: "Import error", error: "I/O failure")
- ```
+```ruby
+# GOOD
+logger.info(message: "Import error", error_code: 1, error: "I/O failure")
- ```ruby
- # GOOD
- logger.info(message: "Import error", error_code: 1, error: "I/O failure")
- ```
+# BAD
+logger.info(message: "Import error", error: 1)
+logger.info(message: "Import error", error: "I/O failure")
+
+# WORST
+logger.info(message: "Import error", error: "I/O failure")
+logger.info(message: "Import error", error: { message: "I/O failure" })
+```
+
+List elements must be the same type:
+
+```ruby
+# GOOD
+logger.info(a_list: ["foo", "1", "true"])
+
+# BAD
+logger.info(a_list: ["foo", 1, true])
+```
+
+Resources:
+
+- [Elasticsearch mapping - avoiding type gotchas](https://www.elastic.co/guide/en/elasticsearch/guide/current/mapping.html#_avoiding_type_gotchas)
+- [Elasticsearch mapping types](https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-types.html)
## Multi-destination Logging
diff --git a/doc/development/mass_insert.md b/doc/development/mass_insert.md
index 891ce0db87d..47f993a921e 100644
--- a/doc/development/mass_insert.md
+++ b/doc/development/mass_insert.md
@@ -9,5 +9,5 @@ the following snippet in the rails console.
```ruby
u = User.find(1)
-Project.last(100).each { |p| p.set_create_timestamps && p.add_maintainer(u, current_user: u) } # Change 100 to whatever number of projects you need access to
+Project.last(100).each { |p| p.set_timestamps_for_create && p.add_maintainer(u, current_user: u) } # Change 100 to whatever number of projects you need access to
```
diff --git a/doc/development/merge_request_performance_guidelines.md b/doc/development/merge_request_performance_guidelines.md
index 6552ed29e98..031a8b515e6 100644
--- a/doc/development/merge_request_performance_guidelines.md
+++ b/doc/development/merge_request_performance_guidelines.md
@@ -33,7 +33,7 @@ to original issue and epic.
and those maintaining a GitLab setup.
Any change submitted can have an impact not only on the application itself but
-also those maintaining it and those keeping it up and running (e.g. production
+also those maintaining it and those keeping it up and running (for example, production
engineers). As a result you should think carefully about the impact of your
merge request on not only the application but also on the people keeping it up
and running.
@@ -85,37 +85,37 @@ the following:
1. Is there something that we can do differently to not process such a
big data set?
1. Should we build some fail-safe mechanism to contain
- computational complexity? Usually it is better to degrade
+ computational complexity? Usually it's better to degrade
the service for a single user instead of all users.
## Query plans and database structure
-The query plan can answer the questions whether we need additional
-indexes, or whether we perform expensive filtering (i.e. using sequential scans).
+The query plan can tell us whether we need additional
+indexes, or whether the query performs expensive filtering (such as sequential scans).
Each query plan should be run against substantial size of data set.
-For example if you look for issues with specific conditions,
-you should consider validating the query against
+For example, if you look for issues with specific conditions,
+you should consider validating a query against
a small number (a few hundred) and a big number (100_000) of issues.
See how the query behaves when the result is only a handful of rows
and when it is a few thousand.
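+
+As a rough illustration (the model and filter here are hypothetical), you can
+inspect a query plan straight from the Rails console with ActiveRecord's `explain`:
+
+```ruby
+# Check the plan both for a project with a few hundred issues and for one
+# with around 100_000 issues.
+Issue.where(project_id: project.id, confidential: false).limit(100).explain
+```
+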
This is needed as we have users using GitLab for very big projects and
-in a very unconventional way. Even, if it seems that it is unlikely
-that such big data set will be used, it is still plausible that one
-of our customers will have the problem with the feature.
+in a very unconventional way. Even if it seems that it's unlikely
+that such a big data set will be used, it's still plausible that one
+of our customers will encounter a problem with the feature.
-Understanding ahead of time how it is going to behave at scale even if we accept it,
-is the desired outcome. We should always have a plan or understanding what it takes
-to optimise feature to the magnitude of higher usage patterns.
+Understanding ahead of time how it's going to behave at scale, even if we accept it,
+is the desired outcome. We should always have a plan or understanding of what it will take
+to optimize the feature for higher usage patterns.
-Every database structure should be optimised and sometimes even over-described
-to be prepared to be easily extended. The hardest part after some point is
+Every database structure should be optimized and sometimes even over-described
+in preparation for easy extension. The hardest part after some point is
data migration. Migrating millions of rows will always be troublesome and
-can have negative impact on application.
+can have a negative impact on the application.
To better understand how to get help with the query plan reviews
-read this section on [how to prepare the merge request for a database review](https://docs.gitlab.com/ee/development/database_review.html#how-to-prepare-the-merge-request-for-a-database-review).
+read this section on [how to prepare the merge request for a database review](database_review.md#how-to-prepare-the-merge-request-for-a-database-review).
## Query Counts
@@ -167,14 +167,14 @@ be clearly mentioned in the merge request description.
## Batch process
-**Summary:** Iterating a single process to external services (e.g. PostgreSQL, Redis, Object Storage, etc)
+**Summary:** Repeated calls from a single process to external services (for example, PostgreSQL, Redis, Object Storage)
should be executed in a **batch-style** in order to reduce connection overheads.
For fetching rows from various tables in a batch-style, please see [Eager Loading](#eager-loading) section.
### Example: Delete multiple files from Object Storage
-When you delete multiple files from object storage (e.g. GCS),
+When you delete multiple files from object storage, like GCS,
executing a single REST API call multiple times is a quite expensive
process. Ideally, this should be done in a batch-style, for example, S3 provides
[batch deletion API](https://docs.aws.amazon.com/AmazonS3/latest/API/API_DeleteObjects.html),
@@ -187,23 +187,23 @@ in a batch style.
## Timeout
**Summary:** You should set a reasonable timeout when the system invokes HTTP calls
-to external services (e.g. Kubernetes), and it should be executed in Sidekiq, not
+to external services (such as Kubernetes), and it should be executed in Sidekiq, not
in Puma/Unicorn threads.
Often, GitLab needs to communicate with an external service such as Kubernetes
clusters. In this case, it's hard to estimate when the external service finishes
-the requested process, for example, if it's a user-owned cluster that is inactive for some reason,
+the requested process, for example, if it's a user-owned cluster that's inactive for some reason,
GitLab might wait for the response forever ([Example](https://gitlab.com/gitlab-org/gitlab/issues/31475)).
This could result in Puma/Unicorn timeout and should be avoided at all cost.
You should set a reasonable timeout, gracefully handle exceptions and surface the
errors in UI or logging internally.
-Using [`ReactiveCaching`](https://docs.gitlab.com/ee/development/utilities.html#reactivecaching) is one of the best solutions to fetch external data.
+Using [`ReactiveCaching`](utilities.md#reactivecaching) is one of the best solutions to fetch external data.
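+
+A minimal sketch of the pattern (the class and the HTTP call are illustrative;
+`app/models/concerns/reactive_caching.rb` is the authoritative reference for the contract):
+
+```ruby
+class ExternalDashboard < ApplicationRecord
+  include ReactiveCaching
+
+  self.reactive_cache_refresh_interval = 10.minutes
+
+  # Runs in a background worker; the returned value is cached.
+  def calculate_reactive_cache(url)
+    Gitlab::HTTP.get(url).parsed_response
+  end
+
+  # Returns nil (and schedules a refresh) until the cache is populated.
+  def panels(url)
+    with_reactive_cache(url) { |data| data['panels'] }
+  end
+end
+```
+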
## Keep database transaction minimal
-**Summary:** You should avoid accessing to external services (e.g. Gitaly) during database
+**Summary:** You should avoid accessing external services like Gitaly during database
transactions, otherwise it leads to severe contention problems
as an open transaction basically blocks the release of a Postgres backend connection.
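+
+A sketch of the anti-pattern and of the preferred shape (model and call names are illustrative):
+
+```ruby
+# BAD: the Gitaly RPC runs while the transaction holds a Postgres connection open.
+ApplicationRecord.transaction do
+  issue.update!(title: new_title)
+  issue.project.repository.commit(issue.project.default_branch) # Gitaly call inside the transaction
+end
+
+# BETTER: keep the transaction limited to database work.
+ApplicationRecord.transaction do
+  issue.update!(title: new_title)
+end
+issue.project.repository.commit(issue.project.default_branch) # Gitaly call after the transaction
+```
+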
@@ -247,14 +247,14 @@ necessary.
A merge request must not increase the memory usage of GitLab by more than the
absolute bare minimum required by the code. This means that if you have to parse
-some large document (e.g. an HTML document) it's best to parse it as a stream
+some large document (for example, an HTML document) it's best to parse it as a stream
whenever possible, instead of loading the entire input into memory. Sometimes
this isn't possible, in that case this should be stated explicitly in the merge
request.
## Lazy Rendering of UI Elements
-**Summary:** only render UI elements when they're actually needed.
+**Summary:** only render UI elements when they are actually needed.
Certain UI elements may not always be needed. For example, when hovering over a
diff line there's a small icon displayed that can be used to create a new
@@ -284,7 +284,7 @@ data should be cached for a certain time period instead of the duration of the
transaction.
For example, say you process multiple snippets of text containing username
-mentions (e.g. `Hello @alice` and `How are you doing @alice?`). By caching the
+mentions (for example, `Hello @alice` and `How are you doing @alice?`). By caching the
user objects for every username we can remove the need for running the same
query for every mention of `@alice`.
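+
+A rough sketch of the idea (this is not the actual implementation used by the
+mention rendering code, just an illustration of per-batch caching):
+
+```ruby
+# Look each mentioned username up at most once for the whole batch of snippets.
+user_cache = Hash.new do |cache, username|
+  cache[username] = User.find_by(username: username)
+end
+
+snippets.each do |text|
+  text.scan(/@(\w+)/).flatten.each { |username| user_cache[username] }
+end
+```
+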
@@ -304,7 +304,7 @@ The main styles of pagination are:
and the total number of pages. This style is well supported by all components of GitLab.
1. Offset-based pagination, but without the count: user goes to a specific page, like 1.
User sees only the next page number, but does not see the total amount of pages.
-1. Next page using keyset-based pagination: user can only go to next page, as we do not know how many pages
+1. Next page using keyset-based pagination: user can only go to next page, as we don't know how many pages
are available.
1. Infinite scrolling pagination: user scrolls the page and next items are loaded asynchronously. This is ideal,
as it has exact same benefits as the previous one.
@@ -316,20 +316,20 @@ can follow the progress looking at [API: Keyset Pagination
Take into consideration the following when choosing a pagination strategy:
-1. It is very inefficient to calculate amount of objects that pass the filtering,
+1. It's very inefficient to calculate the number of objects that pass the filtering,
this operation usually can take seconds, and can time out,
-1. It is very inefficient to get entries for page at higher ordinals, like 1000.
+1. It's very inefficient to get entries for pages at higher ordinals, like 1000.
The database has to sort and iterate all previous items, and this operation usually
can result in substantial load put on database.
## Badge counters
-Counters should always be truncated. It means that we do not want to present
+Counters should always be truncated. It means that we don't want to present
the exact number over some threshold. The reason for that is for the cases where we want
to calculate exact number of items, we effectively need to filter each of them for
the purpose of knowing the exact number of items matching.
-From ~UX perspective it is often acceptable to see that you have over 1000+ pipelines,
+From a ~UX perspective it's often acceptable to see that you have over 1000+ pipelines,
instead of that you have 40000+ pipelines, but at a tradeoff of loading page for 2s longer.
An example of this pattern is the list of pipelines and jobs. We truncate numbers to `1000+`,
@@ -338,7 +338,7 @@ but we show an accurate number of running pipelines, which is the most interesti
There's a helper method that can be used for that purpose - `NumbersHelper.limited_counter_with_delimiter` -
that accepts an upper limit of counting rows.
-In some cases it is desired that badge counters are loaded asynchronously.
+In some cases it's desired that badge counters are loaded asynchronously.
This can speed up the initial page load and give a better user experience overall.
## Application/misuse limits
@@ -349,9 +349,9 @@ be performant and usable for the user, but **not limiting**.
**We want the features to be fully usable for the users.**
**However, we want to ensure that the feature will continue to perform well if used at its limit**
-**and it will not cause availability issues.**
+**and it won't cause availability issues.**
-Consider that it is always better to start with some kind of limitation,
+Consider that it's always better to start with some kind of limitation,
instead of later introducing a breaking change that would result in some
workflows breaking.
@@ -370,9 +370,9 @@ The intent of quotas could be different:
Examples:
-1. Pipeline Schedules: It is very unlikely that user will want to create
+1. Pipeline Schedules: It's very unlikely that a user will want to create
more than 50 schedules.
- In such cases it is rather expected that this is either misuse
+ In such cases it's rather expected that this is either misuse
or abuse of the feature. Lack of the upper limit can result
in service degradation as the system will try to process all schedules
   assigned to the project.
@@ -396,4 +396,109 @@ Performance deficiencies should be addressed right away after we merge initial
changes.
Read more about when and how feature flags should be used in
-[Feature flags in GitLab development](https://docs.gitlab.com/ee/development/feature_flags/process.html#feature-flags-in-gitlab-development).
+[Feature flags in GitLab development](feature_flags/process.md#feature-flags-in-gitlab-development).
+
+## Storage
+
+We can consider the following types of storage:
+
+- **Local temporary storage** (very short-term storage): This type of storage is system-provided, for example the `/tmp` folder.
+  This is the type of storage that you should ideally use for all your temporary tasks.
+  The fact that each node has its own temporary storage makes scaling significantly easier.
+  This storage is also very often SSD-based, and thus significantly faster.
+  Local storage can easily be configured for the application with
+  the `TMPDIR` environment variable (see the sketch after this list).
+
+- **Shared temporary storage** (short-term storage): This type of storage is network-based temporary storage,
+  usually run with a common NFS server. As of February 2020, we still use this type of storage
+  for most of our implementations. Even though this allows the above limit to be significantly larger,
+  it does not really mean that you can use more. The shared temporary storage is shared by
+  all nodes. A job that uses a significant amount of that space or performs a lot
+  of operations creates contention on the execution of all other jobs and requests
+  across the whole application, which can easily impact the stability of the whole GitLab instance.
+  Be respectful of that.
+
+- **Shared persistent storage** (long-term storage): This type of storage uses
+  shared network-based storage (for example, NFS). This solution is mostly used by customers running small
+  installations consisting of a few nodes. The files on shared storage are easily accessible,
+  but any job that is uploading or downloading data can create serious contention for all other jobs.
+  This is also the default approach used by Omnibus.
+
+- **Object-based persistent storage** (long-term storage): This type of storage uses external
+  services like [AWS S3](https://en.wikipedia.org/wiki/Amazon_S3). Object Storage
+  can be treated as infinitely scalable and redundant. Accessing this storage usually requires
+  downloading the file in order to manipulate it. Object Storage can be considered the ultimate
+  solution, as by definition it can be assumed to handle unlimited concurrent uploads
+  and downloads of files. It is also the solution required to ensure that the application can
+  run easily in containerized deployments (Kubernetes).
+
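+As a small illustration of the `TMPDIR` point above, Ruby's standard library already
+honors that variable, so code that goes through `Dir.tmpdir` or `Dir.mktmpdir` picks up
+the node-local path automatically (the values below assume `TMPDIR=/run/gitlab-tmp` is exported):
+
+```ruby
+require 'tmpdir'
+
+Dir.tmpdir            # => "/run/gitlab-tmp" instead of "/tmp"
+Dir.mktmpdir('build') # => a unique directory created under "/run/gitlab-tmp"
+```
+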
+### Temporary storage
+
+The storage on production nodes is really scarce. The application should be built
+in a way that accommodates running under very limited temporary storage.
+You can expect the system on which your code runs to have a total of `1G-10G`
+of temporary storage. However, this storage is shared across all
+jobs being run. If your job requires more than `100MB` of that space,
+you should reconsider the approach you have taken.
+
+Whatever your needs are, you should clearly document them if you need to process files.
+If you require more than `100MB`, consider asking a maintainer
+to work with you to find a better solution.
+
+#### Local temporary storage
+
+Using local storage is the preferred solution,
+especially since we are working on deploying applications to Kubernetes clusters.
+When would you use `Dir.mktmpdir`? For example, when you want
+to extract or create archives, perform extensive manipulation of existing data, and so on.
+
+```ruby
+Dir.mktmpdir('designs') do |path|
+  # Manipulate files under `path` here.
+  # The temporary directory and its contents
+  # are removed automatically when the block exits.
+end
+```
+
+#### Shared temporary storage
+
+The usage of shared temporary storage is required if your intent
+is to persist a file to disk-based storage rather than Object Storage.
+[Workhorse direct_upload](./uploads.md#direct-upload) can write an accepted file
+to shared storage, and GitLab Rails can later perform a move operation.
+A move within the same destination filesystem is instantaneous:
+instead of performing a `copy` operation, the system just re-attaches the file in a new place.
+
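+For illustration, the "move, don't copy" behavior boils down to a rename when source and
+destination live on the same filesystem (the paths here are hypothetical):
+
+```ruby
+require 'fileutils'
+
+# On the same (shared) mount this is a metadata-only rename, not a byte-by-byte copy.
+FileUtils.mv('/shared/tmp/uploads/123/file.zip', '/shared/artifacts/123/file.zip')
+```
+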
+Since this introduces extra complexity into the application, you should only
+re-use well-established patterns (for example, the `ObjectStorage` concern) instead of re-implementing them.
+
+All other usages of shared temporary storage are deprecated.
+
+### Persistent storage
+
+#### Object Storage
+
+It is required that all features holding persistent files support saving data
+to Object Storage. Having persistent storage in the form of a volume shared across nodes
+is not scalable, as it creates contention on data access from all nodes.
+
+GitLab offers the [ObjectStorage concern](https://gitlab.com/gitlab-org/gitlab/-/blob/master/app/uploaders/object_storage.rb)
+that implements seamless support for shared (disk-based) and Object Storage-based persistent storage.
+
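+A hypothetical uploader sketch of how the concern is typically wired up (the class name
+and configuration key are illustrative; the uploaders under `app/uploaders/` are the
+authoritative reference):
+
+```ruby
+class MyArtifactUploader < GitlabUploader
+  include ObjectStorage::Concern
+
+  # Points the uploader at the relevant object storage configuration.
+  storage_options Gitlab.config.artifacts
+end
+```
+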
+#### Data access
+
+Each feature that accepts data uploads or allows downloading them needs to use
+[Workhorse direct_upload](./uploads.md#direct-upload). This means that uploads need to be
+saved directly to Object Storage by Workhorse, and all downloads need to be served
+by Workhorse.
+
+Performing uploads/downloads via Unicorn/Puma is an expensive operation,
+as it blocks the whole processing slot (worker or thread) for the duration of the upload.
+
+Performing uploads/downloads via Unicorn/Puma also has a problem where the operation
+can time out, which is especially problematic for slow clients. If clients take a long time
+to upload/download, the processing slot might be killed due to the request processing
+timeout (usually between 30s-60s).
+
+For the above reasons it is required that [Workhorse direct_upload](./uploads.md#direct-upload) is implemented
+for all file uploads and downloads.
diff --git a/doc/development/migration_style_guide.md b/doc/development/migration_style_guide.md
index 2c9ad8c00cf..776ac252b76 100644
--- a/doc/development/migration_style_guide.md
+++ b/doc/development/migration_style_guide.md
@@ -15,8 +15,8 @@ offline unless _absolutely necessary_.
When downtime is necessary the migration has to be approved by:
1. The VP of Engineering
-1. A Backend Lead
-1. A Database Specialist
+1. A Backend Maintainer
+1. A Database Maintainer
An up-to-date list of people holding these titles can be found at
<https://about.gitlab.com/company/team/>.
@@ -29,6 +29,10 @@ Please don't depend on GitLab-specific code since it can change in future
versions. If needed copy-paste GitLab code into the migration to make it forward
compatible.
+For GitLab.com, please take into consideration that regular migrations (under `db/migrate`)
+are run before [Canary is deployed](https://about.gitlab.com/handbook/engineering/infrastructure/library/canary/#configuration-and-deployment),
+and post-deployment migrations (`db/post_migrate`) are run after the deployment to production has finished.
+
## Schema Changes
Migrations that make changes to the database schema (e.g. adding a column) can
@@ -85,6 +89,21 @@ be possible to downgrade in case of a vulnerability or bugs.
In your migration, add a comment describing how the reversibility of the
migration was tested.
+Some migrations cannot be reversed. For example, some data migrations can't be
+reversed because we lose information about the state of the database before the migration.
+You should still create a `down` method with a comment, explaining why
+the changes performed by the `up` method can't be reversed, so that the
+migration itself can be reversed, even if the changes performed during the migration
+can't be reversed:
+
+```ruby
+def down
+ # no-op
+
+ # comment explaining why changes performed by `up` cannot be reversed.
+end
+```
+
## Atomicity
By default, migrations are single transaction. That is, a transaction is opened
@@ -157,9 +176,15 @@ Removing a column:
```ruby
include Gitlab::Database::MigrationHelpers
-def change
+def up
+ with_lock_retries do
+ remove_column :users, :full_name
+ end
+end
+
+def down
with_lock_retries do
- remove_column :users, :full_name, :string
+ add_column :users, :full_name, :string
end
end
```
@@ -169,11 +194,17 @@ Removing a foreign key:
```ruby
include Gitlab::Database::MigrationHelpers
-def change
+def up
with_lock_retries do
remove_foreign_key :issues, :projects
end
end
+
+def down
+ with_lock_retries do
+ add_foreign_key :issues, :projects
+ end
+end
```
Changing default value for a column:
@@ -181,11 +212,17 @@ Changing default value for a column:
```ruby
include Gitlab::Database::MigrationHelpers
-def change
+def up
with_lock_retries do
change_column_default :merge_requests, :lock_version, from: nil, to: 0
end
end
+
+def down
+ with_lock_retries do
+ change_column_default :merge_requests, :lock_version, from: 0, to: nil
+ end
+end
```
### When to use the helper method
@@ -212,6 +249,8 @@ Example changes:
**Note:** `with_lock_retries` method **cannot** be used with `disable_ddl_transaction!`.
+**Note:** `with_lock_retries` method **cannot** be used within the `change` method; you must manually define the `up` and `down` methods to make the migration reversible.
+
### How the helper method works
1. Iterate 50 times.
@@ -304,10 +343,16 @@ combining it with other operations that don't require `disable_ddl_transaction!`
## Adding indexes
-If you need to add a unique index, please keep in mind there is the possibility
-of existing duplicates being present in the database. This means that should
-always _first_ add a migration that removes any duplicates, before adding the
-unique index.
+Before adding an index, consider whether it is necessary. There are situations in which an index
+might not be required, for example:
+
+- The table is small (less than `1,000` records) and it's not expected to exponentially grow in size.
+- Any existing indexes filter out enough rows.
+- The reduction in query timings after the index is added is not significant.
+
+Additionally, wide indexes are not required to match all filter criteria of queries; we just need
+to cover enough columns so that the index lookup has a small enough selectivity. Please review our
+[Adding Database indexes](adding_database_indexes.md) guide for more details.
When adding an index to a non-empty table make sure to use the method
`add_concurrent_index` instead of the regular `add_index` method.
@@ -334,6 +379,11 @@ class MyMigration < ActiveRecord::Migration[4.2]
end
```
+If you need to add a unique index, please keep in mind there is the possibility
+of existing duplicates being present in the database. This means that you should
+always _first_ add a migration that removes any duplicates, before adding the
+unique index.
+
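+A sketch of the unique index migration itself (table and column names are illustrative;
+a previous, separate migration is assumed to have removed any duplicates already):
+
+```ruby
+class AddUniqueIndexToThingsName < ActiveRecord::Migration[6.0]
+  include Gitlab::Database::MigrationHelpers
+
+  DOWNTIME = false
+
+  disable_ddl_transaction!
+
+  def up
+    add_concurrent_index :things, :name, unique: true
+  end
+
+  def down
+    remove_concurrent_index :things, :name
+  end
+end
+```
+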
For a small table (such as an empty one or one with less than `1,000` records),
it is recommended to use `add_index` in a single-transaction migration, combining it with other
operations that don't require `disable_ddl_transaction!`.
@@ -369,6 +419,8 @@ For an empty table (such as a fresh one), it is recommended to use
`add_reference` in a single-transaction migration, combining it with other
operations that don't require `disable_ddl_transaction!`.
+You can read more about adding [foreign key constraints to an existing column](database/add_foreign_key_to_existing_column.md).
+
## Adding Columns With Default Values
When adding columns with default values to non-empty tables, you must use
@@ -400,10 +452,6 @@ default values if absolutely necessary. There is a RuboCop cop that will fail if
this method is used on some tables that are very large on GitLab.com, which
would cause other issues.
-For a small table (such as an empty one or one with less than `1,000` records),
-use `add_column` and `change_column_default` in a single-transaction migration,
-combining it with other operations that don't require `disable_ddl_transaction!`.
-
## Changing the column default
One might think that changing a default column with `change_column_default` is an
@@ -413,16 +461,10 @@ Take the following migration as an example:
```ruby
class DefaultRequestAccessGroups < ActiveRecord::Migration[5.2]
- include Gitlab::Database::MigrationHelpers
-
DOWNTIME = false
- def up
- change_column_default :namespaces, :request_access_enabled, true
- end
-
- def down
- change_column_default :namespaces, :request_access_enabled, false
+ def change
+ change_column_default(:namespaces, :request_access_enabled, from: false, to: true)
end
end
```
@@ -463,7 +505,7 @@ end
```
If a computed update is needed, the value can be wrapped in `Arel.sql`, so Arel
-treats it as an SQL literal. It's also a required deprecation for [Rails 6](https://gitlab.com/gitlab-org/gitlab-foss/issues/61451).
+treats it as an SQL literal. It's also a required deprecation for [Rails 6](https://gitlab.com/gitlab-org/gitlab/issues/28497).
The below example is the same as the one above, but
the value is set to the product of the `bar` and `baz` columns:
diff --git a/doc/development/module_with_instance_variables.md b/doc/development/module_with_instance_variables.md
index 1687a9f5ed4..b0eab95190b 100644
--- a/doc/development/module_with_instance_variables.md
+++ b/doc/development/module_with_instance_variables.md
@@ -30,11 +30,11 @@ People are saying multiple inheritance is bad. Mixing multiple modules with
multiple instance variables scattering everywhere suffer from the same issue.
The same applies to `ActiveSupport::Concern`. See:
[Consider replacing concerns with dedicated classes & composition](
-https://gitlab.com/gitlab-org/gitlab-foss/issues/23786)
+https://gitlab.com/gitlab-org/gitlab/issues/16270)
There's also a similar idea:
[Use decorators and interface segregation to solve overgrowing models problem](
-https://gitlab.com/gitlab-org/gitlab-foss/issues/13484)
+https://gitlab.com/gitlab-org/gitlab/issues/14235)
Note that `included` doesn't solve the whole issue. They define the
dependencies, but they still allow each modules to talk implicitly via the
diff --git a/doc/development/namespaces_storage_statistics.md b/doc/development/namespaces_storage_statistics.md
index 71c9a0b96fb..f175739e55e 100644
--- a/doc/development/namespaces_storage_statistics.md
+++ b/doc/development/namespaces_storage_statistics.md
@@ -25,7 +25,7 @@ by [`Namespaces#with_statistics`](https://gitlab.com/gitlab-org/gitlab/blob/4ab5
Additionally, the pattern that is currently used to update the project statistics
(the callback) doesn't scale adequately. It is currently one of the largest
-[database queries transactions on production](https://gitlab.com/gitlab-org/gitlab-foss/issues/62488)
+[database queries transactions on production](https://gitlab.com/gitlab-org/gitlab/issues/29070)
that takes the most time overall. We can't add one more query to it as
it will increase the transaction's length.
@@ -142,7 +142,7 @@ but we refresh them through Sidekiq jobs and in different transactions:
1. Create a second table (`namespace_aggregation_schedules`) with two columns `id` and `namespace_id`.
1. Whenever the statistics of a project changes, insert a row into `namespace_aggregation_schedules`
- We don't insert a new row if there's already one related to the root namespace.
- - Keeping in mind the length of the transaction that involves updating `project_statistics`(<https://gitlab.com/gitlab-org/gitlab-foss/issues/62488>), the insertion should be done in a different transaction and through a Sidekiq Job.
+ - Keeping in mind the length of the transaction that involves updating `project_statistics`(<https://gitlab.com/gitlab-org/gitlab/issues/29070>), the insertion should be done in a different transaction and through a Sidekiq Job.
1. After inserting the row, we schedule another worker to be executed async at two different moments:
- One enqueued for immediate execution and another one scheduled in `1.5h` hours.
- We only schedule the jobs, if we can obtain a `1.5h` lease on Redis on a key based on the root namespace ID.
@@ -162,7 +162,7 @@ This implementation has the following benefits:
The only downside of this approach is that namespaces' statistics are updated up to `1.5` hours after the change is done,
which means there's a time window in which the statistics are inaccurate. Because we're still not
-[enforcing storage limits](https://gitlab.com/gitlab-org/gitlab-foss/issues/30421), this is not a major problem.
+[enforcing storage limits](https://gitlab.com/gitlab-org/gitlab/issues/17664), this is not a major problem.
## Conclusion
diff --git a/doc/development/new_fe_guide/development/accessibility.md b/doc/development/new_fe_guide/development/accessibility.md
index ae5c4c6a6cc..7a15e9eb6be 100644
--- a/doc/development/new_fe_guide/development/accessibility.md
+++ b/doc/development/new_fe_guide/development/accessibility.md
@@ -4,7 +4,7 @@ Using semantic HTML plays a key role when it comes to accessibility.
## Accessible Rich Internet Applications - ARIA
-WAI-ARIA, the Accessible Rich Internet Applications specification, defines a way to make Web content and Web applications more accessible to people with disabilities.
+WAI-ARIA (the Accessible Rich Internet Applications specification) defines a way to make Web content and Web applications more accessible to people with disabilities.
> Note: It is [recommended][using-aria] to use semantic elements as the primary method to achieve accessibility rather than adding aria attributes. Adding aria attributes should be seen as a secondary method for creating accessible elements.
diff --git a/doc/development/new_fe_guide/index.md b/doc/development/new_fe_guide/index.md
index 152ddcdae64..9e9c367807f 100644
--- a/doc/development/new_fe_guide/index.md
+++ b/doc/development/new_fe_guide/index.md
@@ -1,7 +1,7 @@
# Frontend Development Guidelines
This guide contains all the information to successfully contribute to GitLab's frontend.
-This is a living document, and we welcome contributions, feedback and suggestions.
+This is a living document, and we welcome contributions, feedback, and suggestions.
## [Development](development/index.md)
diff --git a/doc/development/packages.md b/doc/development/packages.md
index 487d1243c97..0880e053901 100644
--- a/doc/development/packages.md
+++ b/doc/development/packages.md
@@ -23,7 +23,7 @@ The existing database model requires the following:
- A package can have one or more package files.
- The package model is based on storing information about the package and its version.
-## API endpoints
+### API endpoints
Package systems work with GitLab via API. For example `ee/lib/api/npm_packages.rb`
implements API endpoints to work with NPM clients. So, the first thing to do is to
@@ -45,7 +45,7 @@ PUT https://gitlab.com/api/v4/projects/<your_project_id>/packages/npm/
Group-level and instance-level endpoints are good to have but are optional.
-### Remote hierarchy
+#### Remote hierarchy
Packages are scoped within various levels of access, which is generally configured by setting your remote. A
remote endpoint may be set at the project level, meaning when installing packages, only packages belonging to that
@@ -68,50 +68,27 @@ NOTE: **Note:** NPM is currently a hybrid of the instance level and group level.
It is using the top-level group or namespace as the defining portion of the name
(for example, `@my-group-name/my-package-name`).
-## Naming conventions
+### Naming conventions
To avoid name conflict for instance-level endpoints you will need to define a package naming convention
that gives a way to identify the project that the package belongs to. This generally involves using the project
id or full project path in the package name. See
[Conan's naming convention](../user/packages/conan_repository/index.md#package-recipe-naming-convention) as an example.
-For group and project-level endpoints, naming can be less constrained, and it will be up to the group and project
-members to be certain that there is no conflict between two package names, however the system should prevent
+For group and project-level endpoints, naming can be less constrained and it will be up to the group and project
+members to be certain that there is no conflict between two package names. However, the system should prevent
a user from reusing an existing name within a given scope.
Otherwise, naming should follow the package manager's naming conventions and include a validation in the `package.md`
model for that package type.
-## File uploads
-
-File uploads should be handled by GitLab Workhorse using object accelerated uploads. What this means is that
-the workhorse proxy that checks all incoming requests to GitLab will intercept the upload request,
-upload the file, and forward a request to the main GitLab codebase only containing the metadata
-and file location rather than the file itself. An overview of this process can be found in the
-[development documentation](uploads.md#direct-upload).
-
-In terms of code, this means a route will need to be added to the
-[GitLab Workhorse project](https://gitlab.com/gitlab-org/gitlab-workhorse) for each level of remote being added
-(instance, group, project). [This merge request](https://gitlab.com/gitlab-org/gitlab-workhorse/-/merge_requests/412/diffs)
-demonstrates adding an instance-level endpoint for Conan to workhorse. You can also see the Maven project level endpoint
-implemented in the same file.
-
-Once the route has been added, you will need to add an additional `/authorize` version of the upload endpoint to your API file.
-[Here is an example](https://gitlab.com/gitlab-org/gitlab/blob/398fef1ca26ae2b2c3dc89750f6b20455a1e5507/ee/lib/api/maven_packages.rb#L164)
-of the additional endpoint added for Maven. The `/authorize` endpoint verifies and authorizes the request from workhorse,
-then the normal upload endpoint is implemented below, consuming the metadata that workhorse provides in order to
-create the package record. Workhorse provides a variety of file metadata such as type, size, and different checksum formats.
-
-For testing purposes, you may want to [enable object storage](https://gitlab.com/gitlab-org/gitlab-development-kit/blob/master/doc/howto/object_storage.md)
-in your local development environment.
-
-## Services and finders
+### Services and finders
Logic for performing tasks such as creating package or package file records or finding packages should not live
within the API file, but should live in services and finders. Existing services and finders should be used or
extended when possible to keep the common package logic grouped as much as possible.
-## Configuration
+### Configuration
GitLab has a `packages` section in its configuration file (`gitlab.rb`).
It applies to all package systems supported by GitLab. Usually you don't need
@@ -119,7 +96,96 @@ to add anything there.
Packages can be configured to use object storage, therefore your code must support it.
-## Database and handling metadata
+## MVC Approach
+
+New package systems are integrated into GitLab using an [MVC](https://about.gitlab.com/handbook/values/#minimum-viable-change-mvc). Therefore, the first iteration should support the bare minimum user actions:
+
+- Authentication
+- Uploading a package
+- Pulling a package
+- Required actions
+
+Required actions are all the additional requests that GitLab will need to handle so the corresponding package manager CLI can work properly. It could be a search feature or an endpoint providing meta information about a package. For example:
+
+- For NuGet, the search request was implemented during the first MVC iteration, to support Visual Studio.
+- For NPM, there is a metadata endpoint used by `npm` to get the tarball url.
+
+For the first MVC iteration, it's recommended to stay at the project level of the [remote hierarchy](#remote-hierarchy). Other levels can be tackled with [future Merge Requests](#future-work).
+
+There are usually 2 phases for the MVC:
+
+- [Analysis](#analysis)
+- [Implementation](#implementation)
+
+### Keep iterations small
+
+When implementing a new package manager, it is tempting to create one large merge request containing all of the
+endpoints and services necessary to support basic usage. Instead, put the
+API endpoints behind a [feature flag](feature_flags/development.md) and
+submit each endpoint or behavior (download, upload, etc) in a different merge request to shorten the review
+process.
+
+### Analysis
+
+During this phase, the idea is to collect as much information as possible about the API used by the package system. Here are some aspects that can be useful to include:
+
+- **Authentication**: What authentication mechanisms are available (OAuth, Basic
+ Authorization, other). Keep in mind that GitLab users will often want to use their
+ [Personal Access Tokens](../user/profile/personal_access_tokens.md).
+ Although not needed for the MVC first iteration, the [CI job tokens](../user/project/new_ci_build_permissions_model.md#job-token)
+ have to be supported at some point in the future.
+- **Requests**: Which requests are needed to have a working MVC. Ideally, produce
+ a list of all the requests needed for the MVC (including required actions). Further
+ investigation could provide an example for each request with the request and the response bodies.
+- **Upload**: Carefully analyze how the upload process works. This will probably be the most
+ complex request to implement. A detailed analysis is desired here as uploads can be
+ encoded in different ways (body or multipart) and can even be in a totally different
+ format (for example, a JSON structure where the package file is a Base64 value of
+ a particular field). These different encodings lead to slightly different implementations
+ on GitLab and GitLab Workhorse. For more detailed information, review [file uploads](#file-uploads).
+- **Endpoints**: Suggest a list of endpoint URLs that will be implemented in GitLab.
+- **Split work**: Suggest a list of changes to do to incrementally build the MVC.
+ This will give a good idea of how much work there is to be done. Here is an example
+ list that would need to be adapted on a case by case basis:
+ 1. Empty file structure (API file, base service for this package)
+ 1. Authentication system for "logging in" to the package manager
+ 1. Identify metadata and create applicable tables
+ 1. Workhorse route for [object storage direct upload](uploads.md#direct-upload)
+ 1. Endpoints required for upload/publish
+ 1. Endpoints required for install/download
+ 1. Endpoints required for required actions
+
+The analysis usually takes a full milestone to complete, though it's not impossible to start the implementation in the same milestone.
+
+In particular, the upload request can have some [requirements in the GitLab Workhorse project](#file-uploads). This project has a different release cycle than the Rails backend. It's **strongly** recommended that you open an issue there as soon as the upload request analysis is done. This way, GitLab Workhorse is ready when the upload request is implemented on the Rails backend.
+
+### Implementation
+
+The implementation of the various Merge Requests will vary between package system integrations. Contributors should take into account some important aspects of the implementation phase.
+
+#### Authentication
+
+The MVC must support [Personal Access Tokens](../user/profile/personal_access_tokens.md) right from the start. We currently support two options for these tokens: OAuth and Basic Access.
+
+OAuth authentication is already supported. You can see an example in the [npm API](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/api/npm_packages.rb).
+
+[Basic Access authentication](https://developer.mozilla.org/en-US/docs/Web/HTTP/Authentication)
+support is done by overriding a specific function in the API helpers, like
+[this example in the Conan API](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/api/conan_packages.rb).
+For this authentication mechanism, keep in mind that some clients can send an unauthenticated
+request first, wait for the 401 Unauthorized response with the [`WWW-Authenticate`](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/WWW-Authenticate)
+field, then send an updated (authenticated) request. This case is more involved as
+GitLab needs to handle the 401 Unauthorized response. The [NuGet API](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/api/nuget_packages.rb)
+supports this case.
+
+#### Authorization
+
+There are project and group level permissions for `read_package`, `create_package`, and `destroy_package`. Each
+endpoint should
+[authorize the requesting user](https://gitlab.com/gitlab-org/gitlab/blob/398fef1ca26ae2b2c3dc89750f6b20455a1e5507/ee/lib/api/conan_packages.rb)
+against the project or group before continuing.
+
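+A hypothetical Grape endpoint fragment showing the check (the route is illustrative;
+real examples live under `[ee/]lib/api/`):
+
+```ruby
+get ':id/packages/foo/:package_name' do
+  # Reject the request unless the user can read packages in this project.
+  authorize!(:read_package, user_project)
+
+  # ... find and present the package ...
+end
+```
+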
+#### Database and handling metadata
The current database model allows you to store a name and a version for each package.
Every time you upload a new package, you can either create a new record of `Package`
@@ -137,44 +203,41 @@ delegate from the package model.
Note that the existing package UI only displays information within the `packages_packages` and `packages_package_files`
tables. If the data stored in the metadata tables need to be displayed, a ~frontend change will be required.
-## Authorization
+#### File uploads
-There are project and group level permissions for `read_package`, `create_package`, and `destroy_package`. Each
-endpoint should
-[authorize the requesting user](https://gitlab.com/gitlab-org/gitlab/blob/398fef1ca26ae2b2c3dc89750f6b20455a1e5507/ee/lib/api/conan_packages.rb#L84)
-against the project or group before continuing.
-
-## Keep iterations small
-
-When implementing a new package manager, it is easy to end up creating one large merge request containing all of the
-necessary endpoints and services necessary to support basic usage. If this is the case, consider putting the
-API endpoints behind a [feature flag](feature_flags/development.md) and
-submitting each endpoint or behavior (download, upload, etc) in different merge requests to shorten the review
-process.
+File uploads should be handled by GitLab Workhorse using object accelerated uploads. What this means is that
+the workhorse proxy that checks all incoming requests to GitLab will intercept the upload request,
+upload the file, and forward a request to the main GitLab codebase only containing the metadata
+and file location rather than the file itself. An overview of this process can be found in the
+[development documentation](uploads.md#direct-upload).
-### Potential MRs for any given package system
+In terms of code, this means a route will need to be added to the
+[GitLab Workhorse project](https://gitlab.com/gitlab-org/gitlab-workhorse) for each upload endpoint being added
+(instance, group, project). [This merge request](https://gitlab.com/gitlab-org/gitlab-workhorse/-/merge_requests/412/diffs)
+demonstrates adding an instance-level endpoint for Conan to workhorse. You can also see the Maven project level endpoint
+implemented in the same file.
-#### MVC MRs
+Once the route has been added, you will need to add an additional `/authorize` version of the upload endpoint to your API file.
+[Here is an example](https://gitlab.com/gitlab-org/gitlab/blob/398fef1ca26ae2b2c3dc89750f6b20455a1e5507/ee/lib/api/maven_packages.rb#L164)
+of the additional endpoint added for Maven. The `/authorize` endpoint verifies and authorizes the request from workhorse,
+then the normal upload endpoint is implemented below, consuming the metadata that workhorse provides in order to
+create the package record. Workhorse provides a variety of file metadata such as type, size, and different checksum formats.
-These changes represent all that is needed to deliver a minimally usable package management system.
+For testing purposes, you may want to [enable object storage](https://gitlab.com/gitlab-org/gitlab-development-kit/blob/master/doc/howto/object_storage.md)
+in your local development environment.
-1. Empty file structure (API file, base service for this package)
-1. Authentication system for 'logging in' to the package manager
-1. Identify metadata and create applicable tables
-1. Workhorse route for [object storage direct upload](uploads.md#direct-upload)
-1. Endpoints required for upload/publish
-1. Endpoints required for install/download
-1. Endpoints required for remove/delete
+### Future Work
-#### Possible post-MVC MRs
+While working on the MVC, contributors will probably find features that are not mandatory for the MVC but can provide a better user experience. It's generally a good idea to keep an eye on those and open issues.
-These updates are not essential to be able to publish and consume packages, but may be desired as the system is
-released for general use.
+Here are some examples:
1. Endpoints required for search
1. Front end updates to display additional package information and metadata
1. Limits on file sizes
1. Tracking for metrics
+1. Read more metadata fields from the package to make them available to the frontend. For example, it's usual to be able to tag a package. Those tags can be read and saved by the backend and then displayed in the packages UI.
+1. Endpoints for the upper levels of the [remote hierarchy](#remote-hierarchy). This step might require creating a [naming convention](#naming-conventions).
## Exceptions
diff --git a/doc/development/performance.md b/doc/development/performance.md
index a211fddc141..72eb85c623b 100644
--- a/doc/development/performance.md
+++ b/doc/development/performance.md
@@ -7,16 +7,15 @@ consistent performance of GitLab.
The process of solving performance problems is roughly as follows:
-1. Make sure there's an issue open somewhere (e.g., on the GitLab CE issue
- tracker), create one if there isn't. See [#15607][#15607] for an example.
+1. Make sure there's an issue open somewhere (for example, on the GitLab CE issue
+ tracker), and create one if there is not. See [#15607][#15607] for an example.
1. Measure the performance of the code in a production environment such as
GitLab.com (see the [Tooling](#tooling) section below). Performance should be
measured over a period of _at least_ 24 hours.
1. Add your findings based on the measurement period (screenshots of graphs,
timings, etc) to the issue mentioned in step 1.
1. Solve the problem.
-1. Create a merge request, assign the "Performance" label and assign it to
- [@yorickpeterse][yorickpeterse] for reviewing.
+1. Create a merge request, assign the "Performance" label and follow the [performance review process](merge_request_performance_guidelines.md).
1. Once a change has been deployed make sure to _again_ measure for at least 24
hours to see if your changes have any impact on the production environment.
1. Repeat until you're done.
@@ -44,16 +43,16 @@ GitLab provides built-in tools to help improve performance and availability:
- [QueryRecoder](query_recorder.md) for preventing `N+1` regressions.
- [Chaos endpoints](chaos_endpoints.md) for testing failure scenarios. Intended mainly for testing availability.
-GitLab employees can use GitLab.com's performance monitoring systems located at
+GitLab team members can use [GitLab.com's performance monitoring systems](https://about.gitlab.com/handbook/engineering/monitoring/) located at
<https://dashboards.gitlab.net>, this requires you to log in using your
-`@gitlab.com` Email address. Non-GitLab employees are advised to set up their
-own InfluxDB + Grafana stack.
+`@gitlab.com` email address. Non-GitLab team members are advised to set up their
+own InfluxDB and Grafana stack.
## Benchmarks
Benchmarks are almost always useless. Benchmarks usually only test small bits of
code in isolation and often only measure the best case scenario. On top of that,
-benchmarks for libraries (e.g., a Gem) tend to be biased in favour of the
+benchmarks for libraries (such as a Gem) tend to be biased in favour of the
library. After all there's little benefit to an author publishing a benchmark
that shows they perform worse than their competitors.
@@ -68,8 +67,8 @@ When writing benchmarks you should almost always use
[benchmark-ips](https://github.com/evanphx/benchmark-ips). Ruby's `Benchmark`
module that comes with the standard library is rarely useful as it runs either a
single iteration (when using `Benchmark.bm`) or two iterations (when using
-`Benchmark.bmbm`). Running this few iterations means external factors (e.g. a
-video streaming in the background) can very easily skew the benchmark
+`Benchmark.bmbm`). Running this few iterations means external factors, such as a
+video streaming in the background, can very easily skew the benchmark
statistics.
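+
+For example, a minimal `benchmark-ips` run looks like this (the compared snippets are arbitrary):
+
+```ruby
+require 'benchmark/ips'
+
+Benchmark.ips do |x|
+  x.report('String#==')   { 'foo' == 'foo' }
+  x.report('String#eql?') { 'foo'.eql?('foo') }
+
+  # Prints iterations per second and how the reports compare.
+  x.compare!
+end
+```
+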
Another problem with the `Benchmark` module is that it displays timings, not
@@ -114,17 +113,18 @@ the behaviour of suspect code in detail.
It's important to note that profiling an application *alters its performance*,
and will generally be done *in an unrepresentative environment*. In particular,
-a method is not necessarily troublesome just because it is executed many times,
+a method is not necessarily troublesome just because it's executed many times,
or takes a long time to execute. Profiles are tools you can use to better
understand what is happening in an application - using that information wisely
is up to you!
Keeping that in mind, to create a profile, identify (or create) a spec that
exercises the troublesome code path, then run it using the `bin/rspec-stackprof`
-helper, e.g.:
+helper, for example:
```shell
$ LIMIT=10 bin/rspec-stackprof spec/policies/project_policy_spec.rb
+
8/8 |====== 100 ======>| Time: 00:00:18
Finished in 18.19 seconds (files took 4.8 seconds to load)
@@ -170,10 +170,11 @@ kcachegrind project_policy_spec.callgrind # Linux
qcachegrind project_policy_spec.callgrind # Mac
```
-It may be useful to zoom in on a specific method, e.g.:
+It may be useful to zoom in on a specific method, for example:
```shell
$ stackprof tmp/project_policy_spec.rb.dump --method warm_asset_cache
+
TestEnv#warm_asset_cache (/Users/lupine/dev/gitlab.com/gitlab-org/gitlab-development-kit/gitlab/spec/support/test_env.rb:164)
samples: 0 self (0.0%) / 6288 total (36.9%)
callers:
@@ -239,6 +240,7 @@ shell:
```shell
$ rake rspec_profiling:console
+
irb(main):001:0> results.count
=> 231
irb(main):002:0> results.last.attributes.keys
@@ -257,10 +259,10 @@ One of the reasons of the increased memory footprint could be Ruby memory fragme
To diagnose it, you can visualize Ruby heap as described in [this post by Aaron Patterson](https://tenderlovemaking.com/2017/09/27/visualizing-your-ruby-heap.html).
-To start, you want to dump the heap of the process you are investigating to a JSON file.
+To start, you want to dump the heap of the process you're investigating to a JSON file.
-You need to run the command inside the process you are exploring, you may do that with `rbtrace`.
-`rbtrace` is already present in GitLab `Gemfile`, you just need to require it.
+You need to run the command inside the process you're exploring; you may do that with `rbtrace`.
+`rbtrace` is already present in the GitLab `Gemfile`; you just need to require it.
It could be achieved running webserver or Sidekiq with the environment variable set to `ENABLE_RBTRACE=1`.
To get the heap dump:
@@ -274,12 +276,12 @@ Having the JSON, you finally could render a picture using the script [provided b
```shell
ruby heapviz.rb heap.json
```
-
+
Fragmented Ruby heap snapshot could look like this:
![Ruby heap fragmentation](img/memory_ruby_heap_fragmentation.png)
-Memory fragmentation could be reduced by tuning GC parameters as described in [this post by Nate Berkopec](https://www.speedshop.co/2017/12/04/malloc-doubles-ruby-memory.html), which should be considered as a tradeoff, as it may affect overall performance of memory allocation and GC cycles.
+Memory fragmentation could be reduced by tuning GC parameters as described in [this post by Nate Berkopec](https://www.speedshop.co/2017/12/04/malloc-doubles-ruby-memory.html). This should be considered a tradeoff, as it may affect the overall performance of memory allocation and GC cycles.
## Importance of Changes
@@ -295,11 +297,11 @@ There is no clear set of steps that you can follow to determine if a certain
piece of code is worth optimizing. The only two things you can do are:
1. Think about what the code does, how it's used, how many times it's called and
- how much time is spent in it relative to the total execution time (e.g., the
+ how much time is spent in it relative to the total execution time (for example, the
total time spent in a web request).
1. Ask others (preferably in the form of an issue).
-Some examples of changes that aren't really important/worth the effort:
+Some examples of changes that are not really important/worth the effort:
- Replacing double quotes with single quotes.
- Replacing usage of Array with Set when the list of values is very small.
@@ -309,7 +311,7 @@ Some examples of changes that aren't really important/worth the effort:
## Slow Operations & Sidekiq
-Slow operations (e.g. merging branches) or operations that are prone to errors
+Slow operations, like merging branches, or operations that are prone to errors
(using external APIs) should be performed in a Sidekiq worker instead of
directly in a web request as much as possible. This has numerous benefits such
as:
@@ -416,7 +418,7 @@ as omitting it may lead to style check failures.
## Anti-Patterns
This is a collection of [anti-patterns][anti-pattern] that should be avoided
-unless these changes have a measurable, significant and positive impact on
+unless these changes have a measurable, significant, and positive impact on
production environments.
### Moving Allocations to Constants
@@ -446,9 +448,12 @@ SOME_CONSTANT = 'bar'
You might want millions of project rows in your local database, for example,
in order to compare relative query performance, or to reproduce a bug. You could
-do this by hand with SQL commands, but since you have ActiveRecord models, you
-might find using these gems more convenient:
+do this by hand with SQL commands or using [Mass Inserting Rails
+Models](mass_insert.md) functionality.
+
+Assuming you are working with ActiveRecord models, you might also find these links helpful:
+- [Insert records in batches](insert_into_tables_in_batches.md)
- [BulkInsert gem](https://github.com/jamis/bulk_insert)
- [ActiveRecord::PgGenerateSeries gem](https://github.com/ryu39/active_record-pg_generate_series)
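
For illustration only, the following is a minimal sketch of seeding many rows with multi-row `INSERT` statements via Rails' `insert_all`. The column names and the `namespace` variable are assumptions made for this example; for anything more involved, prefer the approaches linked above.

```ruby
# Hypothetical sketch: bulk-create many project rows for performance testing.
# `namespace` is assumed to be an existing Namespace record, and the column
# names are illustrative; adjust them to the real schema.
rows = Array.new(10_000) do |i|
  {
    name: "perf-project-#{i}",
    path: "perf-project-#{i}",
    namespace_id: namespace.id,
    created_at: Time.current,
    updated_at: Time.current
  }
end

# insert_all (Rails 6+) issues one INSERT per slice and skips validations/callbacks.
rows.each_slice(1_000) { |slice| Project.insert_all(slice) }
```
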
@@ -458,5 +463,4 @@ You may find some useful examples in this snippet:
<https://gitlab.com/gitlab-org/gitlab-foss/snippets/33946>
[#15607]: https://gitlab.com/gitlab-org/gitlab-foss/issues/15607
-[yorickpeterse]: https://gitlab.com/yorickpeterse
[anti-pattern]: https://en.wikipedia.org/wiki/Anti-pattern
diff --git a/doc/development/permissions.md b/doc/development/permissions.md
index 9e67079d1bc..bca137337fc 100644
--- a/doc/development/permissions.md
+++ b/doc/development/permissions.md
@@ -9,11 +9,11 @@ anything that deals with permissions, all of them should be considered.
Groups and projects can have the following visibility levels:
-- public (20) - an entity is visible to everyone
+- public (20) - an entity is visible to everyone
- internal (10) - an entity is visible to logged in users
- private (0) - an entity is visible only to the approved members of the entity
-The visibility level of a group can be changed only if all subgroups and
+The visibility level of a group can be changed only if all subgroups and
subprojects have the same or lower visibility level (for example, a group can be set
to internal only if all subgroups and projects are internal or private).
diff --git a/doc/development/pipelines.md b/doc/development/pipelines.md
index 29208c1bf76..1454f91ac20 100644
--- a/doc/development/pipelines.md
+++ b/doc/development/pipelines.md
@@ -42,7 +42,7 @@ The default image is currently
`registry.gitlab.com/gitlab-org/gitlab-build-images:ruby-2.6.5-golang-1.12-git-2.24-lfs-2.9-chrome-73.0-node-12.x-yarn-1.21-postgresql-9.6-graphicsmagick-1.3.34`.
It includes Ruby 2.6.5, Go 1.12, Git 2.24, Git LFS 2.9, Chrome 73, Node 12, Yarn 1.21,
-PostgreSQL 9.6, and Graphics Magick 1.3.33.
+PostgreSQL 9.6, and Graphics Magick 1.3.34.
The images used in our pipelines are configured in the
[`gitlab-org/gitlab-build-images`](https://gitlab.com/gitlab-org/gitlab-build-images)
@@ -61,183 +61,84 @@ each pipeline includes default variables defined in
## Common job definitions
Most of the jobs [extend from a few CI definitions](../ci/yaml/README.md#extends)
-that are scoped to a single
-[configuration parameter](../ci/yaml/README.md#configuration-parameters).
-
-These common definitions are:
-
-- `.default-tags`: Ensures a job has the `gitlab-org` tag to ensure it's using
- our dedicated runners.
-- `.default-retry`: Allows a job to [retry](../ci/yaml/README.md#retry) upon `unknown_failure`, `api_failure`,
- `runner_system_failure`, `job_execution_timeout`, or `stuck_or_timeout_failure`.
-- `.default-before_script`: Allows a job to use a default `before_script` definition
- suitable for Ruby/Rails tasks that may need a database running (e.g. tests).
-- `.default-cache`: Allows a job to use a default `cache` definition suitable for
- Ruby/Rails and frontend tasks.
-- `.default-only`: Restricts the cases where a job is created. This currently
- includes `master`, `/^[\d-]+-stable(-ee)?$/` (stable branches),
- `/^\d+-\d+-auto-deploy-\d+$/` (auto-deploy branches), `/^security\//` (security branches), `merge_requests`, `tags`.
- Note that jobs won't be created for branches with this default configuration.
-- `.only:variables-canonical-dot-com`: Only creates a job if the project is
- located under <https://gitlab.com/gitlab-org>.
-- `.only:variables_refs-canonical-dot-com-schedules`: Same as
- `.only:variables-canonical-dot-com` but add the condition that pipeline is scheduled.
-- `.except:refs-deploy`: Don't create a job if the `ref` is an auto-deploy branch.
-- `.except:refs-master-tags-stable-deploy`: Don't create a job if the `ref` is one of:
- - `master`
- - a tag
- - a stable branch
- - an auto-deploy branch
-- `.only:kubernetes`: Only creates a job if a Kubernetes integration is enabled
- on the project.
-- `.only-review`: This extends from:
- - `.only:variables-canonical-dot-com`
- - `.only:kubernetes`
- - `.except:refs-master-tags-stable-deploy`
-- `.only-review-schedules`: This extends from:
- - `.only:variables_refs-canonical-dot-com-schedules`
- - `.only:kubernetes`
- - `.except:refs-deploy`
-- `.use-pg9`: Allows a job to use the `postgres:9.6` and `redis:alpine` services.
-- `.use-pg10`: Allows a job to use the `postgres:10.9` and `redis:alpine` services.
-- `.use-pg9-ee`: Same as `.use-pg9` but also use the
- `docker.elastic.co/elasticsearch/elasticsearch:5.6.12` services.
-- `.use-pg10-ee`: Same as `.use-pg10` but also use the
- `docker.elastic.co/elasticsearch/elasticsearch:5.6.12` services.
-- `.only-ee`: Only creates a job for the `gitlab` or `gitlab-ee` project.
-- `.only-ee-as-if-foss`: Same as `.only-ee` but simulate the FOSS project by
- setting the `FOSS_ONLY='1'` environment variable.
-
-## Changes detection
-
-If a job extends from `.default-only` (and most of the jobs should), it can restrict
-the cases where it should be created
-[based on the changes](../ci/yaml/README.md#onlychangesexceptchanges)
-from a commit or MR by extending from the following CI definitions:
-
-- `.only:changes-code`: Allows a job to only be created upon code-related changes.
-- `.only:changes-qa`: Allows a job to only be created upon QA-related changes.
-- `.only:changes-docs`: Allows a job to only be created upon docs-related changes.
-- `.only:changes-graphql`: Allows a job to only be created upon GraphQL-related changes.
-- `.only:changes-code-backstage`: Allows a job to only be created upon code-related or backstage-related (e.g. Danger, RuboCop, specs) changes.
-- `.only:changes-code-qa`: Allows a job to only be created upon code-related or QA-related changes.
-- `.only:changes-code-backstage-qa`: Allows a job to only be created upon code-related, backstage-related (e.g. Danger, RuboCop, specs) or QA-related changes.
-
-**See <https://gitlab.com/gitlab-org/gitlab/blob/master/.gitlab/ci/global.gitlab-ci.yml>
-for the list of exact patterns.**
-
-## Rules conditions and changes patterns
-
-We're making use of the [`rules` keyword](https://docs.gitlab.com/ee/ci/yaml/#rules) but we're currently
-duplicating the `if` conditions and `changes` patterns lists since they cannot be shared across
-`include`d files as we do with `extends`.
-
-**If you update an `if` condition or `changes`
-patterns list, make sure to mass-update those across all the CI config files (i.e. `.gitlab/ci/*.yml`).**
-
-### Canonical/security namespace merge requests only
-
-This condition limits jobs creation to merge requests under the `gitlab-org/` top-level group
-on GitLab.com only (i.e. this won't run for `master`, stable or auto-deploy branches).
-This is similar to the `.only:variables-canonical-dot-com` + `only:refs: [merge_requests]`
-CI definitions.
-
-The definition for `if-canonical-dot-com-gitlab-org-groups-merge-request` can be
-seen in <https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/docs.gitlab-ci.yml>.
-
-### Canonical/security namespace tags only
-
-This condition limits jobs creation to tags under the `gitlab-org/` top-level group
-on GitLab.com only.
-This is similar to the `.only:variables-canonical-dot-com` + `only:refs: [tags]` CI definition:
-
-The definition for `if-canonical-dot-com-gitlab-org-groups-tag` can be seen in
-<https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/cng.gitlab-ci.yml>.
-
-### Canonical namespace `master` only
-
-This condition limits jobs creation to `master` pipelines for the `gitlab-org` top-level group
-on GitLab.com only.
-This is similar to the `.only:variables-canonical-dot-com` + `only:refs: [master]` CI definition:
-
-The definition for `if-canonical-dot-com-gitlab-org-group-master-refs` can be
-seen in <https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/pages.gitlab-ci.yml>.
-
-### Canonical namespace schedules only
-
-This condition limits jobs creation to scheduled pipelines for the `gitlab-org` top-level group
-on GitLab.com only.
-This is similar to the `.only:variables-canonical-dot-com` + `only:refs: [schedules]` CI definition:
-
-The definition for `if-canonical-dot-com-gitlab-org-group-schedule` can be seen
-in <https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/qa.gitlab-ci.yml>.
-
-### Not canonical/security namespace
-
-This condition matches if the project isn't in the canonical/security namespace.
-Useful to **not** create a job if the project is a fork, or in other words, when
-a job should only run in the canonical projects.
-
-The definition for `if-not-canonical-namespace` can be seen in
-<https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/frontend.gitlab-ci.yml>.
-
-### Not EE
-
-This condition matches if the project isn't EE. Useful to **not** create a job if
-the project is GitLab, or in other words, when a job should only run in the GitLab
-FOSS project.
-
-The definition for `if-not-ee` can be seen in
-<https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/frontend.gitlab-ci.yml>.
-
-### Default refs only
-
-This condition is the equivalent of `.default-only`.
-
-The definition for `if-default-refs` can be seen in
-<https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/frontend.gitlab-ci.yml>.
-
-### `master` refs only
-
-This condition is the equivalent of `only:refs: [master]`.
-
-The definition for `if-master-refs` can be seen in
-<https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/frontend.gitlab-ci.yml>.
-
-### Code changes patterns
-
-Similar patterns as for `.only:changes-code`:
-
-The definition for `code-patterns` can be seen in
-<https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/qa.gitlab-ci.yml>.
-
-### QA changes patterns
-
-Similar patterns as for `.only:changes-qa`:
-
-The definition for `qa-patterns` can be seen in
-<https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/qa.gitlab-ci.yml>.
-
-### Docs changes patterns
-
-Similar patterns as for `.only:changes-docs`:
-
-The definition for `docs-patterns` can be seen in
-<https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/docs.gitlab-ci.yml>.
-
-### Code and QA changes patterns
-
-Similar patterns as for `.only:changes-code-qa`:
-
-The definition for `code-qa-patterns` can be seen in
-<https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/review.gitlab-ci.yml>.
-
-### Code, backstage and QA changes patterns
-
-Similar patterns as for `.only:changes-code-backstage-qa`:
-
-The definition for `code-backstage-qa-patterns` can be seen in
-<https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/frontend.gitlab-ci.yml>.
+defined in [`.gitlab/ci/global.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/blob/master/.gitlab/ci/global.gitlab-ci.yml)
+that are scoped to a single [configuration parameter](../ci/yaml/README.md#configuration-parameters).
+
+| Job definitions | Description |
+|------------------|-------------|
+| `.default-tags` | Ensures a job has the `gitlab-org` tag so that it uses our dedicated runners. |
+| `.default-retry` | Allows a job to [retry](../ci/yaml/README.md#retry) upon `unknown_failure`, `api_failure`, `runner_system_failure`, `job_execution_timeout`, or `stuck_or_timeout_failure`. |
+| `.default-before_script` | Allows a job to use a default `before_script` definition suitable for Ruby/Rails tasks that may need a database running (e.g. tests). |
+| `.default-cache` | Allows a job to use a default `cache` definition suitable for Ruby/Rails and frontend tasks. |
+| `.use-pg9` | Allows a job to use the `postgres:9.6.17` and `redis:alpine` services. |
+| `.use-pg10` | Allows a job to use the `postgres:10.12` and `redis:alpine` services. |
+| `.use-pg11` | Allows a job to use the `postgres:11.6` and `redis:alpine` services. |
+| `.use-pg9-ee` | Same as `.use-pg9` but also use the `docker.elastic.co/elasticsearch/elasticsearch:6.4.2` services. |
+| `.use-pg10-ee` | Same as `.use-pg10` but also use the `docker.elastic.co/elasticsearch/elasticsearch:6.4.2` services. |
+| `.use-pg11-ee` | Same as `.use-pg11` but also use the `docker.elastic.co/elasticsearch/elasticsearch:6.4.2` services. |
+| `.as-if-foss` | Simulates the FOSS project by setting the `FOSS_ONLY='1'` environment variable. |
+
+## `workflow:rules`
+
+We're using the [`workflow:rules` keyword](../ci/yaml/README.md#workflowrules) to
+define the default rules that determine whether a pipeline is created.
+
+These rules are defined in <https://gitlab.com/gitlab-org/gitlab/blob/master/.gitlab-ci.yml>
+and are as follows:
+
+1. If `$FORCE_GITLAB_CI` is set, create a pipeline.
+1. For merge requests, create a pipeline.
+1. For the `master` branch, create a pipeline (this includes schedules, pushes, merges, and so on).
+1. For tags, create a pipeline.
+1. If `$GITLAB_INTERNAL` isn't set, don't create a pipeline.
+1. For stable, auto-deploy, and security branches, create a pipeline.
+1. For any other cases (e.g. when pushing a branch with no MR for it), no pipeline is created.
+
+## `rules`, `if:` conditions and `changes:` patterns
+
+We're using the [`rules` keyword](../ci/yaml/README.md#rules) extensively.
+
+All `rules` definitions are defined in
+<https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/rules.gitlab-ci.yml>,
+then included in individual jobs via [`extends`](../ci/yaml/README.md#extends).
+
+The `rules` definitions are composed of `if:` conditions and `changes:` patterns,
+which are also defined in
+<https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/rules.gitlab-ci.yml>
+and included in `rules` definitions via [YAML anchors](../ci/yaml/README.md#anchors).
+
+### `if:` conditions
+
+| `if:` conditions | Description | Notes |
+|------------------|-------------|-------|
+| `if-not-canonical-namespace` | Matches if the project isn't in the canonical (`gitlab-org/`) or security (`gitlab-org/security`) namespace. | Use to create a job for forks (by using `when: on_success\|manual`), or **not** create a job for forks (by using `when: never`). |
+| `if-not-ee` | Matches if the project isn't EE (i.e. project name isn't `gitlab` or `gitlab-ee`). | Use to create a job only in the FOSS project (by using `when: on_success\|manual`), or **not** create a job if the project is EE (by using `when: never`). |
+| `if-not-foss` | Matches if the project isn't FOSS (i.e. project name isn't `gitlab-foss`, `gitlab-ce`, or `gitlabhq`). | Use to create a job only in the EE project (by using `when: on_success\|manual`), or **not** create a job if the project is FOSS (by using `when: never`). |
+| `if-default-refs` | Matches if the pipeline is for `master`, `/^[\d-]+-stable(-ee)?$/` (stable branches), `/^\d+-\d+-auto-deploy-\d+$/` (auto-deploy branches), `/^security\//` (security branches), merge requests, and tags. | Note that jobs won't be created for branches with this default configuration. |
+| `if-master-refs` | Matches if the current branch is `master`. | |
+| `if-master-or-tag` | Matches if the pipeline is for the `master` branch or for a tag. | |
+| `if-merge-request` | Matches if the pipeline is for a merge request. | |
+| `if-nightly-master-schedule` | Matches if the pipeline is for a `master` scheduled pipeline with `$NIGHTLY` set. | |
+| `if-dot-com-gitlab-org-schedule` | Limits job creation to scheduled pipelines for the `gitlab-org` group on GitLab.com. | |
+| `if-dot-com-gitlab-org-master` | Limits job creation to the `master` branch for the `gitlab-org` group on GitLab.com. | |
+| `if-dot-com-gitlab-org-merge-request` | Limits job creation to merge requests for the `gitlab-org` group on GitLab.com. | |
+| `if-dot-com-gitlab-org-and-security-tag` | Limits job creation to tags for the `gitlab-org` and `gitlab-org/security` groups on GitLab.com. | |
+| `if-dot-com-gitlab-org-and-security-merge-request` | Limits job creation to merge requests for the `gitlab-org` and `gitlab-org/security` groups on GitLab.com. | |
+| `if-dot-com-ee-schedule` | Limits jobs to scheduled pipelines for the `gitlab-org/gitlab` project on GitLab.com. | |
+| `if-cache-credentials-schedule` | Limits jobs to scheduled pipelines with the `$CI_REPO_CACHE_CREDENTIALS` variable set. | |
+
+### `changes:` patterns
+
+| `changes:` patterns | Description |
+|------------------------------|--------------------------------------------------------------------------|
+| `yaml-patterns` | Only creates jobs for YAML-related changes. |
+| `docs-patterns` | Only creates jobs for docs-related changes. |
+| `backstage-patterns` | Only creates jobs for backstage-related changes. |
+| `code-patterns` | Only creates jobs for code-related changes. |
+| `qa-patterns` | Only creates jobs for QA-related changes. |
+| `code-backstage-patterns` | Combination of `code-patterns` and `backstage-patterns`. |
+| `code-qa-patterns` | Combination of `code-patterns` and `qa-patterns`. |
+| `code-backstage-qa-patterns` | Combination of `code-patterns`, `backstage-patterns`, and `qa-patterns`. |
## Directed acyclic graph
@@ -253,17 +154,24 @@ graph RL;
E[review-build-cng];
F[build-qa-image];
G[review-deploy];
- I["karma, jest, webpack-dev-server, static-analysis"];
- I2["karma-foss, jest-foss<br/>(EE default refs only)"];
+ I["karma, jest"];
+ I2["karma-as-if-foss, jest-as-if-foss<br/>(EE default refs only)"];
J["compile-assets pull-push-cache<br/>(master only)"];
- J2["compile-assets pull-push-cache foss<br/>(EE master only)"];
+ J2["compile-assets pull-push-cache as-if-foss<br/>(EE master only)"];
K[compile-assets pull-cache];
- K2["compile-assets pull-cache foss<br/>(EE default refs only)"];
+ K2["compile-assets pull-cache as-if-foss<br/>(EE default refs only)"];
+ U[frontend-fixtures];
+ U2["frontend-fixtures-as-if-foss<br/>(EE default refs only)"];
+ V["webpack-dev-server, static-analysis"];
M[coverage];
+ O[coverage-frontend];
N["pages (master only)"];
Q[package-and-qa];
S["RSpec<br/>(e.g. rspec unit pg9)"]
T[retrieve-tests-metadata];
+ QA["qa:internal, qa:selectors"];
+ QA2["qa:internal-as-if-foss, qa:selectors-as-if-foss<br/>(EE default refs only)"];
+ X["docs lint, code_quality, sast, dependency_scanning, danger-review"];
subgraph "`prepare` stage"
A
@@ -277,21 +185,31 @@ subgraph "`prepare` stage"
T
end
+subgraph "`fixture` stage"
+ U -.-> |needs and depends on| A;
+ U -.-> |needs and depends on| K;
+ U2 -.-> |needs and depends on| A;
+ U2 -.-> |needs and depends on| K2;
+ end
+
subgraph "`test` stage"
D -.-> |needs| A;
- I -.-> |needs and depends on| A;
- I -.-> |needs and depends on| K;
- I2 -.-> |needs and depends on| A;
- I2 -.-> |needs and depends on| K;
+ I -.-> |needs and depends on| U;
+ I2 -.-> |needs and depends on| U2;
L -.-> |needs and depends on| A;
S -.-> |needs and depends on| A;
S -.-> |needs and depends on| K;
S -.-> |needs and depends on| T;
L["db:*, gitlab:setup, graphql-docs-verify, downtime_check"] -.-> |needs| A;
+ V -.-> |needs and depends on| K;
+ X -.-> |needs| T;
+ QA -.-> |needs| T;
+ QA2 -.-> |needs| T;
end
subgraph "`post-test` stage"
M --> |happens after| S
+ O --> |needs `jest`| I
end
subgraph "`review-prepare` stage"
diff --git a/doc/development/profiling.md b/doc/development/profiling.md
index 316273f37b8..04713055117 100644
--- a/doc/development/profiling.md
+++ b/doc/development/profiling.md
@@ -120,3 +120,16 @@ Bullet will log query problems to both the Rails log as well as the Chrome
console.
As a follow up to finding `N+1` queries with Bullet, consider writing a [QueryRecoder test](query_recorder.md) to prevent a regression.
+
+## Settings that impact performance
+
+1. The `development` environment works with hot-reloading enabled by default. This makes Rails check for file changes on every request and creates a potential contention lock, because hot reloading is single-threaded.
+1. The `development` environment can load code lazily once a request is fired, which results in the first request always being slow.
+
+To disable those features for profiling/benchmarking, set the `RAILS_PROFILE` environment variable to `true` before starting GitLab. For example, when using GDK:
+
+- Create a file [`env.runit`](https://gitlab.com/gitlab-org/gitlab-development-kit/-/blob/master/doc/runit.md#modifying-environment-configuration-for-services) in the root GDK directory.
+- Add `export RAILS_PROFILE=true` to your `env.runit` file.
+- Restart GDK with `gdk restart`.
+
+*This environment variable is only applicable in development mode.*
diff --git a/doc/development/prometheus_metrics.md b/doc/development/prometheus_metrics.md
index d6622c72b0d..004b1884bf0 100644
--- a/doc/development/prometheus_metrics.md
+++ b/doc/development/prometheus_metrics.md
@@ -12,7 +12,10 @@ The requirement for adding a new metric is to make each query to have an unique
- group: Response metrics (NGINX Ingress)
metrics:
- title: "Throughput"
- y_label: "Requests / Sec"
+ y_axis:
+ name: "Requests / Sec"
+ format: "number"
+ precision: 2
queries:
- id: response_metrics_nginx_ingress_throughput_status_code
query_range: 'sum(rate(nginx_upstream_responses_total{upstream=~"%{kube_namespace}-%{ci_environment_slug}-.*"}[2m])) by (status_code)'
diff --git a/doc/development/query_recorder.md b/doc/development/query_recorder.md
index 75a8a33e02a..f2c5b8b9848 100644
--- a/doc/development/query_recorder.md
+++ b/doc/development/query_recorder.md
@@ -45,49 +45,73 @@ This could lead to false successes where subsequent "requests" could have querie
## Finding the source of the query
-It may be useful to identify the source of the queries by looking at the call backtrace.
-To enable this, run the specs with the `QUERY_RECORDER_DEBUG` environment variable set. For example:
-
-```shell
-QUERY_RECORDER_DEBUG=1 bundle exec rspec spec/requests/api/projects_spec.rb
-```
-
-This will log calls to QueryRecorder into the `test.log`. For example:
-
-```plaintext
-QueryRecorder SQL: SELECT COUNT(*) FROM "issues" WHERE "issues"."deleted_at" IS NULL AND "issues"."project_id" = $1 AND ("issues"."state" IN ('opened')) AND "issues"."confidential" = $2
- --> /home/user/gitlab/gdk/gitlab/spec/support/query_recorder.rb:19:in `callback'
- --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activesupport-4.2.8/lib/active_support/notifications/fanout.rb:127:in `finish'
- --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activesupport-4.2.8/lib/active_support/notifications/fanout.rb:46:in `block in finish'
- --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activesupport-4.2.8/lib/active_support/notifications/fanout.rb:46:in `each'
- --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activesupport-4.2.8/lib/active_support/notifications/fanout.rb:46:in `finish'
- --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activesupport-4.2.8/lib/active_support/notifications/instrumenter.rb:36:in `finish'
- --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activesupport-4.2.8/lib/active_support/notifications/instrumenter.rb:25:in `instrument'
- --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activerecord-4.2.8/lib/active_record/connection_adapters/abstract_adapter.rb:478:in `log'
- --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activerecord-4.2.8/lib/active_record/connection_adapters/postgresql_adapter.rb:601:in `exec_cache'
- --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activerecord-4.2.8/lib/active_record/connection_adapters/postgresql_adapter.rb:585:in `execute_and_clear'
- --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activerecord-4.2.8/lib/active_record/connection_adapters/postgresql/database_statements.rb:160:in `exec_query'
- --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activerecord-4.2.8/lib/active_record/connection_adapters/abstract/database_statements.rb:356:in `select'
- --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activerecord-4.2.8/lib/active_record/connection_adapters/abstract/database_statements.rb:32:in `select_all'
- --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activerecord-4.2.8/lib/active_record/connection_adapters/abstract/query_cache.rb:68:in `block in select_all'
- --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activerecord-4.2.8/lib/active_record/connection_adapters/abstract/query_cache.rb:83:in `cache_sql'
- --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activerecord-4.2.8/lib/active_record/connection_adapters/abstract/query_cache.rb:68:in `select_all'
- --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activerecord-4.2.8/lib/active_record/relation/calculations.rb:270:in `execute_simple_calculation'
- --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activerecord-4.2.8/lib/active_record/relation/calculations.rb:227:in `perform_calculation'
- --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activerecord-4.2.8/lib/active_record/relation/calculations.rb:133:in `calculate'
- --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activerecord-4.2.8/lib/active_record/relation/calculations.rb:48:in `count'
- --> /home/user/gitlab/gdk/gitlab/app/services/base_count_service.rb:20:in `uncached_count'
- --> /home/user/gitlab/gdk/gitlab/app/services/base_count_service.rb:12:in `block in count'
- --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activesupport-4.2.8/lib/active_support/cache.rb:299:in `block in fetch'
- --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activesupport-4.2.8/lib/active_support/cache.rb:585:in `block in save_block_result_to_cache'
- --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activesupport-4.2.8/lib/active_support/cache.rb:547:in `block in instrument'
- --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activesupport-4.2.8/lib/active_support/notifications.rb:166:in `instrument'
- --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activesupport-4.2.8/lib/active_support/cache.rb:547:in `instrument'
- --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activesupport-4.2.8/lib/active_support/cache.rb:584:in `save_block_result_to_cache'
- --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activesupport-4.2.8/lib/active_support/cache.rb:299:in `fetch'
- --> /home/user/gitlab/gdk/gitlab/app/services/base_count_service.rb:12:in `count'
- --> /home/user/gitlab/gdk/gitlab/app/models/project.rb:1296:in `open_issues_count'
-```
+There are multiple ways to find the source of queries.
+
+1. The `QueryRecorder` `data` attribute stores queries by `file_name:line_number:method_name`.
+ Each entry is a `hash` with the following fields:
+
+ - `count`: the number of times a query from this `file_name:line_number:method_name` was called
+ - `occurrences`: the actual `SQL` of each call
+   - `backtrace`: the stack trace of each call (if either of the two following options was enabled)
+
+ `QueryRecorder#find_query` allows filtering queries by their `file_name:line_number:method_name` and
+ `count` attributes. For example:
+
+ ```ruby
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) { visit_some_page }
+ control.find_query(/.*note.rb.*/, 0, first_only: true)
+ ```
+
+   `QueryRecorder#occurrences_by_line_method` returns an array based on `data`, sorted by `count`.
+
+1. You can output the call backtrace for the specific `QueryRecorder` instance you want
+ by using `ActiveRecord::QueryRecorder.new(query_recorder_debug: true)`. The output
+   will be in `test.log`.
+
+1. With the `QUERY_RECORDER_DEBUG` environment variable set, the call backtrace is output for all tests.
+
+ To enable this, run the specs with the `QUERY_RECORDER_DEBUG` environment variable set. For example:
+
+ ```shell
+ QUERY_RECORDER_DEBUG=1 bundle exec rspec spec/requests/api/projects_spec.rb
+ ```
+
+ This will log calls to QueryRecorder into the `test.log` file. For example:
+
+ ```plaintext
+ QueryRecorder SQL: SELECT COUNT(*) FROM "issues" WHERE "issues"."deleted_at" IS NULL AND "issues"."project_id" = $1 AND ("issues"."state" IN ('opened')) AND "issues"."confidential" = $2
+ --> /home/user/gitlab/gdk/gitlab/spec/support/query_recorder.rb:19:in `callback'
+ --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activesupport-4.2.8/lib/active_support/notifications/fanout.rb:127:in `finish'
+ --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activesupport-4.2.8/lib/active_support/notifications/fanout.rb:46:in `block in finish'
+ --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activesupport-4.2.8/lib/active_support/notifications/fanout.rb:46:in `each'
+ --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activesupport-4.2.8/lib/active_support/notifications/fanout.rb:46:in `finish'
+ --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activesupport-4.2.8/lib/active_support/notifications/instrumenter.rb:36:in `finish'
+ --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activesupport-4.2.8/lib/active_support/notifications/instrumenter.rb:25:in `instrument'
+ --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activerecord-4.2.8/lib/active_record/connection_adapters/abstract_adapter.rb:478:in `log'
+ --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activerecord-4.2.8/lib/active_record/connection_adapters/postgresql_adapter.rb:601:in `exec_cache'
+ --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activerecord-4.2.8/lib/active_record/connection_adapters/postgresql_adapter.rb:585:in `execute_and_clear'
+ --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activerecord-4.2.8/lib/active_record/connection_adapters/postgresql/database_statements.rb:160:in `exec_query'
+ --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activerecord-4.2.8/lib/active_record/connection_adapters/abstract/database_statements.rb:356:in `select'
+ --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activerecord-4.2.8/lib/active_record/connection_adapters/abstract/database_statements.rb:32:in `select_all'
+ --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activerecord-4.2.8/lib/active_record/connection_adapters/abstract/query_cache.rb:68:in `block in select_all'
+ --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activerecord-4.2.8/lib/active_record/connection_adapters/abstract/query_cache.rb:83:in `cache_sql'
+ --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activerecord-4.2.8/lib/active_record/connection_adapters/abstract/query_cache.rb:68:in `select_all'
+ --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activerecord-4.2.8/lib/active_record/relation/calculations.rb:270:in `execute_simple_calculation'
+ --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activerecord-4.2.8/lib/active_record/relation/calculations.rb:227:in `perform_calculation'
+ --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activerecord-4.2.8/lib/active_record/relation/calculations.rb:133:in `calculate'
+ --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activerecord-4.2.8/lib/active_record/relation/calculations.rb:48:in `count'
+ --> /home/user/gitlab/gdk/gitlab/app/services/base_count_service.rb:20:in `uncached_count'
+ --> /home/user/gitlab/gdk/gitlab/app/services/base_count_service.rb:12:in `block in count'
+ --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activesupport-4.2.8/lib/active_support/cache.rb:299:in `block in fetch'
+ --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activesupport-4.2.8/lib/active_support/cache.rb:585:in `block in save_block_result_to_cache'
+ --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activesupport-4.2.8/lib/active_support/cache.rb:547:in `block in instrument'
+ --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activesupport-4.2.8/lib/active_support/notifications.rb:166:in `instrument'
+ --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activesupport-4.2.8/lib/active_support/cache.rb:547:in `instrument'
+ --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activesupport-4.2.8/lib/active_support/cache.rb:584:in `save_block_result_to_cache'
+ --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activesupport-4.2.8/lib/active_support/cache.rb:299:in `fetch'
+ --> /home/user/gitlab/gdk/gitlab/app/services/base_count_service.rb:12:in `count'
+ --> /home/user/gitlab/gdk/gitlab/app/models/project.rb:1296:in `open_issues_count'
+ ```
## See also
diff --git a/doc/development/rails_initializers.md b/doc/development/rails_initializers.md
new file mode 100644
index 00000000000..6473baf58d4
--- /dev/null
+++ b/doc/development/rails_initializers.md
@@ -0,0 +1,16 @@
+# Rails initializers
+
+By default, Rails loads Zeitwerk after the initializers in `config/initializers` are loaded.
+Autoloading before Zeitwerk is loaded is now deprecated, but because we use a lot of autoloaded
+constants in our initializers, we had to move the loading of Zeitwerk earlier than these
+initializers.
+
+A side-effect of this is that in the initializers, `config.autoload_paths` is already frozen.
+
+To run an initializer before Zeitwerk is loaded, put it in `config/initializers_before_autoloader`.
+Ruby files in this folder are loaded in alphabetical order just like the default Rails initializers.
+
+Some examples where you would need to do this are:
+
+1. Modifying Rails' `config.autoload_paths`.
+1. Changing configuration that Zeitwerk uses, for example, inflections.
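+
+For illustration, a hypothetical initializer of this kind could look like the following (the file name and the inflection it registers are made up for this example):
+
+```ruby
+# config/initializers_before_autoloader/000_example_inflections.rb
+#
+# Hypothetical example: this file runs before Zeitwerk is set up, so it can
+# safely change how constant names are inflected from file names.
+ActiveSupport::Inflector.inflections(:en) do |inflect|
+  inflect.acronym 'GraphQL'
+end
+```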
diff --git a/doc/development/redis.md b/doc/development/redis.md
index a4a87155f5a..a8b7b84bb65 100644
--- a/doc/development/redis.md
+++ b/doc/development/redis.md
@@ -8,7 +8,7 @@ GitLab uses [Redis](https://redis.io) for three distinct purposes:
Every application process is configured to use the same Redis servers, so they
can be used for inter-process communication in cases where [PostgreSQL](sql.md)
-is less appropriate, for example, transient state or data that is written much
+is less appropriate. For example, transient state or data that is written much
more often than it is read.
If [Geo](geo.md) is enabled, each Geo node gets its own, independent Redis
diff --git a/doc/development/reference_processing.md b/doc/development/reference_processing.md
new file mode 100644
index 00000000000..ef1f2f5269c
--- /dev/null
+++ b/doc/development/reference_processing.md
@@ -0,0 +1,157 @@
+---
+description: 'An introduction to reference parsers and reference filters, and a guide to their implementation.'
+---
+
+# Reference processing
+
+[GitLab Flavored Markdown](../user/markdown.md) includes the ability to process
+references to a range of GitLab domain objects. This is implemented by two
+abstractions in the `Banzai` pipeline: `ReferenceFilter` and `ReferenceParser`.
+This page explains what these are, how they are used, and how you would
+implement a new filter/parser pair.
+
+NOTE: **Note:**
+Each `ReferenceFilter` must have a corresponding `ReferenceParser`.
+
+It is possible to share reference parsers between filters - if two filters find
+and link the same type of objects (as specified by the `data-reference-type`
+attribute), then we only need one reference parser for that type of domain
+object.
+
+## Reference filters
+
+The first way that references are handled is by reference filters. These are
+the tools that identify short-code and URI references from markup documents and
+transform them into structured links to the resources they represent.
+
+For example, the class
+[`Banzai::Filter::IssueReferenceFilter`](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/banzai/filter/issue_reference_filter.rb)
+is responsible for handling references to issues, such as
+`gitlab-org/gitlab#123` and `https://gitlab.com/gitlab-org/gitlab/issues/200048`.
+
+All reference filters are instances of [`HTML::Pipeline::Filter`](https://www.rubydoc.info/github/jch/html-pipeline/v1.11.0/HTML/Pipeline/Filter),
+and inherit (often indirectly) from [`Banzai::Filter::ReferenceFilter`](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/banzai/filter/reference_filter.rb).
+
+`HTML::Pipeline::Filter` has a simple interface consisting of `#call`, a void
+method that mutates the current document. `ReferenceFilter` provides methods
+that make defining suitable `#call` methods easier. Most reference filters,
+however, do not inherit from either of these classes directly, but from
+[`AbstractReferenceFilter`](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/banzai/filter/abstract_reference_filter.rb),
+which provides a higher-level interface.
+
+Subclasses of `AbstractReferenceFilter` generally do not override `#call`; instead,
+a minimum implementation of `AbstractReferenceFilter` should define:
+
+- `.reference_type`: The type of domain object.
+
+ This is usually a keyword, and is used to set the `data-reference-type` attribute
+ on the generated link, and is an important part of the interaction with the
+ corresponding `ReferenceParser` (see below).
+
+- `.object_class`: a reference to the class of the objects a filter refers to.
+
+ This is used to:
+
+ - Find the regular expressions used to find references. The class should
+ include [`Referable`](https://gitlab.com/gitlab-org/gitlab/blob/master/app/models/concerns/referable.rb)
+ and thus define two regular expressions: `.link_reference_pattern` and
+ `.reference_pattern`, both of which should contain a named capture group
+ named the value of `ReferenceFilter.object_sym`.
+ - Compute the `.object_name`.
+ - Compute the `.object_sym` (the group name in the reference patterns).
+
+- `.parse_symbol(string)`: parse the text value to an object identifier (`#to_i` by default).
+- `#record_identifier(record)`: the inverse of `.parse_symbol`, that is, transform a domain object to an identifier (`#id` by default).
+- `#url_for_object(object, parent_object)`: generate the URL for a domain object.
+- `#find_object(parent_object, id)`: given the parent (usually a [`Project`](https://gitlab.com/gitlab-org/gitlab/blob/master/app/models/project.rb))
+  and an identifier, find the object. For example, in a reference filter for
+ merge requests, this might be `project.merge_requests.where(iid: iid)`.
+
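+Putting these together, a minimal filter might look roughly like the sketch below. The `Foo` domain object, its URL helper, and the filter itself are hypothetical; an existing filter such as `IssueReferenceFilter` remains the authoritative reference.
+
+```ruby
+# Hypothetical sketch of a minimal AbstractReferenceFilter subclass.
+# The Foo model, its association, and the URL helper are made up for illustration.
+module Banzai
+  module Filter
+    class FooReferenceFilter < AbstractReferenceFilter
+      self.reference_type = :foo
+
+      def self.object_class
+        Foo
+      end
+
+      def url_for_object(foo, project)
+        Gitlab::Routing.url_helpers.project_foo_url(project, foo)
+      end
+
+      def find_object(project, id)
+        project.foos.find_by(id: id)
+      end
+    end
+  end
+end
+```
+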
+### Performance
+
+This default implementation is not very efficient, because we need to call
+`#find_object` for each reference, which may require issuing a DB query every
+time. For this reason, most reference filter implementations will instead use an
+optimization included in `AbstractReferenceFilter`:
+
+> `AbstractReferenceFilter` provides a lazily initialized value
+> `#records_per_parent`, which is a mapping from parent object to a collection
+> of domain objects.
+
+To use this mechanism, the reference filter must implement the
+`#parent_records(parent, set_of_identifiers)` method, which must return an
+enumerable of domain objects.
+
+This allows such classes to define `#find_object` (as
+[`IssuableReferenceFilter`](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/banzai/filter/issuable_reference_filter.rb)
+does) as:
+
+```ruby
+def find_object(parent, iid)
+ records_per_parent[parent][iid]
+end
+```
+
+This makes the number of queries linear in the number of projects. We only need
+to implement the `parent_records` method when we call `records_per_parent` in our
+reference filter.
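+
+A corresponding `parent_records` implementation could be sketched as follows; the `foos` association and the `iid` column are assumptions made for this example:
+
+```ruby
+# Hypothetical sketch: load all referenced records for one parent in a single query.
+def parent_records(parent, ids)
+  parent.foos.where(iid: ids.to_a)
+end
+```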
+
+## Reference parsers
+
+In a number of cases, as a performance optimization, we render Markdown to HTML
+once, cache the result, and then present it to users from the cached value. For
+example, this happens for notes, issue descriptions, and merge request
+descriptions. A consequence of this is that a rendered document might refer to
+a resource that some subsequent readers should not be able to see.
+
+For example, you might create an issue, and refer to a confidential issue `#1234`,
+which you have access to. This is rendered in the cached HTML as a link to
+that confidential issue, with data attributes containing its ID, the ID of the
+project and other confidential data. A later reader who has access to your issue
+might not have permission to read issue `#1234`, and so we need to redact
+these sensitive pieces of data. This is what `ReferenceParser` classes do.
+
+A reference parser is linked to the object that it handles by the link
+advertising this relationship in the `data-reference-type` attribute (set by the
+reference filter). This is used by the
+[`ReferenceRedactor`](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/banzai/reference_redactor.rb)
+to compute which nodes should be visible to users:
+
+```ruby
+def nodes_visible_to_user(nodes)
+ per_type = Hash.new { |h, k| h[k] = [] }
+ visible = Set.new
+
+ nodes.each do |node|
+ per_type[node.attr('data-reference-type')] << node
+ end
+
+ per_type.each do |type, nodes|
+ parser = Banzai::ReferenceParser[type].new(context)
+
+ visible.merge(parser.nodes_visible_to_user(user, nodes))
+ end
+
+ visible
+end
+```
+
+The key part here is `Banzai::ReferenceParser[type]`, which is used to look up
+the correct reference parser for each type of domain object. This requires that
+each reference parser must:
+
+- Be placed in the `Banzai::ReferenceParser` namespace.
+- Implement the `.nodes_visible_to_user(user, nodes)` method.
+
+In practice, all reference parsers inherit from [`BaseParser`](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/banzai/reference_parser/base_parser.rb), and are implemented by defining:
+
+- `.reference_type`, which should equal `ReferenceFilter.reference_type`.
+- And by implementing one or more of:
+  - `#nodes_visible_to_user(user, nodes)` for the finest-grained control.
+  - `#can_read_reference?`, needed if `nodes_visible_to_user` is not overridden.
+  - `#references_relation`, an ActiveRecord relation used to look up objects by ID.
+ - `#nodes_user_can_reference(user, nodes)` to filter nodes directly.
+
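+For illustration, a minimal parser for a hypothetical `foo` reference type might look like the sketch below; the relation, the ability name, and the exact `can_read_reference?` signature are assumptions, not an existing parser:
+
+```ruby
+# Hypothetical sketch of a minimal BaseParser subclass.
+module Banzai
+  module ReferenceParser
+    class FooParser < BaseParser
+      self.reference_type = :foo
+
+      def references_relation
+        Foo
+      end
+
+      private
+
+      # Assumed signature; checks a made-up :read_foo ability for each record.
+      def can_read_reference?(user, foo, node)
+        can?(user, :read_foo, foo)
+      end
+    end
+  end
+end
+```
+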
+NOTE: **Note:**
+A failure to implement this class for each reference type means that the
+application will raise exceptions during Markdown processing.
diff --git a/doc/development/scalability.md b/doc/development/scalability.md
index 3a9f5387b11..b3387526ccd 100644
--- a/doc/development/scalability.md
+++ b/doc/development/scalability.md
@@ -67,7 +67,7 @@ kind of partitioning.
Sharding is likely more difficult and will require significant changes
to the schema and application. For example, if we have to store projects
in many different databases, we immediately run into the question, "How
-can we retrieve data across different projects?" One answer to this is
+can we retrieve data across different projects?" One answer to this is
to abstract data access into API calls that abstract the database from
the application, but this is a significant amount of work.
diff --git a/doc/development/serializing_data.md b/doc/development/serializing_data.md
index 37332c20147..af791f5a4e0 100644
--- a/doc/development/serializing_data.md
+++ b/doc/development/serializing_data.md
@@ -20,7 +20,7 @@ alternative.
## Serialized Data Is Less Powerful
When using a relational database you have the ability to query individual
-fields, change the schema, index data and so forth. When you use serialized data
+fields, change the schema, index data, and so forth. When you use serialized data
all of that becomes either very difficult or downright impossible. While
PostgreSQL does offer the ability to query JSON fields it is mostly meant for
very specialized use cases, and not for more general use. If you use YAML in
diff --git a/doc/development/shell_commands.md b/doc/development/shell_commands.md
index b8952cae33e..37d851f8b7e 100644
--- a/doc/development/shell_commands.md
+++ b/doc/development/shell_commands.md
@@ -215,4 +215,4 @@ When importing, GitLab would execute the following command, passing the `import_
git clone file://git:/tmp/lol
```
-Git would simply ignore the `git:` part, interpret the path as `file:///tmp/lol` and import the repository into the new project, in turn potentially giving the attacker access to any repository in the system, whether private or not.
+Git would simply ignore the `git:` part, interpret the path as `file:///tmp/lol`, and import the repository into the new project. This action could potentially give the attacker access to any repository in the system, whether private or not.
diff --git a/doc/development/sidekiq_debugging.md b/doc/development/sidekiq_debugging.md
index 2b3a9481b93..b6a11dd813d 100644
--- a/doc/development/sidekiq_debugging.md
+++ b/doc/development/sidekiq_debugging.md
@@ -2,20 +2,5 @@
## Log arguments to Sidekiq jobs
-If you want to see what arguments are being passed to Sidekiq jobs you can set
-the `SIDEKIQ_LOG_ARGUMENTS` [environment variable](https://docs.gitlab.com/omnibus/settings/environment-variables.html) to `1` (true).
-
-Example:
-
-```
-gitlab_rails['env'] = {"SIDEKIQ_LOG_ARGUMENTS" => "1"}
-```
-
-Please note: It is not recommend to enable this setting in production because some
-Sidekiq jobs (such as sending a password reset email) take secret arguments (for
-example the password reset token).
-
-When using [Sidekiq JSON logging](../administration/logs.md#sidekiqlog),
-arguments logs are limited to a maximum size of 10 kilobytes of text;
-any arguments after this limit will be discarded and replaced with a
-single argument containing the string `"..."`.
+This content has been moved to the
+[Troubleshooting Sidekiq docs](../administration/troubleshooting/sidekiq.md).
diff --git a/doc/development/sidekiq_style_guide.md b/doc/development/sidekiq_style_guide.md
index 8b906e60dc2..1590c7a6d44 100644
--- a/doc/development/sidekiq_style_guide.md
+++ b/doc/development/sidekiq_style_guide.md
@@ -64,7 +64,87 @@ the extra jobs will take resources away from jobs from workers that were already
there, if the resources available to the Sidekiq process handling the namespace
are not adjusted appropriately.
-## Latency Sensitive Jobs
+## Idempotent Jobs
+
+It's known that a job can fail for multiple reasons, such as network outages or bugs.
+In order to address this, Sidekiq has a built-in retry mechanism that is
+used by default by most workers within GitLab.
+
+It's expected that a job can run again after a failure without major side-effects for the
+application or users, which is why Sidekiq encourages
+jobs to be [idempotent and transactional](https://github.com/mperham/sidekiq/wiki/Best-Practices#2-make-your-job-idempotent-and-transactional).
+
+As a general rule, a worker can be considered idempotent if:
+
+- It can safely run multiple times with the same arguments.
+- Application side-effects are expected to happen only once
+ (or side-effects of a second run are not impactful).
+
+A good example of that would be a cache expiration worker.
+
+### Ensuring a worker is idempotent
+
+Make sure the worker tests pass using the following shared example:
+
+```ruby
+include_examples 'an idempotent worker' do
+ it 'marks the MR as merged' do
+ # Using subject inside this block will process the job multiple times
+ subject
+
+ expect(merge_request.state).to eq('merged')
+ end
+end
+```
+
+Use the `perform_multiple` method directly instead of `job.perform` (this
+helper method is automatically included for workers).
+
+### Declaring a worker as idempotent
+
+```ruby
+class IdempotentWorker
+ include ApplicationWorker
+
+ # Declares a worker is idempotent and can
+ # safely run multiple times.
+ idempotent!
+
+ # ...
+end
+```
+
+It's encouraged to only have the `idempotent!` call in the top-most worker class, even if
+the `perform` method is defined in another class or module.
+
+NOTE: **Note:**
+A cop will fail if the worker class is not marked as idempotent.
+Consider skipping the cop if you're not confident your job can safely run multiple times.
+
+## Job urgency
+
+Jobs can have an `urgency` attribute set, which can be `:high`,
+`:low`, or `:throttled`. These have the following targets:
+
+| **Urgency** | **Queue Scheduling Target** | **Execution Latency Requirement** |
+|--------------|-----------------------------|------------------------------------|
+| `:high` | 100 milliseconds | p50 of 1 second, p99 of 10 seconds |
+| `:low` | 1 minute | Maximum run time of 1 hour |
+| `:throttled` | None | Maximum run time of 1 hour |
+
+To set a job's urgency, use the `urgency` class method:
+
+```ruby
+class HighUrgencyWorker
+ include ApplicationWorker
+
+ urgency :high
+
+ # ...
+end
+```
+
+### Latency sensitive jobs
If a large number of background jobs get scheduled at once, queueing of jobs may
occur while jobs wait for a worker node to become available. This is normal
@@ -83,7 +163,7 @@ of these jobs include:
When these jobs are delayed, the user may perceive the delay as a bug: for
example, they may push a branch and then attempt to create a merge request for
that branch, but be told in the UI that the branch does not exist. We deem these
-jobs to be `latency_sensitive`.
+jobs to be `urgency :high`.
Extra effort is made to ensure that these jobs are started within a very short
period of time after being scheduled. However, in order to ensure throughput,
@@ -93,35 +173,15 @@ these jobs also have very strict execution duration requirements:
1. 99% of jobs should complete within 10 seconds.
If a worker cannot meet these expectations, then it cannot be treated as a
-`latency_sensitive` worker: consider redesigning the worker, or splitting the
-work between two different workers, one with `latency_sensitive` code that
-executes quickly, and the other with non-`latency_sensitive`, which has no
+`urgency :high` worker: consider redesigning the worker, or splitting the
+work between two different workers, one with `urgency :high` code that
+executes quickly, and the other with `urgency :low`, which has no
execution latency requirements (but also has lower scheduling targets).
-This can be summed up in the following table:
-
-| **Latency Sensitivity** | **Queue Scheduling Target** | **Execution Latency Requirement** |
-|-------------------------|-----------------------------|-------------------------------------|
-| Not `latency_sensitive` | 1 minute | Maximum run time of 1 hour |
-| `latency_sensitive` | 100 milliseconds | p50 of 1 second, p99 of 10 seconds |
-
-To mark a worker as being `latency_sensitive`, use the
-`latency_sensitive_worker!` attribute, as shown in this example:
-
-```ruby
-class LatencySensitiveWorker
- include ApplicationWorker
-
- latency_sensitive_worker!
-
- # ...
-end
-```
-
## Jobs with External Dependencies
Most background jobs in the GitLab application communicate with other GitLab
-services, eg Postgres, Redis, Gitaly and Object Storage. These are considered
+services. For example, Postgres, Redis, Gitaly, and Object Storage. These are considered
to be "internal" dependencies for a job.
However, some jobs will be dependent on external services in order to complete
@@ -137,7 +197,7 @@ the background processing cluster in several ways:
therefore we cannot guarantee the execution latencies on these jobs. Since we
cannot guarantee execution latency, we cannot ensure throughput and
therefore, in high-traffic environments, we need to ensure that jobs with
- external dependencies are separated from `latency_sensitive` jobs, to ensure
+ external dependencies are separated from high urgency jobs, to ensure
throughput on those queues.
1. Errors in jobs with external dependencies have higher alerting thresholds as
there is a likelihood that the cause of the error is external.
@@ -155,7 +215,7 @@ class ExternalDependencyWorker
end
```
-NOTE: **Note:** Note that a job cannot be both latency sensitive and have
+NOTE: **Note:** A job cannot both be high urgency and have
external dependencies.
## CPU-bound and Memory-bound Workers
@@ -167,7 +227,7 @@ Most workers tend to spend most of their time blocked, wait on network responses
from other services such as Redis, Postgres and Gitaly. Since Sidekiq is a
multithreaded environment, these jobs can be scheduled with high concurrency.
-Some workers, however, spend large amounts of time _on-cpu_ running logic in
+Some workers, however, spend large amounts of time _on-CPU_ running logic in
Ruby. Ruby MRI does not support true multithreading - it relies on the
[GIL](https://thoughtbot.com/blog/untangling-ruby-threads#the-global-interpreter-lock)
to greatly simplify application development by only allowing one section of Ruby
@@ -187,13 +247,17 @@ performance.
Likewise, if a worker uses large amounts of memory, we can run these on a
bespoke low concurrency, high memory fleet.
-Note that Memory-bound workers create heavy GC workloads, with pauses of
+Note that memory-bound workers create heavy GC workloads, with pauses of
10-50ms. This will have an impact on the latency requirements for the
-worker. For this reason, `memory` bound, `latency_sensitive` jobs are not
+worker. For this reason, `memory` bound, `urgency :high` jobs are not
permitted and will fail CI. In general, `memory` bound workers are
discouraged, and alternative approaches to processing the work should be
considered.
+If a worker needs large amounts of both memory and CPU time, it should
+be marked as memory-bound, due to the above restriction on high urgency
+memory-bound workers.
+
## Declaring a Job as CPU-bound
This example shows how to declare a job as being CPU-bound.
@@ -327,7 +391,7 @@ requests. We do this to avoid incorrect metadata when other jobs are
scheduled from the cron-worker.
Cron-Workers themselves run instance wide, so they aren't scoped to
-users, namespaces, projects or other resources that should be added to
+users, namespaces, projects, or other resources that should be added to
the context.
However, they often schedule other jobs that _do_ require context.
diff --git a/doc/development/testing_guide/best_practices.md b/doc/development/testing_guide/best_practices.md
index 7eb5bb21be8..97ebe9bd5a7 100644
--- a/doc/development/testing_guide/best_practices.md
+++ b/doc/development/testing_guide/best_practices.md
@@ -52,6 +52,13 @@ bundle exec guard
When using spring and guard together, use `SPRING=1 bundle exec guard` instead to make use of spring.
+Use [Factory Doctor](https://test-prof.evilmartians.io/#/factory_doctor.md) to find cases of unnecessary database manipulation, which can cause slow tests.
+
+```shell
+# run test for path
+FDOC=1 bin/rspec spec/[path]/[to]/[spec].rb
+```
+
### General guidelines
- Use a single, top-level `describe ClassName` block.
diff --git a/doc/development/testing_guide/end_to_end/best_practices.md b/doc/development/testing_guide/end_to_end/best_practices.md
index 97daf4885ca..e1807f2b53f 100644
--- a/doc/development/testing_guide/end_to_end/best_practices.md
+++ b/doc/development/testing_guide/end_to_end/best_practices.md
@@ -10,7 +10,7 @@ But if the login feature is already covered with end-to-end tests through the GU
Let's say that, on average, the process to perform a successful login through the GUI takes 2 seconds.
-Now, realize that almost all tests need the user to be logged in, and that we need every test to run in isolation, meaning that tests cannot interfere with each other. This would mean that for every test the user needs to log in, and "waste 2 seconds".
+Now, realize that almost all tests need the user to be logged in, and that we need every test to run in isolation, meaning that tests cannot interfere with each other. This would mean that for every test the user needs to log in, and "waste 2 seconds".
Now, multiply the number of tests per 2 seconds, and as your test suite grows, the time to run it grows with it, and this is not sustainable.
diff --git a/doc/development/testing_guide/end_to_end/index.md b/doc/development/testing_guide/end_to_end/index.md
index b2ecffc593c..b6f6faa6052 100644
--- a/doc/development/testing_guide/end_to_end/index.md
+++ b/doc/development/testing_guide/end_to_end/index.md
@@ -88,6 +88,27 @@ subgraph "gitlab-qa pipeline"
Please note, we plan to [add more specific information](https://gitlab.com/gitlab-org/quality/team-tasks/issues/156)
about the tests included in each job/scenario that runs in `gitlab-qa`.
+#### With Pipeline for Merged Results
+
+In a Pipeline for Merged Results, the pipeline runs on a new ref that contains the merge result of the source and target branch.
+However, this ref is not available to the `gitlab-qa` pipeline.
+
+For this reason, the end-to-end tests on a Pipeline for Merged Results would use the head of the merge request source branch.
+
+```mermaid
+graph LR
+
+A["a1b1c1 - branch HEAD (CI_MERGE_REQUEST_SOURCE_BRANCH_SHA)"]
+B["x1y1z1 - master HEAD"]
+C["d1e1f1 - merged results (CI_COMMIT_SHA)"]
+
+A --> C
+B --> C
+
+A --> E["E2E tests"]
+C --> D["Pipeline for merged results"]
+ ```
+
##### Running custom tests
The [existing scenarios](https://gitlab.com/gitlab-org/gitlab-qa/blob/master/docs/what_tests_can_be_run.md)
@@ -145,6 +166,10 @@ environment, you can use the [GitLab Development Kit (GDK)](https://gitlab.com/g
Please refer to the instructions in the [QA README](https://gitlab.com/gitlab-org/gitlab/tree/master/qa/README.md#how-can-i-use-it)
and the section below.
+### Running tests that require special setup
+
+Learn how to perform [tests that require special setup or consideration to run on your local environment](running_tests_that_require_special_setup.md).
+
## How do I write tests?
In order to write new tests, you first need to learn more about GitLab QA
@@ -162,6 +187,7 @@ Continued reading:
- [Best Practices](best_practices.md)
- [Testing with feature flags](feature_flags.md)
- [Flows](flows.md)
+- [RSpec metadata/tags](rspec_metadata_tests.md)
## Where can I ask for help?
diff --git a/doc/development/testing_guide/end_to_end/quick_start_guide.md b/doc/development/testing_guide/end_to_end/quick_start_guide.md
index be00129a2bc..e1024eace40 100644
--- a/doc/development/testing_guide/end_to_end/quick_start_guide.md
+++ b/doc/development/testing_guide/end_to_end/quick_start_guide.md
@@ -2,7 +2,12 @@
In this tutorial, you will find different examples, and the steps involved, in the creation of end-to-end (_e2e_) tests for GitLab CE and GitLab EE, using GitLab QA.
-> When referring to end-to-end tests in this document, this means testing a specific feature end-to-end, such as a user logging in, the creation of a project, the management of labels, breaking down epics into sub-epics and issues, etc.
+When referring to end-to-end tests in this document, this means testing a specific feature end-to-end such as:
+
+- A user logging in.
+- The creation of a project.
+- The management of labels.
+- Breaking down epics into sub-epics and issues.
## Important information before we start writing tests
@@ -34,7 +39,7 @@ Sometimes you may notice that there is already good coverage in lower test level
- Take a look at the [How to test at the correct level?](https://gitlab.com/gitlab-org/gitlab/blob/master/doc/development/testing_guide/testing_levels.md#how-to-test-at-the-correct-level) section of the [Testing levels](https://gitlab.com/gitlab-org/gitlab/blob/master/doc/development/testing_guide/testing_levels.md) document
-- Look into the frequency in which such a feature is changed (_Stable features that don't change very often might not be worth covering with end-to-end tests if they're already covered in lower levels_)
+- Look into the frequency in which such a feature is changed (_Stable features that don't change very often might not be worth covering with end-to-end tests if they're already covered in lower levels_)
- Finally, discuss with the developer(s) involved in developing the feature and the tests themselves, to get their feeling
@@ -209,7 +214,11 @@ First, we remove the duplication of strings by defining the global variables `@i
Then, by creating a reusable `select_label_and_refresh` method, we remove the code duplication of this action, and later we can move this method to a Page Object class that will be created for easier maintenance purposes.
-> Notice that the reusable method is created at the bottom of the file. The reason for that is that reading the code should be similar to reading a newspaper, where high-level information is at the top, like the title and summary of the news, while low level, or more specific information, is at the bottom (this helps readability).
+Notice that the reusable method is created at the bottom of the file, as sketched below.
+This helps readability: reading the code should be similar to reading a newspaper, where:
+
+- High-level information is at the top, like the title and summary of the news.
+- Low level, or more specific information, is at the bottom.
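+
+A rough sketch of that layout (the names are illustrative, not the actual tutorial code,
+and Capybara's `page` helper is assumed):
+
+```ruby
+describe 'Issue filtering by label' do
+  # High-level: the example reads like a summary of the behavior.
+  it 'filters issues by the chosen label' do
+    select_label_and_refresh(@label)
+
+    expect(page).to have_content(@issue_title)
+  end
+
+  # Low-level: the reusable helper sits at the bottom of the file.
+  def select_label_and_refresh(label)
+    # Select the given label in the UI, then refresh the page to apply the filter.
+  end
+end
+```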
### 5. Tests' pre-conditions using resources and Page Objects
@@ -353,7 +362,7 @@ You can think of [Resources] as anything that can be created on GitLab CE or EE,
With that in mind, resources can be a project, an epic, an issue, a label, a commit, etc.
-As you saw in the tests' pre-conditions and the optimization sections, we're already creating some of these resources, and we are doing that by calling the `fabricate_via_api!` method.
+As you saw in the tests' pre-conditions and the optimization sections, we're already creating some of these resources. We are doing that by calling the `fabricate_via_api!` method.
> We could be using the `fabricate!` method instead, which would use the `fabricate_via_api!` method if it exists, and fallback to GUI fabrication otherwise, but we recommend being explicit to make it clear what the test does. Also, we always recommend fabricating resources via API since this makes tests faster and more reliable.
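+
+As a sketch, fabricating a project via the API as a test pre-condition can look
+something like this (the attribute value is illustrative):
+
+```ruby
+# Creates the project through the API rather than the GUI, which is faster and more reliable.
+project = Resource::Project.fabricate_via_api! do |project|
+  project.name = 'awesome-project'
+end
+```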
diff --git a/doc/development/testing_guide/end_to_end/rspec_metadata_tests.md b/doc/development/testing_guide/end_to_end/rspec_metadata_tests.md
new file mode 100644
index 00000000000..4f0e506a964
--- /dev/null
+++ b/doc/development/testing_guide/end_to_end/rspec_metadata_tests.md
@@ -0,0 +1,15 @@
+# RSpec metadata for end-to-end tests
+
+This is a partial list of the [RSpec metadata](https://relishapp.com/rspec/rspec-core/docs/metadata/user-defined-metadata)
+(a.k.a. tags) that are used in our end-to-end tests.
+
+<!-- Please keep the tags in alphabetical order -->
+
+| Tag | Description |
+|-----|-------------|
+| `:elasticsearch` | The test requires an Elasticsearch service. It is used by the [instance-level scenario](https://gitlab.com/gitlab-org/gitlab-qa#definitions) [`Test::Integration::Elasticsearch`](https://gitlab.com/gitlab-org/gitlab/-/blob/72b62b51bdf513e2936301cb6c7c91ec27c35b4d/qa/qa/ee/scenario/test/integration/elasticsearch.rb) to include only tests that require Elasticsearch. |
+| `:kubernetes` | The test includes a GitLab instance that is configured to be run behind an SSH tunnel, allowing a TLS-accessible GitLab. This test will also include provisioning of at least one Kubernetes cluster to test against. *This tag is often paired with `:orchestrated`.* |
+| `:orchestrated` | The GitLab instance under test may be [configured by `gitlab-qa`](https://gitlab.com/gitlab-org/gitlab-qa/-/blob/master/docs/what_tests_can_be_run.md#orchestrated-tests) to be different to the default GitLab configuration, or `gitlab-qa` may launch additional services in separate docker containers, or both. Tests tagged with `:orchestrated` are excluded when testing environments where we can't dynamically modify GitLab's configuration (for example, Staging). |
+| `:quarantine` | The test has been [quarantined](https://about.gitlab.com/handbook/engineering/quality/guidelines/debugging-qa-test-failures/#quarantining-tests), will run in a separate job that only includes quarantined tests, and is allowed to fail. The test will be skipped in its regular job so that if it fails it will not hold up the pipeline. |
+| `:reliable` | The test has been [promoted to a reliable test](https://about.gitlab.com/handbook/engineering/quality/guidelines/reliable-tests/#promoting-an-existing-test-to-reliable) meaning it passes consistently in all pipelines, including merge requests. |
+| `:requires_admin` | The test requires an admin account. Tests with the tag are excluded when run against Canary and Production environments. |
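+
+A hypothetical example of how these tags can be applied (the spec itself is illustrative):
+
+```ruby
+# Metadata tags are passed alongside the description of the example group.
+RSpec.describe 'Instance-wide search', :orchestrated, :elasticsearch, :requires_admin do
+  it 'finds code in a project' do
+    # ...
+  end
+end
+```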
diff --git a/doc/development/testing_guide/end_to_end/running_tests_that_require_special_setup.md b/doc/development/testing_guide/end_to_end/running_tests_that_require_special_setup.md
new file mode 100644
index 00000000000..f360226d922
--- /dev/null
+++ b/doc/development/testing_guide/end_to_end/running_tests_that_require_special_setup.md
@@ -0,0 +1,50 @@
+# Running tests that require special setup
+
+## Jenkins spec
+
+The [`jenkins_build_status_spec`](https://gitlab.com/gitlab-org/gitlab/blob/163c8a8c814db26d11e104d1cb2dcf02eb567dbe/qa/qa/specs/features/ee/browser_ui/3_create/jenkins/jenkins_build_status_spec.rb) spins up a Jenkins instance in a docker container based on an image stored in the [GitLab-QA container registry](https://gitlab.com/gitlab-org/gitlab-qa/container_registry).
+The docker image it uses is preconfigured with some base data and plugins.
+The test then configures the GitLab plugin in Jenkins with the URL of the GitLab instance that will be used
+to run the tests. Unfortunately, the GitLab Jenkins plugin does not accept ports, so `http://localhost:3000` would
+not be accepted. Therefore, we need to run GitLab on port 80 or inside a docker container.
+
+To start a docker container for GitLab based on the nightly image:
+
+```shell
+docker run \
+ --publish 80:80 \
+ --name gitlab \
+ --hostname localhost \
+ gitlab/gitlab-ee:nightly
+```
+
+To run the tests from the `/qa` directory:
+
+```shell
+CHROME_HEADLESS=false bin/qa Test::Instance::All http://localhost -- qa/specs/features/ee/browser_ui/3_create/jenkins/jenkins_build_status_spec.rb
+```
+
+The test will automatically spin up a docker container for Jenkins and tear it down once the test completes.
+
+However, if you need to run Jenkins manually outside of the tests, use this command:
+
+```shell
+docker run \
+ --hostname localhost \
+ --name jenkins-server \
+ --env JENKINS_HOME=jenkins_home \
+ --publish 8080:8080 \
+ registry.gitlab.com/gitlab-org/gitlab-qa/jenkins-gitlab:version1
+```
+
+Jenkins will be available on `http://localhost:8080`.
+
+The admin username is `admin` and the password is `password`.
+
+It is worth noting that this is not an orchestrated test. It is [tagged with the `:orchestrated` meta](https://gitlab.com/gitlab-org/gitlab/blob/163c8a8c814db26d11e104d1cb2dcf02eb567dbe/qa/qa/specs/features/ee/browser_ui/3_create/jenkins/jenkins_build_status_spec.rb#L5)
+only to prevent it from running in the pipelines for live environments such as Staging.
+
+### Troubleshooting
+
+If the Jenkins docker container exits without providing any information in the logs, try increasing the memory used by
+the Docker Engine.
diff --git a/doc/development/testing_guide/flaky_tests.md b/doc/development/testing_guide/flaky_tests.md
index 9ec0a8e803f..ef8676ddf32 100644
--- a/doc/development/testing_guide/flaky_tests.md
+++ b/doc/development/testing_guide/flaky_tests.md
@@ -44,9 +44,8 @@ On our CI, we use [rspec-retry] to automatically retry a failing example a few
times (see [`spec/spec_helper.rb`] for the precise retries count).
We also use a home-made `RspecFlaky::Listener` listener which records flaky
-examples in a JSON report file on `master` (`retrieve-tests-metadata` and `update-tests-metadata` jobs), and warns when a new flaky example
-is detected in any other branch (`flaky-examples-check` job). In the future, the
-`flaky-examples-check` job will not be allowed to fail.
+examples in a JSON report file on `master` (`retrieve-tests-metadata` and
+`update-tests-metadata` jobs).
This was originally implemented in: <https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/13021>.
@@ -87,6 +86,7 @@ For instance `RETRIES=1 bin/rspec ...` would retry the failing examples once.
- [Dropdowns rendering upward or downward due to window size and scroll position](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/17660)
- [Lazy loaded images can cause Capybara to misclick](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/18713)
- [Triggering JS events before the event handlers are set up](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/18742)
+- [Wait for the image to be lazy-loaded when asserting on a Markdown image's src attribute](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/25408)
#### Capybara viewport size related issues
diff --git a/doc/development/testing_guide/frontend_testing.md b/doc/development/testing_guide/frontend_testing.md
index 26357d4fdfd..5ba63cc07fa 100644
--- a/doc/development/testing_guide/frontend_testing.md
+++ b/doc/development/testing_guide/frontend_testing.md
@@ -58,7 +58,7 @@ which could arise (especially with testing against browser specific features).
### Differences to Karma
-- Jest runs in a Node.js environment, not in a browser. Support for running Jest tests in a browser [is planned](https://gitlab.com/gitlab-org/gitlab-foss/issues/58205).
+- Jest runs in a Node.js environment, not in a browser. Support for running Jest tests in a browser [is planned](https://gitlab.com/gitlab-org/gitlab/-/issues/26982).
- Because Jest runs in a Node.js environment, it uses [jsdom](https://github.com/jsdom/jsdom) by default. See also its [limitations](#limitations-of-jsdom) below.
- Jest does not have access to Webpack loaders or aliases.
The aliases used by Jest are defined in its [own config](https://gitlab.com/gitlab-org/gitlab/blob/master/jest.config.js).
@@ -81,7 +81,7 @@ This comes with a number of limitations, namely:
- [No element sizes or positions](https://github.com/jsdom/jsdom/blob/15.1.1/lib/jsdom/living/nodes/Element-impl.js#L334-L371)
- [No layout engine](https://github.com/jsdom/jsdom/issues/1322) in general
-See also the issue for [support running Jest tests in browsers](https://gitlab.com/gitlab-org/gitlab-foss/issues/58205).
+See also the issue for [support running Jest tests in browsers](https://gitlab.com/gitlab-org/gitlab/-/issues/26982).
### Debugging Jest tests
@@ -782,7 +782,7 @@ All tests in `spec/javascripts/` will eventually be migrated to `spec/frontend/`
Before May 2018, `features/` also contained feature tests run by Spinach. These tests were removed from the codebase in May 2018 ([#23036](https://gitlab.com/gitlab-org/gitlab-foss/issues/23036)).
-See also [Notes on testing Vue components](../fe_guide/vue.html#testing-vue-components).
+See also [Notes on testing Vue components](../fe_guide/vue.md#testing-vue-components).
## Test helpers
diff --git a/doc/development/testing_guide/review_apps.md b/doc/development/testing_guide/review_apps.md
index 3307f29a98e..2214d86542a 100644
--- a/doc/development/testing_guide/review_apps.md
+++ b/doc/development/testing_guide/review_apps.md
@@ -9,42 +9,43 @@ pipeline](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/6665).
```mermaid
graph TD
- build-qa-image -->|once the `prepare` stage is done| gitlab:assets:compile
- gitlab:assets:compile -->|once the `gitlab:assets:compile` job is done| review-build-cng
- review-build-cng -.->|triggers a CNG-mirror pipeline and wait for it to be done| CNG-mirror
- CNG-mirror -.->|polls until completed| review-build-cng
- review-build-cng -->|once the `review-build-cng` job is done| review-deploy
- review-deploy -->|once the `review-deploy` job is done| review-qa-smoke
-
-subgraph "1. gitlab-foss/gitlab `prepare` stage"
- build-qa-image
- end
-
-subgraph "2. gitlab-foss/gitlab `test` stage"
- gitlab:assets:compile
- end
-
-subgraph "3. gitlab-foss/gitlab `review-prepare` stage"
- review-build-cng
- end
-
-subgraph "4. gitlab-foss/gitlab `review` stage"
- review-deploy["review-deploy<br><br>Helm deploys the Review App using the Cloud<br/>Native images built by the CNG-mirror pipeline.<br><br>Cloud Native images are deployed to the `review-apps-ce` or `review-apps-ee`<br>Kubernetes (GKE) cluster, in the GCP `gitlab-review-apps` project."]
- end
-
-subgraph "5. gitlab-foss/gitlab `qa` stage"
- review-qa-smoke[review-qa-smoke<br><br>gitlab-qa runs the smoke suite against the Review App.]
- end
+ A["build-qa-image, gitlab:assets:compile pull-cache<br/>(canonical default refs only)"];
+ B[review-build-cng];
+ C[review-deploy];
+ D[CNG-mirror];
+ E[review-qa-smoke];
+
+ A -->|once the `prepare` stage is done| B
+ B -.->|triggers a CNG-mirror pipeline and wait for it to be done| D
+ D -.->|polls until completed| B
+ B -->|once the `review-build-cng` job is done| C
+ C -->|once the `review-deploy` job is done| E
+
+subgraph "1. gitlab `prepare` stage"
+ A
+ end
+
+subgraph "2. gitlab `review-prepare` stage"
+ B
+ end
+
+subgraph "3. gitlab `review` stage"
+ C["review-deploy<br><br>Helm deploys the Review App using the Cloud<br/>Native images built by the CNG-mirror pipeline.<br><br>Cloud Native images are deployed to the `review-apps-ce` or `review-apps-ee`<br>Kubernetes (GKE) cluster, in the GCP `gitlab-review-apps` project."]
+ end
+
+subgraph "4. gitlab `qa` stage"
+ E[review-qa-smoke<br><br>gitlab-qa runs the smoke suite against the Review App.]
+ end
subgraph "CNG-mirror pipeline"
- CNG-mirror>Cloud Native images are built];
- end
+ D>Cloud Native images are built];
+ end
```
### Detailed explanation
1. On every [pipeline][gitlab-pipeline] during the `test` stage, the
- [`gitlab:assets:compile`][gitlab:assets:compile] job is automatically started.
+ [`gitlab:assets:compile`][gitlab:assets:compile pull-cache] job is automatically started.
- Once it's done, it starts the [`review-build-cng`][review-build-cng]
manual job since the [`CNG-mirror`][cng-mirror] pipeline triggered in the
following step depends on it.
@@ -79,27 +80,39 @@ subgraph "CNG-mirror pipeline"
**Additional notes:**
- If the `review-deploy` job keep failing (note that we already retry it twice),
- please post a message in the `#quality` channel and/or create a ~Quality ~bug
+ please post a message in the `#g_qe_engineering_productivity` channel and/or create a `~"Engineering Productivity"` `~"ep::review apps"` `~bug`
issue with a link to your merge request. Note that the deployment failure can
reveal an actual problem introduced in your merge request (i.e. this isn't
necessarily a transient failure)!
-- If the `review-qa-smoke` job keep failing (note that we already retry it twice),
+- If the `review-qa-smoke` job keeps failing (note that we already retry it twice),
please check the job's logs: you could discover an actual problem introduced in
your merge request. You can also download the artifacts to see screenshots of
the page at the time the failures occurred. If you don't find the cause of the
failure or if it seems unrelated to your change, please post a message in the
`#quality` channel and/or create a ~Quality ~bug issue with a link to your
merge request.
-- The manual [`review-stop`][gitlab-ci-yml] in the `test` stage can be used to
+- The manual `review-stop` can be used to
stop a Review App manually, and is also started by GitLab once a merge
request's branch is deleted after being merged.
-- Review Apps are cleaned up regularly via a pipeline schedule that runs
- the [`schedule:review-cleanup`][gitlab-ci-yml] job.
- The Kubernetes cluster is connected to the `gitlab-{ce,ee}` projects using
[GitLab's Kubernetes integration][gitlab-k8s-integration]. This basically
allows to have a link to the Review App directly from the merge request
widget.
+### Auto-stopping of Review Apps
+
+Review Apps are automatically stopped 2 days after the last deployment thanks to
+the [Environment auto-stop](../../ci/environments.md#environments-auto-stop) feature.
+
+If you need your Review App to stay up for a longer time, you can
+[pin its environment](../../ci/environments.md#auto-stop-example) or retry the
+`review-deploy` job to update the "latest deployed at" time.
+
+The `review-cleanup` job that automatically runs in scheduled
+pipelines (and is manual in merge request pipelines) stops stale Review Apps after 5 days,
+deletes their environment after 6 days, and cleans up any dangling Helm releases
+and Kubernetes resources after 7 days.
+
## QA runs
On every [pipeline][gitlab-pipeline] in the `qa` stage (which comes after the
@@ -206,12 +219,12 @@ aids in identifying load spikes on the cluster, and if nodes are problematic or
**Potential cause:**
-That could be a sign that the [`schedule:review-cleanup`][gitlab-ci-yml] job is
+That could be a sign that the `review-cleanup` job is
failing to cleanup stale Review Apps and Kubernetes resources.
**Where to look for further debugging:**
-Look at the latest `schedule:review-cleanup` job log, and identify look for any
+Look at the latest `review-cleanup` job log, and look for any
unexpected failure.
### p99 CPU utilization is at 100% for most of the nodes and/or many components
@@ -270,7 +283,7 @@ kubectl get cm --sort-by='{.metadata.creationTimestamp}' | grep 'review-' | grep
### Using K9s
-[K9s] is a powerful command line dashboard which allows you to filter by labels. This can help identify trends with apps exceeding the [review-app resource requests](https://gitlab.com/gitlab-org/gitlab/blob/master/scripts/review_apps/base-config.yaml). Kubernetes will schedule pods to nodes based on resource requests and allow for CPU usage up to the limits.
+[K9s] is a powerful command line dashboard which allows you to filter by labels. This can help identify trends with apps exceeding the [review-app resource requests](https://gitlab.com/gitlab-org/gitlab/-/blob/master/scripts/review_apps/base-config.yaml). Kubernetes will schedule pods to nodes based on resource requests and allow for CPU usage up to the limits.
- In K9s you can sort or add filters by typing the `/` character
- `-lrelease=<review-app-slug>` - filters down to all pods for a release. This aids in determining what is having issues in a single deployment
@@ -376,10 +389,10 @@ find a way to limit it to only us.**
- [Stern](https://github.com/wercker/stern) - enables cross pod log tailing based on label/field selectors
[charts-1068]: https://gitlab.com/gitlab-org/charts/gitlab/issues/1068
-[gitlab-pipeline]: https://gitlab.com/gitlab-org/gitlab-foss/pipelines/44362587
-[gitlab:assets:compile]: https://gitlab.com/gitlab-org/gitlab-foss/-/jobs/149511610
-[review-build-cng]: https://gitlab.com/gitlab-org/gitlab-foss/-/jobs/149511623
-[review-deploy]: https://gitlab.com/gitlab-org/gitlab-foss/-/jobs/149511624
+[gitlab-pipeline]: https://gitlab.com/gitlab-org/gitlab/pipelines/125315730
+[gitlab:assets:compile pull-cache]: https://gitlab.com/gitlab-org/gitlab/-/jobs/467724487
+[review-build-cng]: https://gitlab.com/gitlab-org/gitlab/-/jobs/467724808
+[review-deploy]: https://gitlab.com/gitlab-org/gitlab/-/jobs/467724810
[cng-mirror]: https://gitlab.com/gitlab-org/build/CNG-mirror
[cng]: https://gitlab.com/gitlab-org/build/CNG
[cng-mirror-pipeline]: https://gitlab.com/gitlab-org/build/CNG-mirror/pipelines/44364657
@@ -387,13 +400,11 @@ find a way to limit it to only us.**
[helm-chart]: https://gitlab.com/gitlab-org/charts/gitlab/
[review-apps-ce]: https://console.cloud.google.com/kubernetes/clusters/details/us-central1-a/review-apps-ce?project=gitlab-review-apps
[review-apps-ee]: https://console.cloud.google.com/kubernetes/clusters/details/us-central1-b/review-apps-ee?project=gitlab-review-apps
-[review-apps.sh]: https://gitlab.com/gitlab-org/gitlab/blob/master/scripts/review_apps/review-apps.sh
-[automated_cleanup.rb]: https://gitlab.com/gitlab-org/gitlab/blob/master/scripts/review_apps/automated_cleanup.rb
-[Auto-DevOps.gitlab-ci.yml]: https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml
-[gitlab-ci-yml]: https://gitlab.com/gitlab-org/gitlab/blob/master/.gitlab-ci.yml
+[review-apps.sh]: https://gitlab.com/gitlab-org/gitlab/-/blob/master/scripts/review_apps/review-apps.sh
+[automated_cleanup.rb]: https://gitlab.com/gitlab-org/gitlab/-/blob/master/scripts/review_apps/automated_cleanup.rb
+[Auto-DevOps.gitlab-ci.yml]: https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml
[gitlab-k8s-integration]: ../../user/project/clusters/index.md
[K9s]: https://github.com/derailed/k9s
-[password-bug]: https://gitlab.com/gitlab-org/gitlab-foss/issues/53621
---
diff --git a/doc/development/testing_guide/testing_levels.md b/doc/development/testing_guide/testing_levels.md
index f7dec82724f..295aa6609a8 100644
--- a/doc/development/testing_guide/testing_levels.md
+++ b/doc/development/testing_guide/testing_levels.md
@@ -103,7 +103,7 @@ graph RL
For complex Vuex mutations, you should separate the tests from other parts of the Vuex store to simplify problem-solving.
#### When *not* to use unit tests
-
+
- **Non-exported functions or classes**:
Anything not exported from a module can be considered private or an implementation detail, and doesn't need to be tested.
- **Constants**:
@@ -200,7 +200,7 @@ graph RL
- **All server requests**:
Similar to unit tests, when running component tests, the backend may not be reachable, so all outgoing requests need to be mocked.
- **Asynchronous background operations**:
- Similar to unit tests, background operations cannot be stopped or waited on, so they will continue running in the following tests and cause side effects.
+ Similar to unit tests, background operations cannot be stopped or waited on. This means they will continue running in the following tests and cause side effects.
- **Child components**:
Every component is tested individually, so child components are mocked.
See also [`shallowMount()`](https://vue-test-utils.vuejs.org/api/#shallowmount)
@@ -314,7 +314,7 @@ controller.instance_variable_set(:@user, user)
and use methods which are deprecated in Rails 5 ([#23768]).
-[#23768]: https://gitlab.com/gitlab-org/gitlab-foss/issues/23768
+[#23768]: https://gitlab.com/gitlab-org/gitlab/-/issues/16260
### About Karma
diff --git a/doc/development/testing_guide/testing_migrations_guide.md b/doc/development/testing_guide/testing_migrations_guide.md
index 392911b1fda..a03b940fe40 100644
--- a/doc/development/testing_guide/testing_migrations_guide.md
+++ b/doc/development/testing_guide/testing_migrations_guide.md
@@ -18,7 +18,7 @@ a database schema.
Adding a `:migration` tag to a test signature enables some custom RSpec
`before` and `after` hooks in our
-[`spec_helper.rb`](https://gitlab.com/gitlab-org/gitlab/blob/3b29908a64ff729c0cf6d93452fe00ab23079c75/spec%2Fspec_helper.rb#L259)
+[`spec/support/migration.rb`](https://gitlab.com/gitlab-org/gitlab/-/blob/f81fa6ab1dd788b70ef44b85aaba1f31ffafae7d/spec/support/migration.rb)
to run.
A `before` hook will revert all migrations to the point that a migration
@@ -49,7 +49,7 @@ require Rails.root.join('db', 'post_migrate', '20170526185842_migrate_pipeline_s
#### `table`
Use the `table` helper to create a temporary `ActiveRecord::Base`-derived model
-for a table. [FactoryBot](https://docs.gitlab.com/ee/development/testing_guide/best_practices.html#factories)
+for a table. [FactoryBot](best_practices.md#factories)
**should not** be used to create data for migration specs. For example, to
create a record in the `projects` table:
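+
+A minimal sketch of such a record (column values are illustrative):
+
+```ruby
+# `table` builds a throwaway ActiveRecord model bound to the given table,
+# so records can be created without application models or factories.
+namespaces = table(:namespaces)
+projects   = table(:projects)
+
+namespace = namespaces.create!(name: 'gitlab-org', path: 'gitlab-org')
+projects.create!(namespace_id: namespace.id, name: 'gitlab', path: 'gitlab')
+```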
@@ -112,7 +112,7 @@ migration. You can find the complete spec in
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20170526185842_migrate_pipeline_stages.rb')
-describe MigratePipelineStages, :migration do
+describe MigratePipelineStages do
# Create test data - pipeline and CI/CD jobs.
let(:jobs) { table(:ci_builds) }
let(:stages) { table(:ci_stages) }
@@ -158,12 +158,14 @@ end
To test a non-`ActiveRecord::Migration` test (a background migration),
you will need to manually provide a required schema version. Please add a
-schema tag to a context that you want to switch the database schema within.
+`schema` tag to a context that you want to switch the database schema within.
+
+If not set, `schema` defaults to `:latest`.
Example:
```ruby
-describe SomeClass, :migration, schema: 20170608152748 do
+describe SomeClass, schema: 20170608152748 do
# ...
end
```
@@ -178,7 +180,7 @@ background migration. You can find the complete spec on
```ruby
require 'spec_helper'
-describe Gitlab::BackgroundMigration::ArchiveLegacyTraces, :migration, schema: 20180529152628 do
+describe Gitlab::BackgroundMigration::ArchiveLegacyTraces, schema: 20180529152628 do
include TraceHelpers
let(:namespaces) { table(:namespaces) }
diff --git a/doc/development/what_requires_downtime.md b/doc/development/what_requires_downtime.md
index a25d065f735..b7ea56be873 100644
--- a/doc/development/what_requires_downtime.md
+++ b/doc/development/what_requires_downtime.md
@@ -50,7 +50,7 @@ The reason we spread this out across three releases is that dropping a column is
a destructive operation that can't be rolled back easily.
Following this procedure helps us to make sure there are no deployments to GitLab.com
-and upgrade processes for self-hosted installations that lump together any of these steps.
+and upgrade processes for self-managed installations that lump together any of these steps.
### Step 1: Ignoring the column (release M)
@@ -162,6 +162,9 @@ class CleanupUsersUpdatedAtRename < ActiveRecord::Migration[4.2]
end
```
+NOTE: **Note:** If you're renaming a large table, please carefully consider the state when the first migration has run but the second cleanup migration hasn't been run yet.
+With [Canary](https://about.gitlab.com/handbook/engineering/infrastructure/library/canary/) it is possible that the system runs in this state for a significant amount of time.
+
## Changing Column Constraints
Adding or removing a NOT NULL clause (or another constraint) can typically be
diff --git a/doc/gitlab-basics/create-your-ssh-keys.md b/doc/gitlab-basics/create-your-ssh-keys.md
index 98f2679c9d6..9b3431a5a42 100644
--- a/doc/gitlab-basics/create-your-ssh-keys.md
+++ b/doc/gitlab-basics/create-your-ssh-keys.md
@@ -1,14 +1,13 @@
---
type: howto
---
-
-# Create and add your SSH public key
+# Create and add your SSH key pair
It is best practice to use [Git over SSH instead of Git over HTTP](https://git-scm.com/book/en/v2/Git-on-the-Server-The-Protocols).
In order to use SSH, you will need to:
-1. [Create an SSH key pair](#creating-your-ssh-key-pair) on your local computer.
-1. [Add the key to GitLab](#adding-your-ssh-public-key-to-gitlab).
+1. Create an SSH key pair
+1. Add your SSH public key to GitLab
## Creating your SSH key pair
diff --git a/doc/install/aws/img/aws_ha_architecture_diagram.png b/doc/install/aws/img/aws_ha_architecture_diagram.png
index 8cff5658b32..2064b0f49ae 100644
--- a/doc/install/aws/img/aws_ha_architecture_diagram.png
+++ b/doc/install/aws/img/aws_ha_architecture_diagram.png
Binary files differ
diff --git a/doc/install/aws/index.md b/doc/install/aws/index.md
index 096d724717e..061030765a3 100644
--- a/doc/install/aws/index.md
+++ b/doc/install/aws/index.md
@@ -33,6 +33,9 @@ In addition to having a basic familiarity with [AWS](https://docs.aws.amazon.com
- [To create or upload an SSH key](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-key-pairs.html)
to connect to the instance via SSH
- A domain name for the GitLab instance
+- An SSL/TLS certificate to secure your domain. If you do not already own one, you can provision a free public SSL/TLS certificate through [AWS Certificate Manager](https://aws.amazon.com/certificate-manager/) (ACM) for use with the [Elastic Load Balancer](#load-balancer) we'll create.
+
+NOTE: **Note:** It can take a few hours to validate a certificate provisioned through ACM. To avoid delays later, request your certificate as soon as possible.
## Architecture
@@ -49,12 +52,10 @@ Here's a list of the AWS services we will use, with links to pricing information
will apply. If you want to run it on a dedicated or reserved instance,
consult the [EC2 pricing page](https://aws.amazon.com/ec2/pricing/) for more
information on the cost.
-- **EBS**: We will also use an EBS volume to store the Git data. See the
- [Amazon EBS pricing](https://aws.amazon.com/ebs/pricing/).
- **S3**: We will use S3 to store backups, artifacts, LFS objects, etc. See the
[Amazon S3 pricing](https://aws.amazon.com/s3/pricing/).
-- **ALB**: An Application Load Balancer will be used to route requests to the
- GitLab instance. See the [Amazon ELB pricing](https://aws.amazon.com/elasticloadbalancing/pricing/).
+- **ELB**: A Classic Load Balancer will be used to route requests to the
+ GitLab instances. See the [Amazon ELB pricing](https://aws.amazon.com/elasticloadbalancing/pricing/).
- **RDS**: An Amazon Relational Database Service using PostgreSQL will be used
to provide a High Availability database configuration. See the
[Amazon RDS pricing](https://aws.amazon.com/rds/postgresql/pricing/).
@@ -199,13 +200,10 @@ create the actual RDS instance.
### RDS Subnet Group
1. Navigate to the RDS dashboard and select **Subnet Groups** from the left menu.
-1. Give it a name (`gitlab-rds-group`), a description, and choose the VPC from
- the VPC dropdown.
-1. Click "Add all the subnets related to this VPC" and
- remove the public ones, we only want the **private subnets**.
- In the end, you should see `10.0.1.0/24` and `10.0.3.0/24` (as
- we defined them in the [subnets section](#subnets)).
- Click **Create** when ready.
+1. Click on **Create DB Subnet Group**.
+1. Under **Subnet group details**, enter a name (we'll use `gitlab-rds-group`), a description, and choose the `gitlab-vpc` from the VPC dropdown.
+1. Under **Add subnets**, click **Add all the subnets related to this VPC** and remove the public ones; we only want the **private subnets**. In the end, you should see `10.0.1.0/24` and `10.0.3.0/24` (as we defined them in the [subnets section](#subnets)).
+1. Click **Create** when ready.
![RDS Subnet Group](img/rds_subnet_group.png)
@@ -214,33 +212,31 @@ create the actual RDS instance.
Now, it's time to create the database:
1. Select **Databases** from the left menu and click **Create database**.
-1. Select PostgreSQL and click **Next**.
-1. Since this is a production server, let's choose "Production". Click **Next**.
-1. Let's see the instance specifications:
- 1. Leave the license model as is (`postgresql-license`).
- 1. For the version, select the latest of the 9.6 series (check the
- [database requirements](../../install/requirements.md#postgresql-requirements))
- if there are any updates on this).
- 1. For the size, let's select a `t2.medium` instance.
- 1. Multi-AZ-deployment is recommended as redundancy, so choose "Create
- replica in different zone". Read more at
- [High Availability (Multi-AZ)](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.MultiAZ.html).
- 1. A Provisioned IOPS (SSD) storage type is best suited for HA (though you can
- choose a General Purpose (SSD) to reduce the costs). Read more about it at
- [Storage for Amazon RDS](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_Storage.html).
-
-1. The rest of the settings on this page request a DB instance identifier, username
- and a master password. We've chosen to use `gitlab-db-ha`, `gitlab` and a
- very secure password respectively. Keep these in hand for later.
-1. Click **Next** to proceed to the advanced settings.
-1. Make sure to choose our GitLab VPC, our subnet group, set public accessibility to
- **No**, and to leave it to create a new security group. The only additional
- change which will be helpful is the database name for which we can use
- `gitlabhq_production`. At the very bottom, there's an option to enable
- auto updates to minor versions. You may want to turn it off.
-1. When done, click **Create database**.
-
-Now that the database is created, let's move on to setting up Redis with ElasticCache.
+1. Select **Standard Create** for the database creation method.
+1. Select **PostgreSQL** as the database engine and select **PostgreSQL 10.9-R1** from the version dropdown menu (check the [database requirements](../../install/requirements.md#postgresql-requirements) to see if there are any updates on this for your chosen version of GitLab).
+1. Since this is a production server, let's choose **Production** from the **Templates** section.
+1. Under **Settings**, set a DB instance identifier, a master username, and a master password. We'll use `gitlab-db-ha`, `gitlab`, and a very secure password respectively. Make a note of these as we'll need them later.
+1. For the DB instance size, select **Standard classes** and select an instance size that meets your requirements from the dropdown menu. We'll use a `db.m4.large` instance.
+1. Under **Storage**, configure the following:
+ 1. Select **Provisioned IOPS (SSD)** from the storage type dropdown menu. Provisioned IOPS (SSD) storage is best suited for HA (though you can choose General Purpose (SSD) to reduce the costs). Read more about it at [Storage for Amazon RDS](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_Storage.html).
+ 1. Allocate storage and set provisioned IOPS. We'll use the minimum values, `100` and `1000`, respectively.
+ 1. Enable storage autoscaling (optional) and set a maximum storage threshold.
+1. Under **Availability & durability**, select **Create a standby instance** to have a standby RDS instance provisioned in a different Availability Zone. Read more at [High Availability (Multi-AZ)](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.MultiAZ.html).
+1. Under **Connectivity**, configure the following:
+ 1. Select the VPC we created earlier (`gitlab-vpc`) from the **Virtual Private Cloud (VPC)** dropdown menu.
+ 1. Expand the **Additional connectivity configuration** section and select the subnet group (`gitlab-rds-group`) we created earlier.
+ 1. Set public accessibility to **No**.
+ 1. Under **VPC security group**, select **Create new** and enter a name. We'll use `gitlab-rds-sec-group`.
+ 1. Leave the database port as the default `5432`.
+1. For **Database authentication**, select **Password authentication**.
+1. Expand the **Additional configuration** section and complete the following:
+ 1. The initial database name. We'll use `gitlabhq_production`.
+ 1. Configure your preferred backup settings.
+ 1. The only other change we'll make here is to disable auto minor version updates under **Maintenance**.
+ 1. Leave all the other settings as is or tweak according to your needs.
+ 1. Once you're happy, click **Create database**.
+
+Now that the database is created, let's move on to setting up Redis with ElastiCache.
## Redis with ElastiCache
@@ -291,29 +287,92 @@ and add a custom TCP rule for port `6379` accessible within itself.
## Load Balancer
-On the EC2 dashboard, look for Load Balancer on the left column:
+On the EC2 dashboard, look for Load Balancer in the left navigation bar:
1. Click the **Create Load Balancer** button.
- 1. Choose the Application Load Balancer.
- 1. Give it a name (`gitlab-loadbalancer`) and set the scheme to "internet-facing".
- 1. In the "Listeners" section, make sure it has HTTP and HTTPS.
- 1. In the "Availability Zones" section, select the `gitlab-vpc` we have created
- and associate the **public subnets**.
-1. Click **Configure Security Settings** to go to the next section to
- select the TLS certificate. When done, go to the next step.
-1. In the "Security Groups" section, create a new one by giving it a name
- (`gitlab-loadbalancer-sec-group`) and allow both HTTP ad HTTPS traffic
+ 1. Choose the **Classic Load Balancer**.
+ 1. Give it a name (we'll use `gitlab-loadbalancer`) and for the **Create LB Inside** option, select `gitlab-vpc` from the dropdown menu.
+ 1. In the **Listeners** section, set HTTP port 80, HTTPS port 443, and TCP port 22 for both load balancer and instance protocols and ports.
+ 1. In the **Select Subnets** section, select both public subnets from the list.
+1. Click **Assign Security Groups** and select **Create a new security group**, give it a name
+ (we'll use `gitlab-loadbalancer-sec-group`) and description, and allow both HTTP and HTTPS traffic
from anywhere (`0.0.0.0/0, ::/0`).
-1. In the next step, configure the routing and select an existing target group
- (`gitlab-public`). The Load Balancer Health will allow us to indicate where to
- ping and what makes up a healthy or unhealthy instance.
-1. Leave the "Register Targets" section as is, and finally review the settings
- and create the ELB.
+1. Click **Configure Security Settings** and select an SSL/TLS certificate from ACM or upload a certificate to IAM.
+1. Click **Configure Health Check** and set up a health check for your EC2 instances.
+ 1. For **Ping Protocol**, select HTTP.
+ 1. For **Ping Port**, enter 80.
+ 1. For **Ping Path**, enter `/explore`. (We use `/explore` as it's a public endpoint that does
+ not require authorization.)
+ 1. Keep the default **Advanced Details** or adjust them according to your needs.
+1. Click **Add EC2 Instances** but, as we don't have any instances to add yet, come back
+to your load balancer after creating your GitLab instances and add them.
+1. Click **Add Tags** and add any tags you need.
+1. Click **Review and Create**, review all your settings, and click **Create** if you're happy.
After the Load Balancer is up and running, you can revisit your Security
-Groups to refine the access only through the ELB and any other requirement
+Groups to refine the access only through the ELB and any other requirements
you might have.
+### Configure DNS for Load Balancer
+
+On the Route 53 dashboard, click **Hosted zones** in the left navigation bar:
+
+1. Select an existing hosted zone or, if you do not already have one for your domain, click **Create Hosted Zone**, enter your domain name, and click **Create**.
+1. Click **Create Record Set** and provide the following values:
+ 1. **Name:** Use the domain name (the default value) or enter a subdomain.
+ 1. **Type:** Select **A - IPv4 address**.
+ 1. **Alias Target:** Find the **ELB Classic Load Balancers** section and select the classic load balancer we created earlier.
+ 1. **Routing Policy:** We'll use **Simple** but you can choose a different policy based on your use case.
+ 1. **Evaluate Target Health:** We'll set this to **No** but you can choose to have the load balancer route traffic based on target health.
+ 1. Click **Create**.
+1. Update your DNS records with your domain registrar. The steps for doing this vary depending on which registrar you use and are beyond the scope of this guide.
+
+## Setting up Bastion Hosts
+
+Since our GitLab instances will be in private subnets, we need a way to connect to these instances via SSH to make configuration changes, perform upgrades, etc. One way of doing this is via a [bastion host](https://en.wikipedia.org/wiki/Bastion_host), sometimes also referred to as a jump box.
+
+TIP: **Tip:** If you do not want to maintain bastion hosts, you can set up [AWS Systems Manager Session Manager](https://docs.aws.amazon.com/systems-manager/latest/userguide/session-manager.html) for access to instances. This is beyond the scope of this document.
+
+### Create Bastion Host A
+
+1. Navigate to the EC2 Dashboard and click on **Launch instance**.
+1. Select the **Ubuntu Server 18.04 LTS (HVM)** AMI.
+1. Choose an instance type. We'll use a `t2.micro` as we'll only use the bastion host to SSH into our other instances.
+1. Click **Configure Instance Details**.
+ 1. Under **Network**, select the `gitlab-vpc` from the dropdown menu.
+ 1. Under **Subnet**, select the public subnet we created earlier (`gitlab-public-10.0.0.0`).
+ 1. Double check that under **Auto-assign Public IP** you have **Use subnet setting (Enable)** selected.
+ 1. Leave everything else as default and click **Add Storage**.
+1. For storage, we'll leave everything as default and only add an 8GB root volume. We won't store anything on this instance.
+1. Click **Add Tags** and on the next screen click **Add Tag**.
+ 1. We’ll only set `Key: Name` and `Value: Bastion Host A`.
+1. Click **Configure Security Group**.
+ 1. Select **Create a new security group**, enter a **Security group name** (we'll use `bastion-sec-group`), and add a description.
+ 1. We'll enable SSH access from anywhere (`0.0.0.0/0`). If you want stricter security, specify a single IP address or an IP address range in CIDR notation.
+ 1. Click **Review and Launch**
+1. Review all your settings and, if you're happy, click **Launch**.
+1. Acknowledge that you have access to an existing key pair or create a new one. Click **Launch Instance**.
+
+Confirm that you can SSH into the instance:
+
+1. On the EC2 Dashboard, click on **Instances** in the left menu.
+1. Select **Bastion Host A** from your list of instances.
+1. Click **Connect** and follow the connection instructions.
+1. If you are able to connect successfully, let's move on to setting up our second bastion host for redundancy.
+
+### Create Bastion Host B
+
+1. Create an EC2 instance following the same steps as above with the following changes:
+ 1. For the **Subnet**, select the second public subnet we created earlier (`gitlab-public-10.0.2.0`).
+ 1. Under the **Add Tags** section, we’ll set `Key: Name` and `Value: Bastion Host B` so that we can easily identify our two instances.
+ 1. For the security group, select the existing `bastion-sec-group` we created above.
+
+### Use SSH Agent Forwarding
+
+EC2 instances running Linux use private key files for SSH authentication. You'll connect to your bastion host using an SSH client and the private key file stored on your client. Since the private key file is not present on the bastion host, you will not be able to connect to your instances in private subnets.
+
+Storing private key files on your bastion host is a bad idea. To get around this, use SSH agent forwarding on your client. See [Securely Connect to Linux Instances Running in a Private Amazon VPC](https://aws.amazon.com/blogs/security/securely-connect-to-linux-instances-running-in-a-private-amazon-vpc/) for a step-by-step guide on how to use SSH agent forwarding.
+
## Deploying GitLab inside an auto scaling group
We'll use AWS's wizard to deploy GitLab and then SSH into the instance to
@@ -484,7 +543,7 @@ If everything looks good, you should be able to reach GitLab in your browser.
### Setting up Gitaly
-CAUTION: **Caution:** In this architecture, having a single Gitaly server creates a single point of failure. This limitation will be removed once [Gitaly HA](https://gitlab.com/groups/gitlab-org/-/epics/842) is released.
+CAUTION: **Caution:** In this architecture, having a single Gitaly server creates a single point of failure. This limitation will be removed once [Gitaly HA](https://gitlab.com/groups/gitlab-org/-/epics/842) is released.
Gitaly is a service that provides high-level RPC access to Git repositories.
It should be enabled and configured on a separate EC2 instance in one of the
@@ -509,7 +568,7 @@ Let's create an EC2 instance where we'll install Gitaly:
1. Click **Review and launch** followed by **Launch** if you're happy with your settings.
1. Finally, acknowledge that you have access to the selected private key file or create a new one. Click **Launch Instances**.
- > **Optional:** Instead of storing configuration _and_ repository data on the root volume, you can also choose to add an additional EBS volume for repository storage. Follow the same guidance as above.
+ > **Optional:** Instead of storing configuration _and_ repository data on the root volume, you can also choose to add an additional EBS volume for repository storage. Follow the same guidance as above. See the [Amazon EBS pricing](https://aws.amazon.com/ebs/pricing/).
Now that we have our EC2 instance ready, follow the [documentation to install GitLab and set up Gitaly on its own server](../../administration/gitaly/index.md#running-gitaly-on-its-own-server).
@@ -639,7 +698,7 @@ And the more complex the solution, the more work is involved in setting up and
maintaining it.
Have a read through these other resources and feel free to
-[open an issue](https://gitlab.com/gitlab-org/gitlab-foss/issues/new)
+[open an issue](https://gitlab.com/gitlab-org/gitlab/issues/new)
to request additional material:
- [GitLab High Availability](../../administration/high_availability/README.md):
diff --git a/doc/install/installation.md b/doc/install/installation.md
index 161116473b9..fa708f6d5cf 100644
--- a/doc/install/installation.md
+++ b/doc/install/installation.md
@@ -263,7 +263,7 @@ Since GitLab 8.17, GitLab requires the use of Node to compile JavaScript
assets, and Yarn to manage JavaScript dependencies. The current minimum
requirements for these are:
-- `node` >= v8.10.0. (We recommend node 12.x as it is faster)
+- `node` >= v10.13.0. (We recommend node 12.x as it is faster)
- `yarn` >= v1.10.0.
In many distros,
@@ -457,16 +457,13 @@ sudo chmod -R u+rwX shared/artifacts/
# Change the permissions of the directory where GitLab Pages are stored
sudo chmod -R ug+rwX shared/pages/
-# Copy the example Unicorn config
-sudo -u git -H cp config/unicorn.rb.example config/unicorn.rb
+# Copy the example Puma config
+sudo -u git -H cp config/puma.rb.example config/puma.rb
-# Find number of cores
-nproc
-
-# Enable cluster mode if you expect to have a high load instance
-# Set the number of workers to at least the number of cores
-# Ex. change the amount of workers to 3 for 2GB RAM server
-sudo -u git -H editor config/unicorn.rb
+# Refer to https://github.com/puma/puma#configuration for more information.
+# You should scale Puma workers and threads based on the number of CPU
+# cores you have available. You can get that number via the `nproc` command.
+sudo -u git -H editor config/puma.rb
# Copy the example Rack attack config
sudo -u git -H cp config/initializers/rack_attack.rb.example config/initializers/rack_attack.rb
@@ -495,8 +492,8 @@ sudo -u git -H editor config/resque.yml
```
CAUTION: **Caution:**
-Make sure to edit both `gitlab.yml` and `unicorn.rb` to match your setup.
-If you want to use Puma web server, see [Using Puma](#using-puma) for the additional steps.
+Make sure to edit both `gitlab.yml` and `puma.rb` to match your setup.
+If you want to use the Unicorn web server, see [Using Unicorn](#using-unicorn) for the additional steps.
NOTE: **Note:**
If you want to use HTTPS, see [Using HTTPS](#using-https) for the additional steps.
@@ -563,7 +560,7 @@ NOTE: **Note:**
If you want to use HTTPS, see [Using HTTPS](#using-https) for the additional steps.
NOTE: **Note:**
-Make sure your hostname can be resolved on the machine itself by either a proper DNS record or an additional line in `/etc/hosts` ("127.0.0.1 hostname"). This might be necessary, for example, if you set up GitLab behind a reverse proxy. If the hostname cannot be resolved, the final installation check will fail with `Check GitLab API access: FAILED. code: 401` and pushing commits will be rejected with `[remote rejected] master -> master (hook declined)`.
+Make sure your hostname can be resolved on the machine itself by either a proper DNS record or an additional line in `/etc/hosts` ("127.0.0.1 hostname"). This might be necessary, for example, if you set up GitLab behind a reverse proxy. If the hostname cannot be resolved, the final installation check will fail with `Check GitLab API access: FAILED. code: 401` and pushing commits will be rejected with `[remote rejected] master -> master (hook declined)`.
NOTE: **Note:**
GitLab Shell application startup time can be greatly reduced by disabling RubyGems. This can be done in several ways:
@@ -947,23 +944,22 @@ You also need to change the corresponding options (e.g. `ssh_user`, `ssh_host`,
Apart from the always supported Markdown style, there are other rich text files that GitLab can display. But you might have to install a dependency to do so. See the [`github-markup` gem README](https://github.com/gitlabhq/markup#markups) for more information.
-### Using Puma
-
-Puma is a multi-threaded HTTP 1.1 server for Ruby applications.
+### Using Unicorn
-To use GitLab with Puma:
+As of GitLab 12.9, [Puma](https://github.com/puma/puma) has replaced Unicorn as the default web server for installations from source.
+If you want to switch back to Unicorn, follow these steps:
-1. Finish GitLab setup so you have it up and running.
-1. Copy the supplied example Puma config file into place:
+1. Finish the GitLab setup so you have it up and running.
+1. Copy the supplied example Unicorn config file into place:
```shell
cd /home/git/gitlab
# Copy config file for the web server
- sudo -u git -H cp config/puma.rb.example config/puma.rb
+ sudo -u git -H cp config/unicorn.rb.example config/unicorn.rb
```
-1. Edit the system `init.d` script to use `EXPERIMENTAL_PUMA=1` flag. If you have `/etc/default/gitlab`, then you should edit it instead.
+1. Edit the system `init.d` script to set the `USE_UNICORN=1` flag. If you have `/etc/default/gitlab`, then you should edit it instead.
1. Restart GitLab.
## Troubleshooting
diff --git a/doc/install/requirements.md b/doc/install/requirements.md
index 9bc1658d59c..375137adad3 100644
--- a/doc/install/requirements.md
+++ b/doc/install/requirements.md
@@ -42,7 +42,7 @@ Please consider using a virtual machine to run GitLab.
### Ruby versions
-GitLab requires Ruby (MRI) 2.6. Beginning in GitLab 12.2, we no longer support Ruby 2.5 and lower.
+GitLab requires Ruby (MRI) 2.6. Beginning in GitLab 12.2, we no longer support Ruby 2.5 and lower.
You must use the standard MRI implementation of Ruby.
We love [JRuby](https://www.jruby.org/) and [Rubinius](https://rubinius.com), but GitLab
@@ -59,16 +59,16 @@ GitLab 11.11 and higher only supports Git 2.21.x and newer, and
### Node.js versions
-Beginning in GitLab 11.8, we only support Node.js 8.10.0 or higher, and dropped
-support for Node.js 6.
+Beginning in GitLab 12.9, we only support Node.js 10.13.0 or higher, and have dropped
+support for Node.js 8. (Node.js 6 support was dropped in GitLab 11.8.)
We recommend Node 12.x, as it is faster.
GitLab uses [webpack](https://webpack.js.org/) to compile frontend assets, which requires a minimum
-version of Node.js 8.10.0.
+version of Node.js 10.13.0.
You can check which version you are running with `node -v`. If you are running
-a version older than `v8.10.0`, you need to update to a newer version. You
+a version older than `v10.13.0`, you need to update to a newer version. You
can find instructions to install from community maintained packages or compile
from source at the [Node.js website](https://nodejs.org/en/download).
@@ -186,6 +186,23 @@ As long as you have enough available CPU and memory capacity, it's okay to incre
To change the Unicorn workers when you have the Omnibus package (which defaults to the recommendation above) please see [the Unicorn settings in the Omnibus GitLab documentation](https://docs.gitlab.com/omnibus/settings/unicorn.html).
+## Puma Workers
+
+For most instances we recommend using: Puma workers = max(CPU cores * 0.9, 2).
+For example, a node with 4 cores would have 3 Puma workers.
+
+For all machines that have 4GB and up we recommend a minimum of three Puma workers.
+If you have a 2GB machine we recommend configuring only one Puma worker to prevent excessive swapping.
+
+By default each Puma worker runs with 4 threads. We do not recommend setting more,
+due to how [Ruby MRI multi-threading](https://en.wikipedia.org/wiki/Global_interpreter_lock) works.
+
+For cases when you have to use [Legacy Rugged code](../development/gitaly.md#legacy-rugged-code) it is recommended to set the number of threads to 1.
+
+As long as you have enough available CPU and memory capacity, it's okay to increase the number of Puma workers. This will usually help to reduce the response time of the application and increase its ability to handle parallel requests.
+
+To change the Puma workers when you have the Omnibus package (which defaults to the recommendation above) please see [the Puma settings in the Omnibus GitLab documentation](https://docs.gitlab.com/omnibus/settings/puma.html).
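+
+For installations from source, a sketch of the corresponding `config/puma.rb` values for
+a 4-core node could look like this (the numbers follow the guidance above and are illustrative):
+
+```ruby
+# config/puma.rb (excerpt)
+
+# 4 cores * 0.9 is roughly 3 workers; never go below 2.
+workers 3
+
+# Up to 4 threads per worker; use `threads 1, 1` when relying on legacy Rugged code.
+threads 1, 4
+```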
+
## Redis and Sidekiq
Redis stores all user sessions and the background task queue.
@@ -236,18 +253,21 @@ For reference, GitLab.com's [auto-scaling shared runner](../user/gitlab_com/inde
## Supported web browsers
+CAUTION: **Caution:** With GitLab 13.0 (May 2020) we are removing official support for Internet Explorer 11.
+With the release of GitLab 13.4 (September 2020) we will remove all code that supports Internet Explorer 11.
+You can provide feedback [on this issue](https://gitlab.com/gitlab-org/gitlab/issues/197987) or via your usual support channels.
+
GitLab supports the following web browsers:
- Firefox
- Chrome/Chromium
- Safari
- Microsoft Edge
-- Internet Explorer 11
+- Internet Explorer 11 (until May 2020)
For the listed web browsers, GitLab supports:
- The current and previous major versions of browsers except Internet Explorer.
-- Only version 11 of Internet Explorer.
- The current minor version of a supported major version.
NOTE: **Note:** We do not support running GitLab with JavaScript disabled in the browser and have no plans of supporting that
diff --git a/doc/integration/bitbucket.md b/doc/integration/bitbucket.md
index 7cead234709..1ce361761ff 100644
--- a/doc/integration/bitbucket.md
+++ b/doc/integration/bitbucket.md
@@ -1,7 +1,8 @@
# Integrate your GitLab server with Bitbucket Cloud
NOTE: **Note:**
-You need to [enable OmniAuth](omniauth.md) in order to use this.
+Starting from GitLab 11.4, OmniAuth is enabled by default. If you're using an
+earlier version, you'll need to explicitly enable it.
Import projects from Bitbucket.org and login to your GitLab instance with your
Bitbucket.org account.
@@ -89,8 +90,6 @@ you to use.
For Omnibus packages:
```ruby
- gitlab_rails['omniauth_enabled'] = true
-
gitlab_rails['omniauth_providers'] = [
{
"name" => "bitbucket",
diff --git a/doc/integration/elasticsearch.md b/doc/integration/elasticsearch.md
index 2d53e021f81..e7667ea8080 100644
--- a/doc/integration/elasticsearch.md
+++ b/doc/integration/elasticsearch.md
@@ -47,24 +47,26 @@ updated automatically.
For indexing Git repository data, GitLab uses an [indexer written in Go](https://gitlab.com/gitlab-org/gitlab-elasticsearch-indexer).
-The Go indexer was included in Omnibus GitLab 11.8 as an optional replacement to a
-Ruby-based indexer. [Since GitLab v12.3](https://gitlab.com/gitlab-org/gitlab/issues/6481),
-all indexing is done by the Go indexer, and the Ruby indexer is removed.
+The way you install the Go indexer depends on your version of GitLab:
-If you would like to use the Elasticsearch Go indexer with a source installation or an older version of GitLab, please follow the instructions below.
+- For GitLab Omnibus 11.8 and above, see [GitLab Omnibus](#gitlab-omnibus).
+- For installations from source or older versions of GitLab Omnibus, install the indexer [From Source](#from-source).
-### Installation
+### GitLab Omnibus
+
+Since GitLab 11.8, the Go indexer is included in GitLab Omnibus.
+The former Ruby-based indexer was removed in [GitLab 12.3](https://gitlab.com/gitlab-org/gitlab/issues/6481).
+
+### From source
First, we need to install some dependencies, then we'll build and install
the indexer itself.
-#### Dependencies
-
This project relies on [ICU](http://site.icu-project.org/) for text encoding,
therefore we need to ensure the development packages for your platform are
installed before running `make`.
-##### Debian / Ubuntu
+#### Debian / Ubuntu
To install on Debian or Ubuntu, run:
@@ -72,7 +74,7 @@ To install on Debian or Ubuntu, run:
sudo apt install libicu-dev
```
-##### CentOS / RHEL
+#### CentOS / RHEL
To install on CentOS or RHEL, run:
@@ -89,7 +91,7 @@ brew install icu4c
export PKG_CONFIG_PATH="/usr/local/opt/icu4c/lib/pkgconfig:$PKG_CONFIG_PATH"
```
-#### Building and installing
+### Building and installing
To build and install the indexer, run:
@@ -258,7 +260,7 @@ If the database size is less than 500 MiB, and the size of all hosted repos is l
CAUTION: **Warning**:
Performing asynchronous indexing will generate a lot of Sidekiq jobs.
-Make sure to prepare for this task by either [Horizontally Scaling](../administration/high_availability/README.md#basic-scaling)
+Make sure to prepare for this task by having a [Scalable and Highly Available Setup](README.md)
or creating [extra Sidekiq processes](../administration/operations/extra_sidekiq_processes.md)
1. [Configure your Elasticsearch host and port](#enabling-elasticsearch).
@@ -511,7 +513,7 @@ Here are some common pitfalls and how to overcome them:
If you see `Elasticsearch::Model::Response::Records`, you are using Elasticsearch.
NOTE: **Note**:
- The above instructions are used to verify that GitLab is using Elasticsearch only when indexing all namespaces. This is not to be used for scenarios that only index a [subset of namespaces](https://docs.gitlab.com/ee/integration/elasticsearch.html#limiting-namespaces-and-projects).
+ The above instructions are used to verify that GitLab is using Elasticsearch only when indexing all namespaces. This is not to be used for scenarios that only index a [subset of namespaces](#limiting-namespaces-and-projects).
- **I updated GitLab and now I can't find anything**
@@ -534,7 +536,7 @@ Here are some common pitfalls and how to overcome them:
```
NOTE: **Note**:
- The above instructions are not to be used for scenarios that only index a [subset of namespaces](https://docs.gitlab.com/ee/integration/elasticsearch.html#limiting-namespaces-and-projects).
+ The above instructions are not to be used for scenarios that only index a [subset of namespaces](#limiting-namespaces-and-projects).
See [Elasticsearch Index Scopes](#elasticsearch-index-scopes) for more information on searching for specific types of data.
@@ -597,7 +599,7 @@ Here are some common pitfalls and how to overcome them:
AWS has [fixed limits](https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/aes-limits.html)
for this setting ("Maximum Size of HTTP Request Payloads"), based on the size of
the underlying instance.
-
+
- **My single node Elasticsearch cluster status never goes from `yellow` to `green` even though everything seems to be running properly**
   **For a single node Elasticsearch cluster the functional cluster health status will be yellow** (will never be green) because the primary shard is allocated but replicas cannot be as there is no other node to which Elasticsearch can assign a replica. This also applies if you are using the
@@ -614,7 +616,7 @@ Here are some common pitfalls and how to overcome them:
}
}'
```
-
+
- **I'm getting a `health check timeout: no Elasticsearch node available` error in Sidekiq during the indexing process**
```
@@ -622,7 +624,7 @@ Here are some common pitfalls and how to overcome them:
```
   You probably have not used either `http://` or `https://` as part of your value in the **"URL"** field of the Elasticsearch Integration Menu. Please make sure you are using either `http://` or `https://` in this field as the [Elasticsearch client for Go](https://github.com/olivere/elastic) that we are using [needs the prefix for the URL to be accepted as valid](https://github.com/olivere/elastic/commit/a80af35aa41856dc2c986204e2b64eab81ccac3a).
- Once you have corrected the formatting of the URL please delete the index (via the [dedicated rake task](#gitlab-elasticsearch-rake-tasks)) and [index the content of your intance](#adding-gitlabs-data-to-the-elasticsearch-index) once more.
+ Once you have corrected the formatting of the URL, delete the index (via the [dedicated rake task](#gitlab-elasticsearch-rake-tasks)) and [reindex the content of your instance](#adding-gitlabs-data-to-the-elasticsearch-index).
### Reverting to basic search
diff --git a/doc/integration/img/jira_dev_panel_jira_setup_1-1.png b/doc/integration/img/jira_dev_panel_jira_setup_1-1.png
index e3c6c01c153..cef903ac9b4 100644
--- a/doc/integration/img/jira_dev_panel_jira_setup_1-1.png
+++ b/doc/integration/img/jira_dev_panel_jira_setup_1-1.png
Binary files differ
diff --git a/doc/integration/img/jira_dev_panel_setup_com_1.png b/doc/integration/img/jira_dev_panel_setup_com_1.png
new file mode 100644
index 00000000000..18f0d5da043
--- /dev/null
+++ b/doc/integration/img/jira_dev_panel_setup_com_1.png
Binary files differ
diff --git a/doc/integration/img/jira_dev_panel_setup_com_2.png b/doc/integration/img/jira_dev_panel_setup_com_2.png
new file mode 100644
index 00000000000..31dc13e1271
--- /dev/null
+++ b/doc/integration/img/jira_dev_panel_setup_com_2.png
Binary files differ
diff --git a/doc/integration/img/jira_dev_panel_setup_com_3.png b/doc/integration/img/jira_dev_panel_setup_com_3.png
new file mode 100644
index 00000000000..eb3c573a4bb
--- /dev/null
+++ b/doc/integration/img/jira_dev_panel_setup_com_3.png
Binary files differ
diff --git a/doc/integration/jira_development_panel.md b/doc/integration/jira_development_panel.md
index 05a83e0987a..8b83f885fdd 100644
--- a/doc/integration/jira_development_panel.md
+++ b/doc/integration/jira_development_panel.md
@@ -4,8 +4,10 @@
Complementary to our [existing Jira][existing-jira] project integration, you're now able to integrate
GitLab projects with [Jira Development Panel][jira-development-panel]. Both can be used
-simultaneously. This works with self-hosted GitLab or GitLab.com integrated with self-hosted Jira
-or cloud Jira.
+simultaneously. This works with self-managed GitLab or GitLab.com integrated with:
+
+- Jira hosted by you.
+- Cloud Jira.
By doing this you can easily access related GitLab merge requests, branches, and commits directly from a Jira issue.
@@ -19,7 +21,7 @@ Note this is different from the [existing Jira][existing-jira] project integrati
is one GitLab project to the entire Jira instance.
We recommend that a GitLab group admin
-or instance admin (in the case of self-hosted GitLab) set up the integration,
+or instance admin (in the case of self-managed GitLab) set up the integration,
in order to simplify administration.
TIP: **Tip:**
@@ -28,9 +30,9 @@ regular users won't impact your integration.
## Requirements
-### Self-hosted GitLab
+### Self-managed GitLab
-If you are using self-hosted GitLab, make sure your GitLab instance is accessible by Jira.
+If you are using self-managed GitLab, make sure your GitLab instance is accessible by Jira.
- If you are connecting to Jira Cloud, make sure your instance is accessible via the internet.
- If you are using Jira Server, make sure your instance is accessible however your network is set up.
@@ -65,6 +67,8 @@ There are no special requirements if you are using GitLab.com.
## Jira Configuration
+### GitLab self-managed
+
1. In Jira, go to **Jira Settings > Applications > DVCS accounts**, then click **Link GitHub Enterprise account** to start creating a new integration.
(We are pretending to be GitHub in this integration until there is further platform support from Jira.)
@@ -108,7 +112,45 @@ There are no special requirements if you are using GitLab.com.
To connect additional GitLab projects from other GitLab top-level groups (or personal namespaces), repeat the above
steps with additional Jira DVCS accounts.
-You may now refer any Jira issue by its ID in branch names, commit messages and merge request names on GitLab's side,
+### GitLab.com
+
+You can integrate GitLab.com and Jira Cloud using the **GitLab for Jira** App in the [Atlassian Marketplace](https://marketplace.atlassian.com/apps/1221011/gitlab-for-jira).
+
+GitLab and Jira can also be integrated using the DVCS connector as described in the [GitLab self-managed section](#gitlab-self-managed). The [GitLab for Jira App](https://marketplace.atlassian.com/apps/1221011/gitlab-for-jira) is recommended when using GitLab.com and Jira Cloud because data is synchronized in real time, while the DVCS connector updates data only once per hour.
+
+<i class="fa fa-youtube-play youtube" aria-hidden="true"></i>
+For a walkthrough of the integration with GitLab for Jira, watch [Configure GitLab Jira Integration using Marketplace App](https://youtu.be/SwR-g1s1zTo) on YouTube.
+
+NOTE: **Note:**
+The **GitLab for Jira** App is only compatible with GitLab.com **and** Jira Cloud.
+
+1. Go to **Jira Settings > Apps > Find new apps**, then search for GitLab.
+1. Click **GitLab for Jira**, then click **Get it now**, or go to the [App in the marketplace directly](https://marketplace.atlassian.com/apps/1221011/gitlab-for-jira).
+
+ ![Install GitLab App on Jira](img/jira_dev_panel_setup_com_1.png)
+1. After installing, click **Get started** to go to the configurations page. This page is always available under **Jira Settings > Apps > Manage apps**.
+
+ ![Start GitLab App configuration on Jira](img/jira_dev_panel_setup_com_2.png)
+1. Enter the group or personal namespace in the **Namespace** field and click **Link namespace to Jira**. Make sure you are logged in on GitLab.com and the namespace has a Silver or above license. The user setting up _GitLab for Jira_ must have **Maintainer** access to the GitLab namespace.
+
+NOTE: **Note:**
+The GitLab user only needs access when adding a new namespace. For syncing with Jira, we do not depend on the user's token.
+
+   ![Configure namespace on GitLab Jira App](img/jira_dev_panel_setup_com_3.png)
+
+After a namespace is added, all future commits, branches, and merge requests of all projects under that namespace will be synced to Jira. Past data cannot be synced at the moment.
+
+#### Troubleshooting GitLab for Jira
+
+The GitLab for Jira App uses an iframe to add namespaces on the settings page. Some browsers block cross-site cookies, which can lead to a message saying that the user needs to log in on GitLab.com even though the user is already logged in.
+
+> "You need to sign in or sign up before continuing."
+
+In this case, enable cross-site cookies in your browser.
+
+## Usage
+
+Once the integration is set up on GitLab and Jira, you can refer to any Jira issue by its ID in branch names, commit messages, and merge request titles on GitLab's side,
and you will be able to see the linked `branches`, `commits`, and `merge requests` when entering a Jira issue
(inside the Jira issue, merge requests will be called "pull requests").
diff --git a/doc/integration/kerberos.md b/doc/integration/kerberos.md
index 3998089a0e4..68cf193a434 100644
--- a/doc/integration/kerberos.md
+++ b/doc/integration/kerberos.md
@@ -72,7 +72,6 @@ For source installations, make sure the `kerberos` gem group
1. Edit `/etc/gitlab/gitlab.rb`:
```ruby
- gitlab_rails['omniauth_enabled'] = true
gitlab_rails['omniauth_allow_single_sign_on'] = ['kerberos']
gitlab_rails['kerberos_enabled'] = true
@@ -249,7 +248,7 @@ OmniauthKerberosSpnegoController: failed to process Negotiate/Kerberos authentic
```
This is usually seen when the browser is unable to contact the Kerberos server
-directly. It will fall back to an unsupported mechanism known as
+directly. It will fall back to an unsupported mechanism known as
[`IAKERB`](https://k5wiki.kerberos.org/wiki/Projects/IAKERB), which tries to use
the GitLab server as an intermediary to the Kerberos server.
diff --git a/doc/integration/omniauth.md b/doc/integration/omniauth.md
index 286d06d52a8..5634ad95cf7 100644
--- a/doc/integration/omniauth.md
+++ b/doc/integration/omniauth.md
@@ -36,6 +36,7 @@ contains some settings that are common for all providers.
- [OpenID Connect](../administration/auth/oidc.md)
- [UltraAuth](ultra_auth.md)
- [Salesforce](salesforce.md)
+- [AWS Cognito](../administration/auth/cognito.md)
## Initial OmniAuth Configuration
@@ -51,7 +52,7 @@ that are in common for all providers that we need to consider.
be created manually or they will not be able to sign in via OmniAuth.
- `auto_link_ldap_user` can be used if you have [LDAP / ActiveDirectory](ldap.md)
integration enabled. It defaults to false. When enabled, users automatically
- created through OmniAuth will be linked to their LDAP entry as well.
+ created through an OmniAuth provider will have their LDAP identity created in GitLab as well.
- `block_auto_created_users` defaults to `true`. If `true` auto created users will
be blocked by default and will have to be unblocked by an administrator before
they are able to sign in.
@@ -80,9 +81,6 @@ To change these settings:
and change:
```ruby
- # Versions prior to 11.4 require this to be set to true
- # gitlab_rails['omniauth_enabled'] = nil
-
# CAUTION!
# This allows users to login without having a user account first. Define the allowed providers
# using an array, e.g. ["saml", "twitter"], or as true/false to allow all providers or none.
diff --git a/doc/integration/saml.md b/doc/integration/saml.md
index 10319b83233..001e2883de0 100644
--- a/doc/integration/saml.md
+++ b/doc/integration/saml.md
@@ -1,9 +1,11 @@
# SAML OmniAuth Provider
-> This topic is for SAML on self-managed GitLab instances. For SAML on GitLab.com, see [SAML SSO for GitLab.com Groups](../user/group/saml_sso/index.md).
+Note that:
-NOTE: **Note:**
-You need to [enable OmniAuth](omniauth.md) in order to use this.
+- SAML OmniAuth Provider is for SAML on self-managed GitLab instances. For SAML on
+ GitLab.com, see [SAML SSO for GitLab.com Groups](../user/group/saml_sso/index.md).
+- Starting from GitLab 11.4, OmniAuth is enabled by default. If you're using an
+ earlier version, you'll need to explicitly enable it.
GitLab can be configured to act as a SAML 2.0 Service Provider (SP). This allows
GitLab to consume assertions from a SAML 2.0 Identity Provider (IdP) such as
@@ -37,7 +39,6 @@ in your SAML IdP:
For Omnibus package:
```ruby
- gitlab_rails['omniauth_enabled'] = true
gitlab_rails['omniauth_allow_single_sign_on'] = ['saml']
gitlab_rails['omniauth_block_auto_created_users'] = false
```
@@ -187,7 +188,7 @@ tell GitLab which groups are external via the `external_groups:` element:
} }
```
-## Required groups
+## Required groups **(STARTER ONLY)**
>**Note:**
This setting is only available on GitLab 10.2 EE and above.
@@ -214,7 +215,7 @@ Example:
} }
```
-## Admin Groups
+## Admin Groups **(STARTER ONLY)**
>**Note:**
This setting is only available on GitLab 8.8 EE and above.
@@ -238,7 +239,7 @@ considered `admin groups`.
} }
```
-## Auditor Groups
+## Auditor Groups **(STARTER ONLY)**
>**Note:**
This setting is only available on GitLab 11.4 EE and above.
diff --git a/doc/integration/shibboleth.md b/doc/integration/shibboleth.md
index 885a6fe59da..4cc686cc0b6 100644
--- a/doc/integration/shibboleth.md
+++ b/doc/integration/shibboleth.md
@@ -16,7 +16,7 @@ The following changes are needed to enable Shibboleth:
1. Protect OmniAuth Shibboleth callback URL:
- ```
+ ```apache
<Location /users/auth/shibboleth/callback>
AuthType shibboleth
ShibRequestSetting requireSession 1
@@ -36,7 +36,7 @@ The following changes are needed to enable Shibboleth:
1. Exclude Shibboleth URLs from rewriting. Add `RewriteCond %{REQUEST_URI} !/Shibboleth.sso` and `RewriteCond %{REQUEST_URI} !/shibboleth-sp`. Config should look like this:
- ```
+ ```apache
# Apache equivalent of Nginx try files
RewriteEngine on
RewriteCond %{DOCUMENT_ROOT}/%{REQUEST_FILENAME} !-f
@@ -46,8 +46,12 @@ The following changes are needed to enable Shibboleth:
RequestHeader set X_FORWARDED_PROTO 'https'
```
-1. Edit `/etc/gitlab/gitlab.rb` configuration file to enable OmniAuth and add
- Shibboleth as an OmniAuth provider. User attributes will be sent from the
+ **NOTE:**
+ Starting from GitLab 11.4, OmniAuth is enabled by default. If you're using an
+ earlier version, you'll need to explicitly enable it in `/etc/gitlab/gitlab.rb`.
+
+1. In addition, add Shibboleth to `/etc/gitlab/gitlab.rb` as an OmniAuth provider.
+ User attributes will be sent from the
Apache reverse proxy to GitLab as headers with the names from the Shibboleth
attribute mapping. Therefore the values of the `args` hash
should be in the form of `"HTTP_ATTRIBUTE"`. The keys in the hash are arguments
@@ -71,7 +75,6 @@ The following changes are needed to enable Shibboleth:
gitlab_rails['omniauth_allow_single_sign_on'] = true
gitlab_rails['omniauth_block_auto_created_users'] = false
- gitlab_rails['omniauth_enabled'] = true
gitlab_rails['omniauth_providers'] = [
{
"name" => "'shibboleth"',
diff --git a/doc/integration/sourcegraph.md b/doc/integration/sourcegraph.md
index ae818f285b8..da384fa9528 100644
--- a/doc/integration/sourcegraph.md
+++ b/doc/integration/sourcegraph.md
@@ -46,13 +46,13 @@ sudo -u git -H bin/rails console RAILS_ENV=production
Then run the following command to enable the feature flag:
-```
+```ruby
Feature.enable(:sourcegraph)
```
You can also enable the feature flag only for specific projects with:
-```
+```ruby
Feature.enable(:sourcegraph, Project.find_by_full_path('my_group/my_project'))
```
diff --git a/doc/integration/twitter.md b/doc/integration/twitter.md
index cf6492d7372..8056c672d67 100644
--- a/doc/integration/twitter.md
+++ b/doc/integration/twitter.md
@@ -64,7 +64,7 @@ To enable the Twitter OmniAuth provider you must register your application with
For installations from source:
- ```
+ ```yaml
- { name: 'twitter', app_id: 'YOUR_APP_ID',
app_secret: 'YOUR_APP_SECRET' }
```
diff --git a/doc/integration/ultra_auth.md b/doc/integration/ultra_auth.md
index 091887d8eb8..3188213dac9 100644
--- a/doc/integration/ultra_auth.md
+++ b/doc/integration/ultra_auth.md
@@ -60,7 +60,7 @@ To get the credentials (a pair of Client ID and Client Secret), you must registe
For installation from source:
- ```
+ ```yaml
- { name: 'ultraauth',
app_id: 'OPENID_CLIENT_ID',
app_secret: 'OPENID_CLIENT_SECRET',
diff --git a/doc/integration/vault.md b/doc/integration/vault.md
index 2afa1ba7e32..c216fdfca4a 100644
--- a/doc/integration/vault.md
+++ b/doc/integration/vault.md
@@ -15,106 +15,109 @@ The following assumes you already have Vault installed and running.
1. **Get the OpenID Connect client ID and secret from GitLab:**
- First you'll need to create a GitLab application to obtain an application ID and secret for authenticating into Vault. To do this, sign in to GitLab and follow these steps:
+ First you'll need to create a GitLab application to obtain an application ID and secret for authenticating into Vault. To do this, sign in to GitLab and follow these steps:
- 1. On GitLab, click your avatar on the top-right corner, and select your user **Settings > Applications**.
- 1. Fill out the application **Name** and [**Redirect URI**](https://www.vaultproject.io/docs/auth/jwt.html#redirect-uris),
- making sure to select the **OpenID** scope.
- 1. Save application.
- 1. Copy client ID and secret, or keep the page open for reference.
- ![GitLab OAuth provider](img/gitlab_oauth_vault_v12_6.png)
+ 1. On GitLab, click your avatar on the top-right corner, and select your user **Settings > Applications**.
+ 1. Fill out the application **Name** and [**Redirect URI**](https://www.vaultproject.io/docs/auth/jwt.html#redirect-uris),
+ making sure to select the **OpenID** scope.
+ 1. Save application.
+ 1. Copy client ID and secret, or keep the page open for reference.
+
+ ![GitLab OAuth provider](img/gitlab_oauth_vault_v12_6.png)
1. **Enable OIDC auth on Vault:**
- OpenID Connect is not enabled in Vault by default. This needs to be enabled in the terminal.
+ OpenID Connect is not enabled in Vault by default. This needs to be enabled in the terminal.
- Open a terminal session and run the following command to enable the OpenID Connect authentication provider in Vault:
+ Open a terminal session and run the following command to enable the OpenID Connect authentication provider in Vault:
- ```shell
- vault auth enable oidc
- ```
+ ```shell
+ vault auth enable oidc
+ ```
- You should see the following output in the terminal:
+ You should see the following output in the terminal:
- ```shell
- Success! Enabled oidc auth method at: oidc/
- ```
+ ```plaintext
+ Success! Enabled oidc auth method at: oidc/
+ ```
1. **Write the OIDC config:**
- Next, Vault needs to be given the application ID and secret generated by GitLab.
+ Next, Vault needs to be given the application ID and secret generated by GitLab.
- In the terminal session, run the following command to give Vault access to the GitLab application you've just created with an OpenID scope. This allows Vault to authenticate through GitLab.
+ In the terminal session, run the following command to give Vault access to the GitLab application you've just created with an OpenID scope. This allows Vault to authenticate through GitLab.
- Replace `your_application_id` and `your_secret` in the example below with the application ID and secret generated for your app:
+ Replace `your_application_id` and `your_secret` in the example below with the application ID and secret generated for your app:
- ```shell
- $ vault write auth/oidc/config \
- oidc_discovery_url="https://gitlab.com" \
- oidc_client_id="your_application_id" \
- oidc_client_secret="your_secret" \
- default_role="demo" \
- bound_issuer="localhost"
- ```
+ ```shell
+ $ vault write auth/oidc/config \
+ oidc_discovery_url="https://gitlab.com" \
+ oidc_client_id="your_application_id" \
+ oidc_client_secret="your_secret" \
+ default_role="demo" \
+ bound_issuer="localhost"
+ ```
- You should see the following output in the terminal:
+ You should see the following output in the terminal:
- ```shell
- Success! Data written to: auth/oidc/config
- ```
+ ```shell
+ Success! Data written to: auth/oidc/config
+ ```
1. **Write the OIDC Role Config:**
- Now that Vault has a GitLab application ID and secret, it needs to know the [**Redirect URIs**](https://www.vaultproject.io/docs/auth/jwt.html#redirect-uris) and scopes given to GitLab during the application creation process. The redirect URIs need to match where your Vault instance is running. The `oidc_scopes` field needs to include the `openid`. Similarly to the previous step, replace `your_application_id` with the generated application ID from GitLab:
+   Now that Vault has a GitLab application ID and secret, it needs to know the [**Redirect URIs**](https://www.vaultproject.io/docs/auth/jwt.html#redirect-uris) and scopes given to GitLab during the application creation process. The redirect URIs need to match where your Vault instance is running. The `oidc_scopes` field needs to include `openid`. Similar to the previous step, replace `your_application_id` with the generated application ID from GitLab:
- This configuration is saved under the name of the role you are creating. In this case, we are creating a `demo` role. Later, we'll show how you can access this role through the Vault CLI.
+ This configuration is saved under the name of the role you are creating. In this case, we are creating a `demo` role. Later, we'll show how you can access this role through the Vault CLI.
- ```shell
- vault write auth/oidc/role/demo \
- user_claim="sub" \
- allowed_redirect_uris="http://localhost:8250/oidc/callback,http://127.0.0.1:8200/ui/vault/auth/oidc/oidc/callback" \
- bound_audiences="your_application_id" \
- role_type="oidc" \
- oidc_scopes="openid" \
- policies=demo \
- ttl=1h
- ```
+ ```shell
+ vault write auth/oidc/role/demo \
+ user_claim="sub" \
+ allowed_redirect_uris="http://localhost:8250/oidc/callback,http://127.0.0.1:8200/ui/vault/auth/oidc/oidc/callback" \
+ bound_audiences="your_application_id" \
+ role_type="oidc" \
+ oidc_scopes="openid" \
+ policies=demo \
+ ttl=1h
+ ```
1. **Sign in to Vault:**
- 1. Go to your Vault UI (example: [http://127.0.0.1:8200/ui/vault/auth?with=oidc](http://127.0.0.1:8200/ui/vault/auth?with=oidc)).
- 1. If the `OIDC` method is not currently selected, open the dropdown and select it.
- 1. Click the **Sign in With GitLab** button, which will open a modal window:
- ![Sign into Vault with GitLab](img/sign_into_vault_with_gitlab_v12_6.png)
+ 1. Go to your Vault UI (example: [http://127.0.0.1:8200/ui/vault/auth?with=oidc](http://127.0.0.1:8200/ui/vault/auth?with=oidc)).
+ 1. If the `OIDC` method is not currently selected, open the dropdown and select it.
+ 1. Click the **Sign in With GitLab** button, which will open a modal window:
+
+ ![Sign into Vault with GitLab](img/sign_into_vault_with_gitlab_v12_6.png)
+
+ 1. Click **Authorize** on the modal to allow Vault to sign in through GitLab. This will redirect you back to your Vault UI as a signed-in user.
- 1. Click **Authorize** on the modal to allow Vault to sign in through GitLab. This will redirect you back to your Vault UI as a signed-in user.
- ![Authorize Vault to connect with GitLab](img/authorize_vault_with_gitlab_v12_6.png)
+ ![Authorize Vault to connect with GitLab](img/authorize_vault_with_gitlab_v12_6.png)
1. **Sign in using the Vault CLI** (optional):
- Vault also allows you to sign in via their CLI.
+ Vault also allows you to sign in via their CLI.
- After writing the same configurations from above, you can run the command below in your terminal to sign in with the role configuration created in step 4 above:
+ After writing the same configurations from above, you can run the command below in your terminal to sign in with the role configuration created in step 4 above:
- ```shell
- vault login -method=oidc port=8250 role=demo
- ```
+ ```shell
+ vault login -method=oidc port=8250 role=demo
+ ```
- Here is a short explaination of what this command does:
+   Here is a short explanation of what this command does:
- 1. In the **Write the OIDC Role Config** (step 4), we created a role called `demo`. We set `role=demo` so Vault knows which configuration we'd like to login in with.
- 1. To set Vault to use the `OIDC` sign-in method, we set `-method=oidc`.
- 1. To set the port that GitLab should redirect to, we set `port=8250` or another port number that matches the port given to GitLab when listing [Redirect URIs](https://www.vaultproject.io/docs/auth/jwt.html#redirect-uris).
+   1. In the **Write the OIDC Role Config** (step 4), we created a role called `demo`. We set `role=demo` so Vault knows which configuration we'd like to log in with.
+ 1. To set Vault to use the `OIDC` sign-in method, we set `-method=oidc`.
+ 1. To set the port that GitLab should redirect to, we set `port=8250` or another port number that matches the port given to GitLab when listing [Redirect URIs](https://www.vaultproject.io/docs/auth/jwt.html#redirect-uris).
- Once you run the command above, it will present a link in the terminal.
- Click the link in the terminal and a tab will open in the browser confirming you're signed into Vault via OIDC:
+ Once you run the command above, it will present a link in the terminal.
+ Click the link in the terminal and a tab will open in the browser confirming you're signed into Vault via OIDC:
- ![Signed into Vault via OIDC](img/signed_into_vault_via_oidc_v12_6.png)
+ ![Signed into Vault via OIDC](img/signed_into_vault_via_oidc_v12_6.png)
- The terminal will output:
+ The terminal will output:
- ```
- Success! You are now authenticated. The token information displayed below
- is already stored in the token helper. You do NOT need to run "vault login"
- again. Future Vault requests will automatically use this token.
- ```
+ ```plaintext
+ Success! You are now authenticated. The token information displayed below
+ is already stored in the token helper. You do NOT need to run "vault login"
+ again. Future Vault requests will automatically use this token.
+ ```
diff --git a/doc/legal/corporate_contributor_license_agreement.md b/doc/legal/corporate_contributor_license_agreement.md
index c8782a2cfc2..018c4b575b5 100644
--- a/doc/legal/corporate_contributor_license_agreement.md
+++ b/doc/legal/corporate_contributor_license_agreement.md
@@ -21,7 +21,7 @@ You accept and agree to the following terms and conditions for Your present and
- **Contributions:**
You represent that each of Your Contributions is Your original creation.
-
+
Should You wish to submit work that is not Your original creation, You may submit it to GitLab B.V. separately from any Contribution, identifying the complete details of its source and of any license or other restriction (including, but not limited to, related patents, trademarks, and license agreements) of which you are personally aware, and conspicuously marking the work as "Submitted on behalf of a third-party: (named here)".
You are not expected to provide support for Your Contributions, except to the extent You desire to provide support. You may provide support for free, for a fee, or not at all. Unless required by applicable law or agreed to in writing, You provide Your Contributions on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE.
diff --git a/doc/policy/maintenance.md b/doc/policy/maintenance.md
index 1739c07ccd5..441360aa812 100644
--- a/doc/policy/maintenance.md
+++ b/doc/policy/maintenance.md
@@ -127,7 +127,7 @@ one major version. For example, it is safe to:
- `9.5.5` -> `9.5.9`
- `10.6.3` -> `10.6.6`
- `11.11.1` -> `11.11.8`
- - `12.0.4` -> `12.0.9`
+ - `12.0.4` -> `12.0.12`
- Upgrade the minor version:
- `8.9.4` -> `8.12.3`
- `9.2.3` -> `9.5.5`
@@ -144,9 +144,20 @@ It's also important to ensure that any background migrations have been fully com
before upgrading to a new major version. To see the current size of the `background_migration` queue,
[Check for background migrations before upgrading](../update/README.md#checking-for-background-migrations-before-upgrading).
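
For reference, one way to inspect the queue from a Rails console is sketched below
(this assumes Sidekiq's standard queue API; the linked update documentation has the
full procedure):

```ruby
# Open a console first, for example `sudo gitlab-rails console` on Omnibus GitLab.
# A result of 0 means all background migrations have finished.
Sidekiq::Queue.new('background_migration').size
```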
-To ensure background migrations are successful, increment by one minor version during the version jump before installing newer releases.
+### Version 12 onwards: Extra step for major upgrades
+
+From version 12 onwards, an additional step is required. More significant migrations
+may occur during major release upgrades.
+
+To ensure these are successful:
+
+1. Increment to the first minor version (`x.0.x`) during the major version jump.
+1. Proceed with upgrading to a newer release.
+
+For example: `11.11.x` -> `12.0.x` -> `12.8.x`
+
+### Example upgrade paths
-For example: `11.11.x` -> `12.0.x`
Please see the table below for some examples:
| Latest stable version | Your version | Recommended upgrade path | Note |
@@ -154,7 +165,10 @@ Please see the table below for some examples:
| 9.4.5 | 8.13.4 | `8.13.4` -> `8.17.7` -> `9.4.5` | `8.17.7` is the last version in version `8` |
| 10.1.4 | 8.13.4 | `8.13.4 -> 8.17.7 -> 9.5.10 -> 10.1.4` | `8.17.7` is the last version in version `8`, `9.5.10` is the last version in version `9` |
| 11.3.4 | 8.13.4 | `8.13.4` -> `8.17.7` -> `9.5.10` -> `10.8.7` -> `11.3.4` | `8.17.7` is the last version in version `8`, `9.5.10` is the last version in version `9`, `10.8.7` is the last version in version `10` |
-| 12.5.8 | 11.3.4 | `11.3.4` -> `11.11.8` -> `12.0.9` -> `12.5.8` | `11.11.8` is the last version in version `11` |
+| 12.5.8 | 11.3.4 | `11.3.4` -> `11.11.8` -> `12.0.12` -> `12.5.8` | `11.11.8` is the last version in version `11`. `12.0.x` [is a required step](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/23211#note_272842444). |
+| 12.8.5 | 9.2.6 | `9.2.6` -> `9.5.10` -> `10.8.7` -> `11.11.8` -> `12.0.12` -> `12.8.5` | Four intermediate versions are required: the final 9.5, 10.8, 11.11 releases, plus 12.0. |
+
+## More information
More information about the release procedures can be found in our
[release documentation](https://gitlab.com/gitlab-org/release/docs). You may also want to read our
diff --git a/doc/raketasks/backup_restore.md b/doc/raketasks/backup_restore.md
index 8123b4ca7a7..54c550f999c 100644
--- a/doc/raketasks/backup_restore.md
+++ b/doc/raketasks/backup_restore.md
@@ -115,7 +115,7 @@ also running Unicorn/Puma and/or Sidekiq.
Example output:
-```
+```plaintext
Dumping database tables:
- Dumping table events... [DONE]
- Dumping table issues... [DONE]
@@ -271,6 +271,31 @@ For installations from source:
sudo -u git -H bundle exec rake gitlab:backup:create SKIP=db,uploads RAILS_ENV=production
```
+### Skipping tar creation
+
+The last part of creating a backup is generation of a `.tar` file containing
+all the parts. In some cases (for example, if the backup is picked up by other
+backup software) creating a `.tar` file might be wasted effort or even directly
+harmful, so you can skip this step by adding `tar` to the `SKIP` environment
+variable.
+
+Adding `tar` to the `SKIP` variable leaves the files and directories containing the
+backup in the directory used for the intermediate files. These files will be
+overwritten when a new backup is created, so you should make sure they are copied
+elsewhere, because you can only have one backup on the system.
+
+For Omnibus GitLab packages:
+
+```shell
+sudo gitlab-backup create SKIP=tar
+```
+
+For installations from source:
+
+```shell
+sudo -u git -H bundle exec rake gitlab:backup:create SKIP=tar RAILS_ENV=production
+```
+
### Uploading backups to a remote (cloud) storage
Starting with GitLab 7.4 you can let the backup script upload the '.tar' file it creates.
@@ -465,7 +490,7 @@ For installations from source:
Note: This option only works for remote storage. If you want to group your backups
you can pass a `DIRECTORY` environment variable:
-```
+```shell
sudo gitlab-backup create DIRECTORY=daily
sudo gitlab-backup create DIRECTORY=weekly
```
@@ -586,7 +611,7 @@ crontab -e
There, add the following line to schedule the backup for everyday at 2 AM:
-```
+```plaintext
0 2 * * * /opt/gitlab/bin/gitlab-backup create CRON=1
```
@@ -614,7 +639,7 @@ sudo -u git crontab -e # Edit the crontab for the git user
Add the following lines at the bottom:
-```
+```plaintext
# Create a full backup of the GitLab repositories and SQL database every day at 4am
0 4 * * * cd /home/git/gitlab && PATH=/usr/local/bin:/usr/bin:/bin bundle exec rake gitlab:backup:create RAILS_ENV=production CRON=1
```
@@ -658,6 +683,10 @@ lose access to your GitLab server.
You may also want to restore any TLS keys, certificates, or [SSH host keys](https://superuser.com/questions/532040/copy-ssh-keys-from-one-server-to-another-server/532079#532079).
+Starting with GitLab 12.9, if an untarred backup (like the ones made with
+`SKIP=tar`) is found and no backup is chosen with `BACKUP=<timestamp>`, the
+untarred backup is used.
+
Depending on your case, you might want to run the restore command with one or
more of the following options:
@@ -674,7 +703,7 @@ Read more on [configuring NFS mounts](../administration/high_availability/nfs.md
### Restore for installation from source
-```
+```shell
# Stop processes that are connected to the database
sudo service gitlab stop
@@ -683,7 +712,7 @@ bundle exec rake gitlab:backup:restore RAILS_ENV=production
Example output:
-```
+```plaintext
Unpacking backup... [DONE]
Restoring database tables:
-- create_table("events", {:force=>true})
@@ -853,7 +882,7 @@ will have all your repositories, but not any other data.
If you are using backup restore procedures you might encounter the following warnings:
-```
+```plaintext
psql:/var/opt/gitlab/backups/db/database.sql:22: ERROR: must be owner of extension plpgsql
psql:/var/opt/gitlab/backups/db/database.sql:2931: WARNING: no privileges could be revoked for "public" (two occurrences)
psql:/var/opt/gitlab/backups/db/database.sql:2933: WARNING: no privileges were granted for "public" (two occurrences)
@@ -1003,7 +1032,7 @@ GitLab instance after restoring the registry data.
These failures will mention permission issues in the registry logs, like:
-```
+```plaintext
level=error
msg="response completed with error"
err.code=unknown
diff --git a/doc/raketasks/cleanup.md b/doc/raketasks/cleanup.md
index 937f15554b4..bbae713676d 100644
--- a/doc/raketasks/cleanup.md
+++ b/doc/raketasks/cleanup.md
@@ -6,7 +6,7 @@ Clean up local project upload files if they don't exist in GitLab database. The
task attempts to fix the file if it can find its project, otherwise it moves the
file to a lost and found directory.
-```
+```shell
# omnibus-gitlab
sudo gitlab-rake gitlab:cleanup:project_uploads
@@ -16,8 +16,9 @@ bundle exec rake gitlab:cleanup:project_uploads RAILS_ENV=production
Example output:
-```
+```shell
$ sudo gitlab-rake gitlab:cleanup:project_uploads
+
I, [2018-07-27T12:08:27.671559 #89817] INFO -- : Looking for orphaned project uploads to clean up. Dry run...
D, [2018-07-27T12:08:28.293568 #89817] DEBUG -- : Processing batch of 500 project upload file paths, starting with /opt/gitlab/embedded/service/gitlab-rails/public/uploads/test.out
I, [2018-07-27T12:08:28.689869 #89817] INFO -- : Can move to lost and found /opt/gitlab/embedded/service/gitlab-rails/public/uploads/test.out -> /opt/gitlab/embedded/service/gitlab-rails/public/uploads/-/project-lost-found/test.out
@@ -35,7 +36,7 @@ I, [2018-07-27T12:08:33.760257 #89817] INFO -- : Did move to lost and found /op
Remove object store upload files if they don't exist in GitLab database.
-```
+```shell
# omnibus-gitlab
sudo gitlab-rake gitlab:cleanup:remote_upload_files
@@ -45,7 +46,7 @@ bundle exec rake gitlab:cleanup:remote_upload_files RAILS_ENV=production
Example output:
-```
+```shell
$ sudo gitlab-rake gitlab:cleanup:remote_upload_files
I, [2018-08-02T10:26:13.995978 #45011] INFO -- : Looking for orphaned remote uploads to remove. Dry run...
@@ -54,7 +55,7 @@ I, [2018-08-02T10:26:14.120482 #45011] INFO -- : Can be moved to lost and found
I, [2018-08-02T10:26:14.120634 #45011] INFO -- : To cleanup these files run this command with DRY_RUN=false
```
-```
+```shell
$ sudo gitlab-rake gitlab:cleanup:remote_upload_files DRY_RUN=false
I, [2018-08-02T10:26:47.598424 #45087] INFO -- : Looking for orphaned remote uploads to remove...
@@ -109,7 +110,7 @@ level with `NICENESS`. Below are the valid levels, but consult
## Remove expired ActiveSession lookup keys
-```
+```shell
# omnibus-gitlab
sudo gitlab-rake gitlab:cleanup:sessions:active_sessions_lookup_keys
diff --git a/doc/raketasks/features.md b/doc/raketasks/features.md
index c06800a2aa3..5425a0a0667 100644
--- a/doc/raketasks/features.md
+++ b/doc/raketasks/features.md
@@ -15,6 +15,6 @@ Old path: `git@example.org:myrepo.git`
New path: `git@example.org:username/myrepo.git` or `git@example.org:groupname/myrepo.git`
-```
+```shell
bundle exec rake gitlab:enable_namespaces RAILS_ENV=production
```
diff --git a/doc/raketasks/generate_sample_prometheus_data.md b/doc/raketasks/generate_sample_prometheus_data.md
index bb0ed68ec0f..78e8ef188bf 100644
--- a/doc/raketasks/generate_sample_prometheus_data.md
+++ b/doc/raketasks/generate_sample_prometheus_data.md
@@ -11,6 +11,6 @@ which loads the appropriate data set if it is present within the `sample_metrics
**Example:**
-```
+```shell
bundle exec rake gitlab:generate_sample_prometheus_data[21]
```
diff --git a/doc/raketasks/import.md b/doc/raketasks/import.md
index d50272174ab..09d919d0120 100644
--- a/doc/raketasks/import.md
+++ b/doc/raketasks/import.md
@@ -15,7 +15,7 @@
The new folder needs to have Git user ownership and read/write/execute access for Git user and its group:
-```
+```shell
sudo -u git mkdir -p /var/opt/gitlab/git-data/repository-import-<date>/new_group
```
@@ -27,7 +27,7 @@ sudo -u git mkdir -p /var/opt/gitlab/git-data/repository-import-<date>/new_group
If we copy the repos to `/var/opt/gitlab/git-data/repository-import-<date>`, and repo A needs to be under the groups G1 and G2, it will
have to be created under those folders: `/var/opt/gitlab/git-data/repository-import-<date>/G1/G2/A.git`.
-```
+```shell
sudo cp -r /old/git/foo.git /var/opt/gitlab/git-data/repository-import-<date>/new_group/
# Do this once when you are done copying git repositories
@@ -57,7 +57,7 @@ sudo -u git -H bundle exec rake gitlab:import:repos['/var/opt/gitlab/git-data/re
#### Example output
-```
+```plaintext
Processing /var/opt/gitlab/git-data/repository-import-1/a/b/c/blah.git
* Using namespace: a/b/c
* Created blah (a/b/c/blah)
@@ -98,7 +98,7 @@ To support importing bare repositories from hashed storage, GitLab 10.4 and
later stores the full project path with each repository, in a special section of
the Git repository's config file. This section is formatted as follows:
-```
+```ini
[gitlab]
fullpath = gitlab-org/gitlab
```
@@ -128,7 +128,7 @@ Until then, you may wish to manually migrate repositories yourself. You can use
[Rails console](https://docs.gitlab.com/omnibus/maintenance/#starting-a-rails-console-session)
to do so. In a Rails console session, run the following to migrate a project:
-```
+```ruby
project = Project.find_by_full_path('gitlab-org/gitlab')
project.write_repository_config
```
@@ -136,7 +136,7 @@ project.write_repository_config
In a Rails console session, run the following to migrate all of a namespace's
projects (this may take a while if there are 1000s of projects in a namespace):
-```
+```ruby
namespace = Namespace.find_by_full_path('gitlab-org')
namespace.send(:write_projects_repository_config)
```
diff --git a/doc/raketasks/list_repos.md b/doc/raketasks/list_repos.md
index 21de27c5249..10e6cb04bfa 100644
--- a/doc/raketasks/list_repos.md
+++ b/doc/raketasks/list_repos.md
@@ -3,7 +3,7 @@
You can print a list of all Git repositories on disk managed by
GitLab with the following command:
-```
+```shell
# Omnibus
sudo gitlab-rake gitlab:list_repos
@@ -17,7 +17,7 @@ a date with the 'SINCE' environment variable. The time you specify
is parsed by the Rails [TimeZone#parse
function](https://api.rubyonrails.org/classes/ActiveSupport/TimeZone.html#method-i-parse).
-```
+```shell
# Omnibus
sudo gitlab-rake gitlab:list_repos SINCE='Sep 1 2015'
diff --git a/doc/raketasks/user_management.md b/doc/raketasks/user_management.md
index 04f8ad59153..637f3604d98 100644
--- a/doc/raketasks/user_management.md
+++ b/doc/raketasks/user_management.md
@@ -91,7 +91,7 @@ production:
Next, generate a new secret:
-```
+```shell
# omnibus-gitlab
sudo gitlab-rake secret
@@ -102,7 +102,7 @@ bundle exec rake secret RAILS_ENV=production
Now you need to stop the GitLab server, back up the existing secrets file and
update the database:
-```
+```shell
# omnibus-gitlab
sudo gitlab-ctl stop
sudo cp config/secrets.yml config/secrets.yml.bak
@@ -122,7 +122,7 @@ error.
Finally, change `config/secrets.yml` to set `otp_key_base` to `<new key>` and
restart. Again, make sure you're operating in the **production** section.
-```
+```shell
# omnibus-gitlab
sudo gitlab-ctl start
@@ -133,7 +133,7 @@ sudo /etc/init.d/gitlab start
If there are any problems (perhaps using the wrong value for `old_key`), you can
restore your backup of `config/secrets.yml` and rollback the changes:
-```
+```shell
# omnibus-gitlab
sudo gitlab-ctl stop
sudo gitlab-rake gitlab:two_factor:rotate_key:rollback filename=backup.csv
diff --git a/doc/security/crime_vulnerability.md b/doc/security/crime_vulnerability.md
index 77592f1b440..23ee60ab930 100644
--- a/doc/security/crime_vulnerability.md
+++ b/doc/security/crime_vulnerability.md
@@ -33,7 +33,7 @@ Although SPDY is enabled in Omnibus installations, CRIME relies on compression
The Nessus scanner, [reports a possible CRIME vulnerability][nessus] in GitLab
similar to the following format:
-```
+```plaintext
Description
This remote service has one of two configurations that are known to be required for the CRIME attack:
diff --git a/doc/security/rack_attack.md b/doc/security/rack_attack.md
index bb51fc38e61..c5bbd0db035 100644
--- a/doc/security/rack_attack.md
+++ b/doc/security/rack_attack.md
@@ -60,7 +60,7 @@ default['gitlab']['gitlab-rails']['rack_attack_protected_paths'] = [
This header is included in responses to blocked requests:
-```
+```plaintext
Retry-After: 60
```
@@ -109,7 +109,7 @@ No response headers are provided.
1. Reconfigure GitLab:
- ```
+ ```shell
sudo gitlab-ctl reconfigure
```
@@ -202,7 +202,7 @@ the load balancer. In that case, you will need to:
1. Whitelist the load balancer's IP address(es) in the Rack Attack [settings](#settings); see the sketch after this list.
1. Reconfigure GitLab:
- ```
+ ```shell
sudo gitlab-ctl reconfigure
```
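
A sketch of what such a whitelist entry in `/etc/gitlab/gitlab.rb` might look like,
assuming the load balancer's address is `10.0.0.5` (the key names are those used in
the Rack Attack settings section above; adjust the other values to your needs):

```ruby
gitlab_rails['rack_attack_git_basic_auth'] = {
  'enabled' => true,
  # Requests from whitelisted IPs are never throttled or banned.
  'ip_whitelist' => ["127.0.0.1", "10.0.0.5"],
  'maxretry' => 10,
  'findtime' => 60,
  'bantime' => 3600
}
```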
diff --git a/doc/security/unlock_user.md b/doc/security/unlock_user.md
index fdf7ba90a3b..befb5d12877 100644
--- a/doc/security/unlock_user.md
+++ b/doc/security/unlock_user.md
@@ -4,7 +4,7 @@ type: howto
# How to unlock a locked user from the command line
-After six failed login attempts a user gets in a locked state.
+After ten failed login attempts, a user is placed in a locked state.
To unlock a locked user:
@@ -13,7 +13,7 @@ To unlock a locked user:
```shell
## For Omnibus GitLab
- sudo gitlab-rails console production
+ sudo gitlab-rails console -e production
## For installations from source
sudo -u git -H bundle exec rails console RAILS_ENV=production
diff --git a/doc/security/webhooks.md b/doc/security/webhooks.md
index 010f5aa2d43..bd05cbff05e 100644
--- a/doc/security/webhooks.md
+++ b/doc/security/webhooks.md
@@ -4,6 +4,9 @@ type: concepts, reference, howto
# Webhooks and insecure internal web services
+NOTE: **Note:**
+On GitLab.com the [maximum number of webhooks](../user/gitlab_com/index.md#maximum-number-of-webhooks) per project is limited.
+
If you have non-GitLab web services running on your GitLab server or within its
local network, these may be vulnerable to exploitation via Webhooks.
@@ -68,16 +71,24 @@ use IDNA encoding.
The whitelist can hold a maximum of 1000 entries. Each entry can be a maximum of
255 characters.
+You can whitelist a particular port by specifying it in the whitelist entry.
+For example, `127.0.0.1:8080` will only allow connections to port 8080 on `127.0.0.1`.
+If no port is mentioned, all ports on that IP/domain are whitelisted. An IP range
+will whitelist all ports on all IPs in that range.
+
Example:
```text
example.com;gitlab.example.com
127.0.0.1,1:0:0:0:0:0:0:1
127.0.0.0/8 1:0:0:0:0:0:0:0/124
+[1:0:0:0:0:0:0:1]:8080
+127.0.0.1:8080
+example.com:8080
```
NOTE: **Note:**
-Wildcards (`*.example.com`) and ports (`127.0.0.1:3000`) are not currently supported.
+Wildcards (`*.example.com`) are not currently supported.
<!-- ## Troubleshooting
diff --git a/doc/ssh/README.md b/doc/ssh/README.md
index 07abfa4cdac..c08e83677ea 100644
--- a/doc/ssh/README.md
+++ b/doc/ssh/README.md
@@ -124,7 +124,7 @@ To create a new SSH key pair:
If, in any case, you want to add or change the password of your SSH key pair,
you can use the `-p` flag:
- ```
+ ```shell
ssh-keygen -p -f <keyname>
```
@@ -186,8 +186,11 @@ Now, it's time to add the newly created public key to your GitLab account.
1. Navigating to **SSH Keys** and pasting your **public** key from the clipboard into the **Key** field. If you:
- Created the key with a comment, this will appear in the **Title** field.
- Created the key without a comment, give your key an identifiable title like _Work Laptop_ or _Home Workstation_.
+   1. Choose an (optional) expiry date for the key under the "Expires at" section. (Introduced in [GitLab 12.9](https://gitlab.com/gitlab-org/gitlab/-/issues/36243))
1. Click the **Add key** button.
+SSH keys that have "expired" using this procedure will still be valid in GitLab workflows. As the GitLab-configured expiration date is not included in the SSH key itself, you can still export public SSH keys as needed.
+
NOTE: **Note:**
If you manually copied your public SSH key make sure you copied the entire
key starting with `ssh-ed25519` (or `ssh-rsa`) and ending with your email.
@@ -206,7 +209,7 @@ authenticity of the GitLab host you are connecting to.
For example, when connecting to GitLab.com, answer `yes` to add GitLab.com to
the list of trusted hosts:
-```
+```plaintext
The authenticity of host 'gitlab.com (35.231.145.151)' can't be established.
ECDSA key fingerprint is SHA256:HbW3g8zUjNSksFbqTiUWPWg2Bq1x8xdGUrliXFzSnUw.
Are you sure you want to continue connecting (yes/no)? yes
@@ -334,25 +337,32 @@ git remote set-url origin git@<user_1.gitlab.com>:gitlab-org/gitlab.git
## Deploy keys
-### Per-repository deploy keys
-
Deploy keys allow read-only or read-write (if enabled) access to one or
-multiple projects with a single SSH key pair.
+multiple repositories with a single SSH key pair.
-This is really useful for cloning repositories to your Continuous
+This is useful for cloning repositories to your Continuous
Integration (CI) server. By using deploy keys, you don't have to set up a
dummy user account.
-If you are a project maintainer or owner, you can add a deploy key in the
-project's **Settings > Repository** page by expanding the
-**Deploy Keys** section. Specify a title for the new
-deploy key and paste a public SSH key. After this, the machine that uses
-the corresponding private SSH key has read-only or read-write (if enabled)
-access to the project.
+If you don't have a key pair, you might want to use a
+[deploy token](../user/project/deploy_tokens/index.md#deploy-tokens) instead.
+
+### Per-repository deploy keys
+
+Project maintainers and owners can add a deploy key for a repository.
+
+1. Navigate to the project's **Settings** page, then:
+ - On GitLab 12.8 and earlier, click **Repository**.
+ - On GitLab 12.9 and later, click **CI / CD**.
+1. Expand the **Deploy Keys** section.
+1. Specify a title for the new deploy key and paste a public SSH key.
+
+After this, the machine that uses the corresponding private SSH key has read-only or
+read-write (if enabled) access to the project.
You can't add the same deploy key twice using the form.
If you want to add the same key to another project, please enable it in the
-list that says 'Deploy keys from projects available to you'. All the deploy
+list that says **Deploy keys from projects available to you**. All the deploy
keys of all the projects you have access to are available. This project
access can happen through being a direct member of the project, or through
a group.
@@ -362,10 +372,10 @@ project.
### Global shared deploy keys
-Global Shared Deploy keys allow read-only or read-write (if enabled) access to
-be configured on any repository in the entire GitLab installation.
+Global Shared Deploy keys allow read-only or read-write access to
+any repository in the entire GitLab installation.
-This is really useful for integrating repositories to secured, shared Continuous
+This is useful for integrating repositories to secured, shared Continuous
Integration (CI) services or other shared services.
GitLab administrators can set up the Global Shared Deploy key in GitLab and
add the private key to any shared systems. Individual repositories opt into
@@ -387,9 +397,14 @@ of broader usage for something like "Anywhere you need to give read access to
your repository".
Once a GitLab administrator adds the Global Deployment key, project maintainers
-and owners can add it in project's **Settings > Repository** page by expanding the
-**Deploy Keys** section and clicking **Enable** next to the appropriate key listed
-under **Public deploy keys available to any project**.
+and owners can add it by:
+
+1. Navigating to the settings page:
+ - On GitLab 12.8 and earlier, the project's **Settings > Repository** page.
+ - On GitLab 12.9 and later, the project's **Settings > CI / CD** page.
+1. Expanding the **Deploy Keys** section.
+1. Clicking **Enable** next to the appropriate key listed under
+ **Public deploy keys available to any project**.
NOTE: **Note:**
The heading **Public deploy keys available to any project** only appears
@@ -423,8 +438,9 @@ security risks.
The GitLab check process includes a check for this condition, and will direct you
to this section if your server is configured like this, e.g.:
-```
+```shell
$ gitlab-rake gitlab:check
+
# ...
Git user has default SSH configuration? ... no
Try fixing it:
diff --git a/doc/subscriptions/index.md b/doc/subscriptions/index.md
index 1c541c77737..925f162d0fe 100644
--- a/doc/subscriptions/index.md
+++ b/doc/subscriptions/index.md
@@ -2,70 +2,82 @@
type: index, reference
---
-# Customer Docs
+# GitLab subscription
-This section contains information for:
+GitLab offers tiers of features. Your subscription determines which tier you have access to. Subscriptions are valid for 12 months.
-- New customers about choosing [which GitLab](#which-gitlab) is right for you.
-- Existing customers about [managing subscriptions](#managing-subscriptions).
+GitLab provides special subscriptions to participants in the [GitLab Education Program](https://about.gitlab.com/solutions/education/) and [GitLab Open Source Program](https://about.gitlab.com/solutions/open-source/). For details on obtaining and renewing these subscriptions, see:
-Also see our [subscription FAQ](https://about.gitlab.com/pricing/licensing-faq/).
+- [GitLab Education Program subscriptions](#gitlab-education-program-subscriptions)
+- [GitLab Open Source Program subscriptions](#gitlab-open-source-program-subscriptions)
-## Which GitLab?
+## Choosing a GitLab subscription
-There are two ways to use GitLab:
+When choosing a subscription, consider the following factors:
-- [GitLab.com](#gitlabcom): GitLab's SaaS offering. You don't need to install
- anything to use GitLab.com, you only need to
- [sign up](https://gitlab.com/users/sign_in) and start using GitLab straight away.
-- [GitLab self-managed](#gitlab-self-managed): Install, administer, and maintain
- your own GitLab instance.
+- [GitLab tier](#choosing-a-gitlab-tier)
+- [GitLab.com or self-managed](#choosing-between-gitlabcom-or-self-managed)
+- [Group or personal subscription (GitLab.com only)](#choosing-a-gitlabcom-group-or-personal-subscription)
+- [Number of users](#choosing-the-number-of-users)
-The following sections outline tiers and features within GitLab.com
-and GitLab self-managed.
+### Choosing a GitLab tier
-### GitLab.com
+Pricing is [tier-based](https://about.gitlab.com/pricing/), allowing you to choose the features which fit your budget. See the [feature comparison](https://about.gitlab.com/pricing/gitlab-com/feature-comparison/) for information on what features are available at each tier.
-GitLab.com is hosted, managed, and administered by GitLab, Inc., with
-[free and paid subscriptions](https://about.gitlab.com/pricing/) for individuals
-and teams in the following tiers:
+### Choosing between GitLab.com or self-managed
-| Tier | Includes same features available in |
-|:-------|:----------------------------------------------------|
-| Free | [Core](#gitlab-self-managed) self-managed tier. |
-| Bronze | [Starter](#gitlab-self-managed) self-managed tier. |
-| Silver | [Premium](#gitlab-self-managed) self-managed tier. |
-| Gold | [Ultimate](#gitlab-self-managed) self-managed tier. |
+There are some differences in how a subscription applies, depending if you use GitLab.com or a self-managed instance.
-GitLab.com subscriptions grant access
-to the same features available in GitLab self-managed, **except
-[administration](../administration/index.md) tools and settings**.
+- [GitLab.com](#gitlabcom): GitLab's software-as-a-service offering. You don't need to install anything to use GitLab.com; you only need to [sign up](https://gitlab.com/users/sign_in) and start using GitLab straight away.
+- [GitLab self-managed](#self-managed): Install, administer, and maintain your own GitLab instance.
-GitLab.com allows you to apply your subscription to a group or your personal user.
+On a self-managed instance, a GitLab subscription provides the same set of features for all users. On GitLab.com you can apply a subscription to either a group or a personal namespace.
+
+### Choosing a GitLab.com group or personal subscription
+
+On GitLab.com you can apply a subscription to either a group or a personal namespace.
When applied to:
- A **group**, the group, all subgroups, and all projects under the selected
- group on GitLab.com will have the features of the associated plan. It is
- recommended to go with a group plan when managing projects and users of an
- organization.
+ group on GitLab.com will have the features of the associated tier. GitLab recommends
+ choosing a group plan when managing an organization's projects and users.
- A **personal userspace** instead, all projects will have features with the
- subscription applied, but as it is not a group, group features will not be available.
+ subscription applied, but as it's not a group, group features won't be available.
+
+### Choosing the number of users
+
+There are some differences in who is counted in a subscription, depending on whether you use GitLab.com or a self-managed instance.
+
+#### GitLab.com
+
+A GitLab.com subscription uses a concurrent (_seat_) model. You pay for a subscription according to the maximum number of users enabled at once. You can add and remove users during the subscription period, as long as the total number of users at any given time is within your subscription count.
+
+Every occupied seat, whether by person, job, or bot, is counted in the subscription, with the following exception:
+
+- Members with Guest permissions on a Gold subscription.
TIP: **Tip:**
To support the open source community and encourage the development of open
source projects, GitLab grants access to **Gold** features for all GitLab.com
**public** projects, regardless of the subscription.
-The following resources are available for more information on GitLab.com:
+#### Self-managed
+
+A self-managed subscription uses a hybrid model. You pay for a subscription according to the maximum number of users enabled during the subscription period. For instances that aren't air-gapped or on a closed network, the maximum number of simultaneous users in the self-managed installation is checked each quarter, using [Seat Link](#seat-link).
-- [Feature comparison](https://about.gitlab.com/pricing/gitlab-com/feature-comparison/), for information on what features are available at each tier.
-- [GitLab pricing page](https://about.gitlab.com/pricing/), for subscription information and a free trial.
-- Our [product marketing page](https://about.gitlab.com/handbook/marketing/product-marketing/), for additional information including:
- - How [different tiers are licensed](https://about.gitlab.com/handbook/marketing/product-marketing/#tiers).
- - The different [GitLab distributions](https://about.gitlab.com/handbook/marketing/product-marketing/#distributions).
+Every occupied seat, whether by person, job, or bot, is counted in the subscription, with the following exceptions:
-#### Subscribing to GitLab.com
+- Users who are blocked prior to the renewal of a subscription won't be counted as active users for the renewal subscription. They may count as active users in the subscription period in which they were originally added.
+- Members with Guest permissions on an Ultimate subscription.
+- GitLab-created service accounts: `Ghost User` and `Support Bot`.
+
+NOTE: **Note:**
+If you have LDAP integration enabled, anyone in the configured domain can sign up for a GitLab account. This can result in an unexpected bill at the time of renewal. Consider [disabling new signups](../user/admin_area/settings/sign_up_restrictions.md) and managing new users manually instead.
+
+## Obtain a GitLab subscription
+
+### Subscribe to GitLab.com
To subscribe to GitLab.com:
@@ -78,10 +90,10 @@ To subscribe to GitLab.com:
1. Create additional users and
[add them to the group](../user/group/index.md#add-users-to-a-group).
1. Select the **Bronze**, **Silver**, or **Gold** GitLab.com plan through the
- [GitLab Subscription Manager](https://customers.gitlab.com/).
-1. Link your GitLab.com account with your GitLab Subscription Manager account.
- Once signed into the GitLab Subscription Manager, if your account is not
- already linked, you will prompted to link your account with a
+ [Customers Portal](https://customers.gitlab.com/).
+1. Link your GitLab.com account with your Customers Portal account.
+ Once signed into the Customers Portal, if your account is not
+ already linked, you will be prompted to link your account with a
**Link my GitLab Account** button.
1. Associate the group with the subscription.
@@ -89,112 +101,60 @@ TIP: **Tip:**
You can also go to the [**My Account**](https://customers.gitlab.com/customers/edit)
page to add or change the GitLab.com account link.
-### GitLab self-managed
-
-With GitLab self-managed, you deploy your own GitLab instance on-premises or on a cloud of your choice.
-GitLab self-managed is available for [free and with paid subscriptions](https://about.gitlab.com/pricing/#self-managed) in the following tiers:
-
-| Tier | Includes |
-|:---------|:-----------------------------------------------|
-| Core | Core features. |
-| Starter | Core and Starter features. |
-| Premium | Core, Starter, and Premium features. |
-| Ultimate | Core, Starter, Premium, and Ultimate features. |
-
-The following resources are available for more information on GitLab self-managed:
-
-- [Feature comparison](https://about.gitlab.com/pricing/self-managed/feature-comparison/), for information on what features are available at each tier.
-- [GitLab pricing page](https://about.gitlab.com/pricing/#self-managed), for subscription information and a free trial.
-- Our [product marketing page](https://about.gitlab.com/handbook/marketing/product-marketing/), for additional information including:
- - How [different tiers are licensed](https://about.gitlab.com/handbook/marketing/product-marketing/#tiers).
- - The different [GitLab distributions](https://about.gitlab.com/handbook/marketing/product-marketing/#distributions).
-
-#### Subscribing through GitLab self-managed
+### Subscribe through GitLab self-managed
To subscribe to GitLab through a self-managed installation:
-1. [Install](https://about.gitlab.com/install/) GitLab.
-1. Complete the installation with
- [administration tasks](../administration/index.md).
-1. Select the **Starter**, **Premium**, or **Ultimate** self-managed plan
- through the [GitLab Subscription Manager](https://customers.gitlab.com/).
-1. Apply your license file. After purchase, a license file is sent to the email
- address associated to the GitLab Subscription Manager account,
- which needs to be
- [uploaded to your GitLab instance](../user/admin_area/license.md#uploading-your-license).
+1. Go to the [Customers Portal](https://customers.gitlab.com/) and purchase a **Starter**, **Premium**, or **Ultimate** self-managed plan.
+1. After purchase, a license file is sent to the email address associated with the Customers Portal account,
+ which must be [uploaded to your GitLab instance](../user/admin_area/license.md#uploading-your-license).
TIP: **Tip:**
-If you are purchasing a subscription for an existing **Core** self-managed
-instance, ensure you are purchasing enough seats to
+If you're purchasing a subscription for an existing **Core** self-managed
+instance, ensure you're purchasing enough seats to
[cover your users](../user/admin_area/index.md#administering-users).
-## Managing subscriptions
-
-You can view and manage subscriptions through our
-[GitLab Subscription Manager](https://customers.gitlab.com/).
-
-### View subscription and seats
-
-Visit the
-[GitLab Subscription Manager](https://customers.gitlab.com/subscriptions) to
-view and manage:
-
-- The subscriptions you have purchased.
-- The number of seats associated with the subscription.
-- Retrieve copies of invoices.
-- Change the credit card on file.
+## Manage your GitLab account
-For more information, please see our:
+With the [Customers Portal](https://customers.gitlab.com/) you can:
-- [Subscription FAQ](https://about.gitlab.com/pricing/licensing-faq/).
-- [Pricing page](https://about.gitlab.com/pricing/), which includes information
- on our [true-up pricing policy](https://about.gitlab.com/handbook/ceo/pricing/#true-up-pricing)
- when adding more users other than at the time of purchase.
-
-NOTE: **Note:**
-The GitLab Subscription Manager account can have the same email address as your
-GitLab.com account, but is a _separate_ login. If the two accounts are
-linked together, you can use the **Or sign in with GitLab.com**
-link underneath the **Sign In** button.
+- [Change billing information](#change-billing-information)
+- [Change the linked account](#change-the-linked-account)
+- [Change the associated namespace](#change-the-associated-namespace)
### Change billing information
To change billing information:
-1. Log in to [GitLab Subscription Manager](https://customers.gitlab.com/customers/sign_in).
+1. Log in to the [Customers Portal](https://customers.gitlab.com/customers/sign_in).
1. Go to the **My Account** page.
1. Make the required changes to the **Account Details** information.
1. Click **Update Account**.
NOTE: **Note:**
Future purchases will use the information in this section.
-The email listed in this section is used for the GitLab Subscription Manager
+The email listed in this section is used for the Customers Portal
login and for license-related email communication.
-### Manage GitLab.com account
-
-This section provided information specific to managing subscriptions with a
-GitLab.com account.
-
-#### Change linked account
+### Change the linked account
-To change the GitLab.com account associated with a GitLab Subscription Manager
+To change the GitLab.com account associated with a Customers Portal
account:
1. Log in to the
- [GitLab Subscription Manager](https://customers.gitlab.com/customers/sign_in).
+ [Customers Portal](https://customers.gitlab.com/customers/sign_in).
1. Go to [GitLab.com](https://gitlab.com) in a separate browser tab. Ensure you
are not logged in.
-1. On the GitLab Subscription Manager page, click
+1. On the Customers Portal page, click
[**My Account**](https://customers.gitlab.com/customers/edit) in the top menu.
1. Under **Your GitLab.com account**, click **Change linked account** button.
-1. Log in to [GitLab.com](https://gitlab.com) account to link to.
+1. Log in to the [GitLab.com](https://gitlab.com) account you want to link to the Customers Portal.
-#### Change associated namespace
+### Change the associated namespace
With a linked GitLab.com account:
-1. Log in to the [GitLab Subscription Manager](https://customers.gitlab.com/customers/sign_in).
+1. Log in to the [Customers Portal](https://customers.gitlab.com/customers/sign_in).
1. Navigate to the **Manage Purchases** page.
1. Click **Change linked group**.
1. Select the desired group from the **This subscription is for** dropdown.
@@ -202,15 +162,15 @@ With a linked GitLab.com account:
Subscription charges are calculated based on the total number of users in a group, including its subgroups and nested projects. If the total number of users exceeds the number of seats in your subscription, you will be charged for the additional users.
-### Confirm or upgrade your subscription
+## View your subscription
-To see the status of your GitLab.com subscription, you can click on the
-**Billings** section of the relevant namespace:
+To see the status of your GitLab.com subscription, log in to GitLab.com and go to the **Billing** section of the relevant namespace:
- For individuals:
1. Go to **User Avatar > Settings**.
1. Click **Billing**.
-- For groups, go to the group's **Settings** dropdown, under **Billing**.
+- For groups:
+ 1. From the group page (*not* from a project within the group), go to **Settings > Billing**.
The following table describes details of your subscription for groups:
@@ -219,166 +179,258 @@ The following table describes details of your subscription for groups:
| Seats in subscription | If this is a paid plan, represents the number of seats you've paid to support in your group. |
| Seats currently in use | Number of active seats currently in use. |
| Max seats used | Highest number of seats you've used. If this exceeds the seats in subscription, you may owe an additional fee for the additional users. |
-| Seats owed | If your max seats used exceeds the seats in your subscription, you'll owe an additional fee for the users you've added. |
+| Seats owed | If your maximum seats used exceeds the seats in your subscription, you'll owe an additional fee for the users you've added. |
| Subscription start date | Date your subscription started. If this is for a Free plan, is the date you transitioned off your group's paid plan. |
| Subscription end date | Date your current subscription will end. Does not apply to Free plans. |
-#### CI pipeline minutes
+## Renew your subscription
-CI pipeline minutes are the execution time for your [pipelines](../ci/pipelines.md) on GitLab's shared runners. Each [GitLab.com tier](https://about.gitlab.com/pricing/) includes a monthly quota of CI pipeline minutes.
+To renew your subscription, [prepare for renewal by reviewing your account](#prepare-for-renewal-by-reviewing-your-account), then do one of the following:
-Quotas apply to:
-
-- Groups, where the minutes are shared across all members of the group, its subgroups, and nested projects. To view the group's usage, navigate to the group's page, then **Settings > Usage Quotas**.
+- [Renew a GitLab.com subscription](#renew-or-change-a-gitlabcom-subscription).
+- [Renew a self-managed subscription](#renew-a-self-managed-subscription).
-- Your personal account, where the minutes are available for your personal projects. To view and buy personal minutes, click your avatar, then **Settings > Pipeline quota**.
-
-Only pipeline minutes for GitLab shared runners are restricted. If you have a specific runner set up for your projects, there is no limit to your build time on GitLab.com.
+### Prepare for renewal by reviewing your account
-The minutes limit does not apply to public projects.
+The [Customers Portal](https://customers.gitlab.com/customers/sign_in) is your tool for renewing and modifying your subscription. Before going ahead with renewal, log in and verify or update:
-The available quota is reset on the first of each calendar month at midnight UTC.
+- The invoice contact details on the **My Account** page.
+- The credit card on file in the **Payment Methods** page.
-If you reach your limit, you can [purchase additional CI minutes](#extra-shared-runners-pipeline-minutes), or upgrade your account to [Silver or Gold](https://about.gitlab.com/pricing/). Your own runners can still be used even if you reach your limits.
+TIP: **Tip:**
+Contact our [support team](https://support.gitlab.com/hc/en-us/requests/new?ticket_form_id=360000071293) if you need assistance accessing the Customers Portal or if you need to change the contact person who manages your subscription.
+
+It's important to regularly review your user accounts, because:
+
+- A GitLab subscription is based on the number of users. You will pay more than you should if you renew for too many users, while the renewal will fail if you attempt to renew a subscription for too few users.
+- Stale user accounts can be a security risk. A regular review helps reduce this risk.
+
+#### Users over License
+
+A GitLab subscription is valid for a specific number of users. For details, see [Choose the number of users](#choosing-the-number-of-users). If the active user count exceeds the number included in the subscription, known as the number of _users over license_, you must pay for the excess number of users either before renewal, or at the time of renewal. This is also known as the _true-up_ process.
+
+##### Purchase additional seats for GitLab.com
+
+There is no self-service option for purchasing additional seats. You must request a quotation from GitLab Sales. To do so, contact GitLab via our [support form](https://support.gitlab.com/hc/en-us/requests/new) and select **Licensing and Renewals Problems** from the menu.
+
+The amount charged per seat is calculated by one of the following methods:
+
+- If paid before renewal, the amount per seat is calculated on a prorated basis. For example, if the user was added 3 months before the end of the subscription period, the amount owing is calculated as: (3 / 12) x annual fee (see the sketch after this list).
+- If paid on renewal, the amount per seat is the standard annual fee.
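+
+For illustration, here's a minimal sketch of the prorated case using hypothetical figures (the Customers Portal performs the actual calculation):
+
+```ruby
+# Hypothetical figures, for illustration only.
+annual_fee_per_seat = 228.0 # example list price per seat, per year
+months_remaining = 3        # the user was added 3 months before renewal
+
+prorated_charge = (months_remaining / 12.0) * annual_fee_per_seat
+puts prorated_charge      # => 57.0, per seat, if paid before renewal
+puts annual_fee_per_seat  # => 228.0, per seat, if paid at renewal
+```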
+
+##### Purchase additional users for self-managed
+
+Self-managed instances can add users to a subscription any time during the subscription period. The cost of additional users added during the subscription period is prorated from the date of purchase through the end of the subscription period.
+
+To add users to a subscription:
+
+1. Log in to the [Customers Portal](https://customers.gitlab.com/).
+1. Select **Manage Purchases**.
+1. Select **Add more seats**.
+1. Enter the number of additional users.
+1. Select **Proceed to checkout**.
+1. Review the **Subscription Upgrade Detail**. The system lists the total price for all users on the system and a credit for what you've already paid. You will only be charged for the net change.
+1. Select **Confirm Upgrade**.
+
+The following will be emailed to you:
+
+- A payment receipt. You can also access this information in the Customers Portal under **Payment History**.
+- A new license. [Upload this license](../user/admin_area/license.md#uploading-your-license) to your instance to use it.
+
+### Seat Link
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/208832) in [GitLab Starter](https://about.gitlab.com/pricing) 12.9.
+
+Seat Link allows us to provide our self-managed customers with prorated charges for user growth throughout the year using a quarterly reconciliation process.
+
+Seat Link sends GitLab a daily count of all users in connected self-managed instances. That information is used to automate prorated reconciliations. The data is sent securely through an encrypted HTTPS connection.
+
+Seat Link is mandatory because we need the user count data to enable prorated billing. Seat Link provides **only** the following information to GitLab:
+
+- Date
+- Historical maximum user count
+- License key
+
+Here is an example of the POST request:
+
+```plaintext
+{
+ date: '2020-01-29',
+ license_key: 'ZXlKa1lYUmhJam9pWm5WNmVsTjVZekZ2YTJoV2NucDBh
+RXRxTTA5amQxcG1VMVZqDQpXR3RwZEc5SGIyMVhibmxuZDJ0NWFrNXJTVzVH
+UzFCT1hHNVRiVFIyT0ZaUFlVSm1OV1ZGV0VObE1uVk4NCk4xY3ZkM1F4Y2to
+MFFuVklXSFJvUWpSM01VdE9SVE5rYkVjclZrdDJORkpOTlhka01qaE5aalpj
+YmxSMg0KWVd3MFNFTldTRmRtV1ZGSGRDOUhPR05oUVZvNUsxVnRXRUZIZFU1
+U1VqUm5aVFZGZUdwTWIxbDFZV1EyDQphV1JTY1V4c1ZYSjNPVGhrYVZ4dVlu
+TkpWMHRJZUU5dmF6ZEJRVVkxTlVWdFUwMTNSMGRHWm5SNlJFcFYNClQyVkJl
+VXc0UzA0NWFFb3ZlSFJrZW0xbVRqUlZabkZ4U1hWcWNXRnZYRzVaTm5GSmVW
+UnJVR1JQYTJKdA0KU0ZZclRHTmFPRTVhZEVKMUt6UjRkSE15WkRCT1UyNWlS
+MGRJZDFCdmRFWk5Za2h4Tm5sT1VsSktlVlYyDQpXRmhjYmxSeU4wRnRNMU5q
+THpCVWFGTmpTMnh3UWpOWVkyc3pkbXBST1dnelZHY3hUV3hxVDIwdlZYRlQN
+Ck9EWTJSVWx4WlVOT01EQXhVRlZ3ZGs1Rk0xeHVSVEJTTDFkMWJUQTVhV1ZK
+WjBORFdWUktaRXNyVnpsTw0KTldkWWQwWTNZa05VWlZBMmRUVk9kVUpxT1hV
+Mk5VdDFTUzk0TUU5V05XbFJhWGh0WEc1cVkyWnhaeTlXDQpTMEpyZWt0cmVY
+bzBOVGhFVG1oU1oxSm5WRFprY0Uwck0wZEdhVUpEV1d4a1RXZFRjVU5tYTB0
+a2RteEQNCmNWTlFSbFpuWlZWY2JpdFVVbXhIV0d4MFRuUnRWbkJKTkhwSFJt
+TnRaMGsyV0U1MFFUUXJWMUJVTWtOSA0KTVhKUWVGTkxPVTkzV1VsMlVUUldk
+R3hNTWswNU1USlNjRnh1U1UxTGJTdHRRM1l5YTFWaWJtSlBTMkUxDQplRkpL
+SzJSckszaG1hVXB1ZVRWT1UwdHZXV0ZOVG1WamMyVjRPV0pSUlZkUU9UUnpU
+VWh2Wlc5cFhHNUgNClNtRkdVMDUyY1RGMWNGTnhVbU5JUkZkeGVWcHVRMnBh
+VTBSUGR6VnRNVGhvWTFBM00zVkZlVzFOU0djMA0KY1ZFM1FWSlplSFZ5UzFS
+aGIxTmNia3BSUFQxY2JpSXNJbxRsZVNJNkltZFhiVzFGVkRZNWNFWndiV2Rt
+DQpNWEIyY21SbFFrdFNZamxaYURCdVVHcHhiRlV3Tm1WQ2JGSlFaSFJ3Y0Rs
+cFMybGhSMnRPTkZOMWNVNU0NClVGeHVTa3N6TUUxcldVOTVWREl6WVVWdk5U
+ZGhWM1ZvVjJkSFRtZFBZVXRJTkVGcE55dE1NRE5dWnpWeQ0KWlV0aWJsVk9T
+RmRzVVROUGRHVXdWR3hEWEc1MWjWaEtRMGQ2YTAxWFpUZHJURTVET0doV00w
+ODRWM0V2DQphV2M1YWs5cWFFWk9aR3BYTm1aVmJXNUNaazlXVUVRMWRrMXpj
+bTFDV0V4dldtRmNibFpTTWpWU05VeFMNClEwTjRNMWxWCUtSVGEzTTJaV2xE
+V0hKTFRGQmpURXRsZFVaQlNtRnJTbkpPZGtKdlUyUmlNVWxNWWpKaQ0KT0dw
+c05YbE1kVnh1YzFWbk5VZDFhbU56ZUM5Tk16TXZUakZOVW05cVpsVTNObEo0
+TjJ4eVlVUkdkWEJtDQpkSHByYWpreVJrcG9UVlo0Y0hKSU9URndiV2RzVFdO
+VlhHNXRhVmszTkV0SVEzcEpNMWRyZEVoRU4ydHINCmRIRnFRVTlCVUVVM1pV
+SlRORE4xUjFaYVJGb3JlWGM5UFZ4dUlpd2lhWFlpt2lKV00yRnNVbk5RTjJk
+Sg0KU1hNMGExaE9SVGR2V2pKQlBUMWNiaUo5DQo=',
+ max_historical_user_count: 10
+}
+```
-##### How pipeline quota usage is calculated
+For air-gapped or closed network customers, the existing [true-up model](#users-over-license) will be used. Prorated charges are not possible without user count data.
-Pipeline quota usage is calculated as the sum of the duration of each individual job. This is slightly different to how pipeline _duration_ is [calculated](https://docs.gitlab.com/ee/ci/pipelines.html#how-pipeline-duration-is-calculated). Pipeline quota usage doesn't consider the intersection of jobs.
+### Renew or change a GitLab.com subscription
-A simple example is:
+To renew for more users than are currently active in your GitLab.com system, contact our sales team via `renewals@gitlab.com` for assistance as this can't be done in the Customers Portal.
-A (1, 3)
-B (2, 4)
-C (6, 7)
+To change the [GitLab tier](https://about.gitlab.com/pricing/), select **Upgrade** under your subscription on the [My Account](https://customers.gitlab.com/subscriptions) page.
-In the example:
+#### Automatic renewal
-A begins at 1 and ends at 3.
-B begins at 2 and ends at 4.
-C begins at 6 and ends at 7.
-Visually, it can be viewed as:
+To view or change automatic subscription renewal (at the same tier as the previous period), log in to the [Customers Portal](https://customers.gitlab.com/customers/sign_in), and:
-```
-0 1 2 3 4 5 6 7
- AAAAAAA
- BBBBBBB
- CCCC
-```
+- If you see a **Resume subscription** button, your subscription was cancelled previously. Click it to resume automatic renewal.
+- If you see **Cancel subscription**, your subscription is set to automatically renew at the end of the subscription period. Click it to cancel automatic renewal.
-The sum of each individual job is being calculated therefore in this example, `8` runner minutes would be used for this pipeline:
+With automatic renewal enabled, the subscription will automatically renew on the expiration date and there will be no gap in available service.
+An invoice will be generated for the renewal and available for viewing or download in the [Payment History](https://customers.gitlab.com/receipts) page. If you have difficulty during the renewal process, contact our [support team](https://support.gitlab.com/hc/en-us/requests/new?ticket_form_id=360000071293) for assistance.
-```
-A + B + C = 3 + 3 + 2 => 8
-```
+### Renew a self-managed subscription
-#### Extra Shared Runners pipeline minutes
+Starting 30 days before a subscription expires, GitLab notifies administrators of the date of expiry with a banner in the GitLab user interface.
-If you're using GitLab.com, you can purchase additional CI minutes so your
-pipelines will not be blocked after you have used all your CI minutes from your
-main quota. Additional minutes:
+We recommend following these steps during renewal:
-- Are only used once the shared quota included in your subscription runs out.
-- Roll over month to month.
+1. Prune any inactive or unwanted users by [blocking them](../user/admin_area/blocking_unblocking_users.md#blocking-a-user).
+1. Determine if you have a need for user growth in the upcoming subscription.
+1. Log in to the [Customers Portal](https://customers.gitlab.com/customers/sign_in) and select the **Renew** button beneath your existing subscription.
-Each month, any minutes that you used will be deducted from your balance of additional minutes.
-Therefore, the number of minutes used and available will reflect your *current*
-month's usage and availability. Purchased remaining minutes not used in the
-current month will be rolled out over to the next month.
+ TIP: **Tip:**
+ If you need to change your [GitLab tier](https://about.gitlab.com/pricing/), contact our sales team via `renewals@gitlab.com` for assistance as this can't be done in the Customers Portal.
-For example:
+1. In the first box, enter the total number of user licenses you'll need for the upcoming year. Be sure this number is at least **equal to, or greater than** the number of active users in the system at the time of performing the renewal.
+1. Enter the number of [users over license](#users-over-license) in the second box for the user overage incurred in your previous subscription term.
-- February 15: A group buys 4000 minutes. The count reads 0/4000 minutes.
-- February 28: The group has used 1500 minutes. The count reads 1500/4000 minutes. Thus, there are 2500 minutes remaining.
-- March 1: The counter reads: 0/2500 minutes rolled out from February's remaining quota.
+ TIP: **Tip:**
+ You can find the _users over license_ in your instance's **Admin** dashboard by clicking on **{admin}** (**Admin Area**) in the top bar, or going to `/admin`.
-##### Purchasing additional minutes
+1. Review your renewal details and complete the payment process.
+1. A license for the renewal term will be available on the [Manage Purchases](https://customers.gitlab.com/subscriptions) page beneath your new subscription details.
+1. [Upload](../user/admin_area/license.md) your new license to your instance.
-To purchase additional minutes, follow these steps.
+An invoice will be generated for the renewal and available for viewing or download in the [Payment History](https://customers.gitlab.com/receipts) page. If you have difficulty during the renewal process, contact our [support team](https://support.gitlab.com/hc/en-us/requests/new?ticket_form_id=360000071293) for assistance.
-1. For group minutes, go to **Group > Settings > Pipelines quota**.
+## Subscription expiry
- For personal project minutes, click your avatar, then **Settings > Pipeline quota**.
+When your subscription or trial expires, GitLab does not delete your data, but it may become inaccessible, depending on the tier at expiry. Some features may not behave as expected if you're not prepared for the expiry. For example, [environment-specific variables may not be passed](https://gitlab.com/gitlab-org/gitlab/issues/24759).
-1. Click **Buy additional minutes**.
+If you renew or upgrade, your data will again be accessible.
-1. Locate the subscription card that is linked to your group on GitLab.com,
- click **Buy more CI minutes**, and complete the details about the transaction.
+### Self-managed GitLab data
- ![Buy additional minutes](img/buy_minutes_card.png)
+For self-managed customers, there is a two-week grace period when your features
+will continue to work as-is, after which the entire instance will become read-only.
-1. Once we have processed your payment, the extra CI minutes
- will be synced to your Group and you can visualize it from the
- **Group > Settings > Pipelines quota** page:
+However, if you remove the license, you will immediately revert to Core
+features, and the instance will be read/write again.
- ![Additional minutes](img/additional_minutes.png)
+## CI pipeline minutes
- The **Additional minutes** displayed now includes the purchased additional CI minutes, plus any
- minutes rolled over from last month.
+CI pipeline minutes are the execution time for your [pipelines](../ci/pipelines/index.md) on GitLab's shared runners. Each [GitLab.com tier](https://about.gitlab.com/pricing/) includes a monthly quota of CI pipeline minutes.
-Be aware that:
+Quotas apply to:
-1. If you have purchased extra CI minutes before the purchase of a paid plan,
- we will calculate a pro-rated charge for your paid plan. That means you may
- be charged for less than one year since your subscription was previously
- created with the extra CI minutes.
-1. Once the extra CI minutes has been assigned to a Group they cannot be transferred
- to a different Group.
-1. If you have some minutes used over your default quota, these minutes will
- be deducted from your Additional Minutes quota immediately after your purchase of additional
- minutes.
+- Groups, where the minutes are shared across all members of the group, its subgroups, and nested projects. To view the group's usage, navigate to the group, then **{settings}** **Settings > Usage Quotas**.
+- Your personal account, where the minutes are available for your personal projects. To view and buy personal minutes, click your avatar, then **{settings}** **Settings > Pipeline quota**.
-##### What happens when my CI minutes run out
+Only pipeline minutes for GitLab shared runners are restricted. If you have a specific runner set up for your projects, there is no limit to your build time on GitLab.com.
-When the CI minutes run out, an email is sent automatically to notify the owner(s)
-of the group/namespace, including a link to [purchase more minutes](https://customers.gitlab.com/plans).
+The available quota is reset on the first of each calendar month at midnight UTC.
-If you are not the owner of the group, you will need to contact them to let them know they need to
-[purchase more minutes](https://customers.gitlab.com/plans).
+When the CI minutes are depleted, an email is sent automatically to notify the owner(s)
+of the group/namespace. You can [purchase additional CI minutes](#purchasing-additional-ci-minutes), or upgrade your account to [Silver or Gold](https://about.gitlab.com/pricing/). Your own runners can still be used even if you reach your limits.
-## Subscription changes and your data
+### Purchasing additional CI minutes
-When your subscription or trial expires, GitLab does not delete your data.
+If you're using GitLab.com, you can purchase additional CI minutes so your
+pipelines won't be blocked after you have used all your CI minutes from your
+main quota. Additional minutes:
-However, depending on the tier and feature, your data may become inaccessible.
+- Are only used once the shared quota included in your subscription runs out.
+- Roll over month to month.
-Please note that some features may not behave as expected if a graceful
-fallback is not currently implemented. For example,
-[environment specific variables not being passed](https://gitlab.com/gitlab-org/gitlab-foss/issues/52825).
+To purchase additional minutes for your group on GitLab.com:
-If you renew or upgrade, your data will again be accessible.
+1. From your group, go to **{settings}** **Settings > Usage Quotas**.
+1. Locate the subscription card that's linked to your group on GitLab.com, click **Buy more CI minutes**, and complete the details about the transaction.
+1. Once we have processed your payment, the extra CI minutes will be synced to your group.
+1. To confirm the available CI minutes, go to your group, then **{settings}** **Settings > Usage Quotas**.
+ The **Additional minutes** displayed now includes the purchased additional CI minutes, plus any minutes rolled over from last month.
-### Self-managed data
+To purchase additional minutes for your personal namespace:
-For self-managed customers, there is a two-week grace period when your features
-will continue to work as-is, after which the entire instance will become read
-only.
+1. Click your avatar, then go to **Settings > Pipeline quota**.
+1. Locate the subscription card that's linked to your personal namespace on GitLab.com, click **Buy more CI minutes**, and complete the details about the transaction. Once we have processed your payment, the extra CI minutes will be synced to your personal namespace.
+1. To confirm the available CI minutes for your personal projects, click your avatar, then go to **Settings > Pipeline quota**.
+ The **Additional minutes** displayed now includes the purchased additional CI minutes, plus any minutes rolled over from last month.
-However, if you remove the license, you will immediately revert to Core
-features.
+Be aware that:
-## Need help?
+- If you have purchased extra CI minutes before the purchase of a paid plan,
+ we will calculate a pro-rated charge for your paid plan. That means you may
+ be charged for less than one year since your subscription was previously
+ created with the extra CI minutes.
+- Once the extra CI minutes have been assigned to a Group, they can't be transferred
+ to a different Group.
+- If you have used more minutes than your default quota, these minutes will
+ be deducted from your Additional Minutes quota immediately after your purchase of additional
+ minutes.
-[GitLab's Documentation](https://docs.gitlab.com/) offers a wide range of
-topics covering the use and administration of GitLab.
+## Contact Support
We also encourage all users to search our project trackers for known issues and
-existing feature requests in:
-
-- [GitLab CE](https://gitlab.com/gitlab-org/gitlab-foss/issues/) for features
- included in all tiers.
-- [GitLab EE](https://gitlab.com/gitlab-org/gitlab/issues/) for paid-tier
- features.
+existing feature requests in the [GitLab](https://gitlab.com/gitlab-org/gitlab/issues/) project.
These issues are the best avenue for getting updates on specific product plans
and for communicating directly with the relevant GitLab team members.
-### Contacting Support
-
Learn more about:
- The tiers of [GitLab Support](https://about.gitlab.com/support/).
- [Submit a request via the Support Portal](https://support.gitlab.com/hc/en-us/requests/new).
+## GitLab Education Program subscriptions
+
+To renew a [GitLab Education Program](https://about.gitlab.com/solutions/education/) subscription, send an email to `education@gitlab.com` with the following information:
+
+1. The number of seats for the renewal. You can add seats if needed.
+1. The use case for the license. Specifically, we need verification that the use meets the conditions of the [End User License Agreement](https://about.gitlab.com/terms/#edu-oss). Note that university infrastructure operations and information technology operations don't fall within the stated terms of the Education Program. For details, see the [Education FAQ](https://about.gitlab.com/solutions/education/#FAQ).
+1. The full name, email address, and phone number of the primary contact who will be signing the renewal quote. Only signatures by faculty or staff with proper signing authority on behalf of the university will be accepted.
+
+After we receive the above information, we will process the request and return a renewal quote for signature. Please allow a minimum of 2 business days for a response. Email us at `education@gitlab.com` with any questions.
+
+## GitLab Open Source Program subscriptions
+
+All requests for our GitLab Open Source program, including subscription renewals, must be made by using the [Open Source Program](https://about.gitlab.com/solutions/open-source/program/) application process. If you have any questions, send an email to `opensource@gitlab.com` for assistance.
+
<!-- ## Troubleshooting
Include any troubleshooting steps that you can foresee. If you know beforehand what issues
diff --git a/doc/system_hooks/system_hooks.md b/doc/system_hooks/system_hooks.md
index 30d080eab29..5ca8c0687f4 100644
--- a/doc/system_hooks/system_hooks.md
+++ b/doc/system_hooks/system_hooks.md
@@ -41,7 +41,7 @@ for Push and Tag events, but we never display commits.
**Request header**:
-```
+```plaintext
X-Gitlab-Event: System Hook
```
@@ -296,7 +296,7 @@ If the user is blocked via LDAP, `state` will be `ldap_blocked`.
}
```
-`owner_name` and `owner_email` are always `null`. Please see <https://gitlab.com/gitlab-org/gitlab-foss/issues/39675>.
+`owner_name` and `owner_email` are always `null`. Please see <https://gitlab.com/gitlab-org/gitlab/issues/20011>.
**Group removed:**
@@ -313,7 +313,7 @@ If the user is blocked via LDAP, `state` will be `ldap_blocked`.
}
```
-`owner_name` and `owner_email` are always `null`. Please see <https://gitlab.com/gitlab-org/gitlab-foss/issues/39675>.
+`owner_name` and `owner_email` are always `null`. Please see <https://gitlab.com/gitlab-org/gitlab/issues/20011>.
**Group renamed:**
@@ -333,7 +333,7 @@ If the user is blocked via LDAP, `state` will be `ldap_blocked`.
}
```
-`owner_name` and `owner_email` are always `null`. Please see <https://gitlab.com/gitlab-org/gitlab-foss/issues/39675>.
+`owner_name` and `owner_email` are always `null`. Please see <https://gitlab.com/gitlab-org/gitlab/issues/20011>.
**New Group Member:**
@@ -396,7 +396,7 @@ It generates one event per modified branch.
**Request header**:
-```
+```plaintext
X-Gitlab-Event: System Hook
```
@@ -446,8 +446,8 @@ X-Gitlab-Event: System Hook
"timestamp": "2013-05-13T18:18:08+00:00",
"url": "https://dev.gitlab.org/gitlab/gitlabhq/commit/c5feabde2d8cd023215af4d2ceeb7a64839fc428",
"author": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
+ "name": "Example User",
+ "email": "user@example.com"
}
}
],
@@ -462,7 +462,7 @@ It generates one event per modified tag.
**Request header**:
-```
+```plaintext
X-Gitlab-Event: System Hook
```
@@ -516,7 +516,7 @@ updated/merged/closed or a commit is added in the source branch.
**Request header**:
-```
+```plaintext
X-Gitlab-Event: System Hook
```
@@ -643,7 +643,7 @@ Triggered only once when you push to the repository (including tags).
**Request header**:
-```
+```plaintext
X-Gitlab-Event: System Hook
```
diff --git a/doc/telemetry/backend.md b/doc/telemetry/backend.md
new file mode 100644
index 00000000000..c571439af8a
--- /dev/null
+++ b/doc/telemetry/backend.md
@@ -0,0 +1,34 @@
+# Backend tracking guide
+
+GitLab provides `Gitlab::Tracking`, an interface that wraps the [Snowplow Ruby Tracker](https://github.com/snowplow/snowplow/wiki/ruby-tracker) for tracking custom events.
+
+## Tracking in Ruby
+
+Custom event tracking and instrumentation can be added by directly calling the `GitLab::Tracking.event` class method, which accepts the following arguments:
+
+| argument | type | default value | description |
+|:-----------|:-------|:---------------------------|:------------|
+| `category` | string | 'application' | Area or aspect of the application. This could be `HealthCheckController` or `Lfs::FileTransformer` for instance. |
+| `action` | string | 'generic' | The action being taken, which can be anything from a controller action like `create` to something like an Active Record callback. |
+| `data` | object | {} | Additional data such as `label`, `property`, `value`, and `context` as described [in our Feature Instrumentation taxonomy](https://about.gitlab.com/handbook/product/feature-instrumentation/#taxonomy). These will be set as empty strings if you don't provide them. |
+
+Tracking can be viewed as either tracking user behavior, or as instrumentation to monitor and visualize performance over time in an area or aspect of code.
+
+For example:
+
+```ruby
+class Projects::CreateService < BaseService
+ def execute
+ project = Project.create(params)
+
+ Gitlab::Tracking.event('Projects::CreateService', 'create_project',
+ label: project.errors.full_messages.to_sentence,
+ value: project.valid?
+ )
+ end
+end
+```
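+
+The example above sets only `label` and `value`. As a further illustration (the category, action, and data values here are hypothetical, not taken from the GitLab codebase), a call that also sets `property` might look like:
+
+```ruby
+# Hypothetical instrumentation call showing the optional taxonomy fields.
+Gitlab::Tracking.event('HealthCheckController', 'show',
+  label: 'liveness_probe', # label, property, and value follow the
+  property: 'json',        # Feature Instrumentation taxonomy
+  value: 1
+)
+```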
+
+### Performance
+
+We use the [AsyncEmitter](https://github.com/snowplow/snowplow/wiki/Ruby-Tracker#52-the-asyncemitter-class) when tracking events, which allows for instrumentation calls to be run in a background thread. This is still an active area of development.
diff --git a/doc/telemetry/frontend.md b/doc/telemetry/frontend.md
new file mode 100644
index 00000000000..fcd394500ec
--- /dev/null
+++ b/doc/telemetry/frontend.md
@@ -0,0 +1,167 @@
+# Frontend tracking guide
+
+GitLab provides `Tracking`, an interface that wraps the [Snowplow JavaScript Tracker](https://github.com/snowplow/snowplow/wiki/javascript-tracker) for tracking custom events. There are a few ways to utilize tracking, but each generally requires, at minimum, a `category` and an `action`. Additional data can be provided that adheres to our [Feature instrumentation taxonomy](https://about.gitlab.com/handbook/product/feature-instrumentation/#taxonomy).
+
+| field | type | default value | description |
+|:-----------|:-------|:---------------------------|:------------|
+| `category` | string | document.body.dataset.page | Page or subsection of a page that events are being captured within. |
+| `action` | string | 'generic' | Action the user is taking. Clicks should be `click` and activations should be `activate`, so for example, focusing a form field would be `activate_form_input`, and clicking a button would be `click_button`. |
+| `data` | object | {} | Additional data such as `label`, `property`, `value`, and `context` as described [in our Feature Instrumentation taxonomy](https://about.gitlab.com/handbook/product/feature-instrumentation/#taxonomy). |
+
+## Tracking in HAML (or Vue Templates)
+
+When working within HAML (or Vue templates) we can add `data-track-*` attributes to elements of interest. All elements that have a `data-track-event` attribute will automatically have event tracking bound on clicks.
+
+Below is an example of `data-track-*` attributes assigned to a button:
+
+```haml
+%button.btn{ data: { track: { event: "click_button", label: "template_preview", property: "my-template" } } }
+```
+
+```html
+<button class="btn"
+ data-track-event="click_button"
+ data-track-label="template_preview"
+ data-track-property="my-template"
+/>
+```
+
+Event listeners are bound at the document level to handle click events on or within elements with these data attributes. This allows for them to be properly handled on rerendering and changes to the DOM, but it's important to know that because of the way these events are bound, click events shouldn't be stopped from propagating up the DOM tree. If for any reason click events are being stopped from propagating, you'll need to implement your own listeners and follow the instructions in [Tracking in raw JavaScript](#tracking-in-raw-javascript).
+
+Below is a list of supported `data-track-*` attributes:
+
+| attribute | required | description |
+|:----------------------|:---------|:------------|
+| `data-track-event` | true | Action the user is taking. Clicks must be prepended with `click` and activations must be prepended with `activate`. For example, focusing a form field would be `activate_form_input` and clicking a button would be `click_button`. |
+| `data-track-label` | false | The `label` as described [in our Feature Instrumentation taxonomy](https://about.gitlab.com/handbook/product/feature-instrumentation/#taxonomy). |
+| `data-track-property` | false | The `property` as described [in our Feature Instrumentation taxonomy](https://about.gitlab.com/handbook/product/feature-instrumentation/#taxonomy). |
+| `data-track-value`    | false    | The `value` as described [in our Feature Instrumentation taxonomy](https://about.gitlab.com/handbook/product/feature-instrumentation/#taxonomy). If omitted, this will be the element's `value` property or an empty string. For checkboxes, the default value will be the element's checked attribute or `false` when unchecked. |
+| `data-track-context` | false | The `context` as described [in our Feature Instrumentation taxonomy](https://about.gitlab.com/handbook/product/feature-instrumentation/#taxonomy). |
+
+## Tracking within Vue components
+
+There's a tracking Vue mixin that can be used in components if more complex tracking is required. To use it, first import the `Tracking` library and request a mixin.
+
+```javascript
+import Tracking from '~/tracking';
+const trackingMixin = Tracking.mixin({ label: 'right_sidebar' });
+```
+
+You can provide default options that will be passed along whenever an event is tracked from within your component. For instance, if all events within a component should be tracked with a given `label`, you can provide one at this time. Available defaults are `category`, `label`, `property`, and `value`. If no category is specified, `document.body.dataset.page` is used as the default.
+
+You can then use the mixin normally in your component with the `mixins` Vue declaration. The mixin also provides the ability to specify tracking options in `data` or `computed`. These will override any defaults and allow the values to be dynamic, based on props or state.
+
+```javascript
+export default {
+ mixins: [trackingMixin],
+ // ...[component implementation]...
+ data() {
+ return {
+ expanded: false,
+ tracking: {
+ label: 'left_sidebar'
+ }
+ };
+ },
+}
+```
+
+The mixin provides a `track` method that can be called within the template, or from component methods. An example of the whole implementation might look like the following.
+
+```javascript
+export default {
+ mixins: [Tracking.mixin({ label: 'right_sidebar' })],
+ data() {
+ return {
+ expanded: false,
+ };
+ },
+ methods: {
+ toggle() {
+ this.expanded = !this.expanded;
+ this.track('click_toggle', { value: this.expanded })
+ }
+ }
+};
+```
+
+And if needed within the template, you can use the `track` method directly as well.
+
+```html
+<template>
+ <div>
+ <a class="toggle" @click.prevent="toggle">Toggle</a>
+ <div v-if="expanded">
+ <p>Hello world!</p>
+ <a @click.prevent="track('click_action')">Track an event</a>
+ </div>
+ </div>
+</template>
+```
+
+## Tracking in raw JavaScript
+
+Custom event tracking and instrumentation can be added by directly calling the `Tracking.event` static function. The following example demonstrates tracking a click on a button by calling `Tracking.event` manually.
+
+```javascript
+import Tracking from '~/tracking';
+
+const button = document.getElementById('create_from_template_button');
+button.addEventListener('click', () => {
+ Tracking.event('dashboard:projects:index', 'click_button', {
+ label: 'create_from_template',
+ property: 'template_preview',
+ value: 'rails',
+ });
+})
+```
+
+## Tests and test helpers
+
+In Jest, particularly in Vue tests, you can use the following:
+
+```javascript
+import { mockTracking } from 'helpers/tracking_helper';
+
+describe('MyTracking', () => {
+ let spy;
+
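+  // `wrapper` is assumed to be a component wrapper mounted elsewhere
+  // (for example, with `shallowMount` in an outer `beforeEach`).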
+ beforeEach(() => {
+ spy = mockTracking('_category_', wrapper.element, jest.spyOn);
+ });
+
+ it('tracks an event when clicked on feedback', () => {
+ wrapper.find('.discover-feedback-icon').trigger('click');
+
+ expect(spy).toHaveBeenCalledWith('_category_', 'click_button', {
+ label: 'security-discover-feedback-cta',
+ property: '0',
+ });
+ });
+});
+```
+
+In the obsolete Karma tests, it's used as follows:
+
+```javascript
+import { mockTracking, triggerEvent } from 'spec/helpers/tracking_helper';
+
+describe('my component', () => {
+ let trackingSpy;
+
+ beforeEach(() => {
+ const vm = mountComponent(MyComponent);
+ trackingSpy = mockTracking('_category_', vm.$el, spyOn);
+ });
+
+ it('tracks an event when toggled', () => {
+ triggerEvent('a.toggle');
+
+ expect(trackingSpy).toHaveBeenCalledWith('_category_', 'click_edit_button', {
+ label: 'right_sidebar',
+ property: 'confidentiality',
+ });
+ });
+});
+```
diff --git a/doc/telemetry/index.md b/doc/telemetry/index.md
new file mode 100644
index 00000000000..55a7fad86be
--- /dev/null
+++ b/doc/telemetry/index.md
@@ -0,0 +1,72 @@
+# Event tracking
+
+At GitLab, we encourage event tracking so we can iterate on and improve the project and user experience.
+
+We do this by running experiments, and collecting analytics for features and feature variations. This is:
+
+- A way to understand general engagement.
+- A way to approach A/B testing.
+
+As developers, we should attempt to add tracking and instrumentation where possible. This enables the Product team to better understand:
+
+- User engagement.
+- Usage patterns.
+- Other metrics that can potentially be improved on.
+
+To maintain consistency and not adversely affect performance, we have some basic tracking functionality exposed at both the frontend and backend layers that can be utilized while building new features or updating existing features.
+
+We also encourage users to enable tracking, and we embrace full transparency with our tracking approach so it can be easily understood and trusted. By enabling tracking, users can:
+
+- Contribute back to the wider community.
+- Help GitLab improve on the product.
+
+## Implementing tracking
+
+Event tracking can be implemented on either the frontend or the backend layers, and each can be approached slightly differently since they have slightly different concerns.
+
+In GitLab, many actions can be initiated via the web interface, but they can also be initiated via an API client (an iOS application is a good example of this), or via `git` directly. Crucially, this means that tracking should be considered holistically for the feature that's being instrumented.
+
+The data team should be involved when defining analytics and can be consulted when coming up with ways of presenting data that's being tracked. This allows our event data to be considered carefully and presented in ways that may reveal user engagement details that aren't yet fully understood, or interactions where we can make improvements. You can [contact the data team](https://about.gitlab.com/handbook/business-ops/data-team/#contact-us) and consult with them when defining tracking strategies.
+
+### Frontend
+
+Generally speaking, the frontend can track user actions and events, like:
+
+- Clicking links or buttons.
+- Submitting forms.
+- Other typically interface-driven actions.
+
+See [Frontend tracking guide](frontend.md).
+
+### Backend
+
+From the backend, the events that are tracked will likely consist of things like the creation or deletion of records and other events that might be triggered from layers that aren't necessarily only available in the interface.
+
+See [Backend tracking guide](backend.md).
+
+Also, see [Instrumenting Ruby code](../development/instrumentation.md) if you are instrumenting application performance metrics for Ruby code.
+
+## Enabling tracking
+
+Tracking can be enabled at:
+
+- The instance level, which will enable tracking on both the frontend and backend layers.
+- The user level, though user tracking can be disabled on a per-user basis. GitLab tracking respects the [Do Not Track](https://www.eff.org/issues/do-not-track) standard, so any user who has enabled the Do Not Track option in their browser will not be tracked at the user level.
+
+We utilize Snowplow for the majority of our tracking strategy, and it can be enabled by navigating to:
+
+- **Admin Area > Settings > Integrations** in the UI.
+- `admin/application_settings/integrations` in your browser.
+
+The following configuration is required:
+
+| Name | Value |
+| ------------- | ------------------------- |
+| Collector | `snowplow.trx.gitlab.net` |
+| Site ID | `gitlab` |
+| Cookie domain | `.gitlab.com` |
+
+Once enabled, tracking events can be inspected locally by either:
+
+- Looking at the network panel of the browser's development tools.
+- Using the [Snowplow Chrome Extension](https://chrome.google.com/webstore/detail/snowplow-inspector/maplkdomeamdlngconidoefjpogkmljm).
diff --git a/doc/topics/airgap/index.md b/doc/topics/airgap/index.md
new file mode 100644
index 00000000000..abeb3a4b1d3
--- /dev/null
+++ b/doc/topics/airgap/index.md
@@ -0,0 +1,10 @@
+# Air-gapped GitLab
+
+Computers in an air-gapped network are isolated from the public internet as a security measure.
+This page lists all the information available for running GitLab in an air-gapped environment.
+
+## Features
+
+Follow these best practices to use GitLab's features in an offline environment:
+
+- [Operating the GitLab Secure scanners in an offline environment](../../user/application_security/offline_deployments/index.md).
diff --git a/doc/topics/application_development_platform/index.md b/doc/topics/application_development_platform/index.md
index 2ea561eb943..8de440c7f00 100644
--- a/doc/topics/application_development_platform/index.md
+++ b/doc/topics/application_development_platform/index.md
@@ -59,4 +59,4 @@ responsibility. The Application Development Platform integrates key performance
into GitLab, automatically. The following features are included:
- [Auto Monitoring](../autodevops/index.md#auto-monitoring)
-- [In-app Kubernetes Pod Logs](../../user/project/clusters/kubernetes_pod_logs.md)
+- [In-app Kubernetes Logs](../../user/project/clusters/kubernetes_pod_logs.md)
diff --git a/doc/topics/autodevops/index.md b/doc/topics/autodevops/index.md
index dcd822705f9..57cceba8c0d 100644
--- a/doc/topics/autodevops/index.md
+++ b/doc/topics/autodevops/index.md
@@ -40,7 +40,7 @@ If you are using GitLab.com, see the [quick start guide](quick_start_guide.md)
for how to use Auto DevOps with GitLab.com and a Kubernetes cluster on Google Kubernetes
Engine (GKE).
-If you are using a self-hosted instance of GitLab, you will need to configure the
+If you are using a self-managed instance of GitLab, you will need to configure the
[Google OAuth2 OmniAuth Provider](../../integration/google.md) before
you can configure a cluster on GKE. Once this is set up, you can follow the steps on the
[quick start guide](quick_start_guide.md) to get started.
@@ -55,7 +55,7 @@ in multiple ways:
- Auto DevOps works with any Kubernetes cluster; you're not limited to running
on GitLab's infrastructure. (Note that many features also work without Kubernetes).
- There is no additional cost (no markup on the infrastructure costs), and you
- can use a self-hosted Kubernetes cluster or Containers as a Service on any
+ can use a Kubernetes cluster you host or Containers as a Service on any
public cloud (for example, [Google Kubernetes Engine](https://cloud.google.com/kubernetes-engine/)).
- Auto DevOps has more features including security testing, performance testing,
and code quality testing.
@@ -93,7 +93,8 @@ knowledge of the following:
Auto DevOps provides great defaults for all the stages; you can, however,
[customize](#customizing) almost everything to your needs.
-For an overview on the creation of Auto DevOps, read the blog post [From 2/3 of the Self-Hosted Git Market, to the Next-Generation CI System, to Auto DevOps](https://about.gitlab.com/blog/2017/06/29/whats-next-for-gitlab-ci/).
+For an overview on the creation of Auto DevOps, read more
+[in this blog post](https://about.gitlab.com/blog/2017/06/29/whats-next-for-gitlab-ci/).
NOTE: **Note**
Kubernetes clusters can [be used without](../../user/project/clusters/index.md)
@@ -181,7 +182,7 @@ To make full use of Auto DevOps, you will need:
If you have configured GitLab's Kubernetes integration, you can deploy it to
your cluster by installing the
[GitLab-managed app for cert-manager](../../user/clusters/applications.md#cert-manager).
-
+
If you do not have Kubernetes or Prometheus installed, then Auto Review Apps,
Auto Deploy, and Auto Monitoring will be silently skipped.
@@ -419,7 +420,7 @@ tests, it's up to you to add them.
### Auto Code Quality **(STARTER)**
Auto Code Quality uses the
-[Code Quality image](https://gitlab.com/gitlab-org/security-products/codequality) to run
+[Code Quality image](https://gitlab.com/gitlab-org/ci-cd/codequality) to run
static analysis and other code checks on the current code. The report is
created, and is uploaded as an artifact which you can later download and check
out.
@@ -473,7 +474,7 @@ report is created, it's uploaded as an artifact which you can later download and
check out.
Any licenses are also shown in the merge request widget. Read more how
-[License Compliance works](../../user/application_security/license_compliance/index.md).
+[License Compliance works](../../user/compliance/license_compliance/index.md).
### Auto Container Scanning **(ULTIMATE)**
@@ -638,7 +639,8 @@ as it will be attempting to fetch the image using
#### Kubernetes 1.16+
-> [Introduced](https://gitlab.com/gitlab-org/charts/auto-deploy-app/-/merge_requests/51) in GitLab 12.8.
+> - [Introduced](https://gitlab.com/gitlab-org/charts/auto-deploy-app/-/merge_requests/51) in GitLab 12.8.
+> - Support for deploying a PostgreSQL version that supports Kubernetes 1.16+ was [introduced](https://gitlab.com/gitlab-org/cluster-integration/auto-deploy-image/-/merge_requests/49) in GitLab 12.9.
CAUTION: **Deprecation**
The default value of `extensions/v1beta1` for the `deploymentApiVersion` setting is
@@ -656,9 +658,17 @@ To use Auto Deploy on a Kubernetes 1.16+ cluster, you must:
deploymentApiVersion: apps/v1
```
-1. Set the `POSTGRES_ENABLED` variable to `false`. This will disable Auto Deploy's deployment of PostgreSQL.
-Support for enabling Auto Deploy's deployment of PostgreSQL in a Kubernetes 1.16+ cluster
-is [planned](https://gitlab.com/gitlab-org/charts/auto-deploy-app/issues/28).
+1. Set the:
+
+ - `AUTO_DEVOPS_POSTGRES_CHANNEL` variable to `2`.
+ - `POSTGRES_VERSION` variable to `9.6.16` or higher.
+
+   This will opt in to using a version of the PostgreSQL chart that supports Kubernetes
+ 1.16 and higher.
+
+DANGER: **Danger:** Opting into `AUTO_DEVOPS_POSTGRES_CHANNEL` version `2` deletes
+the version `1` PostgreSQL database. Back up the contents of the PostgreSQL database
+before opting into version `2`, so that you can restore them into the version `2` database.
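+
+For illustration, a minimal sketch of setting these variables in `.gitlab-ci.yml`
+(they can also be set as project-level CI/CD variables; the values shown are examples only):
+
+```yaml
+include:
+  - template: Auto-DevOps.gitlab-ci.yml
+
+variables:
+  # Example only: opt into the channel 2 PostgreSQL chart, per the steps above.
+  AUTO_DEVOPS_POSTGRES_CHANNEL: "2"
+  POSTGRES_VERSION: "9.6.16"
+```
+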
#### Migrations
@@ -742,15 +752,15 @@ workers:
> [Introduced](https://gitlab.com/gitlab-org/charts/auto-deploy-app/-/merge_requests/30) in GitLab 12.7.
By default, all Kubernetes pods are
-[non-isolated](https://kubernetes.io/docs/concepts/services-networking/network-policies/#isolated-and-non-isolated-pods)
-and accept traffic from any source. You can use
+[non-isolated](https://kubernetes.io/docs/concepts/services-networking/network-policies/#isolated-and-non-isolated-pods),
+meaning that they will accept traffic to and from any source. You can use
[NetworkPolicy](https://kubernetes.io/docs/concepts/services-networking/network-policies/)
-to restrict connections to selected pods or namespaces.
+to restrict connections to and from selected pods, namespaces, and the Internet.
NOTE: **Note:**
You must use a Kubernetes network plugin that implements support for
-`NetworkPolicy`, the default network plugin for Kubernetes (`kubenet`)
-[doesn't implement](https://kubernetes.io/docs/concepts/extend-kubernetes/compute-storage-net/network-plugins/#kubenet)
+`NetworkPolicy`. The default network plugin for Kubernetes (`kubenet`)
+[does not implement](https://kubernetes.io/docs/concepts/extend-kubernetes/compute-storage-net/network-plugins/#kubenet)
support for it. The [Cilium](https://cilium.io/) network plugin can be
installed as a [cluster application](../../user/clusters/applications.md#install-cilium-using-gitlab-ci)
to enable support for network policies.
@@ -758,20 +768,20 @@ to enable support for network policies.
You can enable deployment of a network policy by setting the following
in the `.gitlab/auto-deploy-values.yaml` file:
-```yml
+```yaml
networkPolicy:
enabled: true
```
The default policy deployed by the auto deploy pipeline will allow
traffic within a local namespace and from the `gitlab-managed-apps`
-namespace, all other inbound connection will be blocked. Outbound
-traffic is not affected by the default policy.
+namespace. All other inbound connections will be blocked. Outbound
+traffic (for example, to the Internet) is not affected by the default policy.
You can also provide a custom [policy specification](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.16/#networkpolicyspec-v1-networking-k8s-io)
via the `.gitlab/auto-deploy-values.yaml` file, for example:
-```yml
+```yaml
networkPolicy:
enabled: true
spec:
@@ -787,6 +797,9 @@ networkPolicy:
app.gitlab.com/managed_by: gitlab
```
+For more information on how to install Network Policies, see
+[Install Cilium using GitLab CI](../../user/clusters/applications.md#install-cilium-using-gitlab-ci).
+
#### Web Application Firewall (ModSecurity) customization
> [Introduced](https://gitlab.com/gitlab-org/charts/auto-deploy-app/-/merge_requests/44) in GitLab 12.8.
@@ -918,12 +931,12 @@ instead of the default `ruby:latest`:
1. Set `AUTO_DEVOPS_BUILD_IMAGE_EXTRA_ARGS` to `--build-arg=RUBY_VERSION=alpine`.
1. Add the following to a custom `Dockerfile`:
- ```docker
- ARG RUBY_VERSION=latest
- FROM ruby:$RUBY_VERSION
+ ```dockerfile
+ ARG RUBY_VERSION=latest
+ FROM ruby:$RUBY_VERSION
- # ... put your stuff here
- ```
+ # ... put your stuff here
+ ```
NOTE: **Note:**
Passing in complex values (newlines and spaces, for example) will likely
@@ -955,14 +968,14 @@ In projects:
1. Activate the experimental `Dockerfile` syntax by adding the following
to the top of the file:
- ```docker
+ ```dockerfile
# syntax = docker/dockerfile:experimental
```
1. To make secrets available in any `RUN $COMMAND` in the `Dockerfile`, mount
the secret file and source it prior to running `$COMMAND`:
- ```docker
+ ```dockerfile
RUN --mount=type=secret,id=auto-devops-build-secrets . /run/secrets/auto-devops-build-secrets && $COMMAND
```
@@ -1030,6 +1043,32 @@ It is also possible to copy and paste the contents of the [Auto DevOps
template] into your project and edit this as needed. You may prefer to do it
that way if you want to specifically remove any part of it.
+### Customizing the Kubernetes namespace
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/27630) in GitLab 12.6.
+
+For **non**-GitLab-managed clusters, the namespace can be customized using
+`.gitlab-ci.yml` by specifying
+[`environment:kubernetes:namespace`](../../ci/environments.md#configuring-kubernetes-deployments).
+For example, the following configuration overrides the namespace used for
+`production` deployments:
+
+```yaml
+include:
+ - template: Auto-DevOps.gitlab-ci.yml
+
+production:
+ environment:
+ kubernetes:
+ namespace: production
+```
+
+When deploying to a custom namespace with Auto DevOps, the service account
+provided with the cluster needs at least the `edit` role within the namespace.
+
+- If the service account can create namespaces, then the namespace can be created on-demand.
+- Otherwise, the namespace must exist prior to deployment.
+
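+As an illustration only (the namespace and service account names below are
+placeholders, not values required by Auto DevOps), the `edit` role could be
+granted with a standard `kubectl` command along these lines:
+
+```shell
+# Hypothetical names; adjust the service account and namespace to your cluster.
+kubectl create rolebinding gitlab-deploy-edit \
+  --clusterrole=edit \
+  --serviceaccount=gitlab-managed-apps:default \
+  --namespace=production
+```
+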
### Using components of Auto DevOps
If you only require a subset of the features offered by Auto DevOps, you can include
@@ -1336,7 +1375,7 @@ increasing the rollout up to 100%.
If `INCREMENTAL_ROLLOUT_MODE` is set to `manual` in your project, then instead
of the standard `production` job, 4 different
-[manual jobs](../../ci/pipelines.md#manual-actions-from-pipeline-graphs)
+[manual jobs](../../ci/pipelines/index.md#manual-actions-from-pipeline-graphs)
will be created:
1. `rollout 10%`
diff --git a/doc/topics/autodevops/quick_start_guide.md b/doc/topics/autodevops/quick_start_guide.md
index 32dcd60624f..d5730d16296 100644
--- a/doc/topics/autodevops/quick_start_guide.md
+++ b/doc/topics/autodevops/quick_start_guide.md
@@ -7,7 +7,7 @@ We will use GitLab's native Kubernetes integration, so you will not need
to create a Kubernetes cluster manually using the Google Cloud Platform console.
We will create and deploy a simple application that we create from a GitLab template.
-These instructions will also work for a self-hosted GitLab instance; you'll just
+These instructions will also work for a self-managed GitLab instance; you'll just
need to ensure your own [Runners are configured](../../ci/runners/README.md) and
[Google OAuth is enabled](../../integration/google.md).
diff --git a/doc/topics/git/numerous_undo_possibilities_in_git/index.md b/doc/topics/git/numerous_undo_possibilities_in_git/index.md
index cea052f3a90..3b93c978931 100644
--- a/doc/topics/git/numerous_undo_possibilities_in_git/index.md
+++ b/doc/topics/git/numerous_undo_possibilities_in_git/index.md
@@ -253,7 +253,7 @@ In our example we will end up with commit `B`, that introduced bug/error. We hav
- Undo changes on a single file or directory from commit `B`, but retain them in the unstaged state:
```shell
- git reset commit-B-id <file>
+ git reset commit-B-id <file>
```
- There is one command we also must not forget: **creating a new branch**
@@ -487,8 +487,8 @@ git filter-branch --tree-filter 'rm filename' HEAD
Since `git filter-branch` command might be slow on big repositories, there are
tools that can use some of Git specifics to enable faster execution of common
tasks (which is exactly what removing sensitive information file is about).
-An alternative is the open source community-maintained tool [BFG][bfg-repo-cleaner].
-Keep in mind that these tools are faster because they do not provide the same
+An alternative is the open source community-maintained tool [BFG][bfg-repo-cleaner].
+Keep in mind that these tools are faster because they do not provide the same
feature set as `git filter-branch` does, but focus on specific use cases.
## Conclusion
diff --git a/doc/topics/git/troubleshooting_git.md b/doc/topics/git/troubleshooting_git.md
index 446c2c0db4c..8270fad7086 100644
--- a/doc/topics/git/troubleshooting_git.md
+++ b/doc/topics/git/troubleshooting_git.md
@@ -101,19 +101,38 @@ ssh_exchange_identification: read: Connection reset by peer
fatal: Could not read from remote repository.
```
+or
+
+```text
+ssh_exchange_identification: Connection closed by remote host
+fatal: The remote end hung up unexpectedly
+```
+
This error usually indicates that SSH daemon's `MaxStartups` value is throttling
-SSH connections. This setting specifies the maximum number of unauthenticated
+SSH connections. This setting specifies the maximum number of concurrent, unauthenticated
connections to the SSH daemon. This affects users with proper authentication
credentials (SSH keys) because every connection is 'unauthenticated' in the
beginning. The default value is `10`.
-Increase `MaxStartups` by adding or modifying the value in `/etc/ssh/sshd_config`:
+Increase `MaxStartups` on the GitLab server
+by adding or modifying the value in `/etc/ssh/sshd_config`:
```text
-MaxStartups 100
+MaxStartups 100:30:200
```
-Restart SSHD for the change to take effect.
+`100:30:200` means up to 100 unauthenticated SSH connections are allowed without restriction.
+Beyond that, 30% of new connections are dropped, rising linearly to 100% when an absolute maximum of 200 is reached.
+
+Once configured, restart the SSH daemon for the change to take effect.
+
+```shell
+# Debian/Ubuntu
+sudo systemctl restart ssh
+
+# CentOS/RHEL
+sudo service sshd restart
+```
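+
+To confirm the new value is active after the restart, you can print the SSH daemon's
+effective configuration. This is a generic OpenSSH check, not a GitLab-specific step:
+
+```shell
+# Prints the effective MaxStartups setting, for example: "maxstartups 100:30:200".
+sudo sshd -T | grep -i maxstartups
+```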
## Timeout during `git push` / `git pull`
diff --git a/doc/topics/web_application_firewall/img/guide_waf_ingress_installation.png b/doc/topics/web_application_firewall/img/guide_waf_ingress_installation.png
deleted file mode 100644
index a150fa4e46f..00000000000
--- a/doc/topics/web_application_firewall/img/guide_waf_ingress_installation.png
+++ /dev/null
Binary files differ
diff --git a/doc/topics/web_application_firewall/img/guide_waf_ingress_installation_v12_9.png b/doc/topics/web_application_firewall/img/guide_waf_ingress_installation_v12_9.png
new file mode 100644
index 00000000000..5c4718b0487
--- /dev/null
+++ b/doc/topics/web_application_firewall/img/guide_waf_ingress_installation_v12_9.png
Binary files differ
diff --git a/doc/topics/web_application_firewall/img/guide_waf_ingress_save_changes_v12_9.png b/doc/topics/web_application_firewall/img/guide_waf_ingress_save_changes_v12_9.png
new file mode 100644
index 00000000000..df1223f12ec
--- /dev/null
+++ b/doc/topics/web_application_firewall/img/guide_waf_ingress_save_changes_v12_9.png
Binary files differ
diff --git a/doc/topics/web_application_firewall/index.md b/doc/topics/web_application_firewall/index.md
index 6c847f12bba..27cf60d5662 100644
--- a/doc/topics/web_application_firewall/index.md
+++ b/doc/topics/web_application_firewall/index.md
@@ -49,7 +49,7 @@ The Web Application Firewall requires:
If you are using GitLab.com, see the [quick start guide](quick_start_guide.md) for
how to use the WAF with GitLab.com and a Kubernetes cluster on Google Kubernetes Engine (GKE).
-If you are using a self-hosted instance of GitLab, you need to configure the
+If you are using a self-managed instance of GitLab, you need to configure the
[Google OAuth2 OmniAuth Provider](../../integration/google.md) before
you can configure a cluster on GKE. Once this is set up, you can follow the steps on the [quick start guide](quick_start_guide.md) to get started.
diff --git a/doc/topics/web_application_firewall/quick_start_guide.md b/doc/topics/web_application_firewall/quick_start_guide.md
index e3cf0bcd498..417e1973b87 100644
--- a/doc/topics/web_application_firewall/quick_start_guide.md
+++ b/doc/topics/web_application_firewall/quick_start_guide.md
@@ -7,23 +7,13 @@ We will use GitLab's native Kubernetes integration, so you will not need
to create a Kubernetes cluster manually using the Google Cloud Platform console.
We will create and deploy a simple application that we create from a GitLab template.
-These instructions will also work for a self-hosted GitLab instance. However, you will
+These instructions will also work for a self-managed GitLab instance. However, you will
need to ensure your own [Runners are configured](../../ci/runners/README.md) and
[Google OAuth is enabled](../../integration/google.md).
**Note**: GitLab's Web Application Firewall is deployed with [Ingress](../../user/clusters/applications.md#Ingress),
so it will be available to your applications no matter how you deploy them to Kubernetes.
-## Enable or disable ModSecurity
-
-ModSecurity is enabled by default on GitLab.com. You can toggle the feature flag to false by running the following command in the Rails console:
-
-```ruby
-Feature.disable(:ingress_modsecurity)
-```
-
-Once disabled, you must uninstall and reinstall your Ingress application for the changes to take effect. See the [Feature Flag](../../user/project/operations/feature_flags.md) documentation for more information.
-
## Configuring your Google account
Before creating and connecting your Kubernetes cluster to your GitLab project,
@@ -80,11 +70,11 @@ under which this application will be deployed.
![Google auth](../autodevops/img/guide_google_auth_v12_3.png)
1. The last step is to provide the cluster details.
- 1. Give it a name, leave the environment scope as is, and choose the GCP project under which the cluster
- will be created (per the instructions to [configure your Google account](#configuring-your-google-account), a project should have already been created for you).
- 1. Choose the [region/zone](https://cloud.google.com/compute/docs/regions-zones/) under which the cluster will be created.
- 1. Enter the number of nodes you want it to have.
- 1. Choose the [machine type](https://cloud.google.com/compute/docs/machine-types).
+ 1. Give it a name, leave the environment scope as is, and choose the GCP project under which the cluster
+ will be created (per the instructions to [configure your Google account](#configuring-your-google-account), a project should have already been created for you).
+ 1. Choose the [region/zone](https://cloud.google.com/compute/docs/regions-zones/) under which the cluster will be created.
+ 1. Enter the number of nodes you want it to have.
+ 1. Choose the [machine type](https://cloud.google.com/compute/docs/machine-types).
![GitLab GKE cluster details](../autodevops/img/guide_gitlab_gke_details_v12_3.png)
@@ -112,10 +102,9 @@ Once it is installed, the other applications that rely on it will each have thei
For this guide, we need to install Ingress. Ingress provides load balancing,
SSL termination, and name-based virtual hosting, using NGINX behind
-the scenes. Make sure that the **Enable Web Application Firewall** button is checked
-before installing.
+the scenes. Make sure to switch the toggle to the enabled position before installing.
-![Cluster applications](./img/guide_waf_ingress_installation.png)
+![Cluster applications](./img/guide_waf_ingress_installation_v12_9.png)
After Ingress is installed, wait a few seconds and copy the IP address that
is displayed in order to add in your base **Domain** at the top of the page. For
@@ -180,40 +169,40 @@ your cluster either using [Cloud Shell](https://cloud.google.com/shell/) or the
1. After connecting to your cluster, check if the Ingress-NGINX controller is running and ModSecurity is enabled.
- This is done by running the following commands:
+ This is done by running the following commands:
- ```bash
- $ kubectl get pods -n gitlab-managed-apps | grep 'ingress-controller'
- ingress-nginx-ingress-controller-55f9cf6584-dxljn 2/2 Running
+ ```shell
+ $ kubectl get pods -n gitlab-managed-apps | grep 'ingress-controller'
+ ingress-nginx-ingress-controller-55f9cf6584-dxljn 2/2 Running
- $ kubectl -n gitlab-managed-apps exec -it $(kubectl get pods -n gitlab-managed-apps | grep 'ingress-controller' | awk '{print $1}') -- cat /etc/nginx/nginx.conf | grep 'modsecurity on;'
- modsecurity on;
- ```
+ $ kubectl -n gitlab-managed-apps exec -it $(kubectl get pods -n gitlab-managed-apps | grep 'ingress-controller' | awk '{print $1}') -- cat /etc/nginx/nginx.conf | grep 'modsecurity on;'
+ modsecurity on;
+ ```
1. Verify the Rails application has been installed properly.
- ```bash
- $ kubectl get ns
- auto-devv-2-16730183-production Active
+ ```shell
+ $ kubectl get ns
+ auto-devv-2-16730183-production Active
- $ kubectl get pods -n auto-devv-2-16730183-production
- NAME READY STATUS RESTARTS
- production-5778cfcfcd-nqjcm 1/1 Running 0
- production-postgres-6449f8cc98-r7xgg 1/1 Running 0
- ```
+ $ kubectl get pods -n auto-devv-2-16730183-production
+ NAME READY STATUS RESTARTS
+ production-5778cfcfcd-nqjcm 1/1 Running 0
+ production-postgres-6449f8cc98-r7xgg 1/1 Running 0
+ ```
1. To make sure the Rails application is responding, send a request to it by running:
- ```bash
- $ kubectl get ing -n auto-devv-2-16730183-production
- NAME HOSTS PORTS
- production-auto-deploy fjdiaz-auto-devv-2.34.68.60.207.nip.io,le-16730183.34.68.60.207.nip.io 80, 443
+ ```shell
+ $ kubectl get ing -n auto-devv-2-16730183-production
+ NAME HOSTS PORTS
+ production-auto-deploy fjdiaz-auto-devv-2.34.68.60.207.nip.io,le-16730183.34.68.60.207.nip.io 80, 443
- $ curl --location --insecure fjdiaz-auto-devv-2.34.68.60.207.nip.io | grep 'Rails!' --after 2 --before 2
- <body>
- <p>You're on Rails!</p>
- </body>
- ```
+ $ curl --location --insecure fjdiaz-auto-devv-2.34.68.60.207.nip.io | grep 'Rails!' --after 2 --before 2
+ <body>
+ <p>You're on Rails!</p>
+ </body>
+ ```
Now that we have confirmed our system is properly setup, we can go ahead and test
the WAF with OWASP CRS!
@@ -223,7 +212,7 @@ the WAF with OWASP CRS!
Now let's send a potentially malicious request, as if we were a scanner,
checking for vulnerabilities within our application and examine the modsecurity logs:
-```bash
+```shell
$ curl --location --insecure fjdiaz-auto-devv-2.34.68.60.207.nip.io --header "User-Agent: absinthe" | grep 'Rails!' --after 2 --before 2
<body>
<p>You're on Rails!</p>
diff --git a/doc/university/training/topics/subtree.md b/doc/university/training/topics/subtree.md
index e1ee7b6a836..5b08832084c 100644
--- a/doc/university/training/topics/subtree.md
+++ b/doc/university/training/topics/subtree.md
@@ -11,11 +11,11 @@ comments: false
## Subtree Aliases
-- Add: `git subtree add --prefix <target-folder> <url> <branch> --squash`.
-- Pull: `git subtree add --prefix <target-folder> <url> <branch> --squash`.
-- Push: `git subtree add --prefix <target-folder> <url> <branch>`.
+- Add: `git subtree add --prefix <target-folder> <url> <branch> --squash`
+- Pull: `git subtree pull --prefix <target-folder> <url> <branch> --squash`
+- Push: `git subtree push --prefix <target-folder> <url> <branch>`
- Ex: `git config alias.sbp 'subtree pull --prefix st /
- git@gitlab.com:balameb/subtree-nested-example.git master --squash'`.
+ git@gitlab.com:balameb/subtree-nested-example.git master --squash'`
```shell
# Add an alias
diff --git a/doc/update/README.md b/doc/update/README.md
index 1bd2770b957..93879efb19e 100644
--- a/doc/update/README.md
+++ b/doc/update/README.md
@@ -50,6 +50,7 @@ However, for this to work there are the following requirements:
migrations](../development/post_deployment_migrations.md) (included in
zero downtime update steps below).
- You are using PostgreSQL. Starting from GitLab 12.1, MySQL is not supported.
+- You are using a multi-node GitLab instance. Single-node instances may experience brief interruptions as services restart.
Most of the time you can safely upgrade from a patch release to the next minor
release if the patch release is not the latest. For example, upgrading from
@@ -115,17 +116,35 @@ following command:
**For Omnibus installations**
+If using GitLab 12.9 and newer, run:
+
```shell
sudo gitlab-rails runner -e production 'puts Gitlab::BackgroundMigration.remaining'
```
-**For installations from source**
+If using GitLab 12.8 and older, run the following using a Rails console:
+
+```ruby
+puts Sidekiq::Queue.new("background_migration").size
+Sidekiq::ScheduledSet.new.select { |r| r.klass == 'BackgroundMigrationWorker' }.size
```
+
+**For installations from source**
+
+If using GitLab 12.9 and newer, run:
+
+```shell
cd /home/git/gitlab
sudo -u git -H bundle exec rails runner -e production 'puts Gitlab::BackgroundMigration.remaining'
```
+If using GitLab 12.8 and older, run the following using a Rails console:
+
+```ruby
+puts Sidekiq::Queue.new("background_migration").size
+Sidekiq::ScheduledSet.new.select { |r| r.klass == 'BackgroundMigrationWorker' }.size
+```
+
## Upgrading to a new major version
Major versions are reserved for backwards incompatible changes. We recommend that
diff --git a/doc/update/mysql_to_postgresql.md b/doc/update/mysql_to_postgresql.md
index 08794b8a101..9ad77a80d50 100644
--- a/doc/update/mysql_to_postgresql.md
+++ b/doc/update/mysql_to_postgresql.md
@@ -54,13 +54,13 @@ pgloader within the container as it is not included in the container image.
1. Start a shell session in the context of the running container:
- ``` bash
+ ```shell
docker exec -it gitlab bash
```
1. Install pgloader:
- ``` bash
+ ```shell
apt-get update
apt-get -y install pgloader
```
@@ -78,7 +78,7 @@ need to enable the bundled PostgreSQL:
1. Edit `/etc/gitlab/gitlab.rb` to enable bundled PostgreSQL:
- ```
+ ```ruby
postgresql['enable'] = true
```
@@ -116,7 +116,7 @@ new PostgreSQL one:
1. Save the following snippet in a `commands.load` file, and edit with your
MySQL database `username`, `password` and `host`:
- ```
+ ```sql
LOAD DATABASE
FROM mysql://username:password@host/gitlabhq_production
INTO postgresql://gitlab-psql@unix://var/opt/gitlab/postgresql:/gitlabhq_production
@@ -143,7 +143,7 @@ new PostgreSQL one:
1. Once the migration finishes, you should see a summary table that looks like
the following:
- ```
+ ```plaintext
table name read imported errors total time
----------------------------------------------- --------- --------- --------- --------------
fetch meta data 119 119 0 0.388s
@@ -217,7 +217,7 @@ new PostgreSQL one:
1. Save the following snippet in a `commands.load` file, and edit with your
MySQL `username`, `password` and `host`:
- ```
+ ```sql
LOAD DATABASE
FROM mysql://username:password@host/gitlabhq_production
INTO postgresql://postgres@unix://var/run/postgresql:/gitlabhq_production
@@ -244,7 +244,7 @@ new PostgreSQL one:
1. Once the migration finishes, you should see a summary table that looks like
the following:
- ```
+ ```plaintext
table name read imported errors total time
----------------------------------------------- --------- --------- --------- --------------
fetch meta data 119 119 0 0.388s
@@ -284,7 +284,7 @@ Sometimes, you might encounter some errors during or after the migration.
The PostgreSQL user that you use for the migration MUST have **superuser** privileges.
Otherwise, you may see a similar message to the following:
-```
+```plaintext
debugger invoked on a CL-POSTGRES-ERROR:INSUFFICIENT-PRIVILEGE in thread
#<THREAD "lparallel" RUNNING {10078A3513}>:
Database error 42501: permission denied: "RI_ConstraintTrigger_a_20937" is a system trigger
diff --git a/doc/update/patch_versions.md b/doc/update/patch_versions.md
index ac7aee779c0..02792730d2b 100644
--- a/doc/update/patch_versions.md
+++ b/doc/update/patch_versions.md
@@ -96,7 +96,7 @@ sudo -u git -H make
### 8. Install/Update `gitlab-elasticsearch-indexer` **(STARTER ONLY)**
-Please follow the [install instruction](../integration/elasticsearch.md#installation).
+Please follow the [install instructions](../integration/elasticsearch.md#installing-elasticsearch).
### 9. Start application
diff --git a/doc/update/restore_after_failure.md b/doc/update/restore_after_failure.md
index 964a3c76c04..99329fdceb3 100644
--- a/doc/update/restore_after_failure.md
+++ b/doc/update/restore_after_failure.md
@@ -28,7 +28,7 @@ may need to manually correct the problem next time you upgrade.
Example error:
-```
+```plaintext
== 20151103134857 CreateLfsObjects: migrating =================================
-- create_table(:lfs_objects)
rake aborted!
@@ -48,7 +48,7 @@ need to do.
Pass the version to a database rake task to manually mark the migration as
complete.
-```
+```shell
# Source install
sudo -u git -H bundle exec rake gitlab:db:mark_migration_complete[20151103134857] RAILS_ENV=production
@@ -62,9 +62,9 @@ migrations are marked complete.
### GitLab < 8.6
-```
+```shell
# Source install
-sudo -u git -H bundle exec rails console production
+sudo -u git -H bundle exec rails console -e production
# Omnibus install
sudo gitlab-rails console
@@ -72,7 +72,7 @@ sudo gitlab-rails console
At the Rails console, type the following commands:
-```
+```ruby
ActiveRecord::Base.connection.execute("INSERT INTO schema_migrations (version) VALUES('20151103134857')")
exit
```
diff --git a/doc/update/upgrading_from_ce_to_ee.md b/doc/update/upgrading_from_ce_to_ee.md
index 5c395ddba70..28d5fe7aa5f 100644
--- a/doc/update/upgrading_from_ce_to_ee.md
+++ b/doc/update/upgrading_from_ce_to_ee.md
@@ -79,7 +79,7 @@ sudo -u git -H bundle exec rake cache:clear RAILS_ENV=production
### 4. Install `gitlab-elasticsearch-indexer` **(STARTER ONLY)**
-Please follow the [install instruction](../integration/elasticsearch.md#installation).
+Please follow the [install instructions](../integration/elasticsearch.md#installing-elasticsearch).
### 5. Start application
diff --git a/doc/user/admin_area/activating_deactivating_users.md b/doc/user/admin_area/activating_deactivating_users.md
index dcd7407ac85..9c153497e74 100644
--- a/doc/user/admin_area/activating_deactivating_users.md
+++ b/doc/user/admin_area/activating_deactivating_users.md
@@ -41,7 +41,7 @@ Please note that for the deactivation option to be visible to an admin, the user
Users can also be deactivated using the [GitLab API](../../api/users.md#deactivate-user).
NOTE: **Note:**
-A deactivated user does not consume a [seat](../../subscriptions/index.md#managing-subscriptions).
+A deactivated user does not consume a [seat](../../subscriptions/index.md#choosing-the-number-of-users).
## Activating a user
@@ -60,7 +60,7 @@ Users can also be activated using the [GitLab API](../../api/users.md#activate-u
NOTE: **Note:**
Activating a user will change the user's state to active and it consumes a
-[seat](../../subscriptions/index.md#managing-subscriptions).
+[seat](../../subscriptions/index.md#choosing-the-number-of-users).
TIP: **Tip:**
A deactivated user can also activate their account by themselves by simply logging back via the UI.
diff --git a/doc/user/admin_area/appearance.md b/doc/user/admin_area/appearance.md
index 374502018c4..80440b63f71 100644
--- a/doc/user/admin_area/appearance.md
+++ b/doc/user/admin_area/appearance.md
@@ -5,7 +5,7 @@ disqus_identifier: 'https://docs.gitlab.com/ee/customization/branded_login_page.
# GitLab Appearance **(CORE ONLY)**
-There are several options for customizing the appearance of a self hosted instance
+There are several options for customizing the appearance of a self-managed instance
of GitLab. These settings are accessed from the **Admin Area** in the **Appearance**
section.
@@ -47,7 +47,7 @@ instance, including the sign in / sign up page. The default color is white text
an orange background, but this can be customized by clicking on **Customize colors**.
Limited [Markdown](../markdown.md) is supported, such as bold, italics, and links, for
-example. Other Markdown features, including lists, images and quotes, are not supported,
+example. Other Markdown features, including lists, images, and quotes, are not supported
as the header and footer messages can only be a single line.
![header and footer screenshot](img/appearance_header_footer_v12_3.png)
diff --git a/doc/user/admin_area/blocking_unblocking_users.md b/doc/user/admin_area/blocking_unblocking_users.md
index cb86e28ff1e..e3b9cd1218c 100644
--- a/doc/user/admin_area/blocking_unblocking_users.md
+++ b/doc/user/admin_area/blocking_unblocking_users.md
@@ -30,7 +30,7 @@ Personal projects, and group and user history of the blocked user will be left i
Users can also be blocked using the [GitLab API](../../api/users.md#block-user).
NOTE: **Note:**
-A blocked user does not consume a [seat](../../subscriptions/index.md#managing-subscriptions).
+A blocked user does not consume a [seat](../../subscriptions/index.md#choosing-the-number-of-users).
## Unblocking a user
@@ -45,4 +45,4 @@ Users can also be unblocked using the [GitLab API](../../api/users.md#unblock-us
NOTE: **Note:**
Unblocking a user will change the user's state to active and it consumes a
-[seat](../../subscriptions/index.md#managing-subscriptions).
+[seat](../../subscriptions/index.md#choosing-the-number-of-users).
diff --git a/doc/user/admin_area/broadcast_messages.md b/doc/user/admin_area/broadcast_messages.md
index bc51552603d..416bd3bfd00 100644
--- a/doc/user/admin_area/broadcast_messages.md
+++ b/doc/user/admin_area/broadcast_messages.md
@@ -28,7 +28,7 @@ To add a broadcast message:
NOTE: **Note:**
Once a broadcast message has expired, it is no longer displayed in the UI but is still listed in the
-list of broadcast messages.
+list of broadcast messages. Users can also dismiss a broadcast message if the **Dismissable** option is set.
## Editing a broadcast message
diff --git a/doc/user/admin_area/index.md b/doc/user/admin_area/index.md
index 2e502c1b6fb..6ec09c071ca 100644
--- a/doc/user/admin_area/index.md
+++ b/doc/user/admin_area/index.md
@@ -8,7 +8,7 @@ The Admin Area provides a web UI for administering some features of GitLab self-
To access the Admin Area, either:
-- Click the Admin Area icon (the spanner or wrench icon).
+- Click the Admin Area icon (**{admin}**).
- Visit `/admin` on your self-managed instance.
NOTE: **Note:**
@@ -18,24 +18,24 @@ Only admin users can access the Admin Area.
The Admin Area is made up of the following sections:
-| Section | Description |
-|:--------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [Overview](#overview-section) | View your GitLab [Dashboard](#admin-dashboard), and administer [projects](#administering-projects), [users](#administering-users), [groups](#administering-groups), [jobs](#administering-jobs), [Runners](#administering-runners), and [Gitaly servers](#administering-gitaly-servers). |
-| Monitoring | View GitLab [system information](#system-info), and information on [background jobs](#background-jobs), [logs](#logs), [health checks](monitoring/health_check.md), [requests profiles](#requests-profiles), and [audit logs](#audit-log-premium-only). |
-| Messages | Send and manage [broadcast messages](broadcast_messages.md) for your users. |
-| System Hooks | Configure [system hooks](../../system_hooks/system_hooks.md) for many events. |
-| Applications | Create system [OAuth applications](../../integration/oauth_provider.md) for integrations with other services. |
-| Abuse Reports | Manage [abuse reports](abuse_reports.md) submitted by your users. |
-| License **(STARTER ONLY)** | Upload, display, and remove [licenses](license.md). |
-| Kubernetes | Create and manage instance-level [Kubernetes clusters](../instance/clusters/index.md). |
-Push Rules **(STARTER ONLY)** | Configure pre-defined Git [push rules](../../push_rules/push_rules.md) for projects. Also, configure [merge requests approvers rules](merge_requests_approvals.md). **(PREMIUM ONLY)** |
-| Geo **(PREMIUM ONLY)** | Configure and maintain [Geo nodes](geo_nodes.md). |
-| Deploy Keys | Create instance-wide [SSH deploy keys](../../ssh/README.md#deploy-keys). |
-| Credentials **(ULTIMATE ONLY)** | View [credentials](credentials_inventory.md) that can be used to access your instance. |
-| Service Templates | Create [service templates](../project/integrations/services_templates.md) for projects. |
-| Labels | Create and maintain [labels](labels.md) for your GitLab instance. |
-| Appearance | Customize [GitLab's appearance](appearance.md). |
-| Settings | Modify the [settings](settings/index.md) for your GitLab instance. |
+| Section | Description |
+|:-----------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| **{overview}** [Overview](#overview-section) | View your GitLab [Dashboard](#admin-dashboard), and administer [projects](#administering-projects), [users](#administering-users), [groups](#administering-groups), [jobs](#administering-jobs), [Runners](#administering-runners), and [Gitaly servers](#administering-gitaly-servers). |
+| **{monitor}** Monitoring | View GitLab [system information](#system-info), and information on [background jobs](#background-jobs), [logs](#logs), [health checks](monitoring/health_check.md), [requests profiles](#requests-profiles), and [audit logs](#audit-log-premium-only). |
+| **{messages}** Messages | Send and manage [broadcast messages](broadcast_messages.md) for your users. |
+| **{hook}** System Hooks | Configure [system hooks](../../system_hooks/system_hooks.md) for many events. |
+| **{applications}** Applications | Create system [OAuth applications](../../integration/oauth_provider.md) for integrations with other services. |
+| **{slight-frown}** Abuse Reports | Manage [abuse reports](abuse_reports.md) submitted by your users. |
+| **{license}** License **(STARTER ONLY)** | Upload, display, and remove [licenses](license.md). |
+| **{cloud-gear}** Kubernetes | Create and manage instance-level [Kubernetes clusters](../instance/clusters/index.md). |
+| **{push-rules}** Push Rules **(STARTER ONLY)** | Configure pre-defined Git [push rules](../../push_rules/push_rules.md) for projects. Also, configure [merge requests approvers rules](merge_requests_approvals.md). **(PREMIUM ONLY)** |
+| **{location-dot}** Geo **(PREMIUM ONLY)** | Configure and maintain [Geo nodes](geo_nodes.md). |
+| **{key}** Deploy Keys | Create instance-wide [SSH deploy keys](../../ssh/README.md#deploy-keys). |
+| **{lock}** Credentials **(ULTIMATE ONLY)** | View [credentials](credentials_inventory.md) that can be used to access your instance. |
+| **{template}** Service Templates | Create [service templates](../project/integrations/services_templates.md) for projects. |
+| **{labels}** Labels | Create and maintain [labels](labels.md) for your GitLab instance. |
+| **{appearance}** Appearance | Customize [GitLab's appearance](appearance.md). |
+| **{settings}** Settings | Modify the [settings](settings/index.md) for your GitLab instance. |
## Admin Dashboard
@@ -43,7 +43,7 @@ The Dashboard provides statistics and system information about the GitLab instan
To access the Dashboard, either:
-- Click the Admin Area icon (the wrench icon).
+- Click the Admin Area icon (**{admin}**).
- Visit `/admin` on your self-managed instance.
The Dashboard is the default view of the Admin Area, and is made up of the following sections:
@@ -59,13 +59,13 @@ The Dashboard is the default view of the Admin Area, and is made up of the follo
## Overview section
-The following topics document the **Overview** section of the Admin Area.
+The following topics document the **{overview}** **Overview** section of the Admin Area.
### Administering Projects
You can administer all projects in the GitLab instance from the Admin Area's Projects page.
-To access the Projects page, go to **Admin Area > Overview > Projects**.
+To access the Projects page, go to **{admin}** **Admin Area >** **{overview}** **Overview > Projects**.
Click the **All**, **Private**, **Internal**, or **Public** tab to list only projects of that
criteria.
@@ -105,7 +105,7 @@ You can combine the filter options. For example, to list only public projects wi
You can administer all users in the GitLab instance from the Admin Area's Users page.
-To access the Users page, go to **Admin Area > Overview > Users**.
+To access the Users page, go to **{admin}** **Admin Area >** **{overview}** **Overview > Users**.
To list users matching a specific criteria, click on one of the following tabs on the **Users** page:
@@ -138,7 +138,7 @@ you must provide the complete email address.
You can administer all groups in the GitLab instance from the Admin Area's Groups page.
-To access the Groups page, go to **Admin Area > Overview > Groups**.
+To access the Groups page, go to **{admin}** **Admin Area >** **{overview}** **Overview > Groups**.
For each group, the page displays their name, description, size, number of projects in the group,
number of members, and whether the group is private, internal, or public. To edit a group, click
@@ -157,7 +157,7 @@ To [Create a new group](../group/index.md#create-a-new-group) click **New group*
You can administer all jobs in the GitLab instance from the Admin Area's Jobs page.
-To access the Jobs page, go to **Admin Area > Overview > Jobs**.
+To access the Jobs page, go to **{admin}** **Admin Area >** **{overview}** **Overview > Jobs**.
All jobs are listed, in descending order of job ID.
@@ -182,7 +182,7 @@ For each job, the following details are listed:
You can administer all Runners in the GitLab instance from the Admin Area's **Runners** page. See
[GitLab Runner](https://docs.gitlab.com/runner/) for more information on Runner itself.
-To access the **Runners** page, go to **Admin Area > Overview > Runners**.
+To access the **Runners** page, go to **{admin}** **Admin Area >** **{overview}** **Overview > Runners**.
The **Runners** page features:
@@ -228,7 +228,7 @@ You can also edit, pause, or remove each Runner.
You can list all Gitaly servers in the GitLab instance from the Admin Area's **Gitaly Servers**
page. For more details, see [Gitaly](../../administration/gitaly/index.md).
-To access the **Gitaly Servers** page, go to **Admin Area > Overview > Gitaly Servers**.
+To access the **Gitaly Servers** page, go to **{admin}** **Admin Area >** **{overview}** **Overview > Gitaly Servers**.
For each Gitaly server, the following details are listed:
@@ -242,7 +242,7 @@ For each Gitaly server, the following details are listed:
## Monitoring section
-The following topics document the **Monitoring** section of the Admin Area.
+The following topics document the **{monitor}** **Monitoring** section of the Admin Area.
### System Info
diff --git a/doc/user/admin_area/settings/account_and_limit_settings.md b/doc/user/admin_area/settings/account_and_limit_settings.md
index 26497d77851..265a43367d9 100644
--- a/doc/user/admin_area/settings/account_and_limit_settings.md
+++ b/doc/user/admin_area/settings/account_and_limit_settings.md
@@ -15,7 +15,7 @@ If you choose a size larger than what is currently configured for the web server
you will likely get errors. See the [troubleshooting section](#troubleshooting) for more
details.
-## Repository size limit **(STARTER)**
+## Repository size limit **(STARTER ONLY)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/740) in [GitLab Enterprise Edition 8.12](https://about.gitlab.com/blog/2016/09/22/gitlab-8-12-released/#limit-project-size-ee).
> Available in [GitLab Starter](https://about.gitlab.com/pricing/).
diff --git a/doc/user/admin_area/settings/continuous_integration.md b/doc/user/admin_area/settings/continuous_integration.md
index da982acd28e..1244f0da0e9 100644
--- a/doc/user/admin_area/settings/continuous_integration.md
+++ b/doc/user/admin_area/settings/continuous_integration.md
@@ -4,7 +4,7 @@ type: reference
# Continuous Integration and Deployment Admin settings **(CORE ONLY)**
-In this area, you will find settings for Auto DevOps, Runners and job artifacts.
+In this area, you will find settings for Auto DevOps, Runners, and job artifacts.
You can find it in the **Admin Area > Settings > CI/CD**.
![Admin Area settings button](../img/admin_area_settings_button.png)
@@ -53,7 +53,7 @@ To change it at the:
1. Change the value of **maximum artifacts size (in MB)**.
1. Press **Save changes** for the changes to take effect.
-- [Project level](../../project/pipelines/settings.md) (this will override the instance and group settings):
+- [Project level](../../../ci/pipelines/settings.md) (this will override the instance and group settings):
1. Go to the project's **Settings > CI / CD > General Pipelines**.
1. Change the value of **maximum artifacts size (in MB)**.
@@ -152,7 +152,7 @@ Area of your GitLab instance (`.gitlab-ci.yml` if not set):
1. Input the new path in the **Default CI configuration path** field.
1. Hit **Save changes** for the changes to take effect.
-It is also possible to specify a [custom CI configuration path for a specific project](../../project/pipelines/settings.md#custom-ci-configuration-path).
+It is also possible to specify a [custom CI configuration path for a specific project](../../../ci/pipelines/settings.md#custom-ci-configuration-path).
<!-- ## Troubleshooting
@@ -191,3 +191,16 @@ To set required pipeline configuration:
1. Click **Save changes**.
![Required pipeline](img/admin_required_pipeline.png)
+
+## Package Registry configuration **(PREMIUM ONLY)**
+
+GitLab administrators can disable the forwarding of NPM requests to [npmjs.com](https://www.npmjs.com/).
+
+To disable it:
+
+1. Go to **Admin Area > Settings > CI/CD**.
+1. Expand the **Package Registry** section.
+1. Uncheck **Enable forwarding of NPM package requests to npmjs.org**.
+1. Click **Save changes**.
+
+![NPM package requests forwarding](img/admin_package_registry_npm_package_requests_forward.png)
diff --git a/doc/user/admin_area/settings/gitaly_timeouts.md b/doc/user/admin_area/settings/gitaly_timeouts.md
new file mode 100644
index 00000000000..775a99d7574
--- /dev/null
+++ b/doc/user/admin_area/settings/gitaly_timeouts.md
@@ -0,0 +1,34 @@
+---
+type: reference
+---
+
+# Gitaly timeouts
+
+![gitaly timeouts](img/gitaly_timeouts.png)
+
+Three timeout types can be configured to make sure that long-running
+Gitaly calls don't needlessly take up resources.
+
+- Default timeout
+
+This timeout is the default for most Gitaly calls.
+It should be shorter than the worker timeout that can be configured
+for
+[Puma](https://docs.gitlab.com/omnibus/settings/puma.html#puma-settings)
+or [Unicorn](https://docs.gitlab.com/omnibus/settings/unicorn.html).
+This makes sure that Gitaly calls made within a web request cannot
+exceed the entire request timeout.
+
+The default for this timeout is 55 seconds.
+
+- Fast timeout
+
+This is the timeout for very short Gitaly calls.
+
+The default for this timeout is 10 seconds.
+
+- Medium timeout
+
+This timeout should be between the default and the fast timeout.
+
+The default for this timeout is 30 seconds.
diff --git a/doc/user/admin_area/settings/img/admin_package_registry_npm_package_requests_forward.png b/doc/user/admin_area/settings/img/admin_package_registry_npm_package_requests_forward.png
new file mode 100644
index 00000000000..b6068f5d19b
--- /dev/null
+++ b/doc/user/admin_area/settings/img/admin_package_registry_npm_package_requests_forward.png
Binary files differ
diff --git a/doc/user/admin_area/settings/img/gitaly_timeouts.png b/doc/user/admin_area/settings/img/gitaly_timeouts.png
new file mode 100644
index 00000000000..28394d238f7
--- /dev/null
+++ b/doc/user/admin_area/settings/img/gitaly_timeouts.png
Binary files differ
diff --git a/doc/user/admin_area/settings/index.md b/doc/user/admin_area/settings/index.md
index 07d614b449b..103a3f7230d 100644
--- a/doc/user/admin_area/settings/index.md
+++ b/doc/user/admin_area/settings/index.md
@@ -24,6 +24,7 @@ include:
- [Protected paths](protected_paths.md) **(CORE ONLY)**
- [Help messages for the `/help` page and the login page](help_page.md)
- [Push event activities limit and bulk push events](push_event_activities_limit.md)
+- [Gitaly timeouts](gitaly_timeouts.md)
NOTE: **Note:**
You can change the [first day of the week](../../profile/preferences.md) for the entire GitLab instance
diff --git a/doc/user/admin_area/settings/usage_statistics.md b/doc/user/admin_area/settings/usage_statistics.md
index 52b92c98482..fff7544139e 100644
--- a/doc/user/admin_area/settings/usage_statistics.md
+++ b/doc/user/admin_area/settings/usage_statistics.md
@@ -10,6 +10,10 @@ to perform various actions.
All statistics are opt-out. You can enable/disable them in the
**Admin Area > Settings > Metrics and profiling** section **Usage statistics**.
+NOTE: **Note:**
+Allow network traffic from your GitLab instance to IP address `104.196.17.203:443` to send
+usage statistics to GitLab Inc.
+
## Version Check **(CORE ONLY)**
If enabled, version check will inform you if a new version is available and the
@@ -70,6 +74,8 @@ You can view the exact JSON payload in the administration panel. To view the pay
You can see how [the usage ping data maps to different stages of the product](https://gitlab.com/gitlab-data/analytics/blob/master/transform/snowflake-dbt/data/version_usage_stats_to_stage_mappings.csv).
+Usage ping is important to GitLab as we use it to calculate our [Action Monthly Active Users (AMAU)](https://about.gitlab.com/handbook/product/metrics/#action-monthly-active-users-amau) which helps us measure the success of our features.
+
### Request flow example
The following example shows a basic request/response flow between the self-managed GitLab instance, GitLab Version Application,
@@ -144,3 +150,292 @@ but commented out to help encourage others to add to it in the future. -->
[ee-735]: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/735
[ce-23361]: https://gitlab.com/gitlab-org/gitlab-foss/issues/23361
[ee-6602]: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/6602
+
+## Usage Statistics Collected
+
+| Statistic | Section | Stage | Description |
+|---|---|---|---|
+|uuid|||
+|hostname|||
+|version|||
+|installation_type|||
+|active_user_count|||
+|recorded_at|||
+|edition|||
+|license_md5|||
+|license_id|||
+|historical_max_users|||
+|Name|licensee||
+|Email|licensee||
+|Company|licensee||
+|license_user_count|||
+|license_starts_at|||
+|license_expires_at|||
+|license_plan|||
+|license_trial|||
+|assignee_lists|counts||
+|boards|counts||
+|ci_builds|counts||
+|ci_internal_pipelines|counts||
+|ci_external_pipelines|counts||
+|ci_pipeline_config_auto_devops|counts||
+|ci_pipeline_config_repository|counts||
+|ci_runners|counts||
+|ci_triggers|counts||
+|ci_pipeline_schedules|counts||
+|auto_devops_enabled|counts||
+|auto_devops_disabled|counts||
+|deploy_keys|counts||
+|deployments|counts||
+|successful_deployments|counts||
+|failed_deployments|counts||
+|environments|counts||
+|clusters|counts||
+|clusters_enabled|counts||
+|project_clusters_enabled|counts||
+|group_clusters_enabled|counts||
+|clusters_disabled|counts||
+|project_clusters_disabled|counts||
+|group_clusters_disabled|counts||
+|clusters_platforms_eks|counts||
+|clusters_platforms_gke|counts||
+|clusters_platforms_user|counts||
+|clusters_applications_helm|counts||
+|clusters_applications_ingress|counts||
+|clusters_applications_cert_managers|counts||
+|clusters_applications_crossplane|counts||
+|clusters_applications_prometheus|counts||
+|clusters_applications_runner|counts||
+|clusters_applications_knative|counts||
+|clusters_applications_elastic_stack|counts||
+|in_review_folder|counts||
+|grafana_integrated_projects|counts||
+|groups|counts||
+|issues|counts||
+|issues_created_from_gitlab_error_tracking_ui|counts||
+|issues_with_associated_zoom_link|counts||
+|issues_using_zoom_quick_actions|counts||
+|issues_with_embedded_grafana_charts_approx|counts||
+|keys|counts||
+|label_lists|counts||
+|lfs_objects|counts||
+|milestone_lists|counts||
+|milestones|counts||
+|pages_domains|counts||
+|pool_repositories|counts||
+|projects|counts||
+|projects_imported_from_github|counts||
+|projects_with_repositories_enabled|counts||
+|projects_with_error_tracking_enabled|counts||
+|protected_branches|counts||
+|releases|counts||
+|remote_mirrors|counts||
+|snippets|counts||
+|suggestions|counts||
+|todos|counts||
+|uploads|counts||
+|web_hooks|counts||
+|projects_alerts_active|counts||
+|projects_asana_active|counts||
+|projects_assembla_active|counts||
+|projects_bamboo_active|counts||
+|projects_bugzilla_active|counts||
+|projects_buildkite_active|counts||
+|projects_campfire_active|counts||
+|projects_custom_issue_tracker_active|counts||
+|projects_discord_active|counts||
+|projects_drone_ci_active|counts||
+|projects_emails_on_push_active|counts||
+|projects_external_wiki_active|counts||
+|projects_flowdock_active|counts||
+|projects_github_active|counts||
+|projects_hangouts_chat_active|counts||
+|projects_hipchat_active|counts||
+|projects_irker_active|counts||
+|projects_jenkins_active|counts||
+|projects_jenkins_deprecated_active|counts||
+|projects_jira_active|counts||
+|projects_mattermost_active|counts||
+|projects_mattermost_slash_commands_active|counts||
+|projects_microsoft_teams_active|counts||
+|projects_packagist_active|counts||
+|projects_pipelines_email_active|counts||
+|projects_pivotaltracker_active|counts||
+|projects_prometheus_active|counts||
+|projects_pushover_active|counts||
+|projects_redmine_active|counts||
+|projects_slack_active|counts||
+|projects_slack_slash_commands_active|counts||
+|projects_teamcity_active|counts||
+|projects_unify_circuit_active|counts||
+|projects_youtrack_active|counts||
+|projects_slack_notifications_active|counts||
+|projects_slack_slash_active|counts||
+|projects_jira_server_active|counts||
+|projects_jira_cloud_active|counts||
+|projects_jira_dvcs_cloud_active|counts||
+|projects_jira_dvcs_server_active|counts||
+|labels|counts||
+|merge_requests|counts||
+|notes|counts||
+|wiki_pages_create|counts||
+|wiki_pages_update|counts||
+|wiki_pages_delete|counts||
+|web_ide_commits|counts||
+|web_ide_views|counts||
+|web_ide_merge_requests|counts||
+|web_ide_previews|counts||
+|snippet_comment|counts||
+|commit_comment|counts||
+|merge_request_comment|counts||
+|snippet_create|counts||
+|snippet_update|counts||
+|navbar_searches|counts||
+|cycle_analytics_views|counts||
+|productivity_analytics_views|counts||
+|source_code_pushes|counts||
+|merge_request_create|counts||
+|design_management_designs_create|counts||
+|design_management_designs_update|counts||
+|design_management_designs_delete|counts||
+|licenses_list_views|counts||
+|user_preferences_group_overview_details|counts||
+|user_preferences_group_overview_security_dashboard|counts||
+|ingress_modsecurity_blocking|counts||
+|ingress_modsecurity_disabled|counts||
+|dependency_list_usages_total|counts||
+|epics|counts||
+|feature_flags|counts||
+|geo_nodes|counts||
+|incident_issues|counts||
+|ldap_group_links|counts||
+|ldap_keys|counts||
+|ldap_users|counts||
+|pod_logs_usages_total|counts||
+|projects_enforcing_code_owner_approval|counts||
+|projects_mirrored_with_pipelines_enabled|counts||
+|projects_reporting_ci_cd_back_to_github|counts||
+|projects_with_packages|counts||
+|projects_with_prometheus_alerts|counts||
+|projects_with_tracing_enabled|counts||
+|projects_with_alerts_service_enabled|counts||
+|template_repositories|counts||
+|container_scanning_jobs|counts||
+|dependency_scanning_jobs|counts||
+|license_management_jobs|counts||
+|sast_jobs|counts||
+|epics_deepest_relationship_level|counts||
+|operations_dashboard_default_dashboard|counts||
+|operations_dashboard_users_with_projects_added|counts||
+|container_registry_enabled|||
+|dependency_proxy_enabled|||
+|gitlab_shared_runners_enabled|||
+|gravatar_enabled|||
+|influxdb_metrics_enabled|||
+|ldap_enabled|||
+|mattermost_enabled|||
+|omniauth_enabled|||
+|prometheus_metrics_enabled|||
+|reply_by_email_enabled|||
+|signup_enabled|||
+|web_ide_clientside_preview_enabled|||
+|ingress_modsecurity_enabled|||
+|elasticsearch_enabled|||
+|license_trial_ends_on|||
+|geo_enabled|||
+|version|Git||
+|version|Gitaly||
+|servers|Gitaly||
+|filesystems|Gitaly||
+|enabled|gitlab_pages||
+|version|gitlab_pages||
+|adapter|database||
+|version|database||
+|average|avg_cycle_analytics - issue||
+|sd|avg_cycle_analytics - issue||
+|missing|avg_cycle_analytics - issue||
+|average|avg_cycle_analytics - plan||
+|sd|avg_cycle_analytics - plan||
+|missing|avg_cycle_analytics - plan||
+|average|avg_cycle_analytics - code||
+|sd|avg_cycle_analytics - code||
+|missing|avg_cycle_analytics - code||
+|average|avg_cycle_analytics - test||
+|sd|avg_cycle_analytics - test||
+|missing|avg_cycle_analytics - test||
+|average|avg_cycle_analytics - review||
+|sd|avg_cycle_analytics - review||
+|missing|avg_cycle_analytics - review||
+|average|avg_cycle_analytics - staging||
+|sd|avg_cycle_analytics - staging||
+|missing|avg_cycle_analytics - staging||
+|average|avg_cycle_analytics - production||
+|sd|avg_cycle_analytics - production||
+|missing|avg_cycle_analytics - production||
+|total|avg_cycle_analytics||
+|clusters_applications_cert_managers|usage_activity_by_stage|configure|
+|clusters_applications_helm|usage_activity_by_stage|configure|
+|clusters_applications_ingress|usage_activity_by_stage|configure|
+|clusters_applications_knative|usage_activity_by_stage|configure|
+|clusters_disabled|usage_activity_by_stage|configure|
+|clusters_enabled|usage_activity_by_stage|configure|
+|clusters_platforms_gke|usage_activity_by_stage|configure|
+|clusters_platforms_eks|usage_activity_by_stage|configure|
+|clusters_platforms_user|usage_activity_by_stage|configure|
+|group_clusters_disabled|usage_activity_by_stage|configure|
+|group_clusters_enabled|usage_activity_by_stage|configure|
+|project_clusters_disabled|usage_activity_by_stage|configure|
+|project_clusters_enabled|usage_activity_by_stage|configure|
+|projects_slack_notifications_active|usage_activity_by_stage|configure|
+|projects_slack_slash_active|usage_activity_by_stage|configure|
+|projects_with_prometheus_alerts|usage_activity_by_stage|configure|
+|deploy_keys|usage_activity_by_stage|create|
+|keys|usage_activity_by_stage|create|
+|merge_requests|usage_activity_by_stage|create|
+|projects_enforcing_code_owner_approval|usage_activity_by_stage|create|
+|projects_imported_from_github|usage_activity_by_stage|create|
+|projects_with_repositories_enabled|usage_activity_by_stage|create|
+|protected_branches|usage_activity_by_stage|create|
+|remote_mirrors|usage_activity_by_stage|create|
+|snippets|usage_activity_by_stage|create|
+|suggestions|usage_activity_by_stage|create|
+|groups|usage_activity_by_stage|manage|
+|ldap_keys|usage_activity_by_stage|manage|
+|ldap_users|usage_activity_by_stage|manage|
+|clusters|usage_activity_by_stage|monitor|
+|clusters_applications_prometheus|usage_activity_by_stage|monitor|
+|operations_dashboard_default_dashboard|usage_activity_by_stage|monitor|
+|operations_dashboard_users_with_projects_added|usage_activity_by_stage|monitor|
+|projects_prometheus_active|usage_activity_by_stage|monitor|
+|projects_with_error_tracking_enabled|usage_activity_by_stage|monitor|
+|projects_with_tracing_enabled|usage_activity_by_stage|monitor|
+|projects_with_packages|usage_activity_by_stage|package|
+|assignee_lists|usage_activity_by_stage|plan|
+|epics|usage_activity_by_stage|plan|
+|issues|usage_activity_by_stage|plan|
+|label_lists|usage_activity_by_stage|plan|
+|milestone_lists|usage_activity_by_stage|plan|
+|notes|usage_activity_by_stage|plan|
+|projects|usage_activity_by_stage|plan|
+|projects_jira_active|usage_activity_by_stage|plan|
+|projects_jira_dvcs_cloud_active|usage_activity_by_stage|plan|
+|projects_jira_dvcs_server_active|usage_activity_by_stage|plan|
+|service_desk_enabled_projects|usage_activity_by_stage|plan|
+|service_desk_issues|usage_activity_by_stage|plan|
+|todos|usage_activity_by_stage|plan|
+|deployments|usage_activity_by_stage|release|
+|failed_deployments|usage_activity_by_stage|release|
+|projects_mirrored_with_pipelines_enabled|usage_activity_by_stage|release|
+|releases|usage_activity_by_stage|release|
+|successful_deployments|usage_activity_by_stage|release|
+|user_preferences_group_overview_security_dashboard|usage_activity_by_stage|secure|
+|ci_builds|usage_activity_by_stage|verify|
+|ci_external_pipelines|usage_activity_by_stage|verify|
+|ci_internal_pipelines|usage_activity_by_stage|verify|
+|ci_pipeline_config_auto_devops|usage_activity_by_stage|verify|
+|ci_pipeline_config_repository|usage_activity_by_stage|verify|
+|ci_pipeline_schedules|usage_activity_by_stage|verify|
+|ci_pipelines|usage_activity_by_stage|verify|
+|ci_triggers|usage_activity_by_stage|verify|
+|clusters_applications_runner|usage_activity_by_stage|verify|
+|projects_reporting_ci_cd_back_to_github|usage_activity_by_stage|verify|
diff --git a/doc/user/admin_area/settings/visibility_and_access_controls.md b/doc/user/admin_area/settings/visibility_and_access_controls.md
index 8f64e9207b5..704dd89ede2 100644
--- a/doc/user/admin_area/settings/visibility_and_access_controls.md
+++ b/doc/user/admin_area/settings/visibility_and_access_controls.md
@@ -26,6 +26,8 @@ To change the default branch protection:
For more details, see [Protected branches](../../project/protected_branches.md).
+To change this setting for a specific group, see [Default branch protection for groups](../../group/index.md#changing-the-default-branch-protection-of-a-group).
+
## Default project creation protection
Project creation protection specifies which roles can create projects.
diff --git a/doc/user/analytics/img/code_review_analytics_v12_8.png b/doc/user/analytics/img/code_review_analytics_v12_8.png
index 228e03e628a..3b23e74130a 100644
--- a/doc/user/analytics/img/code_review_analytics_v12_8.png
+++ b/doc/user/analytics/img/code_review_analytics_v12_8.png
Binary files differ
diff --git a/doc/user/analytics/productivity_analytics.md b/doc/user/analytics/productivity_analytics.md
index 36dd9b5d6bf..0fa990150d7 100644
--- a/doc/user/analytics/productivity_analytics.md
+++ b/doc/user/analytics/productivity_analytics.md
@@ -8,19 +8,17 @@ For many companies, the development cycle is a blackbox and getting an estimate
long, on average, it takes to deliver features is an enormous endeavor.
While [Value Stream Analytics](../project/cycle_analytics.md) focuses on the entire
-Software Development Life Cycle (SDLC) process, Productivity Analytics provides a way for Engineering Management to drill down in a systematic way to uncover patterns and causes for success or failure at an individual, project or group level.
+Software Development Life Cycle (SDLC) process, Productivity Analytics provides a way for Engineering Management to drill down in a systematic way to uncover patterns and causes for success or failure at an individual, project, or group level.
Productivity can slow down for many reasons ranging from degrading code base to quickly growing teams. In order to investigate, department or team leaders can start by visualizing the time it takes for merge requests to be merged.
-By default, a data migration job covering three months of historical data will kick off when deploying Productivity Analytics for the first time.
-
## Supported features
Productivity Analytics allows GitLab users to:
- Visualize typical merge request (MR) lifetime and statistics. Use a histogram that shows the distribution of the time elapsed between creating and merging merge requests.
- Drill down into the most time consuming merge requests, select a number of outliers, and filter down all subsequent charts to investigate potential causes.
-- Filter by group, project, author, label, milestone, or a specific date range. Filter down, for example, to the merge requests of a specific author in a group or project during a milestone or specific date range.
+- Filter by group, project, author, label, milestone, or a specific date range. For example, filter down to the merge requests of a specific author in a group or project during a milestone or specific date range.
- Measure velocity over time. Visualize the trends of each metric from the charts above over time in order to observe progress. Zoom in on a particular date range if you notice outliers.
## Accessing metrics and visualizations
diff --git a/doc/user/analytics/value_stream_analytics.md b/doc/user/analytics/value_stream_analytics.md
index 718367dc69d..9d925b00d8b 100644
--- a/doc/user/analytics/value_stream_analytics.md
+++ b/doc/user/analytics/value_stream_analytics.md
@@ -25,12 +25,7 @@ calculates a separate median for each stage.
Value Stream Analytics is available:
-- From GitLab 12.3, at the group level in the analytics workspace (top navigation bar) at
- **Analytics > Value Stream Analytics**. **(PREMIUM)**
-
- In the future, multiple groups will be selectable which will effectively make this an
- instance-level feature.
-
+- From GitLab 12.9, at the group level via **Group > Analytics > Value Stream**. **(PREMIUM)**
- At the project level via **Project > Value Stream Analytics**.
There are seven stages that are tracked as part of the Value Stream Analytics calculations.
@@ -78,8 +73,8 @@ Each stage of Value Stream Analytics is further described in the table below.
| Plan | Measures the median time between the action you took for the previous stage, and pushing the first commit to the branch. The very first commit of the branch is the one that triggers the separation between **Plan** and **Code**, and at least one of the commits in the branch needs to contain the related issue number (e.g., `#42`). If none of the commits in the branch mention the related issue number, it is not considered to the measurement time of the stage. |
| Code | Measures the median time between pushing a first commit (previous stage) and creating a merge request (MR) related to that commit. The key to keep the process tracked is to include the [issue closing pattern](../project/issues/managing_issues.md#closing-issues-automatically) to the description of the merge request (for example, `Closes #xxx`, where `xxx` is the number of the issue related to this merge request). If the issue closing pattern is not present in the merge request description, the MR is not considered to the measurement time of the stage. |
| Test | Measures the median time to run the entire pipeline for that project. It's related to the time GitLab CI takes to run every job for the commits pushed to that merge request defined in the previous stage. It is basically the start->finish time for all pipelines. |
-| Review | Measures the median time taken to review the merge request that has closing issue pattern, between its creation and until it's merged. |
-| Staging | Measures the median time between merging the merge request with closing issue pattern until the very first deployment to production. It's tracked by the environment set to `production` or matching `production/*` (case-sensitive, `Production` won't work) in your GitLab CI configuration. If there isn't a production environment, this is not tracked. |
+| Review | Measures the median time taken to review the merge request that has a closing issue pattern, from its creation until it's merged. |
+| Staging | Measures the median time from merging the merge request with a closing issue pattern until the very first deployment to production. It's tracked by the environment set to `production` or matching `production/*` (case-sensitive, `Production` won't work) in your GitLab CI configuration. If there isn't a production environment, this is not tracked. |
| Total | The sum of all time (medians) taken to run the entire process, from issue creation to deploying the code to production. [Previously known](https://gitlab.com/gitlab-org/gitlab/issues/38317) as **Production**. |
How this works, behind the scenes:
@@ -133,12 +128,12 @@ environments is configured.
From the above example you can conclude the time it took each stage to complete
as long as their total time:
-- **Issue**: 2h (11:00 - 09:00)
-- **Plan**: 1h (12:00 - 11:00)
-- **Code**: 2h (14:00 - 12:00)
-- **Test**: 5min
+- **Issue**: 2h (11:00 - 09:00)
+- **Plan**: 1h (12:00 - 11:00)
+- **Code**: 2h (14:00 - 12:00)
+- **Test**: 5min
- **Review**: 5h (19:00 - 14:00)
-- **Staging**: 30min (19:30 - 19:00)
+- **Staging**: 30min (19:30 - 19:00)
- **Total**: Since this stage measures the sum of median time of all
previous stages, we cannot calculate it if we don't know the status of the
stages before. In case this is the very first cycle that is run in the project,
diff --git a/doc/user/application_security/compliance_dashboard/index.md b/doc/user/application_security/compliance_dashboard/index.md
index afe3ce185e6..d9af9d66c36 100644
--- a/doc/user/application_security/compliance_dashboard/index.md
+++ b/doc/user/application_security/compliance_dashboard/index.md
@@ -1,31 +1,5 @@
---
-type: reference, howto
+redirect_to: '../../compliance/compliance_dashboard/index.md'
---
-# Compliance Dashboard **(ULTIMATE)**
-
-> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/36524) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 12.8.
-
-The Compliance Dashboard gives you the ability to see a group's Merge Request activity
-by providing a high-level view for all projects in the group. For example, code approved
-for merging into production.
-
-## Overview
-
-To access the Compliance Dashboard for a group, navigate to **{shield}** **Security & Compliance > Compliance** on the group's menu.
-
-![Compliance Dashboard](img/compliance_dashboard_v12_8.png)
-
-## Use cases
-
-This feature is for people who care about the compliance status of projects within their group.
-
-You can use the dashboard to:
-
-- Get an overview of the latest Merge Request for each project.
-- See if Merge Requests were approved and by whom.
-
-## Permissions
-
-- On [GitLab Ultimate](https://about.gitlab.com/pricing/) tier.
-- By **Administrators** and **Group Owners**.
+This document was moved to [another location](../../compliance/compliance_dashboard/index.md).
diff --git a/doc/user/application_security/configuration/index.md b/doc/user/application_security/configuration/index.md
index 29137c9b50c..131247910ab 100644
--- a/doc/user/application_security/configuration/index.md
+++ b/doc/user/application_security/configuration/index.md
@@ -11,9 +11,9 @@ type: reference, howto
The security configuration page displays the configuration state of each of the security
features and can be accessed through a project's sidebar nav.
-![Screenshot of security configuration page](../img/security_configuration_page_v12_6.png)
+![Screenshot of security configuration page](../img/security_configuration_page_v12_9.png)
-The page uses the project's latest default branch [CI pipeline](../../../ci/pipelines.md) to determine the configuration
+The page uses the project's latest default branch [CI pipeline](../../../ci/pipelines/index.md) to determine the configuration
state of each feature. If a job with the expected security report artifact exists in the pipeline,
the feature is considered configured.
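+
+For illustration, a feature such as SAST is considered configured when a job in that pipeline
+produces the corresponding report artifact. A minimal sketch of such a job follows; the included
+SAST template already does this for you, and the job name and script shown are placeholders:
+
+```yaml
+sast:
+  stage: test
+  script:
+    - /analyze   # placeholder for the real analyzer invocation
+  artifacts:
+    reports:
+      sast: gl-sast-report.json
+```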
diff --git a/doc/user/application_security/container_scanning/img/container_scanning.png b/doc/user/application_security/container_scanning/img/container_scanning.png
deleted file mode 100644
index e47f62acd9d..00000000000
--- a/doc/user/application_security/container_scanning/img/container_scanning.png
+++ /dev/null
Binary files differ
diff --git a/doc/user/application_security/container_scanning/img/container_scanning_v12_9.png b/doc/user/application_security/container_scanning/img/container_scanning_v12_9.png
new file mode 100644
index 00000000000..dd96fc7aacb
--- /dev/null
+++ b/doc/user/application_security/container_scanning/img/container_scanning_v12_9.png
Binary files differ
diff --git a/doc/user/application_security/container_scanning/index.md b/doc/user/application_security/container_scanning/index.md
index ff15b299cea..075536ce9ad 100644
--- a/doc/user/application_security/container_scanning/index.md
+++ b/doc/user/application_security/container_scanning/index.md
@@ -23,7 +23,7 @@ GitLab checks the Container Scanning report, compares the found vulnerabilities
between the source and target branches, and shows the information right on the
merge request.
-![Container Scanning Widget](img/container_scanning.png)
+![Container Scanning Widget](img/container_scanning_v12_9.png)
## Use cases
@@ -35,6 +35,10 @@ Having an extra job in your pipeline that checks for those vulnerabilities,
and the fact that they are displayed inside a merge request, makes it very easy
to perform audits for your Docker-based apps.
+[//]: # "NOTE: The container scanning tool references the following heading in the code, so if you"
+[//]: # " make a change to this heading, make sure to update the documentation URLs used in the"
+[//]: # " container scanning tool (https://gitlab.com/gitlab-org/security-products/analyzers/klar)"
+
## Requirements
To enable Container Scanning in your pipeline, you need:
@@ -65,7 +69,7 @@ To enable Container Scanning in your pipeline, you need:
services:
- docker:19.03.1-dind
variables:
- IMAGE_TAG: $CI_REGISTRY_IMAGE/$CI_COMMIT_REF_SLUG:$CI_COMMIT_REF_SHA
+ IMAGE_TAG: $CI_REGISTRY_IMAGE/$CI_COMMIT_REF_SLUG:$CI_COMMIT_SHA
script:
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
- docker build -t $IMAGE_TAG .
@@ -103,24 +107,20 @@ artifact available. Behind the scenes, the
[GitLab Klar analyzer](https://gitlab.com/gitlab-org/security-products/analyzers/klar/)
is used and runs the scans.
-## Example
-
-The following is a sample `.gitlab-ci.yml` that will build your Docker Image, push it to the container registry and run Container Scanning.
+The following is a sample `.gitlab-ci.yml` that will build your Docker image,
+push it to the Container Registry, and run Container Scanning:
```yaml
variables:
DOCKER_DRIVER: overlay2
services:
- - docker:stable-dind
+ - docker:19.03.5-dind
stages:
- build
- test
-include:
- - template: Container-Scanning.gitlab-ci.yml
-
build:
image: docker:stable
stage: build
@@ -131,88 +131,87 @@ build:
- docker login -u gitlab-ci-token -p $CI_JOB_TOKEN $CI_REGISTRY
- docker build -t $IMAGE .
- docker push $IMAGE
-```
-## Vulnerability Whitelisting
-
-If you want to whitelist specific vulnerabilities, you'll need to:
+include:
+ - template: Container-Scanning.gitlab-ci.yml
+```
- 1. Set [`GIT_STRATEGY: fetch`](../../../ci/yaml/README.md#git-strategy) in your `.gitlab-ci.yml` file by following the instructions described in the
- [overriding the Container Scanning template](#overriding-the-container-scanning-template) section of this document.
- 1. Define the whitelisted vulnerabilities in a YAML file named `clair-whitelist.yml` which must use the format described
- in the [following whitelist example file](https://github.com/arminc/clair-scanner/blob/v12/example-whitelist.yaml).
- 1. Add the `clair-whitelist.yml` file to the Git repository of your project
+### Customizing the Container Scanning settings
-### Overriding the Container Scanning template
+You can change container scanning settings by using the [`variables`](../../../ci/yaml/README.md#variables)
+parameter in your `.gitlab-ci.yml` to change [environment variables](#available-variables).
-If you want to override the job definition (for example, change properties like
-`variables`), you need to declare a `container_scanning` job after the
-template inclusion and specify any additional keys under it. For example:
+In the following example, we [include](../../../ci/yaml/README.md#include) the template and also
+set the `CLAIR_OUTPUT` variable to `High`:
```yaml
include:
- - template: Container-Scanning.gitlab-ci.yml
+ template: Container-Scanning.gitlab-ci.yml
-container_scanning:
- variables:
- GIT_STRATEGY: fetch
+variables:
+ CLAIR_OUTPUT: High
```
-### Available variables
+The `CLAIR_OUTPUT` variable defined in the main `.gitlab-ci.yml` will overwrite what's
+defined in `Container-Scanning.gitlab-ci.yml`, changing the Container Scanning behavior.
-Container Scanning can be [configured](#overriding-the-container-scanning-template)
-using environment variables.
+[//]: # "NOTE: The container scanning tool references the following heading in the code, so if you"
+[//]: # " make a change to this heading, make sure to update the documentation URLs used in the"
+[//]: # " container scanning tool (https://gitlab.com/gitlab-org/security-products/analyzers/klar)"
-| Environment Variable | Description | Default |
-| ------ | ------ | ------ |
-| `KLAR_TRACE` | Set to true to enable more verbose output from klar. | `"false"` |
-| `DOCKER_USER` | Username for accessing a Docker registry requiring authentication. | `$CI_REGISTRY_USER` |
-| `DOCKER_PASSWORD` | Password for accessing a Docker registry requiring authentication. | `$CI_REGISTRY_PASSWORD` |
-| `CLAIR_OUTPUT` | Severity level threshold. Vulnerabilities with severity level higher than or equal to this threshold will be outputted. Supported levels are `Unknown`, `Negligible`, `Low`, `Medium`, `High`, `Critical` and `Defcon1`. | `Unknown` |
-| `REGISTRY_INSECURE` | Allow [Klar](https://github.com/optiopay/klar) to access insecure registries (HTTP only). Should only be set to `true` when testing the image locally. | `"false"` |
-| `CLAIR_VULNERABILITIES_DB_URL` | This variable is explicitly set in the [services section](https://gitlab.com/gitlab-org/gitlab/blob/30522ca8b901223ac8c32b633d8d67f340b159c1/lib/gitlab/ci/templates/Security/Container-Scanning.gitlab-ci.yml#L17-19) of the `Container-Scanning.gitlab-ci.yml` file and defaults to `clair-vulnerabilities-db`. This value represents the address that the [postgres server hosting the vulnerabilities definitions](https://hub.docker.com/r/arminc/clair-db) is running on and **shouldn't be changed** unless you're running the image locally as described in the [Running the scanning tool](https://gitlab.com/gitlab-org/security-products/analyzers/klar/#running-the-scanning-tool) section of the [GitLab klar analyzer readme](https://gitlab.com/gitlab-org/security-products/analyzers/klar). | `clair-vulnerabilities-db` |
-| `CI_APPLICATION_REPOSITORY` | Docker repository URL for the image to be scanned. | `$CI_REGISTRY_IMAGE/$CI_COMMIT_REF_SLUG` |
-| `CI_APPLICATION_TAG` | Docker respository tag for the image to be scanned. | `$CI_COMMIT_SHA` |
-| `CLAIR_DB_IMAGE` | The Docker image name and tag for the [Postgres server hosting the vulnerabilities definitions](https://hub.docker.com/r/arminc/clair-db). It can be useful to override this value with a specific version, for example, to provide a consistent set of vulnerabilities for integration testing purposes, or to refer to a locally hosted vulnerabilities database for an on-premise air-gapped installation. | `arminc/clair-db:latest` |
-| `CLAIR_DB_IMAGE_TAG` | (**DEPRECATED - use `CLAIR_DB_IMAGE` instead**) The Docker image tag for the [Postgres server hosting the vulnerabilities definitions](https://hub.docker.com/r/arminc/clair-db). It can be useful to override this value with a specific version, for example, to provide a consistent set of vulnerabilities for integration testing purposes. | `latest` |
-| `DOCKERFILE_PATH` | The path to the `Dockerfile` to be used for generating remediations. By default, the scanner will look for a file named `Dockerfile` in the root directory of the project, so this variable should only be configured if your `Dockerfile` is in a non-standard location, such as a subdirectory. See [Solutions for vulnerabilities](#solutions-for-vulnerabilities-auto-remediation) for more details. | `Dockerfile` |
+#### Available variables
-## Security Dashboard
+Container Scanning can be [configured](#customizing-the-container-scanning-settings)
+using environment variables.
-The Security Dashboard is a good place to get an overview of all the security
-vulnerabilities in your groups, projects and pipelines. Read more about the
-[Security Dashboard](../security_dashboard/index.md).
+| Environment Variable | Description | Default |
+| ------ | ------ | ------ |
+| `KLAR_TRACE` | Set to true to enable more verbose output from klar. | `"false"` |
+| `DOCKER_USER` | Username for accessing a Docker registry requiring authentication. | `$CI_REGISTRY_USER` |
+| `DOCKER_PASSWORD` | Password for accessing a Docker registry requiring authentication. | `$CI_REGISTRY_PASSWORD` |
+| `CLAIR_OUTPUT` | Severity level threshold. Vulnerabilities with severity level higher than or equal to this threshold will be outputted. Supported levels are `Unknown`, `Negligible`, `Low`, `Medium`, `High`, `Critical` and `Defcon1`. | `Unknown` |
+| `REGISTRY_INSECURE` | Allow [Klar](https://github.com/optiopay/klar) to access insecure registries (HTTP only). Should only be set to `true` when testing the image locally. | `"false"` |
+| `DOCKER_INSECURE` | Allow [Klar](https://github.com/optiopay/klar) to access secure Docker registries using HTTPS with bad (or self-signed) SSL certificates. | `"false"` |
+| `CLAIR_VULNERABILITIES_DB_URL` | (**DEPRECATED - use `CLAIR_DB_CONNECTION_STRING` instead**) This variable is explicitly set in the [services section](https://gitlab.com/gitlab-org/gitlab/-/blob/898c5da43504eba87b749625da50098d345b60d6/lib/gitlab/ci/templates/Security/Container-Scanning.gitlab-ci.yml#L23) of the `Container-Scanning.gitlab-ci.yml` file and defaults to `clair-vulnerabilities-db`. This value represents the address that the [Postgres server hosting the vulnerabilities definitions](https://hub.docker.com/r/arminc/clair-db) is running on and **shouldn't be changed** unless you're running the image locally as described in the [Running the standalone Container Scanning Tool](#running-the-standalone-container-scanning-tool) section. | `clair-vulnerabilities-db` |
+| `CLAIR_DB_CONNECTION_STRING` | This variable represents the [connection string](https://www.postgresql.org/docs/9.3/libpq-connect.html#AEN39692) to the [Postgres server hosting the vulnerabilities definitions](https://hub.docker.com/r/arminc/clair-db) database and **shouldn't be changed** unless you're running the image locally as described in the [Running the standalone Container Scanning Tool](#running-the-standalone-container-scanning-tool) section. The host value for the connection string must match the [alias](https://gitlab.com/gitlab-org/gitlab/-/blob/898c5da43504eba87b749625da50098d345b60d6/lib/gitlab/ci/templates/Security/Container-Scanning.gitlab-ci.yml#L23) value of the `Container-Scanning.gitlab-ci.yml` template file, which defaults to `clair-vulnerabilities-db`. | `postgresql://postgres:password@clair-vulnerabilities-db:5432/postgres?sslmode=disable&statement_timeout=60000` |
+| `CI_APPLICATION_REPOSITORY` | Docker repository URL for the image to be scanned. | `$CI_REGISTRY_IMAGE/$CI_COMMIT_REF_SLUG` |
+| `CI_APPLICATION_TAG` | Docker repository tag for the image to be scanned. | `$CI_COMMIT_SHA` |
+| `CLAIR_DB_IMAGE` | The Docker image name and tag for the [Postgres server hosting the vulnerabilities definitions](https://hub.docker.com/r/arminc/clair-db). It can be useful to override this value with a specific version, for example, to provide a consistent set of vulnerabilities for integration testing purposes, or to refer to a locally hosted vulnerabilities database for an on-premise air-gapped installation. | `arminc/clair-db:latest` |
+| `CLAIR_DB_IMAGE_TAG` | (**DEPRECATED - use `CLAIR_DB_IMAGE` instead**) The Docker image tag for the [Postgres server hosting the vulnerabilities definitions](https://hub.docker.com/r/arminc/clair-db). It can be useful to override this value with a specific version, for example, to provide a consistent set of vulnerabilities for integration testing purposes. | `latest` |
+| `DOCKERFILE_PATH` | The path to the `Dockerfile` to be used for generating remediations. By default, the scanner will look for a file named `Dockerfile` in the root directory of the project, so this variable should only be configured if your `Dockerfile` is in a non-standard location, such as a subdirectory. See [Solutions for vulnerabilities](#solutions-for-vulnerabilities-auto-remediation) for more details. | `Dockerfile` |
-## Interacting with the vulnerabilities
+### Overriding the Container Scanning template
-Once a vulnerability is found, you can interact with it. Read more on how to
-[interact with the vulnerabilities](../index.md#interacting-with-the-vulnerabilities).
+If you want to override the job definition (for example, change properties like
+`variables`), you need to declare a `container_scanning` job after the
+template inclusion and specify any additional keys under it. For example:
-## Solutions for vulnerabilities (auto-remediation)
+```yaml
+include:
+ template: Container-Scanning.gitlab-ci.yml
-Some vulnerabilities can be fixed by applying the solution that GitLab
-automatically generates.
+container_scanning:
+ variables:
+ GIT_STRATEGY: fetch
+```
-To enable remediation support, the scanning tool _must_ have access to the `Dockerfile` specified by
-the `DOCKERFILE_PATH` environment variable. To ensure that the scanning tool has access to this
-file, it's necessary to set [`GIT_STRATEGY: fetch`](../../../ci/yaml/README.md#git-strategy) in
-your `.gitlab-ci.yml` file by following the instructions described in this document's
-[overriding the Container Scanning template](#overriding-the-container-scanning-template) section.
+### Vulnerability whitelisting
-Read more about the [solutions for vulnerabilities](../index.md#solutions-for-vulnerabilities-auto-remediation).
-
-## Vulnerabilities database update
+If you want to whitelist specific vulnerabilities, you'll need to:
-For more information about the vulnerabilities database update, check the
-[maintenance table](../index.md#maintenance-and-update-of-the-vulnerabilities-database).
+1. Set `GIT_STRATEGY: fetch` in your `.gitlab-ci.yml` file by following the instructions described in the
+ [overriding the Container Scanning template](#overriding-the-container-scanning-template) section of this document.
+1. Define the whitelisted vulnerabilities in a YAML file named `clair-whitelist.yml` which must use the format described
+ in the [whitelist example file](https://github.com/arminc/clair-scanner/blob/v12/example-whitelist.yaml).
+1. Add the `clair-whitelist.yml` file to the Git repository of your project.
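+
+For reference, a minimal sketch of what `clair-whitelist.yml` might look like, following the
+upstream example file linked above (the CVE IDs and descriptions below are placeholders):
+
+```yaml
+generalwhitelist:
+  # Whitelist these CVEs for every image that is scanned.
+  CVE-2019-0000: reason this finding is acceptable
+images:
+  alpine:
+    # Whitelist this CVE only for images based on alpine.
+    CVE-2019-0001: affected package is not used at runtime
+```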
-## Running Container Scanning in an offline air-gapped installation
+### Running Container Scanning in an offline, air-gapped installation
Container Scanning can be executed on an offline air-gapped GitLab Ultimate installation using the following process:
1. Host the following Docker images on a [local Docker container registry](../../packages/container_registry/index.md):
- [arminc/clair-db vulnerabilities database](https://hub.docker.com/r/arminc/clair-db)
- - [GitLab klar analyzer](https://gitlab.com/gitlab-org/security-products/analyzers/klar)
+ - GitLab klar analyzer: `registry.gitlab.com/gitlab-org/security-products/analyzers/klar`
1. [Override the container scanning template](#overriding-the-container-scanning-template) in your `.gitlab-ci.yml` file to refer to the Docker images hosted on your local Docker container registry:
```yaml
@@ -225,13 +224,17 @@ Container Scanning can be executed on an offline air-gapped GitLab Ultimate inst
CLAIR_DB_IMAGE: $CI_REGISTRY/namespace/clair-vulnerabilities-db
```
-It may be worthwhile to set up a [scheduled pipeline](../../project/pipelines/schedules.md) to automatically build a new version of the vulnerabilities database on a preset schedule. You can use the following `.gitlab-yml.ci` as a template:
+1. If your local Docker container registry is running securely over `HTTPS`, but you're using a
+ self-signed certificate, then you must set `DOCKER_INSECURE: true` in the above
+ `container_scanning` section of your `.gitlab-ci.yml`.
+
+It may be worthwhile to set up a [scheduled pipeline](../../../ci/pipelines/schedules.md) to automatically build a new version of the vulnerabilities database on a preset schedule. You can use the following `.gitlab-yml.ci` as a template:
```yaml
image: docker:stable
services:
- - docker:stable-dind
+ - docker:19.03.5-dind
stages:
- build
@@ -247,6 +250,40 @@ build_latest_vulnerabilities:
The above template will work for a GitLab Docker registry running on a local installation, however, if you're using a non-GitLab Docker registry, you'll need to change the `$CI_REGISTRY` value and the `docker login` credentials to match the details of your local registry.
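+
+For example, a sketch of how that scheduled job could be adjusted for a hypothetical non-GitLab
+registry (`registry.example.com`, `REGISTRY_USER`, and `REGISTRY_PASSWORD` are placeholders you
+would replace with your own registry address and CI/CD variables):
+
+```yaml
+build_latest_vulnerabilities:
+  stage: build
+  script:
+    - docker pull arminc/clair-db:latest
+    - docker tag arminc/clair-db:latest registry.example.com/namespace/clair-vulnerabilities-db
+    - docker login -u "$REGISTRY_USER" -p "$REGISTRY_PASSWORD" registry.example.com
+    - docker push registry.example.com/namespace/clair-vulnerabilities-db
+```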
+## Running the standalone Container Scanning Tool
+
+It's possible to run the [GitLab Container Scanning Tool](https://gitlab.com/gitlab-org/security-products/analyzers/klar)
+against a Docker container without needing to run it within the context of a CI job. To scan an
+image directly, follow these steps:
+
+1. Run [Docker Desktop](https://www.docker.com/products/docker-desktop) or [Docker Machine](https://github.com/docker/machine).
+1. Run the latest [prefilled vulnerabilities database](https://cloud.docker.com/repository/docker/arminc/clair-db) Docker image:
+
+ ```shell
+ docker run -p 5432:5432 -d --name clair-db arminc/clair-db:latest
+ ```
+
+1. Configure an environment variable to point to your local machine's IP address (or insert your IP address instead of the `LOCAL_MACHINE_IP_ADDRESS` variable in the `CLAIR_DB_CONNECTION_STRING` in the next step):
+
+ ```shell
+ export LOCAL_MACHINE_IP_ADDRESS=your.local.ip.address
+ ```
+
+1. Run the analyzer's Docker image, passing the image and tag you want to analyze in the `CI_APPLICATION_REPOSITORY` and `CI_APPLICATION_TAG` environment variables:
+
+ ```shell
+ docker run \
+ --interactive --rm \
+ --volume "$PWD":/tmp/app \
+ -e CI_PROJECT_DIR=/tmp/app \
+ -e CLAIR_DB_CONNECTION_STRING="postgresql://postgres:password@${LOCAL_MACHINE_IP_ADDRESS}:5432/postgres?sslmode=disable&statement_timeout=60000" \
+ -e CI_APPLICATION_REPOSITORY=registry.gitlab.com/gitlab-org/security-products/dast/webgoat-8.0@sha256 \
+ -e CI_APPLICATION_TAG=bc09fe2e0721dfaeee79364115aeedf2174cce0947b9ae5fe7c33312ee019a4e \
+ registry.gitlab.com/gitlab-org/security-products/analyzers/klar
+ ```
+
+The results are stored in `gl-container-scanning-report.json`.
+
## Reports JSON format
CAUTION: **Caution:**
@@ -310,6 +347,9 @@ it highlighted:
}
```
+CAUTION: **Deprecation:**
+Beginning with GitLab 12.9, container scanning no longer reports `undefined` severity and confidence levels.
+
Here is the description of the report file structure nodes and their meaning. All fields are mandatory to be present in
the report JSON unless stated otherwise. Presence of optional fields depends on the underlying analyzers being used.
@@ -317,7 +357,7 @@ the report JSON unless stated otherwise. Presence of optional fields depends on
|------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `version` | Report syntax version used to generate this JSON. |
| `vulnerabilities` | Array of vulnerability objects. |
-| `vulnerabilities[].category` | Where this vulnerability belongs (SAST, Container Scanning etc.). For Container Scanning, it will always be `container_scanning`. |
+| `vulnerabilities[].category` | Where this vulnerability belongs (for example, SAST or Container Scanning). For Container Scanning, it will always be `container_scanning`. |
| `vulnerabilities[].message` | A short text that describes the vulnerability, it may include occurrence's specific information. Optional. |
| `vulnerabilities[].description` | A long text that describes the vulnerability. Optional. |
| `vulnerabilities[].cve` | A fingerprint string value that represents a concrete occurrence of the vulnerability. It's used to determine whether two vulnerability occurrences are same or different. May not be 100% accurate. **This is NOT a [CVE](https://cve.mitre.org/)**. |
@@ -348,12 +388,39 @@ the report JSON unless stated otherwise. Presence of optional fields depends on
| `remediations[].summary` | Overview of how the vulnerabilities have been fixed. |
| `remediations[].diff` | base64-encoded remediation code diff, compatible with [`git apply`](https://git-scm.com/docs/git-format-patch#_discussion). |
+## Security Dashboard
+
+The [Security Dashboard](../security_dashboard/index.md) shows you an overview of all
+the security vulnerabilities in your groups, projects, and pipelines.
+
+## Vulnerabilities database update
+
+For more information about the vulnerabilities database update, check the
+[maintenance table](../index.md#maintenance-and-update-of-the-vulnerabilities-database).
+
+## Interacting with the vulnerabilities
+
+Once a vulnerability is found, you can [interact with it](../index.md#interacting-with-the-vulnerabilities).
+
+## Solutions for vulnerabilities (auto-remediation)
+
+Some vulnerabilities can be fixed by applying the solution that GitLab
+automatically generates.
+
+To enable remediation support, the scanning tool _must_ have access to the `Dockerfile` specified by
+the `DOCKERFILE_PATH` environment variable. To ensure that the scanning tool has access to this
+file, it's necessary to set [`GIT_STRATEGY: fetch`](../../../ci/yaml/README.md#git-strategy) in
+your `.gitlab-ci.yml` file by following the instructions described in this document's
+[overriding the Container Scanning template](#overriding-the-container-scanning-template) section.
+
+Read more about the [solutions for vulnerabilities](../index.md#solutions-for-vulnerabilities-auto-remediation).
+
## Troubleshooting
### docker: Error response from daemon: failed to copy xattrs
When the GitLab Runner uses the Docker executor and NFS is used
-(e.g., `/var/lib/docker` is on an NFS mount), Container Scanning might fail with
+(for example, `/var/lib/docker` is on an NFS mount), Container Scanning might fail with
an error like the following:
```text
diff --git a/doc/user/application_security/dast/img/dast_all.png b/doc/user/application_security/dast/img/dast_all.png
deleted file mode 100644
index b6edc928dc3..00000000000
--- a/doc/user/application_security/dast/img/dast_all.png
+++ /dev/null
Binary files differ
diff --git a/doc/user/application_security/dast/img/dast_all_v12_9.png b/doc/user/application_security/dast/img/dast_all_v12_9.png
new file mode 100644
index 00000000000..9871d1e6a43
--- /dev/null
+++ b/doc/user/application_security/dast/img/dast_all_v12_9.png
Binary files differ
diff --git a/doc/user/application_security/dast/img/dast_single.png b/doc/user/application_security/dast/img/dast_single.png
deleted file mode 100644
index 26ca4bde786..00000000000
--- a/doc/user/application_security/dast/img/dast_single.png
+++ /dev/null
Binary files differ
diff --git a/doc/user/application_security/dast/img/dast_single_v12_9.png b/doc/user/application_security/dast/img/dast_single_v12_9.png
new file mode 100644
index 00000000000..a8a4b1c1d4f
--- /dev/null
+++ b/doc/user/application_security/dast/img/dast_single_v12_9.png
Binary files differ
diff --git a/doc/user/application_security/dast/index.md b/doc/user/application_security/dast/index.md
index c96f0f8b0d3..c82ba04b697 100644
--- a/doc/user/application_security/dast/index.md
+++ b/doc/user/application_security/dast/index.md
@@ -31,12 +31,16 @@ that is provided by [Auto DevOps](../../../topics/autodevops/index.md).
GitLab checks the DAST report, compares the found vulnerabilities between the source and target
branches, and shows the information right on the merge request.
-![DAST Widget](img/dast_all.png)
+NOTE: **Note:**
+This comparison logic uses only the latest pipeline executed for the target branch's base commit.
+Running the pipeline on any other commit has no effect on the merge request.
+
+![DAST Widget](img/dast_all_v12_9.png)
By clicking on one of the detected linked vulnerabilities, you will be able to
see the details and the URL(s) affected.
-![DAST Widget Clicked](img/dast_single.png)
+![DAST Widget Clicked](img/dast_single_v12_9.png)
[Dynamic Application Security Testing (DAST)](https://en.wikipedia.org/wiki/Dynamic_Application_Security_Testing)
is using the popular open source tool [OWASP ZAProxy](https://github.com/zaproxy/zaproxy)
@@ -307,6 +311,7 @@ DAST can be [configured](#customizing-the-dast-settings) using environment varia
| `DAST_TARGET_AVAILABILITY_TIMEOUT` | no | Time limit in seconds to wait for target availability. Scan is attempted nevertheless if it runs out. Integer. Defaults to `60`. |
| `DAST_FULL_SCAN_ENABLED` | no | Switches the tool to execute [ZAP Full Scan](https://github.com/zaproxy/zaproxy/wiki/ZAP-Full-Scan) instead of [ZAP Baseline Scan](https://github.com/zaproxy/zaproxy/wiki/ZAP-Baseline-Scan). Boolean. `true`, `True`, or `1` are considered as true value, otherwise false. Defaults to `false`. |
| `DAST_FULL_SCAN_DOMAIN_VALIDATION_REQUIRED` | no | Requires [domain validation](#domain-validation) when running DAST full scans. Boolean. `true`, `True`, or `1` are considered as true value, otherwise false. Defaults to `false`. |
+| `DAST_AUTO_UPDATE_ADDONS` | no | Set to `false` to pin the versions of ZAProxy add-ons to those provided with the DAST image. Defaults to `true`. |
### DAST command-line options
@@ -350,6 +355,36 @@ dast:
- /analyze -z"-config replacer.full_list\(0\).description=auth -config replacer.full_list\(0\).enabled=true -config replacer.full_list\(0\).matchtype=REQ_HEADER -config replacer.full_list\(0\).matchstr=Authorization -config replacer.full_list\(0\).regex=false -config replacer.full_list\(0\).replacement=TOKEN" -t $DAST_WEBSITE
```
+### Cloning the project's repository
+
+The DAST job does not require the project's repository to be present when running, so by default
+[`GIT_STRATEGY`](../../../ci/yaml/README.md#git-strategy) is set to `none`.
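+
+If your DAST configuration does need files from the repository, you can override this in the job
+definition. A minimal sketch, assuming you want the repository fetched for the `dast` job:
+
+```yaml
+include:
+  - template: DAST.gitlab-ci.yml
+
+dast:
+  variables:
+    GIT_STRATEGY: fetch
+```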
+
+## Running DAST in an offline air-gapped installation
+
+DAST can be executed on an offline air-gapped GitLab Ultimate installation using the following process:
+
+1. Host the DAST image `registry.gitlab.com/gitlab-org/security-products/dast:latest` in your local
+ Docker container registry.
+1. Add the following configuration to your `.gitlab-ci.yml` file. You must replace `image` to refer
+ to the DAST Docker image hosted on your local Docker container registry:
+
+ ```yaml
+ include:
+ - template: DAST.gitlab-ci.yml
+
+ dast:
+ image: registry.example.com/namespace/dast:latest
+ script:
+ - export DAST_WEBSITE=${DAST_WEBSITE:-$(cat environment_url.txt)}
+ - /analyze -t $DAST_WEBSITE --auto-update-addons false -z"-silent"
+ ```
+
+The option `--auto-update-addons false` instructs ZAP not to update add-ons.
+
+The option `-z` passes the quoted `-silent` parameter to ZAP. The `-silent` parameter ensures ZAP
+does not make any unsolicited requests, including checking for updates.
+
## Reports
The DAST job can emit various reports.
diff --git a/doc/user/application_security/dependency_list/index.md b/doc/user/application_security/dependency_list/index.md
index 992f4137bb8..b9c3b6521d6 100644
--- a/doc/user/application_security/dependency_list/index.md
+++ b/doc/user/application_security/dependency_list/index.md
@@ -48,8 +48,8 @@ vulnerability will then be displayed below it.
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/10536) in GitLab Ultimate 12.3.
-If the [License Compliance](../license_compliance/index.md) CI job is configured,
-the [discovered licenses](../license_compliance/index.md#supported-languages-and-package-managers) will be displayed on this page.
+If the [License Compliance](../../compliance/license_compliance/index.md) CI job is configured,
+the [discovered licenses](../../compliance/license_compliance/index.md#supported-languages-and-package-managers) will be displayed on this page.
## Downloading the Dependency List
diff --git a/doc/user/application_security/dependency_scanning/index.md b/doc/user/application_security/dependency_scanning/index.md
index 07b5da1fd93..1a0a7a7711f 100644
--- a/doc/user/application_security/dependency_scanning/index.md
+++ b/doc/user/application_security/dependency_scanning/index.md
@@ -14,6 +14,7 @@ application is using an external (open source) library which is known to be vuln
If you are using [GitLab CI/CD](../../../ci/README.md), you can analyze your dependencies for known
vulnerabilities using Dependency Scanning.
+All dependencies are scanned, including the transitive dependencies (also known as nested dependencies).
You can take advantage of Dependency Scanning by either [including the CI job](#configuration)
in your existing `.gitlab-ci.yml` file or by implicitly using
@@ -128,31 +129,44 @@ dependency_scanning:
Dependency Scanning can be [configured](#customizing-the-dependency-scanning-settings)
using environment variables.
-| Environment variable | Description |
-| --------------------------------------- | ----------- |
-| `DS_ANALYZER_IMAGES` | Comma separated list of custom images. The official default images are still enabled. Read more about [customizing analyzers](analyzers.md). |
-| `DS_ANALYZER_IMAGE_PREFIX` | Override the name of the Docker registry providing the official default images (proxy). Read more about [customizing analyzers](analyzers.md). |
-| `DS_ANALYZER_IMAGE_TAG` | Override the Docker tag of the official default images. Read more about [customizing analyzers](analyzers.md). |
-| `DS_PYTHON_VERSION` | Version of Python. If set to 2, dependencies are installed using Python 2.7 instead of Python 3.6. ([Introduced](https://gitlab.com/gitlab-org/gitlab/issues/12296) in GitLab 12.1)|
-| `DS_PIP_VERSION` | Force the install of a specific pip version (example: `"19.3"`), otherwise the pip installed in the Docker image is used. ([Introduced](https://gitlab.com/gitlab-org/gitlab/issues/12811) in GitLab 12.7) |
-| `DS_PIP_DEPENDENCY_PATH` | Path to load Python pip dependencies from. ([Introduced](https://gitlab.com/gitlab-org/gitlab/issues/12412) in GitLab 12.2) |
-| `GEMNASIUM_DB_LOCAL_PATH` | Path to local gemnasium database (default `/gemnasium-db`).
-| `GEMNASIUM_DB_REMOTE_URL` | Repository URL for fetching the gemnasium database (default `https://gitlab.com/gitlab-org/security-products/gemnasium-db.git`).
-| `GEMNASIUM_DB_REF_NAME` | Branch name for remote repository database (default `master`). `GEMNASIUM_DB_REMOTE_URL` is required.
-| `DS_DEFAULT_ANALYZERS` | Override the names of the official default images. Read more about [customizing analyzers](analyzers.md). |
-| `DS_DISABLE_DIND` | Disable Docker in Docker and run analyzers [individually](#disabling-docker-in-docker-for-dependency-scanning).|
-| `DS_PULL_ANALYZER_IMAGES` | Pull the images from the Docker registry (set to `0` to disable). |
-| `DS_EXCLUDED_PATHS` | Exclude vulnerabilities from output based on the paths. A comma-separated list of patterns. Patterns can be globs, file or folder paths (e.g., `doc,spec`). Parent directories will also match patterns. |
-| `DS_DOCKER_CLIENT_NEGOTIATION_TIMEOUT` | Time limit for Docker client negotiation. Timeouts are parsed using Go's [`ParseDuration`](https://golang.org/pkg/time/#ParseDuration). Valid time units are `ns`, `us` (or `µs`), `ms`, `s`, `m`, `h`. For example, `300ms`, `1.5h`, or `2h45m`. |
-| `DS_PULL_ANALYZER_IMAGE_TIMEOUT` | Time limit when pulling the image of an analyzer. Timeouts are parsed using Go's [`ParseDuration`](https://golang.org/pkg/time/#ParseDuration). Valid time units are `ns`, `us` (or `µs`), `ms`, `s`, `m`, `h`. For example, `300ms`, `1.5h`, or `2h45m`. |
-| `DS_RUN_ANALYZER_TIMEOUT` | Time limit when running an analyzer. Timeouts are parsed using Go's [`ParseDuration`](https://golang.org/pkg/time/#ParseDuration). Valid time units are `ns`, `us` (or `µs`), `ms`, `s`, `m`, `h`. For example, `300ms`, `1.5h`, or `2h45m`. |
-| `PIP_INDEX_URL` | Base URL of Python Package Index (default `https://pypi.org/simple`). |
-| `PIP_EXTRA_INDEX_URL` | Array of [extra URLs](https://pip.pypa.io/en/stable/reference/pip_install/#cmdoption-extra-index-url) of package indexes to use in addition to `PIP_INDEX_URL`. Comma separated. |
-| `PIP_REQUIREMENTS_FILE` | Pip requirements file to be scanned. |
-| `MAVEN_CLI_OPTS` | List of command line arguments that will be passed to `maven` by the analyzer. The default is `"-DskipTests --batch-mode"`. See an example for [using private repos](#using-private-maven-repos). |
-| `BUNDLER_AUDIT_UPDATE_DISABLED` | Disable automatic updates for the `bundler-audit` analyzer (default: `"false"`). Useful if you're running Dependency Scanning in an offline, air-gapped environment.|
-| `BUNDLER_AUDIT_ADVISORY_DB_URL` | URL of the advisory database used by bundler-audit (default: `https://github.com/rubysec/ruby-advisory-db`). |
-| `BUNDLER_AUDIT_ADVISORY_DB_REF_NAME` | Git ref for the advisory database specified by `BUNDLER_AUDIT_ADVISORY_DB_URL` (default: `master`). |
+#### Configuring Dependency Scanning
+
+The following variables allow configuration of global dependency scanning settings.
+
+| Environment variable | Default | Description |
+| --------------------------------------- | ----------- | ----------- |
+| `DS_ANALYZER_IMAGES` | | Comma separated list of custom images. The official default images are still enabled. Read more about [customizing analyzers](analyzers.md). |
+| `DS_ANALYZER_IMAGE_PREFIX` | | Override the name of the Docker registry providing the official default images (proxy). Read more about [customizing analyzers](analyzers.md). |
+| `DS_ANALYZER_IMAGE_TAG` | | Override the Docker tag of the official default images. Read more about [customizing analyzers](analyzers.md). |
+| `DS_DEFAULT_ANALYZERS` | | Override the names of the official default images. Read more about [customizing analyzers](analyzers.md). |
+| `DS_DISABLE_DIND` | | Disable Docker in Docker and run analyzers [individually](#disabling-docker-in-docker-for-dependency-scanning).|
+| `DS_PULL_ANALYZER_IMAGES` | | Pull the images from the Docker registry (set to `0` to disable). |
+| `DS_EXCLUDED_PATHS` | | Exclude vulnerabilities from output based on the paths. A comma-separated list of patterns. Patterns can be globs, file or folder paths (for example, `doc,spec`). Parent directories will also match patterns. |
+| `DS_DOCKER_CLIENT_NEGOTIATION_TIMEOUT` | 2m | Time limit for Docker client negotiation. Timeouts are parsed using Go's [`ParseDuration`](https://golang.org/pkg/time/#ParseDuration). Valid time units are `ns`, `us` (or `µs`), `ms`, `s`, `m`, or `h`. For example, `300ms`, `1.5h`, or `2h45m`. |
+| `DS_PULL_ANALYZER_IMAGE_TIMEOUT` | 5m | Time limit when pulling an analyzer's image. Timeouts are parsed using Go's [`ParseDuration`](https://golang.org/pkg/time/#ParseDuration). Valid time units are `ns`, `us` (or `µs`), `ms`, `s`, `m`, or `h`. For example, `300ms`, `1.5h`, or `2h45m`. |
+| `DS_RUN_ANALYZER_TIMEOUT` | 20m | Time limit when running an analyzer. Timeouts are parsed using Go's [`ParseDuration`](https://golang.org/pkg/time/#ParseDuration). Valid time units are `ns`, `us` (or `µs`), `ms`, `s`, `m`, or `h`. For example, `300ms`, `1.5h`, or `2h45m`. |
+
+#### Configuring specific analyzers used by Dependency Scanning
+
+The following variables configure the analyzers used for specific languages and frameworks.
+
+| Environment variable | Analyzer | Default | Description |
+| --------------------------------------- | ------------------ | ---------------------------- |------------ |
+| `GEMNASIUM_DB_LOCAL_PATH` | `gemnasium` | `/gemnasium-db` | Path to local gemnasium database. |
+| `GEMNASIUM_DB_REMOTE_URL` | `gemnasium` | `https://gitlab.com/gitlab-org/security-products/gemnasium-db.git` | Repository URL for fetching the gemnasium database. |
+| `GEMNASIUM_DB_REF_NAME` | `gemnasium` | `master` | Branch name for remote repository database. `GEMNASIUM_DB_REMOTE_URL` is required. |
+| `PIP_INDEX_URL` | `gemnasium-python` | `https://pypi.org/simple` | Base URL of Python Package Index. |
+| `PIP_EXTRA_INDEX_URL` | `gemnasium-python` | | Array of [extra URLs](https://pip.pypa.io/en/stable/reference/pip_install/#cmdoption-extra-index-url) of package indexes to use in addition to `PIP_INDEX_URL`. Comma separated. |
+| `PIP_REQUIREMENTS_FILE` | `gemnasium-python` | | Pip requirements file to be scanned. |
+| `DS_PIP_VERSION` | `gemnasium-python` | | Force the install of a specific pip version (example: `"19.3"`), otherwise the pip installed in the Docker image is used. ([Introduced](https://gitlab.com/gitlab-org/gitlab/issues/12811) in GitLab 12.7) |
+| `DS_PIP_DEPENDENCY_PATH` | `gemnasium-python` | | Path to load Python pip dependencies from. ([Introduced](https://gitlab.com/gitlab-org/gitlab/issues/12412) in GitLab 12.2) |
+| `DS_PYTHON_VERSION` | `retire.js` | | Version of Python. If set to 2, dependencies are installed using Python 2.7 instead of Python 3.6. ([Introduced](https://gitlab.com/gitlab-org/gitlab/issues/12296) in GitLab 12.1)|
+| `MAVEN_CLI_OPTS` | `gemnasium-maven` | `"-DskipTests --batch-mode"` | List of command line arguments that will be passed to `maven` by the analyzer. See an example for [using private repos](#using-private-maven-repos). |
+| `BUNDLER_AUDIT_UPDATE_DISABLED` | `bundler-audit` | `false` | Disable automatic updates for the `bundler-audit` analyzer. Useful if you're running Dependency Scanning in an offline, air-gapped environment.|
+| `BUNDLER_AUDIT_ADVISORY_DB_URL` | `bundler-audit` | `https://github.com/rubysec/ruby-advisory-db` | URL of the advisory database used by bundler-audit. |
+| `BUNDLER_AUDIT_ADVISORY_DB_REF_NAME` | `bundler-audit` | `master` | Git ref for the advisory database specified by `BUNDLER_AUDIT_ADVISORY_DB_URL`. |
+| `RETIREJS_JS_ADVISORY_DB` | `retire.js` | `https://raw.githubusercontent.com/RetireJS/retire.js/master/repository/jsrepository.json` | Path or URL to Retire.js js vulnerability data file. |
+| `RETIREJS_NODE_ADVISORY_DB` | `retire.js` | `https://raw.githubusercontent.com/RetireJS/retire.js/master/repository/npmrepository.json` | Path or URL to Retire.js node vulnerability data file. |
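+
+For example, a minimal sketch that includes the Dependency Scanning template and overrides a few
+of these variables (the values shown are placeholders):
+
+```yaml
+include:
+  - template: Dependency-Scanning.gitlab-ci.yml
+
+variables:
+  DS_EXCLUDED_PATHS: "doc,spec"
+  PIP_INDEX_URL: "https://pypi.example.com/simple"
+```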
### Using private Maven repos
@@ -341,6 +355,9 @@ it highlighted:
}
```
+CAUTION: **Deprecation:**
+Beginning with GitLab 12.9, dependency scanning no longer reports `undefined` severity and confidence levels.
+
Here is the description of the report file structure nodes and their meaning. All fields are mandatory to be present in
the report JSON unless stated otherwise. Presence of optional fields depends on the underlying analyzers being used.
diff --git a/doc/user/application_security/img/multi_select_v12_9.png b/doc/user/application_security/img/multi_select_v12_9.png
new file mode 100644
index 00000000000..b2b171e13d0
--- /dev/null
+++ b/doc/user/application_security/img/multi_select_v12_9.png
Binary files differ
diff --git a/doc/user/application_security/img/outdated_report_branch_v12_9.png b/doc/user/application_security/img/outdated_report_branch_v12_9.png
new file mode 100644
index 00000000000..6e23cf04b26
--- /dev/null
+++ b/doc/user/application_security/img/outdated_report_branch_v12_9.png
Binary files differ
diff --git a/doc/user/application_security/img/outdated_report_pipeline_v12_9.png b/doc/user/application_security/img/outdated_report_pipeline_v12_9.png
new file mode 100644
index 00000000000..2bb1fcaa302
--- /dev/null
+++ b/doc/user/application_security/img/outdated_report_pipeline_v12_9.png
Binary files differ
diff --git a/doc/user/application_security/img/security_configuration_page_v12_6.png b/doc/user/application_security/img/security_configuration_page_v12_6.png
deleted file mode 100644
index d838b648c1f..00000000000
--- a/doc/user/application_security/img/security_configuration_page_v12_6.png
+++ /dev/null
Binary files differ
diff --git a/doc/user/application_security/img/security_configuration_page_v12_9.png b/doc/user/application_security/img/security_configuration_page_v12_9.png
new file mode 100644
index 00000000000..a81d82e03c3
--- /dev/null
+++ b/doc/user/application_security/img/security_configuration_page_v12_9.png
Binary files differ
diff --git a/doc/user/application_security/index.md b/doc/user/application_security/index.md
index 13ea45816b8..8e97427e061 100644
--- a/doc/user/application_security/index.md
+++ b/doc/user/application_security/index.md
@@ -13,18 +13,45 @@ information provided, you can immediately begin risk analysis and remediation.
For an overview of application security with GitLab, see
[Security Deep Dive](https://www.youtube.com/watch?v=k4vEJnGYy84).
+## Quick start
+
+Get started quickly with Dependency Scanning, License Scanning, and Static Application Security
+Testing (SAST) by adding the following to your `.gitlab-ci.yml`:
+
+```yaml
+include:
+ - template: Dependency-Scanning.gitlab-ci.yml
+ - template: License-Scanning.gitlab-ci.yml
+ - template: SAST.gitlab-ci.yml
+```
+
+To add Dynamic Application Security Testing (DAST) scanning, add the following to your
+`.gitlab-ci.yml` and replace `https://staging.example.com` with a staging server's web address:
+
+```yaml
+include:
+ - template: DAST.gitlab-ci.yml
+
+variables:
+ DAST_WEBSITE: https://staging.example.com
+```
+
+To ensure the DAST scanner runs *after* deploying the application to the staging server, review the [DAST full documentation](dast/index.md).
+
+To add Container Scanning, follow the steps listed in the [Container Scanning documentation](container_scanning/index.md#requirements).
+
+To further configure any of the other scanners, refer to each scanner's documentation.
+
## Security scanning tools
GitLab uses the following tools to scan and report known vulnerabilities found in your project.
| Secure scanning tool | Description |
|:-----------------------------------------------------------------------------|:-----------------------------------------------------------------------|
-| [Compliance Dashboard](compliance_dashboard/index.md) **(ULTIMATE)** | View the most recent Merge Request activity in a group. |
| [Container Scanning](container_scanning/index.md) **(ULTIMATE)** | Scan Docker containers for known vulnerabilities. |
| [Dependency List](dependency_list/index.md) **(ULTIMATE)** | View your project's dependencies and their known vulnerabilities. |
| [Dependency Scanning](dependency_scanning/index.md) **(ULTIMATE)** | Analyze your dependencies for known vulnerabilities. |
| [Dynamic Application Security Testing (DAST)](dast/index.md) **(ULTIMATE)** | Analyze running web applications for known vulnerabilities. |
-| [License Compliance](license_compliance/index.md) **(ULTIMATE)** | Search your project's dependencies for their licenses. |
| [Security Dashboard](security_dashboard/index.md) **(ULTIMATE)** | View vulnerabilities in all your projects and groups. |
| [Static Application Security Testing (SAST)](sast/index.md) **(ULTIMATE)** | Analyze source code for known vulnerabilities. |
@@ -35,7 +62,7 @@ The scanning tools and vulnerabilities database are updated regularly.
| Secure scanning tool | Vulnerabilities database updates |
|:-------------------------------------------------------------|-------------------------------------------|
| [Container Scanning](container_scanning/index.md) | Uses `clair`. The latest `clair-db` version is used for each job by running the [`latest` docker image tag](https://gitlab.com/gitlab-org/gitlab/blob/438a0a56dc0882f22bdd82e700554525f552d91b/lib/gitlab/ci/templates/Security/Container-Scanning.gitlab-ci.yml#L37). The `clair-db` database [is updated daily according to the author](https://github.com/arminc/clair-local-scan#clair-server-or-local). |
-| [Dependency Scanning](dependency_scanning/index.md) | Relies on `bundler-audit` (for Rubygems), `retire.js` (for NPM packages), and `gemnasium` (GitLab's own tool for all libraries). Both `bundler-audit` and `retire.js` fetch their vulnerabilities data from GitHub repositories, so vulnerabilities added to `ruby-advisory-db` and `retire.js` are immediately available. The tools themselves are updated once per month if there's a new version. The [Gemnasium DB](https://gitlab.com/gitlab-org/security-products/gemnasium-db) is updated at least once a week. |
+| [Dependency Scanning](dependency_scanning/index.md) | Relies on `bundler-audit` (for Rubygems), `retire.js` (for NPM packages), and `gemnasium` (GitLab's own tool for all libraries). Both `bundler-audit` and `retire.js` fetch their vulnerabilities data from GitHub repositories, so vulnerabilities added to `ruby-advisory-db` and `retire.js` are immediately available. The tools themselves are updated once per month if there's a new version. The [Gemnasium DB](https://gitlab.com/gitlab-org/security-products/gemnasium-db) is updated at least once a week. See our [current measurement of time from CVE being issued to our product being updated](https://about.gitlab.com/handbook/engineering/development/performance-indicators/#cve-issue-to-update). |
| [Dynamic Application Security Testing (DAST)](dast/index.md) | The scanning engine is updated on a periodic basis. See the [version of the underlying tool `zaproxy`](https://gitlab.com/gitlab-org/security-products/dast/blob/master/Dockerfile#L1). The scanning rules are downloaded at scan runtime. |
| [Static Application Security Testing (SAST)](sast/index.md) | Relies exclusively on [the tools GitLab wraps](sast/index.md#supported-languages-and-frameworks). The underlying analyzers are updated at least once per month if a relevant update is available. The vulnerabilities database is updated by the upstream tools. |
@@ -86,6 +113,19 @@ context for a vulnerability as you learn more over time.
![Dismissed vulnerability comment](img/dismissed_info_v12_3.png)
+#### Dismissing multiple vulnerabilities
+
+> Introduced in [GitLab Ultimate](https://about.gitlab.com/pricing/) 12.9.
+
+You can dismiss multiple vulnerabilities at once, providing an optional reason.
+To select an individual vulnerability, select the checkbox beside it in the list.
+To select all the vulnerabilities in the list, select the checkbox in the table header;
+deselecting the header checkbox deselects all the vulnerabilities in the list.
+Once you have selected one or more vulnerabilities, a menu appears at the top of the table where you can choose a dismissal reason.
+Pressing the "Dismiss Selected" button dismisses all the selected vulnerabilities at once, with the reason you chose.
+
+![Multiple vulnerability dismissal](img/multi_select_v12_9.png)
+
### Creating an issue for a vulnerability
You can create an issue for a vulnerability by selecting the **Create issue**
@@ -175,7 +215,9 @@ An approval is optional when a security report:
- Contains no new vulnerabilities.
- Contains only new vulnerabilities of `low` or `medium` severity.
-### Enabling License Approvals within a project
+## Enabling License Approvals within a project
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/13067) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 12.3.
To enable License Approvals, a [project approval rule](../project/merge_requests/merge_request_approvals.md#multiple-approval-rules-premium)
must be created with the case-sensitive name `License-Check`. This approval group must be set
@@ -183,7 +225,7 @@ with the number of approvals required greater than zero.
Once this group is added to your project, the approval rule is enabled for all Merge Requests. To
configure how this rule behaves, you can choose which licenses to `approve` or `blacklist` in the
-[project policies for License Compliance](license_compliance/index.md#project-policies-for-license-compliance)
+[project policies for License Compliance](../compliance/license_compliance/index.md#project-policies-for-license-compliance)
section.
Any code changes cause the approvals required to reset.
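
As an alternative to the UI, the same rule can be created through the project-level
approval rules API. The following is only a sketch; the instance URL, the project ID `42`,
and the token are placeholders:

```shell
# Create a "License-Check" approval rule that requires one approval.
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" \
  --data "name=License-Check&approvals_required=1" \
  "https://gitlab.example.com/api/v4/projects/42/approval_rules"
```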
@@ -198,6 +240,44 @@ An approval is optional when a license report:
- Contains no software license violations.
- Contains only new licenses that are `approved` or unknown.
+## Working in an offline environment
+
+It is possible to run most of the GitLab security scanners when not
+connected to the internet, in what is sometimes known as an offline,
+limited connectivity, Local Area Network (LAN), Intranet, or "air-gap"
+environment.
+
+Read how to [operate the Secure scanners in an offline environment](offline_deployments/index.md).
+
+## Outdated security reports
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/4913) in GitLab 12.7.
+
+When a security report generated for a merge request becomes outdated, the merge request shows a warning
+message in the security widget and prompts you to take an appropriate action.
+
+This can happen in two scenarios:
+
+1. Your [source branch is behind the target branch](#source-branch-is-behind-the-target-branch).
+1. The [target branch security report is out of date](#target-branch-security-report-is-out-of-date).
+
+### Source branch is behind the target branch
+
+This means the most recent common ancestor commit between the target branch and the source branch is
+not the most recent commit on the target branch. This is by far the most common situation.
+
+In this case, you must rebase or merge to incorporate the changes from the target branch.
+
+![Incorporate target branch changes](img/outdated_report_branch_v12_9.png)
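+
+For example, assuming the target branch is `master` (adjust the names to your project),
+one way to incorporate its changes from the command line is:
+
+```shell
+# Fetch the latest target branch and rebase your work on top of it.
+git fetch origin
+git rebase origin/master
+
+# After resolving any conflicts, update the merge request branch.
+git push --force-with-lease
+```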
+
+### Target branch security report is out of date
+
+This can happen for many reasons, including failed jobs or new advisories. When the merge request shows that a
+security report is out of date, you must run a new pipeline on the target branch.
+You can do this quickly by following the provided link to run a new pipeline.
+
+![Run a new pipeline](img/outdated_report_pipeline_v12_9.png)
+
## Troubleshooting
### Getting error message `sast job: stage parameter should be [some stage name here]`
diff --git a/doc/user/application_security/license_compliance/index.md b/doc/user/application_security/license_compliance/index.md
index dfe7a714f6c..ed81eb8ca10 100644
--- a/doc/user/application_security/license_compliance/index.md
+++ b/doc/user/application_security/license_compliance/index.md
@@ -1,326 +1,5 @@
---
-type: reference, howto
+redirect_to: '../../compliance/license_compliance/index.md'
---
-# License Compliance **(ULTIMATE)**
-
-> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/5483) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 11.0.
-
-## Overview
-
-If you are using [GitLab CI/CD](../../../ci/README.md), you can search your project dependencies for their licenses
-using License Compliance.
-
-You can take advantage of License Compliance by either [including the job](#configuration)
-in your existing `.gitlab-ci.yml` file or by implicitly using
-[Auto License Compliance](../../../topics/autodevops/index.md#auto-license-compliance-ultimate)
-that is provided by [Auto DevOps](../../../topics/autodevops/index.md).
-
-GitLab checks the License Compliance report, compares the licenses between the
-source and target branches, and shows the information right on the merge request.
-Blacklisted licenses will be clearly visible with an `x` red icon next to them
-as well as new licenses which need a decision from you. In addition, you can
-[manually approve or blacklist](#project-policies-for-license-compliance)
-licenses in your project's settings.
-
-NOTE: **Note:**
-If the license compliance report doesn't have anything to compare to, no information
-will be displayed in the merge request area. That is the case when you add the
-`license_scanning` job in your `.gitlab-ci.yml` for the first time.
-Consecutive merge requests will have something to compare to and the license
-compliance report will be shown properly.
-
-![License Compliance Widget](img/license_compliance.png)
-
-If you are a project or group Maintainer, you can click on a license to be given
-the choice to approve it or blacklist it.
-
-![License approval decision](img/license_compliance_decision.png)
-
-## Use cases
-
-It helps you find what licenses your project uses in its dependencies, and decide for each of them
-whether to allow it or forbid it. For example, your application is using an external (open source)
-library whose license is incompatible with yours.
-
-## Supported languages and package managers
-
-The following languages and package managers are supported.
-
-| Language | Package managers | Scan Tool |
-|------------|-------------------------------------------------------------------|----------------------------------------------------------|
-| JavaScript | [Bower](https://bower.io/), [npm](https://www.npmjs.com/), [yarn](https://yarnpkg.com/) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types)) |[License Finder](https://github.com/pivotal/LicenseFinder)|
-| Go | [Godep](https://github.com/tools/godep), go get ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types)), gvt ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types)), glide ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types)), dep ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types)), trash ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types)) and govendor ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types)), [go mod](https://github.com/golang/go/wiki/Modules) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types)) |[License Finder](https://github.com/pivotal/LicenseFinder)|
-| Java | [Gradle](https://gradle.org/), [Maven](https://maven.apache.org/) |[License Finder](https://github.com/pivotal/LicenseFinder)|
-| .NET | [Nuget](https://www.nuget.org/) (.NET Framework is supported via the [mono project](https://www.mono-project.com/). Windows specific dependencies are not supported at this time.) |[License Finder](https://github.com/pivotal/LicenseFinder)|
-| Python | [pip](https://pip.pypa.io/en/stable/) |[License Finder](https://github.com/pivotal/LicenseFinder)|
-| Ruby | [gem](https://rubygems.org/) |[License Finder](https://github.com/pivotal/LicenseFinder)|
-| Erlang | [rebar](https://www.rebar3.org/) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types))|[License Finder](https://github.com/pivotal/LicenseFinder)|
-| Objective-C, Swift | [Carthage](https://github.com/Carthage/Carthage) , [CocoaPods v0.39 and below](https://cocoapods.org/) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types)) |[License Finder](https://github.com/pivotal/LicenseFinder)|
-| Elixir | [mix](https://elixir-lang.org/getting-started/mix-otp/introduction-to-mix.html) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types)) |[License Finder](https://github.com/pivotal/LicenseFinder)|
-| C++/C | [conan](https://conan.io/) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types))|[License Finder](https://github.com/pivotal/LicenseFinder)|
-| Scala | [sbt](https://www.scala-sbt.org/) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types))|[License Finder](https://github.com/pivotal/LicenseFinder)|
-| Rust | [cargo](https://crates.io) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types))|[License Finder](https://github.com/pivotal/LicenseFinder)|
-| PHP | [composer](https://getcomposer.org/) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types))|[License Finder](https://github.com/pivotal/LicenseFinder)|
-
-## Requirements
-
-To run a License Compliance scanning job, you need GitLab Runner with the
-[`docker` executor](https://docs.gitlab.com/runner/executors/docker.html).
-
-## Configuration
-
-For GitLab 12.8 and later, to enable License Compliance, you must
-[include](../../../ci/yaml/README.md#includetemplate) the
-[`License-Scanning.gitlab-ci.yml` template](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Security/License-Scanning.gitlab-ci.yml)
-that's provided as a part of your GitLab installation.
-For older versions of GitLab from 11.9 to 12.7, you must
-[include](../../../ci/yaml/README.md#includetemplate) the
-[`License-Management.gitlab-ci.yml` template](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Security/License-Management.gitlab-ci.yml).
-For GitLab versions earlier than 11.9, you can copy and use the job as defined
-in that template.
-
-NOTE: **Note:**
-In GitLab 13.0, the `License-Management.gitlab-ci.yml` template is scheduled to be removed.
-Use `License-Scanning.gitlab-ci.yml` instead.
-
-Add the following to your `.gitlab-ci.yml` file:
-
-```yaml
-include:
- - template: License-Scanning.gitlab-ci.yml
-```
-
-The included template will create a `license_scanning` job in your CI/CD pipeline
-and scan your dependencies to find their licenses.
-
-NOTE: **Note:**
-Before GitLab 12.8, the `license_scanning` job was named `license_management`.
-In GitLab 13.0, the `license_management` job is scheduled to be removed completely,
-so you're advised to migrate to the `license_scanning` job and use the new
-`License-Scanning.gitlab-ci.yml` template.
-
-The results will be saved as a
-[License Compliance report artifact](../../../ci/yaml/README.md#artifactsreportslicense_scanning-ultimate)
-that you can later download and analyze. Due to implementation limitations, we
-always take the latest License Compliance artifact available. Behind the scenes, the
-[GitLab License Compliance Docker image](https://gitlab.com/gitlab-org/security-products/license-management)
-is used to detect the languages/frameworks and in turn analyzes the licenses.
-
-The License Compliance settings can be changed through [environment variables](#available-variables) by using the
-[`variables`](../../../ci/yaml/README.md#variables) parameter in `.gitlab-ci.yml`.
-
-### Available variables
-
-License Compliance can be configured using environment variables.
-
-| Environment variable | Required | Description |
-|-----------------------|----------|-------------|
-| `MAVEN_CLI_OPTS` | no | Additional arguments for the mvn executable. If not supplied, defaults to `-DskipTests`. |
-| `LICENSE_FINDER_CLI_OPTS` | no | Additional arguments for the `license_finder` executable. For example, if your project has both Golang and Ruby code stored in different directories and you want to only scan the Ruby code, you can update your `.gitlab-ci-yml` template to specify which project directories to scan, like `LICENSE_FINDER_CLI_OPTS: '--debug --aggregate-paths=. ruby'`. |
-| `LM_JAVA_VERSION` | no | Version of Java. If set to `11`, Maven and Gradle use Java 11 instead of Java 8. |
-| `LM_PYTHON_VERSION` | no | Version of Python. If set to `3`, dependencies are installed using Python 3 instead of Python 2.7. |
-| `SETUP_CMD` | no | Custom setup for the dependency installation. (experimental) |
-
-### Installing custom dependencies
-
-> Introduced in [GitLab Ultimate](https://about.gitlab.com/pricing/) 11.4.
-
-The `license_management` image already embeds many auto-detection scripts, languages,
-and packages. Nevertheless, it's almost impossible to cover all cases for all projects.
-That's why sometimes it's necessary to install extra packages, or to have extra steps
-in the project automated setup, like the download and installation of a certificate.
-For that, a `LICENSE_MANAGEMENT_SETUP_CMD` environment variable can be passed to the container,
-with the required commands to run before the license detection.
-
-If present, this variable will override the setup step necessary to install all the packages
-of your application (e.g.: for a project with a `Gemfile`, the setup step could be
-`bundle install`).
-
-For example:
-
-```yaml
-include:
- - template: License-Scanning.gitlab-ci.yml
-
-variables:
- LICENSE_MANAGEMENT_SETUP_CMD: sh my-custom-install-script.sh
-```
-
-In this example, `my-custom-install-script.sh` is a shell script at the root
-directory of your project.
-
-### Overriding the template
-
-If you want to override the job definition (for example, change properties like
-`variables` or `dependencies`), you need to declare a `license_scanning` job
-after the template inclusion and specify any additional keys under it. For example:
-
-```yaml
-include:
- - template: License-Scanning.gitlab-ci.yml
-
-license_scanning:
- variables:
- CI_DEBUG_TRACE: "true"
-```
-
-### Configuring Maven projects
-
-The License Compliance tool provides a `MAVEN_CLI_OPTS` environment variable which can hold
-the command line arguments to pass to the `mvn install` command which is executed under the hood.
-Feel free to use it for the customization of Maven execution. For example:
-
-```yaml
-include:
- - template: License-Scanning.gitlab-ci.yml
-
-license_scanning:
- variables:
- MAVEN_CLI_OPTS: --debug
-```
-
-`mvn install` runs through all of the [build life cycle](http://maven.apache.org/guides/introduction/introduction-to-the-lifecycle.html)
-stages prior to `install`, including `test`. Running unit tests is not directly
-necessary for the license scanning purposes and consumes time, so it's skipped
-by having the default value of `MAVEN_CLI_OPTS` as `-DskipTests`. If you want
-to supply custom `MAVEN_CLI_OPTS` and skip tests at the same time, don't forget
-to explicitly add `-DskipTests` to your options.
-If you still need to run tests during `mvn install`, add `-DskipTests=false` to
-`MAVEN_CLI_OPTS`.
-
-### Selecting the version of Python
-
-> - [Introduced](https://gitlab.com/gitlab-org/security-products/license-management/-/merge_requests/36) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 12.0.
-> - In [GitLab 12.2](https://gitlab.com/gitlab-org/gitlab/issues/12032), Python 3.5 became the default.
-> - In [GitLab 12.7](https://gitlab.com/gitlab-org/security-products/license-management/-/merge_requests/101), Python 3.8 became the default.
-
-License Compliance uses Python 3.8 and pip 19.1 by default.
-If your project requires Python 2, you can switch to Python 2.7 and pip 10.0
-by setting the `LM_PYTHON_VERSION` environment variable to `2`.
-
-```yaml
-include:
- - template: License-Scanning.gitlab-ci.yml
-
-license_scanning:
- variables:
- LM_PYTHON_VERSION: 2
-```
-
-### Migration from `license_management` to `license_scanning`
-
-In GitLab 12.8, a new name for the `license_management` job was introduced. This change was made to improve clarity around the purpose of the scan, which is to scan and collect the types of licenses present in a project's dependencies.
-The support of `license_management` is scheduled to be dropped in GitLab 13.0.
-If you're using a custom setup for License Compliance, you're required
-to update your CI config accordingly:
-
-1. Change the CI template to `License-Scanning.gitlab-ci.yml`.
-1. Change the job name to `license_scanning` (if you mention it in `.gitlab-ci.yml`).
-1. Change the artifact name to `gl-license-scanning-report.json` (if you mention it in `.gitlab-ci.yml`).
-
-For example, the following `.gitlab-ci.yml`:
-
-```yaml
-include:
- - template: License-Management.gitlab-ci.yml
-
-license_management:
- artifacts:
- reports:
- license_management: gl-license-management-report.json
-```
-
-Should be changed to:
-
-```yaml
-include:
- - template: License-Scanning.gitlab-ci.yml
-
-license_scanning:
- artifacts:
- reports:
- license_scanning: gl-license-scanning-report.json
-```
-
-## Project policies for License Compliance
-
-> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/5940) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 11.4.
-
-From the project's settings:
-
-- The list of licenses and their status can be managed.
-- Licenses can be manually approved or blacklisted.
-
-To approve or blacklist a license:
-
-1. Either use the **Manage licenses** button in the merge request widget, or
- navigate to the project's **Settings > CI/CD** and expand the
- **License Compliance** section.
-1. Click the **Add a license** button.
-
- ![License Compliance Add License](img/license_compliance_add_license_v12_3.png)
-
-1. In the **License name** dropdown, either:
- - Select one of the available licenses. You can search for licenses in the field
- at the top of the list.
- - Enter arbitrary text in the field at the top of the list. This will cause the text to be
- added as a license name to the list.
-1. Select the **Approve** or **Blacklist** radio button to approve or blacklist respectively
- the selected license.
-
-To modify an existing license:
-
-1. In the **License Compliance** list, click the **Approved/Declined** dropdown to change it to the desired status.
-
- ![License Compliance Settings](img/license_compliance_settings_v12_3.png)
-
-Searching for Licenses:
-
-1. Use the **Search** box to search for a specific license.
-
- ![License Compliance Search](img/license_compliance_search_v12_3.png)
-
-## License Compliance report under pipelines
-
-> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/5491) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 11.2.
-
-From your project's left sidebar, navigate to **CI/CD > Pipelines** and click on the
-pipeline ID that has a `license_management` job to see the Licenses tab with the listed
-licenses (if any).
-
-![License Compliance Pipeline Tab](img/license_compliance_pipeline_tab_v12_3.png)
-
-<!-- ## Troubleshooting
-
-Include any troubleshooting steps that you can foresee. If you know beforehand what issues
-one might have when setting this up, or when something is changed, or on upgrading, it's
-important to describe those, too. Think of things that may go wrong and include them here.
-This is important to minimize requests for support, and to avoid doc comments with
-questions that you know someone might ask.
-
-Each scenario can be a third-level heading, e.g. `### Getting error message X`.
-If you have none to add when creating a doc, leave this section in place
-but commented out to help encourage others to add to it in the future. -->
-
-## License list
-
-> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/13582) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 12.7.
-
-The License list allows you to see your project's licenses and key
-details about them.
-
-In order for the licenses to appear under the license list, the following
-requirements must be met:
-
-1. The License Compliance CI job must be [configured](#configuration) for your project.
-1. Your project must use at least one of the
- [supported languages and package managers](#supported-languages-and-package-managers).
-
-Once everything is set, navigate to **Security & Compliance > License Compliance**
-in your project's sidebar, and you'll see the licenses displayed, where:
-
-- **Name:** The name of the license.
-- **Component:** The components which have this license.
-
-![License List](img/license_list_v12_6.png)
+This document was moved to [another location](../../compliance/license_compliance/index.md).
diff --git a/doc/user/application_security/license_management/index.md b/doc/user/application_security/license_management/index.md
index 319da2c3a6e..df44041600b 100644
--- a/doc/user/application_security/license_management/index.md
+++ b/doc/user/application_security/license_management/index.md
@@ -1,5 +1,5 @@
---
-redirect_to: ../license_compliance/index.md
+redirect_to: ../../compliance/license_compliance/index.md
---
-This document was moved to [another location](../license_compliance/index.md).
+This document was moved to [another location](../../compliance/license_compliance/index.md).
diff --git a/doc/user/application_security/offline_deployments/index.md b/doc/user/application_security/offline_deployments/index.md
new file mode 100644
index 00000000000..f72b632ff82
--- /dev/null
+++ b/doc/user/application_security/offline_deployments/index.md
@@ -0,0 +1,55 @@
+---
+type: reference, howto
+---
+
+# Offline deployments
+
+This document describes how to operate Secure scanners offline.
+
+## Overview
+
+It is possible to run most of the GitLab security scanners when not
+connected to the internet, in what is sometimes known as an offline,
+limited connectivity, Local Area Network (LAN), Intranet, or "air-gap"
+environment.
+
+In this situation, the GitLab instance can be one or more servers and services running in a network that can talk to one another, but with zero or very restricted access to the internet. Assume that anything within the GitLab instance and supporting infrastructure (for example, a private Maven repository) can be accessed through a local network connection, and that any files from the internet must come in through physical media (USB drive, hard drive).
+
+GitLab scanners generally connect to the internet to download the
+latest sets of signatures, rules, and patches. A few extra steps are necessary
+to configure the tools so that they don't do this and still function properly.
+
+### Container registries and package repositories
+
+At a high level, each of the security analyzers is delivered as a Docker
+container and references various package repositories. When you run a job on
+an internet-connected GitLab installation, GitLab checks the GitLab.com-hosted
+container registry and package repositories to ensure that you have
+the latest versions.
+
+In an air-gapped environment, this check must be disabled so that GitLab.com is not
+queried. Because the GitLab.com registry and repositories are not available,
+you must update each of the scanners to either reference a different,
+internally-hosted registry or provide access to the individual scanner images.
+
+You must also ensure that your app has access to common package repos
+that are not hosted on GitLab.com, such as npm, yarn, or rubygems. Packages
+from these repos can be obtained by temporarily connecting to a network or by
+mirroring the packages inside your own offline network.
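+
+For example, a minimal sketch for pointing npm at an internal mirror; the mirror URL below is
+hypothetical and should be replaced with your own:
+
+```shell
+# Point npm at an internally hosted mirror instead of the public registry.
+npm config set registry https://nexus.internal.example.com/repository/npm-proxy/
+```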
+
+### Scanner signature and rule updates
+
+When connected to the internet, some scanners will reference public databases
+for the latest sets of signatures and rules to check against. Without connectivity,
+this is not possible. Depending on the scanner, you must therefore disable
+these automatic update checks and either use the databases that they came
+with or manually update those databases.
+
+## Specific scanner instructions
+
+The exact steps for each scanner may differ slightly from those described
+above. You can find more information on each of the pages below:
+
+- [Container scanning offline directions](../container_scanning/index.md#running-container-scanning-in-an-offline-air-gapped-installation)
+- [SAST offline directions](../sast/index.md#gitlab-sast-in-an-offline-air-gapped-installation)
+- [DAST offline directions](../dast/index.md#running-dast-in-an-offline-air-gapped-installation)
diff --git a/doc/user/application_security/sast/img/sast.png b/doc/user/application_security/sast/img/sast.png
deleted file mode 100644
index 2c75592c32a..00000000000
--- a/doc/user/application_security/sast/img/sast.png
+++ /dev/null
Binary files differ
diff --git a/doc/user/application_security/sast/img/sast_v12_9.png b/doc/user/application_security/sast/img/sast_v12_9.png
new file mode 100644
index 00000000000..91f4b8a8e2e
--- /dev/null
+++ b/doc/user/application_security/sast/img/sast_v12_9.png
Binary files differ
diff --git a/doc/user/application_security/sast/index.md b/doc/user/application_security/sast/index.md
index 7bf61981db9..70d31f8e1d6 100644
--- a/doc/user/application_security/sast/index.md
+++ b/doc/user/application_security/sast/index.md
@@ -25,7 +25,7 @@ that is provided by [Auto DevOps](../../../topics/autodevops/index.md).
GitLab checks the SAST report, compares the found vulnerabilities between the
source and target branches, and shows the information right on the merge request.
-![SAST Widget](img/sast.png)
+![SAST Widget](img/sast_v12_9.png)
The results are sorted by the priority of the vulnerability:
@@ -278,14 +278,14 @@ The following are Docker image-related variables.
Some analyzers make it possible to filter out vulnerabilities under a given threshold.
-| Environment variable | Default value | Description |
-|----------------------|---------------|-------------|
+| Environment variable | Default value | Description |
+|-------------------------|---------------|-------------|
+| `SAST_EXCLUDED_PATHS` | - | Exclude vulnerabilities from output based on the paths. This is a comma-separated list of patterns. Patterns can be globs, or file or folder paths (for example, `doc,spec` ). Parent directories will also match patterns. |
| `SAST_BANDIT_EXCLUDED_PATHS` | - | comma-separated list of paths to exclude from scan. Uses Python's [`fnmatch` syntax](https://docs.python.org/2/library/fnmatch.html) |
| `SAST_BRAKEMAN_LEVEL` | 1 | Ignore Brakeman vulnerabilities under given confidence level. Integer, 1=Low 3=High. |
| `SAST_FLAWFINDER_LEVEL` | 1 | Ignore Flawfinder vulnerabilities under given risk level. Integer, 0=No risk, 5=High risk. |
| `SAST_GITLEAKS_ENTROPY_LEVEL` | 8.0 | Minimum entropy for secret detection. Float, 0.0 = low, 8.0 = high. |
| `SAST_GOSEC_LEVEL` | 0 | Ignore gosec vulnerabilities under given confidence level. Integer, 0=Undefined, 1=Low, 2=Medium, 3=High. |
-| `SAST_EXCLUDED_PATHS` | - | Exclude vulnerabilities from output based on the paths. This is a comma-separated list of patterns. Patterns can be globs, file or folder paths (e.g., `doc,spec` ). Parent directories will also match patterns. |
#### Timeouts
@@ -413,6 +413,9 @@ it highlighted:
}
```
+CAUTION: **Deprecation:**
+Beginning with GitLab 12.9, SAST no longer reports `undefined` severity and confidence levels.
+
Here is the description of the report file structure nodes and their meaning. All fields are mandatory in
the report JSON unless stated otherwise. Presence of optional fields depends on the underlying analyzers being used.
@@ -476,6 +479,77 @@ Once a vulnerability is found, you can interact with it. Read more on how to
For more information about the vulnerabilities database update, check the
[maintenance table](../index.md#maintenance-and-update-of-the-vulnerabilities-database).
+## GitLab SAST in an offline air-gapped installation
+
+For self-managed GitLab instances in an environment with limited, restricted, or intermittent access
+to external resources via the internet, some adjustments are required for the SAST job to
+successfully run.
+
+### Requirements for offline SAST
+
+To use SAST in an offline environment, you need:
+
+- GitLab Runner with the [`docker` or `kubernetes` executor](#requirements).
+- Docker Container Registry with locally available copies of SAST [analyzer](https://gitlab.com/gitlab-org/security-products/analyzers) images.
+
+NOTE: **Note:**
+GitLab Runner has a [default `pull policy` of `always`](https://docs.gitlab.com/runner/executors/docker.html#using-the-always-pull-policy),
+meaning the runner may try to pull remote images even if a local copy is available. Set GitLab
+Runner's [`pull_policy` to `if-not-present`](https://docs.gitlab.com/runner/executors/docker.html#using-the-if-not-present-pull-policy)
+in an offline environment if you prefer using only locally available Docker images.
+
+### Make GitLab SAST analyzer images available inside your Docker registry
+
+For SAST with all [supported languages and frameworks](#supported-languages-and-frameworks),
+import the following default SAST analyzer images from `registry.gitlab.com` to your local "offline"
+registry:
+
+```plaintext
+registry.gitlab.com/gitlab-org/security-products/analyzers/bandit:2
+registry.gitlab.com/gitlab-org/security-products/analyzers/brakeman:2
+registry.gitlab.com/gitlab-org/security-products/analyzers/eslint:2
+registry.gitlab.com/gitlab-org/security-products/analyzers/flawfinder:2
+registry.gitlab.com/gitlab-org/security-products/analyzers/go-ast-scanner:2
+registry.gitlab.com/gitlab-org/security-products/analyzers/gosec:2
+registry.gitlab.com/gitlab-org/security-products/analyzers/kubesec:2
+registry.gitlab.com/gitlab-org/security-products/analyzers/nodejs-scan:2
+registry.gitlab.com/gitlab-org/security-products/analyzers/phpcs-security-audit:2
+registry.gitlab.com/gitlab-org/security-products/analyzers/pmd-apex:2
+registry.gitlab.com/gitlab-org/security-products/analyzers/secrets:2
+registry.gitlab.com/gitlab-org/security-products/analyzers/security-code-scan:2
+registry.gitlab.com/gitlab-org/security-products/analyzers/sobelow:2
+registry.gitlab.com/gitlab-org/security-products/analyzers/spotbugs:2
+registry.gitlab.com/gitlab-org/security-products/analyzers/tslint:2
+```
+
+The process for importing Docker images into a local offline Docker registry depends on
+**your network security policy**. Consult your IT staff to find an accepted and approved
+process by which external resources can be imported or temporarily accessed. Note that these scanners are [updated periodically](../index.md#maintenance-and-update-of-the-vulnerabilities-database)
+with new definitions, so consider whether you can make periodic updates yourself.
+
+For details on saving and transporting Docker images as a file, see Docker's documentation on
+[`docker save`](https://docs.docker.com/engine/reference/commandline/save/), [`docker load`](https://docs.docker.com/engine/reference/commandline/load/),
+[`docker export`](https://docs.docker.com/engine/reference/commandline/export/), and [`docker import`](https://docs.docker.com/engine/reference/commandline/import/).
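+
+For example, the following sketch moves a single analyzer image into a local registry at
+`localhost:5000` (the same hypothetical registry used in the CI variables example below),
+assuming the archive is carried across on physical media:
+
+```shell
+# On an internet-connected machine: pull the image and save it to a file.
+docker pull registry.gitlab.com/gitlab-org/security-products/analyzers/bandit:2
+docker save -o bandit.tar registry.gitlab.com/gitlab-org/security-products/analyzers/bandit:2
+
+# On the offline machine: load the file, retag the image, and push it to the local registry.
+docker load -i bandit.tar
+docker tag registry.gitlab.com/gitlab-org/security-products/analyzers/bandit:2 localhost:5000/analyzers/bandit:2
+docker push localhost:5000/analyzers/bandit:2
+```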
+
+### Set SAST CI job variables to use local SAST analyzers
+
+[Override SAST environment variables](#customizing-the-sast-settings) to use your [local container registry](./analyzers.md#using-a-custom-docker-mirror)
+as the source for SAST analyzer images.
+
+For example, assuming a local Docker registry repository of `localhost:5000/analyzers`:
+
+```yaml
+include:
+ - template: SAST.gitlab-ci.yml
+
+variables:
+ SAST_ANALYZER_IMAGE_PREFIX: "localhost:5000/analyzers"
+ SAST_DISABLE_DIND: "true"
+ ```
+
+The SAST job should now use local copies of the SAST analyzers to scan your code and generate
+security reports without requiring internet access.
+
## Troubleshooting
### Error response from daemon: error processing tar file: docker-tar: relocation error
diff --git a/doc/user/application_security/security_dashboard/index.md b/doc/user/application_security/security_dashboard/index.md
index a376ac1f26b..4ce6a9403c5 100644
--- a/doc/user/application_security/security_dashboard/index.md
+++ b/doc/user/application_security/security_dashboard/index.md
@@ -26,7 +26,7 @@ The Security Dashboard supports the following reports:
## Requirements
-To use the instance, group, project or pipeline security dashboard:
+To use the instance, group, project, or pipeline security dashboard:
1. At least one project inside a group must be configured with at least one of
the [supported reports](#supported-reports).
@@ -147,7 +147,7 @@ information on the Security Dashboard can become outdated as new vulnerabilities
are discovered.
To ensure the information on the Security Dashboard is regularly updated,
-[configure a scheduled pipeline](../../project/pipelines/schedules.md) to run a
+[configure a scheduled pipeline](../../../ci/pipelines/schedules.md) to run a
daily security scan. This will update the information displayed on the Security
Dashboard regardless of how often the default branch is updated.
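
For example, the schedule can also be created through the pipeline schedules API. The
following is only a sketch; the instance URL, the project ID `42`, and the token are placeholders:

```shell
# Create a pipeline schedule that runs daily at 02:00 UTC on the default branch.
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" \
  --data-urlencode "description=Daily security scan" \
  --data-urlencode "ref=master" \
  --data-urlencode "cron=0 2 * * *" \
  --data-urlencode "cron_timezone=UTC" \
  --data-urlencode "active=true" \
  "https://gitlab.example.com/api/v4/projects/42/pipeline_schedules"
```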
diff --git a/doc/user/asciidoc.md b/doc/user/asciidoc.md
index da6bf287955..b9b346d3be4 100644
--- a/doc/user/asciidoc.md
+++ b/doc/user/asciidoc.md
@@ -282,11 +282,11 @@ source - a listing that is embellished with (colorized) syntax highlighting
----
```
-~~~asciidoc
+````asciidoc
\```language
fenced code - a shorthand syntax for the source block
\```
-~~~
+````
```asciidoc
[,attribution,citetitle]
diff --git a/doc/user/clusters/applications.md b/doc/user/clusters/applications.md
index c526f7339d5..4768fcc2970 100644
--- a/doc/user/clusters/applications.md
+++ b/doc/user/clusters/applications.md
@@ -9,10 +9,6 @@ and [deployments](../../ci/environments.md) when using [Auto DevOps](../../topic
You can install them after you
[create a cluster](../project/clusters/add_remove_clusters.md).
-Interested in contributing a new GitLab managed app? Visit the
-[development guidelines page](../../development/kubernetes.md#gitlab-managed-apps)
-to get started.
-
## Installing applications
Applications managed by GitLab will be installed onto the `gitlab-managed-apps` namespace.
@@ -26,8 +22,9 @@ This namespace:
To see a list of available applications to install, for a:
- [Project-level cluster](../project/clusters/index.md), navigate to your project's
- **Operations > Kubernetes**.
-- [Group-level cluster](../group/clusters/index.md), navigate to your group's **Kubernetes** page.
+ **{cloud-gear}** **Operations > Kubernetes**.
+- [Group-level cluster](../group/clusters/index.md), navigate to your group's
+ **{cloud-gear}** **Kubernetes** page.
Install Helm first as it's used to install other applications.
@@ -117,7 +114,7 @@ service included with GitLab that coordinates the jobs.
If the project is on GitLab.com, shared Runners are available
(the first 2000 minutes are free, you can
-[buy more later](../../subscriptions/index.md#extra-shared-runners-pipeline-minutes))
+[buy more later](../../subscriptions/index.md#purchasing-additional-ci-minutes))
and you do not have to deploy one if they are enough for your needs. If a
project-specific Runner is desired, or there are no shared Runners, it is easy
to deploy one.
@@ -257,7 +254,7 @@ use an A record. If your external endpoint is a hostname, use a CNAME record.
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/21966) in GitLab 12.7.
-A Web Application Firewall (WAF) is able to examine traffic being sent/received
+A Web Application Firewall (WAF) examines traffic being sent or received,
and can block malicious traffic before it reaches your application. The benefits
of a WAF are:
@@ -266,7 +263,7 @@ of a WAF are:
- Access control for your application
- Highly configurable logging and blocking rules
-Out of the box, GitLab provides you with a WAF known as [`ModSecurity`](https://www.modsecurity.org/)
+Out of the box, GitLab provides you with a WAF known as [`ModSecurity`](https://www.modsecurity.org/).
ModSecurity is a toolkit for real-time web application monitoring, logging,
and access control. With GitLab's offering, the [OWASP's Core Rule Set](https://www.modsecurity.org/CRS/Documentation/),
@@ -282,24 +279,42 @@ This feature:
kubectl logs -n gitlab-managed-apps $(kubectl get pod -n gitlab-managed-apps -l app=nginx-ingress,component=controller --no-headers=true -o custom-columns=:metadata.name) modsecurity-log -f
```
-To enable ModSecurity, check the **Enable Web Application Firewall** checkbox
-when installing your [Ingress application](#ingress).
+To enable WAF, switch its respective toggle to the enabled position when installing or updating the [Ingress application](#ingress).
If this is your first time using GitLab's WAF, we recommend you follow the
[quick start guide](../../topics/web_application_firewall/quick_start_guide.md).
-There is a small performance overhead by enabling ModSecurity. However,
-if this is considered significant for your application, you can disable it.
-
There is a small performance overhead by enabling ModSecurity. If this is
considered significant for your application, you can disable ModSecurity's
-rule engine for your deployed application by setting
-[the deployment variable](../../topics/autodevops/index.md)
+rule engine for your deployed application in any of the following ways:
+
+1. Setting [the deployment variable](../../topics/autodevops/index.md)
`AUTO_DEVOPS_MODSECURITY_SEC_RULE_ENGINE` to `Off`. This will prevent ModSecurity
from processing any requests for the given application or environment. See the example after this list.
-To permanently disable it, you must [uninstall](#uninstalling-applications) and
-reinstall your Ingress application for the changes to take effect.
+1. Switching its respective toggle to the disabled position, and applying the changes with the **Save changes** button. This reinstalls
+Ingress with the updated configuration.
+
+![Disabling WAF](../../topics/web_application_firewall/img/guide_waf_ingress_save_changes_v12_9.png)
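+
+For example, a minimal `.gitlab-ci.yml` excerpt for the first option. This is only a sketch
+that assumes your project includes the Auto DevOps template in its own `.gitlab-ci.yml`; you
+can set the same variable in the project's CI/CD variables instead:
+
+```yaml
+include:
+  - template: Auto-DevOps.gitlab-ci.yml
+
+variables:
+  # Turn off ModSecurity's rule engine for this project's deployments.
+  AUTO_DEVOPS_MODSECURITY_SEC_RULE_ENGINE: "Off"
+```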
+
+##### Viewing Web Application Firewall traffic
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/14707) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 12.9.
+
+You can view Web Application Firewall traffic by navigating to your project's
+**Security & Compliance > Threat Monitoring** page.
+
+From there, you can see the following information, tracked over time:
+
+- The total amount of traffic to your application.
+- The proportion of traffic that is considered anomalous by the Web Application
+ Firewall's default [OWASP ruleset](https://www.modsecurity.org/CRS/Documentation/).
+
+If a significant percentage of traffic is anomalous, it should be investigated
+for potential threats, which can be done by
+[examining the application logs](#web-application-firewall-modsecurity).
+
+![Threat Monitoring](img/threat_monitoring_v12_9.png)
### JupyterHub
@@ -468,6 +483,52 @@ The chart will deploy 5 Elasticsearch nodes: 2 masters, 2 data and 1 client node
with resource requests totalling 0.125 CPU and 4.5GB RAM. Each data node requests 1.5GB of memory,
which makes it incompatible with clusters of `f1-micro` and `g1-small` instance types.
+NOTE: **Note:**
+The Elastic Stack cluster application is intended as a log aggregation solution and is not related to our
+[Advanced Global Search](../search/advanced_global_search.md) functionality, which uses a separate
+Elasticsearch cluster.
+
+#### Optional: deploy Kibana to perform advanced queries
+
+If you are an advanced user and have direct access to your Kubernetes cluster using `kubectl` and `helm`, you can deploy Kibana manually.
+
+The following assumes that `helm` has been [initialized](https://v2.helm.sh/docs/helm/) with `helm init`.
+
+Save the following to `kibana.yml`:
+
+```yaml
+elasticsearch:
+ enabled: false
+
+logstash:
+ enabled: false
+
+kibana:
+ enabled: true
+ env:
+ ELASTICSEARCH_HOSTS: http://elastic-stack-elasticsearch-client.gitlab-managed-apps.svc.cluster.local:9200
+```
+
+Then install it on your cluster:
+
+```shell
+helm install --name kibana stable/elastic-stack --values kibana.yml
+```
+
+To access Kibana, forward the port to your local machine:
+
+```shell
+kubectl port-forward svc/kibana 5601:443
+```
+
+Then, you can visit Kibana at `http://localhost:5601`.
+
+### Future apps
+
+Interested in contributing a new GitLab managed app? Visit the
+[development guidelines page](../../development/kubernetes.md#gitlab-managed-apps)
+to get started.
+
## Install using GitLab CI (alpha)
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/20822) in GitLab 12.6.
@@ -487,8 +548,10 @@ Supported applications:
- [Sentry](#install-sentry-using-gitlab-ci)
- [GitLab Runner](#install-gitlab-runner-using-gitlab-ci)
- [Cilium](#install-cilium-using-gitlab-ci)
+- [Vault](#install-vault-using-gitlab-ci)
- [JupyterHub](#install-jupyterhub-using-gitlab-ci)
- [Elastic Stack](#install-elastic-stack-using-gitlab-ci)
+- [Crossplane](#install-crossplane-using-gitlab-ci)
### Usage
@@ -501,20 +564,20 @@ To install applications using GitLab CI:
1. Connect the cluster to a [cluster management project](management_project.md).
1. In that project, add a `.gitlab-ci.yml` file with the following content:
- ```yaml
- include:
- - template: Managed-Cluster-Applications.gitlab-ci.yml
- ```
+ ```yaml
+ include:
+ - template: Managed-Cluster-Applications.gitlab-ci.yml
+ ```
1. Add a `.gitlab/managed-apps/config.yaml` file to define which
applications you would like to install. Define the `installed` key as
`true` to install the application and `false` to uninstall the
application. For example, to install Ingress:
- ```yaml
- ingress:
- installed: true
- ```
+ ```yaml
+ ingress:
+ installed: true
+ ```
1. Optionally, define `.gitlab/managed-apps/<application>/values.yaml` file to
customize values for the installed application.
@@ -523,7 +586,7 @@ A GitLab CI pipeline will then run on the `master` branch to install the
applications you have configured. In case of pipeline failure, the
output of the [Helm
Tiller](https://v2.helm.sh/docs/install/#running-tiller-locally) binary
-will be saved as a [CI job artifact](../project/pipelines/job_artifacts.md).
+will be saved as a [CI job artifact](../../ci/pipelines/job_artifacts.md).
### Install Ingress using GitLab CI
@@ -658,7 +721,7 @@ GitLab Runner is installed into the `gitlab-managed-apps` namespace of your clus
In order for GitLab Runner to function, you **must** specify the following:
-- `gitlabUrl` - the GitLab server full URL (e.g., `https://example.gitlab.com`) to register the Runner against.
+- `gitlabUrl` - the GitLab server full URL (for example, `https://example.gitlab.com`) to register the Runner against.
- `runnerRegistrationToken` - The registration token for adding new Runners to GitLab. This must be
[retrieved from your GitLab instance](../../ci/runners/README.md).
@@ -680,7 +743,7 @@ available configuration options.
[Cilium](https://cilium.io/) is a networking plugin for Kubernetes
that you can use to implement support for
[NetworkPolicy](https://kubernetes.io/docs/concepts/services-networking/network-policies/)
-resources.
+resources. For more information, see the [Network Policy](../../topics/autodevops/index.md#network-policy) documentation.
Enable Cilium in the `.gitlab/managed-apps/config.yaml` file to install it:
@@ -693,7 +756,7 @@ cilium:
```
The `clusterType` variable enables the recommended Helm variables for
-a corresponding cluster type, the default value is blank. You can
+a corresponding cluster type. The default value is blank. You can
check the recommended variables for each cluster type in the official
documentation:
@@ -720,13 +783,13 @@ information.
By default, Cilium will drop all non-whitelisted packets upon policy
deployment. The audit mode is scheduled for release in
[Cilium 1.8](https://github.com/cilium/cilium/pull/9970). In the audit
-mode non-whitelisted packets will not be dropped, instead audit
-notifications will be generated. GitLab provides alternative Docker
+mode, non-whitelisted packets will not be dropped, and audit
+notifications will be generated instead. GitLab provides alternative Docker
images for Cilium with the audit patch included. You can switch to the
custom build and enable the audit mode by adding the following to
`.gitlab/managed-apps/cilium/values.yaml`:
-```yml
+```yaml
global:
registry: registry.gitlab.com/gitlab-org/defend/cilium
policyAuditMode: true
@@ -737,25 +800,115 @@ agent:
```
The Cilium monitor log for traffic is logged out by the
-`cilium-monitor` sidecar container. You can check these logs via:
+`cilium-monitor` sidecar container. You can check these logs with the following command:
```shell
kubectl -n gitlab-managed-apps logs cilium-XXXX cilium-monitor
```
-You can disable the monitor log via `.gitlab/managed-apps/cilium/values.yaml`:
+You can disable the monitor log in `.gitlab/managed-apps/cilium/values.yaml`:
-```yml
+```yaml
agent:
monitor:
enabled: false
```
+### Install Vault using GitLab CI
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/9982) in GitLab 12.9.
+
+[Hashicorp Vault](https://vaultproject.io/) is a secrets management solution that
+safely manages and stores passwords, credentials, certificates, and more. A Vault
+installation can provide a single secure data store for credentials
+used in your applications, GitLab CI jobs, and more. It can also serve as a way of
+providing SSL/TLS certificates to systems and deployments in your infrastructure. Using
+Vault as a single source for all these credentials improves security by giving you
+a single point of access, control, and auditability for all your sensitive
+credentials and certificates.
+
+To install Vault, enable it in the `.gitlab/managed-apps/config.yaml` file:
+
+```yaml
+vault:
+ installed: true
+```
+
+By default, you will get a basic Vault setup with no high availability and no scalable
+storage backend. This is enough for simple testing and small-scale deployments, but it limits
+how much the installation can scale, and because it is a single-instance deployment, you will experience downtime
+when upgrading the Vault application.
+
+To use Vault optimally in a production environment, you should have a good understanding
+of Vault's internals and how to configure it. You can gain this by reading
+the [Vault documentation](https://www.vaultproject.io/docs/internals/) as well as
+the Vault Helm chart [values.yaml file](https://github.com/hashicorp/vault-helm/blob/v0.3.3/values.yaml).
+
+At a minimum, you will likely set up:
+
+- A [seal](https://www.vaultproject.io/docs/configuration/seal/) for extra encryption
+ of the master key.
+- A [storage backend](https://www.vaultproject.io/docs/configuration/storage/) that is
+  suitable for your environment and storage security requirements.
+- [HA Mode](https://www.vaultproject.io/docs/concepts/ha/).
+- [The Vault UI](https://www.vaultproject.io/docs/configuration/ui/).
+
+The following is an example values file (`.gitlab/managed-apps/vault/values.yaml`)
+that configures Google Key Management Service for auto-unseal, using a Google Cloud Storage backend, enabling
+the Vault UI, and enabling HA with 3 pod replicas. The `storage` and `seal` stanzas
+below are examples and should be replaced with settings specific to your environment.
+
+```yaml
+# Enable the Vault WebUI
+ui:
+ enabled: true
+server:
+  # Disable the built-in data storage volume as it's not safe for High Availability mode
+ dataStorage:
+ enabled: false
+ # Enable High Availability Mode
+ ha:
+ enabled: true
+ # Configure Vault to listen on port 8200 for normal traffic and port 8201 for inter-cluster traffic
+ config: |
+ listener "tcp" {
+ tls_disable = 1
+ address = "[::]:8200"
+ cluster_address = "[::]:8201"
+ }
+ # Configure Vault to store its data in a GCS Bucket backend
+ storage "gcs" {
+ path = "gcs://my-vault-storage/vault-bucket"
+ ha_enabled = "true"
+ }
+ # Configure Vault to automatically unseal storage using a GKMS key
+ seal "gcpckms" {
+ project = "vault-helm-dev-246514"
+ region = "global"
+ key_ring = "vault-helm-unseal-kr"
+ crypto_key = "vault-helm-unseal-key"
+ }
+```
+
+Once you have successfully installed Vault, you will need to [initialize the Vault](https://learn.hashicorp.com/vault/getting-started/deploy#initializing-the-vault)
+and obtain the initial root token. To do this, you need access to the Kubernetes cluster that Vault has been deployed into.
+To initialize the Vault, get a shell to one of the Vault pods running inside Kubernetes (typically by using the `kubectl` command line tool).
+Once you have a shell into the pod, run the `vault operator init` command:
+
+```shell
+kubectl -n gitlab-managed-apps exec -it vault-0 sh
+/ $ vault operator init
+```
+
+This should give you your unseal keys and initial root token. Make sure to note these down
+and keep them safe, as you will need them to unseal the Vault throughout its lifecycle.
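+
+For example, a sketch of unsealing a restarted Vault pod with those keys. The
+`vault operator unseal` command prompts for one key share and must be repeated until the
+required threshold of shares has been provided:
+
+```shell
+kubectl -n gitlab-managed-apps exec -it vault-0 sh
+/ $ vault operator unseal
+```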
+
### Install JupyterHub using GitLab CI
> [Introduced](https://gitlab.com/gitlab-org/cluster-integration/cluster-applications/-/merge_requests/40) in GitLab 12.8.
-Enable JupyterHub in the `.gitlab/managed-apps/config.yaml` file to install it:
+JupyterHub is installed using GitLab CI by defining configuration in
+`.gitlab/managed-apps/config.yaml` as follows:
```yaml
jupyterhub:
@@ -764,33 +917,40 @@ jupyterhub:
gitlabGroupWhitelist: []
```
-`gitlabProjectIdWhitelist` restricts GitLab authentication to only members of the specified projects. `gitlabGroupWhitelist` restricts GitLab authentication to only members of the specified groups. Specifying an empty array for both will allow any user on the GitLab instance to log in.
+In the configuration:
-JupyterHub is installed into the `gitlab-managed-apps` namespace of your
-cluster.
+- `gitlabProjectIdWhitelist` restricts GitLab authentication to only members of the specified projects.
+- `gitlabGroupWhitelist` restricts GitLab authentication to only members of the specified groups.
+- Specifying an empty array for both will allow any user on the GitLab instance to sign in.
-In order for JupyterHub to function, you must setup an [OAuth Application](../../integration/oauth_provider.md). Using the following values:
+JupyterHub is installed into the `gitlab-managed-apps` namespace of your cluster.
-- "Redirect URI" to `http://<JupyterHub Host>/hub/oauth_callback`
-- "Scope" to `api read_repository write_repository`
+For JupyterHub to function, you must set up an [OAuth Application](../../integration/oauth_provider.md).
+Set:
-In addition the following variables must be specified using [CI variables](../../ci/variables/README.md):
+- "Redirect URI" to `http://<JupyterHub Host>/hub/oauth_callback`.
+- "Scope" to `api read_repository write_repository`.
-- `JUPYTERHUB_PROXY_SECRET_TOKEN` will set [`proxy.secretToken`](https://zero-to-jupyterhub.readthedocs.io/en/stable/reference.html#proxy-secrettoken). Generate this using `openssl rand -hex 32`.
-- `JUPYTERHUB_COOKIE_SECRET` will set [`hub.cookieSecret`](https://zero-to-jupyterhub.readthedocs.io/en/stable/reference.html#hub-cookiesecret). Generate this using `openssl rand -hex 32`.
-- `JUPYTERHUB_HOST` is the hostname used for the installation (e.g., `jupyter.example.gitlab.com`).
-- `JUPYTERHUB_GITLAB_HOST` is the hostname of the GitLab instance used for authentication (e.g., `example.gitlab.com`).
-- `JUPYTERHUB_AUTH_CRYPTO_KEY` will set [`auth.state.cryptoKey`](https://zero-to-jupyterhub.readthedocs.io/en/stable/reference.html#auth-state-cryptokey). Generate this using `openssl rand -hex 32`.
-- `JUPYTERHUB_AUTH_GITLAB_CLIENT_ID` the "Application ID" for the OAuth Application.
-- `JUPYTERHUB_AUTH_GITLAB_CLIENT_SECRET` the "Secret" for the OAuth Application.
+In addition, the following variables must be specified using [CI variables](../../ci/variables/README.md):
-By default JupyterHub will be installed using a
+| CI Variable | Description |
+|:---------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| `JUPYTERHUB_PROXY_SECRET_TOKEN` | Sets [`proxy.secretToken`](https://zero-to-jupyterhub.readthedocs.io/en/stable/reference.html#proxy-secrettoken). Generate using `openssl rand -hex 32`. |
+| `JUPYTERHUB_COOKIE_SECRET` | Sets [`hub.cookieSecret`](https://zero-to-jupyterhub.readthedocs.io/en/stable/reference.html#hub-cookiesecret). Generate using `openssl rand -hex 32`. |
+| `JUPYTERHUB_HOST` | Hostname used for the installation. For example, `jupyter.gitlab.example.com`. |
+| `JUPYTERHUB_GITLAB_HOST` | Hostname of the GitLab instance used for authentication. For example, `gitlab.example.com`. |
+| `JUPYTERHUB_AUTH_CRYPTO_KEY` | Sets [`auth.state.cryptoKey`](https://zero-to-jupyterhub.readthedocs.io/en/stable/reference.html#auth-state-cryptokey). Generate using `openssl rand -hex 32`. |
+| `JUPYTERHUB_AUTH_GITLAB_CLIENT_ID` | "Application ID" for the OAuth Application. |
+| `JUPYTERHUB_AUTH_GITLAB_CLIENT_SECRET` | "Secret" for the OAuth Application. |
+
+By default, JupyterHub will be installed using a
[default values file](https://gitlab.com/gitlab-org/cluster-integration/cluster-applications/-/blob/master/src/default-data/jupyterhub/values.yaml.gotmpl).
-You can customize the installation of JupyterHub by defining
-`.gitlab/managed-apps/jupyterhub/values.yaml` file in your cluster management
-project. Refer to the
-[chart reference](https://zero-to-jupyterhub.readthedocs.io/en/stable/reference.html)
-for the available configuration options.
+You can customize the installation of JupyterHub by defining a
+`.gitlab/managed-apps/jupyterhub/values.yaml` file in your cluster management project.
+
+Refer to the
+[chart reference](https://zero-to-jupyterhub.readthedocs.io/en/stable/reference.html) for the
+available configuration options.
### Install Elastic Stack using GitLab CI
@@ -817,7 +977,31 @@ management project. Refer to the
available configuration options.
NOTE: **Note:**
-In this alpha implementation of installing Elastic Stack through CI, reading the environment pod logs through Elasticsearch is unsupported. This is supported if [installed via the UI](#elastic-stack).
+In this alpha implementation of installing Elastic Stack through CI, reading the environment logs through Elasticsearch is unsupported. This is supported if [installed via the UI](#elastic-stack).
+
+### Install Crossplane using GitLab CI
+
+> [Introduced](https://gitlab.com/gitlab-org/cluster-integration/cluster-applications/-/merge_requests/68) in GitLab 12.9.
+
+Crossplane is installed using GitLab CI by defining configuration in
+`.gitlab/managed-apps/config.yaml`.
+
+The following configuration is required to install Crossplane using GitLab CI:
+
+```yaml
+crossplane:
+ installed: true
+```
+
+Crossplane is installed into the `gitlab-managed-apps` namespace of your cluster.
+
+You can check the default [values.yaml](https://github.com/crossplane/crossplane/blob/master/cluster/charts/crossplane/values.yaml.tmpl) we set for this chart.
+
+You can customize the installation of Crossplane by defining a
+`.gitlab/managed-apps/crossplane/values.yaml` file in your cluster
+management project. Refer to the
+[chart](https://github.com/crossplane/crossplane/tree/master/cluster/charts/crossplane#configuration) for the
+available configuration options. Note that this link points to the docs for the current development release, which may differ from the version you have installed. You can check out a specific version in the branch/tag switcher.
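+
+For example, a minimal `.gitlab/managed-apps/crossplane/values.yaml` override could pin the number
+of controller replicas. This is only an illustrative sketch; the `replicas` key is assumed from the
+chart's default values linked above, so verify the key names against the chart version you install:
+
+```yaml
+# .gitlab/managed-apps/crossplane/values.yaml (illustrative override)
+replicas: 1
+```
+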
## Upgrading applications
diff --git a/doc/user/clusters/crossplane.md b/doc/user/clusters/crossplane.md
index 1e8a3129fed..12a5626937a 100644
--- a/doc/user/clusters/crossplane.md
+++ b/doc/user/clusters/crossplane.md
@@ -35,43 +35,39 @@ export REGION=us-central1 # the GCP region where the GKE cluster is provisioned.
## Configure RBAC permissions
-- For a non-GitLab managed cluster(s), ensure that the service account for the token provided can manage resources in the `database.crossplane.io` API group.
-Manually grant GitLab's service account the ability to manage resources in the
-`database.crossplane.io` API group. The Aggregated ClusterRole allows us to do that.
-​
-NOTE: **Note:**
-For a non-GitLab managed cluster, ensure that the service account for the token provided can manage resources in the `database.crossplane.io` API group.
-​1. Save the following YAML as `crossplane-database-role.yaml`:
-
-```shell
-cat > crossplane-database-role.yaml <<EOF
-apiVersion: rbac.authorization.k8s.io/v1
-kind: ClusterRole
-metadata:
- name: crossplane-database-role
- labels:
- rbac.authorization.k8s.io/aggregate-to-edit: "true"
-rules:
-- apiGroups:
- - database.crossplane.io
- resources:
- - postgresqlinstances
- verbs:
- - get
- - list
- - create
- - update
- - delete
- - patch
- - watch
-EOF
-```
-
-Once the file is created, apply it with the following command in order to create the necessary role:
-
-```shell
-kubectl apply -f crossplane-database-role.yaml
-```
+- For GitLab-managed clusters, RBAC is configured automatically.
+
+- For non-GitLab managed clusters, ensure that the service account for the token provided can manage resources in the `database.crossplane.io` API group:
+
+ 1. Save the following YAML as `crossplane-database-role.yaml`:
+
+ ```yaml
+ apiVersion: rbac.authorization.k8s.io/v1
+ kind: ClusterRole
+ metadata:
+ name: crossplane-database-role
+ labels:
+ rbac.authorization.k8s.io/aggregate-to-edit: "true"
+ rules:
+ - apiGroups:
+ - database.crossplane.io
+ resources:
+ - postgresqlinstances
+ verbs:
+ - get
+ - list
+ - create
+ - update
+ - delete
+ - patch
+ - watch
+ ```
+
+ 1. Apply the cluster role to the cluster:
+
+ ```shell
+ kubectl apply -f crossplane-database-role.yaml
+ ```
## Configure Crossplane with a cloud provider
diff --git a/doc/user/clusters/img/threat_monitoring_v12_9.png b/doc/user/clusters/img/threat_monitoring_v12_9.png
new file mode 100644
index 00000000000..9097f9334a8
--- /dev/null
+++ b/doc/user/clusters/img/threat_monitoring_v12_9.png
Binary files differ
diff --git a/doc/user/application_security/compliance_dashboard/img/compliance_dashboard_v12_8.png b/doc/user/compliance/compliance_dashboard/img/compliance_dashboard_v12_8.png
index 5fc54927de7..5fc54927de7 100644
--- a/doc/user/application_security/compliance_dashboard/img/compliance_dashboard_v12_8.png
+++ b/doc/user/compliance/compliance_dashboard/img/compliance_dashboard_v12_8.png
Binary files differ
diff --git a/doc/user/compliance/compliance_dashboard/index.md b/doc/user/compliance/compliance_dashboard/index.md
new file mode 100644
index 00000000000..afe3ce185e6
--- /dev/null
+++ b/doc/user/compliance/compliance_dashboard/index.md
@@ -0,0 +1,31 @@
+---
+type: reference, howto
+---
+
+# Compliance Dashboard **(ULTIMATE)**
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/36524) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 12.8.
+
+The Compliance Dashboard gives you the ability to see a group's Merge Request activity
+by providing a high-level view for all projects in the group. For example, you can use it
+to see which code was approved for merging into production.
+
+## Overview
+
+To access the Compliance Dashboard for a group, navigate to **{shield}** **Security & Compliance > Compliance** on the group's menu.
+
+![Compliance Dashboard](img/compliance_dashboard_v12_8.png)
+
+## Use cases
+
+This feature is for people who care about the compliance status of projects within their group.
+
+You can use the dashboard to:
+
+- Get an overview of the latest Merge Request for each project.
+- See if Merge Requests were approved and by whom.
+
+## Permissions
+
+- Available on the [GitLab Ultimate](https://about.gitlab.com/pricing/) tier.
+- Accessible to **Administrators** and **Group Owners**.
diff --git a/doc/user/compliance/index.md b/doc/user/compliance/index.md
new file mode 100644
index 00000000000..fd4af74e086
--- /dev/null
+++ b/doc/user/compliance/index.md
@@ -0,0 +1,10 @@
+# Compliance **(ULTIMATE)**
+
+The compliance tools provided by GitLab let you keep an eye on various aspects of your project. The
+following compliance tools are available:
+
+- [Compliance Dashboard](compliance_dashboard/index.md): View recent merge request activity across
+ all projects in a group. This lets you see if merge requests were approved, and by whom.
+- [License Compliance](license_compliance/index.md): Search your project's dependencies for their
+ licenses. This lets you determine if the licenses of your project's dependencies are compatible
+ with your project's license.
diff --git a/doc/user/application_security/license_compliance/img/license_compliance.png b/doc/user/compliance/license_compliance/img/license_compliance.png
index cdce6b5fe38..cdce6b5fe38 100644
--- a/doc/user/application_security/license_compliance/img/license_compliance.png
+++ b/doc/user/compliance/license_compliance/img/license_compliance.png
Binary files differ
diff --git a/doc/user/application_security/license_compliance/img/license_compliance_add_license_v12_3.png b/doc/user/compliance/license_compliance/img/license_compliance_add_license_v12_3.png
index 79f6160e63f..79f6160e63f 100644
--- a/doc/user/application_security/license_compliance/img/license_compliance_add_license_v12_3.png
+++ b/doc/user/compliance/license_compliance/img/license_compliance_add_license_v12_3.png
Binary files differ
diff --git a/doc/user/application_security/license_compliance/img/license_compliance_decision.png b/doc/user/compliance/license_compliance/img/license_compliance_decision.png
index fbf90bec7fd..fbf90bec7fd 100644
--- a/doc/user/application_security/license_compliance/img/license_compliance_decision.png
+++ b/doc/user/compliance/license_compliance/img/license_compliance_decision.png
Binary files differ
diff --git a/doc/user/application_security/license_compliance/img/license_compliance_pipeline_tab_v12_3.png b/doc/user/compliance/license_compliance/img/license_compliance_pipeline_tab_v12_3.png
index fd519d63b3e..fd519d63b3e 100644
--- a/doc/user/application_security/license_compliance/img/license_compliance_pipeline_tab_v12_3.png
+++ b/doc/user/compliance/license_compliance/img/license_compliance_pipeline_tab_v12_3.png
Binary files differ
diff --git a/doc/user/application_security/license_compliance/img/license_compliance_search_v12_3.png b/doc/user/compliance/license_compliance/img/license_compliance_search_v12_3.png
index 4a7cff2e85c..4a7cff2e85c 100644
--- a/doc/user/application_security/license_compliance/img/license_compliance_search_v12_3.png
+++ b/doc/user/compliance/license_compliance/img/license_compliance_search_v12_3.png
Binary files differ
diff --git a/doc/user/application_security/license_compliance/img/license_compliance_settings_v12_3.png b/doc/user/compliance/license_compliance/img/license_compliance_settings_v12_3.png
index 72d0888a9dc..72d0888a9dc 100644
--- a/doc/user/application_security/license_compliance/img/license_compliance_settings_v12_3.png
+++ b/doc/user/compliance/license_compliance/img/license_compliance_settings_v12_3.png
Binary files differ
diff --git a/doc/user/application_security/license_compliance/img/license_list_v12_6.png b/doc/user/compliance/license_compliance/img/license_list_v12_6.png
index 8f2b510be0d..8f2b510be0d 100644
--- a/doc/user/application_security/license_compliance/img/license_list_v12_6.png
+++ b/doc/user/compliance/license_compliance/img/license_list_v12_6.png
Binary files differ
diff --git a/doc/user/compliance/license_compliance/img/policies_maintainer_add_v12_9.png b/doc/user/compliance/license_compliance/img/policies_maintainer_add_v12_9.png
new file mode 100644
index 00000000000..6dc7d3a0924
--- /dev/null
+++ b/doc/user/compliance/license_compliance/img/policies_maintainer_add_v12_9.png
Binary files differ
diff --git a/doc/user/compliance/license_compliance/img/policies_maintainer_edit_v12_9.png b/doc/user/compliance/license_compliance/img/policies_maintainer_edit_v12_9.png
new file mode 100644
index 00000000000..31abbcf2d44
--- /dev/null
+++ b/doc/user/compliance/license_compliance/img/policies_maintainer_edit_v12_9.png
Binary files differ
diff --git a/doc/user/compliance/license_compliance/img/policies_v12_9.png b/doc/user/compliance/license_compliance/img/policies_v12_9.png
new file mode 100644
index 00000000000..6c6247320dc
--- /dev/null
+++ b/doc/user/compliance/license_compliance/img/policies_v12_9.png
Binary files differ
diff --git a/doc/user/compliance/license_compliance/index.md b/doc/user/compliance/license_compliance/index.md
new file mode 100644
index 00000000000..2679dc85f42
--- /dev/null
+++ b/doc/user/compliance/license_compliance/index.md
@@ -0,0 +1,342 @@
+---
+type: reference, howto
+---
+
+# License Compliance **(ULTIMATE)**
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/5483) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 11.0.
+
+## Overview
+
+If you are using [GitLab CI/CD](../../../ci/README.md), you can search your project dependencies for their licenses
+using License Compliance.
+
+You can take advantage of License Compliance by either [including the job](#configuration)
+in your existing `.gitlab-ci.yml` file or by implicitly using
+[Auto License Compliance](../../../topics/autodevops/index.md#auto-license-compliance-ultimate)
+that is provided by [Auto DevOps](../../../topics/autodevops/index.md).
+
+GitLab checks the License Compliance report, compares the licenses between the
+source and target branches, and shows the information right on the merge request.
+Blacklisted licenses are clearly visible with a red `x` icon next to them,
+as are new licenses which need a decision from you. In addition, you can
+[manually approve or blacklist](#project-policies-for-license-compliance)
+licenses in your project's settings.
+
+NOTE: **Note:**
+If the license compliance report doesn't have anything to compare to, no information
+will be displayed in the merge request area. That is the case when you add the
+`license_scanning` job in your `.gitlab-ci.yml` for the first time.
+Consecutive merge requests will have something to compare to and the license
+compliance report will be shown properly.
+
+![License Compliance Widget](img/license_compliance.png)
+
+If you are a project or group Maintainer, you can click on a license to be given
+the choice to approve it or blacklist it.
+
+![License approval decision](img/license_compliance_decision.png)
+
+## Use cases
+
+It helps you find what licenses your project uses in its dependencies, and decide for each of them
+whether to allow it or forbid it. For example, your application might use an external (open source)
+library whose license is incompatible with yours.
+
+## Supported languages and package managers
+
+The following languages and package managers are supported.
+
+| Language | Package managers | Scan Tool |
+|------------|-------------------------------------------------------------------|----------------------------------------------------------|
+| JavaScript | [Bower](https://bower.io/), [npm](https://www.npmjs.com/), [yarn](https://yarnpkg.com/) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types)) |[License Finder](https://github.com/pivotal/LicenseFinder)|
+| Go | [Godep](https://github.com/tools/godep), go get ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types)), gvt ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types)), glide ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types)), dep ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types)), trash ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types)) and govendor ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types)), [go mod](https://github.com/golang/go/wiki/Modules) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types)) |[License Finder](https://github.com/pivotal/LicenseFinder)|
+| Java | [Gradle](https://gradle.org/), [Maven](https://maven.apache.org/) |[License Finder](https://github.com/pivotal/LicenseFinder)|
+| .NET | [Nuget](https://www.nuget.org/) (.NET Framework is supported via the [mono project](https://www.mono-project.com/). Windows specific dependencies are not supported at this time.) |[License Finder](https://github.com/pivotal/LicenseFinder)|
+| Python | [pip](https://pip.pypa.io/en/stable/) |[License Finder](https://github.com/pivotal/LicenseFinder)|
+| Ruby | [gem](https://rubygems.org/) |[License Finder](https://github.com/pivotal/LicenseFinder)|
+| Erlang | [rebar](https://www.rebar3.org/) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types))|[License Finder](https://github.com/pivotal/LicenseFinder)|
+| Objective-C, Swift | [Carthage](https://github.com/Carthage/Carthage), [CocoaPods v0.39 and below](https://cocoapods.org/) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types)) |[License Finder](https://github.com/pivotal/LicenseFinder)|
+| Elixir | [mix](https://elixir-lang.org/getting-started/mix-otp/introduction-to-mix.html) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types)) |[License Finder](https://github.com/pivotal/LicenseFinder)|
+| C++/C | [conan](https://conan.io/) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types))|[License Finder](https://github.com/pivotal/LicenseFinder)|
+| Scala | [sbt](https://www.scala-sbt.org/) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types))|[License Finder](https://github.com/pivotal/LicenseFinder)|
+| Rust | [cargo](https://crates.io) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types))|[License Finder](https://github.com/pivotal/LicenseFinder)|
+| PHP | [composer](https://getcomposer.org/) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types))|[License Finder](https://github.com/pivotal/LicenseFinder)|
+
+## Requirements
+
+To run a License Compliance scanning job, you need GitLab Runner with the
+[`docker` executor](https://docs.gitlab.com/runner/executors/docker.html).
+
+## Configuration
+
+For GitLab 12.8 and later, to enable License Compliance, you must
+[include](../../../ci/yaml/README.md#includetemplate) the
+[`License-Scanning.gitlab-ci.yml` template](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Security/License-Scanning.gitlab-ci.yml)
+that's provided as a part of your GitLab installation.
+For older versions of GitLab from 11.9 to 12.7, you must
+[include](../../../ci/yaml/README.md#includetemplate) the
+[`License-Management.gitlab-ci.yml` template](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Security/License-Management.gitlab-ci.yml).
+For GitLab versions earlier than 11.9, you can copy and use the job as defined
+in that template.
+
+NOTE: **Note:**
+In GitLab 13.0, the `License-Management.gitlab-ci.yml` template is scheduled to be removed.
+Use `License-Scanning.gitlab-ci.yml` instead.
+
+Add the following to your `.gitlab-ci.yml` file:
+
+```yaml
+include:
+ - template: License-Scanning.gitlab-ci.yml
+```
+
+The included template will create a `license_scanning` job in your CI/CD pipeline
+and scan your dependencies to find their licenses.
+
+NOTE: **Note:**
+Before GitLab 12.8, the `license_scanning` job was named `license_management`.
+In GitLab 13.0, the `license_management` job is scheduled to be removed completely,
+so you're advised to migrate to the `license_scanning` job and use the new
+`License-Scanning.gitlab-ci.yml` template.
+
+The results will be saved as a
+[License Compliance report artifact](../../../ci/yaml/README.md#artifactsreportslicense_scanning-ultimate)
+that you can later download and analyze. Due to implementation limitations, we
+always take the latest License Compliance artifact available. Behind the scenes, the
+[GitLab License Compliance Docker image](https://gitlab.com/gitlab-org/security-products/license-management)
+is used to detect the languages/frameworks and, in turn, analyze the licenses.
+
+The License Compliance settings can be changed through [environment variables](#available-variables) by using the
+[`variables`](../../../ci/yaml/README.md#variables) parameter in `.gitlab-ci.yml`.
+
+### Available variables
+
+License Compliance can be configured using environment variables.
+
+| Environment variable | Required | Description |
+|-----------------------|----------|-------------|
+| `MAVEN_CLI_OPTS` | no | Additional arguments for the mvn executable. If not supplied, defaults to `-DskipTests`. |
+| `LICENSE_FINDER_CLI_OPTS` | no | Additional arguments for the `license_finder` executable. For example, if your project has both Golang and Ruby code stored in different directories and you want to only scan the Ruby code, you can update your `.gitlab-ci-yml` template to specify which project directories to scan, like `LICENSE_FINDER_CLI_OPTS: '--debug --aggregate-paths=. ruby'`. |
+| `LM_JAVA_VERSION` | no | Version of Java. If set to `11`, Maven and Gradle use Java 11 instead of Java 8. |
+| `LM_PYTHON_VERSION` | no | Version of Python. If set to `3`, dependencies are installed using Python 3 instead of Python 2.7. |
+| `SETUP_CMD` | no | Custom setup for the dependency installation. (experimental) |
+
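+For example, to scan only the Ruby code in a project that also contains Go code, you can pass the
+`LICENSE_FINDER_CLI_OPTS` value shown in the table above by overriding the job (a minimal sketch;
+the paths are illustrative):
+
+```yaml
+include:
+  - template: License-Scanning.gitlab-ci.yml
+
+license_scanning:
+  variables:
+    LICENSE_FINDER_CLI_OPTS: '--aggregate-paths=. ruby'
+```
+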
+### Installing custom dependencies
+
+> Introduced in [GitLab Ultimate](https://about.gitlab.com/pricing/) 11.4.
+
+The `license_management` image already embeds many auto-detection scripts, languages,
+and packages. Nevertheless, it's almost impossible to cover all cases for all projects.
+That's why sometimes it's necessary to install extra packages, or to have extra steps
+in the project automated setup, like the download and installation of a certificate.
+For that, a `LICENSE_MANAGEMENT_SETUP_CMD` environment variable can be passed to the container,
+with the required commands to run before the license detection.
+
+If present, this variable will override the setup step necessary to install all the packages
+of your application (for example, for a project with a `Gemfile`, the setup step could be
+`bundle install`).
+
+For example:
+
+```yaml
+include:
+ - template: License-Scanning.gitlab-ci.yml
+
+variables:
+ LICENSE_MANAGEMENT_SETUP_CMD: sh my-custom-install-script.sh
+```
+
+In this example, `my-custom-install-script.sh` is a shell script at the root
+directory of your project.
+
+### Overriding the template
+
+If you want to override the job definition (for example, change properties like
+`variables` or `dependencies`), you need to declare a `license_scanning` job
+after the template inclusion and specify any additional keys under it. For example:
+
+```yaml
+include:
+ - template: License-Scanning.gitlab-ci.yml
+
+license_scanning:
+ variables:
+ CI_DEBUG_TRACE: "true"
+```
+
+### Configuring Maven projects
+
+The License Compliance tool provides a `MAVEN_CLI_OPTS` environment variable which can hold
+the command line arguments to pass to the `mvn install` command, which is executed under the hood.
+Use it to customize the Maven execution. For example:
+
+```yaml
+include:
+ - template: License-Scanning.gitlab-ci.yml
+
+license_scanning:
+ variables:
+ MAVEN_CLI_OPTS: --debug
+```
+
+`mvn install` runs through all of the [build life cycle](http://maven.apache.org/guides/introduction/introduction-to-the-lifecycle.html)
+stages prior to `install`, including `test`. Running unit tests is not directly
+necessary for license scanning purposes and consumes time, so it's skipped by
+setting the default value of `MAVEN_CLI_OPTS` to `-DskipTests`. If you want
+to supply custom `MAVEN_CLI_OPTS` and skip tests at the same time, don't forget
+to explicitly add `-DskipTests` to your options.
+If you still need to run tests during `mvn install`, add `-DskipTests=false` to
+`MAVEN_CLI_OPTS`.
+
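+For example, to add your own Maven flags while still skipping tests, include `-DskipTests`
+explicitly in the value (a minimal sketch; `--debug` is only an illustrative extra flag):
+
+```yaml
+include:
+  - template: License-Scanning.gitlab-ci.yml
+
+license_scanning:
+  variables:
+    MAVEN_CLI_OPTS: '-DskipTests --debug'
+```
+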
+### Selecting the version of Python
+
+> - [Introduced](https://gitlab.com/gitlab-org/security-products/license-management/-/merge_requests/36) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 12.0.
+> - In [GitLab 12.2](https://gitlab.com/gitlab-org/gitlab/issues/12032), Python 3.5 became the default.
+> - In [GitLab 12.7](https://gitlab.com/gitlab-org/security-products/license-management/-/merge_requests/101), Python 3.8 became the default.
+
+License Compliance uses Python 3.8 and pip 19.1 by default.
+If your project requires Python 2, you can switch to Python 2.7 and pip 10.0
+by setting the `LM_PYTHON_VERSION` environment variable to `2`.
+
+```yaml
+include:
+ - template: License-Scanning.gitlab-ci.yml
+
+license_scanning:
+ variables:
+ LM_PYTHON_VERSION: 2
+```
+
+### Migration from `license_management` to `license_scanning`
+
+In GitLab 12.8, the `license_management` job was given a new name, `license_scanning`. This change was made to improve clarity around the purpose of the scan, which is to scan and collect the types of licenses present in a project's dependencies.
+Support for `license_management` is scheduled to be dropped in GitLab 13.0.
+If you're using a custom setup for License Compliance, you must
+update your CI configuration accordingly:
+
+1. Change the CI template to `License-Scanning.gitlab-ci.yml`.
+1. Change the job name to `license_scanning` (if you mention it in `.gitlab-ci.yml`).
+1. Change the artifact name to `gl-license-scanning-report.json` (if you mention it in `.gitlab-ci.yml`).
+
+For example, the following `.gitlab-ci.yml`:
+
+```yaml
+include:
+ - template: License-Management.gitlab-ci.yml
+
+license_management:
+ artifacts:
+ reports:
+ license_management: gl-license-management-report.json
+```
+
+Should be changed to:
+
+```yaml
+include:
+ - template: License-Scanning.gitlab-ci.yml
+
+license_scanning:
+ artifacts:
+ reports:
+ license_scanning: gl-license-scanning-report.json
+```
+
+## Project policies for License Compliance
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/5940) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 11.4.
+
+From the project's settings:
+
+- The list of licenses and their status can be managed.
+- Licenses can be manually approved or blacklisted.
+
+To approve or blacklist a license:
+
+1. Either use the **Manage licenses** button in the merge request widget, or
+ navigate to the project's **Settings > CI/CD** and expand the
+ **License Compliance** section.
+1. Click the **Add a license** button.
+
+ ![License Compliance Add License](img/license_compliance_add_license_v12_3.png)
+
+1. In the **License name** dropdown, either:
+ - Select one of the available licenses. You can search for licenses in the field
+ at the top of the list.
+ - Enter arbitrary text in the field at the top of the list. This will cause the text to be
+ added as a license name to the list.
+1. Select the **Approve** or **Blacklist** radio button to approve or blacklist
+   the selected license, respectively.
+
+To modify an existing license:
+
+1. In the **License Compliance** list, click the **Approved/Declined** dropdown to change it to the desired status.
+
+ ![License Compliance Settings](img/license_compliance_settings_v12_3.png)
+
+Searching for Licenses:
+
+1. Use the **Search** box to search for a specific license.
+
+ ![License Compliance Search](img/license_compliance_search_v12_3.png)
+
+## License Compliance report under pipelines
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/5491) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 11.2.
+
+From your project's left sidebar, navigate to **CI/CD > Pipelines** and click on the
+pipeline ID that has a `license_management` job to see the Licenses tab with the listed
+licenses (if any).
+
+![License Compliance Pipeline Tab](img/license_compliance_pipeline_tab_v12_3.png)
+
+<!-- ## Troubleshooting
+
+Include any troubleshooting steps that you can foresee. If you know beforehand what issues
+one might have when setting this up, or when something is changed, or on upgrading, it's
+important to describe those, too. Think of things that may go wrong and include them here.
+This is important to minimize requests for support, and to avoid doc comments with
+questions that you know someone might ask.
+
+Each scenario can be a third-level heading, e.g. `### Getting error message X`.
+If you have none to add when creating a doc, leave this section in place
+but commented out to help encourage others to add to it in the future. -->
+
+## License list
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/13582) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 12.7.
+
+The License list allows you to see your project's licenses and key
+details about them.
+
+For the licenses to appear in the License list, the following
+requirements must be met:
+
+1. The License Compliance CI job must be [configured](#configuration) for your project.
+1. Your project must use at least one of the
+ [supported languages and package managers](#supported-languages-and-package-managers).
+
+Once everything is set, navigate to **Security & Compliance > License Compliance**
+in your project's sidebar, and you'll see the licenses displayed, where:
+
+- **Name:** The name of the license.
+- **Component:** The components which have this license.
+
+![License List](img/license_list_v12_6.png)
+
+## Policies
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/22465) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 12.9.
+
+The **Policies** tab allows you to see your project's software license policies
+and the associated classifications for each.
+
+Policies can be configured by maintainers of the project.
+
+![Edit Policy](img/policies_maintainer_edit_v12_9.png)
+![Add Policy](img/policies_maintainer_add_v12_9.png)
+
+Developers of the project can view the policies configured in a project.
+
+![View Policies](img/policies_v12_9.png)
diff --git a/doc/user/discussions/img/suggestion_code_block_output_v12_8.png b/doc/user/discussions/img/suggestion_code_block_output_v12_8.png
index 74833253aa0..6f29107146d 100644
--- a/doc/user/discussions/img/suggestion_code_block_output_v12_8.png
+++ b/doc/user/discussions/img/suggestion_code_block_output_v12_8.png
Binary files differ
diff --git a/doc/user/gitlab_com/index.md b/doc/user/gitlab_com/index.md
index 7d02346af67..7d0614d411a 100644
--- a/doc/user/gitlab_com/index.md
+++ b/doc/user/gitlab_com/index.md
@@ -74,8 +74,9 @@ Below are the current settings regarding [GitLab CI/CD](../../ci/README.md).
| ----------- | ----------------- | ------------- |
| Artifacts maximum size (uncompressed) | 1G | 100M |
| Artifacts [expiry time](../../ci/yaml/README.md#artifactsexpire_in) | kept forever | deleted after 30 days unless otherwise specified |
-| Scheduled Pipeline Cron | `*/5 * * * *` | `*/19 * * * *` |
+| Scheduled Pipeline Cron | `*/5 * * * *` | `19 * * * *` |
| [Max jobs in active pipelines](../../administration/instance_limits.md#number-of-jobs-in-active-pipelines) | `500` for Free tier, unlimited otherwise | Unlimited
+| [Max pipeline schedules in projects](../../administration/instance_limits.md#number-of-pipeline-schedules) | `10` for Free tier, `50` for all paid tiers | Unlimited |
## Repository size limit
@@ -88,11 +89,21 @@ or over the size limit, you can [reduce your repository size with Git](../projec
## IP range
-GitLab.com, CI/CD, and related services are deployed into Google Cloud Platform (GCP). Any
-IP based firewall can be configured by looking up all
+GitLab.com uses the IP range `34.74.90.64/28` for traffic from its Web/API
+fleet. You can expect connections from webhooks or repository mirroring to come
+from those IPs, and can whitelist them.
+
+We do not provide static IP addresses for connections from CI/CD runners.
+All our runners are deployed into Google Cloud Platform (GCP), so any IP-based
+firewall can be configured by looking up all
[IP address ranges or CIDR blocks for GCP](https://cloud.google.com/compute/docs/faq#where_can_i_find_product_name_short_ip_ranges).
-[Static endpoints](https://gitlab.com/groups/gitlab-com/gl-infra/-/epics/97) are being considered.
+## Maximum number of webhooks
+
+A limit of:
+
+- 100 webhooks applies to projects.
+- 50 webhooks applies to groups. **(BRONZE ONLY)**
## Shared Runners
@@ -104,7 +115,7 @@ Linux Shared Runners on GitLab.com run in [autoscale mode] and are powered by Go
Autoscaling means reduced waiting times to spin up CI/CD jobs, and isolated VMs for each project,
thus maximizing security. They're free to use for public open source projects and limited
to 2000 CI minutes per month per group for private projects. More minutes
-[can be purchased](../../subscriptions/index.md#extra-shared-runners-pipeline-minutes), if
+[can be purchased](../../subscriptions/index.md#purchasing-additional-ci-minutes), if
needed. Read about all [GitLab.com plans](https://about.gitlab.com/pricing/).
All your CI/CD jobs run on [n1-standard-1 instances](https://cloud.google.com/compute/docs/machine-types) with 3.75GB of RAM, CoreOS and the latest Docker Engine
@@ -365,15 +376,6 @@ NOTE: **Note:**
The `SIDEKIQ_MEMORY_KILLER_MAX_RSS` setting is `16000000` on Sidekiq import
nodes and Sidekiq export nodes.
-## Cron jobs
-
-Periodically executed jobs by Sidekiq, to self-heal GitLab, do external
-synchronizations, run scheduled pipelines, etc.:
-
-| Setting | GitLab.com | Default |
-|-------- |------------- |------------- |
-| `pipeline_schedule_worker` | `19 * * * *` | `19 * * * *` |
-
## PostgreSQL
GitLab.com being a fairly large installation of GitLab means we have changed
@@ -523,6 +525,14 @@ On GitLab.com, projects, groups, and snippets created
As of GitLab 12.2 (July 2019), projects, groups, and snippets have the
[**Internal** visibility](../../public_access/public_access.md#internal-projects) setting [disabled on GitLab.com](https://gitlab.com/gitlab-org/gitlab/issues/12388).
+### SSH maximum number of connections
+
+GitLab.com defines the maximum number of concurrent, unauthenticated SSH connections by
+using the [MaxStartups setting](http://man.openbsd.org/sshd_config.5#MaxStartups).
+If more than the maximum number of allowed connections occur concurrently, they are
+dropped and users get
+[an `ssh_exchange_identification` error](../../topics/git/troubleshooting_git.md#ssh_exchange_identification-error).
+
## GitLab.com Logging
We use [Fluentd](https://gitlab.com/gitlab-com/runbooks/tree/master/logging/doc#fluentd) to parse our logs. Fluentd sends our logs to
@@ -579,7 +589,7 @@ Service discovery:
- [`gitlab-cookbooks` / `gitlab_consul` · GitLab](https://gitlab.com/gitlab-cookbooks/gitlab_consul)
-### Haproxy
+### HAProxy
High Performance TCP/HTTP Load Balancer:
diff --git a/doc/user/group/epics/img/epic_view_roadmap_v12.3.png b/doc/user/group/epics/img/epic_view_roadmap_v12.3.png
deleted file mode 100644
index a17c56c618b..00000000000
--- a/doc/user/group/epics/img/epic_view_roadmap_v12.3.png
+++ /dev/null
Binary files differ
diff --git a/doc/user/group/epics/img/epic_view_roadmap_v12_9.png b/doc/user/group/epics/img/epic_view_roadmap_v12_9.png
new file mode 100644
index 00000000000..035adc5e7ac
--- /dev/null
+++ b/doc/user/group/epics/img/epic_view_roadmap_v12_9.png
Binary files differ
diff --git a/doc/user/group/epics/index.md b/doc/user/group/epics/index.md
index 91d6984efc5..7712b86bbe2 100644
--- a/doc/user/group/epics/index.md
+++ b/doc/user/group/epics/index.md
@@ -5,7 +5,7 @@ type: reference, howto
# Epics **(PREMIUM)**
> - Introduced in [GitLab Ultimate](https://about.gitlab.com/pricing/) 10.2.
-> - In [GitLab 12.8](https://gitlab.com/gitlab-org/gitlab/issues/37081), single-level Epics were moved to Premium tier.
+> - In [GitLab 12.8](https://gitlab.com/gitlab-org/gitlab/issues/37081), single-level Epics were moved to the Premium tier.
Epics let you manage your portfolio of projects more efficiently and with less
effort by tracking groups of issues that share a theme, across projects and
@@ -23,7 +23,7 @@ graph TD
Parent_epic --> Issue1
Parent_epic --> Child_epic
Child_epic --> Issue2
-````
+```
## Use cases
@@ -156,10 +156,12 @@ These are dynamic dates which are recalculated if any of the following occur:
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/7332) in GitLab 12.5 to replace **From milestones**.
-If you select **Inherited** for the start date, GitLab will scan all child epics and issues assigned to the epic,
-and will set the start date to match the earliest found start date or milestone. Similarly, if you select
-**Inherited** for the due date, GitLab will set the due date to match the latest due date or milestone
-found among its child epics and issues.
+If you select:
+
+- **Inherited** for the start date, GitLab will scan all child epics and issues assigned to the epic,
+ and will set the start date to match the earliest found start date or milestone.
+- **Inherited** for the due date, GitLab will set the due date to match the latest due date or
+ milestone found among its child epics and issues.
These are dynamic dates and recalculated if any of the following occur:
@@ -180,7 +182,9 @@ If your epic contains one or more [child epics](#multi-level-child-epics-ultimat
have a [start or due date](#start-date-and-due-date), a
[roadmap](../roadmap/index.md) view of the child epics is listed under the parent epic.
-![Child epics roadmap](img/epic_view_roadmap_v12.3.png)
+![Child epics roadmap](img/epic_view_roadmap_v12_9.png)
+
+---
## Reordering issues and child epics
@@ -238,6 +242,8 @@ You can always reopen it using the reopen button.
![reopen epic - button](img/button_reopen_epic.png)
+---
+
### Using quick actions
You can close or reopen an epic using [Quick actions](../../project/quick_actions.md)
@@ -249,9 +255,12 @@ link in the issue sidebar.
![containing epic](img/containing_epic.png)
+---
+
## Promoting an issue to an epic
-> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/3777) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 11.6.
+> - [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/3777) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 11.6.
+> - In [GitLab 12.8](https://gitlab.com/gitlab-org/gitlab/issues/37081), it was moved to the Premium tier.
If you have [permissions](../../permissions.md) to close an issue and create an
epic in the parent group, you can promote an issue to an epic with the `/promote`
@@ -274,10 +283,11 @@ The following issue metadata will be copied to the epic:
## Searching for an epic from epics list page
-> Introduced in [GitLab Ultimate](https://about.gitlab.com/pricing/) 10.5.
+> - Introduced in [GitLab Ultimate](https://about.gitlab.com/pricing/) 10.5.
+> - In [GitLab 12.8](https://gitlab.com/gitlab-org/gitlab/issues/37081), it was moved to the Premium tier.
You can search for an epic from the list of epics using filtered search bar (similar to
-that of Issues and Merge requests) based on following parameters:
+that of Issues and Merge Requests) based on the following parameters:
- Title or description
- Author name / username
@@ -285,22 +295,26 @@ that of Issues and Merge requests) based on following parameters:
![epics search](img/epics_search.png)
-To search, go to the list of epics and click on the field **Search or filter results...**.
+To search, go to the list of epics and click on the field **Search or filter results**.
It will display a dropdown menu, from which you can add an author. You can also enter plain
text to search by epic title or description. When done, press <kbd>Enter</kbd> on your
keyboard to filter the list.
You can also sort epics list by:
-- **Created date**
-- **Last updated**
-- **Start date**
-- **Due date**
+- Created date
+- Last updated
+- Start date
+- Due date
-Each option contains a button that can toggle the order between **ascending** and **descending**. The sort option and order will be persisted to be used wherever epics are browsed including the [roadmap](../roadmap/index.md).
+Each option contains a button that can toggle the order between **Ascending** and **Descending**.
+The sort option and order are saved and used wherever you browse epics, including the
+[Roadmap](../roadmap/index.md).
![epics sort](img/epics_sort.png)
+---
+
## Permissions
If you have access to view an epic and have access to view an issue already
@@ -313,7 +327,7 @@ Note that for a given group, the visibility of all projects must be the same as
the group, or less restrictive. That means if you have access to a group's epic,
then you already have access to its projects' issues.
-You may also consult the [group permissions table](../../permissions.md#group-members-permissions).
+You can also consult the [group permissions table](../../permissions.md#group-members-permissions).
## Thread
@@ -321,20 +335,20 @@ You may also consult the [group permissions table](../../permissions.md#group-me
These text fields also fully support
[GitLab Flavored Markdown](../../markdown.md#gitlab-flavored-markdown-gfm).
-## Comment, or start a thread
+## Comment or start a thread
-Once you wrote your comment, you can either:
+Once you write your comment, you can either:
-- Click "Comment" and your comment will be published.
-- Click "Start thread": start a thread within that epic's discussion to discuss specific points.
+- Click **Comment**, and your comment will be published.
+- Click **Start thread**, and you will start a thread within that epic's discussion.
## Award emoji
-- You can [award an emoji](../../award_emojis.md) to that epic or its comments.
+You can [award an emoji](../../award_emojis.md) to that epic or its comments.
## Notifications
-- [Receive notifications](../../profile/notifications.md) for epic events.
+You can [turn on notifications](../../profile/notifications.md) to be alerted about epic events.
<!-- ## Troubleshooting
diff --git a/doc/user/group/index.md b/doc/user/group/index.md
index 6b76e070c41..8135b8e38ab 100644
--- a/doc/user/group/index.md
+++ b/doc/user/group/index.md
@@ -181,6 +181,21 @@ of a group:
1. Give a different member **Owner** permissions.
1. Have the new owner sign in and remove **Owner** permissions from you.
+## Changing the default branch protection of a group
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/7583) in GitLab 12.9.
+
+By default, every group inherits the branch protection set at the global level.
+
+To change this setting for a specific group:
+
+1. Go to the group's **{settings}** **Settings > General** page.
+1. Expand the **Permissions, LFS, 2FA** section.
+1. Select the desired option in the **Default branch protection** dropdown list.
+1. Click **Save changes**.
+
+To change this setting globally, see [Default branch protection](../admin_area/settings/visibility_and_access_controls.md#default-branch-protection).
+
## Add projects to a group
There are two different ways to add a new project to a group:
@@ -241,10 +256,9 @@ and give all group members access to the project at once.
Alternatively, you can [lock the sharing with group feature](#share-with-group-lock).
-## Sharing a group with another group **(CORE ONLY)**
+## Sharing a group with another group
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/18328) in GitLab 12.7.
-> This feature has been [disabled on GitLab.com](https://gitlab.com/gitlab-com/gl-infra/production/issues/1635).
Similarly to [sharing a project with a group](#sharing-a-project-with-a-group),
you can share a group with another group to give direct group members access
@@ -332,7 +346,7 @@ Changing a group's path can have unintended side effects. Read
before proceeding.
If you are vacating the path so it can be claimed by another group or user,
-you may need to rename the group, too, since both names and paths must
+you may need to rename the group too, since both names and paths must
be unique.
To change your group path:
diff --git a/doc/user/group/issues_analytics/img/issues_created_per_month_v12_8.png b/doc/user/group/issues_analytics/img/issues_created_per_month_v12_8.png
index b7dc2d3da8d..fccfa949779 100644
--- a/doc/user/group/issues_analytics/img/issues_created_per_month_v12_8.png
+++ b/doc/user/group/issues_analytics/img/issues_created_per_month_v12_8.png
Binary files differ
diff --git a/doc/user/group/issues_analytics/index.md b/doc/user/group/issues_analytics/index.md
index 6c710885b98..4477b9bb1e6 100644
--- a/doc/user/group/issues_analytics/index.md
+++ b/doc/user/group/issues_analytics/index.md
@@ -4,13 +4,14 @@ type: reference
# Issues Analytics **(PREMIUM)**
-> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/7478) in [GitLab Premium](https://about.gitlab.com/pricing/) 11.5.
+> - [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/7478) in [GitLab Premium](https://about.gitlab.com/pricing/) 11.5.
+> - [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/196561) in [GitLab Premium](https://about.gitlab.com/pricing/) 12.9 at the project level.
Issues Analytics is a bar graph which illustrates the number of issues created each month.
The default timespan is 13 months, which includes the current month, and the 12 months
prior.
-To access the chart, navigate to a group's sidebar and select **Analytics > Issues Analytics**.
+To access the chart, navigate to your group or project sidebar and select **{chart}** **Analytics > Issues Analytics**.
Hover over each bar to see the total number of issues.
diff --git a/doc/user/group/roadmap/img/roadmap_view.png b/doc/user/group/roadmap/img/roadmap_view.png
deleted file mode 100644
index 2be3849ba1b..00000000000
--- a/doc/user/group/roadmap/img/roadmap_view.png
+++ /dev/null
Binary files differ
diff --git a/doc/user/group/roadmap/img/roadmap_view_v12_9.png b/doc/user/group/roadmap/img/roadmap_view_v12_9.png
new file mode 100644
index 00000000000..093e8af8702
--- /dev/null
+++ b/doc/user/group/roadmap/img/roadmap_view_v12_9.png
Binary files differ
diff --git a/doc/user/group/roadmap/index.md b/doc/user/group/roadmap/index.md
index a72cd990706..e603457b1a1 100644
--- a/doc/user/group/roadmap/index.md
+++ b/doc/user/group/roadmap/index.md
@@ -2,15 +2,21 @@
type: reference
---
-# Roadmap **(ULTIMATE)**
+# Roadmap **(PREMIUM)**
-> Introduced in [GitLab Ultimate](https://about.gitlab.com/pricing/) 10.5.
+> - Introduced in [GitLab Ultimate](https://about.gitlab.com/pricing/) 10.5.
+> - In [GitLab 12.9](https://gitlab.com/gitlab-org/gitlab/issues/198062), Roadmaps were moved to the Premium tier.
An Epic within a group containing **Start date** and/or **Due date**
can be visualized in a form of a timeline (e.g. a Gantt chart). The Epics Roadmap page
shows such a visualization for all the epics which are under a group and/or its subgroups.
-![roadmap view](img/roadmap_view.png)
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/5164) in GitLab 12.9.
+
+On the epic bars, you can see their title, progress, and completed weight percentage.
+When you hover over an epic bar, a popover appears with its title, start and due dates, and weight completed.
+
+![roadmap view](img/roadmap_view_v12_9.png)
A dropdown allows you to show only open or closed epics. By default, all epics are shown.
@@ -30,7 +36,8 @@ Roadmaps can also be [visualized inside an epic](../epics/index.md#roadmap-in-ep
## Timeline duration
-> Introduced in [GitLab Ultimate](https://about.gitlab.com/pricing/) 11.0.
+> - Introduced in [GitLab Ultimate](https://about.gitlab.com/pricing/) 11.0.
+> - In [GitLab 12.9](https://gitlab.com/gitlab-org/gitlab/issues/198062), Timelines were moved to the Premium tier.
Roadmap supports the following date ranges:
@@ -68,11 +75,7 @@ the timeline header represent the days of the week.
## Timeline bar for an epic
-The timeline bar indicates the approximate position of an epic based on its start
-and due date. If an epic doesn't have a due date, the timeline bar fades
-away towards the future. Similarly, if an epic doesn't have a start date, the
-timeline bar becomes more visible as it approaches the epic's due date on the
-timeline.
+The timeline bar indicates the approximate position of an epic based on its start and due date.
<!-- ## Troubleshooting
diff --git a/doc/user/group/saml_sso/index.md b/doc/user/group/saml_sso/index.md
index 6fa7a0397f4..25493a42d83 100644
--- a/doc/user/group/saml_sso/index.md
+++ b/doc/user/group/saml_sso/index.md
@@ -51,10 +51,8 @@ We recommend setting the NameID format to `Persistent` unless using a field (suc
### SSO enforcement
-SSO enforcement was:
-
-- [Introduced in GitLab 11.8](https://gitlab.com/gitlab-org/gitlab/issues/5291).
-- [Improved upon in GitLab 11.11 with ongoing enforcement in the GitLab UI](https://gitlab.com/gitlab-org/gitlab/issues/9255).
+- [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/5291) in GitLab 11.8.
+- [Improved](https://gitlab.com/gitlab-org/gitlab/issues/9255) in GitLab 11.11 with ongoing enforcement in the GitLab UI.
With this option enabled, users must use your group's GitLab single sign on URL to be added to the group or be added via SCIM. Users cannot be added manually, and may only access project/group resources via the UI by signing in through the SSO URL.
@@ -62,9 +60,11 @@ However, users will not be prompted to log via SSO on each visit. GitLab will ch
We intend to add a similar SSO requirement for [Git and API activity](https://gitlab.com/gitlab-org/gitlab/issues/9152) in the future.
+When SSO enforcement is enabled for a group, users cannot share a project in the group outside the top-level group, even if the project is forked.
+
#### Group-managed accounts
-[Introduced in GitLab 12.1](https://gitlab.com/groups/gitlab-org/-/epics/709).
+> [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/709) in GitLab 12.1.
When SSO is being enforced, groups can enable an additional level of protection by enforcing the creation of dedicated user accounts to access the group.
@@ -73,8 +73,13 @@ Without group-managed accounts, users can link their SAML identity with any exis
When this option is enabled:
- All existing and new users in the group will be required to log in via the SSO URL associated with the group.
-- On successfully authenticating, GitLab will prompt the user to create a new, dedicated account using the email address received from the configured identity provider.
- After the group-managed account has been created, group activity will require the use of this user account.
+- Users can't share a project in the group outside the top-level group (also applies to forked projects).
+
+Upon successful authentication, GitLab prompts the user with options, based on the email address received from the configured identity provider:
+
+- To create a unique account with the newly received email address.
+- If the received email address matches one of the user's verified GitLab email addresses, the option to convert the existing account to a group-managed account. ([Introduced in GitLab 12.9](https://gitlab.com/gitlab-org/gitlab/issues/13481).)
Since use of the group-managed account requires the use of SSO, users of group-managed accounts will lose access to these accounts when they are no longer able to authenticate with the connected identity provider. In the case of an offboarded employee who has been removed from your identity provider:
@@ -83,7 +88,7 @@ Since use of the group-managed account requires the use of SSO, users of group-m
##### Credentials inventory for Group-managed accounts **(ULTIMATE)**
-> [Introduced in GitLab 12.8](https://gitlab.com/gitlab-org/gitlab/issues/38133)
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/38133) in GitLab 12.8.
Owners who manage user accounts in a group can view the following details of personal access tokens and SSH keys:
@@ -95,6 +100,25 @@ To access the Credentials inventory of a group, navigate to **{shield}** **Secur
This feature is similar to the [Credentials inventory for self-managed instances](../../admin_area/credentials_inventory.md).
+##### Outer forks restriction for Group-managed accounts
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/34648) in GitLab 12.9.
+
+Groups with group-managed accounts enabled can allow or disallow forking of projects outside of the root group
+by using a separate toggle. If forking is disallowed, any project of the given root group or its subgroups can be forked
+only to a subgroup of the same root group.
+
+##### Other restrictions for Group-managed accounts
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/12420) in GitLab 12.9.
+
+Projects within groups with group-managed accounts enabled cannot be shared with:
+
+- Groups outside of the parent group.
+- Members who are not users managed by this group.
+
+This restriction also applies to projects forked from or to those groups.
+
#### Assertions
When using group-managed accounts, the following user details need to be passed to GitLab as SAML
@@ -171,7 +195,7 @@ NOTE: **Note:** GitLab is unable to provide support for IdPs that are not listed
| JumpCloud | [Single Sign On (SSO) with GitLab](https://support.jumpcloud.com/support/s/article/single-sign-on-sso-with-gitlab-2019-08-21-10-36-47) |
| Okta | [Setting up a SAML application in Okta](https://developer.okta.com/docs/guides/saml-application-setup/overview/) |
| OneLogin | [Use the OneLogin SAML Test Connector](https://onelogin.service-now.com/support?id=kb_article&sys_id=93f95543db109700d5505eea4b96198f) |
-| Ping Identity | [Add and configure a new SAML application](https://support.pingidentity.com/s/document-item?bundleId=pingone&topicId=xsh1564020480660-1.html) |
+| Ping One for Enterprise | [Add and configure a new SAML application](https://support.pingidentity.com/s/document-item?bundleId=pingone&topicId=xsh1564020480660-1.html) |
When [configuring your identify provider](#configuring-your-identity-provider), please consider the notes below for specific providers to help avoid common issues and as a guide for terminology used.
@@ -222,6 +246,29 @@ Recommended `NameID` value: `OneLogin ID`.
Set parameters according to the [assertions table](#assertions).
+### Additional setup options
+
+GitLab [isn't limited to the SAML providers listed above](#my-identity-provider-isnt-listed), but your Identity Provider may require additional configuration, such as the following:
+
+| Field | Value | Notes |
+|-------|-------|-------|
+| SAML Profile | Web browser SSO profile | GitLab uses SAML to sign users in via their browser. We don't make requests direct to the Identity Provider. |
+| SAML Request Binding | HTTP Redirect | GitLab (the service provider) redirects users to your Identity Provider with a base64 encoded `SAMLRequest` HTTP parameter. |
+| SAML Response Binding | HTTP POST | Your Identity Provider responds to users with an HTTP form including the `SAMLResponse`, which a user's browser submits back to GitLab. |
+| Sign SAML Response | Yes | We require this to prevent tampering. |
+| X509 Certificate in response | Yes | This is used to sign the response and checked against the provided fingerprint. |
+| Fingerprint Algorithm | SHA-1 | We need a SHA-1 hash of the certificate used to sign the SAML Response. |
+| Signature Algorithm | SHA-1/SHA-256/SHA-384/SHA-512 | Also known as the Digest Method, this can be specified in the SAML response. It determines how a response is signed. |
+| Encrypt SAML Assertion | No | TLS is used between your Identity Provider, the user's browser, and GitLab. |
+| Sign SAML Assertion | Optional | We don't require Assertions to be signed. We validate their integrity by requiring the whole response to be signed. |
+| Check SAML Request Signature | No | GitLab does not sign SAML requests, but does check the signature on the SAML response. |
+| Default RelayState | Optional | The URL users should end up on after signing in via a button on your Identity Provider. |
+| NameID Format | `Persistent` | See [details above](#nameid-format). |
+| Additional URLs | | You may need to use the `Identifier` or `Assertion consumer service URL` in other fields on some providers. |
+| Single Sign Out URL | | Not supported |
+
+If the information you need isn't listed above, you may wish to check our [troubleshooting docs below](#i-need-additional-information-to-configure-my-identity-provider).
+
## Linking SAML to your existing GitLab.com account
To link SAML to your existing GitLab.com account:
@@ -254,7 +301,7 @@ For example, to unlink the `MyOrg` account, the following **Disconnect** button
| Term | Description |
|------|-------------|
-| Identity Provider | The service which manages your user identities such as ADFS, Okta, Onelogin or Ping Identity. |
+| Identity Provider | The service which manages your user identities such as ADFS, Okta, Onelogin, or Ping Identity. |
| Service Provider | SAML considers GitLab to be a service provider. |
| Assertion | A piece of information about a user's identity, such as their name or role. Also know as claims or attributes. |
| SSO | Single Sign On. |
@@ -287,6 +334,8 @@ For convenience, we've included some [example resources](../../../administration
In troubleshooting the Group SAML setup, any authenticated user can use the API to verify the NameID GitLab already has linked to the user by visiting [https://gitlab.com/api/v4/user](https://gitlab.com/api/v4/user) and checking the `extern_uid` under identities.
+Similarly, group members of a role with the appropriate permissions can make use of the [members API](../../../api/members.md) to view group SAML identity information for members of the group.
+
This can then be compared to the [NameID](#nameid) being sent by the Identity Provider by decoding the message with a [SAML debugging tool](#saml-debugging-tools). We require that these match in order to identify users.
### Message: "SAML authentication failed: Extern uid has already been taken"
@@ -306,3 +355,20 @@ To change which identity you sign in with, you can [unlink the previous SAML ide
Getting both of these errors at the same time suggests the NameID capitalization provided by the Identity Provider didn't exactly match the previous value for that user.
This can be prevented by configuring the [NameID](#nameid) to return a consistent value. Fixing this for an individual user involves [unlinking SAML in the GitLab account](#unlinking-accounts), although this will cause group membership and Todos to be lost.
+
+### My identity provider isn't listed
+
+Not a problem: the SAML standard means that a wide range of identity providers will work with GitLab. Unfortunately, we aren't familiar with all of them, so we can only offer support for configuring the [listed providers](#providers).
+
+### I need additional information to configure my identity provider
+
+Many SAML terms can vary between providers. It is possible that the information you are looking for is listed under another name.
+
+For more information, start with your Identity Provider's documentation. Look for their options and examples to see how they configure SAML. This can provide hints on what you'll need to configure GitLab to work with these providers.
+
+It can also help to look at our [more detailed docs for self-managed GitLab](../../../integration/saml.md).
+SAML configuration for GitLab.com is mostly the same as for self-managed instances.
+However, self-managed GitLab instances use a configuration file that supports more options as described in the external [OmniAuth SAML documentation](https://github.com/omniauth/omniauth-saml/).
+Internally that uses the [`ruby-saml` library](https://github.com/onelogin/ruby-saml), so we sometimes check there to verify low level details of less commonly used options.
+
+It can also help to compare the XML response from your provider with our [example XML used for internal testing](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/spec/fixtures/saml/response.xml).
diff --git a/doc/user/group/subgroups/index.md b/doc/user/group/subgroups/index.md
index 997cb1ba6c5..e73623a2f0e 100644
--- a/doc/user/group/subgroups/index.md
+++ b/doc/user/group/subgroups/index.md
@@ -6,7 +6,7 @@ type: reference, howto, concepts
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/2772) in GitLab 9.0.
-Subgroups, also known as nested groups or hierarchical groups, allow you to have up to 20
+GitLab supports up to 20 levels of subgroups, also known as nested groups or hierarchical groups.
levels of groups.
By using subgroups you can do the following:
@@ -64,9 +64,6 @@ Another example of GitLab as a company would be the following:
---
-The maximum subgroups a group can have, including the first one in the
-hierarchy, is 21.
-
When performing actions such as transferring or importing a project between
subgroups, the behavior is the same as when performing these actions at the
`group/project` level.
diff --git a/doc/user/img/markdown_copy_from_spreadsheet_v12_7.png b/doc/user/img/markdown_copy_from_spreadsheet_v12_7.png
index dccd6f10450..637ad49b6b5 100644
--- a/doc/user/img/markdown_copy_from_spreadsheet_v12_7.png
+++ b/doc/user/img/markdown_copy_from_spreadsheet_v12_7.png
Binary files differ
diff --git a/doc/user/img/markdown_paste_table_v12_7.png b/doc/user/img/markdown_paste_table_v12_7.png
index d3ba61da7d7..919599723dd 100644
--- a/doc/user/img/markdown_paste_table_v12_7.png
+++ b/doc/user/img/markdown_paste_table_v12_7.png
Binary files differ
diff --git a/doc/user/incident_management/index.md b/doc/user/incident_management/index.md
index 880083bf815..3ddc6894653 100644
--- a/doc/user/incident_management/index.md
+++ b/doc/user/incident_management/index.md
@@ -39,6 +39,8 @@ To select your issue template for use within Incident Management:
GitLab can react to the alerts that your applications and services may be
triggering by automatically creating issues, and alerting developers via email.
+The emails will be sent to [owners and maintainers](../permissions.md) of the project and will contain details of the alert as well as a link to more information.
+
### Prometheus alerts
Prometheus alerts can be set up in both:
@@ -88,15 +90,16 @@ dropdown box above the upper right corner of the panel:
The options are:
-- [View logs](#view-logs-ultimate) **(ULTIMATE)**
+- [View logs](#view-logs)
- [Download CSV](#download-csv)
-##### View logs **(ULTIMATE)**
+##### View logs
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/201846) in GitLab Ultimate 12.8.
+> [Moved](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/25455) to [GitLab Core](https://about.gitlab.com/pricing/) 12.9.
This can be useful if you are triaging an application incident and need to
-[explore logs](../project/integrations/prometheus.md#view-pod-logs-ultimate)
+[explore logs](../project/integrations/prometheus.md#view-logs-ultimate)
from across your application. It also helps you to understand
what is affecting your application's performance and quickly resolve any problems.
@@ -136,3 +139,5 @@ Incident Management features can be easily enabled & disabled via the Project se
#### Auto-creation
GitLab Issues can automatically be created as a result of an Alert notification. An Issue created this way will contain error information to help you further debug the error.
+
+For [GitLab-managed alerting rules](../project/integrations/prometheus.md#setting-up-alerts-for-prometheus-metrics-ultimate), the issue will include an embedded chart for the query corresponding to the alert. The chart will show an hour of data surrounding the starting point of the incident, 30 minutes before and after.
diff --git a/doc/user/index.md b/doc/user/index.md
index ec8a53b842d..fdcc87aca0b 100644
--- a/doc/user/index.md
+++ b/doc/user/index.md
@@ -14,7 +14,7 @@ and upgrade your GitLab instance.
Admin privileges for [GitLab.com](https://gitlab.com/) are restricted to the GitLab team.
-For more information on configuring GitLab self-managed instances, see [Administrator documentation](../administration/index.md).
+For more information on configuring GitLab self-managed instances, see the [Administrator documentation](../administration/index.md).
## Overview
diff --git a/doc/user/instance_statistics/user_cohorts.md b/doc/user/instance_statistics/user_cohorts.md
index e664c38a21a..a61c4274286 100644
--- a/doc/user/instance_statistics/user_cohorts.md
+++ b/doc/user/instance_statistics/user_cohorts.md
@@ -25,5 +25,5 @@ How do we measure the activity of users? GitLab considers a user active if:
- The user signs in.
- The user has Git activity (whether push or pull).
-- The user visits pages related to Dashboards, Projects, Issues and Merge Requests ([introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/54947) in GitLab 11.8).
+- The user visits pages related to Dashboards, Projects, Issues, and Merge Requests ([introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/54947) in GitLab 11.8).
- The user uses the API.
diff --git a/doc/user/markdown.md b/doc/user/markdown.md
index 7ad810317f0..29163f98fb4 100644
--- a/doc/user/markdown.md
+++ b/doc/user/markdown.md
@@ -131,26 +131,26 @@ Supported formats (named colors are not supported):
Color written inside backticks will be followed by a color "chip":
```markdown
-`#F00`
-`#F00A`
-`#FF0000`
-`#FF0000AA`
-`RGB(0,255,0)`
-`RGB(0%,100%,0%)`
-`RGBA(0,255,0,0.3)`
-`HSL(540,70%,50%)`
-`HSLA(540,70%,50%,0.3)`
-```
-
-`#F00`
-`#F00A`
-`#FF0000`
-`#FF0000AA`
-`RGB(0,255,0)`
-`RGB(0%,100%,0%)`
-`RGBA(0,255,0,0.3)`
-`HSL(540,70%,50%)`
-`HSLA(540,70%,50%,0.3)`
+- `#F00`
+- `#F00A`
+- `#FF0000`
+- `#FF0000AA`
+- `RGB(0,255,0)`
+- `RGB(0%,100%,0%)`
+- `RGBA(0,255,0,0.3)`
+- `HSL(540,70%,50%)`
+- `HSLA(540,70%,50%,0.3)`
+```
+
+- `#F00`
+- `#F00A`
+- `#FF0000`
+- `#FF0000AA`
+- `RGB(0,255,0)`
+- `RGB(0%,100%,0%)`
+- `RGBA(0,255,0,0.3)`
+- `HSL(540,70%,50%)`
+- `HSLA(540,70%,50%,0.3)`
### Diagrams and flowcharts
@@ -165,7 +165,7 @@ Visit the [official page](https://mermaidjs.github.io/) for more details. If you
In order to generate a diagram or flowchart, you should write your text inside the `mermaid` block:
-~~~
+````markdown
```mermaid
graph TD;
A-->B;
@@ -173,7 +173,7 @@ graph TD;
B-->D;
C-->D;
```
-~~~
+````
```mermaid
graph TD;
@@ -185,7 +185,7 @@ graph TD;
Subgraphs can also be included:
-~~~
+````markdown
```mermaid
graph TB
@@ -202,7 +202,7 @@ graph TB
SubGraph1 --> FinalThing[Final Thing]
end
```
-~~~
+````
```mermaid
graph TB
@@ -280,27 +280,27 @@ The following delimiters are supported:
- YAML (`---`):
- ~~~yaml
+ ```yaml
---
title: About Front Matter
example:
language: yaml
---
- ~~~
+ ```
- TOML (`+++`):
- ~~~toml
+ ```toml
+++
title = "About Front Matter"
[example]
language = "toml"
+++
- ~~~
+ ```
- JSON (`;;;`):
- ~~~json
+ ```json
;;;
{
"title": "About Front Matter"
@@ -309,7 +309,7 @@ The following delimiters are supported:
}
}
;;;
- ~~~
+ ```
Other languages are supported by adding a specifier to any of the existing
delimiters. For example:
@@ -364,7 +364,7 @@ Math written between dollar signs `$` will be rendered inline with the text. Mat
inside a [code block](#code-spans-and-blocks) with the language declared as `math`, will be rendered
on a separate line:
-~~~
+````markdown
This math is inline $`a^2+b^2=c^2`$.
This is on a separate line
@@ -372,7 +372,7 @@ This is on a separate line
```math
a^2+b^2=c^2
```
-~~~
+````
This math is inline $`a^2+b^2=c^2`$.
@@ -390,7 +390,7 @@ the [asciidoctor user manual](https://asciidoctor.org/docs/user-manual/#activati
### Special GitLab references
GFM recognizes special GitLab related references. For example, you can easily reference
-an issue, a commit, a team member or even the whole team within a project. GFM will turn
+an issue, a commit, a team member, or even the whole team within a project. GFM will turn
that reference into a link so you can navigate between them easily.
Additionally, GFM recognizes certain cross-project references, and also has a shorthand
@@ -408,7 +408,6 @@ GFM will recognize the following:
| merge request | `!123` | `namespace/project!123` | `project!123` |
| snippet | `$123` | `namespace/project$123` | `project$123` |
| epic **(ULTIMATE)** | `&123` | `group1/subgroup&123` | |
-| design **(PREMIUM)** | `#123[file.jpg]` or `#123["file.png"]` | `group1/subgroup#123[file.png]` | `project#123[file.png]` |
| label by ID | `~123` | `namespace/project~123` | `project~123` |
| one-word label by name | `~bug` | `namespace/project~bug` | `project~bug` |
| multi-word label by name | `~"feature request"` | `namespace/project~"feature request"` | `project~"feature request"` |
@@ -421,6 +420,12 @@ GFM will recognize the following:
| repository file references | `[README](doc/README)` | | |
| repository file line references | `[README](doc/README#L13)` | | |
+In addition to this, links to some objects are also recognized and formatted. Some examples of these are:
+
+- Comments on issues: `"https://gitlab.com/gitlab-org/gitlab/-/issues/1234#note_101075757"`, which will be rendered as `#1234 (note1)`.
+- The designs tab of an issue: `"https://gitlab.com/gitlab-org/gitlab/issues/1234/designs"`, which will be rendered as `#1234 (designs)`.
+ **(PREMIUM)**
+
### Task lists
> If this is not rendered correctly, [view it in GitLab itself](https://gitlab.com/gitlab-org/gitlab/blob/master/doc/user/markdown.md#task-lists).
@@ -581,7 +586,7 @@ Quote break.
GFM extends the standard Markdown standard by also supporting multiline blockquotes
fenced by `>>>`:
-```
+```markdown
>>>
If you paste a message from somewhere else
@@ -613,12 +618,12 @@ Inline `code` has `back-ticks around` it.
---
-Similarly, a whole block of code can be fenced with triple backticks ```` ``` ````,
+Similarly, a whole block of code can be fenced with triple backticks (```` ``` ````),
triple tildes (`~~~`), or indented 4 or more spaces to achieve a similar effect for
a larger body of code.
-~~~
-```
+````markdown
+```python
def function():
#indenting works just fine in the fenced code block
s = "Python code"
@@ -628,9 +633,9 @@ def function():
Using 4 spaces
is like using
3-backtick fences.
-~~~
+````
-```
+```plaintext
~~~
Tildes are OK too.
~~~
@@ -638,22 +643,22 @@ Tildes are OK too.
The three examples above render as:
-```
+```python
def function():
#indenting works just fine in the fenced code block
s = "Python code"
print s
```
-```
+```plaintext
Using 4 spaces
is like using
3-backtick fences.
```
-~~~
+```plaintext
Tildes are OK too.
-~~~
+```
#### Colored code and syntax highlighting
@@ -665,10 +670,10 @@ highlighting in code blocks. For a list of supported languages visit the
Syntax highlighting is only supported in code blocks, it is not possible to highlight
code when it is inline.
-Blocks of code are fenced by lines with three back-ticks ```` ``` ```` or three tildes `~~~`, and have
+Blocks of code are fenced by lines with three back-ticks (```` ``` ````) or three tildes (`~~~`), and have
the language identified at the end of the first fence:
-~~~
+````markdown
```javascript
var s = "JavaScript syntax highlighting";
alert(s);
@@ -692,7 +697,7 @@ No language indicated, so no syntax highlighting.
s = "There is no highlighting for this."
But let's throw in a <b>tag</b>.
```
-~~~
+````
The four examples above render as:
@@ -714,7 +719,7 @@ markdown = Redcarpet.new("Hello World!")
puts markdown.to_html
```
-```
+```plaintext
No language indicated, so no syntax highlighting.
s = "There is no highlighting for this."
But let's throw in a <b>tag</b>.
@@ -756,7 +761,7 @@ dealing with code and names that often appear with multiple underscores. As a re
GFM extends the standard Markdown standard by ignoring multiple underlines in words,
to allow better rendering of Markdown documents discussing code:
-```md
+```markdown
perform_complicated_task
do_this_and_do_that_and_another_thing
@@ -852,7 +857,7 @@ The IDs are generated from the content of the header according to the following
Example:
-```
+```markdown
# This header has spaces in it
## This header has a :thumbsup: in it
# This header has Unicode in it: 한글
@@ -973,7 +978,7 @@ class for the list of allowed HTML tags and attributes. In addition to the defau
<dd>Is something people use sometimes.</dd>
<dt>Markdown in HTML</dt>
- <dd>Does *not* work **very** well. HTML <em>tags</em> will <b>always</b> work.</dd>
+ <dd>Does *not* work **very** well. HTML <em>tags</em> will <b>work</b>, in most cases.</dd>
</dl>
```
@@ -982,7 +987,7 @@ class for the list of allowed HTML tags and attributes. In addition to the defau
<dd>Is something people use sometimes.</dd>
<dt>Markdown in HTML</dt>
- <dd>Does *not* work **very** well. HTML <em>tags</em> will <b>always</b> work.</dd>
+ <dd>Does *not* work **very** well. HTML <em>tags</em> will <b>work</b>, in most cases.</dd>
</dl>
---
@@ -993,12 +998,12 @@ are separated into their own lines:
```html
<dl>
<dt>Markdown in HTML</dt>
- <dd>Does *not* work **very** well. HTML tags will always work.</dd>
+ <dd>Does *not* work **very** well. HTML tags will work, in most cases.</dd>
<dt>Markdown in HTML</dt>
<dd>
- Does *not* work **very** well. HTML tags will always work.
+ Does *not* work **very** well. HTML tags will work, in most cases.
</dd>
</dl>
@@ -1008,12 +1013,12 @@ are separated into their own lines:
<dl>
<dt>Markdown in HTML</dt>
- <dd>Does *not* work **very** well. HTML tags will always work.</dd>
+ <dd>Does *not* work **very** well. HTML tags will work, in most cases.</dd>
<dt>Markdown in HTML</dt>
<dd>
- Does <em>not</em> work <b>very</b> well. HTML tags will always work.
+ Does <em>not</em> work <b>very</b> well. HTML tags will work, in most cases.
</dd>
</dl>
@@ -1148,7 +1153,7 @@ A new line due to the previous backslash.
There are two ways to create links, inline-style and reference-style:
-```md
+```markdown
- This is an [inline-style link](https://www.google.com)
- This is a [link to a repository file in the same directory](index.md)
- This is a [relative link to a readme one directory higher](../README.md)
@@ -1319,7 +1324,7 @@ the paragraph will appear outside the list, instead of properly indented under t
Example:
-```
+```markdown
1. First ordered list item
Paragraph of first item.
diff --git a/doc/user/packages/conan_repository/index.md b/doc/user/packages/conan_repository/index.md
index c21e539f332..3b993303391 100644
--- a/doc/user/packages/conan_repository/index.md
+++ b/doc/user/packages/conan_repository/index.md
@@ -39,7 +39,7 @@ conan --version
You should see the Conan version printed in the output:
-```
+```plaintext
Conan version 1.20.5
```
@@ -198,7 +198,7 @@ Add the Conan recipe to the `[requires]` section of the file:
cmake
```
-Next, from the root of your project, create a build directory and navigate to it:
+Next, create a build directory from the root of your project and navigate to it:
```shell
mkdir build && cd build
diff --git a/doc/user/packages/container_registry/img/expiration-policy-app.png b/doc/user/packages/container_registry/img/expiration-policy-app.png
index e353fe27b2a..e2d3d668e38 100644
--- a/doc/user/packages/container_registry/img/expiration-policy-app.png
+++ b/doc/user/packages/container_registry/img/expiration-policy-app.png
Binary files differ
diff --git a/doc/user/packages/index.md b/doc/user/packages/index.md
index 142541b28cf..78ddc06173c 100644
--- a/doc/user/packages/index.md
+++ b/doc/user/packages/index.md
@@ -13,7 +13,7 @@ The Packages feature allows GitLab to act as a repository for the following:
| [Conan Repository](conan_repository/index.md) **(PREMIUM)** | The GitLab Conan Repository enables every project in GitLab to have its own space to store [Conan](https://conan.io/) packages. | 12.6+ |
| [Maven Repository](maven_repository/index.md) **(PREMIUM)** | The GitLab Maven Repository enables every project in GitLab to have its own space to store [Maven](https://maven.apache.org/) packages. | 11.3+ |
| [NPM Registry](npm_registry/index.md) **(PREMIUM)** | The GitLab NPM Registry enables every project in GitLab to have its own space to store [NPM](https://www.npmjs.com/) packages. | 11.7+ |
-| [NuGet Repository](nuget_repository/index.md) **(PREMIUM)** | *PLANNED* The GitLab NuGet Repository will enable every project in GitLab to have its own space to store [NuGet](https://www.nuget.org/) packages. | 12.8+ |
+| [NuGet Repository](nuget_repository/index.md) **(PREMIUM)** | The GitLab NuGet Repository enables every project in GitLab to have its own space to store [NuGet](https://www.nuget.org/) packages. | 12.8+ |
## Suggested contributions
diff --git a/doc/user/packages/maven_repository/index.md b/doc/user/packages/maven_repository/index.md
index c1f943017bc..f4e2b71dd3c 100644
--- a/doc/user/packages/maven_repository/index.md
+++ b/doc/user/packages/maven_repository/index.md
@@ -60,7 +60,7 @@ published to the GitLab Package Registry.
Start by opening your terminal and creating a directory where you would like to
store the project in your environment. From inside the directory, you can run
-the following Maven command to initalize a new package:
+the following Maven command to initialize a new package:
```shell
mvn archetype:generate -DgroupId=com.mycompany.mydepartment -DartifactId=my-project -DarchetypeArtifactId=maven-archetype-quickstart -DinteractiveMode=false
@@ -71,7 +71,7 @@ The arguments are as follows:
- `DgroupId`: A unique string that identifies your package. You should follow
the [Maven naming conventions](https://maven.apache.org/guides/mini/guide-naming-conventions.html).
- `DartifactId`: The name of the JAR, appended to the end of the `DgroupId`.
-- `DarchetypeArtifactId`: The archetype used to create the intial structure of
+- `DarchetypeArtifactId`: The archetype used to create the initial structure of
the project.
- `DinteractiveMode`: Create the project using batch mode (optional).
@@ -207,7 +207,7 @@ The `id` must be the same with what you
Replace `PROJECT_ID` with your project ID which can be found on the home page
of your project.
-If you have a self-hosted GitLab installation, replace `gitlab.com` with your
+If you have a self-managed GitLab installation, replace `gitlab.com` with your
domain name.
NOTE: **Note:**
@@ -260,7 +260,7 @@ The `id` must be the same with what you
Replace `my-group` with your group name and `PROJECT_ID` with your project ID
which can be found on the home page of your project.
-If you have a self-hosted GitLab installation, replace `gitlab.com` with your
+If you have a self-managed GitLab installation, replace `gitlab.com` with your
domain name.
NOTE: **Note:**
@@ -315,7 +315,7 @@ The `id` must be the same with what you
Replace `PROJECT_ID` with your project ID which can be found on the home page
of your project.
-If you have a self-hosted GitLab installation, replace `gitlab.com` with your
+If you have a self-managed GitLab installation, replace `gitlab.com` with your
domain name.
NOTE: **Note:**
diff --git a/doc/user/packages/npm_registry/index.md b/doc/user/packages/npm_registry/index.md
index 7504137fbea..5801a30cc4e 100644
--- a/doc/user/packages/npm_registry/index.md
+++ b/doc/user/packages/npm_registry/index.md
@@ -49,7 +49,7 @@ npm --version
You should see the NPM version printed in the output:
-```
+```plaintext
6.10.3
```
@@ -67,7 +67,7 @@ yarn --version
You should see the version printed like so:
-```
+```plaintext
1.19.1
```
@@ -117,17 +117,17 @@ npm config set @foo:registry https://gitlab.com/api/v4/packages/npm/
# Add the token for the scoped packages URL. This will allow you to download
# `@foo/` packages from private projects.
-npm config set '//gitlab.com/api/v4/projects/<your_project_id>/packages/npm/:_authToken' "<your_token>"
+npm config set '//gitlab.com/api/v4/packages/npm/:_authToken' "<your_token>"
# Add token for uploading to the registry. Replace <your_project_id>
# with the project you want your package to be uploaded to.
-npm config set '//gitlab.com/api/v4/packages/npm/:_authToken' "<your_token>"
+npm config set '//gitlab.com/api/v4/projects/<your_project_id>/packages/npm/:_authToken' "<your_token>"
```
Replace `<your_project_id>` with your project ID, which can be found on the home page
of your project, and `<your_token>` with your personal access token.
-If you have a self-hosted GitLab installation, replace `gitlab.com` with your
+If you have a self-managed GitLab installation, replace `gitlab.com` with your
domain name.
You should now be able to download and upload NPM packages to your project.
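For example, one way to confirm that the scope points at the GitLab registry (a sketch, reusing the `@foo` scope from the commands above):

```shell
# Prints the registry configured for the @foo scope; it should be the GitLab NPM endpoint
npm config get @foo:registry
```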
@@ -185,7 +185,7 @@ for NPM. To do this, add the following section to the bottom of `package.json`:
Replace `<your_project_id>` with your project ID, which can be found on the home
page of your project, and replace `@foo` with your own scope.
-If you have a self-hosted GitLab installation, replace `gitlab.com` with your
+If you have a self-managed GitLab installation, replace `gitlab.com` with your
domain name.
Once you have enabled it and set up [authentication](#authenticating-to-the-gitlab-npm-registry),
@@ -269,6 +269,14 @@ Or if you're using Yarn:
yarn add @my-project-scope/my-package
```
+### Forwarding requests to npmjs.org
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/55344) in [GitLab Premium](https://about.gitlab.com/pricing/) 12.9.
+
+By default, when an NPM package is not found in the GitLab NPM Registry, the request will be forwarded to [npmjs.com](https://www.npmjs.com/).
+
+Administrators can disable this behavior in the [Continuous Integration settings](../../admin_area/settings/continuous_integration.md).
+
## Removing a package
In the packages view of your project page, you can delete packages by clicking
@@ -280,7 +288,7 @@ page.
To work with NPM commands within [GitLab CI](./../../../ci/README.md), you can use
`CI_JOB_TOKEN` in place of the personal access token in your commands.
-A simple example `gitlab-ci.yml` file for publishing NPM packages:
+A simple example `.gitlab-ci.yml` file for publishing NPM packages:
```yml
image: node:latest
diff --git a/doc/user/packages/nuget_repository/img/visual_studio_adding_nuget_source.png b/doc/user/packages/nuget_repository/img/visual_studio_adding_nuget_source.png
index 94b037ced42..7397403f4bf 100644
--- a/doc/user/packages/nuget_repository/img/visual_studio_adding_nuget_source.png
+++ b/doc/user/packages/nuget_repository/img/visual_studio_adding_nuget_source.png
Binary files differ
diff --git a/doc/user/packages/nuget_repository/img/visual_studio_nuget_source_added.png b/doc/user/packages/nuget_repository/img/visual_studio_nuget_source_added.png
index d2f4791a25a..e4f6068f28c 100644
--- a/doc/user/packages/nuget_repository/img/visual_studio_nuget_source_added.png
+++ b/doc/user/packages/nuget_repository/img/visual_studio_nuget_source_added.png
Binary files differ
diff --git a/doc/user/packages/nuget_repository/index.md b/doc/user/packages/nuget_repository/index.md
index d32683e1d40..fdc4c6894b9 100644
--- a/doc/user/packages/nuget_repository/index.md
+++ b/doc/user/packages/nuget_repository/index.md
@@ -2,18 +2,21 @@
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/20050) in [GitLab Premium](https://about.gitlab.com/pricing/) 12.8.
-CAUTION: **Work in progress**
-This feature is in development, sections on uploading and installing packages will be coming soon, please follow along and help us make sure we're building the right solution for you in the [NuGet issue](https://gitlab.com/gitlab-org/gitlab/issues/20050).
-
With the GitLab NuGet Repository, every project can have its own space to store NuGet packages.
-The GitLab NuGet Repository works with either [nuget CLI](https://www.nuget.org/) or [Visual Studio](https://visualstudio.microsoft.com/vs/).
+The GitLab NuGet Repository works with:
+
+- [NuGet CLI](https://docs.microsoft.com/en-us/nuget/reference/nuget-exe-cli-reference)
+- [.NET Core CLI](https://docs.microsoft.com/en-us/dotnet/core/tools/)
+- [Visual Studio](https://visualstudio.microsoft.com/vs/)
## Setting up your development environment
-You will need [nuget CLI](https://www.nuget.org/) 5.2 or above. Previous versions have not been tested against the GitLab NuGet Repository and might not work. You can install it by visiting the [downloads page](https://www.nuget.org/downloads).
+You will need [NuGet CLI 5.2 or later](https://www.nuget.org/downloads). Earlier versions have not been tested
+against the GitLab NuGet Repository and might not work. If you have [Visual Studio](https://visualstudio.microsoft.com/vs/),
+NuGet CLI is probably already installed.
-If you have [Visual Studio](https://visualstudio.microsoft.com/vs/), [nuget CLI](https://www.nuget.org/) is probably already installed.
+Alternatively, you can use [.NET SDK 3.0 or later](https://dotnet.microsoft.com/download/dotnet-core/3.0), which installs NuGet CLI.
You can confirm that [nuget CLI](https://www.nuget.org/) is properly installed with:
@@ -23,7 +26,7 @@ nuget help
You should see something similar to:
-```
+```plaintext
NuGet Version: 5.2.0.6090
usage: NuGet <command> [args] [options]
Type 'NuGet help <command>' for help on a specific command.
@@ -37,7 +40,7 @@ Available commands:
NOTE: **Note:**
This option is available only if your GitLab administrator has
-[enabled support for the NuGet Repository](../../../administration/packages/index.md).**(PREMIUM ONLY)**
+[enabled support for the Package Registry](../../../administration/packages/index.md). **(PREMIUM ONLY)**
After the NuGet Repository is enabled, it will be available for all new projects
by default. To enable it for existing projects, or if you want to disable it:
@@ -48,7 +51,7 @@ by default. To enable it for existing projects, or if you want to disable it:
You should then be able to see the **Packages** section on the left sidebar.
-## Adding the GitLab NuGet Repository as a source to nuget
+## Adding the GitLab NuGet Repository as a source to NuGet
You will need the following:
@@ -57,23 +60,23 @@ You will need the following:
- A suitable name for your source.
- Your project ID which can be found on the home page of your project.
-You can now add a new source to nuget either using [nuget CLI](https://www.nuget.org/) or [Visual Studio](https://visualstudio.microsoft.com/vs/).
+You can now add a new source to NuGet with:
-### Using nuget CLI
+- [NuGet CLI](#add-nuget-repository-source-with-nuget-cli)
+- [Visual Studio](#add-nuget-repository-source-with-visual-studio)
+- [.NET CLI](#add-nuget-repository-source-with-net-cli)
+
+### Add NuGet Repository source with NuGet CLI
To add the GitLab NuGet Repository as a source with `nuget`:
```shell
-nuget source Add -Name <source_name> -Source "https://example.gitlab.com/api/v4/projects/<your_project_id>/packages/nuget/index.json" -UserName <gitlab_username> -Password <gitlab_token>
+nuget source Add -Name <source_name> -Source "https://gitlab-instance.example.com/api/v4/projects/<your_project_id>/packages/nuget/index.json" -UserName <gitlab_username> -Password <gitlab_personal_access_token>
```
-Replace:
+Where:
-- `<source_name>` with your desired source name.
-- `<your_project_id>` with your project ID.
-- `<gitlab-username>` with your GitLab username.
-- `<gitlab-token>` with your personal access token.
-- `example.gitlab.com` with the URL of the GitLab instance you're using.
+- `<source_name>` is your desired source name.
For example:
@@ -81,7 +84,7 @@ For example:
nuget source Add -Name "GitLab" -Source "https//gitlab.example/api/v4/projects/10/packages/nuget/index.json" -UserName carol -Password 12345678asdf
```
-### Using Visual Studio
+### Add NuGet Repository source with Visual Studio
1. Open [Visual Studio](https://visualstudio.microsoft.com/vs/).
1. Open the **FILE > OPTIONS** (Windows) or **Visual Studio > Preferences** (Mac OS).
@@ -91,7 +94,7 @@ nuget source Add -Name "GitLab" -Source "https//gitlab.example/api/v4/projects/1
- **Name**: Desired name for the source
- **Location**: `https://gitlab.com/api/v4/projects/<your_project_id>/packages/nuget/index.json`
- Replace `<your_project_id>` with your project ID.
- - If you have a self-hosted GitLab installation, replace `gitlab.com` with your domain name.
+ - If you have a self-managed GitLab installation, replace `gitlab.com` with your domain name.
- **Username**: Your GitLab username
- **Password**: Your personal access token
@@ -101,4 +104,110 @@ nuget source Add -Name "GitLab" -Source "https//gitlab.example/api/v4/projects/1
![Visual Studio NuGet source added](img/visual_studio_nuget_source_added.png)
-In case of any warning, please make sure that the **Location**, **Username** and **Password** are correct.
+If a warning is displayed, make sure that the **Location**, **Username**, and **Password** are correct.
+
+### Add NuGet Repository source with .NET CLI
+
+To add the GitLab NuGet Repository as a source for .NET, create a file named `nuget.config` in the root of your project with the following content:
+
+```xml
+<?xml version="1.0" encoding="utf-8"?>
+<configuration>
+ <packageSources>
+ <clear />
+ <add key="gitlab" value="https://gitlab-instance.example.com/api/v4/projects/<your_project_id>/packages/nuget/index.json" />
+ </packageSources>
+ <packageSourceCredentials>
+ <gitlab>
+ <add key="Username" value="<gitlab_username>" />
+ <add key="ClearTextPassword" value="<gitlab_personal_access_token>" />
+ </gitlab>
+ </packageSourceCredentials>
+</configuration>
+```
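+
+With that file in place, a sketch of typical usage: restoring dependencies from the project root should pull packages through the configured GitLab source.
+
+```shell
+# NuGet picks up nuget.config from the project directory automatically
+dotnet restore
+```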
+
+## Uploading packages
+
+When uploading packages, note that:
+
+- The maximum allowed package size is 50 megabytes.
+- If you upload the same package with the same version multiple times, each consecutive upload
+ is saved as a separate file. When installing a package, GitLab will serve the most recent file.
+- Packages uploaded to GitLab are not displayed in the packages UI of your project
+  immediately. It can take up to 10 minutes to process a package.
+
+### Upload packages with NuGet CLI
+
+This section assumes that your project is properly built and you already [created a NuGet package with NuGet CLI](https://docs.microsoft.com/en-us/nuget/create-packages/creating-a-package).
+Upload your package using the following command:
+
+```shell
+nuget push <package_file> -Source <source_name>
+```
+
+Where:
+
+- `<package_file>` is your package filename, ending in `.nupkg`.
+- `<source_name>` is the [source name used during setup](#adding-the-gitlab-nuget-repository-as-a-source-to-nuget).
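+
+For example, assuming the source was named `GitLab` during setup and the package file is the hypothetical `MyPackage.1.0.0.nupkg`:
+
+```shell
+# Pushes the package to the GitLab source added earlier
+nuget push MyPackage.1.0.0.nupkg -Source GitLab
+```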
+
+### Upload packages with .NET CLI
+
+This section assumes that your project is properly built and you already [created a NuGet package with .NET CLI](https://docs.microsoft.com/en-us/nuget/create-packages/creating-a-package-dotnet-cli).
+Upload your package using the following command:
+
+```shell
+dotnet nuget push <package_file> --source <source_name>
+```
+
+Where:
+
+- `<package_file>` is your package filename, ending in `.nupkg`.
+- `<source_name>` is the [source name used during setup](#adding-the-gitlab-nuget-repository-as-a-source-to-nuget).
+
+For example:
+
+```shell
+dotnet nuget push MyPackage.1.0.0.nupkg --source gitlab
+```
+
+## Install packages
+
+### Install a package with NuGet CLI
+
+CAUTION: **Warning:**
+By default, `nuget` checks the official source at `nuget.org` first. If you have a package in the
+GitLab NuGet Repository with the same name as a package at `nuget.org`, you must specify the source
+name or the wrong package will be installed.
+
+Install the latest version of a package using the following command:
+
+```shell
+nuget install <package_id> -OutputDirectory <output_directory> \
+ -Version <package_version> \
+ -Source <source_name>
+```
+
+Where:
+
+- `<package_id>` is the package ID.
+- `<output_directory>` is the output directory, where the package will be installed.
+- `<package_version>` (Optional) is the package version.
+- `<source_name>` (Optional) is the source name.
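+
+For example, assuming a source named `GitLab` and a hypothetical package called `MyPackage`:
+
+```shell
+# Installs the latest MyPackage from the GitLab source into the packages directory
+nuget install MyPackage -OutputDirectory packages -Source GitLab
+```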
+
+### Install a package with .NET CLI
+
+CAUTION: **Warning:**
+If you have a package in the GitLab NuGet Repository with the same name as a package at a different source,
+you should verify the order in which `dotnet` checks sources during install. This is defined in the
+`nuget.config` file.
+
+Install the latest version of a package using the following command:
+
+```shell
+dotnet add package <package_id> \
+ -v <package_version>
+```
+
+Where:
+
+- `<package_id>` is the package ID.
+- `<package_version>` (Optional) is the package version.
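+
+For example, to add a hypothetical package called `MyPackage` at version `1.0.0` to your project:
+
+```shell
+# Adds the package reference to the current project
+dotnet add package MyPackage -v 1.0.0
+```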
diff --git a/doc/user/packages/workflows/monorepo.md b/doc/user/packages/workflows/monorepo.md
index 0c7fb4a1a20..5acd4fd0735 100644
--- a/doc/user/packages/workflows/monorepo.md
+++ b/doc/user/packages/workflows/monorepo.md
@@ -38,14 +38,14 @@ If you follow the instructions you can publish `MyProject` by running
`npm publish` from the root directory.
Publishing `Foo` is almost exactly the same; you simply have to follow the steps
-while in the `Foo` directory. `Foo` will need it's own `package.json` file,
-which can be added manually or using `npm init`. And it will need it's own
+while in the `Foo` directory. `Foo` will need its own `package.json` file,
+which can be added manually or using `npm init`. And it will need its own
configuration settings. Since you are publishing to the same place, if you
used `npm config set` to set the registry for the parent project, then no
additional setup is necessary. If you used a `.npmrc` file, you will need an
additional `.npmrc` file in the `Foo` directory (be sure to add `.npmrc` files
to the `.gitignore` file or use environment variables in place of your access
-tokens to preven them from being exposed). It can be identical to the
+tokens to prevent them from being exposed). It can be identical to the
one you used in `MyProject`. You can now run `npm publish` from the `Foo`
directory and you will be able to publish `Foo` separately from `MyProject`
diff --git a/doc/user/permissions.md b/doc/user/permissions.md
index 8cf60342446..4f7284fb05b 100644
--- a/doc/user/permissions.md
+++ b/doc/user/permissions.md
@@ -9,7 +9,11 @@ particular group or project. If a user is both in a project's group and the
project itself, the highest permission level is used.
On public and internal projects the Guest role is not enforced. All users will
-be able to create issues, leave comments, and clone or download the project code.
+be able to:
+
+- Create issues.
+- Leave comments.
+- Clone or download the project code.
When a member leaves a team's project, all the assigned [Issues](project/issues/index.md) and [Merge Requests](project/merge_requests/index.md)
will be unassigned automatically.
@@ -65,6 +69,7 @@ The following table depicts the various user permission levels in a project.
| See related issues | ✓ | ✓ | ✓ | ✓ | ✓ |
| Create confidential issue | ✓ (*1*) | ✓ | ✓ | ✓ | ✓ |
| View confidential issues | (*2*) | ✓ | ✓ | ✓ | ✓ |
+| View [Releases](project/releases/index.md) | ✓ (*6*) | ✓ | ✓ | ✓ | ✓ |
| Assign issues | | ✓ | ✓ | ✓ | ✓ |
| Label issues | | ✓ | ✓ | ✓ | ✓ |
| Set issue weight | | ✓ | ✓ | ✓ | ✓ |
@@ -79,6 +84,7 @@ The following table depicts the various user permission levels in a project.
| See a list of merge requests | | ✓ | ✓ | ✓ | ✓ |
| View project statistics | | ✓ | ✓ | ✓ | ✓ |
| View Error Tracking list | | ✓ | ✓ | ✓ | ✓ |
+| Create/edit/delete [Releases](project/releases/index.md)| | | ✓ | ✓ | ✓ |
| Pull from [Conan repository](packages/conan_repository/index.md), [Maven repository](packages/maven_repository/index.md), or [NPM registry](packages/npm_registry/index.md) **(PREMIUM)** | | ✓ | ✓ | ✓ | ✓ |
| Publish to [Conan repository](packages/conan_repository/index.md), [Maven repository](packages/maven_repository/index.md), or [NPM registry](packages/npm_registry/index.md) **(PREMIUM)** | | | ✓ | ✓ | ✓ |
| Upload [Design Management](project/issues/design_management.md) files **(PREMIUM)** | | | ✓ | ✓ | ✓ |
@@ -116,6 +122,7 @@ The following table depicts the various user permission levels in a project.
| Turn on/off protected branch push for devs | | | | ✓ | ✓ |
| Enable/disable tag protections | | | | ✓ | ✓ |
| Edit project | | | | ✓ | ✓ |
+| Edit project badges | | | | ✓ | ✓ |
| Add deploy keys to project | | | | ✓ | ✓ |
| Configure project hooks | | | | ✓ | ✓ |
| Manage Runners | | | | ✓ | ✓ |
@@ -141,11 +148,13 @@ The following table depicts the various user permission levels in a project.
| Remove protected branches (*4*) | | | | | |
\* Owner permission is only available at the group or personal namespace level (and for instance admins) and is inherited by its projects.
-(*1*): Guest users are able to perform this action on public and internal projects, but not private projects.
-(*2*): Guest users can only view the confidential issues they created themselves.
-(*3*): If **Public pipelines** is enabled in **Project Settings > CI/CD**.
-(*4*): Not allowed for Guest, Reporter, Developer, Maintainer, or Owner. See [Protected Branches](./project/protected_branches.md).
-(*5*): If the [branch is protected](./project/protected_branches.md#using-the-allowed-to-merge-and-allowed-to-push-settings), this depends on the access Developers and Maintainers are given.
+
+1. Guest users are able to perform this action on public and internal projects, but not private projects.
+1. Guest users can only view the confidential issues they created themselves.
+1. If **Public pipelines** is enabled in **Project Settings > CI/CD**.
+1. Not allowed for Guest, Reporter, Developer, Maintainer, or Owner. See [Protected Branches](./project/protected_branches.md).
+1. If the [branch is protected](./project/protected_branches.md#using-the-allowed-to-merge-and-allowed-to-push-settings), this depends on the access Developers and Maintainers are given.
+1. Guest users can access GitLab [**Releases**](project/releases/index.md) for downloading assets but are not allowed to download the source code nor see repository information like tags and commits.
## Project features permissions
@@ -192,17 +201,6 @@ Confidential issues can be accessed by reporters and higher permission levels,
as well as by guest users that create a confidential issue. To learn more,
read through the documentation on [permissions and access to confidential issues](project/issues/confidential_issues.md#permissions-and-access-to-confidential-issues).
-### Releases permissions
-
-[Project Releases](project/releases/index.md) can be read by project
-members with Reporter, Developer, Maintainer, and Owner permissions.
-Guest users can access Release pages for downloading assets but
-are not allowed to download the source code nor see repository
-information such as tags and commits.
-
-Releases can be created, updated, or deleted via [Releases APIs](../api/releases/index.md)
-by project Developers, Maintainers, and Owners.
-
## Group members permissions
NOTE: **Note:**
@@ -234,12 +232,12 @@ group.
| Disable notification emails | | | | | ✓ |
| View/manage group-level Kubernetes cluster | | | | ✓ | ✓ |
-- (1): Groups can be set to [allow either Owners or Owners and
+1. Groups can be set to [allow either Owners or Owners and
Maintainers to create subgroups](group/subgroups/index.md#creating-a-subgroup)
-- (2): Introduced in GitLab 12.2.
-- (3): Default project creation role can be changed at:
- - The [instance level](admin_area/settings/visibility_and_access_controls.md#default-project-creation-protection).
- - The [group level](group/index.html#default-project-creation-level).
+1. Introduced in GitLab 12.2.
+1. Default project creation role can be changed at:
+ - The [instance level](admin_area/settings/visibility_and_access_controls.md#default-project-creation-protection).
+ - The [group level](group/index.md#default-project-creation-level).
### Subgroup permissions
@@ -265,7 +263,7 @@ External users:
logged out).
Access can be granted by adding the user as member to the project or group.
-They will, like usual users, receive a role in the project or group with all
+Like regular users, they will receive a role in the project or group with all
the abilities that are mentioned in the [permissions table above](#project-members-permissions).
For example, if an external user is added as Guest, and your project is
private, they will not have access to the code; you would need to grant the external
@@ -380,7 +378,7 @@ instance and project. In addition, all admins can use the admin interface under
| See events in the system | | | | ✓ |
| Admin interface | | | | ✓ |
-- *1*: Only if the job was triggered by the user
+1. Only if the job was triggered by the user
### Job permissions
@@ -409,8 +407,8 @@ users:
| Push container images to other projects | | | | |
| Push source and LFS | | | | |
-- *1*: Only if the user is not an external one
-- *2*: Only if the user is a member of the project
+1. Only if the user is not an external one
+1. Only if the user is a member of the project
### New CI job permissions model
@@ -422,7 +420,7 @@ read through the documentation on the [new CI/CD permissions model](project/new_
The permission to merge or push to protected branches is used to define if a user can
run CI/CD pipelines and execute actions on jobs that are related to those branches.
-See [Security on protected branches](../ci/pipelines.md#security-on-protected-branches)
+See [Security on protected branches](../ci/pipelines/index.md#security-on-protected-branches)
for details about the pipelines security model.
## LDAP users permissions
diff --git a/doc/user/profile/account/two_factor_authentication.md b/doc/user/profile/account/two_factor_authentication.md
index ab701337c2a..2949a2d4fc8 100644
--- a/doc/user/profile/account/two_factor_authentication.md
+++ b/doc/user/profile/account/two_factor_authentication.md
@@ -39,7 +39,7 @@ To enable 2FA:
1. **In GitLab:**
1. Log in to your GitLab account.
- 1. Go to your **Profile Settings**.
+ 1. Go to your [**Profile settings**](../index.md#profile-settings).
1. Go to **Account**.
1. Click **Enable Two-factor Authentication**.
1. **On your device (usually your phone):**
@@ -84,7 +84,7 @@ Search for `security.webauth.u2f` and double click on it to toggle to `true`.
To set up 2FA with a U2F device:
1. Log in to your GitLab account.
-1. Go to your **Profile Settings**.
+1. Go to your [**Profile settings**](../index.md#profile-settings).
1. Go to **Account**.
1. Click **Enable Two-Factor Authentication**.
1. Plug in your U2F device.
@@ -110,6 +110,7 @@ the **Download codes** button for storage in a safe place. If you choose to
download them, the file will be called `gitlab-recovery-codes.txt`.
If you lose the recovery codes or just want to generate new ones, you can do so
+from the [two-factor authentication account settings page](#regenerate-2fa-recovery-codes) or
[using SSH](#generate-new-recovery-codes-using-ssh).
## Logging in with 2FA Enabled
@@ -139,7 +140,7 @@ request and you will be automatically logged in.
If you ever need to disable 2FA:
1. Log in to your GitLab account.
-1. Go to your **Profile Settings**.
+1. Go to your [**Profile settings**](../index.md#profile-settings).
1. Go to **Account**.
1. Click **Disable**, under **Two-Factor Authentication**.
@@ -160,6 +161,7 @@ have lost your code generation device) you can:
- [Use a saved recovery code](#use-a-saved-recovery-code).
- [Generate new recovery codes using SSH](#generate-new-recovery-codes-using-ssh).
+- [Regenerate 2FA recovery codes](#regenerate-2fa-recovery-codes).
- [Ask a GitLab administrator to disable two-factor authentication on your account](#ask-a-gitlab-administrator-to-disable-two-factor-authentication-on-your-account).
### Use a saved recovery code
@@ -219,6 +221,20 @@ a new set of recovery codes with SSH:
After signing in, visit your **Profile settings > Account** immediately to set
up two-factor authentication with a new device.
+### Regenerate 2FA recovery codes
+
+To regenerate 2FA recovery codes, you need access to a desktop browser:
+
+1. Navigate to GitLab.
+1. Sign in to your GitLab account.
+1. Go to your [**Profile settings**](../index.md#profile-settings).
+1. Select **{account}** **Account > Two-Factor Authentication (2FA)**.
+1. If you've already configured 2FA, click **Manage two-factor authentication**.
+1. In the **Register Two-Factor Authenticator** pane, click **Regenerate recovery codes**.
+
+NOTE: **Note:**
+If you regenerate 2FA recovery codes, save them. You won't be able to use any previously created 2FA codes.
+
### Ask a GitLab administrator to disable two-factor authentication on your account
If you cannot use a saved recovery code or generate new recovery codes, ask a
diff --git a/doc/user/profile/notifications.md b/doc/user/profile/notifications.md
index d3444b9aa13..3f843bf8b5e 100644
--- a/doc/user/profile/notifications.md
+++ b/doc/user/profile/notifications.md
@@ -159,6 +159,9 @@ In most of the below cases, the notification will be sent to:
- Subscribers: anyone who manually subscribed to the issue, merge request, or epic **(ULTIMATE)**
- Custom: Users with notification level "custom" who turned on notifications for any of the events present in the table below
+NOTE: **Note:**
+To minimize the number of notifications that do not require any action, from [GitLab 12.9 onwards](https://gitlab.com/gitlab-org/gitlab/issues/616), eligible approvers are no longer notified of all activities in their projects. To receive these notifications, they must change their user notification settings to **Watch** instead.
+
| Event | Sent to |
|------------------------|---------|
| New issue | |
@@ -178,7 +181,8 @@ In most of the below cases, the notification will be sent to:
| Remove milestone merge request | Subscribers, participants mentioned, and Custom notification level with this event selected |
| New comment | The above, plus anyone mentioned by `@username` in the comment, with notification level "Mention" or higher |
| Failed pipeline | The author of the pipeline |
-| Successful pipeline | The author of the pipeline, if they have the custom notification setting for successful pipelines set |
+| Fixed pipeline | The author of the pipeline. Disabled by default. To activate it, you must [enable the `ci_pipeline_fixed_notifications` feature flag](../../development/feature_flags/development.md#enabling-a-feature-flag-in-development). |
+| Successful pipeline | The author of the pipeline, if they have the custom notification setting for successful pipelines set. If the pipeline failed previously, a `Fixed pipeline` message will be sent for the first successful pipeline after the failure, then a `Successful pipeline` message for any further successful pipelines. |
| New epic **(ULTIMATE)** | |
| Close epic **(ULTIMATE)** | |
| Reopen epic **(ULTIMATE)** | |
@@ -203,16 +207,18 @@ Notification email messages include GitLab-specific headers. You can filter the
The following table lists all GitLab-specific email headers:
| Header | Description |
-|-----------------------------|-------------------------------------------------------------------------|
-| X-GitLab-Project | The name of the project the notification belongs to |
-| X-GitLab-Project-Id | The ID of the project |
-| X-GitLab-Project-Path | The path of the project |
-| X-GitLab-(Resource)-ID | The ID of the resource the notification is for, where resource is `Issue`, `MergeRequest`, `Commit`, etc|
-| X-GitLab-Discussion-ID | Only in comment emails, the ID of the thread the comment is from |
-| X-GitLab-Pipeline-Id | Only in pipeline emails, the ID of the pipeline the notification is for |
-| X-GitLab-Reply-Key | A unique token to support reply by email |
-| X-GitLab-NotificationReason | The reason for being notified: one of `mentioned`, `assigned`, or `own_activity` |
-| List-Id | The path of the project in a RFC 2919 mailing list identifier useful for email organization, for example, with Gmail filters |
+|------------------------------------|-------------------------------------------------------------------------|
+| X-GitLab-Group-Id **(PREMIUM)** | The group's ID. Only present on notification emails for epics. |
+| X-GitLab-Group-Path **(PREMIUM)** | The group's path. Only present on notification emails for epics. |
+| X-GitLab-Project | The name of the project the notification belongs to. |
+| X-GitLab-Project-Id | The project's ID. |
+| X-GitLab-Project-Path | The project's path. |
+| X-GitLab-(Resource)-ID | The ID of the resource the notification is for. The resource, for example, can be `Issue`, `MergeRequest`, `Commit`, or another such resource. |
+| X-GitLab-Discussion-ID | The ID of the thread the comment belongs to, in notification emails for comments. |
+| X-GitLab-Pipeline-Id | The ID of the pipeline the notification is for, in notification emails for pipelines. |
+| X-GitLab-Reply-Key | A unique token to support reply by email. |
+| X-GitLab-NotificationReason | The reason for the notification. This can be `mentioned`, `assigned`, or `own_activity`. |
+| List-Id | The path of the project in an RFC 2919 mailing list identifier. This is useful for email organization with filters, for example. |
### X-GitLab-NotificationReason
diff --git a/doc/user/project/badges.md b/doc/user/project/badges.md
index afd8def0245..aaf70e4e4d6 100644
--- a/doc/user/project/badges.md
+++ b/doc/user/project/badges.md
@@ -12,7 +12,7 @@ or ways to contact the project maintainers.
## Project badges
-Badges can be added to a project and will then be visible on the project's overview page.
+Badges can be added to a project by Maintainers or Owners, and will then be visible on the project's overview page.
If you find that you have to add the same badges to several projects, you may want to add them at the [group level](#group-badges).
To add a new badge to a project:
@@ -75,5 +75,5 @@ You can also configure badges via the GitLab API. As in the settings, there is
a distinction between endpoints for badges on the
[project level](../../api/project_badges.md) and [group level](../../api/group_badges.md).
-[pipeline status]: pipelines/settings.md#pipeline-status-badge
-[test coverage]: pipelines/settings.md#test-coverage-report-badge
+[pipeline status]: ../../ci/pipelines/settings.md#pipeline-status-badge
+[test coverage]: ../../ci/pipelines/settings.md#test-coverage-report-badge
diff --git a/doc/user/project/clusters/add_remove_clusters.md b/doc/user/project/clusters/add_remove_clusters.md
index 6106c86ce39..3e1e1694f0d 100644
--- a/doc/user/project/clusters/add_remove_clusters.md
+++ b/doc/user/project/clusters/add_remove_clusters.md
@@ -5,7 +5,7 @@ GitLab offers integrated cluster creation for the following Kubernetes providers
- Google Kubernetes Engine (GKE).
- Amazon Elastic Kubernetes Service (EKS).
-In addition, GitLab can integrate with any standard Kubernetes provider, either on-premise or hosted.
+GitLab can also integrate with any standard Kubernetes provider, either on-premise or hosted.
TIP: **Tip:**
Every new Google Cloud Platform (GCP) account receives [$300 in credit upon sign up](https://console.cloud.google.com/freetrial),
diff --git a/doc/user/project/clusters/img/kubernetes_pod_logs_v12_8.png b/doc/user/project/clusters/img/kubernetes_pod_logs_v12_8.png
deleted file mode 100644
index 7be0cd01768..00000000000
--- a/doc/user/project/clusters/img/kubernetes_pod_logs_v12_8.png
+++ /dev/null
Binary files differ
diff --git a/doc/user/project/clusters/img/kubernetes_pod_logs_v12_9.png b/doc/user/project/clusters/img/kubernetes_pod_logs_v12_9.png
new file mode 100644
index 00000000000..02b7cad1e3f
--- /dev/null
+++ b/doc/user/project/clusters/img/kubernetes_pod_logs_v12_9.png
Binary files differ
diff --git a/doc/user/project/clusters/index.md b/doc/user/project/clusters/index.md
index 819e5a26c22..4594d415fa0 100644
--- a/doc/user/project/clusters/index.md
+++ b/doc/user/project/clusters/index.md
@@ -20,14 +20,14 @@ NOTE: **Scalable app deployment with GitLab and Google Cloud Platform**
Using the GitLab project Kubernetes integration, you can:
- Use [Review Apps](../../../ci/review_apps/index.md).
-- Run [pipelines](../../../ci/pipelines.md).
+- Run [pipelines](../../../ci/pipelines/index.md).
- [Deploy](#deploying-to-a-kubernetes-cluster) your applications.
- Detect and [monitor Kubernetes](#kubernetes-monitoring).
- Use it with [Auto DevOps](#auto-devops).
- Use [Web terminals](#web-terminals).
- Use [Deploy Boards](#deploy-boards-premium). **(PREMIUM)**
- Use [Canary Deployments](#canary-deployments-premium). **(PREMIUM)**
-- View [Pod logs](#pod-logs-ultimate). **(ULTIMATE)**
+- View [Logs](#logs).
- Run serverless workloads on [Kubernetes with Knative](serverless/index.md).
### Deploy Boards **(PREMIUM)**
@@ -48,11 +48,11 @@ the need to leave GitLab.
[Read more about Canary Deployments](../canary_deployments.md)
-### Pod logs **(ULTIMATE)**
+### Logs
GitLab makes it easy to view the logs of running pods in connected Kubernetes clusters. By displaying the logs directly in GitLab, developers can avoid having to manage console tools or jump to a different interface.
-[Read more about Kubernetes pod logs](kubernetes_pod_logs.md)
+[Read more about Kubernetes logs](kubernetes_pod_logs.md)
### Kubernetes monitoring
@@ -281,22 +281,28 @@ GitLab CI/CD build environment.
| `KUBECONFIG` | Path to a file containing `kubeconfig` for this deployment. CA bundle would be embedded if specified. This config also embeds the same token defined in `KUBE_TOKEN` so you likely will only need this variable. This variable name is also automatically picked up by `kubectl` so you won't actually need to reference it explicitly if using `kubectl`. |
| `KUBE_INGRESS_BASE_DOMAIN` | From GitLab 11.8, this variable can be used to set a domain per cluster. See [cluster domains](#base-domain) for more information. |
-NOTE: **NOTE:**
+NOTE: **Note:**
Prior to GitLab 11.5, `KUBE_TOKEN` was the Kubernetes token of the main
service account of the cluster integration.
NOTE: **Note:**
If your cluster was created before GitLab 12.2, default `KUBE_NAMESPACE` will be set to `<project_name>-<project_id>`.
-When deploying a custom namespace:
+### Custom namespace
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/27630) in GitLab 12.6.
+
+The Kubernetes integration defaults to project-environment-specific namespaces
+of the form `<project_name>-<project_id>-<environment>` (see [Deployment
+variables](#deployment-variables)).
-- The custom namespace must exist in your cluster.
-- The project's deployment service account must have permission to deploy to the namespace.
-- `KUBECONFIG` must be updated to use the custom namespace instead of the GitLab-provided default (this is [not automatic](https://gitlab.com/gitlab-org/gitlab/issues/31519)).
-- If deploying with Auto DevOps, you must *also* override `KUBE_NAMESPACE` with the custom namespace.
+For **non**-GitLab-managed clusters, the namespace can be customized using
+[`environment:kubernetes:namespace`](../../../ci/environments.md#configuring-kubernetes-deployments)
+in `.gitlab-ci.yml`.
-CAUTION: **Caution:**
-GitLab does not save custom namespaces in the database. So while deployments work with custom namespaces, GitLab's integration for already-deployed environments will not pick up the customized values. For example, [Deploy Boards](../deploy_boards.md) will not work as intended for those deployments. For more information, see the [related issue](https://gitlab.com/gitlab-org/gitlab/issues/27630).
+NOTE: **Note:** When using a [GitLab-managed cluster](#gitlab-managed-clusters), the
+namespaces are created automatically prior to deployment and [cannot be
+customized](https://gitlab.com/gitlab-org/gitlab/issues/38054).
### Troubleshooting
diff --git a/doc/user/project/clusters/kubernetes_pod_logs.md b/doc/user/project/clusters/kubernetes_pod_logs.md
index a36b712ae76..709eefe07dd 100644
--- a/doc/user/project/clusters/kubernetes_pod_logs.md
+++ b/doc/user/project/clusters/kubernetes_pod_logs.md
@@ -1,6 +1,7 @@
-# Kubernetes Pod Logs **(ULTIMATE)**
+# Kubernetes Logs
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/4752) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 11.0.
+> [Moved](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/25455) to [GitLab Core](https://about.gitlab.com/pricing/) 12.9.
GitLab makes it easy to view the logs of running pods in [connected Kubernetes clusters](index.md).
By displaying the logs directly in GitLab, developers can avoid having to manage console tools or jump to a different interface.
@@ -11,17 +12,17 @@ Everything you need to build, test, deploy, and run your app at scale.
## Overview
-[Kubernetes](https://kubernetes.io) pod logs can be viewed directly within GitLab.
+[Kubernetes](https://kubernetes.io) logs can be viewed directly within GitLab.
-![Pod logs](img/kubernetes_pod_logs_v12_8.png)
+![Pod logs](img/kubernetes_pod_logs_v12_9.png)
## Requirements
-[Deploying to a Kubernetes environment](../deploy_boards.md#enabling-deploy-boards) is required in order to be able to use Pod Logs.
+[Deploying to a Kubernetes environment](../deploy_boards.md#enabling-deploy-boards) is required to use Logs.
## Usage
-To access pod logs, you must have the right [permissions](../../permissions.md#project-members-permissions).
+To access logs, you must have the right [permissions](../../permissions.md#project-members-permissions).
You can access them in two ways.
@@ -29,7 +30,7 @@ You can access them in two ways.
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/22011) in GitLab 12.5.
-Go to **Operations > Pod logs** on the sidebar menu.
+Go to **{cloud-gear}** **Operations > Logs** on the sidebar menu.
![Sidebar menu](img/sidebar_menu_pod_logs_v12_5.png)
@@ -37,7 +38,7 @@ Go to **Operations > Pod logs** on the sidebar menu.
Logs can be displayed by clicking on a specific pod from [Deploy Boards](../deploy_boards.md):
-1. Go to **Operations > Environments** and find the environment which contains the desired pod, like `production`.
+1. Go to **{cloud-gear}** **Operations > Environments** and find the environment which contains the desired pod, like `production`.
1. On the **Environments** page, you should see the status of the environment's pods with [Deploy Boards](../deploy_boards.md).
1. When mousing over the list of pods, a tooltip will appear with the exact pod name and status.
![Deploy Boards pod list](img/pod_logs_deploy_board.png)
@@ -45,13 +46,15 @@ Logs can be displayed by clicking on a specific pod from [Deploy Boards](../depl
### Logs view
-The logs view will contain the last 500 lines for a pod, and has control to filter via:
+The logs view lets you filter the logs by:
- Pods.
- [From GitLab 12.4](https://gitlab.com/gitlab-org/gitlab/issues/5769), environments.
- [From GitLab 12.7](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/21656), [full text search](#full-text-search).
- [From GitLab 12.8](https://gitlab.com/gitlab-org/gitlab/issues/197879), dates.
+Loading more than 500 log lines is possible from [GitLab 12.9](https://gitlab.com/gitlab-org/gitlab/-/issues/198050) onwards.
+
Support for pods with multiple containers is coming [in a future release](https://gitlab.com/gitlab-org/gitlab/issues/13404).
Support for historical data is coming [in a future release](https://gitlab.com/gitlab-org/gitlab/issues/196191).
@@ -62,14 +65,14 @@ Support for historical data is coming [in a future release](https://gitlab.com/g
When you enable [Elastic Stack](../../clusters/applications.md#elastic-stack) on your cluster, you can filter by date.
-Click on "Show last" to see the available options.
+Click on **Show last** to see the available options.
### Full text search
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/21656) in GitLab 12.7.
When you enable [Elastic Stack](../../clusters/applications.md#elastic-stack) on your cluster,
-you can search the content of your logs via a search bar.
+you can search the content of your logs through a search bar.
The search is passed on to Elasticsearch using the [simple_query_string](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-simple-query-string-query.html)
Elasticsearch function, which supports the following operators:
diff --git a/doc/user/project/clusters/serverless/aws.md b/doc/user/project/clusters/serverless/aws.md
index afe48f8c7f4..cb35e598803 100644
--- a/doc/user/project/clusters/serverless/aws.md
+++ b/doc/user/project/clusters/serverless/aws.md
@@ -2,43 +2,46 @@
GitLab allows users to easily deploy AWS Lambda functions and create rich serverless applications.
-GitLab supports deployment of functions to AWS Lambda using a combination of:
+GitLab supports deployment of AWS Lambda functions through GitLab CI/CD using the following Serverless frameworks:
-- [Serverless Framework with AWS](https://serverless.com/framework/docs/providers/aws/)
-- GitLab CI/CD
+- [Serverless Framework with AWS](#serverless-framework)
+- [AWS' Serverless Application Model (SAM)](#aws-serverless-application-model)
+
+## Serverless Framework
+
+The [Serverless Framework can deploy to AWS](https://serverless.com/framework/docs/providers/aws/).
We have prepared an example with a step-by-step guide to create a simple function and deploy it on AWS.
-Additionally, in the [How To section](#how-to), you can read about different use cases,
-like:
+Additionally, in the [How To section](#how-to), you can read about different use cases like:
- Running a function locally.
- Working with secrets.
- Setting up CORS.
-Alternatively, you can quickly [create a new project with a template](https://docs.gitlab.com/ee/gitlab-basics/create-project.html#project-templates). The [`Serverless Framework/JS` template](https://gitlab.com/gitlab-org/project-templates/serverless-framework/) already includes all parts described below.
+Alternatively, you can quickly [create a new project with a template](../../../../gitlab-basics/create-project.md#project-templates). The [`Serverless Framework/JS` template](https://gitlab.com/gitlab-org/project-templates/serverless-framework/) already includes all parts described below.
-## Example
+### Example
In the following example, you will:
1. Create a basic AWS Lambda Node.js function.
1. Link the function to an API Gateway `GET` endpoint.
-### Steps
+#### Steps
The example consists of the following steps:
-1. Creating a Lambda handler function
-1. Creating a `serverless.yml` file
-1. Crafting the `.gitlab-ci.yml` file
-1. Setting up your AWS credentials with your GitLab account
-1. Deploying your function
-1. Testing the deployed function
+1. Creating a Lambda handler function.
+1. Creating a `serverless.yml` file.
+1. Crafting the `.gitlab-ci.yml` file.
+1. Setting up your AWS credentials with your GitLab account.
+1. Deploying your function.
+1. Testing the deployed function.
Let's take it step by step.
-### Creating a Lambda handler function
+#### Creating a Lambda handler function
Your Lambda function will be the primary handler of requests. In this case, we will create a very simple Node.js `hello` function:
@@ -67,7 +70,7 @@ In our case, `module.exports.hello` defines the `hello` handler that will be ref
You can learn more about the AWS Lambda Node.js function handler and all its various options here: <https://docs.aws.amazon.com/lambda/latest/dg/nodejs-prog-model-handler.html>
-### Creating a `serverless.yml` file
+#### Creating a `serverless.yml` file
In the root of your project, create a `serverless.yml` file that will contain configuration specifics for the Serverless Framework.
@@ -94,7 +97,7 @@ The `events` declaration will create a AWS API Gateway `GET` endpoint to receive
You can read more about the available properties and additional configuration possibilities of the Serverless Framework here: <https://serverless.com/framework/docs/providers/aws/guide/serverless.yml/>
-### Crafting the `.gitlab-ci.yml` file
+#### Crafting the `.gitlab-ci.yml` file
In a `.gitlab-ci.yml` file in the root of your project, place the following code:
@@ -122,7 +125,7 @@ This example code does the following:
- Deploys the serverless function to your AWS account using the AWS credentials
defined above.
-### Setting up your AWS credentials with your GitLab account
+#### Setting up your AWS credentials with your GitLab account
In order to interact with your AWS account, the GitLab CI/CD pipelines require both `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` to be defined in your GitLab settings under **Settings > CI/CD > Variables**.
For more information please see: <https://docs.gitlab.com/ee/ci/variables/README.html#via-the-ui>
@@ -130,7 +133,7 @@ For more information please see: <https://docs.gitlab.com/ee/ci/variables/README
NOTE: **Note:**
The AWS credentials you provide must include IAM policies that provision correct access control to AWS Lambda, API Gateway, CloudFormation, and IAM resources.
-### Deploying your function
+#### Deploying your function
`git push` the changes to your GitLab repository and the GitLab build pipeline will automatically deploy your function.
@@ -142,7 +145,7 @@ endpoints:
GET - https://u768nzby1j.execute-api.us-east-1.amazonaws.com/production/hello
```
-### Manually testing your function
+#### Manually testing your function
Running the following `curl` command should trigger your function.
@@ -165,7 +168,7 @@ Hooray! You now have a AWS Lambda function deployed via GitLab CI.
Nice work!
-## How To
+### How To
In this section, we show you how to build on the basic example to:
@@ -173,7 +176,7 @@ In this section, we show you how to build on the basic example to:
- Set up secret variables.
- Set up CORS.
-### Running function locally
+#### Running function locally
The `serverless-offline` plugin allows you to run your code locally. To run your code locally:
@@ -204,7 +207,7 @@ It should output:
}
```
-### Secret variables
+#### Secret variables
Secrets are injected into your functions using environment variables.
@@ -225,7 +228,7 @@ NOTE: **Note:**
Anyone with access to the AWS environment may be able to see the values of those
variables persisted in the lambda definition.
-### Setting up CORS
+#### Setting up CORS
If you want to set up a web page that makes calls to your function, like we have done in the [template](https://gitlab.com/gitlab-org/project-templates/serverless-framework/), you need to deal with the Cross-Origin Resource Sharing (CORS).
@@ -269,19 +272,241 @@ module.exports.hello = async event => {
For more information, see the [Your CORS and API Gateway survival guide](https://serverless.com/blog/cors-api-gateway-survival-guide/)
blog post written by the Serverless Framework team.
-### Writing automated tests
+#### Writing automated tests
The [Serverless Framework](https://gitlab.com/gitlab-org/project-templates/serverless-framework/)
example project shows how to use Jest, Axios, and the `serverless-offline` plugin to do
automated testing of both local and deployed serverless functions.
-## Examples and template
+### Examples and template
The example code is available:
- As a [cloneable repository](https://gitlab.com/gitlab-org/serverless/examples/serverless-framework-js).
- In a version with [tests and secret variables](https://gitlab.com/gitlab-org/project-templates/serverless-framework/).
-You can also use a [template](https://docs.gitlab.com/ee/gitlab-basics/create-project.html#project-templates)
+You can also use a [template](../../../../gitlab-basics/create-project.md#project-templates)
(based on the version with tests and secret variables) from within the GitLab UI (see
the `Serverless Framework/JS` template).
+
+## AWS Serverless Application Model
+
+AWS Serverless Application Model (AWS SAM) is an open source framework for building and
+deploying serverless applications. For more details, see the AWS documentation on
+[AWS Serverless Application Model](https://docs.aws.amazon.com/serverless-application-model/).
+
+### Deploying AWS Lambda function using AWS SAM and GitLab CI/CD
+
+GitLab allows developers to build and deploy serverless applications using the combination of:
+
+- [AWS Serverless Application Model (AWS SAM)](https://aws.amazon.com/serverless/sam/).
+- GitLab CI/CD.
+
+### Example
+
+In the following example, you will:
+
+- Install SAM CLI.
+- Create a sample SAM application including a Lambda function and API Gateway.
+- Build and deploy the application to your AWS account using GitLab CI/CD.
+
+### Steps
+
+The example consists of the following steps:
+
+1. Installing SAM CLI.
+1. Creating an AWS SAM application using SAM CLI.
+1. Crafting the `.gitlab-ci.yml` file.
+1. Setting up your AWS credentials with your GitLab account.
+1. Deploying your application.
+1. Testing the deployed function.
+
+### Installing SAM CLI
+
+AWS SAM provides a CLI called AWS SAM CLI to make it easier to create and manage
+applications.
+
+Some steps in this documentation use SAM CLI. Follow the instructions for
+[installing SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html)
+to install and configure SAM CLI.
+
+If you use [AWS Cloud9](https://aws.amazon.com/cloud9/) as your integrated development
+environment (IDE), the following are installed for you:
+
+- [AWS Command Line Interface](https://docs.aws.amazon.com/en_pv/cli/latest/userguide/cli-chap-install.html)
+- [SAM CLI](https://docs.aws.amazon.com/en_pv/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html)
+- [Docker](https://docs.docker.com/install/) and necessary Docker images.
+
+### Creating an AWS SAM application using SAM CLI
+
+To create a new AWS SAM application:
+
+1. Create a new GitLab project.
+1. `git clone` the project into your local environment.
+1. Change to the newly cloned project and create a new SAM app using the following command:
+
+ ```shell
+ sam init -r python3.8 -n gitlabpoc --app-template "hello-world"
+ ```
+
+1. `git push` the application back to the GitLab project.
+
+This creates a SAM app named `gitlabpoc` using the default configuration, a single
+Python 3.8 function invoked by an [Amazon API Gateway](https://aws.amazon.com/api-gateway/)
+endpoint. To see additional runtimes supported by SAM and options for `sam init`, run:
+
+```shell
+sam init -h
+```
+
+### Setting up your AWS credentials with your GitLab account
+
+In order to interact with your AWS account, the GitLab CI/CD pipelines require both
+`AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` to be set in the project's CI/CD
+variables.
+
+To set these:
+
+1. Navigate to the project's **{settings}** **Settings > CI / CD**.
+1. Expand the **Variables** section and create entries for `AWS_ACCESS_KEY_ID` and
+ `AWS_SECRET_ACCESS_KEY`.
+1. Mask the credentials so they do not show in logs using the **Masked** toggle.
+
+NOTE: **Note:**
+The AWS credentials you provide must include IAM policies that provision correct access
+control to AWS Lambda, API Gateway, CloudFormation, and IAM resources.
+
+### Crafting the `.gitlab-ci.yml` file
+
+In a [`.gitlab-ci.yml`](../../../../ci/yaml/README.md) file in the root of your project,
+add the following and replace `<S3_bucket_name>` with the name of the S3 bucket where you
+want to store your package:
+
+```yaml
+image: python:latest
+
+stages:
+  - deploy
+
+production:
+  stage: deploy
+  before_script:
+    - pip3 install awscli --upgrade
+    - pip3 install aws-sam-cli --upgrade
+  script:
+    - sam build
+    - sam package --output-template-file packaged.yaml --s3-bucket <S3_bucket_name>
+    - sam deploy --template-file packaged.yaml --stack-name gitlabpoc --s3-bucket <S3_bucket_name> --capabilities CAPABILITY_IAM --region us-east-1
+  environment: production
+```
+
+Let's examine the config file more closely:
+
+- `image` specifies the Docker image to use for this build. This is the latest Python
+ image since the sample application is written in Python.
+- AWS CLI and AWS SAM CLI are installed in the `before_script` section.
+- SAM build, package, and deploy commands are used to build, package, and deploy the
+ application.
+
+### Deploying your application
+
+Push changes to your GitLab repository and the GitLab build pipeline will automatically
+deploy your application. If your:
+
+- Build and deploy are successful, [test your deployed application](#testing-the-deployed-application).
+- Build fails, look at the build log to see why the build failed. Some common reasons
+ the build might fail are:
+
+  - Incompatible versions of software. For example, the Python runtime version might
+    differ from the Python version on the build machine. Address this by installing the
+    required versions of the software.
+ - You may not be able to access your AWS account from GitLab. Check the environment
+ variables you set up with AWS credentials.
+ - You may not have permission to deploy a serverless application. Make sure you
+ provide all required permissions to deploy a serverless application.
+
+### Testing the deployed application
+
+To test the application you deployed, go to the build log and follow these steps:
+
+1. Click **Show complete raw** in the upper right-hand corner:
+
+ ![sam-complete-raw](img/sam-complete-raw.png)
+
+1. Look for the `HelloWorldApi` API Gateway endpoint, similar to the one shown below:
+
+ ![sam-api-endpoint](img/sam-api-endpoint.png)
+
+1. Use `curl` to test the API. For example:
+
+ ```shell
+ curl https://py4rg7qtlg.execute-api.us-east-1.amazonaws.com/Prod/hello/
+ ```
+
+Output should be:
+
+```json
+{"message": "hello world"}
+```
+
+### Testing Locally
+
+AWS SAM provides functionality to test your applications locally. To use it, you must
+have AWS SAM CLI installed locally.
+
+First, test the function.
+
+SAM provides a default event in `events/event.json` that includes a message body of:
+
+```json
+{"message": "hello world"}
+```
+
+If you pass that event into the `HelloWorldFunction`, it should respond with the same
+body.
+
+Invoke the function by running:
+
+```shell
+sam local invoke HelloWorldFunction -e events/event.json
+```
+
+Output should be:
+
+```json
+{"message": "hello world"}
+```
+
+After you confirm that the Lambda function is working as expected, test the API Gateway
+using the following steps.
+
+Start the API locally by running:
+
+```shell
+sam local start-api
+```
+
+SAM again launches a Docker container, this time with a mocked Amazon API Gateway
+listening on `localhost:3000`.
+
+Call the `hello` API by running:
+
+```shell
+curl http://127.0.0.1:3000/hello
+```
+
+Output again should be:
+
+```json
+{"message": "hello world"}
+```
diff --git a/doc/user/project/clusters/serverless/img/deploy-stage.png b/doc/user/project/clusters/serverless/img/deploy-stage.png
deleted file mode 100644
index a4a6b363b64..00000000000
--- a/doc/user/project/clusters/serverless/img/deploy-stage.png
+++ /dev/null
Binary files differ
diff --git a/doc/user/project/clusters/serverless/img/function-list_v12_7.png b/doc/user/project/clusters/serverless/img/function-list_v12_7.png
new file mode 100644
index 00000000000..f2a27ce7b0f
--- /dev/null
+++ b/doc/user/project/clusters/serverless/img/function-list_v12_7.png
Binary files differ
diff --git a/doc/user/project/clusters/serverless/img/knative-app.png b/doc/user/project/clusters/serverless/img/knative-app.png
deleted file mode 100644
index 931830d83ae..00000000000
--- a/doc/user/project/clusters/serverless/img/knative-app.png
+++ /dev/null
Binary files differ
diff --git a/doc/user/project/clusters/serverless/img/sam-api-endpoint.png b/doc/user/project/clusters/serverless/img/sam-api-endpoint.png
new file mode 100644
index 00000000000..3407b2684fd
--- /dev/null
+++ b/doc/user/project/clusters/serverless/img/sam-api-endpoint.png
Binary files differ
diff --git a/doc/user/project/clusters/serverless/img/sam-complete-raw.png b/doc/user/project/clusters/serverless/img/sam-complete-raw.png
new file mode 100644
index 00000000000..1130cd29d56
--- /dev/null
+++ b/doc/user/project/clusters/serverless/img/sam-complete-raw.png
Binary files differ
diff --git a/doc/user/project/clusters/serverless/index.md b/doc/user/project/clusters/serverless/index.md
index 8f68390a270..0b5ebf3c74c 100644
--- a/doc/user/project/clusters/serverless/index.md
+++ b/doc/user/project/clusters/serverless/index.md
@@ -311,7 +311,7 @@ Explanation of the fields used above:
| Parameter | Description |
|-----------|-------------|
| `name` | Indicates which provider is used to execute the `serverless.yml` file. In this case, the TriggerMesh middleware. |
-| `envs` | Includes the environment variables to be passed as part of function execution for **all** functions in the file, where `FOO` is the variable name and `BAR` are he variable contents. You may replace this with you own variables. |
+| `envs` | Includes the environment variables to be passed as part of function execution for **all** functions in the file, where `FOO` is the variable name and `BAR` are the variable contents. You may replace this with your own variables. |
| `secrets` | Includes the contents of the Kubernetes secret as environment variables accessible to be passed as part of function execution for **all** functions in the file. The secrets are expected in ini format. |
### `functions`
@@ -384,7 +384,8 @@ The sample function can now be triggered from any HTTP client using a simple `PO
### Secrets
-To access your Kubernetes secrets from within your function, the secrets should be created under the namespace of your serverless deployment.
+To access your Kubernetes secrets from within your function, the secrets should be created under the namespace of your serverless deployment and specified in your `serverless.yml` file as above.
+You can create secrets in several ways. The following sections show some examples.
#### CLI example
@@ -483,34 +484,29 @@ A `serverless.yml` file is not required when deploying serverless applications.
With all the pieces in place, the next time a CI pipeline runs, the Knative application will be deployed. Navigate to
**CI/CD > Pipelines** and click the most recent pipeline.
-### Obtain the URL for the Knative deployment
+### Function details
-Go to the **CI/CD > Pipelines** and click on the pipeline that deployed your app. Once all the stages of the pipeline finish, click the **deploy** stage.
+Go to the **Operations > Serverless** page to see the final URL of your functions.
-![deploy stage](img/deploy-stage.png)
+![function_details](img/function-list_v12_7.png)
-The output will look like this:
+### Invocation metrics
-```shell
-Running with gitlab-runner 12.1.0-rc1 (6da35412)
- on prm-com-gitlab-org ae3bfce3
-Using Docker executor with image registry.gitlab.com/gitlab-org/gitlabktl:latest ...
-Running on runner-ae3bfc-concurrent-0 via runner-ae3bfc ...
-Fetching changes...
-Authenticating with credentials from job payload (GitLab Registry)
-$ /usr/bin/gitlabktl application deploy
-Welcome to gitlabktl tool
-time="2019-07-15T10:51:07Z" level=info msg="deploying registry credentials"
-Creating app-hello function
-Waiting for app-hello ready state
-Service app-hello URL: http://app-hello.serverless.example.com
-Job succeeded
-```
+On the same page as above, click on one of the function
+rows to bring up the function details page.
+
+![function_details](img/function-details-loaded.png)
+
+The pod count will give you the number of pods running the serverless function instances on a given cluster.
-The second to last line, labeled **Service domain** contains the URL for the
-deployment. Copy and paste the domain into your browser to see the app live.
+For the Knative function invocations to appear,
+[Prometheus must be installed](../index.md#installing-applications).
-![knative app](img/knative-app.png)
+Once Prometheus is installed, a message may appear indicating that the metrics data _is
+loading or is not available at this time._ It will appear upon the first access of the
+page, but should go away after a few seconds. If the message does not disappear, then it
+is possible that GitLab is unable to connect to the Prometheus instance running on the
+cluster.
## Configuring logging
@@ -559,26 +555,6 @@ Or:
1. Click on **Discover**, then select `filebeat-*` from the dropdown on the left.
1. Enter `kubernetes.container.name:"queue-proxy" AND message:/httpRequest/` into the search box.
-## Function details
-
-Go to the **Operations > Serverless** page and click on one of the function
-rows to bring up the function details page.
-
-![function_details](img/function-details-loaded.png)
-
-The pod count will give you the number of pods running the serverless function instances on a given cluster.
-
-### Prometheus support
-
-For the Knative function invocations to appear,
-[Prometheus must be installed](../index.md#installing-applications).
-
-Once Prometheus is installed, a message may appear indicating that the metrics data _is
-loading or is not available at this time._ It will appear upon the first access of the
-page, but should go away after a few seconds. If the message does not disappear, then it
-is possible that GitLab is unable to connect to the Prometheus instance running on the
-cluster.
-
## Enabling TLS for Knative services
By default, a GitLab serverless deployment will be served over `http`. In order to serve over `https` you
diff --git a/doc/user/project/code_owners.md b/doc/user/project/code_owners.md
index a81c0beb6d0..4ab615a1008 100644
--- a/doc/user/project/code_owners.md
+++ b/doc/user/project/code_owners.md
@@ -25,8 +25,18 @@ specify themselves as a code owner, all before the new changes
get merged to the default branch.
When a file matches multiple entries in the `CODEOWNERS` file,
-the users from all entries are displayed on the blob page of
-the given file.
+the users from the last pattern matching the file are displayed on the
+blob page of the given file. For example, if you have the following
+`CODEOWNERS` file:
+
+```plaintext
+README.md @user1
+
+# This line would also match the file README.md
+*.md @user2
+```
+
+The user shown for `README.md` would be `@user2`.
## Approvals by Code Owners
diff --git a/doc/user/project/deploy_tokens/index.md b/doc/user/project/deploy_tokens/index.md
index 728f09ca787..03b580c320a 100644
--- a/doc/user/project/deploy_tokens/index.md
+++ b/doc/user/project/deploy_tokens/index.md
@@ -1,19 +1,21 @@
# Deploy Tokens
-> [Introduced][ce-17894] in GitLab 10.7.
+> - [Introduced][ce-17894] in GitLab 10.7.
+> - [Moved](https://gitlab.com/gitlab-org/gitlab/issues/199370) from **Settings > Repository** in GitLab 12.9.
-Deploy tokens allow to download (through `git clone`), or read the container registry images of a project without the need of having a user and a password.
+Deploy tokens allow you to download (`git clone`) or read the container registry images of a project without having a user and a password.
-Please note, that the expiration of deploy tokens happens on the date you define,
-at midnight UTC and that they can be only managed by [maintainers](../../permissions.md).
+Deploy tokens can be managed by [maintainers only](../../permissions.md).
+
+If you have a key pair, you might want to use [deploy keys](../../../ssh/README.md#deploy-keys) instead.
## Creating a Deploy Token
-You can create as many deploy tokens as you like from the settings of your project:
+You can create as many deploy tokens as you like from the settings of your project. Alternatively, you can create [group-scoped deploy tokens](#group-deploy-token).
1. Log in to your GitLab account.
-1. Go to the project you want to create Deploy Tokens for.
-1. Go to **Settings** > **Repository**.
+1. Go to the project (or group) you want to create Deploy Tokens for.
+1. Go to **{settings}** **Settings** > **CI / CD**.
1. Click on "Expand" on **Deploy Tokens** section.
1. Choose a name, expiry date (optional), and username (optional) for the token.
1. Choose the [desired scopes](#limiting-scopes-of-a-deploy-token).
@@ -23,6 +25,10 @@ You can create as many deploy tokens as you like from the settings of your proje
![Personal access tokens page](img/deploy_tokens.png)
+## Deploy token expiration
+
+Deploy tokens expire on the date you define, at midnight UTC.
+
## Revoking a deploy token
At any time, you can revoke any deploy token by just clicking the
@@ -77,6 +83,22 @@ docker login -u <username> -p <deploy_token> registry.example.com
Just replace `<username>` and `<deploy_token>` with the proper values. Then you can simply
pull images from your Container Registry.
+### Group Deploy Token
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/21765) in GitLab 12.9.
+
+A deploy token created at the group level can be used across all projects that
+belong either to the specific group or to one of its subgroups.
+
+To use a group deploy token:
+
+1. [Create](#creating-a-deploy-token) a deploy token for a group.
+1. Use it the same way you use a project deploy token when
+ [cloning a repository](#git-clone-a-repository).
+
+The scopes applied to a group deploy token (such as `read_repository`) will
+apply consistently when cloning the repository of related projects.
+
### GitLab Deploy Token
> [Introduced][ce-18414] in GitLab 10.8.
diff --git a/doc/user/project/description_templates.md b/doc/user/project/description_templates.md
index d59d4eec174..84b74692725 100644
--- a/doc/user/project/description_templates.md
+++ b/doc/user/project/description_templates.md
@@ -91,7 +91,7 @@ It is possible to use [quick actions](quick_actions.md) within description templ
Here is an example for a Bug report template:
-```
+```plaintext
Summary
(Summarize the bug encountered concisely)
diff --git a/doc/user/project/img/deploy_boards_landing_page.png b/doc/user/project/img/deploy_boards_landing_page.png
index 73b3724d657..bed5f11018e 100644
--- a/doc/user/project/img/deploy_boards_landing_page.png
+++ b/doc/user/project/img/deploy_boards_landing_page.png
Binary files differ
diff --git a/doc/user/project/img/issue_boards_blocked_icon_v12_8.png b/doc/user/project/img/issue_boards_blocked_icon_v12_8.png
index ede57b760ed..779f643ba56 100644
--- a/doc/user/project/img/issue_boards_blocked_icon_v12_8.png
+++ b/doc/user/project/img/issue_boards_blocked_icon_v12_8.png
Binary files differ
diff --git a/doc/user/project/img/issue_boards_multi_select.png b/doc/user/project/img/issue_boards_multi_select_v12_4.png
index eebe06b04ae..eebe06b04ae 100644
--- a/doc/user/project/img/issue_boards_multi_select.png
+++ b/doc/user/project/img/issue_boards_multi_select_v12_4.png
Binary files differ
diff --git a/doc/user/project/img/labels_key_value_v12_1.png b/doc/user/project/img/labels_key_value_v12_1.png
index 82a6856bca7..ccda944a647 100644
--- a/doc/user/project/img/labels_key_value_v12_1.png
+++ b/doc/user/project/img/labels_key_value_v12_1.png
Binary files differ
diff --git a/doc/user/project/import/bitbucket_server.md b/doc/user/project/import/bitbucket_server.md
index fd62165053e..32f986890b9 100644
--- a/doc/user/project/import/bitbucket_server.md
+++ b/doc/user/project/import/bitbucket_server.md
@@ -39,7 +39,7 @@ Import your projects from Bitbucket Server to GitLab with minimal effort.
The Bitbucket Server importer works as follows:
-1. The user will be prompted to enter the URl, username, and password or personal access token to login to Bitbucket.
+1. The user will be prompted to enter the URL, username, and password (or personal access token) to log in to Bitbucket.
These credentials are preserved only as long as the importer is running.
1. The importer will attempt to list all the current repositories on the Bitbucket Server.
1. Upon selection, the importer will clone the repository and import pull requests and comments.
diff --git a/doc/user/project/import/gemnasium.md b/doc/user/project/import/gemnasium.md
index 20614bf5cc5..cf9ac15f5ac 100644
--- a/doc/user/project/import/gemnasium.md
+++ b/doc/user/project/import/gemnasium.md
@@ -44,7 +44,7 @@ some steps to migrate your projects. There is no automatic import since GitLab
doesn't know anything about any projects which existed on Gemnasium.com.
Security features are free for public (open-source) projects hosted on GitLab.com.
-### If your project is hosted on GitLab (`https://gitlab.com` / self-hosted)
+### If your project is hosted on GitLab (`https://gitlab.com` / self-managed)
You're almost set! If you're already using
[Auto DevOps](../../../topics/autodevops/), you are already covered.
@@ -102,5 +102,5 @@ back to both GitLab and GitHub when completed.
NOTE: **Note:**
If you don't commit very often to your project, you may want to use
-[scheduled pipelines](../pipelines/schedules.md) to run the job on a regular
+[scheduled pipelines](../../../ci/pipelines/schedules.md) to run the job on a regular
basis.
diff --git a/doc/user/project/import/github.md b/doc/user/project/import/github.md
index 175110cd535..80b14c40e0a 100644
--- a/doc/user/project/import/github.md
+++ b/doc/user/project/import/github.md
@@ -1,19 +1,19 @@
# Import your project from GitHub to GitLab
Using the importer, you can import your GitHub repositories to GitLab.com or to
-your self-hosted GitLab instance.
+your self-managed GitLab instance.
## Overview
NOTE: **Note:**
These instructions work for users on GitLab.com, but if you are an
-administrator of a self-hosted GitLab instance or if you are importing from GitHub Enterprise,
+administrator of a self-managed GitLab instance or if you are importing from GitHub Enterprise,
you must enable [GitHub integration][gh-import]. GitHub integration is the only method for
importing from GitHub Enterprise. If you are using GitLab.com, you can alternatively import
GitHub repositories using a [personal access token](#using-a-github-token),
but this method is not recommended because it cannot associate all user activity
(such as issues and pull requests) with matching GitLab users.
-If you are an administrator of a self-hosted GitLab instance, you can also use the
+If you are an administrator of a self-managed GitLab instance, you can also use the
[GitHub rake task](../../../administration/raketasks/github_import.md) to import projects from
GitHub without the constraints of a Sidekiq worker.
@@ -40,10 +40,13 @@ in which case it defaults to the default project visibility.
When issues and pull requests are being imported, the importer attempts to find their GitHub authors and
assignees in the database of the GitLab instance (note that pull requests are called "merge requests" in GitLab).
-For this association to succeed, prior to the import, each GitHub author and assignee in the repository must
-have either previously logged in to a GitLab account using the GitHub icon **or** have a GitHub account with
-a [primary email address](https://help.github.com/en/github/setting-up-and-managing-your-github-user-account/setting-your-commit-email-address) that
-matches their GitLab account's email address.
+For this association to succeed, each GitHub author and assignee in the repository
+must meet one of the following conditions prior to the import:
+
+- Have previously logged in to a GitLab account using the GitHub icon.
+- Have a GitHub account with a
+ [primary email address](https://help.github.com/en/github/setting-up-and-managing-your-github-user-account/setting-your-commit-email-address)
+ that matches their GitLab account's email address.
If a user referenced in the project is not found in GitLab's database, the project creator (typically the user
that initiated the import process) is set as the author/assignee, but a note on the issue mentioning the original
@@ -77,7 +80,7 @@ User-matching attempts occur in that order, and if a user is not identified eith
the user account that is performing the import.
NOTE: **Note:**
-If you are using a self-hosted GitLab instance or if you are importing from GitHub Enterprise, this process requires that you have configured
+If you are using a self-managed GitLab instance or if you are importing from GitHub Enterprise, this process requires that you have configured
[GitHub integration][gh-import].
1. From the top navigation bar, click **+** and select **New project**.
@@ -92,7 +95,7 @@ NOTE: **Note:**
Using a personal access token to import projects is not recommended. If you are a GitLab.com user,
you can use a personal access token to import your project from GitHub, but this method cannot
associate all user activity (such as issues and pull requests) with matching GitLab users.
-If you are an administrator of a self-hosted GitLab instance or if you are importing from
+If you are an administrator of a self-managed GitLab instance or if you are importing from
GitHub Enterprise, you cannot use a personal access token.
The [GitHub integration method (above)](#using-the-github-integration) is recommended for all users.
Read more in the [How it works](#how-it-works) section.
@@ -134,7 +137,7 @@ Additionally, you can configure GitLab to send pipeline status updates back GitH
If you import your project using [CI/CD for external repo](../../../ci/ci_cd_for_external_repos/index.md), then both
of the above are automatically configured. **(PREMIUM)**
-## Improving the speed of imports on self-hosted instances
+## Improving the speed of imports on self-managed instances
NOTE: **Note:**
Admin access to the GitLab server is required.
diff --git a/doc/user/project/import/index.md b/doc/user/project/import/index.md
index 571968dd065..a114e23b3e2 100644
--- a/doc/user/project/import/index.md
+++ b/doc/user/project/import/index.md
@@ -22,22 +22,21 @@ repository is too large the import can timeout.
There is also the option of [connecting your external repository to get CI/CD benefits](../../../ci/ci_cd_for_external_repos/index.md). **(PREMIUM)**
-## Migrating from self-hosted GitLab to GitLab.com
+## Migrating from self-managed GitLab to GitLab.com
-If you only need to migrate Git repos, you can [import each project by URL](repo_by_url.md), but issues and merge requests can't be imported.
+If you only need to migrate Git repos, you can [import each project by URL](repo_by_url.md). Issues and merge requests can't be imported.
If you want to retain all metadata like issues and merge requests, you can use
-the [import/export feature](../settings/import_export.md) to export projects from self-hosted GitLab and import those projects into GitLab.com.
+the [import/export feature](../settings/import_export.md) to export projects from self-managed GitLab and import those projects into GitLab.com.
-NOTE: **Note:**
-This approach assumes all users from the self-hosted instance have already been migrated.
-If the users haven't been migrated yet, the user conducting the import
-will take the place of all references to the missing user(s).
+All GitLab user associations (such as comment author) will be changed to the user importing the project. For more information, please see [the import notes](../settings/import_export.md#important-notes).
+
+If you need to migrate all data over, you can leverage our [API](../../../api/README.md) to migrate from self-managed to GitLab.com.
+The order of assets to migrate from a self-managed instance to GitLab.com is the following:
-If you need to migrate all data over, you can leverage our [API](../../../api/README.md) to migrate from self-hosted to GitLab.com.
-The order of assets to migrate from a self-hosted instance to GitLab is the following:
+NOTE: **Note:**
+When migrating to GitLab.com, users must be created manually unless [SCIM](../../../user/group/saml_sso/scim_setup.md) will be used. Creating users with the API is limited to self-managed instances as it requires administrator access.
-1. [Users](../../../api/users.md)
1. [Groups](../../../api/groups.md)
1. [Projects](../../../api/projects.md)
1. [Project variables](../../../api/project_level_variables.md)
@@ -47,12 +46,14 @@ Keep in mind the limitations of the [import/export feature](../settings/import_e
You will still need to migrate your Container Registry over a series of
Docker pulls and pushes and re-run any CI pipelines to retrieve any build artifacts.
-## Migrating between two self-hosted GitLab instances
+## Migrating between two self-managed GitLab instances
-The best method for migrating a project from one GitLab instance to another,
+The best method for migrating from one GitLab instance to another,
perhaps from an old server to a new server for example, is to
-[back up the project](../../../raketasks/backup_restore.md),
+[back up the instance](../../../raketasks/backup_restore.md),
then restore it on the new server.
In the event of merging two GitLab instances together (for example, both instances have existing data on them and one can't be wiped),
-refer to the instructions in [Migrating from self-hosted GitLab to GitLab.com](#migrating-from-self-hosted-gitlab-to-gitlabcom).
+refer to the instructions in [Migrating from self-managed GitLab to GitLab.com](#migrating-from-self-managed-gitlab-to-gitlabcom).
+
+Additionally, you can migrate users using the [Users API](../../../api/users.md) with an admin user.
diff --git a/doc/user/project/index.md b/doc/user/project/index.md
index 87837d50bbe..3a356ec0418 100644
--- a/doc/user/project/index.md
+++ b/doc/user/project/index.md
@@ -66,15 +66,15 @@ When you create a project in GitLab, you'll have access to a large number of
- [Auto Deploy](../../topics/autodevops/index.md#auto-deploy): Configure GitLab CI/CD
to automatically set up your app's deployment
- [Enable and disable GitLab CI](../../ci/enable_or_disable_ci.md)
- - [Pipelines](../../ci/pipelines.md): Configure and visualize
+ - [Pipelines](../../ci/pipelines/index.md): Configure and visualize
your GitLab CI/CD pipelines from the UI
- - [Scheduled Pipelines](pipelines/schedules.md): Schedule a pipeline
+ - [Scheduled Pipelines](../../ci/pipelines/schedules.md): Schedule a pipeline
to start at a chosen time
- - [Pipeline Graphs](../../ci/pipelines.md#visualizing-pipelines): View your
+ - [Pipeline Graphs](../../ci/pipelines/index.md#visualizing-pipelines): View your
entire pipeline from the UI
- - [Job artifacts](pipelines/job_artifacts.md): Define,
+ - [Job artifacts](../../ci/pipelines/job_artifacts.md): Define,
browse, and download job artifacts
- - [Pipeline settings](pipelines/settings.md): Set up Git strategy (choose the default way your repository is fetched from GitLab in a job),
+ - [Pipeline settings](../../ci/pipelines/settings.md): Set up Git strategy (choose the default way your repository is fetched from GitLab in a job),
timeout (defines the maximum amount of time in minutes that a job is able to run), custom path for `.gitlab-ci.yml`, test coverage parsing, pipeline's visibility, and much more
- [Kubernetes cluster integration](clusters/index.md): Connecting your GitLab project
with a Kubernetes cluster
@@ -94,13 +94,13 @@ When you create a project in GitLab, you'll have access to a large number of
your code blocks, overriding GitLab's default choice of language.
- [Badges](badges.md): badges for the project overview.
- [Releases](releases/index.md): a way to track deliverables in your project as snapshot in time of
- the source, build output, and other metadata or artifacts
+ the source, build output, other metadata, and other artifacts
associated with a released version of your code.
- [Conan packages](../packages/conan_repository/index.md): your private Conan repository in GitLab. **(PREMIUM)**
- [Maven packages](../packages/maven_repository/index.md): your private Maven repository in GitLab. **(PREMIUM)**
- [NPM packages](../packages/npm_registry/index.md): your private NPM package registry in GitLab. **(PREMIUM)**
- [Code owners](code_owners.md): specify code owners for certain files **(STARTER)**
-- [License Compliance](../application_security/license_compliance/index.md): approve and blacklist licenses for projects. **(ULTIMATE)**
+- [License Compliance](../compliance/license_compliance/index.md): approve and blacklist licenses for projects. **(ULTIMATE)**
- [Dependency List](../application_security/dependency_list/index.md): view project dependencies. **(ULTIMATE)**
### Project integrations
diff --git a/doc/user/project/insights/index.md b/doc/user/project/insights/index.md
index 4af6f47ce7b..52fcad8dd80 100644
--- a/doc/user/project/insights/index.md
+++ b/doc/user/project/insights/index.md
@@ -61,6 +61,7 @@ bugsCharts:
title: "Charts for bugs"
charts:
- title: "Monthly bugs created"
+ description: "Open bugs created per month"
type: bar
query:
issuable_type: issue
@@ -77,6 +78,7 @@ For example, here's single chart definition:
```yaml
- title: "Monthly bugs created"
+ description: "Open bugs created per month"
type: bar
query:
issuable_type: issue
@@ -96,6 +98,7 @@ The following table lists available parameters for charts:
| Keyword | Description |
|:---------------------------------------------------|:------------|
| [`title`](#title) | The title of the chart. This will be displayed on the Insights page. |
+| [`description`](#description) | A description for the individual chart. This will be displayed above the relevant chart. |
| [`type`](#type) | The type of chart: `bar`, `line` or `stacked-bar`. |
| [`query`](#query) | A hash that defines the conditions for issues / merge requests to be part of the chart. |
@@ -114,6 +117,17 @@ monthlyBugsCreated:
title: "Monthly bugs created"
```
+### `description`
+
+The `description` text is displayed above the chart, but below the title. It's used
+to give extra details regarding the chart, for example:
+
+```yaml
+monthlyBugsCreated:
+ title: "Monthly bugs created"
+ description: "Open bugs created per month"
+```
+
### `type`
`type` is the chart type.
@@ -145,6 +159,7 @@ Example:
```yaml
monthlyBugsCreated:
title: "Monthly bugs created"
+ description: "Open bugs created per month"
type: bar
query:
issuable_type: issue
@@ -283,6 +298,7 @@ a group's insights:
```yaml
monthlyBugsCreated:
title: "Monthly bugs created"
+ description: "Open bugs created per month"
type: bar
query:
issuable_type: issue
@@ -311,6 +327,7 @@ bugsCharts:
title: "Charts for bugs"
charts:
- title: "Monthly bugs created"
+ description: "Open bugs created per month"
type: bar
<<: *projectsOnly
query:
diff --git a/doc/user/project/integrations/gitlab_slack_application.md b/doc/user/project/integrations/gitlab_slack_application.md
index c1e6f93de30..4bc44d1d7d8 100644
--- a/doc/user/project/integrations/gitlab_slack_application.md
+++ b/doc/user/project/integrations/gitlab_slack_application.md
@@ -3,9 +3,9 @@
NOTE: **Note:**
The GitLab Slack application is only configurable for GitLab.com. It will **not**
work for on-premises installations where you can configure the
-[Slack slash commands](slack_slash_commands.md) service instead. We're working
-with Slack on making this configurable for all GitLab installations, but there's
-no ETA.
+[Slack slash commands](slack_slash_commands.md) service instead. We're planning
+to make this configurable for all GitLab installations, but there's
+no ETA; see [#28164](https://gitlab.com/gitlab-org/gitlab/issues/28164).
It was first introduced in GitLab 9.4 and distributed to Slack App Directory in
GitLab 10.2.
@@ -56,7 +56,7 @@ We are working on making this configurable in the future.
For example, to show the issue number `1001` under the `gitlab-org/gitlab`
project, you would do:
-```
+```plaintext
/gitlab gitlab-org/gitlab issue show 1001
```
diff --git a/doc/user/project/integrations/hipchat.md b/doc/user/project/integrations/hipchat.md
index 85c3eda1208..347f7973c84 100644
--- a/doc/user/project/integrations/hipchat.md
+++ b/doc/user/project/integrations/hipchat.md
@@ -25,7 +25,7 @@ allow GitLab to send messages only to *one* room.
1. In the "Send messages to this room by posting this URL" column, you should
see a URL in the format:
-```
+```plaintext
https://api.hipchat.com/v2/room/<room>/notification?auth_token=<token>
```
diff --git a/doc/user/project/integrations/img/grafana_embedded.png b/doc/user/project/integrations/img/grafana_embedded.png
index bc9018c97af..c7946aa4b10 100644
--- a/doc/user/project/integrations/img/grafana_embedded.png
+++ b/doc/user/project/integrations/img/grafana_embedded.png
Binary files differ
diff --git a/doc/user/project/integrations/img/prometheus_cluster_health_embed_v12_9.png b/doc/user/project/integrations/img/prometheus_cluster_health_embed_v12_9.png
new file mode 100644
index 00000000000..c669467757f
--- /dev/null
+++ b/doc/user/project/integrations/img/prometheus_cluster_health_embed_v12_9.png
Binary files differ
diff --git a/doc/user/project/integrations/img/prometheus_dashboard_edit_metric_link_v_12_9.png b/doc/user/project/integrations/img/prometheus_dashboard_edit_metric_link_v_12_9.png
new file mode 100644
index 00000000000..b66b1a9f39b
--- /dev/null
+++ b/doc/user/project/integrations/img/prometheus_dashboard_edit_metric_link_v_12_9.png
Binary files differ
diff --git a/doc/user/project/integrations/img/prometheus_dashboard_label_variable_shorthand.png b/doc/user/project/integrations/img/prometheus_dashboard_label_variable_shorthand.png
new file mode 100644
index 00000000000..15111a97464
--- /dev/null
+++ b/doc/user/project/integrations/img/prometheus_dashboard_label_variable_shorthand.png
Binary files differ
diff --git a/doc/user/project/integrations/img/prometheus_dashboard_label_variables.png b/doc/user/project/integrations/img/prometheus_dashboard_label_variables.png
new file mode 100644
index 00000000000..9b94d0c6afa
--- /dev/null
+++ b/doc/user/project/integrations/img/prometheus_dashboard_label_variables.png
Binary files differ
diff --git a/doc/user/project/integrations/img/prometheus_dashboard_repeated_label.png b/doc/user/project/integrations/img/prometheus_dashboard_repeated_label.png
new file mode 100644
index 00000000000..d43a890f0fa
--- /dev/null
+++ b/doc/user/project/integrations/img/prometheus_dashboard_repeated_label.png
Binary files differ
diff --git a/doc/user/project/integrations/irker.md b/doc/user/project/integrations/irker.md
index 47017843233..cadf01c382a 100644
--- a/doc/user/project/integrations/irker.md
+++ b/doc/user/project/integrations/irker.md
@@ -11,7 +11,7 @@ See the project homepage for further info: <https://gitlab.com/esr/irker>
You will first need an Irker daemon. You can download the Irker code from its
repository on <https://gitlab.com/esr/irker>:
-```
+```shell
git clone https://gitlab.com/esr/irker.git
```
diff --git a/doc/user/project/integrations/jira.md b/doc/user/project/integrations/jira.md
index 0bfb13ba54f..76b1d187698 100644
--- a/doc/user/project/integrations/jira.md
+++ b/doc/user/project/integrations/jira.md
@@ -45,11 +45,11 @@ In order to enable the Jira service in GitLab, you need to first configure the p
#### Jira Server
-When connecting to **Jira Server**, which supports basic authentication, a **username and password** are required. Note that connecting to Jira Server via CAS is not possible. [Set up a user in Jira Server](jira_server_configuration.md) first and then proceed to [Configuring GitLab](#configuring-gitlab).
+**Jira Server** supports basic authentication. When connecting, a **username and password** are required. Note that connecting to Jira Server via CAS is not possible. [Set up a user in Jira Server](jira_server_configuration.md) first and then proceed to [Configuring GitLab](#configuring-gitlab).
#### Jira Cloud
-When connecting to **Jira Cloud**, which supports authentication via API token, an **email and API token**, are required. [Set up a user in Jira Cloud](jira_cloud_configuration.md) first and then proceed to [Configuring GitLab](#configuring-gitlab).
+**Jira Cloud** supports authentication through an API token. When connecting to **Jira Cloud**, an **email and API token** are required. [Set up a user in Jira Cloud](jira_cloud_configuration.md) first and then proceed to [Configuring GitLab](#configuring-gitlab).
### Configuring GitLab
@@ -71,8 +71,8 @@ in the table below.
| Field | Description |
| ----- | ----------- |
-| `Web URL` | The base URL to the Jira instance web interface which is being linked to this GitLab project. E.g., `https://Jira.example.com`. |
-| `Jira API URL` | The base URL to the Jira instance API. Web URL value will be used if not set. E.g., `https://jira-api.example.com`. |
+| `Web URL` | The base URL to the Jira instance web interface which is being linked to this GitLab project. E.g., `https://jira.example.com`. |
+| `Jira API URL` | The base URL to the Jira instance API. The Web URL value will be used if not set. E.g., `https://jira-api.example.com`. Leave this field blank (or use the same value as `Web URL`) if using **Jira Cloud**. |
| `Username/Email` | Created when [configuring Jira step](#configuring-jira). Use `username` for **Jira Server** or `email` for **Jira Cloud**. |
| `Password/API token` |Created in [configuring Jira step](#configuring-jira). Use `password` for **Jira Server** or `API token` for **Jira Cloud**. |
| `Transition ID` | This is the ID of a transition that moves issues to the desired state. It is possible to insert transition IDs separated by `,` or `;`, in which case the issue will be moved to each state, one after another, in the given order. **Closing Jira issues via commits or Merge Requests won't work if you don't set the ID correctly.** |
@@ -119,7 +119,7 @@ link back to GitLab. This means that in comments in merge requests and commits
referencing an issue, e.g., `PROJECT-7`, will add a comment to the Jira issue in the
format:
-```
+```plaintext
USER mentioned this issue in RESOURCE_NAME of [PROJECT_NAME|LINK_TO_COMMENT]:
ENTITY_TITLE
```
diff --git a/doc/user/project/integrations/prometheus.md b/doc/user/project/integrations/prometheus.md
index a1c551db604..c64077c00f1 100644
--- a/doc/user/project/integrations/prometheus.md
+++ b/doc/user/project/integrations/prometheus.md
@@ -76,7 +76,7 @@ The Prometheus server will [automatically detect and monitor](https://prometheus
- `prometheus.io/port` to define the port of the metrics endpoint.
- `prometheus.io/path` to define the path of the metrics endpoint. Defaults to `/metrics`.
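For example, a deployment's pod template could opt in to monitoring with annotations like
these (a sketch; the port and path values are illustrative):
```yaml
# Sketch only: the port and path values are illustrative.
metadata:
  annotations:
    prometheus.io/scrape: 'true'
    prometheus.io/port: '8080'
    prometheus.io/path: '/metrics'
```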
-CPU and Memory consumption is monitored, but requires [naming conventions](prometheus_library/kubernetes.html#specifying-the-environment) in order to determine the environment. If you are using [Auto DevOps](../../../topics/autodevops/), this is handled automatically.
+CPU and Memory consumption is monitored, but requires [naming conventions](prometheus_library/kubernetes.md#specifying-the-environment) in order to determine the environment. If you are using [Auto DevOps](../../../topics/autodevops/), this is handled automatically.
The [NGINX Ingress](../clusters/index.md#installing-applications) that is deployed by GitLab to clusters, is automatically annotated for monitoring providing key response metrics: latency, throughput, and error rates.
@@ -102,16 +102,27 @@ Installing and configuring Prometheus to monitor applications is fairly straight
#### Configuration in GitLab
The actual configuration of Prometheus integration within GitLab is very simple.
-All you will need is the DNS or IP address of the Prometheus server you'd like
+All you will need is the domain name or IP address of the Prometheus server you'd like
to integrate with.
-1. Navigate to the [Integrations page](project_services.md#accessing-the-project-services)
-1. Click the **Prometheus** service
-1. Provide the base URL of your server, for example `http://prometheus.example.com/`
-1. Click **Save changes**
+1. Navigate to the [Integrations page](project_services.md#accessing-the-project-services).
+1. Click the **Prometheus** service.
+1. Provide the domain name or IP address of your server, for example `http://prometheus.example.com/` or `http://192.0.2.1/`.
+1. Click **Save changes**.
![Configure Prometheus Service](img/prometheus_service_configuration.png)
+#### Thanos configuration in GitLab
+
+You can configure [Thanos](https://thanos.io/) as a drop-in replacement for Prometheus
+with GitLab. You will need the domain name or IP address of the Thanos server you'd like
+to integrate with.
+
+1. Navigate to the [Integrations page](project_services.md#accessing-the-project-services).
+1. Click the **Prometheus** service.
+1. Provide the domain name or IP address of your server, for example `http://thanos.example.com/` or `http://192.0.2.1/`.
+1. Click **Save changes**.
+
## Monitoring CI/CD Environments
Once configured, GitLab will attempt to retrieve performance metrics for any
@@ -144,15 +155,30 @@ Multiple metrics can be displayed on the same chart if the fields **Name**, **Ty
#### Query Variables
-GitLab supports a limited set of [CI variables](../../../ci/variables/README.md) in the Prometheus query. This is particularly useful for identifying a specific environment, for example with `CI_ENVIRONMENT_SLUG`. The supported variables are:
+GitLab supports a limited set of [CI variables](../../../ci/variables/README.md) in the Prometheus query. This is particularly useful for identifying a specific environment, for example with `ci_environment_slug`. The supported variables are:
-- CI_ENVIRONMENT_SLUG
-- KUBE_NAMESPACE
+- `ci_environment_slug`
+- `kube_namespace`
+- `ci_project_name`
+- `ci_project_namespace`
+- `ci_project_path`
+- `ci_environment_name`
+
+NOTE: **Note:**
+Variables for Prometheus queries must be lowercase.
There are two methods to specify a variable in a query or dashboard:
1. Variables can be specified using the [Liquid template format](https://help.shopify.com/en/themes/liquid/basics), for example `{{ci_environment_slug}}` ([added](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/20793) in GitLab 12.6).
-1. You can also enclose it in quotation marks with curly braces with a leading percent, for example `"%{ci_environment_slug}"`. This method is deprecated though and support will be [removed in the next major release](https://gitlab.com/gitlab-org/gitlab/issues/37990).
+1. You can also enclose it in quotation marks with curly braces with a leading percent, for example `"%{ci_environment_slug}"`. This method is deprecated though and support will be [removed in the next major release](https://gitlab.com/gitlab-org/gitlab/issues/37990).
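+
+For example, a custom dashboard metric could scope its query to the current environment
+using the Liquid format (a sketch; the metric `id` and PromQL expression are illustrative):
+
+```yaml
+# Sketch only: the metric id and PromQL expression are illustrative.
+metrics:
+  - id: pod_cpu_usage
+    query_range: 'avg(rate(container_cpu_usage_seconds_total{environment="{{ci_environment_slug}}"}[5m]))'
+    label: CPU usage
+    unit: cores
+```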
+
+#### Editing additional metrics from the dashboard
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/208976) in GitLab 12.9.
+
+You can edit existing additional custom metrics by clicking the **{ellipsis_v}** **More actions** dropdown and selecting **Edit metric**.
+
+![Edit metric](img/prometheus_dashboard_edit_metric_link_v_12_9.png)
### Defining custom dashboards per project
@@ -185,14 +211,17 @@ For example:
panel_groups:
  - group: 'Group Title'
    panels:
-      - type: area-chart
-        title: "Chart Title"
-        y_label: "Y-Axis"
-        metrics:
-          - id: metric_of_ages
-            query_range: 'http_requests_total'
-            label: "Metric of Ages"
-            unit: "count"
+      - type: area-chart
+        title: "Chart Title"
+        y_label: "Y-Axis"
+        y_axis:
+          format: number
+          precision: 0
+        metrics:
+          - id: my_metric_id
+            query_range: 'http_requests_total'
+            label: "Instance: {{instance}}, method: {{method}}"
+            unit: "count"
```
The above sample dashboard would display a single area chart. Each file should
@@ -258,19 +287,74 @@ The following tables outline the details of expected properties.
| `type` | enum | no, defaults to `area-chart` | Specifies the chart type to use, can be: `area-chart`, `line-chart` or `anomaly-chart`. |
| `title` | string | yes | Heading for the panel. |
| `y_label` | string | no, but highly encouraged | Y-Axis label for the panel. |
+| `y_axis` | object | no | Y-Axis configuration for the panel. |
| `weight` | number | no, defaults to order in file | Order to appear within the grouping. Lower number means higher priority, which will be higher on the page. Numbers do not need to be consecutive. |
| `metrics` | array | yes | The metrics which should be displayed in the panel. Any number of metrics can be displayed when `type` is `area-chart` or `line-chart`, whereas only 3 can be displayed when `type` is `anomaly-chart`. |
+**Axis (`panels[].y_axis`) properties:**
+
+| Property | Type | Required | Description |
+| ----------- | ------ | ------------------------- | -------------------------------------------------------------------- |
+| `name` | string | no, but highly encouraged | Y-Axis label for the panel. Replaces `y_label` if set. |
+| `format` | string | no, defaults to `number` | Unit format used. See the [full list of units](prometheus_units.md). |
+| `precision` | number | no, defaults to `2` | Number of decimals to display in the number. |
+
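+For example, an axis definition that uses `name` (which replaces `y_label` when set) might look like this minimal sketch:
+
+```yaml
+y_axis:
+  name: "Requests / second"
+  format: number
+  precision: 2
+```
+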
**Metrics (`metrics`) properties:**
| Property | Type | Required | Description |
| ------ | ------ | ------ | ------ |
| `id` | string | no | Used for associating dashboard metrics with database records. Must be unique across dashboard configuration files. Required for [alerting](#setting-up-alerts-for-prometheus-metrics-ultimate) (support not yet enabled, see [relevant issue](https://gitlab.com/gitlab-org/gitlab-foss/issues/60319)). |
| `unit` | string | yes | Defines the unit of the query's return data. |
-| `label` | string | no, but highly encouraged | Defines the legend-label for the query. Should be unique within the panel's metrics. |
+| `label` | string | no, but highly encouraged | Defines the legend-label for the query. Should be unique within the panel's metrics. Can contain time series labels as interpolated variables. |
| `query` | string | yes if `query_range` is not defined | Defines the Prometheus query to be used to populate the chart/panel. If defined, the `query` endpoint of the [Prometheus API](https://prometheus.io/docs/prometheus/latest/querying/api/) will be utilized. |
| `query_range` | string | yes if `query` is not defined | Defines the Prometheus query to be used to populate the chart/panel. If defined, the `query_range` endpoint of the [Prometheus API](https://prometheus.io/docs/prometheus/latest/querying/api/) will be utilized. |
+##### Dynamic labels
+
+Dynamic labels are useful when multiple time series are returned from a Prometheus query.
+
+When a static label is used and a query returns multiple time series, then all the legend items will be labeled the same, which makes identifying each time series difficult:
+
+```yaml
+metrics:
+  - id: my_metric_id
+    query_range: 'http_requests_total'
+    label: "Time Series"
+    unit: "count"
+```
+
+This may render a legend like this:
+
+![repeated legend label chart](img/prometheus_dashboard_repeated_label.png)
+
+For labels to be more explicit, using variables that reflect time series labels is a good practice. The variables will be replaced by the values of the time series labels when the legend is rendered:
+
+```yaml
+metrics:
+  - id: my_metric_id
+    query_range: 'http_requests_total'
+    label: "Instance: {{instance}}, method: {{method}}"
+    unit: "count"
+```
+
+The resulting rendered legend will look like this:
+
+![legend with label variables](img/prometheus_dashboard_label_variables.png)
+
+There is also a shorthand value for dynamic dashboard labels that makes use of only one time series label:
+
+```yaml
+metrics:
+  - id: my_metric_id
+    query_range: 'http_requests_total'
+    label: "Method"
+    unit: "count"
+```
+
+This works by lowercasing the value of `label` and, if the label contains multiple words separated by spaces, replacing those spaces with an underscore (`_`). The transformed value is then checked against the labels of the time series returned by the Prometheus query. If a matching time series label is found, its value is used and rendered in the legend like this:
+
+![legend with label shorthand variable](img/prometheus_dashboard_label_variable_shorthand.png)
+
#### Panel types for dashboards
The below panel types are supported in monitoring dashboards.
@@ -287,10 +371,13 @@ panel_groups:
      - type: area-chart # or line-chart
        title: 'Area Chart Title'
        y_label: "Y-Axis"
+        y_axis:
+          format: number
+          precision: 0
        metrics:
          - id: area_http_requests_total
            query_range: 'http_requests_total'
-            label: "Metric of Ages"
+            label: "Instance: {{instance}}, Method: {{method}}"
+            unit: "count"
```
@@ -439,6 +526,29 @@ Note the following properties:
![single stat panel type](img/prometheus_dashboard_single_stat_panel_type.png)
+###### Percentile based results
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/201946) in GitLab 12.8.
+
+Query results sometimes need to be represented as a percentage value out of 100. You can use the `max_value` property at the root of the panel definition:
+
+```yaml
+dashboard: 'Dashboard Title'
+panel_groups:
+  - group: 'Group Title'
+    panels:
+      - title: "Single Stat"
+        type: "single-stat"
+        max_value: 100
+        metrics:
+          - id: 10
+            query: 'max(go_memstats_alloc_bytes{job="prometheus"})'
+            unit: '%'
+            label: "Total"
+```
+
+For example, if you have a query value of `53.6`, adding `%` as the unit results in a single stat value of `53.6%`, but if the maximum expected value of the query is `120`, the value would be `44.6%`. Adding the `max_value` causes the correct percentage value to display.
+
##### Heatmaps
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/30581) in GitLab 12.5.
@@ -483,17 +593,17 @@ From each of the panels in the dashboard, you can access the context menu by cli
The options are:
-- [View logs](#view-pod-logs-ultimate)
+- [View logs](#view-logs-ultimate)
- [Download CSV](#downloading-data-as-csv)
- [Generate link to chart](#embedding-gitlab-managed-kubernetes-metrics)
- [Alerts](#setting-up-alerts-for-prometheus-metrics-ultimate)
-### View Pod Logs **(ULTIMATE)**
+### View Logs **(ULTIMATE)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/122013) in GitLab 12.8.
-If you have [Pod Logs](../clusters/kubernetes_pod_logs.md) enabled,
-you can navigate from the charts in the dashboard to view Pod Logs by
+If you have [Logs](../clusters/kubernetes_pod_logs.md) enabled,
+you can navigate from the charts in the dashboard to view Logs by
clicking on the context menu in the upper-right corner.
If you use the **Timeline zoom** function at the bottom of the chart, logs will narrow down to the time range you selected.
@@ -608,7 +718,7 @@ Prometheus server.
> [Introduced][ce-29691] in GitLab 12.2.
-It is possible to display metrics charts within [GitLab Flavored Markdown](../../markdown.md#gitlab-flavored-markdown-gfm). The maximum number of embeds allowed in a GitLab Flavored Markdown field is 100.
+It is possible to display metrics charts within [GitLab Flavored Markdown](../../markdown.md#gitlab-flavored-markdown-gfm) fields such as issue or merge request descriptions. The maximum number of embedded charts allowed in a GitLab Flavored Markdown field is 100.
This can be useful if you are sharing an application incident or performance
metrics to others and want to have relevant information directly available.
@@ -646,6 +756,25 @@ It is also possible to embed either the default dashboard metrics or individual
![Embedded Metrics in issue templates](img/embed_metrics_issue_template.png)
+### Embedding Cluster Health Charts **(ULTIMATE)**
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/40997) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 12.9.
+
+[Cluster Health Metrics](../clusters/index.md#monitoring-your-kubernetes-cluster-ultimate) can also be embedded in [GitLab-flavored Markdown](../../markdown.md).
+
+To embed a metric chart, include a link to that chart in the form `https://<root_url>/<project>/-/cluster/<cluster_id>?<query_params>` anywhere that GitLab-flavored Markdown is supported. To generate and copy a link to the chart, follow the instructions in the [Cluster Health Metric documentation](../clusters/index.md#monitoring-your-kubernetes-cluster-ultimate).
+
+The following requirements must be met for the metric to unfurl:
+
+- The `<cluster_id>` must correspond to a real cluster.
+- Prometheus must be monitoring the cluster.
+- The user must be allowed access to the project cluster metrics.
+- The dashboards must be reporting data on the [Cluster Health Page](../clusters/index.md#monitoring-your-kubernetes-cluster-ultimate).
+
+If the above requirements are met, the metric will unfurl as seen below.
+
+![Embedded Cluster Metric in issue descriptions](img/prometheus_cluster_health_embed_v12_9.png)
+
### Embedding Grafana charts
Grafana metrics can be embedded in [GitLab Flavored Markdown](../../markdown.md).
@@ -659,7 +788,7 @@ The sharing dialog within Grafana provides the link, as highlighted below.
![Grafana Direct Linked Rendered Image](img/grafana_live_embed.png)
NOTE: **Note:**
-For this embed to display correctly, the Grafana instance must be available to the target user, either as a public dashboard or on the same network.
+For this embed to display correctly, the Grafana instance must be available to the target user, either as a public dashboard, or on the same network.
Copy the link and add an image tag as [inline HTML](../../markdown.md#inline-html) in your Markdown. You may tweak the query parameters as required. For instance, removing the `&from=` and `&to=` parameters will give you a live chart. Here is example markup for a live chart from GitLab's public dashboard:
@@ -699,8 +828,8 @@ Prerequisites for embedding from a Grafana instance:
![Grafana Metric Panel](img/grafana_panel_v12_5.png)
1. In the upper-left corner of the page, select a specific value for each variable required for the queries in the chart.
![Select Query Variables](img/select_query_variables_v12_5.png)
-1. In Grafana, click on a panel's title, then click **Share** to open the panel's sharing dialog to the **Link** tab.
-1. If your Prometheus queries use Grafana's custom template variables, ensure that "Template variables" and "Current time range" options are toggled to **On**. Of Grafana global template variables, only `$__interval`, `$__from`, and `$__to` are currently supported.
+1. In Grafana, click on a panel's title, then click **Share** to open the panel's sharing dialog to the **Link** tab. If you click the _dashboard's_ share panel instead, GitLab will attempt to embed the first supported panel on the dashboard (if available).
+1. If your Prometheus queries use Grafana's custom template variables, ensure that the "Template variables" option is toggled to **On**. Of Grafana's global template variables, only `$__interval`, `$__from`, and `$__to` are currently supported. Toggle the "Current time range" option to **On** to specify the time range of the chart. Otherwise, the default range will be the last 8 hours.
![Grafana Sharing Dialog](img/grafana_sharing_dialog_v12_5.png)
1. Click **Copy** to copy the URL to the clipboard.
1. In GitLab, paste the URL into a Markdown field and save. The chart will take a few moments to render.
@@ -713,7 +842,7 @@ If the "No data found" screen continues to appear, it could be due to:
- No successful deployments have occurred to this environment.
- Prometheus does not have performance data for this environment, or the metrics
are not labeled correctly. To test this, connect to the Prometheus server and
- [run a query](prometheus_library/kubernetes.html#metrics-supported), replacing `$CI_ENVIRONMENT_SLUG`
+ [run a query](prometheus_library/kubernetes.md#metrics-supported), replacing `$CI_ENVIRONMENT_SLUG`
with the name of your environment.
- You may need to re-add the GitLab predefined common metrics. This can be done by running the [import common metrics rake task](../../../administration/raketasks/maintenance.md#import-common-metrics).
diff --git a/doc/user/project/integrations/prometheus_library/kubernetes.md b/doc/user/project/integrations/prometheus_library/kubernetes.md
index 7433210b553..ca1555c793b 100644
--- a/doc/user/project/integrations/prometheus_library/kubernetes.md
+++ b/doc/user/project/integrations/prometheus_library/kubernetes.md
@@ -13,13 +13,13 @@ integration services must be enabled.
- Average Memory Usage (MB):
- ```
+ ```prometheus
avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024
```
- Average CPU Utilization (%):
- ```
+ ```prometheus
avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job) / count(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}[15m])) by (pod_name))
```
@@ -48,12 +48,12 @@ These metrics expect the [Deployment](https://kubernetes.io/docs/concepts/worklo
- Average Memory Usage (MB)
- ```
+ ```prometheus
avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-canary-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-canary-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024
```
- Average CPU Utilization (%)
- ```
+ ```prometheus
avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-canary-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job) / count(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-canary-(.*)",namespace="%{kube_namespace}"}[15m])) by (pod_name))
```
diff --git a/doc/user/project/integrations/prometheus_units.md b/doc/user/project/integrations/prometheus_units.md
new file mode 100644
index 00000000000..9df9f52ceb1
--- /dev/null
+++ b/doc/user/project/integrations/prometheus_units.md
@@ -0,0 +1,110 @@
+# Unit formats reference
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/201999) in GitLab 12.9.
+
+You can select units to format your charts by adding `format` to your
+[axis configuration](prometheus.md#dashboard-yaml-properties).
+
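+For example, to render chart values in megabytes with one decimal place, a sketch of the axis configuration could look like this:
+
+```yaml
+y_axis:
+  format: megabytes
+  precision: 1
+```
+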
+## Numbers
+
+For generic data, numbers are formatted according to the current locale.
+
+Formats: `number`
+
+**Examples:**
+
+| Data | Displayed |
+| --------- | --------- |
+| `10` | 10 |
+| `1000` | 1,000 |
+| `1000000` | 1,000,000 |
+
+## Percentage
+
+For percentage data, format numbers in the chart with a `%` symbol.
+
+Formats supported: `percent`, `percentHundred`
+
+**Examples:**
+
+| Format | Data | Displayed |
+| ---------------- | ----- | --------- |
+| `percent` | `0.5` | 50% |
+| `percent` | `1` | 100% |
+| `percent` | `2` | 200% |
+| `percentHundred` | `50` | 50% |
+| `percentHundred` | `100` | 100% |
+| `percentHundred` | `200` | 200% |
+
+## Duration
+
+For time durations, format numbers in the chart with a time unit symbol.
+
+Formats supported: `milliseconds`, `seconds`
+
+**Examples:**
+
+| Format | Data | Displayed |
+| -------------- | ------ | --------- |
+| `milliseconds` | `10` | 10ms |
+| `milliseconds` | `500` | 500ms |
+| `milliseconds` | `1000` | 1000ms |
+| `seconds` | `10` | 10s |
+| `seconds` | `500` | 500s |
+| `seconds` | `1000` | 1000s |
+
+## Digital (Metric)
+
+Converts a number of bytes using metric prefixes. It scales to
+use the unit that's the best fit.
+
+Formats supported:
+
+- `decimalBytes`
+- `kilobytes`
+- `megabytes`
+- `gigabytes`
+- `terabytes`
+- `petabytes`
+
+**Examples:**
+
+| Format | Data | Displayed |
+| -------------- | --------- | --------- |
+| `decimalBytes` | `1` | 1B |
+| `decimalBytes` | `1000` | 1kB |
+| `decimalBytes` | `1000000` | 1MB |
+| `kilobytes` | `1` | 1kB |
+| `kilobytes` | `1000` | 1MB |
+| `kilobytes` | `1000000` | 1GB |
+| `megabytes` | `1` | 1MB |
+| `megabytes` | `1000` | 1GB |
+| `megabytes` | `1000000` | 1TB |
+
+## Digital (IEC)
+
+Converts a number of bytes using binary prefixes. It scales to
+use the unit that's the best fit.
+
+Formats supported:
+
+- `bytes`
+- `kibibytes`
+- `mebibytes`
+- `gibibytes`
+- `tebibytes`
+- `pebibytes`
+
+**Examples:**
+
+| Format | Data | Displayed |
+| ----------- | ------------- | --------- |
+| `bytes` | `1` | 1B |
+| `bytes` | `1024` | 1KiB |
+| `bytes` | `1024 * 1024` | 1MiB |
+| `kibibytes` | `1` | 1KiB |
+| `kibibytes` | `1024` | 1MiB |
+| `kibibytes` | `1024 * 1024` | 1GiB |
+| `mebibytes` | `1` | 1MiB |
+| `mebibytes` | `1024` | 1GiB |
+| `mebibytes` | `1024 * 1024` | 1TiB |
diff --git a/doc/user/project/integrations/slack.md b/doc/user/project/integrations/slack.md
index 1dda3a60430..c6a8941785a 100644
--- a/doc/user/project/integrations/slack.md
+++ b/doc/user/project/integrations/slack.md
@@ -16,7 +16,7 @@ The Slack Notifications Service allows your GitLab project to send events (e.g.
1. Select the **Slack notifications** project service to configure it.
1. Check the **Active** checkbox to turn on the service.
1. Check the checkboxes corresponding to the GitLab events you want to send to Slack as a notification.
-1. For each event, optionally enter the Slack channel where you want to send the event. (Do _not_ include the `#` symbol.) If left empty, the event will be sent to the default channel that you configured in the Slack Configuration step.
+1. For each event, optionally enter the Slack channel names where you want to send the event as a comma-separated list. If left empty, the event will be sent to the default channel that you configured in the Slack Configuration step. **Note:** Usernames and private channels are not supported. To send direct messages, use the Member ID found in the user's Slack profile.
1. Paste the **Webhook URL** that you copied from the Slack Configuration step.
1. Optionally customize the Slack bot username that will be sending the notifications.
1. Configure the remaining options and click `Save changes`.
@@ -52,10 +52,10 @@ the below rails console script.
```shell
# start a rails console:
-sudo gitlab-rails console production
+sudo gitlab-rails console -e production
# or for source installs:
-bundle exec rails console production
+bundle exec rails console -e production
```
```ruby
diff --git a/doc/user/project/integrations/webhooks.md b/doc/user/project/integrations/webhooks.md
index 5c9a21e2fbb..9423929d72f 100644
--- a/doc/user/project/integrations/webhooks.md
+++ b/doc/user/project/integrations/webhooks.md
@@ -45,21 +45,10 @@ They are available **per project** for GitLab Community Edition,
and **per project and per group** for **GitLab Enterprise Edition**.
Navigate to the webhooks page by going to your project's
-**Settings ➔ Integrations**.
+**Settings ➔ Webhooks**.
-## Maximum number of webhooks (per tier)
-
-> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/20730) in GitLab 12.6.
-
-A maximum number of project webhooks applies to each [GitLab.com
-tier](https://about.gitlab.com/pricing/), as shown in the following table:
-
-| Tier | Number of webhooks per project |
-|----------|--------------------------------|
-| Free | 100 |
-| Bronze | 100 |
-| Silver | 100 |
-| Gold | 100 |
+NOTE: **Note:**
+On GitLab.com, the [maximum number of webhooks](../../../user/gitlab_com/index.md#maximum-number-of-webhooks) per project, and per group, is limited.
## Use-cases
@@ -174,7 +163,8 @@ X-Gitlab-Event: Push Hook
"commits": [
{
"id": "b6568db1bc1dcd7f8b4d5a946b0b91f9dacd7327",
- "message": "Update Catalan translation to e38cb41.",
+ "message": "Update Catalan translation to e38cb41.\n\nSee https://gitlab.com/gitlab-org/gitlab for more information",
+ "title": "Update Catalan translation to e38cb41.",
"timestamp": "2011-12-12T14:27:31+02:00",
"url": "http://example.com/mike/diaspora/commit/b6568db1bc1dcd7f8b4d5a946b0b91f9dacd7327",
"author": {
@@ -188,6 +178,7 @@ X-Gitlab-Event: Push Hook
{
"id": "da1560886d4f094c3e6c9ef40349f7d38b5d27d7",
"message": "fixed readme",
+ "title": "fixed readme",
"timestamp": "2012-01-03T23:36:29+02:00",
"url": "http://example.com/mike/diaspora/commit/da1560886d4f094c3e6c9ef40349f7d38b5d27d7",
"author": {
@@ -467,12 +458,12 @@ X-Gitlab-Event: Note Hook
},
"commit": {
"id": "cfe32cf61b73a0d5e9f13e774abde7ff789b1660",
- "message": "Add submodule\n\nSigned-off-by: Dmitriy Zaporozhets \u003cdmitriy.zaporozhets@gmail.com\u003e\n",
+ "message": "Add submodule\n\nSigned-off-by: Example User \u003cuser@example.com.com\u003e\n",
"timestamp": "2014-02-27T10:06:20+02:00",
"url": "http://example.com/gitlab-org/gitlab-test/commit/cfe32cf61b73a0d5e9f13e774abde7ff789b1660",
"author": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
+ "name": "Example User",
+ "email": "user@example.com"
}
}
}
@@ -1301,7 +1292,7 @@ Markdown features, like link labels.
## Testing webhooks
-You can trigger the webhook manually. Sample data from the project will be used. Sample data will take from the project.
+You can trigger the webhook manually. Sample data from the project will be used.
> For example: for triggering `Push Events` your project should have at least one commit.
![Webhook testing](img/webhook_testing.png)
diff --git a/doc/user/project/issue_board.md b/doc/user/project/issue_board.md
index 0a5d7805e41..464929a7e6c 100644
--- a/doc/user/project/issue_board.md
+++ b/doc/user/project/issue_board.md
@@ -147,7 +147,7 @@ Create lists for each of your team members and quickly drag-and-drop issues onto
## Permissions
[Reporters and up](../permissions.md) can use all the functionality of the
-Issue Board, that is, create or delete lists and drag issues from one list to another.
+Issue Board to create or delete lists, and drag issues from one list to another.
## GitLab Enterprise features for Issue Boards
@@ -180,18 +180,6 @@ These are shortcuts to your last 4 visited boards.
When you're revisiting an issue board in a project or group with multiple boards,
GitLab will automatically load the last board you visited.
-### Multi-select Issue Cards
-
-As the name suggest, multi-select issue cards allows more than one issue card
-to be dragged and dropped across different lists. This becomes helpful while
-moving and grooming a lot of issues at once.
-
-You can multi-select an issue card by pressing `CTRL` + `Left mouse click` on
-Windows or `CMD` + `Left mouse click` on MacOS. Once done, start by dragging one
-of the issue card you have selected and drop it in the new list you want.
-
-![Multi-select Issue Cards](img/issue_boards_multi_select.png)
-
### Configurable Issue Boards **(STARTER)**
> Introduced in [GitLab Starter Edition 10.2](https://about.gitlab.com/releases/2017/11/22/gitlab-10-2-released/#issue-boards-configuration).
@@ -373,7 +361,7 @@ window where you can see all the issues that do not belong to any list.
Select one or more issues by clicking on the cards and then click **Add issues**
to add them to the selected list. You can limit the issues you want to add to
-the list by filtering by author, assignee, milestone and label.
+the list by filtering by author, assignee, milestone, and label.
![Bulk adding issues to lists](img/issue_boards_add_issues_modal.png)
@@ -419,7 +407,7 @@ You should be able to use the filters on top of your Issue Board to show only
the results you want. This is similar to the filtering used in the issue tracker
since the metadata from the issues and labels are re-used in the Issue Board.
-You can filter by author, assignee, milestone and label.
+You can filter by author, assignee, milestone, and label.
### Creating workflows
@@ -467,6 +455,19 @@ When dragging issues between lists, different behavior occurs depending on the s
| From label `A` list | `A` removed | Issue closed | `A` removed<br/>`B` added | `Bob` assigned |
| From assignee `Alice` list | `Alice` unassigned | Issue closed | `B` added | `Alice` unassigned<br/>`Bob` assigned |
+### Multi-select issue cards
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/18954) in GitLab 12.4.
+
+You can select multiple issue cards, then drag the group to another position within the list, or to another list. This makes it faster to reorder many issues at once.
+
+To select and move multiple cards:
+
+1. Select each card with <kbd>Ctrl</kbd>+`Click` on Windows or Linux, or <kbd>Cmd</kbd>+`Click` on MacOS.
+1. Drag one of the selected cards to another position or list and all selected cards will be moved.
+
+![Multi-select Issue Cards](img/issue_boards_multi_select_v12_4.png)
+
## Tips
A few things to remember:
diff --git a/doc/user/project/issues/csv_import.md b/doc/user/project/issues/csv_import.md
index 56643dcee53..d67b135186f 100644
--- a/doc/user/project/issues/csv_import.md
+++ b/doc/user/project/issues/csv_import.md
@@ -3,7 +3,7 @@
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/23532) in GitLab 11.7.
Issues can be imported to a project by uploading a CSV file with the columns
-`title` and `description`, in that order.
+`title` and `description`.
The user uploading the CSV file will be set as the author of the imported issues.
@@ -31,9 +31,9 @@ to you once the import is complete.
When importing issues from a CSV file, it must be formatted in a certain way:
-- **header row:** CSV files must contain a header row where the first column header
- is `title` and the second is `description`. If additional columns are present, they
- will be ignored.
+- **header row:** CSV files must include the following headers:
+  `title` and `description`. The case of the headers does not matter.
+- **columns:** Data from columns beyond `title` and `description` is not imported.
- **separators:** The column separator is automatically detected from the header row.
Supported separator characters are: commas (`,`), semicolons (`;`), and tabs (`\t`).
The row separator can be either `CRLF` or `LF`.
diff --git a/doc/user/project/issues/design_management.md b/doc/user/project/issues/design_management.md
index 58da77697d8..add466a91b1 100644
--- a/doc/user/project/issues/design_management.md
+++ b/doc/user/project/issues/design_management.md
@@ -37,17 +37,18 @@ Design Management requires that projects are using
[hashed storage](../../../administration/repository_storage_types.md#hashed-storage)
(the default storage type since v10.0).
-### Feature Flags
+If the requirements are not met, the **Designs** tab displays a message to the user.
-- Reference Parsing
+## Supported files
- Designs support short references in Markdown, but this needs to be enabled by setting
- the `:design_management_reference_filter_gfm_pipeline` feature flag.
+Files uploaded must have a file extension of either `png`, `jpg`, `jpeg`,
+`gif`, `bmp`, `tiff`, or `ico`.
+
+Support for [SVG files](https://gitlab.com/gitlab-org/gitlab/issues/12771)
+and [PDFs](https://gitlab.com/gitlab-org/gitlab/issues/32811) is planned for a future release.
## Limitations
-- Files uploaded must have a file extension of either `png`, `jpg`, `jpeg`, `gif`, `bmp`, `tiff` or `ico`.
- The [`svg` extension is not yet supported](https://gitlab.com/gitlab-org/gitlab/issues/12771).
- Design uploads are limited to 10 files at a time.
- Design Management data
[isn't deleted when a project is destroyed](https://gitlab.com/gitlab-org/gitlab/issues/13429) yet.
@@ -69,8 +70,14 @@ Navigate to the **Design Management** page from any issue by clicking the **Desi
To upload design images, click the **Upload Designs** button and select images to upload.
+[Introduced](https://gitlab.com/gitlab-org/gitlab/issues/34353) in [GitLab Premium](https://about.gitlab.com/pricing/) 12.9,
+you can drag and drop designs onto the dedicated dropzone to upload them.
+
+![Drag and drop design uploads](img/design_drag_and_drop_uploads_v12_9.png)
+
Designs with the same filename as an existing uploaded design will create a new version
-of the design, and will replace the previous version.
+of the design, and will replace the previous version. [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/34353) in [GitLab Premium](https://about.gitlab.com/pricing/) 12.9, dropping a design on an existing uploaded design will also create a new version,
+provided the filenames are the same.
Designs cannot be added if the issue has been moved, or its
[discussion is locked](../../discussions/#lock-discussions).
@@ -155,32 +162,3 @@ Different discussions have different badge numbers:
From GitLab 12.5 on, new annotations will be output to the issue activity,
so that everyone involved can participate in the discussion.
-
-## References
-
-GitLab Flavored Markdown supports references to designs. The syntax for this is:
-
- `#123[file.jpg]` - the issue reference, with the filename in square braces
-
-File names may contain a variety of odd characters, so two escaping mechanisms are supported:
-
-### Quoting
-
-File names may be quoted with double quotation marks, eg:
-
- `#123["file.jpg"]`
-
-This is useful if, for instance, your filename has square braces in its name. In this scheme, all
-double quotation marks in the file name need to be escaped with backslashes, and backslashes need
-to be escaped likewise:
-
- `#123["with with \"quote\" marks and a backslash \\.png"]`
-
-### Base64 Encoding
-
-In the case of file names that include HTML elements, you will need to escape these names to avoid
-them being processed as HTML literals. To do this, we support base64 encoding, eg.
-
- The file `<a>.jpg` can be referenced as `#123[base64:PGE+LmpwZwo=]`
-
-Obviously we would advise against using such filenames.
diff --git a/doc/user/project/issues/img/design_drag_and_drop_uploads_v12_9.png b/doc/user/project/issues/img/design_drag_and_drop_uploads_v12_9.png
new file mode 100644
index 00000000000..61ce3692808
--- /dev/null
+++ b/doc/user/project/issues/img/design_drag_and_drop_uploads_v12_9.png
Binary files differ
diff --git a/doc/user/project/issues/img/issue_health_status_v12_9.png b/doc/user/project/issues/img/issue_health_status_v12_9.png
new file mode 100644
index 00000000000..f8922a74fc1
--- /dev/null
+++ b/doc/user/project/issues/img/issue_health_status_v12_9.png
Binary files differ
diff --git a/doc/user/project/issues/img/related_issue_block_v12_8.png b/doc/user/project/issues/img/related_issue_block_v12_8.png
index 02d70868abc..ce261f26ce6 100644
--- a/doc/user/project/issues/img/related_issue_block_v12_8.png
+++ b/doc/user/project/issues/img/related_issue_block_v12_8.png
Binary files differ
diff --git a/doc/user/project/issues/img/related_issues_add_v12_8.png b/doc/user/project/issues/img/related_issues_add_v12_8.png
index 5d6fa218426..8a06d005a5f 100644
--- a/doc/user/project/issues/img/related_issues_add_v12_8.png
+++ b/doc/user/project/issues/img/related_issues_add_v12_8.png
Binary files differ
diff --git a/doc/user/project/issues/img/related_issues_remove_v12_8.png b/doc/user/project/issues/img/related_issues_remove_v12_8.png
index bf35bec1bec..a8dff4c7052 100644
--- a/doc/user/project/issues/img/related_issues_remove_v12_8.png
+++ b/doc/user/project/issues/img/related_issues_remove_v12_8.png
Binary files differ
diff --git a/doc/user/project/issues/index.md b/doc/user/project/issues/index.md
index f28f20cf8b2..628228adc1b 100644
--- a/doc/user/project/issues/index.md
+++ b/doc/user/project/issues/index.md
@@ -6,9 +6,14 @@ Issues are the fundamental medium for collaborating on ideas and planning work i
The GitLab issue tracker is an advanced tool for collaboratively developing ideas, solving problems, and planning work.
-Issues can allow you, your team, and your collaborators to share and discuss proposals
-before, and during, their implementation. However, they can be used for a variety of
-other purposes, customized to your needs and workflow.
+Issues can allow sharing and discussion of proposals before, and during,
+their implementation between:
+
+- You and your team.
+- Outside collaborators.
+
+They can also be used for a variety of other purposes, customized to your
+needs and workflow.
Issues are always associated with a specific project, but if you have multiple projects in a group,
you can also view all the issues collectively at the group level.
@@ -45,7 +50,8 @@ must be set.
</ul>
<li>State</li>
<ul>
- <li>Status (open/closed)</li>
+ <li>State (open or closed)</li>
+ <li>Status (On track, Needs attention, or At risk)</li>
<li>Confidentiality</li>
<li>Tasks (completed vs. outstanding)</li>
</ul>
@@ -104,8 +110,7 @@ view, you can also make certain changes [in bulk](../bulk_editing.md) to the dis
For more information, see the [Issue Data and Actions](issue_data_and_actions.md) page
for a rundown of all the fields and information in an issue.
-You can sort a list of issues several ways, including by issue creation date, milestone due date,
-etc. For more information, see the [Sorting and Ordering Issue Lists](sorting_issue_lists.md) page.
+You can sort a list of issues in several ways, for example by issue creation date or milestone due date. For more information, see the [Sorting and Ordering Issue Lists](sorting_issue_lists.md) page.
### Issue boards
@@ -152,12 +157,38 @@ To prevent duplication of issues for the same topic, GitLab searches for similar
when new issues are being created.
When typing in the title in the **New Issue** page, GitLab searches titles and descriptions
-across all issues the user has access to in the current project. Up 5 similar issues,
+across all issues the user has access to in the current project. Up to five similar issues,
sorted by most recently updated, are displayed below the title box. Note that this feature
requires [GraphQL](../../../api/graphql/index.md) to be enabled.
![Similar issues](img/similar_issues.png)
+---
+
+### Status **(ULTIMATE)**
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/36427) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 12.9.
+
+To help you track the status of your issues, you can assign a status to each issue to flag work that's progressing as planned or needs attention to keep on schedule:
+
+- `On track` (green)
+- `Needs attention` (amber)
+- `At risk` (red)
+
+!["On track" health status on an issue](img/issue_health_status_v12_9.png)
+
+---
+
+#### Enable issue health status
+
+This feature comes with the `:save_issuable_health_status` feature flag disabled by default. However, in some cases
+this feature is incompatible with old configuration. To turn on the feature while configuration is
+migrated, ask a GitLab administrator with Rails console access to run the following command:
+
+```ruby
+Feature.enable(:save_issuable_health_status)
+```
+
## Other Issue actions
- [Create an issue from a template](../../project/description_templates.md#using-the-templates)
diff --git a/doc/user/project/issues/issue_data_and_actions.md b/doc/user/project/issues/issue_data_and_actions.md
index 8bb43b1231f..d6576fc780d 100644
--- a/doc/user/project/issues/issue_data_and_actions.md
+++ b/doc/user/project/issues/issue_data_and_actions.md
@@ -200,8 +200,12 @@ to let them know your reaction without spamming them.
#### 21. Show all activity
You can filter what is displayed in the issue history by clicking on **Show all activity**
-and selecting either **Show comments only**, which only shows threads and hides
-updates to the issue, or **Show history only**, which hides threads and only shows updates.
+and selecting either:
+
+- **Show comments only**, which only shows threads and hides updates to the issue.
+- **Show history only**, which hides threads and only shows updates.
+
+Also:
- You can mention a user or a group present in your GitLab instance with
`@username` or `@groupname` and they will be notified via To-Do items
diff --git a/doc/user/project/labels.md b/doc/user/project/labels.md
index a5a7fc34ef9..cd106b062f4 100644
--- a/doc/user/project/labels.md
+++ b/doc/user/project/labels.md
@@ -210,7 +210,7 @@ means higher priority.
![Drag to change label priority](img/labels_drag_priority_v12_1.gif)
-On the epic, merge request, and issue list pages, for both groups and projects, you
+On the epic, merge request, and issue list pages (for both groups and projects) you
can sort by `Label priority` or `Priority`.
If you sort by `Label priority`, GitLab uses this sort comparison order:
diff --git a/doc/user/project/merge_requests/accessibility_testing.md b/doc/user/project/merge_requests/accessibility_testing.md
new file mode 100644
index 00000000000..bfd62b77314
--- /dev/null
+++ b/doc/user/project/merge_requests/accessibility_testing.md
@@ -0,0 +1,57 @@
+---
+type: reference, howto
+---
+
+# Accessibility Testing
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/25144) in GitLab 12.8.
+
+If your application offers a web interface and you are using
+[GitLab CI/CD](../../../ci/README.md), you can quickly determine the accessibility
+impact of pending code changes.
+
+## Overview
+
+GitLab uses [pa11y](https://pa11y.org/), a free and open source tool for
+measuring the accessibility of web sites, and has built a simple
+[CI job template](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Verify/Accessibility.gitlab-ci.yml).
+This job outputs accessibility violations, warnings, and notices for each page
+analyzed to a file called `accessibility`.
+
+## Configure Accessibility Testing
+
+This example shows how to run [pa11y](https://pa11y.org/)
+on your code with GitLab CI/CD using the [GitLab Accessibility Docker image](https://gitlab.com/gitlab-org/ci-cd/accessibility).
+
+For GitLab 12.9 and later, to define the `a11y` job, you must
+[include](../../../ci/yaml/README.md#includetemplate) the
+[`Accessibility.gitlab-ci.yml` template](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Verify/Accessibility.gitlab-ci.yml)
+included with your GitLab installation, as shown below.
+
+Add the following to your `.gitlab-ci.yml` file:
+
+```yaml
+variables:
+  a11y_urls: "https://about.gitlab.com https://gitlab.com/users/sign_in"
+
+include:
+  - template: "Verify/Accessibility.gitlab-ci.yml"
+```
+
+This configuration creates an `a11y` job in your CI/CD pipeline, runs
+Pa11y against the webpages defined in `a11y_urls`, and builds an HTML report for each.
+
+The report for each URL is saved as an artifact that can be [viewed directly in your browser](../../../ci/pipelines/job_artifacts.md#browsing-artifacts).
+
+A single `accessibility.json` artifact is created and saved along with the individual HTML reports.
+It includes report data for all URLs scanned.
+
+NOTE: **Note:**
+For GitLab versions earlier than 12.9, you can use `include:remote` with a
+link to the [current template in `master`](https://gitlab.com/gitlab-org/gitlab/-/raw/master/lib/gitlab/ci/templates/Verify/Accessibility.gitlab-ci.yml).
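+
+A minimal sketch of that approach, reusing the `a11y_urls` variable from above (the URL value is illustrative):
+
+```yaml
+include:
+  - remote: "https://gitlab.com/gitlab-org/gitlab/-/raw/master/lib/gitlab/ci/templates/Verify/Accessibility.gitlab-ci.yml"
+
+variables:
+  a11y_urls: "https://about.gitlab.com"
+```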
+
+NOTE: **Note:**
+The job definition provided by the template does not support Kubernetes yet.
+
+It is not yet possible to pass configurations into Pa11y via CI configuration. To change anything,
+copy the template to your CI file and make the desired edits.
diff --git a/doc/user/project/merge_requests/cherry_pick_changes.md b/doc/user/project/merge_requests/cherry_pick_changes.md
index 1ba5ece89d5..3e76b9ec6b9 100644
--- a/doc/user/project/merge_requests/cherry_pick_changes.md
+++ b/doc/user/project/merge_requests/cherry_pick_changes.md
@@ -21,6 +21,20 @@ where you can choose to either:
- Cherry-pick the changes directly into the selected branch.
- Create a new merge request with the cherry-picked changes.
+### Cherry-pick tracking
+
+> [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/2675) in GitLab 12.9.
+
+When you cherry-pick a merge commit, GitLab will output a system note to the related merge
+request thread crosslinking the new commit and the existing merge request.
+
+![Cherry-pick tracking in Merge Request timeline](img/cherry_pick_mr_timeline_v12_9.png)
+
+Each deployment's [list of associated merge requests](../../../api/deployments.md#list-of-merge-requests-associated-with-a-deployment) will include cherry-picked merge commits.
+
+NOTE: **Note:**
+We only track cherry-picks executed from GitLab (both the UI and API). Support for [tracking cherry-picked commits through the command line](https://gitlab.com/gitlab-org/gitlab/issues/202215) is planned for a future release.
+
## Cherry-picking a commit
You can cherry-pick a commit from the commit details page:
diff --git a/doc/user/project/merge_requests/code_quality.md b/doc/user/project/merge_requests/code_quality.md
index 91cdddc9300..0525d17cca2 100644
--- a/doc/user/project/merge_requests/code_quality.md
+++ b/doc/user/project/merge_requests/code_quality.md
@@ -6,7 +6,7 @@ type: reference, howto
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/1984) in [GitLab Starter](https://about.gitlab.com/pricing/) 9.3.
-With the help of [GitLab CI/CD](../../../ci/README.md), you can analyze your
+Ensuring your project's code stays simple, readable and easy to contribute to can be problematic. With the help of [GitLab CI/CD](../../../ci/README.md), you can analyze your
source code quality using GitLab Code Quality.
Code Quality:
@@ -14,18 +14,21 @@ Code Quality:
- Uses [Code Climate Engines](https://codeclimate.com), which are
free and open source. Code Quality doesn't require a Code Climate
subscription.
-- Runs in [pipelines](../../../ci/pipelines.md) using an Docker image built in
+- Runs in [pipelines](../../../ci/pipelines/index.md) using a Docker image built in the
[GitLab Code
- Quality](https://gitlab.com/gitlab-org/security-products/codequality) project.
+ Quality](https://gitlab.com/gitlab-org/ci-cd/codequality) project using [default Code Climate configurations](https://gitlab.com/gitlab-org/ci-cd/codequality/-/tree/master/codeclimate_defaults).
- Can make use of a [template](#example-configuration).
- Is available with [Auto
DevOps](../../../topics/autodevops/index.md#auto-code-quality-starter).
+- Can be extended through [Analysis Plugins](https://docs.codeclimate.com/docs/list-of-engines) or a [custom tool](#implementing-a-custom-tool).
Going a step further, GitLab can show the Code Quality report right
in the merge request widget area:
![Code Quality Widget](img/code_quality.png)
+For more information, see the Code Climate list of [Supported Languages for Maintainability](https://docs.codeclimate.com/docs/supported-languages-for-maintainability).
+
## Use cases
For instance, consider the following workflow:
@@ -132,14 +135,14 @@ code_quality:
--env SOURCE_CODE="$PWD"
--volume "$PWD":/code
--volume /var/run/docker.sock:/var/run/docker.sock
- "registry.gitlab.com/gitlab-org/security-products/codequality:$SP_VERSION" /code
+ "registry.gitlab.com/gitlab-org/ci-cd/codequality:$SP_VERSION" /code
artifacts:
reports:
codequality: gl-code-quality-report.json
```
In GitLab 12.6, Code Quality switched to the
-[new versioning scheme](https://gitlab.com/gitlab-org/security-products/codequality/-/merge_requests/38).
+[new versioning scheme](https://gitlab.com/gitlab-org/ci-cd/codequality#versioning-and-release-cycle).
It is highly recommended to include the Code Quality template as shown in the
[example configuration](#example-configuration), which uses the new versioning scheme.
If not using the template, the `SP_VERSION` variable can be hardcoded to use the
@@ -159,7 +162,7 @@ code_quality:
--env SOURCE_CODE="$PWD"
--volume "$PWD":/code
--volume /var/run/docker.sock:/var/run/docker.sock
- "registry.gitlab.com/gitlab-org/security-products/codequality:$SP_VERSION" /code
+ "registry.gitlab.com/gitlab-org/ci-cd/codequality:$SP_VERSION" /code
artifacts:
reports:
codequality: gl-code-quality-report.json
@@ -181,7 +184,7 @@ code_quality:
--env SOURCE_CODE="$PWD"
--volume "$PWD":/code
--volume /var/run/docker.sock:/var/run/docker.sock
- "registry.gitlab.com/gitlab-org/security-products/codequality:$SP_VERSION" /code
+ "registry.gitlab.com/gitlab-org/ci-cd/codequality:$SP_VERSION" /code
artifacts:
paths: [gl-code-quality-report.json]
```
@@ -213,7 +216,7 @@ The Code Quality job supports environment variables that users can set to
configure job execution at runtime.
For a list of available environment variables, see
-[Environment variables](https://gitlab.com/gitlab-org/security-products/codequality/blob/master/README.md#environment-variables).
+[Environment variables](https://gitlab.com/gitlab-org/ci-cd/codequality#environment-variables).
## Implementing a custom tool
@@ -260,11 +263,16 @@ GitLab.
## Code Quality reports
-Once the Code Quality job has completed, GitLab:
+Once the Code Quality job has completed:
-- Checks the generated report.
-- Compares the metrics between the source and target branches.
-- Shows the information right on the merge request.
+- The full list of code quality violations generated by a pipeline is available in the
+ Code Quality tab of the Pipeline Details page.
+- Potential changes to code quality are shown directly in the merge request.
+ The Code Quality widget in the merge request compares the reports from the base and head of the branch,
+ then lists any violations that will be resolved or created when the branch is merged.
+- The full JSON report is available as a
+ [downloadable artifact](../../../ci/pipelines/job_artifacts.md#downloading-artifacts)
+ for the `code_quality` job.
If multiple jobs in a pipeline generate a code quality artifact, only the artifact from
the last created job (the job with the largest job ID) is used. To avoid confusion,
@@ -276,6 +284,10 @@ Code Quality job in your `.gitlab-ci.yml` for the very first time.
Consecutive merge requests will have something to compare to and the Code Quality
report will be shown properly.
+These reports will only be available as long as the Code Quality artifact(s) required to generate
+them are also available. See
+[`artifacts:expire_in`](../../../ci/yaml/README.md#artifactsexpire_in) for more details.
+
<!-- ## Troubleshooting
Include any troubleshooting steps that you can foresee. If you know beforehand what issues
diff --git a/doc/user/project/merge_requests/img/approvals_premium_mr_widget_v12_7.png b/doc/user/project/merge_requests/img/approvals_premium_mr_widget_v12_7.png
index f9348b0eefc..164779a8450 100644
--- a/doc/user/project/merge_requests/img/approvals_premium_mr_widget_v12_7.png
+++ b/doc/user/project/merge_requests/img/approvals_premium_mr_widget_v12_7.png
Binary files differ
diff --git a/doc/user/project/merge_requests/img/cherry_pick_mr_timeline_v12_9.png b/doc/user/project/merge_requests/img/cherry_pick_mr_timeline_v12_9.png
new file mode 100644
index 00000000000..919b576fcc6
--- /dev/null
+++ b/doc/user/project/merge_requests/img/cherry_pick_mr_timeline_v12_9.png
Binary files differ
diff --git a/doc/user/project/merge_requests/img/mr_approvals_by_code_owners_v12_7.png b/doc/user/project/merge_requests/img/mr_approvals_by_code_owners_v12_7.png
index c2e5714e78d..669148a41d8 100644
--- a/doc/user/project/merge_requests/img/mr_approvals_by_code_owners_v12_7.png
+++ b/doc/user/project/merge_requests/img/mr_approvals_by_code_owners_v12_7.png
Binary files differ
diff --git a/doc/user/project/merge_requests/img/scoped_to_protected_branch_v12_8.png b/doc/user/project/merge_requests/img/scoped_to_protected_branch_v12_8.png
index 08a24e9f28e..9446ed66c38 100644
--- a/doc/user/project/merge_requests/img/scoped_to_protected_branch_v12_8.png
+++ b/doc/user/project/merge_requests/img/scoped_to_protected_branch_v12_8.png
Binary files differ
diff --git a/doc/user/project/merge_requests/img/test_coverage_visualization_v12_9.png b/doc/user/project/merge_requests/img/test_coverage_visualization_v12_9.png
new file mode 100644
index 00000000000..c2cd28adc95
--- /dev/null
+++ b/doc/user/project/merge_requests/img/test_coverage_visualization_v12_9.png
Binary files differ
diff --git a/doc/user/project/merge_requests/index.md b/doc/user/project/merge_requests/index.md
index 0617e6bc74d..f296c3fbd8c 100644
--- a/doc/user/project/merge_requests/index.md
+++ b/doc/user/project/merge_requests/index.md
@@ -21,7 +21,7 @@ A. Consider you are a software developer working in a team:
1. You gather feedback from your team
1. You work on the implementation optimizing code with [Code Quality reports](code_quality.md) **(STARTER)**
1. You verify your changes with [JUnit test reports](../../../ci/junit_test_reports.md) in GitLab CI/CD
-1. You avoid using dependencies whose license is not compatible with your project with [License Compliance reports](../../application_security/license_compliance/index.md) **(ULTIMATE)**
+1. You avoid using dependencies whose license is not compatible with your project with [License Compliance reports](../../compliance/license_compliance/index.md) **(ULTIMATE)**
1. You request the [approval](merge_request_approvals.md) from your manager **(STARTER)**
1. Your manager:
1. Pushes a commit with their final review
@@ -48,7 +48,7 @@ about CI/CD pipelines, when present), followed by the discussion threads of the
collaborating with that MR.
MRs also contain navigation tabs from which you can see the discussion happening on the thread,
-the list of commits, the list of pipelines and jobs, the code changes and inline code reviews.
+the list of commits, the list of pipelines and jobs, the code changes, and inline code reviews.
To get started, read the [introduction to merge requests](getting_started.md).
@@ -91,15 +91,18 @@ or link to useful information directly in the merge request page:
| Feature | Description |
|--------------------------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [Accessibility Testing](accessibility_testing.md) | Automatically report A11y violations for changed pages in merge requests |
| [Browser Performance Testing](browser_performance_testing.md) **(PREMIUM)** | Quickly determine the performance impact of pending code changes. |
| [Code Quality](code_quality.md) **(STARTER)** | Analyze your source code quality using the [Code Climate](https://codeclimate.com/) analyzer and show the Code Climate report right in the merge request widget area. |
-| [Display arbitrary job artifacts](../../../ci/yaml/README.md#artifactsexpose_as) | Configure CI pipelines with the `artifacts:expose_as` parameter to directly link to selected [artifacts](../pipelines/job_artifacts.md) in merge requests. |
+| [Display arbitrary job artifacts](../../../ci/yaml/README.md#artifactsexpose_as) | Configure CI pipelines with the `artifacts:expose_as` parameter to directly link to selected [artifacts](../../../ci/pipelines/job_artifacts.md) in merge requests. |
| [GitLab CI/CD](../../../ci/README.md) | Build, test, and deploy your code in a per-branch basis with built-in CI/CD. |
| [JUnit test reports](../../../ci/junit_test_reports.md) | Configure your CI jobs to use JUnit test reports, and let GitLab display a report on the merge request so that it’s easier and faster to identify the failure without having to check the entire job log. |
+| [License Compliance](../../compliance/license_compliance/index.md) **(ULTIMATE)** | Manage the licenses of your dependencies. |
| [Metrics Reports](../../../ci/metrics_reports.md) **(PREMIUM)** | Display the Metrics Report on the merge request so that it's fast and easy to identify changes to important metrics. |
| [Multi-Project pipelines](../../../ci/multi_project_pipelines.md) **(PREMIUM)** | When you set up GitLab CI/CD across multiple projects, you can visualize the entire pipeline, including all cross-project interdependencies. |
| [Pipelines for merge requests](../../../ci/merge_request_pipelines/index.md) | Customize a specific pipeline structure for merge requests in order to speed the cycle up by running only important jobs. |
-| [Pipeline Graphs](../../../ci/pipelines.md#visualizing-pipelines) | View the status of pipelines within the merge request, including the deployment process. |
+| [Pipeline Graphs](../../../ci/pipelines/index.md#visualizing-pipelines) | View the status of pipelines within the merge request, including the deployment process. |
+| [Test Coverage visualization](test_coverage_visualization.md) | See test coverage results for merge requests, within the file diff. |
### Security Reports **(ULTIMATE)**
@@ -111,7 +114,6 @@ generated by scanning and reporting any vulnerabilities found in your project:
| [Container Scanning](../../application_security/container_scanning/index.md) | Analyze your Docker images for known vulnerabilities. |
| [Dynamic Application Security Testing (DAST)](../../application_security/dast/index.md) | Analyze your running web applications for known vulnerabilities. |
| [Dependency Scanning](../../application_security/dependency_scanning/index.md) | Analyze your dependencies for known vulnerabilities. |
-| [License Compliance](../../application_security/license_compliance/index.md) | Manage the licenses of your dependencies. |
| [Static Application Security Testing (SAST)](../../application_security/sast/index.md) | Analyze your source code for known vulnerabilities. |
## Authorization for merge requests
diff --git a/doc/user/project/merge_requests/license_management.md b/doc/user/project/merge_requests/license_management.md
index df5bd073ade..ed81eb8ca10 100644
--- a/doc/user/project/merge_requests/license_management.md
+++ b/doc/user/project/merge_requests/license_management.md
@@ -1,5 +1,5 @@
---
-redirect_to: '../../application_security/license_compliance/index.md'
+redirect_to: '../../compliance/license_compliance/index.md'
---
-This document was moved to [another location](../../application_security/license_compliance/index.md).
+This document was moved to [another location](../../compliance/license_compliance/index.md).
diff --git a/doc/user/project/merge_requests/reviewing_and_managing_merge_requests.md b/doc/user/project/merge_requests/reviewing_and_managing_merge_requests.md
index f3f791a3e22..bb9192e3600 100644
--- a/doc/user/project/merge_requests/reviewing_and_managing_merge_requests.md
+++ b/doc/user/project/merge_requests/reviewing_and_managing_merge_requests.md
@@ -120,7 +120,7 @@ be disabled. If the pipeline fails to deploy, the deployment info will be hidden
![Merge request pipeline](img/merge_request_pipeline.png)
-For more information, [read about pipelines](../../../ci/pipelines.md).
+For more information, [read about pipelines](../../../ci/pipelines/index.md).
### Merge when pipeline succeeds (MWPS)
diff --git a/doc/user/project/merge_requests/squash_and_merge.md b/doc/user/project/merge_requests/squash_and_merge.md
index 82580a1c1fa..924334055b9 100644
--- a/doc/user/project/merge_requests/squash_and_merge.md
+++ b/doc/user/project/merge_requests/squash_and_merge.md
@@ -29,6 +29,9 @@ The squashed commit's commit message will be either:
- Taken from the first multi-line commit message in the merge.
- The merge request's title if no multi-line commit message is found.
+NOTE: **Note:**
+This only takes effect if there are at least 2 commits. As there is nothing to squash, the commit message does not change if there is only 1 commit.
+
It can be customized before merging a merge request.
![A squash commit message editor](img/squash_mr_message.png)
diff --git a/doc/user/project/merge_requests/test_coverage_visualization.md b/doc/user/project/merge_requests/test_coverage_visualization.md
new file mode 100644
index 00000000000..a0a4c5d3743
--- /dev/null
+++ b/doc/user/project/merge_requests/test_coverage_visualization.md
@@ -0,0 +1,78 @@
+---
+type: reference, howto
+---
+
+# Test Coverage Visualization
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/3708) in GitLab 12.9.
+
+With the help of [GitLab CI/CD](../../../ci/README.md), you can collect the test
+coverage information of your favorite testing or coverage-analysis tool, and visualize
+this information inside the file diff view of your merge requests (MRs). This will allow you
+to see which lines are covered by tests, and which lines still require coverage, before the
+MR is merged.
+
+![Test Coverage Visualization Diff View](img/test_coverage_visualization_v12_9.png)
+
+## How test coverage visualization works
+
+Collecting the coverage information is done via GitLab CI/CD's
+[artifacts reports feature](../../../ci/yaml/README.md#artifactsreports).
+You can specify one or more coverage reports to collect, including wildcard paths.
+GitLab will then take the coverage information in all the files and combine it
+together.
+
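+For instance, a minimal sketch of collecting reports from several components with a
+wildcard path (the job name, script, and paths are illustrative; the report format
+itself is described below):
+
+```yaml
+unit-tests:
+  script:
+    - ./run-tests-with-coverage.sh   # hypothetical script writing one Cobertura XML file per component
+  artifacts:
+    reports:
+      cobertura: build/*/cobertura-coverage.xml
+```
+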
+For the coverage analysis to work, you have to provide a properly formatted
+[Cobertura XML](https://cobertura.github.io/cobertura/) report to
+[`artifacts:reports:cobertura`](../../../ci/yaml/README.md#artifactsreportscobertura).
+This format was originally developed for Java, but most coverage analysis frameworks
+for other languages have plugins to add support for it, like:
+
+- [simplecov-cobertura](https://rubygems.org/gems/simplecov-cobertura) (Ruby)
+- [gocover-cobertura](https://github.com/t-yuki/gocover-cobertura) (Golang)
+
+Other coverage analysis frameworks support the format out of the box, for example:
+
+- [Istanbul](https://istanbul.js.org/docs/advanced/alternative-reporters/#cobertura) (JavaScript)
+- [Coverage.py](https://coverage.readthedocs.io/en/coverage-5.0/cmd.html#xml-reporting) (Python)
+
+Once configured, if you create a merge request that triggers a pipeline which collects
+coverage reports, the coverage will be shown in the diff view. This includes reports
+from any job in any stage in the pipeline. The coverage will be displayed for each line:
+
+- `covered` (green): lines which have been checked at least once by tests
+- `no test coverage` (orange): lines which are loaded but never executed
+- no coverage information: lines which are non-instrumented or not loaded
+
+Hovering over the coverage bar will provide further information, such as the number
+of times the line was checked by tests.
+
+## Example test coverage configuration
+
+The following [`gitlab-ci.yml`](../../../ci/yaml/README.md) example uses [Mocha](https://mochajs.org/)
+JavaScript testing and [NYC](https://github.com/istanbuljs/nyc) coverage-tooling to
+generate the coverage artifact:
+
+```yaml
+test:
+  script:
+    - npm install
+    - npx nyc --reporter cobertura mocha
+  artifacts:
+    reports:
+      cobertura: coverage/cobertura-coverage.xml
+```
+
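+The same approach works for other languages. For instance, a minimal sketch for a
+Python project, assuming a `pytest` test suite and using [Coverage.py](https://coverage.readthedocs.io/en/coverage-5.0/cmd.html#xml-reporting)
+to produce the Cobertura report (adjust the install and test commands to your project):
+
+```yaml
+test-python:
+  image: python:3
+  script:
+    - pip install pytest coverage
+    - coverage run -m pytest
+    - coverage xml                   # writes coverage.xml in Cobertura format
+  artifacts:
+    reports:
+      cobertura: coverage.xml
+```
+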
+## Enabling the feature
+
+This feature comes with the `:coverage_report_view` feature flag disabled by
+default, due to performance issues with very large data sets. When
+[the performance issue](https://gitlab.com/gitlab-org/gitlab/issues/37725)
+is resolved, the feature will be enabled by default.
+
+To enable this feature, ask a GitLab administrator with Rails console access to
+run the following command:
+
+```ruby
+Feature.enable(:coverage_report_view)
+```
diff --git a/doc/user/project/milestones/burndown_charts.md b/doc/user/project/milestones/burndown_charts.md
index 75501ab3c85..e9f23899068 100644
--- a/doc/user/project/milestones/burndown_charts.md
+++ b/doc/user/project/milestones/burndown_charts.md
@@ -28,7 +28,7 @@ For an overview, check the video demonstration on [Mapping work versus time with
## Use cases
-Burndown Charts, in general, are used for tracking and analyzing the completion of
+Burndown Charts are generally used for tracking and analyzing the completion of
a milestone. Therefore, their use cases are tied to the
[use you are assigning your milestone to](index.md).
diff --git a/doc/user/project/new_ci_build_permissions_model.md b/doc/user/project/new_ci_build_permissions_model.md
index d1bb23396e4..09af6d05690 100644
--- a/doc/user/project/new_ci_build_permissions_model.md
+++ b/doc/user/project/new_ci_build_permissions_model.md
@@ -68,7 +68,7 @@ Let's consider the following scenario:
A unique job token is generated for each job and provides the user read
access to all projects that would normally be accessible to the user creating that
job. The unique job token does not have any write permissions, but there
-is a [proposal to add support](https://gitlab.com/gitlab-org/gitlab-foss/issues/18106).
+is a [proposal to add support](https://gitlab.com/gitlab-org/gitlab/issues/35067).
We try to make sure that this token doesn't leak by:
@@ -101,14 +101,14 @@ allowing pulling and pushing Docker images from within the CI job.
GitLab would create a special checkout URL like:
-```
+```plaintext
https://gitlab-ci-token:<project-runners-token>@gitlab.com/gitlab-org/gitlab-foss.git
```
Users could then also use it in their CI jobs for all Docker-related
commands to interact with the GitLab Container Registry. For example:
-```
+```shell
docker login -u gitlab-ci-token -p $CI_JOB_TOKEN registry.gitlab.com
```
@@ -173,14 +173,14 @@ As a user:
The [Job environment variable][jobenv] `CI_JOB_TOKEN` can be used to
authenticate any clones of dependent repositories. For example:
-```
+```shell
git clone https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.com/<user>/<mydependentrepo>.git
```
It can also be used for system-wide authentication
(only do this in a Docker container, as it will overwrite `~/.netrc`):
-```
+```shell
echo -e "machine gitlab.com\nlogin gitlab-ci-token\npassword ${CI_JOB_TOKEN}" > ~/.netrc
```
diff --git a/doc/user/project/operations/error_tracking.md b/doc/user/project/operations/error_tracking.md
index e7565835be7..bc1256a1c50 100644
--- a/doc/user/project/operations/error_tracking.md
+++ b/doc/user/project/operations/error_tracking.md
@@ -41,8 +41,8 @@ You may also want to enable Sentry's GitLab integration by following the steps i
NOTE: **Note:**
You will need at least Reporter [permissions](../../permissions.md) to view the Error Tracking list.
-The Error Tracking list may be found at **Operations > Error Tracking** in your project's sidebar.
-Errors can be filtered by title or sorted by Frequency, First Seen or Last Seen. Errors are always sorted in descending order by the field specified.
+You can find the Error Tracking list at **Operations > Error Tracking** in your project's sidebar.
+Here, you can filter errors by title or by status (one of Ignored, Resolved, or Unresolved) and sort in descending order by Frequency, First Seen, or Last Seen. By default, the error list is ordered by Last Seen and filtered to Unresolved errors.
![Error Tracking list](img/error_tracking_list_v12_6.png)
diff --git a/doc/user/project/pages/custom_domains_ssl_tls_certification/ssl_tls_concepts.md b/doc/user/project/pages/custom_domains_ssl_tls_certification/ssl_tls_concepts.md
index cf2f0cbd12c..bc51a9c90d2 100644
--- a/doc/user/project/pages/custom_domains_ssl_tls_certification/ssl_tls_concepts.md
+++ b/doc/user/project/pages/custom_domains_ssl_tls_certification/ssl_tls_concepts.md
@@ -27,7 +27,7 @@ they are static, hence we are not dealing with server-side scripts
nor credit card transactions, then why do we need secure connections?
Back in the 1990s, when HTTPS came out, [SSL](https://en.wikipedia.org/wiki/Transport_Layer_Security#SSL_1.0.2C_2.0_and_3.0) was considered a "special"
-security measure, necessary just for big companies, like banks and shoppings sites
+security measure, necessary just for big companies like banks and shopping sites
with financial transactions.
Now we have a different picture. [According to Josh Aas](https://letsencrypt.org/2015/10/29/phishing-and-malware.html), Executive Director at [ISRG](https://en.wikipedia.org/wiki/Internet_Security_Research_Group):
diff --git a/doc/user/project/pages/getting_started/new_or_existing_website.md b/doc/user/project/pages/getting_started/new_or_existing_website.md
index 49a330ea202..027a238bf02 100644
--- a/doc/user/project/pages/getting_started/new_or_existing_website.md
+++ b/doc/user/project/pages/getting_started/new_or_existing_website.md
@@ -16,7 +16,7 @@ To do so, follow the steps below.
click **New project**, and name it according to the
[Pages domain names](../getting_started_part_one.md#gitlab-pages-default-domain-names).
1. Clone it to your local computer, add your website
- files to your project, add, commit and push to GitLab.
+ files to your project, add, commit, and push to GitLab.
Alternatively, you can run `git init` in your local directory,
add the remote URL:
`git remote add origin git@gitlab.com:namespace/project-name.git`,
diff --git a/doc/user/project/pages/index.md b/doc/user/project/pages/index.md
index cabde1f4e8d..4ebe37394b4 100644
--- a/doc/user/project/pages/index.md
+++ b/doc/user/project/pages/index.md
@@ -30,7 +30,7 @@ instances (GitLab Core, Starter, Premium, and Ultimate).
<div class="col-md-9">
<p style="margin-top: 18px;">
To publish a website with Pages, you can use any Static Site Generator (SSG),
-such as Jekyll, Hugo, Middleman, Harp, Hexo, and Brunch, just to name a few. You can also
+such as Gatsby, Jekyll, Hugo, Middleman, Harp, Hexo, and Brunch, just to name a few. You can also
publish any website written directly in plain HTML, CSS, and JavaScript.</p>
<p>Pages does <strong>not</strong> support dynamic server-side processing, for instance, as <code>.php</code> and <code>.asp</code> require. See this article to learn more about
<a href="https://about.gitlab.com/blog/2016/06/03/ssg-overview-gitlab-pages-part-1-dynamic-x-static/">static websites vs dynamic websites</a>.</p>
diff --git a/doc/user/project/pages/introduction.md b/doc/user/project/pages/introduction.md
index 359a9b4489a..f95ee26a226 100644
--- a/doc/user/project/pages/introduction.md
+++ b/doc/user/project/pages/introduction.md
@@ -297,7 +297,7 @@ For a list of known issues, visit GitLab's [public issue tracker].
[staticgen]: https://www.staticgen.com/
[pages-jekyll]: https://gitlab.com/pages/jekyll
[metarefresh]: https://en.wikipedia.org/wiki/Meta_refresh
-[public issue tracker]: https://gitlab.com/gitlab-org/gitlab-foss/issues?label_name=pages
+[public issue tracker]: https://gitlab.com/gitlab-org/gitlab/-/issues?label_name[]=Category%3APages
[quick start guide]: ../../../ci/quick_start/README.md
[pages-index-guide]: index.md
[pages-quick]: getting_started_part_one.md
diff --git a/doc/user/project/pipelines/job_artifacts.md b/doc/user/project/pipelines/job_artifacts.md
index 5b611fdd825..c23eaebcbe9 100644
--- a/doc/user/project/pipelines/job_artifacts.md
+++ b/doc/user/project/pipelines/job_artifacts.md
@@ -1,214 +1,5 @@
---
-type: reference, howto
+redirect_to: '../../../ci/pipelines/job_artifacts.md'
---
-# Introduction to job artifacts
-
-> - Introduced in GitLab 8.2 and GitLab Runner 0.7.0.
-> - Starting with GitLab 8.4 and GitLab Runner 1.0, the artifacts archive format changed to `ZIP`, and it is now possible to browse its contents, with the added ability of downloading the files separately.
-> - In GitLab 8.17, builds were renamed to jobs.
-> - The artifacts browser will be available only for new artifacts that are sent to GitLab using GitLab Runner version 1.0 and up. It will not be possible to browse old artifacts already uploaded to GitLab.
-
-Job artifacts are a list of files and directories created by a job
-once it finishes. This feature is [enabled by default](../../../administration/job_artifacts.md) in all
-GitLab installations.
-
-Job artifacts created by GitLab Runner are uploaded to GitLab and are downloadable as a single archive using the GitLab UI or the [GitLab API](../../../api/jobs.md#get-job-artifacts).
-
-<i class="fa fa-youtube-play youtube" aria-hidden="true"></i>
-For an overview, watch the video [GitLab CI Pipeline, Artifacts, and Environments](https://www.youtube.com/watch?v=PCKDICEe10s).
-Watch also [GitLab CI pipeline tutorial for beginners](https://www.youtube.com/watch?v=Jav4vbUrqII).
-
-## Defining artifacts in `.gitlab-ci.yml`
-
-A simple example of using the artifacts definition in `.gitlab-ci.yml` would be
-the following:
-
-```yaml
-pdf:
-  script: xelatex mycv.tex
-  artifacts:
-    paths:
-      - mycv.pdf
-    expire_in: 1 week
-```
-
-A job named `pdf` calls the `xelatex` command in order to build a pdf file from
-the latex source file `mycv.tex`. We then define the `artifacts` paths which in
-turn are defined with the `paths` keyword. All paths to files and directories
-are relative to the repository that was cloned during the build.
-
-The artifacts will be uploaded when the job succeeds by default, but can be set to upload
-when the job fails, or always, if the [`artifacts:when`](../../../ci/yaml/README.md#artifactswhen)
-parameter is used. These uploaded artifacts will be kept in GitLab for 1 week as defined
-by the `expire_in` definition. You have the option to keep the artifacts from expiring
-via the [web interface](#browsing-artifacts). If the expiry time is not defined, it defaults
-to the [instance wide setting](../../admin_area/settings/continuous_integration.md#default-artifacts-expiration-core-only).
-
-For more examples on artifacts, follow the [artifacts reference in
-`.gitlab-ci.yml`](../../../ci/yaml/README.md#artifacts).
-
-## Browsing artifacts
-
-> - From GitLab 9.2, PDFs, images, videos and other formats can be previewed directly in the job artifacts browser without the need to download them.
-> - Introduced in [GitLab 10.1][ce-14399], HTML files in a public project can be previewed directly in a new tab without the need to download them when [GitLab Pages](../../../administration/pages/index.md) is enabled. The same applies for textual formats (currently supported extensions: `.txt`, `.json`, and `.log`).
-> - Introduced in [GitLab 12.4][gitlab-16675], artifacts in private projects can be previewed when [GitLab Pages access control](../../../administration/pages/index.md#access-control) is enabled.
-
-After a job finishes, if you visit the job's specific page, there are three
-buttons. You can download the artifacts archive or browse its contents, whereas
-the **Keep** button appears only if you have set an [expiry date] to the
-artifacts in case you changed your mind and want to keep them.
-
-![Job artifacts browser button](img/job_artifacts_browser_button.png)
-
-The archive browser shows the name and the actual file size of each file in the
-archive. If your artifacts contained directories, then you are also able to
-browse inside them.
-
-Below you can see what browsing looks like. In this case we have browsed inside
-the archive and at this point there is one directory, a couple files, and
-one HTML file that you can view directly online when
-[GitLab Pages](../../../administration/pages/index.md) is enabled (opens in a new tab).
-
-![Job artifacts browser](img/job_artifacts_browser.png)
-
-## Downloading artifacts
-
-If you need to download the whole archive, there are buttons in various places
-in the GitLab UI to do this:
-
-1. While on the pipelines page, you can see the download icon for each job's
- artifacts archive in the right corner:
-
- ![Job artifacts in Pipelines page](img/job_artifacts_pipelines_page.png)
-
-1. While on the **Jobs** page, you can see the download icon for each job's
- artifacts archive in the right corner:
-
- ![Job artifacts in Builds page](img/job_artifacts_builds_page.png)
-
-1. While inside a specific job, you are presented with a download button
- along with the one that browses the archive:
-
- ![Job artifacts browser button](img/job_artifacts_browser_button.png)
-
-1. And finally, when browsing an archive you can see the download button at
- the top right corner:
-
- ![Job artifacts browser](img/job_artifacts_browser.png)
-
-## Downloading the latest artifacts
-
-It is possible to download the latest artifacts of a job via a well known URL
-so you can use it for scripting purposes.
-
-NOTE: **Note:**
-The latest artifacts are created by jobs in the **most recent** successful pipeline
-for the specific ref. If you run two types of pipelines for the same ref, the latest
-artifact will be determined by timing. For example, if a branch pipeline created
-by merging a merge request runs at the same time as a scheduled pipeline, the
-latest artifact will be from the pipeline that completed most recently.
-
-Artifacts for other pipelines can be accessed with direct access to them.
-
-The structure of the URL to download the whole artifacts archive is the following:
-
-```plaintext
-https://example.com/<namespace>/<project>/-/jobs/artifacts/<ref>/download?job=<job_name>
-```
-
-To download a single file from the artifacts use the following URL:
-
-```plaintext
-https://example.com/<namespace>/<project>/-/jobs/artifacts/<ref>/raw/<path_to_file>?job=<job_name>
-```
-
-For example, to download the latest artifacts of the job named `coverage` of
-the `master` branch of the `gitlab` project that belongs to the `gitlab-org`
-namespace, the URL would be:
-
-```plaintext
-https://gitlab.com/gitlab-org/gitlab/-/jobs/artifacts/master/download?job=coverage
-```
-
-To download the file `coverage/index.html` from the same
-artifacts use the following URL:
-
-```plaintext
-https://gitlab.com/gitlab-org/gitlab/-/jobs/artifacts/master/raw/coverage/index.html?job=coverage
-```
-
-There is also a URL to browse the latest job artifacts:
-
-```plaintext
-https://example.com/<namespace>/<project>/-/jobs/artifacts/<ref>/browse?job=<job_name>
-```
-
-For example:
-
-```plaintext
-https://gitlab.com/gitlab-org/gitlab/-/jobs/artifacts/master/browse?job=coverage
-```
-
-There is also a URL to specific files, including html files that
-are shown in [GitLab Pages](../../../administration/pages/index.md):
-
-```plaintext
-https://example.com/<namespace>/<project>/-/jobs/artifacts/<ref>/file/<path>?job=<job_name>
-```
-
-For example, when a job `coverage` creates the artifact `htmlcov/index.html`,
-you can access it at:
-
-```plaintext
-https://gitlab.com/gitlab-org/gitlab/-/jobs/artifacts/master/file/htmlcov/index.html?job=coverage
-```
-
-The latest builds are also exposed in the UI in various places. Specifically,
-look for the download button in:
-
-- The main project's page
-- The branches page
-- The tags page
-
-If the latest job has failed to upload the artifacts, you can see that
-information in the UI.
-
-![Latest artifacts button](img/job_latest_artifacts_browser.png)
-
-## Erasing artifacts
-
-DANGER: **Warning:**
-This is a destructive action that leads to data loss. Use with caution.
-
-You can erase a single job via the UI, which will also remove the job's
-artifacts and trace, if you are:
-
-- The owner of the job.
-- A [Maintainer](../../permissions.md#gitlab-cicd-permissions) of the project.
-
-To erase a job:
-
-1. Navigate to a job's page.
-1. Click the trash icon at the top right of the job's trace.
-1. Confirm the deletion.
-
-## Retrieve artifacts of private projects when using GitLab CI
-
-In order to retrieve a job artifact of a different project, you might need to use a private token in order to [authenticate and download](../../../api/jobs.md#get-job-artifacts) the artifacts.
-
-[expiry date]: ../../../ci/yaml/README.md#artifactsexpire_in
-[ce-14399]: https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/14399
-[gitlab-16675]: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/16675
-
-<!-- ## Troubleshooting
-
-Include any troubleshooting steps that you can foresee. If you know beforehand what issues
-one might have when setting this up, or when something is changed, or on upgrading, it's
-important to describe those, too. Think of things that may go wrong and include them here.
-This is important to minimize requests for support, and to avoid doc comments with
-questions that you know someone might ask.
-
-Each scenario can be a third-level heading, e.g. `### Getting error message X`.
-If you have none to add when creating a doc, leave this section in place
-but commented out to help encourage others to add to it in the future. -->
+This document was moved to [another location](../../../ci/pipelines/job_artifacts.md).
diff --git a/doc/user/project/pipelines/schedules.md b/doc/user/project/pipelines/schedules.md
index 08928431881..a92464d6817 100644
--- a/doc/user/project/pipelines/schedules.md
+++ b/doc/user/project/pipelines/schedules.md
@@ -1,136 +1,5 @@
---
-type: reference, howto
+redirect_to: '../../../ci/pipelines/schedules.md'
---
-# Pipeline schedules
-
-> - Introduced in GitLab 9.1 as [Trigger Schedule](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/10533).
-> - [Renamed to Pipeline Schedule](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/10853) in GitLab 9.2.
-
-NOTE: **Note:**
-Cron notation is parsed by [Fugit](https://github.com/floraison/fugit).
-
-Pipelines are normally run based on certain conditions being met. For example, when a branch is pushed to repository.
-
-Pipeline schedules can be used to also run [pipelines](../../../ci/pipelines.md) at specific intervals. For example:
-
-- Every month on the 22nd for a certain branch.
-- Once every day.
-
-In addition to using the GitLab UI, pipeline schedules can be maintained using the
-[Pipeline schedules API](../../../api/pipeline_schedules.md).
-
-## Configuring pipeline schedules
-
-To schedule a pipeline for project:
-
-1. Navigate to the project's **CI / CD > Schedules** page.
-1. Click the **New schedule** button.
-1. Fill in the **Schedule a new pipeline** form.
-1. Click the **Save pipeline schedule** button.
-
-![New Schedule Form](img/pipeline_schedules_new_form.png)
-
-NOTE: **Note:**
-Pipelines execution [timing is dependent](#advanced-configuration) on Sidekiq's own schedule.
-
-In the **Schedules** index page you can see a list of the pipelines that are
-scheduled to run. The next run is automatically calculated by the server GitLab
-is installed on.
-
-![Schedules list](img/pipeline_schedules_list.png)
-
-### Using variables
-
-> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/12328) in GitLab 9.4.
-
-You can pass any number of arbitrary variables and they will be available in
-GitLab CI so that they can be used in your [`.gitlab-ci.yml` file](../../../ci/yaml/README.md).
-
-![Scheduled pipeline variables](img/pipeline_schedule_variables.png)
-
-### Using only and except
-
-To configure that a job can be executed only when the pipeline has been
-scheduled (or the opposite), you can use
-[only and except](../../../ci/yaml/README.md#onlyexcept-basic) configuration keywords.
-
-For example:
-
-```yaml
-job:on-schedule:
-  only:
-    - schedules
-  script:
-    - make world
-
-job:
-  except:
-    - schedules
-  script:
-    - make build
-```
-
-### Advanced configuration
-
-The pipelines won't be executed exactly on schedule because schedules are handled by
-Sidekiq, which runs according to its interval.
-
-For example, only two pipelines will be created per day if:
-
-- You set a schedule to create a pipeline every minute (`* * * * *`).
-- The Sidekiq worker runs on 00:00 and 12:00 every day (`0 */12 * * *`).
-
-To change the Sidekiq worker's frequency:
-
-1. Edit the `gitlab_rails['pipeline_schedule_worker_cron']` value in your instance's `gitlab.rb` file.
-1. [Reconfigure GitLab](../../../administration/restart_gitlab.md#omnibus-gitlab-reconfigure) for the changes to take effect.
-
-For GitLab.com, refer to the [dedicated settings page](../../gitlab_com/index.md#cron-jobs).
-
-## Working with scheduled pipelines
-
-Once configured, GitLab supports many functions for working with scheduled pipelines.
-
-### Running manually
-
-> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/15700) in GitLab 10.4.
-
-To trigger a pipeline schedule manually, click the "Play" button:
-
-![Play Pipeline Schedule](img/pipeline_schedule_play.png)
-
-This will schedule a background job to run the pipeline schedule. A flash
-message will provide a link to the CI/CD Pipeline index page.
-
-NOTE: **Note:**
-To help avoid abuse, users are rate limited to triggering a pipeline once per
-minute.
-
-### Taking ownership
-
-Pipelines are executed as a user, who owns a schedule. This influences what projects and other resources the pipeline has access to.
-
-If a user does not own a pipeline, you can take ownership by clicking the **Take ownership** button.
-The next time a pipeline is scheduled, your credentials will be used.
-
-![Schedules list](img/pipeline_schedules_ownership.png)
-
-NOTE: **Note:**
-If the owner of a pipeline schedule doesn't have the ability to create pipelines
-on the target branch, the schedule will stop creating new pipelines. This can
-happen if, for example, the owner is blocked or removed from the project, or
-the target branch or tag is protected. In this case, someone with sufficient
-privileges must take ownership of the schedule.
-
-<!-- ## Troubleshooting
-
-Include any troubleshooting steps that you can foresee. If you know beforehand what issues
-one might have when setting this up, or when something is changed, or on upgrading, it's
-important to describe those, too. Think of things that may go wrong and include them here.
-This is important to minimize requests for support, and to avoid doc comments with
-questions that you know someone might ask.
-
-Each scenario can be a third-level heading, e.g. `### Getting error message X`.
-If you have none to add when creating a doc, leave this section in place
-but commented out to help encourage others to add to it in the future. -->
+This document was moved to [another location](../../../ci/pipelines/schedules.md).
diff --git a/doc/user/project/pipelines/settings.md b/doc/user/project/pipelines/settings.md
index 96710e957ff..af4cbe13aba 100644
--- a/doc/user/project/pipelines/settings.md
+++ b/doc/user/project/pipelines/settings.md
@@ -1,276 +1,5 @@
---
-type: reference, howto
+redirect_to: '../../../ci/pipelines/settings.md'
---
-# Pipelines settings
-
-To reach the pipelines settings navigate to your project's
-**Settings > CI/CD**.
-
-The following settings can be configured per project.
-
-<i class="fa fa-youtube-play youtube" aria-hidden="true"></i>
-For an overview, watch the video [GitLab CI Pipeline, Artifacts, and Environments](https://www.youtube.com/watch?v=PCKDICEe10s).
-Watch also [GitLab CI pipeline tutorial for beginners](https://www.youtube.com/watch?v=Jav4vbUrqII).
-
-## Git strategy
-
-With Git strategy, you can choose the default way your repository is fetched
-from GitLab in a job.
-
-There are two options. Using:
-
-- `git clone`, which is slower since it clones the repository from scratch
- for every job, ensuring that the local working copy is always pristine.
-- `git fetch`, which is faster as it re-uses the local working copy (falling
- back to clone if it doesn't exist).
-
-The default Git strategy can be overridden by the [GIT_STRATEGY variable](../../../ci/yaml/README.md#git-strategy)
-in `.gitlab-ci.yml`.
-
-## Git shallow clone
-
-> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/28919) in GitLab 12.0.
-
-NOTE: **Note**:
-As of GitLab 12.0, newly created projects will automatically have a default
-`git depth` value of `50`.
-
-It is possible to limit the number of changes that GitLab CI/CD will fetch when cloning
-a repository. Setting a limit to `git depth` can speed up Pipelines execution. Maximum
-allowed value is `1000`.
-
-To disable shallow clone and make GitLab CI/CD fetch all branches and tags each time,
-keep the value empty or set to `0`.
-
-This value can also be [overridden by `GIT_DEPTH`](../../../ci/large_repositories/index.md#shallow-cloning) variable in `.gitlab-ci.yml` file.
-
-## Timeout
-
-Timeout defines the maximum amount of time in minutes that a job is able run.
-This is configurable under your project's **Settings > CI/CD > General pipelines settings**.
-The default value is 60 minutes. Decrease the time limit if you want to impose
-a hard limit on your jobs' running time or increase it otherwise. In any case,
-if the job surpasses the threshold, it is marked as failed.
-
-### Timeout overriding on Runner level
-
-> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/17221) in GitLab 10.7.
-
-Project defined timeout (either specific timeout set by user or the default
-60 minutes timeout) may be [overridden on Runner level](../../../ci/runners/README.md#setting-maximum-job-timeout-for-a-runner).
-
-## Maximum artifacts size **(CORE ONLY)**
-
-For information about setting a maximum artifact size for a project, see
-[Maximum artifacts size](../../admin_area/settings/continuous_integration.md#maximum-artifacts-size-core-only).
-
-## Custom CI configuration path
-
-> - [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/12509) in GitLab 9.4.
-> - [Support for external `.gitlab-ci.yml` locations](https://gitlab.com/gitlab-org/gitlab/issues/14376) introduced in GitLab 12.6.
-
-By default we look for the `.gitlab-ci.yml` file in the project's root
-directory. If needed, you can specify an alternate path and file name, including locations outside the project.
-
-To customize the path:
-
-1. Go to the project's **Settings > CI / CD**.
-1. Expand the **General pipelines** section.
-1. Provide a value in the **Custom CI configuration path** field.
-1. Click **Save changes**.
-
-If the CI configuration is stored within the repository in a non-default
-location, the path must be relative to the root directory. Examples of valid
-paths and file names include:
-
-- `.gitlab-ci.yml` (default)
-- `.my-custom-file.yml`
-- `my/path/.gitlab-ci.yml`
-- `my/path/.my-custom-file.yml`
-
-If the CI configuration will be hosted on an external site, the URL link must end with `.yml`:
-
-- `http://example.com/generate/ci/config.yml`
-
-If the CI configuration will be hosted in a different project within GitLab, the path must be relative
-to the root directory in the other project, with the group and project name added to the end:
-
-- `.gitlab-ci.yml@mygroup/another-project`
-- `my/path/.my-custom-file.yml@mygroup/another-project`
-
-Hosting the configuration file in a separate project allows stricter control of the
-configuration file. For example:
-
-- Create a public project to host the configuration file.
-- Give write permissions on the project only to users who are allowed to edit the file.
-
-Other users and projects will be able to access the configuration file without being
-able to edit it.
-
-## Test coverage parsing
-
-If you use test coverage in your code, GitLab can capture its output in the
-job log using a regular expression. In the pipelines settings, search for the
-"Test coverage parsing" section.
-
-![Pipelines settings test coverage](img/pipelines_settings_test_coverage.png)
-
-Leave blank if you want to disable it or enter a ruby regular expression. You
-can use <https://rubular.com> to test your regex.
-
-If the pipeline succeeds, the coverage is shown in the merge request widget and
-in the jobs table.
-
-![MR widget coverage](img/pipelines_test_coverage_mr_widget.png)
-
-![Build status coverage](img/pipelines_test_coverage_build.png)
-
-A few examples of known coverage tools for a variety of languages can be found
-in the pipelines settings page.
-
-### Removing color codes
-
-Some test coverage tools output with ANSI color codes that won't be
-parsed correctly by the regular expression and will cause coverage
-parsing to fail.
-
-If your coverage tool doesn't provide an option to disable color
-codes in the output, you can pipe the output of the coverage tool through a
-small one line script that will strip the color codes off.
-
-For example:
-
-```shell
-lein cloverage | perl -pe 's/\e\[?.*?[\@-~]//g'
-```
-
-## Visibility of pipelines
-
-Pipeline visibility is determined by:
-
-- Your current [user access level](../../permissions.md).
-- The **Public pipelines** project setting under your project's **Settings > CI/CD > General pipelines**.
-
-NOTE: **Note:**
-If the project visibility is set to **Private**, the [**Public pipelines** setting will have no effect](../../../ci/enable_or_disable_ci.md#per-project-user-setting).
-
-This also determines the visibility of these related features:
-
-- Job output logs
-- Job artifacts
-- The [pipeline security dashboard](../../application_security/security_dashboard/index.md#pipeline-security-dashboard) **(ULTIMATE)**
-
-If **Public pipelines** is enabled (default):
-
-- For **public** projects, anyone can view the pipelines and related features.
-- For **internal** projects, any logged in user can view the pipelines
- and related features.
-- For **private** projects, any project member (guest or higher) can view the pipelines
- and related features.
-
-If **Public pipelines** is disabled:
-
-- For **public** projects, anyone can view the pipelines, but only members
- (reporter or higher) can access the related features.
-- For **internal** projects, any logged in user can view the pipelines.
- However, only members (reporter or higher) can access the job related features.
-- For **private** projects, only project members (reporter or higher)
- can view the pipelines or access the related features.
-
-## Auto-cancel pending pipelines
-
-> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/9362) in GitLab 9.1.
-
-If you want to auto-cancel all pending non-HEAD pipelines on branch, when
-new pipeline will be created (after your Git push or manually from UI),
-check **Auto-cancel pending pipelines** checkbox and save the changes.
-
-## Pipeline Badges
-
-In the pipelines settings page you can find pipeline status and test coverage
-badges for your project. The latest successful pipeline will be used to read
-the pipeline status and test coverage values.
-
-Visit the pipelines settings page in your project to see the exact link to
-your badges, as well as ways to embed the badge image in your HTML or Markdown
-pages.
-
-![Pipelines badges](img/pipelines_settings_badges.png)
-
-### Pipeline status badge
-
-Depending on the status of your job, a badge can have the following values:
-
-- pending
-- running
-- passed
-- failed
-- skipped
-- canceled
-- unknown
-
-You can access a pipeline status badge image using the following link:
-
-```text
-https://example.gitlab.com/<namespace>/<project>/badges/<branch>/pipeline.svg
-```
-
-### Test coverage report badge
-
-GitLab makes it possible to define the regular expression for [coverage report](#test-coverage-parsing),
-that each job log will be matched against. This means that each job in the
-pipeline can have the test coverage percentage value defined.
-
-The test coverage badge can be accessed using following link:
-
-```text
-https://example.gitlab.com/<namespace>/<project>/badges/<branch>/coverage.svg
-```
-
-If you would like to get the coverage report from a specific job, you can add
-the `job=coverage_job_name` parameter to the URL. For example, the following
-Markdown code will embed the test coverage report badge of the `coverage` job
-into your `README.md`:
-
-```markdown
-![coverage](https://gitlab.com/gitlab-org/gitlab-foss/badges/master/coverage.svg?job=coverage)
-```
-
-### Badge styles
-
-Pipeline badges can be rendered in different styles by adding the `style=style_name` parameter to the URL. Currently two styles are available:
-
-#### Flat (default)
-
-```text
-https://example.gitlab.com/<namespace>/<project>/badges/<branch>/coverage.svg?style=flat
-```
-
-![Badge flat style](https://gitlab.com/gitlab-org/gitlab-foss/badges/master/coverage.svg?job=coverage&style=flat)
-
-#### Flat square
-
-> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/30120) in GitLab 11.8.
-
-```text
-https://example.gitlab.com/<namespace>/<project>/badges/<branch>/coverage.svg?style=flat-square
-```
-
-![Badge flat square style](https://gitlab.com/gitlab-org/gitlab-foss/badges/master/coverage.svg?job=coverage&style=flat-square)
-
-## Environment Variables
-
-[Environment variables](../../../ci/variables/README.html#gitlab-cicd-environment-variables) can be set in an environment to be available to a runner.
-
-<!-- ## Troubleshooting
-
-Include any troubleshooting steps that you can foresee. If you know beforehand what issues
-one might have when setting this up, or when something is changed, or on upgrading, it's
-important to describe those, too. Think of things that may go wrong and include them here.
-This is important to minimize requests for support, and to avoid doc comments with
-questions that you know someone might ask.
-
-Each scenario can be a third-level heading, e.g. `### Getting error message X`.
-If you have none to add when creating a doc, leave this section in place
-but commented out to help encourage others to add to it in the future. -->
+This document was moved to [another location](../../../ci/pipelines/settings.md).
diff --git a/doc/user/project/protected_branches.md b/doc/user/project/protected_branches.md
index ab6b4e7f858..14feb189964 100644
--- a/doc/user/project/protected_branches.md
+++ b/doc/user/project/protected_branches.md
@@ -187,7 +187,7 @@ Additionally, direct pushes to the protected branch are denied if a rule is matc
The permission to merge or push to protected branches is used to define if a user can
run CI/CD pipelines and execute actions on jobs that are related to those branches.
-See [Security on protected branches](../../ci/pipelines.md#security-on-protected-branches)
+See [Security on protected branches](../../ci/pipelines/index.md#security-on-protected-branches)
for details about the pipelines security model.
## Changelog
diff --git a/doc/user/project/push_options.md b/doc/user/project/push_options.md
index c52320ef656..e8d94a05e7e 100644
--- a/doc/user/project/push_options.md
+++ b/doc/user/project/push_options.md
@@ -6,8 +6,8 @@ type: reference
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/15643) in GitLab 11.7.
-GitLab supports using [Git push options](https://git-scm.com/docs/git-push#Documentation/git-push.txt--oltoptiongt)
-to perform various actions at the same time as pushing changes.
+GitLab supports using client-side [Git push options](https://git-scm.com/docs/git-push#Documentation/git-push.txt--oltoptiongt)
+to perform various actions at the same time as pushing changes. Additionally, [Push Rules](../../push_rules/push_rules.md) offer server-side control and enforcement options.
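+
+For instance, push options are passed with the `-o` flag. A minimal sketch, using the
+`ci.skip` option to skip pipeline creation for a single push:
+
+```shell
+# Push the current branch without triggering a CI pipeline
+git push -o ci.skip
+```
+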
Currently, there are push options available for:
diff --git a/doc/user/project/releases/img/release_with_milestone_v12_9.png b/doc/user/project/releases/img/release_with_milestone_v12_9.png
new file mode 100644
index 00000000000..53100e33955
--- /dev/null
+++ b/doc/user/project/releases/img/release_with_milestone_v12_9.png
Binary files differ
diff --git a/doc/user/project/releases/img/releases_count_v12_8.png b/doc/user/project/releases/img/releases_count_v12_8.png
index fae3b58d8e3..e70f623d508 100644
--- a/doc/user/project/releases/img/releases_count_v12_8.png
+++ b/doc/user/project/releases/img/releases_count_v12_8.png
Binary files differ
diff --git a/doc/user/project/releases/img/releases_v12_9.png b/doc/user/project/releases/img/releases_v12_9.png
new file mode 100644
index 00000000000..bd23cf76651
--- /dev/null
+++ b/doc/user/project/releases/img/releases_v12_9.png
Binary files differ
diff --git a/doc/user/project/releases/img/upcoming_release_v12_7.png b/doc/user/project/releases/img/upcoming_release_v12_7.png
index 4ff8fad98f9..458aa66f6fe 100644
--- a/doc/user/project/releases/img/upcoming_release_v12_7.png
+++ b/doc/user/project/releases/img/upcoming_release_v12_7.png
Binary files differ
diff --git a/doc/user/project/releases/index.md b/doc/user/project/releases/index.md
index 04667e2adfe..f2013512572 100644
--- a/doc/user/project/releases/index.md
+++ b/doc/user/project/releases/index.md
@@ -13,21 +13,26 @@ assets output by your CI system to use them, not just the raw source
code.
GitLab's **Releases** are a way to track deliverables in your project. Consider them
-a snapshot in time of the source, build output, and other metadata or artifacts
+a snapshot in time of the source, build output, artifacts, and other metadata
associated with a released version of your code.
-There are several ways to create a Release:
-
-- In the interface, when you create a new Git tag.
-- In the interface, by adding a release note to an existing Git tag.
-- Using the [Releases API](../../../api/releases/index.md): we recommend doing this as one of the last
- steps in your CI/CD release pipeline.
-
## Getting started with Releases
Start by giving a [description](#release-description) to the Release and
including its [assets](#release-assets), as follows.
+## Release versioning
+
+Release versions are manually assigned by the user in the Release title. GitLab uses [Semantic Versioning](https://semver.org/) for its releases, and we recommend you do too. Use `(Major).(Minor).(Patch)`, as detailed in the [GitLab Policy for Versioning](../../../policy/maintenance.md#versioning).
+
+For example, for GitLab version `10.5.7`:
+
+- `10` represents the major version. The major release was `10.0.0`, but often referred to as `10.0`.
+- `5` represents the minor version. The minor release was `10.5.0`, but often referred to as `10.5`.
+- `7` represents the patch number.
+
+Any part of the version number can be multiple digits, for example, `13.10.11`.
+
### Release description
Every Release has a description. You can add any text you like, but we recommend
@@ -62,7 +67,43 @@ links from your GitLab instance.
NOTE: **NOTE**
You can manipulate links of each release entry with [Release Links API](../../../api/releases/links.md)
-#### Releases associated with milestones
+#### Permanent links to Release assets
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/27300) in GitLab 12.9.
+
+The assets associated with a Release are accessible through a permanent URL.
+GitLab will always redirect this URL to the actual asset
+location, so even if the assets move to a different location, you can continue
+to use the same URL. This is defined during [link creation](../../../api/releases/links.md#create-a-link) or [updating](../../../api/releases/links.md#update-a-link).
+
+Each asset has a name, a URL of the *actual* asset location, and optionally, a
+`filepath` parameter, which, if you specify it, will create a URL pointing
+to the asset for the Release. The format of the URL is:
+
+```plaintext
+https://host/namespace/project/releases/:release/downloads/:filepath
+```
+
+If you have an asset for the `v11.9.0-rc2` release in the `gitlab-org`
+namespace and `gitlab-runner` project on `gitlab.com`, for example:
+
+```json
+{
+ "name": "linux amd64",
+ "filepath": "/binaries/gitlab-runner-linux-amd64",
+ "url": "https://gitlab-runner-downloads.s3.amazonaws.com/v11.9.0-rc2/binaries/gitlab-runner-linux-amd64"
+}
+```
+
+This asset has a direct link of:
+
+```plaintext
+https://gitlab.com/gitlab-org/gitlab-runner/releases/v11.9.0-rc2/downloads/binaries/gitlab-runner-linux-amd64
+```
+
+The physical location of the asset can change at any time, but the direct link will remain unchanged.
+
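+A sketch of how the asset link above might be created through the
+[Release Links API](../../../api/releases/links.md#create-a-link) (the token and
+project ID are placeholders):
+
+```shell
+curl --request POST \
+     --header "PRIVATE-TOKEN: <your_access_token>" \
+     --data-urlencode "name=linux amd64" \
+     --data-urlencode "filepath=/binaries/gitlab-runner-linux-amd64" \
+     --data-urlencode "url=https://gitlab-runner-downloads.s3.amazonaws.com/v11.9.0-rc2/binaries/gitlab-runner-linux-amd64" \
+     "https://gitlab.com/api/v4/projects/<project_id>/releases/v11.9.0-rc2/assets/links"
+```
+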
+### Releases associated with milestones
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/29020) in GitLab 12.5.
@@ -71,10 +112,11 @@ Releases can optionally be associated with one or more
by including a `milestones` array in your requests to the
[Releases API](../../../api/releases/index.md#create-a-release).
-Releases display this association with the **Milestone** indicator near
-the top of the Release block on the **Project overview > Releases** page.
+Releases display this association with the **Milestone** indicator in the top
+section of the Release block on the **Project overview > Releases** page, along
+with some statistics about the issues in the milestone(s).
-![A Release with one associated milestone](img/release_with_milestone_v12_5.png)
+![A Release with one associated milestone](img/release_with_milestone_v12_9.png)
Below is an example of milestones with no Releases, one Release, and two
Releases, respectively.
@@ -92,7 +134,7 @@ associated with a large number of Releases.
Navigate to **Project > Releases** in order to see the list of releases for a given
project.
-![Releases list](img/releases.png)
+![Releases list](img/releases_v12_9.png)
### Number of Releases
@@ -104,7 +146,7 @@ it takes you to the list of Releases.
![Number of Releases](img/releases_count_v12_8.png "Incremental counter of Releases")
For private projects, the number of Releases is displayed to users with Reporter
-[permissions](../../permissions.md#releases-permissions) or higher. For public projects,
+[permissions](../../permissions.md#project-members-permissions) or higher. For public projects,
it is displayed to every user regardless of their permission level.
### Upcoming Releases
@@ -117,6 +159,29 @@ Release tag. Once the `released_at` date and time has passed, the badge is autom
![An upcoming release](img/upcoming_release_v12_7.png)
+## Creating a Release
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/32812) in GitLab
+ 12.9, Releases can be created directly through the GitLab Releases UI.
+
+NOTE: **Note:**
+Only users with Developer permissions or higher can create Releases.
+Read more about [Release permissions](../../../user/permissions.md#project-members-permissions).
+
+To create a new Release through the GitLab UI:
+
+1. Navigate to **Project overview > Releases** and click the **New release** button.
+1. On the **New Tag** page, fill out the tag details.
+1. Optionally, in the **Release notes** field, enter the Release's description.
+ If you leave this field empty, only a tag will be created.
+ If you populate it, both a tag and a Release will be created.
+1. Click **Create tag**.
+
+If you created a release, you can view it at **Project overview > Releases**.
+
+You can also create a Release using the [Releases API](../../../api/releases/index.md#create-a-release):
+we recommend doing this as one of the last steps in your CI/CD release pipeline.
+
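+A minimal sketch of such a final pipeline step, assuming an API token is stored in a
+`RELEASE_TOKEN` CI/CD variable (the job name, stage, and description are illustrative):
+
+```yaml
+create-release:
+  stage: deploy
+  only:
+    - tags
+  script:
+    # Call the Releases API for the tag that triggered this pipeline
+    - >
+      curl --request POST
+      --header "PRIVATE-TOKEN: ${RELEASE_TOKEN}"
+      --data-urlencode "name=Release ${CI_COMMIT_TAG}"
+      --data-urlencode "tag_name=${CI_COMMIT_TAG}"
+      --data-urlencode "description=Release ${CI_COMMIT_TAG}"
+      "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/releases"
+```
+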
## Editing a release
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/26016) in GitLab 12.6.
diff --git a/doc/user/project/repository/forking_workflow.md b/doc/user/project/repository/forking_workflow.md
index 45a32655f96..927c1db804a 100644
--- a/doc/user/project/repository/forking_workflow.md
+++ b/doc/user/project/repository/forking_workflow.md
@@ -4,11 +4,14 @@ disqus_identifier: 'https://docs.gitlab.com/ee/workflow/forking_workflow.html'
# Project forking workflow
-Forking a project to your own namespace is useful if you have no write
-access to the project you want to contribute to. Even if you do have write
-access or can request it, we recommend working together in the same
-repository since it is simpler. See our [GitLab Flow](../../../topics/gitlab_flow.md)
-document more information about using branches to work together.
+Whenever possible, it's recommended to work in a common Git repository and use
+[branching strategies](../../../topics/gitlab_flow.md) to manage your work. However,
+if you do not have write access to the repository you want to contribute to, you
+can create a fork.
+
+A fork is a personal copy of the repository and all its branches, which you create
+in a namespace of your choice. This way you can make changes in your own fork and
+submit them through a merge request to the repository you don't have access to.
## Creating a fork
@@ -27,7 +30,7 @@ Forking a project is, in most cases, a two-step process.
The fork is created. The permissions you have in the namespace are the permissions you will have in the fork.
-CAUTION: **CAUTION:**
+CAUTION: **Caution:**
In GitLab 12.6 and later, when project owners [reduce a project's visibility](../../../public_access/public_access.md#reducing-visibility),
it **removes the relationship** between a project and all its forks.
@@ -37,10 +40,11 @@ You can use [repository mirroring](repository_mirroring.md) to keep your fork sy
The main difference is that with repository mirroring your remote fork will be automatically kept up-to-date.
-Without mirroring, to work locally you'll have to user `git pull` to update your local repo with the fork on GitLab. You'll have to fetch locally and push it back to the remote repo to update it.
+Without mirroring, to work locally you'll have to use `git pull` to update your local repo
+with the upstream project, then push the changes back to your fork to update it.
CAUTION: **Caution:**
-With mirroring, before approving a merge request you'll likely to be asked to sync, hence automating it is recommend.
+With mirroring, before approving a merge request, you'll likely be asked to sync, so automating this step is recommended.
Read more about [How to keep your fork up to date with its origin](https://about.gitlab.com/blog/2016/12/01/how-to-keep-your-fork-up-to-date-with-its-origin/).
diff --git a/doc/user/project/repository/img/forking_workflow_fork_button.png b/doc/user/project/repository/img/forking_workflow_fork_button.png
index 74b68a7e61c..eea62892232 100644
--- a/doc/user/project/repository/img/forking_workflow_fork_button.png
+++ b/doc/user/project/repository/img/forking_workflow_fork_button.png
Binary files differ
diff --git a/doc/user/project/repository/index.md b/doc/user/project/repository/index.md
index a081a8f5ae4..6b0c58e60bd 100644
--- a/doc/user/project/repository/index.md
+++ b/doc/user/project/repository/index.md
@@ -104,7 +104,7 @@ Some things to note about precedence:
[Jupyter](https://jupyter.org/) Notebook (previously IPython Notebook) files are used for
interactive computing in many fields and contain a complete record of the
-user's sessions and include code, narrative text, equations and rich output.
+user's sessions and include code, narrative text, equations, and rich output.
[Read how to use Jupyter notebooks with GitLab.](jupyter_notebooks/index.md)
diff --git a/doc/user/project/repository/jupyter_notebooks/index.md b/doc/user/project/repository/jupyter_notebooks/index.md
index 6b93ee05a9b..ca82be280d9 100644
--- a/doc/user/project/repository/jupyter_notebooks/index.md
+++ b/doc/user/project/repository/jupyter_notebooks/index.md
@@ -4,7 +4,7 @@
[Jupyter](https://jupyter.org/) Notebook (previously IPython Notebook) files are used for
interactive computing in many fields and contain a complete record of the
-user's sessions and include code, narrative text, equations and rich output.
+user's sessions and include code, narrative text, equations, and rich output.
When added to a repository, Jupyter Notebooks with a `.ipynb` extension will be
rendered to HTML when viewed.
diff --git a/doc/user/project/repository/reducing_the_repo_size_using_git.md b/doc/user/project/repository/reducing_the_repo_size_using_git.md
index a024f8eff97..16bffe5417d 100644
--- a/doc/user/project/repository/reducing_the_repo_size_using_git.md
+++ b/doc/user/project/repository/reducing_the_repo_size_using_git.md
@@ -19,7 +19,7 @@ Unfortunately, it's not so easy and that workflow won't work. Deleting files in
a commit doesn't actually reduce the size of the repo since the earlier commits
and blobs are still around. What you need to do is rewrite history with Git's
[`filter-branch` option](https://git-scm.com/book/en/v2/Git-Tools-Rewriting-History#The-Nuclear-Option:-filter-branch),
-or an open source community-maintained tool like the
+or an open source community-maintained tool like the
[BFG](https://rtyley.github.io/bfg-repo-cleaner/).
Note that even with that method, until `git gc` runs on the GitLab side, the
diff --git a/doc/user/project/repository/repository_mirroring.md b/doc/user/project/repository/repository_mirroring.md
index fe57df38d37..33fa09255e1 100644
--- a/doc/user/project/repository/repository_mirroring.md
+++ b/doc/user/project/repository/repository_mirroring.md
@@ -69,6 +69,11 @@ Changes pushed to files in the repository are automatically pushed to the remote
In the case of a diverged branch, you will see an error indicated at the **Mirroring repositories**
section.
+### Configuring push mirrors through the API
+
+You can also create and modify project push mirrors through the
+[remote mirrors API](../../../api/remote_mirrors.md).
+
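+A sketch of creating a push mirror with that API (the token, instance URL, and mirror
+URL are placeholders):
+
+```shell
+curl --request POST \
+     --header "PRIVATE-TOKEN: <your_access_token>" \
+     --data "url=https://<username>:<token>@example.com/group/project.git" \
+     --data "enabled=true" \
+     "https://gitlab.example.com/api/v4/projects/<project_id>/remote_mirrors"
+```
+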
### Push only protected branches **(CORE)**
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/3350) in [GitLab Starter](https://about.gitlab.com/pricing/) 10.3.
diff --git a/doc/user/project/repository/web_editor.md b/doc/user/project/repository/web_editor.md
index 0e47bf2c902..f1947066279 100644
--- a/doc/user/project/repository/web_editor.md
+++ b/doc/user/project/repository/web_editor.md
@@ -75,7 +75,7 @@ ready.
To keep files in the repository organized it is often helpful to create a new
directory.
-From a project's files page, click the '+' button to the right of the branch selector.
+From a project's files page, click the plus button (`+`) to the right of the branch selector.
Choose **New directory** from the dropdown.
![New directory dropdown](img/web_editor_new_directory_dropdown.png)
@@ -91,14 +91,21 @@ There are multiple ways to create a branch from GitLab's web interface.
### Create a new branch from an issue
-> [Introduced][ce-2808] in GitLab 8.6.
+> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/2808) in GitLab 8.6.
+
+If your development workflow dictates to have an issue for every merge
+request, you can quickly create a branch directly from the issue to speed the process up.
+The new branch, and later its merge request, will be marked as related to this issue.
+Once merged, the MR will automatically close the issue.
+You can see a **Create merge request** dropdown below the issue description.
-In case your development workflow dictates to have an issue for every merge
-request, you can quickly create a branch right on the issue page which will be
-tied with the issue itself. You can see a **Create merge request** dropdown
-below the issue description unless there is already a branch with the same
-name or a referenced merge request or your project (still) has an active
-[fork relationship](../settings/index.md#advanced-settings).
+NOTE: **Note:**
+You won't see the **Create merge request** button if a branch with the same name
+already exists, a merge request already references the issue, or your project
+has an active fork relationship.
+If you would like to make this button appear, a possible workaround is to [remove your project's
+fork relationship](../settings/index.md#removing-a-fork-relationship). Once removed, the fork
+relationship cannot be restored and you will no longer be able to send merge requests to the source.
![Create Button](img/web_editor_new_branch_from_issue_create_button_v12_6.png)
@@ -181,8 +188,6 @@ If you'd prefer _not_ to use your primary email address for commits created
through the web editor, you can choose to use another of your linked email
addresses from the **User Settings > Edit Profile** page.
-[ce-2808]: https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/2808
-
<!-- ## Troubleshooting
Include any troubleshooting steps that you can foresee. If you know beforehand what issues
diff --git a/doc/user/project/repository/x509_signed_commits/index.md b/doc/user/project/repository/x509_signed_commits/index.md
index 421c2f60350..19238839a5e 100644
--- a/doc/user/project/repository/x509_signed_commits/index.md
+++ b/doc/user/project/repository/x509_signed_commits/index.md
@@ -7,7 +7,7 @@ type: concepts, howto
[x509](https://en.wikipedia.org/wiki/X.509) is a standard format for public key
certificates issued by a public or private Public Key Infrastructure (PKI).
Personal x509 certificates are used for authentication or signing purposes
-such as SMIME, but beside that, Git supports signing of commits and tags
+such as SMIME, but Git also supports signing of commits and tags
with x509 certificates in a similar way as with [GPG](../gpg_signed_commits/index.md).
The main difference is the trust anchor which is the PKI for x509 certificates
instead of a web of trust with GPG.
diff --git a/doc/user/project/settings/import_export.md b/doc/user/project/settings/import_export.md
index cdf6a789ec2..9ff9f76dadb 100644
--- a/doc/user/project/settings/import_export.md
+++ b/doc/user/project/settings/import_export.md
@@ -12,6 +12,7 @@ See also:
- [Project import/export API](../../../api/project_import_export.md)
- [Project import/export administration rake tasks](../../../administration/raketasks/project_import_export.md) **(CORE ONLY)**
+- [Group import/export API](../../../api/group_import_export.md)
To set up a project import/export:
@@ -28,10 +29,11 @@ Note the following:
- Exports are stored in a temporary [shared directory](../../../development/shared_files.md)
and are deleted every 24 hours by a specific worker.
- Group members are exported as project members, as long as the user has
- maintainer or admin access to the group where the exported project lives. Import admins should map users by email address.
+ maintainer or admin access to the group where the exported project lives.
+- Project members with owner access will be imported as maintainers.
+- Using an admin account to import will map users by email address (self-managed only).
  Otherwise, a supplementary comment is left to mention the original author, and
  the MRs, notes, or issues will be owned by the importer.
-- Project members with owner access will be imported as maintainers.
- If an imported project contains merge requests originating from forks,
then new branches associated with such merge requests will be created
within a project during the import/export. Thus, the number of branches
@@ -41,22 +43,23 @@ Note the following:
The following table lists updates to Import/Export:
-| GitLab version | Import/Export schema version |
-| ---------------- | --------------------- |
-| 11.1 to current | 0.2.4 |
-| 10.8 | 0.2.3 |
-| 10.4 | 0.2.2 |
-| 10.3 | 0.2.1 |
-| 10.0 | 0.2.0 |
-| 9.4.0 | 0.1.8 |
-| 9.2.0 | 0.1.7 |
-| 8.17.0 | 0.1.6 |
-| 8.13.0 | 0.1.5 |
-| 8.12.0 | 0.1.4 |
-| 8.10.3 | 0.1.3 |
-| 8.10.0 | 0.1.2 |
-| 8.9.5 | 0.1.1 |
-| 8.9.0 | 0.1.0 |
+| Exporting GitLab version | Importing GitLab version |
+| -------------------------- | -------------------------- |
+| 11.7 to current | 11.7 to current |
+| 11.1 to 11.6 | 11.1 to 11.6 |
+| 10.8 to 11.0 | 10.8 to 11.0 |
+| 10.4 to 10.7 | 10.4 to 10.7 |
+| 10.3 | 10.3 |
+| 10.0 to 10.2 | 10.0 to 10.2 |
+| 9.4 to 9.6 | 9.4 to 9.6 |
+| 9.2 to 9.3 | 9.2 to 9.3 |
+| 8.17 to 9.1 | 8.17 to 9.1 |
+| 8.13 to 8.16 | 8.13 to 8.16 |
+| 8.12 | 8.12 |
+| 8.10.3 to 8.11 | 8.10.3 to 8.11 |
+| 8.10.0 to 8.10.2 | 8.10.0 to 8.10.2 |
+| 8.9.5 to 8.9.11 | 8.9.5 to 8.9.11 |
+| 8.9.0 to 8.9.4 | 8.9.0 to 8.9.4 |
Projects can be exported and imported only between versions of GitLab with matching Import/Export versions.
@@ -75,6 +78,7 @@ The following items will be exported:
- Design Management files and data **(PREMIUM)**
- LFS objects
- Issue boards
+- Pipelines history
The following items will NOT be exported:
@@ -89,7 +93,7 @@ The following items will NOT be exported:
NOTE: **Note:**
For more details on the specific data persisted in a project export, see the
-[`import_export.yml`](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/import_export/import_export.yml) file.
+[`import_export.yml`](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/import_export/project/import_export.yml) file.
## Exporting a project and its data
@@ -139,4 +143,4 @@ To help avoid abuse, users are rate limited to:
| ---------------- | --------------------------- |
| Export | 1 project per 5 minutes |
| Download export | 10 projects per 10 minutes |
-| Import | 30 projects per 10 minutes |
+| Import | 30 projects per 5 minutes |
diff --git a/doc/user/project/settings/index.md b/doc/user/project/settings/index.md
index 158aaf143a9..1deb66aacfd 100644
--- a/doc/user/project/settings/index.md
+++ b/doc/user/project/settings/index.md
@@ -115,7 +115,7 @@ no longer actively maintained. Projects that have been archived can also be
unarchived. Only project Owners and Admin users have the
[permissions](../../permissions.md#project-members-permissions) to archive a project.
-When a project is archived, the repository, issues, merge requests and all
+When a project is archived, the repository, issues, merge requests, and all
other features are read-only. Archived projects are also hidden
in project listings.
@@ -204,7 +204,7 @@ namespace if needed.
#### Remove a project
NOTE: **Note:**
-Only project owners and admins have [permissions]((../../permissions.md#project-members-permissions) to remove a project.
+Only project owners and admins have [permissions](../../permissions.md#project-members-permissions) to remove a project.
To remove a project:
@@ -215,9 +215,12 @@ To remove a project:
This action either:
- Removes a project including all associated resources (issues, merge requests etc).
-- Since [GitLab 12.6](https://gitlab.com/gitlab-org/gitlab/issues/32935), on [Premium or Silver](https://about.gitlab.com/pricing/) or higher tiers, marks a project for deletion. The deletion will happen 7 days later by default, but this can be changed in the [instance settings](../../admin_area/settings/visibility_and_access_controls.md#default-deletion-adjourned-period-premium-only).
+- Since [GitLab 12.6](https://gitlab.com/gitlab-org/gitlab/issues/32935), on
+ [GitLab Premium or GitLab.com Silver](https://about.gitlab.com/pricing/) or higher tiers, marks a project for
+ deletion. The deletion will happen 7 days later by default, but this can be changed in the
+ [instance settings](../../admin_area/settings/visibility_and_access_controls.md#default-deletion-adjourned-period-premium-only).
-### Restore a project **(PREMIUM)**
+#### Restore a project **(PREMIUM)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/32935) in GitLab 12.6.
@@ -234,6 +237,9 @@ If you want to use the fork for yourself and don't need to send
[merge requests](../merge_requests.md) to the upstream project,
you can safely remove the fork relationship.
+CAUTION: **Caution:**
+Once removed, the fork relationship cannot be restored. You will no longer be able to send merge requests to the source, and if anyone has forked your project, their fork will also lose the relationship.
+
To do so:
1. Navigate to your project's **Settings > General > Advanced**.
diff --git a/doc/user/project/web_ide/img/commit_changes_v12_3.png b/doc/user/project/web_ide/img/commit_changes_v12_3.png
deleted file mode 100644
index e7dffbc7655..00000000000
--- a/doc/user/project/web_ide/img/commit_changes_v12_3.png
+++ /dev/null
Binary files differ
diff --git a/doc/user/project/web_ide/img/commit_changes_v12_9.png b/doc/user/project/web_ide/img/commit_changes_v12_9.png
new file mode 100644
index 00000000000..48491360626
--- /dev/null
+++ b/doc/user/project/web_ide/img/commit_changes_v12_9.png
Binary files differ
diff --git a/doc/user/project/web_ide/index.md b/doc/user/project/web_ide/index.md
index 25bc7edc2bc..aeffd21d48c 100644
--- a/doc/user/project/web_ide/index.md
+++ b/doc/user/project/web_ide/index.md
@@ -30,11 +30,11 @@ the Web IDE will make your direct editing even easier.
The Web IDE currently provides:
- Basic syntax colorization for a variety of programming, scripting and markup
- languages such as XML, PHP, C#, C++, Markdown, Java, VB, Batch, Python, Ruby
+ languages such as XML, PHP, C#, C++, Markdown, Java, VB, Batch, Python, Ruby,
and Objective-C.
- IntelliSense and validation support (displaying errors and warnings, providing
smart completions, formatting, and outlining) for some languages. For example:
-TypeScript, JavaScript, CSS, LESS, SCSS, JSON and HTML.
+ TypeScript, JavaScript, CSS, LESS, SCSS, JSON, and HTML.
Because the Web IDE is based on the [Monaco Editor](https://microsoft.github.io/monaco-editor/),
you can find a more complete list of supported languages in the
@@ -43,30 +43,33 @@ you can find a more complete list of supported languages in the
NOTE: **Note:**
Single file editing is based on the [Ace Editor](https://ace.c9.io).
-## Stage and commit changes
+## Commit changes
-After making your changes, click the **Commit** button in the bottom left to
-review the list of changed files. If you're using GitLab 12.6 or older versions,
-click on each file to review the changes and tick the item to stage a file.
+> - [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/4539) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 10.4 and [brought to GitLab Core](https://gitlab.com/gitlab-org/gitlab-foss/issues/44157) in 10.7.
+> - From [GitLab 12.7 onwards](https://gitlab.com/gitlab-org/gitlab/issues/33441),
+files were automatically staged.
+> - From [GitLab 12.9 onwards](https://gitlab.com/gitlab-org/gitlab/-/issues/196609), support for staging files was removed
+to prevent loss of unstaged data. All of your current changes must be either
+committed or discarded.
-From [GitLab 12.7 onward](https://gitlab.com/gitlab-org/gitlab/issues/33441),
-all your files will be automatically staged. You still have the option to unstage
-changes in case you want to submit them in multiple smaller commits. To unstage
-a change, simply click the **Unstage** button when a staged file is open, or click
-the undo icon next to **Staged changes** to unstage all changes.
+After making your changes, click the **Commit** button on the bottom-left to
+review the list of changed files.
Once you have finalized your changes, you can add a commit message, commit the
-staged changes and directly create a merge request. In case you don't have write
+changes, and directly create a merge request. If you don't have write
access to the selected branch, you will see a warning, but still be able to create
a new branch and start a merge request.
-![Commit changes](img/commit_changes_v12_3.png)
+To discard a change in a particular file, click the **Discard changes** button on that
+file in the changes tab. To discard all changes, click the trash icon in the
+top-right corner of the changes sidebar.
+
+![Commit changes](img/commit_changes_v12_9.png)
## Reviewing changes
Before you commit your changes, you can compare them with the previous commit
-by switching to the review mode or selecting the file from the staged files
-list.
+by switching to the review mode or selecting the file from the list of changes.
An additional review mode is available when you open a merge request, which
shows you a preview of the merge request diff if you commit your changes.
diff --git a/doc/user/project/wiki/index.md b/doc/user/project/wiki/index.md
index 42c622e4786..6fda2514849 100644
--- a/doc/user/project/wiki/index.md
+++ b/doc/user/project/wiki/index.md
@@ -38,7 +38,7 @@ automatically. For example, a title of `docs/my-page` will create a wiki
page with a path `/wikis/docs/my-page`.
Once you enter the page name, it's time to fill in its content. GitLab wikis
-support Markdown, RDoc, AsciiDoc and Org. For Markdown based pages, all the
+support Markdown, RDoc, AsciiDoc, and Org. For Markdown based pages, all the
[Markdown features](../../markdown.md) are supported and for links there is
some [wiki specific](../../markdown.md#wiki-specific-markdown) behavior.
@@ -60,6 +60,14 @@ if you clone the wiki repository locally. All uploaded files prior to GitLab
11.3 are stored in GitLab itself. If you want them to be part of the wiki's Git
repository, you will have to upload them again.
+### Special characters in page titles
+
+Wiki pages are stored as files in a Git repository, so certain characters have a special meaning:
+
+- Spaces are converted into hyphens when storing a page.
+- Hyphens (`-`) are converted back into spaces when displaying a page.
+- Slashes (`/`) can't be used, because they're used as the path separator.
+
### Length restrictions for file and directory names
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/24364) in GitLab 12.8.
@@ -121,7 +129,7 @@ The changes of a wiki page over time are recorded in the wiki's Git repository,
and you can view them by clicking the **Page history** button.
From the history page you can see the revision of the page (Git commit SHA), its
-author, the commit message, when it was last updated and the page markup format.
+author, the commit message, when it was last updated, and the page markup format.
To see what a previous version of the page looked like, click on a revision
number.
diff --git a/doc/user/reserved_names.md b/doc/user/reserved_names.md
index b160cb03f94..3e53bc0fd81 100644
--- a/doc/user/reserved_names.md
+++ b/doc/user/reserved_names.md
@@ -69,7 +69,6 @@ Currently the following names are reserved as top level groups:
- `invites`
- `jwt`
- `login`
-- `notification_settings`
- `oauth`
- `profile`
- `projects`
diff --git a/doc/user/search/index.md b/doc/user/search/index.md
index 4e0e0734200..407578fd4df 100644
--- a/doc/user/search/index.md
+++ b/doc/user/search/index.md
@@ -41,6 +41,7 @@ groups:
- [Label](../project/labels.md)
- My-reaction
- Confidential
+ - Epic ([Introduced](https://gitlab.com/gitlab-org/gitlab/issues/195704) in GitLab 12.9)
- Search for this text
1. Select or type the operator to use for filtering the attribute. The following operators are
available:
@@ -118,7 +119,7 @@ Your [To-Do List](../todos.md#gitlab-to-do-list) can be searched by "to do" and
You can [filter](../todos.md#filtering-your-to-do-list) them per project,
author, type, and action. Also, you can sort them by
[**Label priority**](../../user/project/labels.md#label-priority),
-**Last created** and **Oldest created**.
+**Last created**, and **Oldest created**.
## Projects
diff --git a/doc/user/shortcuts.md b/doc/user/shortcuts.md
index 1df1a8a8ba6..dcc4753a794 100644
--- a/doc/user/shortcuts.md
+++ b/doc/user/shortcuts.md
@@ -34,7 +34,7 @@ These shortcuts are available in most areas of GitLab
| <kbd>p</kbd> + <kbd>b</kbd> | Show/hide the Performance Bar. |
Additionally, the following shortcuts are available when editing text in text fields,
-for example comments, replies, or issue and merge request descriptions:
+for example comments, replies, issue descriptions, and merge request descriptions:
| Keyboard Shortcut | Description |
| ---------------------------------------------------------------------- | ----------- |
diff --git a/generator_templates/active_record/migration/create_table_migration.rb b/generator_templates/active_record/migration/create_table_migration.rb
index 4a6bea2796c..91df1b0d98f 100644
--- a/generator_templates/active_record/migration/create_table_migration.rb
+++ b/generator_templates/active_record/migration/create_table_migration.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# See https://docs.gitlab.com/ee/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class <%= migration_class_name %> < ActiveRecord::Migration[<%= ActiveRecord::Migration.current_version %>]
diff --git a/generator_templates/active_record/migration/migration.rb b/generator_templates/active_record/migration/migration.rb
index 153280cd4b7..2a57edba65e 100644
--- a/generator_templates/active_record/migration/migration.rb
+++ b/generator_templates/active_record/migration/migration.rb
@@ -1,10 +1,11 @@
# frozen_string_literal: true
-# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# See https://docs.gitlab.com/ee/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class <%= migration_class_name %> < ActiveRecord::Migration[<%= ActiveRecord::Migration.current_version %>]
- include Gitlab::Database::MigrationHelpers
+ # Uncomment the following include if you require helper functions:
+ # include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
diff --git a/generator_templates/rails/post_deployment_migration/migration.rb b/generator_templates/rails/post_deployment_migration/migration.rb
index 4c1685545b5..b36cf877f0e 100644
--- a/generator_templates/rails/post_deployment_migration/migration.rb
+++ b/generator_templates/rails/post_deployment_migration/migration.rb
@@ -1,10 +1,11 @@
# frozen_string_literal: true
-# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# See https://docs.gitlab.com/ee/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class <%= migration_class_name %> < ActiveRecord::Migration[<%= ActiveRecord::Migration.current_version %>]
- include Gitlab::Database::MigrationHelpers
+ # Uncomment the following include if you require helper functions:
+ # include Gitlab::Database::MigrationHelpers
DOWNTIME = false
diff --git a/jest.config.js b/jest.config.js
index d07c034e88e..e9e1a598608 100644
--- a/jest.config.js
+++ b/jest.config.js
@@ -35,7 +35,7 @@ const moduleNameMapper = {
'^ee_else_ce(/.*)$': '<rootDir>/app/assets/javascripts$1',
'^helpers(/.*)$': '<rootDir>/spec/frontend/helpers$1',
'^vendor(/.*)$': '<rootDir>/vendor/assets/javascripts$1',
- '\\.(jpg|jpeg|png|svg)$': '<rootDir>/spec/frontend/__mocks__/file_mock.js',
+ '\\.(jpg|jpeg|png|svg|css)$': '<rootDir>/spec/frontend/__mocks__/file_mock.js',
'emojis(/.*).json': '<rootDir>/fixtures/emojis$1.json',
'^spec/test_constants$': '<rootDir>/spec/frontend/helpers/test_constants',
'^jest/(.*)$': '<rootDir>/spec/frontend/$1',
@@ -55,6 +55,14 @@ if (IS_EE) {
collectCoverageFrom.push(rootDirEE.replace('$1', '/**/*.{js,vue}'));
}
+const coverageDirectory = () => {
+ if (process.env.CI_NODE_INDEX && process.env.CI_NODE_TOTAL) {
+ return `<rootDir>/coverage-frontend/jest-${process.env.CI_NODE_INDEX}-${process.env.CI_NODE_TOTAL}`;
+ }
+
+ return '<rootDir>/coverage-frontend/';
+};
+
// eslint-disable-next-line import/no-commonjs
module.exports = {
clearMocks: true,
@@ -62,7 +70,7 @@ module.exports = {
moduleFileExtensions: ['js', 'json', 'vue'],
moduleNameMapper,
collectCoverageFrom,
- coverageDirectory: '<rootDir>/coverage-frontend/',
+ coverageDirectory: coverageDirectory(),
coverageReporters: ['json', 'lcov', 'text-summary', 'clover'],
cacheDirectory: '<rootDir>/tmp/cache/jest',
modulePathIgnorePatterns: ['<rootDir>/.yarn-cache/'],
@@ -74,7 +82,7 @@ module.exports = {
'^.+\\.js$': 'babel-jest',
'^.+\\.vue$': 'vue-jest',
},
- transformIgnorePatterns: ['node_modules/(?!(@gitlab/ui)/)'],
+ transformIgnorePatterns: ['node_modules/(?!(@gitlab/ui|bootstrap-vue|three|monaco-editor)/)'],
timers: 'fake',
testEnvironment: '<rootDir>/spec/frontend/environment.js',
testEnvironmentOptions: {
diff --git a/lib/api/admin/sidekiq.rb b/lib/api/admin/sidekiq.rb
new file mode 100644
index 00000000000..a700bea0fd7
--- /dev/null
+++ b/lib/api/admin/sidekiq.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+module API
+ module Admin
+ class Sidekiq < Grape::API
+ before { authenticated_as_admin! }
+
+ namespace 'admin' do
+ namespace 'sidekiq' do
+ namespace 'queues' do
+ desc 'Drop jobs matching the given metadata from the Sidekiq queue'
+ params do
+ Labkit::Context::KNOWN_KEYS.each do |key|
+ optional key, type: String, allow_blank: false
+ end
+
+ at_least_one_of(*Labkit::Context::KNOWN_KEYS)
+ end
+ delete ':queue_name' do
+ result =
+ Gitlab::SidekiqQueue
+ .new(params[:queue_name])
+ .drop_jobs!(declared_params, timeout: 30)
+
+ present result
+ rescue Gitlab::SidekiqQueue::NoMetadataError
+ render_api_error!("Invalid metadata: #{declared_params}", 400)
+ rescue Gitlab::SidekiqQueue::InvalidQueueError
+ not_found!(params[:queue_name])
+ end
+ end
+ end
+ end
+ end
+ end
+end
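
The file above defines `DELETE /api/v4/admin/sidekiq/queues/:queue_name`, which drops queued jobs whose metadata matches the given parameters. The following is a minimal, hedged sketch of calling it from Ruby; the host, queue name, metadata key, and token are placeholders, and it assumes `user` is one of the keys in `Labkit::Context::KNOWN_KEYS`.

require 'net/http'
require 'uri'
require 'json'

# Placeholder instance, queue, and metadata; the token must belong to an admin.
uri = URI('https://gitlab.example.com/api/v4/admin/sidekiq/queues/authorized_projects?user=jsmith')
request = Net::HTTP::Delete.new(uri)
request['PRIVATE-TOKEN'] = ENV.fetch('GITLAB_ADMIN_TOKEN')

response = Net::HTTP.start(uri.host, uri.port, use_ssl: true) { |http| http.request(request) }

puts response.code                               # 400 for unknown metadata, 404 for an unknown queue
puts JSON.parse(response.body) if response.code == '200'
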
diff --git a/lib/api/api.rb b/lib/api/api.rb
index 9a1e0e3f8e9..02b3fe7e03e 100644
--- a/lib/api/api.rb
+++ b/lib/api/api.rb
@@ -45,7 +45,7 @@ module API
before do
Gitlab::ApplicationContext.push(
- user: -> { current_user },
+ user: -> { @current_user },
project: -> { @project },
namespace: -> { @group },
caller_id: route.origin
@@ -110,6 +110,7 @@ module API
# Keep in alphabetical order
mount ::API::AccessRequests
+ mount ::API::Admin::Sidekiq
mount ::API::Appearance
mount ::API::Applications
mount ::API::Avatar
@@ -121,6 +122,7 @@ module API
mount ::API::Commits
mount ::API::CommitStatuses
mount ::API::DeployKeys
+ mount ::API::DeployTokens
mount ::API::Deployments
mount ::API::Environments
mount ::API::ErrorTracking
diff --git a/lib/api/api_guard.rb b/lib/api/api_guard.rb
index 0769e464d26..5cab13f001e 100644
--- a/lib/api/api_guard.rb
+++ b/lib/api/api_guard.rb
@@ -50,17 +50,13 @@ module API
user = find_user_from_sources
return unless user
+ # Sessions are enforced to be unavailable for API calls, so ignore them for admin mode
+ Gitlab::Auth::CurrentUserMode.bypass_session!(user.id) if Feature.enabled?(:user_mode_in_session)
+
unless api_access_allowed?(user)
forbidden!(api_access_denied_message(user))
end
- # Set admin mode for API requests (if admin)
- if Feature.enabled?(:user_mode_in_session)
- current_user_mode = Gitlab::Auth::CurrentUserMode.new(user)
-
- current_user_mode.enable_sessionless_admin_mode!
- end
-
user
end
@@ -154,19 +150,13 @@ module API
end
class AdminModeMiddleware < ::Grape::Middleware::Base
- def initialize(app, **options)
- super
- end
+ def after
+ # Use a Grape middleware since the Grape `after` blocks might run
+ # before we are finished rendering the `Grape::Entity` classes
+ Gitlab::Auth::CurrentUserMode.reset_bypass_session! if Feature.enabled?(:user_mode_in_session)
- def call(env)
- if Feature.enabled?(:user_mode_in_session)
- session = {}
- Gitlab::Session.with_session(session) do
- app.call(env)
- end
- else
- app.call(env)
- end
+ # Explicit nil is needed or the api call return value will be overwritten
+ nil
end
end
end
diff --git a/lib/api/broadcast_messages.rb b/lib/api/broadcast_messages.rb
index af7c69f857e..42e7dc751f0 100644
--- a/lib/api/broadcast_messages.rb
+++ b/lib/api/broadcast_messages.rb
@@ -36,6 +36,7 @@ module API
optional :font, type: String, desc: 'Foreground color'
optional :target_path, type: String, desc: 'Target path'
optional :broadcast_type, type: String, values: BroadcastMessage.broadcast_types.keys, desc: 'Broadcast type. Defaults to banner', default: -> { 'banner' }
+ optional :dismissable, type: Boolean, desc: 'Is dismissable'
end
post do
authenticated_as_admin!
@@ -75,6 +76,7 @@ module API
optional :font, type: String, desc: 'Foreground color'
optional :target_path, type: String, desc: 'Target path'
optional :broadcast_type, type: String, values: BroadcastMessage.broadcast_types.keys, desc: 'Broadcast Type'
+ optional :dismissable, type: Boolean, desc: 'Is dismissable'
end
put ':id' do
authenticated_as_admin!
diff --git a/lib/api/deploy_tokens.rb b/lib/api/deploy_tokens.rb
new file mode 100644
index 00000000000..2b1c485785b
--- /dev/null
+++ b/lib/api/deploy_tokens.rb
@@ -0,0 +1,149 @@
+# frozen_string_literal: true
+
+module API
+ class DeployTokens < Grape::API
+ include PaginationParams
+
+ helpers do
+ def scope_params
+ scopes = params.delete(:scopes)
+
+ result_hash = {}
+ result_hash[:read_registry] = scopes.include?('read_registry')
+ result_hash[:read_repository] = scopes.include?('read_repository')
+ result_hash
+ end
+ end
+
+ desc 'Return all deploy tokens' do
+ detail 'This feature was introduced in GitLab 12.9.'
+ success Entities::DeployToken
+ end
+ params do
+ use :pagination
+ end
+ get 'deploy_tokens' do
+ service_unavailable! unless Feature.enabled?(:deploy_tokens_api, default_enabled: true)
+
+ authenticated_as_admin!
+
+ present paginate(DeployToken.all), with: Entities::DeployToken
+ end
+
+ params do
+ requires :id, type: String, desc: 'The ID of a project'
+ end
+ resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
+ before do
+ service_unavailable! unless Feature.enabled?(:deploy_tokens_api, user_project, default_enabled: true)
+ end
+
+ params do
+ use :pagination
+ end
+ desc 'List deploy tokens for a project' do
+ detail 'This feature was introduced in GitLab 12.9'
+ success Entities::DeployToken
+ end
+ get ':id/deploy_tokens' do
+ authorize!(:read_deploy_token, user_project)
+
+ present paginate(user_project.deploy_tokens), with: Entities::DeployToken
+ end
+
+ params do
+ requires :name, type: String, desc: "New deploy token's name"
+ requires :expires_at, type: DateTime, desc: 'Expiration date for the deploy token. Does not expire if no value is provided.'
+ requires :username, type: String, desc: 'Username for deploy token. Default is `gitlab+deploy-token-{n}`'
+ requires :scopes, type: Array[String], values: ::DeployToken::AVAILABLE_SCOPES.map(&:to_s),
+ desc: 'Indicates the deploy token scopes. Must be at least one of "read_repository" or "read_registry".'
+ end
+ desc 'Create a project deploy token' do
+ detail 'This feature was introduced in GitLab 12.9'
+ success Entities::DeployTokenWithToken
+ end
+ post ':id/deploy_tokens' do
+ authorize!(:create_deploy_token, user_project)
+
+ deploy_token = ::Projects::DeployTokens::CreateService.new(
+ user_project, current_user, scope_params.merge(declared(params, include_missing: false, include_parent_namespaces: false))
+ ).execute
+
+ present deploy_token, with: Entities::DeployTokenWithToken
+ end
+
+ desc 'Delete a project deploy token' do
+ detail 'This feature was introduced in GitLab 12.9'
+ end
+ params do
+ requires :token_id, type: Integer, desc: 'The deploy token ID'
+ end
+ delete ':id/deploy_tokens/:token_id' do
+ authorize!(:destroy_deploy_token, user_project)
+
+ deploy_token = user_project.project_deploy_tokens
+ .find_by_deploy_token_id(params[:token_id])
+
+ not_found!('Deploy Token') unless deploy_token
+
+ deploy_token.destroy
+ no_content!
+ end
+ end
+
+ params do
+ requires :id, type: String, desc: 'The ID of a group'
+ end
+ resource :groups, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
+ before do
+ service_unavailable! unless Feature.enabled?(:deploy_tokens_api, user_group, default_enabled: true)
+ end
+
+ params do
+ use :pagination
+ end
+ desc 'List deploy tokens for a group' do
+ detail 'This feature was introduced in GitLab 12.9'
+ success Entities::DeployToken
+ end
+ get ':id/deploy_tokens' do
+ authorize!(:read_deploy_token, user_group)
+
+ present paginate(user_group.deploy_tokens), with: Entities::DeployToken
+ end
+
+ params do
+ requires :name, type: String, desc: 'The name of the deploy token'
+ requires :expires_at, type: DateTime, desc: 'Expiration date for the deploy token. Does not expire if no value is provided.'
+ requires :username, type: String, desc: 'Username for deploy token. Default is `gitlab+deploy-token-{n}`'
+ requires :scopes, type: Array[String], values: ::DeployToken::AVAILABLE_SCOPES.map(&:to_s),
+ desc: 'Indicates the deploy token scopes. Must be at least one of "read_repository" or "read_registry".'
+ end
+ desc 'Create a group deploy token' do
+ detail 'This feature was introduced in GitLab 12.9'
+ success Entities::DeployTokenWithToken
+ end
+ post ':id/deploy_tokens' do
+ authorize!(:create_deploy_token, user_group)
+
+ deploy_token = ::Groups::DeployTokens::CreateService.new(
+ user_group, current_user, scope_params.merge(declared(params, include_missing: false, include_parent_namespaces: false))
+ ).execute
+
+ present deploy_token, with: Entities::DeployTokenWithToken
+ end
+
+ desc 'Delete a group deploy token' do
+ detail 'This feature was introduced in GitLab 12.9'
+ end
+ delete ':id/deploy_tokens/:token_id' do
+ authorize!(:destroy_deploy_token, user_group)
+
+ deploy_token = user_group.group_deploy_tokens
+ .find_by_deploy_token_id!(params[:token_id])
+
+ destroy_conditionally!(deploy_token)
+ end
+ end
+ end
+end
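
The create endpoints above only translate the `scopes` array into `read_repository`/`read_registry` booleans and delegate to the existing create services, so their effect can be approximated from a Rails console. This is a sketch only; the project path, user, and attribute values are illustrative.

# Rails console sketch mirroring POST /projects/:id/deploy_tokens.
project = Project.find_by_full_path('group/project')
user    = User.find_by(username: 'root')

deploy_token = ::Projects::DeployTokens::CreateService.new(
  project, user,
  name: 'ci-read-only',
  username: 'gitlab+deploy-token-1',      # the API describes a gitlab+deploy-token-{n} default
  expires_at: 1.year.from_now,
  read_repository: true,                  # derived from scopes: ['read_repository']
  read_registry: false
).execute

deploy_token.token # exposed a single time via Entities::DeployTokenWithToken
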
diff --git a/lib/api/deployments.rb b/lib/api/deployments.rb
index 487d4e37a56..cb1dca11e87 100644
--- a/lib/api/deployments.rb
+++ b/lib/api/deployments.rb
@@ -143,6 +143,7 @@ module API
success Entities::MergeRequestBasic
end
params do
+ use :pagination
requires :deployment_id, type: Integer, desc: 'The deployment ID'
use :merge_requests_base_params
end
@@ -153,7 +154,7 @@ module API
mr_params = declared_params.merge(deployment_id: params[:deployment_id])
merge_requests = MergeRequestsFinder.new(current_user, mr_params).execute
- present merge_requests, { with: Entities::MergeRequestBasic, current_user: current_user }
+ present paginate(merge_requests), { with: Entities::MergeRequestBasic, current_user: current_user }
end
end
end
diff --git a/lib/api/discussions.rb b/lib/api/discussions.rb
index 25d38615c7f..a1cec148aeb 100644
--- a/lib/api/discussions.rb
+++ b/lib/api/discussions.rb
@@ -230,7 +230,7 @@ module API
.fresh
# Without RendersActions#prepare_notes_for_rendering,
- # Note#cross_reference_not_visible_for? will attempt to render
+ # Note#system_note_with_references_visible_for? will attempt to render
# Markdown references mentioned in the note to see whether they
# should be redacted. For notes that reference a commit, this
# would also incur a Gitaly call to verify the commit exists.
@@ -239,7 +239,7 @@ module API
# because notes are redacted if they point to projects that
# cannot be accessed by the user.
notes = prepare_notes_for_rendering(notes)
- notes.select { |n| n.visible_for?(current_user) }
+ notes.select { |n| n.readable_by?(current_user) }
end
# rubocop: enable CodeReuse/ActiveRecord
end
diff --git a/lib/api/entities/broadcast_message.rb b/lib/api/entities/broadcast_message.rb
index 403677aa300..e42b110adbe 100644
--- a/lib/api/entities/broadcast_message.rb
+++ b/lib/api/entities/broadcast_message.rb
@@ -3,7 +3,7 @@
module API
module Entities
class BroadcastMessage < Grape::Entity
- expose :id, :message, :starts_at, :ends_at, :color, :font, :target_path, :broadcast_type
+ expose :id, :message, :starts_at, :ends_at, :color, :font, :target_path, :broadcast_type, :dismissable
expose :active?, as: :active
end
end
diff --git a/lib/api/entities/commit.rb b/lib/api/entities/commit.rb
index 7ce97c2c3d8..3eaf896f1ac 100644
--- a/lib/api/entities/commit.rb
+++ b/lib/api/entities/commit.rb
@@ -9,6 +9,10 @@ module API
expose :safe_message, as: :message
expose :author_name, :author_email, :authored_date
expose :committer_name, :committer_email, :committed_date
+
+ expose :web_url do |commit, _options|
+ Gitlab::UrlBuilder.build(commit)
+ end
end
end
end
diff --git a/lib/api/entities/deploy_token.rb b/lib/api/entities/deploy_token.rb
new file mode 100644
index 00000000000..9c5bf54e299
--- /dev/null
+++ b/lib/api/entities/deploy_token.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ class DeployToken < Grape::Entity
+ # exposing :token is a security risk and should be avoided
+ expose :id, :name, :username, :expires_at, :scopes
+ end
+ end
+end
diff --git a/lib/api/entities/deploy_token_with_token.rb b/lib/api/entities/deploy_token_with_token.rb
new file mode 100644
index 00000000000..11efe3720fa
--- /dev/null
+++ b/lib/api/entities/deploy_token_with_token.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ class DeployTokenWithToken < Entities::DeployToken
+ expose :token
+ end
+ end
+end
diff --git a/lib/api/entities/discussion.rb b/lib/api/entities/discussion.rb
index dd1dd40da23..0740de97897 100644
--- a/lib/api/entities/discussion.rb
+++ b/lib/api/entities/discussion.rb
@@ -5,7 +5,7 @@ module API
class Discussion < Grape::Entity
expose :id
expose :individual_note?, as: :individual_note
- expose :notes, using: Entities::Note
+ expose :notes, using: Entities::NoteWithGitlabEmployeeBadge
end
end
end
diff --git a/lib/api/entities/gpg_key.rb b/lib/api/entities/gpg_key.rb
index a97e704a5dd..50b72680cc8 100644
--- a/lib/api/entities/gpg_key.rb
+++ b/lib/api/entities/gpg_key.rb
@@ -2,7 +2,7 @@
module API
module Entities
- class GPGKey < Grape::Entity
+ class GpgKey < Grape::Entity
expose :id, :key, :created_at
end
end
diff --git a/lib/api/entities/group.rb b/lib/api/entities/group.rb
index ae5ee4784ed..10e10e52d9f 100644
--- a/lib/api/entities/group.rb
+++ b/lib/api/entities/group.rb
@@ -13,6 +13,7 @@ module API
expose :emails_disabled
expose :mentions_disabled
expose :lfs_enabled?, as: :lfs_enabled
+ expose :default_branch_protection
expose :avatar_url do |group, options|
group.avatar_url(only_path: false)
end
diff --git a/lib/api/entities/internal.rb b/lib/api/entities/internal.rb
deleted file mode 100644
index 8f79bd14833..00000000000
--- a/lib/api/entities/internal.rb
+++ /dev/null
@@ -1,19 +0,0 @@
-# frozen_string_literal: true
-
-module API
- module Entities
- module Internal
- module Pages
- class LookupPath < Grape::Entity
- expose :project_id, :access_control,
- :source, :https_only, :prefix
- end
-
- class VirtualDomain < Grape::Entity
- expose :certificate, :key
- expose :lookup_paths, using: LookupPath
- end
- end
- end
- end
-end
diff --git a/lib/api/entities/internal/pages/lookup_path.rb b/lib/api/entities/internal/pages/lookup_path.rb
new file mode 100644
index 00000000000..1bf94f74fb4
--- /dev/null
+++ b/lib/api/entities/internal/pages/lookup_path.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ module Internal
+ module Pages
+ class LookupPath < Grape::Entity
+ expose :project_id, :access_control,
+ :source, :https_only, :prefix
+ end
+ end
+ end
+ end
+end
diff --git a/lib/api/entities/internal/pages/virtual_domain.rb b/lib/api/entities/internal/pages/virtual_domain.rb
new file mode 100644
index 00000000000..27eb7571368
--- /dev/null
+++ b/lib/api/entities/internal/pages/virtual_domain.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ module Internal
+ module Pages
+ class VirtualDomain < Grape::Entity
+ expose :certificate, :key
+ expose :lookup_paths, using: LookupPath
+ end
+ end
+ end
+ end
+end
diff --git a/lib/api/entities/internal/serverless/lookup_path.rb b/lib/api/entities/internal/serverless/lookup_path.rb
new file mode 100644
index 00000000000..8ca40b4f128
--- /dev/null
+++ b/lib/api/entities/internal/serverless/lookup_path.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ module Internal
+ module Serverless
+ class LookupPath < Grape::Entity
+ expose :source
+ end
+ end
+ end
+ end
+end
diff --git a/lib/api/entities/internal/serverless/virtual_domain.rb b/lib/api/entities/internal/serverless/virtual_domain.rb
new file mode 100644
index 00000000000..8b53aa51bf5
--- /dev/null
+++ b/lib/api/entities/internal/serverless/virtual_domain.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ module Internal
+ module Serverless
+ class VirtualDomain < Grape::Entity
+ expose :certificate, :key
+ expose :lookup_paths, using: LookupPath
+ end
+ end
+ end
+ end
+end
diff --git a/lib/api/entities/milestone_with_stats.rb b/lib/api/entities/milestone_with_stats.rb
new file mode 100644
index 00000000000..33fa322573b
--- /dev/null
+++ b/lib/api/entities/milestone_with_stats.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ class MilestoneWithStats < Entities::Milestone
+ expose :issue_stats do
+ expose :total_issues_count, as: :total
+ expose :closed_issues_count, as: :closed
+ end
+ end
+ end
+end
diff --git a/lib/api/entities/note_with_gitlab_employee_badge.rb b/lib/api/entities/note_with_gitlab_employee_badge.rb
new file mode 100644
index 00000000000..2ea300ffeb6
--- /dev/null
+++ b/lib/api/entities/note_with_gitlab_employee_badge.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ class NoteWithGitlabEmployeeBadge < Note
+ expose :author, using: Entities::UserWithGitlabEmployeeBadge
+ expose :resolved_by, using: Entities::UserWithGitlabEmployeeBadge, if: ->(note, options) { note.resolvable? }
+ end
+ end
+end
diff --git a/lib/api/entities/project.rb b/lib/api/entities/project.rb
index 6ed2ed34360..85a00273192 100644
--- a/lib/api/entities/project.rb
+++ b/lib/api/entities/project.rb
@@ -106,6 +106,9 @@ module API
project.auto_devops.nil? ? 'continuous' : project.auto_devops.deploy_strategy
end
expose :autoclose_referenced_issues
+ expose :repository_storage, if: ->(project, options) {
+ Ability.allowed?(options[:current_user], :change_repository_storage, project)
+ }
# rubocop: disable CodeReuse/ActiveRecord
def self.preload_relation(projects_relation, options = {})
diff --git a/lib/api/entities/project_upload.rb b/lib/api/entities/project_upload.rb
new file mode 100644
index 00000000000..f38f8d74f7b
--- /dev/null
+++ b/lib/api/entities/project_upload.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ class ProjectUpload < Grape::Entity
+ include Gitlab::Routing
+
+ expose :markdown_name, as: :alt
+ expose :secure_url, as: :url
+ expose :full_path do |uploader|
+ show_project_uploads_path(
+ uploader.model,
+ uploader.secret,
+ uploader.filename
+ )
+ end
+
+ expose :markdown_link, as: :markdown
+ end
+ end
+end
diff --git a/lib/api/entities/release.rb b/lib/api/entities/release.rb
index dc4b91e594e..c70982a9ece 100644
--- a/lib/api/entities/release.rb
+++ b/lib/api/entities/release.rb
@@ -11,14 +11,14 @@ module API
expose :tag, as: :tag_name, if: ->(_, _) { can_download_code? }
expose :description
expose :description_html do |entity|
- MarkupHelper.markdown_field(entity, :description)
+ MarkupHelper.markdown_field(entity, :description, current_user: options[:current_user])
end
expose :created_at
expose :released_at
expose :author, using: Entities::UserBasic, if: -> (release, _) { release.author.present? }
expose :commit, using: Entities::Commit, if: ->(_, _) { can_download_code? }
expose :upcoming_release?, as: :upcoming_release
- expose :milestones, using: Entities::Milestone, if: -> (release, _) { release.milestones.present? && can_read_milestone? }
+ expose :milestones, using: Entities::MilestoneWithStats, if: -> (release, _) { release.milestones.present? && can_read_milestone? }
expose :commit_path, expose_nil: false
expose :tag_path, expose_nil: false
expose :evidence_sha, expose_nil: false, if: ->(_, _) { can_download_code? }
diff --git a/lib/api/entities/releases/link.rb b/lib/api/entities/releases/link.rb
index 6cc01e0e981..f4edb83bd58 100644
--- a/lib/api/entities/releases/link.rb
+++ b/lib/api/entities/releases/link.rb
@@ -7,7 +7,17 @@ module API
expose :id
expose :name
expose :url
+ expose :direct_asset_url
expose :external?, as: :external
+
+ def direct_asset_url
+ return object.url unless object.filepath
+
+ release = object.release
+ project = release.project
+
+ Gitlab::Routing.url_helpers.project_release_url(project, release) << object.filepath
+ end
end
end
end
diff --git a/lib/api/entities/remote_mirror.rb b/lib/api/entities/remote_mirror.rb
index dde3e9dea99..18d51726bab 100644
--- a/lib/api/entities/remote_mirror.rb
+++ b/lib/api/entities/remote_mirror.rb
@@ -12,6 +12,9 @@ module API
expose :last_successful_update_at
expose :last_error
expose :only_protected_branches
+ expose :keep_divergent_refs, if: -> (mirror, _options) do
+ ::Feature.enabled?(:keep_divergent_refs, mirror.project)
+ end
end
end
end
diff --git a/lib/api/entities/ssh_key.rb b/lib/api/entities/ssh_key.rb
index 0e2f6ebae8c..aae216173c7 100644
--- a/lib/api/entities/ssh_key.rb
+++ b/lib/api/entities/ssh_key.rb
@@ -3,7 +3,7 @@
module API
module Entities
class SSHKey < Grape::Entity
- expose :id, :title, :key, :created_at
+ expose :id, :title, :key, :created_at, :expires_at
end
end
end
diff --git a/lib/api/entities/user.rb b/lib/api/entities/user.rb
index 15e4619cdb8..4a1f570c3f0 100644
--- a/lib/api/entities/user.rb
+++ b/lib/api/entities/user.rb
@@ -4,7 +4,7 @@ module API
module Entities
class User < UserBasic
expose :created_at, if: ->(user, opts) { Ability.allowed?(opts[:current_user], :read_user_profile, user) }
- expose :bio, :location, :public_email, :skype, :linkedin, :twitter, :website_url, :organization
+ expose :bio, :location, :public_email, :skype, :linkedin, :twitter, :website_url, :organization, :job_title
end
end
end
diff --git a/lib/api/entities/user_details_with_admin.rb b/lib/api/entities/user_details_with_admin.rb
index 9ea5c583437..22a842983e2 100644
--- a/lib/api/entities/user_details_with_admin.rb
+++ b/lib/api/entities/user_details_with_admin.rb
@@ -9,3 +9,5 @@ module API
end
end
end
+
+API::Entities::UserDetailsWithAdmin.prepend_if_ee('EE::API::Entities::UserDetailsWithAdmin')
diff --git a/lib/api/entities/user_with_gitlab_employee_badge.rb b/lib/api/entities/user_with_gitlab_employee_badge.rb
new file mode 100644
index 00000000000..36b9f633132
--- /dev/null
+++ b/lib/api/entities/user_with_gitlab_employee_badge.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ class UserWithGitlabEmployeeBadge < UserBasic
+ expose :gitlab_employee?, as: :is_gitlab_employee, if: ->(user, options) { ::Feature.enabled?(:gitlab_employee_badge) && user.gitlab_employee? }
+ end
+ end
+end
diff --git a/lib/api/files.rb b/lib/api/files.rb
index feed22d188c..76ab9a2190b 100644
--- a/lib/api/files.rb
+++ b/lib/api/files.rb
@@ -61,7 +61,7 @@ module API
end
params :simple_file_params do
- requires :file_path, type: String, desc: 'The url encoded path to the file. Ex. lib%2Fclass%2Erb'
+ requires :file_path, type: String, file_path: true, desc: 'The url encoded path to the file. Ex. lib%2Fclass%2Erb'
requires :branch, type: String, desc: 'Name of the branch to commit into. To create a new branch, also provide `start_branch`.', allow_blank: false
requires :commit_message, type: String, allow_blank: false, desc: 'Commit message'
optional :start_branch, type: String, desc: 'Name of the branch to start the new commit from'
@@ -85,7 +85,7 @@ module API
desc 'Get blame file metadata from repository'
params do
- requires :file_path, type: String, desc: 'The url encoded path to the file. Ex. lib%2Fclass%2Erb'
+ requires :file_path, type: String, file_path: true, desc: 'The url encoded path to the file. Ex. lib%2Fclass%2Erb'
requires :ref, type: String, desc: 'The name of branch, tag or commit', allow_blank: false
end
head ":id/repository/files/:file_path/blame", requirements: FILE_ENDPOINT_REQUIREMENTS do
@@ -96,7 +96,7 @@ module API
desc 'Get blame file from the repository'
params do
- requires :file_path, type: String, desc: 'The url encoded path to the file. Ex. lib%2Fclass%2Erb'
+ requires :file_path, type: String, file_path: true, desc: 'The url encoded path to the file. Ex. lib%2Fclass%2Erb'
requires :ref, type: String, desc: 'The name of branch, tag or commit', allow_blank: false
end
get ":id/repository/files/:file_path/blame", requirements: FILE_ENDPOINT_REQUIREMENTS do
@@ -110,7 +110,7 @@ module API
desc 'Get raw file metadata from repository'
params do
- requires :file_path, type: String, desc: 'The url encoded path to the file. Ex. lib%2Fclass%2Erb'
+ requires :file_path, type: String, file_path: true, desc: 'The url encoded path to the file. Ex. lib%2Fclass%2Erb'
requires :ref, type: String, desc: 'The name of branch, tag or commit', allow_blank: false
end
head ":id/repository/files/:file_path/raw", requirements: FILE_ENDPOINT_REQUIREMENTS do
@@ -121,7 +121,7 @@ module API
desc 'Get raw file contents from the repository'
params do
- requires :file_path, type: String, desc: 'The url encoded path to the file. Ex. lib%2Fclass%2Erb'
+ requires :file_path, type: String, file_path: true, desc: 'The url encoded path to the file. Ex. lib%2Fclass%2Erb'
       requires :ref, type: String, desc: 'The name of branch, tag or commit', allow_blank: false
end
get ":id/repository/files/:file_path/raw", requirements: FILE_ENDPOINT_REQUIREMENTS do
@@ -135,7 +135,7 @@ module API
desc 'Get file metadata from repository'
params do
- requires :file_path, type: String, desc: 'The url encoded path to the file. Ex. lib%2Fclass%2Erb'
+ requires :file_path, type: String, file_path: true, desc: 'The url encoded path to the file. Ex. lib%2Fclass%2Erb'
requires :ref, type: String, desc: 'The name of branch, tag or commit', allow_blank: false
end
head ":id/repository/files/:file_path", requirements: FILE_ENDPOINT_REQUIREMENTS do
@@ -146,7 +146,7 @@ module API
desc 'Get a file from the repository'
params do
- requires :file_path, type: String, desc: 'The url encoded path to the file. Ex. lib%2Fclass%2Erb'
+ requires :file_path, type: String, file_path: true, desc: 'The url encoded path to the file. Ex. lib%2Fclass%2Erb'
requires :ref, type: String, desc: 'The name of branch, tag or commit', allow_blank: false
end
get ":id/repository/files/:file_path", requirements: FILE_ENDPOINT_REQUIREMENTS do
diff --git a/lib/api/group_variables.rb b/lib/api/group_variables.rb
index 47fcbabb4d4..916f89649a5 100644
--- a/lib/api/group_variables.rb
+++ b/lib/api/group_variables.rb
@@ -47,6 +47,7 @@ module API
requires :key, type: String, desc: 'The key of the variable'
requires :value, type: String, desc: 'The value of the variable'
optional :protected, type: String, desc: 'Whether the variable is protected'
+ optional :masked, type: String, desc: 'Whether the variable is masked'
optional :variable_type, type: String, values: Ci::GroupVariable.variable_types.keys, desc: 'The type of variable, must be one of env_var or file. Defaults to env_var'
end
post ':id/variables' do
@@ -68,6 +69,7 @@ module API
optional :key, type: String, desc: 'The key of the variable'
optional :value, type: String, desc: 'The value of the variable'
optional :protected, type: String, desc: 'Whether the variable is protected'
+ optional :masked, type: String, desc: 'Whether the variable is masked'
optional :variable_type, type: String, values: Ci::GroupVariable.variable_types.keys, desc: 'The type of variable, must be one of env_var or file'
end
# rubocop: disable CodeReuse/ActiveRecord
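
Because `masked` is now accepted on both the create and update actions, a group-level CI variable can be masked directly through the API. The sketch below is illustrative only; the host, group ID, token, and values are placeholders.

require 'net/http'
require 'uri'

uri = URI('https://gitlab.example.com/api/v4/groups/42/variables')
request = Net::HTTP::Post.new(uri)
request['PRIVATE-TOKEN'] = ENV.fetch('GITLAB_TOKEN')
request.set_form_data(
  'key' => 'DEPLOY_PASSWORD',
  'value' => 's3cr3t-value',
  'masked' => 'true',        # new parameter added by this change
  'protected' => 'true'
)

response = Net::HTTP.start(uri.host, uri.port, use_ssl: true) { |http| http.request(request) }
puts response.code
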
diff --git a/lib/api/helpers.rb b/lib/api/helpers.rb
index 001fb92ec52..c3b5654e217 100644
--- a/lib/api/helpers.rb
+++ b/lib/api/helpers.rb
@@ -142,6 +142,12 @@ module API
end
end
+ def check_namespace_access(namespace)
+ return namespace if can?(current_user, :read_namespace, namespace)
+
+ not_found!('Namespace')
+ end
+
# rubocop: disable CodeReuse/ActiveRecord
def find_namespace(id)
if id.to_s =~ /^\d+$/
@@ -153,13 +159,15 @@ module API
# rubocop: enable CodeReuse/ActiveRecord
def find_namespace!(id)
- namespace = find_namespace(id)
+ check_namespace_access(find_namespace(id))
+ end
- if can?(current_user, :read_namespace, namespace)
- namespace
- else
- not_found!('Namespace')
- end
+ def find_namespace_by_path(path)
+ Namespace.find_by_full_path(path)
+ end
+
+ def find_namespace_by_path!(path)
+ check_namespace_access(find_namespace_by_path(path))
end
def find_branch!(branch_name)
@@ -359,6 +367,10 @@ module API
render_api_error!('405 Method Not Allowed', 405)
end
+ def service_unavailable!
+ render_api_error!('503 Service Unavailable', 503)
+ end
+
def conflict!(message = nil)
render_api_error!(message || '409 Conflict', 409)
end
diff --git a/lib/api/helpers/custom_validators.rb b/lib/api/helpers/custom_validators.rb
index c86eae6f2da..4c15c1d01cd 100644
--- a/lib/api/helpers/custom_validators.rb
+++ b/lib/api/helpers/custom_validators.rb
@@ -3,6 +3,28 @@
module API
module Helpers
module CustomValidators
+ class FilePath < Grape::Validations::Base
+ def validate_param!(attr_name, params)
+ path = params[attr_name]
+
+ Gitlab::Utils.check_path_traversal!(path)
+ rescue StandardError
+ raise Grape::Exceptions::Validation, params: [@scope.full_name(attr_name)],
+ message: "should be a valid file path"
+ end
+ end
+
+ class GitSha < Grape::Validations::Base
+ def validate_param!(attr_name, params)
+ sha = params[attr_name]
+
+ return if Commit::EXACT_COMMIT_SHA_PATTERN.match?(sha)
+
+ raise Grape::Exceptions::Validation, params: [@scope.full_name(attr_name)],
+ message: "should be a valid sha"
+ end
+ end
+
class Absence < Grape::Validations::Base
def validate_param!(attr_name, params)
return if params.respond_to?(:key?) && !params.key?(attr_name)
@@ -38,6 +60,8 @@ module API
end
end
+Grape::Validations.register_validator(:file_path, ::API::Helpers::CustomValidators::FilePath)
+Grape::Validations.register_validator(:git_sha, ::API::Helpers::CustomValidators::GitSha)
Grape::Validations.register_validator(:absence, ::API::Helpers::CustomValidators::Absence)
Grape::Validations.register_validator(:integer_none_any, ::API::Helpers::CustomValidators::IntegerNoneAny)
Grape::Validations.register_validator(:array_none_any, ::API::Helpers::CustomValidators::ArrayNoneAny)
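
After registration, endpoints opt into these validators per parameter with `file_path: true` or `git_sha: true`; the `lib/api/files.rb` hunk earlier in this commit does exactly that for `:file_path`. A minimal Grape sketch follows, assuming the validators above are already loaded and using an illustrative class and route.

require 'grape'

class ExampleAPI < Grape::API
  params do
    # FilePath rejects values that contain path traversal sequences.
    requires :file_path, type: String, file_path: true, desc: 'URL-encoded file path'
    # GitSha accepts only a value matching Commit::EXACT_COMMIT_SHA_PATTERN.
    optional :sha, type: String, git_sha: true, desc: 'Full commit SHA'
  end
  get 'example' do
    { path: params[:file_path], sha: params[:sha] }
  end
end
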
diff --git a/lib/api/helpers/file_upload_helpers.rb b/lib/api/helpers/file_upload_helpers.rb
index c5fb291a2b7..dd551ec2976 100644
--- a/lib/api/helpers/file_upload_helpers.rb
+++ b/lib/api/helpers/file_upload_helpers.rb
@@ -4,11 +4,12 @@ module API
module Helpers
module FileUploadHelpers
def file_is_valid?
- params[:file] && params[:file]['tempfile'].respond_to?(:read)
+ filename = params[:file]&.original_filename
+ filename && ImportExportUploader::EXTENSION_WHITELIST.include?(File.extname(filename).delete('.'))
end
def validate_file!
- render_api_error!('Uploaded file is invalid', 400) unless file_is_valid?
+ render_api_error!({ error: _('You need to upload a GitLab project export archive (ending in .gz).') }, 422) unless file_is_valid?
end
end
end
diff --git a/lib/api/helpers/groups_helpers.rb b/lib/api/helpers/groups_helpers.rb
index e0fea4c7c96..f3dfc093926 100644
--- a/lib/api/helpers/groups_helpers.rb
+++ b/lib/api/helpers/groups_helpers.rb
@@ -11,6 +11,8 @@ module API
optional :visibility, type: String,
values: Gitlab::VisibilityLevel.string_values,
desc: 'The visibility of the group'
+ # TODO: remove rubocop disable - https://gitlab.com/gitlab-org/gitlab/issues/14960
+ optional :avatar, type: File, desc: 'Avatar image for the group' # rubocop:disable Scalability/FileUploads
optional :share_with_group_lock, type: Boolean, desc: 'Prevent sharing a project with another group within this group'
optional :require_two_factor_authentication, type: Boolean, desc: 'Require all users in this group to setup Two-factor authentication'
optional :two_factor_grace_period, type: Integer, desc: 'Time before Two-factor authentication is enforced'
@@ -21,6 +23,7 @@ module API
optional :mentions_disabled, type: Boolean, desc: 'Disable a group from getting mentioned'
optional :lfs_enabled, type: Boolean, desc: 'Enable/disable LFS for the projects in this group'
optional :request_access_enabled, type: Boolean, desc: 'Allow users to request member access'
+ optional :default_branch_protection, type: Integer, values: ::Gitlab::Access.protection_values, desc: 'Determine if developers can push to master'
end
params :optional_params_ee do
diff --git a/lib/api/helpers/internal_helpers.rb b/lib/api/helpers/internal_helpers.rb
index ab43096a1de..f7aabc8ce4f 100644
--- a/lib/api/helpers/internal_helpers.rb
+++ b/lib/api/helpers/internal_helpers.rb
@@ -3,7 +3,7 @@
module API
module Helpers
module InternalHelpers
- attr_reader :redirected_path
+ attr_reader :redirected_path, :container
delegate :wiki?, to: :repo_type
@@ -22,10 +22,10 @@ module API
end
def access_checker_for(actor, protocol)
- access_checker_klass.new(actor.key_or_user, project, protocol,
+ access_checker_klass.new(actor.key_or_user, container, protocol,
authentication_abilities: ssh_authentication_abilities,
namespace_path: namespace_path,
- project_path: project_path,
+ repository_path: project_path,
redirected_path: redirected_path)
end
@@ -80,7 +80,7 @@ module API
# rubocop:disable Gitlab/ModuleWithInstanceVariables
def set_project
- @project, @repo_type, @redirected_path =
+ @container, @project, @repo_type, @redirected_path =
if params[:gl_repository]
Gitlab::GlRepository.parse(params[:gl_repository])
elsif params[:project]
@@ -92,17 +92,17 @@ module API
# Project id to pass between components that don't share/don't have
# access to the same filesystem mounts
def gl_repository
- repo_type.identifier_for_container(project)
+ repo_type.identifier_for_container(container)
end
- def gl_project_path
+ def gl_repository_path
repository.full_path
end
# Return the repository depending on whether we want the wiki or the
# regular repository
def repository
- @repository ||= repo_type.repository_for(project)
+ @repository ||= repo_type.repository_for(container)
end
# Return the Gitaly Address if it is enabled
@@ -111,8 +111,8 @@ module API
{
repository: repository.gitaly_repository,
- address: Gitlab::GitalyClient.address(project.repository_storage),
- token: Gitlab::GitalyClient.token(project.repository_storage),
+ address: Gitlab::GitalyClient.address(container.repository_storage),
+ token: Gitlab::GitalyClient.token(container.repository_storage),
features: Feature::Gitaly.server_feature_flags
}
end
diff --git a/lib/api/helpers/notes_helpers.rb b/lib/api/helpers/notes_helpers.rb
index 3c453953e37..bed0345a608 100644
--- a/lib/api/helpers/notes_helpers.rb
+++ b/lib/api/helpers/notes_helpers.rb
@@ -62,7 +62,7 @@ module API
def get_note(noteable, note_id)
note = noteable.notes.with_metadata.find(note_id)
- can_read_note = note.visible_for?(current_user)
+ can_read_note = note.readable_by?(current_user)
if can_read_note
present note, with: Entities::Note
diff --git a/lib/api/helpers/projects_helpers.rb b/lib/api/helpers/projects_helpers.rb
index c7c9f3ba077..85ed8a4d636 100644
--- a/lib/api/helpers/projects_helpers.rb
+++ b/lib/api/helpers/projects_helpers.rb
@@ -54,6 +54,7 @@ module API
optional :auto_devops_enabled, type: Boolean, desc: 'Flag indication if Auto DevOps is enabled'
optional :auto_devops_deploy_strategy, type: String, values: %w(continuous manual timed_incremental), desc: 'Auto Deploy strategy'
optional :autoclose_referenced_issues, type: Boolean, desc: 'Flag indication if referenced issues auto-closing is enabled'
+ optional :repository_storage, type: String, desc: 'Which storage shard the repository is on. Available only to admins'
end
params :optional_project_params_ee do
@@ -125,6 +126,7 @@ module API
:wiki_access_level,
:avatar,
:suggestion_commit_message,
+ :repository_storage,
# TODO: remove in API v5, replaced by *_access_level
:issues_enabled,
diff --git a/lib/api/internal/base.rb b/lib/api/internal/base.rb
index 382bbeb66de..9c37b610cca 100644
--- a/lib/api/internal/base.rb
+++ b/lib/api/internal/base.rb
@@ -40,7 +40,7 @@ module API
# Stores some Git-specific env thread-safely
env = parse_env
- Gitlab::Git::HookEnv.set(gl_repository, env) if project
+ Gitlab::Git::HookEnv.set(gl_repository, env) if container
actor.update_last_used_at!
access_checker = access_checker_for(actor, params[:protocol])
@@ -49,7 +49,11 @@ module API
result = access_checker.check(params[:action], params[:changes])
@project ||= access_checker.project
result
- rescue Gitlab::GitAccess::UnauthorizedError => e
+ rescue Gitlab::GitAccess::ForbiddenError => e
+ # The return code needs to be 401. If we return 403
+ # the custom message we return won't be shown to the user
+ # and, instead, the default message 'GitLab: API is not accessible'
+ # will be displayed
return response_with_status(code: 401, success: false, message: e.message)
rescue Gitlab::GitAccess::TimeoutError => e
return response_with_status(code: 503, success: false, message: e.message)
@@ -63,7 +67,7 @@ module API
when ::Gitlab::GitAccessResult::Success
payload = {
gl_repository: gl_repository,
- gl_project_path: gl_project_path,
+ gl_project_path: gl_repository_path,
gl_id: Gitlab::GlId.gl_id(actor.user),
gl_username: actor.username,
git_config_options: [],
@@ -104,6 +108,10 @@ module API
# check_ip - optional, only in EE version, may limit access to
# group resources based on its IP restrictions
post "/allowed" do
+ if repo_type.snippet? && Feature.disabled?(:version_snippets, actor.user)
+ break response_with_status(code: 404, success: false, message: 'The project you were looking for could not be found.')
+ end
+
# It was moved to a separate method so that EE can alter its behaviour more
# easily.
check_allowed(params)
@@ -212,7 +220,7 @@ module API
post '/post_receive' do
status 200
- response = PostReceiveService.new(actor.user, project, params).execute
+ response = PostReceiveService.new(actor.user, repository, project, params).execute
ee_post_receive_response_hook(response)
diff --git a/lib/api/internal/pages.rb b/lib/api/internal/pages.rb
index a2fe3e09df8..4339d2ef490 100644
--- a/lib/api/internal/pages.rb
+++ b/lib/api/internal/pages.rb
@@ -24,13 +24,26 @@ module API
requires :host, type: String, desc: 'The host to query for'
end
get "/" do
- host = Namespace.find_by_pages_host(params[:host]) || PagesDomain.find_by_domain(params[:host])
- no_content! unless host
+ serverless_domain_finder = ServerlessDomainFinder.new(params[:host])
+ if serverless_domain_finder.serverless?
+ # Handle Serverless domains
+ serverless_domain = serverless_domain_finder.execute
+ no_content! unless serverless_domain
- virtual_domain = host.pages_virtual_domain
- no_content! unless virtual_domain
+ virtual_domain = Serverless::VirtualDomain.new(serverless_domain)
+ no_content! unless virtual_domain
- present virtual_domain, with: Entities::Internal::Pages::VirtualDomain
+ present virtual_domain, with: Entities::Internal::Serverless::VirtualDomain
+ else
+ # Handle Pages domains
+ host = Namespace.find_by_pages_host(params[:host]) || PagesDomain.find_by_domain_case_insensitive(params[:host])
+ no_content! unless host
+
+ virtual_domain = host.pages_virtual_domain
+ no_content! unless virtual_domain
+
+ present virtual_domain, with: Entities::Internal::Pages::VirtualDomain
+ end
end
end
end
diff --git a/lib/api/issues.rb b/lib/api/issues.rb
index e5bfca13d66..d34c205281a 100644
--- a/lib/api/issues.rb
+++ b/lib/api/issues.rb
@@ -247,6 +247,7 @@ module API
requires :issue_iid, type: Integer, desc: 'The internal ID of a project issue'
optional :title, type: String, desc: 'The title of an issue'
optional :updated_at, type: DateTime,
+ allow_blank: false,
desc: 'Date time when the issue was updated. Available only for admins and project owners.'
optional :state_event, type: String, values: %w[reopen close], desc: 'State of the issue'
use :issue_params
diff --git a/lib/api/lsif_data.rb b/lib/api/lsif_data.rb
index 63e6eb3ab2d..a673ccb4af0 100644
--- a/lib/api/lsif_data.rb
+++ b/lib/api/lsif_data.rb
@@ -15,22 +15,24 @@ module API
resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
segment ':id/commits/:commit_id' do
params do
- requires :path, type: String, desc: 'The path of a file'
+ requires :paths, type: Array, desc: 'The paths of the files'
end
get 'lsif/info' do
authorize! :download_code, user_project
artifact =
- @project.job_artifacts
+ Ci::JobArtifact
.with_file_types(['lsif'])
- .for_sha(params[:commit_id])
+ .for_sha(params[:commit_id], @project.id)
.last
not_found! unless artifact
authorize! :read_pipeline, artifact.job.pipeline
file_too_large! if artifact.file.cached_size > MAX_FILE_SIZE
- ::Projects::LsifDataService.new(artifact.file, @project, params).execute
+ service = ::Projects::LsifDataService.new(artifact.file, @project, params[:commit_id])
+
+ params[:paths].to_h { |path| [path, service.execute(path)] }
end
end
end
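The endpoint above now accepts several paths per request and returns a hash keyed by path. A minimal, standalone Ruby sketch of that response shape; the lookup lambda is a hypothetical stand-in for service.execute(path):

    # Each requested path maps to its own LSIF payload (Array#to_h with a block needs Ruby 2.6+).
    lookup = ->(path) { { 'hover' => "docs for #{path}" } }
    paths  = ['lib/api/lsif_data.rb', 'app/models/user.rb']
    result = paths.to_h { |path| [path, lookup.call(path)] }
    # => {"lib/api/lsif_data.rb"=>{"hover"=>"docs for lib/api/lsif_data.rb"}, "app/models/user.rb"=>...}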
diff --git a/lib/api/notes.rb b/lib/api/notes.rb
index 35eda481a4f..7237fa24bab 100644
--- a/lib/api/notes.rb
+++ b/lib/api/notes.rb
@@ -45,7 +45,7 @@ module API
# array returned, but this is really an edge case.
notes = paginate(raw_notes)
notes = prepare_notes_for_rendering(notes)
- notes = notes.select { |note| note.visible_for?(current_user) }
+ notes = notes.select { |note| note.readable_by?(current_user) }
present notes, with: Entities::Note
end
# rubocop: enable CodeReuse/ActiveRecord
diff --git a/lib/api/pipeline_schedules.rb b/lib/api/pipeline_schedules.rb
index 445a37a70c0..edc99590cdb 100644
--- a/lib/api/pipeline_schedules.rb
+++ b/lib/api/pipeline_schedules.rb
@@ -22,7 +22,7 @@ module API
get ':id/pipeline_schedules' do
authorize! :read_pipeline_schedule, user_project
- schedules = PipelineSchedulesFinder.new(user_project).execute(scope: params[:scope])
+ schedules = Ci::PipelineSchedulesFinder.new(user_project).execute(scope: params[:scope])
.preload([:owner, :last_pipeline])
present paginate(schedules), with: Entities::PipelineSchedule
end
diff --git a/lib/api/pipelines.rb b/lib/api/pipelines.rb
index 66a183173af..06f8920b37c 100644
--- a/lib/api/pipelines.rb
+++ b/lib/api/pipelines.rb
@@ -27,7 +27,7 @@ module API
optional :username, type: String, desc: 'The username of the user who triggered pipelines'
optional :updated_before, type: DateTime, desc: 'Return pipelines updated before the specified datetime. Format: ISO 8601 YYYY-MM-DDTHH:MM:SSZ'
optional :updated_after, type: DateTime, desc: 'Return pipelines updated after the specified datetime. Format: ISO 8601 YYYY-MM-DDTHH:MM:SSZ'
- optional :order_by, type: String, values: PipelinesFinder::ALLOWED_INDEXED_COLUMNS, default: 'id',
+ optional :order_by, type: String, values: Ci::PipelinesFinder::ALLOWED_INDEXED_COLUMNS, default: 'id',
desc: 'Order pipelines'
optional :sort, type: String, values: %w[asc desc], default: 'desc',
desc: 'Sort pipelines'
@@ -36,7 +36,7 @@ module API
authorize! :read_pipeline, user_project
authorize! :read_build, user_project
- pipelines = PipelinesFinder.new(user_project, current_user, params).execute
+ pipelines = Ci::PipelinesFinder.new(user_project, current_user, params).execute
present paginate(pipelines), with: Entities::PipelineBasic
end
diff --git a/lib/api/project_container_repositories.rb b/lib/api/project_container_repositories.rb
index 70c913bea98..555fd98b451 100644
--- a/lib/api/project_container_repositories.rb
+++ b/lib/api/project_container_repositories.rb
@@ -69,7 +69,11 @@ module API
end
params do
requires :repository_id, type: Integer, desc: 'The ID of the repository'
- requires :name_regex, type: String, desc: 'The tag name regexp to delete, specify .* to delete all'
+ optional :name_regex_delete, type: String, desc: 'The tag name regexp to delete, specify .* to delete all'
+ optional :name_regex, type: String, desc: 'The tag name regexp to delete, specify .* to delete all'
+ # require either name_regex (deprecated) or name_regex_delete; it is OK to provide both
+ at_least_one_of :name_regex, :name_regex_delete
+ optional :name_regex_keep, type: String, desc: 'The tag name regexp to retain'
optional :keep_n, type: Integer, desc: 'Keep n of latest tags with matching name'
optional :older_than, type: String, desc: 'Delete older than: 1h, 1d, 1month'
end
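For illustration, a hedged example of a cleanup request body combining the new parameters; all values are placeholders, and name_regex is still accepted but deprecated in favour of name_regex_delete:

    params = {
      name_regex_delete: '.*',          # candidates for deletion
      name_regex_keep:   '.*-stable',   # tags matching this pattern are retained
      keep_n:            5,             # keep the 5 most recent matching tags
      older_than:        '1month'       # only delete tags older than one month
    }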
diff --git a/lib/api/project_import.rb b/lib/api/project_import.rb
index ea793a09f6c..ffa9dd13754 100644
--- a/lib/api/project_import.rb
+++ b/lib/api/project_import.rb
@@ -4,6 +4,8 @@ module API
class ProjectImport < Grape::API
include PaginationParams
+ MAXIMUM_FILE_SIZE = 50.megabytes
+
helpers Helpers::ProjectsHelpers
helpers Helpers::FileUploadHelpers
@@ -26,10 +28,21 @@ module API
end
resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
+ desc 'Workhorse authorize the project import upload' do
+ detail 'This feature was introduced in GitLab 12.9'
+ end
+ post 'import/authorize' do
+ require_gitlab_workhorse!
+
+ status 200
+ content_type Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE
+
+ ImportExportUploader.workhorse_authorize(has_length: false, maximum_size: MAXIMUM_FILE_SIZE)
+ end
+
params do
requires :path, type: String, desc: 'The new project path and name'
- # TODO: remove rubocop disable - https://gitlab.com/gitlab-org/gitlab/issues/14960
- requires :file, type: File, desc: 'The project export file to be imported' # rubocop:disable Scalability/FileUploads
+ requires :file, type: ::API::Validations::Types::WorkhorseFile, desc: 'The project export file to be imported'
optional :name, type: String, desc: 'The name of the project to be imported. Defaults to the path of the project if not provided.'
optional :namespace, type: String, desc: "The ID or name of the namespace that the project will be imported into. Defaults to the current user's namespace."
optional :overwrite, type: Boolean, default: false, desc: 'If there is a project in the same namespace and with the same name overwrite it'
@@ -38,12 +51,24 @@ module API
desc: 'New project params to override values in the export' do
use :optional_project_params
end
+ optional 'file.path', type: String, desc: 'Path to locally stored body (generated by Workhorse)'
+ optional 'file.name', type: String, desc: 'Real filename as sent in Content-Disposition (generated by Workhorse)'
+ optional 'file.type', type: String, desc: 'Real content type as sent in Content-Type (generated by Workhorse)'
+ optional 'file.size', type: Integer, desc: 'Real size of file (generated by Workhorse)'
+ optional 'file.md5', type: String, desc: 'MD5 checksum of the file (generated by Workhorse)'
+ optional 'file.sha1', type: String, desc: 'SHA1 checksum of the file (generated by Workhorse)'
+ optional 'file.sha256', type: String, desc: 'SHA256 checksum of the file (generated by Workhorse)'
+ optional 'file.etag', type: String, desc: 'Etag of the file (generated by Workhorse)'
+ optional 'file.remote_id', type: String, desc: 'Remote_id of the file (generated by Workhorse)'
+ optional 'file.remote_url', type: String, desc: 'Remote_url of the file (generated by Workhorse)'
end
desc 'Create a new project import' do
detail 'This feature was introduced in GitLab 10.6.'
success Entities::ProjectImportStatus
end
post 'import' do
+ require_gitlab_workhorse!
+
key = "project_import".to_sym
if throttled?(key, [current_user, key])
@@ -52,10 +77,10 @@ module API
render_api_error!({ error: _('This endpoint has been requested too many times. Try again later.') }, 429)
end
- validate_file!
-
Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-foss/issues/42437')
+ validate_file!
+
namespace = if import_params[:namespace]
find_namespace!(import_params[:namespace])
else
@@ -66,7 +91,7 @@ module API
path: import_params[:path],
namespace_id: namespace.id,
name: import_params[:name],
- file: import_params[:file]['tempfile'],
+ file: import_params[:file],
overwrite: import_params[:overwrite]
}
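A hedged illustration of the Workhorse-generated fields the import endpoint now consumes instead of a raw tempfile; every value below is made up, and the size limit comes from the authorize step above (MAXIMUM_FILE_SIZE, 50 megabytes):

    workhorse_file_params = {
      'file.path'   => '/tmp/uploads/import-placeholder.tar.gz', # local path written by Workhorse
      'file.name'   => 'project_export.tar.gz',
      'file.size'   => 1_048_576,
      'file.sha256' => 'deadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef'
    }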
diff --git a/lib/api/project_snippets.rb b/lib/api/project_snippets.rb
index 3040c3c27c6..e8234a9285c 100644
--- a/lib/api/project_snippets.rb
+++ b/lib/api/project_snippets.rb
@@ -5,12 +5,17 @@ module API
include PaginationParams
before { authenticate! }
+ before { check_snippets_enabled }
params do
requires :id, type: String, desc: 'The ID of a project'
end
resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
helpers do
+ def check_snippets_enabled
+ forbidden! unless user_project.feature_available?(:snippets, current_user)
+ end
+
def handle_project_member_errors(errors)
if errors[:project_access].any?
error!(errors[:project_access], 422)
diff --git a/lib/api/projects.rb b/lib/api/projects.rb
index 2271131ced3..3717e25d997 100644
--- a/lib/api/projects.rb
+++ b/lib/api/projects.rb
@@ -25,6 +25,7 @@ module API
end
def verify_update_project_attrs!(project, attrs)
+ attrs.delete(:repository_storage) unless can?(current_user, :change_repository_storage, project)
end
def delete_project(user_project)
@@ -176,6 +177,7 @@ module API
use :create_params
end
post do
+ Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab/issues/21139')
attrs = declared_params(include_missing: false)
attrs = translate_params_for_compatibility(attrs)
filter_attributes_using_license!(attrs)
@@ -208,6 +210,7 @@ module API
end
# rubocop: disable CodeReuse/ActiveRecord
post "user/:user_id" do
+ Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab/issues/21139')
authenticated_as_admin!
user = User.find_by(id: params.delete(:user_id))
not_found!('User') unless user
@@ -260,32 +263,40 @@ module API
success Entities::Project
end
params do
- optional :namespace, type: String, desc: 'The ID or name of the namespace that the project will be forked into'
+ optional :namespace, type: String, desc: '(deprecated) The ID or name of the namespace that the project will be forked into'
+ optional :namespace_id, type: Integer, desc: 'The ID of the namespace that the project will be forked into'
+ optional :namespace_path, type: String, desc: 'The path of the namespace that the project will be forked into'
optional :path, type: String, desc: 'The path that will be assigned to the fork'
optional :name, type: String, desc: 'The name that will be assigned to the fork'
end
post ':id/fork' do
Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-foss/issues/42284')
- fork_params = declared_params(include_missing: false)
- namespace_id = fork_params[:namespace]
+ not_found! unless can?(current_user, :fork_project, user_project)
- if namespace_id.present?
- fork_params[:namespace] = find_namespace(namespace_id)
+ fork_params = declared_params(include_missing: false)
- unless fork_params[:namespace] && can?(current_user, :create_projects, fork_params[:namespace])
- not_found!('Target Namespace')
+ fork_params[:namespace] =
+ if fork_params[:namespace_id].present?
+ find_namespace!(fork_params[:namespace_id])
+ elsif fork_params[:namespace_path].present?
+ find_namespace_by_path!(fork_params[:namespace_path])
+ elsif fork_params[:namespace].present?
+ find_namespace!(fork_params[:namespace])
end
- end
- forked_project = ::Projects::ForkService.new(user_project, current_user, fork_params).execute
+ service = ::Projects::ForkService.new(user_project, current_user, fork_params)
+
+ not_found!('Target Namespace') unless service.valid_fork_target?
+
+ forked_project = service.execute
if forked_project.errors.any?
conflict!(forked_project.errors.messages)
else
present forked_project, with: Entities::Project,
- user_can_admin_project: can?(current_user, :admin_project, forked_project),
- current_user: current_user
+ user_can_admin_project: can?(current_user, :admin_project, forked_project),
+ current_user: current_user
end
end
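Hedged examples of the three ways a fork target can now be given; exactly one is needed, and the values are placeholders:

    { namespace_id: 123 }                  # by numeric namespace ID
    { namespace_path: 'group/subgroup' }   # by full namespace path
    { namespace: 'group' }                 # legacy parameter, now marked deprecated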
@@ -496,7 +507,9 @@ module API
requires :file, type: File, desc: 'The file to be uploaded' # rubocop:disable Scalability/FileUploads
end
post ":id/uploads" do
- UploadService.new(user_project, params[:file]).execute.to_h
+ upload = UploadService.new(user_project, params[:file]).execute
+
+ present upload, with: Entities::ProjectUpload
end
desc 'Get the users list of a project' do
diff --git a/lib/api/release/links.rb b/lib/api/release/links.rb
index def36dc8529..f72230c084c 100644
--- a/lib/api/release/links.rb
+++ b/lib/api/release/links.rb
@@ -39,6 +39,7 @@ module API
params do
requires :name, type: String, desc: 'The name of the link'
requires :url, type: String, desc: 'The URL of the link'
+ optional :filepath, type: String, desc: 'The filepath of the link'
end
post 'links' do
authorize! :create_release, release
@@ -73,6 +74,7 @@ module API
params do
optional :name, type: String, desc: 'The name of the link'
optional :url, type: String, desc: 'The URL of the link'
+ optional :filepath, type: String, desc: 'The filepath of the link'
at_least_one_of :name, :url
end
put do
diff --git a/lib/api/releases.rb b/lib/api/releases.rb
index 6e7a99bf0bb..1be263ac80d 100644
--- a/lib/api/releases.rb
+++ b/lib/api/releases.rb
@@ -46,7 +46,7 @@ module API
params do
requires :tag_name, type: String, desc: 'The name of the tag', as: :tag
optional :name, type: String, desc: 'The name of the release'
- requires :description, type: String, desc: 'The release notes'
+ optional :description, type: String, desc: 'The release notes'
optional :ref, type: String, desc: 'The commit sha or branch name'
optional :assets, type: Hash do
optional :links, type: Array do
diff --git a/lib/api/remote_mirrors.rb b/lib/api/remote_mirrors.rb
index 95313966133..7e484eb8885 100644
--- a/lib/api/remote_mirrors.rb
+++ b/lib/api/remote_mirrors.rb
@@ -5,9 +5,6 @@ module API
include PaginationParams
before do
- # TODO: Remove flag: https://gitlab.com/gitlab-org/gitlab/issues/38121
- not_found! unless Feature.enabled?(:remote_mirrors_api, user_project)
-
unauthorized! unless can?(current_user, :admin_remote_mirror, user_project)
end
@@ -26,6 +23,28 @@ module API
with: Entities::RemoteMirror
end
+ desc 'Create remote mirror for a project' do
+ success Entities::RemoteMirror
+ end
+ params do
+ requires :url, type: String, desc: 'The URL for a remote mirror'
+ optional :enabled, type: Boolean, desc: 'Determines if the mirror is enabled'
+ optional :only_protected_branches, type: Boolean, desc: 'Determines if only protected branches are mirrored'
+ optional :keep_divergent_refs, type: Boolean, desc: 'Determines if divergent refs are kept on the target'
+ end
+ post ':id/remote_mirrors' do
+ create_params = declared_params(include_missing: false)
+ create_params.delete(:keep_divergent_refs) unless ::Feature.enabled?(:keep_divergent_refs, user_project)
+
+ new_mirror = user_project.remote_mirrors.create(create_params)
+
+ if new_mirror.persisted?
+ present new_mirror, with: Entities::RemoteMirror
+ else
+ render_validation_error!(new_mirror)
+ end
+ end
+
desc 'Update the attributes of a single remote mirror' do
success Entities::RemoteMirror
end
@@ -33,12 +52,15 @@ module API
requires :mirror_id, type: String, desc: 'The ID of a remote mirror'
optional :enabled, type: Boolean, desc: 'Determines if the mirror is enabled'
optional :only_protected_branches, type: Boolean, desc: 'Determines if only protected branches are mirrored'
+ optional :keep_divergent_refs, type: Boolean, desc: 'Determines if divergent refs are kept on the target'
end
put ':id/remote_mirrors/:mirror_id' do
mirror = user_project.remote_mirrors.find(params[:mirror_id])
mirror_params = declared_params(include_missing: false)
mirror_params[:id] = mirror_params.delete(:mirror_id)
+ mirror_params.delete(:keep_divergent_refs) unless ::Feature.enabled?(:keep_divergent_refs, user_project)
+
update_params = { remote_mirrors_attributes: mirror_params }
result = ::Projects::UpdateService
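A hedged usage sketch for the new create endpoint, assuming the usual /api/v4 prefix; host, project ID, token, and mirror URL are placeholders:

    require 'net/http'
    require 'json'
    require 'uri'

    uri = URI('https://gitlab.example.com/api/v4/projects/42/remote_mirrors')
    request = Net::HTTP::Post.new(uri, 'PRIVATE-TOKEN' => '<token>', 'Content-Type' => 'application/json')
    request.body = {
      url: 'https://user:pass@mirror.example.com/group/repo.git',
      enabled: true,
      only_protected_branches: false
    }.to_json
    response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) { |http| http.request(request) }
    puts response.code # the mirror entity is returned on success, a validation error otherwise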
diff --git a/lib/api/repositories.rb b/lib/api/repositories.rb
index 00473db1ff1..62f5b67af1e 100644
--- a/lib/api/repositories.rb
+++ b/lib/api/repositories.rb
@@ -13,6 +13,8 @@ module API
end
resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
helpers do
+ include ::Gitlab::RateLimitHelpers
+
def handle_project_member_errors(errors)
if errors[:project_access].any?
error!(errors[:project_access], 422)
@@ -89,6 +91,10 @@ module API
optional :format, type: String, desc: 'The archive format'
end
get ':id/repository/archive', requirements: { format: Gitlab::PathRegex.archive_formats_regex } do
+ if archive_rate_limit_reached?(current_user, user_project)
+ render_api_error!({ error: ::Gitlab::RateLimitHelpers::ARCHIVE_RATE_LIMIT_REACHED_MESSAGE }, 429)
+ end
+
send_git_archive user_project.repository, ref: params[:sha], format: params[:format], append_sha: true
rescue
not_found!('File')
diff --git a/lib/api/runner.rb b/lib/api/runner.rb
index e1c79aa8efe..0b6bad6708b 100644
--- a/lib/api/runner.rb
+++ b/lib/api/runner.rb
@@ -283,10 +283,12 @@ module API
bad_request!('Missing artifacts file!') unless artifacts
file_too_large! unless artifacts.size < max_artifacts_size(job)
- if Ci::CreateJobArtifactsService.new.execute(job, artifacts, params, metadata_file: metadata)
+ result = Ci::CreateJobArtifactsService.new(job.project).execute(job, artifacts, params, metadata_file: metadata)
+
+ if result[:status] == :success
status :created
else
- render_validation_error!(job)
+ render_api_error!(result[:message], result[:http_status])
end
end
diff --git a/lib/api/runners.rb b/lib/api/runners.rb
index c2d371b6867..eba1b5499d0 100644
--- a/lib/api/runners.rb
+++ b/lib/api/runners.rb
@@ -115,7 +115,7 @@ module API
params do
requires :id, type: Integer, desc: 'The ID of the runner'
optional :status, type: String, desc: 'Status of the job', values: Ci::Build::AVAILABLE_STATUSES
- optional :order_by, type: String, desc: 'Order by `id` or not', values: RunnerJobsFinder::ALLOWED_INDEXED_COLUMNS
+ optional :order_by, type: String, desc: 'Order by `id` or not', values: Ci::RunnerJobsFinder::ALLOWED_INDEXED_COLUMNS
optional :sort, type: String, values: %w[asc desc], default: 'desc', desc: 'Sort by asc (ascending) or desc (descending)'
use :pagination
end
@@ -123,7 +123,7 @@ module API
runner = get_runner(params[:id])
authenticate_list_runners_jobs!(runner)
- jobs = RunnerJobsFinder.new(runner, params).execute
+ jobs = Ci::RunnerJobsFinder.new(runner, params).execute
present paginate(jobs), with: Entities::JobBasicWithProject
end
diff --git a/lib/api/todos.rb b/lib/api/todos.rb
index e3f3aca27df..02b8bb55274 100644
--- a/lib/api/todos.rb
+++ b/lib/api/todos.rb
@@ -13,13 +13,6 @@ module API
'issues' => ->(iid) { find_project_issue(iid) }
}.freeze
- helpers do
- # EE::API::Todos would override this method
- def find_todos
- TodosFinder.new(current_user, params).execute
- end
- end
-
params do
requires :id, type: String, desc: 'The ID of a project'
end
@@ -48,6 +41,10 @@ module API
resource :todos do
helpers do
+ def find_todos
+ TodosFinder.new(current_user, params).execute
+ end
+
def issuable_and_awardable?(type)
obj_type = Object.const_get(type, false)
diff --git a/lib/api/users.rb b/lib/api/users.rb
index c6dc7c08b11..1ca222b4ed5 100644
--- a/lib/api/users.rb
+++ b/lib/api/users.rb
@@ -52,8 +52,8 @@ module API
optional :external, type: Boolean, desc: 'Flag indicating the user is an external user'
# TODO: remove rubocop disable - https://gitlab.com/gitlab-org/gitlab/issues/14960
optional :avatar, type: File, desc: 'Avatar image for user' # rubocop:disable Scalability/FileUploads
- optional :theme_id, type: Integer, default: 1, desc: 'The GitLab theme for the user'
- optional :color_scheme_id, type: Integer, default: 1, desc: 'The color scheme for the file viewer'
+ optional :theme_id, type: Integer, desc: 'The GitLab theme for the user'
+ optional :color_scheme_id, type: Integer, desc: 'The color scheme for the file viewer'
optional :private_profile, type: Boolean, desc: 'Flag indicating the user has a private profile'
all_or_none_of :extern_uid, :provider
@@ -308,7 +308,7 @@ module API
desc 'Add a GPG key to a specified user. Available only for admins.' do
detail 'This feature was added in GitLab 10.0'
- success Entities::GPGKey
+ success Entities::GpgKey
end
params do
requires :id, type: Integer, desc: 'The ID of the user'
@@ -324,7 +324,7 @@ module API
key = user.gpg_keys.new(declared_params(include_missing: false))
if key.save
- present key, with: Entities::GPGKey
+ present key, with: Entities::GpgKey
else
render_validation_error!(key)
end
@@ -333,7 +333,7 @@ module API
desc 'Get the GPG keys of a specified user. Available only for admins.' do
detail 'This feature was added in GitLab 10.0'
- success Entities::GPGKey
+ success Entities::GpgKey
end
params do
requires :id, type: Integer, desc: 'The ID of the user'
@@ -346,7 +346,7 @@ module API
user = User.find_by(id: params[:id])
not_found!('User') unless user
- present paginate(user.gpg_keys), with: Entities::GPGKey
+ present paginate(user.gpg_keys), with: Entities::GpgKey
end
# rubocop: enable CodeReuse/ActiveRecord
@@ -528,11 +528,18 @@ module API
user = User.find_by(id: params[:id])
not_found!('User') unless user
- if !user.ldap_blocked?
- user.block
- else
+ if user.ldap_blocked?
forbidden!('LDAP blocked users cannot be modified by the API')
end
+
+ break if user.blocked?
+
+ result = ::Users::BlockService.new(current_user).execute(user)
+ if result[:status] == :success
+ true
+ else
+ render_api_error!(result[:message], result[:http_status])
+ end
end
# rubocop: enable CodeReuse/ActiveRecord
@@ -739,18 +746,18 @@ module API
desc "Get the currently authenticated user's GPG keys" do
detail 'This feature was added in GitLab 10.0'
- success Entities::GPGKey
+ success Entities::GpgKey
end
params do
use :pagination
end
get 'gpg_keys' do
- present paginate(current_user.gpg_keys), with: Entities::GPGKey
+ present paginate(current_user.gpg_keys), with: Entities::GpgKey
end
desc 'Get a single GPG key owned by currently authenticated user' do
detail 'This feature was added in GitLab 10.0'
- success Entities::GPGKey
+ success Entities::GpgKey
end
params do
requires :key_id, type: Integer, desc: 'The ID of the GPG key'
@@ -760,13 +767,13 @@ module API
key = current_user.gpg_keys.find_by(id: params[:key_id])
not_found!('GPG Key') unless key
- present key, with: Entities::GPGKey
+ present key, with: Entities::GpgKey
end
# rubocop: enable CodeReuse/ActiveRecord
desc 'Add a new GPG key to the currently authenticated user' do
detail 'This feature was added in GitLab 10.0'
- success Entities::GPGKey
+ success Entities::GpgKey
end
params do
requires :key, type: String, desc: 'The new GPG key'
@@ -775,7 +782,7 @@ module API
key = current_user.gpg_keys.new(declared_params)
if key.save
- present key, with: Entities::GPGKey
+ present key, with: Entities::GpgKey
else
render_validation_error!(key)
end
diff --git a/lib/api/version.rb b/lib/api/version.rb
index f79bb3428f2..2d8c90260fa 100644
--- a/lib/api/version.rb
+++ b/lib/api/version.rb
@@ -3,6 +3,9 @@
module API
class Version < Grape::API
helpers ::API::Helpers::GraphqlHelpers
+ include APIGuard
+
+ allow_access_with_scope :read_user, if: -> (request) { request.get? }
before { authenticate! }
diff --git a/lib/backup/manager.rb b/lib/backup/manager.rb
index 2b6b10cf044..915567f8106 100644
--- a/lib/backup/manager.rb
+++ b/lib/backup/manager.rb
@@ -12,7 +12,7 @@ module Backup
@progress = progress
end
- def pack
+ def write_info
# Make sure there is a connection
ActiveRecord::Base.connection.reconnect!
@@ -20,7 +20,11 @@ module Backup
File.open("#{backup_path}/backup_information.yml", "w+") do |file|
file << backup_information.to_yaml.gsub(/^---\n/, '')
end
+ end
+ end
+ def pack
+ Dir.chdir(backup_path) do
# create archive
progress.print "Creating backup archive: #{tar_file} ... "
# Set file permissions on open to prevent chmod races.
@@ -31,8 +35,6 @@ module Backup
puts "creating archive #{tar_file} failed".color(:red)
raise Backup::Error, 'Backup failed'
end
-
- upload
end
end
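With the metadata write split into #write_info and the upload call removed from #pack, the caller is expected to sequence the steps itself. A hedged sketch of that ordering (progress stands for whatever logger the backup rake task passes in):

    backup = Backup::Manager.new(progress)
    backup.write_info   # writes backup_information.yml
    backup.pack         # builds the tar archive inside backup_path
    backup.upload       # pushes the archive to remote storage, if configured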
@@ -105,8 +107,30 @@ module Backup
end
end
- # rubocop: disable Metrics/AbcSize
+ def verify_backup_version
+ Dir.chdir(backup_path) do
+ # restoring mismatching backups can lead to unexpected problems
+ if settings[:gitlab_version] != Gitlab::VERSION
+ progress.puts(<<~HEREDOC.color(:red))
+ GitLab version mismatch:
+ Your current GitLab version (#{Gitlab::VERSION}) differs from the GitLab version in the backup!
+ Please switch to the following version and try again:
+ version: #{settings[:gitlab_version]}
+ HEREDOC
+ progress.puts
+ progress.puts "Hint: git checkout v#{settings[:gitlab_version]}"
+ exit 1
+ end
+ end
+ end
+
def unpack
+ if ENV['BACKUP'].blank? && non_tarred_backup?
+ progress.puts "Non tarred backup found in #{backup_path}, using that"
+
+ return false
+ end
+
Dir.chdir(backup_path) do
# check for existing backups in the backup dir
if backup_file_list.empty?
@@ -141,21 +165,6 @@ module Backup
progress.puts 'unpacking backup failed'.color(:red)
exit 1
end
-
- ENV["VERSION"] = "#{settings[:db_version]}" if settings[:db_version].to_i > 0
-
- # restoring mismatching backups can lead to unexpected problems
- if settings[:gitlab_version] != Gitlab::VERSION
- progress.puts(<<~HEREDOC.color(:red))
- GitLab version mismatch:
- Your current GitLab version (#{Gitlab::VERSION}) differs from the GitLab version in the backup!
- Please switch to the following version and try again:
- version: #{settings[:gitlab_version]}
- HEREDOC
- progress.puts
- progress.puts "Hint: git checkout v#{settings[:gitlab_version]}"
- exit 1
- end
end
end
@@ -170,6 +179,10 @@ module Backup
private
+ def non_tarred_backup?
+ File.exist?(File.join(backup_path, 'backup_information.yml'))
+ end
+
def backup_path
Gitlab.config.backup.path
end
@@ -252,7 +265,7 @@ module Backup
def create_attributes
attrs = {
key: remote_target,
- body: File.open(tar_file),
+ body: File.open(File.join(backup_path, tar_file)),
multipart_chunk_size: Gitlab.config.backup.upload.multipart_chunk_size,
encryption: Gitlab.config.backup.upload.encryption,
encryption_key: Gitlab.config.backup.upload.encryption_key,
diff --git a/lib/backup/repository.rb b/lib/backup/repository.rb
index 123a695be13..1c5108b12ab 100644
--- a/lib/backup/repository.rb
+++ b/lib/backup/repository.rb
@@ -4,7 +4,6 @@ require 'yaml'
module Backup
class Repository
- include Gitlab::ShellAdapter
attr_reader :progress
def initialize(progress)
@@ -71,23 +70,14 @@ module Backup
def restore
Project.find_each(batch_size: 1000) do |project|
progress.print " * #{project.full_path} ... "
- path_to_project_bundle = path_to_bundle(project)
- project.repository.remove rescue nil
- restore_repo_success = nil
-
- if File.exist?(path_to_project_bundle)
+ restore_repo_success =
begin
- project.repository.create_from_bundle(path_to_project_bundle)
- restore_custom_hooks(project)
- restore_repo_success = true
- rescue => e
- restore_repo_success = false
- progress.puts "Error: #{e}".color(:red)
+ try_restore_repository(project)
+ rescue => err
+ progress.puts "Error: #{err}".color(:red)
+ false
end
- else
- restore_repo_success = gitlab_shell.create_project_repository(project)
- end
if restore_repo_success
progress.puts "[DONE]".color(:green)
@@ -118,6 +108,20 @@ module Backup
protected
+ def try_restore_repository(project)
+ path_to_project_bundle = path_to_bundle(project)
+ project.repository.remove rescue nil
+
+ if File.exist?(path_to_project_bundle)
+ project.repository.create_from_bundle(path_to_project_bundle)
+ restore_custom_hooks(project)
+ else
+ project.repository.create_repository
+ end
+
+ true
+ end
+
def path_to_bundle(project)
File.join(backup_repos_path, project.disk_path + '.bundle')
end
diff --git a/lib/banzai/filter/broadcast_message_placeholders_filter.rb b/lib/banzai/filter/broadcast_message_placeholders_filter.rb
new file mode 100644
index 00000000000..5b5e2f643c5
--- /dev/null
+++ b/lib/banzai/filter/broadcast_message_placeholders_filter.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+module Banzai
+ module Filter
+ # Replaces placeholders for broadcast messages with data from the current
+ # user or the instance.
+ class BroadcastMessagePlaceholdersFilter < HTML::Pipeline::Filter
+ def call
+ return doc unless context[:broadcast_message_placeholders]
+
+ doc.traverse { |node| replace_placeholders(node) }
+ end
+
+ private
+
+ def replace_placeholders(node)
+ if node.text? && !node.content.empty?
+ node.content = replace_content(node.content)
+ elsif href = link_href(node)
+ href.value = replace_content(href.value, url_safe_encoding: true)
+ end
+
+ node
+ end
+
+ def link_href(node)
+ node.element? &&
+ node.name == 'a' &&
+ node.attribute_nodes.find { |a| a.name == "href" }
+ end
+
+ def replace_content(content, url_safe_encoding: false)
+ placeholders.each do |placeholder, method|
+ regex = Regexp.new("{{#{placeholder}}}|#{CGI.escape("{{#{placeholder}}}")}")
+ value = url_safe_encoding ? CGI.escape(method.call.to_s) : method.call.to_s
+ content.gsub!(regex, value)
+ end
+
+ content
+ end
+
+ def placeholders
+ {
+ "email" => -> { current_user.try(:email) },
+ "name" => -> { current_user.try(:name) },
+ "user_id" => -> { current_user.try(:id) },
+ "username" => -> { current_user.try(:username) },
+ "instance_id" => -> { Gitlab::CurrentSettings.try(:uuid) }
+ }
+ end
+
+ def current_user
+ context[:current_user]
+ end
+ end
+ end
+end
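A standalone Ruby sketch of the substitution the new filter performs, outside the HTML pipeline; the placeholder values are made up:

    require 'cgi'

    placeholders = {
      'username'    => -> { 'alice' },
      'instance_id' => -> { 'b0c1-example-uuid' }
    }
    content = 'Hi {{username}}, maintenance on instance {{instance_id}} starts soon'
    placeholders.each do |placeholder, method|
      regex = Regexp.new("{{#{placeholder}}}|#{CGI.escape("{{#{placeholder}}}")}")
      content.gsub!(regex, method.call.to_s)
    end
    content # => "Hi alice, maintenance on instance b0c1-example-uuid starts soon"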
diff --git a/lib/banzai/filter/inline_embeds_filter.rb b/lib/banzai/filter/inline_embeds_filter.rb
index 9f1ef0796f0..d7d78cf1866 100644
--- a/lib/banzai/filter/inline_embeds_filter.rb
+++ b/lib/banzai/filter/inline_embeds_filter.rb
@@ -21,11 +21,18 @@ module Banzai
doc
end
- # Implement in child class.
+ # Child class must provide the metrics_dashboard_url.
#
# Return a Nokogiri::XML::Element to embed in the
- # markdown.
+ # markdown which provides a URL to the metrics_dashboard endpoint where
+ # data can be requested through a Prometheus proxy. InlineMetricsRedactorFilter
+ # is responsible for permissions to see this div (and relies on the class 'js-render-metrics').
def create_element(params)
+ doc.document.create_element(
+ 'div',
+ class: 'js-render-metrics',
+ 'data-dashboard-url': metrics_dashboard_url(params)
+ )
end
# Implement in child class unless overriding #embed_params
@@ -60,6 +67,21 @@ module Banzai
link_pattern.match(url) { |m| m.named_captures }
end
+
+ # Parses query params out from full url string into hash.
+ #
+ # Ex) 'https://<root>/<project>/<environment>/metrics?title=Title&group=Group'
+ # --> { title: 'Title', group: 'Group' }
+ def query_params(url)
+ Gitlab::Metrics::Dashboard::Url.parse_query(url)
+ end
+
+ # Implement in child class.
+ #
+ # Provides a full url to request the relevant panels of metric data.
+ def metrics_dashboard_url
+ raise NotImplementedError
+ end
end
end
end
diff --git a/lib/banzai/filter/inline_grafana_metrics_filter.rb b/lib/banzai/filter/inline_grafana_metrics_filter.rb
index 321580b532f..07bde9858e8 100644
--- a/lib/banzai/filter/inline_grafana_metrics_filter.rb
+++ b/lib/banzai/filter/inline_grafana_metrics_filter.rb
@@ -10,20 +10,17 @@ module Banzai
def create_element(params)
begin_loading_dashboard(params[:url])
- doc.document.create_element(
- 'div',
- class: 'js-render-metrics',
- 'data-dashboard-url': metrics_dashboard_url(params)
- )
+ super
end
+ # @return [Hash<Symbol, String>] with keys :grafana_url, :start, and :end
def embed_params(node)
query_params = Gitlab::Metrics::Dashboard::Url.parse_query(node['href'])
- return unless [:panelId, :from, :to].all? do |param|
- query_params.include?(param)
- end
- { url: node['href'], start: query_params[:from], end: query_params[:to] }
+ time_window = Grafana::TimeWindow.new(query_params[:from], query_params[:to])
+ url = url_with_window(node['href'], query_params, time_window.in_milliseconds)
+
+ { grafana_url: url }.merge(time_window.formatted)
end
# Selects any links whose href contains the configured
@@ -48,18 +45,24 @@ module Banzai
Gitlab::Routing.url_helpers.project_grafana_api_metrics_dashboard_url(
project,
embedded: true,
- grafana_url: params[:url],
- start: format_time(params[:start]),
- end: format_time(params[:end])
+ **params
)
end
- # Formats a timestamp from Grafana for compatibility with
- # parsing in JS via `new Date(timestamp)`
+ # If the provided url is missing time window parameters,
+ # this inserts the default window into the url, allowing
+ # the embed service to correctly format prometheus
+ # queries during embed processing.
#
- # @param time [String] Represents miliseconds since epoch
- def format_time(time)
- Time.at(time.to_i / 1000).utc.strftime('%FT%TZ')
+ # @param url [String]
+ # @param query_params [Hash<Symbol, String>]
+ # @param time_window_params [Hash<Symbol, Integer>]
+ # @return [String]
+ def url_with_window(url, query_params, time_window_params)
+ uri = URI(url)
+ uri.query = time_window_params.merge(query_params).to_query
+
+ uri.to_s
end
# Fetches a dashboard and caches the result for the
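A standalone sketch of what url_with_window does to a Grafana link that lacks a time window; the URL and window values are illustrative, and URI.encode_www_form stands in for ActiveSupport's to_query:

    require 'uri'

    url          = 'https://grafana.example.com/d/XDaNK6amz/gitlab-omnibus?panelId=8'
    query_params = { panelId: '8' }
    time_window  = { from: 1_583_020_800_000, to: 1_583_024_400_000 } # assumed default window, in milliseconds
    uri = URI(url)
    uri.query = URI.encode_www_form(time_window.merge(query_params))
    uri.to_s
    # => "https://grafana.example.com/d/XDaNK6amz/gitlab-omnibus?from=1583020800000&to=1583024400000&panelId=8"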
diff --git a/lib/banzai/filter/inline_metrics_filter.rb b/lib/banzai/filter/inline_metrics_filter.rb
index c1f4bf1f97f..409e8db87f4 100644
--- a/lib/banzai/filter/inline_metrics_filter.rb
+++ b/lib/banzai/filter/inline_metrics_filter.rb
@@ -5,21 +5,12 @@ module Banzai
# HTML filter that inserts a placeholder element for each
# reference to a metrics dashboard.
class InlineMetricsFilter < Banzai::Filter::InlineEmbedsFilter
- # Placeholder element for the frontend to use as an
- # injection point for charts.
- def create_element(params)
- doc.document.create_element(
- 'div',
- class: 'js-render-metrics',
- 'data-dashboard-url': metrics_dashboard_url(params)
- )
- end
-
# Search params for selecting metrics links. A few
# simple checks are enough to boost performance without
# the cost of doing a full regex match.
def xpath_search
"descendant-or-self::a[contains(@href,'metrics') and \
+ contains(@href,'environments') and \
starts-with(@href, '#{Gitlab.config.gitlab.url}')]"
end
@@ -41,14 +32,6 @@ module Banzai
**query_params(params['url'])
)
end
-
- # Parses query params out from full url string into hash.
- #
- # Ex) 'https://<root>/<project>/<environment>/metrics?title=Title&group=Group'
- # --> { title: 'Title', group: 'Group' }
- def query_params(url)
- Gitlab::Metrics::Dashboard::Url.parse_query(url)
- end
end
end
end
diff --git a/lib/banzai/filter/inline_metrics_redactor_filter.rb b/lib/banzai/filter/inline_metrics_redactor_filter.rb
index ae830831a27..75bd3325bd4 100644
--- a/lib/banzai/filter/inline_metrics_redactor_filter.rb
+++ b/lib/banzai/filter/inline_metrics_redactor_filter.rb
@@ -9,8 +9,8 @@ module Banzai
METRICS_CSS_CLASS = '.js-render-metrics'
EMBED_LIMIT = 100
- URL = Gitlab::Metrics::Dashboard::Url
+ Route = Struct.new(:regex, :permission)
Embed = Struct.new(:project_path, :permission)
# Finds all embeds based on the css class the FE
@@ -59,14 +59,28 @@ module Banzai
embed = Embed.new
url = node.attribute('data-dashboard-url').to_s
- set_path_and_permission(embed, url, URL.metrics_regex, :read_environment)
- set_path_and_permission(embed, url, URL.grafana_regex, :read_project) unless embed.permission
+ permissions_by_route.each do |route|
+ set_path_and_permission(embed, url, route.regex, route.permission) unless embed.permission
+ end
embeds[node] = embed if embed.permission
end
end
end
+ def permissions_by_route
+ [
+ Route.new(
+ ::Gitlab::Metrics::Dashboard::Url.metrics_regex,
+ :read_environment
+ ),
+ Route.new(
+ ::Gitlab::Metrics::Dashboard::Url.grafana_regex,
+ :read_project
+ )
+ ]
+ end
+
# Attempts to determine the path and permission attributes
# of a url based on expected dashboard url formats and
# sets the attributes on an Embed object
@@ -129,3 +143,5 @@ module Banzai
end
end
end
+
+Banzai::Filter::InlineMetricsRedactorFilter.prepend_if_ee('EE::Banzai::Filter::InlineMetricsRedactorFilter')
diff --git a/lib/banzai/filter/issuable_state_filter.rb b/lib/banzai/filter/issuable_state_filter.rb
index f9d8bf8a1fa..a88629ac105 100644
--- a/lib/banzai/filter/issuable_state_filter.rb
+++ b/lib/banzai/filter/issuable_state_filter.rb
@@ -18,7 +18,7 @@ module Banzai
issuables = extractor.extract([doc])
issuables.each do |node, issuable|
- next if !can_read_cross_project? && cross_reference?(issuable)
+ next if !can_read_cross_project? && cross_referenced?(issuable)
if VISIBLE_STATES.include?(issuable.state) && issuable_reference?(node.inner_html, issuable)
state = moved_issue?(issuable) ? s_("IssuableStatus|moved") : issuable.state
@@ -39,7 +39,7 @@ module Banzai
CGI.unescapeHTML(text) == issuable.reference_link_text(project || group)
end
- def cross_reference?(issuable)
+ def cross_referenced?(issuable)
return true if issuable.project != project
return true if issuable.respond_to?(:group) && issuable.group != group
diff --git a/lib/banzai/filter/label_reference_filter.rb b/lib/banzai/filter/label_reference_filter.rb
index 609ea8fb5ca..60ffb178393 100644
--- a/lib/banzai/filter/label_reference_filter.rb
+++ b/lib/banzai/filter/label_reference_filter.rb
@@ -93,23 +93,26 @@ module Banzai
end
presenter = object.present(issuable_subject: parent)
- LabelsHelper.render_colored_label(presenter, label_suffix: label_suffix, title: tooltip_title(presenter))
+ LabelsHelper.render_colored_label(presenter, suffix: label_suffix)
end
- def tooltip_title(label)
- nil
+ def wrap_link(link, label)
+ presenter = label.present(issuable_subject: project || group)
+ LabelsHelper.wrap_label_html(link, small: true, label: presenter)
end
def full_path_ref?(matches)
matches[:namespace] && matches[:project]
end
+ def reference_class(type, tooltip: true)
+ super + ' gl-link gl-label-link'
+ end
+
def object_link_title(object, matches)
- # use title of wrapped element instead
- nil
+ presenter = object.present(issuable_subject: project || group)
+ LabelsHelper.label_tooltip_title(presenter)
end
end
end
end
-
-Banzai::Filter::LabelReferenceFilter.prepend_if_ee('EE::Banzai::Filter::LabelReferenceFilter')
diff --git a/lib/banzai/filter/reference_filter.rb b/lib/banzai/filter/reference_filter.rb
index b3ce9200b49..38bbed3cf72 100644
--- a/lib/banzai/filter/reference_filter.rb
+++ b/lib/banzai/filter/reference_filter.rb
@@ -37,7 +37,8 @@ module Banzai
attributes[:reference_type] ||= self.class.reference_type
attributes[:container] ||= 'body'
- attributes[:placement] ||= 'bottom'
+ attributes[:placement] ||= 'top'
+ attributes[:html] ||= 'true'
attributes.delete(:original) if context[:no_original_data]
attributes.map do |key, value|
%Q(data-#{key.to_s.dasherize}="#{escape_once(value)}")
diff --git a/lib/banzai/filter/repository_link_filter.rb b/lib/banzai/filter/repository_link_filter.rb
index d448238c6e4..24900217560 100644
--- a/lib/banzai/filter/repository_link_filter.rb
+++ b/lib/banzai/filter/repository_link_filter.rb
@@ -80,6 +80,13 @@ module Banzai
end
Gitlab::GitalyClient::BlobService.new(repository).get_blob_types(revision_paths, 1)
+ rescue GRPC::Unavailable, GRPC::DeadlineExceeded => e
+ # Handle Gitaly connection issues gracefully
+ Gitlab::ErrorTracking.track_exception(e, project_id: project.id)
+ # Return all links as blob types
+ paths.collect do |path|
+ [path, :blob]
+ end
end
def get_uri(html_attr)
@@ -124,7 +131,7 @@ module Banzai
path = cleaned_file_path(uri)
nested_path = relative_file_path(uri)
- file_exists?(nested_path) ? nested_path : path
+ path_exists?(nested_path) ? nested_path : path
end
def cleaned_file_path(uri)
@@ -183,12 +190,12 @@ module Banzai
parts.push(path).join('/')
end
- def file_exists?(path)
- path.present? && uri_type(path).present?
+ def path_exists?(path)
+ path.present? && @uri_types[path] != :unknown
end
def uri_type(path)
- @uri_types[path] == :unknown ? "" : @uri_types[path]
+ @uri_types[path] == :unknown ? :blob : @uri_types[path]
end
def current_commit
diff --git a/lib/banzai/pipeline/gfm_pipeline.rb b/lib/banzai/pipeline/gfm_pipeline.rb
index dad0d95685e..b6238dfe7f0 100644
--- a/lib/banzai/pipeline/gfm_pipeline.rb
+++ b/lib/banzai/pipeline/gfm_pipeline.rb
@@ -29,8 +29,7 @@ module Banzai
Filter::AudioLinkFilter,
Filter::ImageLazyLoadFilter,
Filter::ImageLinkFilter,
- Filter::InlineMetricsFilter,
- Filter::InlineGrafanaMetricsFilter,
+ *metrics_filters,
Filter::TableOfContentsFilter,
Filter::TableOfContentsTagFilter,
Filter::AutolinkFilter,
@@ -48,6 +47,13 @@ module Banzai
]
end
+ def self.metrics_filters
+ [
+ Filter::InlineMetricsFilter,
+ Filter::InlineGrafanaMetricsFilter
+ ]
+ end
+
def self.reference_filters
[
Filter::UserReferenceFilter,
diff --git a/lib/banzai/pipeline/post_process_pipeline.rb b/lib/banzai/pipeline/post_process_pipeline.rb
index 5e02d972614..8236b702147 100644
--- a/lib/banzai/pipeline/post_process_pipeline.rb
+++ b/lib/banzai/pipeline/post_process_pipeline.rb
@@ -8,7 +8,8 @@ module Banzai
def self.filters
@filters ||= FilterArray[
*internal_link_filters,
- Filter::AbsoluteLinkFilter
+ Filter::AbsoluteLinkFilter,
+ Filter::BroadcastMessagePlaceholdersFilter
]
end
diff --git a/lib/declarative_policy.rb b/lib/declarative_policy.rb
index 9e9df88373a..e51f30af581 100644
--- a/lib/declarative_policy.rb
+++ b/lib/declarative_policy.rb
@@ -13,6 +13,8 @@ require_dependency 'declarative_policy/step'
require_dependency 'declarative_policy/base'
module DeclarativePolicy
+ extend PreferredScope
+
CLASS_CACHE_MUTEX = Mutex.new
CLASS_CACHE_IVAR = :@__DeclarativePolicy_CLASS_CACHE
diff --git a/lib/declarative_policy/preferred_scope.rb b/lib/declarative_policy/preferred_scope.rb
index 9b7d1548056..d653a0ec1e1 100644
--- a/lib/declarative_policy/preferred_scope.rb
+++ b/lib/declarative_policy/preferred_scope.rb
@@ -1,9 +1,9 @@
# frozen_string_literal: true
module DeclarativePolicy
- PREFERRED_SCOPE_KEY = :"DeclarativePolicy.preferred_scope"
+ module PreferredScope
+ PREFERRED_SCOPE_KEY = :"DeclarativePolicy.preferred_scope"
- class << self
def with_preferred_scope(scope)
Thread.current[PREFERRED_SCOPE_KEY], old_scope = scope, Thread.current[PREFERRED_SCOPE_KEY]
yield
diff --git a/lib/feature.rb b/lib/feature.rb
index aadc2c64957..60a5c03a839 100644
--- a/lib/feature.rb
+++ b/lib/feature.rb
@@ -38,7 +38,7 @@ class Feature
begin
# We saw on GitLab.com, this database request was called 2300
# times/s. Let's cache it for a minute to avoid that load.
- Gitlab::ThreadMemoryCache.cache_backend.fetch('flipper:persisted_names', expires_in: 1.minute) do
+ Gitlab::ProcessMemoryCache.cache_backend.fetch('flipper:persisted_names', expires_in: 1.minute) do
FlipperFeature.feature_names
end
end
@@ -134,7 +134,11 @@ class Feature
end
def l1_cache_backend
- Gitlab::ThreadMemoryCache.cache_backend
+ if Gitlab::Utils.to_boolean(ENV['USE_THREAD_MEMORY_CACHE'])
+ Gitlab::ThreadMemoryCache.cache_backend
+ else
+ Gitlab::ProcessMemoryCache.cache_backend
+ end
end
def l2_cache_backend
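A rough, standalone approximation of the new toggle: the process-wide cache is now the default, and the per-thread cache can be re-enabled through an environment variable (the truthy-string list below only approximates Gitlab::Utils.to_boolean):

    use_thread_cache = %w[1 true yes on].include?(ENV['USE_THREAD_MEMORY_CACHE'].to_s.downcase)
    backend_class = use_thread_cache ? 'Gitlab::ThreadMemoryCache' : 'Gitlab::ProcessMemoryCache'
    puts "L1 feature-flag cache backend: #{backend_class}"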
diff --git a/lib/gitlab/access/branch_protection.rb b/lib/gitlab/access/branch_protection.rb
index f039e5c011f..339a99eb068 100644
--- a/lib/gitlab/access/branch_protection.rb
+++ b/lib/gitlab/access/branch_protection.rb
@@ -37,6 +37,10 @@ module Gitlab
def developer_can_merge?
level == PROTECTION_DEV_CAN_MERGE
end
+
+ def fully_protected?
+ level == PROTECTION_FULL
+ end
end
end
end
diff --git a/lib/gitlab/application_rate_limiter.rb b/lib/gitlab/application_rate_limiter.rb
index 49e1f1edfb9..211c59fe841 100644
--- a/lib/gitlab/application_rate_limiter.rb
+++ b/lib/gitlab/application_rate_limiter.rb
@@ -21,8 +21,9 @@ module Gitlab
{
project_export: { threshold: 1, interval: 5.minutes },
project_download_export: { threshold: 10, interval: 10.minutes },
+ project_repositories_archive: { threshold: 5, interval: 1.minute },
project_generate_new_export: { threshold: 1, interval: 5.minutes },
- project_import: { threshold: 30, interval: 10.minutes },
+ project_import: { threshold: 30, interval: 5.minutes },
play_pipeline_schedule: { threshold: 1, interval: 1.minute },
show_raw_controller: { threshold: -> { Gitlab::CurrentSettings.current_application_settings.raw_blob_request_limit }, interval: 1.minute }
}.freeze
diff --git a/lib/gitlab/auth.rb b/lib/gitlab/auth.rb
index 1329357d0b8..c16c2ce96de 100644
--- a/lib/gitlab/auth.rb
+++ b/lib/gitlab/auth.rb
@@ -88,7 +88,7 @@ module Gitlab
else
# If no user is provided, try LDAP.
# LDAP users are only authenticated via LDAP
- authenticators << Gitlab::Auth::LDAP::Authentication
+ authenticators << Gitlab::Auth::Ldap::Authentication
end
authenticators.compact!
@@ -134,7 +134,7 @@ module Gitlab
end
def authenticate_using_internal_or_ldap_password?
- Gitlab::CurrentSettings.password_authentication_enabled_for_git? || Gitlab::Auth::LDAP::Config.enabled?
+ Gitlab::CurrentSettings.password_authentication_enabled_for_git? || Gitlab::Auth::Ldap::Config.enabled?
end
def service_request_check(login, password, project)
diff --git a/lib/gitlab/auth/current_user_mode.rb b/lib/gitlab/auth/current_user_mode.rb
index 1ef95c03cfc..06ae4d81870 100644
--- a/lib/gitlab/auth/current_user_mode.rb
+++ b/lib/gitlab/auth/current_user_mode.rb
@@ -23,15 +23,26 @@ module Gitlab
class << self
# Admin mode activation requires storing a flag in the user session. Using this
- # method when scheduling jobs in Sidekiq will bypass the session check for a
- # user that was already in admin mode
+ # method when scheduling jobs in sessionless environments (e.g. Sidekiq, API)
+ # will bypass the session check for a user that was already in admin mode
+ #
+ # If passed a block, it will surround the block execution and reset the session
+ # bypass at the end; otherwise, call '.reset_bypass_session!' manually
def bypass_session!(admin_id)
Gitlab::SafeRequestStore[CURRENT_REQUEST_BYPASS_SESSION_ADMIN_ID_RS_KEY] = admin_id
Gitlab::AppLogger.debug("Bypassing session in admin mode for: #{admin_id}")
- yield
- ensure
+ if block_given?
+ begin
+ yield
+ ensure
+ reset_bypass_session!
+ end
+ end
+ end
+
+ def reset_bypass_session!
Gitlab::SafeRequestStore.delete(CURRENT_REQUEST_BYPASS_SESSION_ADMIN_ID_RS_KEY)
end
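A hedged usage sketch of the two calling styles this change allows; admin and perform_sessionless_work are placeholders:

    # Block form: the bypass is reset automatically when the block finishes.
    Gitlab::Auth::CurrentUserMode.bypass_session!(admin.id) do
      perform_sessionless_work
    end

    # Block-less form: the caller is responsible for resetting the bypass.
    Gitlab::Auth::CurrentUserMode.bypass_session!(admin.id)
    perform_sessionless_work
    Gitlab::Auth::CurrentUserMode.reset_bypass_session!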
@@ -90,10 +101,6 @@ module Gitlab
current_session_data[ADMIN_MODE_START_TIME_KEY] = Time.now
end
- def enable_sessionless_admin_mode!
- request_admin_mode! && enable_admin_mode!(skip_password_validation: true)
- end
-
def disable_admin_mode!
return unless user&.admin?
diff --git a/lib/gitlab/auth/key_status_checker.rb b/lib/gitlab/auth/key_status_checker.rb
new file mode 100644
index 00000000000..af654c0caad
--- /dev/null
+++ b/lib/gitlab/auth/key_status_checker.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Auth
+ class KeyStatusChecker
+ include Gitlab::Utils::StrongMemoize
+
+ attr_reader :key
+
+ def initialize(key)
+ @key = key
+ end
+
+ def show_console_message?
+ console_message.present?
+ end
+
+ def console_message
+ strong_memoize(:console_message) do
+ if key.expired?
+ _('INFO: Your SSH key has expired. Please generate a new key.')
+ elsif key.expires_soon?
+ _('INFO: Your SSH key is expiring soon. Please generate a new key.')
+ end
+ end
+ end
+ end
+ end
+end
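A hedged usage sketch; key is assumed to be a record that responds to expired? and expires_soon?:

    checker = Gitlab::Auth::KeyStatusChecker.new(key)
    warn checker.console_message if checker.show_console_message?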
diff --git a/lib/gitlab/auth/ldap/access.rb b/lib/gitlab/auth/ldap/access.rb
index 940b802be7e..98eec0e4a7b 100644
--- a/lib/gitlab/auth/ldap/access.rb
+++ b/lib/gitlab/auth/ldap/access.rb
@@ -6,14 +6,14 @@
#
module Gitlab
module Auth
- module LDAP
+ module Ldap
class Access
- prepend_if_ee('::EE::Gitlab::Auth::LDAP::Access') # rubocop: disable Cop/InjectEnterpriseEditionModule
+ prepend_if_ee('::EE::Gitlab::Auth::Ldap::Access') # rubocop: disable Cop/InjectEnterpriseEditionModule
attr_reader :provider, :user, :ldap_identity
def self.open(user, &block)
- Gitlab::Auth::LDAP::Adapter.open(user.ldap_identity.provider) do |adapter|
+ Gitlab::Auth::Ldap::Adapter.open(user.ldap_identity.provider) do |adapter|
block.call(self.new(user, adapter))
end
end
@@ -50,7 +50,7 @@ module Gitlab
end
# Block user in GitLab if they were blocked in AD
- if Gitlab::Auth::LDAP::Person.disabled_via_active_directory?(ldap_identity.extern_uid, adapter)
+ if Gitlab::Auth::Ldap::Person.disabled_via_active_directory?(ldap_identity.extern_uid, adapter)
block_user(user, 'is disabled in Active Directory')
false
else
@@ -62,7 +62,7 @@ module Gitlab
block_user(user, 'does not exist anymore')
false
end
- rescue LDAPConnectionError
+ rescue LdapConnectionError
false
end
@@ -73,11 +73,11 @@ module Gitlab
private
def adapter
- @adapter ||= Gitlab::Auth::LDAP::Adapter.new(provider)
+ @adapter ||= Gitlab::Auth::Ldap::Adapter.new(provider)
end
def ldap_config
- Gitlab::Auth::LDAP::Config.new(provider)
+ Gitlab::Auth::Ldap::Config.new(provider)
end
def ldap_user
@@ -87,7 +87,7 @@ module Gitlab
end
def find_ldap_user
- Gitlab::Auth::LDAP::Person.find_by_dn(ldap_identity.extern_uid, adapter)
+ Gitlab::Auth::Ldap::Person.find_by_dn(ldap_identity.extern_uid, adapter)
end
def block_user(user, reason)
diff --git a/lib/gitlab/auth/ldap/adapter.rb b/lib/gitlab/auth/ldap/adapter.rb
index 356579ef402..c5ec4e1981b 100644
--- a/lib/gitlab/auth/ldap/adapter.rb
+++ b/lib/gitlab/auth/ldap/adapter.rb
@@ -2,9 +2,9 @@
module Gitlab
module Auth
- module LDAP
+ module Ldap
class Adapter
- prepend_if_ee('::EE::Gitlab::Auth::LDAP::Adapter') # rubocop: disable Cop/InjectEnterpriseEditionModule
+ prepend_if_ee('::EE::Gitlab::Auth::Ldap::Adapter') # rubocop: disable Cop/InjectEnterpriseEditionModule
SEARCH_RETRY_FACTOR = [1, 1, 2, 3].freeze
MAX_SEARCH_RETRIES = Rails.env.test? ? 1 : SEARCH_RETRY_FACTOR.size.freeze
@@ -18,7 +18,7 @@ module Gitlab
end
def self.config(provider)
- Gitlab::Auth::LDAP::Config.new(provider)
+ Gitlab::Auth::Ldap::Config.new(provider)
end
def initialize(provider, ldap = nil)
@@ -27,7 +27,7 @@ module Gitlab
end
def config
- Gitlab::Auth::LDAP::Config.new(provider)
+ Gitlab::Auth::Ldap::Config.new(provider)
end
def users(fields, value, limit = nil)
@@ -75,7 +75,7 @@ module Gitlab
renew_connection_adapter
retry
else
- raise LDAPConnectionError, error_message
+ raise LdapConnectionError, error_message
end
end
@@ -91,13 +91,13 @@ module Gitlab
end
entries.map do |entry|
- Gitlab::Auth::LDAP::Person.new(entry, provider)
+ Gitlab::Auth::Ldap::Person.new(entry, provider)
end
end
def user_options(fields, value, limit)
options = {
- attributes: Gitlab::Auth::LDAP::Person.ldap_attributes(config),
+ attributes: Gitlab::Auth::Ldap::Person.ldap_attributes(config),
base: config.base
}
diff --git a/lib/gitlab/auth/ldap/auth_hash.rb b/lib/gitlab/auth/ldap/auth_hash.rb
index 83fdc8a8c76..5435355f136 100644
--- a/lib/gitlab/auth/ldap/auth_hash.rb
+++ b/lib/gitlab/auth/ldap/auth_hash.rb
@@ -4,10 +4,10 @@
#
module Gitlab
module Auth
- module LDAP
+ module Ldap
class AuthHash < Gitlab::Auth::OAuth::AuthHash
def uid
- @uid ||= Gitlab::Auth::LDAP::Person.normalize_dn(super)
+ @uid ||= Gitlab::Auth::Ldap::Person.normalize_dn(super)
end
def username
@@ -42,7 +42,7 @@ module Gitlab
end
def ldap_config
- @ldap_config ||= Gitlab::Auth::LDAP::Config.new(self.provider)
+ @ldap_config ||= Gitlab::Auth::Ldap::Config.new(self.provider)
end
end
end
diff --git a/lib/gitlab/auth/ldap/authentication.rb b/lib/gitlab/auth/ldap/authentication.rb
index 174e81dd603..d9964f237b1 100644
--- a/lib/gitlab/auth/ldap/authentication.rb
+++ b/lib/gitlab/auth/ldap/authentication.rb
@@ -8,10 +8,10 @@
module Gitlab
module Auth
- module LDAP
+ module Ldap
class Authentication < Gitlab::Auth::OAuth::Authentication
def self.login(login, password)
- return unless Gitlab::Auth::LDAP::Config.enabled?
+ return unless Gitlab::Auth::Ldap::Config.enabled?
return unless login.present? && password.present?
# return found user that was authenticated by first provider for given login credentials
@@ -22,7 +22,7 @@ module Gitlab
end
def self.providers
- Gitlab::Auth::LDAP::Config.providers
+ Gitlab::Auth::Ldap::Config.providers
end
def login(login, password)
@@ -33,7 +33,7 @@ module Gitlab
)
return unless result
- @user = Gitlab::Auth::LDAP::User.find_by_uid_and_provider(result.dn, provider)
+ @user = Gitlab::Auth::Ldap::User.find_by_uid_and_provider(result.dn, provider)
end
def adapter
@@ -41,7 +41,7 @@ module Gitlab
end
def config
- Gitlab::Auth::LDAP::Config.new(provider)
+ Gitlab::Auth::Ldap::Config.new(provider)
end
def user_filter(login)
diff --git a/lib/gitlab/auth/ldap/config.rb b/lib/gitlab/auth/ldap/config.rb
index 4bc0ceedae7..709cd0d787a 100644
--- a/lib/gitlab/auth/ldap/config.rb
+++ b/lib/gitlab/auth/ldap/config.rb
@@ -3,9 +3,9 @@
# Load a specific server configuration
module Gitlab
module Auth
- module LDAP
+ module Ldap
class Config
- prepend_if_ee('::EE::Gitlab::Auth::LDAP::Config') # rubocop: disable Cop/InjectEnterpriseEditionModule
+ prepend_if_ee('::EE::Gitlab::Auth::Ldap::Config') # rubocop: disable Cop/InjectEnterpriseEditionModule
NET_LDAP_ENCRYPTION_METHOD = {
simple_tls: :simple_tls,
diff --git a/lib/gitlab/auth/ldap/dn.rb b/lib/gitlab/auth/ldap/dn.rb
index 0b496da784d..ea88dedadf5 100644
--- a/lib/gitlab/auth/ldap/dn.rb
+++ b/lib/gitlab/auth/ldap/dn.rb
@@ -21,7 +21,7 @@
# class also helps take care of that.
module Gitlab
module Auth
- module LDAP
+ module Ldap
class DN
FormatError = Class.new(StandardError)
MalformedError = Class.new(FormatError)
diff --git a/lib/gitlab/auth/ldap/ldap_connection_error.rb b/lib/gitlab/auth/ldap/ldap_connection_error.rb
index d0e5f24d203..13b0d29e104 100644
--- a/lib/gitlab/auth/ldap/ldap_connection_error.rb
+++ b/lib/gitlab/auth/ldap/ldap_connection_error.rb
@@ -2,8 +2,8 @@
module Gitlab
module Auth
- module LDAP
- LDAPConnectionError = Class.new(StandardError)
+ module Ldap
+ LdapConnectionError = Class.new(StandardError)
end
end
end
diff --git a/lib/gitlab/auth/ldap/person.rb b/lib/gitlab/auth/ldap/person.rb
index 88ec22aa75c..430f94a9a28 100644
--- a/lib/gitlab/auth/ldap/person.rb
+++ b/lib/gitlab/auth/ldap/person.rb
@@ -2,9 +2,9 @@
module Gitlab
module Auth
- module LDAP
+ module Ldap
class Person
- prepend_if_ee('::EE::Gitlab::Auth::LDAP::Person') # rubocop: disable Cop/InjectEnterpriseEditionModule
+ prepend_if_ee('::EE::Gitlab::Auth::Ldap::Person') # rubocop: disable Cop/InjectEnterpriseEditionModule
# Active Directory-specific LDAP filter that checks if bit 2 of the
# userAccountControl attribute is set.
@@ -45,8 +45,8 @@ module Gitlab
end
def self.normalize_dn(dn)
- ::Gitlab::Auth::LDAP::DN.new(dn).to_normalized_s
- rescue ::Gitlab::Auth::LDAP::DN::FormatError => e
+ ::Gitlab::Auth::Ldap::DN.new(dn).to_normalized_s
+ rescue ::Gitlab::Auth::Ldap::DN::FormatError => e
Rails.logger.info("Returning original DN \"#{dn}\" due to error during normalization attempt: #{e.message}") # rubocop:disable Gitlab/RailsLogger
dn
@@ -57,8 +57,8 @@ module Gitlab
# 1. Excess spaces are stripped
# 2. The string is downcased (for case-insensitivity)
def self.normalize_uid(uid)
- ::Gitlab::Auth::LDAP::DN.normalize_value(uid)
- rescue ::Gitlab::Auth::LDAP::DN::FormatError => e
+ ::Gitlab::Auth::Ldap::DN.normalize_value(uid)
+ rescue ::Gitlab::Auth::Ldap::DN::FormatError => e
Rails.logger.info("Returning original UID \"#{uid}\" due to error during normalization attempt: #{e.message}") # rubocop:disable Gitlab/RailsLogger
uid
@@ -103,7 +103,7 @@ module Gitlab
attr_reader :entry
def config
- @config ||= Gitlab::Auth::LDAP::Config.new(provider)
+ @config ||= Gitlab::Auth::Ldap::Config.new(provider)
end
# Using the LDAP attributes configuration, find and return the first
diff --git a/lib/gitlab/auth/ldap/user.rb b/lib/gitlab/auth/ldap/user.rb
index 3b68230e193..df14e5fc3dc 100644
--- a/lib/gitlab/auth/ldap/user.rb
+++ b/lib/gitlab/auth/ldap/user.rb
@@ -8,10 +8,10 @@
#
module Gitlab
module Auth
- module LDAP
+ module Ldap
class User < Gitlab::Auth::OAuth::User
extend ::Gitlab::Utils::Override
- prepend_if_ee('::EE::Gitlab::Auth::LDAP::User') # rubocop: disable Cop/InjectEnterpriseEditionModule
+ prepend_if_ee('::EE::Gitlab::Auth::Ldap::User') # rubocop: disable Cop/InjectEnterpriseEditionModule
class << self
# rubocop: disable CodeReuse/ActiveRecord
@@ -46,7 +46,7 @@ module Gitlab
end
def allowed?
- Gitlab::Auth::LDAP::Access.allowed?(gl_user)
+ Gitlab::Auth::Ldap::Access.allowed?(gl_user)
end
def valid_sign_in?
@@ -54,11 +54,11 @@ module Gitlab
end
def ldap_config
- Gitlab::Auth::LDAP::Config.new(auth_hash.provider)
+ Gitlab::Auth::Ldap::Config.new(auth_hash.provider)
end
def auth_hash=(auth_hash)
- @auth_hash = Gitlab::Auth::LDAP::AuthHash.new(auth_hash)
+ @auth_hash = Gitlab::Auth::Ldap::AuthHash.new(auth_hash)
end
end
end
diff --git a/lib/gitlab/auth/o_auth/provider.rb b/lib/gitlab/auth/o_auth/provider.rb
index 3d44c83736a..f0811098b15 100644
--- a/lib/gitlab/auth/o_auth/provider.rb
+++ b/lib/gitlab/auth/o_auth/provider.rb
@@ -7,7 +7,8 @@ module Gitlab
LABELS = {
"github" => "GitHub",
"gitlab" => "GitLab.com",
- "google_oauth2" => "Google"
+ "google_oauth2" => "Google",
+ "azure_oauth2" => "Azure AD"
}.freeze
def self.authentication(user, provider)
@@ -17,7 +18,7 @@ module Gitlab
authenticator =
case provider
when /^ldap/
- Gitlab::Auth::LDAP::Authentication
+ Gitlab::Auth::Ldap::Authentication
when 'database'
Gitlab::Auth::Database::Authentication
end
@@ -59,8 +60,8 @@ module Gitlab
def self.config_for(name)
name = name.to_s
if ldap_provider?(name)
- if Gitlab::Auth::LDAP::Config.valid_provider?(name)
- Gitlab::Auth::LDAP::Config.new(name).options
+ if Gitlab::Auth::Ldap::Config.valid_provider?(name)
+ Gitlab::Auth::Ldap::Config.new(name).options
else
nil
end
@@ -74,6 +75,12 @@ module Gitlab
config = config_for(name)
(config && config['label']) || LABELS[name] || name.titleize
end
+
+ def self.icon_for(name)
+ name = name.to_s
+ config = config_for(name)
+ config && config['icon']
+ end
end
end
end
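The new icon_for helper mirrors label_for: it looks up the provider's configuration and returns its 'icon' value, or nil when none is set. A minimal usage sketch (the provider name here is illustrative):

    # Returns the configured icon for the provider, or nil if it has no 'icon' key.
    Gitlab::Auth::OAuth::Provider.icon_for(:openid_connect)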
diff --git a/lib/gitlab/auth/o_auth/user.rb b/lib/gitlab/auth/o_auth/user.rb
index 300181025a4..df595da1536 100644
--- a/lib/gitlab/auth/o_auth/user.rb
+++ b/lib/gitlab/auth/o_auth/user.rb
@@ -111,7 +111,7 @@ module Gitlab
def find_or_build_ldap_user
return unless ldap_person
- user = Gitlab::Auth::LDAP::User.find_by_uid_and_provider(ldap_person.dn, ldap_person.provider)
+ user = Gitlab::Auth::Ldap::User.find_by_uid_and_provider(ldap_person.dn, ldap_person.provider)
if user
log.info "LDAP account found for user #{user.username}. Building new #{auth_hash.provider} identity."
return user
@@ -141,8 +141,8 @@ module Gitlab
return @ldap_person if defined?(@ldap_person)
# Look for a corresponding person with same uid in any of the configured LDAP providers
- Gitlab::Auth::LDAP::Config.providers.each do |provider|
- adapter = Gitlab::Auth::LDAP::Adapter.new(provider)
+ Gitlab::Auth::Ldap::Config.providers.each do |provider|
+ adapter = Gitlab::Auth::Ldap::Adapter.new(provider)
@ldap_person = find_ldap_person(auth_hash, adapter)
break if @ldap_person
end
@@ -150,15 +150,15 @@ module Gitlab
end
def find_ldap_person(auth_hash, adapter)
- Gitlab::Auth::LDAP::Person.find_by_uid(auth_hash.uid, adapter) ||
- Gitlab::Auth::LDAP::Person.find_by_email(auth_hash.uid, adapter) ||
- Gitlab::Auth::LDAP::Person.find_by_dn(auth_hash.uid, adapter)
- rescue Gitlab::Auth::LDAP::LDAPConnectionError
+ Gitlab::Auth::Ldap::Person.find_by_uid(auth_hash.uid, adapter) ||
+ Gitlab::Auth::Ldap::Person.find_by_email(auth_hash.uid, adapter) ||
+ Gitlab::Auth::Ldap::Person.find_by_dn(auth_hash.uid, adapter)
+ rescue Gitlab::Auth::Ldap::LdapConnectionError
nil
end
def ldap_config
- Gitlab::Auth::LDAP::Config.new(ldap_person.provider) if ldap_person
+ Gitlab::Auth::Ldap::Config.new(ldap_person.provider) if ldap_person
end
def needs_blocking?
diff --git a/lib/gitlab/authorized_keys.rb b/lib/gitlab/authorized_keys.rb
index 820a78b653c..50cd15b7a10 100644
--- a/lib/gitlab/authorized_keys.rb
+++ b/lib/gitlab/authorized_keys.rb
@@ -70,7 +70,7 @@ module Gitlab
#
# @param [String] id identifier of the key to be removed prefixed by `key-`
# @return [Boolean]
- def rm_key(id)
+ def remove_key(id)
lock do
logger.info("Removing key (#{id})")
open_authorized_keys_file('r+') do |f|
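The rename from rm_key to remove_key keeps the same contract: the argument is the key identifier prefixed with `key-`. A hedged usage sketch (the key ID is illustrative, and the class is assumed to use its default logger):

    # Removes the entry for key-42 from the authorized_keys file under the lock.
    Gitlab::AuthorizedKeys.new.remove_key('key-42')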
diff --git a/lib/gitlab/background_migration/backfill_snippet_repositories.rb b/lib/gitlab/background_migration/backfill_snippet_repositories.rb
new file mode 100644
index 00000000000..fa6453abefb
--- /dev/null
+++ b/lib/gitlab/background_migration/backfill_snippet_repositories.rb
@@ -0,0 +1,89 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # Class that creates a repository for each snippet that does not have one
+ # yet and commits the snippet content to it.
+ class BackfillSnippetRepositories
+ MAX_RETRIES = 2
+
+ def perform(start_id, stop_id)
+ Snippet.includes(:author, snippet_repository: :shard).where(id: start_id..stop_id).find_each do |snippet|
+ # We need to expire any cached exists? value so we check the repository's real state
+ snippet.repository.expire_exists_cache
+
+ next if repository_present?(snippet)
+
+ retry_index = 0
+
+ begin
+ create_repository_and_files(snippet)
+
+ logger.info(message: 'Snippet Migration: repository created and migrated', snippet: snippet.id)
+ rescue => e
+ retry_index += 1
+
+ retry if retry_index < MAX_RETRIES
+
+ logger.error(message: "Snippet Migration: error migrating snippet. Reason: #{e.message}", snippet: snippet.id)
+
+ destroy_snippet_repository(snippet)
+ delete_repository(snippet)
+ end
+ end
+ end
+
+ private
+
+ def repository_present?(snippet)
+ snippet.snippet_repository && !snippet.empty_repo?
+ end
+
+ def create_repository_and_files(snippet)
+ snippet.create_repository
+ create_commit(snippet)
+ end
+
+ def destroy_snippet_repository(snippet)
+ # Removing the db record
+ snippet.snippet_repository&.destroy
+ rescue => e
+ logger.error(message: "Snippet Migration: error destroying snippet repository. Reason: #{e.message}", snippet: snippet.id)
+ end
+
+ def delete_repository(snippet)
+ # Removing the repository on disk
+ snippet.repository.remove if snippet.repository_exists?
+ rescue => e
+ logger.error(message: "Snippet Migration: error deleting repository. Reason: #{e.message}", snippet: snippet.id)
+ end
+
+ def logger
+ @logger ||= Gitlab::BackgroundMigration::Logger.build
+ end
+
+ def snippet_action(snippet)
+ # We don't need the previous_path param because
+ # we're not updating any existing file
+ [{ file_path: filename(snippet),
+ content: snippet.content }]
+ end
+
+ def filename(snippet)
+ snippet.file_name.presence || empty_file_name
+ end
+
+ def empty_file_name
+ @empty_file_name ||= "#{SnippetRepository::DEFAULT_EMPTY_FILE_NAME}1.txt"
+ end
+
+ def commit_attrs
+ @commit_attrs ||= { branch_name: 'master', message: 'Initial commit' }
+ end
+
+ def create_commit(snippet)
+ snippet.snippet_repository.multi_files_action(snippet.author, snippet_action(snippet), commit_attrs)
+ end
+ end
+ end
+end
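A sketch of driving this migration directly (the ID range is illustrative; in practice it is scheduled in batches through the background migration framework):

    # Creates the missing repository, commits the content, and cleans up on failure
    # for every snippet in the given ID range.
    Gitlab::BackgroundMigration::BackfillSnippetRepositories.new.perform(1, 100)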
diff --git a/lib/gitlab/background_migration/cleanup_optimistic_locking_nulls.rb b/lib/gitlab/background_migration/cleanup_optimistic_locking_nulls.rb
new file mode 100644
index 00000000000..bf69ef352cc
--- /dev/null
+++ b/lib/gitlab/background_migration/cleanup_optimistic_locking_nulls.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+# rubocop:disable Style/Documentation
+
+module Gitlab
+ module BackgroundMigration
+ class CleanupOptimisticLockingNulls
+ QUERY_ITEM_SIZE = 1_000
+
+ # table - The name of the table the migration is performed for.
+ # start_id - The ID of the object to start at
+ # stop_id - The ID of the object to end at
+ def perform(start_id, stop_id, table)
+ model = define_model_for(table)
+
+ # After analysis, a batch size of 1,000 items per query was found to be
+ # optimal. Discussion in https://gitlab.com/gitlab-org/gitlab/-/merge_requests/18418#note_282285336
+ (start_id..stop_id).each_slice(QUERY_ITEM_SIZE).each do |range|
+ model
+ .where(lock_version: nil)
+ .where("ID BETWEEN ? AND ?", range.first, range.last)
+ .update_all(lock_version: 0)
+ end
+ end
+
+ def define_model_for(table)
+ Class.new(ActiveRecord::Base) do
+ self.table_name = table
+ end
+ end
+ end
+ end
+end
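A direct-invocation sketch (the table name and ID range are illustrative; any table with a lock_version column works):

    # Sets lock_version to 0 for rows 1..10_000 whose lock_version is NULL,
    # in sub-batches of 1,000 rows per UPDATE.
    Gitlab::BackgroundMigration::CleanupOptimisticLockingNulls.new.perform(1, 10_000, 'epics')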
diff --git a/lib/gitlab/background_migration/link_lfs_objects_projects.rb b/lib/gitlab/background_migration/link_lfs_objects_projects.rb
new file mode 100644
index 00000000000..983470c5121
--- /dev/null
+++ b/lib/gitlab/background_migration/link_lfs_objects_projects.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # Create missing LfsObjectsProject records for forks
+ class LinkLfsObjectsProjects
+ # Model specifically used for migration.
+ class LfsObjectsProject < ActiveRecord::Base
+ include EachBatch
+
+ self.table_name = 'lfs_objects_projects'
+
+ def self.linkable
+ where(
+ <<~SQL
+ lfs_objects_projects.project_id IN (
+ SELECT fork_network_members.forked_from_project_id
+ FROM fork_network_members
+ WHERE fork_network_members.forked_from_project_id IS NOT NULL
+ )
+ SQL
+ )
+ end
+ end
+
+ # Model specifically used for migration.
+ class ForkNetworkMember < ActiveRecord::Base
+ include EachBatch
+
+ self.table_name = 'fork_network_members'
+
+ def self.without_lfs_object(lfs_object_id)
+ where(
+ <<~SQL
+ fork_network_members.project_id NOT IN (
+ SELECT lop.project_id
+ FROM lfs_objects_projects lop
+ WHERE lop.lfs_object_id = #{lfs_object_id}
+ )
+ SQL
+ )
+ end
+ end
+
+ BATCH_SIZE = 1000
+
+ def perform(start_id, end_id)
+ lfs_objects_projects =
+ Gitlab::BackgroundMigration::LinkLfsObjectsProjects::LfsObjectsProject
+ .linkable
+ .where(id: start_id..end_id)
+
+ return if lfs_objects_projects.empty?
+
+ lfs_objects_projects.find_each do |lop|
+ ForkNetworkMember
+ .select("#{lop.lfs_object_id}, fork_network_members.project_id, NOW(), NOW()")
+ .without_lfs_object(lop.lfs_object_id)
+ .where(forked_from_project_id: lop.project_id)
+ .each_batch(of: BATCH_SIZE) do |batch, index|
+ execute <<~SQL
+ INSERT INTO lfs_objects_projects (lfs_object_id, project_id, created_at, updated_at)
+ #{batch.to_sql}
+ SQL
+
+ logger.info(message: "LinkLfsObjectsProjects: created missing LfsObjectsProject records for LfsObject #{lop.lfs_object_id}")
+ end
+ end
+ end
+
+ private
+
+ def execute(sql)
+ ::ActiveRecord::Base.connection.execute(sql)
+ end
+
+ def logger
+ @logger ||= Gitlab::BackgroundMigration::Logger.build
+ end
+ end
+ end
+end
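A direct-invocation sketch (the lfs_objects_projects ID range is illustrative):

    # Inserts missing lfs_objects_projects rows so forks share their source
    # project's LFS objects, scanning source rows 1..1_000.
    Gitlab::BackgroundMigration::LinkLfsObjectsProjects.new.perform(1, 1_000)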
diff --git a/lib/gitlab/background_migration/migrate_security_scans.rb b/lib/gitlab/background_migration/migrate_security_scans.rb
new file mode 100644
index 00000000000..189a150cb87
--- /dev/null
+++ b/lib/gitlab/background_migration/migrate_security_scans.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # rubocop: disable Style/Documentation
+ class MigrateSecurityScans
+ def perform(start_id, stop_id)
+ end
+ end
+ end
+end
+
+Gitlab::BackgroundMigration::MigrateSecurityScans.prepend_if_ee('EE::Gitlab::BackgroundMigration::MigrateSecurityScans')
diff --git a/lib/gitlab/background_migration/remove_undefined_occurrence_severity_level.rb b/lib/gitlab/background_migration/remove_undefined_occurrence_severity_level.rb
new file mode 100644
index 00000000000..f137e41c728
--- /dev/null
+++ b/lib/gitlab/background_migration/remove_undefined_occurrence_severity_level.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+# rubocop:disable Style/Documentation
+
+module Gitlab
+ module BackgroundMigration
+ class RemoveUndefinedOccurrenceSeverityLevel
+ def perform(start_id, stop_id)
+ end
+ end
+ end
+end
+
+Gitlab::BackgroundMigration::RemoveUndefinedOccurrenceSeverityLevel.prepend_if_ee('EE::Gitlab::BackgroundMigration::RemoveUndefinedOccurrenceSeverityLevel')
diff --git a/lib/gitlab/background_migration/remove_undefined_vulnerability_severity_level.rb b/lib/gitlab/background_migration/remove_undefined_vulnerability_severity_level.rb
new file mode 100644
index 00000000000..95540cd5f49
--- /dev/null
+++ b/lib/gitlab/background_migration/remove_undefined_vulnerability_severity_level.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+# rubocop:disable Style/Documentation
+
+module Gitlab
+ module BackgroundMigration
+ class RemoveUndefinedVulnerabilitySeverityLevel
+ def perform(start_id, stop_id)
+ end
+ end
+ end
+end
+
+Gitlab::BackgroundMigration::RemoveUndefinedVulnerabilitySeverityLevel.prepend_if_ee('EE::Gitlab::BackgroundMigration::RemoveUndefinedVulnerabilitySeverityLevel')
diff --git a/lib/gitlab/background_migration/update_authorized_keys_file_since.rb b/lib/gitlab/background_migration/update_authorized_keys_file_since.rb
deleted file mode 100644
index dd80d4bab1a..00000000000
--- a/lib/gitlab/background_migration/update_authorized_keys_file_since.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BackgroundMigration
- # rubocop: disable Style/Documentation
- class UpdateAuthorizedKeysFileSince
- def perform(cutoff_datetime)
- end
- end
- end
-end
-
-Gitlab::BackgroundMigration::UpdateAuthorizedKeysFileSince.prepend_if_ee('EE::Gitlab::BackgroundMigration::UpdateAuthorizedKeysFileSince')
diff --git a/lib/gitlab/background_migration/user_mentions/create_resource_user_mention.rb b/lib/gitlab/background_migration/user_mentions/create_resource_user_mention.rb
index 40f45301727..cf0f582a2d4 100644
--- a/lib/gitlab/background_migration/user_mentions/create_resource_user_mention.rb
+++ b/lib/gitlab/background_migration/user_mentions/create_resource_user_mention.rb
@@ -8,7 +8,7 @@ module Gitlab
# Resources that have mentions to be migrated:
# issue, merge_request, epic, commit, snippet, design
- BULK_INSERT_SIZE = 5000
+ BULK_INSERT_SIZE = 1_000
ISOLATION_MODULE = 'Gitlab::BackgroundMigration::UserMentions::Models'
def perform(resource_model, join, conditions, with_notes, start_id, end_id)
@@ -21,7 +21,8 @@ module Gitlab
records.in_groups_of(BULK_INSERT_SIZE, false).each do |records|
mentions = []
records.each do |record|
- mentions << record.build_mention_values(resource_user_mention_model.resource_foreign_key)
+ mention_record = record.build_mention_values(resource_user_mention_model.resource_foreign_key)
+ mentions << mention_record unless mention_record.blank?
end
Gitlab::Database.bulk_insert(
diff --git a/lib/gitlab/background_migration/user_mentions/models/commit.rb b/lib/gitlab/background_migration/user_mentions/models/commit.rb
new file mode 100644
index 00000000000..279e93dbf0d
--- /dev/null
+++ b/lib/gitlab/background_migration/user_mentions/models/commit.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+# rubocop:disable Style/Documentation
+
+module Gitlab
+ module BackgroundMigration
+ module UserMentions
+ module Models
+ class Commit
+ include Concerns::IsolatedMentionable
+ include Concerns::MentionableMigrationMethods
+
+ def self.user_mention_model
+ Gitlab::BackgroundMigration::UserMentions::Models::CommitUserMention
+ end
+
+ def user_mention_model
+ self.class.user_mention_model
+ end
+
+ def user_mention_resource_id
+ id
+ end
+
+ def user_mention_note_id
+ 'NULL'
+ end
+
+ def self.no_quote_columns
+ [:note_id]
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/background_migration/user_mentions/models/commit_user_mention.rb b/lib/gitlab/background_migration/user_mentions/models/commit_user_mention.rb
new file mode 100644
index 00000000000..bdb4d6c7d48
--- /dev/null
+++ b/lib/gitlab/background_migration/user_mentions/models/commit_user_mention.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+# rubocop:disable Style/Documentation
+
+module Gitlab
+ module BackgroundMigration
+ module UserMentions
+ module Models
+ class CommitUserMention < ActiveRecord::Base
+ self.table_name = 'commit_user_mentions'
+
+ def self.resource_foreign_key
+ :commit_id
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/background_migration/user_mentions/models/concerns/isolated_mentionable.rb b/lib/gitlab/background_migration/user_mentions/models/concerns/isolated_mentionable.rb
index b7fa92a6686..69ba3f9132b 100644
--- a/lib/gitlab/background_migration/user_mentions/models/concerns/isolated_mentionable.rb
+++ b/lib/gitlab/background_migration/user_mentions/models/concerns/isolated_mentionable.rb
@@ -4,89 +4,97 @@ module Gitlab
module BackgroundMigration
module UserMentions
module Models
- # == IsolatedMentionable concern
- #
- # Shortcutted for isolation version of Mentionable to be used in mentions migrations
- #
- module IsolatedMentionable
- extend ::ActiveSupport::Concern
-
- class_methods do
- # Indicate which attributes of the Mentionable to search for GFM references.
- def attr_mentionable(attr, options = {})
- attr = attr.to_s
- mentionable_attrs << [attr, options]
+ module Concerns
+ # == IsolatedMentionable concern
+ #
+ # Stripped-down, isolated version of Mentionable to be used in mention migrations
+ #
+ module IsolatedMentionable
+ extend ::ActiveSupport::Concern
+
+ class_methods do
+ # Indicate which attributes of the Mentionable to search for GFM references.
+ def attr_mentionable(attr, options = {})
+ attr = attr.to_s
+ mentionable_attrs << [attr, options]
+ end
end
- end
- included do
- # Accessor for attributes marked mentionable.
- cattr_accessor :mentionable_attrs, instance_accessor: false do
- []
- end
+ included do
+ # Accessor for attributes marked mentionable.
+ cattr_accessor :mentionable_attrs, instance_accessor: false do
+ []
+ end
- if self < Participable
- participant -> (user, ext) { all_references(user, extractor: ext) }
+ if self < Participable
+ participant -> (user, ext) { all_references(user, extractor: ext) }
+ end
end
- end
- def all_references(current_user = nil, extractor: nil)
- # Use custom extractor if it's passed in the function parameters.
- if extractor
- extractors[current_user] = extractor
- else
- extractor = extractors[current_user] ||= ::Gitlab::ReferenceExtractor.new(project, current_user)
+ def all_references(current_user = nil, extractor: nil)
+ # Use custom extractor if it's passed in the function parameters.
+ if extractor
+ extractors[current_user] = extractor
+ else
+ extractor = extractors[current_user] ||= ::Gitlab::ReferenceExtractor.new(project, current_user)
- extractor.reset_memoized_values
- end
+ extractor.reset_memoized_values
+ end
- self.class.mentionable_attrs.each do |attr, options|
- text = __send__(attr) # rubocop:disable GitlabSecurity/PublicSend
- options = options.merge(
- cache_key: [self, attr],
- author: author,
- skip_project_check: skip_project_check?
- ).merge(mentionable_params)
+ self.class.mentionable_attrs.each do |attr, options|
+ text = __send__(attr) # rubocop:disable GitlabSecurity/PublicSend
+ options = options.merge(
+ cache_key: [self, attr],
+ author: author,
+ skip_project_check: skip_project_check?
+ ).merge(mentionable_params)
- cached_html = self.try(:updated_cached_html_for, attr.to_sym)
- options[:rendered] = cached_html if cached_html
+ cached_html = self.try(:updated_cached_html_for, attr.to_sym)
+ options[:rendered] = cached_html if cached_html
- extractor.analyze(text, options)
+ extractor.analyze(text, options)
+ end
+
+ extractor
end
- extractor
- end
+ def extractors
+ @extractors ||= {}
+ end
- def extractors
- @extractors ||= {}
- end
+ def skip_project_check?
+ false
+ end
- def skip_project_check?
- false
- end
+ def build_mention_values(resource_foreign_key)
+ refs = all_references(author)
- def build_mention_values(resource_foreign_key)
- refs = all_references(author)
+ mentioned_users_ids = array_to_sql(refs.mentioned_users.pluck(:id))
+ mentioned_projects_ids = array_to_sql(refs.mentioned_projects.pluck(:id))
+ mentioned_groups_ids = array_to_sql(refs.mentioned_groups.pluck(:id))
- {
- "#{resource_foreign_key}": user_mention_resource_id,
- note_id: user_mention_note_id,
- mentioned_users_ids: array_to_sql(refs.mentioned_users.pluck(:id)),
- mentioned_projects_ids: array_to_sql(refs.mentioned_projects.pluck(:id)),
- mentioned_groups_ids: array_to_sql(refs.mentioned_groups.pluck(:id))
- }
- end
+ return if mentioned_users_ids.blank? && mentioned_projects_ids.blank? && mentioned_groups_ids.blank?
+
+ {
+ "#{resource_foreign_key}": user_mention_resource_id,
+ note_id: user_mention_note_id,
+ mentioned_users_ids: mentioned_users_ids,
+ mentioned_projects_ids: mentioned_projects_ids,
+ mentioned_groups_ids: mentioned_groups_ids
+ }
+ end
- def array_to_sql(ids_array)
- return unless ids_array.present?
+ def array_to_sql(ids_array)
+ return unless ids_array.present?
- '{' + ids_array.join(", ") + '}'
- end
+ '{' + ids_array.join(", ") + '}'
+ end
- private
+ private
- def mentionable_params
- {}
+ def mentionable_params
+ {}
+ end
end
end
end
diff --git a/lib/gitlab/background_migration/user_mentions/models/concerns/mentionable_migration_methods.rb b/lib/gitlab/background_migration/user_mentions/models/concerns/mentionable_migration_methods.rb
index fa479cb0ed3..efb08d44100 100644
--- a/lib/gitlab/background_migration/user_mentions/models/concerns/mentionable_migration_methods.rb
+++ b/lib/gitlab/background_migration/user_mentions/models/concerns/mentionable_migration_methods.rb
@@ -4,17 +4,19 @@ module Gitlab
module BackgroundMigration
module UserMentions
module Models
- # Extract common no_quote_columns method used in determining the columns that do not need
- # to be quoted for corresponding models
- module MentionableMigrationMethods
- extend ::ActiveSupport::Concern
+ module Concerns
+ # Provides the common no_quote_columns method that determines which columns
+ # do not need to be quoted for the corresponding models
+ module MentionableMigrationMethods
+ extend ::ActiveSupport::Concern
- class_methods do
- def no_quote_columns
- [
- :note_id,
- user_mention_model.resource_foreign_key
- ]
+ class_methods do
+ def no_quote_columns
+ [
+ :note_id,
+ user_mention_model.resource_foreign_key
+ ]
+ end
end
end
end
diff --git a/lib/gitlab/background_migration/user_mentions/models/design_management/design.rb b/lib/gitlab/background_migration/user_mentions/models/design_management/design.rb
new file mode 100644
index 00000000000..0cdfc6447c7
--- /dev/null
+++ b/lib/gitlab/background_migration/user_mentions/models/design_management/design.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+# rubocop:disable Style/Documentation
+
+module Gitlab
+ module BackgroundMigration
+ module UserMentions
+ module Models
+ module DesignManagement
+ class Design < ActiveRecord::Base
+ include Concerns::MentionableMigrationMethods
+
+ def self.user_mention_model
+ Gitlab::BackgroundMigration::UserMentions::Models::DesignUserMention
+ end
+
+ def user_mention_model
+ self.class.user_mention_model
+ end
+
+ def user_mention_resource_id
+ id
+ end
+
+ def user_mention_note_id
+ 'NULL'
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/background_migration/user_mentions/models/design_user_mention.rb b/lib/gitlab/background_migration/user_mentions/models/design_user_mention.rb
new file mode 100644
index 00000000000..68205ecd3c2
--- /dev/null
+++ b/lib/gitlab/background_migration/user_mentions/models/design_user_mention.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+# rubocop:disable Style/Documentation
+
+module Gitlab
+ module BackgroundMigration
+ module UserMentions
+ module Models
+ class DesignUserMention < ActiveRecord::Base
+ self.table_name = 'design_user_mentions'
+
+ def self.resource_foreign_key
+ :design_id
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/background_migration/user_mentions/models/epic.rb b/lib/gitlab/background_migration/user_mentions/models/epic.rb
index 9797c86478e..dc2b7819800 100644
--- a/lib/gitlab/background_migration/user_mentions/models/epic.rb
+++ b/lib/gitlab/background_migration/user_mentions/models/epic.rb
@@ -6,9 +6,9 @@ module Gitlab
module UserMentions
module Models
class Epic < ActiveRecord::Base
- include IsolatedMentionable
+ include Concerns::IsolatedMentionable
+ include Concerns::MentionableMigrationMethods
include CacheMarkdownField
- include MentionableMigrationMethods
attr_mentionable :title, pipeline: :single_line
attr_mentionable :description
diff --git a/lib/gitlab/background_migration/user_mentions/models/merge_request.rb b/lib/gitlab/background_migration/user_mentions/models/merge_request.rb
new file mode 100644
index 00000000000..655c1db71ae
--- /dev/null
+++ b/lib/gitlab/background_migration/user_mentions/models/merge_request.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+# rubocop:disable Style/Documentation
+
+module Gitlab
+ module BackgroundMigration
+ module UserMentions
+ module Models
+ class MergeRequest < ActiveRecord::Base
+ include Concerns::IsolatedMentionable
+ include CacheMarkdownField
+ include Concerns::MentionableMigrationMethods
+
+ attr_mentionable :title, pipeline: :single_line
+ attr_mentionable :description
+ cache_markdown_field :title, pipeline: :single_line
+ cache_markdown_field :description, issuable_state_filter_enabled: true
+
+ self.table_name = 'merge_requests'
+
+ belongs_to :author, class_name: "User"
+ belongs_to :target_project, class_name: "Project"
+ belongs_to :source_project, class_name: "Project"
+
+ alias_attribute :project, :target_project
+
+ def self.user_mention_model
+ Gitlab::BackgroundMigration::UserMentions::Models::MergeRequestUserMention
+ end
+
+ def user_mention_model
+ self.class.user_mention_model
+ end
+
+ def user_mention_resource_id
+ id
+ end
+
+ def user_mention_note_id
+ 'NULL'
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/background_migration/user_mentions/models/merge_request_user_mention.rb b/lib/gitlab/background_migration/user_mentions/models/merge_request_user_mention.rb
new file mode 100644
index 00000000000..e9b85e9cb8c
--- /dev/null
+++ b/lib/gitlab/background_migration/user_mentions/models/merge_request_user_mention.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+# rubocop:disable Style/Documentation
+
+module Gitlab
+ module BackgroundMigration
+ module UserMentions
+ module Models
+ class MergeRequestUserMention < ActiveRecord::Base
+ self.table_name = 'merge_request_user_mentions'
+
+ def self.resource_foreign_key
+ :merge_request_id
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/background_migration/user_mentions/models/note.rb b/lib/gitlab/background_migration/user_mentions/models/note.rb
index dc364d7af5a..7a1a0223bc7 100644
--- a/lib/gitlab/background_migration/user_mentions/models/note.rb
+++ b/lib/gitlab/background_migration/user_mentions/models/note.rb
@@ -6,7 +6,7 @@ module Gitlab
module UserMentions
module Models
class Note < ActiveRecord::Base
- include IsolatedMentionable
+ include Concerns::IsolatedMentionable
include CacheMarkdownField
self.table_name = 'notes'
@@ -20,7 +20,7 @@ module Gitlab
belongs_to :project
def for_personal_snippet?
- noteable.class.name == 'PersonalSnippet'
+ noteable && noteable.class.name == 'PersonalSnippet'
end
def for_project_noteable?
@@ -32,7 +32,7 @@ module Gitlab
end
def for_epic?
- noteable.class.name == 'Epic'
+ noteable && noteable_type == 'Epic'
end
def user_mention_resource_id
@@ -43,6 +43,14 @@ module Gitlab
id
end
+ def noteable
+ super unless for_commit?
+ end
+
+ def for_commit?
+ noteable_type == "Commit"
+ end
+
private
def mentionable_params
@@ -52,6 +60,8 @@ module Gitlab
end
def banzai_context_params
+ return {} unless noteable
+
{ group: noteable.group, label_url_method: :group_epics_url }
end
end
diff --git a/lib/gitlab/bitbucket_import/importer.rb b/lib/gitlab/bitbucket_import/importer.rb
index 3a087a3ef83..5af839d8a32 100644
--- a/lib/gitlab/bitbucket_import/importer.rb
+++ b/lib/gitlab/bitbucket_import/importer.rb
@@ -3,8 +3,6 @@
module Gitlab
module BitbucketImport
class Importer
- include Gitlab::ShellAdapter
-
LABELS = [{ title: 'bug', color: '#FF0000' },
{ title: 'enhancement', color: '#428BCA' },
{ title: 'proposal', color: '#69D100' },
@@ -80,7 +78,7 @@ module Gitlab
wiki = WikiFormatter.new(project)
- gitlab_shell.import_wiki_repository(project, wiki)
+ project.wiki.repository.import_repository(wiki.import_url)
rescue StandardError => e
errors << { type: :wiki, errors: e.message }
end
diff --git a/lib/gitlab/cache/import/caching.rb b/lib/gitlab/cache/import/caching.rb
new file mode 100644
index 00000000000..ead94761ae7
--- /dev/null
+++ b/lib/gitlab/cache/import/caching.rb
@@ -0,0 +1,153 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Cache
+ module Import
+ module Caching
+ # The default timeout of the cache keys.
+ TIMEOUT = 24.hours.to_i
+
+ WRITE_IF_GREATER_SCRIPT = <<-EOF.strip_heredoc.freeze
+ local key, value, ttl = KEYS[1], tonumber(ARGV[1]), ARGV[2]
+ local existing = tonumber(redis.call("get", key))
+
+ if existing == nil or value > existing then
+ redis.call("set", key, value)
+ redis.call("expire", key, ttl)
+ return true
+ else
+ return false
+ end
+ EOF
+
+ # Reads a cache key.
+ #
+ # If the key exists and has a non-empty value, its TTL is refreshed
+ # automatically.
+ #
+ # raw_key - The cache key to read.
+ # timeout - The new timeout of the key if the key is to be refreshed.
+ def self.read(raw_key, timeout: TIMEOUT)
+ key = cache_key_for(raw_key)
+ value = Redis::Cache.with { |redis| redis.get(key) }
+
+ if value.present?
+ # We refresh the expiration time so frequently used keys stick
+ # around, removing the need for querying the database as much as
+ # possible.
+ #
+ # A key may be empty when we looked up a GitHub user (for example) but
+ # did not find a matching GitLab user. In that case we _don't_ want to
+ # refresh the TTL, so that we automatically pick up the right data if
+ # said user later registers on the GitLab instance.
+ Redis::Cache.with { |redis| redis.expire(key, timeout) }
+ end
+
+ value
+ end
+
+ # Reads an integer from the cache, or returns nil if no value was found.
+ #
+ # See Caching.read for more information.
+ def self.read_integer(raw_key, timeout: TIMEOUT)
+ value = read(raw_key, timeout: timeout)
+
+ value.to_i if value.present?
+ end
+
+ # Sets a cache key to the given value.
+ #
+ # key - The cache key to write.
+ # value - The value to set.
+ # timeout - The time after which the cache key should expire.
+ def self.write(raw_key, value, timeout: TIMEOUT)
+ key = cache_key_for(raw_key)
+
+ Redis::Cache.with do |redis|
+ redis.set(key, value, ex: timeout)
+ end
+
+ value
+ end
+
+ # Adds a value to a set.
+ #
+ # raw_key - The key of the set to add the value to.
+ # value - The value to add to the set.
+ # timeout - The new timeout of the key.
+ def self.set_add(raw_key, value, timeout: TIMEOUT)
+ key = cache_key_for(raw_key)
+
+ Redis::Cache.with do |redis|
+ redis.multi do |m|
+ m.sadd(key, value)
+ m.expire(key, timeout)
+ end
+ end
+ end
+
+ # Returns true if the given value is present in the set.
+ #
+ # raw_key - The key of the set to check.
+ # value - The value to check for.
+ def self.set_includes?(raw_key, value)
+ key = cache_key_for(raw_key)
+
+ Redis::Cache.with do |redis|
+ redis.sismember(key, value)
+ end
+ end
+
+ # Sets multiple keys to a given value.
+ #
+ # mapping - A Hash mapping the cache keys to their values.
+ # timeout - The time after which the cache key should expire.
+ def self.write_multiple(mapping, timeout: TIMEOUT)
+ Redis::Cache.with do |redis|
+ redis.multi do |multi|
+ mapping.each do |raw_key, value|
+ multi.set(cache_key_for(raw_key), value, ex: timeout)
+ end
+ end
+ end
+ end
+
+ # Sets the expiration time of a key.
+ #
+ # raw_key - The key for which to change the timeout.
+ # timeout - The new timeout.
+ def self.expire(raw_key, timeout)
+ key = cache_key_for(raw_key)
+
+ Redis::Cache.with do |redis|
+ redis.expire(key, timeout)
+ end
+ end
+
+ # Sets a key to the given integer but only if the existing value is
+ # smaller than the given value.
+ #
+ # This method uses a Lua script to ensure the read and write are atomic.
+ #
+ # raw_key - The key to set.
+ # value - The new value for the key.
+ # timeout - The key timeout in seconds.
+ #
+ # Returns true when the key was overwritten, false otherwise.
+ def self.write_if_greater(raw_key, value, timeout: TIMEOUT)
+ key = cache_key_for(raw_key)
+ val = Redis::Cache.with do |redis|
+ redis
+ .eval(WRITE_IF_GREATER_SCRIPT, keys: [key], argv: [value, timeout])
+ end
+
+ val ? true : false
+ end
+
+ def self.cache_key_for(raw_key)
+ "#{Redis::Cache::CACHE_NAMESPACE}:#{raw_key}"
+ end
+ end
+ end
+ end
+end
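A usage sketch of the new cache helpers (the keys and values are illustrative; all writes default to the 24-hour TIMEOUT):

    # Plain read/write round trip.
    Gitlab::Cache::Import::Caching.write('github-importer/user/42', 7)
    Gitlab::Cache::Import::Caching.read_integer('github-importer/user/42') # => 7

    # write_if_greater only overwrites when the new value exceeds the stored one.
    Gitlab::Cache::Import::Caching.write_if_greater('github-importer/max-issue-id', 10) # => true
    Gitlab::Cache::Import::Caching.write_if_greater('github-importer/max-issue-id', 5)  # => false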
diff --git a/lib/gitlab/checks/branch_check.rb b/lib/gitlab/checks/branch_check.rb
index 4ddc1c718c7..7be0ef05a49 100644
--- a/lib/gitlab/checks/branch_check.rb
+++ b/lib/gitlab/checks/branch_check.rb
@@ -28,7 +28,7 @@ module Gitlab
logger.log_timed(LOG_MESSAGES[:delete_default_branch_check]) do
if deletion? && branch_name == project.default_branch
- raise GitAccess::UnauthorizedError, ERROR_MESSAGES[:delete_default_branch]
+ raise GitAccess::ForbiddenError, ERROR_MESSAGES[:delete_default_branch]
end
end
@@ -42,7 +42,7 @@ module Gitlab
return unless ProtectedBranch.protected?(project, branch_name) # rubocop:disable Cop/AvoidReturnFromBlocks
if forced_push?
- raise GitAccess::UnauthorizedError, ERROR_MESSAGES[:force_push_protected_branch]
+ raise GitAccess::ForbiddenError, ERROR_MESSAGES[:force_push_protected_branch]
end
end
@@ -62,15 +62,15 @@ module Gitlab
break if user_access.can_push_to_branch?(branch_name)
unless user_access.can_merge_to_branch?(branch_name)
- raise GitAccess::UnauthorizedError, ERROR_MESSAGES[:create_protected_branch]
+ raise GitAccess::ForbiddenError, ERROR_MESSAGES[:create_protected_branch]
end
unless safe_commit_for_new_protected_branch?
- raise GitAccess::UnauthorizedError, ERROR_MESSAGES[:invalid_commit_create_protected_branch]
+ raise GitAccess::ForbiddenError, ERROR_MESSAGES[:invalid_commit_create_protected_branch]
end
unless updated_from_web?
- raise GitAccess::UnauthorizedError, ERROR_MESSAGES[:non_web_create_protected_branch]
+ raise GitAccess::ForbiddenError, ERROR_MESSAGES[:non_web_create_protected_branch]
end
end
end
@@ -78,11 +78,11 @@ module Gitlab
def protected_branch_deletion_checks
logger.log_timed(LOG_MESSAGES[:protected_branch_deletion_checks]) do
unless user_access.can_delete_branch?(branch_name)
- raise GitAccess::UnauthorizedError, ERROR_MESSAGES[:non_master_delete_protected_branch]
+ raise GitAccess::ForbiddenError, ERROR_MESSAGES[:non_master_delete_protected_branch]
end
unless updated_from_web?
- raise GitAccess::UnauthorizedError, ERROR_MESSAGES[:non_web_delete_protected_branch]
+ raise GitAccess::ForbiddenError, ERROR_MESSAGES[:non_web_delete_protected_branch]
end
end
end
@@ -91,11 +91,11 @@ module Gitlab
logger.log_timed(LOG_MESSAGES[:protected_branch_push_checks]) do
if matching_merge_request?
unless user_access.can_merge_to_branch?(branch_name) || user_access.can_push_to_branch?(branch_name)
- raise GitAccess::UnauthorizedError, ERROR_MESSAGES[:merge_protected_branch]
+ raise GitAccess::ForbiddenError, ERROR_MESSAGES[:merge_protected_branch]
end
else
unless user_access.can_push_to_branch?(branch_name)
- raise GitAccess::UnauthorizedError, push_to_protected_branch_rejected_message
+ raise GitAccess::ForbiddenError, push_to_protected_branch_rejected_message
end
end
end
diff --git a/lib/gitlab/checks/diff_check.rb b/lib/gitlab/checks/diff_check.rb
index 5de71addd5f..0eb2b4c79ef 100644
--- a/lib/gitlab/checks/diff_check.rb
+++ b/lib/gitlab/checks/diff_check.rb
@@ -46,7 +46,7 @@ module Gitlab
def validate_diff(diff)
validations_for_diff.each do |validation|
if error = validation.call(diff)
- raise ::Gitlab::GitAccess::UnauthorizedError, error
+ raise ::Gitlab::GitAccess::ForbiddenError, error
end
end
end
@@ -77,7 +77,7 @@ module Gitlab
logger.log_timed(LOG_MESSAGES[__method__]) do
path_validations.each do |validation|
if error = validation.call(file_paths)
- raise ::Gitlab::GitAccess::UnauthorizedError, error
+ raise ::Gitlab::GitAccess::ForbiddenError, error
end
end
end
diff --git a/lib/gitlab/checks/lfs_check.rb b/lib/gitlab/checks/lfs_check.rb
index 7b013567a03..f81c215d847 100644
--- a/lib/gitlab/checks/lfs_check.rb
+++ b/lib/gitlab/checks/lfs_check.rb
@@ -15,7 +15,7 @@ module Gitlab
lfs_check = Checks::LfsIntegrity.new(project, newrev, logger.time_left)
if lfs_check.objects_missing?
- raise GitAccess::UnauthorizedError, ERROR_MESSAGE
+ raise GitAccess::ForbiddenError, ERROR_MESSAGE
end
end
end
diff --git a/lib/gitlab/checks/lfs_integrity.rb b/lib/gitlab/checks/lfs_integrity.rb
index 1652d5a30a4..e18cf6ff8f2 100644
--- a/lib/gitlab/checks/lfs_integrity.rb
+++ b/lib/gitlab/checks/lfs_integrity.rb
@@ -9,7 +9,6 @@ module Gitlab
@time_left = time_left
end
- # rubocop: disable CodeReuse/ActiveRecord
def objects_missing?
return false unless @newrev && @project.lfs_enabled?
@@ -19,12 +18,11 @@ module Gitlab
return false unless new_lfs_pointers.present?
existing_count = @project.all_lfs_objects
- .where(oid: new_lfs_pointers.map(&:lfs_oid))
+ .for_oids(new_lfs_pointers.map(&:lfs_oid))
.count
existing_count != new_lfs_pointers.count
end
- # rubocop: enable CodeReuse/ActiveRecord
end
end
end
diff --git a/lib/gitlab/checks/post_push_message.rb b/lib/gitlab/checks/post_push_message.rb
index 492dbb5a596..b3c981d252b 100644
--- a/lib/gitlab/checks/post_push_message.rb
+++ b/lib/gitlab/checks/post_push_message.rb
@@ -3,8 +3,8 @@
module Gitlab
module Checks
class PostPushMessage
- def initialize(project, user, protocol)
- @project = project
+ def initialize(repository, user, protocol)
+ @repository = repository
@user = user
@protocol = protocol
end
@@ -34,14 +34,21 @@ module Gitlab
protected
- attr_reader :project, :user, :protocol
+ attr_reader :repository, :user, :protocol
+
+ delegate :project, to: :repository, allow_nil: true
+ delegate :container, to: :repository, allow_nil: false
def self.message_key(user_id, project_id)
raise NotImplementedError
end
def url_to_repo
- protocol == 'ssh' ? project.ssh_url_to_repo : project.http_url_to_repo
+ protocol == 'ssh' ? message_subject.ssh_url_to_repo : message_subject.http_url_to_repo
+ end
+
+ def message_subject
+ repository.repo_type.wiki? ? project.wiki : container
end
end
end
diff --git a/lib/gitlab/checks/project_moved.rb b/lib/gitlab/checks/project_moved.rb
index 6f04fddc6c4..4cc35de9c2d 100644
--- a/lib/gitlab/checks/project_moved.rb
+++ b/lib/gitlab/checks/project_moved.rb
@@ -5,10 +5,10 @@ module Gitlab
class ProjectMoved < PostPushMessage
REDIRECT_NAMESPACE = "redirect_namespace"
- def initialize(project, user, protocol, redirected_path)
+ def initialize(repository, user, protocol, redirected_path)
@redirected_path = redirected_path
- super(project, user, protocol)
+ super(repository, user, protocol)
end
def message
diff --git a/lib/gitlab/checks/push_check.rb b/lib/gitlab/checks/push_check.rb
index 91f8d0bdbc8..7cc5bc56cbb 100644
--- a/lib/gitlab/checks/push_check.rb
+++ b/lib/gitlab/checks/push_check.rb
@@ -6,7 +6,7 @@ module Gitlab
def validate!
logger.log_timed("Checking if you are allowed to push...") do
unless can_push?
- raise GitAccess::UnauthorizedError, GitAccess::ERROR_MESSAGES[:push_code]
+ raise GitAccess::ForbiddenError, GitAccess::ERROR_MESSAGES[:push_code]
end
end
end
diff --git a/lib/gitlab/checks/push_file_count_check.rb b/lib/gitlab/checks/push_file_count_check.rb
new file mode 100644
index 00000000000..288a7e0d41a
--- /dev/null
+++ b/lib/gitlab/checks/push_file_count_check.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Checks
+ class PushFileCountCheck < BaseChecker
+ attr_reader :repository, :newrev, :limit, :logger
+
+ LOG_MESSAGES = {
+ diff_content_check: "Validating diff contents being single file..."
+ }.freeze
+
+ ERROR_MESSAGES = {
+ upper_limit: "The repository can contain at most %{limit} file(s).",
+ lower_limit: "The repository must contain at least 1 file."
+ }.freeze
+
+ def initialize(change, repository:, limit:, logger:)
+ @repository = repository
+ @newrev = change[:newrev]
+ @limit = limit
+ @logger = logger
+ end
+
+ def validate!
+ file_count = repository.ls_files(newrev).size
+
+ if file_count > limit
+ raise ::Gitlab::GitAccess::ForbiddenError, ERROR_MESSAGES[:upper_limit] % { limit: limit }
+ end
+
+ if file_count == 0
+ raise ::Gitlab::GitAccess::ForbiddenError, ERROR_MESSAGES[:lower_limit]
+ end
+ end
+ end
+ end
+end
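A construction sketch for the new check (the change hash, repository, limit, and logger are illustrative placeholders):

    # Raises Gitlab::GitAccess::ForbiddenError when the pushed revision leaves the
    # repository empty or with more files than the limit allows.
    change = { oldrev: 'old-sha', newrev: 'new-sha', ref: 'refs/heads/master' }
    Gitlab::Checks::PushFileCountCheck.new(change, repository: repository, limit: 1, logger: logger).validate!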
diff --git a/lib/gitlab/checks/snippet_check.rb b/lib/gitlab/checks/snippet_check.rb
new file mode 100644
index 00000000000..bcecd0fc251
--- /dev/null
+++ b/lib/gitlab/checks/snippet_check.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Checks
+ class SnippetCheck < BaseChecker
+ DEFAULT_BRANCH = 'master'.freeze
+ ERROR_MESSAGES = {
+ create_delete_branch: 'You can not create or delete branches.'
+ }.freeze
+
+ ATTRIBUTES = %i[oldrev newrev ref branch_name tag_name logger].freeze
+ attr_reader(*ATTRIBUTES)
+
+ def initialize(change, logger:)
+ @oldrev, @newrev, @ref = change.values_at(:oldrev, :newrev, :ref)
+ @branch_name = Gitlab::Git.branch_name(@ref)
+ @tag_name = Gitlab::Git.tag_name(@ref)
+
+ @logger = logger
+ @logger.append_message("Running checks for ref: #{@branch_name || @tag_name}")
+ end
+
+ def validate!
+ if creation? || deletion?
+ raise GitAccess::ForbiddenError, ERROR_MESSAGES[:create_delete_branch]
+ end
+
+ true
+ end
+
+ private
+
+ def creation?
+ @branch_name != DEFAULT_BRANCH && super
+ end
+ end
+ end
+end
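A construction sketch for the snippet check (the change hash and logger are illustrative; an all-zero newrev denotes a branch deletion):

    # Raises Gitlab::GitAccess::ForbiddenError because snippet repositories may not
    # have branches created or deleted (the initial 'master' branch excepted).
    change = { oldrev: 'old-sha', newrev: '0' * 40, ref: 'refs/heads/feature' }
    Gitlab::Checks::SnippetCheck.new(change, logger: logger).validate!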
diff --git a/lib/gitlab/checks/tag_check.rb b/lib/gitlab/checks/tag_check.rb
index ced0612a7a3..a47e55cb160 100644
--- a/lib/gitlab/checks/tag_check.rb
+++ b/lib/gitlab/checks/tag_check.rb
@@ -20,7 +20,7 @@ module Gitlab
logger.log_timed(LOG_MESSAGES[:tag_checks]) do
if tag_exists? && user_access.cannot_do_action?(:admin_tag)
- raise GitAccess::UnauthorizedError, ERROR_MESSAGES[:change_existing_tags]
+ raise GitAccess::ForbiddenError, ERROR_MESSAGES[:change_existing_tags]
end
end
@@ -33,11 +33,11 @@ module Gitlab
logger.log_timed(LOG_MESSAGES[__method__]) do
return unless ProtectedTag.protected?(project, tag_name) # rubocop:disable Cop/AvoidReturnFromBlocks
- raise(GitAccess::UnauthorizedError, ERROR_MESSAGES[:update_protected_tag]) if update?
- raise(GitAccess::UnauthorizedError, ERROR_MESSAGES[:delete_protected_tag]) if deletion?
+ raise(GitAccess::ForbiddenError, ERROR_MESSAGES[:update_protected_tag]) if update?
+ raise(GitAccess::ForbiddenError, ERROR_MESSAGES[:delete_protected_tag]) if deletion?
unless user_access.can_create_tag?(tag_name)
- raise GitAccess::UnauthorizedError, ERROR_MESSAGES[:create_protected_tag]
+ raise GitAccess::ForbiddenError, ERROR_MESSAGES[:create_protected_tag]
end
end
end
diff --git a/lib/gitlab/ci/artifact_file_reader.rb b/lib/gitlab/ci/artifact_file_reader.rb
new file mode 100644
index 00000000000..c2d17cc176e
--- /dev/null
+++ b/lib/gitlab/ci/artifact_file_reader.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+# This class takes as input a Ci::Build object and an artifact path to read.
+# It downloads and extracts the artifacts archive, then returns the content
+# of the artifact, if found.
+module Gitlab
+ module Ci
+ class ArtifactFileReader
+ Error = Class.new(StandardError)
+
+ MAX_ARCHIVE_SIZE = 5.megabytes
+
+ def initialize(job)
+ @job = job
+
+ raise ArgumentError, 'Job does not have artifacts' unless @job.artifacts?
+
+ validate!
+ end
+
+ def read(path)
+ return unless job.artifacts_metadata
+
+ metadata_entry = job.artifacts_metadata_entry(path)
+
+ if metadata_entry.total_size > MAX_ARCHIVE_SIZE
+ raise Error, "Artifacts archive for job `#{job.name}` is too large: max #{max_archive_size_in_mb}"
+ end
+
+ read_zip_file!(path)
+ end
+
+ private
+
+ attr_reader :job
+
+ def validate!
+ if job.job_artifacts_archive.size > MAX_ARCHIVE_SIZE
+ raise Error, "Artifacts archive for job `#{job.name}` is too large: max #{max_archive_size_in_mb}"
+ end
+
+ unless job.artifacts_metadata?
+ raise Error, "Job `#{job.name}` has missing artifacts metadata and cannot be extracted!"
+ end
+ end
+
+ def read_zip_file!(file_path)
+ job.artifacts_file.use_file do |archive_path|
+ Zip::File.open(archive_path) do |zip_file|
+ entry = zip_file.find_entry(file_path)
+ unless entry
+ raise Error, "Path `#{file_path}` does not exist inside the `#{job.name}` artifacts archive!"
+ end
+
+ if entry.name_is_directory?
+ raise Error, "Path `#{file_path}` was expected to be a file but it was a directory!"
+ end
+
+ zip_file.get_input_stream(entry) do |is|
+ is.read
+ end
+ end
+ end
+ end
+
+ def max_archive_size_in_mb
+ ActiveSupport::NumberHelper.number_to_human_size(MAX_ARCHIVE_SIZE)
+ end
+ end
+ end
+end
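A usage sketch (the job and the path inside the artifacts archive are illustrative):

    # Returns the file content from the job's artifacts archive, or raises
    # ArtifactFileReader::Error when the archive is too large, metadata is missing,
    # or the path does not exist / points to a directory.
    reader = Gitlab::Ci::ArtifactFileReader.new(job)
    content = reader.read('generated/.gitlab-ci.yml')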
diff --git a/lib/gitlab/ci/config.rb b/lib/gitlab/ci/config.rb
index 38ab3475d01..10e0f4b8e4d 100644
--- a/lib/gitlab/ci/config.rb
+++ b/lib/gitlab/ci/config.rb
@@ -18,12 +18,9 @@ module Gitlab
attr_reader :root
- def initialize(config, project: nil, sha: nil, user: nil)
- @context = build_context(project: project, sha: sha, user: user)
-
- if Feature.enabled?(:ci_limit_yaml_expansion, project, default_enabled: true)
- @context.set_deadline(TIMEOUT_SECONDS)
- end
+ def initialize(config, project: nil, sha: nil, user: nil, parent_pipeline: nil)
+ @context = build_context(project: project, sha: sha, user: user, parent_pipeline: parent_pipeline)
+ @context.set_deadline(TIMEOUT_SECONDS)
@config = expand_config(config)
@@ -79,19 +76,17 @@ module Gitlab
initial_config = Gitlab::Config::Loader::Yaml.new(config).load!
initial_config = Config::External::Processor.new(initial_config, @context).perform
initial_config = Config::Extendable.new(initial_config).to_hash
-
- if Feature.enabled?(:ci_pre_post_pipeline_stages, @context.project, default_enabled: true)
- initial_config = Config::EdgeStagesInjector.new(initial_config).to_hash
- end
+ initial_config = Config::EdgeStagesInjector.new(initial_config).to_hash
initial_config
end
- def build_context(project:, sha:, user:)
+ def build_context(project:, sha:, user:, parent_pipeline:)
Config::External::Context.new(
project: project,
sha: sha || project&.repository&.root_ref_sha,
- user: user)
+ user: user,
+ parent_pipeline: parent_pipeline)
end
def track_and_raise_for_dev_exception(error)
diff --git a/lib/gitlab/ci/config/entry/artifacts.rb b/lib/gitlab/ci/config/entry/artifacts.rb
index aebc1675bec..241c73db3bb 100644
--- a/lib/gitlab/ci/config/entry/artifacts.rb
+++ b/lib/gitlab/ci/config/entry/artifacts.rb
@@ -44,8 +44,6 @@ module Gitlab
end
end
- helpers :reports
-
def value
@config[:reports] = reports_value if @config.key?(:reports)
@config
diff --git a/lib/gitlab/ci/config/entry/bridge.rb b/lib/gitlab/ci/config/entry/bridge.rb
index c0247dca73d..f4362d3b0ce 100644
--- a/lib/gitlab/ci/config/entry/bridge.rb
+++ b/lib/gitlab/ci/config/entry/bridge.rb
@@ -9,34 +9,21 @@ module Gitlab
# defining a downstream project trigger.
#
class Bridge < ::Gitlab::Config::Entry::Node
- include ::Gitlab::Config::Entry::Configurable
- include ::Gitlab::Config::Entry::Attributable
- include ::Gitlab::Config::Entry::Inheritable
+ include ::Gitlab::Ci::Config::Entry::Processable
- ALLOWED_KEYS = %i[trigger stage allow_failure only except
- when extends variables needs rules].freeze
+ ALLOWED_KEYS = %i[trigger allow_failure when needs].freeze
validations do
- validates :config, allowed_keys: ALLOWED_KEYS
- validates :config, presence: true
- validates :name, presence: true
- validates :name, type: Symbol
- validates :config, disallowed_keys: {
- in: %i[only except when start_in],
- message: 'key may not be used with `rules`'
- },
- if: :has_rules?
+ validates :config, allowed_keys: ALLOWED_KEYS + PROCESSABLE_ALLOWED_KEYS
with_options allow_nil: true do
validates :when,
inclusion: { in: %w[on_success on_failure always],
message: 'should be on_success, on_failure or always' }
- validates :extends, type: String
- validates :rules, array_of_hashes: true
end
validate on: :composed do
- unless trigger.present? || bridge_needs.present?
+ unless trigger_defined? || bridge_needs.present?
errors.add(:config, 'should contain either a trigger or a needs:pipeline')
end
end
@@ -58,32 +45,7 @@ module Gitlab
inherit: false,
metadata: { allowed_needs: %i[job bridge] }
- entry :stage, ::Gitlab::Ci::Config::Entry::Stage,
- description: 'Pipeline stage this job will be executed into.',
- inherit: false
-
- entry :only, ::Gitlab::Ci::Config::Entry::Policy,
- description: 'Refs policy this job will be executed for.',
- default: ::Gitlab::Ci::Config::Entry::Policy::DEFAULT_ONLY,
- inherit: false
-
- entry :except, ::Gitlab::Ci::Config::Entry::Policy,
- description: 'Refs policy this job will be executed for.',
- inherit: false
-
- entry :rules, ::Gitlab::Ci::Config::Entry::Rules,
- description: 'List of evaluable Rules to determine job inclusion.',
- inherit: false,
- metadata: {
- allowed_when: %w[on_success on_failure always never manual delayed].freeze
- }
-
- entry :variables, ::Gitlab::Ci::Config::Entry::Variables,
- description: 'Environment variables available for this job.',
- inherit: false
-
- helpers(*ALLOWED_KEYS)
- attributes(*ALLOWED_KEYS)
+ attributes :when, :allow_failure
def self.matching?(name, config)
!name.to_s.start_with?('.') &&
@@ -95,56 +57,19 @@ module Gitlab
true
end
- def compose!(deps = nil)
- super do
- has_workflow_rules = deps&.workflow&.has_rules?
-
- # If workflow:rules: or rules: are used
- # they are considered not compatible
- # with `only/except` defaults
- #
- # Context: https://gitlab.com/gitlab-org/gitlab/merge_requests/21742
- if has_rules? || has_workflow_rules
- # Remove only/except defaults
- # defaults are not considered as defined
- @entries.delete(:only) unless only_defined?
- @entries.delete(:except) unless except_defined?
- end
- end
- end
-
- def has_rules?
- @config&.key?(:rules)
- end
-
- def name
- @metadata[:name]
- end
-
def value
- { name: name,
+ super.merge(
trigger: (trigger_value if trigger_defined?),
needs: (needs_value if needs_defined?),
ignore: !!allow_failure,
- stage: stage_value,
- when: when_value,
- extends: extends_value,
- variables: (variables_value if variables_defined?),
- rules: (rules_value if has_rules?),
- only: only_value,
- except: except_value,
- scheduling_type: needs_defined? && !bridge_needs ? :dag : :stage }.compact
+ when: self.when,
+ scheduling_type: needs_defined? && !bridge_needs ? :dag : :stage
+ ).compact
end
def bridge_needs
needs_value[:bridge] if needs_value
end
-
- private
-
- def overwrite_entry(deps, key, current_entry)
- deps.default[key] unless current_entry.specified?
- end
end
end
end
diff --git a/lib/gitlab/ci/config/entry/cache.rb b/lib/gitlab/ci/config/entry/cache.rb
index ef07c319ce4..a304d9b724f 100644
--- a/lib/gitlab/ci/config/entry/cache.rb
+++ b/lib/gitlab/ci/config/entry/cache.rb
@@ -28,8 +28,6 @@ module Gitlab
entry :paths, Entry::Paths,
description: 'Specify which paths should be cached across builds.'
- helpers :key
-
attributes :policy
def value
diff --git a/lib/gitlab/ci/config/entry/default.rb b/lib/gitlab/ci/config/entry/default.rb
index 88db17a75da..ab493ff7d78 100644
--- a/lib/gitlab/ci/config/entry/default.rb
+++ b/lib/gitlab/ci/config/entry/default.rb
@@ -61,8 +61,6 @@ module Gitlab
description: 'Default artifacts.',
inherit: false
- helpers :before_script, :image, :services, :after_script, :cache
-
private
def overwrite_entry(deps, key, current_entry)
diff --git a/lib/gitlab/ci/config/entry/include.rb b/lib/gitlab/ci/config/entry/include.rb
index f2f3dd84eda..cd09d83b728 100644
--- a/lib/gitlab/ci/config/entry/include.rb
+++ b/lib/gitlab/ci/config/entry/include.rb
@@ -10,7 +10,7 @@ module Gitlab
class Include < ::Gitlab::Config::Entry::Node
include ::Gitlab::Config::Entry::Validatable
- ALLOWED_KEYS = %i[local file remote template].freeze
+ ALLOWED_KEYS = %i[local file remote template artifact job].freeze
validations do
validates :config, hash_or_string: true
diff --git a/lib/gitlab/ci/config/entry/inherit.rb b/lib/gitlab/ci/config/entry/inherit.rb
new file mode 100644
index 00000000000..b806d77b155
--- /dev/null
+++ b/lib/gitlab/ci/config/entry/inherit.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Ci
+ class Config
+ module Entry
+ ##
+ # This class represents an inherit entry
+ #
+ class Inherit < ::Gitlab::Config::Entry::Node
+ include ::Gitlab::Config::Entry::Configurable
+
+ ALLOWED_KEYS = %i[default variables].freeze
+
+ validations do
+ validates :config, allowed_keys: ALLOWED_KEYS
+ end
+
+ entry :default, ::Gitlab::Ci::Config::Entry::Inherit::Default,
+ description: 'Indicates whether to inherit `default:`.',
+ default: true
+
+ entry :variables, ::Gitlab::Ci::Config::Entry::Inherit::Variables,
+ description: 'Indicates whether to inherit `variables:`.',
+ default: true
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/config/entry/inherit/default.rb b/lib/gitlab/ci/config/entry/inherit/default.rb
new file mode 100644
index 00000000000..74386baf62f
--- /dev/null
+++ b/lib/gitlab/ci/config/entry/inherit/default.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Ci
+ class Config
+ module Entry
+ ##
+ # This class represents a default inherit entry
+ #
+ class Inherit
+ class Default < ::Gitlab::Config::Entry::Simplifiable
+ strategy :BooleanStrategy, if: -> (config) { [true, false].include?(config) }
+ strategy :ArrayStrategy, if: -> (config) { config.is_a?(Array) }
+
+ class BooleanStrategy < ::Gitlab::Config::Entry::Boolean
+ def inherit?(_key)
+ value
+ end
+ end
+
+ class ArrayStrategy < ::Gitlab::Config::Entry::Node
+ include ::Gitlab::Config::Entry::Validatable
+
+ ALLOWED_VALUES = ::Gitlab::Ci::Config::Entry::Default::ALLOWED_KEYS.map(&:to_s).freeze
+
+ validations do
+ validates :config, type: Array
+ validates :config, array_of_strings: true
+ validates :config, allowed_array_values: { in: ALLOWED_VALUES }
+ end
+
+ def inherit?(key)
+ value.include?(key.to_s)
+ end
+ end
+
+ class UnknownStrategy < ::Gitlab::Config::Entry::Node
+ def errors
+ ["#{location} should be a bool or array of strings"]
+ end
+
+ def inherit?(key)
+ false
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/config/entry/inherit/variables.rb b/lib/gitlab/ci/config/entry/inherit/variables.rb
new file mode 100644
index 00000000000..aa68833bdb8
--- /dev/null
+++ b/lib/gitlab/ci/config/entry/inherit/variables.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Ci
+ class Config
+ module Entry
+ ##
+ # This class represents a variables inherit entry
+ #
+ class Inherit
+ class Variables < ::Gitlab::Config::Entry::Simplifiable
+ strategy :BooleanStrategy, if: -> (config) { [true, false].include?(config) }
+ strategy :ArrayStrategy, if: -> (config) { config.is_a?(Array) }
+
+ class BooleanStrategy < ::Gitlab::Config::Entry::Boolean
+ def inherit?(_key)
+ value
+ end
+ end
+
+ class ArrayStrategy < ::Gitlab::Config::Entry::Node
+ include ::Gitlab::Config::Entry::Validatable
+
+ validations do
+ validates :config, type: Array
+ validates :config, array_of_strings: true
+ end
+
+ def inherit?(key)
+ value.include?(key.to_s)
+ end
+ end
+
+ class UnknownStrategy < ::Gitlab::Config::Entry::Node
+ def errors
+ ["#{location} should be a bool or array of strings"]
+ end
+
+ def inherit?(key)
+ false
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/config/entry/job.rb b/lib/gitlab/ci/config/entry/job.rb
index 666c6e23eb4..1ea59491378 100644
--- a/lib/gitlab/ci/config/entry/job.rb
+++ b/lib/gitlab/ci/config/entry/job.rb
@@ -8,33 +8,21 @@ module Gitlab
# Entry that represents a concrete CI/CD job.
#
class Job < ::Gitlab::Config::Entry::Node
- include ::Gitlab::Config::Entry::Configurable
- include ::Gitlab::Config::Entry::Attributable
- include ::Gitlab::Config::Entry::Inheritable
+ include ::Gitlab::Ci::Config::Entry::Processable
ALLOWED_WHEN = %w[on_success on_failure always manual delayed].freeze
- ALLOWED_KEYS = %i[tags script only except rules type image services
- allow_failure type stage when start_in artifacts cache
- dependencies before_script needs after_script variables
- environment coverage retry parallel extends interruptible timeout
+ ALLOWED_KEYS = %i[tags script type image services
+ allow_failure type when start_in artifacts cache
+ dependencies before_script needs after_script
+ environment coverage retry parallel interruptible timeout
resource_group release].freeze
REQUIRED_BY_NEEDS = %i[stage].freeze
validations do
- validates :config, type: Hash
- validates :config, allowed_keys: ALLOWED_KEYS
+ validates :config, allowed_keys: ALLOWED_KEYS + PROCESSABLE_ALLOWED_KEYS
validates :config, required_keys: REQUIRED_BY_NEEDS, if: :has_needs?
- validates :config, presence: true
validates :script, presence: true
- validates :name, presence: true
- validates :name, type: Symbol
- validates :config,
- disallowed_keys: {
- in: %i[only except when start_in],
- message: 'key may not be used with `rules`'
- },
- if: :has_rules?
validates :config,
disallowed_keys: {
in: %i[release],
@@ -53,8 +41,6 @@ module Gitlab
}
validates :dependencies, array_of_strings: true
- validates :extends, array_of_strings_or_string: true
- validates :rules, array_of_hashes: true
validates :resource_group, type: String
end
@@ -81,10 +67,6 @@ module Gitlab
description: 'Commands that will be executed in this job.',
inherit: false
- entry :stage, Entry::Stage,
- description: 'Pipeline stage this job will be executed into.',
- inherit: false
-
entry :type, Entry::Stage,
description: 'Deprecated: stage this job will be executed into.',
inherit: false
@@ -125,31 +107,11 @@ module Gitlab
description: 'Artifacts configuration for this job.',
inherit: true
- entry :only, Entry::Policy,
- description: 'Refs policy this job will be executed for.',
- default: ::Gitlab::Ci::Config::Entry::Policy::DEFAULT_ONLY,
- inherit: false
-
- entry :except, Entry::Policy,
- description: 'Refs policy this job will be executed for.',
- inherit: false
-
- entry :rules, Entry::Rules,
- description: 'List of evaluable Rules to determine job inclusion.',
- inherit: false,
- metadata: {
- allowed_when: %w[on_success on_failure always never manual delayed].freeze
- }
-
entry :needs, Entry::Needs,
description: 'Needs configuration for this job.',
metadata: { allowed_needs: %i[job cross_dependency] },
inherit: false
- entry :variables, Entry::Variables,
- description: 'Environment variables available for this job.',
- inherit: false
-
entry :environment, Entry::Environment,
description: 'Environment configuration for this job.',
inherit: false
@@ -162,13 +124,8 @@ module Gitlab
description: 'This job will produce a release.',
inherit: false
- helpers :before_script, :script, :stage, :type, :after_script,
- :cache, :image, :services, :only, :except, :variables,
- :artifacts, :environment, :coverage, :retry, :rules,
- :parallel, :needs, :interruptible, :release, :tags
-
attributes :script, :tags, :allow_failure, :when, :dependencies,
- :needs, :retry, :parallel, :extends, :start_in, :rules,
+ :needs, :retry, :parallel, :start_in,
:interruptible, :timeout, :resource_group, :release
def self.matching?(name, config)
@@ -187,31 +144,9 @@ module Gitlab
end
@entries.delete(:type)
-
- has_workflow_rules = deps&.workflow&.has_rules?
-
- # If workflow:rules: or rules: are used
- # they are considered not compatible
- # with `only/except` defaults
- #
- # Context: https://gitlab.com/gitlab-org/gitlab/merge_requests/21742
- if has_rules? || has_workflow_rules
- # Remove only/except defaults
- # defaults are not considered as defined
- @entries.delete(:only) unless only_defined?
- @entries.delete(:except) unless except_defined?
- end
end
end
- def name
- @metadata[:name]
- end
-
- def value
- @config.merge(to_hash.compact)
- end
-
def manual_action?
self.when == 'manual'
end
@@ -220,38 +155,26 @@ module Gitlab
self.when == 'delayed'
end
- def has_rules?
- @config.try(:key?, :rules)
- end
-
def ignored?
allow_failure.nil? ? manual_action? : allow_failure
end
- private
-
- def overwrite_entry(deps, key, current_entry)
- deps.default[key] unless current_entry.specified?
- end
-
- def to_hash
- { name: name,
+ def value
+ super.merge(
before_script: before_script_value,
script: script_value,
image: image_value,
services: services_value,
- stage: stage_value,
cache: cache_value,
tags: tags_value,
- only: only_value,
- except: except_value,
- rules: has_rules? ? rules_value : nil,
- variables: variables_defined? ? variables_value : {},
+ when: self.when,
+ start_in: self.start_in,
+ dependencies: dependencies,
environment: environment_defined? ? environment_value : nil,
environment_name: environment_defined? ? environment_value[:name] : nil,
coverage: coverage_defined? ? coverage_value : nil,
retry: retry_defined? ? retry_value : nil,
- parallel: parallel_defined? ? parallel_value.to_i : nil,
+ parallel: has_parallel? ? parallel.to_i : nil,
interruptible: interruptible_defined? ? interruptible_value : nil,
timeout: has_timeout? ? ChronicDuration.parse(timeout.to_s) : nil,
artifacts: artifacts_value,
@@ -260,7 +183,8 @@ module Gitlab
ignore: ignored?,
needs: needs_defined? ? needs_value : nil,
resource_group: resource_group,
- scheduling_type: needs_defined? ? :dag : :stage }
+ scheduling_type: needs_defined? ? :dag : :stage
+ ).compact
end
end
end
diff --git a/lib/gitlab/ci/config/entry/processable.rb b/lib/gitlab/ci/config/entry/processable.rb
new file mode 100644
index 00000000000..81211acbec7
--- /dev/null
+++ b/lib/gitlab/ci/config/entry/processable.rb
@@ -0,0 +1,125 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Ci
+ class Config
+ module Entry
+ ##
+ # Entry that represents a CI/CD Processable (a job)
+ #
+ module Processable
+ extend ActiveSupport::Concern
+
+ include ::Gitlab::Config::Entry::Configurable
+ include ::Gitlab::Config::Entry::Attributable
+ include ::Gitlab::Config::Entry::Inheritable
+
+ PROCESSABLE_ALLOWED_KEYS = %i[extends stage only except rules variables inherit].freeze
+
+ included do
+ validations do
+ validates :config, presence: true
+ validates :name, presence: true
+ validates :name, type: Symbol
+
+ validates :config, disallowed_keys: {
+ in: %i[only except when start_in],
+ message: 'key may not be used with `rules`'
+ },
+ if: :has_rules?
+
+ with_options allow_nil: true do
+ validates :extends, array_of_strings_or_string: true
+ validates :rules, array_of_hashes: true
+ end
+ end
+
+ entry :stage, Entry::Stage,
+ description: 'Pipeline stage this job will be executed into.',
+ inherit: false
+
+ entry :only, ::Gitlab::Ci::Config::Entry::Policy,
+ description: 'Refs policy this job will be executed for.',
+ default: ::Gitlab::Ci::Config::Entry::Policy::DEFAULT_ONLY,
+ inherit: false
+
+ entry :except, ::Gitlab::Ci::Config::Entry::Policy,
+ description: 'Refs policy this job will be executed for.',
+ inherit: false
+
+ entry :rules, ::Gitlab::Ci::Config::Entry::Rules,
+ description: 'List of evaluable Rules to determine job inclusion.',
+ inherit: false,
+ metadata: {
+ allowed_when: %w[on_success on_failure always never manual delayed].freeze
+ }
+
+ entry :variables, ::Gitlab::Ci::Config::Entry::Variables,
+ description: 'Environment variables available for this job.',
+ inherit: false
+
+ entry :inherit, ::Gitlab::Ci::Config::Entry::Inherit,
+ description: 'Indicates whether to inherit defaults or not.',
+ inherit: false,
+ default: {}
+
+ attributes :extends, :rules
+ end
+
+ def compose!(deps = nil)
+ super do
+ has_workflow_rules = deps&.workflow_entry&.has_rules?
+
+ # If workflow:rules: or rules: are used
+ # they are considered not compatible
+ # with `only/except` defaults
+ #
+ # Context: https://gitlab.com/gitlab-org/gitlab/merge_requests/21742
+ if has_rules? || has_workflow_rules
+ # Remove only/except defaults
+ # defaults are not considered as defined
+ @entries.delete(:only) unless only_defined? # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ @entries.delete(:except) unless except_defined? # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ end
+
+ # inherit root variables
+ @root_variables_value = deps&.variables_value # rubocop:disable Gitlab/ModuleWithInstanceVariables
+
+ yield if block_given?
+ end
+ end
+
+ def name
+ metadata[:name]
+ end
+
+ def overwrite_entry(deps, key, current_entry)
+ return unless inherit_entry&.default_entry&.inherit?(key)
+ return unless deps.default_entry
+
+ deps.default_entry[key] unless current_entry.specified?
+ end
+
+ def value
+ { name: name,
+ stage: stage_value,
+ extends: extends,
+ rules: rules_value,
+ variables: root_and_job_variables_value,
+ only: only_value,
+ except: except_value }.compact
+ end
+
+ def root_and_job_variables_value
+ root_variables = @root_variables_value.to_h # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ root_variables = root_variables.select do |key, _|
+ inherit_entry&.variables_entry&.inherit?(key)
+ end
+
+ root_variables.merge(variables_value.to_h)
+ end
+ end
+ end
+ end
+ end
+end
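
Processable now owns the job-independent keywords (`stage`, `only`/`except`, `rules`, `variables`, `extends`, `inherit`) and merges root variables into each job through `root_and_job_variables_value`, filtered by the job's `inherit:variables:` setting; `overwrite_entry` likewise only copies a key from `default:` when `inherit:default:` allows it. A hedged end-to-end sketch (names are illustrative; `inherit:default:` accepts a boolean or a list of default keys, like `inherit:variables:`):

    default:
      image: ruby:2.6

    variables:
      GLOBAL_VAR: "value"

    job:
      inherit:
        default: [image]   # copy only `image` from default:
        variables: false   # do not merge any root variables
      script: echo run
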
diff --git a/lib/gitlab/ci/config/entry/release.rb b/lib/gitlab/ci/config/entry/release.rb
index 3eceaa0ccd9..b4e4c149730 100644
--- a/lib/gitlab/ci/config/entry/release.rb
+++ b/lib/gitlab/ci/config/entry/release.rb
@@ -33,8 +33,6 @@ module Gitlab
validates :description, type: String, presence: true
end
- helpers :assets
-
def value
@config[:assets] = assets_value if @config.key?(:assets)
@config
diff --git a/lib/gitlab/ci/config/entry/release/assets.rb b/lib/gitlab/ci/config/entry/release/assets.rb
index 82ed39f51e0..1f7057d1bf6 100644
--- a/lib/gitlab/ci/config/entry/release/assets.rb
+++ b/lib/gitlab/ci/config/entry/release/assets.rb
@@ -23,8 +23,6 @@ module Gitlab
validates :links, array_of_hashes: true, presence: true
end
- helpers :links
-
def value
@config[:links] = links_value if @config.key?(:links)
@config
diff --git a/lib/gitlab/ci/config/entry/reports.rb b/lib/gitlab/ci/config/entry/reports.rb
index 571e056e096..40d37f3601a 100644
--- a/lib/gitlab/ci/config/entry/reports.rb
+++ b/lib/gitlab/ci/config/entry/reports.rb
@@ -11,7 +11,10 @@ module Gitlab
include ::Gitlab::Config::Entry::Validatable
include ::Gitlab::Config::Entry::Attributable
- ALLOWED_KEYS = %i[junit codequality sast dependency_scanning container_scanning dast performance license_management license_scanning metrics lsif].freeze
+ ALLOWED_KEYS =
+ %i[junit codequality sast dependency_scanning container_scanning
+ dast performance license_management license_scanning metrics lsif
+ dotenv cobertura].freeze
attributes ALLOWED_KEYS
@@ -31,6 +34,8 @@ module Gitlab
validates :license_scanning, array_of_strings_or_string: true
validates :metrics, array_of_strings_or_string: true
validates :lsif, array_of_strings_or_string: true
+ validates :dotenv, array_of_strings_or_string: true
+ validates :cobertura, array_of_strings_or_string: true
end
end
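
With `dotenv` and `cobertura` whitelisted, jobs can publish the two new report types as artifacts; a minimal usage sketch (paths are illustrative):

    test:
      script: bundle exec rspec
      artifacts:
        reports:
          cobertura: coverage/cobertura-coverage.xml
          dotenv: build.env
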
diff --git a/lib/gitlab/ci/config/entry/root.rb b/lib/gitlab/ci/config/entry/root.rb
index 12dd942fc1c..19d6a470941 100644
--- a/lib/gitlab/ci/config/entry/root.rb
+++ b/lib/gitlab/ci/config/entry/root.rb
@@ -65,15 +65,16 @@ module Gitlab
reserved: true
entry :workflow, Entry::Workflow,
- description: 'List of evaluable rules to determine Pipeline status'
+ description: 'List of evaluable rules to determine Pipeline status',
+ default: {}
- helpers :default, :jobs, :stages, :types, :variables, :workflow
+ dynamic_helpers :jobs
delegate :before_script_value,
:image_value,
:services_value,
:after_script_value,
- :cache_value, to: :default
+ :cache_value, to: :default_entry
attr_reader :jobs_config
@@ -102,14 +103,6 @@ module Gitlab
end
end
- def default
- self[:default]
- end
-
- def workflow
- self[:workflow] if workflow_defined?
- end
-
private
# rubocop: disable CodeReuse/ActiveRecord
diff --git a/lib/gitlab/ci/config/entry/service.rb b/lib/gitlab/ci/config/entry/service.rb
index 8d16371e857..247bf930d3b 100644
--- a/lib/gitlab/ci/config/entry/service.rb
+++ b/lib/gitlab/ci/config/entry/service.rb
@@ -7,8 +7,13 @@ module Gitlab
##
# Entry that represents a configuration of Docker service.
#
- class Service < Image
+ # TODO: remove duplication with Image superclass by defining a common
+ # Imageable concern.
+ # https://gitlab.com/gitlab-org/gitlab/issues/208774
+ class Service < ::Gitlab::Config::Entry::Node
include ::Gitlab::Config::Entry::Validatable
+ include ::Gitlab::Config::Entry::Attributable
+ include ::Gitlab::Config::Entry::Configurable
ALLOWED_KEYS = %i[name entrypoint command alias ports].freeze
@@ -16,9 +21,9 @@ module Gitlab
validates :config, hash_or_string: true
validates :config, allowed_keys: ALLOWED_KEYS
validates :config, disallowed_keys: %i[ports], unless: :with_image_ports?
-
validates :name, type: String, presence: true
validates :entrypoint, array_of_strings: true, allow_nil: true
+
validates :command, array_of_strings: true, allow_nil: true
validates :alias, type: String, allow_nil: true
validates :alias, type: String, presence: true, unless: ->(record) { record.ports.blank? }
@@ -27,6 +32,8 @@ module Gitlab
entry :ports, Entry::Ports,
description: 'Ports used to expose the service'
+ attributes :ports
+
def alias
value[:alias]
end
@@ -34,6 +41,29 @@ module Gitlab
def command
value[:command]
end
+
+ def name
+ value[:name]
+ end
+
+ def entrypoint
+ value[:entrypoint]
+ end
+
+ def value
+ return { name: @config } if string?
+ return @config if hash?
+
+ {}
+ end
+
+ def with_image_ports?
+ opt(:with_image_ports)
+ end
+
+ def skip_config_hash_validation?
+ true
+ end
end
end
end
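
The entry still accepts either a plain image string or a hash; a sketch of the hash form it validates (the `ports` key additionally requires the `:with_image_ports` option to be enabled):

    services:
      - name: postgres:11.7
        alias: db
        entrypoint: ["docker-entrypoint.sh"]
        command: ["postgres"]
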
diff --git a/lib/gitlab/ci/config/entry/workflow.rb b/lib/gitlab/ci/config/entry/workflow.rb
index 1d9007c9b9b..5bc992a38a0 100644
--- a/lib/gitlab/ci/config/entry/workflow.rb
+++ b/lib/gitlab/ci/config/entry/workflow.rb
@@ -12,7 +12,6 @@ module Gitlab
validations do
validates :config, type: Hash
validates :config, allowed_keys: ALLOWED_KEYS
- validates :config, presence: true
end
entry :rules, Entry::Rules,
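
Removing the presence validation pairs with the `default: {}` added to the `workflow` entry in root.rb above, so a missing `workflow:` section no longer needs special-casing. When the keyword is used it still only accepts `rules`, for example:

    workflow:
      rules:
        - if: '$CI_MERGE_REQUEST_IID'
        - if: '$CI_COMMIT_BRANCH == "master"'
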
diff --git a/lib/gitlab/ci/config/external/context.rb b/lib/gitlab/ci/config/external/context.rb
index bb4439cd069..814dcc66362 100644
--- a/lib/gitlab/ci/config/external/context.rb
+++ b/lib/gitlab/ci/config/external/context.rb
@@ -7,13 +7,14 @@ module Gitlab
class Context
TimeoutError = Class.new(StandardError)
- attr_reader :project, :sha, :user
+ attr_reader :project, :sha, :user, :parent_pipeline
attr_reader :expandset, :execution_deadline
- def initialize(project: nil, sha: nil, user: nil)
+ def initialize(project: nil, sha: nil, user: nil, parent_pipeline: nil)
@project = project
@sha = sha
@user = user
+ @parent_pipeline = parent_pipeline
@expandset = Set.new
@execution_deadline = 0
diff --git a/lib/gitlab/ci/config/external/file/artifact.rb b/lib/gitlab/ci/config/external/file/artifact.rb
new file mode 100644
index 00000000000..a8f78b62d8d
--- /dev/null
+++ b/lib/gitlab/ci/config/external/file/artifact.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Ci
+ class Config
+ module External
+ module File
+ class Artifact < Base
+ extend ::Gitlab::Utils::Override
+ include Gitlab::Utils::StrongMemoize
+
+ attr_reader :job_name
+
+ def initialize(params, context)
+ @location = params[:artifact]
+ @job_name = params[:job]
+
+ super
+ end
+
+ def content
+ strong_memoize(:content) do
+ next unless artifact_job
+
+ Gitlab::Ci::ArtifactFileReader.new(artifact_job).read(location)
+ rescue Gitlab::Ci::ArtifactFileReader::Error => error
+ errors.push(error.message)
+ end
+ end
+
+ def matching?
+ super &&
+ Feature.enabled?(:ci_dynamic_child_pipeline, project, default_enabled: true)
+ end
+
+ private
+
+ def project
+ context&.parent_pipeline&.project
+ end
+
+ def validate_content!
+ return unless ensure_preconditions_satisfied!
+
+ errors.push("File `#{location}` is empty!") unless content.present?
+ end
+
+ def ensure_preconditions_satisfied!
+ unless creating_child_pipeline?
+ errors.push('Including configs from artifacts is only allowed when triggering child pipelines')
+ return false
+ end
+
+ unless job_name.present?
+ errors.push("Job must be provided when including configs from artifacts")
+ return false
+ end
+
+ unless artifact_job.present?
+ errors.push("Job `#{job_name}` not found in parent pipeline or does not have artifacts!")
+ return false
+ end
+
+ true
+ end
+
+ def artifact_job
+ strong_memoize(:artifact_job) do
+ next unless creating_child_pipeline?
+
+ context.parent_pipeline.find_job_with_archive_artifacts(job_name)
+ end
+ end
+
+ def creating_child_pipeline?
+ context.parent_pipeline.present?
+ end
+
+ override :expand_context_attrs
+ def expand_context_attrs
+ {
+ project: context.project,
+ sha: context.sha,
+ user: context.user,
+ parent_pipeline: context.parent_pipeline
+ }
+ end
+ end
+ end
+ end
+ end
+ end
+end
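
This file type enables dynamic child pipelines: a job in the parent pipeline generates a CI configuration file, stores it as an artifact, and a bridge job includes it via `artifact:`/`job:`. A hedged sketch of the parent-side configuration (file and job names are illustrative):

    generate-config:
      stage: build
      script: ./generate-ci-config > generated-config.yml
      artifacts:
        paths: [generated-config.yml]

    run-child-pipeline:
      stage: test
      trigger:
        include:
          - artifact: generated-config.yml
            job: generate-config
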
diff --git a/lib/gitlab/ci/config/external/file/local.rb b/lib/gitlab/ci/config/external/file/local.rb
index 8cb1575a3e1..e74f5b33de7 100644
--- a/lib/gitlab/ci/config/external/file/local.rb
+++ b/lib/gitlab/ci/config/external/file/local.rb
@@ -40,7 +40,8 @@ module Gitlab
{
project: context.project,
sha: context.sha,
- user: context.user
+ user: context.user,
+ parent_pipeline: context.parent_pipeline
}
end
end
diff --git a/lib/gitlab/ci/config/external/file/project.rb b/lib/gitlab/ci/config/external/file/project.rb
index c7b49b495fa..be479741784 100644
--- a/lib/gitlab/ci/config/external/file/project.rb
+++ b/lib/gitlab/ci/config/external/file/project.rb
@@ -71,7 +71,8 @@ module Gitlab
{
project: project,
sha: sha,
- user: context.user
+ user: context.user,
+ parent_pipeline: context.parent_pipeline
}
end
end
diff --git a/lib/gitlab/ci/config/external/mapper.rb b/lib/gitlab/ci/config/external/mapper.rb
index 0143d7784fa..97ae6c4ceba 100644
--- a/lib/gitlab/ci/config/external/mapper.rb
+++ b/lib/gitlab/ci/config/external/mapper.rb
@@ -13,7 +13,8 @@ module Gitlab
External::File::Remote,
External::File::Template,
External::File::Local,
- External::File::Project
+ External::File::Project,
+ External::File::Artifact
].freeze
Error = Class.new(StandardError)
diff --git a/lib/gitlab/ci/parsers.rb b/lib/gitlab/ci/parsers.rb
index c76cd5ff285..a44105d53c2 100644
--- a/lib/gitlab/ci/parsers.rb
+++ b/lib/gitlab/ci/parsers.rb
@@ -9,7 +9,8 @@ module Gitlab
def self.parsers
{
- junit: ::Gitlab::Ci::Parsers::Test::Junit
+ junit: ::Gitlab::Ci::Parsers::Test::Junit,
+ cobertura: ::Gitlab::Ci::Parsers::Coverage::Cobertura
}
end
diff --git a/lib/gitlab/ci/parsers/coverage/cobertura.rb b/lib/gitlab/ci/parsers/coverage/cobertura.rb
new file mode 100644
index 00000000000..006d5097148
--- /dev/null
+++ b/lib/gitlab/ci/parsers/coverage/cobertura.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Ci
+ module Parsers
+ module Coverage
+ class Cobertura
+ CoberturaParserError = Class.new(Gitlab::Ci::Parsers::ParserError)
+
+ def parse!(xml_data, coverage_report)
+ root = Hash.from_xml(xml_data)
+
+ parse_all(root, coverage_report)
+ rescue Nokogiri::XML::SyntaxError
+ raise CoberturaParserError, "XML parsing failed"
+ rescue
+ raise CoberturaParserError, "Cobertura parsing failed"
+ end
+
+ private
+
+ def parse_all(root, coverage_report)
+ return unless root.present?
+
+ root.each do |key, value|
+ parse_node(key, value, coverage_report)
+ end
+ end
+
+ def parse_node(key, value, coverage_report)
+ if key == 'class'
+ Array.wrap(value).each do |item|
+ parse_class(item, coverage_report)
+ end
+ elsif value.is_a?(Hash)
+ parse_all(value, coverage_report)
+ elsif value.is_a?(Array)
+ value.each do |item|
+ parse_all(item, coverage_report)
+ end
+ end
+ end
+
+ def parse_class(file, coverage_report)
+ return unless file["filename"].present? && file["lines"].present?
+
+ parsed_lines = parse_lines(file["lines"])
+
+ coverage_report.add_file(file["filename"], Hash[parsed_lines])
+ end
+
+ def parse_lines(lines)
+ line_array = Array.wrap(lines["line"])
+
+ line_array.map do |line|
+ # Using `Integer()` here to raise exception on invalid values
+ [Integer(line["number"]), Integer(line["hits"])]
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/parsers/test/junit.rb b/lib/gitlab/ci/parsers/test/junit.rb
index 133eb16a83e..0ce901fa5aa 100644
--- a/lib/gitlab/ci/parsers/test/junit.rb
+++ b/lib/gitlab/ci/parsers/test/junit.rb
@@ -6,6 +6,7 @@ module Gitlab
module Test
class Junit
JunitParserError = Class.new(Gitlab::Ci::Parsers::ParserError)
+ ATTACHMENT_TAG_REGEX = /\[\[ATTACHMENT\|(?<path>.+?)\]\]/.freeze
def parse!(xml_data, test_suite)
root = Hash.from_xml(xml_data)
@@ -49,6 +50,7 @@ module Gitlab
if data['failure']
status = ::Gitlab::Ci::Reports::TestCase::STATUS_FAILED
system_output = data['failure']
+ attachment = attachment_path(data['system_out'])
elsif data['error']
status = ::Gitlab::Ci::Reports::TestCase::STATUS_ERROR
system_output = data['error']
@@ -63,9 +65,17 @@ module Gitlab
file: data['file'],
execution_time: data['time'],
status: status,
- system_output: system_output
+ system_output: system_output,
+ attachment: attachment
)
end
+
+ def attachment_path(data)
+ return unless data
+
+ matches = data.match(ATTACHMENT_TAG_REGEX)
+ matches[:path] if matches
+ end
end
end
end
diff --git a/lib/gitlab/ci/pipeline/chain/base.rb b/lib/gitlab/ci/pipeline/chain/base.rb
index aabdf7ce47d..9b494f3a7ec 100644
--- a/lib/gitlab/ci/pipeline/chain/base.rb
+++ b/lib/gitlab/ci/pipeline/chain/base.rb
@@ -7,7 +7,7 @@ module Gitlab
class Base
attr_reader :pipeline, :command, :config
- delegate :project, :current_user, to: :command
+ delegate :project, :current_user, :parent_pipeline, to: :command
def initialize(pipeline, command)
@pipeline = pipeline
diff --git a/lib/gitlab/ci/pipeline/chain/build/associations.rb b/lib/gitlab/ci/pipeline/chain/build/associations.rb
new file mode 100644
index 00000000000..eb49c56bcd7
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/chain/build/associations.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Ci
+ module Pipeline
+ module Chain
+ class Build
+ class Associations < Chain::Base
+ def perform!
+ return unless @command.bridge
+
+ @pipeline.build_source_pipeline(
+ source_pipeline: @command.bridge.pipeline,
+ source_project: @command.bridge.project,
+ source_bridge: @command.bridge,
+ project: @command.project
+ )
+ end
+
+ def break?
+ false
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/pipeline/chain/command.rb b/lib/gitlab/ci/pipeline/chain/command.rb
index 6a16e6df23d..fa46114615c 100644
--- a/lib/gitlab/ci/pipeline/chain/command.rb
+++ b/lib/gitlab/ci/pipeline/chain/command.rb
@@ -72,6 +72,10 @@ module Gitlab
project.repository.ambiguous_ref?(origin_ref)
end
end
+
+ def parent_pipeline
+ bridge&.parent_pipeline
+ end
end
end
end
diff --git a/lib/gitlab/ci/pipeline/chain/config/process.rb b/lib/gitlab/ci/pipeline/chain/config/process.rb
index 09d1b0edc93..1e47be21b93 100644
--- a/lib/gitlab/ci/pipeline/chain/config/process.rb
+++ b/lib/gitlab/ci/pipeline/chain/config/process.rb
@@ -15,7 +15,8 @@ module Gitlab
@command.config_content, {
project: project,
sha: @pipeline.sha,
- user: current_user
+ user: current_user,
+ parent_pipeline: parent_pipeline
}
)
rescue Gitlab::Ci::YamlProcessor::ValidationError => ex
diff --git a/lib/gitlab/ci/pipeline/seed/build.rb b/lib/gitlab/ci/pipeline/seed/build.rb
index 98b4b4593e0..114a46ca9f6 100644
--- a/lib/gitlab/ci/pipeline/seed/build.rb
+++ b/lib/gitlab/ci/pipeline/seed/build.rb
@@ -7,6 +7,8 @@ module Gitlab
class Build < Seed::Base
include Gitlab::Utils::StrongMemoize
+ EnvironmentCreationFailure = Class.new(StandardError)
+
delegate :dig, to: :@seed_attributes
# When the `ci_dag_limit_needs` is enabled it uses the lower limit
@@ -77,14 +79,39 @@ module Gitlab
if bridge?
::Ci::Bridge.new(attributes)
else
- ::Ci::Build.new(attributes).tap do |job|
- job.deployment = Seed::Deployment.new(job).to_resource
- job.resource_group = Seed::Build::ResourceGroup.new(job, @resource_group_key).to_resource
+ ::Ci::Build.new(attributes).tap do |build|
+ build.assign_attributes(self.class.environment_attributes_for(build))
+ build.resource_group = Seed::Build::ResourceGroup.new(build, @resource_group_key).to_resource
end
end
end
end
+ def self.environment_attributes_for(build)
+ return {} unless build.has_environment?
+
+ environment = Seed::Environment.new(build).to_resource
+
+ # If there is a validation error on environment creation, such as
+ # the name contains invalid character, the build falls back to a
+ # non-environment job.
+ unless environment.persisted?
+ Gitlab::ErrorTracking.track_exception(
+ EnvironmentCreationFailure.new,
+ project_id: build.project_id,
+ reason: environment.errors.full_messages.to_sentence)
+
+ return { environment: nil }
+ end
+
+ {
+ deployment: Seed::Deployment.new(build, environment).to_resource,
+ metadata_attributes: {
+ expanded_environment_name: environment.name
+ }
+ }
+ end
+
private
def all_of_only?
diff --git a/lib/gitlab/ci/pipeline/seed/deployment.rb b/lib/gitlab/ci/pipeline/seed/deployment.rb
index cc63fb4c609..69dfd6be8d5 100644
--- a/lib/gitlab/ci/pipeline/seed/deployment.rb
+++ b/lib/gitlab/ci/pipeline/seed/deployment.rb
@@ -7,9 +7,9 @@ module Gitlab
class Deployment < Seed::Base
attr_reader :job, :environment
- def initialize(job)
+ def initialize(job, environment)
@job = job
- @environment = Seed::Environment.new(@job)
+ @environment = environment
end
def to_resource
@@ -17,19 +17,18 @@ module Gitlab
return unless job.starts_environment?
deployment = ::Deployment.new(attributes)
- deployment.environment = environment.to_resource
# If there is a validation error on environment creation, such as
# the name contains invalid character, the job will fall back to a
# non-environment job.
return unless deployment.valid? && deployment.environment.persisted?
- if cluster_id = deployment.environment.deployment_platform&.cluster_id
+ if cluster = deployment.environment.deployment_platform&.cluster
# double write cluster_id until 12.9: https://gitlab.com/gitlab-org/gitlab/issues/202628
- deployment.cluster_id = cluster_id
+ deployment.cluster_id = cluster.id
deployment.deployment_cluster = ::DeploymentCluster.new(
- cluster_id: cluster_id,
- kubernetes_namespace: deployment.environment.deployment_namespace
+ cluster_id: cluster.id,
+ kubernetes_namespace: cluster.kubernetes_namespace_for(deployment.environment, deployable: job)
)
end
@@ -45,6 +44,7 @@ module Gitlab
def attributes
{
project: job.project,
+ environment: environment,
user: job.user,
ref: job.ref,
tag: job.tag,
diff --git a/lib/gitlab/ci/pipeline/seed/environment.rb b/lib/gitlab/ci/pipeline/seed/environment.rb
index 2d3a1e702f9..42e8c365824 100644
--- a/lib/gitlab/ci/pipeline/seed/environment.rb
+++ b/lib/gitlab/ci/pipeline/seed/environment.rb
@@ -12,25 +12,15 @@ module Gitlab
end
def to_resource
- find_environment || ::Environment.create(attributes)
+ job.project.environments
+ .safe_find_or_create_by(name: expanded_environment_name)
end
private
- def find_environment
- job.project.environments.find_by_name(expanded_environment_name)
- end
-
def expanded_environment_name
job.expanded_environment_name
end
-
- def attributes
- {
- project: job.project,
- name: expanded_environment_name
- }
- end
end
end
end
diff --git a/lib/gitlab/ci/reports/coverage_reports.rb b/lib/gitlab/ci/reports/coverage_reports.rb
new file mode 100644
index 00000000000..31afb636d2f
--- /dev/null
+++ b/lib/gitlab/ci/reports/coverage_reports.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Ci
+ module Reports
+ class CoverageReports
+ attr_reader :files
+
+ def initialize
+ @files = {}
+ end
+
+ def pick(keys)
+ coverage_files = files.select do |key|
+ keys.include?(key)
+ end
+
+ { files: coverage_files }
+ end
+
+ def add_file(name, line_coverage)
+ if files[name].present?
+ line_coverage.each { |line, hits| combine_lines(name, line, hits) }
+
+ else
+ files[name] = line_coverage
+ end
+ end
+
+ private
+
+ def combine_lines(name, line, hits)
+ if files[name][line].present?
+ files[name][line] += hits
+
+ else
+ files[name][line] = hits
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/reports/test_case.rb b/lib/gitlab/ci/reports/test_case.rb
index fdeaad698b9..55856f64533 100644
--- a/lib/gitlab/ci/reports/test_case.rb
+++ b/lib/gitlab/ci/reports/test_case.rb
@@ -10,9 +10,9 @@ module Gitlab
STATUS_ERROR = 'error'
STATUS_TYPES = [STATUS_SUCCESS, STATUS_FAILED, STATUS_SKIPPED, STATUS_ERROR].freeze
- attr_reader :name, :classname, :execution_time, :status, :file, :system_output, :stack_trace, :key
+ attr_reader :name, :classname, :execution_time, :status, :file, :system_output, :stack_trace, :key, :attachment
- def initialize(name:, classname:, execution_time:, status:, file: nil, system_output: nil, stack_trace: nil)
+ def initialize(name:, classname:, execution_time:, status:, file: nil, system_output: nil, stack_trace: nil, attachment: nil)
@name = name
@classname = classname
@file = file
@@ -21,6 +21,11 @@ module Gitlab
@system_output = system_output
@stack_trace = stack_trace
@key = sanitize_key_name("#{classname}_#{name}")
+ @attachment = attachment
+ end
+
+ def has_attachment?
+ attachment.present?
end
private
diff --git a/lib/gitlab/ci/templates/Deploy-ECS.gitlab-ci.yml b/lib/gitlab/ci/templates/Deploy-ECS.gitlab-ci.yml
new file mode 100644
index 00000000000..ecca1731579
--- /dev/null
+++ b/lib/gitlab/ci/templates/Deploy-ECS.gitlab-ci.yml
@@ -0,0 +1,36 @@
+stages:
+ - build
+ - test
+ - review
+ - deploy
+ - production
+
+include:
+ - template: Jobs/Build.gitlab-ci.yml
+
+.deploy_to_ecs:
+ image: registry.gitlab.com/gitlab-org/cloud-deploy:latest
+ script:
+ - ecs update-task-definition
+
+review:
+ extends: .deploy_to_ecs
+ stage: review
+ environment:
+ name: review/$CI_COMMIT_REF_NAME
+ only:
+ refs:
+ - branches
+ - tags
+ except:
+ refs:
+ - master
+
+production:
+ extends: .deploy_to_ecs
+ stage: production
+ environment:
+ name: production
+ only:
+ refs:
+ - master
diff --git a/lib/gitlab/ci/templates/Jobs/Browser-Performance-Testing.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Browser-Performance-Testing.gitlab-ci.yml
index c3ca44eea9e..20063cf6a69 100644
--- a/lib/gitlab/ci/templates/Jobs/Browser-Performance-Testing.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/Browser-Performance-Testing.gitlab-ci.yml
@@ -1,12 +1,10 @@
performance:
stage: performance
- # pin to a version matching the dind service, just to be safe
image: docker:19.03.5
allow_failure: true
variables:
DOCKER_TLS_CERTDIR: ""
services:
- # pin to a known working version until https://gitlab.com/gitlab-org/gitlab-runner/issues/6697 is fixed
- docker:19.03.5-dind
script:
- |
diff --git a/lib/gitlab/ci/templates/Jobs/Build.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Build.gitlab-ci.yml
index 488945ffa3e..bb0de9df8bf 100644
--- a/lib/gitlab/ci/templates/Jobs/Build.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/Build.gitlab-ci.yml
@@ -4,7 +4,6 @@ build:
variables:
DOCKER_TLS_CERTDIR: ""
services:
- # pin to a known working version until https://gitlab.com/gitlab-org/gitlab-runner/issues/6697 is fixed
- docker:19.03.5-dind
script:
- |
diff --git a/lib/gitlab/ci/templates/Jobs/Code-Quality.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Code-Quality.gitlab-ci.yml
index dd5144e28a7..a6338ff6925 100644
--- a/lib/gitlab/ci/templates/Jobs/Code-Quality.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/Code-Quality.gitlab-ci.yml
@@ -1,15 +1,13 @@
code_quality:
stage: test
- # pin to a version matching the dind service, just to be safe
image: docker:19.03.5
allow_failure: true
services:
- # pin to a known working version until https://gitlab.com/gitlab-org/gitlab-runner/issues/6697 is fixed
- docker:19.03.5-dind
variables:
DOCKER_DRIVER: overlay2
DOCKER_TLS_CERTDIR: ""
- CODE_QUALITY_IMAGE: "registry.gitlab.com/gitlab-org/security-products/codequality:0.85.6"
+ CODE_QUALITY_IMAGE: "registry.gitlab.com/gitlab-org/ci-cd/codequality:0.85.9"
script:
- |
if ! docker info &>/dev/null; then
diff --git a/lib/gitlab/ci/templates/Jobs/DAST-Default-Branch-Deploy.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/DAST-Default-Branch-Deploy.gitlab-ci.yml
index 78ee9b28605..3cf4910fe86 100644
--- a/lib/gitlab/ci/templates/Jobs/DAST-Default-Branch-Deploy.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/DAST-Default-Branch-Deploy.gitlab-ci.yml
@@ -1,5 +1,5 @@
.dast-auto-deploy:
- image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:v0.9.1"
+ image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:v0.10.0"
dast_environment_deploy:
extends: .dast-auto-deploy
diff --git a/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml
index 47cc6caa192..c6c8256b4bb 100644
--- a/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml
@@ -1,5 +1,5 @@
.auto-deploy:
- image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:v0.9.3"
+ image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:v0.12.1"
review:
extends: .auto-deploy
@@ -40,6 +40,7 @@ stop_review:
environment:
name: review/$CI_COMMIT_REF_NAME
action: stop
+ dependencies: []
when: manual
allow_failure: true
only:
diff --git a/lib/gitlab/ci/templates/Managed-Cluster-Applications.gitlab-ci.yml b/lib/gitlab/ci/templates/Managed-Cluster-Applications.gitlab-ci.yml
index 73ae63c3092..4ef6a4d3bef 100644
--- a/lib/gitlab/ci/templates/Managed-Cluster-Applications.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Managed-Cluster-Applications.gitlab-ci.yml
@@ -1,6 +1,6 @@
apply:
stage: deploy
- image: "registry.gitlab.com/gitlab-org/cluster-integration/cluster-applications:v0.8.0"
+ image: "registry.gitlab.com/gitlab-org/cluster-integration/cluster-applications:v0.12.0"
environment:
name: production
variables:
@@ -11,9 +11,12 @@ apply:
SENTRY_VALUES_FILE: $CI_PROJECT_DIR/.gitlab/managed-apps/sentry/values.yaml
GITLAB_RUNNER_VALUES_FILE: $CI_PROJECT_DIR/.gitlab/managed-apps/gitlab-runner/values.yaml
CILIUM_VALUES_FILE: $CI_PROJECT_DIR/.gitlab/managed-apps/cilium/values.yaml
+ CILIUM_HUBBLE_VALUES_FILE: $CI_PROJECT_DIR/.gitlab/managed-apps/cilium/hubble-values.yaml
JUPYTERHUB_VALUES_FILE: $CI_PROJECT_DIR/.gitlab/managed-apps/jupyterhub/values.yaml
PROMETHEUS_VALUES_FILE: $CI_PROJECT_DIR/.gitlab/managed-apps/prometheus/values.yaml
ELASTIC_STACK_VALUES_FILE: $CI_PROJECT_DIR/.gitlab/managed-apps/elastic-stack/values.yaml
+ VAULT_VALUES_FILE: $CI_PROJECT_DIR/.gitlab/managed-apps/vault/values.yaml
+ CROSSPLANE_VALUES_FILE: $CI_PROJECT_DIR/.gitlab/managed-apps/crossplane/values.yaml
script:
- gitlab-managed-apps /usr/local/share/gitlab-managed-apps/helmfile.yaml
only:
diff --git a/lib/gitlab/ci/templates/Pages/Jekyll.gitlab-ci.yml b/lib/gitlab/ci/templates/Pages/Jekyll.gitlab-ci.yml
index e7dacd3a1fc..0c8859dc779 100644
--- a/lib/gitlab/ci/templates/Pages/Jekyll.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Pages/Jekyll.gitlab-ci.yml
@@ -1,12 +1,13 @@
# Template project: https://gitlab.com/pages/jekyll
# Docs: https://docs.gitlab.com/ce/pages/
-image: ruby:2.3
+image: ruby:2.6
variables:
JEKYLL_ENV: production
LC_ALL: C.UTF-8
before_script:
+ - gem install bundler
- bundle install
test:
diff --git a/lib/gitlab/ci/templates/Pages/Middleman.gitlab-ci.yml b/lib/gitlab/ci/templates/Pages/Middleman.gitlab-ci.yml
index 57ac323dfdf..462b4737c4e 100644
--- a/lib/gitlab/ci/templates/Pages/Middleman.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Pages/Middleman.gitlab-ci.yml
@@ -1,5 +1,5 @@
# Full project: https://gitlab.com/pages/middleman
-image: ruby:2.3
+image: ruby:2.6
cache:
paths:
diff --git a/lib/gitlab/ci/templates/Pages/Nanoc.gitlab-ci.yml b/lib/gitlab/ci/templates/Pages/Nanoc.gitlab-ci.yml
index 7f037b5f5cf..b512f8d77e9 100644
--- a/lib/gitlab/ci/templates/Pages/Nanoc.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Pages/Nanoc.gitlab-ci.yml
@@ -1,5 +1,5 @@
# Full project: https://gitlab.com/pages/nanoc
-image: ruby:2.3
+image: ruby:2.6
pages:
script:
diff --git a/lib/gitlab/ci/templates/Pages/Octopress.gitlab-ci.yml b/lib/gitlab/ci/templates/Pages/Octopress.gitlab-ci.yml
index 6d912a89bc1..4318aadcaa6 100644
--- a/lib/gitlab/ci/templates/Pages/Octopress.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Pages/Octopress.gitlab-ci.yml
@@ -1,5 +1,5 @@
# Full project: https://gitlab.com/pages/octopress
-image: ruby:2.3
+image: ruby:2.6
pages:
script:
diff --git a/lib/gitlab/ci/templates/Security/Container-Scanning.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/Container-Scanning.gitlab-ci.yml
index f708e95c2cf..6efb6b4e273 100644
--- a/lib/gitlab/ci/templates/Security/Container-Scanning.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Security/Container-Scanning.gitlab-ci.yml
@@ -5,9 +5,7 @@ variables:
container_scanning:
stage: test
- image:
- name: registry.gitlab.com/gitlab-org/security-products/analyzers/klar:$CS_MAJOR_VERSION
- entrypoint: []
+ image: registry.gitlab.com/gitlab-org/security-products/analyzers/klar:$CS_MAJOR_VERSION
variables:
# By default, use the latest clair vulnerabilities database, however, allow it to be overridden here with a specific image
# to enable container scanning to run offline, or to provide a consistent list of vulnerabilities for integration testing purposes
@@ -22,10 +20,7 @@ container_scanning:
- name: $CLAIR_DB_IMAGE
alias: clair-vulnerabilities-db
script:
- # the kubernetes executor currently ignores the Docker image entrypoint value, so the start.sh script must
- # be explicitly executed here in order for this to work with both the kubernetes and docker executors
- # see this issue for more details https://gitlab.com/gitlab-org/gitlab-runner/issues/4125
- - /container-scanner/start.sh
+ - /analyzer run
artifacts:
reports:
container_scanning: gl-container-scanning-report.json
diff --git a/lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml
index 94b9d94fd39..020d1f323ee 100644
--- a/lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml
@@ -18,6 +18,7 @@ dast:
image:
name: "registry.gitlab.com/gitlab-org/security-products/dast:$DAST_VERSION"
variables:
+ GIT_STRATEGY: none
# URL to scan:
# DAST_WEBSITE: https://example.com/
#
diff --git a/lib/gitlab/ci/templates/Security/Dependency-Scanning.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/Dependency-Scanning.gitlab-ci.yml
index 5ff6413898f..3200220a332 100644
--- a/lib/gitlab/ci/templates/Security/Dependency-Scanning.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Security/Dependency-Scanning.gitlab-ci.yml
@@ -5,7 +5,8 @@
# How to set: https://docs.gitlab.com/ee/ci/yaml/#variables
variables:
- DS_ANALYZER_IMAGE_PREFIX: "registry.gitlab.com/gitlab-org/security-products/analyzers"
+ SECURITY_SCANNER_IMAGE_PREFIX: "registry.gitlab.com/gitlab-org/security-products"
+ DS_ANALYZER_IMAGE_PREFIX: "$SECURITY_SCANNER_IMAGE_PREFIX/analyzers"
DS_DEFAULT_ANALYZERS: "bundler-audit, retire.js, gemnasium, gemnasium-maven, gemnasium-python"
DS_MAJOR_VERSION: 2
DS_DISABLE_DIND: "false"
@@ -59,10 +60,12 @@ dependency_scanning:
BUNDLER_AUDIT_UPDATE_DISABLED \
BUNDLER_AUDIT_ADVISORY_DB_URL \
BUNDLER_AUDIT_ADVISORY_DB_REF_NAME \
+ RETIREJS_JS_ADVISORY_DB \
+ RETIREJS_NODE_ADVISORY_DB \
) \
--volume "$PWD:/code" \
--volume /var/run/docker.sock:/var/run/docker.sock \
- "registry.gitlab.com/gitlab-org/security-products/dependency-scanning:$DS_VERSION" /code
+ "$SECURITY_SCANNER_IMAGE_PREFIX/dependency-scanning:$DS_VERSION" /code
artifacts:
reports:
dependency_scanning: gl-dependency-scanning-report.json
diff --git a/lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml
index 51a1f4e549b..9f9975f9e1c 100644
--- a/lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml
@@ -36,9 +36,9 @@ sast:
export DOCKER_HOST='tcp://localhost:2375'
fi
fi
+ - ENVS=`printenv | grep -vE '^(DOCKER_|CI|GITLAB_|FF_|HOME|PWD|OLDPWD|PATH|SHLVL|HOSTNAME)' | sed -n '/^[^\t]/s/=.*//p' | sed '/^$/d' | sed 's/^/-e /g' | tr '\n' ' '`
- |
- ENVS=`printenv | grep -vE '^(DOCKER_|CI|GITLAB_|FF_|HOME|PWD|OLDPWD|PATH|SHLVL|HOSTNAME)' | sed -n '/^[^\t]/s/=.*//p' | sed '/^$/d' | sed 's/^/-e /g' | tr '\n' ' '`
- docker run "$ENVS" \
+ docker run $ENVS \
--volume "$PWD:/code" \
--volume /var/run/docker.sock:/var/run/docker.sock \
"registry.gitlab.com/gitlab-org/security-products/sast:$SAST_VERSION" /app/bin/run /code
diff --git a/lib/gitlab/ci/templates/Verify/Accessibility.gitlab-ci.yml b/lib/gitlab/ci/templates/Verify/Accessibility.gitlab-ci.yml
new file mode 100644
index 00000000000..5d9d3c74def
--- /dev/null
+++ b/lib/gitlab/ci/templates/Verify/Accessibility.gitlab-ci.yml
@@ -0,0 +1,19 @@
+# Read more about the feature here: https://docs.gitlab.com/ee/user/project/merge_requests/accessibility_testing.html
+
+stages:
+ - build
+ - test
+ - deploy
+ - accessibility
+
+a11y:
+ stage: accessibility
+ image: registry.gitlab.com/gitlab-org/ci-cd/accessibility:5.3.0-gitlab.2
+ script: /gitlab-accessibility.sh $a11y_urls
+ allow_failure: true
+ artifacts:
+ when: always
+ expose_as: 'Accessibility Reports'
+ paths: ['reports/']
+ rules:
+ - if: $a11y_urls
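
The job only runs when the `a11y_urls` variable is set; a hedged example of wiring the template into a project's `.gitlab-ci.yml` (the exact format `gitlab-accessibility.sh` expects beyond a single URL is an assumption):

    include:
      - template: Verify/Accessibility.gitlab-ci.yml

    variables:
      a11y_urls: "https://example.com"
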
diff --git a/lib/gitlab/ci/yaml_processor.rb b/lib/gitlab/ci/yaml_processor.rb
index ae3ff4a51e2..764047dae6d 100644
--- a/lib/gitlab/ci/yaml_processor.rb
+++ b/lib/gitlab/ci/yaml_processor.rb
@@ -57,7 +57,7 @@ module Gitlab
when: job[:when] || 'on_success',
environment: job[:environment_name],
coverage_regex: job[:coverage],
- yaml_variables: transform_to_yaml_variables(job_variables(name)),
+ yaml_variables: transform_to_yaml_variables(job[:variables]),
needs_attributes: job.dig(:needs, :job),
interruptible: job[:interruptible],
only: job[:only],
@@ -146,13 +146,6 @@ module Gitlab
end
end
- def job_variables(name)
- job_variables = @jobs.dig(name.to_sym, :variables)
-
- @variables.to_h
- .merge(job_variables.to_h)
- end
-
def transform_to_yaml_variables(variables)
variables.to_h.map do |key, value|
{ key: key.to_s, value: value, public: true }
diff --git a/lib/gitlab/config/entry/attributable.rb b/lib/gitlab/config/entry/attributable.rb
index 4deb233d10e..d266d5218de 100644
--- a/lib/gitlab/config/entry/attributable.rb
+++ b/lib/gitlab/config/entry/attributable.rb
@@ -10,7 +10,7 @@ module Gitlab
def attributes(*attributes)
attributes.flatten.each do |attribute|
if method_defined?(attribute)
- raise ArgumentError, "Method already defined: #{attribute}"
+ raise ArgumentError, "Method '#{attribute}' already defined in '#{name}'"
end
define_method(attribute) do
diff --git a/lib/gitlab/config/entry/configurable.rb b/lib/gitlab/config/entry/configurable.rb
index e7d441bb21c..571e7a5127e 100644
--- a/lib/gitlab/config/entry/configurable.rb
+++ b/lib/gitlab/config/entry/configurable.rb
@@ -75,6 +75,9 @@ module Gitlab
# rubocop: disable CodeReuse/ActiveRecord
def entry(key, entry, description: nil, default: nil, inherit: nil, reserved: nil, metadata: {})
+ entry_name = key.to_sym
+ raise ArgumentError, "Entry '#{key}' already defined in '#{name}'" if @nodes.to_h[entry_name]
+
factory = ::Gitlab::Config::Entry::Factory.new(entry)
.with(description: description)
.with(default: default)
@@ -82,20 +85,38 @@ module Gitlab
.with(reserved: reserved)
.metadata(metadata)
- (@nodes ||= {}).merge!(key.to_sym => factory)
+ @nodes ||= {}
+ @nodes[entry_name] = factory
+
+ helpers(entry_name)
end
# rubocop: enable CodeReuse/ActiveRecord
- def helpers(*nodes)
+ def dynamic_helpers(*nodes)
+ helpers(*nodes, dynamic: true)
+ end
+
+ def helpers(*nodes, dynamic: false)
nodes.each do |symbol|
+ if method_defined?("#{symbol}_defined?") || method_defined?("#{symbol}_entry") || method_defined?("#{symbol}_value")
+ raise ArgumentError, "Method '#{symbol}_defined?', '#{symbol}_entry' or '#{symbol}_value' already defined in '#{name}'"
+ end
+
+ unless @nodes.to_h[symbol]
+ raise ArgumentError, "Entry for #{symbol} is undefined" unless dynamic
+ end
+
define_method("#{symbol}_defined?") do
entries[symbol]&.specified?
end
- define_method("#{symbol}_value") do
- return unless entries[symbol] && entries[symbol].valid?
+ define_method("#{symbol}_entry") do
+ entries[symbol]
+ end
- entries[symbol].value
+ define_method("#{symbol}_value") do
+ entry = entries[symbol]
+ entry.value if entry&.valid?
end
end
end
diff --git a/lib/gitlab/config_checker/puma_rugged_checker.rb b/lib/gitlab/config_checker/puma_rugged_checker.rb
new file mode 100644
index 00000000000..82c59f3328b
--- /dev/null
+++ b/lib/gitlab/config_checker/puma_rugged_checker.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ConfigChecker
+ module PumaRuggedChecker
+ extend self
+ extend Gitlab::Git::RuggedImpl::UseRugged
+
+ def check
+ notices = []
+
+ if running_puma_with_multiple_threads? && rugged_enabled_through_feature_flag?
+ link_start = '<a href="https://docs.gitlab.com/ee/administration/operations/puma.html#performance-caveat-when-using-puma-with-rugged">'
+ link_end = '</a>'
+ notices << {
+ type: 'warning',
+ message: _('Puma is running with a thread count above 1 and the Rugged '\
+ 'service is enabled. This may decrease performance in some environments. '\
+ 'See our %{link_start}documentation%{link_end} '\
+ 'for details of this issue.') % { link_start: link_start, link_end: link_end }
+ }
+ end
+
+ notices
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/cycle_analytics/usage_data.rb b/lib/gitlab/cycle_analytics/usage_data.rb
index acfb641aeec..e58def57e69 100644
--- a/lib/gitlab/cycle_analytics/usage_data.rb
+++ b/lib/gitlab/cycle_analytics/usage_data.rb
@@ -3,15 +3,32 @@
module Gitlab
module CycleAnalytics
class UsageData
+ include Gitlab::Utils::StrongMemoize
PROJECTS_LIMIT = 10
- attr_reader :projects, :options
+ attr_reader :options
def initialize
- @projects = Project.sorted_by_activity.limit(PROJECTS_LIMIT)
@options = { from: 7.days.ago }
end
+ def projects
+ strong_memoize(:projects) do
+ projects = Project.where.not(last_activity_at: nil).order(last_activity_at: :desc).limit(10) +
+ Project.where.not(last_repository_updated_at: nil).order(last_repository_updated_at: :desc).limit(10)
+
+ projects = projects.uniq.sort_by do |project|
+ [project.last_activity_at, project.last_repository_updated_at].min
+ end
+
+ if projects.size < 10
+ projects.concat(Project.where(last_activity_at: nil, last_repository_updated_at: nil).limit(10))
+ end
+
+ projects.uniq.first(10)
+ end
+ end
+
def to_json(*)
total = 0
diff --git a/lib/gitlab/danger/commit_linter.rb b/lib/gitlab/danger/commit_linter.rb
index c0748a4b8e6..8f51ef05f69 100644
--- a/lib/gitlab/danger/commit_linter.rb
+++ b/lib/gitlab/danger/commit_linter.rb
@@ -14,6 +14,7 @@ module Gitlab
MAX_CHANGED_LINES_IN_COMMIT = 30
SHORT_REFERENCE_REGEX = %r{([\w\-\/]+)?(#|!|&|%)\d+\b}.freeze
DEFAULT_SUBJECT_DESCRIPTION = 'commit subject'
+ WIP_PREFIX = 'WIP: '
PROBLEMS = {
subject_too_short: "The %s must contain at least #{MIN_SUBJECT_WORDS_COUNT} words",
subject_too_long: "The %s may not be longer than #{MAX_LINE_LENGTH} characters",
@@ -164,7 +165,7 @@ module Gitlab
end
def subject
- message_parts[0]
+ message_parts[0].delete_prefix(WIP_PREFIX)
end
def separator
@@ -199,7 +200,9 @@ module Gitlab
end
def subject_starts_with_lowercase?
- first_char = subject[0]
+ first_char = subject.sub(/\A\[.+\]\s/, '')[0]
+ first_char_downcased = first_char.downcase
+ return true unless ('a'..'z').cover?(first_char_downcased)
first_char.downcase == first_char
end
diff --git a/lib/gitlab/danger/helper.rb b/lib/gitlab/danger/helper.rb
index 5363533ace5..c5174da4b7c 100644
--- a/lib/gitlab/danger/helper.rb
+++ b/lib/gitlab/danger/helper.rb
@@ -118,19 +118,22 @@ module Gitlab
\.haml-lint_todo.yml |
babel\.config\.js |
jest\.config\.js |
- karma\.config\.js |
- webpack\.config\.js |
package\.json |
yarn\.lock |
+ config/.+\.js |
\.gitlab/ci/frontend\.gitlab-ci\.yml
)\z}x => :frontend,
%r{\A(ee/)?db/(?!fixtures)[^/]+} => :database,
%r{\A(ee/)?lib/gitlab/(database|background_migration|sql|github_import)(/|\.rb)} => :database,
%r{\A(app/models/project_authorization|app/services/users/refresh_authorized_projects_service)(/|\.rb)} => :database,
+ %r{\A(ee/)?app/finders/} => :database,
%r{\Arubocop/cop/migration(/|\.rb)} => :database,
%r{\A(\.gitlab-ci\.yml\z|\.gitlab\/ci)} => :engineering_productivity,
+ %r{\A\.overcommit\.yml\.example\z} => :engineering_productivity,
+ %r{\Atooling/overcommit/} => :engineering_productivity,
+ %r{\A.editorconfig\z} => :engineering_productivity,
%r{Dangerfile\z} => :engineering_productivity,
%r{\A(ee/)?(danger/|lib/gitlab/danger/)} => :engineering_productivity,
%r{\A(ee/)?scripts/} => :engineering_productivity,
diff --git a/lib/gitlab/data_builder/push.rb b/lib/gitlab/data_builder/push.rb
index 41ceeb329b3..af363705bed 100644
--- a/lib/gitlab/data_builder/push.rb
+++ b/lib/gitlab/data_builder/push.rb
@@ -35,7 +35,8 @@ module Gitlab
commits: [
{
id: "c5feabde2d8cd023215af4d2ceeb7a64839fc428",
- message: "Add simple search to projects in public area",
+ message: "Add simple search to projects in public area\n\ncommit message body",
+ title: "Add simple search to projects in public area",
timestamp: "2013-05-13T18:18:08+00:00",
url: "https://test.example.com/gitlab/gitlab/-/commit/c5feabde2d8cd023215af4d2ceeb7a64839fc428",
author: {
diff --git a/lib/gitlab/database/batch_count.rb b/lib/gitlab/database/batch_count.rb
index a9d4665bc5f..728e0d423af 100644
--- a/lib/gitlab/database/batch_count.rb
+++ b/lib/gitlab/database/batch_count.rb
@@ -28,7 +28,7 @@ module Gitlab
class BatchCounter
FALLBACK = -1
- MIN_REQUIRED_BATCH_SIZE = 2_000
+ MIN_REQUIRED_BATCH_SIZE = 1_250
MAX_ALLOWED_LOOPS = 10_000
SLEEP_TIME_IN_SECONDS = 0.01 # 10 msec sleep
# Each query should take <<500ms https://gitlab.com/gitlab-org/gitlab/-/merge_requests/22705
diff --git a/lib/gitlab/database/connection_timer.rb b/lib/gitlab/database/connection_timer.rb
new file mode 100644
index 00000000000..ef8d52ba71c
--- /dev/null
+++ b/lib/gitlab/database/connection_timer.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Database
+ class ConnectionTimer
+ DEFAULT_INTERVAL = 3600
+ RANDOMIZATION_INTERVAL = 600
+
+ class << self
+ def configure
+ yield self
+ end
+
+ def starting_now
+ # add a small amount of randomization to the interval, so reconnects don't all occur at once
+ new(interval_with_randomization, current_clock_value)
+ end
+
+ attr_writer :interval
+
+ def interval
+ @interval ||= DEFAULT_INTERVAL
+ end
+
+ def interval_with_randomization
+ interval + rand(RANDOMIZATION_INTERVAL) if interval.positive?
+ end
+
+ def current_clock_value
+ Concurrent.monotonic_time
+ end
+ end
+
+ attr_reader :interval, :starting_clock_value
+
+ def initialize(interval, starting_clock_value)
+ @interval = interval
+ @starting_clock_value = starting_clock_value
+ end
+
+ def expired?
+ interval&.positive? && self.class.current_clock_value > (starting_clock_value + interval)
+ end
+
+ def reset!
+ @starting_clock_value = self.class.current_clock_value
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/database/migration_helpers.rb b/lib/gitlab/database/migration_helpers.rb
index 3b6684b861c..82a84508959 100644
--- a/lib/gitlab/database/migration_helpers.rb
+++ b/lib/gitlab/database/migration_helpers.rb
@@ -215,7 +215,7 @@ module Gitlab
fk_name = name || concurrent_foreign_key_name(source, column)
unless foreign_key_exists?(source, name: fk_name)
- raise "cannot find #{fk_name} on #{source} table"
+ raise missing_schema_object_message(source, "foreign key", fk_name)
end
disable_statement_timeout do
@@ -235,11 +235,17 @@ module Gitlab
# PostgreSQL constraint names have a limit of 63 bytes. The logic used
# here is based on Rails' foreign_key_name() method, which unfortunately
# is private so we can't rely on it directly.
- def concurrent_foreign_key_name(table, column)
+ #
+ # prefix:
+ # - The default prefix is `fk_` for backward compatibility with the existing
+ # concurrent foreign key helpers.
+ # - For standard rails foreign keys the prefix is `fk_rails_`
+ #
+ def concurrent_foreign_key_name(table, column, prefix: 'fk_')
identifier = "#{table}_#{column}_fk"
hashed_identifier = Digest::SHA256.hexdigest(identifier).first(10)
- "fk_#{hashed_identifier}"
+ "#{prefix}#{hashed_identifier}"
end
# Long-running migrations may take more than the timeout allowed by
@@ -688,7 +694,7 @@ module Gitlab
start_id, end_id = batch.pluck('MIN(id), MAX(id)').first
max_index = index
- BackgroundMigrationWorker.perform_in(
+ migrate_in(
index * interval,
'CopyColumn',
[table, column, temp_column, start_id, end_id]
@@ -697,7 +703,7 @@ module Gitlab
# Schedule the renaming of the column to happen (initially) 1 hour after
# the last batch finished.
- BackgroundMigrationWorker.perform_in(
+ migrate_in(
(max_index * interval) + 1.hour,
'CleanupConcurrentTypeChange',
[table, column, temp_column]
@@ -779,7 +785,7 @@ module Gitlab
start_id, end_id = batch.pluck('MIN(id), MAX(id)').first
max_index = index
- BackgroundMigrationWorker.perform_in(
+ migrate_in(
index * interval,
'CopyColumn',
[table, old_column, new_column, start_id, end_id]
@@ -788,7 +794,7 @@ module Gitlab
# Schedule the renaming of the column to happen (initially) 1 hour after
# the last batch finished.
- BackgroundMigrationWorker.perform_in(
+ migrate_in(
(max_index * interval) + 1.hour,
'CleanupConcurrentRename',
[table, old_column, new_column]
@@ -925,7 +931,10 @@ module Gitlab
def column_for(table, name)
name = name.to_s
- columns(table).find { |column| column.name == name }
+ column = columns(table).find { |column| column.name == name }
+ raise(missing_schema_object_message(table, "column", name)) if column.nil?
+
+ column
end
# This will replace the first occurrence of a string in a column with
@@ -1024,14 +1033,14 @@ into similar problems in the future (e.g. when new tables are created).
# We push multiple jobs at a time to reduce the time spent in
# Sidekiq/Redis operations. We're using this buffer based approach so we
# don't need to run additional queries for every range.
- BackgroundMigrationWorker.bulk_perform_async(jobs)
+ bulk_migrate_async(jobs)
jobs.clear
end
jobs << [job_class_name, [start_id, end_id]]
end
- BackgroundMigrationWorker.bulk_perform_async(jobs) unless jobs.empty?
+ bulk_migrate_async(jobs) unless jobs.empty?
end
# Queues background migration jobs for an entire table, batched by ID range.
@@ -1042,6 +1051,7 @@ into similar problems in the future (e.g. when new tables are created).
# job_class_name - The background migration job class as a string
# delay_interval - The duration between each job's scheduled time (must respond to `to_f`)
# batch_size - The maximum number of rows per job
+ # other_arguments - Other arguments to send to the job
#
# Example:
#
@@ -1059,7 +1069,7 @@ into similar problems in the future (e.g. when new tables are created).
# # do something
# end
# end
- def queue_background_migration_jobs_by_range_at_intervals(model_class, job_class_name, delay_interval, batch_size: BACKGROUND_MIGRATION_BATCH_SIZE)
+ def queue_background_migration_jobs_by_range_at_intervals(model_class, job_class_name, delay_interval, batch_size: BACKGROUND_MIGRATION_BATCH_SIZE, other_arguments: [])
raise "#{model_class} does not have an ID to use for batch ranges" unless model_class.column_names.include?('id')
# To not overload the worker too much we enforce a minimum interval both
@@ -1074,7 +1084,7 @@ into similar problems in the future (e.g. when new tables are created).
# `BackgroundMigrationWorker.bulk_perform_in` schedules all jobs for
# the same time, which is not helpful in most cases where we wish to
# spread the work over time.
- BackgroundMigrationWorker.perform_in(delay_interval * index, job_class_name, [start_id, end_id])
+ migrate_in(delay_interval * index, job_class_name, [start_id, end_id] + other_arguments)
end
end
@@ -1133,8 +1143,44 @@ into similar problems in the future (e.g. when new tables are created).
execute(sql)
end
+ def migrate_async(*args)
+ with_migration_context do
+ BackgroundMigrationWorker.perform_async(*args)
+ end
+ end
+
+ def migrate_in(*args)
+ with_migration_context do
+ BackgroundMigrationWorker.perform_in(*args)
+ end
+ end
+
+ def bulk_migrate_in(*args)
+ with_migration_context do
+ BackgroundMigrationWorker.bulk_perform_in(*args)
+ end
+ end
+
+ def bulk_migrate_async(*args)
+ with_migration_context do
+ BackgroundMigrationWorker.bulk_perform_async(*args)
+ end
+ end
+
private
+ def missing_schema_object_message(table, type, name)
+ <<~MESSAGE
+ Could not find #{type} "#{name}" on table "#{table}" which was referenced during the migration.
+ This issue could be caused by the database schema straying from the expected state.
+
+ To resolve this issue, please verify:
+ 1. all previous migrations have completed
+ 2. the database objects used in this migration match the Rails definition in schema.rb or structure.sql
+
+ MESSAGE
+ end
+
def tables_match?(target_table, foreign_key_table)
target_table.blank? || foreign_key_table == target_table
end
@@ -1191,6 +1237,10 @@ into similar problems in the future (e.g. when new tables are created).
your migration class
ERROR
end
+
+ def with_migration_context(&block)
+ Gitlab::ApplicationContext.with_context(caller_id: self.class.to_s, &block)
+ end
end
end
end
diff --git a/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin.rb b/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin.rb
new file mode 100644
index 00000000000..9f664fa2137
--- /dev/null
+++ b/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Database
+ module PostgresqlAdapter
+ module ForceDisconnectableMixin
+ extend ActiveSupport::Concern
+
+ prepended do
+ set_callback :checkin, :after, :force_disconnect_if_old!
+ end
+
+ def force_disconnect_if_old!
+ if force_disconnect_timer.expired?
+ disconnect!
+ reset_force_disconnect_timer!
+ end
+ end
+
+ def reset_force_disconnect_timer!
+ force_disconnect_timer.reset!
+ end
+
+ def force_disconnect_timer
+ @force_disconnect_timer ||= ConnectionTimer.starting_now
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/diff/highlight_cache.rb b/lib/gitlab/diff/highlight_cache.rb
index 0a8fbb9a673..e79127108b4 100644
--- a/lib/gitlab/diff/highlight_cache.rb
+++ b/lib/gitlab/diff/highlight_cache.rb
@@ -17,6 +17,14 @@ module Gitlab
buckets [100, 1000, 10000, 100000, 1000000, 10000000]
end
+ define_counter :gitlab_redis_diff_caching_hit do
+ docstring 'Redis diff caching hits'
+ end
+
+ define_counter :gitlab_redis_diff_caching_miss do
+ docstring 'Redis diff caching misses'
+ end
+
def initialize(diff_collection)
@diff_collection = diff_collection
end
@@ -93,6 +101,8 @@ module Gitlab
#
redis.expire(key, EXPIRATION)
end
+
+ record_memory_usage(fetch_memory_usage(redis, key))
end
# Subsequent read_file calls would need the latest cache.
@@ -101,6 +111,23 @@ module Gitlab
clear_memoization(:cacheable_files)
end
+ def record_memory_usage(memory_usage)
+ if memory_usage
+ self.class.gitlab_redis_diff_caching_memory_usage_bytes.observe({}, memory_usage)
+ end
+ end
+
+ def fetch_memory_usage(redis, key)
+ # Redis versions prior to 4.0.0 do not support memory usage reporting
+        # for a specific key. As of 11-March-2020 we support Redis 3.x, so we
+        # need to account for this. We can remove this check once we
+ # officially cease supporting versions <4.0.0.
+ #
+ return if Gem::Version.new(redis.info["redis_version"]) < Gem::Version.new("4")
+
+ redis.memory("USAGE", key)
+ end
+
def file_paths
strong_memoize(:file_paths) do
diff_files.collect(&:file_path)
diff --git a/lib/gitlab/elasticsearch/logs.rb b/lib/gitlab/elasticsearch/logs.rb
new file mode 100644
index 00000000000..f976f6ce305
--- /dev/null
+++ b/lib/gitlab/elasticsearch/logs.rb
@@ -0,0 +1,150 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Elasticsearch
+ class Logs
+ InvalidCursor = Class.new(RuntimeError)
+
+ # How many log lines to fetch in a query
+ LOGS_LIMIT = 500
+
+ def initialize(client)
+ @client = client
+ end
+
+ def pod_logs(namespace, pod_name, container_name: nil, search: nil, start_time: nil, end_time: nil, cursor: nil)
+ query = { bool: { must: [] } }.tap do |q|
+ filter_pod_name(q, pod_name)
+ filter_namespace(q, namespace)
+ filter_container_name(q, container_name)
+ filter_search(q, search)
+ filter_times(q, start_time, end_time)
+ end
+
+ body = build_body(query, cursor)
+ response = @client.search body: body
+
+ format_response(response)
+ end
+
+ private
+
+ def build_body(query, cursor = nil)
+ body = {
+ query: query,
+ # reverse order so we can query N-most recent records
+ sort: [
+ { "@timestamp": { order: :desc } },
+ { "offset": { order: :desc } }
+ ],
+ # only return these fields in the response
+ _source: ["@timestamp", "message"],
+ # fixed limit for now, we should support paginated queries
+ size: ::Gitlab::Elasticsearch::Logs::LOGS_LIMIT
+ }
+
+ unless cursor.nil?
+ body[:search_after] = decode_cursor(cursor)
+ end
+
+ body
+ end
+
+ def filter_pod_name(query, pod_name)
+ query[:bool][:must] << {
+ match_phrase: {
+ "kubernetes.pod.name" => {
+ query: pod_name
+ }
+ }
+ }
+ end
+
+ def filter_namespace(query, namespace)
+ query[:bool][:must] << {
+ match_phrase: {
+ "kubernetes.namespace" => {
+ query: namespace
+ }
+ }
+ }
+ end
+
+ def filter_container_name(query, container_name)
+ # A pod can contain multiple containers.
+ # By default we return logs from every container
+ return if container_name.nil?
+
+ query[:bool][:must] << {
+ match_phrase: {
+ "kubernetes.container.name" => {
+ query: container_name
+ }
+ }
+ }
+ end
+
+ def filter_search(query, search)
+ return if search.nil?
+
+ query[:bool][:must] << {
+ simple_query_string: {
+ query: search,
+ fields: [:message],
+ default_operator: :and
+ }
+ }
+ end
+
+ def filter_times(query, start_time, end_time)
+ return unless start_time || end_time
+
+ time_range = { range: { :@timestamp => {} } }.tap do |tr|
+ tr[:range][:@timestamp][:gte] = start_time if start_time
+ tr[:range][:@timestamp][:lt] = end_time if end_time
+ end
+
+ query[:bool][:filter] = [time_range]
+ end
+
+ def format_response(response)
+ results = response.fetch("hits", {}).fetch("hits", [])
+ last_result = results.last
+ results = results.map do |hit|
+ {
+ timestamp: hit["_source"]["@timestamp"],
+ message: hit["_source"]["message"]
+ }
+ end
+
+ # we queried for the N-most recent records but we want them ordered oldest to newest
+ {
+ logs: results.reverse,
+ cursor: last_result.nil? ? nil : encode_cursor(last_result["sort"])
+ }
+ end
+
+ # we want to hide the implementation details of the search_after parameter from the frontend
+ # behind a single easily transmitted value
+ def encode_cursor(obj)
+ obj.join(',')
+ end
+
+ def decode_cursor(obj)
+ cursor = obj.split(',').map(&:to_i)
+
+ unless valid_cursor(cursor)
+ raise InvalidCursor, "invalid cursor format"
+ end
+
+ cursor
+ end
+
+ def valid_cursor(cursor)
+ cursor.instance_of?(Array) &&
+ cursor.length == 2 &&
+ cursor.map {|i| i.instance_of?(Integer)}.reduce(:&)
+ end
+ end
+ end
+end
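
A sketch of querying the new class, assuming `client` is an Elasticsearch::Client pointed at the cluster's log indices (namespace, pod name and times are illustrative):

    logs = Gitlab::Elasticsearch::Logs.new(client)

    page = logs.pod_logs(
      'production',                       # Kubernetes namespace
      'web-6b7f9c4d9c-abcde',             # pod name
      container_name: 'app',
      search: 'error',
      start_time: '2020-03-01T00:00:00Z',
      end_time: '2020-03-02T00:00:00Z'
    )

    page[:logs]    # up to LOGS_LIMIT entries, ordered oldest to newest
    page[:cursor]  # e.g. "1583020800000,42"; pass back as cursor: for the next page
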
diff --git a/lib/gitlab/email.rb b/lib/gitlab/email.rb
new file mode 100644
index 00000000000..5f935880764
--- /dev/null
+++ b/lib/gitlab/email.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Email
+ ProcessingError = Class.new(StandardError)
+ EmailUnparsableError = Class.new(ProcessingError)
+ SentNotificationNotFoundError = Class.new(ProcessingError)
+ ProjectNotFound = Class.new(ProcessingError)
+ EmptyEmailError = Class.new(ProcessingError)
+ AutoGeneratedEmailError = Class.new(ProcessingError)
+ UserNotFoundError = Class.new(ProcessingError)
+ UserBlockedError = Class.new(ProcessingError)
+ UserNotAuthorizedError = Class.new(ProcessingError)
+ NoteableNotFoundError = Class.new(ProcessingError)
+ InvalidRecordError = Class.new(ProcessingError)
+ InvalidNoteError = Class.new(InvalidRecordError)
+ InvalidIssueError = Class.new(InvalidRecordError)
+ InvalidMergeRequestError = Class.new(InvalidRecordError)
+ UnknownIncomingEmail = Class.new(ProcessingError)
+ InvalidAttachment = Class.new(ProcessingError)
+ end
+end
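
Moving the error classes into their own file lets other code reference the hierarchy without loading the receiver. A sketch of rescuing through the shared base class (`raw_message` is a placeholder for the full incoming email):

    begin
      Gitlab::Email::Receiver.new(raw_message).execute
    rescue Gitlab::Email::UserBlockedError, Gitlab::Email::UserNotAuthorizedError => e
      Rails.logger.warn("email rejected for authorization reasons: #{e.class}")
    rescue Gitlab::Email::ProcessingError => e
      # catches every other failure defined above
      Rails.logger.warn("email could not be processed: #{e.class}")
    end
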
diff --git a/lib/gitlab/email/receiver.rb b/lib/gitlab/email/receiver.rb
index f028102da9b..bf6c28b9f90 100644
--- a/lib/gitlab/email/receiver.rb
+++ b/lib/gitlab/email/receiver.rb
@@ -5,23 +5,6 @@ require_dependency 'gitlab/email/handler'
# Inspired in great part by Discourse's Email::Receiver
module Gitlab
module Email
- ProcessingError = Class.new(StandardError)
- EmailUnparsableError = Class.new(ProcessingError)
- SentNotificationNotFoundError = Class.new(ProcessingError)
- ProjectNotFound = Class.new(ProcessingError)
- EmptyEmailError = Class.new(ProcessingError)
- AutoGeneratedEmailError = Class.new(ProcessingError)
- UserNotFoundError = Class.new(ProcessingError)
- UserBlockedError = Class.new(ProcessingError)
- UserNotAuthorizedError = Class.new(ProcessingError)
- NoteableNotFoundError = Class.new(ProcessingError)
- InvalidRecordError = Class.new(ProcessingError)
- InvalidNoteError = Class.new(InvalidRecordError)
- InvalidIssueError = Class.new(InvalidRecordError)
- InvalidMergeRequestError = Class.new(InvalidRecordError)
- UnknownIncomingEmail = Class.new(ProcessingError)
- InvalidAttachment = Class.new(ProcessingError)
-
class Receiver
def initialize(raw)
@raw = raw
@@ -34,8 +17,7 @@ module Gitlab
ignore_auto_reply!(mail)
- mail_key = extract_mail_key(mail)
- handler = Handler.for(mail, mail_key)
+ handler = find_handler(mail)
raise UnknownIncomingEmail unless handler
@@ -46,6 +28,11 @@ module Gitlab
private
+ def find_handler(mail)
+ mail_key = extract_mail_key(mail)
+ Handler.for(mail, mail_key)
+ end
+
def build_mail
Mail::Message.new(@raw)
rescue Encoding::UndefinedConversionError,
diff --git a/lib/gitlab/encoding_helper.rb b/lib/gitlab/encoding_helper.rb
index 88729babb2b..67f8d691a77 100644
--- a/lib/gitlab/encoding_helper.rb
+++ b/lib/gitlab/encoding_helper.rb
@@ -50,7 +50,7 @@ module Gitlab
detect && detect[:type] == :binary
end
- def encode_utf8(message)
+ def encode_utf8(message, replace: "")
message = force_encode_utf8(message)
return message if message.valid_encoding?
@@ -64,7 +64,7 @@ module Gitlab
''
end
else
- clean(message)
+ clean(message, replace: replace)
end
rescue ArgumentError
nil
@@ -94,8 +94,13 @@ module Gitlab
message.force_encoding("UTF-8")
end
- def clean(message)
- message.encode("UTF-16BE", undef: :replace, invalid: :replace, replace: "".encode("UTF-16BE"))
+ def clean(message, replace: "")
+ message.encode(
+ "UTF-16BE",
+ undef: :replace,
+ invalid: :replace,
+ replace: replace.encode("UTF-16BE")
+ )
.encode("UTF-8")
.gsub("\0".encode("UTF-8"), "")
end
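
The new replace: keyword lets callers decide what undecodable bytes become; the default of "" preserves the old behaviour of dropping them. A sketch, assuming a class that mixes in the helper and a `binary_blob` string placeholder:

    class ExampleSanitizer
      include Gitlab::EncodingHelper
    end

    helper = ExampleSanitizer.new

    # Default: bytes that cannot be mapped to UTF-8 are removed.
    helper.encode_utf8(binary_blob)

    # With the keyword: substitute a visible replacement character instead.
    helper.encode_utf8(binary_blob, replace: "\u{FFFD}")
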
diff --git a/lib/gitlab/experimentation.rb b/lib/gitlab/experimentation.rb
index 7c59267c0b6..30c8eaf605a 100644
--- a/lib/gitlab/experimentation.rb
+++ b/lib/gitlab/experimentation.rb
@@ -40,7 +40,7 @@ module Gitlab
extend ActiveSupport::Concern
included do
- before_action :set_experimentation_subject_id_cookie
+ before_action :set_experimentation_subject_id_cookie, unless: :dnt_enabled?
helper_method :experiment_enabled?
end
@@ -56,7 +56,12 @@ module Gitlab
end
def experiment_enabled?(experiment_key)
- Experimentation.enabled_for_user?(experiment_key, experimentation_subject_index) || forced_enabled?(experiment_key)
+ return false if dnt_enabled?
+
+ return true if Experimentation.enabled_for_user?(experiment_key, experimentation_subject_index)
+ return true if forced_enabled?(experiment_key)
+
+ false
end
def track_experiment_event(experiment_key, action, value = nil)
@@ -73,6 +78,10 @@ module Gitlab
private
+ def dnt_enabled?
+ Gitlab::Utils.to_boolean(request.headers['DNT'])
+ end
+
def experimentation_subject_id
cookies.signed[:experimentation_subject_id]
end
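
Experiment assignment is now skipped entirely when the browser sends a truthy Do Not Track header; Gitlab::Utils.to_boolean does the interpretation. A sketch of the header values involved (the strings are examples):

    Gitlab::Utils.to_boolean('1')     # => true  -> cookie not set, experiment_enabled? returns false
    Gitlab::Utils.to_boolean('true')  # => true
    Gitlab::Utils.to_boolean('0')     # => false -> experiments behave as before
    Gitlab::Utils.to_boolean(nil)     # => nil   -> header absent, treated as DNT off
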
diff --git a/lib/gitlab/file_type_detection.rb b/lib/gitlab/file_type_detection.rb
index e052792675a..475d50e37bf 100644
--- a/lib/gitlab/file_type_detection.rb
+++ b/lib/gitlab/file_type_detection.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-# The method `filename` must be defined in classes that use this module.
+# The method `filename` must be defined in classes that mix in this module.
#
# This module is intended to be used as a helper and not a security gate
# to validate that a file is safe, as it identifies files only by the
@@ -35,6 +35,13 @@ module Gitlab
DANGEROUS_VIDEO_EXT = [].freeze # None, yet
DANGEROUS_AUDIO_EXT = [].freeze # None, yet
+ def self.extension_match?(filename, extensions)
+ return false unless filename.present?
+
+ extension = File.extname(filename).delete('.')
+ extensions.include?(extension.downcase)
+ end
+
def image?
extension_match?(SAFE_IMAGE_EXT)
end
@@ -74,10 +81,7 @@ module Gitlab
private
def extension_match?(extensions)
- return false unless filename
-
- extension = File.extname(filename).delete('.')
- extensions.include?(extension.downcase)
+ ::Gitlab::FileTypeDetection.extension_match?(filename, extensions)
end
end
end
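
Extracting extension_match? to a module function makes the check usable without mixing the module in. A sketch (the extension list is illustrative):

    image_exts = %w[png jpg jpeg]

    Gitlab::FileTypeDetection.extension_match?('avatar.PNG', image_exts)  # => true
    Gitlab::FileTypeDetection.extension_match?('notes.txt', image_exts)   # => false
    Gitlab::FileTypeDetection.extension_match?(nil, image_exts)           # => false
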
diff --git a/lib/gitlab/git/blob.rb b/lib/gitlab/git/blob.rb
index f2a6211f270..5579449bf57 100644
--- a/lib/gitlab/git/blob.rb
+++ b/lib/gitlab/git/blob.rb
@@ -35,6 +35,11 @@ module Gitlab
docstring 'blob.truncated? == false'
end
+ define_histogram :gitlab_blob_size do
+ docstring 'Gitlab::Git::Blob size'
+ buckets [1_000, 5_000, 10_000, 50_000, 100_000, 500_000, 1_000_000]
+ end
+
class << self
def find(repository, sha, path, limit: MAX_DATA_DISPLAY_SIZE)
tree_entry(repository, sha, path, limit)
@@ -122,6 +127,9 @@ module Gitlab
# Retain the actual size before it is encoded
@loaded_size = @data.bytesize if @data
@loaded_all_data = @loaded_size == size
+
+ record_metric_blob_size
+ record_metric_truncated(truncated?)
end
def binary_in_repo?
@@ -157,7 +165,9 @@ module Gitlab
end
def truncated?
- size && (size > loaded_size)
+ return false unless size && loaded_size
+
+ size > loaded_size
end
# Valid LFS object pointer is a text file consisting of
@@ -197,6 +207,20 @@ module Gitlab
private
+ def record_metric_blob_size
+ return unless size
+
+ self.class.gitlab_blob_size.observe({}, size)
+ end
+
+ def record_metric_truncated(bool)
+ if bool
+ self.class.gitlab_blob_truncated_true.increment
+ else
+ self.class.gitlab_blob_truncated_false.increment
+ end
+ end
+
def has_lfs_version_key?
!empty? && text_in_repo? && data.start_with?("version https://git-lfs.github.com/spec")
end
diff --git a/lib/gitlab/git/repository.rb b/lib/gitlab/git/repository.rb
index 6bfe744a5cd..9adabd4e8fe 100644
--- a/lib/gitlab/git/repository.rb
+++ b/lib/gitlab/git/repository.rb
@@ -152,6 +152,12 @@ module Gitlab
end
end
+ def replicate(source_repository)
+ wrapped_gitaly_errors do
+ gitaly_repository_client.replicate(source_repository)
+ end
+ end
+
def expire_has_local_branches_cache
clear_memoization(:has_local_branches)
end
@@ -322,6 +328,7 @@ module Gitlab
limit: 10,
offset: 0,
path: nil,
+ author: nil,
follow: false,
skip_merges: false,
after: nil,
@@ -766,12 +773,6 @@ module Gitlab
!has_visible_content?
end
- def fetch_repository_as_mirror(repository)
- wrapped_gitaly_errors do
- gitaly_remote_client.fetch_internal_remote(repository)
- end
- end
-
# Fetch remote for repository
#
# remote - remote name
@@ -792,6 +793,14 @@ module Gitlab
end
end
+ def import_repository(url)
+ raise ArgumentError, "don't use disk paths with import_repository: #{url.inspect}" if url.start_with?('.', '/')
+
+ wrapped_gitaly_errors do
+ gitaly_repository_client.import_repository(url)
+ end
+ end
+
def blob_at(sha, path)
Gitlab::Git::Blob.find(self, sha, path) unless Gitlab::Git.blank_ref?(sha)
end
@@ -841,10 +850,9 @@ module Gitlab
end
end
- def squash(user, squash_id, branch:, start_sha:, end_sha:, author:, message:)
+ def squash(user, squash_id, start_sha:, end_sha:, author:, message:)
wrapped_gitaly_errors do
- gitaly_operation_client.user_squash(user, squash_id, branch,
- start_sha, end_sha, author, message)
+ gitaly_operation_client.user_squash(user, squash_id, start_sha, end_sha, author, message)
end
end
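
Both new repository methods delegate to Gitaly RPCs (see the RepositoryService changes further below). A sketch of calling them, assuming `project` is a Project and `source` is another Gitlab::Git::Repository:

    repo = project.repository.raw_repository

    # Import from an HTTP(S) URL; local disk paths are rejected by the
    # ArgumentError guard above.
    repo.import_repository('https://example.com/group/source.git')

    # Mirror the contents of another Gitaly-hosted repository into this one.
    repo.replicate(source)
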
diff --git a/lib/gitlab/git/rugged_impl/use_rugged.rb b/lib/gitlab/git/rugged_impl/use_rugged.rb
index f63e35030c1..f9573bedba7 100644
--- a/lib/gitlab/git/rugged_impl/use_rugged.rb
+++ b/lib/gitlab/git/rugged_impl/use_rugged.rb
@@ -15,12 +15,6 @@ module Gitlab
Gitlab::GitalyClient.can_use_disk?(repo.storage)
end
- def running_puma_with_multiple_threads?
- return false unless Gitlab::Runtime.puma?
-
- ::Puma.respond_to?(:cli_config) && ::Puma.cli_config.options[:max_threads] > 1
- end
-
def execute_rugged_call(method_name, *args)
Gitlab::GitalyClient::StorageSettings.allow_disk_access do
start = Gitlab::Metrics::System.monotonic_time
@@ -43,6 +37,22 @@ module Gitlab
result
end
end
+
+ def running_puma_with_multiple_threads?
+ return false unless Gitlab::Runtime.puma?
+
+ ::Puma.respond_to?(:cli_config) && ::Puma.cli_config.options[:max_threads] > 1
+ end
+
+ def rugged_feature_keys
+ Gitlab::Git::RuggedImpl::Repository::FEATURE_FLAGS
+ end
+
+ def rugged_enabled_through_feature_flag?
+ rugged_feature_keys.any? do |feature_key|
+ Feature.enabled?(feature_key)
+ end
+ end
end
end
end
diff --git a/lib/gitlab/git_access.rb b/lib/gitlab/git_access.rb
index 906350e57c5..c400e1cd4fd 100644
--- a/lib/gitlab/git_access.rb
+++ b/lib/gitlab/git_access.rb
@@ -6,7 +6,7 @@ module Gitlab
class GitAccess
include Gitlab::Utils::StrongMemoize
- UnauthorizedError = Class.new(StandardError)
+ ForbiddenError = Class.new(StandardError)
NotFoundError = Class.new(StandardError)
ProjectCreationError = Class.new(StandardError)
TimeoutError = Class.new(StandardError)
@@ -43,15 +43,15 @@ module Gitlab
PUSH_COMMANDS = %w{git-receive-pack}.freeze
ALL_COMMANDS = DOWNLOAD_COMMANDS + PUSH_COMMANDS
- attr_reader :actor, :project, :protocol, :authentication_abilities, :namespace_path, :project_path, :redirected_path, :auth_result_type, :changes, :logger
+ attr_reader :actor, :project, :protocol, :authentication_abilities, :namespace_path, :repository_path, :redirected_path, :auth_result_type, :changes, :logger
- def initialize(actor, project, protocol, authentication_abilities:, namespace_path: nil, project_path: nil, redirected_path: nil, auth_result_type: nil)
+ def initialize(actor, project, protocol, authentication_abilities:, namespace_path: nil, repository_path: nil, redirected_path: nil, auth_result_type: nil)
@actor = actor
@project = project
@protocol = protocol
- @authentication_abilities = authentication_abilities
+ @authentication_abilities = Array(authentication_abilities)
@namespace_path = namespace_path || project&.namespace&.full_path
- @project_path = project_path || project&.path
+ @repository_path = repository_path || project&.path
@redirected_path = redirected_path
@auth_result_type = auth_result_type
end
@@ -60,7 +60,6 @@ module Gitlab
@logger = Checks::TimedLogger.new(timeout: INTERNAL_TIMEOUT, header: LOG_HEADER)
@changes = changes
- check_namespace!
check_protocol!
check_valid_actor!
check_active_user!
@@ -72,11 +71,7 @@ module Gitlab
return custom_action if custom_action
check_db_accessibility!(cmd)
-
- ensure_project_on_push!(cmd, changes)
-
- check_project_accessibility!
- add_project_moved_message!
+ check_project!(changes, cmd)
check_repository_existence!
case cmd
@@ -86,7 +81,7 @@ module Gitlab
check_push_access!
end
- success_result(cmd)
+ success_result
end
def guest_can_download_code?
@@ -113,19 +108,38 @@ module Gitlab
private
+ def check_project!(changes, cmd)
+ check_namespace!
+ ensure_project_on_push!(cmd, changes)
+ check_project_accessibility!
+ add_project_moved_message!
+ end
+
def check_custom_action(cmd)
nil
end
- def check_for_console_messages(cmd)
+ def check_for_console_messages
+ return console_messages unless key?
+
+ key_status = Gitlab::Auth::KeyStatusChecker.new(actor)
+
+ if key_status.show_console_message?
+ console_messages.push(key_status.console_message)
+ else
+ console_messages
+ end
+ end
+
+ def console_messages
[]
end
def check_valid_actor!
- return unless actor.is_a?(Key)
+ return unless key?
unless actor.valid?
- raise UnauthorizedError, "Your SSH key #{actor.errors[:key].first}."
+ raise ForbiddenError, "Your SSH key #{actor.errors[:key].first}."
end
end
@@ -133,7 +147,7 @@ module Gitlab
return if request_from_ci_build?
unless protocol_allowed?
- raise UnauthorizedError, "Git access over #{protocol.upcase} is not allowed"
+ raise ForbiddenError, "Git access over #{protocol.upcase} is not allowed"
end
end
@@ -148,7 +162,7 @@ module Gitlab
unless user_access.allowed?
message = Gitlab::Auth::UserAccessDeniedReason.new(user).rejection_message
- raise UnauthorizedError, message
+ raise ForbiddenError, message
end
end
@@ -156,11 +170,11 @@ module Gitlab
case cmd
when *DOWNLOAD_COMMANDS
unless authentication_abilities.include?(:download_code) || authentication_abilities.include?(:build_download_code)
- raise UnauthorizedError, ERROR_MESSAGES[:auth_download]
+ raise ForbiddenError, ERROR_MESSAGES[:auth_download]
end
when *PUSH_COMMANDS
unless authentication_abilities.include?(:push_code)
- raise UnauthorizedError, ERROR_MESSAGES[:auth_upload]
+ raise ForbiddenError, ERROR_MESSAGES[:auth_upload]
end
end
end
@@ -174,7 +188,7 @@ module Gitlab
def add_project_moved_message!
return if redirected_path.nil?
- project_moved = Checks::ProjectMoved.new(project, user, protocol, redirected_path)
+ project_moved = Checks::ProjectMoved.new(repository, user, protocol, redirected_path)
project_moved.add_message
end
@@ -189,19 +203,19 @@ module Gitlab
def check_upload_pack_disabled!
if http? && upload_pack_disabled_over_http?
- raise UnauthorizedError, ERROR_MESSAGES[:upload_pack_disabled_over_http]
+ raise ForbiddenError, ERROR_MESSAGES[:upload_pack_disabled_over_http]
end
end
def check_receive_pack_disabled!
if http? && receive_pack_disabled_over_http?
- raise UnauthorizedError, ERROR_MESSAGES[:receive_pack_disabled_over_http]
+ raise ForbiddenError, ERROR_MESSAGES[:receive_pack_disabled_over_http]
end
end
def check_command_existence!(cmd)
unless ALL_COMMANDS.include?(cmd)
- raise UnauthorizedError, ERROR_MESSAGES[:command_not_allowed]
+ raise ForbiddenError, ERROR_MESSAGES[:command_not_allowed]
end
end
@@ -209,7 +223,7 @@ module Gitlab
return unless receive_pack?(cmd)
if Gitlab::Database.read_only?
- raise UnauthorizedError, push_to_read_only_message
+ raise ForbiddenError, push_to_read_only_message
end
end
@@ -222,7 +236,7 @@ module Gitlab
return unless user&.can?(:create_projects, namespace)
project_params = {
- path: project_path,
+ path: repository_path,
namespace_id: namespace.id,
visibility_level: Gitlab::VisibilityLevel::PRIVATE
}
@@ -236,7 +250,7 @@ module Gitlab
@project = project
user_access.project = @project
- Checks::ProjectCreated.new(project, user, protocol).add_message
+ Checks::ProjectCreated.new(repository, user, protocol).add_message
end
def check_repository_existence!
@@ -253,23 +267,23 @@ module Gitlab
guest_can_download_code?
unless passed
- raise UnauthorizedError, ERROR_MESSAGES[:download]
+ raise ForbiddenError, ERROR_MESSAGES[:download]
end
end
def check_push_access!
if project.repository_read_only?
- raise UnauthorizedError, ERROR_MESSAGES[:read_only]
+ raise ForbiddenError, ERROR_MESSAGES[:read_only]
end
if deploy_key?
unless deploy_key.can_push_to?(project)
- raise UnauthorizedError, ERROR_MESSAGES[:deploy_key_upload]
+ raise ForbiddenError, ERROR_MESSAGES[:deploy_key_upload]
end
elsif user
# User access is verified in check_change_access!
else
- raise UnauthorizedError, ERROR_MESSAGES[:upload]
+ raise ForbiddenError, ERROR_MESSAGES[:upload]
end
check_change_access!
@@ -284,7 +298,7 @@ module Gitlab
project.any_branch_allows_collaboration?(user_access.user)
unless can_push
- raise GitAccess::UnauthorizedError, ERROR_MESSAGES[:push_code]
+ raise ForbiddenError, ERROR_MESSAGES[:push_code]
end
else
# If there are worktrees with a HEAD pointing to a non-existent object,
@@ -338,6 +352,10 @@ module Gitlab
actor == :ci
end
+ def key?
+ actor.is_a?(Key)
+ end
+
def can_read_project?
if deploy_key?
deploy_key.has_access_to?(project)
@@ -372,8 +390,8 @@ module Gitlab
protected
- def success_result(cmd)
- ::Gitlab::GitAccessResult::Success.new(console_messages: check_for_console_messages(cmd))
+ def success_result
+ ::Gitlab::GitAccessResult::Success.new(console_messages: check_for_console_messages)
end
def changes_list
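
Because UnauthorizedError is renamed, callers now rescue the new constant. A sketch (actor, project and changes are placeholders):

    access = Gitlab::GitAccess.new(
      actor,
      project,
      'ssh',
      authentication_abilities: [:download_code]
    )

    begin
      access.check('git-upload-pack', changes)
    rescue Gitlab::GitAccess::ForbiddenError => e
      # previously Gitlab::GitAccess::UnauthorizedError
      Rails.logger.info("git access denied: #{e.message}")
    end
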
diff --git a/lib/gitlab/git_access_snippet.rb b/lib/gitlab/git_access_snippet.rb
index d99b9c3fe89..e11c1ea527c 100644
--- a/lib/gitlab/git_access_snippet.rb
+++ b/lib/gitlab/git_access_snippet.rb
@@ -2,7 +2,12 @@
module Gitlab
class GitAccessSnippet < GitAccess
+ extend ::Gitlab::Utils::Override
+
ERROR_MESSAGES = {
+ authentication_mechanism: 'The authentication mechanism is not supported.',
+ read_snippet: 'You are not allowed to read this snippet.',
+ update_snippet: 'You are not allowed to update this snippet.',
snippet_not_found: 'The snippet you were looking for could not be found.',
repository_not_found: 'The snippet repository you were looking for could not be found.'
}.freeze
@@ -12,25 +17,43 @@ module Gitlab
def initialize(actor, snippet, protocol, **kwargs)
@snippet = snippet
- super(actor, project, protocol, **kwargs)
+ super(actor, snippet&.project, protocol, **kwargs)
+
+ @auth_result_type = nil
+ @authentication_abilities &= [:download_code, :push_code]
end
- def check(cmd, _changes)
- unless Feature.enabled?(:version_snippets, user)
- raise NotFoundError, ERROR_MESSAGES[:snippet_not_found]
+ def check(cmd, changes)
+        # TODO: Investigate if expanding actor/authentication types is needed.
+ # https://gitlab.com/gitlab-org/gitlab/issues/202190
+ if actor && !actor.is_a?(User) && !actor.instance_of?(Key)
+ raise ForbiddenError, ERROR_MESSAGES[:authentication_mechanism]
end
check_snippet_accessibility!
- success_result(cmd)
+ super
end
- def project
- snippet&.project
+ private
+
+ override :check_project!
+ def check_project!(cmd, changes)
+ return unless snippet.is_a?(ProjectSnippet)
+
+ check_namespace!
+ check_project_accessibility!
+ add_project_moved_message!
end
- private
+ override :check_push_access!
+ def check_push_access!
+ raise ForbiddenError, ERROR_MESSAGES[:update_snippet] unless user
+ check_change_access!
+ end
+
+ override :repository
def repository
snippet&.repository
end
@@ -39,10 +62,63 @@ module Gitlab
if snippet.blank?
raise NotFoundError, ERROR_MESSAGES[:snippet_not_found]
end
+ end
- unless repository&.exists?
+ override :check_download_access!
+ def check_download_access!
+ passed = guest_can_download_code? || user_can_download_code?
+
+ unless passed
+ raise ForbiddenError, ERROR_MESSAGES[:read_snippet]
+ end
+ end
+
+ override :guest_can_download_code?
+ def guest_can_download_code?
+ Guest.can?(:read_snippet, snippet)
+ end
+
+ override :user_can_download_code?
+ def user_can_download_code?
+ authentication_abilities.include?(:download_code) && user_access.can_do_action?(:read_snippet)
+ end
+
+ override :check_change_access!
+ def check_change_access!
+ unless user_access.can_do_action?(:update_snippet)
+ raise ForbiddenError, ERROR_MESSAGES[:update_snippet]
+ end
+
+ changes_list.each do |change|
+          # If the user does not have access to make at least one change, cancel
+          # the whole push by allowing the exception to bubble up
+ check_single_change_access(change)
+ end
+ end
+
+ def check_single_change_access(change)
+ Checks::SnippetCheck.new(change, logger: logger).validate!
+ Checks::PushFileCountCheck.new(change, repository: repository, limit: Snippet::MAX_FILE_COUNT, logger: logger).validate!
+ rescue Checks::TimedLogger::TimeoutError
+ raise TimeoutError, logger.full_message
+ end
+
+ override :check_repository_existence!
+ def check_repository_existence!
+ unless repository.exists?
raise NotFoundError, ERROR_MESSAGES[:repository_not_found]
end
end
+
+ override :user_access
+ def user_access
+ @user_access ||= UserAccessSnippet.new(user, snippet: snippet)
+ end
+
+ # TODO: Implement EE/Geo https://gitlab.com/gitlab-org/gitlab/issues/205629
+ override :check_custom_action
+ def check_custom_action(cmd)
+ nil
+ end
end
end
diff --git a/lib/gitlab/git_access_wiki.rb b/lib/gitlab/git_access_wiki.rb
index 3d0db753f6e..aad46937c32 100644
--- a/lib/gitlab/git_access_wiki.rb
+++ b/lib/gitlab/git_access_wiki.rb
@@ -19,11 +19,11 @@ module Gitlab
def check_change_access!
unless user_access.can_do_action?(:create_wiki)
- raise UnauthorizedError, ERROR_MESSAGES[:write_to_wiki]
+ raise ForbiddenError, ERROR_MESSAGES[:write_to_wiki]
end
if Gitlab::Database.read_only?
- raise UnauthorizedError, push_to_read_only_message
+ raise ForbiddenError, push_to_read_only_message
end
true
diff --git a/lib/gitlab/git_post_receive.rb b/lib/gitlab/git_post_receive.rb
index 5264bae47a1..13d991cdfbd 100644
--- a/lib/gitlab/git_post_receive.rb
+++ b/lib/gitlab/git_post_receive.rb
@@ -3,10 +3,10 @@
module Gitlab
class GitPostReceive
include Gitlab::Identifier
- attr_reader :project, :identifier, :changes, :push_options
+ attr_reader :container, :identifier, :changes, :push_options
- def initialize(project, identifier, changes, push_options = {})
- @project = project
+ def initialize(container, identifier, changes, push_options = {})
+ @container = container
@identifier = identifier
@changes = parse_changes(changes)
@push_options = push_options
@@ -27,10 +27,10 @@ module Gitlab
def includes_default_branch?
# If the container doesn't have a default branch yet, we presume the
# first branch pushed will be the default.
- return true unless project.default_branch.present?
+ return true unless container.default_branch.present?
changes.branch_changes.any? do |change|
- Gitlab::Git.branch_name(change[:ref]) == project.default_branch
+ Gitlab::Git.branch_name(change[:ref]) == container.default_branch
end
end
diff --git a/lib/gitlab/gitaly_client.rb b/lib/gitlab/gitaly_client.rb
index 4eb1ccf32ba..3b9402da0dd 100644
--- a/lib/gitlab/gitaly_client.rb
+++ b/lib/gitlab/gitaly_client.rb
@@ -42,7 +42,7 @@ module Gitlab
klass = stub_class(name)
addr = stub_address(storage)
creds = stub_creds(storage)
- klass.new(addr, creds, interceptors: interceptors)
+ klass.new(addr, creds, interceptors: interceptors, channel_args: channel_args)
end
end
end
@@ -54,6 +54,16 @@ module Gitlab
end
private_class_method :interceptors
+ def self.channel_args
+      # These values match the Go Gitaly client
+ # https://gitlab.com/gitlab-org/gitaly/-/blob/bf9f52bc/client/dial.go#L78
+ {
+ 'grpc.keepalive_time_ms': 20000,
+ 'grpc.keepalive_permit_without_calls': 1
+ }
+ end
+ private_class_method :channel_args
+
def self.stub_cert_paths
cert_paths = Dir["#{OpenSSL::X509::DEFAULT_CERT_DIR}/*"]
cert_paths << OpenSSL::X509::DEFAULT_CERT_FILE if File.exist? OpenSSL::X509::DEFAULT_CERT_FILE
@@ -141,6 +151,20 @@ module Gitlab
# kwargs.merge(deadline: Time.now + 10)
# end
#
+ # The optional remote_storage keyword argument is used to enable
+ # inter-gitaly calls. Say you have an RPC that needs to pull data from
+ # one repository to another. For example, to fetch a branch from a
+ # (non-deduplicated) fork into the fork parent. In that case you would
+ # send an RPC call to the Gitaly server hosting the fork parent, and in
+ # the request, you would tell that Gitaly server to pull Git data from
+ # the fork. How does that Gitaly server connect to the Gitaly server the
+ # forked repo lives on? This is the problem `remote_storage:` solves: it
+ # adds address and authentication information to the call, as gRPC
+ # metadata (under the `gitaly-servers` header). The request would say
+ # "pull from repo X on gitaly-2". In the Ruby code you pass
+ # `remote_storage: 'gitaly-2'`. And then the metadata would say
+ # "gitaly-2 is at network address tcp://10.0.1.2:8075".
+ #
def self.call(storage, service, rpc, request, remote_storage: nil, timeout: default_timeout, &block)
self.measure_timings(service, rpc, request) do
self.execute(storage, service, rpc, request, remote_storage: remote_storage, timeout: timeout, &block)
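
The remote_storage: behaviour documented above is exactly what the new ReplicateRepository RPC (added further below in this commit) relies on. A condensed sketch with hypothetical fork/fork_parent repositories:

    request = Gitaly::ReplicateRepositoryRequest.new(
      repository: fork_parent.gitaly_repository,
      source: fork.gitaly_repository
    )

    # The RPC goes to the storage holding fork_parent; remote_storage: attaches
    # the metadata that server needs to reach the storage holding fork.
    Gitlab::GitalyClient.call(
      fork_parent.storage,
      :repository_service,
      :replicate_repository,
      request,
      remote_storage: fork.storage,
      timeout: Gitlab::GitalyClient.long_timeout
    )
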
diff --git a/lib/gitlab/gitaly_client/commit_service.rb b/lib/gitlab/gitaly_client/commit_service.rb
index ac22f5bf419..1f914dc95d1 100644
--- a/lib/gitlab/gitaly_client/commit_service.rb
+++ b/lib/gitlab/gitaly_client/commit_service.rb
@@ -324,7 +324,8 @@ module Gitlab
request.after = GitalyClient.timestamp(options[:after]) if options[:after]
request.before = GitalyClient.timestamp(options[:before]) if options[:before]
request.revision = encode_binary(options[:ref]) if options[:ref]
- request.order = options[:order].upcase.sub('DEFAULT', 'NONE') if options[:order].present?
+ request.author = encode_binary(options[:author]) if options[:author]
+ request.order = options[:order].upcase.sub('DEFAULT', 'NONE') if options[:order].present?
request.paths = encode_repeated(Array(options[:path])) if options[:path].present?
diff --git a/lib/gitlab/gitaly_client/operation_service.rb b/lib/gitlab/gitaly_client/operation_service.rb
index 67fb0ab9608..9ed4b2da09a 100644
--- a/lib/gitlab/gitaly_client/operation_service.rb
+++ b/lib/gitlab/gitaly_client/operation_service.rb
@@ -248,12 +248,11 @@ module Gitlab
request_enum.close
end
- def user_squash(user, squash_id, branch, start_sha, end_sha, author, message)
+ def user_squash(user, squash_id, start_sha, end_sha, author, message)
request = Gitaly::UserSquashRequest.new(
repository: @gitaly_repo,
user: Gitlab::Git::User.from_gitlab(user).to_gitaly,
squash_id: squash_id.to_s,
- branch: encode_binary(branch),
start_sha: start_sha,
end_sha: end_sha,
author: Gitlab::Git::User.from_gitlab(author).to_gitaly,
diff --git a/lib/gitlab/gitaly_client/remote_service.rb b/lib/gitlab/gitaly_client/remote_service.rb
index 0e95b0ef469..2405f3be197 100644
--- a/lib/gitlab/gitaly_client/remote_service.rb
+++ b/lib/gitlab/gitaly_client/remote_service.rb
@@ -41,20 +41,6 @@ module Gitlab
GitalyClient.call(@storage, :remote_service, :remove_remote, request, timeout: GitalyClient.long_timeout).result
end
- def fetch_internal_remote(repository)
- request = Gitaly::FetchInternalRemoteRequest.new(
- repository: @gitaly_repo,
- remote_repository: repository.gitaly_repository
- )
-
- response = GitalyClient.call(@storage, :remote_service,
- :fetch_internal_remote, request,
- timeout: GitalyClient.long_timeout,
- remote_storage: repository.storage)
-
- response.result
- end
-
def find_remote_root_ref(remote_name)
request = Gitaly::FindRemoteRootRefRequest.new(
repository: @gitaly_repo,
diff --git a/lib/gitlab/gitaly_client/repository_service.rb b/lib/gitlab/gitaly_client/repository_service.rb
index 597ae4651ea..f74c9ea4192 100644
--- a/lib/gitlab/gitaly_client/repository_service.rb
+++ b/lib/gitlab/gitaly_client/repository_service.rb
@@ -359,6 +359,22 @@ module Gitlab
GitalyClient.call(@storage, :repository_service, :remove_repository, request, timeout: GitalyClient.long_timeout)
end
+ def replicate(source_repository)
+ request = Gitaly::ReplicateRepositoryRequest.new(
+ repository: @gitaly_repo,
+ source: source_repository.gitaly_repository
+ )
+
+ GitalyClient.call(
+ @storage,
+ :repository_service,
+ :replicate_repository,
+ request,
+ remote_storage: source_repository.storage,
+ timeout: GitalyClient.long_timeout
+ )
+ end
+
private
def search_results_from_response(gitaly_response, options = {})
diff --git a/lib/gitlab/github_import.rb b/lib/gitlab/github_import.rb
index 14a6d6443ec..9a7c406d981 100644
--- a/lib/gitlab/github_import.rb
+++ b/lib/gitlab/github_import.rb
@@ -16,7 +16,7 @@ module Gitlab
def self.ghost_user_id
key = 'github-import/ghost-user-id'
- Caching.read_integer(key) || Caching.write(key, User.select(:id).ghost.id)
+ Gitlab::Cache::Import::Caching.read_integer(key) || Gitlab::Cache::Import::Caching.write(key, User.select(:id).ghost.id)
end
end
end
diff --git a/lib/gitlab/github_import/caching.rb b/lib/gitlab/github_import/caching.rb
deleted file mode 100644
index b08f133794f..00000000000
--- a/lib/gitlab/github_import/caching.rb
+++ /dev/null
@@ -1,151 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module GithubImport
- module Caching
- # The default timeout of the cache keys.
- TIMEOUT = 24.hours.to_i
-
- WRITE_IF_GREATER_SCRIPT = <<-EOF.strip_heredoc.freeze
- local key, value, ttl = KEYS[1], tonumber(ARGV[1]), ARGV[2]
- local existing = tonumber(redis.call("get", key))
-
- if existing == nil or value > existing then
- redis.call("set", key, value)
- redis.call("expire", key, ttl)
- return true
- else
- return false
- end
- EOF
-
- # Reads a cache key.
- #
- # If the key exists and has a non-empty value its TTL is refreshed
- # automatically.
- #
- # raw_key - The cache key to read.
- # timeout - The new timeout of the key if the key is to be refreshed.
- def self.read(raw_key, timeout: TIMEOUT)
- key = cache_key_for(raw_key)
- value = Redis::Cache.with { |redis| redis.get(key) }
-
- if value.present?
- # We refresh the expiration time so frequently used keys stick
- # around, removing the need for querying the database as much as
- # possible.
- #
- # A key may be empty when we looked up a GitHub user (for example) but
- # did not find a matching GitLab user. In that case we _don't_ want to
- # refresh the TTL so we automatically pick up the right data when said
- # user were to register themselves on the GitLab instance.
- Redis::Cache.with { |redis| redis.expire(key, timeout) }
- end
-
- value
- end
-
- # Reads an integer from the cache, or returns nil if no value was found.
- #
- # See Caching.read for more information.
- def self.read_integer(raw_key, timeout: TIMEOUT)
- value = read(raw_key, timeout: timeout)
-
- value.to_i if value.present?
- end
-
- # Sets a cache key to the given value.
- #
- # key - The cache key to write.
- # value - The value to set.
- # timeout - The time after which the cache key should expire.
- def self.write(raw_key, value, timeout: TIMEOUT)
- key = cache_key_for(raw_key)
-
- Redis::Cache.with do |redis|
- redis.set(key, value, ex: timeout)
- end
-
- value
- end
-
- # Adds a value to a set.
- #
- # raw_key - The key of the set to add the value to.
- # value - The value to add to the set.
- # timeout - The new timeout of the key.
- def self.set_add(raw_key, value, timeout: TIMEOUT)
- key = cache_key_for(raw_key)
-
- Redis::Cache.with do |redis|
- redis.multi do |m|
- m.sadd(key, value)
- m.expire(key, timeout)
- end
- end
- end
-
- # Returns true if the given value is present in the set.
- #
- # raw_key - The key of the set to check.
- # value - The value to check for.
- def self.set_includes?(raw_key, value)
- key = cache_key_for(raw_key)
-
- Redis::Cache.with do |redis|
- redis.sismember(key, value)
- end
- end
-
- # Sets multiple keys to a given value.
- #
- # mapping - A Hash mapping the cache keys to their values.
- # timeout - The time after which the cache key should expire.
- def self.write_multiple(mapping, timeout: TIMEOUT)
- Redis::Cache.with do |redis|
- redis.multi do |multi|
- mapping.each do |raw_key, value|
- multi.set(cache_key_for(raw_key), value, ex: timeout)
- end
- end
- end
- end
-
- # Sets the expiration time of a key.
- #
- # raw_key - The key for which to change the timeout.
- # timeout - The new timeout.
- def self.expire(raw_key, timeout)
- key = cache_key_for(raw_key)
-
- Redis::Cache.with do |redis|
- redis.expire(key, timeout)
- end
- end
-
- # Sets a key to the given integer but only if the existing value is
- # smaller than the given value.
- #
- # This method uses a Lua script to ensure the read and write are atomic.
- #
- # raw_key - The key to set.
- # value - The new value for the key.
- # timeout - The key timeout in seconds.
- #
- # Returns true when the key was overwritten, false otherwise.
- def self.write_if_greater(raw_key, value, timeout: TIMEOUT)
- key = cache_key_for(raw_key)
- val = Redis::Cache.with do |redis|
- redis
- .eval(WRITE_IF_GREATER_SCRIPT, keys: [key], argv: [value, timeout])
- end
-
- val ? true : false
- end
-
- def self.cache_key_for(raw_key)
- "#{Redis::Cache::CACHE_NAMESPACE}:#{raw_key}"
- end
- end
- end
-end
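
The module is deleted because it was generalized into Gitlab::Cache::Import::Caching, which the call-site updates below switch to. The API is unchanged; a sketch of the equivalent calls under the new constant (the key is illustrative):

    key = 'github-import/example/labels'

    Gitlab::Cache::Import::Caching.write(key, 42)
    Gitlab::Cache::Import::Caching.read_integer(key)            # => 42
    Gitlab::Cache::Import::Caching.expire(key, 15.minutes.to_i)
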
diff --git a/lib/gitlab/github_import/importer/repository_importer.rb b/lib/gitlab/github_import/importer/repository_importer.rb
index 6aad7955415..7ae91912b8a 100644
--- a/lib/gitlab/github_import/importer/repository_importer.rb
+++ b/lib/gitlab/github_import/importer/repository_importer.rb
@@ -4,7 +4,6 @@ module Gitlab
module GithubImport
module Importer
class RepositoryImporter
- include Gitlab::ShellAdapter
include Gitlab::Utils::StrongMemoize
attr_reader :project, :client, :wiki_formatter
@@ -65,10 +64,10 @@ module Gitlab
end
def import_wiki_repository
- gitlab_shell.import_wiki_repository(project, wiki_formatter)
+ project.wiki.repository.import_repository(wiki_formatter.import_url)
true
- rescue Gitlab::Shell::Error => e
+ rescue ::Gitlab::Git::CommandError => e
if e.message !~ /repository not exported/
project.create_wiki
fail_import("Failed to import the wiki: #{e.message}")
diff --git a/lib/gitlab/github_import/issuable_finder.rb b/lib/gitlab/github_import/issuable_finder.rb
index c81603a1aa9..136531505ea 100644
--- a/lib/gitlab/github_import/issuable_finder.rb
+++ b/lib/gitlab/github_import/issuable_finder.rb
@@ -23,7 +23,7 @@ module Gitlab
#
# This method will return `nil` if no ID could be found.
def database_id
- val = Caching.read(cache_key)
+ val = Gitlab::Cache::Import::Caching.read(cache_key)
val.to_i if val.present?
end
@@ -32,7 +32,7 @@ module Gitlab
#
# database_id - The ID of the corresponding database row.
def cache_database_id(database_id)
- Caching.write(cache_key, database_id)
+ Gitlab::Cache::Import::Caching.write(cache_key, database_id)
end
private
diff --git a/lib/gitlab/github_import/label_finder.rb b/lib/gitlab/github_import/label_finder.rb
index cad39e48e43..39e669dbba4 100644
--- a/lib/gitlab/github_import/label_finder.rb
+++ b/lib/gitlab/github_import/label_finder.rb
@@ -15,7 +15,7 @@ module Gitlab
# Returns the label ID for the given name.
def id_for(name)
- Caching.read_integer(cache_key_for(name))
+ Gitlab::Cache::Import::Caching.read_integer(cache_key_for(name))
end
# rubocop: disable CodeReuse/ActiveRecord
@@ -27,7 +27,7 @@ module Gitlab
hash[cache_key_for(name)] = id
end
- Caching.write_multiple(mapping)
+ Gitlab::Cache::Import::Caching.write_multiple(mapping)
end
# rubocop: enable CodeReuse/ActiveRecord
diff --git a/lib/gitlab/github_import/milestone_finder.rb b/lib/gitlab/github_import/milestone_finder.rb
index a157a1e1ff5..d9290e36ea1 100644
--- a/lib/gitlab/github_import/milestone_finder.rb
+++ b/lib/gitlab/github_import/milestone_finder.rb
@@ -18,7 +18,7 @@ module Gitlab
def id_for(issuable)
return unless issuable.milestone_number
- Caching.read_integer(cache_key_for(issuable.milestone_number))
+ Gitlab::Cache::Import::Caching.read_integer(cache_key_for(issuable.milestone_number))
end
# rubocop: disable CodeReuse/ActiveRecord
@@ -30,7 +30,7 @@ module Gitlab
hash[cache_key_for(iid)] = id
end
- Caching.write_multiple(mapping)
+ Gitlab::Cache::Import::Caching.write_multiple(mapping)
end
# rubocop: enable CodeReuse/ActiveRecord
diff --git a/lib/gitlab/github_import/page_counter.rb b/lib/gitlab/github_import/page_counter.rb
index a3e7b3c1afc..3b4fd42ba2a 100644
--- a/lib/gitlab/github_import/page_counter.rb
+++ b/lib/gitlab/github_import/page_counter.rb
@@ -19,12 +19,12 @@ module Gitlab
#
# Returns true if the page number was overwritten, false otherwise.
def set(page)
- Caching.write_if_greater(cache_key, page)
+ Gitlab::Cache::Import::Caching.write_if_greater(cache_key, page)
end
# Returns the current value from the cache.
def current
- Caching.read_integer(cache_key) || 1
+ Gitlab::Cache::Import::Caching.read_integer(cache_key) || 1
end
end
end
diff --git a/lib/gitlab/github_import/parallel_scheduling.rb b/lib/gitlab/github_import/parallel_scheduling.rb
index 849a66d47ed..cabc615ea11 100644
--- a/lib/gitlab/github_import/parallel_scheduling.rb
+++ b/lib/gitlab/github_import/parallel_scheduling.rb
@@ -42,7 +42,7 @@ module Gitlab
# still scheduling duplicates. Since all work has already been
# completed, those jobs will just cycle through any remaining pages while
# not scheduling anything.
- Caching.expire(already_imported_cache_key, 15.minutes.to_i)
+ Gitlab::Cache::Import::Caching.expire(already_imported_cache_key, 15.minutes.to_i)
retval
end
@@ -112,14 +112,14 @@ module Gitlab
def already_imported?(object)
id = id_for_already_imported_cache(object)
- Caching.set_includes?(already_imported_cache_key, id)
+ Gitlab::Cache::Import::Caching.set_includes?(already_imported_cache_key, id)
end
# Marks the given object as "already imported".
def mark_as_imported(object)
id = id_for_already_imported_cache(object)
- Caching.set_add(already_imported_cache_key, id)
+ Gitlab::Cache::Import::Caching.set_add(already_imported_cache_key, id)
end
# Returns the ID to use for the cache used for checking if an object has
diff --git a/lib/gitlab/github_import/user_finder.rb b/lib/gitlab/github_import/user_finder.rb
index 51a532437bd..9da986ae921 100644
--- a/lib/gitlab/github_import/user_finder.rb
+++ b/lib/gitlab/github_import/user_finder.rb
@@ -102,11 +102,11 @@ module Gitlab
def email_for_github_username(username)
cache_key = EMAIL_FOR_USERNAME_CACHE_KEY % username
- email = Caching.read(cache_key)
+ email = Gitlab::Cache::Import::Caching.read(cache_key)
unless email
user = client.user(username)
- email = Caching.write(cache_key, user.email) if user
+ email = Gitlab::Cache::Import::Caching.write(cache_key, user.email) if user
end
email
@@ -125,7 +125,7 @@ module Gitlab
def id_for_github_id(id)
gitlab_id = query_id_for_github_id(id) || nil
- Caching.write(ID_CACHE_KEY % id, gitlab_id)
+ Gitlab::Cache::Import::Caching.write(ID_CACHE_KEY % id, gitlab_id)
end
# Queries and caches the GitLab user ID for a GitHub email, if one was
@@ -133,7 +133,7 @@ module Gitlab
def id_for_github_email(email)
gitlab_id = query_id_for_github_email(email) || nil
- Caching.write(ID_FOR_EMAIL_CACHE_KEY % email, gitlab_id)
+ Gitlab::Cache::Import::Caching.write(ID_FOR_EMAIL_CACHE_KEY % email, gitlab_id)
end
# rubocop: disable CodeReuse/ActiveRecord
@@ -155,7 +155,7 @@ module Gitlab
# 1. A boolean indicating if the key was present or not.
# 2. The ID as an Integer, or nil in case no ID could be found.
def read_id_from_cache(key)
- value = Caching.read(key)
+ value = Gitlab::Cache::Import::Caching.read(key)
exists = !value.nil?
number = value.to_i
diff --git a/lib/gitlab/gl_repository.rb b/lib/gitlab/gl_repository.rb
index fcebcb463cd..26440e6f82d 100644
--- a/lib/gitlab/gl_repository.rb
+++ b/lib/gitlab/gl_repository.rb
@@ -7,19 +7,21 @@ module Gitlab
PROJECT = RepoType.new(
name: :project,
access_checker_class: Gitlab::GitAccess,
- repository_resolver: -> (project) { project.repository }
+ repository_resolver: -> (project) { project&.repository }
).freeze
WIKI = RepoType.new(
name: :wiki,
access_checker_class: Gitlab::GitAccessWiki,
- repository_resolver: -> (project) { project.wiki.repository },
+ repository_resolver: -> (project) { project&.wiki&.repository },
suffix: :wiki
).freeze
SNIPPET = RepoType.new(
name: :snippet,
access_checker_class: Gitlab::GitAccessSnippet,
- repository_resolver: -> (snippet) { snippet.repository },
- container_resolver: -> (id) { Snippet.find_by_id(id) }
+ repository_resolver: -> (snippet) { snippet&.repository },
+ container_resolver: -> (id) { Snippet.find_by_id(id) },
+ project_resolver: -> (snippet) { snippet&.project },
+ guest_read_ability: :read_snippet
).freeze
TYPES = {
@@ -42,7 +44,7 @@ module Gitlab
container = type.fetch_container!(gl_repository)
- [container, type]
+ [container, type.project_for(container), type]
end
def self.default_type
diff --git a/lib/gitlab/gl_repository/repo_type.rb b/lib/gitlab/gl_repository/repo_type.rb
index 9663fd7de8f..052ce578881 100644
--- a/lib/gitlab/gl_repository/repo_type.rb
+++ b/lib/gitlab/gl_repository/repo_type.rb
@@ -7,6 +7,8 @@ module Gitlab
:access_checker_class,
:repository_resolver,
:container_resolver,
+ :project_resolver,
+ :guest_read_ability,
:suffix
def initialize(
@@ -14,11 +16,15 @@ module Gitlab
access_checker_class:,
repository_resolver:,
container_resolver: default_container_resolver,
+ project_resolver: nil,
+ guest_read_ability: :download_code,
suffix: nil)
@name = name
@access_checker_class = access_checker_class
@repository_resolver = repository_resolver
@container_resolver = container_resolver
+ @project_resolver = project_resolver
+ @guest_read_ability = guest_read_ability
@suffix = suffix
end
@@ -59,8 +65,18 @@ module Gitlab
repository_resolver.call(container)
end
+ def project_for(container)
+ return container unless project_resolver
+
+ project_resolver.call(container)
+ end
+
def valid?(repository_path)
- repository_path.end_with?(path_suffix)
+ repository_path.end_with?(path_suffix) &&
+ (
+ !snippet? ||
+ repository_path.match?(Gitlab::PathRegex.full_snippets_repository_path_regex)
+ )
end
private
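
The new project_resolver lets a repo type answer which Project owns a given container, which snippets need because personal snippets have none. A sketch with placeholder snippets:

    type = Gitlab::GlRepository::SNIPPET

    type.project_for(project_snippet)    # => the owning Project
    type.project_for(personal_snippet)   # => nil

    # Types without a project_resolver simply return the container itself.
    Gitlab::GlRepository::PROJECT.project_for(project)  # => project
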
diff --git a/lib/gitlab/gon_helper.rb b/lib/gitlab/gon_helper.rb
index 3db6c3b51c0..e4e69241bd9 100644
--- a/lib/gitlab/gon_helper.rb
+++ b/lib/gitlab/gon_helper.rb
@@ -46,6 +46,7 @@ module Gitlab
push_frontend_feature_flag(:monaco_snippets, default_enabled: false)
push_frontend_feature_flag(:monaco_blobs, default_enabled: false)
push_frontend_feature_flag(:monaco_ci, default_enabled: false)
+ push_frontend_feature_flag(:snippets_edit_vue, default_enabled: false)
end
# Exposes the state of a feature flag to the frontend code.
diff --git a/lib/gitlab/grape_logging/formatters/lograge_with_timestamp.rb b/lib/gitlab/grape_logging/formatters/lograge_with_timestamp.rb
index 837473d47cd..045a341f2ed 100644
--- a/lib/gitlab/grape_logging/formatters/lograge_with_timestamp.rb
+++ b/lib/gitlab/grape_logging/formatters/lograge_with_timestamp.rb
@@ -6,6 +6,8 @@ module Gitlab
class LogrageWithTimestamp
include Gitlab::EncodingHelper
+ EMPTY_ARRAY = [].freeze
+
def call(severity, datetime, _, data)
time = data.delete :time
data[:params] = process_params(data)
@@ -16,29 +18,27 @@ module Gitlab
duration: time[:total],
db: time[:db],
view: time[:view]
- }.merge(data)
- ::Lograge.formatter.call(attributes) + "\n"
+ }.merge!(data)
+
+ ::Lograge.formatter.call(attributes) << "\n"
end
private
def process_params(data)
- return [] unless data.has_key?(:params)
+ return EMPTY_ARRAY unless data.has_key?(:params)
- params_array =
- data[:params]
- .each_pair
- .map { |k, v| { key: k, value: utf8_encode_values(v) } }
+ params_array = data[:params].map { |k, v| { key: k, value: utf8_encode_values(v) } }
- Gitlab::Utils::LogLimitedArray.log_limited_array(params_array)
+ Gitlab::Utils::LogLimitedArray.log_limited_array(params_array, sentinel: Gitlab::Lograge::CustomOptions::LIMITED_ARRAY_SENTINEL)
end
def utf8_encode_values(data)
case data
when Hash
- data.merge(data) { |k, v| utf8_encode_values(v) }
+ data.merge!(data) { |k, v| utf8_encode_values(v) }
when Array
- data.map { |v| utf8_encode_values(v) }
+ data.map! { |v| utf8_encode_values(v) }
when String
encode_utf8(data)
end
diff --git a/lib/gitlab/graphql/connections.rb b/lib/gitlab/graphql/connections.rb
index 08d5cd0b72e..0c0bfe5a458 100644
--- a/lib/gitlab/graphql/connections.rb
+++ b/lib/gitlab/graphql/connections.rb
@@ -16,6 +16,10 @@ module Gitlab
Gitlab::Graphql::ExternallyPaginatedArray,
Gitlab::Graphql::Connections::ExternallyPaginatedArrayConnection
)
+ GraphQL::Relay::BaseConnection.register_connection_implementation(
+ Gitlab::Graphql::Pagination::Relations::OffsetActiveRecordRelation,
+ Gitlab::Graphql::Pagination::OffsetActiveRecordRelationConnection
+ )
end
end
end
diff --git a/lib/gitlab/graphql/docs/helper.rb b/lib/gitlab/graphql/docs/helper.rb
index 56524120ffd..0dd28b32511 100644
--- a/lib/gitlab/graphql/docs/helper.rb
+++ b/lib/gitlab/graphql/docs/helper.rb
@@ -25,6 +25,28 @@ module Gitlab
fields.sort_by { |field| field[:name] }
end
+ def render_field(field)
+ '| %s | %s | %s |' % [
+ render_field_name(field),
+ render_field_type(field[:type][:info]),
+ render_field_description(field)
+ ]
+ end
+
+ def render_field_name(field)
+ rendered_name = "`#{field[:name]}`"
+ rendered_name += ' **{warning-solid}**' if field[:is_deprecated]
+ rendered_name
+ end
+
+ # Returns the field description. If the field has been deprecated,
+ # the deprecation reason will be returned in place of the description.
+ def render_field_description(field)
+ return field[:description] unless field[:is_deprecated]
+
+ "**Deprecated:** #{field[:deprecation_reason]}"
+ end
+
# Some field types are arrays of other types and are displayed
# in the docs wrapped in square brackets, for example: [String!].
# This makes the GitLab docs renderer think they are links, so here
diff --git a/lib/gitlab/graphql/docs/templates/default.md.haml b/lib/gitlab/graphql/docs/templates/default.md.haml
index b126a22c301..8c033526557 100644
--- a/lib/gitlab/graphql/docs/templates/default.md.haml
+++ b/lib/gitlab/graphql/docs/templates/default.md.haml
@@ -11,6 +11,9 @@
Each table below documents a GraphQL type. Types match loosely to models, but not all
fields and methods on a model are available via GraphQL.
+
+ CAUTION: **Caution:**
+ Fields that are deprecated are marked with **{warning-solid}**.
\
- objects.each do |type|
- unless type[:fields].empty?
@@ -22,5 +25,5 @@
~ "| Name | Type | Description |"
~ "| --- | ---- | ---------- |"
- sorted_fields(type[:fields]).each do |field|
- = "| `#{field[:name]}` | #{render_field_type(field[:type][:info])} | #{field[:description]} |"
+ = render_field(field)
\
diff --git a/lib/gitlab/graphql/pagination/offset_active_record_relation_connection.rb b/lib/gitlab/graphql/pagination/offset_active_record_relation_connection.rb
new file mode 100644
index 00000000000..c852fbf0ab8
--- /dev/null
+++ b/lib/gitlab/graphql/pagination/offset_active_record_relation_connection.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+# We use the Keyset / Stable cursor connection by default for ActiveRecord::Relation.
+# However, there are times when that may not be powerful enough (yet), and we
+# want to use standard offset pagination.
+module Gitlab
+ module Graphql
+ module Pagination
+ class OffsetActiveRecordRelationConnection < GraphQL::Relay::RelationConnection
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/graphql/pagination/relations/offset_active_record_relation.rb b/lib/gitlab/graphql/pagination/relations/offset_active_record_relation.rb
new file mode 100644
index 00000000000..2e5a0d66d4e
--- /dev/null
+++ b/lib/gitlab/graphql/pagination/relations/offset_active_record_relation.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Graphql
+ module Pagination
+ module Relations
+ class OffsetActiveRecordRelation < ::ActiveRecord::Relation
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/graphql/timeout.rb b/lib/gitlab/graphql/timeout.rb
new file mode 100644
index 00000000000..4282c46a19e
--- /dev/null
+++ b/lib/gitlab/graphql/timeout.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Graphql
+ class Timeout < GraphQL::Schema::Timeout
+ def handle_timeout(error, query)
+ Gitlab::GraphqlLogger.error(message: error.message, query: query.query_string, query_variables: query.provided_variables)
+ end
+ end
+ end
+end
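
GraphQL::Schema::Timeout is graphql-ruby's timeout plugin; the subclass only customises how a timeout is logged. A sketch of attaching it to a schema (the max_seconds value is an assumption, not taken from this commit):

    class ExampleSchema < GraphQL::Schema
      # Fields past the budget are aborted and the timeout is logged
      # through Gitlab::GraphqlLogger by handle_timeout above.
      use Gitlab::Graphql::Timeout, max_seconds: 30
    end
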
diff --git a/lib/gitlab/import_export.rb b/lib/gitlab/import_export.rb
index 8ce6549c0c7..52102b6f508 100644
--- a/lib/gitlab/import_export.rb
+++ b/lib/gitlab/import_export.rb
@@ -42,8 +42,20 @@ module Gitlab
"project.wiki.bundle"
end
+ def snippet_repo_bundle_dir
+ 'snippets'
+ end
+
+ def snippets_repo_bundle_path(absolute_path)
+ File.join(absolute_path, ::Gitlab::ImportExport.snippet_repo_bundle_dir)
+ end
+
+ def snippet_repo_bundle_filename_for(snippet)
+ "#{snippet.hexdigest}.bundle"
+ end
+
def config_file
- Rails.root.join('lib/gitlab/import_export/import_export.yml')
+ Rails.root.join('lib/gitlab/import_export/project/import_export.yml')
end
def version_filename
@@ -77,7 +89,7 @@ module Gitlab
end
def group_config_file
- Rails.root.join('lib/gitlab/import_export/group_import_export.yml')
+ Rails.root.join('lib/gitlab/import_export/group/import_export.yml')
end
end
end
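
A sketch of how the new snippet bundle helpers compose paths inside an export directory (the directory and snippet are placeholders):

    export_path = '/tmp/project_export'

    Gitlab::ImportExport.snippets_repo_bundle_path(export_path)
    # => "/tmp/project_export/snippets"

    Gitlab::ImportExport.snippet_repo_bundle_filename_for(snippet)
    # => "#{snippet.hexdigest}.bundle"
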
diff --git a/lib/gitlab/import_export/after_export_strategies/move_file_strategy.rb b/lib/gitlab/import_export/after_export_strategies/move_file_strategy.rb
new file mode 100644
index 00000000000..2e3136936f8
--- /dev/null
+++ b/lib/gitlab/import_export/after_export_strategies/move_file_strategy.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ImportExport
+ module AfterExportStrategies
+ class MoveFileStrategy < BaseAfterExportStrategy
+ def initialize(archive_path:)
+ @archive_path = archive_path
+ end
+
+ private
+
+ def strategy_execute
+ FileUtils.mv(project.export_file.path, @archive_path)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/import_export/attribute_cleaner.rb b/lib/gitlab/import_export/attribute_cleaner.rb
index d1c20dff799..3bfc059dcd3 100644
--- a/lib/gitlab/import_export/attribute_cleaner.rb
+++ b/lib/gitlab/import_export/attribute_cleaner.rb
@@ -4,8 +4,8 @@ module Gitlab
module ImportExport
class AttributeCleaner
ALLOWED_REFERENCES = [
- *ProjectRelationFactory::PROJECT_REFERENCES,
- *ProjectRelationFactory::USER_REFERENCES,
+ *Gitlab::ImportExport::Project::RelationFactory::PROJECT_REFERENCES,
+ *Gitlab::ImportExport::Project::RelationFactory::USER_REFERENCES,
'group_id',
'commit_id',
'discussion_id',
diff --git a/lib/gitlab/import_export/base/object_builder.rb b/lib/gitlab/import_export/base/object_builder.rb
new file mode 100644
index 00000000000..109d2e233a5
--- /dev/null
+++ b/lib/gitlab/import_export/base/object_builder.rb
@@ -0,0 +1,105 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ImportExport
+ module Base
+ # Base class for Group & Project Object Builders.
+ # This class is not intended to be used on its own but
+ # rather inherited from.
+ #
+    # The cache keeps at most 1000 entries. 1000 was chosen because:
+    #   - one cache entry uses around 0.5K of memory, so 1000 items use around 500K
+    #     (leaving some buffer, this stays below 1M), an affordable cost for a project import.
+    #   - for projects on GitLab.com, 1000 entries appear to be enough for labels/milestones.
+    #     For example, the gitlab project has ~970 labels and 26 milestones.
+ LRU_CACHE_SIZE = 1000
+
+ class ObjectBuilder
+ def self.build(*args)
+ new(*args).find
+ end
+
+ def initialize(klass, attributes)
+ @klass = klass.ancestors.include?(Label) ? Label : klass
+ @attributes = attributes
+
+ if Gitlab::SafeRequestStore.active?
+ @lru_cache = cache_from_request_store
+ @cache_key = [klass, attributes]
+ end
+ end
+
+ def find
+ find_with_cache do
+ find_object || klass.create(prepare_attributes)
+ end
+ end
+
+ protected
+
+ def where_clauses
+ raise NotImplementedError
+ end
+
+ # attributes wrapped in a method to be
+ # adjusted in sub-class if needed
+ def prepare_attributes
+ attributes
+ end
+
+ private
+
+ attr_reader :klass, :attributes, :lru_cache, :cache_key
+
+ def find_with_cache
+ return yield unless lru_cache && cache_key
+
+ lru_cache[cache_key] ||= yield
+ end
+
+ def cache_from_request_store
+ Gitlab::SafeRequestStore[:lru_cache] ||= LruRedux::Cache.new(LRU_CACHE_SIZE)
+ end
+
+ def find_object
+ klass.where(where_clause).first
+ end
+
+ def where_clause
+ where_clauses.reduce(:and)
+ end
+
+ def table
+ @table ||= klass.arel_table
+ end
+
+ # Returns Arel clause:
+      # `"{table_name}"."{attrs.keys[0]}" = '{attrs.values[0]}' AND "{table_name}"."{attrs.keys[1]}" = '{attrs.values[1]}'`
+ # from the given Hash of attributes.
+ def attrs_to_arel(attrs)
+ attrs.map do |key, value|
+ table[key].eq(value)
+ end.reduce(:and)
+ end
+
+ # Returns Arel clause `"{table_name}"."title" = '{attributes['title']}'`
+      # if the attributes Hash has a 'title' key, otherwise `nil`.
+ def where_clause_for_title
+ attrs_to_arel(attributes.slice('title'))
+ end
+
+ # Returns Arel clause `"{table_name}"."description" = '{attributes['description']}'`
+ # if attributes has a 'description' key, otherwise `nil`.
+ def where_clause_for_description
+ attrs_to_arel(attributes.slice('description'))
+ end
+
+ # Returns Arel clause `"{table_name}"."created_at" = '{attributes['created_at']}'`
+ # if attributes has a 'created_at' key, otherwise `nil`.
+ def where_clause_for_created_at
+ attrs_to_arel(attributes.slice('created_at'))
+ end
+ end
+ end
+ end
+end
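Concrete builders only have to supply where_clauses; the base class handles the request-store LRU cache and the find-or-create fallback. A hedged sketch of a minimal subclass (the class name and the milestone lookup are illustrative, not part of this change):

# Illustrative subclass: look milestones up by title and created_at only.
class ExampleObjectBuilder < Gitlab::ImportExport::Base::ObjectBuilder
  protected

  def where_clauses
    [where_clause_for_title, where_clause_for_created_at].compact
  end
end

# Returns the matching milestone, or creates one with the given attributes.
ExampleObjectBuilder.build(Milestone, 'title' => 'v1.0', 'created_at' => '2020-01-01')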
diff --git a/lib/gitlab/import_export/base/relation_factory.rb b/lib/gitlab/import_export/base/relation_factory.rb
new file mode 100644
index 00000000000..05b69362976
--- /dev/null
+++ b/lib/gitlab/import_export/base/relation_factory.rb
@@ -0,0 +1,312 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ImportExport
+ module Base
+ class RelationFactory
+ include Gitlab::Utils::StrongMemoize
+
+ IMPORTED_OBJECT_MAX_RETRIES = 5.freeze
+
+ OVERRIDES = {}.freeze
+ EXISTING_OBJECT_RELATIONS = %i[].freeze
+
+ # This represents all relations that have unique key on `project_id` or `group_id`
+ UNIQUE_RELATIONS = %i[].freeze
+
+ USER_REFERENCES = %w[
+ author_id
+ assignee_id
+ updated_by_id
+ merged_by_id
+ latest_closed_by_id
+ user_id
+ created_by_id
+ last_edited_by_id
+ merge_user_id
+ resolved_by_id
+ closed_by_id
+ owner_id
+ ].freeze
+
+ TOKEN_RESET_MODELS = %i[Project Namespace Group Ci::Trigger Ci::Build Ci::Runner ProjectHook].freeze
+
+ def self.create(*args)
+ new(*args).create
+ end
+
+ def self.relation_class(relation_name)
+ # There are scenarios where the model is pluralized (e.g.
+ # MergeRequest::Metrics), and we don't want to force it to singular
+ # with #classify.
+ relation_name.to_s.classify.constantize
+ rescue NameError
+ relation_name.to_s.constantize
+ end
+
+ def initialize(relation_sym:, relation_hash:, members_mapper:, object_builder:, user:, importable:, excluded_keys: [])
+ @relation_name = self.class.overrides[relation_sym]&.to_sym || relation_sym
+ @relation_hash = relation_hash.except('noteable_id')
+ @members_mapper = members_mapper
+ @object_builder = object_builder
+ @user = user
+ @importable = importable
+ @imported_object_retries = 0
+ @relation_hash[importable_column_name] = @importable.id
+
+ # Remove excluded keys from relation_hash
+ # We don't do this in the parsed_relation_hash because of the 'transformed attributes'
+ # For example, MergeRequestDiffFiles exports its diff attribute as utf8_diff. Then,
+ # in the create method that attribute is renamed to diff. And because diff is an excluded key,
+ # if we clean the excluded keys in the parsed_relation_hash, it will be removed
+ # from the object attributes and the export will fail.
+ @relation_hash.except!(*excluded_keys)
+ end
+
+ # Creates an object from an actual model with name "relation_sym" with params from
+ # the relation_hash, updating references with new object IDs, mapping users using
+ # the "members_mapper" object, also updating notes if required.
+ def create
+ return if invalid_relation? || predefined_relation?
+
+ setup_base_models
+ setup_models
+
+ generate_imported_object
+ end
+
+ def self.overrides
+ self::OVERRIDES
+ end
+
+ def self.existing_object_relations
+ self::EXISTING_OBJECT_RELATIONS
+ end
+
+ private
+
+ def invalid_relation?
+ false
+ end
+
+ def predefined_relation?
+ relation_class.try(:predefined_id?, @relation_hash['id'])
+ end
+
+ def setup_models
+ raise NotImplementedError
+ end
+
+ def unique_relations
+ # define in sub-class if any
+ self.class::UNIQUE_RELATIONS
+ end
+
+ def setup_base_models
+ update_user_references
+ remove_duplicate_assignees
+ reset_tokens!
+ remove_encrypted_attributes!
+ end
+
+ def update_user_references
+ self.class::USER_REFERENCES.each do |reference|
+ if @relation_hash[reference]
+ @relation_hash[reference] = @members_mapper.map[@relation_hash[reference]]
+ end
+ end
+ end
+
+ def remove_duplicate_assignees
+ return unless @relation_hash['issue_assignees']
+
+ # When an assignee did not exist in the members mapper, the importer is
+ # assigned. We only need to assign each user once.
+ @relation_hash['issue_assignees'].uniq!(&:user_id)
+ end
+
+ def generate_imported_object
+ imported_object
+ end
+
+ def reset_tokens!
+ return unless Gitlab::ImportExport.reset_tokens? && self.class::TOKEN_RESET_MODELS.include?(@relation_name)
+
+ # If we import/export to the same instance, tokens will have to be reset.
+ # We also have to reset them to avoid issues when the gitlab secrets file cannot be copied across.
+ relation_class.attribute_names.select { |name| name.include?('token') }.each do |token|
+ @relation_hash[token] = nil
+ end
+ end
+
+ def remove_encrypted_attributes!
+ return unless relation_class.respond_to?(:encrypted_attributes) && relation_class.encrypted_attributes.any?
+
+ relation_class.encrypted_attributes.each_key do |key|
+ @relation_hash[key.to_s] = nil
+ end
+ end
+
+ def relation_class
+ @relation_class ||= self.class.relation_class(@relation_name)
+ end
+
+ def importable_column_name
+ importable_class_name.concat('_id')
+ end
+
+ def importable_class_name
+ @importable.class.to_s.downcase
+ end
+
+ def imported_object
+ if existing_or_new_object.respond_to?(:importing)
+ existing_or_new_object.importing = true
+ end
+
+ existing_or_new_object
+ rescue ActiveRecord::RecordNotUnique
+ # as the operation is not atomic, retry in the unlikely scenario an INSERT is
+ # performed on the same object between the SELECT and the INSERT
+ @imported_object_retries += 1
+ retry if @imported_object_retries < IMPORTED_OBJECT_MAX_RETRIES
+ end
+
+ def parsed_relation_hash
+ @parsed_relation_hash ||= Gitlab::ImportExport::AttributeCleaner.clean(relation_hash: @relation_hash,
+ relation_class: relation_class)
+ end
+
+ def existing_or_new_object
+ # Only find existing records to avoid mapping tables such as milestones
+ # Otherwise always create the record, skipping the extra SELECT clause.
+ @existing_or_new_object ||= begin
+ if existing_object?
+ attribute_hash = attribute_hash_for(['events'])
+
+ existing_object.assign_attributes(attribute_hash) if attribute_hash.any?
+
+ existing_object
+ else
+ # Because of single-table inheritance, we need to be careful to use the `type` field
+ # See https://gitlab.com/gitlab-org/gitlab/issues/34860#note_235321497
+ inheritance_column = relation_class.try(:inheritance_column)
+ inheritance_attributes = parsed_relation_hash.slice(inheritance_column)
+ object = relation_class.new(inheritance_attributes)
+ object.assign_attributes(parsed_relation_hash)
+ object
+ end
+ end
+ end
+
+ def attribute_hash_for(attributes)
+ attributes.each_with_object({}) do |hash, value|
+ hash[value] = parsed_relation_hash.delete(value) if parsed_relation_hash[value]
+ hash
+ end
+ end
+
+ def existing_object
+ @existing_object ||= find_or_create_object!
+ end
+
+ def unique_relation_object
+ unique_relation_object = relation_class.find_or_create_by(importable_column_name => @importable.id)
+ unique_relation_object.assign_attributes(parsed_relation_hash)
+ unique_relation_object
+ end
+
+ def find_or_create_object!
+ return unique_relation_object if unique_relation?
+
+ # Can't use IDs as validation exists calling `group` or `project` attributes
+ finder_hash = parsed_relation_hash.tap do |hash|
+ if relation_class.attribute_method?('group_id') && @importable.is_a?(::Project)
+ hash['group'] = @importable.group
+ end
+
+ hash[importable_class_name] = @importable if relation_class.reflect_on_association(importable_class_name.to_sym)
+ hash.delete(importable_column_name)
+ end
+
+ @object_builder.build(relation_class, finder_hash)
+ end
+
+ def setup_note
+ set_note_author
+ # attachment is deprecated and note uploads are handled by Markdown uploader
+ @relation_hash['attachment'] = nil
+ end
+
+ # Sets the author for a note. If the user importing the project
+ # has admin access, an actual mapping with new project members
+ # will be used. Otherwise, a note stating the original author name
+ # is left.
+ def set_note_author
+ old_author_id = @relation_hash['author_id']
+ author = @relation_hash.delete('author')
+
+ update_note_for_missing_author(author['name']) unless has_author?(old_author_id)
+ end
+
+ def has_author?(old_author_id)
+ admin_user? && @members_mapper.include?(old_author_id)
+ end
+
+ def missing_author_note(updated_at, author_name)
+ timestamp = updated_at.split('.').first
+ "\n\n *By #{author_name} on #{timestamp} (imported from GitLab project)*"
+ end
+
+ def update_note_for_missing_author(author_name)
+ @relation_hash['note'] = '*Blank note*' if @relation_hash['note'].blank?
+ @relation_hash['note'] = "#{@relation_hash['note']}#{missing_author_note(@relation_hash['updated_at'], author_name)}"
+ end
+
+ def admin_user?
+ @user.admin?
+ end
+
+ def existing_object?
+ strong_memoize(:_existing_object) do
+ self.class.existing_object_relations.include?(@relation_name) || unique_relation?
+ end
+ end
+
+ def unique_relation?
+ strong_memoize(:unique_relation) do
+ importable_foreign_key.present? &&
+ (has_unique_index_on_importable_fk? || uses_importable_fk_as_primary_key?)
+ end
+ end
+
+ def has_unique_index_on_importable_fk?
+ cache = cached_has_unique_index_on_importable_fk
+ table_name = relation_class.table_name
+ return cache[table_name] if cache.has_key?(table_name)
+
+ index_exists =
+ ActiveRecord::Base.connection.index_exists?(
+ relation_class.table_name,
+ importable_foreign_key,
+ unique: true)
+
+ cache[table_name] = index_exists
+ end
+
+ # Avoid unnecessary DB requests
+ def cached_has_unique_index_on_importable_fk
+ Thread.current[:cached_has_unique_index_on_importable_fk] ||= {}
+ end
+
+ def uses_importable_fk_as_primary_key?
+ relation_class.primary_key == importable_foreign_key
+ end
+
+ def importable_foreign_key
+ relation_class.reflect_on_association(importable_class_name.to_sym)&.foreign_key
+ end
+ end
+ end
+ end
+end
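Callers always go through .create; subclasses provide OVERRIDES, EXISTING_OBJECT_RELATIONS and setup_models. A hedged call-site sketch using the group classes defined further down in this diff (attribute values and surrounding variables are illustrative):

# Build a single group label record from an exported relation hash.
Gitlab::ImportExport::Group::RelationFactory.create(
  relation_sym:   :labels,                      # remapped to :group_labels via OVERRIDES
  relation_hash:  label_attributes,             # Hash parsed from the exported group.json
  members_mapper: members_mapper,               # Gitlab::ImportExport::MembersMapper instance
  object_builder: Gitlab::ImportExport::Group::ObjectBuilder,
  user:           current_user,
  importable:     group,
  excluded_keys:  []
)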
diff --git a/lib/gitlab/import_export/base_object_builder.rb b/lib/gitlab/import_export/base_object_builder.rb
deleted file mode 100644
index ec66b7a7a4f..00000000000
--- a/lib/gitlab/import_export/base_object_builder.rb
+++ /dev/null
@@ -1,103 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module ImportExport
- # Base class for Group & Project Object Builders.
- # This class is not intended to be used on its own but
- # rather inherited from.
- #
- # Cache keeps 1000 entries at most, 1000 is chosen based on:
- # - one cache entry uses around 0.5K memory, 1000 items uses around 500K.
- # (leave some buffer it should be less than 1M). It is afforable cost for project import.
- # - for projects in Gitlab.com, it seems 1000 entries for labels/milestones is enough.
- # For example, gitlab has ~970 labels and 26 milestones.
- LRU_CACHE_SIZE = 1000
-
- class BaseObjectBuilder
- def self.build(*args)
- new(*args).find
- end
-
- def initialize(klass, attributes)
- @klass = klass.ancestors.include?(Label) ? Label : klass
- @attributes = attributes
-
- if Gitlab::SafeRequestStore.active?
- @lru_cache = cache_from_request_store
- @cache_key = [klass, attributes]
- end
- end
-
- def find
- find_with_cache do
- find_object || klass.create(prepare_attributes)
- end
- end
-
- protected
-
- def where_clauses
- raise NotImplementedError
- end
-
- # attributes wrapped in a method to be
- # adjusted in sub-class if needed
- def prepare_attributes
- attributes
- end
-
- private
-
- attr_reader :klass, :attributes, :lru_cache, :cache_key
-
- def find_with_cache
- return yield unless lru_cache && cache_key
-
- lru_cache[cache_key] ||= yield
- end
-
- def cache_from_request_store
- Gitlab::SafeRequestStore[:lru_cache] ||= LruRedux::Cache.new(LRU_CACHE_SIZE)
- end
-
- def find_object
- klass.where(where_clause).first
- end
-
- def where_clause
- where_clauses.reduce(:and)
- end
-
- def table
- @table ||= klass.arel_table
- end
-
- # Returns Arel clause:
- # `"{table_name}"."{attrs.keys[0]}" = '{attrs.values[0]} AND {table_name}"."{attrs.keys[1]}" = '{attrs.values[1]}"`
- # from the given Hash of attributes.
- def attrs_to_arel(attrs)
- attrs.map do |key, value|
- table[key].eq(value)
- end.reduce(:and)
- end
-
- # Returns Arel clause `"{table_name}"."title" = '{attributes['title']}'`
- # if attributes has 'title key, otherwise `nil`.
- def where_clause_for_title
- attrs_to_arel(attributes.slice('title'))
- end
-
- # Returns Arel clause `"{table_name}"."description" = '{attributes['description']}'`
- # if attributes has 'description key, otherwise `nil`.
- def where_clause_for_description
- attrs_to_arel(attributes.slice('description'))
- end
-
- # Returns Arel clause `"{table_name}"."created_at" = '{attributes['created_at']}'`
- # if attributes has 'created_at key, otherwise `nil`.
- def where_clause_for_created_at
- attrs_to_arel(attributes.slice('created_at'))
- end
- end
- end
-end
diff --git a/lib/gitlab/import_export/base_relation_factory.rb b/lib/gitlab/import_export/base_relation_factory.rb
deleted file mode 100644
index d3c8802bcce..00000000000
--- a/lib/gitlab/import_export/base_relation_factory.rb
+++ /dev/null
@@ -1,307 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module ImportExport
- class BaseRelationFactory
- include Gitlab::Utils::StrongMemoize
-
- IMPORTED_OBJECT_MAX_RETRIES = 5.freeze
-
- OVERRIDES = {}.freeze
- EXISTING_OBJECT_RELATIONS = %i[].freeze
-
- # This represents all relations that have unique key on `project_id` or `group_id`
- UNIQUE_RELATIONS = %i[].freeze
-
- USER_REFERENCES = %w[
- author_id
- assignee_id
- updated_by_id
- merged_by_id
- latest_closed_by_id
- user_id
- created_by_id
- last_edited_by_id
- merge_user_id
- resolved_by_id
- closed_by_id
- owner_id
- ].freeze
-
- TOKEN_RESET_MODELS = %i[Project Namespace Group Ci::Trigger Ci::Build Ci::Runner ProjectHook].freeze
-
- def self.create(*args)
- new(*args).create
- end
-
- def self.relation_class(relation_name)
- # There are scenarios where the model is pluralized (e.g.
- # MergeRequest::Metrics), and we don't want to force it to singular
- # with #classify.
- relation_name.to_s.classify.constantize
- rescue NameError
- relation_name.to_s.constantize
- end
-
- def initialize(relation_sym:, relation_hash:, members_mapper:, object_builder:, merge_requests_mapping: nil, user:, importable:, excluded_keys: [])
- @relation_name = self.class.overrides[relation_sym]&.to_sym || relation_sym
- @relation_hash = relation_hash.except('noteable_id')
- @members_mapper = members_mapper
- @object_builder = object_builder
- @merge_requests_mapping = merge_requests_mapping
- @user = user
- @importable = importable
- @imported_object_retries = 0
- @relation_hash[importable_column_name] = @importable.id
-
- # Remove excluded keys from relation_hash
- # We don't do this in the parsed_relation_hash because of the 'transformed attributes'
- # For example, MergeRequestDiffFiles exports its diff attribute as utf8_diff. Then,
- # in the create method that attribute is renamed to diff. And because diff is an excluded key,
- # if we clean the excluded keys in the parsed_relation_hash, it will be removed
- # from the object attributes and the export will fail.
- @relation_hash.except!(*excluded_keys)
- end
-
- # Creates an object from an actual model with name "relation_sym" with params from
- # the relation_hash, updating references with new object IDs, mapping users using
- # the "members_mapper" object, also updating notes if required.
- def create
- return if invalid_relation?
-
- setup_base_models
- setup_models
-
- generate_imported_object
- end
-
- def self.overrides
- self::OVERRIDES
- end
-
- def self.existing_object_relations
- self::EXISTING_OBJECT_RELATIONS
- end
-
- private
-
- def invalid_relation?
- false
- end
-
- def setup_models
- raise NotImplementedError
- end
-
- def unique_relations
- # define in sub-class if any
- self.class::UNIQUE_RELATIONS
- end
-
- def setup_base_models
- update_user_references
- remove_duplicate_assignees
- reset_tokens!
- remove_encrypted_attributes!
- end
-
- def update_user_references
- self.class::USER_REFERENCES.each do |reference|
- if @relation_hash[reference]
- @relation_hash[reference] = @members_mapper.map[@relation_hash[reference]]
- end
- end
- end
-
- def remove_duplicate_assignees
- return unless @relation_hash['issue_assignees']
-
- # When an assignee did not exist in the members mapper, the importer is
- # assigned. We only need to assign each user once.
- @relation_hash['issue_assignees'].uniq!(&:user_id)
- end
-
- def generate_imported_object
- imported_object
- end
-
- def reset_tokens!
- return unless Gitlab::ImportExport.reset_tokens? && self.class::TOKEN_RESET_MODELS.include?(@relation_name)
-
- # If we import/export to the same instance, tokens will have to be reset.
- # We also have to reset them to avoid issues when the gitlab secrets file cannot be copied across.
- relation_class.attribute_names.select { |name| name.include?('token') }.each do |token|
- @relation_hash[token] = nil
- end
- end
-
- def remove_encrypted_attributes!
- return unless relation_class.respond_to?(:encrypted_attributes) && relation_class.encrypted_attributes.any?
-
- relation_class.encrypted_attributes.each_key do |key|
- @relation_hash[key.to_s] = nil
- end
- end
-
- def relation_class
- @relation_class ||= self.class.relation_class(@relation_name)
- end
-
- def importable_column_name
- importable_class_name.concat('_id')
- end
-
- def importable_class_name
- @importable.class.to_s.downcase
- end
-
- def imported_object
- if existing_or_new_object.respond_to?(:importing)
- existing_or_new_object.importing = true
- end
-
- existing_or_new_object
- rescue ActiveRecord::RecordNotUnique
- # as the operation is not atomic, retry in the unlikely scenario an INSERT is
- # performed on the same object between the SELECT and the INSERT
- @imported_object_retries += 1
- retry if @imported_object_retries < IMPORTED_OBJECT_MAX_RETRIES
- end
-
- def parsed_relation_hash
- @parsed_relation_hash ||= Gitlab::ImportExport::AttributeCleaner.clean(relation_hash: @relation_hash,
- relation_class: relation_class)
- end
-
- def existing_or_new_object
- # Only find existing records to avoid mapping tables such as milestones
- # Otherwise always create the record, skipping the extra SELECT clause.
- @existing_or_new_object ||= begin
- if existing_object?
- attribute_hash = attribute_hash_for(['events'])
-
- existing_object.assign_attributes(attribute_hash) if attribute_hash.any?
-
- existing_object
- else
- # Because of single-type inheritance, we need to be careful to use the `type` field
- # See https://gitlab.com/gitlab-org/gitlab/issues/34860#note_235321497
- inheritance_column = relation_class.try(:inheritance_column)
- inheritance_attributes = parsed_relation_hash.slice(inheritance_column)
- object = relation_class.new(inheritance_attributes)
- object.assign_attributes(parsed_relation_hash)
- object
- end
- end
- end
-
- def attribute_hash_for(attributes)
- attributes.each_with_object({}) do |hash, value|
- hash[value] = parsed_relation_hash.delete(value) if parsed_relation_hash[value]
- hash
- end
- end
-
- def existing_object
- @existing_object ||= find_or_create_object!
- end
-
- def unique_relation_object
- unique_relation_object = relation_class.find_or_create_by(importable_column_name => @importable.id)
- unique_relation_object.assign_attributes(parsed_relation_hash)
- unique_relation_object
- end
-
- def find_or_create_object!
- return unique_relation_object if unique_relation?
-
- # Can't use IDs as validation exists calling `group` or `project` attributes
- finder_hash = parsed_relation_hash.tap do |hash|
- if relation_class.attribute_method?('group_id') && @importable.is_a?(Project)
- hash['group'] = @importable.group
- end
-
- hash[importable_class_name] = @importable if relation_class.reflect_on_association(importable_class_name.to_sym)
- hash.delete(importable_column_name)
- end
-
- @object_builder.build(relation_class, finder_hash)
- end
-
- def setup_note
- set_note_author
- # attachment is deprecated and note uploads are handled by Markdown uploader
- @relation_hash['attachment'] = nil
- end
-
- # Sets the author for a note. If the user importing the project
- # has admin access, an actual mapping with new project members
- # will be used. Otherwise, a note stating the original author name
- # is left.
- def set_note_author
- old_author_id = @relation_hash['author_id']
- author = @relation_hash.delete('author')
-
- update_note_for_missing_author(author['name']) unless has_author?(old_author_id)
- end
-
- def has_author?(old_author_id)
- admin_user? && @members_mapper.include?(old_author_id)
- end
-
- def missing_author_note(updated_at, author_name)
- timestamp = updated_at.split('.').first
- "\n\n *By #{author_name} on #{timestamp} (imported from GitLab project)*"
- end
-
- def update_note_for_missing_author(author_name)
- @relation_hash['note'] = '*Blank note*' if @relation_hash['note'].blank?
- @relation_hash['note'] = "#{@relation_hash['note']}#{missing_author_note(@relation_hash['updated_at'], author_name)}"
- end
-
- def admin_user?
- @user.admin?
- end
-
- def existing_object?
- strong_memoize(:_existing_object) do
- self.class.existing_object_relations.include?(@relation_name) || unique_relation?
- end
- end
-
- def unique_relation?
- strong_memoize(:unique_relation) do
- importable_foreign_key.present? &&
- (has_unique_index_on_importable_fk? || uses_importable_fk_as_primary_key?)
- end
- end
-
- def has_unique_index_on_importable_fk?
- cache = cached_has_unique_index_on_importable_fk
- table_name = relation_class.table_name
- return cache[table_name] if cache.has_key?(table_name)
-
- index_exists =
- ActiveRecord::Base.connection.index_exists?(
- relation_class.table_name,
- importable_foreign_key,
- unique: true)
-
- cache[table_name] = index_exists
- end
-
- # Avoid unnecessary DB requests
- def cached_has_unique_index_on_importable_fk
- Thread.current[:cached_has_unique_index_on_importable_fk] ||= {}
- end
-
- def uses_importable_fk_as_primary_key?
- relation_class.primary_key == importable_foreign_key
- end
-
- def importable_foreign_key
- relation_class.reflect_on_association(importable_class_name.to_sym)&.foreign_key
- end
- end
- end
-end
diff --git a/lib/gitlab/import_export/error.rb b/lib/gitlab/import_export/error.rb
index 454dc778b6b..f11b7a0a298 100644
--- a/lib/gitlab/import_export/error.rb
+++ b/lib/gitlab/import_export/error.rb
@@ -2,6 +2,13 @@
module Gitlab
module ImportExport
- Error = Class.new(StandardError)
+ class Error < StandardError
+ def self.permission_error(user, importable)
+ self.new(
+ "User with ID: %s does not have required permissions for %s: %s with ID: %s" %
+ [user.id, importable.class.name, importable.name, importable.id]
+ )
+ end
+ end
end
end
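permission_error builds a consistent message out of the user and the importable (project or group), so every entry point can raise the same error. A hedged sketch of a raise site (the permission check shown is illustrative):

# Illustrative guard in an import/export service.
unless current_user.can?(:admin_group, group)
  raise Gitlab::ImportExport::Error.permission_error(current_user, group)
  # => "User with ID: 42 does not have required permissions for Group: my-group with ID: 7"
end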
diff --git a/lib/gitlab/import_export/fast_hash_serializer.rb b/lib/gitlab/import_export/fast_hash_serializer.rb
index 5a067b5c9f3..c6ecf13ded8 100644
--- a/lib/gitlab/import_export/fast_hash_serializer.rb
+++ b/lib/gitlab/import_export/fast_hash_serializer.rb
@@ -136,6 +136,12 @@ module Gitlab
data = []
record.in_batches(of: @batch_size) do |batch| # rubocop:disable Cop/InBatches
+ # order each batch by its primary key to ensure
+ # consistent and predictable ordering of each exported relation,
+ # as additional `WHERE` clauses can impact the order in which data is
+ # returned by the database when no `ORDER BY` is specified
+ batch = batch.reorder(batch.klass.primary_key)
+
if Feature.enabled?(:export_fast_serialize_with_raw_json, default_enabled: true)
data.append(JSONBatchRelation.new(batch, options, preloads[key]).tap(&:raw_json))
else
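Reordering each batch by its primary key makes exports deterministic: without an explicit ORDER BY, the database is free to return rows from a filtered batch in any order. A standalone illustration of the same idea (the model is assumed for the example):

# Deterministic batch iteration, mirroring the change above.
Issue.where(project_id: project.id).in_batches(of: 100) do |batch|
  batch = batch.reorder(batch.klass.primary_key)  # pin ordering inside the batch
  batch.each { |issue| process(issue) }            # process is a placeholder
end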
diff --git a/lib/gitlab/import_export/group/import_export.yml b/lib/gitlab/import_export/group/import_export.yml
new file mode 100644
index 00000000000..2721198860c
--- /dev/null
+++ b/lib/gitlab/import_export/group/import_export.yml
@@ -0,0 +1,79 @@
+# Model relationships to be included in the group import/export
+#
+# This list _must_ only contain relationships that are available to both FOSS and
+# Enterprise editions. EE specific relationships must be defined in the `ee` section further
+# down below.
+tree:
+ group:
+ - :milestones
+ - :badges
+ - labels:
+ - :priorities
+ - boards:
+ - lists:
+ - label:
+ - :priorities
+ - :board
+ - members:
+ - :user
+
+included_attributes:
+ user:
+ - :id
+ - :email
+ - :username
+ author:
+ - :name
+
+excluded_attributes:
+ group:
+ - :id
+ - :owner_id
+ - :parent_id
+ - :created_at
+ - :updated_at
+ - :runners_token
+ - :runners_token_encrypted
+ - :saml_discovery_token
+ - :visibility_level
+
+methods:
+ labels:
+ - :type
+ label:
+ - :type
+ badges:
+ - :type
+ notes:
+ - :type
+ events:
+ - :action
+ lists:
+ - :list_type
+
+preloads:
+
+# EE specific relationships and settings to include. All of this will be merged
+# into the previous structures if EE is used.
+ee:
+ tree:
+ group:
+ - epics:
+ - :parent
+ - :award_emoji
+ - events:
+ - :push_event_payload
+ - notes:
+ - :author
+ - :award_emoji
+ - events:
+ - :push_event_payload
+ - boards:
+ - :board_assignee
+ - :milestone
+ - labels:
+ - :priorities
+ - lists:
+ - milestone:
+ - events:
+ - :push_event_payload
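The group configuration file is consumed through the same Config/Reader pipeline as the project one, with the ee: block merged on top of the FOSS tree when Enterprise code is loaded. A hedged sketch of loading it (key access is shown symbolically for illustration):

# Load and merge the group import/export configuration.
config = Gitlab::ImportExport::Config.new(
  config: Gitlab::ImportExport.group_config_file
).to_h

config[:tree][:group]  # => the relation tree listed above (plus EE entries where applicable)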
diff --git a/lib/gitlab/import_export/group/object_builder.rb b/lib/gitlab/import_export/group/object_builder.rb
new file mode 100644
index 00000000000..e171a31348e
--- /dev/null
+++ b/lib/gitlab/import_export/group/object_builder.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ImportExport
+ module Group
+ # Given a class, it finds or creates a new object at group level.
+ #
+ # Example:
+ # `Group::ObjectBuilder.build(Label, label_attributes)`
+ # finds or initializes a label with the given attributes.
+ class ObjectBuilder < Base::ObjectBuilder
+ def self.build(*args)
+ ::Group.transaction do
+ super
+ end
+ end
+
+ def initialize(klass, attributes)
+ super
+
+ @group = @attributes['group']
+
+ update_description
+ end
+
+ private
+
+ attr_reader :group
+
+ # Convert an empty description string to nil,
+ # because the existing object may have been saved with description: nil,
+ # which would make the lookup fail since nil != ''
+ def update_description
+ attributes['description'] = nil if attributes['description'] == ''
+ end
+
+ def where_clauses
+ [
+ where_clause_base,
+ where_clause_for_title,
+ where_clause_for_description,
+ where_clause_for_created_at
+ ].compact
+ end
+
+ # Returns Arel clause `"{table_name}"."group_id" IN (group and ancestor ids)`
+ def where_clause_base
+ table[:group_id].in(group_and_ancestor_ids)
+ end
+
+ def group_and_ancestor_ids
+ group.ancestors.map(&:id) << group.id
+ end
+ end
+ end
+ end
+end
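The group builder scopes its lookup to the group and all of its ancestors, so a label already defined on a parent group is reused instead of duplicated. A hedged usage sketch (attribute values are illustrative):

# Finds an existing label in `group` or any ancestor group, or creates it.
Gitlab::ImportExport::Group::ObjectBuilder.build(
  Label,
  'title'       => 'bug',
  'description' => 'Something is broken',
  'group'       => group
)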
diff --git a/lib/gitlab/import_export/group/relation_factory.rb b/lib/gitlab/import_export/group/relation_factory.rb
new file mode 100644
index 00000000000..91637161377
--- /dev/null
+++ b/lib/gitlab/import_export/group/relation_factory.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ImportExport
+ module Group
+ class RelationFactory < Base::RelationFactory
+ OVERRIDES = {
+ labels: :group_labels,
+ priorities: :label_priorities,
+ label: :group_label,
+ parent: :epic
+ }.freeze
+
+ EXISTING_OBJECT_RELATIONS = %i[
+ epic
+ epics
+ milestone
+ milestones
+ label
+ labels
+ group_label
+ group_labels
+ ].freeze
+
+ private
+
+ def setup_models
+ setup_note if @relation_name == :notes
+
+ update_group_references
+ end
+
+ def update_group_references
+ return unless self.class.existing_object_relations.include?(@relation_name)
+ return unless @relation_hash['group_id']
+
+ @relation_hash['group_id'] = @importable.id
+ end
+ end
+ end
+ end
+end
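OVERRIDES remaps exported relation names onto the group-level models before relation_class resolves them, while EXISTING_OBJECT_RELATIONS marks relations that should be looked up rather than recreated. A small illustration of the remapping:

overrides = Gitlab::ImportExport::Group::RelationFactory::OVERRIDES
overrides[:labels]  # => :group_labels

# :group_labels then resolves to the GroupLabel model.
Gitlab::ImportExport::Group::RelationFactory.relation_class(:group_labels)  # => GroupLabel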
diff --git a/lib/gitlab/import_export/group/tree_restorer.rb b/lib/gitlab/import_export/group/tree_restorer.rb
new file mode 100644
index 00000000000..247e39a68b9
--- /dev/null
+++ b/lib/gitlab/import_export/group/tree_restorer.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ImportExport
+ module Group
+ class TreeRestorer
+ attr_reader :user
+ attr_reader :shared
+ attr_reader :group
+
+ def initialize(user:, shared:, group:, group_hash:)
+ @path = File.join(shared.export_path, 'group.json')
+ @user = user
+ @shared = shared
+ @group = group
+ @group_hash = group_hash
+ end
+
+ def restore
+ @relation_reader ||=
+ if @group_hash.present?
+ ImportExport::JSON::LegacyReader::User.new(@group_hash, reader.group_relation_names)
+ else
+ ImportExport::JSON::LegacyReader::File.new(@path, reader.group_relation_names)
+ end
+
+ @group_members = @relation_reader.consume_relation('members')
+ @children = @relation_reader.consume_attribute('children')
+ @relation_reader.consume_attribute('name')
+ @relation_reader.consume_attribute('path')
+
+ if members_mapper.map && restorer.restore
+ @children&.each do |group_hash|
+ group = create_group(group_hash: group_hash, parent_group: @group)
+ shared = Gitlab::ImportExport::Shared.new(group)
+
+ self.class.new(
+ user: @user,
+ shared: shared,
+ group: group,
+ group_hash: group_hash
+ ).restore
+ end
+ end
+
+ return false if @shared.errors.any?
+
+ true
+ rescue => e
+ @shared.error(e)
+ false
+ end
+
+ private
+
+ def restorer
+ @relation_tree_restorer ||= RelationTreeRestorer.new(
+ user: @user,
+ shared: @shared,
+ importable: @group,
+ relation_reader: @relation_reader,
+ members_mapper: members_mapper,
+ object_builder: object_builder,
+ relation_factory: relation_factory,
+ reader: reader
+ )
+ end
+
+ def create_group(group_hash:, parent_group:)
+ group_params = {
+ name: group_hash['name'],
+ path: group_hash['path'],
+ parent_id: parent_group&.id,
+ visibility_level: sub_group_visibility_level(group_hash, parent_group)
+ }
+
+ ::Groups::CreateService.new(@user, group_params).execute
+ end
+
+ def sub_group_visibility_level(group_hash, parent_group)
+ original_visibility_level = group_hash['visibility_level'] || Gitlab::VisibilityLevel::PRIVATE
+
+ if parent_group && parent_group.visibility_level < original_visibility_level
+ Gitlab::VisibilityLevel.closest_allowed_level(parent_group.visibility_level)
+ else
+ original_visibility_level
+ end
+ end
+
+ def members_mapper
+ @members_mapper ||= Gitlab::ImportExport::MembersMapper.new(exported_members: @group_members, user: @user, importable: @group)
+ end
+
+ def relation_factory
+ Gitlab::ImportExport::Group::RelationFactory
+ end
+
+ def object_builder
+ Gitlab::ImportExport::Group::ObjectBuilder
+ end
+
+ def reader
+ @reader ||= Gitlab::ImportExport::Reader.new(
+ shared: @shared,
+ config: Gitlab::ImportExport::Config.new(
+ config: Gitlab::ImportExport.group_config_file
+ ).to_h
+ )
+ end
+ end
+ end
+ end
+end
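The restorer consumes members, children, name and path up front, hands the remaining relations to RelationTreeRestorer, and then recurses into each child group with a fresh Shared context. A hedged sketch of the entry point (the surrounding import service is assumed):

# Restore a previously exported group tree from group.json in shared.export_path.
shared = Gitlab::ImportExport::Shared.new(group)

Gitlab::ImportExport::Group::TreeRestorer.new(
  user:       current_user,
  shared:     shared,
  group:      group,
  group_hash: nil          # nil makes the restorer read group.json from disk
).restore                  # => true on success, false with errors recorded on shared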
diff --git a/lib/gitlab/import_export/group/tree_saver.rb b/lib/gitlab/import_export/group/tree_saver.rb
new file mode 100644
index 00000000000..fd1eb329ad2
--- /dev/null
+++ b/lib/gitlab/import_export/group/tree_saver.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ImportExport
+ module Group
+ class TreeSaver
+ attr_reader :full_path, :shared
+
+ def initialize(group:, current_user:, shared:, params: {})
+ @params = params
+ @current_user = current_user
+ @shared = shared
+ @group = group
+ @full_path = File.join(@shared.export_path, ImportExport.group_filename)
+ end
+
+ def save
+ group_tree = serialize(@group, reader.group_tree)
+ tree_saver.save(group_tree, @shared.export_path, ImportExport.group_filename)
+
+ true
+ rescue => e
+ @shared.error(e)
+ false
+ end
+
+ private
+
+ def serialize(group, relations_tree)
+ group_tree = tree_saver.serialize(group, relations_tree)
+
+ group.children.each do |child|
+ group_tree['children'] ||= []
+ group_tree['children'] << serialize(child, relations_tree)
+ end
+
+ group_tree
+ rescue => e
+ @shared.error(e)
+ end
+
+ def reader
+ @reader ||= Gitlab::ImportExport::Reader.new(
+ shared: @shared,
+ config: Gitlab::ImportExport::Config.new(
+ config: Gitlab::ImportExport.group_config_file
+ ).to_h
+ )
+ end
+
+ def tree_saver
+ @tree_saver ||= LegacyRelationTreeSaver.new
+ end
+ end
+ end
+ end
+end
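The saver serializes the group, recursively appends every child group under a 'children' key, and writes the legacy JSON file via LegacyRelationTreeSaver. A hedged usage sketch:

# Export a group and its subgroups to <shared.export_path>/<ImportExport.group_filename>.
shared = Gitlab::ImportExport::Shared.new(group)

saver = Gitlab::ImportExport::Group::TreeSaver.new(
  group:        group,
  current_user: current_user,
  shared:       shared
)
saver.save        # => true on success
saver.full_path   # => path of the written group JSON file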
diff --git a/lib/gitlab/import_export/group_import_export.yml b/lib/gitlab/import_export/group_import_export.yml
deleted file mode 100644
index d4e0ff12373..00000000000
--- a/lib/gitlab/import_export/group_import_export.yml
+++ /dev/null
@@ -1,78 +0,0 @@
-# Model relationships to be included in the group import/export
-#
-# This list _must_ only contain relationships that are available to both FOSS and
-# Enterprise editions. EE specific relationships must be defined in the `ee` section further
-# down below.
-tree:
- group:
- - :milestones
- - :badges
- - labels:
- - :priorities
- - boards:
- - lists:
- - label:
- - :priorities
- - :board
- - members:
- - :user
-
-included_attributes:
- user:
- - :id
- - :email
- - :username
- author:
- - :name
-
-excluded_attributes:
- group:
- - :id
- - :owner_id
- - :parent_id
- - :created_at
- - :updated_at
- - :runners_token
- - :runners_token_encrypted
- - :saml_discovery_token
- - :visibility_level
-
-methods:
- labels:
- - :type
- label:
- - :type
- badges:
- - :type
- notes:
- - :type
- events:
- - :action
- lists:
- - :list_type
-
-preloads:
-
-# EE specific relationships and settings to include. All of this will be merged
-# into the previous structures if EE is used.
-ee:
- tree:
- group:
- - epics:
- - :parent
- - :award_emoji
- - events:
- - :push_event_payload
- - notes:
- - :author
- - :award_emoji
- - events:
- - :push_event_payload
- - boards:
- - :board_assignee
- - labels:
- - :priorities
- - lists:
- - milestone:
- - events:
- - :push_event_payload
diff --git a/lib/gitlab/import_export/group_object_builder.rb b/lib/gitlab/import_export/group_object_builder.rb
deleted file mode 100644
index 9796bfa07d4..00000000000
--- a/lib/gitlab/import_export/group_object_builder.rb
+++ /dev/null
@@ -1,55 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module ImportExport
- # Given a class, it finds or creates a new object at group level.
- #
- # Example:
- # `GroupObjectBuilder.build(Label, label_attributes)`
- # finds or initializes a label with the given attributes.
- class GroupObjectBuilder < BaseObjectBuilder
- def self.build(*args)
- Group.transaction do
- super
- end
- end
-
- def initialize(klass, attributes)
- super
-
- @group = @attributes['group']
-
- update_description
- end
-
- private
-
- attr_reader :group
-
- # Convert description empty string to nil
- # due to existing object being saved with description: nil
- # Which makes object lookup to fail since nil != ''
- def update_description
- attributes['description'] = nil if attributes['description'] == ''
- end
-
- def where_clauses
- [
- where_clause_base,
- where_clause_for_title,
- where_clause_for_description,
- where_clause_for_created_at
- ].compact
- end
-
- # Returns Arel clause `"{table_name}"."group_id" = {group.id}`
- def where_clause_base
- table[:group_id].in(group_and_ancestor_ids)
- end
-
- def group_and_ancestor_ids
- group.ancestors.map(&:id) << group.id
- end
- end
- end
-end
diff --git a/lib/gitlab/import_export/group_project_object_builder.rb b/lib/gitlab/import_export/group_project_object_builder.rb
deleted file mode 100644
index 9e8f9d11393..00000000000
--- a/lib/gitlab/import_export/group_project_object_builder.rb
+++ /dev/null
@@ -1,117 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module ImportExport
- # Given a class, it finds or creates a new object
- # (initializes in the case of Label) at group or project level.
- # If it does not exist in the group, it creates it at project level.
- #
- # Example:
- # `GroupProjectObjectBuilder.build(Label, label_attributes)`
- # finds or initializes a label with the given attributes.
- #
- # It also adds some logic around Group Labels/Milestones for edge cases.
- class GroupProjectObjectBuilder < BaseObjectBuilder
- def self.build(*args)
- Project.transaction do
- super
- end
- end
-
- def initialize(klass, attributes)
- super
-
- @group = @attributes['group']
- @project = @attributes['project']
- end
-
- def find
- return if epic? && group.nil?
-
- super
- end
-
- private
-
- attr_reader :group, :project
-
- def where_clauses
- [
- where_clause_base,
- where_clause_for_title,
- where_clause_for_klass
- ].compact
- end
-
- # Returns Arel clause `"{table_name}"."project_id" = {project.id}` if project is present
- # For example: merge_request has :target_project_id, and we are searching by :iid
- # or, if group is present:
- # `"{table_name}"."project_id" = {project.id} OR "{table_name}"."group_id" = {group.id}`
- def where_clause_base
- [].tap do |clauses|
- clauses << table[:project_id].eq(project.id) if project
- clauses << table[:group_id].in(group.self_and_ancestors_ids) if group
- end.reduce(:or)
- end
-
- # Returns Arel clause for a particular model or `nil`.
- def where_clause_for_klass
- attrs_to_arel(attributes.slice('iid')) if merge_request?
- end
-
- def prepare_attributes
- attributes.dup.tap do |atts|
- atts.delete('group') unless epic?
-
- if label?
- atts['type'] = 'ProjectLabel' # Always create project labels
- elsif milestone?
- if atts['group_id'] # Transform new group milestones into project ones
- atts['iid'] = nil
- atts.delete('group_id')
- else
- claim_iid
- end
- end
-
- atts['importing'] = true if klass.ancestors.include?(Importable)
- end
- end
-
- def label?
- klass == Label
- end
-
- def milestone?
- klass == Milestone
- end
-
- def merge_request?
- klass == MergeRequest
- end
-
- def epic?
- klass == Epic
- end
-
- # If an existing group milestone used the IID
- # claim the IID back and set the group milestone to use one available
- # This is necessary to fix situations like the following:
- # - Importing into a user namespace project with exported group milestones
- # where the IID of the Group milestone could conflict with a project one.
- def claim_iid
- # The milestone has to be a group milestone, as it's the only case where
- # we set the IID as the maximum. The rest of them are fixed.
- milestone = project.milestones.find_by(iid: attributes['iid'])
-
- return unless milestone
-
- milestone.iid = nil
- milestone.ensure_project_iid!
- milestone.save!
- end
- end
- end
-end
-
-Gitlab::ImportExport::GroupProjectObjectBuilder.prepend_if_ee('EE::Gitlab::ImportExport::GroupProjectObjectBuilder')
diff --git a/lib/gitlab/import_export/group_relation_factory.rb b/lib/gitlab/import_export/group_relation_factory.rb
deleted file mode 100644
index e3597af44d2..00000000000
--- a/lib/gitlab/import_export/group_relation_factory.rb
+++ /dev/null
@@ -1,40 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module ImportExport
- class GroupRelationFactory < BaseRelationFactory
- OVERRIDES = {
- labels: :group_labels,
- priorities: :label_priorities,
- label: :group_label,
- parent: :epic
- }.freeze
-
- EXISTING_OBJECT_RELATIONS = %i[
- epic
- epics
- milestone
- milestones
- label
- labels
- group_label
- group_labels
- ].freeze
-
- private
-
- def setup_models
- setup_note if @relation_name == :notes
-
- update_group_references
- end
-
- def update_group_references
- return unless self.class.existing_object_relations.include?(@relation_name)
- return unless @relation_hash['group_id']
-
- @relation_hash['group_id'] = @importable.id
- end
- end
- end
-end
diff --git a/lib/gitlab/import_export/group_tree_restorer.rb b/lib/gitlab/import_export/group_tree_restorer.rb
deleted file mode 100644
index 2f42843ed6c..00000000000
--- a/lib/gitlab/import_export/group_tree_restorer.rb
+++ /dev/null
@@ -1,116 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module ImportExport
- class GroupTreeRestorer
- attr_reader :user
- attr_reader :shared
- attr_reader :group
-
- def initialize(user:, shared:, group:, group_hash:)
- @path = File.join(shared.export_path, 'group.json')
- @user = user
- @shared = shared
- @group = group
- @group_hash = group_hash
- end
-
- def restore
- @tree_hash = @group_hash || read_tree_hash
- @group_members = @tree_hash.delete('members')
- @children = @tree_hash.delete('children')
-
- if members_mapper.map && restorer.restore
- @children&.each do |group_hash|
- group = create_group(group_hash: group_hash, parent_group: @group)
- shared = Gitlab::ImportExport::Shared.new(group)
-
- self.class.new(
- user: @user,
- shared: shared,
- group: group,
- group_hash: group_hash
- ).restore
- end
- end
-
- return false if @shared.errors.any?
-
- true
- rescue => e
- @shared.error(e)
- false
- end
-
- private
-
- def read_tree_hash
- json = IO.read(@path)
- ActiveSupport::JSON.decode(json)
- rescue => e
- @shared.logger.error(
- group_id: @group.id,
- group_name: @group.name,
- message: "Import/Export error: #{e.message}"
- )
-
- raise Gitlab::ImportExport::Error.new('Incorrect JSON format')
- end
-
- def restorer
- @relation_tree_restorer ||= RelationTreeRestorer.new(
- user: @user,
- shared: @shared,
- importable: @group,
- tree_hash: @tree_hash.except('name', 'path'),
- members_mapper: members_mapper,
- object_builder: object_builder,
- relation_factory: relation_factory,
- reader: reader
- )
- end
-
- def create_group(group_hash:, parent_group:)
- group_params = {
- name: group_hash['name'],
- path: group_hash['path'],
- parent_id: parent_group&.id,
- visibility_level: sub_group_visibility_level(group_hash, parent_group)
- }
-
- ::Groups::CreateService.new(@user, group_params).execute
- end
-
- def sub_group_visibility_level(group_hash, parent_group)
- original_visibility_level = group_hash['visibility_level'] || Gitlab::VisibilityLevel::PRIVATE
-
- if parent_group && parent_group.visibility_level < original_visibility_level
- Gitlab::VisibilityLevel.closest_allowed_level(parent_group.visibility_level)
- else
- original_visibility_level
- end
- end
-
- def members_mapper
- @members_mapper ||= Gitlab::ImportExport::MembersMapper.new(exported_members: @group_members, user: @user, importable: @group)
- end
-
- def relation_factory
- Gitlab::ImportExport::GroupRelationFactory
- end
-
- def object_builder
- Gitlab::ImportExport::GroupObjectBuilder
- end
-
- def reader
- @reader ||= Gitlab::ImportExport::Reader.new(
- shared: @shared,
- config: Gitlab::ImportExport::Config.new(
- config: Gitlab::ImportExport.group_config_file
- ).to_h
- )
- end
- end
- end
-end
diff --git a/lib/gitlab/import_export/group_tree_saver.rb b/lib/gitlab/import_export/group_tree_saver.rb
deleted file mode 100644
index 2effcd01e30..00000000000
--- a/lib/gitlab/import_export/group_tree_saver.rb
+++ /dev/null
@@ -1,55 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module ImportExport
- class GroupTreeSaver
- attr_reader :full_path, :shared
-
- def initialize(group:, current_user:, shared:, params: {})
- @params = params
- @current_user = current_user
- @shared = shared
- @group = group
- @full_path = File.join(@shared.export_path, ImportExport.group_filename)
- end
-
- def save
- group_tree = serialize(@group, reader.group_tree)
- tree_saver.save(group_tree, @shared.export_path, ImportExport.group_filename)
-
- true
- rescue => e
- @shared.error(e)
- false
- end
-
- private
-
- def serialize(group, relations_tree)
- group_tree = tree_saver.serialize(group, relations_tree)
-
- group.children.each do |child|
- group_tree['children'] ||= []
- group_tree['children'] << serialize(child, relations_tree)
- end
-
- group_tree
- rescue => e
- @shared.error(e)
- end
-
- def reader
- @reader ||= Gitlab::ImportExport::Reader.new(
- shared: @shared,
- config: Gitlab::ImportExport::Config.new(
- config: Gitlab::ImportExport.group_config_file
- ).to_h
- )
- end
-
- def tree_saver
- @tree_saver ||= RelationTreeSaver.new
- end
- end
- end
-end
diff --git a/lib/gitlab/import_export/import_export.yml b/lib/gitlab/import_export/import_export.yml
deleted file mode 100644
index e55ad898263..00000000000
--- a/lib/gitlab/import_export/import_export.yml
+++ /dev/null
@@ -1,379 +0,0 @@
-# Model relationships to be included in the project import/export
-#
-# This list _must_ only contain relationships that are available to both CE and
-# EE. EE specific relationships must be defined in the `ee` section further
-# down below.
-tree:
- project:
- - labels:
- - :priorities
- - milestones:
- - events:
- - :push_event_payload
- - issues:
- - events:
- - :push_event_payload
- - :timelogs
- - notes:
- - :author
- - :award_emoji
- - events:
- - :push_event_payload
- - label_links:
- - label:
- - :priorities
- - milestone:
- - events:
- - :push_event_payload
- - resource_label_events:
- - label:
- - :priorities
- - :issue_assignees
- - :zoom_meetings
- - :sentry_issue
- - :award_emoji
- - snippets:
- - :award_emoji
- - notes:
- - :author
- - :award_emoji
- - releases:
- - :links
- - project_members:
- - :user
- - merge_requests:
- - :metrics
- - :award_emoji
- - notes:
- - :author
- - :award_emoji
- - events:
- - :push_event_payload
- - :suggestions
- - merge_request_diff:
- - :merge_request_diff_commits
- - :merge_request_diff_files
- - events:
- - :push_event_payload
- - :timelogs
- - label_links:
- - label:
- - :priorities
- - milestone:
- - events:
- - :push_event_payload
- - resource_label_events:
- - label:
- - :priorities
- - ci_pipelines:
- - notes:
- - :author
- - events:
- - :push_event_payload
- - stages:
- - :statuses
- - :external_pull_request
- - :merge_request
- - :external_pull_requests
- - :auto_devops
- - :triggers
- - :pipeline_schedules
- - :container_expiration_policy
- - :services
- - protected_branches:
- - :merge_access_levels
- - :push_access_levels
- - protected_tags:
- - :create_access_levels
- - :project_feature
- - :custom_attributes
- - :prometheus_metrics
- - :project_badges
- - :ci_cd_settings
- - :error_tracking_setting
- - :metrics_setting
- - boards:
- - lists:
- - label:
- - :priorities
- group_members:
- - :user
-
-# Only include the following attributes for the models specified.
-included_attributes:
- user:
- - :id
- - :email
- - :username
- author:
- - :name
- ci_cd_settings:
- - :group_runners_enabled
-
-# Do not include the following attributes for the models specified.
-excluded_attributes:
- project:
- - :name
- - :path
- - :namespace_id
- - :creator_id
- - :pool_repository_id
- - :import_url
- - :import_status
- - :avatar
- - :import_type
- - :import_source
- - :mirror
- - :runners_token
- - :runners_token_encrypted
- - :repository_storage
- - :repository_read_only
- - :lfs_enabled
- - :created_at
- - :updated_at
- - :id
- - :star_count
- - :last_activity_at
- - :last_repository_updated_at
- - :last_repository_check_at
- - :storage_version
- - :remote_mirror_available_overridden
- - :description_html
- - :repository_languages
- - :bfg_object_map
- - :detected_repository_languages
- - :tag_list
- - :mirror_user_id
- - :mirror_trigger_builds
- - :only_mirror_protected_branches
- - :pull_mirror_available_overridden
- - :pull_mirror_branch_prefix
- - :mirror_overwrites_diverged_branches
- - :packages_enabled
- - :mirror_last_update_at
- - :mirror_last_successful_update_at
- - :emails_disabled
- - :max_pages_size
- - :max_artifacts_size
- - :marked_for_deletion_at
- - :marked_for_deletion_by_user_id
- namespaces:
- - :runners_token
- - :runners_token_encrypted
- project_import_state:
- - :last_error
- - :jid
- - :last_update_at
- - :last_successful_update_at
- prometheus_metrics:
- - :common
- - :identifier
- snippets:
- - :expired_at
- - :secret
- - :encrypted_secret_token
- - :encrypted_secret_token_iv
- - :repository_storage
- merge_request_diff:
- - :external_diff
- - :stored_externally
- - :external_diff_store
- - :merge_request_id
- merge_request_diff_commits:
- - :merge_request_diff_id
- merge_request_diff_files:
- - :diff
- - :external_diff_offset
- - :external_diff_size
- - :merge_request_diff_id
- issues:
- - :milestone_id
- - :moved_to_id
- - :state_id
- - :duplicated_to_id
- - :promoted_to_epic_id
- merge_request:
- - :milestone_id
- - :ref_fetched
- - :merge_jid
- - :rebase_jid
- - :latest_merge_request_diff_id
- - :head_pipeline_id
- - :state_id
- merge_requests:
- - :milestone_id
- - :ref_fetched
- - :merge_jid
- - :rebase_jid
- - :latest_merge_request_diff_id
- - :head_pipeline_id
- - :state_id
- award_emoji:
- - :awardable_id
- statuses:
- - :trace
- - :token
- - :token_encrypted
- - :when
- - :artifacts_file
- - :artifacts_metadata
- - :artifacts_file_store
- - :artifacts_metadata_store
- - :artifacts_size
- - :commands
- - :runner_id
- - :trigger_request_id
- - :erased_by_id
- - :auto_canceled_by_id
- - :stage_id
- - :upstream_pipeline_id
- - :resource_group_id
- - :waiting_for_resource_at
- - :processed
- sentry_issue:
- - :issue_id
- push_event_payload:
- - :event_id
- project_badges:
- - :group_id
- resource_label_events:
- - :reference
- - :reference_html
- - :epic_id
- - :issue_id
- - :merge_request_id
- - :label_id
- runners:
- - :token
- - :token_encrypted
- services:
- - :template
- error_tracking_setting:
- - :encrypted_token
- - :encrypted_token_iv
- - :enabled
- service_desk_setting:
- - :outgoing_name
- priorities:
- - :label_id
- events:
- - :target_id
- timelogs:
- - :issue_id
- - :merge_request_id
- notes:
- - :noteable_id
- - :review_id
- label_links:
- - :label_id
- - :target_id
- issue_assignees:
- - :issue_id
- zoom_meetings:
- - :issue_id
- design:
- - :issue_id
- designs:
- - :issue_id
- design_versions:
- - :issue_id
- actions:
- - :design_id
- - :version_id
- links:
- - :release_id
- project_members:
- - :source_id
- metrics:
- - :merge_request_id
- - :pipeline_id
- suggestions:
- - :note_id
- ci_pipelines:
- - :auto_canceled_by_id
- - :pipeline_schedule_id
- - :merge_request_id
- - :external_pull_request_id
- stages:
- - :pipeline_id
- merge_access_levels:
- - :protected_branch_id
- push_access_levels:
- - :protected_branch_id
- unprotect_access_levels:
- - :protected_branch_id
- create_access_levels:
- - :protected_tag_id
- deploy_access_levels:
- - :protected_environment_id
- boards:
- - :milestone_id
- lists:
- - :board_id
- - :label_id
- - :milestone_id
- epic:
- - :start_date_sourcing_milestone_id
- - :due_date_sourcing_milestone_id
- - :parent_id
- - :state_id
- - :start_date_sourcing_epic_id
- - :due_date_sourcing_epic_id
-methods:
- notes:
- - :type
- labels:
- - :type
- label:
- - :type
- statuses:
- - :type
- services:
- - :type
- merge_request_diff_files:
- - :utf8_diff
- merge_requests:
- - :diff_head_sha
- - :source_branch_sha
- - :target_branch_sha
- events:
- - :action
- push_event_payload:
- - :action
- project_badges:
- - :type
- lists:
- - :list_type
- ci_pipelines:
- - :notes
-
-preloads:
- statuses:
- # TODO: We cannot preload tags, as they are not part of `GenericCommitStatus`
- # tags: # needed by tag_list
- project: # deprecated: needed by coverage_regex of Ci::Build
- merge_requests:
- source_project: # needed by source_branch_sha and diff_head_sha
- target_project: # needed by target_branch_sha
- assignees: # needed by assigne_id that is implemented by DeprecatedAssignee
-
-# EE specific relationships and settings to include. All of this will be merged
-# into the previous structures if EE is used.
-ee:
- tree:
- project:
- - issues:
- - designs:
- - notes:
- - :author
- - events:
- - :push_event_payload
- - design_versions:
- - actions:
- - :design # Duplicate export of issues.designs in order to link the record to both Issue and Action
- - :epic
- - protected_branches:
- - :unprotect_access_levels
- - protected_environments:
- - :deploy_access_levels
- - :service_desk_setting
diff --git a/lib/gitlab/import_export/importer.rb b/lib/gitlab/import_export/importer.rb
index a6463ed678c..4b761eb86ae 100644
--- a/lib/gitlab/import_export/importer.rb
+++ b/lib/gitlab/import_export/importer.rb
@@ -35,7 +35,7 @@ module Gitlab
def restorers
[repo_restorer, wiki_restorer, project_tree, avatar_restorer,
- uploads_restorer, lfs_restorer, statistics_restorer]
+ uploads_restorer, lfs_restorer, statistics_restorer, snippets_repo_restorer]
end
def import_file
@@ -49,7 +49,7 @@ module Gitlab
end
def project_tree
- @project_tree ||= Gitlab::ImportExport::ProjectTreeRestorer.new(user: current_user,
+ @project_tree ||= Gitlab::ImportExport::Project::TreeRestorer.new(user: current_user,
shared: shared,
project: project)
end
@@ -79,6 +79,12 @@ module Gitlab
Gitlab::ImportExport::LfsRestorer.new(project: project, shared: shared)
end
+ def snippets_repo_restorer
+ Gitlab::ImportExport::SnippetsRepoRestorer.new(project: project,
+ shared: shared,
+ user: current_user)
+ end
+
def statistics_restorer
Gitlab::ImportExport::StatisticsRestorer.new(project: project, shared: shared)
end
@@ -125,7 +131,7 @@ module Gitlab
def project_to_overwrite
strong_memoize(:project_to_overwrite) do
- Project.find_by_full_path("#{project.namespace.full_path}/#{original_path}")
+ ::Project.find_by_full_path("#{project.namespace.full_path}/#{original_path}")
end
end
end
diff --git a/lib/gitlab/import_export/json/legacy_reader.rb b/lib/gitlab/import_export/json/legacy_reader.rb
new file mode 100644
index 00000000000..477e41ae3eb
--- /dev/null
+++ b/lib/gitlab/import_export/json/legacy_reader.rb
@@ -0,0 +1,104 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ImportExport
+ module JSON
+ class LegacyReader
+ class File < LegacyReader
+ def initialize(path, relation_names)
+ @path = path
+ super(relation_names)
+ end
+
+ def valid?
+ ::File.exist?(@path)
+ end
+
+ private
+
+ def tree_hash
+ @tree_hash ||= read_hash
+ end
+
+ def read_hash
+ ActiveSupport::JSON.decode(IO.read(@path))
+ rescue => e
+ Gitlab::ErrorTracking.log_exception(e)
+ raise Gitlab::ImportExport::Error.new('Incorrect JSON format')
+ end
+ end
+
+ class User < LegacyReader
+ def initialize(tree_hash, relation_names)
+ @tree_hash = tree_hash
+ super(relation_names)
+ end
+
+ def valid?
+ @tree_hash.present?
+ end
+
+ protected
+
+ attr_reader :tree_hash
+ end
+
+ def initialize(relation_names)
+ @relation_names = relation_names.map(&:to_s)
+ end
+
+ def valid?
+ raise NotImplementedError
+ end
+
+ def legacy?
+ true
+ end
+
+ def root_attributes(excluded_attributes = [])
+ attributes.except(*excluded_attributes.map(&:to_s))
+ end
+
+ def consume_relation(key)
+ value = relations.delete(key)
+
+ return value unless block_given?
+
+ return if value.nil?
+
+ if value.is_a?(Array)
+ value.each.with_index do |item, idx|
+ yield(item, idx)
+ end
+ else
+ yield(value, 0)
+ end
+ end
+
+ def consume_attribute(key)
+ attributes.delete(key)
+ end
+
+ def sort_ci_pipelines_by_id
+ relations['ci_pipelines']&.sort_by! { |hash| hash['id'] }
+ end
+
+ private
+
+ attr_reader :relation_names
+
+ def tree_hash
+ raise NotImplementedError
+ end
+
+ def attributes
+ @attributes ||= tree_hash.slice!(*relation_names)
+ end
+
+ def relations
+ @relations ||= tree_hash.extract!(*relation_names)
+ end
+ end
+ end
+ end
+end
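consume_relation deletes a relation from the in-memory tree and either returns it or yields each element with its index, so each relation can only be read once; root_attributes returns whatever is left at the top level. A hedged sketch using the File-backed reader (path and relation names are illustrative):

reader = Gitlab::ImportExport::JSON::LegacyReader::File.new(
  '/path/to/group.json',
  %w[members children labels]          # relation names split off from root attributes
)

reader.consume_relation('members') do |member, index|
  # member is one exported member hash; index is its position in the array
end

reader.root_attributes(%w[name path])  # top-level attributes minus name and path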
diff --git a/lib/gitlab/import_export/json/legacy_writer.rb b/lib/gitlab/import_export/json/legacy_writer.rb
new file mode 100644
index 00000000000..c935e360a65
--- /dev/null
+++ b/lib/gitlab/import_export/json/legacy_writer.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ImportExport
+ module JSON
+ class LegacyWriter
+ include Gitlab::ImportExport::CommandLineUtil
+
+ attr_reader :path
+
+ def initialize(path)
+ @path = path
+ @last_array = nil
+ @keys = Set.new
+
+ mkdir_p(File.dirname(@path))
+ file.write('{}')
+ end
+
+ def close
+ @file&.close
+ @file = nil
+ end
+
+ def set(hash)
+ hash.each do |key, value|
+ write(key, value)
+ end
+ end
+
+ def write(key, value)
+ raise ArgumentError, "key '#{key}' already written" if @keys.include?(key)
+
+ # rewind by one byte, to overwrite '}'
+ file.pos = file.size - 1
+
+ file.write(',') if @keys.any?
+ file.write(key.to_json)
+ file.write(':')
+ file.write(value.to_json)
+ file.write('}')
+
+ @keys.add(key)
+ @last_array = nil
+ @last_array_count = nil
+ end
+
+ def append(key, value)
+ unless @last_array == key
+ write(key, [])
+
+ @last_array = key
+ @last_array_count = 0
+ end
+
+ # rewind by two bytes, to overwrite ']}'
+ file.pos = file.size - 2
+
+ file.write(',') if @last_array_count > 0
+ file.write(value.to_json)
+ file.write(']}')
+ @last_array_count += 1
+ end
+
+ private
+
+ def file
+ @file ||= File.open(@path, "wb")
+ end
+ end
+ end
+ end
+end
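The writer keeps the file valid JSON at every step: it starts from '{}' and, for each write or append, rewinds over the trailing '}' (or ']}' for the current array) before adding the new fragment. A hedged sketch of the resulting stream:

writer = Gitlab::ImportExport::JSON::LegacyWriter.new('/tmp/project.json')

writer.write('description', 'Exported project')  # {"description":"Exported project"}
writer.append('issues', { 'iid' => 1 })          # ..."issues":[{"iid":1}]}
writer.append('issues', { 'iid' => 2 })          # ..."issues":[{"iid":1},{"iid":2}]}
writer.close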
diff --git a/lib/gitlab/import_export/json/streaming_serializer.rb b/lib/gitlab/import_export/json/streaming_serializer.rb
new file mode 100644
index 00000000000..d053bf16166
--- /dev/null
+++ b/lib/gitlab/import_export/json/streaming_serializer.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ImportExport
+ module JSON
+ class StreamingSerializer
+ include Gitlab::ImportExport::CommandLineUtil
+
+ BATCH_SIZE = 100
+
+ class Raw < String
+ def to_json(*_args)
+ to_s
+ end
+ end
+
+ def initialize(exportable, relations_schema, json_writer)
+ @exportable = exportable
+ @relations_schema = relations_schema
+ @json_writer = json_writer
+ end
+
+ def execute
+ serialize_root
+
+ includes.each do |relation_definition|
+ serialize_relation(relation_definition)
+ end
+ end
+
+ private
+
+ attr_reader :json_writer, :relations_schema, :exportable
+
+ def serialize_root
+ attributes = exportable.as_json(
+ relations_schema.merge(include: nil, preloads: nil))
+ json_writer.set(attributes)
+ end
+
+ def serialize_relation(definition)
+ raise ArgumentError, 'definition needs to be a Hash' unless definition.is_a?(Hash)
+ raise ArgumentError, 'definition needs to have exactly one Hash element' unless definition.one?
+
+ key, options = definition.first
+
+ record = exportable.public_send(key) # rubocop: disable GitlabSecurity/PublicSend
+ if record.is_a?(ActiveRecord::Relation)
+ serialize_many_relations(key, record, options)
+ else
+ serialize_single_relation(key, record, options)
+ end
+ end
+
+ def serialize_many_relations(key, records, options)
+ key_preloads = preloads&.dig(key)
+ records = records.preload(key_preloads) if key_preloads
+
+ records.find_each(batch_size: BATCH_SIZE) do |record|
+ json = Raw.new(record.to_json(options))
+
+ json_writer.append(key, json)
+ end
+ end
+
+ def serialize_single_relation(key, record, options)
+ json = Raw.new(record.to_json(options))
+
+ json_writer.write(key, json)
+ end
+
+ def includes
+ relations_schema[:include]
+ end
+
+ def preloads
+ relations_schema[:preload]
+ end
+ end
+ end
+ end
+end
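A hedged sketch of driving the serializer directly; the relations schema below is a deliberately
tiny stand-in for what `Reader#project_tree` produces, and `project` is assumed to be an
already-loaded Project record:

  json_writer = Gitlab::ImportExport::JSON::LegacyWriter.new('/tmp/demo/project.json')
  relations_schema = {
    include: [{ issues: { include: [] } }],   # each entry must be a one-element Hash
    preload: { issues: nil }
  }

  begin
    Gitlab::ImportExport::JSON::StreamingSerializer
      .new(project, relations_schema, json_writer)
      .execute   # root attributes go through `set`; issues are appended in batches of 100
  ensure
    json_writer.close
  end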
diff --git a/lib/gitlab/import_export/legacy_relation_tree_saver.rb b/lib/gitlab/import_export/legacy_relation_tree_saver.rb
new file mode 100644
index 00000000000..fe3e64358e5
--- /dev/null
+++ b/lib/gitlab/import_export/legacy_relation_tree_saver.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ImportExport
+ class LegacyRelationTreeSaver
+ include Gitlab::ImportExport::CommandLineUtil
+
+ def serialize(exportable, relations_tree)
+ if Feature.enabled?(:export_fast_serialize, default_enabled: true)
+ Gitlab::ImportExport::FastHashSerializer
+ .new(exportable, relations_tree)
+ .execute
+ else
+ exportable.as_json(relations_tree)
+ end
+ end
+
+ def save(tree, dir_path, filename)
+ mkdir_p(dir_path)
+
+ tree_json = ::JSON.generate(tree)
+
+ File.write(File.join(dir_path, filename), tree_json)
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/import_export/members_mapper.rb b/lib/gitlab/import_export/members_mapper.rb
index 2a70344374b..fd76252eb36 100644
--- a/lib/gitlab/import_export/members_mapper.rb
+++ b/lib/gitlab/import_export/members_mapper.rb
@@ -51,7 +51,7 @@ module Gitlab
@importable.members.destroy_all # rubocop: disable DestroyAll
- relation_class.create!(user: @user, access_level: relation_class::MAINTAINER, source_id: @importable.id, importing: true)
+ relation_class.create!(user: @user, access_level: highest_access_level, source_id: @importable.id, importing: true)
rescue => e
raise e, "Error adding importer user to #{@importable.class} members. #{e.message}"
end
@@ -59,7 +59,7 @@ module Gitlab
def user_already_member?
member = @importable.members&.first
- member&.user == @user && member.access_level >= relation_class::MAINTAINER
+ member&.user == @user && member.access_level >= highest_access_level
end
def add_team_member(member, existing_user = nil)
@@ -72,7 +72,7 @@ module Gitlab
parsed_hash(member).merge(
'source_id' => @importable.id,
'importing' => true,
- 'access_level' => [member['access_level'], relation_class::MAINTAINER].min
+ 'access_level' => [member['access_level'], highest_access_level].min
).except('user_id')
end
@@ -91,12 +91,18 @@ module Gitlab
def relation_class
case @importable
- when Project
+ when ::Project
ProjectMember
- when Group
+ when ::Group
GroupMember
end
end
+
+ def highest_access_level
+ return relation_class::OWNER if relation_class == GroupMember
+
+ relation_class::MAINTAINER
+ end
end
end
end
diff --git a/lib/gitlab/import_export/project/base_task.rb b/lib/gitlab/import_export/project/base_task.rb
new file mode 100644
index 00000000000..6a7b24421c9
--- /dev/null
+++ b/lib/gitlab/import_export/project/base_task.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ImportExport
+ module Project
+ class BaseTask
+ include Gitlab::WithRequestStore
+
+ def initialize(opts, logger: Logger.new($stdout))
+ @project_path = opts.fetch(:project_path)
+ @file_path = opts.fetch(:file_path)
+ @namespace = Namespace.find_by_full_path(opts.fetch(:namespace_path))
+ @current_user = User.find_by_username(opts.fetch(:username))
+ @measurement_enabled = opts.fetch(:measurement_enabled)
+ @measurement = Gitlab::Utils::Measuring.new(logger: logger) if @measurement_enabled
+ @logger = logger
+ end
+
+ private
+
+ attr_reader :measurement, :project, :namespace, :current_user, :file_path, :project_path, :logger
+
+ def measurement_enabled?
+ @measurement_enabled
+ end
+
+ def success(message)
+ logger.info(message)
+
+ true
+ end
+
+ def error(message)
+ logger.error(message)
+
+ false
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/import_export/project/export_task.rb b/lib/gitlab/import_export/project/export_task.rb
new file mode 100644
index 00000000000..ec287380c48
--- /dev/null
+++ b/lib/gitlab/import_export/project/export_task.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ImportExport
+ module Project
+ class ExportTask < BaseTask
+ def initialize(*)
+ super
+
+ @project = namespace.projects.find_by_path(@project_path)
+ end
+
+ def export
+ return error("Project with path: #{project_path} was not found. Please provide correct project path") unless project
+ return error("Invalid file path: #{file_path}. Please provide correct file path") unless file_path_exists?
+
+ with_export do
+ ::Projects::ImportExport::ExportService.new(project, current_user)
+ .execute(Gitlab::ImportExport::AfterExportStrategies::MoveFileStrategy.new(archive_path: file_path))
+ end
+
+ success('Done!')
+ end
+
+ private
+
+ def file_path_exists?
+ directory = File.dirname(file_path)
+
+ Dir.exist?(directory)
+ end
+
+ def with_export
+ with_request_store do
+ ::Gitlab::GitalyClient.allow_n_plus_1_calls do
+ measurement_enabled? ? measurement.with_measuring { yield } : yield
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/import_export/project/import_export.yml b/lib/gitlab/import_export/project/import_export.yml
new file mode 100644
index 00000000000..aa6085de4f9
--- /dev/null
+++ b/lib/gitlab/import_export/project/import_export.yml
@@ -0,0 +1,387 @@
+# Model relationships to be included in the project import/export
+#
+# This list _must_ only contain relationships that are available to both CE and
+# EE. EE specific relationships must be defined in the `ee` section further
+# down below.
+tree:
+ project:
+ - labels:
+ - :priorities
+ - milestones:
+ - events:
+ - :push_event_payload
+ - issues:
+ - events:
+ - :push_event_payload
+ - :timelogs
+ - notes:
+ - :author
+ - :award_emoji
+ - events:
+ - :push_event_payload
+ - label_links:
+ - label:
+ - :priorities
+ - milestone:
+ - events:
+ - :push_event_payload
+ - resource_label_events:
+ - label:
+ - :priorities
+ - :issue_assignees
+ - :zoom_meetings
+ - :sentry_issue
+ - :award_emoji
+ - snippets:
+ - :award_emoji
+ - notes:
+ - :author
+ - :award_emoji
+ - releases:
+ - :links
+ - project_members:
+ - :user
+ - merge_requests:
+ - :metrics
+ - :award_emoji
+ - notes:
+ - :author
+ - :award_emoji
+ - events:
+ - :push_event_payload
+ - :suggestions
+ - merge_request_diff:
+ - :merge_request_diff_commits
+ - :merge_request_diff_files
+ - events:
+ - :push_event_payload
+ - :timelogs
+ - label_links:
+ - label:
+ - :priorities
+ - milestone:
+ - events:
+ - :push_event_payload
+ - resource_label_events:
+ - label:
+ - :priorities
+ - :external_pull_requests
+ - ci_pipelines:
+ - notes:
+ - :author
+ - events:
+ - :push_event_payload
+ - stages:
+ - :statuses
+ - :external_pull_request
+ - :merge_request
+ - :auto_devops
+ - :triggers
+ - :pipeline_schedules
+ - :container_expiration_policy
+ - :services
+ - protected_branches:
+ - :merge_access_levels
+ - :push_access_levels
+ - protected_tags:
+ - :create_access_levels
+ - :project_feature
+ - :custom_attributes
+ - :prometheus_metrics
+ - :project_badges
+ - :ci_cd_settings
+ - :error_tracking_setting
+ - :metrics_setting
+ - boards:
+ - lists:
+ - label:
+ - :priorities
+ group_members:
+ - :user
+
+# Only include the following attributes for the models specified.
+included_attributes:
+ user:
+ - :id
+ - :email
+ - :username
+ author:
+ - :name
+ ci_cd_settings:
+ - :group_runners_enabled
+
+# Do not include the following attributes for the models specified.
+excluded_attributes:
+ project:
+ - :name
+ - :path
+ - :namespace_id
+ - :creator_id
+ - :pool_repository_id
+ - :import_url
+ - :import_status
+ - :avatar
+ - :import_type
+ - :import_source
+ - :mirror
+ - :runners_token
+ - :runners_token_encrypted
+ - :repository_storage
+ - :repository_read_only
+ - :lfs_enabled
+ - :created_at
+ - :updated_at
+ - :id
+ - :star_count
+ - :last_activity_at
+ - :last_repository_updated_at
+ - :last_repository_check_at
+ - :storage_version
+ - :remote_mirror_available_overridden
+ - :description_html
+ - :repository_languages
+ - :bfg_object_map
+ - :detected_repository_languages
+ - :tag_list
+ - :mirror_user_id
+ - :mirror_trigger_builds
+ - :only_mirror_protected_branches
+ - :pull_mirror_available_overridden
+ - :pull_mirror_branch_prefix
+ - :mirror_overwrites_diverged_branches
+ - :packages_enabled
+ - :mirror_last_update_at
+ - :mirror_last_successful_update_at
+ - :emails_disabled
+ - :max_pages_size
+ - :max_artifacts_size
+ - :marked_for_deletion_at
+ - :marked_for_deletion_by_user_id
+ namespaces:
+ - :runners_token
+ - :runners_token_encrypted
+ project_import_state:
+ - :last_error
+ - :jid
+ - :last_update_at
+ - :last_successful_update_at
+ prometheus_metrics:
+ - :common
+ - :identifier
+ snippets:
+ - :expired_at
+ - :secret
+ - :encrypted_secret_token
+ - :encrypted_secret_token_iv
+ merge_request_diff:
+ - :external_diff
+ - :stored_externally
+ - :external_diff_store
+ - :merge_request_id
+ merge_request_diff_commits:
+ - :merge_request_diff_id
+ merge_request_diff_files:
+ - :diff
+ - :external_diff_offset
+ - :external_diff_size
+ - :merge_request_diff_id
+ issues:
+ - :milestone_id
+ - :moved_to_id
+ - :sent_notifications
+ - :state_id
+ - :duplicated_to_id
+ - :promoted_to_epic_id
+ merge_request:
+ - :milestone_id
+ - :ref_fetched
+ - :merge_jid
+ - :rebase_jid
+ - :latest_merge_request_diff_id
+ - :head_pipeline_id
+ - :state_id
+ merge_requests:
+ - :milestone_id
+ - :ref_fetched
+ - :merge_jid
+ - :rebase_jid
+ - :latest_merge_request_diff_id
+ - :head_pipeline_id
+ - :state_id
+ award_emoji:
+ - :awardable_id
+ statuses:
+ - :trace
+ - :token
+ - :token_encrypted
+ - :when
+ - :artifacts_file
+ - :artifacts_metadata
+ - :artifacts_file_store
+ - :artifacts_metadata_store
+ - :artifacts_size
+ - :commands
+ - :runner_id
+ - :trigger_request_id
+ - :erased_by_id
+ - :auto_canceled_by_id
+ - :stage_id
+ - :upstream_pipeline_id
+ - :resource_group_id
+ - :waiting_for_resource_at
+ - :processed
+ sentry_issue:
+ - :issue_id
+ push_event_payload:
+ - :event_id
+ project_badges:
+ - :group_id
+ resource_label_events:
+ - :reference
+ - :reference_html
+ - :epic_id
+ - :issue_id
+ - :merge_request_id
+ - :label_id
+ runners:
+ - :token
+ - :token_encrypted
+ services:
+ - :template
+ - :instance
+ error_tracking_setting:
+ - :encrypted_token
+ - :encrypted_token_iv
+ - :enabled
+ service_desk_setting:
+ - :outgoing_name
+ priorities:
+ - :label_id
+ events:
+ - :target_id
+ timelogs:
+ - :issue_id
+ - :merge_request_id
+ notes:
+ - :noteable_id
+ - :review_id
+ label_links:
+ - :label_id
+ - :target_id
+ issue_assignees:
+ - :issue_id
+ zoom_meetings:
+ - :issue_id
+ design:
+ - :issue_id
+ designs:
+ - :issue_id
+ design_versions:
+ - :issue_id
+ actions:
+ - :design_id
+ - :version_id
+ links:
+ - :release_id
+ project_members:
+ - :source_id
+ metrics:
+ - :merge_request_id
+ - :pipeline_id
+ suggestions:
+ - :note_id
+ ci_pipelines:
+ - :auto_canceled_by_id
+ - :pipeline_schedule_id
+ - :merge_request_id
+ - :external_pull_request_id
+ stages:
+ - :pipeline_id
+ merge_access_levels:
+ - :protected_branch_id
+ push_access_levels:
+ - :protected_branch_id
+ unprotect_access_levels:
+ - :protected_branch_id
+ create_access_levels:
+ - :protected_tag_id
+ deploy_access_levels:
+ - :protected_environment_id
+ boards:
+ - :milestone_id
+ lists:
+ - :board_id
+ - :label_id
+ - :milestone_id
+ epic:
+ - :start_date_sourcing_milestone_id
+ - :due_date_sourcing_milestone_id
+ - :parent_id
+ - :state_id
+ - :start_date_sourcing_epic_id
+ - :due_date_sourcing_epic_id
+ epic_issue:
+ - :epic_id
+ - :issue_id
+methods:
+ notes:
+ - :type
+ labels:
+ - :type
+ label:
+ - :type
+ statuses:
+ - :type
+ services:
+ - :type
+ merge_request_diff_files:
+ - :utf8_diff
+ merge_requests:
+ - :diff_head_sha
+ - :source_branch_sha
+ - :target_branch_sha
+ events:
+ - :action
+ push_event_payload:
+ - :action
+ project_badges:
+ - :type
+ lists:
+ - :list_type
+ ci_pipelines:
+ - :notes
+
+preloads:
+ statuses:
+ # TODO: We cannot preload tags, as they are not part of `GenericCommitStatus`
+ # tags: # needed by tag_list
+ project: # deprecated: needed by coverage_regex of Ci::Build
+ merge_requests:
+ source_project: # needed by source_branch_sha and diff_head_sha
+ target_project: # needed by target_branch_sha
+ assignees: # needed by assignee_id that is implemented by DeprecatedAssignee
+
+# EE specific relationships and settings to include. All of this will be merged
+# into the previous structures if EE is used.
+ee:
+ tree:
+ project:
+ - issues:
+ - designs:
+ - notes:
+ - :author
+ - events:
+ - :push_event_payload
+ - design_versions:
+ - actions:
+ - :design # Duplicate export of issues.designs in order to link the record to both Issue and Action
+ - epic_issue:
+ - :epic
+ - protected_branches:
+ - :unprotect_access_levels
+ - protected_environments:
+ - :deploy_access_levels
+ - :service_desk_setting
+ excluded_attributes:
+ actions:
+ - image_v432x230
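To make the link between this configuration and the code concrete, a hedged sketch of
inspecting the configured project tree (the `project_relation_names` helper is added to
`Gitlab::ImportExport::Reader` further down in this change; `shared` is assumed to be an
ImportExport::Shared instance):

  reader = Gitlab::ImportExport::Reader.new(shared: shared)
  reader.project_tree            # nested include/preload hash built from the `tree:` section above
  reader.project_relation_names  # top-level relation keys such as labels, milestones, issues, ...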
diff --git a/lib/gitlab/import_export/project/import_task.rb b/lib/gitlab/import_export/project/import_task.rb
new file mode 100644
index 00000000000..ae654ddbeaf
--- /dev/null
+++ b/lib/gitlab/import_export/project/import_task.rb
@@ -0,0 +1,110 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ImportExport
+ module Project
+ class ImportTask < BaseTask
+ def import
+ show_import_start_message
+
+ run_isolated_sidekiq_job
+
+ show_import_failures_count
+
+ return error(project.import_state.last_error) if project.import_state&.last_error
+ return error(project.errors.full_messages.to_sentence) if project.errors.any?
+
+ success('Done!')
+ end
+
+ private
+
+ # We want to ensure that all Sidekiq jobs are executed
+ # synchronously as part of this import task, so that
+ # expensive operations do not escape to the general
+ # Sidekiq clusters/nodes.
+ def with_isolated_sidekiq_job
+ Sidekiq::Testing.fake! do
+ with_request_store do
+ # If you are attempting to import a large project into a development environment,
+ # you may see Gitaly throw an error about too many calls or invocations.
+ # This is due to an n+1 calls limit being set for development setups (not enforced in production)
+ # https://gitlab.com/gitlab-org/gitlab/-/merge_requests/24475#note_283090635
+ # For development setups, this code-path will be excluded from n+1 detection.
+ ::Gitlab::GitalyClient.allow_n_plus_1_calls do
+ measurement_enabled? ? measurement.with_measuring { yield } : yield
+ end
+ end
+
+ true
+ end
+ end
+
+ def run_isolated_sidekiq_job
+ with_isolated_sidekiq_job do
+ @project = create_project
+
+ execute_sidekiq_job
+ end
+ end
+
+ def create_project
+ # We are disabling ObjectStorage for `import`
+ # as it is too slow to handle big archives:
+ # 1. DB transaction timeouts on upload
+ # 2. Download of archive before unpacking
+ disable_upload_object_storage do
+ service = Projects::GitlabProjectsImportService.new(
+ current_user,
+ {
+ namespace_id: namespace.id,
+ path: project_path,
+ file: File.open(file_path)
+ }
+ )
+
+ service.execute
+ end
+ end
+
+ def execute_sidekiq_job
+ Sidekiq::Worker.drain_all
+ end
+
+ def disable_upload_object_storage
+ overwrite_uploads_setting('background_upload', false) do
+ overwrite_uploads_setting('direct_upload', false) do
+ yield
+ end
+ end
+ end
+
+ def overwrite_uploads_setting(key, value)
+ old_value = Settings.uploads.object_store[key]
+ Settings.uploads.object_store[key] = value
+
+ yield
+
+ ensure
+ Settings.uploads.object_store[key] = old_value
+ end
+
+ def full_path
+ "#{namespace.full_path}/#{project_path}"
+ end
+
+ def show_import_start_message
+ logger.info "Importing GitLab export: #{file_path} into GitLab" \
+ " #{full_path}" \
+ " as #{current_user.name}"
+ end
+
+ def show_import_failures_count
+ return unless project.import_failures.exists?
+
+ logger.info "Total number of not imported relations: #{project.import_failures.count}"
+ end
+ end
+ end
+ end
+end
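The `overwrite_uploads_setting` helper above leans on `ensure` to restore the previous value
even when the block raises; a minimal standalone sketch of the same pattern (the settings hash
here is a stand-in for `Settings.uploads.object_store`):

  def with_overridden_setting(settings, key, value)
    old_value = settings[key]
    settings[key] = value
    yield
  ensure
    settings[key] = old_value
  end

  settings = { 'direct_upload' => true }
  with_overridden_setting(settings, 'direct_upload', false) do
    # uploads bypass object storage inside this block, even if it raises
  end
  settings['direct_upload']   # => true again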
diff --git a/lib/gitlab/import_export/project/legacy_tree_saver.rb b/lib/gitlab/import_export/project/legacy_tree_saver.rb
new file mode 100644
index 00000000000..2ed98f52c58
--- /dev/null
+++ b/lib/gitlab/import_export/project/legacy_tree_saver.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ImportExport
+ module Project
+ class LegacyTreeSaver
+ attr_reader :full_path
+
+ def initialize(project:, current_user:, shared:, params: {})
+ @params = params
+ @project = project
+ @current_user = current_user
+ @shared = shared
+ @full_path = File.join(@shared.export_path, ImportExport.project_filename)
+ end
+
+ def save
+ project_tree = tree_saver.serialize(@project, reader.project_tree)
+ fix_project_tree(project_tree)
+ tree_saver.save(project_tree, @shared.export_path, ImportExport.project_filename)
+
+ true
+ rescue => e
+ @shared.error(e)
+ false
+ end
+
+ private
+
+ # Be aware that the resulting hash needs to be a pure hash and
+ # must not include any AR objects anymore, only objects that respond to `.to_json`
+ def fix_project_tree(project_tree)
+ if @params[:description].present?
+ project_tree['description'] = @params[:description]
+ end
+
+ project_tree['project_members'] += group_members_array
+ end
+
+ def reader
+ @reader ||= Gitlab::ImportExport::Reader.new(shared: @shared)
+ end
+
+ def group_members_array
+ group_members.as_json(reader.group_members_tree).each do |group_member|
+ group_member['source_type'] = 'Project' # Turn group members into project members for the future import
+ end
+ end
+
+ def group_members
+ return [] unless @current_user.can?(:admin_group, @project.group)
+
+ # We need `.where.not(user_id: nil)` here otherwise when a group has an
+ # invitee, it would make the following query return 0 rows since a NULL
+ # user_id would be present in the subquery
+ # See http://stackoverflow.com/questions/129077/not-in-clause-and-null-values
+ non_null_user_ids = @project.project_members.where.not(user_id: nil).select(:user_id)
+
+ GroupMembersFinder.new(@project.group).execute.where.not(user_id: non_null_user_ids)
+ end
+
+ def tree_saver
+ @tree_saver ||= Gitlab::ImportExport::LegacyRelationTreeSaver.new
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/import_export/project/object_builder.rb b/lib/gitlab/import_export/project/object_builder.rb
new file mode 100644
index 00000000000..c3637b1c115
--- /dev/null
+++ b/lib/gitlab/import_export/project/object_builder.rb
@@ -0,0 +1,119 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ImportExport
+ module Project
+ # Given a class, it finds or creates a new object
+ # (initializes in the case of Label) at group or project level.
+ # If it does not exist in the group, it creates it at project level.
+ #
+ # Example:
+ # `ObjectBuilder.build(Label, label_attributes)`
+ # finds or initializes a label with the given attributes.
+ #
+ # It also adds some logic around Group Labels/Milestones for edge cases.
+ class ObjectBuilder < Base::ObjectBuilder
+ def self.build(*args)
+ ::Project.transaction do
+ super
+ end
+ end
+
+ def initialize(klass, attributes)
+ super
+
+ @group = @attributes['group']
+ @project = @attributes['project']
+ end
+
+ def find
+ return if epic? && group.nil?
+
+ super
+ end
+
+ private
+
+ attr_reader :group, :project
+
+ def where_clauses
+ [
+ where_clause_base,
+ where_clause_for_title,
+ where_clause_for_klass
+ ].compact
+ end
+
+ # Returns Arel clause `"{table_name}"."project_id" = {project.id}` if project is present
+ # For example: merge_request has :target_project_id, and we are searching by :iid
+ # or, if group is present:
+ # `"{table_name}"."project_id" = {project.id} OR "{table_name}"."group_id" = {group.id}`
+ def where_clause_base
+ [].tap do |clauses|
+ clauses << table[:project_id].eq(project.id) if project
+ clauses << table[:group_id].in(group.self_and_ancestors_ids) if group
+ end.reduce(:or)
+ end
+
+ # Returns Arel clause for a particular model or `nil`.
+ def where_clause_for_klass
+ attrs_to_arel(attributes.slice('iid')) if merge_request?
+ end
+
+ def prepare_attributes
+ attributes.dup.tap do |atts|
+ atts.delete('group') unless epic?
+
+ if label?
+ atts['type'] = 'ProjectLabel' # Always create project labels
+ elsif milestone?
+ if atts['group_id'] # Transform new group milestones into project ones
+ atts['iid'] = nil
+ atts.delete('group_id')
+ else
+ claim_iid
+ end
+ end
+
+ atts['importing'] = true if klass.ancestors.include?(Importable)
+ end
+ end
+
+ def label?
+ klass == Label
+ end
+
+ def milestone?
+ klass == Milestone
+ end
+
+ def merge_request?
+ klass == MergeRequest
+ end
+
+ def epic?
+ klass == Epic
+ end
+
+ # If an existing group milestone used the IID,
+ # claim the IID back and set the group milestone to use an available one.
+ # This is necessary to fix situations like the following:
+ # - Importing into a user namespace project with exported group milestones
+ # where the IID of the Group milestone could conflict with a project one.
+ def claim_iid
+ # The milestone has to be a group milestone, as it's the only case where
+ # we set the IID as the maximum. The rest of them are fixed.
+ milestone = project.milestones.find_by(iid: attributes['iid'])
+
+ return unless milestone
+
+ milestone.iid = nil
+ milestone.ensure_project_iid!
+ milestone.save!
+ end
+ end
+ end
+ end
+end
+
+Gitlab::ImportExport::Project::ObjectBuilder.prepend_if_ee('EE::Gitlab::ImportExport::Project::ObjectBuilder')
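Expanding on the class comment above, a hedged usage sketch with made-up attribute values;
whether the result is persisted or only initialized depends on the model, as described above:

  label = Gitlab::ImportExport::Project::ObjectBuilder.build(
    Label,
    'title'   => 'bug',
    'project' => project,         # scopes the lookup to the project being imported
    'group'   => project.group    # allows an existing group label to be re-used
  )
  # Returns an existing project or group label with that title when one is found;
  # otherwise a new ProjectLabel is initialized with these attributes.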
diff --git a/lib/gitlab/import_export/project/relation_factory.rb b/lib/gitlab/import_export/project/relation_factory.rb
new file mode 100644
index 00000000000..2405176c518
--- /dev/null
+++ b/lib/gitlab/import_export/project/relation_factory.rb
@@ -0,0 +1,164 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ImportExport
+ module Project
+ class RelationFactory < Base::RelationFactory
+ prepend_if_ee('::EE::Gitlab::ImportExport::Project::RelationFactory') # rubocop: disable Cop/InjectEnterpriseEditionModule
+
+ OVERRIDES = { snippets: :project_snippets,
+ ci_pipelines: 'Ci::Pipeline',
+ pipelines: 'Ci::Pipeline',
+ stages: 'Ci::Stage',
+ statuses: 'commit_status',
+ triggers: 'Ci::Trigger',
+ pipeline_schedules: 'Ci::PipelineSchedule',
+ builds: 'Ci::Build',
+ runners: 'Ci::Runner',
+ hooks: 'ProjectHook',
+ merge_access_levels: 'ProtectedBranch::MergeAccessLevel',
+ push_access_levels: 'ProtectedBranch::PushAccessLevel',
+ create_access_levels: 'ProtectedTag::CreateAccessLevel',
+ labels: :project_labels,
+ priorities: :label_priorities,
+ auto_devops: :project_auto_devops,
+ label: :project_label,
+ custom_attributes: 'ProjectCustomAttribute',
+ project_badges: 'Badge',
+ metrics: 'MergeRequest::Metrics',
+ ci_cd_settings: 'ProjectCiCdSetting',
+ error_tracking_setting: 'ErrorTracking::ProjectErrorTrackingSetting',
+ links: 'Releases::Link',
+ metrics_setting: 'ProjectMetricsSetting' }.freeze
+
+ BUILD_MODELS = %i[Ci::Build commit_status].freeze
+
+ GROUP_REFERENCES = %w[group_id].freeze
+
+ PROJECT_REFERENCES = %w[project_id source_project_id target_project_id].freeze
+
+ EXISTING_OBJECT_RELATIONS = %i[
+ milestone
+ milestones
+ label
+ labels
+ project_label
+ project_labels
+ group_label
+ group_labels
+ project_feature
+ merge_request
+ epic
+ ProjectCiCdSetting
+ container_expiration_policy
+ external_pull_request
+ external_pull_requests
+ ].freeze
+
+ def create
+ @object = super
+
+ # We preload the project, user, and group to re-use objects
+ @object = preload_keys(@object, PROJECT_REFERENCES, @importable)
+ @object = preload_keys(@object, GROUP_REFERENCES, @importable.group)
+ @object = preload_keys(@object, USER_REFERENCES, @user)
+ end
+
+ private
+
+ def invalid_relation?
+ # Do not create relation if it is:
+ # - An unknown service
+ # - A legacy trigger
+ unknown_service? ||
+ (!Feature.enabled?(:use_legacy_pipeline_triggers, @importable) && legacy_trigger?)
+ end
+
+ def setup_models
+ case @relation_name
+ when :merge_request_diff_files then setup_diff
+ when :notes then setup_note
+ when :'Ci::Pipeline' then setup_pipeline
+ when *BUILD_MODELS then setup_build
+ end
+
+ update_project_references
+ update_group_references
+ end
+
+ def generate_imported_object
+ if @relation_name == :merge_requests
+ MergeRequestParser.new(@importable, @relation_hash.delete('diff_head_sha'), super, @relation_hash).parse!
+ else
+ super
+ end
+ end
+
+ def update_project_references
+ # If source and target are the same, populate them with the new project ID.
+ if @relation_hash['source_project_id']
+ @relation_hash['source_project_id'] = same_source_and_target? ? @relation_hash['project_id'] : MergeRequestParser::FORKED_PROJECT_ID
+ end
+
+ @relation_hash['target_project_id'] = @relation_hash['project_id'] if @relation_hash['target_project_id']
+ end
+
+ def same_source_and_target?
+ @relation_hash['target_project_id'] && @relation_hash['target_project_id'] == @relation_hash['source_project_id']
+ end
+
+ def update_group_references
+ return unless existing_object?
+ return unless @relation_hash['group_id']
+
+ @relation_hash['group_id'] = @importable.namespace_id
+ end
+
+ def setup_build
+ @relation_hash.delete('trace') # old export files have trace
+ @relation_hash.delete('token')
+ @relation_hash.delete('commands')
+ @relation_hash.delete('artifacts_file_store')
+ @relation_hash.delete('artifacts_metadata_store')
+ @relation_hash.delete('artifacts_size')
+ end
+
+ def setup_diff
+ @relation_hash['diff'] = @relation_hash.delete('utf8_diff')
+ end
+
+ def setup_pipeline
+ @relation_hash.fetch('stages', []).each do |stage|
+ stage.statuses.each do |status|
+ status.pipeline = imported_object
+ end
+ end
+ end
+
+ def unknown_service?
+ @relation_name == :services && parsed_relation_hash['type'] &&
+ !Object.const_defined?(parsed_relation_hash['type'])
+ end
+
+ def legacy_trigger?
+ @relation_name == :'Ci::Trigger' && @relation_hash['owner_id'].nil?
+ end
+
+ def preload_keys(object, references, value)
+ return object unless value
+
+ references.each do |key|
+ attribute = "#{key.delete_suffix('_id')}=".to_sym
+ next unless object.respond_to?(key) && object.respond_to?(attribute)
+
+ if object.read_attribute(key) == value&.id
+ object.public_send(attribute, value) # rubocop:disable GitlabSecurity/PublicSend
+ end
+ end
+
+ object
+ end
+ end
+ end
+ end
+end
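The OVERRIDES table above maps exported relation names onto the models that actually persist
them; a tiny illustrative lookup (not part of the public API of this class):

  overrides = Gitlab::ImportExport::Project::RelationFactory::OVERRIDES
  overrides[:ci_pipelines]   # => 'Ci::Pipeline'
  overrides[:statuses]       # => 'commit_status'
  overrides[:labels]         # => :project_labels

  # Both string and symbol values are resolved later to the model class that stores the
  # rows, e.g. exported 'ci_pipelines' entries become Ci::Pipeline records on import.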
diff --git a/lib/gitlab/import_export/project/tree_restorer.rb b/lib/gitlab/import_export/project/tree_restorer.rb
new file mode 100644
index 00000000000..f8d25e14c02
--- /dev/null
+++ b/lib/gitlab/import_export/project/tree_restorer.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ImportExport
+ module Project
+ class TreeRestorer
+ attr_reader :user
+ attr_reader :shared
+ attr_reader :project
+
+ def initialize(user:, shared:, project:)
+ @user = user
+ @shared = shared
+ @project = project
+ end
+
+ def restore
+ @relation_reader = ImportExport::JSON::LegacyReader::File.new(File.join(shared.export_path, 'project.json'), reader.project_relation_names)
+
+ @project_members = @relation_reader.consume_relation('project_members')
+
+ if relation_tree_restorer.restore
+ import_failure_service.with_retry(action: 'set_latest_merge_request_diff_ids!') do
+ @project.merge_requests.set_latest_merge_request_diff_ids!
+ end
+
+ true
+ else
+ false
+ end
+ rescue => e
+ @shared.error(e)
+ false
+ end
+
+ private
+
+ def relation_tree_restorer
+ @relation_tree_restorer ||= RelationTreeRestorer.new(
+ user: @user,
+ shared: @shared,
+ importable: @project,
+ relation_reader: @relation_reader,
+ object_builder: object_builder,
+ members_mapper: members_mapper,
+ relation_factory: relation_factory,
+ reader: reader
+ )
+ end
+
+ def members_mapper
+ @members_mapper ||= Gitlab::ImportExport::MembersMapper.new(exported_members: @project_members,
+ user: @user,
+ importable: @project)
+ end
+
+ def object_builder
+ Project::ObjectBuilder
+ end
+
+ def relation_factory
+ Project::RelationFactory
+ end
+
+ def reader
+ @reader ||= Gitlab::ImportExport::Reader.new(shared: @shared)
+ end
+
+ def import_failure_service
+ @import_failure_service ||= ImportFailureService.new(@project)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/import_export/project/tree_saver.rb b/lib/gitlab/import_export/project/tree_saver.rb
new file mode 100644
index 00000000000..01000c9d6d9
--- /dev/null
+++ b/lib/gitlab/import_export/project/tree_saver.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ImportExport
+ module Project
+ class TreeSaver
+ attr_reader :full_path
+
+ def initialize(project:, current_user:, shared:, params: {})
+ @params = params
+ @project = project
+ @current_user = current_user
+ @shared = shared
+ @full_path = File.join(@shared.export_path, ImportExport.project_filename)
+ end
+
+ def save
+ json_writer = ImportExport::JSON::LegacyWriter.new(@full_path)
+
+ serializer = ImportExport::JSON::StreamingSerializer.new(exportable, reader.project_tree, json_writer)
+ serializer.execute
+
+ true
+ rescue => e
+ @shared.error(e)
+ false
+ ensure
+ json_writer&.close
+ end
+
+ private
+
+ def reader
+ @reader ||= Gitlab::ImportExport::Reader.new(shared: @shared)
+ end
+
+ def exportable
+ @project.present(exportable_params)
+ end
+
+ def exportable_params
+ params = {
+ presenter_class: presenter_class,
+ current_user: @current_user
+ }
+ params[:override_description] = @params[:description] if @params[:description].present?
+ params
+ end
+
+ def presenter_class
+ Projects::ImportExport::ProjectExportPresenter
+ end
+ end
+ end
+ end
+end
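A hedged sketch of driving the new saver; `shared` is an ImportExport::Shared instance for the
project (obtaining it via `project.import_export_shared` is an assumption about the caller):

  saver = Gitlab::ImportExport::Project::TreeSaver.new(
    project:      project,
    current_user: current_user,
    shared:       project.import_export_shared,
    params:       { description: 'Optional overridden description' }
  )

  saver.save   # => true on success, false after recording the error on `shared`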
diff --git a/lib/gitlab/import_export/project_relation_factory.rb b/lib/gitlab/import_export/project_relation_factory.rb
deleted file mode 100644
index e27bb9d3af1..00000000000
--- a/lib/gitlab/import_export/project_relation_factory.rb
+++ /dev/null
@@ -1,184 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module ImportExport
- class ProjectRelationFactory < BaseRelationFactory
- prepend_if_ee('::EE::Gitlab::ImportExport::ProjectRelationFactory') # rubocop: disable Cop/InjectEnterpriseEditionModule
-
- OVERRIDES = { snippets: :project_snippets,
- ci_pipelines: 'Ci::Pipeline',
- pipelines: 'Ci::Pipeline',
- stages: 'Ci::Stage',
- statuses: 'commit_status',
- triggers: 'Ci::Trigger',
- pipeline_schedules: 'Ci::PipelineSchedule',
- builds: 'Ci::Build',
- runners: 'Ci::Runner',
- hooks: 'ProjectHook',
- merge_access_levels: 'ProtectedBranch::MergeAccessLevel',
- push_access_levels: 'ProtectedBranch::PushAccessLevel',
- create_access_levels: 'ProtectedTag::CreateAccessLevel',
- labels: :project_labels,
- priorities: :label_priorities,
- auto_devops: :project_auto_devops,
- label: :project_label,
- custom_attributes: 'ProjectCustomAttribute',
- project_badges: 'Badge',
- metrics: 'MergeRequest::Metrics',
- ci_cd_settings: 'ProjectCiCdSetting',
- error_tracking_setting: 'ErrorTracking::ProjectErrorTrackingSetting',
- links: 'Releases::Link',
- metrics_setting: 'ProjectMetricsSetting' }.freeze
-
- BUILD_MODELS = %i[Ci::Build commit_status].freeze
-
- GROUP_REFERENCES = %w[group_id].freeze
-
- PROJECT_REFERENCES = %w[project_id source_project_id target_project_id].freeze
-
- EXISTING_OBJECT_RELATIONS = %i[
- milestone
- milestones
- label
- labels
- project_label
- project_labels
- group_label
- group_labels
- project_feature
- merge_request
- epic
- ProjectCiCdSetting
- container_expiration_policy
- ].freeze
-
- def create
- @object = super
-
- # We preload the project, user, and group to re-use objects
- @object = preload_keys(@object, PROJECT_REFERENCES, @importable)
- @object = preload_keys(@object, GROUP_REFERENCES, @importable.group)
- @object = preload_keys(@object, USER_REFERENCES, @user)
- end
-
- private
-
- def invalid_relation?
- # Do not create relation if it is:
- # - An unknown service
- # - A legacy trigger
- unknown_service? ||
- (!Feature.enabled?(:use_legacy_pipeline_triggers, @importable) && legacy_trigger?)
- end
-
- def setup_models
- case @relation_name
- when :merge_request_diff_files then setup_diff
- when :notes then setup_note
- when :'Ci::Pipeline' then setup_pipeline
- when *BUILD_MODELS then setup_build
- end
-
- update_project_references
- update_group_references
- end
-
- def generate_imported_object
- if @relation_name == :merge_requests
- MergeRequestParser.new(@importable, @relation_hash.delete('diff_head_sha'), super, @relation_hash).parse!
- else
- super
- end
- end
-
- def update_project_references
- # If source and target are the same, populate them with the new project ID.
- if @relation_hash['source_project_id']
- @relation_hash['source_project_id'] = same_source_and_target? ? @relation_hash['project_id'] : MergeRequestParser::FORKED_PROJECT_ID
- end
-
- @relation_hash['target_project_id'] = @relation_hash['project_id'] if @relation_hash['target_project_id']
- end
-
- def same_source_and_target?
- @relation_hash['target_project_id'] && @relation_hash['target_project_id'] == @relation_hash['source_project_id']
- end
-
- def update_group_references
- return unless existing_object?
- return unless @relation_hash['group_id']
-
- @relation_hash['group_id'] = @importable.namespace_id
- end
-
- # This code is a workaround for broken project exports that don't
- # export merge requests with CI pipelines (i.e. exports that were
- # generated from
- # https://gitlab.com/gitlab-org/gitlab/merge_requests/17844).
- # This method can be removed in GitLab 12.6.
- def update_merge_request_references
- # If a merge request was properly created, we don't need to fix
- # up this export.
- return if @relation_hash['merge_request']
-
- merge_request_id = @relation_hash['merge_request_id']
-
- return unless merge_request_id
-
- new_merge_request_id = @merge_requests_mapping[merge_request_id]
-
- return unless new_merge_request_id
-
- @relation_hash['merge_request_id'] = new_merge_request_id
- parsed_relation_hash['merge_request_id'] = new_merge_request_id
- end
-
- def setup_build
- @relation_hash.delete('trace') # old export files have trace
- @relation_hash.delete('token')
- @relation_hash.delete('commands')
- @relation_hash.delete('artifacts_file_store')
- @relation_hash.delete('artifacts_metadata_store')
- @relation_hash.delete('artifacts_size')
- end
-
- def setup_diff
- @relation_hash['diff'] = @relation_hash.delete('utf8_diff')
- end
-
- def setup_pipeline
- update_merge_request_references
-
- @relation_hash.fetch('stages', []).each do |stage|
- stage.statuses.each do |status|
- status.pipeline = imported_object
- end
- end
- end
-
- def unknown_service?
- @relation_name == :services && parsed_relation_hash['type'] &&
- !Object.const_defined?(parsed_relation_hash['type'])
- end
-
- def legacy_trigger?
- @relation_name == :'Ci::Trigger' && @relation_hash['owner_id'].nil?
- end
-
- def preload_keys(object, references, value)
- return object unless value
-
- references.each do |key|
- attribute = "#{key.delete_suffix('_id')}=".to_sym
- next unless object.respond_to?(key) && object.respond_to?(attribute)
-
- if object.read_attribute(key) == value&.id
- object.public_send(attribute, value) # rubocop:disable GitlabSecurity/PublicSend
- end
- end
-
- object
- end
- end
- end
-end
diff --git a/lib/gitlab/import_export/project_tree_loader.rb b/lib/gitlab/import_export/project_tree_loader.rb
deleted file mode 100644
index fc21858043d..00000000000
--- a/lib/gitlab/import_export/project_tree_loader.rb
+++ /dev/null
@@ -1,72 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module ImportExport
- class ProjectTreeLoader
- def load(path, dedup_entries: false)
- tree_hash = ActiveSupport::JSON.decode(IO.read(path))
-
- if dedup_entries
- dedup_tree(tree_hash)
- else
- tree_hash
- end
- end
-
- private
-
- # This function removes duplicate entries from the given tree recursively
- # by caching nodes it encounters repeatedly. We only consider nodes for
- # which there can actually be multiple equivalent instances (e.g. strings,
- # hashes and arrays, but not `nil`s, numbers or booleans.)
- #
- # The algorithm uses a recursive depth-first descent with 3 cases, starting
- # with a root node (the tree/hash itself):
- # - a node has already been cached; in this case we return it from the cache
- # - a node has not been cached yet but should be; descend into its children
- # - a node is neither cached nor qualifies for caching; this is a no-op
- def dedup_tree(node, nodes_seen = {})
- if nodes_seen.key?(node) && distinguishable?(node)
- yield nodes_seen[node]
- elsif should_dedup?(node)
- nodes_seen[node] = node
-
- case node
- when Array
- node.each_index do |idx|
- dedup_tree(node[idx], nodes_seen) do |cached_node|
- node[idx] = cached_node
- end
- end
- when Hash
- node.each do |k, v|
- dedup_tree(v, nodes_seen) do |cached_node|
- node[k] = cached_node
- end
- end
- end
- else
- node
- end
- end
-
- # We do not need to consider nodes for which there cannot be multiple instances
- def should_dedup?(node)
- node && !(node.is_a?(Numeric) || node.is_a?(TrueClass) || node.is_a?(FalseClass))
- end
-
- # We can only safely de-dup values that are distinguishable. True value objects
- # are always distinguishable by nature. Hashes however can represent entities,
- # which are identified by ID, not value. We therefore disallow de-duping hashes
- # that do not have an `id` field, since we might risk dropping entities that
- # have equal attributes yet different identities.
- def distinguishable?(node)
- if node.is_a?(Hash)
- node.key?('id')
- else
- true
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/import_export/project_tree_restorer.rb b/lib/gitlab/import_export/project_tree_restorer.rb
deleted file mode 100644
index aae07657ea0..00000000000
--- a/lib/gitlab/import_export/project_tree_restorer.rb
+++ /dev/null
@@ -1,92 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module ImportExport
- class ProjectTreeRestorer
- LARGE_PROJECT_FILE_SIZE_BYTES = 500.megabyte
-
- attr_reader :user
- attr_reader :shared
- attr_reader :project
-
- def initialize(user:, shared:, project:)
- @user = user
- @shared = shared
- @project = project
- @tree_loader = ProjectTreeLoader.new
- end
-
- def restore
- @tree_hash = read_tree_hash
- @project_members = @tree_hash.delete('project_members')
-
- RelationRenameService.rename(@tree_hash)
-
- if relation_tree_restorer.restore
- import_failure_service.with_retry(action: 'set_latest_merge_request_diff_ids!') do
- @project.merge_requests.set_latest_merge_request_diff_ids!
- end
-
- true
- else
- false
- end
- rescue => e
- @shared.error(e)
- false
- end
-
- private
-
- def large_project?(path)
- File.size(path) >= LARGE_PROJECT_FILE_SIZE_BYTES
- end
-
- def read_tree_hash
- path = File.join(@shared.export_path, 'project.json')
- dedup_entries = large_project?(path) &&
- Feature.enabled?(:dedup_project_import_metadata, project.group)
-
- @tree_loader.load(path, dedup_entries: dedup_entries)
- rescue => e
- Rails.logger.error("Import/Export error: #{e.message}") # rubocop:disable Gitlab/RailsLogger
- raise Gitlab::ImportExport::Error.new('Incorrect JSON format')
- end
-
- def relation_tree_restorer
- @relation_tree_restorer ||= RelationTreeRestorer.new(
- user: @user,
- shared: @shared,
- importable: @project,
- tree_hash: @tree_hash,
- object_builder: object_builder,
- members_mapper: members_mapper,
- relation_factory: relation_factory,
- reader: reader
- )
- end
-
- def members_mapper
- @members_mapper ||= Gitlab::ImportExport::MembersMapper.new(exported_members: @project_members,
- user: @user,
- importable: @project)
- end
-
- def object_builder
- Gitlab::ImportExport::GroupProjectObjectBuilder
- end
-
- def relation_factory
- Gitlab::ImportExport::ProjectRelationFactory
- end
-
- def reader
- @reader ||= Gitlab::ImportExport::Reader.new(shared: @shared)
- end
-
- def import_failure_service
- @import_failure_service ||= ImportFailureService.new(@project)
- end
- end
- end
-end
diff --git a/lib/gitlab/import_export/project_tree_saver.rb b/lib/gitlab/import_export/project_tree_saver.rb
deleted file mode 100644
index 386a4cfdfc6..00000000000
--- a/lib/gitlab/import_export/project_tree_saver.rb
+++ /dev/null
@@ -1,68 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module ImportExport
- class ProjectTreeSaver
- attr_reader :full_path
-
- def initialize(project:, current_user:, shared:, params: {})
- @params = params
- @project = project
- @current_user = current_user
- @shared = shared
- @full_path = File.join(@shared.export_path, ImportExport.project_filename)
- end
-
- def save
- project_tree = tree_saver.serialize(@project, reader.project_tree)
- fix_project_tree(project_tree)
- tree_saver.save(project_tree, @shared.export_path, ImportExport.project_filename)
-
- true
- rescue => e
- @shared.error(e)
- false
- end
-
- private
-
- # Aware that the resulting hash needs to be pure-hash and
- # does not include any AR objects anymore, only objects that run `.to_json`
- def fix_project_tree(project_tree)
- if @params[:description].present?
- project_tree['description'] = @params[:description]
- end
-
- project_tree['project_members'] += group_members_array
-
- RelationRenameService.add_new_associations(project_tree)
- end
-
- def reader
- @reader ||= Gitlab::ImportExport::Reader.new(shared: @shared)
- end
-
- def group_members_array
- group_members.as_json(reader.group_members_tree).each do |group_member|
- group_member['source_type'] = 'Project' # Make group members project members of the future import
- end
- end
-
- def group_members
- return [] unless @current_user.can?(:admin_group, @project.group)
-
- # We need `.where.not(user_id: nil)` here otherwise when a group has an
- # invitee, it would make the following query return 0 rows since a NULL
- # user_id would be present in the subquery
- # See http://stackoverflow.com/questions/129077/not-in-clause-and-null-values
- non_null_user_ids = @project.project_members.where.not(user_id: nil).select(:user_id)
-
- GroupMembersFinder.new(@project.group).execute.where.not(user_id: non_null_user_ids)
- end
-
- def tree_saver
- @tree_saver ||= RelationTreeSaver.new
- end
- end
- end
-end
diff --git a/lib/gitlab/import_export/reader.rb b/lib/gitlab/import_export/reader.rb
index 1390770acef..8d36d05ca6f 100644
--- a/lib/gitlab/import_export/reader.rb
+++ b/lib/gitlab/import_export/reader.rb
@@ -17,10 +17,18 @@ module Gitlab
tree_by_key(:project)
end
+ def project_relation_names
+ attributes_finder.find_relations_tree(:project).keys
+ end
+
def group_tree
tree_by_key(:group)
end
+ def group_relation_names
+ attributes_finder.find_relations_tree(:group).keys
+ end
+
def group_members_tree
tree_by_key(:group_members)
end
diff --git a/lib/gitlab/import_export/relation_rename_service.rb b/lib/gitlab/import_export/relation_rename_service.rb
deleted file mode 100644
index 03aaa6aefc3..00000000000
--- a/lib/gitlab/import_export/relation_rename_service.rb
+++ /dev/null
@@ -1,48 +0,0 @@
-# frozen_string_literal: true
-
-# This class is intended to help with relation renames within Gitlab versions
-# and allow compatibility between versions.
-# If you have to change one relationship name that is imported/exported,
-# you should add it to the RENAMES constant indicating the old name and the
-# new one.
-# The behavior of these renamed relationships should be transient and it should
-# only last one release until you completely remove the renaming from the list.
-#
-# When importing, this class will check the hash and:
-# - if only the old relationship name is found, it will rename it with the new one
-# - if only the new relationship name is found, it will do nothing
-# - if it finds both, it will use the new relationship data
-#
-# When exporting, this class will duplicate the keys in the resulting file.
-# This way, if we open the file in an old version of the exporter it will work
-# and also it will with the newer versions.
-module Gitlab
- module ImportExport
- class RelationRenameService
- RENAMES = {
- 'pipelines' => 'ci_pipelines' # Added in 11.6, remove in 11.7
- }.freeze
-
- def self.rename(tree_hash)
- return unless tree_hash&.present?
-
- RENAMES.each do |old_name, new_name|
- old_entry = tree_hash.delete(old_name)
-
- next if tree_hash[new_name]
- next unless old_entry
-
- tree_hash[new_name] = old_entry
- end
- end
-
- def self.add_new_associations(tree_hash)
- RENAMES.each do |old_name, new_name|
- next if tree_hash.key?(old_name)
-
- tree_hash[old_name] = tree_hash[new_name]
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/import_export/relation_tree_restorer.rb b/lib/gitlab/import_export/relation_tree_restorer.rb
index cc01d70db16..466cb03862e 100644
--- a/lib/gitlab/import_export/relation_tree_restorer.rb
+++ b/lib/gitlab/import_export/relation_tree_restorer.rb
@@ -9,13 +9,13 @@ module Gitlab
attr_reader :user
attr_reader :shared
attr_reader :importable
- attr_reader :tree_hash
+ attr_reader :relation_reader
- def initialize(user:, shared:, importable:, tree_hash:, members_mapper:, object_builder:, relation_factory:, reader:)
+ def initialize(user:, shared:, importable:, relation_reader:, members_mapper:, object_builder:, relation_factory:, reader:)
@user = user
@shared = shared
@importable = importable
- @tree_hash = tree_hash
+ @relation_reader = relation_reader
@members_mapper = members_mapper
@object_builder = object_builder
@relation_factory = relation_factory
@@ -26,7 +26,13 @@ module Gitlab
ActiveRecord::Base.uncached do
ActiveRecord::Base.no_touching do
update_params!
- create_relations!
+
+ bulk_inserts_enabled = @importable.class == ::Project &&
+ Feature.enabled?(:import_bulk_inserts, @importable.group)
+ BulkInsertableAssociations.with_bulk_insert(enabled: bulk_inserts_enabled) do
+ fix_ci_pipelines_not_sorted_on_legacy_project_json!
+ create_relations!
+ end
end
end
@@ -51,33 +57,21 @@ module Gitlab
end
def process_relation!(relation_key, relation_definition)
- data_hashes = @tree_hash.delete(relation_key)
- return unless data_hashes
-
- # we do not care if we process array or hash
- data_hashes = [data_hashes] unless data_hashes.is_a?(Array)
-
- relation_index = 0
-
- # consume and remove objects from memory
- while data_hash = data_hashes.shift
+ @relation_reader.consume_relation(relation_key) do |data_hash, relation_index|
process_relation_item!(relation_key, relation_definition, relation_index, data_hash)
- relation_index += 1
end
end
def process_relation_item!(relation_key, relation_definition, relation_index, data_hash)
relation_object = build_relation(relation_key, relation_definition, data_hash)
return unless relation_object
- return if importable_class == Project && group_model?(relation_object)
+ return if importable_class == ::Project && group_model?(relation_object)
relation_object.assign_attributes(importable_class_sym => @importable)
import_failure_service.with_retry(action: 'relation_object.save!', relation_key: relation_key, relation_index: relation_index) do
relation_object.save!
end
-
- save_id_mapping(relation_key, data_hash, relation_object)
rescue => e
import_failure_service.log_import_failure(
source: 'process_relation_item!',
@@ -90,17 +84,6 @@ module Gitlab
@import_failure_service ||= ImportFailureService.new(@importable)
end
- # Older, serialized CI pipeline exports may only have a
- # merge_request_id and not the full hash of the merge request. To
- # import these pipelines, we need to preserve the mapping between
- # the old and new the merge request ID.
- def save_id_mapping(relation_key, data_hash, relation_object)
- return unless importable_class == Project
- return unless relation_key == 'merge_requests'
-
- merge_requests_mapping[data_hash['id']] = relation_object.id
- end
-
def relations
@relations ||=
@reader
@@ -110,10 +93,7 @@ module Gitlab
end
def update_params!
- params = @tree_hash.reject do |key, _|
- relations.include?(key)
- end
-
+ params = @relation_reader.root_attributes(relations.keys)
params = params.merge(present_override_params)
# Cleaning all imported and overridden params
@@ -123,7 +103,7 @@ module Gitlab
excluded_keys: excluded_keys_for_relation(importable_class_sym))
@importable.assign_attributes(params)
- @importable.drop_visibility_level! if importable_class == Project
+ @importable.drop_visibility_level! if importable_class == ::Project
Gitlab::Timeless.timeless(@importable) do
@importable.save!
@@ -182,7 +162,7 @@ module Gitlab
# if object is a hash we can create simple object
# as it means that this is 1-to-1 vs 1-to-many
- sub_data_hash =
+ current_item =
if sub_data_hash.is_a?(Array)
build_relations(
sub_relation_key,
@@ -195,9 +175,8 @@ module Gitlab
sub_data_hash)
end
- # persist object(s) or delete from relation
- if sub_data_hash
- data_hash[sub_relation_key] = sub_data_hash
+ if current_item
+ data_hash[sub_relation_key] = current_item
else
data_hash.delete(sub_relation_key)
end
@@ -219,13 +198,8 @@ module Gitlab
importable_class.to_s.downcase.to_sym
end
- # A Hash of the imported merge request ID -> imported ID.
- def merge_requests_mapping
- @merge_requests_mapping ||= {}
- end
-
def relation_factory_params(relation_key, data_hash)
- base_params = {
+ {
relation_sym: relation_key.to_sym,
relation_hash: data_hash,
importable: @importable,
@@ -234,9 +208,15 @@ module Gitlab
user: @user,
excluded_keys: excluded_keys_for_relation(relation_key)
}
+ end
+
+ # Temporary fix for https://gitlab.com/gitlab-org/gitlab/-/issues/27883 when importing from a legacy project.json
+ # This should be removed once the legacy JSON format is deprecated.
+ # The NDJSON export format will fix the ordering during project export.
+ def fix_ci_pipelines_not_sorted_on_legacy_project_json!
+ return unless relation_reader.legacy?
- base_params[:merge_requests_mapping] = merge_requests_mapping if importable_class == Project
- base_params
+ relation_reader.sort_ci_pipelines_by_id
end
end
end
diff --git a/lib/gitlab/import_export/relation_tree_saver.rb b/lib/gitlab/import_export/relation_tree_saver.rb
deleted file mode 100644
index a0452071ccf..00000000000
--- a/lib/gitlab/import_export/relation_tree_saver.rb
+++ /dev/null
@@ -1,27 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module ImportExport
- class RelationTreeSaver
- include Gitlab::ImportExport::CommandLineUtil
-
- def serialize(exportable, relations_tree)
- if Feature.enabled?(:export_fast_serialize, default_enabled: true)
- Gitlab::ImportExport::FastHashSerializer
- .new(exportable, relations_tree)
- .execute
- else
- exportable.as_json(relations_tree)
- end
- end
-
- def save(tree, dir_path, filename)
- mkdir_p(dir_path)
-
- tree_json = JSON.generate(tree)
-
- File.write(File.join(dir_path, filename), tree_json)
- end
- end
- end
-end
diff --git a/lib/gitlab/import_export/shared.rb b/lib/gitlab/import_export/shared.rb
index 8d81b2af065..09ed4eb568d 100644
--- a/lib/gitlab/import_export/shared.rb
+++ b/lib/gitlab/import_export/shared.rb
@@ -94,14 +94,6 @@ module Gitlab
end
end
- def log_error(details)
- @logger.error(log_base_data.merge(details))
- end
-
- def log_debug(details)
- @logger.debug(log_base_data.merge(details))
- end
-
def log_base_data
log = {
importer: 'Import/Export',
diff --git a/lib/gitlab/import_export/snippet_repo_restorer.rb b/lib/gitlab/import_export/snippet_repo_restorer.rb
new file mode 100644
index 00000000000..079681dfac5
--- /dev/null
+++ b/lib/gitlab/import_export/snippet_repo_restorer.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ImportExport
+ class SnippetRepoRestorer < RepoRestorer
+ attr_reader :snippet
+
+ def initialize(snippet:, user:, shared:, path_to_bundle:)
+ @snippet = snippet
+ @user = user
+ @repository = snippet.repository
+ @path_to_bundle = path_to_bundle.to_s
+ @shared = shared
+ end
+
+ def restore
+ if File.exist?(path_to_bundle)
+ create_repository_from_bundle
+ else
+ create_repository_from_db
+ end
+
+ true
+ rescue => e
+ shared.error(e)
+ false
+ end
+
+ private
+
+ def create_repository_from_bundle
+ repository.create_from_bundle(path_to_bundle)
+ snippet.track_snippet_repository
+ end
+
+ def create_repository_from_db
+ snippet.create_repository
+
+ commit_attrs = {
+ branch_name: 'master',
+ message: 'Initial commit'
+ }
+
+ repository.create_file(@user, snippet.file_name, snippet.content, commit_attrs)
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/import_export/snippet_repo_saver.rb b/lib/gitlab/import_export/snippet_repo_saver.rb
new file mode 100644
index 00000000000..cab96c78232
--- /dev/null
+++ b/lib/gitlab/import_export/snippet_repo_saver.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ImportExport
+ class SnippetRepoSaver < RepoSaver
+ def initialize(project:, shared:, repository:)
+ @project = project
+ @shared = shared
+ @repository = repository
+ end
+
+ private
+
+ def bundle_full_path
+ File.join(shared.export_path,
+ ::Gitlab::ImportExport.snippet_repo_bundle_dir,
+ ::Gitlab::ImportExport.snippet_repo_bundle_filename_for(repository.container))
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/import_export/snippets_repo_restorer.rb b/lib/gitlab/import_export/snippets_repo_restorer.rb
new file mode 100644
index 00000000000..8fe83225812
--- /dev/null
+++ b/lib/gitlab/import_export/snippets_repo_restorer.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ImportExport
+ class SnippetsRepoRestorer
+ def initialize(project:, shared:, user:)
+ @project = project
+ @shared = shared
+ @user = user
+ end
+
+ def restore
+ return true unless Feature.enabled?(:version_snippets, @user)
+ return true unless Dir.exist?(snippets_repo_bundle_path)
+
+ @project.snippets.find_each.all? do |snippet|
+ Gitlab::ImportExport::SnippetRepoRestorer.new(snippet: snippet,
+ user: @user,
+ shared: @shared,
+ path_to_bundle: snippet_repo_bundle_path(snippet))
+ .restore
+ end
+ end
+
+ private
+
+ def snippet_repo_bundle_path(snippet)
+ File.join(snippets_repo_bundle_path, ::Gitlab::ImportExport.snippet_repo_bundle_filename_for(snippet))
+ end
+
+ def snippets_repo_bundle_path
+ @snippets_repo_bundle_path ||= ::Gitlab::ImportExport.snippets_repo_bundle_path(@shared.export_path)
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/import_export/snippets_repo_saver.rb b/lib/gitlab/import_export/snippets_repo_saver.rb
new file mode 100644
index 00000000000..85e094c0d15
--- /dev/null
+++ b/lib/gitlab/import_export/snippets_repo_saver.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ImportExport
+ class SnippetsRepoSaver
+ include Gitlab::ImportExport::CommandLineUtil
+
+ def initialize(current_user:, project:, shared:)
+ @project = project
+ @shared = shared
+ @current_user = current_user
+ end
+
+ def save
+ return true unless Feature.enabled?(:version_snippets, @current_user)
+
+ create_snippets_repo_directory
+
+ @project.snippets.find_each.all? do |snippet|
+ Gitlab::ImportExport::SnippetRepoSaver.new(project: @project,
+ shared: @shared,
+ repository: snippet.repository)
+ .save
+ end
+ end
+
+ private
+
+ def create_snippets_repo_directory
+ mkdir_p(::Gitlab::ImportExport.snippets_repo_bundle_path(@shared.export_path))
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/incoming_email.rb b/lib/gitlab/incoming_email.rb
index 4547a9b0a01..2889dbc68cc 100644
--- a/lib/gitlab/incoming_email.rb
+++ b/lib/gitlab/incoming_email.rb
@@ -28,8 +28,9 @@ module Gitlab
config.address.sub(WILDCARD_PLACEHOLDER, "#{key}#{UNSUBSCRIBE_SUFFIX}")
end
- def key_from_address(address)
- regex = address_regex
+ def key_from_address(address, wildcard_address: nil)
+ wildcard_address ||= config.address
+ regex = address_regex(wildcard_address)
return unless regex
match = address.match(regex)
@@ -55,8 +56,7 @@ module Gitlab
private
- def address_regex
- wildcard_address = config.address
+ def address_regex(wildcard_address)
return unless wildcard_address
regex = Regexp.escape(wildcard_address)
diff --git a/lib/gitlab/jira/http_client.rb b/lib/gitlab/jira/http_client.rb
index c09d8170d17..b973244a531 100644
--- a/lib/gitlab/jira/http_client.rb
+++ b/lib/gitlab/jira/http_client.rb
@@ -12,7 +12,12 @@ module Gitlab
def request(*args)
result = make_request(*args)
- raise JIRA::HTTPError.new(result.response) unless result.response.is_a?(Net::HTTPSuccess)
+ unless result.response.is_a?(Net::HTTPSuccess)
+ Gitlab::ErrorTracking.track_and_raise_exception(
+ JIRA::HTTPError.new(result.response),
+ response: result.body
+ )
+ end
result
end
diff --git a/lib/gitlab/job_waiter.rb b/lib/gitlab/job_waiter.rb
index 90dbe4d005d..e7a8cc6305a 100644
--- a/lib/gitlab/job_waiter.rb
+++ b/lib/gitlab/job_waiter.rb
@@ -19,6 +19,9 @@ module Gitlab
class JobWaiter
KEY_PREFIX = "gitlab:job_waiter"
+ STARTED_METRIC = :gitlab_job_waiter_started_total
+ TIMEOUTS_METRIC = :gitlab_job_waiter_timeouts_total
+
def self.notify(key, jid)
Gitlab::Redis::SharedState.with { |redis| redis.lpush(key, jid) }
end
@@ -27,15 +30,16 @@ module Gitlab
key.is_a?(String) && key =~ /\A#{KEY_PREFIX}:\h{8}-\h{4}-\h{4}-\h{4}-\h{12}\z/
end
- attr_reader :key, :finished
+ attr_reader :key, :finished, :worker_label
attr_accessor :jobs_remaining
# jobs_remaining - the number of jobs left to wait for
# key - The key of this waiter.
- def initialize(jobs_remaining = 0, key = "#{KEY_PREFIX}:#{SecureRandom.uuid}")
+ def initialize(jobs_remaining = 0, key = "#{KEY_PREFIX}:#{SecureRandom.uuid}", worker_label: nil)
@key = key
@jobs_remaining = jobs_remaining
@finished = []
+ @worker_label = worker_label
end
# Waits for all the jobs to be completed.
@@ -45,6 +49,7 @@ module Gitlab
# long to process, or is never processed.
def wait(timeout = 10)
deadline = Time.now.utc + timeout
+ increment_counter(STARTED_METRIC)
Gitlab::Redis::SharedState.with do |redis|
# Fallback key expiry: allow a long grace period to reduce the chance of
@@ -60,7 +65,12 @@ module Gitlab
break if seconds_left <= 0
list, jid = redis.blpop(key, timeout: seconds_left)
- break unless list && jid # timed out
+
+ # timed out
+ unless list && jid
+ increment_counter(TIMEOUTS_METRIC)
+ break
+ end
@finished << jid
@jobs_remaining -= 1
@@ -72,5 +82,20 @@ module Gitlab
finished
end
+
+ private
+
+ def increment_counter(metric)
+ return unless worker_label
+
+ metrics[metric].increment(worker: worker_label)
+ end
+
+ def metrics
+ @metrics ||= {
+ STARTED_METRIC => Gitlab::Metrics.counter(STARTED_METRIC, 'JobWaiter attempts started'),
+ TIMEOUTS_METRIC => Gitlab::Metrics.counter(TIMEOUTS_METRIC, 'JobWaiter attempts timed out')
+ }
+ end
end
end
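A rough usage sketch for the new worker_label option; ExampleWorker is hypothetical, while notify, wait and the metric names come from the code above:

    waiter = Gitlab::JobWaiter.new(3, worker_label: :example_worker)

    3.times do
      # The hypothetical worker would end its perform method with:
      #   Gitlab::JobWaiter.notify(waiter_key, jid)
      ExampleWorker.perform_async(waiter.key)
    end

    finished_jids = waiter.wait(30)
    # With worker_label set, attempts and timeouts are counted in
    # gitlab_job_waiter_started_total and gitlab_job_waiter_timeouts_total.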
diff --git a/lib/gitlab/kubernetes/helm.rb b/lib/gitlab/kubernetes/helm.rb
index c7c348ce9eb..3e201d68297 100644
--- a/lib/gitlab/kubernetes/helm.rb
+++ b/lib/gitlab/kubernetes/helm.rb
@@ -3,13 +3,19 @@
module Gitlab
module Kubernetes
module Helm
- HELM_VERSION = '2.16.1'
+ HELM_VERSION = '2.16.3'
KUBECTL_VERSION = '1.13.12'
NAMESPACE = 'gitlab-managed-apps'
NAMESPACE_LABELS = { 'app.gitlab.com/managed_by' => :gitlab }.freeze
SERVICE_ACCOUNT = 'tiller'
CLUSTER_ROLE_BINDING = 'tiller-admin'
CLUSTER_ROLE = 'cluster-admin'
+
+ MANAGED_APPS_LOCAL_TILLER_FEATURE_FLAG = :managed_apps_local_tiller
+
+ def self.local_tiller_enabled?
+ Feature.enabled?(MANAGED_APPS_LOCAL_TILLER_FEATURE_FLAG)
+ end
end
end
end
diff --git a/lib/gitlab/kubernetes/helm/api.rb b/lib/gitlab/kubernetes/helm/api.rb
index 3ed07818302..3b843799d66 100644
--- a/lib/gitlab/kubernetes/helm/api.rb
+++ b/lib/gitlab/kubernetes/helm/api.rb
@@ -3,7 +3,7 @@
module Gitlab
module Kubernetes
module Helm
- class Api
+ class API
def initialize(kubeclient)
@kubeclient = kubeclient
@namespace = Gitlab::Kubernetes::Namespace.new(
diff --git a/lib/gitlab/kubernetes/helm/client_command.rb b/lib/gitlab/kubernetes/helm/client_command.rb
index b953ce24c4a..e7ade7e4d39 100644
--- a/lib/gitlab/kubernetes/helm/client_command.rb
+++ b/lib/gitlab/kubernetes/helm/client_command.rb
@@ -59,7 +59,7 @@ module Gitlab
end
def local_tiller_enabled?
- Feature.enabled?(:managed_apps_local_tiller)
+ ::Gitlab::Kubernetes::Helm.local_tiller_enabled?
end
end
end
diff --git a/lib/gitlab/kubernetes/namespace.rb b/lib/gitlab/kubernetes/namespace.rb
index 9862861118b..68e4aeb4bae 100644
--- a/lib/gitlab/kubernetes/namespace.rb
+++ b/lib/gitlab/kubernetes/namespace.rb
@@ -35,12 +35,14 @@ module Gitlab
def log_create_failed(error)
logger.error({
- exception: error.class.name,
+ exception: {
+ class: error.class.name,
+ message: error.message
+ },
status_code: error.error_code,
namespace: name,
class_name: self.class.name,
- event: :failed_to_create_namespace,
- message: error.message
+ event: :failed_to_create_namespace
})
end
diff --git a/lib/gitlab/legacy_github_import/importer.rb b/lib/gitlab/legacy_github_import/importer.rb
index 751726d4810..3f9fd1b1a19 100644
--- a/lib/gitlab/legacy_github_import/importer.rb
+++ b/lib/gitlab/legacy_github_import/importer.rb
@@ -3,8 +3,6 @@
module Gitlab
module LegacyGithubImport
class Importer
- include Gitlab::ShellAdapter
-
def self.refmap
Gitlab::GithubImport.refmap
end
@@ -264,11 +262,11 @@ module Gitlab
end
def import_wiki
- unless project.wiki.repository_exists?
- wiki = WikiFormatter.new(project)
- gitlab_shell.import_wiki_repository(project, wiki)
- end
- rescue Gitlab::Shell::Error => e
+ return if project.wiki.repository_exists?
+
+ wiki = WikiFormatter.new(project)
+ project.wiki.repository.import_repository(wiki.import_url)
+ rescue ::Gitlab::Git::CommandError => e
# GitHub error message when the wiki repo has not been created;
# this means the repo has the wiki enabled but has no pages, so
# we can skip the import.

diff --git a/lib/gitlab/lograge/custom_options.rb b/lib/gitlab/lograge/custom_options.rb
new file mode 100644
index 00000000000..5dbff7d9102
--- /dev/null
+++ b/lib/gitlab/lograge/custom_options.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Lograge
+ module CustomOptions
+ LIMITED_ARRAY_SENTINEL = { key: 'truncated', value: '...' }.freeze
+ IGNORE_PARAMS = Set.new(%w(controller action format)).freeze
+
+ def self.call(event)
+ params = event
+ .payload[:params]
+ .each_with_object([]) { |(k, v), array| array << { key: k, value: v } unless IGNORE_PARAMS.include?(k) }
+
+ payload = {
+ time: Time.now.utc.iso8601(3),
+ params: Gitlab::Utils::LogLimitedArray.log_limited_array(params, sentinel: LIMITED_ARRAY_SENTINEL),
+ remote_ip: event.payload[:remote_ip],
+ user_id: event.payload[:user_id],
+ username: event.payload[:username],
+ ua: event.payload[:ua],
+ queue_duration: event.payload[:queue_duration]
+ }
+
+ ::Gitlab::InstrumentationHelper.add_instrumentation_data(payload)
+
+ payload[:response] = event.payload[:response] if event.payload[:response]
+ payload[:etag_route] = event.payload[:etag_route] if event.payload[:etag_route]
+ payload[Labkit::Correlation::CorrelationId::LOG_KEY] = Labkit::Correlation::CorrelationId.current_id
+
+ if cpu_s = Gitlab::Metrics::System.thread_cpu_duration(::Gitlab::RequestContext.instance.start_thread_cpu_time)
+ payload[:cpu_s] = cpu_s
+ end
+
+ # https://github.com/roidrage/lograge#logging-errors--exceptions
+ exception = event.payload[:exception_object]
+
+ ::Gitlab::ExceptionLogFormatter.format!(exception, payload)
+
+ payload
+ end
+ end
+ end
+end
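This module implements Lograge's custom_options hook (an object responding to call that receives the ActiveSupport notification event). A sketch of how it would be wired up, assuming a standard Lograge initializer; the path and surrounding settings are illustrative:

    # config/initializers/lograge.rb
    Rails.application.configure do
      config.lograge.enabled = true
      # Lograge calls this object with the controller event; CustomOptions.call
      # returns the extra fields merged into each structured log line.
      config.lograge.custom_options = Gitlab::Lograge::CustomOptions
    end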
diff --git a/lib/gitlab/markdown_cache.rb b/lib/gitlab/markdown_cache.rb
index 3dfaec48311..d7a0a9b6518 100644
--- a/lib/gitlab/markdown_cache.rb
+++ b/lib/gitlab/markdown_cache.rb
@@ -3,7 +3,7 @@
module Gitlab
module MarkdownCache
# Increment this number every time the renderer changes its output
- CACHE_COMMONMARK_VERSION = 18
+ CACHE_COMMONMARK_VERSION = 20
CACHE_COMMONMARK_VERSION_START = 10
BaseError = Class.new(StandardError)
diff --git a/lib/gitlab/metrics/dashboard/finder.rb b/lib/gitlab/metrics/dashboard/finder.rb
index 3dd86c8685d..990fd57bf41 100644
--- a/lib/gitlab/metrics/dashboard/finder.rb
+++ b/lib/gitlab/metrics/dashboard/finder.rb
@@ -29,9 +29,11 @@ module Gitlab
# Used by embedded dashboards.
# @param options - y_label [String] Y-Axis label of
# a panel. Used by embedded dashboards.
- # @param options - cluster [Cluster]
+ # @param options - cluster [Cluster]. Used by
+ # embedded and un-embedded dashboards.
# @param options - cluster_type [Symbol] The level of
- # cluster, one of [:admin, :project, :group]
+ # cluster, one of [:admin, :project, :group]. Used by
+ # embedded and un-embedded dashboards.
# @param options - grafana_url [String] URL pointing
# to a grafana dashboard panel
# @param options - prometheus_alert_id [Integer] ID of
diff --git a/lib/gitlab/metrics/dashboard/service_selector.rb b/lib/gitlab/metrics/dashboard/service_selector.rb
index 24ea85a5a95..993e508cbc6 100644
--- a/lib/gitlab/metrics/dashboard/service_selector.rb
+++ b/lib/gitlab/metrics/dashboard/service_selector.rb
@@ -3,7 +3,8 @@
# Responsible for determining which dashboard service should
# be used to fetch or generate a dashboard hash.
# The services can be considered in two categories - embeds
-# and dashboards. Embeds are all portions of dashboards.
+# and dashboards. Embed hashes are identical to dashboard hashes except
+# that they contain a subset of panels.
module Gitlab
module Metrics
module Dashboard
diff --git a/lib/gitlab/metrics/dashboard/stages/grafana_formatter.rb b/lib/gitlab/metrics/dashboard/stages/grafana_formatter.rb
index ce75c54d014..c90c1e3f0bc 100644
--- a/lib/gitlab/metrics/dashboard/stages/grafana_formatter.rb
+++ b/lib/gitlab/metrics/dashboard/stages/grafana_formatter.rb
@@ -13,12 +13,7 @@ module Gitlab
# Reformats the specified panel in the Gitlab
# dashboard-yml format
def transform!
- InputFormatValidator.new(
- grafana_dashboard,
- datasource,
- panel,
- query_params
- ).validate!
+ validate_input!
new_dashboard = formatted_dashboard
@@ -28,6 +23,17 @@ module Gitlab
private
+ def validate_input!
+ ::Grafana::Validator.new(
+ grafana_dashboard,
+ datasource,
+ panel,
+ query_params
+ ).validate!
+ rescue ::Grafana::Validator::Error => e
+ raise ::Gitlab::Metrics::Dashboard::Errors::DashboardProcessingError, e.message
+ end
+
def formatted_dashboard
{ panel_groups: [{ panels: [formatted_panel] }] }
end
@@ -56,11 +62,25 @@ module Gitlab
def panel
strong_memoize(:panel) do
grafana_dashboard[:dashboard][:panels].find do |panel|
- panel[:id].to_s == query_params[:panelId]
+ query_params[:panelId] ? matching_panel?(panel) : valid_panel?(panel)
end
end
end
+ # Determines whether a given panel is the one
+ # specified by the linked grafana url
+ def matching_panel?(panel)
+ panel[:id].to_s == query_params[:panelId]
+ end
+
+ # Determines whether any given panel has the potential
+ # to return valid results from grafana/prometheus
+ def valid_panel?(panel)
+ ::Grafana::Validator
+ .new(grafana_dashboard, datasource, panel, query_params)
+ .valid?
+ end
+
# Grafana url query parameters. Includes information
# on which panel to select and time range.
def query_params
@@ -141,83 +161,6 @@ module Gitlab
params[:grafana_url]
end
end
-
- class InputFormatValidator
- include ::Gitlab::Metrics::Dashboard::Errors
-
- attr_reader :grafana_dashboard, :datasource, :panel, :query_params
-
- UNSUPPORTED_GRAFANA_GLOBAL_VARS = %w(
- $__interval_ms
- $__timeFilter
- $__name
- $timeFilter
- $interval
- ).freeze
-
- def initialize(grafana_dashboard, datasource, panel, query_params)
- @grafana_dashboard = grafana_dashboard
- @datasource = datasource
- @panel = panel
- @query_params = query_params
- end
-
- def validate!
- validate_query_params!
- validate_datasource!
- validate_panel_type!
- validate_variable_definitions!
- validate_global_variables!
- end
-
- private
-
- def validate_datasource!
- return if datasource[:access] == 'proxy' && datasource[:type] == 'prometheus'
-
- raise_error 'Only Prometheus datasources with proxy access in Grafana are supported.'
- end
-
- def validate_query_params!
- return if [:panelId, :from, :to].all? { |param| query_params.include?(param) }
-
- raise_error 'Grafana query parameters must include panelId, from, and to.'
- end
-
- def validate_panel_type!
- return if panel[:type] == 'graph' && panel[:lines]
-
- raise_error 'Panel type must be a line graph.'
- end
-
- def validate_variable_definitions!
- return unless grafana_dashboard[:dashboard][:templating]
-
- return if grafana_dashboard[:dashboard][:templating][:list].all? do |variable|
- query_params[:"var-#{variable[:name]}"].present?
- end
-
- raise_error 'All Grafana variables must be defined in the query parameters.'
- end
-
- def validate_global_variables!
- return unless panel_contains_unsupported_vars?
-
- raise_error 'Prometheus must not include'
- end
-
- def panel_contains_unsupported_vars?
- panel[:targets].any? do |target|
- UNSUPPORTED_GRAFANA_GLOBAL_VARS.any? do |variable|
- target[:expr].include?(variable)
- end
- end
- end
-
- def raise_error(message)
- raise DashboardProcessingError.new(message)
- end
- end
end
end
end
diff --git a/lib/gitlab/middleware/go.rb b/lib/gitlab/middleware/go.rb
index 53508938c49..abdbccd3aa8 100644
--- a/lib/gitlab/middleware/go.rb
+++ b/lib/gitlab/middleware/go.rb
@@ -53,8 +53,9 @@ module Gitlab
repository_url = if Gitlab::CurrentSettings.enabled_git_access_protocol == 'ssh'
shell = config.gitlab_shell
+ user = "#{shell.ssh_user}@" unless shell.ssh_user.empty?
port = ":#{shell.ssh_port}" unless shell.ssh_port == 22
- "ssh://#{shell.ssh_user}@#{shell.ssh_host}#{port}/#{path}.git"
+ "ssh://#{user}#{shell.ssh_host}#{port}/#{path}.git"
else
"#{project_url}.git"
end
diff --git a/lib/gitlab/middleware/read_only/controller.rb b/lib/gitlab/middleware/read_only/controller.rb
index ca8f4e34802..cdab86540f8 100644
--- a/lib/gitlab/middleware/read_only/controller.rb
+++ b/lib/gitlab/middleware/read_only/controller.rb
@@ -90,12 +90,14 @@ module Gitlab
# Overridden in EE module
def whitelisted_routes
- grack_route? || internal_route? || lfs_route? || compare_git_revisions_route? || sidekiq_route? || session_route? || graphql_query?
+ workhorse_passthrough_route? || internal_route? || lfs_route? || compare_git_revisions_route? || sidekiq_route? || session_route? || graphql_query?
end
- def grack_route?
+ # URL for requests passed through gitlab-workhorse to rails-web
+ # https://gitlab.com/gitlab-org/gitlab-workhorse/-/merge_requests/12
+ def workhorse_passthrough_route?
# Calling route_hash may be expensive. Only do it if we think there's a possible match
- return false unless
+ return false unless request.post? &&
request.path.end_with?('.git/git-upload-pack', '.git/git-receive-pack')
WHITELISTED_GIT_ROUTES[route_hash[:controller]]&.include?(route_hash[:action])
diff --git a/lib/gitlab/object_hierarchy.rb b/lib/gitlab/object_hierarchy.rb
index 74057bbc493..41d80fe9aa6 100644
--- a/lib/gitlab/object_hierarchy.rb
+++ b/lib/gitlab/object_hierarchy.rb
@@ -51,7 +51,7 @@ module Gitlab
# and all their ancestors (recursively).
#
# Passing an `upto` will stop the recursion once the specified parent_id is
- # reached. So all ancestors *lower* than the specified acestor will be
+ # reached. So all ancestors *lower* than the specified ancestor will be
# included.
#
# Passing a `hierarchy_order` with either `:asc` or `:desc` will cause the
diff --git a/lib/gitlab/omniauth_logging/json_formatter.rb b/lib/gitlab/omniauth_logging/json_formatter.rb
new file mode 100644
index 00000000000..cdd4da31803
--- /dev/null
+++ b/lib/gitlab/omniauth_logging/json_formatter.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'json'
+
+module Gitlab
+ module OmniauthLogging
+ class JSONFormatter
+ def call(severity, datetime, progname, msg)
+ { severity: severity, timestamp: datetime.utc.iso8601(3), pid: $$, progname: progname, message: msg }.to_json << "\n"
+ end
+ end
+ end
+end
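The formatter follows the standard Ruby Logger interface (call(severity, datetime, progname, msg)), so attaching it is a one-liner. A sketch, assuming it is assigned to the OmniAuth logger in an initializer:

    logger = Logger.new($stdout)
    logger.formatter = Gitlab::OmniauthLogging::JSONFormatter.new
    OmniAuth.config.logger = logger

    logger.info('OmniAuth callback received')
    # => {"severity":"INFO","timestamp":"2020-03-19T09:35:24.000Z","pid":1234,
    #     "progname":null,"message":"OmniAuth callback received"}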
diff --git a/lib/gitlab/path_regex.rb b/lib/gitlab/path_regex.rb
index 9606e3e134c..5fa0fbf874c 100644
--- a/lib/gitlab/path_regex.rb
+++ b/lib/gitlab/path_regex.rb
@@ -42,7 +42,6 @@ module Gitlab
invites
jwt
login
- notification_settings
oauth
profile
projects
@@ -237,8 +236,32 @@ module Gitlab
}x
end
+ def full_snippets_repository_path_regex
+ %r{\A(#{personal_snippet_repository_path_regex}|#{project_snippet_repository_path_regex})\z}
+ end
+
+ def personal_and_project_snippets_path_regex
+ %r{#{personal_snippet_path_regex}|#{project_snippet_path_regex}}
+ end
+
private
+ def personal_snippet_path_regex
+ /snippets/
+ end
+
+ def personal_snippet_repository_path_regex
+ %r{#{personal_snippet_path_regex}/\d+}
+ end
+
+ def project_snippet_path_regex
+ %r{#{full_namespace_route_regex}/#{project_route_regex}/snippets}
+ end
+
+ def project_snippet_repository_path_regex
+ %r{#{project_snippet_path_regex}/\d+}
+ end
+
def single_line_regexp(regex)
# Turns a multiline extended regexp into a single line one,
# because `rake routes` breaks on multiline regexes.
diff --git a/lib/gitlab/process_memory_cache.rb b/lib/gitlab/process_memory_cache.rb
new file mode 100644
index 00000000000..5e8578711b2
--- /dev/null
+++ b/lib/gitlab/process_memory_cache.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module Gitlab
+ class ProcessMemoryCache
+ # ActiveSupport::Cache::MemoryStore is thread-safe:
+ # https://github.com/rails/rails/blob/2f1fefe456932a6d7d2b155d27b5315c33f3daa1/activesupport/lib/active_support/cache/memory_store.rb#L19
+ @cache = ActiveSupport::Cache::MemoryStore.new
+
+ def self.cache_backend
+ @cache
+ end
+ end
+end
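A sketch of the intended use: a per-process memoization layer that avoids Redis round trips. The key and block are invented for the example:

    cache = Gitlab::ProcessMemoryCache.cache_backend

    value = cache.fetch(:expensive_lookup, expires_in: 1.minute) do
      compute_expensive_lookup # hypothetical expensive call, runs once per process
    end

    cache.read(:expensive_lookup) # served from the in-process MemoryStore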
diff --git a/lib/gitlab/profiler.rb b/lib/gitlab/profiler.rb
index f47ccb8fed9..e10cdf0d8fb 100644
--- a/lib/gitlab/profiler.rb
+++ b/lib/gitlab/profiler.rb
@@ -2,6 +2,8 @@
module Gitlab
module Profiler
+ extend WithRequestStore
+
FILTERED_STRING = '[FILTERED]'
IGNORE_BACKTRACES = %w[
@@ -58,28 +60,26 @@ module Gitlab
logger = create_custom_logger(logger, private_token: private_token)
- RequestStore.begin!
-
- # Make an initial call for an asset path in development mode to avoid
- # sprockets dominating the profiler output.
- ActionController::Base.helpers.asset_path('katex.css') if Rails.env.development?
+ result = with_request_store do
+ # Make an initial call for an asset path in development mode to avoid
+ # sprockets dominating the profiler output.
+ ActionController::Base.helpers.asset_path('katex.css') if Rails.env.development?
- # Rails loads internationalization files lazily the first time a
- # translation is needed. Running this prevents this overhead from showing
- # up in profiles.
- ::I18n.t('.')[:test_string]
+ # Rails loads internationalization files lazily the first time a
+ # translation is needed. Running this prevents this overhead from showing
+ # up in profiles.
+ ::I18n.t('.')[:test_string]
- # Remove API route mounting from the profile.
- app.get('/api/v4/users')
+ # Remove API route mounting from the profile.
+ app.get('/api/v4/users')
- result = with_custom_logger(logger) do
- with_user(user) do
- RubyProf.profile { app.public_send(verb, url, params: post_data, headers: headers) } # rubocop:disable GitlabSecurity/PublicSend
+ with_custom_logger(logger) do
+ with_user(user) do
+ RubyProf.profile { app.public_send(verb, url, params: post_data, headers: headers) } # rubocop:disable GitlabSecurity/PublicSend
+ end
end
end
- RequestStore.end!
-
log_load_times_by_model(logger)
result
diff --git a/lib/gitlab/project_template.rb b/lib/gitlab/project_template.rb
index b4ee8818925..9ed6a23632c 100644
--- a/lib/gitlab/project_template.rb
+++ b/lib/gitlab/project_template.rb
@@ -40,10 +40,11 @@ module Gitlab
ProjectTemplate.new('rails', 'Ruby on Rails', _('Includes an MVC structure, Gemfile, Rakefile, along with many others, to help you get started.'), 'https://gitlab.com/gitlab-org/project-templates/rails', 'illustrations/logos/rails.svg'),
ProjectTemplate.new('spring', 'Spring', _('Includes an MVC structure, mvnw and pom.xml to help you get started.'), 'https://gitlab.com/gitlab-org/project-templates/spring', 'illustrations/logos/spring.svg'),
ProjectTemplate.new('express', 'NodeJS Express', _('Includes an MVC structure to help you get started.'), 'https://gitlab.com/gitlab-org/project-templates/express', 'illustrations/logos/express.svg'),
- ProjectTemplate.new('iosswift', 'iOS (Swift)', _('A ready-to-go template for use with iOS Swift apps.'), 'https://gitlab.com/gitlab-org/project-templates/iosswift'),
+ ProjectTemplate.new('iosswift', 'iOS (Swift)', _('A ready-to-go template for use with iOS Swift apps.'), 'https://gitlab.com/gitlab-org/project-templates/iosswift', 'illustrations/logos/swift.svg'),
ProjectTemplate.new('dotnetcore', '.NET Core', _('A .NET Core console application template, customizable for any .NET Core project'), 'https://gitlab.com/gitlab-org/project-templates/dotnetcore', 'illustrations/logos/dotnet.svg'),
ProjectTemplate.new('android', 'Android', _('A ready-to-go template for use with Android apps.'), 'https://gitlab.com/gitlab-org/project-templates/android', 'illustrations/logos/android.svg'),
ProjectTemplate.new('gomicro', 'Go Micro', _('Go Micro is a framework for micro service development.'), 'https://gitlab.com/gitlab-org/project-templates/go-micro'),
+ ProjectTemplate.new('gatsby', 'Pages/Gatsby', _('Everything you need to create a GitLab Pages site using Gatsby.'), 'https://gitlab.com/pages/gatsby'),
ProjectTemplate.new('hugo', 'Pages/Hugo', _('Everything you need to create a GitLab Pages site using Hugo.'), 'https://gitlab.com/pages/hugo'),
ProjectTemplate.new('jekyll', 'Pages/Jekyll', _('Everything you need to create a GitLab Pages site using Jekyll.'), 'https://gitlab.com/pages/jekyll'),
ProjectTemplate.new('plainhtml', 'Pages/Plain HTML', _('Everything you need to create a GitLab Pages site using plain HTML.'), 'https://gitlab.com/pages/plain-html'),
diff --git a/lib/gitlab/prometheus/query_variables.rb b/lib/gitlab/prometheus/query_variables.rb
index ba2d33ee1c1..4d48c4a3af7 100644
--- a/lib/gitlab/prometheus/query_variables.rb
+++ b/lib/gitlab/prometheus/query_variables.rb
@@ -7,7 +7,11 @@ module Gitlab
{
ci_environment_slug: environment.slug,
kube_namespace: environment.deployment_namespace || '',
- environment_filter: %{container_name!="POD",environment="#{environment.slug}"}
+ environment_filter: %{container_name!="POD",environment="#{environment.slug}"},
+ ci_project_name: environment.project.name,
+ ci_project_namespace: environment.project.namespace.name,
+ ci_project_path: environment.project.full_path,
+ ci_environment_name: environment.name
}
end
end
diff --git a/lib/gitlab/quick_actions/extractor.rb b/lib/gitlab/quick_actions/extractor.rb
index 6f87968e286..cd07122ffd9 100644
--- a/lib/gitlab/quick_actions/extractor.rb
+++ b/lib/gitlab/quick_actions/extractor.rb
@@ -13,6 +13,7 @@ module Gitlab
def initialize(command_definitions)
@command_definitions = command_definitions
+ @commands_regex = {}
end
# Extracts commands from content and return an array of commands.
@@ -58,7 +59,8 @@ module Gitlab
content = content.dup
content.delete!("\r")
- content.gsub!(commands_regex(only: only)) do
+ names = command_names(limit_to_commands: only).map(&:to_s)
+ content.gsub!(commands_regex(names: names)) do
command, output = process_commands($~, redact)
commands << command
output
@@ -91,10 +93,8 @@ module Gitlab
# It looks something like:
#
# /^\/(?<cmd>close|reopen|...)(?:( |$))(?<arg>[^\/\n]*)(?:\n|$)/
- def commands_regex(only:)
- names = command_names(limit_to_commands: only).map(&:to_s)
-
- @commands_regex ||= %r{
+ def commands_regex(names:)
+ @commands_regex[names] ||= %r{
(?<code>
# Code blocks:
# ```
@@ -106,6 +106,17 @@ module Gitlab
\n```$
)
|
+ (?<inline_code>
+ # Inline code on separate rows:
+ # `
+ # Anything, including `/cmd arg` which are ignored by this filter
+ # `
+
+ ^.*`\n*
+ .+?
+ \n*`$
+ )
+ |
(?<html>
# HTML block:
# <tag>
@@ -151,14 +162,18 @@ module Gitlab
end
substitution_definitions.each do |substitution|
- match_data = substitution.match(content.downcase)
- if match_data
- command = [substitution.name.to_s]
- command << match_data[1] unless match_data[1].empty?
- commands << command
+ regex = commands_regex(names: substitution.all_names)
+ content = content.gsub(regex) do |text|
+ if $~[:cmd]
+ command = [substitution.name.to_s]
+ command << $~[:arg] if $~[:arg].present?
+ commands << command
+
+ substitution.perform_substitution(self, text)
+ else
+ text
+ end
end
-
- content = substitution.perform_substitution(self, content)
end
[content, commands]
diff --git a/lib/gitlab/quick_actions/substitution_definition.rb b/lib/gitlab/quick_actions/substitution_definition.rb
index b7231aa3a8b..cd4d202e8d0 100644
--- a/lib/gitlab/quick_actions/substitution_definition.rb
+++ b/lib/gitlab/quick_actions/substitution_definition.rb
@@ -17,7 +17,7 @@ module Gitlab
return unless content
all_names.each do |a_name|
- content = content.gsub(%r{/#{a_name}(?![\S]) ?(.*)$}i, execute_block(action_block, context, '\1'))
+ content = content.sub(%r{/#{a_name}(?![\S]) ?(.*)$}i, execute_block(action_block, context, '\1'))
end
content
diff --git a/lib/gitlab/rate_limit_helpers.rb b/lib/gitlab/rate_limit_helpers.rb
new file mode 100644
index 00000000000..2dcc888892b
--- /dev/null
+++ b/lib/gitlab/rate_limit_helpers.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module RateLimitHelpers
+ ARCHIVE_RATE_LIMIT_REACHED_MESSAGE = 'This archive has been requested too many times. Try again later.'
+ ARCHIVE_RATE_ANONYMOUS_THRESHOLD = 100 # Allow 100 requests/min for anonymous users
+ ARCHIVE_RATE_THROTTLE_KEY = :project_repositories_archive
+
+ def archive_rate_limit_reached?(user, project)
+ return false unless Feature.enabled?(:archive_rate_limit, default_enabled: true)
+
+ key = ARCHIVE_RATE_THROTTLE_KEY
+
+ if rate_limiter.throttled?(key, scope: [project, user], threshold: archive_rate_threshold_by_user(user))
+ rate_limiter.log_request(request, "#{key}_request_limit".to_sym, user)
+
+ return true
+ end
+
+ false
+ end
+
+ def archive_rate_threshold_by_user(user)
+ if user
+ nil # Use the defaults
+ else
+ ARCHIVE_RATE_ANONYMOUS_THRESHOLD
+ end
+ end
+
+ def rate_limiter
+ ::Gitlab::ApplicationRateLimiter
+ end
+ end
+end
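A hedged sketch of how a controller could use the helper; the controller, action and render call are illustrative and not the actual call site. The module relies on the including class providing request and the usual current_user / project helpers:

    class RepositoriesController < ApplicationController
      include ::Gitlab::RateLimitHelpers

      def archive
        if archive_rate_limit_reached?(current_user, project)
          return render plain: ::Gitlab::RateLimitHelpers::ARCHIVE_RATE_LIMIT_REACHED_MESSAGE,
                        status: :too_many_requests
        end

        # ... serve the archive as before ...
      end
    end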
diff --git a/lib/gitlab/reactive_cache_set_cache.rb b/lib/gitlab/reactive_cache_set_cache.rb
new file mode 100644
index 00000000000..609087d8137
--- /dev/null
+++ b/lib/gitlab/reactive_cache_set_cache.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+# Interface to the Redis-backed cache store to keep track of complete cache keys
+# for a ReactiveCache resource.
+module Gitlab
+ class ReactiveCacheSetCache < Gitlab::SetCache
+ attr_reader :expires_in
+
+ def initialize(expires_in: 10.minutes)
+ @expires_in = expires_in
+ end
+
+ def cache_key(key)
+ "#{cache_type}:#{key}:set"
+ end
+
+ def clear_cache!(key)
+ with do |redis|
+ keys = read(key).map { |value| "#{cache_type}:#{value}" }
+ keys << cache_key(key)
+
+ redis.pipelined do
+ keys.each_slice(1000) { |subset| redis.del(*subset) }
+ end
+ end
+ end
+
+ private
+
+ def cache_type
+ Gitlab::Redis::Cache::CACHE_NAMESPACE
+ end
+ end
+end
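A usage sketch (keys are illustrative); write and read come from the Gitlab::SetCache base class added further down in this change, and the set tracks every full cache key written for a ReactiveCache resource so they can be invalidated together:

    set_cache = Gitlab::ReactiveCacheSetCache.new(expires_in: 10.minutes)

    set_cache.write('environment:42', 'environment:42:terminals')
    set_cache.read('environment:42') # => ["environment:42:terminals"]

    # Deletes every tracked key plus the set itself in one pipelined call.
    set_cache.clear_cache!('environment:42')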
diff --git a/lib/gitlab/redacted_search_results_logger.rb b/lib/gitlab/redacted_search_results_logger.rb
new file mode 100644
index 00000000000..07dbf6fe97d
--- /dev/null
+++ b/lib/gitlab/redacted_search_results_logger.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+module Gitlab
+ class RedactedSearchResultsLogger < ::Gitlab::JsonLogger
+ def self.file_name_noext
+ 'redacted_search_results'
+ end
+ end
+end
diff --git a/lib/gitlab/reference_counter.rb b/lib/gitlab/reference_counter.rb
index 1c43de35816..5fdfa5e75ed 100644
--- a/lib/gitlab/reference_counter.rb
+++ b/lib/gitlab/reference_counter.rb
@@ -1,20 +1,42 @@
# frozen_string_literal: true
module Gitlab
+ # Reference Counter
+ #
+ # A reference counter is used as a mechanism to identify when
+ # a repository is being accessed by a write operation.
+ #
+ # Maintenance operations can use it as a signal for when it is safe to
+ # execute significant changes, in order to avoid disrupting running traffic.
class ReferenceCounter
REFERENCE_EXPIRE_TIME = 600
attr_reader :gl_repository, :key
+ # Reference Counter instance
+ #
+ # @example
+ # Gitlab::ReferenceCounter.new('project-1')
+ #
+ # @see Gitlab::GlRepository::RepoType.identifier_for_repositorable
+ # @param [String] gl_repository repository identifier
def initialize(gl_repository)
@gl_repository = gl_repository
@key = "git-receive-pack-reference-counter:#{gl_repository}"
end
+ # Return the actual counter value
+ #
+ # @return [Integer] value
def value
- Gitlab::Redis::SharedState.with { |redis| (redis.get(key) || 0).to_i }
+ Gitlab::Redis::SharedState.with do |redis|
+ (redis.get(key) || 0).to_i
+ end
end
+ # Increase the counter
+ #
+ # @return [Boolean] whether operation was a success
def increase
redis_cmd do |redis|
redis.incr(key)
@@ -22,26 +44,51 @@ module Gitlab
end
end
- # rubocop:disable Gitlab/RailsLogger
+ # Decrease the counter
+ #
+ # @return [Boolean] whether operation was a success
def decrease
redis_cmd do |redis|
current_value = redis.decr(key)
if current_value < 0
+ # rubocop:disable Gitlab/RailsLogger
Rails.logger.warn("Reference counter for #{gl_repository} decreased" \
- " when its value was less than 1. Reseting the counter.")
+ " when its value was less than 1. Resetting the counter.")
+ # rubocop:enable Gitlab/RailsLogger
redis.del(key)
end
end
end
- # rubocop:enable Gitlab/RailsLogger
+
+ # Reset the reference counter
+ #
+ # @private Used internally by SREs and for debugging purposes
+ # @return [Boolean] whether reset was a success
+ def reset!
+ redis_cmd do |redis|
+ redis.del(key)
+ end
+ end
+
+ # When the reference counter would expire
+ #
+ # @api private Used internally by SREs and for debugging purposes
+ # @return [Integer] seconds until expiration, or a negative value when the key has no expiry or does not exist
+ def expires_in
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.ttl(key)
+ end
+ end
private
def redis_cmd
Gitlab::Redis::SharedState.with { |redis| yield(redis) }
+
true
rescue => e
Rails.logger.warn("GitLab: An unexpected error occurred in writing to Redis: #{e}") # rubocop:disable Gitlab/RailsLogger
+
false
end
end
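The documented API reads roughly like this in practice; the identifier is illustrative:

    counter = Gitlab::ReferenceCounter.new('project-1')

    counter.increase   # => true, a write operation is now in flight
    counter.value      # => 1
    counter.expires_in # seconds until the Redis key expires
    counter.decrease   # => true, the operation finished
    counter.reset!     # SRE/debugging helper: force the counter back to zero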
diff --git a/lib/gitlab/reference_extractor.rb b/lib/gitlab/reference_extractor.rb
index 519eb49658a..d07d6440c6b 100644
--- a/lib/gitlab/reference_extractor.rb
+++ b/lib/gitlab/reference_extractor.rb
@@ -44,7 +44,7 @@ module Gitlab
end
def issues
- if project && project.jira_tracker?
+ if project&.external_references_supported?
if project.issues_enabled?
@references[:all_issues] ||= references(:external_issue) + references(:issue)
else
diff --git a/lib/gitlab/regex.rb b/lib/gitlab/regex.rb
index fd6e24a96d8..38281fb1c91 100644
--- a/lib/gitlab/regex.rb
+++ b/lib/gitlab/regex.rb
@@ -112,7 +112,7 @@ module Gitlab
# Based on Jira's project key format
# https://confluence.atlassian.com/adminjiraserver073/changing-the-project-key-format-861253229.html
def jira_issue_key_regex
- @jira_issue_key_regex ||= /[A-Z][A-Z_0-9]+-\d+/
+ @jira_issue_key_regex ||= /[A-Z][A-Z_0-9]+-\d+\b/
end
def jira_transition_id_regex
@@ -144,6 +144,10 @@ module Gitlab
def utc_date_regex
@utc_date_regex ||= /\A[0-9]{4}-[0-9]{2}-[0-9]{2}\z/.freeze
end
+
+ def issue
+ @issue ||= /(?<issue>\d+\b)/
+ end
end
end
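The added \b keeps a Jira key from matching when it runs straight into more word characters; a quick illustration, assuming the module-level accessor shown above and with strings invented for the example:

    regex = Gitlab::Regex.jira_issue_key_regex

    'See GITLAB-1234 for details'[regex] # => "GITLAB-1234"
    'GITLAB-1234abc'[regex]              # => nil, trailing word characters break the match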
diff --git a/lib/gitlab/repo_path.rb b/lib/gitlab/repo_path.rb
index e8c749cac14..67e23624045 100644
--- a/lib/gitlab/repo_path.rb
+++ b/lib/gitlab/repo_path.rb
@@ -19,30 +19,62 @@ module Gitlab
# Removing the suffix (.wiki, .design, ...) from the project path
full_path = repo_path.chomp(type.path_suffix)
+ container, project, redirected_path = find_container(type, full_path)
- project, was_redirected = find_project(full_path)
- redirected_path = repo_path if was_redirected
-
- # If we found a matching project, then the type was matched, no need to
- # continue looking.
- return [project, type, redirected_path] if project
+ return [container, project, type, redirected_path] if container
end
# When a project did not exist, the parsed repo_type would be empty.
# In that case, we want to continue with a regular project repository, as we
# could create the project if the pushing user is allowed to do so.
- [nil, Gitlab::GlRepository.default_type, nil]
+ [nil, nil, Gitlab::GlRepository.default_type, nil]
+ end
+
+ def self.find_container(type, full_path)
+ if type.snippet?
+ snippet, redirected_path = find_snippet(full_path)
+
+ [snippet, snippet&.project, redirected_path]
+ else
+ project, redirected_path = find_project(full_path)
+
+ [project, project, redirected_path]
+ end
end
def self.find_project(project_path)
+ return [nil, nil] if project_path.blank?
+
project = Project.find_by_full_path(project_path, follow_redirects: true)
+ redirected_path = redirected?(project, project_path) ? project_path : nil
- [project, redirected?(project, project_path)]
+ [project, redirected_path]
end
def self.redirected?(project, project_path)
project && project.full_path.casecmp(project_path) != 0
end
+
+ # A snippet_path can be either:
+ # - snippets/1
+ # - h5bp/html5-boilerplate/snippets/53
+ def self.find_snippet(snippet_path)
+ return [nil, nil] if snippet_path.blank?
+
+ snippet_id, project_path = extract_snippet_info(snippet_path)
+ project, redirected_path = find_project(project_path)
+
+ [Snippet.find_by_id_and_project(id: snippet_id, project: project), redirected_path]
+ end
+
+ def self.extract_snippet_info(snippet_path)
+ path_segments = snippet_path.split('/')
+ snippet_id = path_segments.pop
+ path_segments.pop # Remove snippets from path
+ project_path = File.join(path_segments)
+
+ [snippet_id, project_path]
+ end
end
end
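The two snippet path shapes mentioned above resolve as follows; the paths are illustrative:

    Gitlab::RepoPath.extract_snippet_info('h5bp/html5-boilerplate/snippets/53')
    # => ["53", "h5bp/html5-boilerplate"]

    # A personal path such as 'snippets/1' yields the id with an empty project
    # path, so find_snippet looks the snippet up without a project scope.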
diff --git a/lib/gitlab/repository_cache_adapter.rb b/lib/gitlab/repository_cache_adapter.rb
index 304f53b58c4..688a4a39dba 100644
--- a/lib/gitlab/repository_cache_adapter.rb
+++ b/lib/gitlab/repository_cache_adapter.rb
@@ -237,7 +237,7 @@ module Gitlab
end
def expire_redis_set_method_caches(methods)
- methods.each { |name| redis_set_cache.expire(name) }
+ redis_set_cache.expire(*methods)
end
def expire_redis_hash_method_caches(methods)
diff --git a/lib/gitlab/repository_set_cache.rb b/lib/gitlab/repository_set_cache.rb
index 4797ec0b116..1e2d86b7ad2 100644
--- a/lib/gitlab/repository_set_cache.rb
+++ b/lib/gitlab/repository_set_cache.rb
@@ -2,7 +2,7 @@
# Interface to the Redis-backed cache store for keys that use a Redis set
module Gitlab
- class RepositorySetCache
+ class RepositorySetCache < Gitlab::SetCache
attr_reader :repository, :namespace, :expires_in
def initialize(repository, extra_namespace: nil, expires_in: 2.weeks)
@@ -17,18 +17,6 @@ module Gitlab
"#{type}:#{namespace}:set"
end
- def expire(key)
- with { |redis| redis.del(cache_key(key)) }
- end
-
- def exist?(key)
- with { |redis| redis.exists(cache_key(key)) }
- end
-
- def read(key)
- with { |redis| redis.smembers(cache_key(key)) }
- end
-
def write(key, value)
full_key = cache_key(key)
@@ -54,15 +42,5 @@ module Gitlab
write(key, yield)
end
end
-
- def include?(key, value)
- with { |redis| redis.sismember(cache_key(key), value) }
- end
-
- private
-
- def with(&blk)
- Gitlab::Redis::Cache.with(&blk) # rubocop:disable CodeReuse/ActiveRecord
- end
end
end
diff --git a/lib/gitlab/request_profiler/middleware.rb b/lib/gitlab/request_profiler/middleware.rb
index 99958d7a211..7050aee3847 100644
--- a/lib/gitlab/request_profiler/middleware.rb
+++ b/lib/gitlab/request_profiler/middleware.rb
@@ -51,7 +51,7 @@ module Gitlab
def call_with_call_stack_profiling(env)
ret = nil
report = RubyProf::Profile.profile do
- ret = catch(:warden) do
+ ret = catch(:warden) do # rubocop:disable Cop/BanCatchThrow
@app.call(env)
end
end
@@ -67,7 +67,7 @@ module Gitlab
def call_with_memory_profiling(env)
ret = nil
report = MemoryProfiler.report do
- ret = catch(:warden) do
+ ret = catch(:warden) do # rubocop:disable Cop/BanCatchThrow
@app.call(env)
end
end
@@ -99,7 +99,7 @@ module Gitlab
if ret.is_a?(Array)
ret
else
- throw(:warden, ret)
+ throw(:warden, ret) # rubocop:disable Cop/BanCatchThrow
end
end
end
diff --git a/lib/gitlab/search/found_blob.rb b/lib/gitlab/search/found_blob.rb
index f472c70446c..fc1abc064c7 100644
--- a/lib/gitlab/search/found_blob.rb
+++ b/lib/gitlab/search/found_blob.rb
@@ -155,7 +155,7 @@ module Gitlab
end
def repository
- @repository ||= project.repository
+ @repository ||= project&.repository
end
end
end
diff --git a/lib/gitlab/serverless/domain.rb b/lib/gitlab/serverless/domain.rb
deleted file mode 100644
index ec7c68764d1..00000000000
--- a/lib/gitlab/serverless/domain.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module Serverless
- class Domain
- UUID_LENGTH = 14
-
- def self.generate_uuid
- SecureRandom.hex(UUID_LENGTH / 2)
- end
- end
- end
-end
diff --git a/lib/gitlab/serverless/function_uri.rb b/lib/gitlab/serverless/function_uri.rb
deleted file mode 100644
index c0e0cf00f35..00000000000
--- a/lib/gitlab/serverless/function_uri.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module Serverless
- class FunctionURI < URI::HTTPS
- SERVERLESS_DOMAIN_REGEXP = %r{^(?<scheme>https?://)?(?<function>[^.]+)-(?<cluster_left>\h{2})a1(?<cluster_middle>\h{10})f2(?<cluster_right>\h{2})(?<environment_id>\h+)-(?<environment_slug>[^.]+)\.(?<domain>.+)}.freeze
-
- attr_reader :function, :cluster, :environment
-
- def initialize(function: nil, cluster: nil, environment: nil)
- initialize_required_argument(:function, function)
- initialize_required_argument(:cluster, cluster)
- initialize_required_argument(:environment, environment)
-
- @host = "#{function}-#{cluster.uuid[0..1]}a1#{cluster.uuid[2..-3]}f2#{cluster.uuid[-2..-1]}#{"%x" % environment.id}-#{environment.slug}.#{cluster.domain}"
-
- super('https', nil, host, nil, nil, nil, nil, nil, nil)
- end
-
- def self.parse(uri)
- match = SERVERLESS_DOMAIN_REGEXP.match(uri)
- return unless match
-
- cluster = ::Serverless::DomainCluster.find(match[:cluster_left] + match[:cluster_middle] + match[:cluster_right])
- return unless cluster
-
- environment = ::Environment.find(match[:environment_id].to_i(16))
- return unless environment&.slug == match[:environment_slug]
-
- new(
- function: match[:function],
- cluster: cluster,
- environment: environment
- )
- end
-
- private
-
- def initialize_required_argument(name, value)
- raise ArgumentError.new("missing argument: #{name}") unless value
-
- instance_variable_set("@#{name}".to_sym, value)
- end
- end
- end
-end
diff --git a/lib/gitlab/serverless/service.rb b/lib/gitlab/serverless/service.rb
index 643e076c587..c3ab2e9ddeb 100644
--- a/lib/gitlab/serverless/service.rb
+++ b/lib/gitlab/serverless/service.rb
@@ -60,7 +60,11 @@ class Gitlab::Serverless::Service
def proxy_url
if cluster&.serverless_domain
- Gitlab::Serverless::FunctionURI.new(function: name, cluster: cluster.serverless_domain, environment: environment)
+ ::Serverless::Domain.new(
+ function_name: name,
+ serverless_domain_cluster: cluster.serverless_domain,
+ environment: environment
+ ).uri.to_s
end
end
diff --git a/lib/gitlab/set_cache.rb b/lib/gitlab/set_cache.rb
new file mode 100644
index 00000000000..d1151a431bb
--- /dev/null
+++ b/lib/gitlab/set_cache.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+# Interface to the Redis-backed cache store for keys that use a Redis set.
+module Gitlab
+ class SetCache
+ attr_reader :expires_in
+
+ def initialize(expires_in: 2.weeks)
+ @expires_in = expires_in
+ end
+
+ def cache_key(key)
+ "#{key}:set"
+ end
+
+ # Returns the number of keys deleted by Redis
+ def expire(*keys)
+ return 0 if keys.empty?
+
+ with do |redis|
+ keys = keys.map { |key| cache_key(key) }
+ unlink_or_delete(redis, keys)
+ end
+ end
+
+ def exist?(key)
+ with { |redis| redis.exists(cache_key(key)) }
+ end
+
+ def write(key, value)
+ with do |redis|
+ redis.pipelined do
+ redis.sadd(cache_key(key), value)
+
+ redis.expire(cache_key(key), expires_in)
+ end
+ end
+
+ value
+ end
+
+ def read(key)
+ with { |redis| redis.smembers(cache_key(key)) }
+ end
+
+ def include?(key, value)
+ with { |redis| redis.sismember(cache_key(key), value) }
+ end
+
+ def ttl(key)
+ with { |redis| redis.ttl(cache_key(key)) }
+ end
+
+ private
+
+ def with(&blk)
+ Gitlab::Redis::Cache.with(&blk) # rubocop:disable CodeReuse/ActiveRecord
+ end
+
+ def unlink_or_delete(redis, keys)
+ if Feature.enabled?(:repository_set_cache_unlink, default_enabled: true)
+ redis.unlink(*keys)
+ else
+ redis.del(*keys)
+ end
+ rescue ::Redis::CommandError
+ redis.del(*keys)
+ end
+ end
+end
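A minimal sketch of the shared set-cache interface that RepositorySetCache and ReactiveCacheSetCache now build on; the key and values are invented:

    cache = Gitlab::SetCache.new(expires_in: 1.hour)

    cache.write('branch_names', %w[master develop]) # SADD plus EXPIRE, pipelined
    cache.read('branch_names')                      # => ["develop", "master"] (unordered)
    cache.include?('branch_names', 'master')        # => true
    cache.ttl('branch_names')                       # seconds until expiry
    cache.expire('branch_names')                    # UNLINK (or DEL), returns the number of keys removed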
diff --git a/lib/gitlab/setup_helper.rb b/lib/gitlab/setup_helper.rb
index c449c6879bc..99a7e617884 100644
--- a/lib/gitlab/setup_helper.rb
+++ b/lib/gitlab/setup_helper.rb
@@ -33,8 +33,6 @@ module Gitlab
if Rails.env.test?
storage_path = Rails.root.join('tmp', 'tests', 'second_storage').to_s
-
- FileUtils.mkdir(storage_path) unless File.exist?(storage_path)
storages << { name: 'test_second_storage', path: storage_path }
end
diff --git a/lib/gitlab/shell.rb b/lib/gitlab/shell.rb
index 726ecd81824..1f8a45e5481 100644
--- a/lib/gitlab/shell.rb
+++ b/lib/gitlab/shell.rb
@@ -1,13 +1,17 @@
# frozen_string_literal: true
-# Gitaly note: SSH key operations are not part of Gitaly so will never be migrated.
-
require 'securerandom'
module Gitlab
+ # This class is an artifact of a time when common repository operations were
+ # performed by calling out to scripts in the gitlab-shell project. Now, these
+ # operations are all performed by Gitaly, and are mostly accessible through
+ # the Repository class. Prefer using a Repository over the functionality here.
+ #
+ # Legacy code relating to namespaces still relies on Gitlab::Shell; it can be
+ # converted to a module once https://gitlab.com/groups/gitlab-org/-/epics/2320
+ # is completed. https://gitlab.com/gitlab-org/gitlab/-/issues/25095 tracks it.
class Shell
- GITLAB_SHELL_ENV_VARS = %w(GIT_TERMINAL_PROMPT).freeze
-
Error = Class.new(StandardError)
class << self
@@ -36,8 +40,31 @@ module Gitlab
.join('GITLAB_SHELL_VERSION')).strip
end
+ # Return GitLab shell version
+ #
+ # @return [String] version
+ def version
+ @version ||= File.read(gitlab_shell_version_file).chomp if File.readable?(gitlab_shell_version_file)
+ end
+
+ # Return a SSH url for a given project path
+ #
+ # @param [String] full_path project path (URL)
+ # @return [String] SSH URL
+ def url_to_repo(full_path)
+ Gitlab.config.gitlab_shell.ssh_path_prefix + "#{full_path}.git"
+ end
+
private
+ def gitlab_shell_path
+ File.expand_path(Gitlab.config.gitlab_shell.path)
+ end
+
+ def gitlab_shell_version_file
+ File.join(gitlab_shell_path, 'VERSION')
+ end
+
# Create (if necessary) and link the secret token file
def generate_and_link_secret_token
secret_file = Gitlab.config.gitlab_shell.secret_file
@@ -56,88 +83,6 @@ module Gitlab
end
end
- # Initialize a new project repository using a Project model
- #
- # @param [Project] project
- # @return [Boolean] whether repository could be created
- def create_project_repository(project)
- create_repository(project.repository_storage, project.disk_path, project.full_path)
- end
-
- # Initialize a new wiki repository using a Project model
- #
- # @param [Project] project
- # @return [Boolean] whether repository could be created
- def create_wiki_repository(project)
- create_repository(project.repository_storage, project.wiki.disk_path, project.wiki.full_path)
- end
-
- # Init new repository
- #
- # @example Create a repository
- # create_repository("default", "path/to/gitlab-ci", "gitlab/gitlab-ci")
- #
- # @param [String] storage the shard key
- # @param [String] disk_path project path on disk
- # @param [String] gl_project_path project name
- # @return [Boolean] whether repository could be created
- def create_repository(storage, disk_path, gl_project_path)
- relative_path = disk_path.dup
- relative_path << '.git' unless relative_path.end_with?('.git')
-
- # During creation of a repository, gl_repository may not be known
- # because that depends on a yet-to-be assigned project ID in the
- # database (e.g. project-1234), so for now it is blank.
- repository = Gitlab::Git::Repository.new(storage, relative_path, '', gl_project_path)
- wrapped_gitaly_errors { repository.gitaly_repository_client.create_repository }
-
- true
- rescue => err # Once the Rugged codes gets removes this can be improved
- Rails.logger.error("Failed to add repository #{storage}/#{disk_path}: #{err}") # rubocop:disable Gitlab/RailsLogger
- false
- end
-
- # Import wiki repository from external service
- #
- # @param [Project] project
- # @param [Gitlab::LegacyGithubImport::WikiFormatter, Gitlab::BitbucketImport::WikiFormatter] wiki_formatter
- # @return [Boolean] whether repository could be imported
- def import_wiki_repository(project, wiki_formatter)
- import_repository(project.repository_storage, wiki_formatter.disk_path, wiki_formatter.import_url, project.wiki.full_path)
- end
-
- # Import project repository from external service
- #
- # @param [Project] project
- # @return [Boolean] whether repository could be imported
- def import_project_repository(project)
- import_repository(project.repository_storage, project.disk_path, project.import_url, project.full_path)
- end
-
- # Import repository
- #
- # @example Import a repository
- # import_repository("nfs-file06", "gitlab/gitlab-ci", "https://gitlab.com/gitlab-org/gitlab-test.git", "gitlab/gitlab-ci")
- #
- # @param [String] storage project's storage name
- # @param [String] disk_path project path on disk
- # @param [String] url from external resource to import from
- # @param [String] gl_project_path project name
- # @return [Boolean] whether repository could be imported
- def import_repository(storage, disk_path, url, gl_project_path)
- if url.start_with?('.', '/')
- raise Error.new("don't use disk paths with import_repository: #{url.inspect}")
- end
-
- relative_path = "#{disk_path}.git"
- cmd = GitalyGitlabProjects.new(storage, relative_path, gl_project_path)
-
- success = cmd.import_project(url, git_timeout)
- raise Error, cmd.output unless success
-
- success
- end
-
# Move or rename a repository
#
# @example Move/rename a repository
@@ -147,6 +92,8 @@ module Gitlab
# @param [String] disk_path current project path on disk
# @param [String] new_disk_path new project path on disk
# @return [Boolean] whether repository could be moved/renamed on disk
+ #
+ # @deprecated
def mv_repository(storage, disk_path, new_disk_path)
return false if disk_path.empty? || new_disk_path.empty?
@@ -159,17 +106,6 @@ module Gitlab
false
end
- # Fork repository to new path
- #
- # @param [Project] source_project forked-from Project
- # @param [Project] target_project forked-to Project
- def fork_repository(source_project, target_project)
- forked_from_relative_path = "#{source_project.disk_path}.git"
- fork_args = [target_project.repository_storage, "#{target_project.disk_path}.git", target_project.full_path]
-
- GitalyGitlabProjects.new(source_project.repository_storage, forked_from_relative_path, source_project.full_path).fork_repository(*fork_args)
- end
-
# Removes a repository from the file system, using rm_directory, which is an
# alias for rm_namespace. The underlying implementation removes the name
# passed as the second argument on the given storage.
@@ -179,6 +115,8 @@ module Gitlab
#
# @param [String] storage project's storage path
# @param [String] disk_path current project path on disk
+ #
+ # @deprecated
def remove_repository(storage, disk_path)
return false if disk_path.empty?
@@ -192,84 +130,6 @@ module Gitlab
false
end
- # Add new key to authorized_keys
- #
- # @example Add new key
- # add_key("key-42", "sha-rsa ...")
- #
- # @param [String] key_id identifier of the key
- # @param [String] key_content key content (public certificate)
- # @return [Boolean] whether key could be added
- def add_key(key_id, key_content)
- return unless self.authorized_keys_enabled?
-
- gitlab_authorized_keys.add_key(key_id, key_content)
- end
-
- # Batch-add keys to authorized_keys
- #
- # @example
- # batch_add_keys(Key.all)
- #
- # @param [Array<Key>] keys
- # @return [Boolean] whether keys could be added
- def batch_add_keys(keys)
- return unless self.authorized_keys_enabled?
-
- gitlab_authorized_keys.batch_add_keys(keys)
- end
-
- # Remove SSH key from authorized_keys
- #
- # @example Remove a key
- # remove_key("key-342")
- #
- # @param [String] key_id
- # @return [Boolean] whether key could be removed or not
- def remove_key(key_id, _ = nil)
- return unless self.authorized_keys_enabled?
-
- gitlab_authorized_keys.rm_key(key_id)
- end
-
- # Remove all SSH keys from gitlab shell
- #
- # @example Remove all keys
- # remove_all_keys
- #
- # @return [Boolean] whether keys could be removed or not
- def remove_all_keys
- return unless self.authorized_keys_enabled?
-
- gitlab_authorized_keys.clear
- end
-
- # Remove SSH keys from gitlab shell that are not in the DB
- #
- # @example Remove keys not on the database
- # remove_keys_not_found_in_db
- #
- # rubocop: disable CodeReuse/ActiveRecord
- def remove_keys_not_found_in_db
- return unless self.authorized_keys_enabled?
-
- Rails.logger.info("Removing keys not found in DB") # rubocop:disable Gitlab/RailsLogger
-
- batch_read_key_ids do |ids_in_file|
- ids_in_file.uniq!
- keys_in_db = Key.where(id: ids_in_file)
-
- next unless ids_in_file.size > keys_in_db.count # optimization
-
- ids_to_remove = ids_in_file - keys_in_db.pluck(:id)
- ids_to_remove.each do |id|
- Rails.logger.info("Removing key-#{id} not found in DB") # rubocop:disable Gitlab/RailsLogger
- remove_key("key-#{id}")
- end
- end
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
# Add empty directory for storing repositories
#
# @example Add new namespace directory
@@ -277,6 +137,8 @@ module Gitlab
#
# @param [String] storage project's storage path
# @param [String] name namespace name
+ #
+ # @deprecated
def add_namespace(storage, name)
Gitlab::GitalyClient.allow_n_plus_1_calls do
Gitlab::GitalyClient::NamespaceService.new(storage).add(name)
@@ -293,6 +155,8 @@ module Gitlab
#
# @param [String] storage project's storage path
# @param [String] name namespace name
+ #
+ # @deprecated
def rm_namespace(storage, name)
Gitlab::GitalyClient::NamespaceService.new(storage).remove(name)
rescue GRPC::InvalidArgument => e
@@ -308,6 +172,8 @@ module Gitlab
# @param [String] storage project's storage path
# @param [String] old_name current namespace name
# @param [String] new_name new namespace name
+ #
+ # @deprecated
def mv_namespace(storage, old_name, new_name)
Gitlab::GitalyClient::NamespaceService.new(storage).rename(old_name, new_name)
rescue GRPC::InvalidArgument => e
@@ -316,25 +182,6 @@ module Gitlab
false
end
- # Return a SSH url for a given project path
- #
- # @param [String] full_path project path (URL)
- # @return [String] SSH URL
- def url_to_repo(full_path)
- Gitlab.config.gitlab_shell.ssh_path_prefix + "#{full_path}.git"
- end
-
- # Return GitLab shell version
- #
- # @return [String] version
- def version
- gitlab_shell_version_file = "#{gitlab_shell_path}/VERSION"
-
- if File.readable?(gitlab_shell_version_file)
- File.read(gitlab_shell_version_file).chomp
- end
- end
-
# Check if repository exists on disk
#
# @example Check if repository exists
@@ -343,116 +190,12 @@ module Gitlab
# @return [Boolean] whether repository exists or not
# @param [String] storage project's storage path
# @param [Object] dir_name repository dir name
+ #
+ # @deprecated
def repository_exists?(storage, dir_name)
Gitlab::Git::Repository.new(storage, dir_name, nil, nil).exists?
rescue GRPC::Internal
false
end
-
- # Return hooks folder path used by projects
- #
- # @return [String] path
- def hooks_path
- File.join(gitlab_shell_path, 'hooks')
- end
-
- protected
-
- def gitlab_shell_path
- File.expand_path(Gitlab.config.gitlab_shell.path)
- end
-
- def gitlab_shell_user_home
- File.expand_path("~#{Gitlab.config.gitlab_shell.ssh_user}")
- end
-
- def full_path(storage, dir_name)
- raise ArgumentError.new("Directory name can't be blank") if dir_name.blank?
-
- File.join(Gitlab.config.repositories.storages[storage].legacy_disk_path, dir_name)
- end
-
- def authorized_keys_enabled?
- # Return true if nil to ensure the authorized_keys methods work while
- # fixing the authorized_keys file during migration.
- return true if Gitlab::CurrentSettings.current_application_settings.authorized_keys_enabled.nil?
-
- Gitlab::CurrentSettings.current_application_settings.authorized_keys_enabled
- end
-
- private
-
- def git_timeout
- Gitlab.config.gitlab_shell.git_timeout
- end
-
- def wrapped_gitaly_errors
- yield
- rescue GRPC::NotFound, GRPC::BadStatus => e
- # Old Popen code returns [Error, output] to the caller, so we
- # need to do the same here...
- raise Error, e
- end
-
- def gitlab_authorized_keys
- @gitlab_authorized_keys ||= Gitlab::AuthorizedKeys.new
- end
-
- def batch_read_key_ids(batch_size: 100, &block)
- return unless self.authorized_keys_enabled?
-
- gitlab_authorized_keys.list_key_ids.lazy.each_slice(batch_size) do |key_ids|
- yield(key_ids)
- end
- end
-
- def strip_key(key)
- key.split(/[ ]+/)[0, 2].join(' ')
- end
-
- def add_keys_to_io(keys, io)
- keys.each do |k|
- key = strip_key(k.key)
-
- raise Error.new("Invalid key: #{key.inspect}") if key.include?("\t") || key.include?("\n")
-
- io.puts("#{k.shell_id}\t#{key}")
- end
- end
-
- class GitalyGitlabProjects
- attr_reader :shard_name, :repository_relative_path, :output, :gl_project_path
-
- def initialize(shard_name, repository_relative_path, gl_project_path)
- @shard_name = shard_name
- @repository_relative_path = repository_relative_path
- @output = ''
- @gl_project_path = gl_project_path
- end
-
- def import_project(source, _timeout)
- raw_repository = Gitlab::Git::Repository.new(shard_name, repository_relative_path, nil, gl_project_path)
-
- Gitlab::GitalyClient::RepositoryService.new(raw_repository).import_repository(source)
- true
- rescue GRPC::BadStatus => e
- @output = e.message
- false
- end
-
- def fork_repository(new_shard_name, new_repository_relative_path, new_project_name)
- target_repository = Gitlab::Git::Repository.new(new_shard_name, new_repository_relative_path, nil, new_project_name)
- raw_repository = Gitlab::Git::Repository.new(shard_name, repository_relative_path, nil, gl_project_path)
-
- Gitlab::GitalyClient::RepositoryService.new(target_repository).fork_repository(raw_repository)
- rescue GRPC::BadStatus => e
- logger.error "fork-repository failed: #{e.message}"
- false
- end
-
- def logger
- Rails.logger # rubocop:disable Gitlab/RailsLogger
- end
- end
end
end
diff --git a/lib/gitlab/sidekiq_cluster.rb b/lib/gitlab/sidekiq_cluster.rb
new file mode 100644
index 00000000000..c19bef1389a
--- /dev/null
+++ b/lib/gitlab/sidekiq_cluster.rb
@@ -0,0 +1,162 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module SidekiqCluster
+ # The signals that should terminate both the master and workers.
+ TERMINATE_SIGNALS = %i(INT TERM).freeze
+
+ # The signals that should simply be forwarded to the workers.
+ FORWARD_SIGNALS = %i(TTIN USR1 USR2 HUP).freeze
+
+ # Traps the given signals and yields the block whenever these signals are
+ # received.
+ #
+ # The block is passed the name of the signal.
+ #
+ # Example:
+ #
+ # trap_signals(%i(HUP TERM)) do |signal|
+ # ...
+ # end
+ def self.trap_signals(signals)
+ signals.each do |signal|
+ trap(signal) do
+ yield signal
+ end
+ end
+ end
+
+ def self.trap_terminate(&block)
+ trap_signals(TERMINATE_SIGNALS, &block)
+ end
+
+ def self.trap_forward(&block)
+ trap_signals(FORWARD_SIGNALS, &block)
+ end
+
+ def self.signal(pid, signal)
+ Process.kill(signal, pid)
+ true
+ rescue Errno::ESRCH
+ false
+ end
+
+ def self.signal_processes(pids, signal)
+ pids.each { |pid| signal(pid, signal) }
+ end
+
+ # Starts Sidekiq workers for the pairs of processes.
+ #
+ # Example:
+ #
+ # start([ ['foo'], ['bar', 'baz'] ], env: :production)
+ #
+ # This would start two Sidekiq processes: one processing "foo", and one
+ # processing "bar" and "baz". Each one is placed in its own process group.
+ #
+ # queues - An Array containing Arrays. Each sub Array should specify the
+ # queues to use for a single process.
+ #
+ # directory - The directory of the Rails application.
+ #
+ # Returns an Array containing the PIDs of the started processes.
+ def self.start(queues, env: :development, directory: Dir.pwd, max_concurrency: 50, min_concurrency: 0, dryrun: false)
+ queues.map.with_index do |pair, index|
+ start_sidekiq(pair, env: env, directory: directory, max_concurrency: max_concurrency, min_concurrency: min_concurrency, worker_id: index, dryrun: dryrun)
+ end
+ end
+
+ # Starts a Sidekiq process that processes _only_ the given queues.
+ #
+ # Returns the PID of the started process.
+ def self.start_sidekiq(queues, env:, directory:, max_concurrency:, min_concurrency:, worker_id:, dryrun:)
+ counts = count_by_queue(queues)
+
+ cmd = %w[bundle exec sidekiq]
+ cmd << "-c #{self.concurrency(queues, min_concurrency, max_concurrency)}"
+ cmd << "-e#{env}"
+ cmd << "-gqueues: #{proc_details(counts)}"
+ cmd << "-r#{directory}"
+
+ counts.each do |queue, count|
+ cmd << "-q#{queue},#{count}"
+ end
+
+ if dryrun
+ puts "Sidekiq command: #{cmd}" # rubocop:disable Rails/Output
+ return
+ end
+
+ pid = Process.spawn(
+ { 'ENABLE_SIDEKIQ_CLUSTER' => '1',
+ 'SIDEKIQ_WORKER_ID' => worker_id.to_s },
+ *cmd,
+ pgroup: true,
+ err: $stderr,
+ out: $stdout
+ )
+
+ wait_async(pid)
+
+ pid
+ end
+
+ def self.count_by_queue(queues)
+ queues.each_with_object(Hash.new(0)) { |element, hash| hash[element] += 1 }
+ end
+
+ def self.proc_details(counts)
+ counts.map do |queue, count|
+ if count == 1
+ queue
+ else
+ "#{queue} (#{count})"
+ end
+ end.join(', ')
+ end
+
+ def self.concurrency(queues, min_concurrency, max_concurrency)
+ concurrency_from_queues = queues.length + 1
+ max = max_concurrency.positive? ? max_concurrency : concurrency_from_queues
+ min = [min_concurrency, max].min
+
+ concurrency_from_queues.clamp(min, max)
+ end
+
+ # Waits for the given process to complete using a separate thread.
+ def self.wait_async(pid)
+ Thread.new do
+ Process.wait(pid) rescue Errno::ECHILD
+ end
+ end
+
+ # Returns true if all the processes are alive.
+ def self.all_alive?(pids)
+ pids.each do |pid|
+ return false unless process_alive?(pid)
+ end
+
+ true
+ end
+
+ def self.any_alive?(pids)
+ pids_alive(pids).any?
+ end
+
+ def self.pids_alive(pids)
+ pids.select { |pid| process_alive?(pid) }
+ end
+
+ def self.process_alive?(pid)
+ # Signal 0 tests whether the process exists and we have access to send signals
+ # but is otherwise a noop (doesn't actually send a signal to the process)
+ signal(pid, 0)
+ end
+
+ def self.write_pid(path)
+ File.open(path, 'w') do |handle|
+ handle.write(Process.pid.to_s)
+ end
+ end
+ end
+end
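A minimal driver sketch for the module above (not part of the patch): the queue names are illustrative and it assumes the process is started from the Rails application root.

  pids = Gitlab::SidekiqCluster.start(
    [%w[post_receive], %w[mailers project_cache]], # one Sidekiq process per sub-array
    env: :production
  )

  # Forward termination signals to every worker, then wait for them to exit.
  Gitlab::SidekiqCluster.trap_terminate do |signal|
    Gitlab::SidekiqCluster.signal_processes(pids, signal)
  end

  sleep(5) while Gitlab::SidekiqCluster.all_alive?(pids)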
diff --git a/lib/gitlab/sidekiq_cluster/cli.rb b/lib/gitlab/sidekiq_cluster/cli.rb
new file mode 100644
index 00000000000..0a9624950c2
--- /dev/null
+++ b/lib/gitlab/sidekiq_cluster/cli.rb
@@ -0,0 +1,184 @@
+# frozen_string_literal: true
+
+require 'optparse'
+require 'logger'
+require 'time'
+
+module Gitlab
+ module SidekiqCluster
+ class CLI
+ CHECK_TERMINATE_INTERVAL_SECONDS = 1
+ # How long to wait in total when asking for a clean termination.
+ # Sidekiq's own default self-termination timeout is 25s.
+ TERMINATE_TIMEOUT_SECONDS = 30
+
+ CommandError = Class.new(StandardError)
+
+ def initialize(log_output = STDERR)
+ require_relative '../../../lib/gitlab/sidekiq_logging/json_formatter'
+
+ # As recommended by https://github.com/mperham/sidekiq/wiki/Advanced-Options#concurrency
+ @max_concurrency = 50
+ @min_concurrency = 0
+ @environment = ENV['RAILS_ENV'] || 'development'
+ @pid = nil
+ @interval = 5
+ @alive = true
+ @processes = []
+ @logger = Logger.new(log_output)
+ @logger.formatter = ::Gitlab::SidekiqLogging::JSONFormatter.new
+ @rails_path = Dir.pwd
+ @dryrun = false
+ end
+
+ def run(argv = ARGV)
+ if argv.empty?
+ raise CommandError,
+ 'You must specify at least one queue to start a worker for'
+ end
+
+ option_parser.parse!(argv)
+
+ all_queues = SidekiqConfig::CliMethods.all_queues(@rails_path)
+ queue_names = SidekiqConfig::CliMethods.worker_queues(@rails_path)
+
+ queue_groups = argv.map do |queues|
+ next queue_names if queues == '*'
+
+ # When using the experimental queue query syntax, we treat
+ # each queue group as a worker attribute query, and resolve
+ # the queues for the queue group using this query.
+ if @experimental_queue_selector
+ SidekiqConfig::CliMethods.query_workers(queues, all_queues)
+ else
+ SidekiqConfig::CliMethods.expand_queues(queues.split(','), queue_names)
+ end
+ end
+
+ if @negate_queues
+ queue_groups.map! { |queues| queue_names - queues }
+ end
+
+ if queue_groups.all?(&:empty?)
+ raise CommandError,
+ 'No queues found, you must select at least one queue'
+ end
+
+ @logger.info("Starting cluster with #{queue_groups.length} processes")
+
+ @processes = SidekiqCluster.start(
+ queue_groups,
+ env: @environment,
+ directory: @rails_path,
+ max_concurrency: @max_concurrency,
+ min_concurrency: @min_concurrency,
+ dryrun: @dryrun
+ )
+
+ return if @dryrun
+
+ write_pid
+ trap_signals
+ start_loop
+ end
+
+ def write_pid
+ SidekiqCluster.write_pid(@pid) if @pid
+ end
+
+ def monotonic_time
+ Process.clock_gettime(Process::CLOCK_MONOTONIC, :float_second)
+ end
+
+ def continue_waiting?(deadline)
+ SidekiqCluster.any_alive?(@processes) && monotonic_time < deadline
+ end
+
+ def hard_stop_stuck_pids
+ SidekiqCluster.signal_processes(SidekiqCluster.pids_alive(@processes), :KILL)
+ end
+
+ def wait_for_termination
+ deadline = monotonic_time + TERMINATE_TIMEOUT_SECONDS
+ sleep(CHECK_TERMINATE_INTERVAL_SECONDS) while continue_waiting?(deadline)
+
+ hard_stop_stuck_pids
+ end
+
+ def trap_signals
+ SidekiqCluster.trap_terminate do |signal|
+ @alive = false
+ SidekiqCluster.signal_processes(@processes, signal)
+ wait_for_termination
+ end
+
+ SidekiqCluster.trap_forward do |signal|
+ SidekiqCluster.signal_processes(@processes, signal)
+ end
+ end
+
+ def start_loop
+ while @alive
+ sleep(@interval)
+
+ unless SidekiqCluster.all_alive?(@processes)
+ # If a child process died we'll just terminate the whole cluster. It's up to
+ # runit and such to then restart the cluster.
+ @logger.info('A worker terminated, shutting down the cluster')
+
+ SidekiqCluster.signal_processes(@processes, :TERM)
+ break
+ end
+ end
+ end
+
+ def option_parser
+ OptionParser.new do |opt|
+ opt.banner = "#{File.basename(__FILE__)} [QUEUE,QUEUE] [QUEUE] ... [OPTIONS]"
+
+ opt.separator "\nOptions:\n"
+
+ opt.on('-h', '--help', 'Shows this help message') do
+ abort opt.to_s
+ end
+
+ opt.on('-m', '--max-concurrency INT', 'Maximum threads to use with Sidekiq (default: 50, 0 to disable)') do |int|
+ @max_concurrency = int.to_i
+ end
+
+ opt.on('--min-concurrency INT', 'Minimum threads to use with Sidekiq (default: 0)') do |int|
+ @min_concurrency = int.to_i
+ end
+
+ opt.on('-e', '--environment ENV', 'The application environment') do |env|
+ @environment = env
+ end
+
+ opt.on('-P', '--pidfile PATH', 'Path to the PID file') do |pid|
+ @pid = pid
+ end
+
+ opt.on('-r', '--require PATH', 'Location of the Rails application') do |path|
+ @rails_path = path
+ end
+
+ opt.on('--experimental-queue-selector', 'EXPERIMENTAL: Run workers based on the provided selector') do |experimental_queue_selector|
+ @experimental_queue_selector = experimental_queue_selector
+ end
+
+ opt.on('-n', '--negate', 'Run workers for all queues in sidekiq_queues.yml except the given ones') do
+ @negate_queues = true
+ end
+
+ opt.on('-i', '--interval INT', 'The number of seconds to wait between worker checks') do |int|
+ @interval = int.to_i
+ end
+
+ opt.on('-d', '--dryrun', 'Print commands that would be run without this flag, and quit') do |int|
+ @dryrun = true
+ end
+ end
+ end
+ end
+ end
+end
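A usage sketch for the CLI class above, assuming it runs from the Rails root so `SidekiqConfig::CliMethods` can resolve the queue list; `foo`, `bar` and `baz` are placeholder queue names that would have to exist in the application's queue list.

  cli = Gitlab::SidekiqCluster::CLI.new
  # Two worker processes: one for the "foo" and "bar" queues, one for "baz".
  # --dryrun prints the generated `bundle exec sidekiq` commands instead of spawning them.
  cli.run(%w[--dryrun foo,bar baz])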
diff --git a/lib/gitlab/sidekiq_config/cli_methods.rb b/lib/gitlab/sidekiq_config/cli_methods.rb
index 8f19b557d24..c49432f0fc6 100644
--- a/lib/gitlab/sidekiq_config/cli_methods.rb
+++ b/lib/gitlab/sidekiq_config/cli_methods.rb
@@ -21,14 +21,14 @@ module Gitlab
QUERY_OR_OPERATOR = '|'
QUERY_AND_OPERATOR = '&'
QUERY_CONCATENATE_OPERATOR = ','
- QUERY_TERM_REGEX = %r{^(\w+)(!?=)([\w#{QUERY_CONCATENATE_OPERATOR}]+)}.freeze
+ QUERY_TERM_REGEX = %r{^(\w+)(!?=)([\w:#{QUERY_CONCATENATE_OPERATOR}]+)}.freeze
QUERY_PREDICATES = {
feature_category: :to_sym,
has_external_dependencies: lambda { |value| value == 'true' },
- latency_sensitive: lambda { |value| value == 'true' },
name: :to_s,
- resource_boundary: :to_sym
+ resource_boundary: :to_sym,
+ urgency: :to_sym
}.freeze
QueryError = Class.new(StandardError)
diff --git a/lib/gitlab/sidekiq_config/dummy_worker.rb b/lib/gitlab/sidekiq_config/dummy_worker.rb
index 858ff0db0c9..bd205c81931 100644
--- a/lib/gitlab/sidekiq_config/dummy_worker.rb
+++ b/lib/gitlab/sidekiq_config/dummy_worker.rb
@@ -9,8 +9,9 @@ module Gitlab
ATTRIBUTE_METHODS = {
feature_category: :get_feature_category,
has_external_dependencies: :worker_has_external_dependencies?,
- latency_sensitive: :latency_sensitive_worker?,
+ urgency: :get_urgency,
resource_boundary: :get_worker_resource_boundary,
+ idempotent: :idempotent?,
weight: :get_weight
}.freeze
diff --git a/lib/gitlab/sidekiq_config/worker.rb b/lib/gitlab/sidekiq_config/worker.rb
index 6cbe327e6b2..ec7a82f6459 100644
--- a/lib/gitlab/sidekiq_config/worker.rb
+++ b/lib/gitlab/sidekiq_config/worker.rb
@@ -7,8 +7,8 @@ module Gitlab
attr_reader :klass
delegate :feature_category_not_owned?, :get_feature_category,
- :get_weight, :get_worker_resource_boundary,
- :latency_sensitive_worker?, :queue, :queue_namespace,
+ :get_urgency, :get_weight, :get_worker_resource_boundary,
+ :idempotent?, :queue, :queue_namespace,
:worker_has_external_dependencies?,
to: :klass
@@ -49,9 +49,10 @@ module Gitlab
name: queue,
feature_category: get_feature_category,
has_external_dependencies: worker_has_external_dependencies?,
- latency_sensitive: latency_sensitive_worker?,
+ urgency: get_urgency,
resource_boundary: get_worker_resource_boundary,
- weight: get_weight
+ weight: get_weight,
+ idempotent: idempotent?
}
end
diff --git a/lib/gitlab/sidekiq_logging/client_logger.rb b/lib/gitlab/sidekiq_logging/client_logger.rb
new file mode 100644
index 00000000000..8be755a55db
--- /dev/null
+++ b/lib/gitlab/sidekiq_logging/client_logger.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module SidekiqLogging
+ class ClientLogger < Gitlab::Logger
+ def self.file_name_noext
+ 'sidekiq_client'
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/sidekiq_logging/deduplication_logger.rb b/lib/gitlab/sidekiq_logging/deduplication_logger.rb
new file mode 100644
index 00000000000..01810e474dc
--- /dev/null
+++ b/lib/gitlab/sidekiq_logging/deduplication_logger.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module SidekiqLogging
+ class DeduplicationLogger
+ include Singleton
+ include LogsJobs
+
+ def log(job, deduplication_type)
+ payload = parse_job(job)
+ payload['job_status'] = 'deduplicated'
+ payload['message'] = "#{base_message(payload)}: deduplicated: #{deduplication_type}"
+ payload['deduplication_type'] = deduplication_type
+
+ Sidekiq.logger.info payload
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/sidekiq_logging/json_formatter.rb b/lib/gitlab/sidekiq_logging/json_formatter.rb
index e0b0d684bea..c20e929ae36 100644
--- a/lib/gitlab/sidekiq_logging/json_formatter.rb
+++ b/lib/gitlab/sidekiq_logging/json_formatter.rb
@@ -1,5 +1,8 @@
# frozen_string_literal: true
+# This is needed for sidekiq-cluster
+require 'json'
+
module Gitlab
module SidekiqLogging
class JSONFormatter
diff --git a/lib/gitlab/sidekiq_logging/logs_jobs.rb b/lib/gitlab/sidekiq_logging/logs_jobs.rb
new file mode 100644
index 00000000000..55d711c54ae
--- /dev/null
+++ b/lib/gitlab/sidekiq_logging/logs_jobs.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module SidekiqLogging
+ module LogsJobs
+ def base_message(payload)
+ "#{payload['class']} JID-#{payload['jid']}"
+ end
+
+ def parse_job(job)
+ # Error information from the previous try is in the payload for
+ # displaying in the Sidekiq UI, but is very confusing in logs!
+ job = job.except('error_backtrace', 'error_class', 'error_message')
+
+ # Add process id params
+ job['pid'] = ::Process.pid
+
+ job.delete('args') unless ENV['SIDEKIQ_LOG_ARGUMENTS']
+ job['args'] = Gitlab::Utils::LogLimitedArray.log_limited_array(job['args'].map(&:to_s)) if job['args']
+
+ job
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/sidekiq_logging/structured_logger.rb b/lib/gitlab/sidekiq_logging/structured_logger.rb
index b45014d283f..af9072ea201 100644
--- a/lib/gitlab/sidekiq_logging/structured_logger.rb
+++ b/lib/gitlab/sidekiq_logging/structured_logger.rb
@@ -6,6 +6,8 @@ require 'active_record/log_subscriber'
module Gitlab
module SidekiqLogging
class StructuredLogger
+ include LogsJobs
+
def call(job, queue)
started_time = get_time
base_payload = parse_job(job)
@@ -24,10 +26,6 @@ module Gitlab
private
- def base_message(payload)
- "#{payload['class']} JID-#{payload['jid']}"
- end
-
def add_instrumentation_keys!(job, output_payload)
output_payload.merge!(job.slice(*::Gitlab::InstrumentationHelper::KEYS))
end
@@ -76,20 +74,6 @@ module Gitlab
payload['completed_at'] = Time.now.utc.to_f
end
- def parse_job(job)
- # Error information from the previous try is in the payload for
- # displaying in the Sidekiq UI, but is very confusing in logs!
- job = job.except('error_backtrace', 'error_class', 'error_message')
-
- # Add process id params
- job['pid'] = ::Process.pid
-
- job.delete('args') unless ENV['SIDEKIQ_LOG_ARGUMENTS']
- job['args'] = Gitlab::Utils::LogLimitedArray.log_limited_array(job['args']) if job['args']
-
- job
- end
-
def elapsed(t0)
t1 = get_time
{
diff --git a/lib/gitlab/sidekiq_middleware.rb b/lib/gitlab/sidekiq_middleware.rb
index 6c27213df49..37165d787c7 100644
--- a/lib/gitlab/sidekiq_middleware.rb
+++ b/lib/gitlab/sidekiq_middleware.rb
@@ -9,17 +9,18 @@ module Gitlab
# eg: `config.server_middleware(&Gitlab::SidekiqMiddleware.server_configurator)`
def self.server_configurator(metrics: true, arguments_logger: true, memory_killer: true, request_store: true)
lambda do |chain|
- chain.add Gitlab::SidekiqMiddleware::Monitor
- chain.add Gitlab::SidekiqMiddleware::ServerMetrics if metrics
- chain.add Gitlab::SidekiqMiddleware::ArgumentsLogger if arguments_logger
- chain.add Gitlab::SidekiqMiddleware::MemoryKiller if memory_killer
- chain.add Gitlab::SidekiqMiddleware::RequestStoreMiddleware if request_store
- chain.add Gitlab::SidekiqMiddleware::BatchLoader
- chain.add Labkit::Middleware::Sidekiq::Server
- chain.add Gitlab::SidekiqMiddleware::InstrumentationLogger
- chain.add Gitlab::SidekiqMiddleware::AdminMode::Server
- chain.add Gitlab::SidekiqStatus::ServerMiddleware
- chain.add Gitlab::SidekiqMiddleware::WorkerContext::Server
+ chain.add ::Gitlab::SidekiqMiddleware::Monitor
+ chain.add ::Gitlab::SidekiqMiddleware::ServerMetrics if metrics
+ chain.add ::Gitlab::SidekiqMiddleware::ArgumentsLogger if arguments_logger
+ chain.add ::Gitlab::SidekiqMiddleware::MemoryKiller if memory_killer
+ chain.add ::Gitlab::SidekiqMiddleware::RequestStoreMiddleware if request_store
+ chain.add ::Gitlab::SidekiqMiddleware::BatchLoader
+ chain.add ::Labkit::Middleware::Sidekiq::Server
+ chain.add ::Gitlab::SidekiqMiddleware::InstrumentationLogger
+ chain.add ::Gitlab::SidekiqMiddleware::AdminMode::Server
+ chain.add ::Gitlab::SidekiqStatus::ServerMiddleware
+ chain.add ::Gitlab::SidekiqMiddleware::WorkerContext::Server
+ chain.add ::Gitlab::SidekiqMiddleware::DuplicateJobs::Server
end
end
@@ -28,11 +29,12 @@ module Gitlab
# eg: `config.client_middleware(&Gitlab::SidekiqMiddleware.client_configurator)`
def self.client_configurator
lambda do |chain|
- chain.add Gitlab::SidekiqStatus::ClientMiddleware
- chain.add Gitlab::SidekiqMiddleware::ClientMetrics
- chain.add Gitlab::SidekiqMiddleware::WorkerContext::Client # needs to be before the Labkit middleware
- chain.add Labkit::Middleware::Sidekiq::Client
- chain.add Gitlab::SidekiqMiddleware::AdminMode::Client
+ chain.add ::Gitlab::SidekiqStatus::ClientMiddleware
+ chain.add ::Gitlab::SidekiqMiddleware::ClientMetrics
+ chain.add ::Gitlab::SidekiqMiddleware::WorkerContext::Client # needs to be before the Labkit middleware
+ chain.add ::Labkit::Middleware::Sidekiq::Client
+ chain.add ::Gitlab::SidekiqMiddleware::AdminMode::Client
+ chain.add ::Gitlab::SidekiqMiddleware::DuplicateJobs::Client
end
end
end
diff --git a/lib/gitlab/sidekiq_middleware/duplicate_jobs.rb b/lib/gitlab/sidekiq_middleware/duplicate_jobs.rb
new file mode 100644
index 00000000000..23222430902
--- /dev/null
+++ b/lib/gitlab/sidekiq_middleware/duplicate_jobs.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'digest'
+
+module Gitlab
+ module SidekiqMiddleware
+ module DuplicateJobs
+ def self.drop_duplicates?
+ Feature.enabled?(:drop_duplicate_sidekiq_jobs)
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/sidekiq_middleware/duplicate_jobs/client.rb b/lib/gitlab/sidekiq_middleware/duplicate_jobs/client.rb
new file mode 100644
index 00000000000..bb0c18735bb
--- /dev/null
+++ b/lib/gitlab/sidekiq_middleware/duplicate_jobs/client.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module SidekiqMiddleware
+ module DuplicateJobs
+ class Client
+ def call(worker_class, job, queue, _redis_pool, &block)
+ DuplicateJob.new(job, queue).schedule(&block)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job.rb b/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job.rb
new file mode 100644
index 00000000000..c6fb50b4610
--- /dev/null
+++ b/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job.rb
@@ -0,0 +1,116 @@
+# frozen_string_literal: true
+
+require 'digest'
+
+module Gitlab
+ module SidekiqMiddleware
+ module DuplicateJobs
+ # This class defines an identifier of a job in a queue.
+ # The identifier is based on a job's class and arguments.
+ #
+ # A strategy decides when to keep track of the job in redis and when to
+ # remove it.
+ #
+ # Storing the deduplication key in redis can be done by calling `check!`.
+ # `check!` returns the `jid` of the job if it was scheduled, or the `jid` of
+ # the duplicate job if it was already scheduled.
+ #
+ # When new jobs can be scheduled again, the strategy calls `#delete`.
+ class DuplicateJob
+ DUPLICATE_KEY_TTL = 6.hours
+
+ attr_reader :existing_jid
+
+ def initialize(job, queue_name, strategy: :until_executing)
+ @job = job
+ @queue_name = queue_name
+ @strategy = strategy
+ end
+
+ # This will continue the middleware chain if the job should be scheduled
+ # It will return false if the job needs to be cancelled
+ def schedule(&block)
+ Strategies.for(strategy).new(self).schedule(job, &block)
+ end
+
+ # This will continue the server middleware chain if the job should be
+ # executed.
+ # It will return false if the job should not be executed.
+ def perform(&block)
+ Strategies.for(strategy).new(self).perform(job, &block)
+ end
+
+ # This method will return the jid that was set in redis
+ def check!
+ read_jid = nil
+
+ Sidekiq.redis do |redis|
+ redis.multi do |multi|
+ redis.set(idempotency_key, jid, ex: DUPLICATE_KEY_TTL, nx: true)
+ read_jid = redis.get(idempotency_key)
+ end
+ end
+
+ self.existing_jid = read_jid.value
+ end
+
+ def delete!
+ Sidekiq.redis do |redis|
+ redis.del(idempotency_key)
+ end
+ end
+
+ def duplicate?
+ raise "Call `#check!` first to check for existing duplicates" unless existing_jid
+
+ jid != existing_jid
+ end
+
+ def droppable?
+ idempotent? && duplicate? && DuplicateJobs.drop_duplicates?
+ end
+
+ private
+
+ attr_reader :queue_name, :strategy, :job
+ attr_writer :existing_jid
+
+ def worker_class_name
+ job['class']
+ end
+
+ def arguments
+ job['args']
+ end
+
+ def jid
+ job['jid']
+ end
+
+ def idempotency_key
+ @idempotency_key ||= "#{namespace}:#{idempotency_hash}"
+ end
+
+ def idempotency_hash
+ Digest::SHA256.hexdigest(idempotency_string)
+ end
+
+ def namespace
+ "#{Gitlab::Redis::Queues::SIDEKIQ_NAMESPACE}:duplicate:#{queue_name}"
+ end
+
+ def idempotency_string
+ "#{worker_class_name}:#{arguments.join('-')}"
+ end
+
+ def idempotent?
+ worker_class = worker_class_name.to_s.safe_constantize
+ return false unless worker_class
+ return false unless worker_class.respond_to?(:idempotent?)
+
+ worker_class.idempotent?
+ end
+ end
+ end
+ end
+end
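A sketch of the lifecycle the client and server middleware drive through this class; the job hash and worker name are illustrative.

  job = { 'class' => 'SomeIdempotentWorker', 'jid' => 'abc123', 'args' => [42] }
  duplicate_job = Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob.new(job, 'default')

  duplicate_job.check!      # stores the idempotency key and returns the winning jid
  duplicate_job.duplicate?  # true when another jid already holds the key
  duplicate_job.droppable?  # additionally requires an idempotent worker and the feature flag
  duplicate_job.delete!     # the server middleware clears the key before the job runs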
diff --git a/lib/gitlab/sidekiq_middleware/duplicate_jobs/server.rb b/lib/gitlab/sidekiq_middleware/duplicate_jobs/server.rb
new file mode 100644
index 00000000000..a35edc5774e
--- /dev/null
+++ b/lib/gitlab/sidekiq_middleware/duplicate_jobs/server.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module SidekiqMiddleware
+ module DuplicateJobs
+ class Server
+ def call(worker, job, queue, &block)
+ DuplicateJob.new(job, queue).perform(&block)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies.rb b/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies.rb
new file mode 100644
index 00000000000..a08310a58ff
--- /dev/null
+++ b/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module SidekiqMiddleware
+ module DuplicateJobs
+ module Strategies
+ UnknownStrategyError = Class.new(StandardError)
+
+ STRATEGIES = {
+ until_executing: UntilExecuting
+ }.freeze
+
+ def self.for(name)
+ STRATEGIES.fetch(name)
+ rescue KeyError
+ raise UnknownStrategyError, "Unknown deduplication strategy #{name}"
+ end
+ end
+ end
+ end
+end
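For illustration, the lookup above resolves known strategy names and raises on unknown ones:

  Gitlab::SidekiqMiddleware::DuplicateJobs::Strategies.for(:until_executing)
  # => Gitlab::SidekiqMiddleware::DuplicateJobs::Strategies::UntilExecuting

  Gitlab::SidekiqMiddleware::DuplicateJobs::Strategies.for(:unknown)
  # raises UnknownStrategyError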
diff --git a/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing.rb b/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing.rb
new file mode 100644
index 00000000000..674e436b714
--- /dev/null
+++ b/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module SidekiqMiddleware
+ module DuplicateJobs
+ module Strategies
+ # This strategy takes a lock before scheduling the job in a queue and
+ # removes the lock before the job starts, allowing a new job to be queued
+ # while a job is still executing.
+ class UntilExecuting
+ def initialize(duplicate_job)
+ @duplicate_job = duplicate_job
+ end
+
+ def schedule(job)
+ if duplicate_job.check! && duplicate_job.duplicate?
+ job['duplicate-of'] = duplicate_job.existing_jid
+ end
+
+ if duplicate_job.droppable?
+ Gitlab::SidekiqLogging::DeduplicationLogger.instance.log(job, "dropped until executing")
+ return false
+ end
+
+ yield
+ end
+
+ def perform(_job)
+ duplicate_job.delete!
+
+ yield
+ end
+
+ private
+
+ attr_reader :duplicate_job
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/sidekiq_middleware/metrics.rb b/lib/gitlab/sidekiq_middleware/metrics.rb
index fbc34357323..693e35f2500 100644
--- a/lib/gitlab/sidekiq_middleware/metrics.rb
+++ b/lib/gitlab/sidekiq_middleware/metrics.rb
@@ -9,10 +9,10 @@ module Gitlab
private
def create_labels(worker_class, queue)
- labels = { queue: queue.to_s, latency_sensitive: FALSE_LABEL, external_dependencies: FALSE_LABEL, feature_category: "", boundary: "" }
+ labels = { queue: queue.to_s, urgency: "", external_dependencies: FALSE_LABEL, feature_category: "", boundary: "" }
return labels unless worker_class && worker_class.include?(WorkerAttributes)
- labels[:latency_sensitive] = bool_as_label(worker_class.latency_sensitive_worker?)
+ labels[:urgency] = worker_class.get_urgency.to_s
labels[:external_dependencies] = bool_as_label(worker_class.worker_has_external_dependencies?)
feature_category = worker_class.get_feature_category
diff --git a/lib/gitlab/sidekiq_middleware/request_store_middleware.rb b/lib/gitlab/sidekiq_middleware/request_store_middleware.rb
index 8824f81e8e3..f6142bd6ca5 100644
--- a/lib/gitlab/sidekiq_middleware/request_store_middleware.rb
+++ b/lib/gitlab/sidekiq_middleware/request_store_middleware.rb
@@ -3,12 +3,12 @@
module Gitlab
module SidekiqMiddleware
class RequestStoreMiddleware
+ include Gitlab::WithRequestStore
+
def call(worker, job, queue)
- RequestStore.begin!
- yield
- ensure
- RequestStore.end!
- RequestStore.clear!
+ with_request_store do
+ yield
+ end
end
end
end
diff --git a/lib/gitlab/sidekiq_middleware/server_metrics.rb b/lib/gitlab/sidekiq_middleware/server_metrics.rb
index fa7f56b8d9c..60618787b24 100644
--- a/lib/gitlab/sidekiq_middleware/server_metrics.rb
+++ b/lib/gitlab/sidekiq_middleware/server_metrics.rb
@@ -45,6 +45,8 @@ module Gitlab
labels[:job_status] = job_succeeded ? "done" : "fail"
@metrics[:sidekiq_jobs_cpu_seconds].observe(labels, job_thread_cputime)
@metrics[:sidekiq_jobs_completion_seconds].observe(labels, monotonic_time)
+ @metrics[:sidekiq_jobs_db_seconds].observe(labels, ActiveRecord::LogSubscriber.runtime / 1000)
+ @metrics[:sidekiq_jobs_gitaly_seconds].observe(labels, get_gitaly_time(job))
end
end
@@ -54,6 +56,8 @@ module Gitlab
{
sidekiq_jobs_cpu_seconds: ::Gitlab::Metrics.histogram(:sidekiq_jobs_cpu_seconds, 'Seconds of cpu time to run Sidekiq job', {}, SIDEKIQ_LATENCY_BUCKETS),
sidekiq_jobs_completion_seconds: ::Gitlab::Metrics.histogram(:sidekiq_jobs_completion_seconds, 'Seconds to complete Sidekiq job', {}, SIDEKIQ_LATENCY_BUCKETS),
+ sidekiq_jobs_db_seconds: ::Gitlab::Metrics.histogram(:sidekiq_jobs_db_seconds, 'Seconds of database time to run Sidekiq job', {}, SIDEKIQ_LATENCY_BUCKETS),
+ sidekiq_jobs_gitaly_seconds: ::Gitlab::Metrics.histogram(:sidekiq_jobs_gitaly_seconds, 'Seconds of Gitaly time to run Sidekiq job', {}, SIDEKIQ_LATENCY_BUCKETS),
sidekiq_jobs_queue_duration_seconds: ::Gitlab::Metrics.histogram(:sidekiq_jobs_queue_duration_seconds, 'Duration in seconds that a Sidekiq job was queued before being executed', {}, SIDEKIQ_LATENCY_BUCKETS),
sidekiq_jobs_failed_total: ::Gitlab::Metrics.counter(:sidekiq_jobs_failed_total, 'Sidekiq jobs failed'),
sidekiq_jobs_retried_total: ::Gitlab::Metrics.counter(:sidekiq_jobs_retried_total, 'Sidekiq jobs retried'),
@@ -65,6 +69,10 @@ module Gitlab
def get_thread_cputime
defined?(Process::CLOCK_THREAD_CPUTIME_ID) ? Process.clock_gettime(Process::CLOCK_THREAD_CPUTIME_ID) : 0
end
+
+ def get_gitaly_time(job)
+ job.fetch(:gitaly_duration, 0) / 1000.0
+ end
end
end
end
diff --git a/lib/gitlab/sidekiq_queue.rb b/lib/gitlab/sidekiq_queue.rb
new file mode 100644
index 00000000000..807c27a71ff
--- /dev/null
+++ b/lib/gitlab/sidekiq_queue.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+module Gitlab
+ class SidekiqQueue
+ include Gitlab::Utils::StrongMemoize
+
+ NoMetadataError = Class.new(StandardError)
+ InvalidQueueError = Class.new(StandardError)
+
+ attr_reader :queue_name
+
+ def initialize(queue_name)
+ @queue_name = queue_name
+ end
+
+ def drop_jobs!(search_metadata, timeout:)
+ start_time = Gitlab::Metrics::System.monotonic_time
+ completed = true
+ deleted_jobs = 0
+
+ job_search_metadata =
+ search_metadata
+ .stringify_keys
+ .slice(*Labkit::Context::KNOWN_KEYS)
+ .transform_keys { |key| "meta.#{key}" }
+ .compact
+
+ raise NoMetadataError if job_search_metadata.empty?
+ raise InvalidQueueError unless queue
+
+ queue.each do |job|
+ if timeout_exceeded?(start_time, timeout)
+ completed = false
+ break
+ end
+
+ next unless job_matches?(job, job_search_metadata)
+
+ job.delete
+ deleted_jobs += 1
+ end
+
+ {
+ completed: completed,
+ deleted_jobs: deleted_jobs,
+ queue_size: queue.size
+ }
+ end
+
+ private
+
+ def queue
+ strong_memoize(:queue) do
+ # Sidekiq::Queue.new always returns a queue, even if it doesn't
+ # exist.
+ Sidekiq::Queue.all.find { |queue| queue.name == queue_name }
+ end
+ end
+
+ def job_matches?(job, job_search_metadata)
+ job_search_metadata.all? { |key, value| job[key] == value }
+ end
+
+ def timeout_exceeded?(start_time, timeout)
+ (Gitlab::Metrics::System.monotonic_time - start_time) > timeout
+ end
+ end
+end
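A hypothetical call to the class above; the queue name and metadata are examples only, and only keys present in `Labkit::Context::KNOWN_KEYS` are kept for matching.

  result = Gitlab::SidekiqQueue.new('authorized_projects').drop_jobs!(
    { user: 'example-user' },
    timeout: 30
  )
  # result is a hash with :completed, :deleted_jobs and :queue_size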
diff --git a/lib/gitlab/slash_commands/presenters/base.rb b/lib/gitlab/slash_commands/presenters/base.rb
index 54d74ed3998..08de9df14f8 100644
--- a/lib/gitlab/slash_commands/presenters/base.rb
+++ b/lib/gitlab/slash_commands/presenters/base.rb
@@ -63,7 +63,7 @@ module Gitlab
# Convert Markdown to slacks format
def format(string)
- Slack::Notifier::LinkFormatter.format(string)
+ Slack::Messenger::Util::LinkFormatter.format(string)
end
def resource_url
diff --git a/lib/gitlab/template/finders/global_template_finder.rb b/lib/gitlab/template/finders/global_template_finder.rb
index 2dd4b7a4092..3669d652fd3 100644
--- a/lib/gitlab/template/finders/global_template_finder.rb
+++ b/lib/gitlab/template/finders/global_template_finder.rb
@@ -5,9 +5,11 @@ module Gitlab
module Template
module Finders
class GlobalTemplateFinder < BaseTemplateFinder
- def initialize(base_dir, extension, categories = {})
+ def initialize(base_dir, extension, categories = {}, exclusions: [])
@categories = categories
@extension = extension
+ @exclusions = exclusions
+
super(base_dir)
end
@@ -16,6 +18,8 @@ module Gitlab
end
def find(key)
+ return if excluded?(key)
+
file_name = "#{key}#{@extension}"
# The key is untrusted input, so ensure we can't be directed outside
@@ -28,11 +32,20 @@ module Gitlab
def list_files_for(dir)
dir = "#{dir}/" unless dir.end_with?('/')
- Dir.glob(File.join(dir, "*#{@extension}")).select { |f| f =~ self.class.filter_regex(@extension) }
+
+ Dir.glob(File.join(dir, "*#{@extension}")).select do |f|
+ next if excluded?(f)
+
+ f =~ self.class.filter_regex(@extension)
+ end
end
private
+ def excluded?(file_name)
+ @exclusions.include?(file_name)
+ end
+
def select_directory(file_name)
@categories.keys.find do |category|
File.exist?(File.join(category_directory(category), file_name))
diff --git a/lib/gitlab/template/gitlab_ci_yml_template.rb b/lib/gitlab/template/gitlab_ci_yml_template.rb
index ee91f1200cd..26a9dc9fd38 100644
--- a/lib/gitlab/template/gitlab_ci_yml_template.rb
+++ b/lib/gitlab/template/gitlab_ci_yml_template.rb
@@ -17,16 +17,25 @@ module Gitlab
{
'General' => '',
'Pages' => 'Pages',
+ 'Verify' => 'Verify',
'Auto deploy' => 'autodeploy'
}
end
+ def disabled_templates
+ %w[
+ Verify/Browser-Performance
+ ]
+ end
+
def base_dir
Rails.root.join('lib/gitlab/ci/templates')
end
def finder(project = nil)
- Gitlab::Template::Finders::GlobalTemplateFinder.new(self.base_dir, self.extension, self.categories)
+ Gitlab::Template::Finders::GlobalTemplateFinder.new(
+ self.base_dir, self.extension, self.categories, exclusions: self.disabled_templates
+ )
end
end
end
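A small sketch of the exclusion taking effect; it assumes lookups go through the finder configured above.

  finder = Gitlab::Template::GitlabCiYmlTemplate.finder
  finder.find('Verify/Browser-Performance') # => nil, the key is listed in disabled_templates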
diff --git a/lib/gitlab/testing/clear_thread_memory_cache_middleware.rb b/lib/gitlab/testing/clear_thread_memory_cache_middleware.rb
new file mode 100644
index 00000000000..6f54038ae22
--- /dev/null
+++ b/lib/gitlab/testing/clear_thread_memory_cache_middleware.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Testing
+ class ClearThreadMemoryCacheMiddleware
+ def initialize(app)
+ @app = app
+ end
+
+ def call(env)
+ Gitlab::ThreadMemoryCache.cache_backend.clear
+
+ @app.call(env)
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/tracing.rb b/lib/gitlab/tracing.rb
deleted file mode 100644
index 7732d7c9d9c..00000000000
--- a/lib/gitlab/tracing.rb
+++ /dev/null
@@ -1,37 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module Tracing
- # Only enable tracing when the `GITLAB_TRACING` env var is configured. Note that we avoid using ApplicationSettings since
- # the same environment variable needs to be configured for Workhorse, Gitaly and any other components which
- # emit tracing. Since other components may start before Rails, and may not have access to ApplicationSettings,
- # an env var makes more sense.
- def self.enabled?
- connection_string.present?
- end
-
- def self.connection_string
- ENV['GITLAB_TRACING']
- end
-
- def self.tracing_url_template
- ENV['GITLAB_TRACING_URL']
- end
-
- def self.tracing_url_enabled?
- enabled? && tracing_url_template.present?
- end
-
- # This will provide a link into the distributed tracing for the current trace,
- # if it has been captured.
- def self.tracing_url
- return unless tracing_url_enabled?
-
- # Avoid using `format` since it can throw TypeErrors
- # which we want to avoid on unsanitised env var input
- tracing_url_template.to_s
- .gsub(/\{\{\s*correlation_id\s*\}\}/, Labkit::Correlation::CorrelationId.current_id.to_s)
- .gsub(/\{\{\s*service\s*\}\}/, Gitlab.process_name)
- end
- end
-end
diff --git a/lib/gitlab/uploads/migration_helper.rb b/lib/gitlab/uploads/migration_helper.rb
index 4ff064007f1..96ee6f0e8e6 100644
--- a/lib/gitlab/uploads/migration_helper.rb
+++ b/lib/gitlab/uploads/migration_helper.rb
@@ -21,6 +21,10 @@ module Gitlab
prepare_variables(args, logger)
end
+ def self.categories
+ CATEGORIES
+ end
+
def migrate_to_remote_storage
@to_store = ObjectStorage::Store::REMOTE
@@ -70,3 +74,5 @@ module Gitlab
end
end
end
+
+Gitlab::Uploads::MigrationHelper.prepend_if_ee('EE::Gitlab::Uploads::MigrationHelper')
diff --git a/lib/gitlab/url_blocker.rb b/lib/gitlab/url_blocker.rb
index 0adca34440c..88094839062 100644
--- a/lib/gitlab/url_blocker.rb
+++ b/lib/gitlab/url_blocker.rb
@@ -49,7 +49,7 @@ module Gitlab
return [uri, nil] unless address_info
ip_address = ip_address(address_info)
- return [uri, nil] if domain_whitelisted?(uri) || ip_whitelisted?(ip_address)
+ return [uri, nil] if domain_whitelisted?(uri) || ip_whitelisted?(ip_address, port: get_port(uri))
protected_uri_with_hostname = enforce_uri_hostname(ip_address, uri, dns_rebind_protection)
@@ -254,11 +254,11 @@ module Gitlab
end
def domain_whitelisted?(uri)
- Gitlab::UrlBlockers::UrlWhitelist.domain_whitelisted?(uri.normalized_host)
+ Gitlab::UrlBlockers::UrlWhitelist.domain_whitelisted?(uri.normalized_host, port: get_port(uri))
end
- def ip_whitelisted?(ip_address)
- Gitlab::UrlBlockers::UrlWhitelist.ip_whitelisted?(ip_address)
+ def ip_whitelisted?(ip_address, port: nil)
+ Gitlab::UrlBlockers::UrlWhitelist.ip_whitelisted?(ip_address, port: port)
end
def config
diff --git a/lib/gitlab/url_blockers/domain_whitelist_entry.rb b/lib/gitlab/url_blockers/domain_whitelist_entry.rb
new file mode 100644
index 00000000000..b94e8ee3f69
--- /dev/null
+++ b/lib/gitlab/url_blockers/domain_whitelist_entry.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module UrlBlockers
+ class DomainWhitelistEntry
+ attr_reader :domain, :port
+
+ def initialize(domain, port: nil)
+ @domain = domain
+ @port = port
+ end
+
+ def match?(requested_domain, requested_port = nil)
+ return false unless domain == requested_domain
+ return true if port.nil?
+
+ port == requested_port
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/url_blockers/ip_whitelist_entry.rb b/lib/gitlab/url_blockers/ip_whitelist_entry.rb
new file mode 100644
index 00000000000..88c76574d3d
--- /dev/null
+++ b/lib/gitlab/url_blockers/ip_whitelist_entry.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module UrlBlockers
+ class IpWhitelistEntry
+ attr_reader :ip, :port
+
+ # Argument ip should be an IPAddr object
+ def initialize(ip, port: nil)
+ @ip = ip
+ @port = port
+ end
+
+ def match?(requested_ip, requested_port = nil)
+ return false unless ip.include?(requested_ip)
+ return true if port.nil?
+
+ port == requested_port
+ end
+ end
+ end
+end
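A small sketch of the matching rules above; addresses and ports are illustrative, and `DomainWhitelistEntry` behaves analogously for domain strings.

  entry = Gitlab::UrlBlockers::IpWhitelistEntry.new(IPAddr.new('192.168.1.0/24'), port: 443)

  entry.match?(IPAddr.new('192.168.1.1'), 443) # => true
  entry.match?(IPAddr.new('192.168.1.1'), 80)  # => false, the entry is port-restricted
  entry.match?(IPAddr.new('10.0.0.1'), 443)    # => false, outside the range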
diff --git a/lib/gitlab/url_blockers/url_whitelist.rb b/lib/gitlab/url_blockers/url_whitelist.rb
index 7622de4fdbe..59f74dde7fc 100644
--- a/lib/gitlab/url_blockers/url_whitelist.rb
+++ b/lib/gitlab/url_blockers/url_whitelist.rb
@@ -4,21 +4,25 @@ module Gitlab
module UrlBlockers
class UrlWhitelist
class << self
- def ip_whitelisted?(ip_string)
+ def ip_whitelisted?(ip_string, port: nil)
return false if ip_string.blank?
ip_whitelist, _ = outbound_local_requests_whitelist_arrays
ip_obj = Gitlab::Utils.string_to_ip_object(ip_string)
- ip_whitelist.any? { |ip| ip.include?(ip_obj) }
+ ip_whitelist.any? do |ip_whitelist_entry|
+ ip_whitelist_entry.match?(ip_obj, port)
+ end
end
- def domain_whitelisted?(domain_string)
+ def domain_whitelisted?(domain_string, port: nil)
return false if domain_string.blank?
_, domain_whitelist = outbound_local_requests_whitelist_arrays
- domain_whitelist.include?(domain_string)
+ domain_whitelist.any? do |domain_whitelist_entry|
+ domain_whitelist_entry.match?(domain_string, port)
+ end
end
private
diff --git a/lib/gitlab/url_builder.rb b/lib/gitlab/url_builder.rb
index 4bedf7a301e..cc53e3b7577 100644
--- a/lib/gitlab/url_builder.rb
+++ b/lib/gitlab/url_builder.rb
@@ -13,7 +13,8 @@ module Gitlab
end
def url
- case object
+ # Objects are sometimes wrapped in a BatchLoader instance
+ case object.itself
when Commit
commit_url
when Issue
@@ -33,7 +34,7 @@ module Gitlab
when User
user_url(object)
else
- raise NotImplementedError.new("No URL builder defined for #{object.class}")
+ raise NotImplementedError.new("No URL builder defined for #{object.inspect}")
end
end
diff --git a/lib/gitlab/usage_counters/common.rb b/lib/gitlab/usage_counters/common.rb
new file mode 100644
index 00000000000..a5bdac430f4
--- /dev/null
+++ b/lib/gitlab/usage_counters/common.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module UsageCounters
+ class Common
+ class << self
+ def increment(project_id)
+ Gitlab::Redis::SharedState.with { |redis| redis.hincrby(base_key, project_id, 1) }
+ end
+
+ def usage_totals
+ Gitlab::Redis::SharedState.with do |redis|
+ total_sum = 0
+
+ totals = redis.hgetall(base_key).each_with_object({}) do |(project_id, count), result|
+ total_sum += result[project_id.to_i] = count.to_i
+ end
+
+ totals[:total] = total_sum
+ totals
+ end
+ end
+
+ def base_key
+ raise NotImplementedError
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/usage_counters/pod_logs.rb b/lib/gitlab/usage_counters/pod_logs.rb
new file mode 100644
index 00000000000..94e29d2fad7
--- /dev/null
+++ b/lib/gitlab/usage_counters/pod_logs.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module UsageCounters
+ class PodLogs < Common
+ def self.base_key
+ 'POD_LOGS_USAGE_COUNTS'
+ end
+ end
+ end
+end
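A usage sketch for the counters above; `project.id` stands in for any project id.

  Gitlab::UsageCounters::PodLogs.increment(project.id)
  Gitlab::UsageCounters::PodLogs.usage_totals
  # => per-project counts keyed by project id, plus a :total sum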
diff --git a/lib/gitlab/usage_data.rb b/lib/gitlab/usage_data.rb
index 6e29a3e4cc4..b9cd4d74914 100644
--- a/lib/gitlab/usage_data.rb
+++ b/lib/gitlab/usage_data.rb
@@ -2,7 +2,6 @@
module Gitlab
class UsageData
- APPROXIMATE_COUNT_MODELS = [Label, MergeRequest, Note, Todo].freeze
BATCH_SIZE = 100
class << self
@@ -67,8 +66,8 @@ module Gitlab
clusters_disabled: count(::Clusters::Cluster.disabled),
project_clusters_disabled: count(::Clusters::Cluster.disabled.project_type),
group_clusters_disabled: count(::Clusters::Cluster.disabled.group_type),
- clusters_platforms_eks: count(::Clusters::Cluster.aws_installed.enabled, batch: false),
- clusters_platforms_gke: count(::Clusters::Cluster.gcp_installed.enabled, batch: false),
+ clusters_platforms_eks: count(::Clusters::Cluster.aws_installed.enabled),
+ clusters_platforms_gke: count(::Clusters::Cluster.gcp_installed.enabled),
clusters_platforms_user: count(::Clusters::Cluster.user_provided.enabled),
clusters_applications_helm: count(::Clusters::Applications::Helm.available),
clusters_applications_ingress: count(::Clusters::Applications::Ingress.available),
@@ -85,7 +84,7 @@ module Gitlab
issues: count(Issue),
issues_created_from_gitlab_error_tracking_ui: count(SentryIssue),
issues_with_associated_zoom_link: count(ZoomMeeting.added_to_issue),
- issues_using_zoom_quick_actions: count(ZoomMeeting.select(:issue_id).distinct, batch: false),
+ issues_using_zoom_quick_actions: distinct_count(ZoomMeeting, :issue_id),
issues_with_embedded_grafana_charts_approx: ::Gitlab::GrafanaEmbedUsageData.issue_count,
incident_issues: count(::Issue.authored(::User.alert_bot)),
keys: count(Key),
@@ -107,10 +106,12 @@ module Gitlab
suggestions: count(Suggestion),
todos: count(Todo),
uploads: count(Upload),
- web_hooks: count(WebHook)
+ web_hooks: count(WebHook),
+ labels: count(Label),
+ merge_requests: count(MergeRequest),
+ notes: count(Note)
}.merge(
services_usage,
- approximate_counts,
usage_counters,
user_preferences_usage,
ingress_modsecurity_usage
@@ -122,6 +123,8 @@ module Gitlab
def cycle_analytics_usage_data
Gitlab::CycleAnalytics::UsageData.new.to_json
+ rescue ActiveRecord::StatementInvalid
+ { avg_cycle_analytics: {} }
end
def features_usage_data
@@ -181,10 +184,8 @@ module Gitlab
# rubocop: disable CodeReuse/ActiveRecord
def services_usage
- service_counts = count(Service.active.where(template: false).where.not(type: 'JiraService').group(:type), fallback: Hash.new(-1), batch: false)
-
- results = Service.available_services_names.each_with_object({}) do |service_name, response|
- response["projects_#{service_name}_active".to_sym] = service_counts["#{service_name}_service".camelize] || 0
+ results = Service.available_services_names.without('jira').each_with_object({}) do |service_name, response|
+ response["projects_#{service_name}_active".to_sym] = count(Service.active.where(template: false, type: "#{service_name}_service".camelize))
end
# Keep old Slack keys for backward compatibility, https://gitlab.com/gitlab-data/analytics/issues/3241
@@ -232,7 +233,7 @@ module Gitlab
end
def count(relation, column = nil, fallback: -1, batch: true)
- if batch && Feature.enabled?(:usage_ping_batch_counter)
+ if batch && Feature.enabled?(:usage_ping_batch_counter, default_enabled: true)
Gitlab::Database::BatchCount.batch_count(relation, column)
else
relation.count
@@ -242,7 +243,7 @@ module Gitlab
end
def distinct_count(relation, column = nil, fallback: -1, batch: true)
- if batch && Feature.enabled?(:usage_ping_batch_counter)
+ if batch && Feature.enabled?(:usage_ping_batch_counter, default_enabled: true)
Gitlab::Database::BatchCount.batch_distinct_count(relation, column)
else
relation.distinct_count_by(column)
@@ -251,16 +252,6 @@ module Gitlab
fallback
end
- def approximate_counts
- approx_counts = Gitlab::Database::Count.approximate_counts(APPROXIMATE_COUNT_MODELS)
-
- APPROXIMATE_COUNT_MODELS.each_with_object({}) do |model, result|
- key = model.name.underscore.pluralize.to_sym
-
- result[key] = approx_counts[model] || -1
- end
- end
-
def installation_type
if Rails.env.production?
Gitlab::INSTALLATION_TYPE
diff --git a/lib/gitlab/user_access.rb b/lib/gitlab/user_access.rb
index a00e72f7aad..5e0a4faeba8 100644
--- a/lib/gitlab/user_access.rb
+++ b/lib/gitlab/user_access.rb
@@ -33,7 +33,7 @@ module Gitlab
return false unless can_access_git?
if user.requires_ldap_check? && user.try_obtain_ldap_lease
- return false unless Gitlab::Auth::LDAP::Access.allowed?(user)
+ return false unless Gitlab::Auth::Ldap::Access.allowed?(user)
end
true
@@ -104,7 +104,7 @@ module Gitlab
@permission_cache ||= {}
end
- def can_access_git?
+ request_cache def can_access_git?
user && user.can?(:access_git)
end
diff --git a/lib/gitlab/user_access_snippet.rb b/lib/gitlab/user_access_snippet.rb
new file mode 100644
index 00000000000..bfed86c4df4
--- /dev/null
+++ b/lib/gitlab/user_access_snippet.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+module Gitlab
+ class UserAccessSnippet < UserAccess
+ extend ::Gitlab::Cache::RequestCache
+ # TODO: apply override check https://gitlab.com/gitlab-org/gitlab/issues/205677
+
+ request_cache_key do
+ [user&.id, snippet&.id]
+ end
+
+ attr_reader :snippet
+
+ def initialize(user, snippet: nil)
+ @user = user
+ @snippet = snippet
+ @project = snippet&.project
+ end
+
+ def can_do_action?(action)
+ return false unless can_access_git?
+
+ permission_cache[action] =
+ permission_cache.fetch(action) do
+ Ability.allowed?(user, action, snippet)
+ end
+ end
+
+ def can_create_tag?(ref)
+ false
+ end
+
+ def can_delete_branch?(ref)
+ false
+ end
+
+ def can_push_to_branch?(ref)
+ super
+ return false unless snippet
+ return false unless can_do_action?(:update_snippet)
+
+ true
+ end
+
+ def can_merge_to_branch?(ref)
+ false
+ end
+ end
+end
diff --git a/lib/gitlab/utils.rb b/lib/gitlab/utils.rb
index 3c567fad68d..ad6b213bb50 100644
--- a/lib/gitlab/utils.rb
+++ b/lib/gitlab/utils.rb
@@ -5,10 +5,20 @@ module Gitlab
extend self
# Ensure that the relative path will not traverse outside the base directory
- def check_path_traversal!(path)
- raise StandardError.new("Invalid path") if path.start_with?("..#{File::SEPARATOR}") ||
+ # We url decode the path to avoid passing invalid paths forward in url encoded format.
+ # We are ok to pass some double encoded paths to File.open since they won't resolve.
+ # Also see https://gitlab.com/gitlab-org/gitlab/-/merge_requests/24223#note_284122580
+ # It also checks for ALT_SEPARATOR aka '\' (backslash)
+ def check_path_traversal!(path, allowed_absolute: false)
+ path = CGI.unescape(path)
+
+ if path.start_with?("..#{File::SEPARATOR}", "..#{File::ALT_SEPARATOR}") ||
path.include?("#{File::SEPARATOR}..#{File::SEPARATOR}") ||
- path.end_with?("#{File::SEPARATOR}..")
+ path.end_with?("#{File::SEPARATOR}..") ||
+ (!allowed_absolute && Pathname.new(path).absolute?)
+
+ raise StandardError.new("Invalid path")
+ end
path
end
diff --git a/lib/gitlab/utils/json_size_estimator.rb b/lib/gitlab/utils/json_size_estimator.rb
new file mode 100644
index 00000000000..9f8ea3e61f9
--- /dev/null
+++ b/lib/gitlab/utils/json_size_estimator.rb
@@ -0,0 +1,104 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Utils
+ # This class estimates the JSON blob byte size of a Ruby object using as
+ # few allocations as possible.
+ # The estimation should be quite accurate when using simple objects.
+ #
+ # Example:
+ #
+ # Gitlab::Utils::JsonSizeEstimator.estimate(["a", { b: 12, c: nil }])
+ class JsonSizeEstimator
+ ARRAY_BRACKETS_SIZE = 2 # []
+ OBJECT_BRACKETS_SIZE = 2 # {}
+ DOUBLEQUOTE_SIZE = 2 # ""
+ COLON_SIZE = 1 # : character size from {"a": 1}
+ MINUS_SIGN_SIZE = 1 # - character size from -1
+ NULL_SIZE = 4 # null
+
+ class << self
+ # Returns: integer (number of bytes)
+ def estimate(object)
+ case object
+ when Hash
+ estimate_hash(object)
+ when Array
+ estimate_array(object)
+ when String
+ estimate_string(object)
+ when Integer
+ estimate_integer(object)
+ when Float
+ estimate_float(object)
+ when DateTime, Time
+ estimate_time(object)
+ when NilClass
+ NULL_SIZE
+ else
+ # might be incorrect, but #to_s is safe; #to_json might be disabled for some objects (e.g. User)
+ estimate_string(object.to_s)
+ end
+ end
+
+ private
+
+ def estimate_hash(hash)
+ size = 0
+ item_count = 0
+
+ hash.each do |key, value|
+ item_count += 1
+
+ size += estimate(key.to_s) + COLON_SIZE + estimate(value)
+ end
+
+ size + OBJECT_BRACKETS_SIZE + comma_count(item_count)
+ end
+
+ def estimate_array(array)
+ size = 0
+ item_count = 0
+
+ array.each do |item|
+ item_count += 1
+
+ size += estimate(item)
+ end
+
+ size + ARRAY_BRACKETS_SIZE + comma_count(item_count)
+ end
+
+ def estimate_string(string)
+ string.bytesize + DOUBLEQUOTE_SIZE
+ end
+
+ def estimate_float(float)
+ float.to_s.bytesize
+ end
+
+ def estimate_integer(integer)
+ if integer > 0
+ integer_string_size(integer)
+ elsif integer < 0
+ integer_string_size(integer.abs) + MINUS_SIGN_SIZE
+ else # 0
+ 1
+ end
+ end
+
+ def estimate_time(time)
+ time.to_json.size
+ end
+
+ def integer_string_size(integer)
+ Math.log10(integer).floor + 1
+ end
+
+ def comma_count(item_count)
+ item_count == 0 ? 0 : item_count - 1
+ end
+ end
+ end
+ end
+end
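Continuing the example from the class comment above; for this input the estimate happens to match the exact serialized size.

  Gitlab::Utils::JsonSizeEstimator.estimate(["a", { b: 12, c: nil }]) # => 23
  '["a",{"b":12,"c":null}]'.bytesize                                  # => 23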
diff --git a/lib/gitlab/utils/log_limited_array.rb b/lib/gitlab/utils/log_limited_array.rb
index fe8aadf9020..e0589c3df4c 100644
--- a/lib/gitlab/utils/log_limited_array.rb
+++ b/lib/gitlab/utils/log_limited_array.rb
@@ -6,19 +6,19 @@ module Gitlab
MAXIMUM_ARRAY_LENGTH = 10.kilobytes
# Prepare an array for logging by limiting its JSON representation
- # to around 10 kilobytes. Once we hit the limit, add "..." as the
- # last item in the returned array.
- def self.log_limited_array(array)
+ # to around 10 kilobytes. Once we hit the limit, add the sentinel
+ # value as the last item in the returned array.
+ def self.log_limited_array(array, sentinel: '...')
return [] unless array.is_a?(Array)
total_length = 0
limited_array = array.take_while do |arg|
- total_length += arg.to_json.length
+ total_length += JsonSizeEstimator.estimate(arg)
total_length <= MAXIMUM_ARRAY_LENGTH
end
- limited_array.push('...') if total_length > MAXIMUM_ARRAY_LENGTH
+ limited_array.push(sentinel) if total_length > MAXIMUM_ARRAY_LENGTH
limited_array
end
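A sketch of the new `sentinel:` keyword in use; the oversized array is illustrative.

  Gitlab::Utils::LogLimitedArray.log_limited_array(%w[a b c])
  # => ["a", "b", "c"]

  huge = Array.new(10_000) { 'x' * 100 } # well past the ~10 KB budget
  Gitlab::Utils::LogLimitedArray.log_limited_array(huge, sentinel: '[TRUNCATED]').last
  # => "[TRUNCATED]"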
diff --git a/lib/gitlab/utils/measuring.rb b/lib/gitlab/utils/measuring.rb
new file mode 100644
index 00000000000..c9e6cb9c039
--- /dev/null
+++ b/lib/gitlab/utils/measuring.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'prometheus/pid_provider'
+
+module Gitlab
+ module Utils
+ class Measuring
+ def initialize(logger: Logger.new($stdout))
+ @logger = logger
+ end
+
+ def with_measuring
+ logger.info "Measuring enabled..."
+ with_gc_counter do
+ with_count_queries do
+ with_measure_time do
+ yield
+ end
+ end
+ end
+
+ logger.info "Memory usage: #{Gitlab::Metrics::System.memory_usage.to_f / 1024 / 1024} MiB"
+ logger.info "Label: #{::Prometheus::PidProvider.worker_id}"
+ end
+
+ private
+
+ attr_reader :logger
+
+ def with_count_queries(&block)
+ count = 0
+
+ counter_f = ->(_name, _started, _finished, _unique_id, payload) {
+ count += 1 unless payload[:name].in? %w[CACHE SCHEMA]
+ }
+
+ ActiveSupport::Notifications.subscribed(counter_f, "sql.active_record", &block)
+
+ logger.info "Number of sql calls: #{count}"
+ end
+
+ def with_gc_counter
+ gc_counts_before = GC.stat.select { |k, _v| k =~ /count/ }
+ yield
+ gc_counts_after = GC.stat.select { |k, _v| k =~ /count/ }
+ stats = gc_counts_before.merge(gc_counts_after) { |_k, vb, va| va - vb }
+
+ logger.info "Total GC count: #{stats[:count]}"
+ logger.info "Minor GC count: #{stats[:minor_gc_count]}"
+ logger.info "Major GC count: #{stats[:major_gc_count]}"
+ end
+
+ def with_measure_time
+ timing = Benchmark.realtime do
+ yield
+ end
+
+ logger.info "Time to finish: #{duration_in_numbers(timing)}"
+ end
+
+ def duration_in_numbers(duration_in_seconds)
+ milliseconds = duration_in_seconds.in_milliseconds % 1.second.in_milliseconds
+ seconds = duration_in_seconds % 1.minute
+ minutes = (duration_in_seconds / 1.minute) % (1.hour / 1.minute)
+ hours = duration_in_seconds / 1.hour
+
+ if hours == 0
+ "%02d:%02d:%03d" % [minutes, seconds, milliseconds]
+ else
+ "%02d:%02d:%02d:%03d" % [hours, minutes, seconds, milliseconds]
+ end
+ end
+ end
+ end
+end
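A one-line usage sketch for the helper above; `SomeTask.run` is a placeholder for the measured block.

  Gitlab::Utils::Measuring.new.with_measuring { SomeTask.run }
  # logs SQL call count, GC counts, wall time and memory usage around the block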
diff --git a/lib/gitlab/with_request_store.rb b/lib/gitlab/with_request_store.rb
new file mode 100644
index 00000000000..d6c05e1e256
--- /dev/null
+++ b/lib/gitlab/with_request_store.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module WithRequestStore
+ def with_request_store
+ RequestStore.begin!
+ yield
+ ensure
+ RequestStore.end!
+ RequestStore.clear!
+ end
+ end
+end
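The RequestStoreMiddleware change earlier in this patch uses the concern exactly like this minimal sketch; the including class is hypothetical.

  class SomeCliTask
    include Gitlab::WithRequestStore

    def execute
      with_request_store do
        # code relying on RequestStore-backed caching runs inside one store lifecycle
      end
    end
  end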
diff --git a/lib/gitlab/workhorse.rb b/lib/gitlab/workhorse.rb
index 8696e23cbc7..7da20b49d9d 100644
--- a/lib/gitlab/workhorse.rb
+++ b/lib/gitlab/workhorse.rb
@@ -24,7 +24,7 @@ module Gitlab
attrs = {
GL_ID: Gitlab::GlId.gl_id(user),
- GL_REPOSITORY: repo_type.identifier_for_container(repository.project),
+ GL_REPOSITORY: repo_type.identifier_for_container(repository.container),
GL_USERNAME: user&.username,
ShowAllRefs: show_all_refs,
Repository: repository.gitaly_repository.to_h,
diff --git a/lib/gitlab/x509/commit.rb b/lib/gitlab/x509/commit.rb
index b1d15047981..4b35c0ef7d2 100644
--- a/lib/gitlab/x509/commit.rb
+++ b/lib/gitlab/x509/commit.rb
@@ -184,11 +184,13 @@ module Gitlab
commit_sha: @commit.sha,
project: @commit.project,
x509_certificate_id: certificate.id,
- verification_status: verification_status
+ verification_status: verification_status(certificate)
}
end
- def verification_status
+ def verification_status(certificate)
+ return :unverified if certificate.revoked?
+
if verified_signature && certificate_email == @commit.committer_email
:verified
else
diff --git a/lib/gitlab_danger.rb b/lib/gitlab_danger.rb
index e776e2b7ea3..ee0951f18ca 100644
--- a/lib/gitlab_danger.rb
+++ b/lib/gitlab_danger.rb
@@ -3,14 +3,15 @@
class GitlabDanger
LOCAL_RULES ||= %w[
changes_size
- gemfile
documentation
frozen_string
duplicate_yarn_dependencies
prettier
eslint
+ karma
database
commit_messages
+ telemetry
].freeze
CI_ONLY_RULES ||= %w[
diff --git a/lib/grafana/time_window.rb b/lib/grafana/time_window.rb
new file mode 100644
index 00000000000..111e3ab7de2
--- /dev/null
+++ b/lib/grafana/time_window.rb
@@ -0,0 +1,130 @@
+# frozen_string_literal: true
+
+module Grafana
+ # Allows for easy formatting and manipulation of timestamps
+ # coming from a Grafana URL
+ class TimeWindow
+ include ::Gitlab::Utils::StrongMemoize
+
+ def initialize(from, to)
+ @from = from
+ @to = to
+ end
+
+ def formatted
+ {
+ start: window[:from].formatted,
+ end: window[:to].formatted
+ }
+ end
+
+ def in_milliseconds
+ window.transform_values(&:to_ms)
+ end
+
+ private
+
+ def window
+ strong_memoize(:window) do
+ specified_window
+ rescue Timestamp::Error
+ default_window
+ end
+ end
+
+ def specified_window
+ RangeWithDefaults.new(
+ from: Timestamp.from_ms_since_epoch(@from),
+ to: Timestamp.from_ms_since_epoch(@to)
+ ).to_hash
+ end
+
+ def default_window
+ RangeWithDefaults.new.to_hash
+ end
+ end
+
+ # For incomplete time ranges, adds default parameters to
+ # achieve a complete range. If a complete range is provided,
+ # it is returned unchanged.
+ class RangeWithDefaults
+ DEFAULT_RANGE = 8.hours
+
+ # @param from [Grafana::Timestamp, nil] Start of the expected range
+ # @param to [Grafana::Timestamp, nil] End of the expected range
+ def initialize(from: nil, to: nil)
+ @from = from
+ @to = to
+
+ apply_defaults!
+ end
+
+ def to_hash
+ { from: @from, to: @to }.compact
+ end
+
+ private
+
+ def apply_defaults!
+ @to ||= @from ? relative_end : Timestamp.new(Time.now)
+ @from ||= relative_start
+ end
+
+ def relative_start
+ Timestamp.new(DEFAULT_RANGE.before(@to.time))
+ end
+
+ def relative_end
+ Timestamp.new(DEFAULT_RANGE.since(@from.time))
+ end
+ end
+
+ # Offers a consistent API for timestamps originating from
+ # Grafana or other sources, allowing for formatting of timestamps
+ # as consumed by Grafana-related utilities
+ class Timestamp
+ Error = Class.new(StandardError)
+
+ attr_accessor :time
+
+ # @param time [Time]
+ def initialize(time)
+ @time = time
+ end
+
+ # Formats a timestamp from Grafana for compatibility with
+ # parsing in JS via `new Date(timestamp)`
+ def formatted
+ time.utc.strftime('%FT%TZ')
+ end
+
+ # Converts to milliseconds since epoch
+ def to_ms
+ time.to_i * 1000
+ end
+
+ class << self
+ # @param time [String] Representing milliseconds since epoch.
+ # This is what JS "decided" unix is.
+ def from_ms_since_epoch(time)
+ return if time.nil?
+
+ raise Error.new('Expected milliseconds since epoch') unless ms_since_epoch?(time)
+
+ new(cast_ms_to_time(time))
+ end
+
+ private
+
+ def cast_ms_to_time(time)
+ Time.at(time.to_i / 1000.0)
+ end
+
+ def ms_since_epoch?(time)
+ ms = time.to_i
+
+ ms.to_s == time && ms.bit_length < 64
+ end
+ end
+ end
+end
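A sketch of the classes above; the values are illustrative millisecond-since-epoch strings as they appear in a Grafana URL.

  window = Grafana::TimeWindow.new('1576850400000', '1576854000000')

  window.formatted        # => { start: "2019-12-20T14:00:00Z", end: "2019-12-20T15:00:00Z" }
  window.in_milliseconds  # => { from: 1576850400000, to: 1576854000000 }

  # Missing or malformed timestamps fall back to a default 8-hour window ending now.
  Grafana::TimeWindow.new(nil, 'not-a-timestamp').formatted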
diff --git a/lib/grafana/validator.rb b/lib/grafana/validator.rb
new file mode 100644
index 00000000000..760263f7ec9
--- /dev/null
+++ b/lib/grafana/validator.rb
@@ -0,0 +1,96 @@
+# frozen_string_literal: true
+
+# Performs checks on whether resources from Grafana can be handled
+# We have certain restrictions on which formats we accept.
+# Some are technical requirements, others are simplifications.
+module Grafana
+ class Validator
+ Error = Class.new(StandardError)
+
+ attr_reader :grafana_dashboard, :datasource, :panel, :query_params
+
+ UNSUPPORTED_GRAFANA_GLOBAL_VARS = %w(
+ $__interval_ms
+ $__timeFilter
+ $__name
+ $timeFilter
+ $interval
+ ).freeze
+
+ def initialize(grafana_dashboard, datasource, panel, query_params)
+ @grafana_dashboard = grafana_dashboard
+ @datasource = datasource
+ @panel = panel
+ @query_params = query_params
+ end
+
+ def validate!
+ validate_query_params!
+ validate_panel_type!
+ validate_variable_definitions!
+ validate_global_variables!
+ validate_datasource! if datasource
+ end
+
+ def valid?
+ validate!
+
+ true
+ rescue ::Grafana::Validator::Error
+ false
+ end
+
+ private
+
+ # See defaults in Banzai::Filter::InlineGrafanaMetricsFilter.
+ def validate_query_params!
+ return if [:from, :to].all? { |param| query_params.include?(param) }
+
+ raise_error 'Grafana query parameters must include from and to.'
+ end
+
+ # We may choose to support other panel types in future.
+ def validate_panel_type!
+ return if panel && panel[:type] == 'graph' && panel[:lines]
+
+ raise_error 'Panel type must be a line graph.'
+ end
+
+ # We must require variable definitions to create valid prometheus queries.
+ def validate_variable_definitions!
+ return unless grafana_dashboard[:dashboard][:templating]
+
+ return if grafana_dashboard[:dashboard][:templating][:list].all? do |variable|
+ query_params[:"var-#{variable[:name]}"].present?
+ end
+
+ raise_error 'All Grafana variables must be defined in the query parameters.'
+ end
+
+ # We may choose to support further Grafana variables in future.
+ def validate_global_variables!
+ return unless panel_contains_unsupported_vars?
+
+ raise_error "Prometheus must not include #{UNSUPPORTED_GRAFANA_GLOBAL_VARS}"
+ end
+
+ # We may choose to support additional datasources in future.
+ def validate_datasource!
+ return if datasource[:access] == 'proxy' && datasource[:type] == 'prometheus'
+
+ raise_error 'Only Prometheus datasources with proxy access in Grafana are supported.'
+ end
+
+ def panel_contains_unsupported_vars?
+ panel[:targets].any? do |target|
+ UNSUPPORTED_GRAFANA_GLOBAL_VARS.any? do |variable|
+ target[:expr].include?(variable)
+ end
+ end
+ end
+
+ def raise_error(message)
+ raise Validator::Error, message
+ end
+ end
+end
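A sketch of how the validator above is driven; the four arguments would normally be hashes built from the Grafana API response and the embed's query parameters.

  validator = Grafana::Validator.new(grafana_dashboard, datasource, panel, query_params)

  validator.valid?    # => true or false
  validator.validate! # raises Grafana::Validator::Error naming the first failed check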
diff --git a/lib/omni_auth/strategies/saml.rb b/lib/omni_auth/strategies/saml.rb
deleted file mode 100644
index ebe062f17e0..00000000000
--- a/lib/omni_auth/strategies/saml.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# frozen_string_literal: true
-
-module OmniAuth
- module Strategies
- class SAML
- extend ::Gitlab::Utils::Override
-
- # NOTE: This method duplicates code from omniauth-saml
- # so that we can access authn_request to store it
- # See: https://github.com/omniauth/omniauth-saml/issues/172
- override :request_phase
- def request_phase
- authn_request = OneLogin::RubySaml::Authrequest.new
-
- store_authn_request_id(authn_request)
-
- with_settings do |settings|
- redirect(authn_request.create(settings, additional_params_for_authn_request))
- end
- end
-
- private
-
- def store_authn_request_id(authn_request)
- Gitlab::Auth::Saml::OriginValidator.new(session).store_origin(authn_request)
- end
- end
- end
-end
diff --git a/lib/quality/kubernetes_client.rb b/lib/quality/kubernetes_client.rb
index 453b9d21adb..f83652e117f 100644
--- a/lib/quality/kubernetes_client.rb
+++ b/lib/quality/kubernetes_client.rb
@@ -48,7 +48,8 @@ module Quality
resource_names = raw_resource_names
command = [
'delete',
- %(--namespace "#{namespace}")
+ %(--namespace "#{namespace}"),
+ '--ignore-not-found'
]
Array(release_name).each do |release|
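
The net effect is that cleanup becomes idempotent: with `--ignore-not-found`, kubectl exits successfully even when the named resources were already deleted. The assembled command is roughly (namespace and resource names hypothetical):

    kubectl delete --namespace "review-apps" --ignore-not-found pod/foo deployment/bar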
diff --git a/lib/quality/test_level.rb b/lib/quality/test_level.rb
index 85e89059dbb..bbd8b4dcc3f 100644
--- a/lib/quality/test_level.rb
+++ b/lib/quality/test_level.rb
@@ -7,7 +7,10 @@ module Quality
TEST_LEVEL_FOLDERS = {
migration: %w[
migrations
+ ],
+ background_migration: %w[
lib/gitlab/background_migration
+ lib/ee/gitlab/background_migration
],
unit: %w[
bin
@@ -69,7 +72,7 @@ module Quality
case file_path
# Detect migration first since some background migration tests are under
# spec/lib/gitlab/background_migration and tests under spec/lib are unit by default
- when regexp(:migration)
+ when regexp(:migration), regexp(:background_migration)
:migration
when regexp(:unit)
:unit
@@ -82,6 +85,10 @@ module Quality
end
end
+ def background_migration?(file_path)
+ !!(file_path =~ regexp(:background_migration))
+ end
+
private
def folders_pattern(level)
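
Assuming the existing Quality::TestLevel API, the new folder entry and predicate behave roughly like this sketch:

    level = Quality::TestLevel.new
    level.level_for('spec/lib/gitlab/background_migration/foo_spec.rb')             # => :migration
    level.background_migration?('spec/lib/gitlab/background_migration/foo_spec.rb') # => true
    level.background_migration?('spec/migrations/add_column_spec.rb')               # => false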
diff --git a/lib/sentry/client/issue.rb b/lib/sentry/client/issue.rb
index 1c5d88e8862..986311ab62a 100644
--- a/lib/sentry/client/issue.rb
+++ b/lib/sentry/client/issue.rb
@@ -75,7 +75,21 @@ module Sentry
http_get(api_urls.issue_url(issue_id))[:body]
end
- def parse_gitlab_issue(plugin_issues)
+ def parse_gitlab_issue(issue)
+ parse_issue_annotations(issue) || parse_plugin_issue(issue)
+ end
+
+ def parse_issue_annotations(issue)
+ issue
+ .fetch('annotations', [])
+ .reject(&:blank?)
+ .map { |annotation| Nokogiri.make(annotation) }
+ .find { |html| html['href']&.starts_with?(Gitlab.config.gitlab.url) }
+ .try(:[], 'href')
+ end
+
+ def parse_plugin_issue(issue)
+ plugin_issues = issue.fetch('pluginIssues', nil)
return unless plugin_issues
gitlab_plugin = plugin_issues.detect { |item| item['id'] == 'gitlab' }
@@ -145,7 +159,7 @@ module Sentry
short_id: issue.fetch('shortId', nil),
status: issue.fetch('status', nil),
frequency: issue.dig('stats', '24h'),
- gitlab_issue: parse_gitlab_issue(issue.fetch('pluginIssues', nil)),
+ gitlab_issue: parse_gitlab_issue(issue),
project_id: issue.dig('project', 'id'),
project_name: issue.dig('project', 'name'),
project_slug: issue.dig('project', 'slug'),
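
To illustrate the new annotation path (URLs below are hypothetical): the first anchor in the issue's annotations that points back at the configured GitLab URL wins, and the older pluginIssues lookup remains as a fallback.

    issue = {
      'annotations'  => ['<a href="https://gitlab.example.com/group/project/-/issues/42">GitLab-42</a>'],
      'pluginIssues' => nil
    }
    # parse_gitlab_issue(issue) would return
    # "https://gitlab.example.com/group/project/-/issues/42"
    # when Gitlab.config.gitlab.url is "https://gitlab.example.com"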
diff --git a/lib/support/init.d/gitlab b/lib/support/init.d/gitlab
index 1c51288adf6..982c1dc8866 100755
--- a/lib/support/init.d/gitlab
+++ b/lib/support/init.d/gitlab
@@ -26,7 +26,7 @@
### Environment variables
RAILS_ENV="production"
-EXPERIMENTAL_PUMA=""
+USE_UNICORN=""
# Script variable names should be lower-case not to conflict with
# internal /bin/sh variables such as PATH, EDITOR or SHELL.
@@ -68,10 +68,10 @@ if ! cd "$app_root" ; then
fi
# Select the web server to use
-if [ -z "$EXPERIMENTAL_PUMA" ]; then
- use_web_server="unicorn"
-else
+if [ -z "$USE_UNICORN" ]; then
use_web_server="puma"
+else
+ use_web_server="unicorn"
fi
diff --git a/lib/support/init.d/gitlab.default.example b/lib/support/init.d/gitlab.default.example
index ab41dba3017..bb271b16836 100644
--- a/lib/support/init.d/gitlab.default.example
+++ b/lib/support/init.d/gitlab.default.example
@@ -5,8 +5,8 @@
# Normal values are "production", "test" and "development".
RAILS_ENV="production"
-# Uncomment the line below to enable Puma web server instead of Unicorn.
-# EXPERIMENTAL_PUMA=1
+# Uncomment the line below to enable the Unicorn web server instead of Puma.
+# USE_UNICORN=1
# app_user defines the user that GitLab is run as.
# The default is "git".
diff --git a/lib/system_check/gitlab_shell_check.rb b/lib/system_check/gitlab_shell_check.rb
index 31c4ec33247..f539719ce87 100644
--- a/lib/system_check/gitlab_shell_check.rb
+++ b/lib/system_check/gitlab_shell_check.rb
@@ -50,7 +50,7 @@ module SystemCheck
end
def gitlab_shell_version
- Gitlab::Shell.new.version
+ Gitlab::Shell.version
end
end
end
diff --git a/lib/system_check/ldap_check.rb b/lib/system_check/ldap_check.rb
index 938026424ed..3d71edbc256 100644
--- a/lib/system_check/ldap_check.rb
+++ b/lib/system_check/ldap_check.rb
@@ -6,7 +6,7 @@ module SystemCheck
set_name 'LDAP:'
def multi_check
- if Gitlab::Auth::LDAP::Config.enabled?
+ if Gitlab::Auth::Ldap::Config.enabled?
# Only show up to 100 results because LDAP directories can be very big.
# This setting only affects the `rake gitlab:check` script.
limit = ENV['LDAP_CHECK_LIMIT']
@@ -21,13 +21,13 @@ module SystemCheck
private
def check_ldap(limit)
- servers = Gitlab::Auth::LDAP::Config.providers
+ servers = Gitlab::Auth::Ldap::Config.providers
servers.each do |server|
$stdout.puts "Server: #{server}"
begin
- Gitlab::Auth::LDAP::Adapter.open(server) do |adapter|
+ Gitlab::Auth::Ldap::Adapter.open(server) do |adapter|
check_ldap_auth(adapter)
$stdout.puts "LDAP users with access to your GitLab server (only showing the first #{limit} results)"
diff --git a/lib/tasks/cleanup.rake b/lib/tasks/cleanup.rake
new file mode 100644
index 00000000000..8574f26dbdc
--- /dev/null
+++ b/lib/tasks/cleanup.rake
@@ -0,0 +1,33 @@
+namespace :gitlab do
+ namespace :cleanup do
+ desc "GitLab | Cleanup | Delete moved repositories"
+ task moved: :gitlab_environment do
+ warn_user_is_not_gitlab
+ remove_flag = ENV['REMOVE']
+
+ Gitlab.config.repositories.storages.each do |name, repository_storage|
+ repo_root = repository_storage.legacy_disk_path.chomp('/')
+ # Look for global repos (legacy, depth 1) and normal repos (depth 2)
+ IO.popen(%W(find #{repo_root} -mindepth 1 -maxdepth 2 -name *+moved*.git)) do |find|
+ find.each_line do |path|
+ path.chomp!
+
+ if remove_flag
+ if FileUtils.rm_rf(path)
+ puts "Removed...#{path}".color(:green)
+ else
+ puts "Cannot remove #{path}".color(:red)
+ end
+ else
+ puts "Can be removed: #{path}".color(:green)
+ end
+ end
+ end
+ end
+
+ unless remove_flag
+ puts "To cleanup these repositories run this command with REMOVE=true".color(:yellow)
+ end
+ end
+ end
+end
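
Hypothetical invocations of the new task (Omnibus installs would use `sudo gitlab-rake` instead of `bundle exec rake`):

    bundle exec rake gitlab:cleanup:moved              # dry run: lists repositories whose directory name contains '+moved'
    bundle exec rake gitlab:cleanup:moved REMOVE=true  # actually deletes them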
diff --git a/lib/tasks/gitlab/backup.rake b/lib/tasks/gitlab/backup.rake
index 8f34101ea15..e5e2faaa7df 100644
--- a/lib/tasks/gitlab/backup.rake
+++ b/lib/tasks/gitlab/backup.rake
@@ -17,9 +17,16 @@ namespace :gitlab do
Rake::Task['gitlab:backup:registry:create'].invoke
backup = Backup::Manager.new(progress)
- backup.pack
- backup.cleanup
- backup.remove_old
+ backup.write_info
+
+ if ENV['SKIP'] && ENV['SKIP'].include?('tar')
+ backup.upload
+ else
+ backup.pack
+ backup.upload
+ backup.cleanup
+ backup.remove_old
+ end
progress.puts "Warning: Your gitlab.rb and gitlab-secrets.json files contain sensitive data \n" \
"and are not included in this backup. You will need these files to restore a backup.\n" \
@@ -33,7 +40,8 @@ namespace :gitlab do
warn_user_is_not_gitlab
backup = Backup::Manager.new(progress)
- backup.unpack
+ cleanup_required = backup.unpack
+ backup.verify_backup_version
unless backup.skipped?('db')
begin
@@ -72,7 +80,10 @@ namespace :gitlab do
Rake::Task['gitlab:shell:setup'].invoke
Rake::Task['cache:clear'].invoke
- backup.cleanup
+ if cleanup_required
+ backup.cleanup
+ end
+
puts "Warning: Your gitlab.rb and gitlab-secrets.json files contain sensitive data \n" \
"and are not included in this backup. You will need to restore these files manually.".color(:red)
puts "Restore task is done."
diff --git a/lib/tasks/gitlab/cleanup.rake b/lib/tasks/gitlab/cleanup.rake
index 63f5d7f2740..c26aa848d5a 100644
--- a/lib/tasks/gitlab/cleanup.rake
+++ b/lib/tasks/gitlab/cleanup.rake
@@ -13,7 +13,7 @@ namespace :gitlab do
print "#{user.name} (#{user.ldap_identity.extern_uid}) ..."
- if Gitlab::Auth::LDAP::Access.allowed?(user)
+ if Gitlab::Auth::Ldap::Access.allowed?(user)
puts " [OK]".color(:green)
else
if block_flag
diff --git a/lib/tasks/gitlab/graphql.rake b/lib/tasks/gitlab/graphql.rake
index c73691f3d45..5a583183924 100644
--- a/lib/tasks/gitlab/graphql.rake
+++ b/lib/tasks/gitlab/graphql.rake
@@ -8,13 +8,25 @@ namespace :gitlab do
OUTPUT_DIR = Rails.root.join("doc/api/graphql/reference")
TEMPLATES_DIR = 'lib/gitlab/graphql/docs/templates/'
+ # Enable all feature flags so that all feature-flag-controlled
+ # fields are considered visible and are included in the output.
+ # This also avoids pipeline failures when a developer dumps the
+ # schema locally with flags disabled before pushing.
+ task enable_feature_flags: :environment do
+ class Feature
+ def self.enabled?(*args)
+ true
+ end
+ end
+ end
+
# Defines tasks for dumping the GraphQL schema:
# - gitlab:graphql:schema:dump
# - gitlab:graphql:schema:idl
# - gitlab:graphql:schema:json
GraphQL::RakeTask.new(
schema_name: 'GitlabSchema',
- dependencies: [:environment],
+ dependencies: [:environment, :enable_feature_flags],
directory: OUTPUT_DIR,
idl_outfile: "gitlab_schema.graphql",
json_outfile: "gitlab_schema.json"
@@ -22,7 +34,7 @@ namespace :gitlab do
namespace :graphql do
desc 'GitLab | GraphQL | Generate GraphQL docs'
- task compile_docs: :environment do
+ task compile_docs: [:environment, :enable_feature_flags] do
renderer = Gitlab::Graphql::Docs::Renderer.new(GitlabSchema.graphql_definition, render_options)
renderer.write
@@ -31,7 +43,7 @@ namespace :gitlab do
end
desc 'GitLab | GraphQL | Check if GraphQL docs are up to date'
- task check_docs: :environment do
+ task check_docs: [:environment, :enable_feature_flags] do
renderer = Gitlab::Graphql::Docs::Renderer.new(GitlabSchema.graphql_definition, render_options)
doc = File.read(Rails.root.join(OUTPUT_DIR, 'index.md'))
@@ -45,7 +57,7 @@ namespace :gitlab do
end
desc 'GitLab | GraphQL | Check if GraphQL schemas are up to date'
- task check_schema: :environment do
+ task check_schema: [:environment, :enable_feature_flags] do
idl_doc = File.read(Rails.root.join(OUTPUT_DIR, 'gitlab_schema.graphql'))
json_doc = File.read(Rails.root.join(OUTPUT_DIR, 'gitlab_schema.json'))
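
With the added dependency, each schema/docs task runs the enable_feature_flags task first, so flag-gated fields appear in the generated output regardless of local flag state. For example:

    bundle exec rake gitlab:graphql:compile_docs
    bundle exec rake gitlab:graphql:check_docs
    bundle exec rake gitlab:graphql:schema:dump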
diff --git a/lib/tasks/gitlab/import_export/export.rake b/lib/tasks/gitlab/import_export/export.rake
new file mode 100644
index 00000000000..c9c212fbe4d
--- /dev/null
+++ b/lib/tasks/gitlab/import_export/export.rake
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+# Export project to archive
+#
+# @example
+# bundle exec rake "gitlab:import_export:export[root, root, project_to_export, /path/to/file.tar.gz, true]"
+#
+namespace :gitlab do
+ namespace :import_export do
+ desc 'GitLab | Import/Export | EXPERIMENTAL | Export large project archives'
+ task :export, [:username, :namespace_path, :project_path, :archive_path, :measurement_enabled] => :gitlab_environment do |_t, args|
+ # Load it here to avoid polluting Rake tasks with Sidekiq test warnings
+ require 'sidekiq/testing'
+
+ logger = Logger.new($stdout)
+
+ begin
+ warn_user_is_not_gitlab
+
+ if ENV['EXPORT_DEBUG'].present?
+ ActiveRecord::Base.logger = logger
+ logger.level = Logger::DEBUG
+ else
+ logger.level = Logger::INFO
+ end
+
+ task = Gitlab::ImportExport::Project::ExportTask.new(
+ namespace_path: args.namespace_path,
+ project_path: args.project_path,
+ username: args.username,
+ file_path: args.archive_path,
+ measurement_enabled: Gitlab::Utils.to_boolean(args.measurement_enabled),
+ logger: logger
+ )
+
+ success = task.export
+
+ exit(success)
+ rescue StandardError => e
+ logger.error "Exception: #{e.message}"
+ logger.debug e.backtrace
+ exit 1
+ end
+ end
+ end
+end
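
Setting EXPORT_DEBUG additionally routes ActiveRecord logging through the task's logger at DEBUG level; a hypothetical debug run, reusing the arguments from the @example above:

    EXPORT_DEBUG=1 bundle exec rake "gitlab:import_export:export[root, root, project_to_export, /path/to/file.tar.gz, true]"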
diff --git a/lib/tasks/gitlab/import_export/import.rake b/lib/tasks/gitlab/import_export/import.rake
index c832cba0287..7e2162a7774 100644
--- a/lib/tasks/gitlab/import_export/import.rake
+++ b/lib/tasks/gitlab/import_export/import.rake
@@ -16,195 +16,35 @@ namespace :gitlab do
# Load it here to avoid polluting Rake tasks with Sidekiq test warnings
require 'sidekiq/testing'
- warn_user_is_not_gitlab
+ logger = Logger.new($stdout)
- if ENV['IMPORT_DEBUG'].present?
- ActiveRecord::Base.logger = Logger.new(STDOUT)
- end
-
- GitlabProjectImport.new(
- namespace_path: args.namespace_path,
- project_path: args.project_path,
- username: args.username,
- file_path: args.archive_path,
- measurement_enabled: args.measurement_enabled == 'true'
- ).import
- end
- end
-end
-
-class GitlabProjectImport
- def initialize(opts)
- @project_path = opts.fetch(:project_path)
- @file_path = opts.fetch(:file_path)
- @namespace = Namespace.find_by_full_path(opts.fetch(:namespace_path))
- @current_user = User.find_by_username(opts.fetch(:username))
- @measurement_enabled = opts.fetch(:measurement_enabled)
- end
-
- def import
- show_import_start_message
-
- run_isolated_sidekiq_job
-
- show_import_failures_count
-
- if @project&.import_state&.last_error
- puts "ERROR: #{@project.import_state.last_error}"
- exit 1
- elsif @project.errors.any?
- puts "ERROR: #{@project.errors.full_messages.join(', ')}"
- exit 1
- else
- puts 'Done!'
- end
- rescue StandardError => e
- puts "Exception: #{e.message}"
- puts e.backtrace
- exit 1
- end
-
- private
-
- def with_request_store
- RequestStore.begin!
- yield
- ensure
- RequestStore.end!
- RequestStore.clear!
- end
-
- def with_count_queries(&block)
- count = 0
-
- counter_f = ->(name, started, finished, unique_id, payload) {
- unless payload[:name].in? %w[CACHE SCHEMA]
- count += 1
- end
- }
-
- ActiveSupport::Notifications.subscribed(counter_f, "sql.active_record", &block)
-
- puts "Number of sql calls: #{count}"
- end
-
- def with_gc_counter
- gc_counts_before = GC.stat.select { |k, v| k =~ /count/ }
- yield
- gc_counts_after = GC.stat.select { |k, v| k =~ /count/ }
- stats = gc_counts_before.merge(gc_counts_after) { |k, vb, va| va - vb }
- puts "Total GC count: #{stats[:count]}"
- puts "Minor GC count: #{stats[:minor_gc_count]}"
- puts "Major GC count: #{stats[:major_gc_count]}"
- end
-
- def with_measure_time
- timing = Benchmark.realtime do
- yield
- end
-
- time = Time.at(timing).utc.strftime("%H:%M:%S")
- puts "Time to finish: #{time}"
- end
-
- def with_measuring
- puts "Measuring enabled..."
- with_gc_counter do
- with_count_queries do
- with_measure_time do
- yield
- end
- end
- end
- end
-
- def measurement_enabled?
- @measurement_enabled != false
- end
+ begin
+ warn_user_is_not_gitlab
- # We want to ensure that all Sidekiq jobs are executed
- # synchronously as part of that process.
- # This ensures that all expensive operations do not escape
- # to general Sidekiq clusters/nodes.
- def with_isolated_sidekiq_job
- Sidekiq::Testing.fake! do
- with_request_store do
- # If you are attempting to import a large project into a development environment,
- # you may see Gitaly throw an error about too many calls or invocations.
- # This is due to a n+1 calls limit being set for development setups (not enforced in production)
- # https://gitlab.com/gitlab-org/gitlab/-/merge_requests/24475#note_283090635
- # For development setups, this code-path will be excluded from n+1 detection.
- ::Gitlab::GitalyClient.allow_n_plus_1_calls do
- measurement_enabled? ? with_measuring { yield } : yield
+ if ENV['IMPORT_DEBUG'].present?
+ ActiveRecord::Base.logger = logger
+ logger.level = Logger::DEBUG
+ else
+ logger.level = Logger::INFO
end
- end
-
- true
- end
- end
-
- def run_isolated_sidekiq_job
- with_isolated_sidekiq_job do
- @project = create_project
-
- execute_sidekiq_job
- end
- end
-
- def create_project
- # We are disabling ObjectStorage for `import`
- # as it is too slow to handle big archives:
- # 1. DB transaction timeouts on upload
- # 2. Download of archive before unpacking
- disable_upload_object_storage do
- service = Projects::GitlabProjectsImportService.new(
- @current_user,
- {
- namespace_id: @namespace.id,
- path: @project_path,
- file: File.open(@file_path)
- }
- )
-
- service.execute
- end
- end
-
- def execute_sidekiq_job
- Sidekiq::Worker.drain_all
- end
- def disable_upload_object_storage
- overwrite_uploads_setting('background_upload', false) do
- overwrite_uploads_setting('direct_upload', false) do
- yield
+ task = Gitlab::ImportExport::Project::ImportTask.new(
+ namespace_path: args.namespace_path,
+ project_path: args.project_path,
+ username: args.username,
+ file_path: args.archive_path,
+ measurement_enabled: Gitlab::Utils.to_boolean(args.measurement_enabled),
+ logger: logger
+ )
+
+ success = task.import
+
+ exit(success)
+ rescue StandardError => e
+ logger.error "Exception: #{e.message}"
+ logger.debug e.backtrace
+ exit 1
end
end
end
-
- def overwrite_uploads_setting(key, value)
- old_value = Settings.uploads.object_store[key]
- Settings.uploads.object_store[key] = value
-
- yield
-
- ensure
- Settings.uploads.object_store[key] = old_value
- end
-
- def full_path
- "#{@namespace.full_path}/#{@project_path}"
- end
-
- def show_import_start_message
- puts "Importing GitLab export: #{@file_path} into GitLab" \
- " #{full_path}" \
- " as #{@current_user.name}"
- end
-
- def show_import_failures_count
- return unless @project.import_failures.exists?
-
- puts "Total number of not imported relations: #{@project.import_failures.count}"
- end
end
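
The rewritten task delegates to Gitlab::ImportExport::Project::ImportTask and mirrors the export task's interface; a hypothetical invocation (argument order assumed to match the export task):

    bundle exec rake "gitlab:import_export:import[root, root, imported_project, /path/to/file.tar.gz, false]"
    # IMPORT_DEBUG=1 additionally routes ActiveRecord logging through the task's logger at DEBUG level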
diff --git a/lib/tasks/gitlab/info.rake b/lib/tasks/gitlab/info.rake
index 5809f632c5a..d85c8fc7949 100644
--- a/lib/tasks/gitlab/info.rake
+++ b/lib/tasks/gitlab/info.rake
@@ -82,15 +82,10 @@ namespace :gitlab do
puts "Using Omniauth:\t#{Gitlab::Auth.omniauth_enabled? ? "yes".color(:green) : "no"}"
puts "Omniauth Providers: #{omniauth_providers.join(', ')}" if Gitlab::Auth.omniauth_enabled?
- # check Gitolite version
- gitlab_shell_version_file = "#{Gitlab.config.gitlab_shell.path}/VERSION"
- if File.readable?(gitlab_shell_version_file)
- gitlab_shell_version = File.read(gitlab_shell_version_file)
- end
-
+ # check GitLab Shell version
puts ""
puts "GitLab Shell".color(:yellow)
- puts "Version:\t#{gitlab_shell_version || "unknown".color(:red)}"
+ puts "Version:\t#{Gitlab::Shell.version || "unknown".color(:red)}"
puts "Repository storage paths:"
Gitlab::GitalyClient::StorageSettings.allow_disk_access do
Gitlab.config.repositories.storages.each do |name, repository_storage|
diff --git a/lib/tasks/gitlab/shell.rake b/lib/tasks/gitlab/shell.rake
index ba3e19caf3b..6586699f8ba 100644
--- a/lib/tasks/gitlab/shell.rake
+++ b/lib/tasks/gitlab/shell.rake
@@ -89,10 +89,12 @@ namespace :gitlab do
puts ""
end
- Gitlab::Shell.new.remove_all_keys
+ authorized_keys = Gitlab::AuthorizedKeys.new
+
+ authorized_keys.clear
Key.find_in_batches(batch_size: 1000) do |keys|
- unless Gitlab::Shell.new.batch_add_keys(keys)
+ unless authorized_keys.batch_add_keys(keys)
puts "Failed to add keys...".color(:red)
exit 1
end
@@ -103,7 +105,7 @@ namespace :gitlab do
end
def ensure_write_to_authorized_keys_is_enabled
- return if Gitlab::CurrentSettings.current_application_settings.authorized_keys_enabled
+ return if Gitlab::CurrentSettings.authorized_keys_enabled?
puts authorized_keys_is_disabled_warning
@@ -113,7 +115,7 @@ namespace :gitlab do
end
puts 'Enabling the "Write to authorized_keys file" setting...'
- Gitlab::CurrentSettings.current_application_settings.update!(authorized_keys_enabled: true)
+ Gitlab::CurrentSettings.update!(authorized_keys_enabled: true)
puts 'Successfully enabled "Write to authorized_keys file"!'
puts ''
diff --git a/lib/tasks/gitlab/uploads/migrate.rake b/lib/tasks/gitlab/uploads/migrate.rake
index 44536a447c7..879b07da1df 100644
--- a/lib/tasks/gitlab/uploads/migrate.rake
+++ b/lib/tasks/gitlab/uploads/migrate.rake
@@ -3,7 +3,7 @@ namespace :gitlab do
namespace :migrate do
desc "GitLab | Uploads | Migrate all uploaded files to object storage"
task all: :environment do
- Gitlab::Uploads::MigrationHelper::CATEGORIES.each do |args|
+ Gitlab::Uploads::MigrationHelper.categories.each do |args|
Rake::Task["gitlab:uploads:migrate"].invoke(*args)
Rake::Task["gitlab:uploads:migrate"].reenable
end
@@ -20,7 +20,7 @@ namespace :gitlab do
namespace :migrate_to_local do
desc "GitLab | Uploads | Migrate all uploaded files to local storage"
task all: :environment do
- Gitlab::Uploads::MigrationHelper::CATEGORIES.each do |args|
+ Gitlab::Uploads::MigrationHelper.categories.each do |args|
Rake::Task["gitlab:uploads:migrate_to_local"].invoke(*args)
Rake::Task["gitlab:uploads:migrate_to_local"].reenable
end
diff --git a/lib/tasks/sidekiq.rake b/lib/tasks/sidekiq.rake
index e281ebd5d60..d74878835fd 100644
--- a/lib/tasks/sidekiq.rake
+++ b/lib/tasks/sidekiq.rake
@@ -33,6 +33,6 @@ namespace :sidekiq do
task :launchd do
deprecation_warning!
- system(*%w(bin/background_jobs start_no_deamonize))
+ system(*%w(bin/background_jobs start_silent))
end
end
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index e1e04c38804..e0a6eda2a0f 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -68,6 +68,16 @@ msgstr ""
msgid "\"%{path}\" did not exist on \"%{ref}\""
msgstr ""
+msgid "%d changed file"
+msgid_plural "%d changed files"
+msgstr[0] ""
+msgstr[1] ""
+
+msgid "%d code quality issue"
+msgid_plural "%d code quality issues"
+msgstr[0] ""
+msgstr[1] ""
+
msgid "%d comment"
msgid_plural "%d comments"
msgstr[0] ""
@@ -96,13 +106,18 @@ msgid_plural "%d contributions"
msgstr[0] ""
msgstr[1] ""
+msgid "%d error"
+msgid_plural "%d errors"
+msgstr[0] ""
+msgstr[1] ""
+
msgid "%d exporter"
msgid_plural "%d exporters"
msgstr[0] ""
msgstr[1] ""
-msgid "%d failed/error test result"
-msgid_plural "%d failed/error test results"
+msgid "%d failed"
+msgid_plural "%d failed"
msgstr[0] ""
msgstr[1] ""
@@ -111,6 +126,11 @@ msgid_plural "%d fixed test results"
msgstr[0] ""
msgstr[1] ""
+msgid "%d group selected"
+msgid_plural "%d groups selected"
+msgstr[0] ""
+msgstr[1] ""
+
msgid "%d inaccessible merge request"
msgid_plural "%d inaccessible merge requests"
msgstr[0] ""
@@ -171,8 +191,8 @@ msgid_plural "%d seconds"
msgstr[0] ""
msgstr[1] ""
-msgid "%d staged change"
-msgid_plural "%d staged changes"
+msgid "%d shard selected"
+msgid_plural "%d shards selected"
msgstr[0] ""
msgstr[1] ""
@@ -181,8 +201,8 @@ msgid_plural "%d tags"
msgstr[0] ""
msgstr[1] ""
-msgid "%d unstaged change"
-msgid_plural "%d unstaged changes"
+msgid "%d vulnerability dismissed"
+msgid_plural "%d vulnerabilities dismissed"
msgstr[0] ""
msgstr[1] ""
@@ -197,9 +217,15 @@ msgstr ""
msgid "%{authorsName}'s thread"
msgstr ""
+msgid "%{buy_now_link_start}Buy now!%{link_end}"
+msgstr ""
+
msgid "%{commit_author_link} authored %{commit_timeago}"
msgstr ""
+msgid "%{completedWeight} of %{totalWeight} weight completed"
+msgstr ""
+
msgid "%{cores} cores"
msgstr ""
@@ -310,6 +336,9 @@ msgstr ""
msgid "%{level_name} is not allowed since the fork source project has lower visibility."
msgstr ""
+msgid "%{lineOneStart}Drag and drop to upload your designs%{lineOneEnd} or %{linkStart}click to upload%{linkEnd}."
+msgstr ""
+
msgid "%{link_start}Read more%{link_end} about role permissions"
msgstr ""
@@ -337,6 +366,9 @@ msgstr ""
msgid "%{name} found %{resultsString}"
msgstr ""
+msgid "%{name} is scheduled for %{action}"
+msgstr ""
+
msgid "%{name}'s avatar"
msgstr ""
@@ -381,10 +413,7 @@ msgstr ""
msgid "%{screenreaderOnlyStart}Keyboard shorcuts%{screenreaderOnlyEnd} Enabled"
msgstr ""
-msgid "%{service_title} activated."
-msgstr ""
-
-msgid "%{service_title} settings saved, but not activated."
+msgid "%{service_title} %{message}."
msgstr ""
msgid "%{size} GiB"
@@ -408,9 +437,6 @@ msgstr ""
msgid "%{spanStart}in%{spanEnd} %{errorFn}"
msgstr ""
-msgid "%{staged} staged and %{unstaged} unstaged changes"
-msgstr ""
-
msgid "%{start} to %{end}"
msgstr ""
@@ -469,6 +495,9 @@ msgstr ""
msgid "%{title} changes"
msgstr ""
+msgid "%{token}..."
+msgstr ""
+
msgid "%{totalWeight} total weight"
msgstr ""
@@ -534,6 +563,12 @@ msgstr ""
msgid "(removed)"
msgstr ""
+msgid "(revoked)"
+msgstr ""
+
+msgid "*"
+msgstr ""
+
msgid "+ %{amount} more"
msgstr ""
@@ -574,6 +609,9 @@ msgid_plural "- Users"
msgstr[0] ""
msgstr[1] ""
+msgid "- of - weight completed"
+msgstr ""
+
msgid "- show less"
msgstr ""
@@ -673,9 +711,6 @@ msgstr ""
msgid "20-29 contributions"
msgstr ""
-msgid "24 hours"
-msgstr ""
-
msgid "2FA"
msgstr ""
@@ -688,9 +723,6 @@ msgstr ""
msgid "3 hours"
msgstr ""
-msgid "30 days"
-msgstr ""
-
msgid "30 minutes"
msgstr ""
@@ -712,9 +744,6 @@ msgstr ""
msgid "404|Please contact your GitLab administrator if you think this is a mistake."
msgstr ""
-msgid "7 days"
-msgstr ""
-
msgid "8 hours"
msgstr ""
@@ -742,9 +771,6 @@ msgstr ""
msgid "<strong>%{group_name}</strong> group members"
msgstr ""
-msgid "<strong>%{stagedFilesLength} staged</strong> and <strong>%{changedFilesLength} unstaged</strong> changes"
-msgstr ""
-
msgid "<strong>Deletes</strong> source branch"
msgstr ""
@@ -835,6 +861,9 @@ msgstr ""
msgid "A secure token that identifies an external storage request."
msgstr ""
+msgid "A subscription will trigger a new pipeline on the default branch of this project when a pipeline successfully completes for a new tag on the %{default_branch_docs} of the subscribed project."
+msgstr ""
+
msgid "A user with write access to the source branch selected this option"
msgstr ""
@@ -1044,6 +1073,9 @@ msgstr ""
msgid "Add README"
msgstr ""
+msgid "Add Variable"
+msgstr ""
+
msgid "Add Zoom meeting"
msgstr ""
@@ -1113,6 +1145,9 @@ msgstr ""
msgid "Add email address"
msgstr ""
+msgid "Add environment"
+msgstr ""
+
msgid "Add header and footer to emails. Please note that color settings will only be applied within the application interface"
msgstr ""
@@ -1176,6 +1211,9 @@ msgstr ""
msgid "Add users to group"
msgstr ""
+msgid "Add variable"
+msgstr ""
+
msgid "Add webhook"
msgstr ""
@@ -1257,6 +1295,12 @@ msgstr ""
msgid "Admin notes"
msgstr ""
+msgid "AdminArea|Bots"
+msgstr ""
+
+msgid "AdminArea|Included Free in license"
+msgstr ""
+
msgid "AdminArea|Stop all jobs"
msgstr ""
@@ -1269,6 +1313,15 @@ msgstr ""
msgid "AdminArea|Stopping jobs failed"
msgstr ""
+msgid "AdminArea|Users statistics"
+msgstr ""
+
+msgid "AdminArea|Users total"
+msgstr ""
+
+msgid "AdminArea|Users with highest role"
+msgstr ""
+
msgid "AdminArea|You’re about to stop all jobs.This will halt all current jobs that are running."
msgstr ""
@@ -1491,13 +1544,13 @@ msgstr ""
msgid "Advanced"
msgstr ""
-msgid "Advanced permissions, Large File Storage and Two-Factor authentication settings."
+msgid "Advanced Settings"
msgstr ""
-msgid "Advanced search functionality"
+msgid "Advanced permissions, Large File Storage and Two-Factor authentication settings."
msgstr ""
-msgid "Advanced settings"
+msgid "Advanced search functionality"
msgstr ""
msgid "After a successful password update you will be redirected to login screen."
@@ -1529,6 +1582,9 @@ msgstr ""
msgid "All"
msgstr ""
+msgid "All %{replicableType} are being scheduled for %{action}"
+msgstr ""
+
msgid "All Members"
msgstr ""
@@ -1544,6 +1600,9 @@ msgstr ""
msgid "All email addresses will be used to identify your commits."
msgstr ""
+msgid "All environments"
+msgstr ""
+
msgid "All features are enabled for blank projects, from templates, or when importing, but you can disable them afterward in the project settings."
msgstr ""
@@ -1616,6 +1675,9 @@ msgstr ""
msgid "Allow this secondary node to replicate content on Object Storage"
msgstr ""
+msgid "Allow users to dismiss the broadcast message"
+msgstr ""
+
msgid "Allow users to register any application to use GitLab as an OAuth provider"
msgstr ""
@@ -1643,6 +1705,9 @@ msgstr ""
msgid "Alternate support URL for help page and help dropdown"
msgstr ""
+msgid "Alternatively, you can convert your account to a managed account by the %{group_name} group."
+msgstr ""
+
msgid "Amazon EKS"
msgstr ""
@@ -1658,6 +1723,9 @@ msgstr ""
msgid "Amount of time (in hours) that users are allowed to skip forced configuration of two-factor authentication"
msgstr ""
+msgid "An alert has been triggered in %{project_path}."
+msgstr ""
+
msgid "An application called %{link_to_client} is requesting access to your GitLab account."
msgstr ""
@@ -1700,12 +1768,18 @@ msgstr ""
msgid "An error occurred when updating the issue weight"
msgstr ""
+msgid "An error occurred while adding formatted title for epic"
+msgstr ""
+
msgid "An error occurred while checking group path"
msgstr ""
msgid "An error occurred while committing your changes."
msgstr ""
+msgid "An error occurred while decoding the file."
+msgstr ""
+
msgid "An error occurred while deleting the approvers group"
msgstr ""
@@ -1730,6 +1804,9 @@ msgstr ""
msgid "An error occurred while enabling Service Desk."
msgstr ""
+msgid "An error occurred while fetching coverage reports."
+msgstr ""
+
msgid "An error occurred while fetching environments."
msgstr ""
@@ -1811,6 +1888,9 @@ msgstr ""
msgid "An error occurred while loading chart data"
msgstr ""
+msgid "An error occurred while loading clusters"
+msgstr ""
+
msgid "An error occurred while loading commit signatures"
msgstr ""
@@ -1844,6 +1924,9 @@ msgstr ""
msgid "An error occurred while loading the file."
msgstr ""
+msgid "An error occurred while loading the file. Please try again later."
+msgstr ""
+
msgid "An error occurred while loading the merge request changes."
msgstr ""
@@ -1868,6 +1951,9 @@ msgstr ""
msgid "An error occurred while parsing recent searches"
msgstr ""
+msgid "An error occurred while parsing the file."
+msgstr ""
+
msgid "An error occurred while removing epics."
msgstr ""
@@ -1952,10 +2038,10 @@ msgstr ""
msgid "Analyze a review version of your web application."
msgstr ""
-msgid "Analyze your dependencies for known vulnerabilities"
+msgid "Analyze your dependencies for known vulnerabilities."
msgstr ""
-msgid "Analyze your source code for known vulnerabilities"
+msgid "Analyze your source code for known vulnerabilities."
msgstr ""
msgid "Ancestors"
@@ -1964,6 +2050,12 @@ msgstr ""
msgid "Anonymous"
msgstr ""
+msgid "Another action is currently in progress"
+msgstr ""
+
+msgid "Another issue tracker is already in use. Only one issue tracker service can be active at a time"
+msgstr ""
+
msgid "Anti-spam verification"
msgstr ""
@@ -2021,6 +2113,9 @@ msgstr ""
msgid "Application settings saved successfully"
msgstr ""
+msgid "Application settings update failed"
+msgstr ""
+
msgid "Application uninstalled but failed to destroy: %{error_message}"
msgstr ""
@@ -2203,6 +2298,9 @@ msgstr ""
msgid "Are you sure you want to delete this pipeline? Doing so will expire all pipeline caches and delete all related objects, such as builds, logs, artifacts, and triggers. This action cannot be undone."
msgstr ""
+msgid "Are you sure you want to deploy this environment?"
+msgstr ""
+
msgid "Are you sure you want to erase this build?"
msgstr ""
@@ -2218,6 +2316,9 @@ msgstr ""
msgid "Are you sure you want to permanently delete this license?"
msgstr ""
+msgid "Are you sure you want to re-deploy this environment?"
+msgstr ""
+
msgid "Are you sure you want to regenerate the public key? You will have to update the public key on the remote server before mirroring will work again."
msgstr ""
@@ -2305,6 +2406,12 @@ msgstr ""
msgid "Ascending"
msgstr ""
+msgid "Ask an admin to upload a new license to ensure uninterrupted service."
+msgstr ""
+
+msgid "Ask an admin to upload a new license to restore service."
+msgstr ""
+
msgid "Ask your group maintainer to set up a group Runner."
msgstr ""
@@ -2379,6 +2486,9 @@ msgstr ""
msgid "At least one of group_id or project_id must be specified"
msgstr ""
+msgid "At risk"
+msgstr ""
+
msgid "Attach a file"
msgstr ""
@@ -2426,6 +2536,9 @@ msgstr ""
msgid "Authenticating"
msgstr ""
+msgid "Authentication Failure"
+msgstr ""
+
msgid "Authentication Log"
msgstr ""
@@ -3059,9 +3172,6 @@ msgstr ""
msgid "CI / CD"
msgstr ""
-msgid "CI / CD Analytics"
-msgstr ""
-
msgid "CI / CD Charts"
msgstr ""
@@ -3155,6 +3265,9 @@ msgstr ""
msgid "Can't find variable: ZiteReader"
msgstr ""
+msgid "Can't load mermaid module: %{err}"
+msgstr ""
+
msgid "Can't remove group members without group managed account"
msgstr ""
@@ -3197,10 +3310,10 @@ msgstr ""
msgid "Cannot modify managed Kubernetes cluster"
msgstr ""
-msgid "Cannot refer to a group milestone by an internal id!"
+msgid "Cannot modify provider during creation"
msgstr ""
-msgid "Cannot render the image. Maximum character count (%{charLimit}) has been exceeded."
+msgid "Cannot refer to a group milestone by an internal id!"
msgstr ""
msgid "Cannot show preview. For previews on sketch files, they must have the file format introduced by Sketch version 43 and above."
@@ -3305,9 +3418,6 @@ msgstr ""
msgid "Changing group path can have unintended side effects."
msgstr ""
-msgid "Charts"
-msgstr ""
-
msgid "Charts can't be displayed as the request for data has timed out. %{documentationLink}"
msgstr ""
@@ -3377,7 +3487,7 @@ msgstr ""
msgid "Check your .gitlab-ci.yml"
msgstr ""
-msgid "Check your Docker images for known vulnerabilities"
+msgid "Check your Docker images for known vulnerabilities."
msgstr ""
msgid "Checking %{text} availability…"
@@ -3449,6 +3559,9 @@ msgstr ""
msgid "Checkout|Country"
msgstr ""
+msgid "Checkout|Create a new group"
+msgstr ""
+
msgid "Checkout|Credit card form failed to load. Please try again."
msgstr ""
@@ -3476,6 +3589,9 @@ msgstr ""
msgid "Checkout|Failed to register credit card. Please try again."
msgstr ""
+msgid "Checkout|GitLab group"
+msgstr ""
+
msgid "Checkout|GitLab plan"
msgstr ""
@@ -3500,6 +3616,9 @@ msgstr ""
msgid "Checkout|Please select a state"
msgstr ""
+msgid "Checkout|Select"
+msgstr ""
+
msgid "Checkout|State"
msgstr ""
@@ -3524,9 +3643,15 @@ msgstr ""
msgid "Checkout|Users"
msgstr ""
+msgid "Checkout|You'll create your new group after checkout"
+msgstr ""
+
msgid "Checkout|Your organization"
msgstr ""
+msgid "Checkout|Your subscription will be applied to this group"
+msgstr ""
+
msgid "Checkout|Zip code"
msgstr ""
@@ -3593,6 +3718,9 @@ msgstr ""
msgid "Choose what content you want to see on a group’s overview page"
msgstr ""
+msgid "Choose which groups you wish to synchronize to this secondary node"
+msgstr ""
+
msgid "Choose which repositories you want to connect and run CI/CD pipelines."
msgstr ""
@@ -3602,6 +3730,9 @@ msgstr ""
msgid "Choose which shards you wish to synchronize to this secondary node."
msgstr ""
+msgid "Choose which status most accurately reflects the current state of this issue:"
+msgstr ""
+
msgid "CiStatusLabel|canceled"
msgstr ""
@@ -3680,6 +3811,9 @@ msgstr ""
msgid "CiVariables|Cannot use Masked Variable with current value"
msgstr ""
+msgid "CiVariables|Environments"
+msgstr ""
+
msgid "CiVariables|Input variable key"
msgstr ""
@@ -3692,6 +3826,9 @@ msgstr ""
msgid "CiVariables|Masked"
msgstr ""
+msgid "CiVariables|Protected"
+msgstr ""
+
msgid "CiVariables|Remove variable row"
msgstr ""
@@ -3866,6 +4003,9 @@ msgstr ""
msgid "Closed this %{quick_action_target}."
msgstr ""
+msgid "Closed: %{closedIssuesCount}"
+msgstr ""
+
msgid "Closes this %{quick_action_target}."
msgstr ""
@@ -3875,6 +4015,12 @@ msgstr ""
msgid "Cluster cache cleared."
msgstr ""
+msgid "Cluster does not exist"
+msgstr ""
+
+msgid "Cluster level"
+msgstr ""
+
msgid "ClusterIntegration| %{custom_domain_start}More information%{custom_domain_end}."
msgstr ""
@@ -3890,6 +4036,9 @@ msgstr ""
msgid "ClusterIntegration|%{appList} was successfully installed on your Kubernetes cluster"
msgstr ""
+msgid "ClusterIntegration|%{external_ip}.nip.io"
+msgstr ""
+
msgid "ClusterIntegration|%{title} uninstalled successfully."
msgstr ""
@@ -3926,7 +4075,7 @@ msgstr ""
msgid "ClusterIntegration|Adding an integration will share the cluster across all projects."
msgstr ""
-msgid "ClusterIntegration|Advanced options on this Kubernetes cluster's integration"
+msgid "ClusterIntegration|Advanced options on this Kubernetes cluster’s integration"
msgstr ""
msgid "ClusterIntegration|All data not committed to GitLab will be deleted and cannot be restored."
@@ -3962,9 +4111,6 @@ msgstr ""
msgid "ClusterIntegration|Any running pipelines will be canceled."
msgstr ""
-msgid "ClusterIntegration|Applications"
-msgstr ""
-
msgid "ClusterIntegration|Apply for credit"
msgstr ""
@@ -4016,9 +4162,6 @@ msgstr ""
msgid "ClusterIntegration|Cluster being created"
msgstr ""
-msgid "ClusterIntegration|Cluster health"
-msgstr ""
-
msgid "ClusterIntegration|Cluster management project (alpha)"
msgstr ""
@@ -4118,9 +4261,6 @@ msgstr ""
msgid "ClusterIntegration|Enable Cloud Run for Anthos"
msgstr ""
-msgid "ClusterIntegration|Enable Web Application Firewall"
-msgstr ""
-
msgid "ClusterIntegration|Enable or disable GitLab's connection to your Kubernetes cluster."
msgstr ""
@@ -4211,7 +4351,7 @@ msgstr ""
msgid "ClusterIntegration|If you do not wish to delete all associated GitLab resources, you can simply remove the integration."
msgstr ""
-msgid "ClusterIntegration|In order to view the health of your cluster, you must first install Prometheus below."
+msgid "ClusterIntegration|In order to view the health of your cluster, you must first install Prometheus in the Applications tab."
msgstr ""
msgid "ClusterIntegration|Ingress"
@@ -4235,9 +4375,6 @@ msgstr ""
msgid "ClusterIntegration|Integrate Kubernetes cluster automation"
msgstr ""
-msgid "ClusterIntegration|Integration status"
-msgstr ""
-
msgid "ClusterIntegration|Issuer Email"
msgstr ""
@@ -4274,9 +4411,6 @@ msgstr ""
msgid "ClusterIntegration|Kubernetes cluster"
msgstr ""
-msgid "ClusterIntegration|Kubernetes cluster details"
-msgstr ""
-
msgid "ClusterIntegration|Kubernetes cluster is being created..."
msgstr ""
@@ -4304,9 +4438,6 @@ msgstr ""
msgid "ClusterIntegration|Learn more about %{help_link_start}zones%{help_link_end}."
msgstr ""
-msgid "ClusterIntegration|Learn more about %{startLink}ModSecurity%{endLink}"
-msgstr ""
-
msgid "ClusterIntegration|Learn more about %{startLink}Regions %{externalLinkIcon}%{endLink}."
msgstr ""
@@ -4427,6 +4558,9 @@ msgstr ""
msgid "ClusterIntegration|Prometheus is an open-source monitoring system with %{gitlabIntegrationLink} to monitor deployed applications."
msgstr ""
+msgid "ClusterIntegration|Provider details"
+msgstr ""
+
msgid "ClusterIntegration|Provision Role ARN"
msgstr ""
@@ -4436,6 +4570,9 @@ msgstr ""
msgid "ClusterIntegration|Read our %{link_start}help page%{link_end} on Kubernetes cluster integration."
msgstr ""
+msgid "ClusterIntegration|Real-time web application monitoring, logging and access control. %{linkStart}More information%{linkEnd}"
+msgstr ""
+
msgid "ClusterIntegration|Region"
msgstr ""
@@ -4583,6 +4720,9 @@ msgstr ""
msgid "ClusterIntegration|Something went wrong while installing %{title}"
msgstr ""
+msgid "ClusterIntegration|Something went wrong while trying to save your settings. Please try again."
+msgstr ""
+
msgid "ClusterIntegration|Something went wrong while uninstalling %{title}"
msgstr ""
@@ -4622,7 +4762,7 @@ msgstr ""
msgid "ClusterIntegration|The endpoint is in the process of being assigned. Please check your Kubernetes cluster or Quotas on Google Kubernetes Engine if it takes a long time."
msgstr ""
-msgid "ClusterIntegration|The namespace associated with your project. This will be used for deploy boards, pod logs, and Web terminals."
+msgid "ClusterIntegration|The namespace associated with your project. This will be used for deploy boards, logs, and Web terminals."
msgstr ""
msgid "ClusterIntegration|There was a problem authenticating with your cluster. Please ensure your CA Certificate and Token are valid."
@@ -4685,6 +4825,9 @@ msgstr ""
msgid "ClusterIntegration|You must have an RBAC-enabled cluster to install Knative."
msgstr ""
+msgid "ClusterIntegration|You should select at least two subnets"
+msgstr ""
+
msgid "ClusterIntegration|Your account must have %{link_to_kubernetes_engine}"
msgstr ""
@@ -4751,9 +4894,15 @@ msgstr ""
msgid "Code Owners to the merge request changes."
msgstr ""
+msgid "Code Quality"
+msgstr ""
+
msgid "Code Review"
msgstr ""
+msgid "Code Review Analytics displays a table of open merge requests considered to be in code review. There are currently no merge requests in review for this project and/or filters."
+msgstr ""
+
msgid "Code owner approval is required"
msgstr ""
@@ -4900,9 +5049,6 @@ msgstr ""
msgid "Commits|An error occurred while fetching merge requests data."
msgstr ""
-msgid "Commits|Commit: %{commitText}"
-msgstr ""
-
msgid "Commits|History"
msgstr ""
@@ -5023,6 +5169,9 @@ msgstr ""
msgid "Configure the %{link} integration."
msgstr ""
+msgid "Configure the Jira integration first on your project's %{strong_start} Settings > Integrations > Jira%{strong_end} page."
+msgstr ""
+
msgid "Configure the way a user creates a new account."
msgstr ""
@@ -5050,6 +5199,9 @@ msgstr ""
msgid "Connect your external repositories, and CI/CD pipelines will run for new commits. A GitLab project will be created with only CI/CD features enabled."
msgstr ""
+msgid "Connected"
+msgstr ""
+
msgid "Connecting"
msgstr ""
@@ -5131,6 +5283,9 @@ msgstr ""
msgid "ContainerRegistry|Docker tags with names matching this regex pattern will expire:"
msgstr ""
+msgid "ContainerRegistry|Edit Settings"
+msgstr ""
+
msgid "ContainerRegistry|Expiration interval:"
msgstr ""
@@ -5149,6 +5304,9 @@ msgstr ""
msgid "ContainerRegistry|Image ID"
msgstr ""
+msgid "ContainerRegistry|Image deleted successfully"
+msgstr ""
+
msgid "ContainerRegistry|Keep and protect the images that matter most."
msgstr ""
@@ -5175,27 +5333,57 @@ msgid_plural "ContainerRegistry|Remove tags"
msgstr[0] ""
msgstr[1] ""
+msgid "ContainerRegistry|Retention policy has been Enabled"
+msgstr ""
+
msgid "ContainerRegistry|Size"
msgstr ""
+msgid "ContainerRegistry|Something went wrong while deleting the image."
+msgstr ""
+
+msgid "ContainerRegistry|Something went wrong while deleting the tag."
+msgstr ""
+
+msgid "ContainerRegistry|Something went wrong while deleting the tags."
+msgstr ""
+
msgid "ContainerRegistry|Something went wrong while fetching the expiration policy."
msgstr ""
+msgid "ContainerRegistry|Something went wrong while fetching the packages list."
+msgstr ""
+
+msgid "ContainerRegistry|Something went wrong while fetching the tags list."
+msgstr ""
+
msgid "ContainerRegistry|Something went wrong while updating the expiration policy."
msgstr ""
msgid "ContainerRegistry|Tag"
msgstr ""
+msgid "ContainerRegistry|Tag deleted successfully"
+msgstr ""
+
msgid "ContainerRegistry|Tag expiration policy"
msgstr ""
msgid "ContainerRegistry|Tag expiration policy is designed to:"
msgstr ""
+msgid "ContainerRegistry|Tags deleted successfully"
+msgstr ""
+
msgid "ContainerRegistry|The last tag related to this image was recently removed. This empty image and any associated data will be automatically removed as part of the regular Garbage Collection process. If you have any questions, contact your administrator."
msgstr ""
+msgid "ContainerRegistry|The retention and expiration policy for this Container Registry has been enabled and will run in %{days}. For more information visit the %{linkStart}documentation%{linkEnd}"
+msgstr ""
+
+msgid "ContainerRegistry|The retention and expiration policy for this Container Registry has been enabled. For more information visit the %{linkStart}documentation%{linkEnd}"
+msgstr ""
+
msgid "ContainerRegistry|The value of this input should be less than 255 characters"
msgstr ""
@@ -5394,6 +5582,12 @@ msgstr ""
msgid "Copy commit SHA"
msgstr ""
+msgid "Copy environment"
+msgstr ""
+
+msgid "Copy evidence SHA"
+msgstr ""
+
msgid "Copy file contents"
msgstr ""
@@ -5403,6 +5597,9 @@ msgstr ""
msgid "Copy impersonation token"
msgstr ""
+msgid "Copy key"
+msgstr ""
+
msgid "Copy labels and milestone from %{source_issuable_reference}."
msgstr ""
@@ -5427,6 +5624,9 @@ msgstr ""
msgid "Copy trigger token"
msgstr ""
+msgid "Copy value"
+msgstr ""
+
msgid "Could not add admins as members"
msgstr ""
@@ -5439,6 +5639,9 @@ msgstr ""
msgid "Could not connect to FogBugz, check your URL"
msgstr ""
+msgid "Could not connect to Sentry. Refresh the page to try again."
+msgstr ""
+
msgid "Could not connect to Web IDE file mirror service."
msgstr ""
@@ -5451,9 +5654,6 @@ msgstr ""
msgid "Could not create group"
msgstr ""
-msgid "Could not create issue"
-msgstr ""
-
msgid "Could not create project"
msgstr ""
@@ -5463,9 +5663,6 @@ msgstr ""
msgid "Could not delete chat nickname %{chat_name}."
msgstr ""
-msgid "Could not fetch projects"
-msgstr ""
-
msgid "Could not find design"
msgstr ""
@@ -5490,6 +5687,9 @@ msgstr ""
msgid "Could not save prometheus manual configuration"
msgstr ""
+msgid "Could not upload your designs as one or more files uploaded are not supported."
+msgstr ""
+
msgid "Country"
msgstr ""
@@ -5499,6 +5699,9 @@ msgstr ""
msgid "Create"
msgstr ""
+msgid "Create %{environment}"
+msgstr ""
+
msgid "Create %{type} token"
msgstr ""
@@ -5658,12 +5861,6 @@ msgstr ""
msgid "Created a branch and a merge request to resolve this issue."
msgstr ""
-msgid "Created after"
-msgstr ""
-
-msgid "Created before"
-msgstr ""
-
msgid "Created branch '%{branch_name}' and a merge request to resolve this issue."
msgstr ""
@@ -5739,6 +5936,9 @@ msgstr ""
msgid "Current node"
msgstr ""
+msgid "Current node must be the primary node or you will be locking yourself out"
+msgstr ""
+
msgid "Current password"
msgstr ""
@@ -5805,6 +6005,9 @@ msgstr ""
msgid "CustomCycleAnalytics|Select stop event"
msgstr ""
+msgid "CustomCycleAnalytics|Stage name already exists"
+msgstr ""
+
msgid "CustomCycleAnalytics|Start event"
msgstr ""
@@ -5928,6 +6131,9 @@ msgstr ""
msgid "CycleAnalyticsStage|should be under a group"
msgstr ""
+msgid "CycleAnalytics|%{selectedLabelsCount} selected (%{maxLabels} max)"
+msgstr ""
+
msgid "CycleAnalytics|%{stageCount} stages selected"
msgstr ""
@@ -5949,6 +6155,9 @@ msgstr ""
msgid "CycleAnalytics|Number of tasks"
msgstr ""
+msgid "CycleAnalytics|Only %{maxLabels} labels can be selected at this time"
+msgstr ""
+
msgid "CycleAnalytics|Project selected"
msgid_plural "CycleAnalytics|%d projects selected"
msgstr[0] ""
@@ -6188,9 +6397,15 @@ msgstr ""
msgid "Delete this attachment"
msgstr ""
+msgid "Delete variable"
+msgstr ""
+
msgid "DeleteProject|Failed to remove project repository. Please try again or contact administrator."
msgstr ""
+msgid "DeleteProject|Failed to remove project snippets. Please try again or contact administrator."
+msgstr ""
+
msgid "DeleteProject|Failed to remove some tags in project container registry. Please try again or contact administrator."
msgstr ""
@@ -6212,6 +6427,9 @@ msgstr ""
msgid "Deleted in this version"
msgstr ""
+msgid "Deleting"
+msgstr ""
+
msgid "Deleting the license failed."
msgstr ""
@@ -6294,6 +6512,9 @@ msgstr ""
msgid "Dependency List"
msgstr ""
+msgid "Dependency List has no entries"
+msgstr ""
+
msgid "Dependency Proxy"
msgstr ""
@@ -6426,6 +6647,9 @@ msgstr ""
msgid "DeployTokens|Expires"
msgstr ""
+msgid "DeployTokens|Group deploy tokens allow read-only access to the repositories and registry images within the group."
+msgstr ""
+
msgid "DeployTokens|Name"
msgstr ""
@@ -6435,16 +6659,19 @@ msgstr ""
msgid "DeployTokens|Revoke"
msgstr ""
+msgid "DeployTokens|Revoke %{b_start}%{name}%{b_end}?"
+msgstr ""
+
msgid "DeployTokens|Revoke %{name}"
msgstr ""
msgid "DeployTokens|Scopes"
msgstr ""
-msgid "DeployTokens|This action cannot be undone."
+msgid "DeployTokens|This %{entity_type} has no active Deploy Tokens."
msgstr ""
-msgid "DeployTokens|This project has no active Deploy Tokens."
+msgid "DeployTokens|This action cannot be undone."
msgstr ""
msgid "DeployTokens|Use this token as a password. Make sure you save it - you won't be able to access it again."
@@ -6456,12 +6683,15 @@ msgstr ""
msgid "DeployTokens|Username"
msgstr ""
-msgid "DeployTokens|You are about to revoke"
+msgid "DeployTokens|You are about to revoke %{b_start}%{name}%{b_end}."
msgstr ""
msgid "DeployTokens|Your New Deploy Token"
msgstr ""
+msgid "DeployTokens|Your new group deploy token has been created."
+msgstr ""
+
msgid "DeployTokens|Your new project deploy token has been created."
msgstr ""
@@ -6594,10 +6824,7 @@ msgstr ""
msgid "DesignManagement|The maximum number of designs allowed to be uploaded is %{upload_limit}. Please try again."
msgstr ""
-msgid "DesignManagement|The one place for your designs"
-msgstr ""
-
-msgid "DesignManagement|Upload and view the latest designs for this issue. Consistent and easy to find, so everyone is up to date."
+msgid "DesignManagement|To enable design management, you'll need to %{requirements_link_start}meet the requirements%{requirements_link_end}. If you need help, reach out to our %{support_link_start}support team%{support_link_end} for assistance."
msgstr ""
msgid "DesignManagement|Upload skipped."
@@ -6621,7 +6848,7 @@ msgstr ""
msgid "Detect host keys"
msgstr ""
-msgid "Detected %{timeago} in pipeline %{pipeline_link}"
+msgid "Detected %{timeago} in pipeline %{pipelineLink}"
msgstr ""
msgid "DevOps Score"
@@ -6636,6 +6863,9 @@ msgstr ""
msgid "Difference between start date and now"
msgstr ""
+msgid "DiffsCompareBaseBranch|(HEAD)"
+msgstr ""
+
msgid "DiffsCompareBaseBranch|(base)"
msgstr ""
@@ -6678,13 +6908,10 @@ msgstr ""
msgid "Disabled mirrors can only be enabled by instance owners. It is recommended that you delete them."
msgstr ""
-msgid "Discard"
-msgstr ""
-
msgid "Discard all changes"
msgstr ""
-msgid "Discard all unstaged changes?"
+msgid "Discard all changes?"
msgstr ""
msgid "Discard changes"
@@ -6759,18 +6986,29 @@ msgstr ""
msgid "Dismiss"
msgstr ""
+msgid "Dismiss %d selected vulnerability as"
+msgid_plural "Dismiss %d selected vulnerabilities as"
+msgstr[0] ""
+msgstr[1] ""
+
msgid "Dismiss DevOps Score introduction"
msgstr ""
msgid "Dismiss Merge Request promotion"
msgstr ""
+msgid "Dismiss Selected"
+msgstr ""
+
msgid "Dismiss Value Stream Analytics introduction box"
msgstr ""
msgid "Dismiss trial promotion"
msgstr ""
+msgid "Dismissable"
+msgstr ""
+
msgid "Dismissed"
msgstr ""
@@ -6792,7 +7030,7 @@ msgstr ""
msgid "Display source"
msgstr ""
-msgid "Displays dependencies and known vulnerabilities, based on the %{linkStart}latest pipeline%{linkEnd} scan"
+msgid "Displays dependencies and known vulnerabilities, based on the %{linkStart}latest successful%{linkEnd} scan"
msgstr ""
msgid "Do not display offers from third parties within GitLab"
@@ -6816,12 +7054,18 @@ msgstr ""
msgid "Domain"
msgstr ""
+msgid "Domain cannot be deleted while associated to one or more clusters."
+msgstr ""
+
msgid "Domain verification is an essential security measure for public GitLab sites. Users are required to demonstrate they control a domain before it is enabled"
msgstr ""
msgid "Domain was successfully created."
msgstr ""
+msgid "Domain was successfully deleted."
+msgstr ""
+
msgid "Domain was successfully updated."
msgstr ""
@@ -6897,6 +7141,9 @@ msgstr ""
msgid "Downvotes"
msgstr ""
+msgid "Drop your designs to start your upload."
+msgstr ""
+
msgid "Due date"
msgstr ""
@@ -6951,9 +7198,6 @@ msgstr ""
msgid "Edit Pipeline Schedule %{id}"
msgstr ""
-msgid "Edit Project Hook"
-msgstr ""
-
msgid "Edit Release"
msgstr ""
@@ -7035,6 +7279,9 @@ msgstr ""
msgid "Elasticsearch integration. Elasticsearch AWS IAM."
msgstr ""
+msgid "Elasticsearch returned status code: %{status_code}"
+msgstr ""
+
msgid "Elastic|None. Select namespaces to index."
msgstr ""
@@ -7059,6 +7306,12 @@ msgstr ""
msgid "Email patch"
msgstr ""
+msgid "Email restrictions"
+msgstr ""
+
+msgid "Email restrictions for sign-ups"
+msgstr ""
+
msgid "Email the pipelines status to a list of recipients."
msgstr ""
@@ -7167,6 +7420,9 @@ msgstr ""
msgid "Enable classification control using an external service"
msgstr ""
+msgid "Enable email restrictions for sign ups"
+msgstr ""
+
msgid "Enable error tracking"
msgstr ""
@@ -7248,6 +7504,9 @@ msgstr ""
msgid "Enabling this will only make licensed EE features available to projects if the project namespace's plan includes the feature or if the project is public."
msgstr ""
+msgid "Encountered an error while rendering: %{err}"
+msgstr ""
+
msgid "End date"
msgstr ""
@@ -7263,6 +7522,9 @@ msgstr ""
msgid "Ensure your %{linkStart}environment is part of the deploy stage%{linkEnd} of your CI pipeline to track deployments to your cluster."
msgstr ""
+msgid "Enter 2FA for Admin Mode"
+msgstr ""
+
msgid "Enter Admin Mode"
msgstr ""
@@ -7272,6 +7534,9 @@ msgstr ""
msgid "Enter a number"
msgstr ""
+msgid "Enter a whole number between 0 and 100"
+msgstr ""
+
msgid "Enter at least three characters to search"
msgstr ""
@@ -7296,6 +7561,12 @@ msgstr ""
msgid "Enter number of issues"
msgstr ""
+msgid "Enter one or more user ID separated by commas"
+msgstr ""
+
+msgid "Enter the code from the two-factor app on your mobile device. If you've lost your device, you may enter one of your recovery codes."
+msgstr ""
+
msgid "Enter the issue description"
msgstr ""
@@ -7317,6 +7588,12 @@ msgstr ""
msgid "Environment"
msgstr ""
+msgid "Environment does not have deployments"
+msgstr ""
+
+msgid "Environment scope"
+msgstr ""
+
msgid "Environment variables are applied to environments via the runner. They can be protected by only exposing them to protected branches or tags. Additionally, they can be masked so they are hidden in job logs, though they must match certain regexp requirements to do so. You can use environment variables for passwords, secret keys, or whatever you want."
msgstr ""
@@ -7404,6 +7681,12 @@ msgstr ""
msgid "Environments|Commit"
msgstr ""
+msgid "Environments|Currently showing %{fetched} results."
+msgstr ""
+
+msgid "Environments|Currently showing all results."
+msgstr ""
+
msgid "Environments|Deploy to..."
msgstr ""
@@ -7434,6 +7717,12 @@ msgstr ""
msgid "Environments|Learn more about stopping environments"
msgstr ""
+msgid "Environments|Logs from"
+msgstr ""
+
+msgid "Environments|Logs from %{start} to %{end}."
+msgstr ""
+
msgid "Environments|New environment"
msgstr ""
@@ -7455,9 +7744,6 @@ msgstr ""
msgid "Environments|Open live environment"
msgstr ""
-msgid "Environments|Pod logs from"
-msgstr ""
-
msgid "Environments|Re-deploy"
msgstr ""
@@ -7557,6 +7843,9 @@ msgstr ""
msgid "Epics|An error occurred while saving the %{epicDateType} date"
msgstr ""
+msgid "Epics|An error occurred while updating labels."
+msgstr ""
+
msgid "Epics|Are you sure you want to remove %{bStart}%{targetIssueTitle}%{bEnd} from %{bStart}%{parentEpicTitle}%{bEnd}?"
msgstr ""
@@ -7587,6 +7876,9 @@ msgstr ""
msgid "Epics|Something went wrong while creating child epics."
msgstr ""
+msgid "Epics|Something went wrong while creating issue."
+msgstr ""
+
msgid "Epics|Something went wrong while fetching child epics."
msgstr ""
@@ -7626,6 +7918,9 @@ msgstr ""
msgid "Error creating epic"
msgstr ""
+msgid "Error creating label."
+msgstr ""
+
msgid "Error deleting %{issuableType}"
msgstr ""
@@ -7704,6 +7999,9 @@ msgstr ""
msgid "Error occurred when toggling the notification subscription"
msgstr ""
+msgid "Error occurred while updating the issue status"
+msgstr ""
+
msgid "Error occurred while updating the issue weight"
msgstr ""
@@ -7869,6 +8167,9 @@ msgstr ""
msgid "Everything on your to-do list is marked as done."
msgstr ""
+msgid "Everything you need to create a GitLab Pages site using Gatsby."
+msgstr ""
+
msgid "Everything you need to create a GitLab Pages site using GitBook."
msgstr ""
@@ -7887,6 +8188,9 @@ msgstr ""
msgid "Evidence collection"
msgstr ""
+msgid "Exactly one of %{attributes} is required"
+msgstr ""
+
msgid "Example: @sub\\.company\\.com$"
msgstr ""
@@ -7896,6 +8200,9 @@ msgstr ""
msgid "Except policy:"
msgstr ""
+msgid "Excluding merge commits. Limited to %{limit} commits."
+msgstr ""
+
msgid "Excluding merge commits. Limited to 6,000 commits."
msgstr ""
@@ -7905,6 +8212,9 @@ msgstr ""
msgid "Existing shares"
msgstr ""
+msgid "Existing sign in methods may be removed"
+msgstr ""
+
msgid "Expand"
msgstr ""
@@ -7950,6 +8260,9 @@ msgstr ""
msgid "Expires in %{expires_at}"
msgstr ""
+msgid "Expires:"
+msgstr ""
+
msgid "Explain the problem. If appropriate, provide a link to the relevant issue or comment."
msgstr ""
@@ -7983,6 +8296,9 @@ msgstr ""
msgid "Export this project with all its related data in order to move your project to a new GitLab instance. Once the export is finished, you can import the file from the \"New Project\" page."
msgstr ""
+msgid "Export variable to pipelines running on protected branches and tags only."
+msgstr ""
+
msgid "External Classification Policy Authorization"
msgstr ""
@@ -8079,7 +8395,7 @@ msgstr ""
msgid "Failed to create a branch for this issue. Please try again."
msgstr ""
-msgid "Failed to create repository via gitlab-shell"
+msgid "Failed to create repository"
msgstr ""
msgid "Failed to create resources"
@@ -8208,6 +8524,9 @@ msgstr ""
msgid "Failure"
msgstr ""
+msgid "False positive"
+msgstr ""
+
msgid "Fast-forward merge is not possible. Rebase the source branch onto %{targetBranch} to allow this merge request to be merged."
msgstr ""
@@ -8370,6 +8689,18 @@ msgstr ""
msgid "FeatureFlags|User IDs"
msgstr ""
+msgid "FeatureFlag|Delete strategy"
+msgstr ""
+
+msgid "FeatureFlag|Percentage"
+msgstr ""
+
+msgid "FeatureFlag|Type"
+msgstr ""
+
+msgid "FeatureFlag|User IDs"
+msgstr ""
+
msgid "Feb"
msgstr ""
@@ -8388,6 +8719,9 @@ msgstr ""
msgid "Fetching licenses failed. You are not permitted to perform this action."
msgstr ""
+msgid "File"
+msgstr ""
+
msgid "File Hooks"
msgstr ""
@@ -8535,6 +8869,9 @@ msgstr ""
msgid "Fixed:"
msgstr ""
+msgid "Flags"
+msgstr ""
+
msgid "FlowdockService|Flowdock Git source token"
msgstr ""
@@ -8619,9 +8956,6 @@ msgstr ""
msgid "Forking in progress"
msgstr ""
-msgid "Forking repository"
-msgstr ""
-
msgid "Forks"
msgstr ""
@@ -8682,6 +9016,9 @@ msgstr ""
msgid "From merge request merge until deploy to production"
msgstr ""
+msgid "From project"
+msgstr ""
+
msgid "From the Kubernetes cluster details view, install Runner from the applications list"
msgstr ""
@@ -9042,6 +9379,12 @@ msgstr ""
msgid "Geo|Remove"
msgstr ""
+msgid "Geo|Remove entry"
+msgstr ""
+
+msgid "Geo|Remove tracking database entry"
+msgstr ""
+
msgid "Geo|Repository sync capacity"
msgstr ""
@@ -9093,13 +9436,13 @@ msgstr ""
msgid "Geo|This is a primary node"
msgstr ""
-msgid "Geo|Tracking entry for project (%{project_id}) was successfully removed."
+msgid "Geo|Tracking database entry will be removed. Are you sure?"
msgstr ""
-msgid "Geo|Tracking entry for upload (%{type}/%{id}) was successfully removed."
+msgid "Geo|Tracking entry for project (%{project_id}) was successfully removed."
msgstr ""
-msgid "Geo|Tracking entry will be removed. Are you sure?"
+msgid "Geo|Tracking entry for upload (%{type}/%{id}) was successfully removed."
msgstr ""
msgid "Geo|URL"
@@ -9390,6 +9733,12 @@ msgstr ""
msgid "Go to %{link_to_google_takeout}."
msgstr ""
+msgid "Go to Pipelines"
+msgstr ""
+
+msgid "Go to Webhooks"
+msgstr ""
+
msgid "Go to commits"
msgstr ""
@@ -9630,6 +9979,9 @@ msgstr ""
msgid "Group pipeline minutes were successfully reset."
msgstr ""
+msgid "Group requires separate account"
+msgstr ""
+
msgid "Group variables (inherited)"
msgstr ""
@@ -9642,15 +9994,21 @@ msgstr ""
msgid "Group: %{name}"
msgstr ""
-msgid "GroupRoadmap|%{startDateInWords} &ndash; %{endDateInWords}"
+msgid "GroupRoadmap|%{dateWord} – No end date"
+msgstr ""
+
+msgid "GroupRoadmap|%{startDateInWords} – %{endDateInWords}"
msgstr ""
-msgid "GroupRoadmap|From %{dateWord}"
+msgid "GroupRoadmap|No start date – %{dateWord}"
msgstr ""
msgid "GroupRoadmap|Something went wrong while fetching epics"
msgstr ""
+msgid "GroupRoadmap|Something went wrong while fetching milestones"
+msgstr ""
+
msgid "GroupRoadmap|Sorry, no epics matched your search"
msgstr ""
@@ -9666,9 +10024,6 @@ msgstr ""
msgid "GroupRoadmap|To widen your search, change or remove filters; from %{startDate} to %{endDate}."
msgstr ""
-msgid "GroupRoadmap|Until %{dateWord}"
-msgstr ""
-
msgid "GroupSAML|Certificate fingerprint"
msgstr ""
@@ -9897,6 +10252,9 @@ msgstr ""
msgid "Groups can also be nested by creating %{subgroup_docs_link_start}subgroups%{subgroup_docs_link_end}."
msgstr ""
+msgid "Groups to synchronize"
+msgstr ""
+
msgid "Groups with access to %{strong_start}%{group_name}%{strong_end}"
msgstr ""
@@ -9972,6 +10330,9 @@ msgstr ""
msgid "HTTP Basic: Access denied\\nYou must use a personal access token with 'api' scope for Git over HTTP.\\nYou can generate one at %{profile_personal_access_tokens_url}"
msgstr ""
+msgid "Hashed Storage must be enabled to use Geo"
+msgstr ""
+
msgid "Hashed repository storage paths"
msgstr ""
@@ -9984,6 +10345,9 @@ msgstr ""
msgid "Header message"
msgstr ""
+msgid "Health"
+msgstr ""
+
msgid "Health Check"
msgstr ""
@@ -10193,6 +10557,12 @@ msgstr ""
msgid "IDE|This option is disabled because you don't have write permissions for the current branch."
msgstr ""
+msgid "INFO: Your SSH key has expired. Please generate a new key."
+msgstr ""
+
+msgid "INFO: Your SSH key is expiring soon. Please generate a new key."
+msgstr ""
+
msgid "IP Address"
msgstr ""
@@ -10265,13 +10635,10 @@ msgstr ""
msgid "Ignore"
msgstr ""
-msgid "Image %{imageName} was scheduled for deletion from the registry."
+msgid "Ignored"
msgstr ""
-msgid "Image ID"
-msgstr ""
-
-msgid "Image deleted successfully"
+msgid "Image %{imageName} was scheduled for deletion from the registry."
msgstr ""
msgid "Image: %{image}"
@@ -10298,9 +10665,15 @@ msgstr ""
msgid "Import"
msgstr ""
+msgid "Import %{status}"
+msgstr ""
+
msgid "Import CSV"
msgstr ""
+msgid "Import Jira issues"
+msgstr ""
+
msgid "Import Projects from Gitea"
msgstr ""
@@ -10322,6 +10695,9 @@ msgstr ""
msgid "Import issues"
msgstr ""
+msgid "Import issues from Jira"
+msgstr ""
+
msgid "Import members"
msgstr ""
@@ -10463,6 +10839,9 @@ msgstr ""
msgid "Incoming email"
msgstr ""
+msgid "Incoming!"
+msgstr ""
+
msgid "Incompatible Project"
msgstr ""
@@ -10543,10 +10922,13 @@ msgstr ""
msgid "Instance license"
msgstr ""
+msgid "Integration Settings"
+msgstr ""
+
msgid "Integrations"
msgstr ""
-msgid "Integrations Settings"
+msgid "Integrations allow you to integrate GitLab with other applications"
msgstr ""
msgid "Interested parties can even contribute by pushing commits if they want to."
@@ -10588,6 +10970,9 @@ msgstr ""
msgid "Invalid URL"
msgstr ""
+msgid "Invalid cursor value provided"
+msgstr ""
+
msgid "Invalid date"
msgstr ""
@@ -10603,6 +10988,9 @@ msgstr ""
msgid "Invalid field"
msgstr ""
+msgid "Invalid file format with specified file type"
+msgstr ""
+
msgid "Invalid file."
msgstr ""
@@ -10696,6 +11084,9 @@ msgstr ""
msgid "Issue events"
msgstr ""
+msgid "Issue or Merge Request ID is required"
+msgstr ""
+
msgid "Issue template (optional)"
msgstr ""
@@ -10801,6 +11192,9 @@ msgstr ""
msgid "It must have a header row and at least two columns: the first column is the issue title and the second column is the issue description. The separator is automatically detected."
msgstr ""
+msgid "It seems like the Dependency Scanning job ran successfully, but no dependencies have been detected in your project."
+msgstr ""
+
msgid "It's you"
msgstr ""
@@ -10816,6 +11210,9 @@ msgstr ""
msgid "January"
msgstr ""
+msgid "Jira Issue Import"
+msgstr ""
+
msgid "JiraService|Events for %{noteable_model_name} are disabled."
msgstr ""
@@ -11005,6 +11402,9 @@ msgstr ""
msgid "Kubernetes Clusters"
msgstr ""
+msgid "Kubernetes cluster"
+msgstr ""
+
msgid "Kubernetes cluster creation time exceeds timeout; %{timeout}"
msgstr ""
@@ -11127,9 +11527,6 @@ msgstr ""
msgid "Last Seen"
msgstr ""
-msgid "Last Updated"
-msgstr ""
-
msgid "Last accessed on"
msgstr ""
@@ -11304,31 +11701,19 @@ msgstr ""
msgid "LicenseCompliance|Add licenses manually to approve or blacklist"
msgstr ""
-msgid "LicenseCompliance|Approve"
-msgstr ""
-
-msgid "LicenseCompliance|Approve license"
-msgstr ""
-
-msgid "LicenseCompliance|Approve license?"
-msgstr ""
-
-msgid "LicenseCompliance|Approved"
+msgid "LicenseCompliance|Allow"
msgstr ""
-msgid "LicenseCompliance|Blacklist"
+msgid "LicenseCompliance|Allowed"
msgstr ""
-msgid "LicenseCompliance|Blacklist license"
+msgid "LicenseCompliance|Cancel"
msgstr ""
-msgid "LicenseCompliance|Blacklist license?"
+msgid "LicenseCompliance|Denied"
msgstr ""
-msgid "LicenseCompliance|Blacklisted"
-msgstr ""
-
-msgid "LicenseCompliance|Cancel"
+msgid "LicenseCompliance|Deny"
msgstr ""
msgid "LicenseCompliance|Here you can approve or blacklist licenses for this project. Using %{ci} or %{license} will allow you to see if there are any unmanaged licenses and approve or blacklist them in merge request."
@@ -11372,6 +11757,9 @@ msgstr ""
msgid "LicenseCompliance|License name"
msgstr ""
+msgid "LicenseCompliance|License review"
+msgstr ""
+
msgid "LicenseCompliance|Packages"
msgstr ""
@@ -11417,7 +11805,10 @@ msgstr ""
msgid "Licenses|Components"
msgstr ""
-msgid "Licenses|Displays licenses detected in the project, based on the %{linkStart}latest pipeline%{linkEnd} scan"
+msgid "Licenses|Detected in Project"
+msgstr ""
+
+msgid "Licenses|Displays licenses detected in the project, based on the %{linkStart}latest successful%{linkEnd} scan"
msgstr ""
msgid "Licenses|Error fetching the license list. Please check your network connection and try again."
@@ -11432,6 +11823,15 @@ msgstr ""
msgid "Licenses|Name"
msgstr ""
+msgid "Licenses|Policies"
+msgstr ""
+
+msgid "Licenses|Policy"
+msgstr ""
+
+msgid "Licenses|Specified policies in this project"
+msgstr ""
+
msgid "Licenses|The license list details information about the licenses used within your project."
msgstr ""
@@ -11473,6 +11873,9 @@ msgstr[1] ""
msgid "Line changes"
msgstr ""
+msgid "Link Prometheus monitoring to GitLab."
+msgstr ""
+
msgid "Link copied"
msgstr ""
@@ -11599,7 +12002,7 @@ msgstr ""
msgid "Logs"
msgstr ""
-msgid "Logs|To see the pod logs, deploy your code to an environment."
+msgid "Logs|To see the logs, deploy your code to an environment."
msgstr ""
msgid "Low vulnerabilities present"
@@ -11614,6 +12017,12 @@ msgstr ""
msgid "MERGED"
msgstr ""
+msgid "MR widget|Take a look at our %{beginnerLinkStart}Beginner's Guide to Continuous Integration%{beginnerLinkEnd} and our %{exampleLinkStart}examples of GitLab CI/CD%{exampleLinkEnd} to see all the cool stuff you can do with it."
+msgstr ""
+
+msgid "MR widget|The pipeline will now run automatically every time you commit code. Pipelines are useful for deploying static web pages, detecting vulnerabilities in dependencies, static or dynamic application security testing (SAST and DAST), and so much more!"
+msgstr ""
+
msgid "MRApprovals|Approved by"
msgstr ""
@@ -11773,6 +12182,9 @@ msgstr ""
msgid "Marks this issue as related to %{issue_ref}."
msgstr ""
+msgid "Mask variable"
+msgstr ""
+
msgid "Match not found; try refining your search query."
msgstr ""
@@ -12118,6 +12530,9 @@ msgstr ""
msgid "Metric was successfully updated."
msgstr ""
+msgid "Metric:"
+msgstr ""
+
msgid "MetricChart|Please select a metric"
msgstr ""
@@ -12176,7 +12591,9 @@ msgid "Metrics|Duplicating..."
msgstr ""
msgid "Metrics|Edit metric"
-msgstr ""
+msgid_plural "Metrics|Edit metrics"
+msgstr[0] ""
+msgstr[1] ""
msgid "Metrics|Environment"
msgstr ""
@@ -12211,6 +12628,9 @@ msgstr ""
msgid "Metrics|Prometheus Query Documentation"
msgstr ""
+msgid "Metrics|Reload this page"
+msgstr ""
+
msgid "Metrics|Show last"
msgstr ""
@@ -12256,6 +12676,9 @@ msgstr ""
msgid "Metrics|Validating query"
msgstr ""
+msgid "Metrics|Values"
+msgstr ""
+
msgid "Metrics|View logs"
msgstr ""
@@ -12384,7 +12807,13 @@ msgstr ""
msgid "Missing commit signatures endpoint!"
msgstr ""
-msgid "MissingSSHKeyWarningLink|add an SSH key"
+msgid "MissingSSHKeyWarningLink|Add SSH key"
+msgstr ""
+
+msgid "MissingSSHKeyWarningLink|Don't show again"
+msgstr ""
+
+msgid "MissingSSHKeyWarningLink|You won't be able to pull or push project code via SSH until you add an SSH key to your profile"
msgstr ""
msgid "Modal|Cancel"
@@ -12393,6 +12822,9 @@ msgstr ""
msgid "Modal|Close"
msgstr ""
+msgid "Modified"
+msgstr ""
+
msgid "Modified in this version"
msgstr ""
@@ -12489,6 +12921,15 @@ msgstr ""
msgid "Moves this issue to %{path_to_project}."
msgstr ""
+msgid "MrDeploymentActions|Deploy"
+msgstr ""
+
+msgid "MrDeploymentActions|Re-deploy"
+msgstr ""
+
+msgid "MrDeploymentActions|Stop environment"
+msgstr ""
+
msgid "Multiple issue boards"
msgstr ""
@@ -12513,10 +12954,10 @@ msgstr ""
msgid "Name new label"
msgstr ""
-msgid "Name your individual key via a title"
+msgid "Name:"
msgstr ""
-msgid "Name:"
+msgid "Namespace is empty"
msgstr ""
msgid "Namespace: %{namespace}"
@@ -12549,6 +12990,9 @@ msgstr ""
msgid "Need help?"
msgstr ""
+msgid "Needs attention"
+msgstr ""
+
msgid "Network"
msgstr ""
@@ -12662,6 +13106,9 @@ msgstr ""
msgid "New project"
msgstr ""
+msgid "New release"
+msgstr ""
+
msgid "New runners registration token has been generated!"
msgstr ""
@@ -12710,7 +13157,7 @@ msgstr ""
msgid "No %{providerTitle} repositories found"
msgstr ""
-msgid "No Design Repositories match this filter"
+msgid "No %{replicableType} match this filter"
msgstr ""
msgid "No Epic"
@@ -12776,9 +13223,6 @@ msgstr ""
msgid "No data to display"
msgstr ""
-msgid "No deployment platform available"
-msgstr ""
-
msgid "No deployments found"
msgstr ""
@@ -12827,6 +13271,12 @@ msgstr ""
msgid "No licenses found."
msgstr ""
+msgid "No matches found"
+msgstr ""
+
+msgid "No matching labels"
+msgstr ""
+
msgid "No matching results"
msgstr ""
@@ -12881,6 +13331,12 @@ msgstr ""
msgid "No template"
msgstr ""
+msgid "No test coverage"
+msgstr ""
+
+msgid "No thanks, don't show this again"
+msgstr ""
+
msgid "No value set by top-level parent group."
msgstr ""
@@ -12962,6 +13418,9 @@ msgstr ""
msgid "Note"
msgstr ""
+msgid "Note parameters are invalid: %{errors}"
+msgstr ""
+
msgid "Note that this invitation was sent to %{mail_to_invite_email}, but you are signed in as %{link_to_current_user} with email %{mail_to_current_user}."
msgstr ""
@@ -13025,6 +13484,9 @@ msgstr ""
msgid "NotificationEvent|Failed pipeline"
msgstr ""
+msgid "NotificationEvent|Fixed pipeline"
+msgstr ""
+
msgid "NotificationEvent|Merge merge request"
msgstr ""
@@ -13142,6 +13604,9 @@ msgstr ""
msgid "OfSearchInADropdown|Filter"
msgstr ""
+msgid "Oh no!"
+msgstr ""
+
msgid "Ok let's go"
msgstr ""
@@ -13151,6 +13616,9 @@ msgstr ""
msgid "Omnibus Protected Paths throttle is active. From 12.4, Omnibus throttle is deprecated and will be removed in a future release. Please read the %{relative_url_link_start}Migrating Protected Paths documentation%{relative_url_link_end}."
msgstr ""
+msgid "On track"
+msgstr ""
+
msgid "Onboarding"
msgstr ""
@@ -13192,6 +13660,12 @@ msgstr ""
msgid "Only 'Reporter' roles and above on tiers Premium / Silver and above can see Value Stream Analytics."
msgstr ""
+msgid "Only 1 appearances row can exist"
+msgstr ""
+
+msgid "Only Issue ID or Merge Request ID is required"
+msgstr ""
+
msgid "Only Project Members"
msgstr ""
@@ -13231,9 +13705,6 @@ msgstr ""
msgid "Open"
msgstr ""
-msgid "Open Documentation"
-msgstr ""
-
msgid "Open Selection"
msgstr ""
@@ -13264,6 +13735,9 @@ msgstr ""
msgid "Open source software to collaborate on code"
msgstr ""
+msgid "Open: %{openIssuesCount}"
+msgstr ""
+
msgid "Open: %{open} • Closed: %{closed}"
msgstr ""
@@ -13348,6 +13822,18 @@ msgstr ""
msgid "Outbound requests"
msgstr ""
+msgid "OutdatedBrowser|From May 2020 GitLab no longer supports Internet Explorer 11."
+msgstr ""
+
+msgid "OutdatedBrowser|GitLab may not work properly, because you are using an outdated web browser."
+msgstr ""
+
+msgid "OutdatedBrowser|Please install a %{browser_link_start}supported web browser%{browser_link_end} for a better experience."
+msgstr ""
+
+msgid "OutdatedBrowser|You can provide feedback %{feedback_link_start}on this issue%{feedback_link_end} or via your usual support channels."
+msgstr ""
+
msgid "Overview"
msgstr ""
@@ -13363,12 +13849,27 @@ msgstr ""
msgid "Owner"
msgstr ""
+msgid "Package Registry"
+msgstr ""
+
+msgid "Package already exists"
+msgstr ""
+
msgid "Package deleted successfully"
msgstr ""
msgid "Package information"
msgstr ""
+msgid "Package recipe already exists"
+msgstr ""
+
+msgid "Package type must be Conan"
+msgstr ""
+
+msgid "Package type must be Maven"
+msgstr ""
+
msgid "Package was removed"
msgstr ""
@@ -13378,6 +13879,9 @@ msgstr ""
msgid "PackageRegistry|Add NuGet Source"
msgstr ""
+msgid "PackageRegistry|Conan"
+msgstr ""
+
msgid "PackageRegistry|Conan Command"
msgstr ""
@@ -13441,31 +13945,43 @@ msgstr ""
msgid "PackageRegistry|Learn how to %{noPackagesLinkStart}publish and share your packages%{noPackagesLinkEnd} with GitLab."
msgstr ""
+msgid "PackageRegistry|Maven"
+msgstr ""
+
msgid "PackageRegistry|Maven Command"
msgstr ""
msgid "PackageRegistry|Maven XML"
msgstr ""
+msgid "PackageRegistry|NPM"
+msgstr ""
+
+msgid "PackageRegistry|NuGet"
+msgstr ""
+
msgid "PackageRegistry|NuGet Command"
msgstr ""
-msgid "PackageRegistry|Registry Setup"
+msgid "PackageRegistry|Pipeline %{linkStart}%{linkEnd} triggered %{timestamp} by %{author}"
msgstr ""
-msgid "PackageRegistry|Remove package"
+msgid "PackageRegistry|Published to the repository at %{timestamp}"
msgstr ""
-msgid "PackageRegistry|There are no packages yet"
+msgid "PackageRegistry|Registry Setup"
msgstr ""
-msgid "PackageRegistry|There was a problem fetching the details for this package."
+msgid "PackageRegistry|Remove package"
msgstr ""
-msgid "PackageRegistry|There was an error fetching the pipeline information."
+msgid "PackageRegistry|There are no %{packageType} packages yet"
msgstr ""
-msgid "PackageRegistry|Unable to fetch pipeline information"
+msgid "PackageRegistry|There are no packages yet"
+msgstr ""
+
+msgid "PackageRegistry|There was a problem fetching the details for this package."
msgstr ""
msgid "PackageRegistry|Unable to load package"
@@ -13477,7 +13993,7 @@ msgstr ""
msgid "PackageRegistry|You are about to delete version %{boldStart}%{version}%{boldEnd} of %{boldStart}%{name}%{boldEnd}. Are you sure?"
msgstr ""
-msgid "PackageRegistry|You may also need to setup authentication using an auth token. %{linkStart}See the documentation%{linkEnd} to find out more."
+msgid "PackageRegistry|You may also need to setup authentication using an auth token. %{linkStart}See the documentation%{linkEnd} to find out more."
msgstr ""
msgid "PackageRegistry|npm"
@@ -13534,18 +14050,12 @@ msgstr ""
msgid "Pagination|Next"
msgstr ""
-msgid "Pagination|Next ›"
-msgstr ""
-
msgid "Pagination|Prev"
msgstr ""
msgid "Pagination|« First"
msgstr ""
-msgid "Pagination|‹ Prev"
-msgstr ""
-
msgid "Parameter"
msgstr ""
@@ -13564,6 +14074,9 @@ msgstr ""
msgid "Part of merge request changes"
msgstr ""
+msgid "Partial token for reference only"
+msgstr ""
+
msgid "Participants"
msgstr ""
@@ -13636,6 +14149,9 @@ msgstr ""
msgid "People without permission will never get a notification."
msgstr ""
+msgid "Percent rollout (logged in users)"
+msgstr ""
+
msgid "Percentage"
msgstr ""
@@ -14023,6 +14539,9 @@ msgstr ""
msgid "Please create a username with only alphanumeric characters."
msgstr ""
+msgid "Please create an index before enabling indexing"
+msgstr ""
+
msgid "Please enable and migrate to hashed storage to avoid security issues and ensure data integrity. %{migrate_link}"
msgstr ""
@@ -14113,9 +14632,6 @@ msgstr ""
msgid "Pod does not exist"
msgstr ""
-msgid "Pod logs"
-msgstr ""
-
msgid "Pod not found"
msgstr ""
@@ -14269,7 +14785,7 @@ msgstr ""
msgid "Private"
msgstr ""
-msgid "Private - Project access must be granted explicitly to each user."
+msgid "Private - Project access must be granted explicitly to each user. If this project is part of a group, access will be granted to members of the group."
msgstr ""
msgid "Private - The group and its projects can only be viewed by members."
@@ -14287,6 +14803,9 @@ msgstr ""
msgid "Proceed"
msgstr ""
+msgid "Productivity"
+msgstr ""
+
msgid "Productivity Analytics"
msgstr ""
@@ -14443,12 +14962,21 @@ msgstr ""
msgid "Profiles|Enter your name, so people you know can recognize you"
msgstr ""
+msgid "Profiles|Expires at"
+msgstr ""
+
+msgid "Profiles|Expires:"
+msgstr ""
+
msgid "Profiles|Feed token was successfully reset"
msgstr ""
msgid "Profiles|Full name"
msgstr ""
+msgid "Profiles|Give your individual key a title"
+msgstr ""
+
msgid "Profiles|Impersonation"
msgstr ""
@@ -14470,6 +14998,9 @@ msgstr ""
msgid "Profiles|Key"
msgstr ""
+msgid "Profiles|Last used:"
+msgstr ""
+
msgid "Profiles|Learn more"
msgstr ""
@@ -14626,6 +15157,9 @@ msgstr ""
msgid "Profiles|Your email address was automatically set based on your %{provider_label} account"
msgstr ""
+msgid "Profiles|Your key has expired"
+msgstr ""
+
msgid "Profiles|Your location was automatically set based on your %{provider_label} account"
msgstr ""
@@ -14653,6 +15187,9 @@ msgstr ""
msgid "Profiles|your account"
msgstr ""
+msgid "Profile|%{job_title} at %{organization}"
+msgstr ""
+
msgid "Profiling - Performance bar"
msgstr ""
@@ -14701,16 +15238,13 @@ msgstr ""
msgid "Project Files"
msgstr ""
-msgid "Project Hooks"
-msgstr ""
-
msgid "Project ID"
msgstr ""
msgid "Project URL"
msgstr ""
-msgid "Project access must be granted explicitly to each user."
+msgid "Project access must be granted explicitly to each user. If this project is part of a group, access will be granted to members of the group."
msgstr ""
msgid "Project already deleted"
@@ -14866,30 +15400,18 @@ msgstr ""
msgid "ProjectService|Comment will be posted on each event"
msgstr ""
-msgid "ProjectService|Integrations"
-msgstr ""
-
msgid "ProjectService|Last edit"
msgstr ""
msgid "ProjectService|Perform common operations on GitLab project: %{project_name}"
msgstr ""
-msgid "ProjectService|Project services"
-msgstr ""
-
-msgid "ProjectService|Project services allow you to integrate GitLab with other applications"
-msgstr ""
-
msgid "ProjectService|Service"
msgstr ""
msgid "ProjectService|Services"
msgstr ""
-msgid "ProjectService|Settings"
-msgstr ""
-
msgid "ProjectService|To set up this service:"
msgstr ""
@@ -15088,6 +15610,9 @@ msgstr ""
msgid "ProjectSettings|View and edit files in this project"
msgstr ""
+msgid "ProjectSettings|View and edit files in this project. Non-project members will only have read access"
+msgstr ""
+
msgid "ProjectSettings|When conflicts arise the user is given the option to rebase"
msgstr ""
@@ -15124,6 +15649,9 @@ msgstr ""
msgid "ProjectTemplates|NodeJS Express"
msgstr ""
+msgid "ProjectTemplates|Pages/Gatsby"
+msgstr ""
+
msgid "ProjectTemplates|Pages/GitBook"
msgstr ""
@@ -15262,6 +15790,9 @@ msgstr ""
msgid "ProjectsNew|Want to house several dependent projects under the same namespace? %{link_start}Create a group.%{link_end}"
msgstr ""
+msgid "Prometheus"
+msgstr ""
+
msgid "PrometheusAlerts|%{count} alerts applied"
msgstr ""
@@ -15436,6 +15967,9 @@ msgstr ""
msgid "Prompt users to upload SSH keys"
msgstr ""
+msgid "Protect variable"
+msgstr ""
+
msgid "Protected"
msgstr ""
@@ -15574,6 +16108,12 @@ msgstr ""
msgid "Pull"
msgstr ""
+msgid "Pull requests from fork are not supported"
+msgstr ""
+
+msgid "Puma is running with a thread count above 1 and the Rugged service is enabled. This may decrease performance in some environments. See our %{link_start}documentation%{link_end} for details of this issue."
+msgstr ""
+
msgid "Purchase more minutes"
msgstr ""
@@ -15613,6 +16153,12 @@ msgstr ""
msgid "Pushes"
msgstr ""
+msgid "Pushing code and creation of issues and merge requests has been disabled."
+msgstr ""
+
+msgid "Pushing code and creation of issues and merge requests will be disabled on %{disabled_on}."
+msgstr ""
+
msgid "PushoverService|%{user_name} deleted branch \"%{ref}\"."
msgstr ""
@@ -15833,6 +16379,9 @@ msgid_plural "Releases"
msgstr[0] ""
msgstr[1] ""
+msgid "Release does not have the same project as the milestone"
+msgstr ""
+
msgid "Release notes"
msgstr ""
@@ -15851,6 +16400,9 @@ msgstr ""
msgid "Releases are based on Git tags. We recommend naming tags that fit within semantic versioning, for example %{codeStart}v1.0%{codeEnd}, %{codeStart}v2.0-pre%{codeEnd}."
msgstr ""
+msgid "Releases documentation"
+msgstr ""
+
msgid "Release|Something went wrong while getting the release details"
msgstr ""
@@ -16034,15 +16586,15 @@ msgstr ""
msgid "Removing a project places it into a read-only state until %{date}, at which point the project will be permanently removed."
msgstr ""
-msgid "Removing group will cause all child projects and resources to be removed."
-msgstr ""
-
msgid "Removing license…"
msgstr ""
msgid "Removing the project will delete its repository and all related resources including issues, merge requests etc."
msgstr ""
+msgid "Removing this group also removes all child projects, including archived projects, and their resources."
+msgstr ""
+
msgid "Rename file"
msgstr ""
@@ -16109,7 +16661,7 @@ msgstr ""
msgid "Reporting"
msgstr ""
-msgid "Reports|%{failedString} and %{resolvedString}"
+msgid "Reports|%{combinedString} and %{resolvedString}"
msgstr ""
msgid "Reports|Actions"
@@ -16163,9 +16715,6 @@ msgstr ""
msgid "Repository"
msgstr ""
-msgid "Repository Analytics"
-msgstr ""
-
msgid "Repository Graph"
msgstr ""
@@ -16241,6 +16790,9 @@ msgstr ""
msgid "Require users to prove ownership of custom domains"
msgstr ""
+msgid "Requirements"
+msgstr ""
+
msgid "Requires approval from %{names}."
msgid_plural "Requires %{count} more approvals from %{names}."
msgstr[0] ""
@@ -16251,6 +16803,9 @@ msgid_plural "Requires %d more approvals."
msgstr[0] ""
msgstr[1] ""
+msgid "Requires values to meet regular expression requirements."
+msgstr ""
+
msgid "Resend confirmation email"
msgstr ""
@@ -16293,9 +16848,6 @@ msgstr ""
msgid "Resolve conflicts on source branch"
msgstr ""
-msgid "Resolve discussion"
-msgstr ""
-
msgid "Resolve thread"
msgstr ""
@@ -16362,6 +16914,9 @@ msgstr ""
msgid "Restrict membership by email"
msgstr ""
+msgid "Restricts sign-ups for email addresses that match the given regex. See the %{supported_syntax_link_start}supported syntax%{supported_syntax_link_end} for more information."
+msgstr ""
+
msgid "Resume"
msgstr ""
@@ -16371,7 +16926,7 @@ msgstr ""
msgid "Resync"
msgstr ""
-msgid "Resync all designs"
+msgid "Resync all %{replicableType}"
msgstr ""
msgid "Retry"
@@ -16646,10 +17201,10 @@ msgstr ""
msgid "Scoped issue boards"
msgstr ""
-msgid "Scoped label"
+msgid "Scopes"
msgstr ""
-msgid "Scopes"
+msgid "Scopes can't be blank"
msgstr ""
msgid "Scroll down"
@@ -16742,7 +17297,7 @@ msgstr ""
msgid "Search users or groups"
msgstr ""
-msgid "Search your project dependencies for their licenses and apply policies"
+msgid "Search your project dependencies for their licenses and apply policies."
msgstr ""
msgid "Search your projects"
@@ -16929,6 +17484,9 @@ msgstr ""
msgid "Security Reports|There was an error deleting the comment."
msgstr ""
+msgid "Security Reports|There was an error dismissing the vulnerabilities."
+msgstr ""
+
msgid "Security Reports|There was an error dismissing the vulnerability."
msgstr ""
@@ -16953,10 +17511,10 @@ msgstr ""
msgid "Security dashboard"
msgstr ""
-msgid "Security report is out of date. Please incorporate latest changes from %{targetBranchName}"
+msgid "Security report is out of date. Please update your branch with the latest changes from the target branch (%{targetBranchName})"
msgstr ""
-msgid "Security report is out of date. Retry the pipeline for the target branch."
+msgid "Security report is out of date. Run %{newPipelineLinkStart}a new pipeline%{newPipelineLinkEnd} for the target branch (%{targetBranchName})"
msgstr ""
msgid "SecurityConfiguration|Configured"
@@ -16965,7 +17523,7 @@ msgstr ""
msgid "SecurityConfiguration|Feature"
msgstr ""
-msgid "SecurityConfiguration|Feature documentation"
+msgid "SecurityConfiguration|Feature documentation for %{featureName}"
msgstr ""
msgid "SecurityConfiguration|Not yet configured"
@@ -17085,6 +17643,9 @@ msgstr ""
msgid "Select a project to read Insights configuration file"
msgstr ""
+msgid "Select a reason"
+msgstr ""
+
msgid "Select a repository"
msgstr ""
@@ -17112,6 +17673,9 @@ msgstr ""
msgid "Select group or project"
msgstr ""
+msgid "Select groups to replicate"
+msgstr ""
+
msgid "Select labels"
msgstr ""
@@ -17145,6 +17709,9 @@ msgstr ""
msgid "Select source branch"
msgstr ""
+msgid "Select strategy activation method"
+msgstr ""
+
msgid "Select target branch"
msgstr ""
@@ -17163,9 +17730,6 @@ msgstr ""
msgid "Select user"
msgstr ""
-msgid "Select your role"
-msgstr ""
-
msgid "Selected levels cannot be used by non-admin users for groups, projects or snippets. If the public level is restricted, user profiles are only visible to logged in users."
msgstr ""
@@ -17505,13 +18069,13 @@ msgstr ""
msgid "Settings"
msgstr ""
-msgid "Settings to prevent self-approval across all projects in the instance. Only an administrator can modify these settings."
+msgid "Settings related to the use and experience of using GitLab's Package Registry."
msgstr ""
-msgid "Severity: %{severity}"
+msgid "Settings to prevent self-approval across all projects in the instance. Only an administrator can modify these settings."
msgstr ""
-msgid "Shards selected: %{count}"
+msgid "Severity: %{severity}"
msgstr ""
msgid "Shards to synchronize"
@@ -17580,6 +18144,9 @@ msgstr ""
msgid "Show latest version"
msgstr ""
+msgid "Show me how"
+msgstr ""
+
msgid "Show only direct members"
msgstr ""
@@ -17630,6 +18197,9 @@ msgstr ""
msgid "Sidebar|Only numeral characters allowed"
msgstr ""
+msgid "Sidebar|Status"
+msgstr ""
+
msgid "Sidebar|Weight"
msgstr ""
@@ -17714,6 +18284,9 @@ msgstr ""
msgid "Size settings for static websites"
msgstr ""
+msgid "Skip older, pending deployment jobs"
+msgstr ""
+
msgid "Skip this for now"
msgstr ""
@@ -17723,9 +18296,27 @@ msgstr ""
msgid "Slack application"
msgstr ""
+msgid "Slack channels (e.g. general, development)"
+msgstr ""
+
msgid "Slack integration allows you to interact with GitLab via slash commands in a chat window."
msgstr ""
+msgid "SlackIntegration|%{webhooks_link_start}Add an incoming webhook%{webhooks_link_end} in your Slack team. The default channel can be overridden for each event."
+msgstr ""
+
+msgid "SlackIntegration|<strong>Note:</strong> Usernames and private channels are not supported."
+msgstr ""
+
+msgid "SlackIntegration|Paste the <strong>Webhook URL</strong> into the field below."
+msgstr ""
+
+msgid "SlackIntegration|Select events below to enable notifications. The <strong>Slack channel names</strong> and <strong>Slack username</strong> fields are optional."
+msgstr ""
+
+msgid "SlackIntegration|This service send notifications about projects' events to Slack channels. To set up this service:"
+msgstr ""
+
msgid "SlackService|2. Paste the <strong>Token</strong> into the field below"
msgstr ""
@@ -17750,9 +18341,6 @@ msgstr ""
msgid "Smartcard authentication failed: client certificate header is missing."
msgstr ""
-msgid "Snippet Contents"
-msgstr ""
-
msgid "Snippets"
msgstr ""
@@ -17786,6 +18374,9 @@ msgstr ""
msgid "Snippets|Optionally add a description about what your snippet does or how to use it..."
msgstr ""
+msgid "Snippets|Optionally add a description about what your snippet does or how to use it…"
+msgstr ""
+
msgid "Snowplow"
msgstr ""
@@ -17840,22 +18431,16 @@ msgstr ""
msgid "Something went wrong while deleting description changes. Please try again."
msgstr ""
-msgid "Something went wrong while deleting the image."
-msgstr ""
-
msgid "Something went wrong while deleting the package."
msgstr ""
msgid "Something went wrong while deleting the source branch. Please try again."
msgstr ""
-msgid "Something went wrong while deleting the tag."
-msgstr ""
-
-msgid "Something went wrong while deleting the tags."
+msgid "Something went wrong while deleting your note. Please try again."
msgstr ""
-msgid "Something went wrong while deleting your note. Please try again."
+msgid "Something went wrong while deploying this environment. Please try again."
msgstr ""
msgid "Something went wrong while editing your comment. Please try again."
@@ -17879,6 +18464,9 @@ msgstr ""
msgid "Something went wrong while fetching projects"
msgstr ""
+msgid "Something went wrong while fetching projects."
+msgstr ""
+
msgid "Something went wrong while fetching related merge requests."
msgstr ""
@@ -17897,9 +18485,6 @@ msgstr ""
msgid "Something went wrong while fetching the registry list."
msgstr ""
-msgid "Something went wrong while fetching the tags list."
-msgstr ""
-
msgid "Something went wrong while initializing the OpenAPI viewer"
msgstr ""
@@ -18176,6 +18761,9 @@ msgstr ""
msgid "Specific Runners"
msgstr ""
+msgid "Specified URL cannot be used."
+msgstr ""
+
msgid "Specify an e-mail address regex pattern to identify default internal users."
msgstr ""
@@ -18200,21 +18788,12 @@ msgstr ""
msgid "Stage & Commit"
msgstr ""
-msgid "Stage all changes"
-msgstr ""
-
msgid "Stage data updated"
msgstr ""
msgid "Stage removed"
msgstr ""
-msgid "Staged"
-msgstr ""
-
-msgid "Staged %{type}"
-msgstr ""
-
msgid "Star a label to make it a priority label. Order the prioritized labels to change their relative priority, by dragging."
msgstr ""
@@ -18347,13 +18926,52 @@ msgstr ""
msgid "Status:"
msgstr ""
-msgid "Stay updated about the performance and health of your environment by configuring Prometheus to monitor your deployments."
+msgid "Status: %{title}"
msgstr ""
-msgid "Stop Terminal"
+msgid "StatusPage|AWS Secret access key"
+msgstr ""
+
+msgid "StatusPage|AWS access key ID"
+msgstr ""
+
+msgid "StatusPage|AWS documentation"
+msgstr ""
+
+msgid "StatusPage|AWS region"
+msgstr ""
+
+msgid "StatusPage|Active"
+msgstr ""
+
+msgid "StatusPage|Bucket %{docsLink}"
+msgstr ""
+
+msgid "StatusPage|Configure file storage settings to link issues in this project to an external status page."
+msgstr ""
+
+msgid "StatusPage|For help with configuration, visit %{docsLink}"
+msgstr ""
+
+msgid "StatusPage|S3 Bucket name"
+msgstr ""
+
+msgid "StatusPage|Status page"
+msgstr ""
+
+msgid "StatusPage|To publish incidents to an external status page, GitLab will store a JSON file in your Amazon S3 account in a location accessible to your external status page service. Make sure to also set up %{docsLink}"
+msgstr ""
+
+msgid "StatusPage|configuration documentation"
+msgstr ""
+
+msgid "StatusPage|your status page frontend."
msgstr ""
-msgid "Stop environment"
+msgid "Stay updated about the performance and health of your environment by configuring Prometheus to monitor your deployments."
+msgstr ""
+
+msgid "Stop Terminal"
msgstr ""
msgid "Stop impersonation"
@@ -18365,9 +18983,6 @@ msgstr ""
msgid "Stopped"
msgstr ""
-msgid "Stopping this environment is currently not possible as a deployment is in progress"
-msgstr ""
-
msgid "Stopping..."
msgstr ""
@@ -18464,6 +19079,9 @@ msgstr ""
msgid "Subscription deletion failed."
msgstr ""
+msgid "Subscription successfully applied to \"%{group_name}\""
+msgstr ""
+
msgid "Subscription successfully created."
msgstr ""
@@ -18542,9 +19160,6 @@ msgstr ""
msgid "Subscriptions"
msgstr ""
-msgid "Subscriptions allow successfully completed pipelines on the %{default_branch_docs} of the subscribed project to trigger a new pipeline on the default branch of this project."
-msgstr ""
-
msgid "Subtracted"
msgstr ""
@@ -18719,9 +19334,6 @@ msgstr ""
msgid "Tag"
msgstr ""
-msgid "Tag deleted successfully"
-msgstr ""
-
msgid "Tag list:"
msgstr ""
@@ -18740,9 +19352,6 @@ msgstr ""
msgid "Tags"
msgstr ""
-msgid "Tags deleted successfully"
-msgstr ""
-
msgid "Tags feed"
msgstr ""
@@ -18794,7 +19403,7 @@ msgstr ""
msgid "TagsPage|Optionally, add a message to the tag. Leaving this blank creates a %{link_start}lightweight tag.%{link_end}"
msgstr ""
-msgid "TagsPage|Optionally, add release notes to the tag. They will be stored in the GitLab database and displayed on the tags page."
+msgid "TagsPage|Optionally, create a public Release of your project, based on this tag. Release notes are displayed on the %{releases_page_link_start}Releases%{link_end} page. %{docs_link_start}More information%{link_end}"
msgstr ""
msgid "TagsPage|Release notes"
@@ -18878,6 +19487,11 @@ msgstr ""
msgid "Test coverage parsing"
msgstr ""
+msgid "Test coverage: %d hit"
+msgid_plural "Test coverage: %d hits"
+msgstr[0] ""
+msgstr[1] ""
+
msgid "Test failed."
msgstr ""
@@ -18953,6 +19567,9 @@ msgstr ""
msgid "Thanks! Don't show me this again"
msgstr ""
+msgid "That's it, well done!%{celebrate}"
+msgstr ""
+
msgid "The \"%{group_path}\" group allows you to sign in with your Single Sign-On Account"
msgstr ""
@@ -19131,6 +19748,9 @@ msgstr ""
msgid "The issue stage shows the time it takes from creating an issue to assigning the issue to a milestone, or add the issue to a list on your Issue Board. Begin creating issues to see data for this stage."
msgstr ""
+msgid "The license key is invalid. Make sure it is exactly as you received it from GitLab Inc."
+msgstr ""
+
msgid "The license was removed. GitLab has fallen back on the previous license."
msgstr ""
@@ -19167,6 +19787,9 @@ msgstr ""
msgid "The number of times an upload record could not find its file"
msgstr ""
+msgid "The one place for your designs"
+msgstr ""
+
msgid "The passphrase required to decrypt the private key. This is optional and the value is encrypted at rest."
msgstr ""
@@ -19329,6 +19952,9 @@ msgstr ""
msgid "There are no archived projects yet"
msgstr ""
+msgid "There are no changes"
+msgstr ""
+
msgid "There are no charts configured for this page"
msgstr ""
@@ -19365,13 +19991,10 @@ msgstr ""
msgid "There are no projects shared with this group yet"
msgstr ""
-msgid "There are no staged changes"
+msgid "There are no variables yet."
msgstr ""
-msgid "There are no unstaged changes"
-msgstr ""
-
-msgid "There is a limit of 100 subscriptions from or to a project."
+msgid "There is a limit of %{ci_project_subscriptions_limit} subscriptions from or to a project."
msgstr ""
msgid "There is already a repository with that name on disk"
@@ -19425,7 +20048,19 @@ msgstr ""
msgid "There was an error fetching median data for stages"
msgstr ""
-msgid "There was an error fetching the Designs"
+msgid "There was an error fetching the %{replicableType}"
+msgstr ""
+
+msgid "There was an error fetching the Node's Groups"
+msgstr ""
+
+msgid "There was an error fetching the environments information."
+msgstr ""
+
+msgid "There was an error fetching the top labels for the selected group"
+msgstr ""
+
+msgid "There was an error fetching the variables."
msgstr ""
msgid "There was an error fetching value stream analytics stages."
@@ -19461,12 +20096,21 @@ msgstr ""
msgid "There was an error subscribing to this label."
msgstr ""
-msgid "There was an error syncing the Design Repositories."
+msgid "There was an error syncing project %{name}"
+msgstr ""
+
+msgid "There was an error syncing the %{replicableType}"
msgstr ""
msgid "There was an error trying to validate your query"
msgstr ""
+msgid "There was an error updating the dashboard, branch name is invalid."
+msgstr ""
+
+msgid "There was an error updating the dashboard, branch named: %{branch} already exists."
+msgstr ""
+
msgid "There was an error when reseting email token."
msgstr ""
@@ -19593,6 +20237,12 @@ msgstr ""
msgid "This environment has no deployments yet."
msgstr ""
+msgid "This environment is being deployed"
+msgstr ""
+
+msgid "This environment is being re-deployed"
+msgstr ""
+
msgid "This epic already has the maximum number of child epics."
msgstr ""
@@ -19608,6 +20258,9 @@ msgstr ""
msgid "This group"
msgstr ""
+msgid "This group cannot be invited to a project inside a group with enforced SSO"
+msgstr ""
+
msgid "This group does not provide any group Runners yet."
msgstr ""
@@ -19764,6 +20417,9 @@ msgstr ""
msgid "This job will automatically run after its timer finishes. Often they are used for incremental roll-out deploys to production environments. When unscheduled it converts into a manual action."
msgstr ""
+msgid "This license has already expired."
+msgstr ""
+
msgid "This may expose confidential information as the selected fork is in another namespace that can have other members."
msgstr ""
@@ -19815,7 +20471,7 @@ msgstr ""
msgid "This project is archived and cannot be commented on."
msgstr ""
-msgid "This project path either does not exist or is private."
+msgid "This project path either does not exist or you do not have access."
msgstr ""
msgid "This project will be removed on %{date}"
@@ -19860,6 +20516,9 @@ msgstr ""
msgid "This user will be the author of all events in the activity feed that are the result of an update, like new branches being created or new commits being pushed to existing branches. Upon creation or when reassigning you can only assign yourself to be the mirror user."
msgstr ""
+msgid "This variable can not be masked"
+msgstr ""
+
msgid "This will help us personalize your onboarding experience."
msgstr ""
@@ -19875,10 +20534,10 @@ msgstr ""
msgid "Those emails automatically become issues (with the comments becoming the email conversation) listed here."
msgstr ""
-msgid "Threat Monitoring"
+msgid "Thread to reply to cannot be found"
msgstr ""
-msgid "ThreatMonitoring|A Web Application Firewall (WAF) provides monitoring and rules to protect production applications. GitLab adds the modsecurity WAF plug-in when you install the Ingress app in your Kubernetes cluster."
+msgid "Threat Monitoring"
msgstr ""
msgid "ThreatMonitoring|Anomalous Requests"
@@ -19887,22 +20546,34 @@ msgstr ""
msgid "ThreatMonitoring|At this time, threat monitoring only supports WAF data."
msgstr ""
+msgid "ThreatMonitoring|Container Network Policy"
+msgstr ""
+
+msgid "ThreatMonitoring|Dropped Packets"
+msgstr ""
+
msgid "ThreatMonitoring|Environment"
msgstr ""
msgid "ThreatMonitoring|No traffic to display"
msgstr ""
+msgid "ThreatMonitoring|Operations Per Second"
+msgstr ""
+
+msgid "ThreatMonitoring|Packet Activity"
+msgstr ""
+
msgid "ThreatMonitoring|Requests"
msgstr ""
msgid "ThreatMonitoring|Show last"
msgstr ""
-msgid "ThreatMonitoring|Something went wrong, unable to fetch WAF statistics"
+msgid "ThreatMonitoring|Something went wrong, unable to fetch environments"
msgstr ""
-msgid "ThreatMonitoring|Something went wrong, unable to fetch environments"
+msgid "ThreatMonitoring|Something went wrong, unable to fetch statistics"
msgstr ""
msgid "ThreatMonitoring|The graph below is an overview of traffic coming to your application as tracked by the Web Application Firewall (WAF). View the docs for instructions on how to access the WAF logs to see what type of malicious traffic is trying to access your app. The docs link is also accessible by clicking the \"?\" icon next to the title below."
@@ -19914,13 +20585,25 @@ msgstr ""
msgid "ThreatMonitoring|Threat Monitoring help page link"
msgstr ""
+msgid "ThreatMonitoring|Threat monitoring is not enabled"
+msgstr ""
+
+msgid "ThreatMonitoring|Threat monitoring provides security monitoring and rules to protect production applications."
+msgstr ""
+
msgid "ThreatMonitoring|Time"
msgstr ""
+msgid "ThreatMonitoring|Total Packets"
+msgstr ""
+
msgid "ThreatMonitoring|Total Requests"
msgstr ""
-msgid "ThreatMonitoring|Web Application Firewall not enabled"
+msgid "ThreatMonitoring|Web Application Firewall"
+msgstr ""
+
+msgid "ThreatMonitoring|While it's rare to have no traffic coming to your application, it can happen. In any event, we ask that you double check your settings to make sure you've set up the Network Policies correctly."
msgstr ""
msgid "ThreatMonitoring|While it's rare to have no traffic coming to your application, it can happen. In any event, we ask that you double check your settings to make sure you've set up the WAF correctly."
@@ -19971,6 +20654,9 @@ msgstr ""
msgid "Time to merge"
msgstr ""
+msgid "Time to subtract exceeds the total time spent"
+msgstr ""
+
msgid "Time tracking"
msgstr ""
@@ -20359,9 +21045,15 @@ msgstr ""
msgid "Total artifacts size: %{total_size}"
msgstr ""
+msgid "Total cores (vCPUs)"
+msgstr ""
+
msgid "Total issues"
msgstr ""
+msgid "Total memory (GB)"
+msgstr ""
+
msgid "Total test time for all commits/merges"
msgstr ""
@@ -20392,6 +21084,9 @@ msgstr ""
msgid "Track your project with Audit Events."
msgstr ""
+msgid "Transfer ownership"
+msgstr ""
+
msgid "Transfer project"
msgstr ""
@@ -20527,6 +21222,9 @@ msgstr ""
msgid "Two-Factor Authentication"
msgstr ""
+msgid "Two-Factor Authentication code"
+msgstr ""
+
msgid "Two-factor Authentication"
msgstr ""
@@ -20581,6 +21279,12 @@ msgstr ""
msgid "Unable to connect to server: %{error}"
msgstr ""
+msgid "Unable to convert Kubernetes logs encoding to UTF-8"
+msgstr ""
+
+msgid "Unable to fetch unscanned projects"
+msgstr ""
+
msgid "Unable to fetch vulnerable projects"
msgstr ""
@@ -20638,6 +21342,12 @@ msgstr ""
msgid "Uninstalling"
msgstr ""
+msgid "Units|ms"
+msgstr ""
+
+msgid "Units|s"
+msgstr ""
+
msgid "Unknown"
msgstr ""
@@ -20683,43 +21393,46 @@ msgstr ""
msgid "Unmarks this %{noun} as Work In Progress."
msgstr ""
-msgid "Unresolve"
+msgid "Unreachable"
msgstr ""
-msgid "Unresolve discussion"
+msgid "Unresolve"
msgstr ""
msgid "Unresolve thread"
msgstr ""
-msgid "UnscannedProjects|Default branch scanning by project"
+msgid "Unresolved"
msgstr ""
-msgid "UnscannedProjects|Out of date"
+msgid "UnscannedProjects|15 or more days"
msgstr ""
-msgid "UnscannedProjects|Project scanning"
+msgid "UnscannedProjects|30 or more days"
msgstr ""
-msgid "UnscannedProjects|Untested"
+msgid "UnscannedProjects|5 or more days"
msgstr ""
-msgid "UnscannedProjects|Your projects are up do date! Nice job!"
+msgid "UnscannedProjects|60 or more days"
msgstr ""
-msgid "Unschedule job"
+msgid "UnscannedProjects|Default branch scanning by project"
msgstr ""
-msgid "Unstage"
+msgid "UnscannedProjects|Out of date"
msgstr ""
-msgid "Unstage all changes"
+msgid "UnscannedProjects|Project scanning"
+msgstr ""
+
+msgid "UnscannedProjects|Untested"
msgstr ""
-msgid "Unstaged"
+msgid "UnscannedProjects|Your projects are up do date! Nice job!"
msgstr ""
-msgid "Unstaged %{type}"
+msgid "Unschedule job"
msgstr ""
msgid "Unstar"
@@ -20782,6 +21495,9 @@ msgstr ""
msgid "Update now"
msgstr ""
+msgid "Update variable"
+msgstr ""
+
msgid "Update your bookmarked URLs as filtered/sorted branches URL has been changed."
msgstr ""
@@ -20803,6 +21519,15 @@ msgstr ""
msgid "UpdateProject|Project could not be updated!"
msgstr ""
+msgid "UpdateRepositoryStorage|Error moving repository storage for %{project_full_path} - %{message}"
+msgstr ""
+
+msgid "UpdateRepositoryStorage|Failed to verify %{type} repository checksum from %{old} to %{new}"
+msgstr ""
+
+msgid "UpdateRepositoryStorage|Timeout waiting for %{type} repository pushes"
+msgstr ""
+
msgid "Updated"
msgstr ""
@@ -20854,6 +21579,12 @@ msgstr ""
msgid "Upload a certificate for your domain with all intermediates"
msgstr ""
+msgid "Upload a new license in the admin area to ensure uninterrupted service."
+msgstr ""
+
+msgid "Upload a new license in the admin area to restore service."
+msgstr ""
+
msgid "Upload a private key for your certificate"
msgstr ""
@@ -20962,6 +21693,9 @@ msgstr ""
msgid "Use an one time password authenticator on your mobile device or computer to enable two-factor authentication (2FA)."
msgstr ""
+msgid "Use custom color #FF0000"
+msgstr ""
+
msgid "Use group milestones to manage issues from multiple projects in the same milestone."
msgstr ""
@@ -21007,6 +21741,9 @@ msgstr ""
msgid "User Cohorts are only shown when the %{usage_ping_link_start}usage ping%{usage_ping_link_end} is enabled."
msgstr ""
+msgid "User IDs"
+msgstr ""
+
msgid "User OAuth applications"
msgstr ""
@@ -21025,6 +21762,9 @@ msgstr ""
msgid "User identity was successfully updated."
msgstr ""
+msgid "User is not allowed to resolve thread"
+msgstr ""
+
msgid "User key was successfully removed."
msgstr ""
@@ -21361,6 +22101,9 @@ msgstr ""
msgid "Value"
msgstr ""
+msgid "Value Stream"
+msgstr ""
+
msgid "Value Stream Analytics"
msgstr ""
@@ -21370,6 +22113,12 @@ msgstr ""
msgid "Value Stream Analytics gives an overview of how much time it takes to go from idea to production in your project."
msgstr ""
+msgid "Var"
+msgstr ""
+
+msgid "Variable will be masked in job logs."
+msgstr ""
+
msgid "Variables"
msgstr ""
@@ -21447,6 +22196,9 @@ msgstr ""
msgid "View group labels"
msgstr ""
+msgid "View incident issues."
+msgstr ""
+
msgid "View issue"
msgstr ""
@@ -21471,6 +22223,9 @@ msgstr ""
msgid "View open merge request"
msgstr ""
+msgid "View performance dashboard."
+msgstr ""
+
msgid "View project"
msgstr ""
@@ -21480,12 +22235,18 @@ msgstr ""
msgid "View replaced file @ "
msgstr ""
+msgid "View supported languages and frameworks"
+msgstr ""
+
msgid "View the documentation"
msgstr ""
msgid "View the latest successful deployment to this environment"
msgstr ""
+msgid "View the performance dashboard at"
+msgstr ""
+
msgid "Viewing commit"
msgstr ""
@@ -21534,6 +22295,9 @@ msgstr ""
msgid "VisualReviewApp|%{stepStart}Step 4%{stepEnd}. Leave feedback in the Review App."
msgstr ""
+msgid "VisualReviewApp|Cancel"
+msgstr ""
+
msgid "VisualReviewApp|Copy merge request ID"
msgstr ""
@@ -21546,13 +22310,16 @@ msgstr ""
msgid "VisualReviewApp|Follow the steps below to enable Visual Reviews inside your application."
msgstr ""
+msgid "VisualReviewApp|No review app found or available."
+msgstr ""
+
msgid "VisualReviewApp|Open review app"
msgstr ""
msgid "VisualReviewApp|Review"
msgstr ""
-msgid "VisualReviewApp|Steps 1 and 2 (and sometimes 3) are performed once by the developer before requesting feedback. Steps 3 (if necessary), 4, and 5 are performed by the reviewer each time they perform a review."
+msgid "VisualReviewApp|Steps 1 and 2 (and sometimes 3) are performed once by the developer before requesting feedback. Steps 3 (if necessary), 4 is performed by the reviewer each time they perform a review."
msgstr ""
msgid "Vulnerabilities"
@@ -21582,12 +22349,18 @@ msgstr ""
msgid "VulnerabilityManagement|Confirm"
msgstr ""
+msgid "VulnerabilityManagement|Create issue"
+msgstr ""
+
msgid "VulnerabilityManagement|Dismiss"
msgstr ""
msgid "VulnerabilityManagement|Resolved"
msgstr ""
+msgid "VulnerabilityManagement|Something went wrong, could not create an issue."
+msgstr ""
+
msgid "VulnerabilityManagement|Something went wrong, could not update vulnerability state."
msgstr ""
@@ -21618,6 +22391,9 @@ msgstr ""
msgid "Vulnerability|Links"
msgstr ""
+msgid "Vulnerability|Method"
+msgstr ""
+
msgid "Vulnerability|Namespace"
msgstr ""
@@ -21648,6 +22424,9 @@ msgstr ""
msgid "Warning:"
msgstr ""
+msgid "Warning: Displaying this diagram might cause performance issues on this page."
+msgstr ""
+
msgid "We could not determine the path to remove the epic"
msgstr ""
@@ -21693,6 +22472,15 @@ msgstr ""
msgid "WebIDE|Merge request"
msgstr ""
+msgid "Webhook"
+msgstr ""
+
+msgid "Webhook Logs"
+msgstr ""
+
+msgid "Webhook Settings"
+msgstr ""
+
msgid "Webhooks"
msgstr ""
@@ -21702,6 +22490,9 @@ msgstr ""
msgid "Webhooks allow you to trigger a URL if, for example, new code is pushed or a new issue is created. You can configure webhooks to listen for specific events like pushes, issues or merge requests. Group webhooks will apply to all projects in a group, allowing you to standardize webhook functionality across your entire group."
msgstr ""
+msgid "Webhooks have moved. They can now be found under the Settings menu."
+msgstr ""
+
msgid "Wednesday"
msgstr ""
@@ -21741,12 +22532,18 @@ msgstr ""
msgid "What are you searching for?"
msgstr ""
+msgid "When a deployment job is successful, skip older deployment jobs that are still pending"
+msgstr ""
+
msgid "When a runner is locked, it cannot be assigned to other projects"
msgstr ""
msgid "When enabled, any user visiting %{host} will be able to create an account."
msgstr ""
+msgid "When enabled, if an NPM package isn't found in the GitLab Registry, we will attempt to pull from the global NPM registry."
+msgstr ""
+
msgid "When enabled, users cannot use GitLab until the terms have been accepted."
msgstr ""
@@ -21791,9 +22588,15 @@ msgstr ""
msgid "Who will be able to see this group?"
msgstr ""
+msgid "Who will be using GitLab?"
+msgstr ""
+
msgid "Who will be using this GitLab subscription?"
msgstr ""
+msgid "Who will be using this GitLab trial?"
+msgstr ""
+
msgid "Wiki"
msgstr ""
@@ -21869,7 +22672,7 @@ msgstr ""
msgid "WikiMarkdownDocs|documentation"
msgstr ""
-msgid "WikiMarkdownTip|To link to a (new) page, simply type %{link_example}"
+msgid "WikiMarkdownTip|To link to a (new) page, simply type <code class=\"js-markup-link-example\">%{link_example}</code>"
msgstr ""
msgid "WikiNewPageTip|Tip: You can specify the full path for the new file. We will automatically create any missing directories."
@@ -21944,6 +22747,9 @@ msgstr ""
msgid "Withdraw Access Request"
msgstr ""
+msgid "Won't fix / Accept risk"
+msgstr ""
+
msgid "Work in progress Limit"
msgstr ""
@@ -21989,6 +22795,9 @@ msgstr ""
msgid "You"
msgstr ""
+msgid "You are about to transfer the control of your account to %{group_name} group. This action is NOT reversible, you won't be able to access any of your groups and projects outside of %{group_name} once this transfer is complete."
+msgstr ""
+
msgid "You are an admin, which means granting access to <strong>%{client_name}</strong> will allow them to interact with GitLab as an admin as well. Proceed with caution."
msgstr ""
@@ -22013,6 +22822,9 @@ msgstr ""
msgid "You are going to transfer %{project_full_name} to another owner. Are you ABSOLUTELY sure?"
msgstr ""
+msgid "You are not allowed to push into this branch. Create another branch or open a merge request."
+msgstr ""
+
msgid "You are not allowed to unlink your primary login account"
msgstr ""
@@ -22028,6 +22840,9 @@ msgstr ""
msgid "You are receiving this message because you are a GitLab administrator for %{url}."
msgstr ""
+msgid "You are trying to upload something other than an image. Please upload a .png, .jpg, .jpeg, .gif, .bmp, .tiff or .ico."
+msgstr ""
+
msgid "You can %{linkStart}view the blob%{linkEnd} instead."
msgstr ""
@@ -22112,6 +22927,9 @@ msgstr ""
msgid "You can only transfer the project to namespaces you manage."
msgstr ""
+msgid "You can only upload one design when dropping onto an existing design."
+msgstr ""
+
msgid "You can resolve the merge conflict using either the Interactive mode, by choosing %{use_ours} or %{use_theirs} buttons, or by editing the files directly. Commit these changes into %{branch_name}"
msgstr ""
@@ -22133,9 +22951,6 @@ msgstr ""
msgid "You can try again using %{begin_link}basic search%{end_link}"
msgstr ""
-msgid "You can't commit to this project"
-msgstr ""
-
msgid "You cannot access the raw file. Please wait a minute."
msgstr ""
@@ -22187,6 +23002,9 @@ msgstr ""
msgid "You don't have any deployments right now."
msgstr ""
+msgid "You don't have any open merge requests"
+msgstr ""
+
msgid "You don't have any projects available."
msgstr ""
@@ -22265,6 +23083,9 @@ msgstr ""
msgid "You must set up incoming email before it becomes active."
msgstr ""
+msgid "You must upload a file with the same file name when dropping onto an existing design."
+msgstr ""
+
msgid "You need a different license to enable FileLocks feature"
msgstr ""
@@ -22280,6 +23101,9 @@ msgstr ""
msgid "You need to register a two-factor authentication app before you can set up a U2F device."
msgstr ""
+msgid "You need to set terms to be enforced"
+msgstr ""
+
msgid "You need to specify both an Access Token and a Host URL."
msgstr ""
@@ -22292,10 +23116,13 @@ msgstr ""
msgid "You tried to fork %{link_to_the_project} but it failed for the following reason:"
msgstr ""
+msgid "You will be removed from existing projects/groups"
+msgstr ""
+
msgid "You will lose all changes you've made to this file. This action cannot be undone."
msgstr ""
-msgid "You will lose all the unstaged changes you've made in this project. This action cannot be undone."
+msgid "You will lose all uncommitted changes you've made in this project. This action cannot be undone."
msgstr ""
msgid "You will need to update your local repositories to point to the new location."
@@ -22319,9 +23146,6 @@ msgstr ""
msgid "You won't be able to pull or push project code via %{protocol} until you %{set_password_link} on your account"
msgstr ""
-msgid "You won't be able to pull or push project code via SSH until you %{add_ssh_key_link} to your profile"
-msgstr ""
-
msgid "You won't be able to pull or push project code via SSH until you add an SSH key to your profile"
msgstr ""
@@ -22472,6 +23296,9 @@ msgstr ""
msgid "Your dashboard has been copied. You can %{web_ide_link_start}edit it here%{web_ide_link_end}."
msgstr ""
+msgid "Your dashboard has been updated. You can %{web_ide_link_start}edit it here%{web_ide_link_end}."
+msgstr ""
+
msgid "Your deployment services will be broken, you will need to manually fix the services after renaming."
msgstr ""
@@ -22487,9 +23314,15 @@ msgstr ""
msgid "Your issues will be imported in the background. Once finished, you'll get a confirmation email."
msgstr ""
+msgid "Your license expired on %{expires_at}."
+msgstr ""
+
msgid "Your license is valid from"
msgstr ""
+msgid "Your license will expire in %{remaining_days}."
+msgstr ""
+
msgid "Your message here"
msgstr ""
@@ -22520,12 +23353,21 @@ msgstr ""
msgid "Your request for access has been queued for review."
msgstr ""
+msgid "Your trial license expired on %{expires_at}."
+msgstr ""
+
+msgid "Your trial license will expire in %{remaining_days}."
+msgstr ""
+
msgid "Zoom meeting added"
msgstr ""
msgid "Zoom meeting removed"
msgstr ""
+msgid "[No reason]"
+msgstr ""
+
msgid "a deleted user"
msgstr ""
@@ -22537,6 +23379,9 @@ msgid_plural "about %d hours"
msgstr[0] ""
msgstr[1] ""
+msgid "activated"
+msgstr ""
+
msgid "added %{created_at_timeago}"
msgstr ""
@@ -22709,9 +23554,6 @@ msgstr ""
msgid "ciReport|Base pipeline codequality artifact not found"
msgstr ""
-msgid "ciReport|Class"
-msgstr ""
-
msgid "ciReport|Code quality"
msgstr ""
@@ -22742,9 +23584,6 @@ msgstr ""
msgid "ciReport|Dependency scanning"
msgstr ""
-msgid "ciReport|Description"
-msgstr ""
-
msgid "ciReport|Download patch to resolve"
msgstr ""
@@ -22757,19 +23596,10 @@ msgstr ""
msgid "ciReport|Failed to load %{reportName} report"
msgstr ""
-msgid "ciReport|File"
-msgstr ""
-
msgid "ciReport|Fixed:"
msgstr ""
-msgid "ciReport|Identifiers"
-msgstr ""
-
-msgid "ciReport|Image"
-msgstr ""
-
-msgid "ciReport|Instances"
+msgid "ciReport|Found %{issuesWithCount}"
msgstr ""
msgid "ciReport|Investigate this vulnerability by creating an issue"
@@ -22778,27 +23608,21 @@ msgstr ""
msgid "ciReport|Learn more about interacting with security reports"
msgstr ""
-msgid "ciReport|Links"
-msgstr ""
-
msgid "ciReport|Loading %{reportName} report"
msgstr ""
msgid "ciReport|Manage licenses"
msgstr ""
-msgid "ciReport|Method"
-msgstr ""
-
-msgid "ciReport|Namespace"
-msgstr ""
-
msgid "ciReport|No changes to code quality"
msgstr ""
msgid "ciReport|No changes to performance metrics"
msgstr ""
+msgid "ciReport|No code quality issues found"
+msgstr ""
+
msgid "ciReport|Performance metrics"
msgstr ""
@@ -22814,9 +23638,6 @@ msgstr ""
msgid "ciReport|Security scanning failed loading any results"
msgstr ""
-msgid "ciReport|Severity"
-msgstr ""
-
msgid "ciReport|Solution"
msgstr ""
@@ -22832,6 +23653,9 @@ msgstr ""
msgid "ciReport|There was an error dismissing the vulnerability. Please try again."
msgstr ""
+msgid "ciReport|There was an error fetching the codequality report."
+msgstr ""
+
msgid "ciReport|There was an error reverting the dismissal. Please try again."
msgstr ""
@@ -22946,10 +23770,10 @@ msgstr ""
msgid "estimateCommand|%{slash_command} will update the estimated time with the latest command."
msgstr ""
-msgid "exceeds the limit of %{bytes} bytes for directory names"
+msgid "exceeds the limit of %{bytes} bytes"
msgstr ""
-msgid "exceeds the limit of %{bytes} bytes for page titles"
+msgid "exceeds the limit of %{bytes} bytes for directory name \"%{dirname}\""
msgstr ""
msgid "expired on %{milestone_due_date}"
@@ -23073,9 +23897,18 @@ msgstr ""
msgid "is not a valid X509 certificate."
msgstr ""
+msgid "is not a valid regular expression"
+msgstr ""
+
+msgid "is not allowed for sign-up"
+msgstr ""
+
msgid "is not an email you own"
msgstr ""
+msgid "is not in the group enforcing Group Managed Account"
+msgstr ""
+
msgid "is too long (%{current_value}). The maximum size is %{max_size}."
msgstr ""
@@ -23216,6 +24049,9 @@ msgstr ""
msgid "mrWidget|Approved by"
msgstr ""
+msgid "mrWidget|Are you adding technical debt or code vulnerabilities?"
+msgstr ""
+
msgid "mrWidget|Cancel automatic merge"
msgstr ""
@@ -23249,6 +24085,9 @@ msgstr ""
msgid "mrWidget|Deployment statistics are not available currently"
msgstr ""
+msgid "mrWidget|Detect issues before deployment with a CI pipeline"
+msgstr ""
+
msgid "mrWidget|Did not close"
msgstr ""
@@ -23426,6 +24265,9 @@ msgstr ""
msgid "mrWidget|Your password"
msgstr ""
+msgid "mrWidget|a quick guide that'll show you how to create"
+msgstr ""
+
msgid "mrWidget|branch does not exist."
msgstr ""
@@ -23435,6 +24277,15 @@ msgstr ""
msgid "mrWidget|into"
msgstr ""
+msgid "mrWidget|one. Make your code more secure and more"
+msgstr ""
+
+msgid "mrWidget|robust in just a minute."
+msgstr ""
+
+msgid "mrWidget|that continuously tests your code. We created"
+msgstr ""
+
msgid "mrWidget|to be added to the merge train when the pipeline succeeds"
msgstr ""
@@ -23586,6 +24437,9 @@ msgstr ""
msgid "security Reports|There was an error creating the merge request"
msgstr ""
+msgid "settings saved, but not activated"
+msgstr ""
+
msgid "severity|Critical"
msgstr ""
@@ -23658,6 +24512,18 @@ msgstr ""
msgid "success"
msgstr ""
+msgid "suggestPipeline|1/2: Choose a template"
+msgstr ""
+
+msgid "suggestPipeline|2/2: Commit your changes"
+msgstr ""
+
+msgid "suggestPipeline|Commit the changes and your pipeline will automatically run for the first time."
+msgstr ""
+
+msgid "suggestPipeline|We recommend the %{boldStart}Code Quality%{boldEnd} template, which will add a report widget to your Merge Requests. This way you’ll learn about code quality degradations much sooner. %{footerStart} Goodbye technical debt! %{footerEnd}"
+msgstr ""
+
msgid "syntax is correct"
msgstr ""
diff --git a/locale/unfound_translations.rb b/locale/unfound_translations.rb
index 1ae0958c43c..6f7934b4d65 100644
--- a/locale/unfound_translations.rb
+++ b/locale/unfound_translations.rb
@@ -14,4 +14,5 @@ N_('NotificationEvent|Close merge request')
N_('NotificationEvent|Reassign merge request')
N_('NotificationEvent|Merge merge request')
N_('NotificationEvent|Failed pipeline')
+N_('NotificationEvent|Fixed pipeline')
N_('NotificationEvent|New release')
diff --git a/package.json b/package.json
index 5113c25305e..c699da02441 100644
--- a/package.json
+++ b/package.json
@@ -2,6 +2,7 @@
"private": true,
"scripts": {
"check-dependencies": "scripts/frontend/check_dependencies.sh",
+ "block-dependencies": "node scripts/frontend/block_dependencies.js",
"clean": "rm -rf public/assets tmp/cache/*-loader",
"dev-server": "NODE_OPTIONS=\"--max-old-space-size=3584\" nodemon -w 'config/webpack.config.js' --exec 'webpack-dev-server --config config/webpack.config.js'",
"eslint": "eslint --cache --max-warnings 0 --report-unused-disable-directives --ext .js,.vue .",
@@ -31,41 +32,38 @@
"webpack-prod": "NODE_OPTIONS=\"--max-old-space-size=3584\" NODE_ENV=production webpack --config config/webpack.config.js"
},
"dependencies": {
- "@babel/core": "^7.6.2",
- "@babel/plugin-proposal-class-properties": "^7.5.5",
- "@babel/plugin-proposal-json-strings": "^7.2.0",
- "@babel/plugin-proposal-optional-chaining": "^7.7.5",
- "@babel/plugin-proposal-private-methods": "^7.6.0",
- "@babel/plugin-syntax-dynamic-import": "^7.2.0",
- "@babel/plugin-syntax-import-meta": "^7.2.0",
- "@babel/preset-env": "^7.6.2",
+ "@babel/core": "^7.8.4",
+ "@babel/plugin-proposal-class-properties": "^7.8.3",
+ "@babel/plugin-proposal-json-strings": "^7.8.3",
+ "@babel/plugin-proposal-private-methods": "^7.8.3",
+ "@babel/plugin-syntax-import-meta": "^7.8.3",
+ "@babel/preset-env": "^7.8.4",
"@gitlab/at.js": "^1.5.5",
- "@gitlab/svgs": "^1.96.0",
- "@gitlab/ui": "^9.8.0",
+ "@gitlab/svgs": "^1.113.0",
+ "@gitlab/ui": "^9.29.0",
"@gitlab/visual-review-tools": "1.5.1",
"@sentry/browser": "^5.10.2",
- "@sourcegraph/code-host-integration": "0.0.30",
+ "@sourcegraph/code-host-integration": "0.0.31",
"apollo-cache-inmemory": "^1.6.3",
"apollo-client": "^2.6.4",
"apollo-link": "^1.2.11",
"apollo-link-batch-http": "^1.2.11",
"apollo-upload-client": "^10.0.0",
- "autosize": "^4.0.0",
- "aws-sdk": "^2.526.0",
+ "autosize": "^4.0.2",
+ "aws-sdk": "^2.637.0",
"axios": "^0.19.0",
"babel-loader": "^8.0.6",
"babel-plugin-lodash": "^3.3.4",
"bootstrap": "4.3.1",
- "bootstrap-vue": "2.1.0",
"brace-expansion": "^1.1.8",
"cache-loader": "^4.1.0",
"chart.js": "2.7.2",
"classlist-polyfill": "^1.2.0",
"clipboard": "^1.7.1",
- "codesandbox-api": "^0.0.20",
+ "codesandbox-api": "0.0.23",
"compression-webpack-plugin": "^3.0.1",
"copy-webpack-plugin": "^5.0.5",
- "core-js": "^3.2.1",
+ "core-js": "^3.6.4",
"cropper": "^2.3.0",
"css-loader": "^1.0.0",
"d3": "^4.13.0",
@@ -74,15 +72,15 @@
"dateformat": "^3.0.3",
"deckar01-task_list": "^2.3.1",
"diff": "^3.4.0",
- "document-register-element": "1.13.1",
+ "document-register-element": "1.14.3",
"dropzone": "^4.2.0",
"emoji-regex": "^7.0.3",
"emoji-unicode-version": "^0.2.1",
"exports-loader": "^0.7.0",
- "file-loader": "^4.2.0",
- "formdata-polyfill": "^3.0.11",
- "fuzzaldrin-plus": "^0.5.0",
- "glob": "^7.1.2",
+ "file-loader": "^5.1.0",
+ "formdata-polyfill": "^3.0.19",
+ "fuzzaldrin-plus": "^0.6.0",
+ "glob": "^7.1.6",
"graphql": "^14.0.2",
"immer": "^5.2.1",
"imports-loader": "^0.8.0",
@@ -92,60 +90,62 @@
"jquery-ujs": "1.2.2",
"jquery.caret": "^0.3.1",
"jquery.waitforimages": "^2.2.0",
- "js-cookie": "^2.1.3",
+ "js-cookie": "^2.2.1",
"jszip": "^3.1.3",
"jszip-utils": "^0.0.2",
"katex": "^0.10.0",
"lodash": "^4.17.15",
"marked": "^0.3.12",
- "mermaid": "^8.4.5",
+ "mermaid": "^8.4.8",
"monaco-editor": "^0.18.1",
"monaco-editor-webpack-plugin": "^1.7.0",
"mousetrap": "^1.4.6",
"pdfjs-dist": "^2.0.943",
"pikaday": "^1.8.0",
- "popper.js": "^1.14.7",
+ "popper.js": "^1.16.1",
"prismjs": "^1.6.0",
"prosemirror-markdown": "^1.3.0",
"prosemirror-model": "^1.6.4",
"raphael": "^2.2.7",
- "raw-loader": "^3.1.0",
- "sanitize-html": "^1.20.0",
+ "raw-loader": "^4.0.0",
+ "sanitize-html": "^1.22.0",
"select2": "3.5.2-browserify",
- "smooshpack": "^0.0.54",
- "sortablejs": "^1.10.0",
+ "smooshpack": "^0.0.62",
+ "sortablejs": "^1.10.2",
"sql.js": "^0.4.0",
- "stickyfilljs": "^2.0.5",
- "style-loader": "^1.0.0",
+ "stickyfilljs": "^2.1.0",
+ "style-loader": "^1.1.3",
"svg4everybody": "2.1.9",
"swagger-ui-dist": "^3.24.3",
"three": "^0.84.0",
"three-orbit-controls": "^82.1.0",
"three-stl-loader": "^1.0.4",
- "timeago.js": "^4.0.1",
+ "timeago.js": "^4.0.2",
"tiptap": "^1.8.0",
"tiptap-commands": "^1.4.0",
"tiptap-extensions": "^1.8.0",
- "underscore": "^1.9.0",
- "url-loader": "^2.1.0",
+ "tributejs": "4.1.3",
+ "underscore": "^1.9.2",
+ "unfetch": "^4.1.0",
+ "url-loader": "^3.0.0",
"visibilityjs": "^1.2.4",
"vue": "^2.6.10",
"vue-apollo": "^3.0.0-beta.28",
- "vue-loader": "^15.7.1",
+ "vue-loader": "^15.9.0",
"vue-router": "^3.0.2",
"vue-template-compiler": "^2.6.10",
"vue-virtual-scroll-list": "^1.4.4",
"vuedraggable": "^2.23.0",
"vuex": "^3.1.0",
- "webpack": "^4.41.5",
- "webpack-bundle-analyzer": "^3.5.1",
- "webpack-cli": "^3.3.9",
- "webpack-stats-plugin": "^0.3.0",
+ "webpack": "^4.42.0",
+ "webpack-bundle-analyzer": "^3.6.0",
+ "webpack-cli": "^3.3.11",
+ "webpack-stats-plugin": "^0.3.1",
"worker-loader": "^2.0.0",
"xterm": "^3.5.0"
},
"devDependencies": {
- "@babel/plugin-transform-modules-commonjs": "^7.5.0",
+ "@babel/plugin-transform-modules-commonjs": "^7.8.3",
"@gitlab/eslint-config": "^3.0.0",
"@vue/test-utils": "^1.0.0-beta.30",
"axios-mock-adapter": "^1.15.0",
@@ -166,6 +166,9 @@
"gettext-extractor": "^3.4.3",
"gettext-extractor-vue": "^4.0.2",
"graphql-tag": "^2.10.0",
+ "istanbul-lib-coverage": "^3.0.0",
+ "istanbul-lib-report": "^3.0.0",
+ "istanbul-reports": "^3.0.0",
"jasmine-core": "^2.9.0",
"jasmine-diff": "^0.1.3",
"jasmine-jquery": "^2.1.1",
@@ -197,16 +200,19 @@
"stylelint-scss": "^3.9.2",
"timezone-mock": "^1.0.8",
"vue-jest": "^4.0.0-beta.2",
- "webpack-dev-server": "^3.8.1",
+ "webpack-dev-server": "^3.10.3",
"yarn-check-webpack-plugin": "^1.2.0",
"yarn-deduplicate": "^1.1.1"
},
+ "blockedDependencies": {
+ "bootstrap-vue": "https://docs.gitlab.com/ee/development/fe_guide/dependencies.md#bootstrapvue"
+ },
"resolutions": {
"vue-jest/ts-jest": "24.0.0",
"monaco-editor": "0.18.1"
},
"engines": {
- "node": ">=8.10.0",
+ "node": ">=10.13.0",
"yarn": "^1.10.0"
}
-}
+}
\ No newline at end of file
diff --git a/qa/Dockerfile b/qa/Dockerfile
index 126d9fbc591..155a9761a6f 100644
--- a/qa/Dockerfile
+++ b/qa/Dockerfile
@@ -1,5 +1,5 @@
FROM ruby:2.6-stretch
-LABEL maintainer "Grzegorz Bizon <grzegorz@gitlab.com>"
+LABEL maintainer="GitLab Quality Department <quality@gitlab.com>"
ENV DEBIAN_FRONTEND noninteractive
##
diff --git a/qa/Gemfile b/qa/Gemfile
index 58118340f24..86d8aa5c025 100644
--- a/qa/Gemfile
+++ b/qa/Gemfile
@@ -1,7 +1,7 @@
source 'https://rubygems.org'
gem 'gitlab-qa'
-gem 'activesupport', '5.2.3' # This should stay in sync with the root's Gemfile
+gem 'activesupport', '6.0.2' # This should stay in sync with the root's Gemfile
gem 'capybara', '~> 3.29.0'
gem 'capybara-screenshot', '~> 1.0.23'
gem 'rake', '~> 12.3.0'
diff --git a/qa/Gemfile.lock b/qa/Gemfile.lock
index cd73e1b6539..ececdab80d5 100644
--- a/qa/Gemfile.lock
+++ b/qa/Gemfile.lock
@@ -1,11 +1,12 @@
GEM
remote: https://rubygems.org/
specs:
- activesupport (5.2.3)
+ activesupport (6.0.2)
concurrent-ruby (~> 1.0, >= 1.0.2)
i18n (>= 0.7, < 2)
minitest (~> 5.1)
tzinfo (~> 1.1)
+ zeitwerk (~> 2.2)
addressable (2.7.0)
public_suffix (>= 2.0.2, < 5.0)
airborne (0.2.13)
@@ -28,7 +29,7 @@ GEM
launchy
childprocess (3.0.0)
coderay (1.1.2)
- concurrent-ruby (1.1.5)
+ concurrent-ruby (1.1.6)
debase (0.2.4.1)
debase-ruby_core_source (>= 0.10.2)
debase-ruby_core_source (0.10.6)
@@ -40,7 +41,7 @@ GEM
gitlab-qa (4.0.0)
http-cookie (1.0.3)
domain_name (~> 0.5)
- i18n (1.6.0)
+ i18n (1.8.2)
concurrent-ruby (~> 1.0)
knapsack (1.17.1)
rake
@@ -52,7 +53,7 @@ GEM
mime-types-data (3.2016.0521)
mini_mime (1.0.2)
mini_portile2 (2.4.0)
- minitest (5.11.3)
+ minitest (5.14.0)
netrc (0.11.0)
nokogiri (1.10.5)
mini_portile2 (~> 2.4.0)
@@ -100,19 +101,20 @@ GEM
rubyzip (>= 1.2.2)
thread_safe (0.3.6)
timecop (0.9.1)
- tzinfo (1.2.5)
+ tzinfo (1.2.6)
thread_safe (~> 0.1)
unf (0.1.4)
unf_ext
unf_ext (0.0.7.4)
xpath (3.2.0)
nokogiri (~> 1.8)
+ zeitwerk (2.3.0)
PLATFORMS
ruby
DEPENDENCIES
- activesupport (= 5.2.3)
+ activesupport (= 6.0.2)
airborne (~> 0.2.13)
capybara (~> 3.29.0)
capybara-screenshot (~> 1.0.23)
diff --git a/qa/README.md b/qa/README.md
index cdc84da1c5e..c7c6e535963 100644
--- a/qa/README.md
+++ b/qa/README.md
@@ -44,6 +44,14 @@ Note: GitLab QA uses [Selenium WebDriver](https://www.seleniumhq.org/) via
the browser to use. You will need to have Chrome (or Chromium) and
[chromedriver](https://chromedriver.chromium.org/) installed / in your `$PATH`.
+### Writing tests
+
+- [Writing tests from scratch tutorial](../doc/development/testing_guide/end_to_end/quick_start_guide.md)
+ - [Best practices](../doc/development/testing_guide/best_practices.md)
+ - [Using page objects](../doc/development/testing_guide/end_to_end/page_objects.md)
+ - [Guidelines](../doc/development/testing_guide/index.md)
+  - [Tests with special setup for local environments](../doc/development/testing_guide/end_to_end/running_tests_that_require_special_setup.md)
+
### Run the end-to-end tests in a local development environment
Follow the GDK instructions to [prepare](https://gitlab.com/gitlab-org/gitlab-development-kit/blob/master/doc/prepare.md)
@@ -77,13 +85,6 @@ Once you have the license file you can export it as an environment variable and
export EE_LICENSE=$(cat /path/to/gitlab_license)
```
-### Writing tests
-
-- [Writing tests from scratch tutorial](../doc/development/testing_guide/end_to_end/quick_start_guide.md)
- - [Best practices](../doc/development/testing_guide/best_practices.md)
- - [Using page objects](../doc/development/testing_guide/end_to_end/page_objects.md)
- - [Guidelines](../doc/development/testing_guide/index.md)
-
### Running specific tests
You can also supply specific tests to run as another parameter. For example, to
diff --git a/qa/qa.rb b/qa/qa.rb
index fb0c06e33aa..57b622c7d93 100644
--- a/qa/qa.rb
+++ b/qa/qa.rb
@@ -296,7 +296,9 @@ module QA
autoload :Show, 'qa/page/project/operations/kubernetes/show'
end
- autoload :Metrics, 'qa/page/project/operations/metrics'
+ module Metrics
+ autoload :Show, 'qa/page/project/operations/metrics/show'
+ end
end
module Wiki
@@ -427,6 +429,7 @@ module QA
autoload :Gcloud, 'qa/service/cluster_provider/gcloud'
autoload :Minikube, 'qa/service/cluster_provider/minikube'
autoload :K3d, 'qa/service/cluster_provider/k3d'
+ autoload :K3s, 'qa/service/cluster_provider/k3s'
end
module DockerRun
@@ -438,6 +441,7 @@ module QA
autoload :GitlabRunner, 'qa/service/docker_run/gitlab_runner'
autoload :MailHog, 'qa/service/docker_run/mail_hog'
autoload :SamlIdp, 'qa/service/docker_run/saml_idp'
+ autoload :K3s, 'qa/service/docker_run/k3s'
end
end
diff --git a/qa/qa/fixtures/monitored_auto_devops/.gitlab-ci.yml b/qa/qa/fixtures/monitored_auto_devops/.gitlab-ci.yml
index a65ae5aa1d9..d8ca7b591ed 100644
--- a/qa/qa/fixtures/monitored_auto_devops/.gitlab-ci.yml
+++ b/qa/qa/fixtures/monitored_auto_devops/.gitlab-ci.yml
@@ -7,7 +7,7 @@ image: alpine:latest
variables:
# AUTO_DEVOPS_DOMAIN is the application deployment domain and should be set as a variable at the group or project level.
- AUTO_DEVOPS_DOMAIN: my-fake-domain.com
+ AUTO_DEVOPS_DOMAIN: $AUTO_DEVOPS_DOMAIN
POSTGRES_USER: user
POSTGRES_PASSWORD: testing-password
diff --git a/qa/qa/git/repository.rb b/qa/qa/git/repository.rb
index f56fab63198..771f135a95c 100644
--- a/qa/qa/git/repository.rb
+++ b/qa/qa/git/repository.rb
@@ -165,7 +165,7 @@ module QA
# ls-remote is one command known to respond to Git protocol v2 so we use
# it to get output including the version reported via Git tracing
output = run("git ls-remote #{uri}", "GIT_TRACE_PACKET=1")
- output[/git< version (\d+)/, 1] || 'unknown'
+ output.response[/git< version (\d+)/, 1] || 'unknown'
end
def try_add_credentials_to_netrc
diff --git a/qa/qa/page/dashboard/snippet/new.rb b/qa/qa/page/dashboard/snippet/new.rb
index df4c2902b31..da5013e787e 100644
--- a/qa/qa/page/dashboard/snippet/new.rb
+++ b/qa/qa/page/dashboard/snippet/new.rb
@@ -52,7 +52,7 @@ module QA
private
def text_area
- find('#editor>textarea', visible: false)
+ find('#editor textarea', visible: false)
end
end
end
diff --git a/qa/qa/page/group/menu.rb b/qa/qa/page/group/menu.rb
index 230511ce6f6..380984c283e 100644
--- a/qa/qa/page/group/menu.rb
+++ b/qa/qa/page/group/menu.rb
@@ -10,7 +10,6 @@ module QA
element :group_settings_item
element :group_members_item
element :general_settings_link
- element :contribution_analytics_link
end
view 'app/views/layouts/nav/sidebar/_analytics_links.html.haml' do
@@ -24,6 +23,12 @@ module QA
end
end
+ def click_settings
+ within_sidebar do
+ click_element(:group_settings_item)
+ end
+ end
+
def click_contribution_analytics_item
hover_element(:analytics_link) do
within_submenu(:analytics_sidebar_submenu) do
diff --git a/qa/qa/page/merge_request/show.rb b/qa/qa/page/merge_request/show.rb
index b69990bae9c..0da40b35938 100644
--- a/qa/qa/page/merge_request/show.rb
+++ b/qa/qa/page/merge_request/show.rb
@@ -14,6 +14,7 @@ module QA
element :dropdown_toggle
element :download_email_patches
element :download_plain_diff
+ element :open_in_web_ide_button
end
view 'app/assets/javascripts/vue_merge_request_widget/components/mr_widget_pipeline.vue' do
@@ -219,6 +220,10 @@ module QA
def wait_for_loading
finished_loading? && has_no_element?(:skeleton_note)
end
+
+ def click_open_in_web_ide
+ click_element :open_in_web_ide_button
+ end
end
end
end
diff --git a/qa/qa/page/project/issue/show.rb b/qa/qa/page/project/issue/show.rb
index a56083ea25c..8365ecb6348 100644
--- a/qa/qa/page/project/issue/show.rb
+++ b/qa/qa/page/project/issue/show.rb
@@ -127,7 +127,7 @@ module QA
click_element(:edit_link_labels)
labels.each do |label|
- has_element?(:labels_block, text: label)
+ has_element?(:labels_block, text: label, wait: 0)
end
refresh
diff --git a/qa/qa/page/project/operations/kubernetes/add_existing.rb b/qa/qa/page/project/operations/kubernetes/add_existing.rb
index 9f47841366e..c143b55d057 100644
--- a/qa/qa/page/project/operations/kubernetes/add_existing.rb
+++ b/qa/qa/page/project/operations/kubernetes/add_existing.rb
@@ -11,7 +11,7 @@ module QA
element :api_url, 'url_field :api_url' # rubocop:disable QA/ElementWithPattern
element :ca_certificate, 'text_area :ca_cert' # rubocop:disable QA/ElementWithPattern
element :token, 'text_field :token' # rubocop:disable QA/ElementWithPattern
- element :add_cluster_button, "submit s_('ClusterIntegration|Add Kubernetes cluster')" # rubocop:disable QA/ElementWithPattern
+ element :add_kubernetes_cluster_button
element :rbac_checkbox
end
@@ -32,7 +32,7 @@ module QA
end
def add_cluster!
- click_on 'Add Kubernetes cluster'
+ click_element :add_kubernetes_cluster_button, Page::Project::Operations::Kubernetes::Show
end
def uncheck_rbac!
diff --git a/qa/qa/page/project/operations/kubernetes/show.rb b/qa/qa/page/project/operations/kubernetes/show.rb
index 3d3eebdbec9..46fddfa6078 100644
--- a/qa/qa/page/project/operations/kubernetes/show.rb
+++ b/qa/qa/page/project/operations/kubernetes/show.rb
@@ -10,15 +10,39 @@ module QA
element :ingress_ip_address, 'id="ingress-endpoint"' # rubocop:disable QA/ElementWithPattern
end
- view 'app/views/clusters/clusters/_form.html.haml' do
- element :base_domain
- element :save_domain
+ view 'app/views/clusters/clusters/_gitlab_integration_form.html.haml' do
+ element :integration_status_toggle, required: true
+ element :base_domain_field, required: true
+ element :save_changes_button, required: true
+ end
+
+ view 'app/views/clusters/clusters/_details_tab.html.haml' do
+ element :details, required: true
+ end
+
+ view 'app/views/clusters/clusters/_applications_tab.html.haml' do
+ element :applications, required: true
+ end
+
+ view 'app/assets/javascripts/clusters/components/application_row.vue' do
+ element :install_button
+ element :uninstall_button
+ end
+
+ def open_details
+ has_element?(:details, wait: 30)
+ click_element :details
+ end
+
+ def open_applications
+ has_element?(:applications, wait: 30)
+ click_element :applications
end
def install!(application_name)
within_element(application_name) do
has_element?(:install_button, application: application_name, wait: 30)
- click_on 'Install' # TODO replace with click_element
+ click_element :install_button
end
end
@@ -41,11 +65,11 @@ module QA
end
def set_domain(domain)
- fill_element :base_domain, domain
+ fill_element :base_domain_field, domain
end
def save_domain
- click_element :save_domain
+ click_element :save_changes_button, Page::Project::Operations::Kubernetes::Show
end
end
end
diff --git a/qa/qa/page/project/operations/metrics.rb b/qa/qa/page/project/operations/metrics.rb
deleted file mode 100644
index 418cc925186..00000000000
--- a/qa/qa/page/project/operations/metrics.rb
+++ /dev/null
@@ -1,49 +0,0 @@
-# frozen_string_literal: true
-
-module QA
- module Page
- module Project
- module Operations
- class Metrics < Page::Base
- EXPECTED_TITLE = 'Memory Usage (Total)'
- LOADING_MESSAGE = 'Waiting for performance data'
-
- view 'app/assets/javascripts/monitoring/components/dashboard.vue' do
- element :prometheus_graphs
- end
-
- view 'app/assets/javascripts/monitoring/components/charts/time_series.vue' do
- element :prometheus_graph_widgets
- end
-
- view 'app/assets/javascripts/monitoring/components/panel_type.vue' do
- element :prometheus_widgets_dropdown
- element :alert_widget_menu_item
- end
-
- def wait_for_metrics
- wait_for_data
- return if has_metrics?
-
- wait_until(max_duration: 180) do
- wait_for_data
- has_metrics?
- end
- end
-
- def wait_for_data
- wait_until(reload: false) { !has_text?(LOADING_MESSAGE) } if has_text?(LOADING_MESSAGE)
- end
-
- def has_metrics?
- within_element :prometheus_graphs do
- has_text?(EXPECTED_TITLE)
- end
- end
- end
- end
- end
- end
-end
-
-QA::Page::Project::Operations::Metrics.prepend_if_ee('QA::EE::Page::Project::Operations::Metrics')
diff --git a/qa/qa/page/project/operations/metrics/show.rb b/qa/qa/page/project/operations/metrics/show.rb
new file mode 100644
index 00000000000..c94c1f6590f
--- /dev/null
+++ b/qa/qa/page/project/operations/metrics/show.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+module QA
+ module Page
+ module Project
+ module Operations
+ module Metrics
+ class Show < Page::Base
+ EXPECTED_TITLE = 'Memory Usage (Total)'
+ LOADING_MESSAGE = 'Waiting for performance data'
+
+ view 'app/assets/javascripts/monitoring/components/dashboard.vue' do
+ element :prometheus_graphs
+ end
+
+ view 'app/assets/javascripts/monitoring/components/panel_type.vue' do
+ element :prometheus_graph_widgets
+ element :prometheus_widgets_dropdown
+ element :alert_widget_menu_item
+ end
+
+ def wait_for_metrics
+ wait_for_data
+ return if has_metrics?
+
+ wait_until(max_duration: 180) do
+ wait_for_data
+ has_metrics?
+ end
+ end
+
+ def has_metrics?
+ within_element :prometheus_graphs do
+ has_text?(EXPECTED_TITLE)
+ end
+ end
+
+ private
+
+ def wait_for_data
+ wait_until(reload: false) { !has_text?(LOADING_MESSAGE) } if has_text?(LOADING_MESSAGE)
+ end
+ end
+ end
+ end
+ end
+ end
+end
+
+QA::Page::Project::Operations::Metrics::Show.prepend_if_ee('QA::EE::Page::Project::Operations::Metrics::Show')
diff --git a/qa/qa/page/project/settings/ci_cd.rb b/qa/qa/page/project/settings/ci_cd.rb
index 46f93fad61e..01f7ab43d27 100644
--- a/qa/qa/page/project/settings/ci_cd.rb
+++ b/qa/qa/page/project/settings/ci_cd.rb
@@ -13,6 +13,16 @@ module QA
element :variables_settings_content
end
+ view 'app/views/shared/deploy_tokens/_index.html.haml' do
+ element :deploy_tokens_settings
+ end
+
+ def expand_deploy_tokens(&block)
+ expand_section(:deploy_tokens_settings) do
+ Settings::DeployTokens.perform(&block)
+ end
+ end
+
def expand_runners_settings(&block)
expand_section(:runners_settings_content) do
Settings::Runners.perform(&block)
diff --git a/qa/qa/page/project/settings/deploy_tokens.rb b/qa/qa/page/project/settings/deploy_tokens.rb
index 3173752d40a..3c3ed4f8716 100644
--- a/qa/qa/page/project/settings/deploy_tokens.rb
+++ b/qa/qa/page/project/settings/deploy_tokens.rb
@@ -5,7 +5,7 @@ module QA
module Project
module Settings
class DeployTokens < Page::Base
- view 'app/views/projects/deploy_tokens/_form.html.haml' do
+ view 'app/views/shared/deploy_tokens/_form.html.haml' do
element :deploy_token_name
element :deploy_token_expires_at
element :deploy_token_read_repository
@@ -13,7 +13,7 @@ module QA
element :create_deploy_token
end
- view 'app/views/projects/deploy_tokens/_new_deploy_token.html.haml' do
+ view 'app/views/shared/deploy_tokens/_new_deploy_token.html.haml' do
element :created_deploy_token_section
element :deploy_token_user
element :deploy_token
diff --git a/qa/qa/page/project/settings/repository.rb b/qa/qa/page/project/settings/repository.rb
index 58ed37870b7..7875a38dcf3 100644
--- a/qa/qa/page/project/settings/repository.rb
+++ b/qa/qa/page/project/settings/repository.rb
@@ -31,12 +31,6 @@ module QA
end
end
- def expand_deploy_tokens(&block)
- expand_section(:deploy_tokens_settings) do
- DeployTokens.perform(&block)
- end
- end
-
def expand_mirroring_repositories(&block)
expand_section(:mirroring_repositories_settings_section) do
MirroringRepositories.perform(&block)
diff --git a/qa/qa/page/project/web_ide/edit.rb b/qa/qa/page/project/web_ide/edit.rb
index 73b0856b445..a9b82ac6046 100644
--- a/qa/qa/page/project/web_ide/edit.rb
+++ b/qa/qa/page/project/web_ide/edit.rb
@@ -48,6 +48,10 @@ module QA
element :start_new_mr_checkbox
end
+ view 'app/assets/javascripts/ide/components/repo_editor.vue' do
+ element :editor_container
+ end
+
def has_file?(file_name)
within_element(:file_list) do
page.has_content? file_name
@@ -113,6 +117,17 @@ module QA
raise "The changes do not appear to have been committed successfully." unless commit_success_msg_shown
end
+
+ def add_to_modified_content(content)
+ finished_loading?
+ modified_text_area.set content
+ end
+
+ def modified_text_area
+ within_element(:editor_container) do
+ find('.modified textarea.inputarea')
+ end
+ end
end
end
end
diff --git a/qa/qa/resource/deploy_key.rb b/qa/qa/resource/deploy_key.rb
index 26355729dab..4db4d7e1315 100644
--- a/qa/qa/resource/deploy_key.rb
+++ b/qa/qa/resource/deploy_key.rb
@@ -6,7 +6,7 @@ module QA
attr_accessor :title, :key
attribute :md5_fingerprint do
- Page::Project::Settings::Repository.perform do |setting|
+ Page::Project::Settings::CICD.perform do |setting|
setting.expand_deploy_keys do |key|
key.find_md5_fingerprint(title)
end
@@ -25,7 +25,7 @@ module QA
Page::Project::Menu.perform(&:go_to_repository_settings)
- Page::Project::Settings::Repository.perform do |setting|
+ Page::Project::Settings::CICD.perform do |setting|
setting.expand_deploy_keys do |page|
page.fill_key_title(title)
page.fill_key_value(key)
diff --git a/qa/qa/resource/deploy_token.rb b/qa/qa/resource/deploy_token.rb
index f97e76cc322..b4baaa47349 100644
--- a/qa/qa/resource/deploy_token.rb
+++ b/qa/qa/resource/deploy_token.rb
@@ -6,16 +6,16 @@ module QA
attr_accessor :name, :expires_at
attribute :username do
- Page::Project::Settings::Repository.perform do |repository_page|
- repository_page.expand_deploy_tokens do |token|
+ Page::Project::Settings::CICD.perform do |cicd_page|
+ cicd_page.expand_deploy_tokens do |token|
token.token_username
end
end
end
attribute :password do
- Page::Project::Settings::Repository.perform do |repository_page|
- repository_page.expand_deploy_tokens do |token|
+ Page::Project::Settings::CICD.perform do |cicd_page|
+ cicd_page.expand_deploy_tokens do |token|
token.token_password
end
end
@@ -31,12 +31,10 @@ module QA
def fabricate!
project.visit!
- Page::Project::Menu.act do
- go_to_repository_settings
- end
+ Page::Project::Menu.perform(&:go_to_ci_cd_settings)
- Page::Project::Settings::Repository.perform do |setting|
- setting.expand_deploy_tokens do |page|
+ Page::Project::Settings::CICD.perform do |cicd|
+ cicd.expand_deploy_tokens do |page|
page.fill_token_name(name)
page.fill_token_expires_at(expires_at)
page.fill_scopes(read_repository: true, read_registry: false)
diff --git a/qa/qa/resource/kubernetes_cluster.rb b/qa/qa/resource/kubernetes_cluster.rb
index 1930e0465b2..7306acfe2a4 100644
--- a/qa/qa/resource/kubernetes_cluster.rb
+++ b/qa/qa/resource/kubernetes_cluster.rb
@@ -38,6 +38,9 @@ module QA
# We must wait a few seconds for permissions to be set up correctly for new cluster
sleep 10
+ # Open applications tab
+ show.open_applications
+
# Helm must be installed before everything else
show.install!(:helm)
show.await_installed(:helm)
@@ -52,6 +55,8 @@ module QA
if @install_ingress
populate(:ingress_ip)
+
+ show.open_details
show.set_domain("#{ingress_ip}.nip.io")
show.save_domain
end
diff --git a/qa/qa/resource/ssh_key.rb b/qa/qa/resource/ssh_key.rb
index 22bdea424ca..3e130aef9e4 100644
--- a/qa/qa/resource/ssh_key.rb
+++ b/qa/qa/resource/ssh_key.rb
@@ -7,6 +7,8 @@ module QA
attr_accessor :title
+ attribute :id
+
def_delegators :key, :private_key, :public_key, :md5_fingerprint
def key
@@ -21,6 +23,35 @@ module QA
profile_page.add_key(public_key, title)
end
end
+
+ def fabricate_via_api!
+ api_post
+ end
+
+ def api_delete
+ QA::Runtime::Logger.debug("Deleting SSH key with title '#{title}' and fingerprint '#{md5_fingerprint}'")
+
+ super
+ end
+
+ def api_get_path
+ "/user/keys/#{id}"
+ end
+
+ def api_post_path
+ '/user/keys'
+ end
+
+ def api_post_body
+ {
+ title: title,
+ key: public_key
+ }
+ end
+
+ def api_delete_path
+ "/user/keys/#{id}"
+ end
end
end
end
diff --git a/qa/qa/runtime/browser.rb b/qa/qa/runtime/browser.rb
index 69ba90702be..7777d06b6f5 100644
--- a/qa/qa/runtime/browser.rb
+++ b/qa/qa/runtime/browser.rb
@@ -44,11 +44,21 @@ module QA
new.visit(address, page_class, &block)
end
+ # rubocop: disable Metrics/AbcSize
def self.configure!
RSpec.configure do |config|
config.define_derived_metadata(file_path: %r{/qa/specs/features/}) do |metadata|
metadata[:type] = :feature
end
+
+ config.around(:each) do |example|
+ example.run
+
+ if example.metadata[:screenshot]
+ screenshot = example.metadata[:screenshot][:image] || example.metadata[:screenshot][:html]
+ example.metadata[:stdout] = %{[[ATTACHMENT|#{screenshot}]]}
+ end
+ end
end
Capybara.server_port = 9887 + ENV['TEST_ENV_NUMBER'].to_i
@@ -122,8 +132,10 @@ module QA
driver.browser.save_screenshot(path)
end
+ Capybara::Screenshot.append_timestamp = false
+
Capybara::Screenshot.register_filename_prefix_formatter(:rspec) do |example|
- ::File.join(QA::Runtime::Namespace.name, example.file_path.sub('./qa/specs/features/', ''))
+ ::File.join(QA::Runtime::Namespace.name, example.full_description.downcase.parameterize(separator: "_")[0..99])
end
Capybara.configure do |config|
@@ -138,6 +150,7 @@ module QA
config.default_normalize_ws = true
end
end
+ # rubocop: enable Metrics/AbcSize
class Session
include Capybara::DSL
diff --git a/qa/qa/service/cluster_provider/k3s.rb b/qa/qa/service/cluster_provider/k3s.rb
new file mode 100644
index 00000000000..165de795683
--- /dev/null
+++ b/qa/qa/service/cluster_provider/k3s.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+module QA
+ module Service
+ module ClusterProvider
+ class K3s < Base
+ def validate_dependencies
+ Runtime::ApplicationSettings.set_application_settings(allow_local_requests_from_web_hooks_and_services: true)
+ end
+
+ def setup
+ @k3s = Service::DockerRun::K3s.new.tap do |k3s|
+ k3s.register!
+
+ shell "kubectl config set-cluster k3s --server https://#{k3s.host_name}:6443 --insecure-skip-tls-verify"
+ shell 'kubectl config set-credentials default --username=node --password=some-secret'
+ shell 'kubectl config set-context k3s --cluster=k3s --user=default'
+ shell 'kubectl config use-context k3s'
+
+ wait_for_server(k3s.host_name) do
+ shell 'kubectl version'
+
+ wait_for_namespaces do
+ # install local storage
+ shell 'kubectl apply -f https://raw.githubusercontent.com/rancher/local-path-provisioner/master/deploy/local-path-storage.yaml'
+
+ # patch local storage
+ shell %(kubectl patch storageclass local-path -p '{"metadata": {"annotations":{"storageclass.kubernetes.io/is-default-class":"true"}}}')
+ end
+ end
+ end
+ end
+
+ def teardown
+ Runtime::ApplicationSettings.set_application_settings(allow_local_requests_from_web_hooks_and_services: false)
+
+ @k3s&.remove!
+ end
+
+ def set_credentials(admin_user)
+ end
+
+ # Fetch "real" certificate
+ # See https://github.com/rancher/k3s/issues/27
+ def filter_credentials(credentials)
+ kubeconfig = YAML.safe_load(@k3s.kubeconfig)
+ ca_certificate = kubeconfig.dig('clusters', 0, 'cluster', 'certificate-authority-data')
+
+ credentials.merge('data' => credentials['data'].merge('ca.crt' => ca_certificate))
+ end
+
+ private
+
+ def wait_for_server(host_name)
+ print "Waiting for K3s server at `https://#{host_name}:6443` to become available "
+
+ 60.times do
+ if service_available?('kubectl version')
+ return yield if block_given?
+
+ return true
+ end
+
+ sleep 1
+ print '.'
+ end
+
+ raise 'K3s server never came up'
+ end
+
+ def wait_for_namespaces
+ print 'Waiting for k8s namespaces to populate'
+
+ 60.times do
+ if service_available?('kubectl get pods --all-namespaces | grep --silent "Running"')
+ return yield if block_given?
+
+ return true
+ end
+
+ sleep 1
+ print '.'
+ end
+
+        raise 'K8s namespaces did not populate correctly'
+ end
+
+ def service_available?(command)
+ system("#{command} > /dev/null 2>&1")
+ end
+ end
+ end
+ end
+end
diff --git a/qa/qa/service/docker_run/base.rb b/qa/qa/service/docker_run/base.rb
index 3f42c09ad2c..b02bbea8ff5 100644
--- a/qa/qa/service/docker_run/base.rb
+++ b/qa/qa/service/docker_run/base.rb
@@ -37,6 +37,10 @@ module QA
def running?
`docker ps -f name=#{@name}`.include?(@name)
end
+
+ def read_file(file_path)
+ `docker exec #{@name} /bin/cat #{file_path}`
+ end
end
end
end
diff --git a/qa/qa/service/docker_run/k3s.rb b/qa/qa/service/docker_run/k3s.rb
new file mode 100644
index 00000000000..da254497ff0
--- /dev/null
+++ b/qa/qa/service/docker_run/k3s.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+module QA
+ module Service
+ module DockerRun
+ class K3s < Base
+ def initialize
+ @image = 'registry.gitlab.com/gitlab-org/cluster-integration/test-utils/k3s-gitlab-ci/releases/v0.6.1'
+ @name = 'k3s'
+ super
+ end
+
+ def register!
+ pull
+ start_k3s
+ end
+
+ def host_name
+ return 'localhost' unless Runtime::Env.running_in_ci?
+
+ super
+ end
+
+ def kubeconfig
+ read_file('/etc/rancher/k3s/k3s.yaml').chomp
+ end
+
+ def start_k3s
+ command = <<~CMD.tr("\n", ' ')
+ docker run -d --rm
+ --network #{network}
+ --hostname #{host_name}
+ --name #{@name}
+ --publish 6443:6443
+ --privileged
+ #{@image} server --cluster-secret some-secret
+ CMD
+
+ command.gsub!("--network #{network} ", '') unless QA::Runtime::Env.running_in_ci?
+
+ shell command
+ end
+ end
+ end
+ end
+end
diff --git a/qa/qa/specs/features/browser_ui/1_manage/project/view_project_activity_spec.rb b/qa/qa/specs/features/browser_ui/1_manage/project/view_project_activity_spec.rb
index 5f3b492ea81..d483dcc97a7 100644
--- a/qa/qa/specs/features/browser_ui/1_manage/project/view_project_activity_spec.rb
+++ b/qa/qa/specs/features/browser_ui/1_manage/project/view_project_activity_spec.rb
@@ -6,12 +6,11 @@ module QA
it 'user creates an event in the activity page upon Git push' do
Flow::Login.sign_in
- project_push = Resource::Repository::ProjectPush.fabricate! do |push|
+ Resource::Repository::ProjectPush.fabricate! do |push|
push.file_name = 'README.md'
push.file_content = '# This is a test project'
push.commit_message = 'Add README.md'
- end
- project_push.project.visit!
+ end.project.visit!
Page::Project::Menu.perform(&:click_activity)
Page::Project::Activity.perform(&:click_push_events)
diff --git a/qa/qa/specs/features/browser_ui/2_plan/issue/create_issue_spec.rb b/qa/qa/specs/features/browser_ui/2_plan/issue/create_issue_spec.rb
index 7b4418191a3..9c99f3ee377 100644
--- a/qa/qa/specs/features/browser_ui/2_plan/issue/create_issue_spec.rb
+++ b/qa/qa/specs/features/browser_ui/2_plan/issue/create_issue_spec.rb
@@ -17,7 +17,7 @@ module QA
end
end
- context 'when using attachments in comments', :object_storage do
+ context 'when using attachments in comments', :object_storage, quarantine: { issue: 'https://gitlab.com/gitlab-org/gitlab/issues/205408', type: :flaky } do
let(:gif_file_name) { 'banana_sample.gif' }
let(:file_to_attach) do
File.absolute_path(File.join('spec', 'fixtures', gif_file_name))
diff --git a/qa/qa/specs/features/browser_ui/3_create/merge_request/rebase_merge_request_spec.rb b/qa/qa/specs/features/browser_ui/3_create/merge_request/rebase_merge_request_spec.rb
index 6ebe3e0b620..1c78deae2bf 100644
--- a/qa/qa/specs/features/browser_ui/3_create/merge_request/rebase_merge_request_spec.rb
+++ b/qa/qa/specs/features/browser_ui/3_create/merge_request/rebase_merge_request_spec.rb
@@ -6,7 +6,7 @@ module QA
it 'user rebases source branch of merge request' do
Flow::Login.sign_in
- project = Resource::Project.fabricate! do |project|
+ project = Resource::Project.fabricate_via_api! do |project|
project.name = "only-fast-forward"
end
project.visit!
diff --git a/qa/qa/specs/features/browser_ui/3_create/merge_request/squash_merge_request_spec.rb b/qa/qa/specs/features/browser_ui/3_create/merge_request/squash_merge_request_spec.rb
index 89f0fc37f3f..9236609934e 100644
--- a/qa/qa/specs/features/browser_ui/3_create/merge_request/squash_merge_request_spec.rb
+++ b/qa/qa/specs/features/browser_ui/3_create/merge_request/squash_merge_request_spec.rb
@@ -6,7 +6,7 @@ module QA
it 'user squashes commits while merging' do
Flow::Login.sign_in
- project = Resource::Project.fabricate! do |project|
+ project = Resource::Project.fabricate_via_api! do |project|
project.name = "squash-before-merge"
end
diff --git a/qa/qa/specs/features/browser_ui/3_create/repository/add_list_delete_branches_spec.rb b/qa/qa/specs/features/browser_ui/3_create/repository/add_list_delete_branches_spec.rb
index 7b1c2a71158..bf5a9501cba 100644
--- a/qa/qa/specs/features/browser_ui/3_create/repository/add_list_delete_branches_spec.rb
+++ b/qa/qa/specs/features/browser_ui/3_create/repository/add_list_delete_branches_spec.rb
@@ -18,7 +18,7 @@ module QA
before do
Flow::Login.sign_in
- project = Resource::Project.fabricate! do |proj|
+ project = Resource::Project.fabricate_via_api! do |proj|
proj.name = 'project-qa-test'
proj.description = 'project for qa test'
end
diff --git a/qa/qa/specs/features/browser_ui/3_create/repository/add_ssh_key_spec.rb b/qa/qa/specs/features/browser_ui/3_create/repository/add_ssh_key_spec.rb
index c3379d41ff2..25866e12185 100644
--- a/qa/qa/specs/features/browser_ui/3_create/repository/add_ssh_key_spec.rb
+++ b/qa/qa/specs/features/browser_ui/3_create/repository/add_ssh_key_spec.rb
@@ -8,7 +8,7 @@ module QA
it 'user adds and then removes an SSH key', :smoke do
Flow::Login.sign_in
- key = Resource::SSHKey.fabricate! do |resource|
+ key = Resource::SSHKey.fabricate_via_browser_ui! do |resource|
resource.title = key_title
end
diff --git a/qa/qa/specs/features/browser_ui/3_create/repository/protocol_v2_push_http_spec.rb b/qa/qa/specs/features/browser_ui/3_create/repository/protocol_v2_push_http_spec.rb
index ff0f212c289..b210e747614 100644
--- a/qa/qa/specs/features/browser_ui/3_create/repository/protocol_v2_push_http_spec.rb
+++ b/qa/qa/specs/features/browser_ui/3_create/repository/protocol_v2_push_http_spec.rb
@@ -1,14 +1,13 @@
# frozen_string_literal: true
module QA
- # Git protocol v2 is temporarily disabled
- context 'Create', quarantine: { issue: 'https://gitlab.com/gitlab-org/gitlab/issues/27828', type: :bug } do
+ context 'Create' do
describe 'Push over HTTP using Git protocol version 2', :requires_git_protocol_v2 do
it 'user pushes to the repository' do
Flow::Login.sign_in
# Create a project to push to
- project = Resource::Project.fabricate! do |project|
+ project = Resource::Project.fabricate_via_api! do |project|
project.name = 'git-protocol-project'
end
diff --git a/qa/qa/specs/features/browser_ui/3_create/repository/protocol_v2_push_ssh_spec.rb b/qa/qa/specs/features/browser_ui/3_create/repository/protocol_v2_push_ssh_spec.rb
index 0e8b8203c34..e845c3ca8ea 100644
--- a/qa/qa/specs/features/browser_ui/3_create/repository/protocol_v2_push_ssh_spec.rb
+++ b/qa/qa/specs/features/browser_ui/3_create/repository/protocol_v2_push_ssh_spec.rb
@@ -11,30 +11,25 @@ module QA
let(:key_title) { "key for ssh tests #{Time.now.to_f}" }
let(:ssh_key) do
- Resource::SSHKey.fabricate! do |resource|
+ Resource::SSHKey.fabricate_via_api! do |resource|
resource.title = key_title
end
end
around do |example|
- # Create an SSH key to be used with Git
+ # Create an SSH key to be used with Git, then remove it after the test
Flow::Login.sign_in
ssh_key
example.run
- # Remove the SSH key
- Flow::Login.sign_in
- Page::Main::Menu.perform(&:click_settings_link)
- Page::Profile::Menu.perform(&:click_ssh_keys)
- Page::Profile::SSHKeys.perform do |ssh_keys|
- ssh_keys.remove_key(key_title)
- end
+ ssh_key.remove_via_api!
+
+ Page::Main::Menu.perform(&:sign_out_if_signed_in)
end
it 'user pushes to the repository' do
- # Create a project to push to
- project = Resource::Project.fabricate! do |project|
+ project = Resource::Project.fabricate_via_api! do |project|
project.name = 'git-protocol-project'
end
@@ -68,11 +63,8 @@ module QA
project.visit!
project.wait_for_push_new_branch
- # Check that the push worked
expect(page).to have_content(file_name)
expect(page).to have_content(file_content)
-
- # And check that the correct Git protocol was used
expect(git_protocol_reported).to eq(git_protocol)
end
end
diff --git a/qa/qa/specs/features/browser_ui/3_create/repository/push_mirroring_over_http_spec.rb b/qa/qa/specs/features/browser_ui/3_create/repository/push_mirroring_over_http_spec.rb
index 059362704b4..ae95f5a7a44 100644
--- a/qa/qa/specs/features/browser_ui/3_create/repository/push_mirroring_over_http_spec.rb
+++ b/qa/qa/specs/features/browser_ui/3_create/repository/push_mirroring_over_http_spec.rb
@@ -7,7 +7,7 @@ module QA
Runtime::Browser.visit(:gitlab, Page::Main::Login)
Page::Main::Login.perform(&:sign_in_using_credentials)
- target_project = Resource::Project.fabricate! do |project|
+ target_project = Resource::Project.fabricate_via_api! do |project|
project.name = 'push-mirror-target-project'
end
target_project_uri = target_project.repository_http_location.uri
diff --git a/qa/qa/specs/features/browser_ui/3_create/repository/push_over_http_spec.rb b/qa/qa/specs/features/browser_ui/3_create/repository/push_over_http_spec.rb
index 2bd54d763a6..9db5fe5292f 100644
--- a/qa/qa/specs/features/browser_ui/3_create/repository/push_over_http_spec.rb
+++ b/qa/qa/specs/features/browser_ui/3_create/repository/push_over_http_spec.rb
@@ -6,12 +6,11 @@ module QA
it 'user pushes code to the repository' do
Flow::Login.sign_in
- project_push = Resource::Repository::ProjectPush.fabricate! do |push|
+ Resource::Repository::ProjectPush.fabricate! do |push|
push.file_name = 'README.md'
push.file_content = '# This is a test project'
push.commit_message = 'Add README.md'
- end
- project_push.project.visit!
+ end.project.visit!
expect(page).to have_content('README.md')
expect(page).to have_content('This is a test project')
diff --git a/qa/qa/specs/features/browser_ui/3_create/repository/push_protected_branch_spec.rb b/qa/qa/specs/features/browser_ui/3_create/repository/push_protected_branch_spec.rb
index 5d0f4b215f4..455db4d811d 100644
--- a/qa/qa/specs/features/browser_ui/3_create/repository/push_protected_branch_spec.rb
+++ b/qa/qa/specs/features/browser_ui/3_create/repository/push_protected_branch_spec.rb
@@ -6,7 +6,7 @@ module QA
let(:branch_name) { 'protected-branch' }
let(:commit_message) { 'Protected push commit message' }
let(:project) do
- Resource::Project.fabricate! do |resource|
+ Resource::Project.fabricate_via_api! do |resource|
resource.name = 'protected-branch-project'
resource.initialize_with_readme = true
end
diff --git a/qa/qa/specs/features/browser_ui/3_create/repository/use_ssh_key_spec.rb b/qa/qa/specs/features/browser_ui/3_create/repository/use_ssh_key_spec.rb
index 1837a110d79..1a3c6d03098 100644
--- a/qa/qa/specs/features/browser_ui/3_create/repository/use_ssh_key_spec.rb
+++ b/qa/qa/specs/features/browser_ui/3_create/repository/use_ssh_key_spec.rb
@@ -11,18 +11,16 @@ module QA
it 'user adds an ssh key and pushes code to the repository' do
Flow::Login.sign_in
- key = Resource::SSHKey.fabricate! do |resource|
+ key = Resource::SSHKey.fabricate_via_api! do |resource|
resource.title = key_title
end
- project_push = Resource::Repository::ProjectPush.fabricate! do |push|
+ Resource::Repository::ProjectPush.fabricate! do |push|
push.ssh_key = key
push.file_name = 'README.md'
push.file_content = '# Test Use SSH Key'
push.commit_message = 'Add README.md'
- end
-
- project_push.project.visit!
+ end.project.visit!
expect(page).to have_content('README.md')
expect(page).to have_content('Test Use SSH Key')
diff --git a/qa/qa/specs/features/browser_ui/3_create/snippet/create_snippet_spec.rb b/qa/qa/specs/features/browser_ui/3_create/snippet/create_snippet_spec.rb
index 277e7364ada..dfcbf4b44c8 100644
--- a/qa/qa/specs/features/browser_ui/3_create/snippet/create_snippet_spec.rb
+++ b/qa/qa/specs/features/browser_ui/3_create/snippet/create_snippet_spec.rb
@@ -19,7 +19,7 @@ module QA
Page::Dashboard::Snippet::Show.perform do |snippet|
expect(snippet).to have_snippet_title('Snippet title')
expect(snippet).to have_snippet_description('Snippet description')
- expect(snippet).to have_visibility_type('Private')
+ expect(snippet).to have_visibility_type(/private/i)
expect(snippet).to have_file_name('New snippet file name')
expect(snippet).to have_file_content('Snippet file text')
end
diff --git a/qa/qa/specs/features/browser_ui/3_create/web_ide/review_merge_request_spec.rb b/qa/qa/specs/features/browser_ui/3_create/web_ide/review_merge_request_spec.rb
new file mode 100644
index 00000000000..c37ad6d4318
--- /dev/null
+++ b/qa/qa/specs/features/browser_ui/3_create/web_ide/review_merge_request_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+module QA
+ context 'Create', quarantine: { type: :new } do
+ describe 'Review a merge request in Web IDE' do
+ let(:new_file) { 'awesome_new_file.txt' }
+ let(:review_text) { 'Reviewed ' }
+
+ let(:merge_request) do
+ Resource::MergeRequest.fabricate_via_api! do |mr|
+ mr.file_name = new_file
+ mr.file_content = 'Text'
+ end
+ end
+
+ before do
+ Flow::Login.sign_in
+
+ merge_request.visit!
+ end
+
+ it 'opens and edits a merge request in Web IDE' do
+ Page::MergeRequest::Show.perform do |show|
+ show.click_open_in_web_ide
+ end
+
+ Page::Project::WebIDE::Edit.perform do |ide|
+ ide.has_file?(new_file)
+ ide.add_to_modified_content(review_text)
+ ide.commit_changes
+ end
+
+ merge_request.visit!
+
+ Page::MergeRequest::Show.perform do |show|
+ show.click_diffs_tab
+ expect(show).to have_content(review_text)
+ end
+ end
+ end
+ end
+end
diff --git a/qa/qa/specs/features/browser_ui/3_create/wiki/create_edit_clone_push_wiki_spec.rb b/qa/qa/specs/features/browser_ui/3_create/wiki/create_edit_clone_push_wiki_spec.rb
index 42aa527da85..185d10a64ed 100644
--- a/qa/qa/specs/features/browser_ui/3_create/wiki/create_edit_clone_push_wiki_spec.rb
+++ b/qa/qa/specs/features/browser_ui/3_create/wiki/create_edit_clone_push_wiki_spec.rb
@@ -6,7 +6,7 @@ module QA
it 'user creates, edits, clones, and pushes to the wiki' do
Flow::Login.sign_in
- wiki = Resource::Wiki.fabricate! do |resource|
+ wiki = Resource::Wiki.fabricate_via_browser_ui! do |resource|
resource.title = 'Home'
resource.content = '# My First Wiki Content'
resource.message = 'Update home'
diff --git a/qa/qa/specs/features/browser_ui/4_verify/ci_variable/add_remove_ci_variable_spec.rb b/qa/qa/specs/features/browser_ui/4_verify/ci_variable/add_remove_ci_variable_spec.rb
index 9dad5ad8fb5..cff415dcf97 100644
--- a/qa/qa/specs/features/browser_ui/4_verify/ci_variable/add_remove_ci_variable_spec.rb
+++ b/qa/qa/specs/features/browser_ui/4_verify/ci_variable/add_remove_ci_variable_spec.rb
@@ -2,7 +2,7 @@
module QA
context 'Verify' do
- describe 'Add or Remove CI variable via UI', :smoke do
+ describe 'Add or Remove CI variable via UI', :smoke, quarantine: { issue: 'https://gitlab.com/gitlab-org/gitlab/issues/207915', type: :stale } do
let!(:project) do
Resource::Project.fabricate_via_api! do |project|
project.name = 'project-with-ci-variables'
diff --git a/qa/qa/specs/features/browser_ui/6_release/deploy_key/add_deploy_key_spec.rb b/qa/qa/specs/features/browser_ui/6_release/deploy_key/add_deploy_key_spec.rb
index 89aba112407..2e0bfcec4bf 100644
--- a/qa/qa/specs/features/browser_ui/6_release/deploy_key/add_deploy_key_spec.rb
+++ b/qa/qa/specs/features/browser_ui/6_release/deploy_key/add_deploy_key_spec.rb
@@ -3,7 +3,7 @@
module QA
context 'Release' do
describe 'Deploy key creation' do
- it 'user adds a deploy key' do
+ it 'user adds a deploy key', quarantine: 'https://gitlab.com/gitlab-org/gitlab/issues/208761' do
Flow::Login.sign_in
key = Runtime::Key::RSA.new
@@ -17,7 +17,7 @@ module QA
expect(deploy_key.md5_fingerprint).to eq key.md5_fingerprint
- Page::Project::Settings::Repository.perform do |setting|
+ Page::Project::Settings::CICD.perform do |setting|
setting.expand_deploy_keys do |keys|
expect(keys).to have_key(deploy_key_title, key.md5_fingerprint)
end
diff --git a/qa/qa/specs/features/browser_ui/6_release/deploy_key/clone_using_deploy_key_spec.rb b/qa/qa/specs/features/browser_ui/6_release/deploy_key/clone_using_deploy_key_spec.rb
index 581e6b8299e..ca32d5a84f0 100644
--- a/qa/qa/specs/features/browser_ui/6_release/deploy_key/clone_using_deploy_key_spec.rb
+++ b/qa/qa/specs/features/browser_ui/6_release/deploy_key/clone_using_deploy_key_spec.rb
@@ -35,7 +35,7 @@ module QA
]
keys.each do |(key_class, bits)|
- it "user sets up a deploy key with #{key_class}(#{bits}) to clone code using pipelines" do
+ it "user sets up a deploy key with #{key_class}(#{bits}) to clone code using pipelines", quarantine: 'https://gitlab.com/gitlab-org/gitlab/issues/208761' do
key = key_class.new(*bits)
Resource::DeployKey.fabricate_via_browser_ui! do |resource|
diff --git a/qa/qa/specs/features/browser_ui/7_configure/kubernetes/kubernetes_integration_spec.rb b/qa/qa/specs/features/browser_ui/7_configure/kubernetes/kubernetes_integration_spec.rb
index 73b5a579e08..3bf555bfa16 100644
--- a/qa/qa/specs/features/browser_ui/7_configure/kubernetes/kubernetes_integration_spec.rb
+++ b/qa/qa/specs/features/browser_ui/7_configure/kubernetes/kubernetes_integration_spec.rb
@@ -2,10 +2,9 @@
module QA
context 'Configure' do
- # This test requires GITLAB_QA_ADMIN_ACCESS_TOKEN to be specified
- describe 'Kubernetes Cluster Integration', :orchestrated, :kubernetes, :requires_admin, :skip do
+ describe 'Kubernetes Cluster Integration', :orchestrated, :kubernetes, :requires_admin do
context 'Project Clusters' do
- let(:cluster) { Service::KubernetesCluster.new(provider_class: Service::ClusterProvider::K3d).create! }
+ let(:cluster) { Service::KubernetesCluster.new(provider_class: Service::ClusterProvider::K3s).create! }
let(:project) do
Resource::Project.fabricate_via_api! do |project|
project.name = 'project-with-k8s'
@@ -21,7 +20,7 @@ module QA
cluster.remove!
end
- it 'can create and associate a project cluster', :smoke do
+ it 'can create and associate a project cluster', :smoke, quarantine: { type: :new } do
Resource::KubernetesCluster.fabricate_via_browser_ui! do |k8s_cluster|
k8s_cluster.project = project
k8s_cluster.cluster = cluster
@@ -35,18 +34,6 @@ module QA
expect(index).to have_cluster(cluster)
end
end
-
- it 'installs helm and tiller on a gitlab managed app' do
- Resource::KubernetesCluster.fabricate_via_browser_ui! do |k8s_cluster|
- k8s_cluster.project = project
- k8s_cluster.cluster = cluster
- k8s_cluster.install_helm_tiller = true
- end
-
- Page::Project::Operations::Kubernetes::Show.perform do |show|
- expect(show).to have_application_installed(:helm)
- end
- end
end
end
end
diff --git a/qa/spec/git/repository_spec.rb b/qa/spec/git/repository_spec.rb
index 5198eb6f58b..6cca9f55e11 100644
--- a/qa/spec/git/repository_spec.rb
+++ b/qa/spec/git/repository_spec.rb
@@ -69,18 +69,20 @@ describe QA::Git::Repository do
end
describe '#fetch_supported_git_protocol' do
+ Result = Struct.new(:response)
+
it "reports the detected version" do
- expect(repository).to receive(:run).and_return("packet: git< version 2")
+ expect(repository).to receive(:run).and_return(Result.new("packet: git< version 2"))
expect(repository.fetch_supported_git_protocol).to eq('2')
end
it 'reports unknown if version is unknown' do
- expect(repository).to receive(:run).and_return("packet: git< version -1")
+ expect(repository).to receive(:run).and_return(Result.new("packet: git< version -1"))
expect(repository.fetch_supported_git_protocol).to eq('unknown')
end
it 'reports unknown if content does not identify a version' do
- expect(repository).to receive(:run).and_return("foo")
+ expect(repository).to receive(:run).and_return(Result.new("foo"))
expect(repository.fetch_supported_git_protocol).to eq('unknown')
end
end
diff --git a/qa/spec/service/docker_run/k3s_spec.rb b/qa/spec/service/docker_run/k3s_spec.rb
new file mode 100644
index 00000000000..0224b7d6704
--- /dev/null
+++ b/qa/spec/service/docker_run/k3s_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+module QA
+ describe Service::DockerRun::K3s do
+ describe '#host_name' do
+ context 'in CI' do
+ let(:name) { 'k3s-12345' }
+ let(:network) { 'thenet' }
+
+ before do
+ allow(Runtime::Env).to receive(:running_in_ci?).and_return(true)
+ allow(subject).to receive(:network).and_return(network)
+ subject.instance_variable_set(:@name, name)
+ end
+
+ it 'returns name.network' do
+ expect(subject.host_name).to eq("#{name}.#{network}")
+ end
+ end
+
+ context 'not in CI' do
+ before do
+ allow(Runtime::Env).to receive(:running_in_ci?).and_return(false)
+ end
+
+ it 'returns localhost if not running in a CI environment' do
+ expect(subject.host_name).to eq('localhost')
+ end
+ end
+ end
+ end
+end
diff --git a/rubocop/cop/ban_catch_throw.rb b/rubocop/cop/ban_catch_throw.rb
new file mode 100644
index 00000000000..42301d5512f
--- /dev/null
+++ b/rubocop/cop/ban_catch_throw.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+module RuboCop
+ module Cop
+ # Bans the use of 'catch/throw', as exceptions are better for errors and
+ # they are equivalent to 'goto' for flow control, with all the problems
+ # that implies.
+ #
+ # @example
+ # # bad
+ # catch(:error) do
+ # throw(:error)
+ # end
+ #
+ # # good
+ # begin
+ # raise StandardError
+ # rescue StandardError => err
+ # # ...
+ # end
+ #
+ class BanCatchThrow < RuboCop::Cop::Cop
+ MSG = "Do not use catch or throw unless a gem's API demands it."
+
+ def on_send(node)
+ receiver, method_name, _ = *node
+
+ return unless receiver.nil? && %i[catch throw].include?(method_name)
+
+ add_offense(node, location: :expression)
+ end
+ end
+ end
+end
diff --git a/rubocop/cop/gitlab/keys-first-and-values-first.rb b/rubocop/cop/gitlab/keys-first-and-values-first.rb
index 9b68957cba2..544f9800304 100644
--- a/rubocop/cop/gitlab/keys-first-and-values-first.rb
+++ b/rubocop/cop/gitlab/keys-first-and-values-first.rb
@@ -26,7 +26,7 @@ module RuboCop
elsif node.descendants.first.method_name == :keys
'.each_key'
else
- throw("Expect '.values.first' or '.keys.first', but get #{node.descendants.first.method_name}.first")
+ throw("Expect '.values.first' or '.keys.first', but get #{node.descendants.first.method_name}.first") # rubocop:disable Cop/BanCatchThrow
end
upto_including_keys_or_values = node.descendants.first.source_range
diff --git a/rubocop/cop/migration/add_column_with_default.rb b/rubocop/cop/migration/add_column_with_default.rb
index d9f8fe62a86..68e53b17f19 100644
--- a/rubocop/cop/migration/add_column_with_default.rb
+++ b/rubocop/cop/migration/add_column_with_default.rb
@@ -10,7 +10,37 @@ module RuboCop
class AddColumnWithDefault < RuboCop::Cop::Cop
include MigrationHelpers
- WHITELISTED_TABLES = [:application_settings].freeze
+ # Tables >= 10 GB on GitLab.com as of 02/2020
+ BLACKLISTED_TABLES = %i[
+ audit_events
+ ci_build_trace_sections
+ ci_builds
+ ci_builds_metadata
+ ci_job_artifacts
+ ci_pipeline_variables
+ ci_pipelines
+ ci_stages
+ deployments
+ events
+ issues
+ merge_request_diff_commits
+ merge_request_diff_files
+ merge_request_diffs
+ merge_request_metrics
+ merge_requests
+ note_diff_files
+ notes
+ project_authorizations
+ projects
+ push_event_payloads
+ resource_label_events
+ sent_notifications
+ system_note_metadata
+ taggings
+ todos
+ users
+ web_hook_logs
+ ].freeze
MSG = '`add_column_with_default` without `allow_null: true` may cause prolonged lock situations and downtime, ' \
'see https://gitlab.com/gitlab-org/gitlab/issues/38060'.freeze
@@ -23,21 +53,23 @@ module RuboCop
return unless in_migration?(node)
add_column_with_default?(node) do |table, options|
- break if table_whitelisted?(table) || nulls_allowed?(options)
-
- add_offense(node, location: :selector)
+ add_offense(node, location: :selector) if offensive?(table, options)
end
end
private
+ def offensive?(table, options)
+ table_blacklisted?(table) && !nulls_allowed?(options)
+ end
+
def nulls_allowed?(options)
options.find { |opt| opt.key.value == :allow_null && opt.value.true_type? }
end
- def table_whitelisted?(symbol)
+ def table_blacklisted?(symbol)
symbol && symbol.type == :sym &&
- WHITELISTED_TABLES.include?(symbol.children[0])
+ BLACKLISTED_TABLES.include?(symbol.children[0])
end
end
end
diff --git a/rubocop/cop/migration/schedule_async.rb b/rubocop/cop/migration/schedule_async.rb
new file mode 100644
index 00000000000..f296628c3d6
--- /dev/null
+++ b/rubocop/cop/migration/schedule_async.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require_relative '../../migration_helpers'
+
+module RuboCop
+ module Cop
+ module Migration
+ class ScheduleAsync < RuboCop::Cop::Cop
+ include MigrationHelpers
+
+ ENFORCED_SINCE = 2020_02_12_00_00_00
+
+ MSG = <<~MSG
+ Don't call the background migration worker directly, use the `#migrate_async`,
+ `#migrate_in`, `#bulk_migrate_async` or `#bulk_migrate_in` migration helpers
+ instead.
+ MSG
+
+ def_node_matcher :calls_background_migration_worker?, <<~PATTERN
+ (send (const nil? :BackgroundMigrationWorker) {:perform_async :perform_in :bulk_perform_async :bulk_perform_in} ... )
+ PATTERN
+
+ def on_send(node)
+ return unless in_migration?(node)
+ return if version(node) < ENFORCED_SINCE
+
+ add_offense(node, location: :expression) if calls_background_migration_worker?(node)
+ end
+
+ def autocorrect(node)
+ # This gets rid of the receiver `BackgroundMigrationWorker` and
+ # replaces `perform` with `schedule`
+ schedule_method = method_name(node).to_s.sub('perform', 'migrate')
+ arguments = arguments(node).map(&:source).join(', ')
+
+ replacement = "#{schedule_method}(#{arguments})"
+ lambda do |corrector|
+ corrector.replace(node.source_range, replacement)
+ end
+ end
+
+ private
+
+ def method_name(node)
+ node.children.second
+ end
+
+ def arguments(node)
+ node.children[2..-1]
+ end
+ end
+ end
+ end
+end
diff --git a/rubocop/cop/migration/with_lock_retries_with_change.rb b/rubocop/cop/migration/with_lock_retries_with_change.rb
new file mode 100644
index 00000000000..36fc1f92833
--- /dev/null
+++ b/rubocop/cop/migration/with_lock_retries_with_change.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require_relative '../../migration_helpers'
+
+module RuboCop
+ module Cop
+ module Migration
+ # Cop that prevents usage of `with_lock_retries` within the `change` method.
+ class WithLockRetriesWithChange < RuboCop::Cop::Cop
+ include MigrationHelpers
+
+ MSG = '`with_lock_retries` cannot be used within `change` so you must manually define ' \
+ 'the `up` and `down` methods in your migration class and use `with_lock_retries` in both methods'.freeze
+
+ def on_send(node)
+ return unless in_migration?(node)
+ return unless node.children[1] == :with_lock_retries
+
+ node.each_ancestor(:def) do |def_node|
+ add_offense(def_node, location: :name) if method_name(def_node) == :change
+ end
+ end
+
+ def method_name(node)
+ node.children.first
+ end
+ end
+ end
+ end
+end
diff --git a/rubocop/cop/scalability/idempotent_worker.rb b/rubocop/cop/scalability/idempotent_worker.rb
new file mode 100644
index 00000000000..a38b457b7c7
--- /dev/null
+++ b/rubocop/cop/scalability/idempotent_worker.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require_relative '../../code_reuse_helpers.rb'
+
+module RuboCop
+ module Cop
+ module Scalability
+ # This cop checks for the `idempotent!` call in the worker scope.
+ #
+ # @example
+ #
+ # # bad
+ # class TroubleMakerWorker
+ # def perform
+ # end
+ # end
+ #
+ # # good
+ # class NiceWorker
+ # idempotent!
+ #
+ # def perform
+ # end
+ # end
+ #
+ class IdempotentWorker < RuboCop::Cop::Cop
+ include CodeReuseHelpers
+
+ HELP_LINK = 'https://github.com/mperham/sidekiq/wiki/Best-Practices#2-make-your-job-idempotent-and-transactional'
+
+ MSG = <<~MSG
+ Avoid adding not idempotent workers.
+
+ A worker is considered idempotent if:
+
+ 1. It can safely run multiple times with the same arguments
+ 2. The application side-effects are expected to happen once (or side-effects of a second run are not impactful)
+ 3. It can safely be skipped if another job with the same arguments is already in the queue
+
+ If all the above is true, make sure to mark it as so by calling the `idempotent!`
+ method in the worker scope.
+
+ See #{HELP_LINK}
+ MSG
+
+ def_node_search :idempotent?, <<~PATTERN
+ (send nil? :idempotent!)
+ PATTERN
+
+ def on_class(node)
+ return unless in_worker?(node)
+ return if idempotent?(node)
+
+ add_offense(node, location: :expression)
+ end
+ end
+ end
+ end
+end
diff --git a/rubocop/migration_helpers.rb b/rubocop/migration_helpers.rb
index 577f768da67..767cacaecaf 100644
--- a/rubocop/migration_helpers.rb
+++ b/rubocop/migration_helpers.rb
@@ -10,6 +10,10 @@ module RuboCop
dirname(node).end_with?('db/post_migrate', 'db/geo/post_migrate')
end
+ def version(node)
+ File.basename(node.location.expression.source_buffer.name).split('_').first.to_i
+ end
+
private
def dirname(node)
diff --git a/scripts/clean-old-cached-assets b/scripts/clean-old-cached-assets
index 8bdd3a9cdb6..9a373439e5e 100755
--- a/scripts/clean-old-cached-assets
+++ b/scripts/clean-old-cached-assets
@@ -2,5 +2,7 @@
# Clean up cached files that are older than 4 days
find tmp/cache/assets/sprockets/ -type f -mtime +4 -execdir rm -- "{}" \;
+find tmp/cache/webpack-dlls/ -maxdepth 1 -type d -mtime +4 -exec rm -rf -- "{}" \;
du -d 0 -h tmp/cache/assets/sprockets | cut -f1 | xargs -I % echo "tmp/cache/assets/sprockets/ is currently %"
+du -d 0 -h tmp/cache/webpack-dlls | cut -f1 | xargs -I % echo "tmp/cache/webpack-dlls is currently %"
diff --git a/scripts/frontend/block_dependencies.js b/scripts/frontend/block_dependencies.js
new file mode 100644
index 00000000000..c9257c9f72b
--- /dev/null
+++ b/scripts/frontend/block_dependencies.js
@@ -0,0 +1,21 @@
+const path = require('path');
+const packageJson = require(path.join(process.cwd(), 'package.json'));
+const blockedDependencies = packageJson.blockedDependencies || {};
+const dependencies = packageJson.dependencies;
+const devDependencies = packageJson.devDependencies;
+const blockedDependenciesNames = Object.keys(blockedDependencies);
+const blockedDependenciesFound = blockedDependenciesNames.filter(
+ blockedDependency => dependencies[blockedDependency] || devDependencies[blockedDependency],
+);
+
+if (blockedDependenciesFound.length) {
+ console.log('The following package.json dependencies are not allowed:');
+
+ blockedDependenciesFound.forEach(blockedDependency => {
+ const infoLink = blockedDependencies[blockedDependency];
+
+ console.log(`- ${blockedDependency}: See ${infoLink} for more information.`);
+ });
+
+ process.exit(-1);
+}
diff --git a/scripts/frontend/merge_coverage_frontend.js b/scripts/frontend/merge_coverage_frontend.js
new file mode 100644
index 00000000000..507695b45e5
--- /dev/null
+++ b/scripts/frontend/merge_coverage_frontend.js
@@ -0,0 +1,31 @@
+const { create } = require('istanbul-reports');
+const { createCoverageMap } = require('istanbul-lib-coverage');
+const { createContext } = require('istanbul-lib-report');
+const { resolve } = require('path');
+const { sync } = require('glob');
+
+const coverageMap = createCoverageMap();
+
+const coverageDir = resolve(__dirname, '../../coverage-frontend');
+const reportFiles = sync(`${coverageDir}/*/coverage-final.json`);
+
+// Normalize coverage report generated by jest that has additional "data" key
+// https://github.com/facebook/jest/issues/2418#issuecomment-423806659
+const normalizeReport = report => {
+ const normalizedReport = Object.assign({}, report);
+ Object.entries(normalizedReport).forEach(([k, v]) => {
+ if (v.data) normalizedReport[k] = v.data;
+ });
+ return normalizedReport;
+};
+
+reportFiles
+ .map(reportFile => require(reportFile))
+ .map(normalizeReport)
+ .forEach(report => coverageMap.merge(report));
+
+const context = createContext({ coverageMap: coverageMap, dir: 'coverage-frontend' });
+
+['json', 'lcov', 'text-summary', 'clover'].forEach(reporter => {
+ create(reporter, {}).execute(context);
+});
diff --git a/scripts/frontend/parallel_ci_sequencer.js b/scripts/frontend/parallel_ci_sequencer.js
new file mode 100644
index 00000000000..d7a674535a6
--- /dev/null
+++ b/scripts/frontend/parallel_ci_sequencer.js
@@ -0,0 +1,41 @@
+const Sequencer = require('@jest/test-sequencer').default;
+
+class ParallelCISequencer extends Sequencer {
+ constructor() {
+ super();
+ this.ciNodeIndex = Number(process.env.CI_NODE_INDEX || '1');
+ this.ciNodeTotal = Number(process.env.CI_NODE_TOTAL || '1');
+ }
+
+ sort(tests) {
+ const sortedTests = this.sortByPath(tests);
+ const testsForThisRunner = this.distributeAcrossCINodes(sortedTests);
+
+ console.log(`CI_NODE_INDEX: ${this.ciNodeIndex}`);
+ console.log(`CI_NODE_TOTAL: ${this.ciNodeTotal}`);
+ console.log(`Total number of tests: ${tests.length}`);
+ console.log(`Total number of tests for this runner: ${testsForThisRunner.length}`);
+
+ return testsForThisRunner;
+ }
+
+ sortByPath(tests) {
+ return tests.sort((test1, test2) => {
+ if (test1.path < test2.path) {
+ return -1;
+ }
+ if (test1.path > test2.path) {
+ return 1;
+ }
+ return 0;
+ });
+ }
+
+ distributeAcrossCINodes(tests) {
+ return tests.filter((test, index) => {
+ return index % this.ciNodeTotal === this.ciNodeIndex - 1;
+ });
+ }
+}
+
+module.exports = ParallelCISequencer;
diff --git a/scripts/gemfile_lock_changed.sh b/scripts/gemfile_lock_changed.sh
new file mode 100755
index 00000000000..24e2c685f11
--- /dev/null
+++ b/scripts/gemfile_lock_changed.sh
@@ -0,0 +1,26 @@
+#!/bin/sh
+
+gemfile_lock_changed() {
+ if [ -n "$(git diff --name-only -- Gemfile.lock)" ]; then
+ cat << EOF
+ Gemfile was updated but Gemfile.lock was not updated.
+
+ Usually, when Gemfile is updated, you should run
+ \`\`\`
+ bundle install
+ \`\`\`
+
+ or
+
+ \`\`\`
+ bundle update <the-added-or-updated-gem>
+ \`\`\`
+
+ and commit the Gemfile.lock changes.
+EOF
+
+ exit 1
+ fi
+}
+
+gemfile_lock_changed
diff --git a/scripts/lint-doc.sh b/scripts/lint-doc.sh
index 69c7a56c34f..d9de48fd0c0 100755
--- a/scripts/lint-doc.sh
+++ b/scripts/lint-doc.sh
@@ -2,36 +2,41 @@
cd "$(dirname "$0")/.."
echo "=> Linting documents at path $(pwd) as $(whoami)..."
+echo
+ERRORCODE=0
# Use long options (e.g. --header instead of -H) for curl examples in documentation.
echo '=> Checking for cURL short options...'
+echo
grep --extended-regexp --recursive --color=auto 'curl (.+ )?-[^- ].*' doc/ >/dev/null 2>&1
if [ $? -eq 0 ]
then
echo '✖ ERROR: Short options for curl should not be used in documentation!
Use long options (e.g., --header instead of -H):' >&2
grep --extended-regexp --recursive --color=auto 'curl (.+ )?-[^- ].*' doc/
- exit 1
+ ((ERRORCODE++))
fi
# Ensure that the CHANGELOG.md does not contain duplicate versions
DUPLICATE_CHANGELOG_VERSIONS=$(grep --extended-regexp '^## .+' CHANGELOG.md | sed -E 's| \(.+\)||' | sort -r | uniq -d)
echo '=> Checking for CHANGELOG.md duplicate entries...'
+echo
if [ "${DUPLICATE_CHANGELOG_VERSIONS}" != "" ]
then
echo '✖ ERROR: Duplicate versions in CHANGELOG.md:' >&2
echo "${DUPLICATE_CHANGELOG_VERSIONS}" >&2
- exit 1
+ ((ERRORCODE++))
fi
# Make sure no files in doc/ are executable
EXEC_PERM_COUNT=$(find doc/ -type f -perm 755 | wc -l)
echo "=> Checking $(pwd)/doc for executable permissions..."
+echo
if [ "${EXEC_PERM_COUNT}" -ne 0 ]
then
echo '✖ ERROR: Executable permissions should not be used in documentation! Use `chmod 644` to the files in question:' >&2
find doc/ -type f -perm 755
- exit 1
+ ((ERRORCODE++))
fi
# Do not use 'README.md', instead use 'index.md'
@@ -39,14 +44,56 @@ fi
NUMBER_READMES=46
FIND_READMES=$(find doc/ -name "README.md" | wc -l)
echo '=> Checking for new README.md files...'
+echo
if [ ${FIND_READMES} -ne $NUMBER_READMES ]
then
echo
echo ' ✖ ERROR: New README.md file(s) detected, prefer index.md over README.md.' >&2
echo ' https://docs.gitlab.com/ee/development/documentation/styleguide.html#work-with-directories-and-files'
echo
- exit 1
+ ((ERRORCODE++))
fi
-echo "✔ Linting passed"
-exit 0
+MD_DOC_PATH=${MD_DOC_PATH:-doc}
+
+function run_locally_or_in_docker() {
+ local cmd=$1
+ local args=$2
+
+ if hash ${cmd} 2>/dev/null
+ then
+ $cmd $args
+ elif hash docker 2>/dev/null
+ then
+ docker run -t -v ${PWD}:/gitlab -w /gitlab --rm registry.gitlab.com/gitlab-org/gitlab-docs:lint ${cmd} ${args}
+ else
+ echo
+ echo " ✖ ERROR: '${cmd}' not found. Install '${cmd}' or Docker to proceed." >&2
+ echo
+ ((ERRORCODE++))
+ fi
+
+ if [ $? -ne 0 ]
+ then
+ echo
+ echo " ✖ ERROR: '${cmd}' failed with errors." >&2
+ echo
+ ((ERRORCODE++))
+ fi
+}
+
+echo '=> Linting markdown style...'
+echo
+run_locally_or_in_docker 'markdownlint' "--config .markdownlint.json ${MD_DOC_PATH}"
+
+echo '=> Linting prose...'
+run_locally_or_in_docker 'vale' "--minAlertLevel error ${MD_DOC_PATH}"
+
+if [ $ERRORCODE -ne 0 ]
+then
+ echo "✖ ${ERRORCODE} lint test(s) failed. Review the log carefully to see full listing."
+ exit 1
+else
+ echo "✔ Linting passed"
+ exit 0
+fi
diff --git a/scripts/lint-rugged b/scripts/lint-rugged
index 1b3fb54f70b..dfa4df8333f 100755
--- a/scripts/lint-rugged
+++ b/scripts/lint-rugged
@@ -15,6 +15,9 @@ ALLOWED = [
'lib/gitlab/git/rugged_impl/',
'lib/gitlab/gitaly_client/storage_settings.rb',
+ # Needed to detect Rugged enabled: https://gitlab.com/gitlab-org/gitlab/issues/35371
+ 'lib/gitlab/config_checker/puma_rugged_checker.rb',
+
# Needed for logging
'config/initializers/peek.rb',
'config/initializers/lograge.rb',
@@ -27,7 +30,8 @@ ALLOWED = [
rugged_lines = IO.popen(%w[git grep -i -n rugged -- app config lib], &:read).lines
rugged_lines = rugged_lines.select { |l| /^[^:]*\.rb:/ =~ l }
rugged_lines = rugged_lines.reject { |l| l.start_with?(*ALLOWED) }
-rugged_lines = rugged_lines.reject { |l| /(include|prepend) Gitlab::Git::RuggedImpl/ =~ l}
+rugged_lines = rugged_lines.reject { |l| /(include|prepend) Gitlab::Git::RuggedImpl/ =~ l }
+rugged_lines = rugged_lines.reject { |l| l.include?('Gitlab::ConfigChecker::PumaRuggedChecker.check') }
rugged_lines = rugged_lines.reject do |line|
code, _comment = line.split('# ', 2)
code !~ /rugged/i
diff --git a/scripts/review_apps/automated_cleanup.rb b/scripts/review_apps/automated_cleanup.rb
index 8a04d8e00bc..0a073a28bf3 100755
--- a/scripts/review_apps/automated_cleanup.rb
+++ b/scripts/review_apps/automated_cleanup.rb
@@ -54,7 +54,7 @@ class AutomatedCleanup
end
def perform_gitlab_environment_cleanup!(days_for_stop:, days_for_delete:)
- puts "Checking for review apps not updated in the last #{days_for_stop} days..."
+ puts "Checking for Review Apps not updated in the last #{days_for_stop} days..."
checked_environments = []
delete_threshold = threshold_time(days: days_for_delete)
@@ -81,10 +81,13 @@ class AutomatedCleanup
release = Quality::HelmClient::Release.new(environment.slug, 1, deployed_at.to_s, nil, nil, review_apps_namespace)
releases_to_delete << release
end
- elsif deployed_at < stop_threshold
- stop_environment(environment, deployment)
else
- print_release_state(subject: 'Review app', release_name: environment.slug, release_date: last_deploy, action: 'leaving')
+ if deployed_at >= stop_threshold
+ print_release_state(subject: 'Review App', release_name: environment.slug, release_date: last_deploy, action: 'leaving')
+ else
+ environment_state = fetch_environment(environment)&.state
+ stop_environment(environment, deployment) if environment_state && environment_state != 'stopped'
+ end
end
checked_environments << environment.slug
@@ -94,9 +97,9 @@ class AutomatedCleanup
end
def perform_helm_releases_cleanup!(days:)
- puts "Checking for Helm releases not updated in the last #{days} days..."
+ puts "Checking for Helm releases that are FAILED or not updated in the last #{days} days..."
- threshold_day = threshold_time(days: days)
+ threshold = threshold_time(days: days)
releases_to_delete = []
@@ -104,7 +107,7 @@ class AutomatedCleanup
# Prevents deleting `dns-gitlab-review-app` releases or other unrelated releases
next unless release.name.start_with?('review-')
- if release.status == 'FAILED' || release.last_update < threshold_day
+ if release.status == 'FAILED' || release.last_update < threshold
releases_to_delete << release
else
print_release_state(subject: 'Release', release_name: release.name, release_date: release.last_update, action: 'leaving')
@@ -116,12 +119,19 @@ class AutomatedCleanup
private
+ def fetch_environment(environment)
+ gitlab.environment(project_path, environment.id)
+ rescue Errno::ETIMEDOUT => ex
+ puts "Failed to fetch '#{environment.name}' / '#{environment.slug}' (##{environment.id}):\n#{ex.message}"
+ nil
+ end
+
def delete_environment(environment, deployment)
print_release_state(subject: 'Review app', release_name: environment.slug, release_date: deployment.created_at, action: 'deleting')
gitlab.delete_environment(project_path, environment.id)
rescue Gitlab::Error::Forbidden
- puts "Review app '#{environment.slug}' is forbidden: skipping it"
+ puts "Review app '#{environment.name}' / '#{environment.slug}' (##{environment.id}) is forbidden: skipping it"
end
def stop_environment(environment, deployment)
@@ -129,7 +139,7 @@ class AutomatedCleanup
gitlab.stop_environment(project_path, environment.id)
rescue Gitlab::Error::Forbidden
- puts "Review app '#{environment.slug}' is forbidden: skipping it"
+ puts "Review app '#{environment.name}' / '#{environment.slug}' (##{environment.id}) is forbidden: skipping it"
end
def helm_releases
@@ -180,14 +190,14 @@ end
automated_cleanup = AutomatedCleanup.new
-timed('Review apps cleanup') do
- automated_cleanup.perform_gitlab_environment_cleanup!(days_for_stop: 2, days_for_delete: 3)
+timed('Review Apps cleanup') do
+ automated_cleanup.perform_gitlab_environment_cleanup!(days_for_stop: 5, days_for_delete: 6)
end
puts
timed('Helm releases cleanup') do
- automated_cleanup.perform_helm_releases_cleanup!(days: 3)
+ automated_cleanup.perform_helm_releases_cleanup!(days: 7)
end
exit(0)
diff --git a/scripts/review_apps/base-config.yaml b/scripts/review_apps/base-config.yaml
index 95b0295622a..a54dadb8d85 100644
--- a/scripts/review_apps/base-config.yaml
+++ b/scripts/review_apps/base-config.yaml
@@ -17,10 +17,10 @@ gitlab:
resources:
requests:
cpu: 1200m
- memory: 240M
+ memory: 245M
limits:
cpu: 1800m
- memory: 360M
+ memory: 367M
persistence:
size: 10G
gitlab-exporter:
@@ -51,11 +51,11 @@ gitlab:
sidekiq:
resources:
requests:
- cpu: 650m
- memory: 1018M
+ cpu: 855m
+ memory: 1071M
limits:
- cpu: 975m
- memory: 1527M
+ cpu: 1282m
+ memory: 1606M
hpa:
targetAverageValue: 650m
task-runner:
@@ -69,11 +69,11 @@ gitlab:
unicorn:
resources:
requests:
- cpu: 525m
- memory: 1711M
+ cpu: 746m
+ memory: 1873M
limits:
- cpu: 787m
- memory: 2566M
+ cpu: 1119m
+ memory: 2809M
deployment:
readinessProbe:
initialDelaySeconds: 5 # Default is 0
@@ -140,10 +140,10 @@ postgresql:
enabled: false
resources:
requests:
- cpu: 300m
+ cpu: 347m
memory: 250M
limits:
- cpu: 450m
+ cpu: 520m
memory: 375M
prometheus:
install: false
diff --git a/scripts/security-harness b/scripts/security-harness
index a1642489fe2..c101cd03454 100755
--- a/scripts/security-harness
+++ b/scripts/security-harness
@@ -28,9 +28,9 @@ HOOK_DATA = <<~HOOK
if [ -e "$harness" ]
then
- if [[ ("$url" != *"dev.gitlab.org"*) && ("$url" != *"gitlab-org/security/"*) ]]
+ if [[ "$url" != *"gitlab-org/security/"* ]]
then
- echo "Pushing to remotes other than dev.gitlab.org and gitlab.com/gitlab-org/security has been disabled!"
+ echo "Pushing to remotes other than gitlab.com/gitlab-org/security has been disabled!"
echo "Run scripts/security-harness to disable this check."
echo
@@ -58,7 +58,7 @@ def toggle
else
FileUtils.touch(harness_path)
- puts "#{SHELL_GREEN}Security harness installed -- you will only be able to push to dev.gitlab.org or gitlab.com/gitlab-org/security!#{SHELL_CLEAR}"
+ puts "#{SHELL_GREEN}Security harness installed -- you will only be able to push to gitlab.com/gitlab-org/security!#{SHELL_CLEAR}"
end
end
diff --git a/scripts/static-analysis b/scripts/static-analysis
index 1f55c035ed1..ede29b85b8d 100755
--- a/scripts/static-analysis
+++ b/scripts/static-analysis
@@ -45,11 +45,13 @@ def jobs_to_run(node_index, node_total)
%w[yarn run eslint],
%w[yarn run stylelint],
%w[yarn run prettier-all],
+ %w[yarn run block-dependencies],
%w[bundle exec rubocop --parallel],
%w[scripts/lint-conflicts.sh],
%w[scripts/lint-rugged],
%w[scripts/frontend/check_no_partial_karma_jest.sh],
- %w[scripts/lint-changelog-filenames]
+ %w[scripts/lint-changelog-filenames],
+ %w[scripts/gemfile_lock_changed.sh]
]
case node_total
diff --git a/scripts/trigger-build b/scripts/trigger-build
index 889dcc01043..c7b45480bf3 100755
--- a/scripts/trigger-build
+++ b/scripts/trigger-build
@@ -16,6 +16,14 @@ module Trigger
%w[gitlab gitlab-ee].include?(ENV['CI_PROJECT_NAME'])
end
+ def self.non_empty_variable_value(variable)
+ variable_value = ENV[variable]
+
+ return if variable_value.nil? || variable_value.empty?
+
+ variable_value
+ end
+
class Base
def invoke!(post_comment: false, downstream_job_name: nil)
pipeline_variables = variables
@@ -84,13 +92,15 @@ module Trigger
end
def base_variables
+ # Use CI_MERGE_REQUEST_SOURCE_BRANCH_SHA for omnibus checkouts due to pipeline for merged results,
+ # and fallback to CI_COMMIT_SHA for the `detached` pipelines.
{
'GITLAB_REF_SLUG' => ENV['CI_COMMIT_TAG'] ? ENV['CI_COMMIT_REF_NAME'] : ENV['CI_COMMIT_REF_SLUG'],
'TRIGGERED_USER' => ENV['TRIGGERED_USER'] || ENV['GITLAB_USER_NAME'],
'TRIGGER_SOURCE' => ENV['CI_JOB_URL'],
'TOP_UPSTREAM_SOURCE_PROJECT' => ENV['CI_PROJECT_PATH'],
'TOP_UPSTREAM_SOURCE_JOB' => ENV['CI_JOB_URL'],
- 'TOP_UPSTREAM_SOURCE_SHA' => ENV['CI_COMMIT_SHA'],
+ 'TOP_UPSTREAM_SOURCE_SHA' => Trigger.non_empty_variable_value('CI_MERGE_REQUEST_SOURCE_BRANCH_SHA') || ENV['CI_COMMIT_SHA'],
'TOP_UPSTREAM_SOURCE_REF' => ENV['CI_COMMIT_REF_NAME'],
'TOP_UPSTREAM_MERGE_REQUEST_PROJECT_ID' => ENV['CI_MERGE_REQUEST_PROJECT_ID'],
'TOP_UPSTREAM_MERGE_REQUEST_IID' => ENV['CI_MERGE_REQUEST_IID']
@@ -125,8 +135,10 @@ module Trigger
end
def extra_variables
+ # Use CI_MERGE_REQUEST_SOURCE_BRANCH_SHA for omnibus checkouts due to pipeline for merged results
+ # and fallback to CI_COMMIT_SHA for the `detached` pipelines.
{
- 'GITLAB_VERSION' => ENV['CI_COMMIT_SHA'],
+ 'GITLAB_VERSION' => Trigger.non_empty_variable_value('CI_MERGE_REQUEST_SOURCE_BRANCH_SHA') || ENV['CI_COMMIT_SHA'],
'ALTERNATIVE_SOURCES' => 'true',
'ee' => Trigger.ee? ? 'true' : 'false',
'QA_BRANCH' => ENV['QA_BRANCH'] || 'master'
@@ -164,11 +176,9 @@ module Trigger
edition = Trigger.ee? ? 'EE' : 'CE'
{
- # Back-compatibility until https://gitlab.com/gitlab-org/build/CNG/merge_requests/189 is merged
- "GITLAB_#{edition}_VERSION" => ENV['CI_COMMIT_REF_NAME'],
- "GITLAB_VERSION" => ENV['CI_COMMIT_REF_NAME'],
+ "GITLAB_VERSION" => ENV['CI_COMMIT_SHA'],
"GITLAB_TAG" => ENV['CI_COMMIT_TAG'],
- "GITLAB_ASSETS_TAG" => ENV['CI_COMMIT_TAG'] ? ENV['CI_COMMIT_REF_NAME'] : ENV['CI_COMMIT_REF_SLUG'],
+ "GITLAB_ASSETS_TAG" => ENV['CI_COMMIT_TAG'] ? ENV['CI_COMMIT_REF_NAME'] : ENV['CI_COMMIT_SHA'],
"FORCE_RAILS_IMAGE_BUILDS" => 'true',
"#{edition}_PIPELINE" => 'true'
}
@@ -192,7 +202,7 @@ module Trigger
Gitlab.create_commit_comment(
ENV['CI_PROJECT_PATH'],
- ENV['CI_COMMIT_SHA'],
+ Trigger.non_empty_variable_value('CI_MERGE_REQUEST_SOURCE_BRANCH_SHA') || ENV['CI_COMMIT_SHA'],
"The [`#{ENV['CI_JOB_NAME']}`](#{ENV['CI_JOB_URL']}) job from pipeline #{ENV['CI_PIPELINE_URL']} triggered #{downstream_pipeline.web_url} downstream.")
rescue Gitlab::Error::Error => error
diff --git a/scripts/utils.sh b/scripts/utils.sh
index 7eae9531f74..5d52ca0b40a 100644
--- a/scripts/utils.sh
+++ b/scripts/utils.sh
@@ -57,97 +57,3 @@ function echoinfo() {
printf "\033[0;33m%s\n\033[0m" "${1}" >&2;
fi
}
-
-function get_job_id() {
- local job_name="${1}"
- local query_string="${2:+&${2}}"
- local api_token="${API_TOKEN-${GITLAB_BOT_MULTI_PROJECT_PIPELINE_POLLING_TOKEN}}"
- if [ -z "${api_token}" ]; then
- echoerr "Please provide an API token with \$API_TOKEN or \$GITLAB_BOT_MULTI_PROJECT_PIPELINE_POLLING_TOKEN."
- return
- fi
-
- local max_page=3
- local page=1
-
- while true; do
- local url="https://gitlab.com/api/v4/projects/${CI_PROJECT_ID}/pipelines/${CI_PIPELINE_ID}/jobs?per_page=100&page=${page}${query_string}"
- echoinfo "GET ${url}"
-
- local job_id
- job_id=$(curl --silent --show-error --header "PRIVATE-TOKEN: ${api_token}" "${url}" | jq "map(select(.name == \"${job_name}\")) | map(.id) | last")
- [[ "${job_id}" == "null" && "${page}" -lt "$max_page" ]] || break
-
- let "page++"
- done
-
- if [[ "${job_id}" == "" ]]; then
- echoerr "The '${job_name}' job ID couldn't be retrieved!"
- else
- echoinfo "The '${job_name}' job ID is ${job_id}"
- echo "${job_id}"
- fi
-}
-
-function play_job() {
- local job_name="${1}"
- local job_id
- job_id=$(get_job_id "${job_name}" "scope=manual");
- if [ -z "${job_id}" ]; then return; fi
-
- local api_token="${API_TOKEN-${GITLAB_BOT_MULTI_PROJECT_PIPELINE_POLLING_TOKEN}}"
- if [ -z "${api_token}" ]; then
- echoerr "Please provide an API token with \$API_TOKEN or \$GITLAB_BOT_MULTI_PROJECT_PIPELINE_POLLING_TOKEN."
- return
- fi
-
- local url="https://gitlab.com/api/v4/projects/${CI_PROJECT_ID}/jobs/${job_id}/play"
- echoinfo "POST ${url}"
-
- local job_url
- job_url=$(curl --silent --show-error --request POST --header "PRIVATE-TOKEN: ${api_token}" "${url}" | jq ".web_url")
- echoinfo "Manual job '${job_name}' started at: ${job_url}"
-}
-
-function wait_for_job_to_be_done() {
- local job_name="${1}"
- local query_string="${2}"
- local job_id
- job_id=$(get_job_id "${job_name}" "${query_string}")
- if [ -z "${job_id}" ]; then return; fi
-
- local api_token="${API_TOKEN-${GITLAB_BOT_MULTI_PROJECT_PIPELINE_POLLING_TOKEN}}"
- if [ -z "${api_token}" ]; then
- echoerr "Please provide an API token with \$API_TOKEN or \$GITLAB_BOT_MULTI_PROJECT_PIPELINE_POLLING_TOKEN."
- return
- fi
-
- echoinfo "Waiting for the '${job_name}' job to finish..."
-
- local url="https://gitlab.com/api/v4/projects/${CI_PROJECT_ID}/jobs/${job_id}"
- echoinfo "GET ${url}"
-
- # In case the job hasn't finished yet. Keep trying until the job times out.
- local interval=30
- local elapsed_seconds=0
- while true; do
- local job_status
- job_status=$(curl --silent --show-error --header "PRIVATE-TOKEN: ${api_token}" "${url}" | jq ".status" | sed -e s/\"//g)
- [[ "${job_status}" == "pending" || "${job_status}" == "running" ]] || break
-
- printf "."
- let "elapsed_seconds+=interval"
- sleep ${interval}
- done
-
- local elapsed_minutes=$((elapsed_seconds / 60))
- echoinfo "Waited '${job_name}' for ${elapsed_minutes} minutes."
-
- if [[ "${job_status}" == "failed" ]]; then
- echoerr "The '${job_name}' failed."
- elif [[ "${job_status}" == "manual" ]]; then
- echoinfo "The '${job_name}' is manual."
- else
- echoinfo "The '${job_name}' passed."
- fi
-}
diff --git a/spec/bin/sidekiq_cluster_spec.rb b/spec/bin/sidekiq_cluster_spec.rb
new file mode 100644
index 00000000000..67de55ad9f5
--- /dev/null
+++ b/spec/bin/sidekiq_cluster_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'bin/sidekiq-cluster' do
+ using RSpec::Parameterized::TableSyntax
+
+ context 'when selecting some queues and excluding others' do
+ where(:args, :included, :excluded) do
+ %w[--negate cronjob] | '-qdefault,1' | '-qcronjob,1'
+ %w[--experimental-queue-selector resource_boundary=cpu] | '-qupdate_merge_requests,1' | '-qdefault,1'
+ end
+
+ with_them do
+ it 'runs successfully', :aggregate_failures do
+ cmd = %w[bin/sidekiq-cluster --dryrun] + args
+
+ output, status = Gitlab::Popen.popen(cmd, Rails.root.to_s)
+
+ expect(status).to be(0)
+ expect(output).to include('"bundle", "exec", "sidekiq"')
+ expect(output).to include(included)
+ expect(output).not_to include(excluded)
+ end
+ end
+ end
+
+ context 'when selecting all queues' do
+ [
+ %w[*],
+ %w[--experimental-queue-selector *]
+ ].each do |args|
+ it "runs successfully with `#{args}`", :aggregate_failures do
+ cmd = %w[bin/sidekiq-cluster --dryrun] + args
+
+ output, status = Gitlab::Popen.popen(cmd, Rails.root.to_s)
+
+ expect(status).to be(0)
+ expect(output).to include('"bundle", "exec", "sidekiq"')
+ expect(output).to include('-qdefault,1')
+ expect(output).to include('-qcronjob:ci_archive_traces_cron,1')
+ end
+ end
+ end
+end
diff --git a/spec/config/settings_spec.rb b/spec/config/settings_spec.rb
index 26d92593a08..20c0b92f135 100644
--- a/spec/config/settings_spec.rb
+++ b/spec/config/settings_spec.rb
@@ -9,6 +9,12 @@ describe Settings do
end
end
+ describe '.load_dynamic_cron_schedules!' do
+ it 'generates a valid cron schedule' do
+ expect(Fugit::Cron.parse(described_class.load_dynamic_cron_schedules!)).to be_a(Fugit::Cron)
+ end
+ end
+
describe '.attr_encrypted_db_key_base_truncated' do
it 'is a string with maximum 32 bytes size' do
expect(described_class.attr_encrypted_db_key_base_truncated.bytesize)
diff --git a/spec/controllers/admin/integrations_controller_spec.rb b/spec/controllers/admin/integrations_controller_spec.rb
new file mode 100644
index 00000000000..0641f64b0e3
--- /dev/null
+++ b/spec/controllers/admin/integrations_controller_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Admin::IntegrationsController do
+ let(:admin) { create(:admin) }
+ let!(:project) { create(:project) }
+
+ before do
+ sign_in(admin)
+ end
+
+ describe '#edit' do
+ context 'when instance_level_integrations not enabled' do
+ it 'returns not_found' do
+ allow(Feature).to receive(:enabled?).with(:instance_level_integrations) { false }
+
+ get :edit, params: { id: Service.available_services_names.sample }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ Service.available_services_names.each do |integration_name|
+ context "#{integration_name}" do
+ it 'successfully displays the template' do
+ get :edit, params: { id: integration_name }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:edit)
+ end
+ end
+ end
+ end
+
+ describe '#update' do
+ let(:integration) { create(:jira_service, project: project) }
+
+ before do
+ put :update, params: { id: integration.class.to_param, service: { url: url } }
+ end
+
+ context 'valid params' do
+ let(:url) { 'https://jira.gitlab-example.com' }
+
+ it 'updates the integration' do
+ expect(response).to have_gitlab_http_status(:found)
+ expect(integration.reload.url).to eq(url)
+ end
+ end
+
+ context 'invalid params' do
+ let(:url) { 'https://jira.localhost' }
+
+ it 'does not update the integration' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:edit)
+ expect(integration.reload.url).not_to eq(url)
+ end
+ end
+ end
+
+ describe '#test' do
+ context 'testable' do
+ let(:integration) { create(:jira_service, project: project) }
+
+ it 'returns ok' do
+ allow_any_instance_of(integration.class).to receive(:test) { { success: true } }
+
+ put :test, params: { id: integration.class.to_param }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'not testable' do
+ let(:integration) { create(:alerts_service, project: project) }
+
+ it 'returns not found' do
+ put :test, params: { id: integration.class.to_param }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+end
diff --git a/spec/controllers/admin/serverless/domains_controller_spec.rb b/spec/controllers/admin/serverless/domains_controller_spec.rb
index aed83e190be..43c3f0117bc 100644
--- a/spec/controllers/admin/serverless/domains_controller_spec.rb
+++ b/spec/controllers/admin/serverless/domains_controller_spec.rb
@@ -15,7 +15,7 @@ describe Admin::Serverless::DomainsController do
it 'responds with 404' do
get :index
- expect(response.status).to eq(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -33,7 +33,7 @@ describe Admin::Serverless::DomainsController do
it 'responds with 404' do
get :index
- expect(response.status).to eq(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -81,7 +81,7 @@ describe Admin::Serverless::DomainsController do
it 'responds with 404' do
post :create, params: { pages_domain: create_params }
- expect(response.status).to eq(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -98,7 +98,7 @@ describe Admin::Serverless::DomainsController do
it 'responds with 404' do
post :create, params: { pages_domain: create_params }
- expect(response.status).to eq(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -169,7 +169,7 @@ describe Admin::Serverless::DomainsController do
it 'responds with 404' do
put :update, params: { id: domain.id, pages_domain: update_params }
- expect(response.status).to eq(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -186,7 +186,7 @@ describe Admin::Serverless::DomainsController do
it 'responds with 404' do
put :update, params: { id: domain.id, pages_domain: update_params }
- expect(response.status).to eq(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -221,7 +221,7 @@ describe Admin::Serverless::DomainsController do
it 'returns 404' do
put :update, params: { id: 0, pages_domain: update_params }
- expect(response.status).to eq(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -247,7 +247,7 @@ describe Admin::Serverless::DomainsController do
it 'responds with 404' do
post :verify, params: { id: domain.id }
- expect(response.status).to eq(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -272,7 +272,7 @@ describe Admin::Serverless::DomainsController do
it 'responds with 404' do
post :verify, params: { id: domain.id }
- expect(response.status).to eq(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -295,4 +295,76 @@ describe Admin::Serverless::DomainsController do
end
end
end
+
+ describe '#destroy' do
+ let!(:domain) { create(:pages_domain, :instance_serverless) }
+
+ context 'non-admin user' do
+ before do
+ sign_in(user)
+ end
+
+ it 'responds with 404' do
+ delete :destroy, params: { id: domain.id }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'admin user' do
+ before do
+ sign_in(admin)
+ end
+
+ context 'with serverless_domain feature disabled' do
+ before do
+ stub_feature_flags(serverless_domain: false)
+ end
+
+ it 'responds with 404' do
+ delete :destroy, params: { id: domain.id }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when domain exists' do
+ context 'and is not associated to any clusters' do
+ it 'deletes the domain' do
+ expect { delete :destroy, params: { id: domain.id } }
+ .to change { PagesDomain.count }.from(1).to(0)
+
+ expect(response).to have_gitlab_http_status(:found)
+ expect(flash[:notice]).to include('Domain was successfully deleted.')
+ end
+ end
+
+ context 'and is associated to any clusters' do
+ before do
+ create(:serverless_domain_cluster, pages_domain: domain)
+ end
+
+ it 'does not delete the domain' do
+ expect { delete :destroy, params: { id: domain.id } }
+ .not_to change { PagesDomain.count }
+
+ expect(response).to have_gitlab_http_status(:conflict)
+ expect(flash[:notice]).to include('Domain cannot be deleted while associated to one or more clusters.')
+ end
+ end
+ end
+
+ context 'when domain does not exist' do
+ before do
+ domain.destroy!
+ end
+
+ it 'responds with 404' do
+ delete :destroy, params: { id: domain.id }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+ end
end
diff --git a/spec/controllers/admin/services_controller_spec.rb b/spec/controllers/admin/services_controller_spec.rb
index 44233776865..35801643181 100644
--- a/spec/controllers/admin/services_controller_spec.rb
+++ b/spec/controllers/admin/services_controller_spec.rb
@@ -30,9 +30,9 @@ describe Admin::ServicesController do
describe "#update" do
let(:project) { create(:project) }
- let!(:service) do
+ let!(:service_template) do
RedmineService.create(
- project: project,
+ project: nil,
active: false,
template: true,
properties: {
@@ -44,9 +44,9 @@ describe Admin::ServicesController do
end
it 'calls the propagation worker when service is active' do
- expect(PropagateServiceTemplateWorker).to receive(:perform_async).with(service.id)
+ expect(PropagateServiceTemplateWorker).to receive(:perform_async).with(service_template.id)
- put :update, params: { id: service.id, service: { active: true } }
+ put :update, params: { id: service_template.id, service: { active: true } }
expect(response).to have_gitlab_http_status(:found)
end
@@ -54,7 +54,7 @@ describe Admin::ServicesController do
it 'does not call the propagation worker when service is not active' do
expect(PropagateServiceTemplateWorker).not_to receive(:perform_async)
- put :update, params: { id: service.id, service: { properties: {} } }
+ put :update, params: { id: service_template.id, service: { properties: {} } }
expect(response).to have_gitlab_http_status(:found)
end
diff --git a/spec/controllers/admin/sessions_controller_spec.rb b/spec/controllers/admin/sessions_controller_spec.rb
index 4bab6b51102..fabd79133ec 100644
--- a/spec/controllers/admin/sessions_controller_spec.rb
+++ b/spec/controllers/admin/sessions_controller_spec.rb
@@ -68,7 +68,7 @@ describe Admin::SessionsController, :do_not_mock_admin_mode do
# triggering the auth form will request admin mode
get :new
- post :create, params: { password: user.password }
+ post :create, params: { user: { password: user.password } }
expect(response).to redirect_to admin_root_path
expect(controller.current_user_mode.admin_mode?).to be(true)
@@ -82,7 +82,7 @@ describe Admin::SessionsController, :do_not_mock_admin_mode do
# triggering the auth form will request admin mode
get :new
- post :create, params: { password: '' }
+ post :create, params: { user: { password: '' } }
expect(response).to render_template :new
expect(controller.current_user_mode.admin_mode?).to be(false)
@@ -95,7 +95,7 @@ describe Admin::SessionsController, :do_not_mock_admin_mode do
# do not trigger the auth form
- post :create, params: { password: user.password }
+ post :create, params: { user: { password: user.password } }
expect(response).to redirect_to(new_admin_session_path)
expect(controller.current_user_mode.admin_mode?).to be(false)
@@ -110,12 +110,118 @@ describe Admin::SessionsController, :do_not_mock_admin_mode do
get :new
Timecop.freeze(Gitlab::Auth::CurrentUserMode::ADMIN_MODE_REQUESTED_GRACE_PERIOD.from_now) do
- post :create, params: { password: user.password }
+ post :create, params: { user: { password: user.password } }
expect(response).to redirect_to(new_admin_session_path)
expect(controller.current_user_mode.admin_mode?).to be(false)
end
end
+
+ context 'when using two-factor authentication via OTP' do
+ let(:user) { create(:admin, :two_factor) }
+
+ def authenticate_2fa(user_params)
+ post(:create, params: { user: user_params }, session: { otp_user_id: user.id })
+ end
+
+ it 'requests two factor after a valid password is provided' do
+ expect(controller.current_user_mode.admin_mode?).to be(false)
+
+ # triggering the auth form will request admin mode
+ get :new
+
+ post :create, params: { user: { password: user.password } }
+
+ expect(response).to render_template('admin/sessions/two_factor')
+ expect(controller.current_user_mode.admin_mode?).to be(false)
+ end
+
+ it 'can login with valid otp' do
+ expect(controller.current_user_mode.admin_mode?).to be(false)
+
+ controller.store_location_for(:redirect, admin_root_path)
+ controller.current_user_mode.request_admin_mode!
+
+ authenticate_2fa(otp_attempt: user.current_otp)
+
+ expect(response).to redirect_to admin_root_path
+ expect(controller.current_user_mode.admin_mode?).to be(true)
+ end
+
+ it 'cannot login with invalid otp' do
+ expect(controller.current_user_mode.admin_mode?).to be(false)
+
+ controller.current_user_mode.request_admin_mode!
+
+ authenticate_2fa(otp_attempt: 'invalid')
+
+ expect(response).to render_template('admin/sessions/two_factor')
+ expect(controller.current_user_mode.admin_mode?).to be(false)
+ end
+
+ context 'with password authentication disabled' do
+ before do
+ stub_application_setting(password_authentication_enabled_for_web: false)
+ end
+
+ it 'allows 2FA stage of non-password login' do
+ expect(controller.current_user_mode.admin_mode?).to be(false)
+
+ controller.store_location_for(:redirect, admin_root_path)
+ controller.current_user_mode.request_admin_mode!
+
+ authenticate_2fa(otp_attempt: user.current_otp)
+
+ expect(response).to redirect_to admin_root_path
+ expect(controller.current_user_mode.admin_mode?).to be(true)
+ end
+ end
+ end
+
+ context 'when using two-factor authentication via U2F' do
+ let(:user) { create(:admin, :two_factor_via_u2f) }
+
+ def authenticate_2fa_u2f(user_params)
+ post(:create, params: { user: user_params }, session: { otp_user_id: user.id })
+ end
+
+ it 'requests two factor after a valid password is provided' do
+ expect(controller.current_user_mode.admin_mode?).to be(false)
+
+ # triggering the auth form will request admin mode
+ get :new
+ post :create, params: { user: { password: user.password } }
+
+ expect(response).to render_template('admin/sessions/two_factor')
+ expect(controller.current_user_mode.admin_mode?).to be(false)
+ end
+
+ it 'can login with valid auth' do
+ allow(U2fRegistration).to receive(:authenticate).and_return(true)
+
+ expect(controller.current_user_mode.admin_mode?).to be(false)
+
+ controller.store_location_for(:redirect, admin_root_path)
+ controller.current_user_mode.request_admin_mode!
+
+ authenticate_2fa_u2f(login: user.username, device_response: '{}')
+
+ expect(response).to redirect_to admin_root_path
+ expect(controller.current_user_mode.admin_mode?).to be(true)
+ end
+
+ it 'cannot login with invalid auth' do
+ allow(U2fRegistration).to receive(:authenticate).and_return(false)
+
+ expect(controller.current_user_mode.admin_mode?).to be(false)
+
+ controller.current_user_mode.request_admin_mode!
+ authenticate_2fa_u2f(login: user.username, device_response: '{}')
+
+ expect(response).to render_template('admin/sessions/two_factor')
+ expect(controller.current_user_mode.admin_mode?).to be(false)
+ end
+ end
end
end
@@ -136,7 +242,7 @@ describe Admin::SessionsController, :do_not_mock_admin_mode do
expect(controller.current_user_mode.admin_mode?).to be(false)
get :new
- post :create, params: { password: user.password }
+ post :create, params: { user: { password: user.password } }
expect(controller.current_user_mode.admin_mode?).to be(true)
post :destroy
diff --git a/spec/controllers/application_controller_spec.rb b/spec/controllers/application_controller_spec.rb
index bdac7369780..4a3d591e94d 100644
--- a/spec/controllers/application_controller_spec.rb
+++ b/spec/controllers/application_controller_spec.rb
@@ -530,41 +530,6 @@ describe ApplicationController do
expect(controller.last_payload).to include('correlation_id' => 'new-id')
end
-
- context '422 errors' do
- it 'logs a response with a string' do
- response = spy(ActionDispatch::Response, status: 422, body: 'Hello world', content_type: 'application/json', cookies: {})
- allow(controller).to receive(:response).and_return(response)
- get :index
-
- expect(controller.last_payload[:response]).to eq('Hello world')
- end
-
- it 'logs a response with an array' do
- body = ['I want', 'my hat back']
- response = spy(ActionDispatch::Response, status: 422, body: body, content_type: 'application/json', cookies: {})
- allow(controller).to receive(:response).and_return(response)
- get :index
-
- expect(controller.last_payload[:response]).to eq(body)
- end
-
- it 'does not log a string with an empty body' do
- response = spy(ActionDispatch::Response, status: 422, body: nil, content_type: 'application/json', cookies: {})
- allow(controller).to receive(:response).and_return(response)
- get :index
-
- expect(controller.last_payload.has_key?(:response)).to be_falsey
- end
-
- it 'does not log an HTML body' do
- response = spy(ActionDispatch::Response, status: 422, body: 'This is a test', content_type: 'application/html', cookies: {})
- allow(controller).to receive(:response).and_return(response)
- get :index
-
- expect(controller.last_payload.has_key?(:response)).to be_falsey
- end
- end
end
describe '#access_denied' do
@@ -725,6 +690,7 @@ describe ApplicationController do
get :index
expect(response.headers['Cache-Control']).to be_nil
+ expect(response.headers['Pragma']).to be_nil
end
end
@@ -735,6 +701,7 @@ describe ApplicationController do
get :index
expect(response.headers['Cache-Control']).to eq 'max-age=0, private, must-revalidate, no-store'
+ expect(response.headers['Pragma']).to eq 'no-cache'
end
it 'does not set the "no-store" header for XHR requests' do
diff --git a/spec/controllers/boards/issues_controller_spec.rb b/spec/controllers/boards/issues_controller_spec.rb
index 605fff60c31..41c37cb84e5 100644
--- a/spec/controllers/boards/issues_controller_spec.rb
+++ b/spec/controllers/boards/issues_controller_spec.rb
@@ -57,6 +57,18 @@ describe Boards::IssuesController do
expect(development.issues.map(&:relative_position)).not_to include(nil)
end
+ it 'returns issues by closed_at in descending order in closed list' do
+ create(:closed_issue, project: project, title: 'New Issue 1', closed_at: 1.day.ago)
+ create(:closed_issue, project: project, title: 'New Issue 2', closed_at: 1.week.ago)
+
+ list_issues user: user, board: board, list: board.lists.last.id
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['issues'].length).to eq(2)
+ expect(json_response['issues'][0]['title']).to eq('New Issue 1')
+ expect(json_response['issues'][1]['title']).to eq('New Issue 2')
+ end
+
it 'avoids N+1 database queries' do
create(:labeled_issue, project: project, labels: [development])
control_count = ActiveRecord::QueryRecorder.new { list_issues(user: user, board: board, list: list2) }.count
diff --git a/spec/controllers/explore/snippets_controller_spec.rb b/spec/controllers/explore/snippets_controller_spec.rb
index fa659c6df7f..ab91faa6cef 100644
--- a/spec/controllers/explore/snippets_controller_spec.rb
+++ b/spec/controllers/explore/snippets_controller_spec.rb
@@ -4,12 +4,33 @@ require 'spec_helper'
describe Explore::SnippetsController do
describe 'GET #index' do
- it_behaves_like 'paginated collection' do
- let(:collection) { Snippet.all }
+ let!(:project_snippet) { create_list(:project_snippet, 3, :public) }
+ let!(:personal_snippet) { create_list(:personal_snippet, 3, :public) }
- before do
- create(:personal_snippet, :public)
- end
+ before do
+ allow(Kaminari.config).to receive(:default_per_page).and_return(2)
+ end
+
+ it 'renders' do
+ get :index
+
+ snippets = assigns(:snippets)
+
+ expect(snippets).to be_a(::Kaminari::PaginatableWithoutCount)
+ expect(snippets.size).to eq(2)
+ expect(snippets).to all(be_a(PersonalSnippet))
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'renders pagination' do
+ get :index, params: { page: 2 }
+
+ snippets = assigns(:snippets)
+
+ expect(snippets).to be_a(::Kaminari::PaginatableWithoutCount)
+ expect(snippets.size).to eq(1)
+ expect(assigns(:snippets)).to all(be_a(PersonalSnippet))
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
diff --git a/spec/controllers/groups/registry/repositories_controller_spec.rb b/spec/controllers/groups/registry/repositories_controller_spec.rb
index eb702d65325..3a74aa1dac0 100644
--- a/spec/controllers/groups/registry/repositories_controller_spec.rb
+++ b/spec/controllers/groups/registry/repositories_controller_spec.rb
@@ -93,7 +93,7 @@ describe Groups::Registry::RepositoriesController do
context 'with :vue_container_registry_explorer feature flag disabled' do
before do
- stub_feature_flags(vue_container_registry_explorer: false)
+ stub_feature_flags(vue_container_registry_explorer: { enabled: false, thing: group })
end
it 'has the correct response schema' do
diff --git a/spec/controllers/groups/settings/ci_cd_controller_spec.rb b/spec/controllers/groups/settings/ci_cd_controller_spec.rb
index 4e8cb3f94fb..fbf88a01eb3 100644
--- a/spec/controllers/groups/settings/ci_cd_controller_spec.rb
+++ b/spec/controllers/groups/settings/ci_cd_controller_spec.rb
@@ -210,4 +210,16 @@ describe Groups::Settings::CiCdController do
end
end
end
+
+ describe 'POST create_deploy_token' do
+ it_behaves_like 'a created deploy token' do
+ let(:entity) { group }
+ let(:create_entity_params) { { group_id: group } }
+ let(:deploy_token_type) { DeployToken.deploy_token_types[:group_type] }
+
+ before do
+ entity.add_owner(user)
+ end
+ end
+ end
end
diff --git a/spec/controllers/groups_controller_spec.rb b/spec/controllers/groups_controller_spec.rb
index 1c58c2b5c97..11c70d3aeca 100644
--- a/spec/controllers/groups_controller_spec.rb
+++ b/spec/controllers/groups_controller_spec.rb
@@ -411,6 +411,13 @@ describe GroupsController do
expect(group.reload.project_creation_level).to eq(::Gitlab::Access::MAINTAINER_PROJECT_ACCESS)
end
+ it 'updates the default_branch_protection successfully' do
+ post :update, params: { id: group.to_param, group: { default_branch_protection: ::Gitlab::Access::PROTECTION_DEV_CAN_MERGE } }
+
+ expect(response).to have_gitlab_http_status(:found)
+ expect(group.reload.default_branch_protection).to eq(::Gitlab::Access::PROTECTION_DEV_CAN_MERGE)
+ end
+
context 'when a project inside the group has container repositories' do
before do
stub_container_registry_config(enabled: true)
diff --git a/spec/controllers/import/gitea_controller_spec.rb b/spec/controllers/import/gitea_controller_spec.rb
index 730e3f98c98..b4834dffdb3 100644
--- a/spec/controllers/import/gitea_controller_spec.rb
+++ b/spec/controllers/import/gitea_controller_spec.rb
@@ -28,10 +28,24 @@ describe Import::GiteaController do
describe "GET status" do
it_behaves_like 'a GitHub-ish import controller: GET status' do
+ let(:extra_assign_expectations) { { gitea_host_url: host_url } }
+
before do
assign_host_url
end
- let(:extra_assign_expectations) { { gitea_host_url: host_url } }
+
+ context 'when host url is local or not http' do
+ %w[https://localhost:3000 http://192.168.0.1 ftp://testing].each do |url|
+ let(:host_url) { url }
+
+ it 'denies network request' do
+ get :status, format: :json
+
+ expect(controller).to redirect_to(new_import_url)
+ expect(flash[:alert]).to eq('Specified URL cannot be used.')
+ end
+ end
+ end
end
end
diff --git a/spec/controllers/import/gitlab_projects_controller_spec.rb b/spec/controllers/import/gitlab_projects_controller_spec.rb
index a9aaefda0f6..0b74e2bbcbf 100644
--- a/spec/controllers/import/gitlab_projects_controller_spec.rb
+++ b/spec/controllers/import/gitlab_projects_controller_spec.rb
@@ -39,4 +39,62 @@ describe Import::GitlabProjectsController do
it_behaves_like 'project import rate limiter'
end
+
+ describe 'POST authorize' do
+ let(:workhorse_token) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
+
+ before do
+ request.headers['GitLab-Workhorse'] = '1.0'
+ request.headers[Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER] = workhorse_token
+ end
+
+ it 'authorizes importing project with workhorse header' do
+ post :authorize, format: :json
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ end
+
+ it 'rejects requests that bypassed gitlab-workhorse or have invalid header' do
+ request.headers[Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER] = 'INVALID_HEADER'
+
+ expect { post :authorize, format: :json }.to raise_error(JWT::DecodeError)
+ end
+
+ context 'when using remote storage' do
+ context 'when direct upload is enabled' do
+ before do
+ stub_uploads_object_storage(ImportExportUploader, enabled: true, direct_upload: true)
+ end
+
+ it 'responds with status 200, location of file remote store and object details' do
+ post :authorize, format: :json
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ expect(json_response).not_to have_key('TempPath')
+ expect(json_response['RemoteObject']).to have_key('ID')
+ expect(json_response['RemoteObject']).to have_key('GetURL')
+ expect(json_response['RemoteObject']).to have_key('StoreURL')
+ expect(json_response['RemoteObject']).to have_key('DeleteURL')
+ expect(json_response['RemoteObject']).to have_key('MultipartUpload')
+ end
+ end
+
+ context 'when direct upload is disabled' do
+ before do
+ stub_uploads_object_storage(ImportExportUploader, enabled: true, direct_upload: false)
+ end
+
+ it 'handles as a local file' do
+ post :authorize, format: :json
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ expect(json_response['TempPath']).to eq(ImportExportUploader.workhorse_local_upload_path)
+ expect(json_response['RemoteObject']).to be_nil
+ end
+ end
+ end
+ end
end
diff --git a/spec/controllers/profiles/keys_controller_spec.rb b/spec/controllers/profiles/keys_controller_spec.rb
index 3bed117deb0..8582ecbb06d 100644
--- a/spec/controllers/profiles/keys_controller_spec.rb
+++ b/spec/controllers/profiles/keys_controller_spec.rb
@@ -5,6 +5,22 @@ require 'spec_helper'
describe Profiles::KeysController do
let(:user) { create(:user) }
+ describe 'POST #create' do
+ before do
+ sign_in(user)
+ end
+
+ it 'creates a new key' do
+ expires_at = 3.days.from_now
+
+ expect do
+ post :create, params: { key: build(:key, expires_at: expires_at).attributes }
+ end.to change { Key.count }.by(1)
+
+ expect(Key.last.expires_at).to be_like_time(expires_at)
+ end
+ end
+
describe "#get_keys" do
describe "non existent user" do
it "does not generally work" do
diff --git a/spec/controllers/profiles_controller_spec.rb b/spec/controllers/profiles_controller_spec.rb
index 91f3bfcfa40..f0d83bb6bbd 100644
--- a/spec/controllers/profiles_controller_spec.rb
+++ b/spec/controllers/profiles_controller_spec.rb
@@ -89,6 +89,16 @@ describe ProfilesController, :request_store do
expect(user.reload.status.message).to eq('Working hard!')
expect(response).to have_gitlab_http_status(:found)
end
+
+ it 'allows updating user specified job title' do
+ title = 'Marketing Executive'
+ sign_in(user)
+
+ put :update, params: { user: { job_title: title } }
+
+ expect(user.reload.job_title).to eq(title)
+ expect(response).to have_gitlab_http_status(:found)
+ end
end
describe 'PUT update_username' do
diff --git a/spec/controllers/projects/clusters_controller_spec.rb b/spec/controllers/projects/clusters_controller_spec.rb
index c6345a2153c..a224a2101d3 100644
--- a/spec/controllers/projects/clusters_controller_spec.rb
+++ b/spec/controllers/projects/clusters_controller_spec.rb
@@ -23,7 +23,6 @@ describe Projects::ClustersController do
describe 'functionality' do
context 'when project has one or more clusters' do
- let(:project) { create(:project) }
let!(:enabled_cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
let!(:disabled_cluster) { create(:cluster, :disabled, :provided_by_gcp, :production_environment, projects: [project]) }
@@ -53,8 +52,6 @@ describe Projects::ClustersController do
end
context 'when project does not have a cluster' do
- let(:project) { create(:project) }
-
it 'returns an empty state page' do
go
diff --git a/spec/controllers/projects/deploy_keys_controller_spec.rb b/spec/controllers/projects/deploy_keys_controller_spec.rb
index 25e3e8e37a9..a97f9ebf36b 100644
--- a/spec/controllers/projects/deploy_keys_controller_spec.rb
+++ b/spec/controllers/projects/deploy_keys_controller_spec.rb
@@ -19,10 +19,10 @@ describe Projects::DeployKeysController do
end
context 'when html requested' do
- it 'redirects to project settings with the correct anchor' do
+ it 'redirects to project ci / cd settings with the correct anchor' do
get :index, params: params
- expect(response).to redirect_to(project_settings_repository_path(project, anchor: 'js-deploy-keys-settings'))
+ expect(response).to redirect_to(project_settings_ci_cd_path(project, anchor: 'js-deploy-keys-settings'))
end
end
@@ -87,13 +87,13 @@ describe Projects::DeployKeysController do
it 'creates a new deploy key for the project' do
expect { post :create, params: create_params }.to change(project.deploy_keys, :count).by(1)
- expect(response).to redirect_to(project_settings_repository_path(project, anchor: 'js-deploy-keys-settings'))
+ expect(response).to redirect_to(project_settings_ci_cd_path(project, anchor: 'js-deploy-keys-settings'))
end
it 'redirects to project settings with the correct anchor' do
post :create, params: create_params
- expect(response).to redirect_to(project_settings_repository_path(project, anchor: 'js-deploy-keys-settings'))
+ expect(response).to redirect_to(project_settings_ci_cd_path(project, anchor: 'js-deploy-keys-settings'))
end
context 'when the deploy key is invalid' do
@@ -153,7 +153,7 @@ describe Projects::DeployKeysController do
expect(DeployKeysProject.where(project_id: project.id, deploy_key_id: deploy_key.id).count).to eq(1)
expect(response).to have_gitlab_http_status(:found)
- expect(response).to redirect_to(namespace_project_settings_repository_path(anchor: 'js-deploy-keys-settings'))
+ expect(response).to redirect_to(namespace_project_settings_ci_cd_path(anchor: 'js-deploy-keys-settings'))
end
it 'returns 404' do
@@ -175,7 +175,7 @@ describe Projects::DeployKeysController do
expect(DeployKeysProject.where(project_id: project.id, deploy_key_id: deploy_key.id).count).to eq(1)
expect(response).to have_gitlab_http_status(:found)
- expect(response).to redirect_to(namespace_project_settings_repository_path(anchor: 'js-deploy-keys-settings'))
+ expect(response).to redirect_to(namespace_project_settings_ci_cd_path(anchor: 'js-deploy-keys-settings'))
end
end
end
@@ -216,7 +216,7 @@ describe Projects::DeployKeysController do
put :disable, params: { id: deploy_key.id, namespace_id: project.namespace, project_id: project }
expect(response).to have_gitlab_http_status(:found)
- expect(response).to redirect_to(namespace_project_settings_repository_path(anchor: 'js-deploy-keys-settings'))
+ expect(response).to redirect_to(namespace_project_settings_ci_cd_path(anchor: 'js-deploy-keys-settings'))
expect { DeployKey.find(deploy_key.id) }.to raise_error(ActiveRecord::RecordNotFound)
end
@@ -239,7 +239,7 @@ describe Projects::DeployKeysController do
end.to change { DeployKey.count }.by(-1)
expect(response).to have_gitlab_http_status(:found)
- expect(response).to redirect_to(namespace_project_settings_repository_path(anchor: 'js-deploy-keys-settings'))
+ expect(response).to redirect_to(namespace_project_settings_ci_cd_path(anchor: 'js-deploy-keys-settings'))
expect { DeployKey.find(deploy_key.id) }.to raise_error(ActiveRecord::RecordNotFound)
end
diff --git a/spec/controllers/projects/forks_controller_spec.rb b/spec/controllers/projects/forks_controller_spec.rb
index e6a68459a84..e362790cd3c 100644
--- a/spec/controllers/projects/forks_controller_spec.rb
+++ b/spec/controllers/projects/forks_controller_spec.rb
@@ -209,6 +209,17 @@ describe Projects::ForksController do
expect(response).to redirect_to(namespace_project_import_path(user.namespace, project))
end
+ context 'when target namespace is not valid for forking' do
+ let(:params) { super().merge(namespace_key: another_group.id) }
+ let(:another_group) { create :group }
+
+ it 'responds with :not_found' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
context 'continue params' do
let(:params) do
{
diff --git a/spec/controllers/projects/hooks_controller_spec.rb b/spec/controllers/projects/hooks_controller_spec.rb
index f50ef2d804c..e97f602d9ab 100644
--- a/spec/controllers/projects/hooks_controller_spec.rb
+++ b/spec/controllers/projects/hooks_controller_spec.rb
@@ -12,12 +12,11 @@ describe Projects::HooksController do
end
describe '#index' do
- it 'redirects to settings/integrations page' do
- get(:index, params: { namespace_id: project.namespace, project_id: project })
+ it 'renders index with 200 status code' do
+ get :index, params: { namespace_id: project.namespace, project_id: project }
- expect(response).to redirect_to(
- project_settings_integrations_path(project)
- )
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:index)
end
end
diff --git a/spec/controllers/projects/import/jira_controller_spec.rb b/spec/controllers/projects/import/jira_controller_spec.rb
new file mode 100644
index 00000000000..9d68104b755
--- /dev/null
+++ b/spec/controllers/projects/import/jira_controller_spec.rb
@@ -0,0 +1,173 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::Import::JiraController do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+
+ context 'with anonymous user' do
+ before do
+ stub_feature_flags(jira_issue_import: true)
+ end
+
+ context 'get show' do
+ it 'redirects to the sign in page' do
+ get :show, params: { namespace_id: project.namespace, project_id: project }
+
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
+
+ context 'post import' do
+ it 'redirects to the sign in page' do
+ post :import, params: { namespace_id: project.namespace, project_id: project, jira_project_key: 'Test' }
+
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
+ end
+
+ context 'with logged in user' do
+ before do
+ sign_in(user)
+ project.add_maintainer(user)
+ end
+
+ context 'when feature flag not enabled' do
+ before do
+ stub_feature_flags(jira_issue_import: false)
+ end
+
+ context 'get show' do
+ it 'redirects to issues page' do
+ get :show, params: { namespace_id: project.namespace, project_id: project }
+
+ expect(response).to redirect_to(project_issues_path(project))
+ end
+ end
+
+ context 'post import' do
+ it 'redirects to issues page' do
+ post :import, params: { namespace_id: project.namespace, project_id: project, jira_project_key: 'Test' }
+
+ expect(response).to redirect_to(project_issues_path(project))
+ end
+ end
+ end
+
+ context 'when feature flag enabled' do
+ before do
+ stub_feature_flags(jira_issue_import: true)
+ end
+
+ context 'when jira service is enabled for the project' do
+ let_it_be(:jira_service) { create(:jira_service, project: project) }
+
+ context 'when running jira import first time' do
+ context 'get show' do
+ it 'renders show template' do
+ allow(JIRA::Resource::Project).to receive(:all).and_return([])
+ expect(project.import_state).to be_nil
+
+ get :show, params: { namespace_id: project.namespace.to_param, project_id: project }
+
+ expect(response).to render_template :show
+ end
+ end
+
+ context 'post import' do
+ it 'creates import state' do
+ expect(project.import_state).to be_nil
+
+ post :import, params: { namespace_id: project.namespace, project_id: project, jira_project_key: 'Test' }
+
+ project.reload
+
+ jira_project = project.import_data.data.dig('jira', 'projects').first
+ expect(project.import_type).to eq 'jira'
+ expect(project.import_state.status).to eq 'scheduled'
+ expect(jira_project['key']).to eq 'Test'
+ expect(response).to redirect_to(project_import_jira_path(project))
+ end
+ end
+ end
+
+ context 'when import state is scheduled' do
+ let_it_be(:import_state) { create(:import_state, project: project, status: :scheduled) }
+
+ context 'get show' do
+ it 'renders import status' do
+ get :show, params: { namespace_id: project.namespace.to_param, project_id: project }
+
+ expect(project.import_state.status).to eq 'scheduled'
+ expect(flash.now[:notice]).to eq 'Import scheduled'
+ end
+ end
+
+ context 'post import' do
+ before do
+ project.reload
+ project.create_import_data(
+ data: {
+ 'jira': {
+ 'projects': [{ 'key': 'Test', scheduled_at: 5.days.ago, scheduled_by: { user_id: user.id, name: user.name } }]
+ }
+ }
+ )
+ end
+
+ it 'uses the existing import data' do
+ expect(controller).not_to receive(:schedule_import)
+
+ post :import, params: { namespace_id: project.namespace, project_id: project, jira_project_key: 'New Project' }
+
+ expect(response).to redirect_to(project_import_jira_path(project))
+ end
+ end
+ end
+
+ context 'when jira import ran before' do
+ let_it_be(:import_state) { create(:import_state, project: project, status: :finished) }
+
+ context 'get show' do
+ it 'renders import status' do
+ allow(JIRA::Resource::Project).to receive(:all).and_return([])
+ get :show, params: { namespace_id: project.namespace.to_param, project_id: project }
+
+ expect(project.import_state.status).to eq 'finished'
+ expect(flash.now[:notice]).to eq 'Import finished'
+ end
+ end
+
+ context 'post import' do
+ before do
+ project.reload
+ project.create_import_data(
+ data: {
+ 'jira': {
+ 'projects': [{ 'key': 'Test', scheduled_at: 5.days.ago, scheduled_by: { user_id: user.id, name: user.name } }]
+ }
+ }
+ )
+ end
+
+ it 'uses the existing import data' do
+ expect(controller).to receive(:schedule_import).and_call_original
+
+ post :import, params: { namespace_id: project.namespace, project_id: project, jira_project_key: 'New Project' }
+
+ project.reload
+ expect(project.import_state.status).to eq 'scheduled'
+ jira_imported_projects = project.import_data.data.dig('jira', 'projects')
+ expect(jira_imported_projects.size).to eq 2
+ expect(jira_imported_projects.first['key']).to eq 'Test'
+ expect(jira_imported_projects.last['key']).to eq 'New Project'
+ expect(response).to redirect_to(project_import_jira_path(project))
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index fb4d1cf59fe..806a4e2f52c 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -740,16 +740,16 @@ describe Projects::IssuesController do
.to log_spam(title: 'Spam title', noteable_type: 'Issue')
end
- it 'renders recaptcha_html json response' do
- update_issue
-
- expect(json_response).to have_key('recaptcha_html')
- end
+ context 'renders properly' do
+ render_views
- it 'returns 200 status' do
- update_issue
+ it 'renders recaptcha_html json response' do
+ update_issue
- expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to have_key('recaptcha_html')
+ expect(json_response['recaptcha_html']).not_to be_empty
+ end
end
end
diff --git a/spec/controllers/projects/logs_controller_spec.rb b/spec/controllers/projects/logs_controller_spec.rb
new file mode 100644
index 00000000000..ea71dbe45aa
--- /dev/null
+++ b/spec/controllers/projects/logs_controller_spec.rb
@@ -0,0 +1,143 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::LogsController do
+ include KubernetesHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+
+ let_it_be(:environment) do
+ create(:environment, name: 'production', project: project)
+ end
+
+ let(:pod_name) { "foo" }
+ let(:container) { 'container-1' }
+
+ before do
+ project.add_maintainer(user)
+
+ sign_in(user)
+ end
+
+ describe 'GET #index' do
+ let(:empty_project) { create(:project) }
+
+ it 'renders empty logs page if no environment exists' do
+ empty_project.add_maintainer(user)
+ get :index, params: { namespace_id: empty_project.namespace, project_id: empty_project }
+
+ expect(response).to be_ok
+ expect(response).to render_template 'empty_logs'
+ end
+
+ it 'renders index template' do
+ get :index, params: environment_params
+
+ expect(response).to be_ok
+ expect(response).to render_template 'index'
+ end
+ end
+
+ shared_examples 'pod logs service' do |endpoint, service|
+ let(:service_result) do
+ {
+ status: :success,
+ logs: ['Log 1', 'Log 2', 'Log 3'],
+ pods: [pod_name],
+ pod_name: pod_name,
+ container_name: container
+ }
+ end
+ let(:service_result_json) { JSON.parse(service_result.to_json) }
+
+ let_it_be(:cluster) { create(:cluster, :provided_by_gcp, environment_scope: '*', projects: [project]) }
+
+ before do
+ allow_next_instance_of(service) do |instance|
+ allow(instance).to receive(:execute).and_return(service_result)
+ end
+ end
+
+ it 'returns the service result' do
+ get endpoint, params: environment_params(pod_name: pod_name, format: :json)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(json_response).to eq(service_result_json)
+ end
+
+ it 'registers a usage of the endpoint' do
+ expect(::Gitlab::UsageCounters::PodLogs).to receive(:increment).with(project.id)
+
+ get endpoint, params: environment_params(pod_name: pod_name, format: :json)
+
+ expect(response).to have_gitlab_http_status(:success)
+ end
+
+ it 'sets the polling header' do
+ get endpoint, params: environment_params(pod_name: pod_name, format: :json)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(response.headers['Poll-Interval']).to eq('3000')
+ end
+
+ context 'when service is processing' do
+ let(:service_result) { nil }
+
+ it 'returns a 202' do
+ get endpoint, params: environment_params(pod_name: pod_name, format: :json)
+
+ expect(response).to have_gitlab_http_status(:accepted)
+ end
+ end
+
+ shared_examples 'unsuccessful execution response' do |message|
+ let(:service_result) do
+ {
+ status: :error,
+ message: message
+ }
+ end
+
+ it 'returns the error' do
+ get endpoint, params: environment_params(pod_name: pod_name, format: :json)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response).to eq(service_result_json)
+ end
+ end
+
+ context 'when service is failing' do
+ it_behaves_like 'unsuccessful execution response', 'some error'
+ end
+
+ context 'when cluster is nil' do
+ let!(:cluster) { nil }
+
+ it_behaves_like 'unsuccessful execution response', 'Environment does not have deployments'
+ end
+
+ context 'when namespace is empty' do
+ before do
+ allow(environment).to receive(:deployment_namespace).and_return('')
+ end
+
+ it_behaves_like 'unsuccessful execution response', 'Environment does not have deployments'
+ end
+ end
+
+ describe 'GET #k8s' do
+ it_behaves_like 'pod logs service', :k8s, PodLogs::KubernetesService
+ end
+
+ describe 'GET #elasticsearch' do
+ it_behaves_like 'pod logs service', :elasticsearch, PodLogs::ElasticsearchService
+ end
+
+ def environment_params(opts = {})
+ opts.reverse_merge(namespace_id: project.namespace,
+ project_id: project,
+ environment_name: environment.name)
+ end
+end
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index da26eb94fb0..2b1890f6cbd 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -77,6 +77,18 @@ describe Projects::MergeRequestsController do
end
end
+ context 'when diff is missing' do
+ render_views
+
+ it 'renders merge request page' do
+ merge_request.merge_request_diff.destroy
+
+ go(format: :html)
+
+ expect(response).to be_successful
+ end
+ end
+
it "renders merge request page" do
expect(::Gitlab::GitalyClient).to receive(:allow_ref_name_caching).and_call_original
@@ -972,6 +984,136 @@ describe Projects::MergeRequestsController do
end
end
+ describe 'GET coverage_reports' do
+ let(:merge_request) do
+ create(:merge_request,
+ :with_merge_request_pipeline,
+ target_project: project,
+ source_project: project)
+ end
+
+ let(:pipeline) do
+ create(:ci_pipeline,
+ :success,
+ project: merge_request.source_project,
+ ref: merge_request.source_branch,
+ sha: merge_request.diff_head_sha)
+ end
+
+ before do
+ allow_any_instance_of(MergeRequest)
+ .to receive(:find_coverage_reports)
+ .and_return(report)
+
+ allow_any_instance_of(MergeRequest)
+ .to receive(:actual_head_pipeline)
+ .and_return(pipeline)
+ end
+
+ subject do
+ get :coverage_reports, params: {
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ id: merge_request.iid
+ },
+ format: :json
+ end
+
+ describe 'permissions on a public project with private CI/CD' do
+ let(:project) { create :project, :repository, :public, :builds_private }
+ let(:report) { { status: :parsed, data: [] } }
+
+ context 'while signed out' do
+ before do
+ sign_out(user)
+ end
+
+ it 'responds with a 404' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(response.body).to be_blank
+ end
+ end
+
+ context 'while signed in as an unrelated user' do
+ before do
+ sign_in(create(:user))
+ end
+
+ it 'responds with a 404' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(response.body).to be_blank
+ end
+ end
+ end
+
+ context 'when pipeline has jobs with coverage reports' do
+ before do
+ allow_any_instance_of(MergeRequest)
+ .to receive(:has_coverage_reports?)
+ .and_return(true)
+ end
+
+ context 'when processing coverage reports is in progress' do
+ let(:report) { { status: :parsing } }
+
+ it 'sends polling interval' do
+ expect(Gitlab::PollingInterval).to receive(:set_header)
+
+ subject
+ end
+
+ it 'returns 204 HTTP status' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+ end
+
+ context 'when processing coverage reports is completed' do
+ let(:report) { { status: :parsed, data: pipeline.coverage_reports } }
+
+ it 'returns coverage reports' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq({ 'files' => {} })
+ end
+ end
+
+ context 'when user created corrupted coverage reports' do
+ let(:report) { { status: :error, status_reason: 'Failed to parse coverage reports' } }
+
+ it 'does not send polling interval' do
+ expect(Gitlab::PollingInterval).not_to receive(:set_header)
+
+ subject
+ end
+
+ it 'returns 400 HTTP status' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response).to eq({ 'status_reason' => 'Failed to parse coverage reports' })
+ end
+ end
+ end
+
+ context 'when pipeline does not have jobs with coverage reports' do
+ let(:report) { double }
+
+ it 'returns no content' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ expect(response.body).to be_empty
+ end
+ end
+ end
+
describe 'GET test_reports' do
let(:merge_request) do
create(:merge_request,
diff --git a/spec/controllers/projects/milestones_controller_spec.rb b/spec/controllers/projects/milestones_controller_spec.rb
index 6b698c6da66..ee61ef73b45 100644
--- a/spec/controllers/projects/milestones_controller_spec.rb
+++ b/spec/controllers/projects/milestones_controller_spec.rb
@@ -135,6 +135,10 @@ describe Projects::MilestonesController do
end
describe "#destroy" do
+ before do
+ stub_feature_flags(track_resource_milestone_change_events: false)
+ end
+
it "removes milestone" do
expect(issue.milestone_id).to eq(milestone.id)
diff --git a/spec/controllers/projects/performance_monitoring/dashboards_controller_spec.rb b/spec/controllers/projects/performance_monitoring/dashboards_controller_spec.rb
index 1c29b68dc24..6a53e8f3dbf 100644
--- a/spec/controllers/projects/performance_monitoring/dashboards_controller_spec.rb
+++ b/spec/controllers/projects/performance_monitoring/dashboards_controller_spec.rb
@@ -129,4 +129,130 @@ describe Projects::PerformanceMonitoring::DashboardsController do
end
end
end
+
+ describe 'PUT #update' do
+ context 'authenticated user' do
+ before do
+ sign_in(user)
+ end
+
+ let(:file_content) do
+ {
+ "dashboard" => "Dashboard Title",
+ "panel_groups" => [{
+ "group" => "Group Title",
+ "panels" => [{
+ "type" => "area-chart",
+ "title" => "Chart Title",
+ "y_label" => "Y-Axis",
+ "metrics" => [{
+ "id" => "metric_of_ages",
+ "unit" => "count",
+ "label" => "Metric of Ages",
+ "query_range" => "http_requests_total"
+ }]
+ }]
+ }]
+ }
+ end
+
+ let(:params) do
+ {
+ namespace_id: namespace,
+ project_id: project,
+ dashboard: dashboard,
+ file_name: file_name,
+ file_content: file_content,
+ commit_message: commit_message,
+ branch: branch_name,
+ format: :json
+ }
+ end
+
+ context 'project with repository feature' do
+ context 'with rights to push to the repository' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ context 'valid parameters' do
+ context 'request format json' do
+ let(:update_dashboard_service_params) { params.except(:namespace_id, :project_id, :format) }
+
+ let(:update_dashboard_service_results) do
+ {
+ status: :success,
+ http_status: :created,
+ dashboard: {
+ path: ".gitlab/dashboards/custom_dashboard.yml",
+ display_name: "custom_dashboard.yml",
+ default: false,
+ system_dashboard: false
+ }
+ }
+ end
+
+ let(:update_dashboard_service) { instance_double(::Metrics::Dashboard::UpdateDashboardService, execute: update_dashboard_service_results) }
+
+ it 'returns path to new file' do
+ allow(controller).to receive(:repository).and_return(repository)
+ allow(repository).to receive(:find_branch).and_return(branch)
+ allow(::Metrics::Dashboard::UpdateDashboardService).to receive(:new).with(project, user, update_dashboard_service_params).and_return(update_dashboard_service)
+
+ put :update, params: params
+
+ expect(response).to have_gitlab_http_status :created
+ expect(response).to set_flash[:notice].to eq("Your dashboard has been updated. You can <a href=\"/-/ide/project/#{namespace.path}/#{project.name}/edit/#{branch_name}/-/.gitlab/dashboards/#{file_name}\">edit it here</a>.")
+ expect(json_response).to eq('status' => 'success', 'dashboard' => { 'default' => false, 'display_name' => "custom_dashboard.yml", 'path' => ".gitlab/dashboards/#{file_name}", 'system_dashboard' => false })
+ end
+
+ context 'UpdateDashboardService failure' do
+ it 'returns json with failure message' do
+ allow(::Metrics::Dashboard::UpdateDashboardService).to receive(:new).and_return(double(execute: { status: :error, message: 'something went wrong', http_status: :bad_request }))
+
+ put :update, params: params
+
+ expect(response).to have_gitlab_http_status :bad_request
+ expect(json_response).to eq('error' => 'something went wrong')
+ end
+ end
+ end
+ end
+
+ context 'missing branch' do
+ let(:branch_name) { nil }
+
+ it 'responds with :bad_request status code and error message' do
+ put :update, params: params
+
+ expect(response).to have_gitlab_http_status :bad_request
+ expect(json_response).to eq('error' => "Request parameter branch is missing.")
+ end
+ end
+ end
+
+ context 'without rights to push to repository' do
+ before do
+ project.add_guest(user)
+ end
+
+ it 'responds with :forbidden status code' do
+ put :update, params: params
+
+ expect(response).to have_gitlab_http_status :forbidden
+ end
+ end
+ end
+
+ context 'project without repository feature' do
+ let!(:project) { create(:project, name: 'dashboard-project', namespace: namespace) }
+
+ it 'responds with :not_found status code' do
+ put :update, params: params
+
+ expect(response).to have_gitlab_http_status :not_found
+ end
+ end
+ end
+ end
end
diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb
index fd33f32e877..a929eaeba3f 100644
--- a/spec/controllers/projects/pipelines_controller_spec.rb
+++ b/spec/controllers/projects/pipelines_controller_spec.rb
@@ -10,6 +10,7 @@ describe Projects::PipelinesController do
let(:feature) { ProjectFeature::ENABLED }
before do
+ allow(Sidekiq.logger).to receive(:info)
stub_not_protect_default_branch
project.add_developer(user)
project.project_feature.update(builds_access_level: feature)
@@ -38,9 +39,9 @@ describe Projects::PipelinesController do
expect(response).to match_response_schema('pipeline')
expect(json_response).to include('pipelines')
- expect(json_response['pipelines'].count).to eq 5
- expect(json_response['count']['all']).to eq '5'
- expect(json_response['count']['running']).to eq '1'
+ expect(json_response['pipelines'].count).to eq 6
+ expect(json_response['count']['all']).to eq '6'
+ expect(json_response['count']['running']).to eq '2'
expect(json_response['count']['pending']).to eq '1'
expect(json_response['count']['finished']).to eq '3'
@@ -61,7 +62,7 @@ describe Projects::PipelinesController do
# There appears to be one extra query for Pipelines#has_warnings? for some reason
expect { get_pipelines_index_json }.not_to exceed_query_limit(control_count + 1)
expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['pipelines'].count).to eq 10
+ expect(json_response['pipelines'].count).to eq 12
end
end
@@ -77,9 +78,9 @@ describe Projects::PipelinesController do
expect(response).to match_response_schema('pipeline')
expect(json_response).to include('pipelines')
- expect(json_response['pipelines'].count).to eq 5
- expect(json_response['count']['all']).to eq '5'
- expect(json_response['count']['running']).to eq '1'
+ expect(json_response['pipelines'].count).to eq 6
+ expect(json_response['count']['all']).to eq '6'
+ expect(json_response['count']['running']).to eq '2'
expect(json_response['count']['pending']).to eq '1'
expect(json_response['count']['finished']).to eq '3'
@@ -99,8 +100,9 @@ describe Projects::PipelinesController do
# There appears to be one extra query for Pipelines#has_warnings? for some reason
expect { get_pipelines_index_json }.not_to exceed_query_limit(control_count + 1)
+
expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['pipelines'].count).to eq 10
+ expect(json_response['pipelines'].count).to eq 12
end
end
@@ -139,7 +141,7 @@ describe Projects::PipelinesController do
it 'returns the pipelines when the user has access' do
get_pipelines_index_json
- expect(json_response['pipelines'].size).to eq(5)
+ expect(json_response['pipelines'].size).to eq(6)
end
end
@@ -155,18 +157,32 @@ describe Projects::PipelinesController do
%w(pending running success failed canceled).each_with_index do |status, index|
create_pipeline(status, project.commit("HEAD~#{index}"))
end
+
+ create_pipeline_with_merge_request
end
- def create_pipeline(status, sha)
+ def create_pipeline_with_merge_request
+ # New merge requests must be created with different branches, so
+ # let's just create new ones with random names.
+ branch_name = "test-#{SecureRandom.hex}"
+ project.repository.create_branch(branch_name, project.repository.root_ref)
+ mr = create(:merge_request, source_project: project, target_project: project, source_branch: branch_name)
+ create_pipeline(:running, project.commit('HEAD'), merge_request: mr)
+ end
+
+ def create_pipeline(status, sha, merge_request: nil)
user = create(:user)
pipeline = create(:ci_empty_pipeline, status: status,
project: project,
sha: sha,
- user: user)
+ user: user,
+ merge_request: merge_request)
create_build(pipeline, 'build', 1, 'build', user)
create_build(pipeline, 'test', 2, 'test', user)
create_build(pipeline, 'deploy', 3, 'deploy', user)
+
+ pipeline
end
def create_build(pipeline, stage, stage_idx, name, user = nil)
@@ -568,6 +584,72 @@ describe Projects::PipelinesController do
end
end
+ describe 'POST create' do
+ let(:project) { create(:project, :public, :repository) }
+
+ before do
+ project.add_developer(user)
+ project.project_feature.update(builds_access_level: feature)
+ end
+
+ context 'with a valid .gitlab-ci.yml file' do
+ before do
+ stub_ci_pipeline_yaml_file(YAML.dump({
+ test: {
+ stage: 'test',
+ script: 'echo'
+ }
+ }))
+ end
+
+ shared_examples 'creates a pipeline' do
+ it do
+ expect { post_request }.to change { project.ci_pipelines.count }.by(1)
+
+ pipeline = project.ci_pipelines.last
+ expected_redirect_path = Gitlab::Routing.url_helpers.project_pipeline_path(project, pipeline)
+ expect(pipeline).to be_pending
+ expect(response).to redirect_to(expected_redirect_path)
+ end
+ end
+
+ it_behaves_like 'creates a pipeline'
+
+ context 'when latest commit contains [ci skip]' do
+ before do
+ project.repository.create_file(user, 'new-file.txt', 'A new file',
+ message: '[skip ci] This is a test',
+ branch_name: 'master')
+ end
+
+ it_behaves_like 'creates a pipeline'
+ end
+ end
+
+ context 'with an invalid .gitlab-ci.yml file' do
+ before do
+ stub_ci_pipeline_yaml_file('invalid yaml file')
+ end
+
+ it 'does not persist a pipeline' do
+ expect { post_request }.not_to change { project.ci_pipelines.count }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(response).to render_template('new')
+ end
+ end
+
+ def post_request
+ post :create, params: {
+ namespace_id: project.namespace,
+ project_id: project,
+ pipeline: {
+ ref: 'master'
+ }
+ }
+ end
+ end
+
describe 'POST retry.json' do
let!(:pipeline) { create(:ci_pipeline, :failed, project: project) }
let!(:build) { create(:ci_build, :failed, pipeline: pipeline) }
diff --git a/spec/controllers/projects/registry/repositories_controller_spec.rb b/spec/controllers/projects/registry/repositories_controller_spec.rb
index 5b9c0211b39..a64673a7f87 100644
--- a/spec/controllers/projects/registry/repositories_controller_spec.rb
+++ b/spec/controllers/projects/registry/repositories_controller_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Projects::Registry::RepositoriesController do
- let(:user) { create(:user) }
+ let(:user) { create(:user) }
let(:project) { create(:project, :private) }
before do
@@ -88,7 +88,7 @@ describe Projects::Registry::RepositoriesController do
context 'with :vue_container_registry_explorer feature flag disabled' do
before do
- stub_feature_flags(vue_container_registry_explorer: false)
+ stub_feature_flags(vue_container_registry_explorer: { enabled: false, thing: project.group })
stub_container_registry_tags(repository: project.full_path,
tags: %w[rc1 latest])
end
diff --git a/spec/controllers/projects/releases_controller_spec.rb b/spec/controllers/projects/releases_controller_spec.rb
index a03fabad2de..ca073c520cd 100644
--- a/spec/controllers/projects/releases_controller_spec.rb
+++ b/spec/controllers/projects/releases_controller_spec.rb
@@ -198,6 +198,103 @@ describe Projects::ReleasesController do
end
end
+ context 'GET #downloads' do
+ subject do
+ get :downloads, params: { namespace_id: project.namespace, project_id: project, tag: tag, filepath: filepath }
+ end
+
+ before do
+ sign_in(user)
+ end
+
+ let(:release) { create(:release, project: project, tag: tag) }
+ let!(:link) { create(:release_link, release: release, name: 'linux-amd64 binaries', filepath: '/binaries/linux-amd64', url: 'https://downloads.example.com/bin/gitlab-linux-amd64') }
+ let(:tag) { 'v11.9.0-rc2' }
+
+ context 'valid filepath' do
+ let(:filepath) { CGI.escape('/binaries/linux-amd64') }
+
+ it 'redirects to the asset direct link' do
+ subject
+
+ expect(response).to redirect_to('https://downloads.example.com/bin/gitlab-linux-amd64')
+ end
+
+ it 'redirects with a status of 302' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:redirect)
+ end
+ end
+
+ context 'invalid filepath' do
+ let(:filepath) { CGI.escape('/binaries/win32') }
+
+ it 'is not found' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ context 'GET #downloads' do
+ subject do
+ get :downloads, params: {
+ namespace_id: project.namespace,
+ project_id: project,
+ tag: tag,
+ filepath: filepath
+ }
+ end
+
+ before do
+ sign_in(user)
+ end
+
+ let(:release) { create(:release, project: project, tag: tag) }
+ let(:tag) { 'v11.9.0-rc2' }
+ let(:db_filepath) { '/binaries/linux-amd64' }
+ let!(:link) do
+ create :release_link,
+ release: release,
+ name: 'linux-amd64 binaries',
+ filepath: db_filepath,
+ url: 'https://downloads.example.com/bin/gitlab-linux-amd64'
+ end
+
+ context 'valid filepath' do
+ let(:filepath) { CGI.escape('/binaries/linux-amd64') }
+
+ it 'redirects to the asset direct link' do
+ subject
+
+ expect(response).to redirect_to(link.url)
+ end
+ end
+
+ context 'invalid filepath' do
+ let(:filepath) { CGI.escape('/binaries/win32') }
+
+ it 'is not found' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'ignores filepath extension' do
+ let(:db_filepath) { '/binaries/linux-amd64.json' }
+ let(:filepath) { CGI.escape(db_filepath) }
+
+ it 'redirects to the asset direct link' do
+ subject
+
+ expect(response).to redirect_to(link.url)
+ end
+ end
+ end
+
describe 'GET #evidence' do
let_it_be(:tag_name) { "v1.1.0-evidence" }
let!(:release) { create(:release, :with_evidence, project: project, tag: tag_name) }
diff --git a/spec/controllers/projects/repositories_controller_spec.rb b/spec/controllers/projects/repositories_controller_spec.rb
index d4a81f24d9c..2d39f0afaee 100644
--- a/spec/controllers/projects/repositories_controller_spec.rb
+++ b/spec/controllers/projects/repositories_controller_spec.rb
@@ -6,6 +6,10 @@ describe Projects::RepositoriesController do
let(:project) { create(:project, :repository) }
describe "GET archive" do
+ before do
+ allow(controller).to receive(:archive_rate_limit_reached?).and_return(false)
+ end
+
context 'as a guest' do
it 'responds with redirect in correct format' do
get :archive, params: { namespace_id: project.namespace, project_id: project, id: "master" }, format: "zip"
@@ -96,6 +100,16 @@ describe Projects::RepositoriesController do
end
end
+ describe 'rate limiting' do
+ it 'rate limits the user when the threshold is hit' do
+ expect(controller).to receive(:archive_rate_limit_reached?).and_return(true)
+
+ get :archive, params: { namespace_id: project.namespace, project_id: project, id: 'master' }, format: "html"
+
+ expect(response).to have_gitlab_http_status(:too_many_requests)
+ end
+ end
+
describe 'caching' do
it 'sets appropriate caching headers' do
get_archive
diff --git a/spec/controllers/projects/serverless/functions_controller_spec.rb b/spec/controllers/projects/serverless/functions_controller_spec.rb
index db7533eb609..203e1e49994 100644
--- a/spec/controllers/projects/serverless/functions_controller_spec.rb
+++ b/spec/controllers/projects/serverless/functions_controller_spec.rb
@@ -135,7 +135,7 @@ describe Projects::Serverless::FunctionsController do
context 'when there is no serverless domain for a cluster' do
it 'keeps function URL as it was' do
- expect(Gitlab::Serverless::Domain).not_to receive(:new)
+ expect(::Serverless::Domain).not_to receive(:new)
get :index, params: params({ format: :json })
expect(response).to have_gitlab_http_status(:ok)
diff --git a/spec/controllers/projects/services_controller_spec.rb b/spec/controllers/projects/services_controller_spec.rb
index fb7cca3997b..c669119fa4e 100644
--- a/spec/controllers/projects/services_controller_spec.rb
+++ b/spec/controllers/projects/services_controller_spec.rb
@@ -153,16 +153,6 @@ describe Projects::ServicesController do
expect(flash[:notice]).to eq 'Jira settings saved, but not activated.'
end
end
-
- context 'when activating Jira service from a template' do
- let(:service) do
- create(:jira_service, project: project, template: true)
- end
-
- it 'activate Jira service from template' do
- expect(flash[:notice]).to eq 'Jira activated.'
- end
- end
end
describe 'as JSON' do
diff --git a/spec/controllers/projects/settings/ci_cd_controller_spec.rb b/spec/controllers/projects/settings/ci_cd_controller_spec.rb
index 68260e4e101..a8631389e17 100644
--- a/spec/controllers/projects/settings/ci_cd_controller_spec.rb
+++ b/spec/controllers/projects/settings/ci_cd_controller_spec.rb
@@ -247,4 +247,12 @@ describe Projects::Settings::CiCdController do
end
end
end
+
+ describe 'POST create_deploy_token' do
+ it_behaves_like 'a created deploy token' do
+ let(:entity) { project }
+ let(:create_entity_params) { { namespace_id: project.namespace, project_id: project } }
+ let(:deploy_token_type) { DeployToken.deploy_token_types[:project_type] }
+ end
+ end
end
diff --git a/spec/controllers/projects/settings/repository_controller_spec.rb b/spec/controllers/projects/settings/repository_controller_spec.rb
index 64f5b8e34ae..67ae9ebda38 100644
--- a/spec/controllers/projects/settings/repository_controller_spec.rb
+++ b/spec/controllers/projects/settings/repository_controller_spec.rb
@@ -32,24 +32,4 @@ describe Projects::Settings::RepositoryController do
expect(RepositoryCleanupWorker).to have_received(:perform_async).once
end
end
-
- describe 'POST create_deploy_token' do
- let(:deploy_token_params) do
- {
- name: 'deployer_token',
- expires_at: 1.month.from_now.to_date.to_s,
- username: 'deployer',
- read_repository: '1'
- }
- end
-
- subject(:create_deploy_token) { post :create_deploy_token, params: { namespace_id: project.namespace, project_id: project, deploy_token: deploy_token_params } }
-
- it 'creates deploy token' do
- expect { create_deploy_token }.to change { DeployToken.active.count }.by(1)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to render_template(:show)
- end
- end
end
diff --git a/spec/controllers/projects/snippets_controller_spec.rb b/spec/controllers/projects/snippets_controller_spec.rb
index 4d1537ae787..900569af6c6 100644
--- a/spec/controllers/projects/snippets_controller_spec.rb
+++ b/spec/controllers/projects/snippets_controller_spec.rb
@@ -3,9 +3,11 @@
require 'spec_helper'
describe Projects::SnippetsController do
+ include Gitlab::Routing
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:user2) { create(:user) }
let(:project) { create(:project_empty_repo, :public) }
- let(:user) { create(:user) }
- let(:user2) { create(:user) }
before do
project.add_maintainer(user)
@@ -318,14 +320,45 @@ describe Projects::SnippetsController do
end
end
+ shared_examples 'successful response' do
+ it 'renders the snippet' do
+ subject
+
+ expect(assigns(:snippet)).to eq(project_snippet)
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'renders the blob from the repository' do
+ subject
+
+ expect(assigns(:blob)).to eq(project_snippet.blobs.first)
+ end
+
+ context 'when feature flag version_snippets is disabled' do
+ before do
+ stub_feature_flags(version_snippets: false)
+ end
+
+ it 'returns the snippet database content' do
+ subject
+
+ blob = assigns(:blob)
+
+ expect(blob.data).to eq(project_snippet.content)
+ end
+ end
+ end
+
%w[show raw].each do |action|
describe "GET ##{action}" do
context 'when the project snippet is private' do
- let(:project_snippet) { create(:project_snippet, :private, project: project, author: user) }
+ let(:project_snippet) { create(:project_snippet, :private, :repository, project: project, author: user) }
+
+ subject { get action, params: { namespace_id: project.namespace, project_id: project, id: project_snippet.to_param } }
context 'when anonymous' do
it 'responds with status 404' do
- get action, params: { namespace_id: project.namespace, project_id: project, id: project_snippet.to_param }
+ subject
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -336,12 +369,7 @@ describe Projects::SnippetsController do
sign_in(user)
end
- it 'renders the snippet' do
- get action, params: { namespace_id: project.namespace, project_id: project, id: project_snippet.to_param }
-
- expect(assigns(:snippet)).to eq(project_snippet)
- expect(response).to have_gitlab_http_status(:ok)
- end
+ it_behaves_like 'successful response'
end
context 'when signed in as a project member' do
@@ -349,19 +377,16 @@ describe Projects::SnippetsController do
sign_in(user2)
end
- it 'renders the snippet' do
- get action, params: { namespace_id: project.namespace, project_id: project, id: project_snippet.to_param }
-
- expect(assigns(:snippet)).to eq(project_snippet)
- expect(response).to have_gitlab_http_status(:ok)
- end
+ it_behaves_like 'successful response'
end
end
context 'when the project snippet does not exist' do
+ subject { get action, params: { namespace_id: project.namespace, project_id: project, id: 42 } }
+
context 'when anonymous' do
it 'responds with status 404' do
- get action, params: { namespace_id: project.namespace, project_id: project, id: 42 }
+ subject
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -373,7 +398,7 @@ describe Projects::SnippetsController do
end
it 'responds with status 404' do
- get action, params: { namespace_id: project.namespace, project_id: project, id: 42 }
+ subject
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -383,18 +408,20 @@ describe Projects::SnippetsController do
end
describe "GET #show for embeddable content" do
- let(:project_snippet) { create(:project_snippet, snippet_permission, project: project, author: user) }
+ let(:project_snippet) { create(:project_snippet, :repository, snippet_permission, project: project, author: user) }
before do
sign_in(user)
-
- get :show, params: { namespace_id: project.namespace, project_id: project, id: project_snippet.to_param }, format: :js
end
+ subject { get :show, params: { namespace_id: project.namespace, project_id: project, id: project_snippet.to_param }, format: :js }
+
context 'when snippet is private' do
let(:snippet_permission) { :private }
it 'responds with status 404' do
+ subject
+
expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -402,10 +429,7 @@ describe Projects::SnippetsController do
context 'when snippet is public' do
let(:snippet_permission) { :public }
- it 'responds with status 200' do
- expect(assigns(:snippet)).to eq(project_snippet)
- expect(response).to have_gitlab_http_status(:ok)
- end
+ it_behaves_like 'successful response'
end
context 'when the project is private' do
@@ -415,6 +439,8 @@ describe Projects::SnippetsController do
let(:project_snippet) { create(:project_snippet, :public, project: project, author: user) }
it 'responds with status 404' do
+ subject
+
expect(assigns(:snippet)).to eq(project_snippet)
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -423,14 +449,17 @@ describe Projects::SnippetsController do
end
describe 'GET #raw' do
+ let(:content) { "first line\r\nsecond line\r\nthird line" }
+ let(:formatted_content) { content.gsub(/\r\n/, "\n") }
let(:project_snippet) do
create(
- :project_snippet, :public,
+ :project_snippet, :public, :repository,
project: project,
author: user,
- content: "first line\r\nsecond line\r\nthird line"
+ content: content
)
end
+ let(:blob) { project_snippet.blobs.first }
context 'CRLF line ending' do
let(:params) do
@@ -441,16 +470,22 @@ describe Projects::SnippetsController do
}
end
+ before do
+ allow_next_instance_of(Blob) do |instance|
+ allow(instance).to receive(:data).and_return(content)
+ end
+ end
+
it 'returns LF line endings by default' do
get :raw, params: params
- expect(response.body).to eq("first line\nsecond line\nthird line")
+ expect(response.body).to eq(formatted_content)
end
it 'does not convert line endings when parameter present' do
get :raw, params: params.merge(line_ending: :raw)
- expect(response.body).to eq("first line\r\nsecond line\r\nthird line")
+ expect(response.body).to eq(content)
end
end
end
diff --git a/spec/controllers/projects/tags/releases_controller_spec.rb b/spec/controllers/projects/tags/releases_controller_spec.rb
index da87756d782..cb12e074732 100644
--- a/spec/controllers/projects/tags/releases_controller_spec.rb
+++ b/spec/controllers/projects/tags/releases_controller_spec.rb
@@ -67,13 +67,13 @@ describe Projects::Tags::ReleasesController do
expect(response).to have_gitlab_http_status(:found)
end
- it 'deletes release when description is empty' do
- initial_releases_count = project.releases.count
+ it 'does not delete release when description is empty' do
+ expect do
+ update_release(tag, "")
+ end.not_to change { project.releases.count }
- response = update_release(release.tag, "")
+ expect(release.reload.description).to eq("")
- expect(initial_releases_count).to eq(1)
- expect(project.releases.count).to eq(0)
expect(response).to have_gitlab_http_status(:found)
end
diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb
index 67e24841dee..53a57937e9b 100644
--- a/spec/controllers/projects_controller_spec.rb
+++ b/spec/controllers/projects_controller_spec.rb
@@ -1140,7 +1140,7 @@ describe ProjectsController do
end
it 'prevents requesting project export' do
- get action, params: { namespace_id: project.namespace, id: project }
+ post action, params: { namespace_id: project.namespace, id: project }
expect(flash[:alert]).to eq('This endpoint has been requested too many times. Try again later.')
expect(response).to have_gitlab_http_status(:found)
@@ -1152,7 +1152,7 @@ describe ProjectsController do
context 'when project export is enabled' do
it 'returns 302' do
- get action, params: { namespace_id: project.namespace, id: project }
+ post action, params: { namespace_id: project.namespace, id: project }
expect(response).to have_gitlab_http_status(:found)
end
@@ -1164,7 +1164,7 @@ describe ProjectsController do
end
it 'returns 404' do
- get action, params: { namespace_id: project.namespace, id: project }
+ post action, params: { namespace_id: project.namespace, id: project }
expect(response).to have_gitlab_http_status(:not_found)
end
diff --git a/spec/controllers/repositories/git_http_controller_spec.rb b/spec/controllers/repositories/git_http_controller_spec.rb
index 10a7b72ca89..005db748e91 100644
--- a/spec/controllers/repositories/git_http_controller_spec.rb
+++ b/spec/controllers/repositories/git_http_controller_spec.rb
@@ -6,16 +6,18 @@ describe Repositories::GitHttpController do
include GitHttpHelpers
let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:personal_snippet) { create(:personal_snippet, :public, :repository) }
+ let_it_be(:project_snippet) { create(:project_snippet, :public, :repository, project: project) }
let(:namespace_id) { project.namespace.to_param }
let(:repository_id) { project.path + '.git' }
- let(:project_params) do
+ let(:container_params) do
{
namespace_id: namespace_id,
repository_id: repository_id
}
end
- let(:params) { project_params }
+ let(:params) { container_params }
describe 'HEAD #info_refs' do
it 'returns 403' do
@@ -27,7 +29,7 @@ describe Repositories::GitHttpController do
shared_examples 'info_refs behavior' do
describe 'GET #info_refs' do
- let(:params) { project_params.merge(service: 'git-upload-pack') }
+ let(:params) { container_params.merge(service: 'git-upload-pack') }
it 'returns 401 for unauthenticated requests to public repositories when http protocol is disabled' do
stub_application_setting(enabled_git_access_protocol: 'ssh')
@@ -41,8 +43,6 @@ describe Repositories::GitHttpController do
end
context 'with authorized user' do
- let(:user) { project.owner }
-
before do
request.headers.merge! auth_env(user.username, user.password, nil)
end
@@ -122,7 +122,7 @@ describe Repositories::GitHttpController do
end
shared_examples 'access checker class' do
- let(:params) { project_params.merge(service: 'git-upload-pack') }
+ let(:params) { container_params.merge(service: 'git-upload-pack') }
it 'calls the right access class checker with the right object' do
allow(controller).to receive(:verify_workhorse_api!).and_return(true)
@@ -135,12 +135,80 @@ describe Repositories::GitHttpController do
end
end
+ shared_examples 'snippet feature flag disabled behavior' do
+ before do
+ stub_feature_flags(version_snippets: false)
+
+ request.headers.merge! auth_env(user.username, user.password, nil)
+ end
+
+ describe 'GET #info_refs' do
+ let(:params) { container_params.merge(service: 'git-upload-pack') }
+
+ it 'returns 404' do
+ get :info_refs, params: params
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ describe 'POST #git_upload_pack' do
+ before do
+ allow(controller).to receive(:authenticate_user).and_return(true)
+ allow(controller).to receive(:verify_workhorse_api!).and_return(true)
+ allow(controller).to receive(:access_check).and_return(nil)
+ end
+
+ it 'returns 404' do
+ post :git_upload_pack, params: params
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
context 'when repository container is a project' do
- it_behaves_like 'info_refs behavior'
+ it_behaves_like 'info_refs behavior' do
+ let(:user) { project.owner }
+ end
it_behaves_like 'git_upload_pack behavior', true
it_behaves_like 'access checker class' do
let(:expected_class) { Gitlab::GitAccess }
let(:expected_object) { project }
end
end
+
+ context 'when repository container is a personal snippet' do
+ let(:namespace_id) { 'snippets' }
+ let(:repository_id) { personal_snippet.to_param + '.git' }
+
+ it_behaves_like 'info_refs behavior' do
+ let(:user) { personal_snippet.author }
+ end
+ it_behaves_like 'git_upload_pack behavior', false
+ it_behaves_like 'access checker class' do
+ let(:expected_class) { Gitlab::GitAccessSnippet }
+ let(:expected_object) { personal_snippet }
+ end
+ it_behaves_like 'snippet feature flag disabled behavior' do
+ let(:user) { personal_snippet.author }
+ end
+ end
+
+ context 'when repository container is a project snippet' do
+ let(:namespace_id) { project.full_path + '/snippets' }
+ let(:repository_id) { project_snippet.to_param + '.git' }
+
+ it_behaves_like 'info_refs behavior' do
+ let(:user) { project_snippet.author }
+ end
+ it_behaves_like 'git_upload_pack behavior', false
+ it_behaves_like 'access checker class' do
+ let(:expected_class) { Gitlab::GitAccessSnippet }
+ let(:expected_object) { project_snippet }
+ end
+ it_behaves_like 'snippet feature flag disabled behavior' do
+ let(:user) { project_snippet.author }
+ end
+ end
end
diff --git a/spec/controllers/search_controller_spec.rb b/spec/controllers/search_controller_spec.rb
index 19786417d76..1fe313452fe 100644
--- a/spec/controllers/search_controller_spec.rb
+++ b/spec/controllers/search_controller_spec.rb
@@ -140,6 +140,14 @@ describe SearchController do
end
end
+ context 'snippet search' do
+ it 'forces title search' do
+ get :show, params: { scope: 'snippet_blobs', snippets: 'true', search: 'foo' }
+
+ expect(assigns[:scope]).to eq('snippet_titles')
+ end
+ end
+
it 'finds issue comments' do
project = create(:project, :public)
note = create(:note_on_issue, project: project)
diff --git a/spec/controllers/sessions_controller_spec.rb b/spec/controllers/sessions_controller_spec.rb
index 2f597fd5cb3..f3e2ea50913 100644
--- a/spec/controllers/sessions_controller_spec.rb
+++ b/spec/controllers/sessions_controller_spec.rb
@@ -495,4 +495,65 @@ describe SessionsController do
expect(session[:failed_login_attempts]).to eq(1)
end
end
+
+ describe '#set_current_context' do
+ let_it_be(:user) { create(:user) }
+
+ before do
+ set_devise_mapping(context: @request)
+ end
+
+ context 'when signed in' do
+ before do
+ sign_in(user)
+ end
+
+ it 'sets the username and caller_id in the context' do
+ expect(controller).to receive(:destroy).and_wrap_original do |m, *args|
+ expect(Labkit::Context.current.to_h)
+ .to include('meta.user' => user.username,
+ 'meta.caller_id' => 'SessionsController#destroy')
+
+ m.call(*args)
+ end
+
+ delete :destroy
+ end
+ end
+
+ context 'when not signed in' do
+ it 'sets the caller_id in the context' do
+ expect(controller).to receive(:new).and_wrap_original do |m, *args|
+ expect(Labkit::Context.current.to_h)
+ .to include('meta.caller_id' => 'SessionsController#new')
+ expect(Labkit::Context.current.to_h)
+ .not_to include('meta.user')
+
+ m.call(*args)
+ end
+
+ get :new
+ end
+ end
+
+ context 'when the user becomes locked' do
+ before do
+ user.update!(failed_attempts: User.maximum_attempts.pred)
+ end
+
+ it 'sets the caller_id in the context' do
+ allow_any_instance_of(User).to receive(:lock_access!).and_wrap_original do |m, *args|
+ expect(Labkit::Context.current.to_h)
+ .to include('meta.caller_id' => 'SessionsController#create')
+ expect(Labkit::Context.current.to_h)
+ .not_to include('meta.user')
+
+ m.call(*args)
+ end
+
+ post(:create,
+ params: { user: { login: user.username, password: user.password.succ } })
+ end
+ end
+ end
end
diff --git a/spec/controllers/snippets/notes_controller_spec.rb b/spec/controllers/snippets/notes_controller_spec.rb
index b93df3555ab..0676ed05212 100644
--- a/spec/controllers/snippets/notes_controller_spec.rb
+++ b/spec/controllers/snippets/notes_controller_spec.rb
@@ -108,7 +108,7 @@ describe Snippets::NotesController do
sign_in(user)
- expect_any_instance_of(Note).to receive(:cross_reference_not_visible_for?).and_return(true)
+ expect_any_instance_of(Note).to receive(:readable_by?).and_return(false)
end
it "does not return any note" do
diff --git a/spec/controllers/snippets_controller_spec.rb b/spec/controllers/snippets_controller_spec.rb
index 66fb1ef8129..a675014a77b 100644
--- a/spec/controllers/snippets_controller_spec.rb
+++ b/spec/controllers/snippets_controller_spec.rb
@@ -3,11 +3,9 @@
require 'spec_helper'
describe SnippetsController do
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
describe 'GET #index' do
- let(:user) { create(:user) }
-
context 'when username parameter is present' do
it_behaves_like 'paginated collection' do
let(:collection) { Snippet.all }
@@ -75,8 +73,37 @@ describe SnippetsController do
end
describe 'GET #show' do
+ shared_examples 'successful response' do
+ it 'renders the snippet' do
+ subject
+
+ expect(assigns(:snippet)).to eq(personal_snippet)
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'renders the blob from the repository' do
+ subject
+
+ expect(assigns(:blob)).to eq(personal_snippet.blobs.first)
+ end
+
+ context 'when feature flag version_snippets is disabled' do
+ before do
+ stub_feature_flags(version_snippets: false)
+ end
+
+ it 'returns the snippet database content' do
+ subject
+
+ blob = assigns(:blob)
+
+ expect(blob.data).to eq(personal_snippet.content)
+ end
+ end
+ end
+
context 'when the personal snippet is private' do
- let(:personal_snippet) { create(:personal_snippet, :private, author: user) }
+ let_it_be(:personal_snippet) { create(:personal_snippet, :private, :repository, author: user) }
context 'when signed in' do
before do
@@ -95,11 +122,8 @@ describe SnippetsController do
end
context 'when signed in user is the author' do
- it 'renders the snippet' do
- get :show, params: { id: personal_snippet.to_param }
-
- expect(assigns(:snippet)).to eq(personal_snippet)
- expect(response).to have_gitlab_http_status(:ok)
+ it_behaves_like 'successful response' do
+ subject { get :show, params: { id: personal_snippet.to_param } }
end
it 'responds with status 404 when embeddable content is requested' do
@@ -120,18 +144,15 @@ describe SnippetsController do
end
context 'when the personal snippet is internal' do
- let(:personal_snippet) { create(:personal_snippet, :internal, author: user) }
+ let_it_be(:personal_snippet) { create(:personal_snippet, :internal, :repository, author: user) }
context 'when signed in' do
before do
sign_in(user)
end
- it 'renders the snippet' do
- get :show, params: { id: personal_snippet.to_param }
-
- expect(assigns(:snippet)).to eq(personal_snippet)
- expect(response).to have_gitlab_http_status(:ok)
+ it_behaves_like 'successful response' do
+ subject { get :show, params: { id: personal_snippet.to_param } }
end
it 'responds with status 404 when embeddable content is requested' do
@@ -151,18 +172,15 @@ describe SnippetsController do
end
context 'when the personal snippet is public' do
- let(:personal_snippet) { create(:personal_snippet, :public, author: user) }
+ let_it_be(:personal_snippet) { create(:personal_snippet, :public, :repository, author: user) }
context 'when signed in' do
before do
sign_in(user)
end
- it 'renders the snippet' do
- get :show, params: { id: personal_snippet.to_param }
-
- expect(assigns(:snippet)).to eq(personal_snippet)
- expect(response).to have_gitlab_http_status(:ok)
+ it_behaves_like 'successful response' do
+ subject { get :show, params: { id: personal_snippet.to_param } }
end
it 'responds with status 200 when embeddable content is requested' do
@@ -483,8 +501,82 @@ describe SnippetsController do
end
describe "GET #raw" do
+ shared_examples '200 status' do
+ before do
+ subject
+ end
+
+ it 'responds with status 200' do
+ expect(assigns(:snippet)).to eq(snippet)
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'has expected headers' do
+ expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
+ expect(response.header['Content-Disposition']).to match(/inline/)
+ end
+
+ it "sets #{Gitlab::Workhorse::DETECT_HEADER} header" do
+ expect(response.header[Gitlab::Workhorse::DETECT_HEADER]).to eq 'true'
+ end
+ end
+
+ shared_examples 'CRLF line ending' do
+ let(:content) { "first line\r\nsecond line\r\nthird line" }
+ let(:formatted_content) { content.gsub(/\r\n/, "\n") }
+ let(:snippet) do
+ create(:personal_snippet, :public, :repository, author: user, content: content)
+ end
+
+ before do
+ allow_next_instance_of(Blob) do |instance|
+ allow(instance).to receive(:data).and_return(content)
+ end
+
+ subject
+ end
+
+ it 'returns LF line endings by default' do
+ expect(response.body).to eq(formatted_content)
+ end
+
+ context 'when parameter present' do
+ let(:params) { { id: snippet.to_param, line_ending: :raw } }
+
+ it 'does not convert line endings when parameter present' do
+ expect(response.body).to eq(content)
+ end
+ end
+ end
+
+ shared_examples 'successful response' do
+ it_behaves_like '200 status'
+ it_behaves_like 'CRLF line ending'
+
+ it 'returns snippet first blob data' do
+ subject
+
+ expect(response.body).to eq snippet.blobs.first.data
+ end
+
+ context 'when feature flag version_snippets is disabled' do
+ before do
+ stub_feature_flags(version_snippets: false)
+ end
+
+ it_behaves_like '200 status'
+ it_behaves_like 'CRLF line ending'
+
+ it 'returns snippet database content' do
+ subject
+
+ expect(response.body).to eq snippet.content
+ end
+ end
+ end
+
context 'when the personal snippet is private' do
- let(:personal_snippet) { create(:personal_snippet, :private, author: user) }
+ let_it_be(:personal_snippet) { create(:personal_snippet, :private, :repository, author: user) }
context 'when signed in' do
before do
@@ -503,24 +595,11 @@ describe SnippetsController do
end
context 'when signed in user is the author' do
- before do
- get :raw, params: { id: personal_snippet.to_param }
- end
+ it_behaves_like 'successful response' do
+ let(:snippet) { personal_snippet }
+ let(:params) { { id: snippet.to_param } }
- it 'responds with status 200' do
- expect(assigns(:snippet)).to eq(personal_snippet)
- expect(response).to have_gitlab_http_status(:ok)
- end
-
- it 'has expected headers' do
- expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
-
- expect(response.header['Content-Disposition']).to match(/inline/)
- end
-
- it "sets #{Gitlab::Workhorse::DETECT_HEADER} header" do
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.header[Gitlab::Workhorse::DETECT_HEADER]).to eq "true"
+ subject { get :raw, params: params }
end
end
end
@@ -535,18 +614,18 @@ describe SnippetsController do
end
context 'when the personal snippet is internal' do
- let(:personal_snippet) { create(:personal_snippet, :internal, author: user) }
+ let_it_be(:personal_snippet) { create(:personal_snippet, :internal, :repository, author: user) }
context 'when signed in' do
before do
sign_in(user)
end
- it 'responds with status 200' do
- get :raw, params: { id: personal_snippet.to_param }
+ it_behaves_like 'successful response' do
+ let(:snippet) { personal_snippet }
+ let(:params) { { id: snippet.to_param } }
- expect(assigns(:snippet)).to eq(personal_snippet)
- expect(response).to have_gitlab_http_status(:ok)
+ subject { get :raw, params: params }
end
end
@@ -560,36 +639,18 @@ describe SnippetsController do
end
context 'when the personal snippet is public' do
- let(:personal_snippet) { create(:personal_snippet, :public, author: user) }
+ let_it_be(:personal_snippet) { create(:personal_snippet, :public, :repository, author: user) }
context 'when signed in' do
before do
sign_in(user)
end
- it 'responds with status 200' do
- get :raw, params: { id: personal_snippet.to_param }
-
- expect(assigns(:snippet)).to eq(personal_snippet)
- expect(response).to have_gitlab_http_status(:ok)
- end
-
- context 'CRLF line ending' do
- let(:personal_snippet) do
- create(:personal_snippet, :public, author: user, content: "first line\r\nsecond line\r\nthird line")
- end
+ it_behaves_like 'successful response' do
+ let(:snippet) { personal_snippet }
+ let(:params) { { id: snippet.to_param } }
- it 'returns LF line endings by default' do
- get :raw, params: { id: personal_snippet.to_param }
-
- expect(response.body).to eq("first line\nsecond line\nthird line")
- end
-
- it 'does not convert line endings when parameter present' do
- get :raw, params: { id: personal_snippet.to_param, line_ending: :raw }
-
- expect(response.body).to eq("first line\r\nsecond line\r\nthird line")
- end
+ subject { get :raw, params: params }
end
end
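
The rewrite of this controller spec leans on two RSpec mechanisms: `shared_examples` packages a set of assertions under a name, and each `it_behaves_like ... do ... end` call re-runs them in a nested group where the caller supplies its own `subject`, `let` bindings, or extra contexts (which is how the nested `stub_feature_flags(version_snippets: false)` case rides along everywhere the group is included). A minimal, self-contained sketch of that pattern, with placeholder data standing in for the controller and snippet objects:

# Runnable with `rspec`; the hash below stands in for a controller response.
RSpec.describe 'the shared example pattern used above' do
  shared_examples 'successful response' do
    it 'returns the expected payload' do
      expect(subject[:status]).to eq(200)
      expect(subject[:body]).to eq(expected_body)
    end
  end

  context 'when the record is public' do
    # The block passed to it_behaves_like defines subject/let for this inclusion only.
    it_behaves_like 'successful response' do
      subject { { status: 200, body: 'public content' } }

      let(:expected_body) { 'public content' }
    end
  end

  context 'when the record is private but the author is signed in' do
    it_behaves_like 'successful response' do
      subject { { status: 200, body: 'private content' } }

      let(:expected_body) { 'private content' }
    end
  end
end
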
diff --git a/spec/controllers/users/terms_controller_spec.rb b/spec/controllers/users/terms_controller_spec.rb
index e0bdec3df1d..99582652c39 100644
--- a/spec/controllers/users/terms_controller_spec.rb
+++ b/spec/controllers/users/terms_controller_spec.rb
@@ -4,7 +4,8 @@ require 'spec_helper'
describe Users::TermsController do
include TermsHelper
- let(:user) { create(:user) }
+
+ let_it_be(:user) { create(:user) }
let(:term) { create(:term) }
before do
@@ -12,88 +13,145 @@ describe Users::TermsController do
end
describe 'GET #index' do
- it 'redirects when no terms exist' do
- get :index
+ context 'when a user is signed in' do
+ it 'redirects when no terms exist' do
+ get :index
+
+ expect(response).to redirect_to(root_path)
+ end
+
+ context 'when terms exist' do
+ before do
+ stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
+ term
+ end
+
+ it 'shows terms when they exist' do
+ get :index
+
+ expect(response).to have_gitlab_http_status(:success)
+ end
+
+ it 'shows a message when the user already accepted the terms' do
+ accept_terms(user)
+
+ get :index
- expect(response).to have_gitlab_http_status(:redirect)
+ expect(controller).to set_flash.now[:notice].to(/already accepted/)
+ end
+ end
end
- context 'when terms exist' do
+ context 'when a user is not signed in' do
before do
- stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
- term
+ sign_out user
end
- it 'shows terms when they exist' do
- get :index
+ context 'when terms exist' do
+ before do
+ stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
+ term
+ end
- expect(response).to have_gitlab_http_status(:success)
- end
+ it 'returns success response' do
+ get :index
- it 'shows a message when the user already accepted the terms' do
- accept_terms(user)
+ expect(response).to have_gitlab_http_status(:success)
+ end
+ end
- get :index
+ context 'when no terms exist' do
+ it 'redirects' do
+ get :index
- expect(controller).to set_flash.now[:notice].to(/already accepted/)
+ expect(response).to redirect_to(root_path)
+ end
end
end
end
describe 'POST #accept' do
- it 'saves that the user accepted the terms' do
- post :accept, params: { id: term.id }
+ context 'when a user is signed in' do
+ it 'saves that the user accepted the terms' do
+ post :accept, params: { id: term.id }
- agreement = user.term_agreements.find_by(term: term)
+ agreement = user.term_agreements.find_by(term: term)
- expect(agreement.accepted).to eq(true)
- end
+ expect(agreement.accepted).to eq(true)
+ end
- it 'redirects to a path when specified' do
- post :accept, params: { id: term.id, redirect: groups_path }
+ it 'redirects to a path when specified' do
+ post :accept, params: { id: term.id, redirect: groups_path }
- expect(response).to redirect_to(groups_path)
- end
+ expect(response).to redirect_to(groups_path)
+ end
- it 'redirects to the referer when no redirect specified' do
- request.env["HTTP_REFERER"] = groups_url
+ it 'redirects to the referer when no redirect specified' do
+ request.env["HTTP_REFERER"] = groups_url
- post :accept, params: { id: term.id }
+ post :accept, params: { id: term.id }
- expect(response).to redirect_to(groups_path)
- end
+ expect(response).to redirect_to(groups_path)
+ end
- context 'redirecting to another domain' do
- it 'is prevented when passing a redirect param' do
- post :accept, params: { id: term.id, redirect: '//example.com/random/path' }
+ context 'redirecting to another domain' do
+ it 'is prevented when passing a redirect param' do
+ post :accept, params: { id: term.id, redirect: '//example.com/random/path' }
- expect(response).to redirect_to(root_path)
+ expect(response).to redirect_to(root_path)
+ end
+
+ it 'is prevented when redirecting to the referer' do
+ request.env["HTTP_REFERER"] = 'http://example.com/and/a/path'
+
+ post :accept, params: { id: term.id }
+
+ expect(response).to redirect_to(root_path)
+ end
end
+ end
- it 'is prevented when redirecting to the referer' do
- request.env["HTTP_REFERER"] = 'http://example.com/and/a/path'
+ context 'when a user is not signed in' do
+ before do
+ sign_out user
+ end
+ it 'redirects to login page' do
post :accept, params: { id: term.id }
- expect(response).to redirect_to(root_path)
+ expect(response).to redirect_to(new_user_session_path)
end
end
end
describe 'POST #decline' do
- it 'stores that the user declined the terms' do
- post :decline, params: { id: term.id }
+ context 'when a user is signed in' do
+ it 'stores that the user declined the terms' do
+ post :decline, params: { id: term.id }
+
+ agreement = user.term_agreements.find_by(term: term)
- agreement = user.term_agreements.find_by(term: term)
+ expect(agreement.accepted).to eq(false)
+ end
- expect(agreement.accepted).to eq(false)
+ it 'signs out the user' do
+ post :decline, params: { id: term.id }
+
+ expect(response).to redirect_to(root_path)
+ expect(assigns(:current_user)).to be_nil
+ end
end
- it 'signs out the user' do
- post :decline, params: { id: term.id }
+ context 'when a user is not signed in' do
+ before do
+ sign_out user
+ end
- expect(response).to redirect_to(root_path)
- expect(assigns(:current_user)).to be_nil
+ it 'redirects to login page' do
+ post :decline, params: { id: term.id }
+
+ expect(response).to redirect_to(new_user_session_path)
+ end
end
end
end
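
Both controller specs above also swap `let` (and the older `set`) for `let_it_be`, a helper from the test-prof gem: the record is created once before all examples in the group and rolled back afterwards, rather than rebuilt for every example, which is why the snippets gain the relatively expensive `:repository` trait up front. A rough sketch of the difference, assuming a Rails test environment with FactoryBot, a `:user` factory, and test-prof already wired up:

# `let` re-creates the record per example; `let_it_be` creates it once per group.
require 'test_prof/recipes/rspec/let_it_be'

RSpec.describe 'let vs let_it_be' do
  let(:per_example_user)  { create(:user) }  # one INSERT for every example that uses it
  let_it_be(:shared_user) { create(:user) }  # one INSERT for the whole group

  it 'sees the shared, persisted record' do
    expect(shared_user).to be_persisted
  end

  it 'reuses the very same row in the next example' do
    expect(shared_user.reload).to eq(shared_user)
  end
end
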
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index f2f7d6cbafc..c6c842ce1a1 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -60,6 +60,7 @@ describe 'Database schema' do
oauth_access_grants: %w[resource_owner_id application_id],
oauth_access_tokens: %w[resource_owner_id application_id],
oauth_applications: %w[owner_id],
+ open_project_tracker_data: %w[closed_status_id],
project_group_links: %w[group_id],
project_statistics: %w[namespace_id],
projects: %w[creator_id namespace_id ci_id mirror_user_id],
diff --git a/spec/factories/broadcast_messages.rb b/spec/factories/broadcast_messages.rb
index ed6e267e7c4..fa8d255ae79 100644
--- a/spec/factories/broadcast_messages.rb
+++ b/spec/factories/broadcast_messages.rb
@@ -6,6 +6,8 @@ FactoryBot.define do
starts_at { 1.day.ago }
ends_at { 1.day.from_now }
+ broadcast_type { :banner }
+
trait :expired do
starts_at { 5.days.ago }
ends_at { 3.days.ago }
@@ -15,5 +17,9 @@ FactoryBot.define do
starts_at { 5.days.from_now }
ends_at { 6.days.from_now }
end
+
+ trait :notification do
+ broadcast_type { :notification }
+ end
end
end
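
The factory now sets a default `broadcast_type` and exposes a `:notification` trait, so specs reach the non-default type with `create(:broadcast_message, :notification)` instead of spelling the attribute out each time. A self-contained FactoryBot sketch of that default-plus-trait shape, using a plain Struct as a stand-in for the real model:

# Defaults live on the factory; traits override them only when explicitly requested.
require 'factory_bot'

BroadcastMessage = Struct.new(:message, :broadcast_type)

FactoryBot.define do
  factory :broadcast_message, class: 'BroadcastMessage' do
    message        { 'SampleMessage' }
    broadcast_type { :banner }          # default type

    trait :notification do
      broadcast_type { :notification }  # opted into per example via the trait name
    end
  end
end

banner = FactoryBot.build(:broadcast_message)
notice = FactoryBot.build(:broadcast_message, :notification)
puts banner.broadcast_type   # => banner
puts notice.broadcast_type   # => notification
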
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index 5127d55645c..446c1c59030 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -230,8 +230,9 @@ FactoryBot.define do
# Build deployment/environment relations if environment name is set
# to the job. If `build.deployment` has already been set, it doesn't
# build a new instance.
+ environment = Gitlab::Ci::Pipeline::Seed::Environment.new(build).to_resource
build.deployment =
- Gitlab::Ci::Pipeline::Seed::Deployment.new(build).to_resource
+ Gitlab::Ci::Pipeline::Seed::Deployment.new(build, environment).to_resource
end
end
@@ -310,6 +311,12 @@ FactoryBot.define do
end
end
+ trait :coverage_reports do
+ after(:build) do |build|
+ build.job_artifacts << create(:ci_job_artifact, :cobertura, job: build)
+ end
+ end
+
trait :expired do
artifacts_expire_at { 1.minute.ago }
end
@@ -354,6 +361,8 @@ FactoryBot.define do
options { {} }
end
+ # TODO: move Security traits to ee_ci_build
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/210486
trait :dast do
options do
{
@@ -394,6 +403,14 @@ FactoryBot.define do
end
end
+ trait :license_scanning do
+ options do
+ {
+ artifacts: { reports: { license_management: 'gl-license-scanning-report.json' } }
+ }
+ end
+ end
+
trait :non_playable do
status { 'created' }
self.when { 'manual' }
diff --git a/spec/factories/ci/job_artifacts.rb b/spec/factories/ci/job_artifacts.rb
index 590578aec9a..8fbf242a607 100644
--- a/spec/factories/ci/job_artifacts.rb
+++ b/spec/factories/ci/job_artifacts.rb
@@ -129,6 +129,36 @@ FactoryBot.define do
end
end
+ trait :cobertura do
+ file_type { :cobertura }
+ file_format { :gzip }
+
+ after(:build) do |artifact, evaluator|
+ artifact.file = fixture_file_upload(
+ Rails.root.join('spec/fixtures/cobertura/coverage.xml.gz'), 'application/x-gzip')
+ end
+ end
+
+ trait :coverage_gocov_xml do
+ file_type { :cobertura }
+ file_format { :gzip }
+
+ after(:build) do |artifact, evaluator|
+ artifact.file = fixture_file_upload(
+ Rails.root.join('spec/fixtures/cobertura/coverage_gocov_xml.xml.gz'), 'application/x-gzip')
+ end
+ end
+
+ trait :coverage_with_corrupted_data do
+ file_type { :cobertura }
+ file_format { :gzip }
+
+ after(:build) do |artifact, evaluator|
+ artifact.file = fixture_file_upload(
+ Rails.root.join('spec/fixtures/cobertura/coverage_with_corrupted_data.xml.gz'), 'application/x-gzip')
+ end
+ end
+
trait :codequality do
file_type { :codequality }
file_format { :raw }
@@ -141,11 +171,21 @@ FactoryBot.define do
trait :lsif do
file_type { :lsif }
- file_format { :raw }
+ file_format { :gzip }
+
+ after(:build) do |artifact, evaluator|
+ artifact.file = fixture_file_upload(
+ Rails.root.join('spec/fixtures/lsif.json.gz'), 'application/x-gzip')
+ end
+ end
+
+ trait :dotenv do
+ file_type { :dotenv }
+ file_format { :gzip }
after(:build) do |artifact, evaluator|
artifact.file = fixture_file_upload(
- Rails.root.join('spec/fixtures/lsif.json.gz'), 'application/octet-stream')
+ Rails.root.join('spec/fixtures/build.env.gz'), 'application/x-gzip')
end
end
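
The new coverage traits here, like the `:coverage_reports` trait on builds above, hang their work on FactoryBot callbacks: `after(:build)` fires once the object has been instantiated (and, for `create`, before it is saved), which is where the fixture file or the associated artifact gets attached. A stripped-down, runnable sketch of the callback mechanics with illustrative stand-in classes instead of the real CI models:

# after(:build) runs with the instantiated object, so associations can be wired up there.
require 'factory_bot'

class Artifact
  attr_accessor :file_type
end

class Build
  attr_accessor :name

  def artifacts
    @artifacts ||= []
  end
end

FactoryBot.define do
  factory :artifact do
    file_type { :cobertura }
  end

  factory :ci_build, class: 'Build' do
    name { 'rspec' }

    trait :coverage_reports do
      after(:build) do |build, _evaluator|
        build.artifacts << FactoryBot.build(:artifact)
      end
    end
  end
end

build = FactoryBot.build(:ci_build, :coverage_reports)
puts build.artifacts.first.file_type   # => cobertura
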
diff --git a/spec/factories/ci/job_variables.rb b/spec/factories/ci/job_variables.rb
index 472a89d3bef..ae2b89ad98d 100644
--- a/spec/factories/ci/job_variables.rb
+++ b/spec/factories/ci/job_variables.rb
@@ -6,5 +6,9 @@ FactoryBot.define do
value { 'VARIABLE_VALUE' }
job factory: :ci_build
+
+ trait :dotenv_source do
+ source { :dotenv }
+ end
end
end
diff --git a/spec/factories/ci/pipelines.rb b/spec/factories/ci/pipelines.rb
index afc203562ba..11686ed5277 100644
--- a/spec/factories/ci/pipelines.rb
+++ b/spec/factories/ci/pipelines.rb
@@ -22,6 +22,7 @@ FactoryBot.define do
factory :ci_pipeline do
trait :invalid do
+ status { :failed }
yaml_errors { 'invalid YAML' }
failure_reason { :config_error }
end
@@ -66,6 +67,14 @@ FactoryBot.define do
end
end
+ trait :with_coverage_reports do
+ status { :success }
+
+ after(:build) do |pipeline, evaluator|
+ pipeline.builds << build(:ci_build, :coverage_reports, pipeline: pipeline, project: pipeline.project)
+ end
+ end
+
trait :with_exposed_artifacts do
status { :success }
diff --git a/spec/factories/ci/ref.rb b/spec/factories/ci/ref.rb
new file mode 100644
index 00000000000..891d8848a72
--- /dev/null
+++ b/spec/factories/ci/ref.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ci_ref, class: 'Ci::Ref' do
+ ref { 'master' }
+ status { :success }
+ tag { false }
+ project
+
+ before(:create) do |ref, evaluator|
+ next if ref.pipelines.exists?
+
+ ref.update!(last_updated_by_pipeline: create(:ci_pipeline, project: evaluator.project, ref: evaluator.ref, tag: evaluator.tag, status: evaluator.status))
+ end
+ end
+end
diff --git a/spec/factories/ci/test_case.rb b/spec/factories/ci/test_case.rb
new file mode 100644
index 00000000000..8017111bcc7
--- /dev/null
+++ b/spec/factories/ci/test_case.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :test_case, class: 'Gitlab::Ci::Reports::TestCase' do
+ name { "test-1" }
+ classname { "trace" }
+ file { "spec/trace_spec.rb" }
+ execution_time { 1.23 }
+ status { "success" }
+ system_output { nil }
+ attachment { nil }
+
+ trait :with_attachment do
+ attachment { "some/path.png" }
+ end
+
+ skip_create
+
+ initialize_with do
+ new(
+ name: name,
+ classname: classname,
+ file: file,
+ execution_time: execution_time,
+ status: status,
+ system_output: system_output,
+ attachment: attachment
+ )
+ end
+ end
+end
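
This factory targets a plain value object rather than an ActiveRecord model, which is why it combines `skip_create` (there is no `#save!` to call) with `initialize_with` (attributes are passed to the object's own keyword constructor instead of being assigned through writers). A self-contained sketch of the same shape, with a small stand-in class in place of Gitlab::Ci::Reports::TestCase:

# Factory for a non-ActiveRecord value object: no save step, constructor-based build.
require 'factory_bot'

class TestCase
  attr_reader :name, :status, :attachment

  def initialize(name:, status:, attachment: nil)
    @name = name
    @status = status
    @attachment = attachment
  end
end

FactoryBot.define do
  factory :test_case, class: 'TestCase' do
    name       { 'test-1' }
    status     { 'success' }
    attachment { nil }

    trait :with_attachment do
      attachment { 'some/path.png' }
    end

    skip_create                   # plain Ruby object, nothing to persist

    initialize_with do
      new(name: name, status: status, attachment: attachment)
    end
  end
end

with_file = FactoryBot.create(:test_case, :with_attachment)
puts with_file.attachment   # => some/path.png
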
diff --git a/spec/factories/clusters/applications/helm.rb b/spec/factories/clusters/applications/helm.rb
index ff9fc882dcc..0a4f0fba9ab 100644
--- a/spec/factories/clusters/applications/helm.rb
+++ b/spec/factories/clusters/applications/helm.rb
@@ -73,39 +73,71 @@ FactoryBot.define do
factory :clusters_applications_ingress, class: 'Clusters::Applications::Ingress' do
modsecurity_enabled { false }
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
+
+ trait :no_helm_installed do
+ cluster factory: %i(cluster provided_by_gcp)
+ end
end
factory :clusters_applications_cert_manager, class: 'Clusters::Applications::CertManager' do
email { 'admin@example.com' }
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
+
+ trait :no_helm_installed do
+ cluster factory: %i(cluster provided_by_gcp)
+ end
end
factory :clusters_applications_elastic_stack, class: 'Clusters::Applications::ElasticStack' do
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
+
+ trait :no_helm_installed do
+ cluster factory: %i(cluster provided_by_gcp)
+ end
end
factory :clusters_applications_crossplane, class: 'Clusters::Applications::Crossplane' do
stack { 'gcp' }
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
+
+ trait :no_helm_installed do
+ cluster factory: %i(cluster provided_by_gcp)
+ end
end
factory :clusters_applications_prometheus, class: 'Clusters::Applications::Prometheus' do
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
+
+ trait :no_helm_installed do
+ cluster factory: %i(cluster provided_by_gcp)
+ end
end
factory :clusters_applications_runner, class: 'Clusters::Applications::Runner' do
runner factory: %i(ci_runner)
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
+
+ trait :no_helm_installed do
+ cluster factory: %i(cluster provided_by_gcp)
+ end
end
factory :clusters_applications_knative, class: 'Clusters::Applications::Knative' do
hostname { 'example.com' }
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
+
+ trait :no_helm_installed do
+ cluster factory: %i(cluster provided_by_gcp)
+ end
end
factory :clusters_applications_jupyter, class: 'Clusters::Applications::Jupyter' do
oauth_application factory: :oauth_application
cluster factory: %i(cluster with_installed_helm provided_by_gcp project)
+
+ trait :no_helm_installed do
+ cluster factory: %i(cluster provided_by_gcp)
+ end
end
end
end
diff --git a/spec/factories/deployments.rb b/spec/factories/deployments.rb
index f92e213a385..42046464213 100644
--- a/spec/factories/deployments.rb
+++ b/spec/factories/deployments.rb
@@ -7,7 +7,7 @@ FactoryBot.define do
tag { false }
user { nil }
project { nil }
- deployable factory: :ci_build
+ deployable { association :ci_build, environment: environment.name, project: environment.project }
environment factory: :environment
after(:build) do |deployment, evaluator|
diff --git a/spec/factories/merge_requests.rb b/spec/factories/merge_requests.rb
index 2344ffffa65..f717bab5f2a 100644
--- a/spec/factories/merge_requests.rb
+++ b/spec/factories/merge_requests.rb
@@ -121,6 +121,18 @@ FactoryBot.define do
end
end
+ trait :with_coverage_reports do
+ after(:build) do |merge_request|
+ merge_request.head_pipeline = build(
+ :ci_pipeline,
+ :success,
+ :with_coverage_reports,
+ project: merge_request.source_project,
+ ref: merge_request.source_branch,
+ sha: merge_request.diff_head_sha)
+ end
+ end
+
trait :with_exposed_artifacts do
after(:build) do |merge_request|
merge_request.head_pipeline = build(
diff --git a/spec/factories/notes.rb b/spec/factories/notes.rb
index 848590efabc..a15c7625500 100644
--- a/spec/factories/notes.rb
+++ b/spec/factories/notes.rb
@@ -164,6 +164,10 @@ FactoryBot.define do
attachment { fixture_file_upload("spec/fixtures/git-cheat-sheet.pdf", "application/pdf") }
end
+ trait :confidential do
+ confidential { true }
+ end
+
transient do
in_reply_to { nil }
end
diff --git a/spec/factories/project_export_jobs.rb b/spec/factories/project_export_jobs.rb
new file mode 100644
index 00000000000..b2666555ea8
--- /dev/null
+++ b/spec/factories/project_export_jobs.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :project_export_job do
+ project
+ jid { SecureRandom.hex(8) }
+ end
+end
diff --git a/spec/factories/releases/link.rb b/spec/factories/releases/link.rb
index 82446dbdb69..001deeb71a0 100644
--- a/spec/factories/releases/link.rb
+++ b/spec/factories/releases/link.rb
@@ -5,5 +5,6 @@ FactoryBot.define do
release
sequence(:name) { |n| "release-18.#{n}.dmg" }
sequence(:url) { |n| "https://example.com/scrambled-url/app-#{n}.zip" }
+ sequence(:filepath) { |n| "/binaries/awesome-app-#{n}" }
end
end
diff --git a/spec/factories/resource_milestone_event.rb b/spec/factories/resource_milestone_event.rb
new file mode 100644
index 00000000000..86c54f2be68
--- /dev/null
+++ b/spec/factories/resource_milestone_event.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :resource_milestone_event do
+ issue { merge_request.nil? ? create(:issue) : nil }
+ merge_request { nil }
+ milestone
+ action { :add }
+ state { :opened }
+ user { issue&.author || merge_request&.author || create(:user) }
+ end
+end
diff --git a/spec/factories/serverless/domain.rb b/spec/factories/serverless/domain.rb
new file mode 100644
index 00000000000..7a6a048fb34
--- /dev/null
+++ b/spec/factories/serverless/domain.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :serverless_domain, class: '::Serverless::Domain' do
+ function_name { 'test-function' }
+ serverless_domain_cluster { create(:serverless_domain_cluster) }
+ environment { create(:environment) }
+
+ skip_create
+ end
+end
diff --git a/spec/factories/services.rb b/spec/factories/services.rb
index ffa51abf26f..2d6d8d71917 100644
--- a/spec/factories/services.rb
+++ b/spec/factories/services.rb
@@ -64,6 +64,7 @@ FactoryBot.define do
factory :jira_service do
project
active { true }
+ type { 'JiraService' }
transient do
create_data { true }
@@ -154,4 +155,14 @@ FactoryBot.define do
IssueTrackerService.set_callback(:validation, :before, :handle_properties)
end
end
+
+ trait :template do
+ project { nil }
+ template { true }
+ end
+
+ trait :instance do
+ project { nil }
+ instance { true }
+ end
end
diff --git a/spec/factories/snippets.rb b/spec/factories/snippets.rb
index 6fcb0319748..3d99a04ea1a 100644
--- a/spec/factories/snippets.rb
+++ b/spec/factories/snippets.rb
@@ -27,12 +27,14 @@ FactoryBot.define do
TestEnv.copy_repo(snippet,
bare_repo: TestEnv.factory_repo_path_bare,
refs: TestEnv::BRANCH_SHA)
+
+ snippet.track_snippet_repository
end
end
trait :empty_repo do
after(:create) do |snippet|
- raise "Failed to create repository!" unless snippet.repository.create_if_not_exists
+ raise "Failed to create repository!" unless snippet.create_repository
end
end
end
@@ -46,6 +48,7 @@ FactoryBot.define do
trait :secret do
visibility_level { Snippet::PUBLIC }
secret { true }
+ project { nil }
end
end
end
diff --git a/spec/factories/user_details.rb b/spec/factories/user_details.rb
new file mode 100644
index 00000000000..3442f057c44
--- /dev/null
+++ b/spec/factories/user_details.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :user_detail do
+ user
+ job_title { 'VP of Sales' }
+ end
+end
diff --git a/spec/factories/user_highest_roles.rb b/spec/factories/user_highest_roles.rb
new file mode 100644
index 00000000000..2e3447348b7
--- /dev/null
+++ b/spec/factories/user_highest_roles.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :user_highest_role do
+ user
+ end
+end
diff --git a/spec/factories/wiki_pages.rb b/spec/factories/wiki_pages.rb
index 761ba58edb2..86571f062ba 100644
--- a/spec/factories/wiki_pages.rb
+++ b/spec/factories/wiki_pages.rb
@@ -16,7 +16,7 @@ FactoryBot.define do
page { OpenStruct.new(url_path: 'some-name') }
association :wiki, factory: :project_wiki, strategy: :build
- initialize_with { new(wiki, page, true) }
+ initialize_with { new(wiki, page) }
before(:create) do |page, evaluator|
page.attributes = evaluator.attrs
diff --git a/spec/factories/x509_certificate.rb b/spec/factories/x509_certificate.rb
index 819ad0704dc..37912548434 100644
--- a/spec/factories/x509_certificate.rb
+++ b/spec/factories/x509_certificate.rb
@@ -8,5 +8,6 @@ FactoryBot.define do
email { 'gitlab@example.org' }
serial_number { 278969561018901340486471282831158785578 }
x509_issuer
+ certificate_status { :good }
end
end
diff --git a/spec/features/admin/admin_health_check_spec.rb b/spec/features/admin/admin_health_check_spec.rb
index 97c34d55d73..9ce96fe8020 100644
--- a/spec/features/admin/admin_health_check_spec.rb
+++ b/spec/features/admin/admin_health_check_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
describe "Admin Health Check", :feature do
include StubENV
- set(:admin) { create(:admin) }
+ let_it_be(:admin) { create(:admin) }
before do
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
diff --git a/spec/features/admin/admin_mode/login_spec.rb b/spec/features/admin/admin_mode/login_spec.rb
new file mode 100644
index 00000000000..b8a910d3a40
--- /dev/null
+++ b/spec/features/admin/admin_mode/login_spec.rb
@@ -0,0 +1,184 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Admin Mode Login', :clean_gitlab_redis_shared_state, :do_not_mock_admin_mode do
+ include TermsHelper
+ include UserLoginHelper
+
+ describe 'with two-factor authentication', :js do
+ def enter_code(code)
+ fill_in 'user_otp_attempt', with: code
+ click_button 'Verify code'
+ end
+
+ context 'with valid username/password' do
+ let(:user) { create(:admin, :two_factor) }
+
+ context 'using one-time code' do
+ it 'blocks login if we reuse the same code immediately' do
+ gitlab_sign_in(user, remember: true)
+
+ expect(page).to have_content('Two-Factor Authentication')
+
+ repeated_otp = user.current_otp
+ enter_code(repeated_otp)
+ gitlab_enable_admin_mode_sign_in(user)
+
+ expect(page).to have_content('Two-Factor Authentication')
+
+ enter_code(repeated_otp)
+
+ expect(current_path).to eq admin_session_path
+ expect(page).to have_content('Invalid two-factor code')
+ end
+
+ context 'not re-using codes' do
+ before do
+ gitlab_sign_in(user, remember: true)
+
+ expect(page).to have_content('Two-Factor Authentication')
+
+ enter_code(user.current_otp)
+ gitlab_enable_admin_mode_sign_in(user)
+
+ expect(page).to have_content('Two-Factor Authentication')
+ end
+
+ it 'allows login with valid code' do
+ # Cannot reuse the TOTP
+ Timecop.travel(30.seconds.from_now) do
+ enter_code(user.current_otp)
+
+ expect(current_path).to eq admin_root_path
+ expect(page).to have_content('Admin mode enabled')
+ end
+ end
+
+ it 'blocks login with invalid code' do
+ # Cannot reuse the TOTP
+ Timecop.travel(30.seconds.from_now) do
+ enter_code('foo')
+
+ expect(page).to have_content('Invalid two-factor code')
+ end
+ end
+
+ it 'allows login with invalid code, then valid code' do
+ # Cannot reuse the TOTP
+ Timecop.travel(30.seconds.from_now) do
+ enter_code('foo')
+
+ expect(page).to have_content('Invalid two-factor code')
+
+ enter_code(user.current_otp)
+
+ expect(current_path).to eq admin_root_path
+ expect(page).to have_content('Admin mode enabled')
+ end
+ end
+
+ context 'using backup code' do
+ let(:codes) { user.generate_otp_backup_codes! }
+
+ before do
+ expect(codes.size).to eq 10
+
+ # Ensure the generated codes get saved
+ user.save
+ end
+
+ context 'with valid code' do
+ it 'allows login' do
+ enter_code(codes.sample)
+
+ expect(current_path).to eq admin_root_path
+ expect(page).to have_content('Admin mode enabled')
+ end
+
+ it 'invalidates the used code' do
+ expect { enter_code(codes.sample) }
+ .to change { user.reload.otp_backup_codes.size }.by(-1)
+ end
+ end
+
+ context 'with invalid code' do
+ it 'blocks login' do
+ code = codes.sample
+ expect(user.invalidate_otp_backup_code!(code)).to eq true
+
+ user.save!
+ expect(user.reload.otp_backup_codes.size).to eq 9
+
+ enter_code(code)
+
+ expect(page).to have_content('Invalid two-factor code.')
+ end
+ end
+ end
+ end
+ end
+
+ context 'when logging in via omniauth' do
+ let(:user) { create(:omniauth_user, :admin, :two_factor, extern_uid: 'my-uid', provider: 'saml')}
+ let(:mock_saml_response) do
+ File.read('spec/fixtures/authentication/saml_response.xml')
+ end
+
+ before do
+ stub_omniauth_saml_config(enabled: true, auto_link_saml_user: true, allow_single_sign_on: ['saml'],
+ providers: [mock_saml_config_with_upstream_two_factor_authn_contexts])
+ end
+
+ context 'when authn_context is worth two factors' do
+ let(:mock_saml_response) do
+ File.read('spec/fixtures/authentication/saml_response.xml')
+ .gsub('urn:oasis:names:tc:SAML:2.0:ac:classes:Password',
+ 'urn:oasis:names:tc:SAML:2.0:ac:classes:SecondFactorOTPSMS')
+ end
+
+ it 'signs user in without prompting for second factor' do
+ sign_in_using_saml!
+
+ expect(page).not_to have_content('Two-Factor Authentication')
+
+ enable_admin_mode_using_saml!
+
+ expect(page).not_to have_content('Two-Factor Authentication')
+ expect(current_path).to eq admin_root_path
+ expect(page).to have_content('Admin mode enabled')
+ end
+ end
+
+ context 'when two factor authentication is required' do
+ it 'shows 2FA prompt after omniauth login' do
+ sign_in_using_saml!
+
+ expect(page).to have_content('Two-Factor Authentication')
+ enter_code(user.current_otp)
+
+ enable_admin_mode_using_saml!
+
+ expect(page).to have_content('Two-Factor Authentication')
+
+ # Cannot reuse the TOTP
+ Timecop.travel(30.seconds.from_now) do
+ enter_code(user.current_otp)
+
+ expect(current_path).to eq admin_root_path
+ expect(page).to have_content('Admin mode enabled')
+ end
+ end
+ end
+
+ def sign_in_using_saml!
+ gitlab_sign_in_via('saml', user, 'my-uid', mock_saml_response)
+ end
+
+ def enable_admin_mode_using_saml!
+ gitlab_enable_admin_mode_sign_in_via('saml', user, 'my-uid', mock_saml_response)
+ end
+ end
+ end
+ end
+end
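
The repeated `Timecop.travel(30.seconds.from_now)` blocks in this spec exist because a TOTP is only valid within its time step, so the clock has to move forward before the same admin can submit a fresh code. A small illustrative sketch of that interaction, assuming the timecop and rotp gems and a throwaway secret (none of the values below come from the spec above):

# Within one 30-second step the generated code repeats; one step later it changes.
require 'timecop'
require 'rotp'

totp = ROTP::TOTP.new('JBSWY3DPEHPK3PXP')   # default 30-second interval
first_code = totp.now

# Submitting the same code twice inside one interval looks like a replay,
# which is what the 'blocks login if we reuse the same code' example asserts.
puts totp.now == first_code                 # => true

Timecop.travel(Time.now + 30) do
  # One interval later a new code is current, matching the
  # "Cannot reuse the TOTP" comments before each second entry.
  puts totp.now == first_code               # => false (next time step, new code)
end
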
diff --git a/spec/features/admin/admin_mode/logout_spec.rb b/spec/features/admin/admin_mode/logout_spec.rb
new file mode 100644
index 00000000000..e1b4aba5724
--- /dev/null
+++ b/spec/features/admin/admin_mode/logout_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Admin Mode Logout', :js, :clean_gitlab_redis_shared_state, :do_not_mock_admin_mode do
+ include TermsHelper
+ include UserLoginHelper
+
+ let(:user) { create(:admin) }
+
+ before do
+ gitlab_sign_in(user)
+ gitlab_enable_admin_mode_sign_in(user)
+ visit admin_root_path
+ end
+
+ it 'disable removes admin mode and redirects to root page' do
+ gitlab_disable_admin_mode
+
+ expect(current_path).to eq root_path
+ expect(page).to have_link(href: new_admin_session_path)
+ end
+
+ it 'disable shows flash notice' do
+ gitlab_disable_admin_mode
+
+ expect(page).to have_selector('.flash-notice')
+ end
+
+ context 'on a read-only instance' do
+ before do
+ allow(Gitlab::Database).to receive(:read_only?).and_return(true)
+ end
+
+ it 'disable removes admin mode and redirects to root page' do
+ gitlab_disable_admin_mode
+
+ expect(current_path).to eq root_path
+ expect(page).to have_link(href: new_admin_session_path)
+ end
+ end
+end
diff --git a/spec/features/admin/admin_mode_spec.rb b/spec/features/admin/admin_mode_spec.rb
index 7b8990aceef..f642d614a5d 100644
--- a/spec/features/admin/admin_mode_spec.rb
+++ b/spec/features/admin/admin_mode_spec.rb
@@ -45,7 +45,7 @@ describe 'Admin mode', :clean_gitlab_redis_shared_state, :do_not_mock_admin_mode
it 'can enter admin mode' do
visit new_admin_session_path
- fill_in 'password', with: admin.password
+ fill_in 'user_password', with: admin.password
click_button 'Enter Admin Mode'
@@ -60,7 +60,7 @@ describe 'Admin mode', :clean_gitlab_redis_shared_state, :do_not_mock_admin_mode
it 'can enter admin mode' do
visit new_admin_session_path
- fill_in 'password', with: admin.password
+ fill_in 'user_password', with: admin.password
click_button 'Enter Admin Mode'
diff --git a/spec/features/admin/admin_runners_spec.rb b/spec/features/admin/admin_runners_spec.rb
index 6bcadda6523..ca2fd2f2e9e 100644
--- a/spec/features/admin/admin_runners_spec.rb
+++ b/spec/features/admin/admin_runners_spec.rb
@@ -57,7 +57,7 @@ describe "Admin Runners" do
expect(page).to have_content 'runner-active'
expect(page).to have_content 'runner-paused'
- input_filtered_search_keys('status=active')
+ input_filtered_search_keys('status:=active')
expect(page).to have_content 'runner-active'
expect(page).not_to have_content 'runner-paused'
end
@@ -68,7 +68,7 @@ describe "Admin Runners" do
visit admin_runners_path
- input_filtered_search_keys('status=offline')
+ input_filtered_search_keys('status:=offline')
expect(page).not_to have_content 'runner-active'
expect(page).not_to have_content 'runner-paused'
@@ -83,12 +83,12 @@ describe "Admin Runners" do
visit admin_runners_path
- input_filtered_search_keys('status=active')
+ input_filtered_search_keys('status:=active')
expect(page).to have_content 'runner-a-1'
expect(page).to have_content 'runner-b-1'
expect(page).not_to have_content 'runner-a-2'
- input_filtered_search_keys('status=active runner-a')
+ input_filtered_search_keys('status:=active runner-a')
expect(page).to have_content 'runner-a-1'
expect(page).not_to have_content 'runner-b-1'
expect(page).not_to have_content 'runner-a-2'
@@ -105,7 +105,7 @@ describe "Admin Runners" do
expect(page).to have_content 'runner-project'
expect(page).to have_content 'runner-group'
- input_filtered_search_keys('type=project_type')
+ input_filtered_search_keys('type:=project_type')
expect(page).to have_content 'runner-project'
expect(page).not_to have_content 'runner-group'
end
@@ -116,7 +116,7 @@ describe "Admin Runners" do
visit admin_runners_path
- input_filtered_search_keys('type=instance_type')
+ input_filtered_search_keys('type:=instance_type')
expect(page).not_to have_content 'runner-project'
expect(page).not_to have_content 'runner-group'
@@ -131,12 +131,12 @@ describe "Admin Runners" do
visit admin_runners_path
- input_filtered_search_keys('type=project_type')
+ input_filtered_search_keys('type:=project_type')
expect(page).to have_content 'runner-a-1'
expect(page).to have_content 'runner-b-1'
expect(page).not_to have_content 'runner-a-2'
- input_filtered_search_keys('type=project_type runner-a')
+ input_filtered_search_keys('type:=project_type runner-a')
expect(page).to have_content 'runner-a-1'
expect(page).not_to have_content 'runner-b-1'
expect(page).not_to have_content 'runner-a-2'
@@ -153,7 +153,7 @@ describe "Admin Runners" do
expect(page).to have_content 'runner-blue'
expect(page).to have_content 'runner-red'
- input_filtered_search_keys('tag=blue')
+ input_filtered_search_keys('tag:=blue')
expect(page).to have_content 'runner-blue'
expect(page).not_to have_content 'runner-red'
@@ -165,7 +165,7 @@ describe "Admin Runners" do
visit admin_runners_path
- input_filtered_search_keys('tag=red')
+ input_filtered_search_keys('tag:=red')
expect(page).not_to have_content 'runner-blue'
expect(page).not_to have_content 'runner-blue'
@@ -179,13 +179,13 @@ describe "Admin Runners" do
visit admin_runners_path
- input_filtered_search_keys('tag=blue')
+ input_filtered_search_keys('tag:=blue')
expect(page).to have_content 'runner-a-1'
expect(page).to have_content 'runner-b-1'
expect(page).not_to have_content 'runner-a-2'
- input_filtered_search_keys('tag=blue runner-a')
+ input_filtered_search_keys('tag:=blue runner-a')
expect(page).to have_content 'runner-a-1'
expect(page).not_to have_content 'runner-b-1'
diff --git a/spec/features/boards/boards_spec.rb b/spec/features/boards/boards_spec.rb
index 8aad598b843..0ac8e7c5fc8 100644
--- a/spec/features/boards/boards_spec.rb
+++ b/spec/features/boards/boards_spec.rb
@@ -519,7 +519,7 @@ describe 'Issue Boards', :js do
page.within(find('.board:nth-child(2)')) do
expect(page).to have_selector('.board-card', count: 8)
expect(find('.board-card', match: :first)).to have_content(bug.title)
- click_button(bug.title)
+ click_link(bug.title)
wait_for_requests
end
@@ -536,7 +536,7 @@ describe 'Issue Boards', :js do
it 'removes label filter by clicking label button on issue' do
page.within(find('.board:nth-child(2)')) do
page.within(find('.board-card', match: :first)) do
- click_button(bug.title)
+ click_link(bug.title)
end
wait_for_requests
@@ -624,7 +624,7 @@ describe 'Issue Boards', :js do
end
def set_filter(type, text)
- find('.filtered-search').native.send_keys("#{type}=#{text}")
+ find('.filtered-search').native.send_keys("#{type}:=#{text}")
end
def submit_filter
diff --git a/spec/features/boards/issue_ordering_spec.rb b/spec/features/boards/issue_ordering_spec.rb
index 62abd914fcb..4c723ddf324 100644
--- a/spec/features/boards/issue_ordering_spec.rb
+++ b/spec/features/boards/issue_ordering_spec.rb
@@ -47,6 +47,31 @@ describe 'Issue Boards', :js do
end
end
+ context 'closed issues' do
+ let!(:issue7) { create(:closed_issue, project: project, title: 'Closed issue 1', closed_at: 1.day.ago) }
+ let!(:issue8) { create(:closed_issue, project: project, title: 'Closed issue 2', closed_at: 1.week.ago) }
+ let!(:issue9) { create(:closed_issue, project: project, title: 'Closed issue 3', closed_at: 2.weeks.ago) }
+
+ before do
+ visit project_board_path(project, board)
+ wait_for_requests
+
+ expect(page).to have_selector('.board', count: 3)
+ end
+
+ it 'orders issues by closed_at' do
+ wait_for_requests
+
+ page.within(find('.board:nth-child(3)')) do
+ first, second, third = all('.board-card').to_a
+
+ expect(first).to have_content(issue7.title)
+ expect(second).to have_content(issue8.title)
+ expect(third).to have_content(issue9.title)
+ end
+ end
+ end
+
context 'ordering in list' do
before do
visit project_board_path(project, board)
diff --git a/spec/features/boards/modal_filter_spec.rb b/spec/features/boards/modal_filter_spec.rb
index d14041ecf3f..31f4c502c61 100644
--- a/spec/features/boards/modal_filter_spec.rb
+++ b/spec/features/boards/modal_filter_spec.rb
@@ -211,7 +211,7 @@ describe 'Issue Boards add issue modal filtering', :js do
end
def set_filter(type, text = '')
- find('.add-issues-modal .filtered-search').native.send_keys("#{type}=#{text}")
+ find('.add-issues-modal .filtered-search').native.send_keys("#{type}:=#{text}")
end
def submit_filter
diff --git a/spec/features/boards/multiple_boards_spec.rb b/spec/features/boards/multiple_boards_spec.rb
index 2389707be9c..8e56be6bdd0 100644
--- a/spec/features/boards/multiple_boards_spec.rb
+++ b/spec/features/boards/multiple_boards_spec.rb
@@ -3,11 +3,11 @@
require 'spec_helper'
describe 'Multiple Issue Boards', :js do
- set(:user) { create(:user) }
- set(:project) { create(:project, :public) }
- set(:planning) { create(:label, project: project, name: 'Planning') }
- set(:board) { create(:board, name: 'board1', project: project) }
- set(:board2) { create(:board, name: 'board2', project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:planning) { create(:label, project: project, name: 'Planning') }
+ let_it_be(:board) { create(:board, name: 'board1', project: project) }
+ let_it_be(:board2) { create(:board, name: 'board2', project: project) }
let(:parent) { project }
let(:boards_path) { project_boards_path(project) }
diff --git a/spec/features/boards/new_issue_spec.rb b/spec/features/boards/new_issue_spec.rb
index 730887370dd..2d41b5d612d 100644
--- a/spec/features/boards/new_issue_spec.rb
+++ b/spec/features/boards/new_issue_spec.rb
@@ -129,10 +129,10 @@ describe 'Issue Boards new issue', :js do
end
context 'group boards' do
- set(:group) { create(:group, :public) }
- set(:project) { create(:project, namespace: group) }
- set(:group_board) { create(:board, group: group) }
- set(:list) { create(:list, board: group_board, position: 0) }
+ let_it_be(:group) { create(:group, :public) }
+ let_it_be(:project) { create(:project, namespace: group) }
+ let_it_be(:group_board) { create(:board, group: group) }
+ let_it_be(:list) { create(:list, board: group_board, position: 0) }
context 'for unauthorized users' do
before do
diff --git a/spec/features/boards/sidebar_spec.rb b/spec/features/boards/sidebar_spec.rb
index c7edb574f19..f30689240c5 100644
--- a/spec/features/boards/sidebar_spec.rb
+++ b/spec/features/boards/sidebar_spec.rb
@@ -277,7 +277,7 @@ describe 'Issue Boards', :js do
wait_for_requests
page.within('.value') do
- expect(page).to have_selector('.badge', count: 2)
+ expect(page).to have_selector('.gl-label-text', count: 2)
expect(page).to have_content(development.title)
expect(page).to have_content(stretch.title)
end
@@ -299,13 +299,13 @@ describe 'Issue Boards', :js do
find('.dropdown-menu-close-icon').click
page.within('.value') do
- expect(page).to have_selector('.badge', count: 3)
+ expect(page).to have_selector('.gl-label-text', count: 3)
expect(page).to have_content(bug.title)
end
end
# 'Development' label does not show since the card is in a 'Development' list label
- expect(card).to have_selector('.badge', count: 2)
+ expect(card).to have_selector('.gl-label', count: 2)
expect(card).to have_content(bug.title)
end
@@ -328,14 +328,14 @@ describe 'Issue Boards', :js do
find('.dropdown-menu-close-icon').click
page.within('.value') do
- expect(page).to have_selector('.badge', count: 4)
+ expect(page).to have_selector('.gl-label-text', count: 4)
expect(page).to have_content(bug.title)
expect(page).to have_content(regression.title)
end
end
# 'Development' label does not show since the card is in a 'Development' list label
- expect(card).to have_selector('.badge', count: 3)
+ expect(card).to have_selector('.gl-label', count: 3)
expect(card).to have_content(bug.title)
expect(card).to have_content(regression.title)
end
@@ -357,13 +357,13 @@ describe 'Issue Boards', :js do
find('.dropdown-menu-close-icon').click
page.within('.value') do
- expect(page).to have_selector('.badge', count: 1)
+ expect(page).to have_selector('.gl-label-text', count: 1)
expect(page).not_to have_content(stretch.title)
end
end
# 'Development' label does not show since the card is in a 'Development' list label
- expect(card).to have_selector('.badge', count: 0)
+ expect(card).to have_selector('.gl-label-text', count: 0)
expect(card).not_to have_content(stretch.title)
end
diff --git a/spec/features/broadcast_messages_spec.rb b/spec/features/broadcast_messages_spec.rb
index 43fbf1010c9..809e53ed7f7 100644
--- a/spec/features/broadcast_messages_spec.rb
+++ b/spec/features/broadcast_messages_spec.rb
@@ -3,28 +3,69 @@
require 'spec_helper'
describe 'Broadcast Messages' do
- let!(:broadcast_message) { create(:broadcast_message, broadcast_type: 'notification', message: 'SampleMessage') }
+ shared_examples 'a Broadcast Messages' do
+ it 'shows broadcast message' do
+ visit root_path
- it 'shows broadcast message' do
- visit root_path
+ expect(page).to have_content 'SampleMessage'
+ end
+ end
+
+ shared_examples 'a dismissable Broadcast Messages' do
+ it 'hides broadcast message after dismiss', :js do
+ visit root_path
+
+ find('.js-dismiss-current-broadcast-notification').click
+
+ expect(page).not_to have_content 'SampleMessage'
+ end
- expect(page).to have_content 'SampleMessage'
+ it 'broadcast message is still hidden after refresh', :js do
+ visit root_path
+
+ find('.js-dismiss-current-broadcast-notification').click
+ visit root_path
+
+ expect(page).not_to have_content 'SampleMessage'
+ end
end
- it 'hides broadcast message after dismiss', :js do
- visit root_path
+ describe 'banner type' do
+ let!(:broadcast_message) { create(:broadcast_message, message: 'SampleMessage') }
- find('.js-dismiss-current-broadcast-notification').click
+ it_behaves_like 'a Broadcast Messages'
- expect(page).not_to have_content 'SampleMessage'
+ it 'shows broadcast message' do
+ visit root_path
+
+ expect(page).not_to have_selector('.js-dismiss-current-broadcast-notification')
+ end
end
- it 'broadcast message is still hidden after refresh', :js do
- visit root_path
+ describe 'dismissable banner type' do
+ let!(:broadcast_message) { create(:broadcast_message, dismissable: true, message: 'SampleMessage') }
+
+ it_behaves_like 'a Broadcast Messages'
+
+ it_behaves_like 'a dismissable Broadcast Messages'
+ end
+
+ describe 'notification type' do
+ let!(:broadcast_message) { create(:broadcast_message, broadcast_type: 'notification', message: 'SampleMessage') }
+
+ it_behaves_like 'a Broadcast Messages'
+
+ it_behaves_like 'a dismissable Broadcast Messages'
+ end
+
+ it 'renders broadcast message with placeholders' do
+ create(:broadcast_message, broadcast_type: 'notification', message: 'Hi {{name}}')
+
+ user = create(:user)
+ sign_in(user)
- find('.js-dismiss-current-broadcast-notification').click
visit root_path
- expect(page).not_to have_content 'SampleMessage'
+ expect(page).to have_content "Hi #{user.name}"
end
end
diff --git a/spec/features/clusters/cluster_detail_page_spec.rb b/spec/features/clusters/cluster_detail_page_spec.rb
index 437e7f18c48..28d6c10f04f 100644
--- a/spec/features/clusters/cluster_detail_page_spec.rb
+++ b/spec/features/clusters/cluster_detail_page_spec.rb
@@ -17,7 +17,7 @@ describe 'Clusterable > Show page' do
it 'allow the user to set domain' do
visit cluster_path
- within '#cluster-integration' do
+ within '.js-cluster-integration-form' do
fill_in('cluster_base_domain', with: 'test.com')
click_on 'Save changes'
end
@@ -34,7 +34,7 @@ describe 'Clusterable > Show page' do
end
it 'shows help text with the domain as an alternative to custom domain' do
- within '#cluster-integration' do
+ within '.js-cluster-integration-form' do
expect(find(cluster_ingress_help_text_selector)).not_to match_css(hide_modifier_selector)
end
end
@@ -44,7 +44,7 @@ describe 'Clusterable > Show page' do
it 'alternative to custom domain is not shown' do
visit cluster_path
- within '#cluster-integration' do
+ within '.js-cluster-integration-form' do
expect(find(cluster_ingress_help_text_selector)).to match_css(hide_modifier_selector)
end
end
@@ -63,7 +63,7 @@ describe 'Clusterable > Show page' do
end
it 'is not able to edit the name, API url, CA certificate nor token' do
- within('#js-cluster-details') do
+ within('.js-provider-details') do
cluster_name_field = find('.cluster-name')
api_url_field = find('#cluster_platform_kubernetes_attributes_api_url')
ca_certificate_field = find('#cluster_platform_kubernetes_attributes_ca_cert')
@@ -77,6 +77,8 @@ describe 'Clusterable > Show page' do
end
it 'displays GKE information' do
+ click_link 'Advanced Settings'
+
within('#advanced-settings-section') do
expect(page).to have_content('Google Kubernetes Engine')
expect(page).to have_content('Manage your Kubernetes cluster by visiting')
@@ -91,7 +93,7 @@ describe 'Clusterable > Show page' do
end
it 'is able to edit the name, API url, CA certificate and token' do
- within('#js-cluster-details') do
+ within('.js-provider-details') do
cluster_name_field = find('#cluster_name')
api_url_field = find('#cluster_platform_kubernetes_attributes_api_url')
ca_certificate_field = find('#cluster_platform_kubernetes_attributes_ca_cert')
@@ -105,6 +107,8 @@ describe 'Clusterable > Show page' do
end
it 'does not display GKE information' do
+ click_link 'Advanced Settings'
+
within('#advanced-settings-section') do
expect(page).not_to have_content('Google Kubernetes Engine')
expect(page).not_to have_content('Manage your Kubernetes cluster by visiting')
diff --git a/spec/features/clusters/installing_applications_shared_examples.rb b/spec/features/clusters/installing_applications_shared_examples.rb
index ff44ce46213..8710e05e5cc 100644
--- a/spec/features/clusters/installing_applications_shared_examples.rb
+++ b/spec/features/clusters/installing_applications_shared_examples.rb
@@ -17,6 +17,12 @@ shared_examples "installing applications on a cluster" do
context 'when cluster is created' do
let(:cluster) { create(:cluster, :provided_by_gcp, *cluster_factory_args) }
+ before do
+ page.within('.js-edit-cluster-form') do
+ click_link 'Applications'
+ end
+ end
+
it 'user can install applications' do
wait_for_requests
@@ -29,6 +35,7 @@ shared_examples "installing applications on a cluster" do
context 'when user installs Helm' do
before do
allow(ClusterInstallAppWorker).to receive(:perform_async)
+ wait_for_requests
page.within('.js-cluster-application-row-helm') do
page.find(:css, '.js-cluster-application-install-button').click
diff --git a/spec/features/commits_spec.rb b/spec/features/commits_spec.rb
index f538df89fd3..d8b886b239f 100644
--- a/spec/features/commits_spec.rb
+++ b/spec/features/commits_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe 'Commits' do
- let(:project) { create(:project, :repository) }
- let(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
describe 'CI' do
before do
@@ -183,4 +183,41 @@ describe 'Commits' do
expect(find('.js-project-refs-dropdown')).to have_content branch_name
end
end
+
+ context 'viewing commits for an author' do
+ let(:author_commit) { project.repository.commits(nil, limit: 1).first }
+ let(:commits) { project.repository.commits(nil, author: author, limit: 40) }
+
+ before do
+ project.add_maintainer(user)
+ sign_in(user)
+ visit project_commits_path(project, nil, author: author)
+ end
+
+ shared_examples 'show commits by author' do
+ it "includes the author's commits" do
+ commits.each do |commit|
+ expect(page).to have_content("#{author_commit.author_name} authored #{commit.authored_date.strftime("%b %d, %Y")}")
+ end
+ end
+ end
+
+ context 'author is complete' do
+ let(:author) { "#{author_commit.author_name} <#{author_commit.author_email}>" }
+
+ it_behaves_like 'show commits by author'
+ end
+
+ context 'author is just a name' do
+ let(:author) { "#{author_commit.author_name}" }
+
+ it_behaves_like 'show commits by author'
+ end
+
+ context 'author is just an email' do
+ let(:author) { "#{author_commit.author_email}" }
+
+ it_behaves_like 'show commits by author'
+ end
+ end
end
diff --git a/spec/features/container_registry_spec.rb b/spec/features/container_registry_spec.rb
deleted file mode 100644
index 881cad1864b..00000000000
--- a/spec/features/container_registry_spec.rb
+++ /dev/null
@@ -1,75 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe 'Container Registry', :js do
- let(:user) { create(:user) }
- let(:project) { create(:project) }
-
- let(:container_repository) do
- create(:container_repository, name: 'my/image')
- end
-
- before do
- sign_in(user)
- project.add_developer(user)
- stub_container_registry_config(enabled: true)
- stub_container_registry_tags(repository: :any, tags: [])
- stub_feature_flags(vue_container_registry_explorer: false)
- end
-
- it 'has a page title set' do
- visit_container_registry
- expect(page).to have_title(_('Container Registry'))
- end
-
- context 'when there are no image repositories' do
- it 'user visits container registry main page' do
- visit_container_registry
-
- expect(page).to have_content 'no container images'
- end
- end
-
- context 'when there are image repositories' do
- before do
- stub_container_registry_tags(repository: %r{my/image}, tags: %w[latest], with_manifest: true)
- project.container_repositories << container_repository
- end
-
- it 'user wants to see multi-level container repository' do
- visit_container_registry
-
- expect(page).to have_content('my/image')
- end
-
- it 'user removes entire container repository', :sidekiq_might_not_need_inline do
- visit_container_registry
-
- expect_any_instance_of(ContainerRepository).to receive(:delete_tags!).and_return(true)
-
- click_on(class: 'js-remove-repo')
- expect(find('.modal .modal-title')).to have_content 'Remove repository'
- find('.modal .modal-footer .btn-danger').click
- end
-
- it 'user removes a specific tag from container repository' do
- visit_container_registry
-
- find('.js-toggle-repo').click
- wait_for_requests
-
- service = double('service')
- expect(service).to receive(:execute).with(container_repository) { { status: :success } }
- expect(Projects::ContainerRepository::DeleteTagsService).to receive(:new).with(container_repository.project, user, tags: ['latest']) { service }
-
- click_on(class: 'js-delete-registry-row', visible: false)
- expect(find('.modal .modal-title')).to have_content 'Remove tag'
- find('.modal .modal-footer .btn-danger').click
- end
- end
-
- def visit_container_registry
- visit project_container_registry_index_path(project)
- end
-end
diff --git a/spec/features/dashboard/issues_filter_spec.rb b/spec/features/dashboard/issues_filter_spec.rb
index 8e7fd1f500f..8e2a3d983b1 100644
--- a/spec/features/dashboard/issues_filter_spec.rb
+++ b/spec/features/dashboard/issues_filter_spec.rb
@@ -28,14 +28,14 @@ describe 'Dashboard Issues filtering', :js do
context 'filtering by milestone' do
it 'shows all issues with no milestone' do
- input_filtered_search("milestone=none")
+ input_filtered_search("milestone:=none")
expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
expect(page).to have_selector('.issue', count: 1)
end
it 'shows all issues with the selected milestone' do
- input_filtered_search("milestone=%\"#{milestone.title}\"")
+ input_filtered_search("milestone:=%\"#{milestone.title}\"")
expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
expect(page).to have_selector('.issue', count: 1)
@@ -63,7 +63,7 @@ describe 'Dashboard Issues filtering', :js do
let!(:label_link) { create(:label_link, label: label, target: issue) }
it 'shows all issues with the selected label' do
- input_filtered_search("label=~#{label.title}")
+ input_filtered_search("label:=~#{label.title}")
page.within 'ul.content-list' do
expect(page).to have_content issue.title
diff --git a/spec/features/dashboard/issues_spec.rb b/spec/features/dashboard/issues_spec.rb
index a2ead1b5d33..ff661014fb9 100644
--- a/spec/features/dashboard/issues_spec.rb
+++ b/spec/features/dashboard/issues_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe 'Dashboard Issues' do
it 'shows issues when current user is author', :js do
reset_filters
- input_filtered_search("author=#{current_user.to_reference}")
+ input_filtered_search("author:=#{current_user.to_reference}")
expect(page).to have_content(authored_issue.title)
expect(page).to have_content(authored_issue_on_public_project.title)
diff --git a/spec/features/dashboard/merge_requests_spec.rb b/spec/features/dashboard/merge_requests_spec.rb
index bb515cfae82..0c728ab22de 100644
--- a/spec/features/dashboard/merge_requests_spec.rb
+++ b/spec/features/dashboard/merge_requests_spec.rb
@@ -107,7 +107,7 @@ describe 'Dashboard Merge Requests' do
it 'shows authored merge requests', :js do
reset_filters
- input_filtered_search("author=#{current_user.to_reference}")
+ input_filtered_search("author:=#{current_user.to_reference}")
expect(page).to have_content(authored_merge_request.title)
expect(page).to have_content(authored_merge_request_from_fork.title)
@@ -120,7 +120,7 @@ describe 'Dashboard Merge Requests' do
it 'shows labeled merge requests', :js do
reset_filters
- input_filtered_search("label=#{label.name}")
+ input_filtered_search("label:=#{label.name}")
expect(page).to have_content(labeled_merge_request.title)
diff --git a/spec/features/dashboard/root_explore_spec.rb b/spec/features/dashboard/root_explore_spec.rb
index 5b686d8b6f1..0e065dbed67 100644
--- a/spec/features/dashboard/root_explore_spec.rb
+++ b/spec/features/dashboard/root_explore_spec.rb
@@ -3,17 +3,17 @@
require 'spec_helper'
describe 'Root explore' do
- set(:public_project) { create(:project, :public) }
- set(:archived_project) { create(:project, :archived) }
- set(:internal_project) { create(:project, :internal) }
- set(:private_project) { create(:project, :private) }
+ let_it_be(:public_project) { create(:project, :public) }
+ let_it_be(:archived_project) { create(:project, :archived) }
+ let_it_be(:internal_project) { create(:project, :internal) }
+ let_it_be(:private_project) { create(:project, :private) }
before do
allow(Gitlab).to receive(:com?).and_return(true)
end
context 'when logged in' do
- set(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
before do
sign_in(user)
diff --git a/spec/features/discussion_comments/commit_spec.rb b/spec/features/discussion_comments/commit_spec.rb
index 0362ddbae82..f594a30165b 100644
--- a/spec/features/discussion_comments/commit_spec.rb
+++ b/spec/features/discussion_comments/commit_spec.rb
@@ -24,7 +24,7 @@ describe 'Thread Comments Commit', :js do
expect(page).to have_css('.js-note-emoji')
end
- it 'adds award to the correct note' do
+ it 'adds award to the correct note', quarantine: 'https://gitlab.com/gitlab-org/gitlab/issues/207973' do
find("#note_#{commit_discussion_note2.id} .js-note-emoji").click
first('.emoji-menu .js-emoji-btn').click
diff --git a/spec/features/discussion_comments/merge_request_spec.rb b/spec/features/discussion_comments/merge_request_spec.rb
index 96184593655..c5457522c8e 100644
--- a/spec/features/discussion_comments/merge_request_spec.rb
+++ b/spec/features/discussion_comments/merge_request_spec.rb
@@ -12,6 +12,9 @@ describe 'Thread Comments Merge Request', :js do
sign_in(user)
visit project_merge_request_path(project, merge_request)
+
+ # Wait for MR widget to load
+ wait_for_requests
end
it_behaves_like 'thread comments', 'merge request'
diff --git a/spec/features/discussion_comments/snippets_spec.rb b/spec/features/discussion_comments/snippets_spec.rb
index 0dccb7f5bb3..bf78a5261c5 100644
--- a/spec/features/discussion_comments/snippets_spec.rb
+++ b/spec/features/discussion_comments/snippets_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe 'Thread Comments Snippet', :js do
- let(:user) { create(:user) }
- let(:project) { create(:project) }
- let(:snippet) { create(:project_snippet, :private, project: project, author: user) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:snippet) { create(:project_snippet, :private, :repository, project: project, author: user) }
before do
stub_feature_flags(snippets_vue: false)
diff --git a/spec/features/error_tracking/user_filters_errors_by_status_spec.rb b/spec/features/error_tracking/user_filters_errors_by_status_spec.rb
new file mode 100644
index 00000000000..51e29e2a5ec
--- /dev/null
+++ b/spec/features/error_tracking/user_filters_errors_by_status_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'When a user filters Sentry errors by status', :js, :use_clean_rails_memory_store_caching, :sidekiq_inline do
+ include_context 'sentry error tracking context feature'
+
+ let_it_be(:issues_response_body) { fixture_file('sentry/issues_sample_response.json') }
+ let_it_be(:filtered_errors_by_status_response) { JSON.parse(issues_response_body).filter { |error| error['status'] == 'ignored' }.to_json }
+ let(:issues_api_url) { "#{sentry_api_urls.issues_url}?limit=20&query=is:unresolved" }
+ let(:issues_api_url_filter) { "#{sentry_api_urls.issues_url}?limit=20&query=is:ignored" }
+ let(:auth_token) {{ 'Authorization' => 'Bearer access_token_123' }}
+ let(:return_header) {{ 'Content-Type' => 'application/json' }}
+
+ before do
+ stub_request(:get, issues_api_url).with(headers: auth_token)
+ .to_return(status: 200, body: issues_response_body, headers: return_header)
+
+ stub_request(:get, issues_api_url_filter).with(headers: auth_token)
+ .to_return(status: 200, body: filtered_errors_by_status_response, headers: return_header)
+ end
+
+ it 'displays the results' do
+ sign_in(project.owner)
+ visit project_error_tracking_index_path(project)
+ page.within(find('.gl-table')) do
+ results = page.all('.table-row')
+ expect(results.count).to be(3)
+ end
+
+ find('.status-dropdown .dropdown-toggle').click
+ find('.dropdown-item', text: 'Ignored').click
+
+ page.within(find('.gl-table')) do
+ results = page.all('.table-row')
+ expect(results.count).to be(1)
+ expect(results.first).to have_content(filtered_errors_by_status_response[0]['title'])
+ end
+ end
+end
diff --git a/spec/features/error_tracking/user_searches_sentry_errors_spec.rb b/spec/features/error_tracking/user_searches_sentry_errors_spec.rb
index 690c60a3c3f..c5559081feb 100644
--- a/spec/features/error_tracking/user_searches_sentry_errors_spec.rb
+++ b/spec/features/error_tracking/user_searches_sentry_errors_spec.rb
@@ -26,7 +26,7 @@ describe 'When a user searches for Sentry errors', :js, :use_clean_rails_memory_
page.within(find('.gl-table')) do
results = page.all('.table-row')
- expect(results.count).to be(2)
+ expect(results.count).to be(3)
end
find('.gl-form-input').set('NotFound').native.send_keys(:return)
diff --git a/spec/features/explore/user_explores_projects_spec.rb b/spec/features/explore/user_explores_projects_spec.rb
index 9c3686dba2d..c64709c0b55 100644
--- a/spec/features/explore/user_explores_projects_spec.rb
+++ b/spec/features/explore/user_explores_projects_spec.rb
@@ -3,10 +3,10 @@
require 'spec_helper'
describe 'User explores projects' do
- set(:archived_project) { create(:project, :archived) }
- set(:internal_project) { create(:project, :internal) }
- set(:private_project) { create(:project, :private) }
- set(:public_project) { create(:project, :public) }
+ let_it_be(:archived_project) { create(:project, :archived) }
+ let_it_be(:internal_project) { create(:project, :internal) }
+ let_it_be(:private_project) { create(:project, :private) }
+ let_it_be(:public_project) { create(:project, :public) }
context 'when not signed in' do
context 'when viewing public projects' do
@@ -19,7 +19,7 @@ describe 'User explores projects' do
end
context 'when signed in' do
- set(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
before do
sign_in(user)
diff --git a/spec/features/group_variables_spec.rb b/spec/features/group_variables_spec.rb
index 2b8d37bd629..524ae837b5e 100644
--- a/spec/features/group_variables_spec.rb
+++ b/spec/features/group_variables_spec.rb
@@ -11,7 +11,7 @@ describe 'Group variables', :js do
before do
group.add_owner(user)
gitlab_sign_in(user)
-
+ stub_feature_flags(new_variables_ui: false)
visit page_path
end
diff --git a/spec/features/groups/clusters/user_spec.rb b/spec/features/groups/clusters/user_spec.rb
index ceec50e4f58..e9ef66e31a2 100644
--- a/spec/features/groups/clusters/user_spec.rb
+++ b/spec/features/groups/clusters/user_spec.rb
@@ -34,7 +34,7 @@ describe 'User Cluster', :js do
it 'user sees a cluster details page' do
subject
- expect(page).to have_content('Kubernetes cluster integration')
+ expect(page).to have_content('GitLab Integration')
expect(page.find_field('cluster[name]').value).to eq('dev-cluster')
expect(page.find_field('cluster[platform_kubernetes_attributes][api_url]').value)
.to have_content('http://example.com')
@@ -93,7 +93,7 @@ describe 'User Cluster', :js do
context 'when user disables the cluster' do
before do
page.find(:css, '.js-cluster-enable-toggle-area .js-project-feature-toggle').click
- page.within('#cluster-integration') { click_button 'Save changes' }
+ page.within('.js-cluster-integration-form') { click_button 'Save changes' }
end
it 'user sees the successful message' do
@@ -105,7 +105,7 @@ describe 'User Cluster', :js do
before do
fill_in 'cluster_name', with: 'my-dev-cluster'
fill_in 'cluster_platform_kubernetes_attributes_token', with: 'new-token'
- page.within('#js-cluster-details') { click_button 'Save changes' }
+ page.within('.js-provider-details') { click_button 'Save changes' }
end
it 'user sees the successful message' do
@@ -117,6 +117,7 @@ describe 'User Cluster', :js do
context 'when user destroys the cluster' do
before do
+ click_link 'Advanced Settings'
click_button 'Remove integration and resources'
fill_in 'confirm_cluster_name_input', with: cluster.name
click_button 'Remove integration'
diff --git a/spec/features/groups/container_registry_spec.rb b/spec/features/groups/container_registry_spec.rb
new file mode 100644
index 00000000000..7e3c1728f3c
--- /dev/null
+++ b/spec/features/groups/container_registry_spec.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Container Registry', :js do
+ let(:user) { create(:user) }
+ let(:group) { create(:group) }
+ let(:project) { create(:project, namespace: group) }
+
+ let(:container_repository) do
+ create(:container_repository, name: 'my/image')
+ end
+
+ before do
+ group.add_owner(user)
+ sign_in(user)
+ stub_container_registry_config(enabled: true)
+ stub_container_registry_tags(repository: :any, tags: [])
+ end
+
+ it 'has a page title set' do
+ visit_container_registry
+
+ expect(page).to have_title _('Container Registry')
+ end
+
+ context 'when there are no image repositories' do
+ it 'list page has no container title' do
+ visit_container_registry
+
+ expect(page).to have_content _('There are no container images available in this group')
+ end
+ end
+
+ context 'when there are image repositories' do
+ before do
+ stub_container_registry_tags(repository: %r{my/image}, tags: %w[latest], with_manifest: true)
+ project.container_repositories << container_repository
+ end
+
+ it 'list page has a list of images' do
+ visit_container_registry
+
+ expect(page).to have_content 'my/image'
+ end
+
+ it 'image repository delete is disabled' do
+ visit_container_registry
+
+ delete_btn = find('[title="Remove repository"]')
+ expect(delete_btn).to be_disabled
+ end
+
+ it 'navigates to repo details' do
+ visit_container_registry_details('my/image')
+
+ expect(page).to have_content 'latest'
+ end
+
+ describe 'image repo details' do
+ before do
+ visit_container_registry_details 'my/image'
+ end
+
+ it 'shows the details breadcrumb' do
+ expect(find('.breadcrumbs')).to have_link 'my/image'
+ end
+
+ it 'shows the image title' do
+ expect(page).to have_content 'my/image tags'
+ end
+
+ it 'user removes a specific tag from container repository' do
+ service = double('service')
+ expect(service).to receive(:execute).with(container_repository) { { status: :success } }
+ expect(Projects::ContainerRepository::DeleteTagsService).to receive(:new).with(container_repository.project, user, tags: ['latest']) { service }
+
+ click_on(class: 'js-delete-registry')
+ expect(find('.modal .modal-title')).to have_content _('Remove tag')
+ find('.modal .modal-footer .btn-danger').click
+ end
+ end
+ end
+
+ def visit_container_registry
+ visit group_container_registries_path(group)
+ end
+
+ def visit_container_registry_details(name)
+ visit_container_registry
+ click_link(name)
+ end
+end
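The new group Container Registry spec avoids a real registry by stubbing the registry config and tag list, and asserts that removing a tag is delegated to a service object. A condensed sketch of that expectation, reusing the names from the spec above:

# Stand-in for the tag-deletion service; the spec only checks that the UI action
# builds it with the expected tags and calls #execute.
service = double('service')
expect(service).to receive(:execute).with(container_repository) { { status: :success } }
expect(Projects::ContainerRepository::DeleteTagsService)
  .to receive(:new).with(container_repository.project, user, tags: ['latest']) { service }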
diff --git a/spec/features/groups/group_page_with_external_authorization_service_spec.rb b/spec/features/groups/group_page_with_external_authorization_service_spec.rb
index 823c8cc8fad..a71b930d35f 100644
--- a/spec/features/groups/group_page_with_external_authorization_service_spec.rb
+++ b/spec/features/groups/group_page_with_external_authorization_service_spec.rb
@@ -47,7 +47,7 @@ describe 'The group page' do
expect(page).to have_link('Group overview')
expect(page).to have_link('Details')
expect(page).not_to have_link('Activity')
- expect(page).not_to have_link('Contribution Analytics')
+ expect(page).not_to have_link('Contribution')
expect(page).not_to have_link('Issues')
expect(page).not_to have_link('Merge Requests')
diff --git a/spec/features/groups/issues_spec.rb b/spec/features/groups/issues_spec.rb
index a3fa87e3242..5b2e98804b0 100644
--- a/spec/features/groups/issues_spec.rb
+++ b/spec/features/groups/issues_spec.rb
@@ -48,7 +48,7 @@ describe 'Group issues page' do
let(:user2) { user_outside_group }
it 'filters by only group users' do
- filtered_search.set('assignee=')
+ filtered_search.set('assignee:=')
expect(find('#js-dropdown-assignee .filter-dropdown')).to have_content(user.name)
expect(find('#js-dropdown-assignee .filter-dropdown')).not_to have_content(user2.name)
diff --git a/spec/features/groups/labels/user_sees_links_to_issuables_spec.rb b/spec/features/groups/labels/user_sees_links_to_issuables_spec.rb
index 6199b566ebc..38561c71323 100644
--- a/spec/features/groups/labels/user_sees_links_to_issuables_spec.rb
+++ b/spec/features/groups/labels/user_sees_links_to_issuables_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe 'Groups > Labels > User sees links to issuables' do
- set(:group) { create(:group, :public) }
+ let_it_be(:group) { create(:group, :public) }
before do
create(:group_label, group: group, title: 'bug')
diff --git a/spec/features/groups/merge_requests_spec.rb b/spec/features/groups/merge_requests_spec.rb
index 0038a8e4892..f87fa10e2f4 100644
--- a/spec/features/groups/merge_requests_spec.rb
+++ b/spec/features/groups/merge_requests_spec.rb
@@ -52,7 +52,7 @@ describe 'Group merge requests page' do
let(:user2) { user_outside_group }
it 'filters by assignee only group users' do
- filtered_search.set('assignee=')
+ filtered_search.set('assignee:=')
expect(find('#js-dropdown-assignee .filter-dropdown')).to have_content(user.name)
expect(find('#js-dropdown-assignee .filter-dropdown')).not_to have_content(user2.name)
diff --git a/spec/features/groups/navbar_spec.rb b/spec/features/groups/navbar_spec.rb
index 8c16dcec42f..0c457c11fce 100644
--- a/spec/features/groups/navbar_spec.rb
+++ b/spec/features/groups/navbar_spec.rb
@@ -10,7 +10,7 @@ describe 'Group navbar' do
{
nav_item: _('Analytics'),
nav_sub_items: [
- _('Contribution Analytics')
+ _('Contribution')
]
}
end
@@ -63,7 +63,22 @@ describe 'Group navbar' do
before do
stub_licensed_features(productivity_analytics: true)
- analytics_nav_item[:nav_sub_items] << _('Productivity Analytics')
+ analytics_nav_item[:nav_sub_items] << _('Productivity')
+
+ group.add_maintainer(user)
+ sign_in(user)
+
+ visit group_path(group)
+ end
+
+ it_behaves_like 'verified navigation bar'
+ end
+
+ context 'when value stream analytics is available' do
+ before do
+ stub_licensed_features(cycle_analytics_for_groups: true)
+
+ analytics_nav_item[:nav_sub_items] << _('Value Stream')
group.add_maintainer(user)
sign_in(user)
diff --git a/spec/features/groups/settings/ci_cd_spec.rb b/spec/features/groups/settings/ci_cd_spec.rb
index 5b1a9512c55..3fbc7c7a695 100644
--- a/spec/features/groups/settings/ci_cd_spec.rb
+++ b/spec/features/groups/settings/ci_cd_spec.rb
@@ -37,6 +37,19 @@ describe 'Group CI/CD settings' do
end
end
+ context 'Deploy tokens' do
+ let!(:deploy_token) { create(:deploy_token, :group, groups: [group]) }
+
+ before do
+ stub_container_registry_config(enabled: true)
+ visit group_settings_ci_cd_path(group)
+ end
+
+ it_behaves_like 'a deploy token in ci/cd settings' do
+ let(:entity_type) { 'group' }
+ end
+ end
+
describe 'Auto DevOps form' do
before do
stub_application_setting(auto_devops_enabled: true)
diff --git a/spec/features/ide/clientside_preview_csp_spec.rb b/spec/features/ide/clientside_preview_csp_spec.rb
new file mode 100644
index 00000000000..e097513def3
--- /dev/null
+++ b/spec/features/ide/clientside_preview_csp_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'IDE Clientside Preview CSP' do
+ let_it_be(:user) { create(:user) }
+
+ shared_context 'disable feature' do
+ before do
+ allow_next_instance_of(ApplicationSetting) do |instance|
+ allow(instance).to receive(:web_ide_clientside_preview_enabled?).and_return(false)
+ end
+ end
+ end
+
+ it_behaves_like 'setting CSP', 'frame-src' do
+ let(:whitelisted_url) { 'https://sandbox.gitlab-static.test' }
+ let(:extended_controller_class) { IdeController }
+
+ subject do
+ visit ide_path
+
+ response_headers['Content-Security-Policy']
+ end
+
+ before do
+ allow_next_instance_of(ApplicationSetting) do |instance|
+ allow(instance).to receive(:web_ide_clientside_preview_enabled?).and_return(true)
+ allow(instance).to receive(:web_ide_clientside_preview_bundler_url).and_return(whitelisted_url)
+ end
+
+ sign_in(user)
+ end
+ end
+end
diff --git a/spec/features/ide/static_object_external_storage_csp_spec.rb b/spec/features/ide/static_object_external_storage_csp_spec.rb
new file mode 100644
index 00000000000..739b3fe2471
--- /dev/null
+++ b/spec/features/ide/static_object_external_storage_csp_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Static Object External Storage Content Security Policy' do
+ let_it_be(:user) { create(:user) }
+
+ shared_context 'disable feature' do
+ before do
+ allow_any_instance_of(ApplicationSetting).to receive(:static_objects_external_storage_url).and_return(nil)
+ end
+ end
+
+ it_behaves_like 'setting CSP', 'connect-src' do
+ let_it_be(:whitelisted_url) { 'https://static-objects.test' }
+ let_it_be(:extended_controller_class) { IdeController }
+
+ subject do
+ visit ide_path
+
+ response_headers['Content-Security-Policy']
+ end
+
+ before do
+ allow_any_instance_of(ApplicationSetting).to receive(:static_objects_external_storage_url).and_return(whitelisted_url)
+ allow_any_instance_of(ApplicationSetting).to receive(:static_objects_external_storage_auth_token).and_return('letmein')
+
+ sign_in(user)
+ end
+ end
+end
diff --git a/spec/features/issuables/issuable_list_spec.rb b/spec/features/issuables/issuable_list_spec.rb
index bcc05d313ad..7014a51ccdc 100644
--- a/spec/features/issuables/issuable_list_spec.rb
+++ b/spec/features/issuables/issuable_list_spec.rb
@@ -41,10 +41,10 @@ describe 'issuable list' do
visit_issuable_list(issuable_type)
- expect(all('.label-link')[0].text).to have_content('B')
- expect(all('.label-link')[1].text).to have_content('X')
- expect(all('.label-link')[2].text).to have_content('a')
- expect(all('.label-link')[3].text).to have_content('z')
+ expect(all('.gl-label-text')[0].text).to have_content('B')
+ expect(all('.gl-label-text')[1].text).to have_content('X')
+ expect(all('.gl-label-text')[2].text).to have_content('a')
+ expect(all('.gl-label-text')[3].text).to have_content('z')
end
end
diff --git a/spec/features/issues/filtered_search/dropdown_assignee_spec.rb b/spec/features/issues/filtered_search/dropdown_assignee_spec.rb
index 8aa29cddd5f..c207e91f02e 100644
--- a/spec/features/issues/filtered_search/dropdown_assignee_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_assignee_spec.rb
@@ -20,13 +20,13 @@ describe 'Dropdown assignee', :js do
describe 'behavior' do
it 'loads all the assignees when opened' do
- input_filtered_search('assignee=', submit: false, extra_space: false)
+ input_filtered_search('assignee:=', submit: false, extra_space: false)
expect_filtered_search_dropdown_results(filter_dropdown, 2)
end
it 'shows current user at top of dropdown' do
- input_filtered_search('assignee=', submit: false, extra_space: false)
+ input_filtered_search('assignee:=', submit: false, extra_space: false)
expect(filter_dropdown.first('.filter-dropdown-item')).to have_content(user.name)
end
@@ -35,7 +35,7 @@ describe 'Dropdown assignee', :js do
describe 'selecting from dropdown without Ajax call' do
before do
Gitlab::Testing::RequestBlockerMiddleware.block_requests!
- input_filtered_search('assignee=', submit: false, extra_space: false)
+ input_filtered_search('assignee:=', submit: false, extra_space: false)
end
after do
diff --git a/spec/features/issues/filtered_search/dropdown_author_spec.rb b/spec/features/issues/filtered_search/dropdown_author_spec.rb
index c95bd7071b3..8ded11b3b08 100644
--- a/spec/features/issues/filtered_search/dropdown_author_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_author_spec.rb
@@ -20,13 +20,13 @@ describe 'Dropdown author', :js do
describe 'behavior' do
it 'loads all the authors when opened' do
- input_filtered_search('author=', submit: false, extra_space: false)
+ input_filtered_search('author:=', submit: false, extra_space: false)
expect_filtered_search_dropdown_results(filter_dropdown, 2)
end
it 'shows current user at top of dropdown' do
- input_filtered_search('author=', submit: false, extra_space: false)
+ input_filtered_search('author:=', submit: false, extra_space: false)
expect(filter_dropdown.first('.filter-dropdown-item')).to have_content(user.name)
end
@@ -35,7 +35,7 @@ describe 'Dropdown author', :js do
describe 'selecting from dropdown without Ajax call' do
before do
Gitlab::Testing::RequestBlockerMiddleware.block_requests!
- input_filtered_search('author=', submit: false, extra_space: false)
+ input_filtered_search('author:=', submit: false, extra_space: false)
end
after do
diff --git a/spec/features/issues/filtered_search/dropdown_base_spec.rb b/spec/features/issues/filtered_search/dropdown_base_spec.rb
index 2a800f054a0..14d3f48b8fc 100644
--- a/spec/features/issues/filtered_search/dropdown_base_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_base_spec.rb
@@ -27,14 +27,14 @@ describe 'Dropdown base', :js do
it 'shows loading indicator when opened' do
slow_requests do
# We aren't using `input_filtered_search` because we want to see the loading indicator
- filtered_search.set('assignee=')
+ filtered_search.set('assignee:=')
expect(page).to have_css("#{js_dropdown_assignee} .filter-dropdown-loading", visible: true)
end
end
it 'hides loading indicator when loaded' do
- input_filtered_search('assignee=', submit: false, extra_space: false)
+ input_filtered_search('assignee:=', submit: false, extra_space: false)
expect(find(js_dropdown_assignee)).not_to have_css('.filter-dropdown-loading')
end
@@ -42,7 +42,7 @@ describe 'Dropdown base', :js do
describe 'caching requests' do
it 'caches requests after the first load' do
- input_filtered_search('assignee=', submit: false, extra_space: false)
+ input_filtered_search('assignee:=', submit: false, extra_space: false)
initial_size = dropdown_assignee_size
expect(initial_size).to be > 0
@@ -50,7 +50,7 @@ describe 'Dropdown base', :js do
new_user = create(:user)
project.add_maintainer(new_user)
find('.filtered-search-box .clear-search').click
- input_filtered_search('assignee=', submit: false, extra_space: false)
+ input_filtered_search('assignee:=', submit: false, extra_space: false)
expect(dropdown_assignee_size).to eq(initial_size)
end
diff --git a/spec/features/issues/filtered_search/dropdown_emoji_spec.rb b/spec/features/issues/filtered_search/dropdown_emoji_spec.rb
index 4c11f83318b..9ab0f49cd15 100644
--- a/spec/features/issues/filtered_search/dropdown_emoji_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_emoji_spec.rb
@@ -43,19 +43,19 @@ describe 'Dropdown emoji', :js do
describe 'behavior' do
it 'opens when the search bar has my-reaction=' do
- filtered_search.set('my-reaction=')
+ filtered_search.set('my-reaction:=')
expect(page).to have_css(js_dropdown_emoji, visible: true)
end
it 'loads all the emojis when opened' do
- input_filtered_search('my-reaction=', submit: false, extra_space: false)
+ input_filtered_search('my-reaction:=', submit: false, extra_space: false)
expect_filtered_search_dropdown_results(filter_dropdown, 3)
end
it 'shows the most populated emoji at top of dropdown' do
- input_filtered_search('my-reaction=', submit: false, extra_space: false)
+ input_filtered_search('my-reaction:=', submit: false, extra_space: false)
expect(first("#{js_dropdown_emoji} .filter-dropdown li")).to have_content(award_emoji_star.name)
end
diff --git a/spec/features/issues/filtered_search/dropdown_label_spec.rb b/spec/features/issues/filtered_search/dropdown_label_spec.rb
index 1e90efc8d56..a982053dbcb 100644
--- a/spec/features/issues/filtered_search/dropdown_label_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_label_spec.rb
@@ -21,7 +21,7 @@ describe 'Dropdown label', :js do
describe 'behavior' do
it 'loads all the labels when opened' do
create(:label, project: project, title: 'bug-label')
- filtered_search.set('label=')
+ filtered_search.set('label:=')
expect_filtered_search_dropdown_results(filter_dropdown, 1)
end
diff --git a/spec/features/issues/filtered_search/dropdown_milestone_spec.rb b/spec/features/issues/filtered_search/dropdown_milestone_spec.rb
index 1f62a8e0c8d..56beb35a1c5 100644
--- a/spec/features/issues/filtered_search/dropdown_milestone_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_milestone_spec.rb
@@ -23,7 +23,7 @@ describe 'Dropdown milestone', :js do
describe 'behavior' do
before do
- filtered_search.set('milestone=')
+ filtered_search.set('milestone:=')
end
it 'loads all the milestones when opened' do
diff --git a/spec/features/issues/filtered_search/dropdown_release_spec.rb b/spec/features/issues/filtered_search/dropdown_release_spec.rb
index fd0a98f9ddc..ae1c84d71b4 100644
--- a/spec/features/issues/filtered_search/dropdown_release_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_release_spec.rb
@@ -23,7 +23,7 @@ describe 'Dropdown release', :js do
describe 'behavior' do
before do
- filtered_search.set('release=')
+ filtered_search.set('release:=')
end
it 'loads all the releases when opened' do
diff --git a/spec/features/issues/filtered_search/filter_issues_spec.rb b/spec/features/issues/filtered_search/filter_issues_spec.rb
index ee5773f1484..756699fb854 100644
--- a/spec/features/issues/filtered_search/filter_issues_spec.rb
+++ b/spec/features/issues/filtered_search/filter_issues_spec.rb
@@ -67,7 +67,7 @@ describe 'Filter issues', :js do
it 'filters by all available tokens' do
search_term = 'issue'
- input_filtered_search("assignee=@#{user.username} author=@#{user.username} label=~#{caps_sensitive_label.title} milestone=%#{milestone.title} #{search_term}")
+ input_filtered_search("assignee:=@#{user.username} author:=@#{user.username} label:=~#{caps_sensitive_label.title} milestone:=%#{milestone.title} #{search_term}")
wait_for_requests
@@ -84,7 +84,7 @@ describe 'Filter issues', :js do
describe 'filter issues by author' do
context 'only author' do
it 'filters issues by searched author' do
- input_filtered_search("author=@#{user.username}")
+ input_filtered_search("author:=@#{user.username}")
wait_for_requests
@@ -98,7 +98,7 @@ describe 'Filter issues', :js do
describe 'filter issues by assignee' do
context 'only assignee' do
it 'filters issues by searched assignee' do
- input_filtered_search("assignee=@#{user.username}")
+ input_filtered_search("assignee:=@#{user.username}")
wait_for_requests
@@ -108,7 +108,7 @@ describe 'Filter issues', :js do
end
it 'filters issues by no assignee' do
- input_filtered_search('assignee=none')
+ input_filtered_search('assignee:=none')
expect_tokens([assignee_token('None')])
expect_issues_list_count(3)
@@ -122,7 +122,7 @@ describe 'Filter issues', :js do
it 'filters issues by multiple assignees' do
create(:issue, project: project, author: user, assignees: [user2, user])
- input_filtered_search("assignee=@#{user.username} assignee=@#{user2.username}")
+ input_filtered_search("assignee:=@#{user.username} assignee:=@#{user2.username}")
expect_tokens([
assignee_token(user.name),
@@ -138,7 +138,7 @@ describe 'Filter issues', :js do
describe 'filter issues by label' do
context 'only label' do
it 'filters issues by searched label' do
- input_filtered_search("label=~#{bug_label.title}")
+ input_filtered_search("label:=~#{bug_label.title}")
expect_tokens([label_token(bug_label.title)])
expect_issues_list_count(2)
@@ -146,7 +146,7 @@ describe 'Filter issues', :js do
end
it 'filters issues not containing searched label' do
- input_filtered_search("label!=~#{bug_label.title}")
+ input_filtered_search("label:!=~#{bug_label.title}")
expect_tokens([label_token(bug_label.title)])
expect_issues_list_count(6)
@@ -154,7 +154,7 @@ describe 'Filter issues', :js do
end
it 'filters issues by no label' do
- input_filtered_search('label=none')
+ input_filtered_search('label:=none')
expect_tokens([label_token('None', false)])
expect_issues_list_count(4)
@@ -162,7 +162,7 @@ describe 'Filter issues', :js do
end
it 'filters issues by no label' do
- input_filtered_search('label!=none')
+ input_filtered_search('label:!=none')
expect_tokens([label_token('None', false)])
expect_issues_list_count(4)
@@ -170,7 +170,7 @@ describe 'Filter issues', :js do
end
it 'filters issues by multiple labels' do
- input_filtered_search("label=~#{bug_label.title} label=~#{caps_sensitive_label.title}")
+ input_filtered_search("label:=~#{bug_label.title} label:=~#{caps_sensitive_label.title}")
expect_tokens([
label_token(bug_label.title),
@@ -181,7 +181,7 @@ describe 'Filter issues', :js do
end
it 'filters issues by multiple labels with not operator' do
- input_filtered_search("label!=~#{bug_label.title} label=~#{caps_sensitive_label.title}")
+ input_filtered_search("label:!=~#{bug_label.title} label:=~#{caps_sensitive_label.title}")
expect_tokens([
label_token(bug_label.title),
@@ -196,7 +196,7 @@ describe 'Filter issues', :js do
special_issue = create(:issue, title: "Issue with special character label", project: project)
special_issue.labels << special_label
- input_filtered_search("label=~#{special_label.title}")
+ input_filtered_search("label:=~#{special_label.title}")
expect_tokens([label_token(special_label.title)])
expect_issues_list_count(1)
@@ -208,7 +208,7 @@ describe 'Filter issues', :js do
special_issue = create(:issue, title: "Issue with special character label", project: project)
special_issue.labels << special_label
- input_filtered_search("label!=~#{special_label.title}")
+ input_filtered_search("label:!=~#{special_label.title}")
expect_tokens([label_token(special_label.title)])
expect_issues_list_count(8)
@@ -218,7 +218,7 @@ describe 'Filter issues', :js do
it 'does not show issues for unused labels' do
new_label = create(:label, project: project, title: 'new_label')
- input_filtered_search("label=~#{new_label.title}")
+ input_filtered_search("label:=~#{new_label.title}")
expect_tokens([label_token(new_label.title)])
expect_no_issues_list
@@ -226,7 +226,7 @@ describe 'Filter issues', :js do
end
it 'does show issues for bug label' do
- input_filtered_search("label!=~#{bug_label.title}")
+ input_filtered_search("label:!=~#{bug_label.title}")
expect_tokens([label_token(bug_label.title)])
expect_issues_list_count(6)
@@ -240,7 +240,7 @@ describe 'Filter issues', :js do
special_multiple_issue = create(:issue, title: "Issue with special character multiple words label", project: project)
special_multiple_issue.labels << special_multiple_label
- input_filtered_search("label=~'#{special_multiple_label.title}'")
+ input_filtered_search("label:=~'#{special_multiple_label.title}'")
# Check for search results (which makes sure that the page has changed)
expect_issues_list_count(1)
@@ -252,7 +252,7 @@ describe 'Filter issues', :js do
end
it 'single quotes' do
- input_filtered_search("label=~'#{multiple_words_label.title}'")
+ input_filtered_search("label:=~'#{multiple_words_label.title}'")
expect_issues_list_count(1)
expect_tokens([label_token("\"#{multiple_words_label.title}\"")])
@@ -260,7 +260,7 @@ describe 'Filter issues', :js do
end
it 'double quotes' do
- input_filtered_search("label=~\"#{multiple_words_label.title}\"")
+ input_filtered_search("label:=~\"#{multiple_words_label.title}\"")
expect_tokens([label_token("\"#{multiple_words_label.title}\"")])
expect_issues_list_count(1)
@@ -272,7 +272,7 @@ describe 'Filter issues', :js do
double_quotes_label_issue = create(:issue, title: "Issue with double quotes label", project: project)
double_quotes_label_issue.labels << double_quotes_label
- input_filtered_search("label=~'#{double_quotes_label.title}'")
+ input_filtered_search("label:=~'#{double_quotes_label.title}'")
expect_tokens([label_token("'#{double_quotes_label.title}'")])
expect_issues_list_count(1)
@@ -284,7 +284,7 @@ describe 'Filter issues', :js do
single_quotes_label_issue = create(:issue, title: "Issue with single quotes label", project: project)
single_quotes_label_issue.labels << single_quotes_label
- input_filtered_search("label=~\"#{single_quotes_label.title}\"")
+ input_filtered_search("label:=~\"#{single_quotes_label.title}\"")
expect_tokens([label_token("\"#{single_quotes_label.title}\"")])
expect_issues_list_count(1)
@@ -296,7 +296,7 @@ describe 'Filter issues', :js do
it 'filters issues by searched label, label2, author, assignee, milestone and text' do
search_term = 'bug'
- input_filtered_search("label=~#{bug_label.title} label=~#{caps_sensitive_label.title} author=@#{user.username} assignee=@#{user.username} milestone=%#{milestone.title} #{search_term}")
+ input_filtered_search("label:=~#{bug_label.title} label:=~#{caps_sensitive_label.title} author:=@#{user.username} assignee:=@#{user.username} milestone:=%#{milestone.title} #{search_term}")
wait_for_requests
@@ -314,7 +314,7 @@ describe 'Filter issues', :js do
it 'filters issues by searched label, label2, author, assignee, not included in a milestone' do
search_term = 'bug'
- input_filtered_search("label=~#{bug_label.title} label=~#{caps_sensitive_label.title} author=@#{user.username} assignee=@#{user.username} milestone!=%#{milestone.title} #{search_term}")
+ input_filtered_search("label:=~#{bug_label.title} label:=~#{caps_sensitive_label.title} author:=@#{user.username} assignee:=@#{user.username} milestone:!=%#{milestone.title} #{search_term}")
wait_for_requests
@@ -332,7 +332,7 @@ describe 'Filter issues', :js do
context 'issue label clicked' do
it 'filters and displays in search bar' do
- find('.issues-list .issue .issuable-main-info .issuable-info a .badge', text: multiple_words_label.title).click
+ find('.issues-list .issue .issuable-main-info .issuable-info a .gl-label-text', text: multiple_words_label.title).click
expect_issues_list_count(1)
expect_tokens([label_token("\"#{multiple_words_label.title}\"")])
@@ -344,7 +344,7 @@ describe 'Filter issues', :js do
describe 'filter issues by milestone' do
context 'only milestone' do
it 'filters issues by searched milestone' do
- input_filtered_search("milestone=%#{milestone.title}")
+ input_filtered_search("milestone:=%#{milestone.title}")
expect_tokens([milestone_token(milestone.title)])
expect_issues_list_count(5)
@@ -352,7 +352,7 @@ describe 'Filter issues', :js do
end
it 'filters issues by no milestone' do
- input_filtered_search("milestone=none")
+ input_filtered_search("milestone:=none")
expect_tokens([milestone_token('None', false)])
expect_issues_list_count(3)
@@ -360,7 +360,7 @@ describe 'Filter issues', :js do
end
it 'filters issues by negation of no milestone' do
- input_filtered_search("milestone!=none ")
+ input_filtered_search("milestone:!=none ")
expect_tokens([milestone_token('None', false, '!=')])
expect_issues_list_count(5)
@@ -372,7 +372,7 @@ describe 'Filter issues', :js do
create(:issue, project: project, milestone: future_milestone, author: user)
end
- input_filtered_search("milestone=upcoming")
+ input_filtered_search("milestone:=upcoming")
expect_tokens([milestone_token('Upcoming', false)])
expect_issues_list_count(1)
@@ -384,7 +384,7 @@ describe 'Filter issues', :js do
create(:issue, project: project, milestone: future_milestone, author: user)
end
- input_filtered_search("milestone!=upcoming")
+ input_filtered_search("milestone:!=upcoming")
expect_tokens([milestone_token('Upcoming', false, '!=')])
expect_issues_list_count(8)
@@ -392,7 +392,7 @@ describe 'Filter issues', :js do
end
it 'filters issues by started milestones' do
- input_filtered_search("milestone=started")
+ input_filtered_search("milestone:=started")
expect_tokens([milestone_token('Started', false)])
expect_issues_list_count(5)
@@ -400,7 +400,7 @@ describe 'Filter issues', :js do
end
it 'filters issues by negation of started milestones' do
- input_filtered_search("milestone!=started")
+ input_filtered_search("milestone:!=started")
expect_tokens([milestone_token('Started', false, '!=')])
expect_issues_list_count(3)
@@ -411,7 +411,7 @@ describe 'Filter issues', :js do
special_milestone = create(:milestone, title: '!@\#{$%^&*()}', project: project)
create(:issue, project: project, milestone: special_milestone)
- input_filtered_search("milestone=%#{special_milestone.title}")
+ input_filtered_search("milestone:=%#{special_milestone.title}")
expect_tokens([milestone_token(special_milestone.title)])
expect_issues_list_count(1)
@@ -422,7 +422,7 @@ describe 'Filter issues', :js do
special_milestone = create(:milestone, title: '!@\#{$%^&*()}', project: project)
create(:issue, project: project, milestone: special_milestone)
- input_filtered_search("milestone!=%#{special_milestone.title}")
+ input_filtered_search("milestone:!=%#{special_milestone.title}")
expect_tokens([milestone_token(special_milestone.title, false, '!=')])
expect_issues_list_count(8)
@@ -432,7 +432,7 @@ describe 'Filter issues', :js do
it 'does not show issues for unused milestones' do
new_milestone = create(:milestone, title: 'new', project: project)
- input_filtered_search("milestone=%#{new_milestone.title}")
+ input_filtered_search("milestone:=%#{new_milestone.title}")
expect_tokens([milestone_token(new_milestone.title)])
expect_no_issues_list
@@ -442,7 +442,7 @@ describe 'Filter issues', :js do
it 'show issues for unused milestones' do
new_milestone = create(:milestone, title: 'new', project: project)
- input_filtered_search("milestone!=%#{new_milestone.title}")
+ input_filtered_search("milestone:!=%#{new_milestone.title}")
expect_tokens([milestone_token(new_milestone.title, false, '!=')])
expect_issues_list_count(8)
@@ -521,7 +521,7 @@ describe 'Filter issues', :js do
context 'searched text with other filters' do
it 'filters issues by searched text, author, text, assignee, text, label1, text, label2, text, milestone and text' do
- input_filtered_search("bug author=@#{user.username} report label=~#{bug_label.title} label=~#{caps_sensitive_label.title} milestone=%#{milestone.title} foo")
+ input_filtered_search("bug author:=@#{user.username} report label:=~#{bug_label.title} label:=~#{caps_sensitive_label.title} milestone:=%#{milestone.title} foo")
expect_issues_list_count(1)
expect_filtered_search_input('bug report foo')
@@ -595,7 +595,7 @@ describe 'Filter issues', :js do
end
it 'milestone dropdown loads milestones' do
- input_filtered_search("milestone=", submit: false)
+ input_filtered_search("milestone:=", submit: false)
within('#js-dropdown-milestone') do
expect(page).to have_selector('.filter-dropdown .filter-dropdown-item', count: 1)
@@ -603,7 +603,7 @@ describe 'Filter issues', :js do
end
it 'label dropdown load labels' do
- input_filtered_search("label=", submit: false)
+ input_filtered_search("label:=", submit: false)
within('#js-dropdown-label') do
expect(page).to have_selector('.filter-dropdown .filter-dropdown-item', count: 3)
diff --git a/spec/features/issues/filtered_search/visual_tokens_spec.rb b/spec/features/issues/filtered_search/visual_tokens_spec.rb
index 2af2e096bcc..29111bff344 100644
--- a/spec/features/issues/filtered_search/visual_tokens_spec.rb
+++ b/spec/features/issues/filtered_search/visual_tokens_spec.rb
@@ -36,7 +36,7 @@ describe 'Visual tokens', :js do
describe 'editing a single token' do
before do
- input_filtered_search('author=@root assignee=none', submit: false)
+ input_filtered_search('author:=@root assignee:=none', submit: false)
first('.tokens-container .filtered-search-token').click
wait_for_requests
end
@@ -77,7 +77,7 @@ describe 'Visual tokens', :js do
describe 'editing multiple tokens' do
before do
- input_filtered_search('author=@root assignee=none', submit: false)
+ input_filtered_search('author:=@root assignee:=none', submit: false)
first('.tokens-container .filtered-search-token').click
end
@@ -93,7 +93,7 @@ describe 'Visual tokens', :js do
describe 'editing a search term while editing another filter token' do
before do
- input_filtered_search('foo assignee=', submit: false)
+ input_filtered_search('foo assignee:=', submit: false)
first('.tokens-container .filtered-search-term').click
end
@@ -112,7 +112,7 @@ describe 'Visual tokens', :js do
describe 'add new token after editing existing token' do
before do
- input_filtered_search('author=@root assignee=none', submit: false)
+ input_filtered_search('author:=@root assignee:=none', submit: false)
first('.tokens-container .filtered-search-token').double_click
filtered_search.send_keys(' ')
end
@@ -123,7 +123,7 @@ describe 'Visual tokens', :js do
end
it 'opens token dropdown' do
- filtered_search.send_keys('author=')
+ filtered_search.send_keys('author:=')
expect(page).to have_css('#js-dropdown-author', visible: true)
end
@@ -131,7 +131,7 @@ describe 'Visual tokens', :js do
describe 'visual tokens' do
it 'creates visual token' do
- filtered_search.send_keys('author=@thomas ')
+ filtered_search.send_keys('author:=@thomas ')
token = page.all('.tokens-container .filtered-search-token')[1]
expect(token.find('.name').text).to eq('Author')
@@ -140,7 +140,7 @@ describe 'Visual tokens', :js do
end
it 'does not tokenize incomplete token' do
- filtered_search.send_keys('author=')
+ filtered_search.send_keys('author:=')
find('body').click
token = page.all('.tokens-container .js-visual-token')[1]
@@ -152,7 +152,7 @@ describe 'Visual tokens', :js do
describe 'search using incomplete visual tokens' do
before do
- input_filtered_search('author=@root assignee=none', extra_space: false)
+ input_filtered_search('author:=@root assignee:=none', extra_space: false)
end
it 'tokenizes the search term to complete visual token' do
diff --git a/spec/features/issues/issue_detail_spec.rb b/spec/features/issues/issue_detail_spec.rb
index 0d24b02a64c..3bb70fdf376 100644
--- a/spec/features/issues/issue_detail_spec.rb
+++ b/spec/features/issues/issue_detail_spec.rb
@@ -28,7 +28,7 @@ describe 'Issue Detail', :js do
visit project_issue_path(project, issue)
end
- it 'encodes the description to prevent xss issues' do
+ it 'encodes the description to prevent xss issues', quarantine: 'https://gitlab.com/gitlab-org/gitlab/issues/207951' do
page.within('.issuable-details .detail-page-description') do
image = find('img.js-lazy-loaded')
diff --git a/spec/features/issues/issue_sidebar_spec.rb b/spec/features/issues/issue_sidebar_spec.rb
index 9baba80bf06..e7c675bf6bf 100644
--- a/spec/features/issues/issue_sidebar_spec.rb
+++ b/spec/features/issues/issue_sidebar_spec.rb
@@ -225,6 +225,29 @@ describe 'Issue Sidebar' do
it 'does not have a option to edit labels' do
expect(page).not_to have_selector('.block.labels .edit-link')
end
+
+ context 'interacting with collapsed sidebar', :js do
+ collapsed_sidebar_selector = 'aside.right-sidebar.right-sidebar-collapsed'
+ expanded_sidebar_selector = 'aside.right-sidebar.right-sidebar-expanded'
+ lock_sidebar_block = '.block.lock'
+ lock_button = '.block.lock .btn-close'
+ collapsed_sidebar_block_icon = '.sidebar-collapsed-icon'
+
+ before do
+ resize_screen_sm
+ end
+
+ it 'expands then does not show the lock dialog form' do
+ expect(page).to have_css(collapsed_sidebar_selector)
+
+ page.within(lock_sidebar_block) do
+ find(collapsed_sidebar_block_icon).click
+ end
+
+ expect(page).to have_css(expanded_sidebar_selector)
+ expect(page).not_to have_selector(lock_button)
+ end
+ end
end
def visit_issue(project, issue)
diff --git a/spec/features/issues/user_creates_issue_spec.rb b/spec/features/issues/user_creates_issue_spec.rb
index b0a2a734877..efcaa8247df 100644
--- a/spec/features/issues/user_creates_issue_spec.rb
+++ b/spec/features/issues/user_creates_issue_spec.rb
@@ -156,7 +156,7 @@ describe "User creates issue" do
expect(page.find_field("issue_description").value).not_to match /\n\n$/
end
- it "cancels a file upload correctly" do
+ it "cancels a file upload correctly", :capybara_ignore_server_errors do
slow_requests do
dropzone_file([Rails.root.join('spec', 'fixtures', 'dk.png')], 0, false)
diff --git a/spec/features/issues/user_edits_issue_spec.rb b/spec/features/issues/user_edits_issue_spec.rb
index ad984cf07e2..d50cf16d8ef 100644
--- a/spec/features/issues/user_edits_issue_spec.rb
+++ b/spec/features/issues/user_edits_issue_spec.rb
@@ -143,16 +143,11 @@ describe "Issues > User edits issue", :js do
end
it 'allows user to unselect themselves' do
- issue2 = create(:issue, project: project, author: user)
+ issue2 = create(:issue, project: project, author: user, assignees: [user])
visit project_issue_path(project, issue2)
page.within '.assignee' do
- click_link 'Edit'
- click_link user.name
-
- close_dropdown_menu_if_visible
-
page.within '.value .author' do
expect(page).to have_content user.name
end
diff --git a/spec/features/issues/user_views_issues_spec.rb b/spec/features/issues/user_views_issues_spec.rb
index 8f174472f49..796e618c7c8 100644
--- a/spec/features/issues/user_views_issues_spec.rb
+++ b/spec/features/issues/user_views_issues_spec.rb
@@ -7,7 +7,7 @@ describe "User views issues" do
let!(:open_issue1) { create(:issue, project: project) }
let!(:open_issue2) { create(:issue, project: project) }
- set(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
shared_examples "opens issue from list" do
it "opens issue" do
diff --git a/spec/features/labels_hierarchy_spec.rb b/spec/features/labels_hierarchy_spec.rb
index c1a2e22a0c2..c66d858a019 100644
--- a/spec/features/labels_hierarchy_spec.rb
+++ b/spec/features/labels_hierarchy_spec.rb
@@ -161,9 +161,9 @@ describe 'Labels Hierarchy', :js do
find('.btn-success').click
expect(page.find('.issue-details h2.title')).to have_content('new created issue')
- expect(page).to have_selector('span.badge', text: grandparent_group_label.title)
- expect(page).to have_selector('span.badge', text: parent_group_label.title)
- expect(page).to have_selector('span.badge', text: project_label_1.title)
+ expect(page).to have_selector('span.gl-label-text', text: grandparent_group_label.title)
+ expect(page).to have_selector('span.gl-label-text', text: parent_group_label.title)
+ expect(page).to have_selector('span.gl-label-text', text: project_label_1.title)
end
end
diff --git a/spec/features/markdown/mermaid_spec.rb b/spec/features/markdown/mermaid_spec.rb
index 542caccb18d..4bf7edf98ca 100644
--- a/spec/features/markdown/mermaid_spec.rb
+++ b/spec/features/markdown/mermaid_spec.rb
@@ -38,7 +38,9 @@ describe 'Mermaid rendering', :js do
visit project_issue_path(project, issue)
- expected = '<text><tspan xml:space="preserve" dy="1em" x="1">Line 1</tspan><tspan xml:space="preserve" dy="1em" x="1">Line 2</tspan></text>'
+ wait_for_requests
+
+ expected = '<text style=""><tspan xml:space="preserve" dy="1em" x="1">Line 1</tspan><tspan xml:space="preserve" dy="1em" x="1">Line 2</tspan></text>'
expect(page.html.scan(expected).count).to be(4)
end
@@ -94,8 +96,67 @@ describe 'Mermaid rendering', :js do
page.find('summary').click
svg = page.find('svg.mermaid')
- expect(svg[:width].to_i).to be_within(5).of(120)
- expect(svg[:height].to_i).to be_within(5).of(220)
+ expect(svg[:style]).to match(/max-width/)
+ expect(svg[:width].to_i).to eq(100)
+ expect(svg[:height].to_i).to eq(0)
+ end
+ end
+
+ it 'correctly sizes mermaid diagram block', :js do
+ description = <<~MERMAID
+ ```mermaid
+ graph TD;
+ A-->B;
+ A-->C;
+ B-->D;
+ C-->D;
+ ```
+ MERMAID
+
+ project = create(:project, :public)
+ issue = create(:issue, project: project, description: description)
+
+ visit project_issue_path(project, issue)
+
+ svg = page.find('svg.mermaid')
+ expect(svg[:style]).to match(/max-width/)
+ expect(svg[:width].to_i).to eq(100)
+ expect(svg[:height].to_i).to eq(0)
+ end
+
+ it 'display button when diagram exceeds length', :js do
+ graph_edges = "A-->B;B-->A;" * 420
+
+ description = <<~MERMAID
+ ```mermaid
+ graph LR
+ #{graph_edges}
+ ```
+ MERMAID
+
+ project = create(:project, :public)
+ issue = create(:issue, project: project, description: description)
+
+ visit project_issue_path(project, issue)
+
+ page.within('.description') do
+ expect(page).not_to have_selector('svg')
+
+ expect(page).to have_selector('pre.mermaid')
+
+ expect(page).to have_selector('.lazy-alert-shown')
+
+ expect(page).to have_selector('.js-lazy-render-mermaid-container')
+ end
+
+ wait_for_requests
+
+ find('.js-lazy-render-mermaid').click
+
+ page.within('.description') do
+ expect(page).to have_selector('svg')
+
+ expect(page).not_to have_selector('.js-lazy-render-mermaid-container')
end
end
end
diff --git a/spec/features/markdown/metrics_spec.rb b/spec/features/markdown/metrics_spec.rb
index 69e93268b57..6ef4f6ddecc 100644
--- a/spec/features/markdown/metrics_spec.rb
+++ b/spec/features/markdown/metrics_spec.rb
@@ -2,25 +2,32 @@
require 'spec_helper'
-describe 'Metrics rendering', :js, :use_clean_rails_memory_store_caching, :sidekiq_might_not_need_inline do
+describe 'Metrics rendering', :js, :use_clean_rails_memory_store_caching, :sidekiq_inline do
include PrometheusHelpers
include GrafanaApiHelpers
+ include MetricsDashboardUrlHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:prometheus_project) }
+ let_it_be(:environment) { create(:environment, project: project) }
- let(:user) { create(:user) }
- let(:project) { create(:prometheus_project) }
- let(:environment) { create(:environment, project: project) }
let(:issue) { create(:issue, project: project, description: description) }
let(:description) { "See [metrics dashboard](#{metrics_url}) for info." }
- let(:metrics_url) { metrics_project_environment_url(project, environment) }
+ let(:metrics_url) { urls.metrics_project_environment_url(project, environment) }
before do
- configure_host
+ clear_host_from_memoized_variables
+
+ allow(::Gitlab.config.gitlab)
+ .to receive(:url)
+ .and_return(urls.root_url.chomp('/'))
+
project.add_developer(user)
sign_in(user)
end
after do
- restore_host
+ clear_host_from_memoized_variables
end
context 'internal metrics embeds' do
@@ -38,7 +45,7 @@ describe 'Metrics rendering', :js, :use_clean_rails_memory_store_caching, :sidek
end
context 'when dashboard params are in included the url' do
- let(:metrics_url) { metrics_project_environment_url(project, environment, **chart_params) }
+ let(:metrics_url) { urls.metrics_project_environment_url(project, environment, **chart_params) }
let(:chart_params) do
{
@@ -55,6 +62,29 @@ describe 'Metrics rendering', :js, :use_clean_rails_memory_store_caching, :sidek
expect(page).to have_text(chart_params[:title])
expect(page).to have_text(chart_params[:y_label])
end
+
+ context 'when two dashboard urls are included' do
+ let(:chart_params_2) do
+ {
+ group: 'System metrics (Kubernetes)',
+ title: 'Core Usage (Total)',
+ y_label: 'Total Cores'
+ }
+ end
+ let(:metrics_url_2) { urls.metrics_project_environment_url(project, environment, **chart_params_2) }
+ let(:description) { "See [metrics dashboard](#{metrics_url}) for info. \n See [metrics dashboard](#{metrics_url_2}) for info." }
+ let(:issue) { create(:issue, project: project, description: description) }
+
+ it 'shows embedded metrics for both urls' do
+ visit project_issue_path(project, issue)
+
+ expect(page).to have_css('div.prometheus-graph')
+ expect(page).to have_text(chart_params[:title])
+ expect(page).to have_text(chart_params[:y_label])
+ expect(page).to have_text(chart_params_2[:title])
+ expect(page).to have_text(chart_params_2[:y_label])
+ end
+ end
end
end
@@ -81,32 +111,4 @@ describe 'Metrics rendering', :js, :use_clean_rails_memory_store_caching, :sidek
def import_common_metrics
::Gitlab::DatabaseImporters::CommonMetrics::Importer.new.execute
end
-
- def configure_host
- @original_default_host = default_url_options[:host]
- @original_gitlab_url = Gitlab.config.gitlab[:url]
-
- # Ensure we create a metrics url with the right host.
- # Configure host for route helpers in specs (also updates root_url):
- default_url_options[:host] = Capybara.server_host
-
- # Ensure we identify urls with the appropriate host.
- # Configure host to include port in app:
- Gitlab.config.gitlab[:url] = root_url.chomp('/')
-
- clear_host_from_memoized_variables
- end
-
- def restore_host
- default_url_options[:host] = @original_default_host
- Gitlab.config.gitlab[:url] = @original_gitlab_url
-
- clear_host_from_memoized_variables
- end
-
- def clear_host_from_memoized_variables
- [:metrics_regex, :grafana_regex].each do |method_name|
- Gitlab::Metrics::Dashboard::Url.clear_memoization(method_name)
- end
- end
end
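The metrics rendering spec above replaces the configure_host/restore_host pair with a direct stub of the configured GitLab URL plus the existing memoization cleanup. A minimal sketch of that wiring, using the same urls helper (MetricsDashboardUrlHelpers) and clear_host_from_memoized_variables shown in the hunk:

before do
  clear_host_from_memoized_variables

  # Point the configured base URL at the test host so embedded dashboard
  # links in issue descriptions are recognised and rendered as charts.
  allow(::Gitlab.config.gitlab)
    .to receive(:url)
    .and_return(urls.root_url.chomp('/'))
end

after do
  clear_host_from_memoized_variables
end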
diff --git a/spec/features/merge_request/maintainer_edits_fork_spec.rb b/spec/features/merge_request/maintainer_edits_fork_spec.rb
index 17ff494a6fa..f1ee6aaa897 100644
--- a/spec/features/merge_request/maintainer_edits_fork_spec.rb
+++ b/spec/features/merge_request/maintainer_edits_fork_spec.rb
@@ -20,7 +20,7 @@ describe 'a maintainer edits files on a source-branch of an MR from a fork', :js
end
before do
- stub_feature_flags(web_ide_default: false, single_mr_diff_view: false, code_navigation: false)
+ stub_feature_flags(web_ide_default: false, single_mr_diff_view: { enabled: false, thing: target_project }, code_navigation: false)
target_project.add_maintainer(user)
sign_in(user)
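Both this spec and the batched-diffs spec below move from a global stub_feature_flags(single_mr_diff_view: ...) to the hash form, which, as these hunks suggest, scopes the stubbed state to a specific record via thing:. A sketch of the two calls as used here:

# Disable the flag only for the MR's target project (this spec) ...
stub_feature_flags(single_mr_diff_view: { enabled: false, thing: target_project })

# ... or enable it only for the project under test (batched-diffs spec below).
stub_feature_flags(single_mr_diff_view: { enabled: true, thing: project })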
diff --git a/spec/features/merge_request/user_creates_image_diff_notes_spec.rb b/spec/features/merge_request/user_creates_image_diff_notes_spec.rb
index e1b139c1b3b..cea9056cd93 100644
--- a/spec/features/merge_request/user_creates_image_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_creates_image_diff_notes_spec.rb
@@ -230,7 +230,7 @@ describe 'Merge request > User creates image diff notes', :js do
it_behaves_like 'onion skin'
end
- describe 'swipe view' do
+ describe 'swipe view', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/209999' do
before do
switch_to_swipe_view
end
diff --git a/spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb b/spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb
index 92d90926c0a..8633d67f875 100644
--- a/spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb
+++ b/spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb
@@ -10,7 +10,7 @@ describe 'Batch diffs', :js do
let(:merge_request) { create(:merge_request, source_project: project, source_branch: 'master', target_branch: 'empty-branch') }
before do
- stub_feature_flags(single_mr_diff_view: true)
+ stub_feature_flags(single_mr_diff_view: { enabled: true, thing: project })
stub_feature_flags(diffs_batch_load: true)
sign_in(project.owner)
diff --git a/spec/features/merge_request/user_manages_subscription_spec.rb b/spec/features/merge_request/user_manages_subscription_spec.rb
index 5c6072c57ff..54d27a06bb1 100644
--- a/spec/features/merge_request/user_manages_subscription_spec.rb
+++ b/spec/features/merge_request/user_manages_subscription_spec.rb
@@ -16,6 +16,8 @@ describe 'User manages subscription', :js do
it 'toggles subscription' do
page.within('.js-issuable-subscribe-button') do
+ wait_for_requests
+
expect(page).to have_css 'button:not(.is-checked)'
find('button:not(.is-checked)').click
diff --git a/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb b/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
index a37fc120b86..5cc61333bb4 100644
--- a/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
+++ b/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
@@ -64,6 +64,10 @@ describe 'Merge request > User merges when pipeline succeeds', :js do
before do
click_button "Merge when pipeline succeeds"
click_link "Cancel automatic merge"
+
+ wait_for_requests
+
+ expect(page).to have_content 'Merge when pipeline succeeds', wait: 0
end
it_behaves_like 'Merge when pipeline succeeds activator'
diff --git a/spec/features/merge_request/user_posts_notes_spec.rb b/spec/features/merge_request/user_posts_notes_spec.rb
index f24e7090605..b22f5a6c211 100644
--- a/spec/features/merge_request/user_posts_notes_spec.rb
+++ b/spec/features/merge_request/user_posts_notes_spec.rb
@@ -5,8 +5,7 @@ require 'spec_helper'
describe 'Merge request > User posts notes', :js do
include NoteInteractionHelpers
- set(:project) { create(:project, :repository) }
-
+ let_it_be(:project) { create(:project, :repository) }
let(:user) { project.creator }
let(:merge_request) do
create(:merge_request, source_project: project, target_project: project)
diff --git a/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb b/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
index 38a31d3bbd9..b8a5a4036a5 100644
--- a/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
+++ b/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
@@ -568,5 +568,8 @@ describe 'Merge request > User resolves diff notes and threads', :js do
def visit_merge_request(mr = nil)
mr ||= merge_request
visit project_merge_request_path(mr.project, mr)
+
+ # Wait for MR widget to load
+ wait_for_requests
end
end
diff --git a/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb b/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
index c482d783bab..21599164ac3 100644
--- a/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
@@ -190,7 +190,7 @@ describe 'Merge request > User sees avatars on diff notes', :js do
def find_line(line_code)
line = find("[id='#{line_code}']")
- line = line.find(:xpath, 'preceding-sibling::*[1][self::td]') if line.tag_name == 'td'
+ line = line.find(:xpath, 'preceding-sibling::*[1][self::td]/preceding-sibling::*[1][self::td]') if line.tag_name == 'td'
line
end
end
diff --git a/spec/features/merge_request/user_sees_diff_spec.rb b/spec/features/merge_request/user_sees_diff_spec.rb
index 2ef4a18f78d..868451883d8 100644
--- a/spec/features/merge_request/user_sees_diff_spec.rb
+++ b/spec/features/merge_request/user_sees_diff_spec.rb
@@ -61,10 +61,6 @@ describe 'Merge request > User sees diff', :js do
let(:merge_request) { create(:merge_request_with_diffs, source_project: forked_project, target_project: project, author: author_user) }
let(:changelog_id) { Digest::SHA1.hexdigest("CHANGELOG") }
- before do
- forked_project.repository.after_import
- end
-
context 'as author' do
it 'shows direct edit link', :sidekiq_might_not_need_inline do
sign_in(author_user)
diff --git a/spec/features/merge_request/user_sees_merge_widget_spec.rb b/spec/features/merge_request/user_sees_merge_widget_spec.rb
index 94f57cdda74..eca011bc786 100644
--- a/spec/features/merge_request/user_sees_merge_widget_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_widget_spec.rb
@@ -585,10 +585,10 @@ describe 'Merge request > User sees merge widget', :js do
within(".js-reports-container") do
click_button 'Expand'
- expect(page).to have_content('Test summary contained 1 failed/error test result out of 2 total tests')
+ expect(page).to have_content('Test summary contained 1 failed out of 2 total tests')
within(".js-report-section-container") do
expect(page).to have_content('rspec found no changed test results out of 1 total test')
- expect(page).to have_content('junit found 1 failed/error test result out of 1 total test')
+ expect(page).to have_content('junit found 1 failed out of 1 total test')
expect(page).to have_content('New')
expect(page).to have_content('addTest')
end
@@ -630,9 +630,9 @@ describe 'Merge request > User sees merge widget', :js do
within(".js-reports-container") do
click_button 'Expand'
- expect(page).to have_content('Test summary contained 1 failed/error test result out of 2 total tests')
+ expect(page).to have_content('Test summary contained 1 failed out of 2 total tests')
within(".js-report-section-container") do
- expect(page).to have_content('rspec found 1 failed/error test result out of 1 total test')
+ expect(page).to have_content('rspec found 1 failed out of 1 total test')
expect(page).to have_content('junit found no changed test results out of 1 total test')
expect(page).not_to have_content('New')
expect(page).to have_content('Test#sum when a is 1 and b is 3 returns summary')
@@ -718,10 +718,10 @@ describe 'Merge request > User sees merge widget', :js do
within(".js-reports-container") do
click_button 'Expand'
- expect(page).to have_content('Test summary contained 1 failed/error test result out of 2 total tests')
+ expect(page).to have_content('Test summary contained 1 error out of 2 total tests')
within(".js-report-section-container") do
expect(page).to have_content('rspec found no changed test results out of 1 total test')
- expect(page).to have_content('junit found 1 failed/error test result out of 1 total test')
+ expect(page).to have_content('junit found 1 error out of 1 total test')
expect(page).to have_content('New')
expect(page).to have_content('addTest')
end
@@ -762,9 +762,9 @@ describe 'Merge request > User sees merge widget', :js do
within(".js-reports-container") do
click_button 'Expand'
- expect(page).to have_content('Test summary contained 1 failed/error test result out of 2 total tests')
+ expect(page).to have_content('Test summary contained 1 error out of 2 total tests')
within(".js-report-section-container") do
- expect(page).to have_content('rspec found 1 failed/error test result out of 1 total test')
+ expect(page).to have_content('rspec found 1 error out of 1 total test')
expect(page).to have_content('junit found no changed test results out of 1 total test')
expect(page).not_to have_content('New')
expect(page).to have_content('Test#sum when a is 4 and b is 4 returns summary')
@@ -857,10 +857,10 @@ describe 'Merge request > User sees merge widget', :js do
within(".js-reports-container") do
click_button 'Expand'
- expect(page).to have_content('Test summary contained 20 failed/error test results out of 20 total tests')
+ expect(page).to have_content('Test summary contained 20 failed out of 20 total tests')
within(".js-report-section-container") do
- expect(page).to have_content('rspec found 10 failed/error test results out of 10 total tests')
- expect(page).to have_content('junit found 10 failed/error test results out of 10 total tests')
+ expect(page).to have_content('rspec found 10 failed out of 10 total tests')
+ expect(page).to have_content('junit found 10 failed out of 10 total tests')
expect(page).to have_content('Test#sum when a is 1 and b is 3 returns summary', count: 2)
end
diff --git a/spec/features/merge_request/user_views_diffs_spec.rb b/spec/features/merge_request/user_views_diffs_spec.rb
index e0e4058dd47..cd0cf1cc78a 100644
--- a/spec/features/merge_request/user_views_diffs_spec.rb
+++ b/spec/features/merge_request/user_views_diffs_spec.rb
@@ -7,10 +7,11 @@ describe 'User views diffs', :js do
create(:merge_request_with_diffs, source_project: project, target_project: project, source_branch: 'merge-test')
end
let(:project) { create(:project, :public, :repository) }
+ let(:view) { 'inline' }
before do
stub_feature_flags(diffs_batch_load: false)
- visit(diffs_project_merge_request_path(project, merge_request))
+ visit(diffs_project_merge_request_path(project, merge_request, view: view))
wait_for_requests
@@ -20,12 +21,20 @@ describe 'User views diffs', :js do
shared_examples 'unfold diffs' do
it 'unfolds diffs upwards' do
first('.js-unfold').click
- expect(find('.file-holder[id="a5cc2925ca8258af241be7e5b0381edf30266302"] .text-file')).to have_content('.bundle')
+
+ page.within('.file-holder[id="a5cc2925ca8258af241be7e5b0381edf30266302"]') do
+ expect(find('.text-file')).to have_content('.bundle')
+ expect(page).to have_selector('.new_line [data-linenumber="1"]', count: 1)
+ end
end
- it 'unfolds diffs to the start' do
- first('.js-unfold-all').click
- expect(find('.file-holder[id="a5cc2925ca8258af241be7e5b0381edf30266302"] .text-file')).to have_content('.rbc')
+ it 'unfolds diffs in the middle' do
+ page.within('.file-holder[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd"]') do
+ all('.js-unfold-all')[1].click
+
+ expect(page).to have_selector('.new_line [data-linenumber="24"]', count: 1)
+ expect(page).not_to have_selector('.new_line [data-linenumber="1"]')
+ end
end
it 'unfolds diffs downwards' do
@@ -66,13 +75,7 @@ describe 'User views diffs', :js do
end
context 'when in the side-by-side view' do
- before do
- find('.js-show-diff-settings').click
-
- click_button 'Side-by-side'
-
- wait_for_requests
- end
+ let(:view) { 'parallel' }
it 'shows diffs in parallel' do
expect(page).to have_css('.parallel')
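The rewrite above drops the settings-dropdown clicks in favour of a `view` query parameter that each context overrides; the overridable-`let` pattern it relies on, sketched with a hypothetical path:

# Sketch: a default `let` consumed by the shared `before`, overridden per context.
describe 'Diff view modes' do
  let(:view) { 'inline' }

  before do
    visit "/example/-/merge_requests/1/diffs?view=#{view}" # hypothetical path
  end

  context 'when in the side-by-side view' do
    let(:view) { 'parallel' }

    it 'shows diffs in parallel' do
      expect(page).to have_css('.parallel')
    end
  end
end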
diff --git a/spec/features/merge_request/user_views_merge_request_from_deleted_fork_spec.rb b/spec/features/merge_request/user_views_merge_request_from_deleted_fork_spec.rb
index 26984a1fb5e..a38bc4f702b 100644
--- a/spec/features/merge_request/user_views_merge_request_from_deleted_fork_spec.rb
+++ b/spec/features/merge_request/user_views_merge_request_from_deleted_fork_spec.rb
@@ -20,7 +20,7 @@ describe 'User views merged merge request from deleted fork' do
fork_owner = source_project.namespace.owners.first
# Place the source_project in the weird in between state
source_project.update_attribute(:pending_delete, true)
- Projects::DestroyService.new(source_project, fork_owner, {}).__send__(:trash_repositories!)
+ Projects::DestroyService.new(source_project, fork_owner, {}).__send__(:trash_project_repositories!)
end
it 'correctly shows the merge request' do
diff --git a/spec/features/merge_requests/filters_generic_behavior_spec.rb b/spec/features/merge_requests/filters_generic_behavior_spec.rb
index c3400acae4f..2bea819cc33 100644
--- a/spec/features/merge_requests/filters_generic_behavior_spec.rb
+++ b/spec/features/merge_requests/filters_generic_behavior_spec.rb
@@ -23,7 +23,7 @@ describe 'Merge Requests > Filters generic behavior', :js do
context 'when filtered by a label' do
before do
- input_filtered_search('label=~bug')
+ input_filtered_search('label:=~bug')
end
describe 'state tabs' do
diff --git a/spec/features/merge_requests/user_filters_by_assignees_spec.rb b/spec/features/merge_requests/user_filters_by_assignees_spec.rb
index 3abee3b656a..12d682bbb15 100644
--- a/spec/features/merge_requests/user_filters_by_assignees_spec.rb
+++ b/spec/features/merge_requests/user_filters_by_assignees_spec.rb
@@ -18,7 +18,7 @@ describe 'Merge Requests > User filters by assignees', :js do
context 'filtering by assignee:none' do
it 'applies the filter' do
- input_filtered_search('assignee=none')
+ input_filtered_search('assignee:=none')
expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
expect(page).not_to have_content 'Bugfix1'
@@ -28,7 +28,7 @@ describe 'Merge Requests > User filters by assignees', :js do
context 'filtering by assignee=@username' do
it 'applies the filter' do
- input_filtered_search("assignee=@#{user.username}")
+ input_filtered_search("assignee:=@#{user.username}")
expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
expect(page).to have_content 'Bugfix1'
diff --git a/spec/features/merge_requests/user_filters_by_labels_spec.rb b/spec/features/merge_requests/user_filters_by_labels_spec.rb
index 7a80ebe9be3..6308579d2d9 100644
--- a/spec/features/merge_requests/user_filters_by_labels_spec.rb
+++ b/spec/features/merge_requests/user_filters_by_labels_spec.rb
@@ -22,7 +22,7 @@ describe 'Merge Requests > User filters by labels', :js do
context 'filtering by label:none' do
it 'applies the filter' do
- input_filtered_search('label=none')
+ input_filtered_search('label:=none')
expect(page).to have_issuable_counts(open: 0, closed: 0, all: 0)
expect(page).not_to have_content 'Bugfix1'
@@ -32,7 +32,7 @@ describe 'Merge Requests > User filters by labels', :js do
context 'filtering by label:~enhancement' do
it 'applies the filter' do
- input_filtered_search('label=~enhancement')
+ input_filtered_search('label:=~enhancement')
expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
expect(page).to have_content 'Bugfix2'
@@ -42,7 +42,7 @@ describe 'Merge Requests > User filters by labels', :js do
context 'filtering by label:~enhancement and label:~bug' do
it 'applies the filters' do
- input_filtered_search('label=~bug label=~enhancement')
+ input_filtered_search('label:=~bug label:=~enhancement')
expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
expect(page).to have_content 'Bugfix2'
diff --git a/spec/features/merge_requests/user_filters_by_milestones_spec.rb b/spec/features/merge_requests/user_filters_by_milestones_spec.rb
index 8cb686e191e..d2a420be996 100644
--- a/spec/features/merge_requests/user_filters_by_milestones_spec.rb
+++ b/spec/features/merge_requests/user_filters_by_milestones_spec.rb
@@ -18,14 +18,14 @@ describe 'Merge Requests > User filters by milestones', :js do
end
it 'filters by no milestone' do
- input_filtered_search('milestone=none')
+ input_filtered_search('milestone:=none')
expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
expect(page).to have_css('.merge-request', count: 1)
end
it 'filters by a specific milestone' do
- input_filtered_search("milestone=%'#{milestone.title}'")
+ input_filtered_search("milestone:=%'#{milestone.title}'")
expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
expect(page).to have_css('.merge-request', count: 1)
@@ -33,7 +33,7 @@ describe 'Merge Requests > User filters by milestones', :js do
describe 'filters by upcoming milestone' do
it 'does not show merge requests with no expiry' do
- input_filtered_search('milestone=upcoming')
+ input_filtered_search('milestone:=upcoming')
expect(page).to have_issuable_counts(open: 0, closed: 0, all: 0)
expect(page).to have_css('.merge-request', count: 0)
@@ -43,7 +43,7 @@ describe 'Merge Requests > User filters by milestones', :js do
let(:milestone) { create(:milestone, project: project, due_date: Date.tomorrow) }
it 'shows merge requests' do
- input_filtered_search('milestone=upcoming')
+ input_filtered_search('milestone:=upcoming')
expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
expect(page).to have_css('.merge-request', count: 1)
@@ -54,7 +54,7 @@ describe 'Merge Requests > User filters by milestones', :js do
let(:milestone) { create(:milestone, project: project, due_date: Date.yesterday) }
it 'does not show any merge requests' do
- input_filtered_search('milestone=upcoming')
+ input_filtered_search('milestone:=upcoming')
expect(page).to have_issuable_counts(open: 0, closed: 0, all: 0)
expect(page).to have_css('.merge-request', count: 0)
diff --git a/spec/features/merge_requests/user_filters_by_multiple_criteria_spec.rb b/spec/features/merge_requests/user_filters_by_multiple_criteria_spec.rb
index 5c9d53778d2..5fac31e58ba 100644
--- a/spec/features/merge_requests/user_filters_by_multiple_criteria_spec.rb
+++ b/spec/features/merge_requests/user_filters_by_multiple_criteria_spec.rb
@@ -20,7 +20,7 @@ describe 'Merge requests > User filters by multiple criteria', :js do
describe 'filtering by label:~"Won\'t fix" and assignee:~bug' do
it 'applies the filters' do
- input_filtered_search("label=~\"Won't fix\" assignee=@#{user.username}")
+ input_filtered_search("label:=~\"Won't fix\" assignee:=@#{user.username}")
expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
expect(page).to have_content 'Bugfix2'
@@ -30,7 +30,7 @@ describe 'Merge requests > User filters by multiple criteria', :js do
describe 'filtering by text, author, assignee, milestone, and label' do
it 'filters by text, author, assignee, milestone, and label' do
- input_filtered_search_keys("author=@#{user.username} assignee=@#{user.username} milestone=%\"v1.1\" label=~\"Won't fix\" Bug")
+ input_filtered_search_keys("author:=@#{user.username} assignee:=@#{user.username} milestone:=%\"v1.1\" label:=~\"Won't fix\" Bug")
expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
expect(page).to have_content 'Bugfix2'
diff --git a/spec/features/merge_requests/user_filters_by_target_branch_spec.rb b/spec/features/merge_requests/user_filters_by_target_branch_spec.rb
index faff7de729d..abe97d4c07e 100644
--- a/spec/features/merge_requests/user_filters_by_target_branch_spec.rb
+++ b/spec/features/merge_requests/user_filters_by_target_branch_spec.rb
@@ -17,7 +17,7 @@ describe 'Merge Requests > User filters by target branch', :js do
context 'filtering by target-branch:master' do
it 'applies the filter' do
- input_filtered_search('target-branch=master')
+ input_filtered_search('target-branch:=master')
expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
expect(page).to have_content mr1.title
@@ -27,7 +27,7 @@ describe 'Merge Requests > User filters by target branch', :js do
context 'filtering by target-branch:merged-target' do
it 'applies the filter' do
- input_filtered_search('target-branch=merged-target')
+ input_filtered_search('target-branch:=merged-target')
expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
expect(page).not_to have_content mr1.title
@@ -37,7 +37,7 @@ describe 'Merge Requests > User filters by target branch', :js do
context 'filtering by target-branch:feature' do
it 'applies the filter' do
- input_filtered_search('target-branch=feature')
+ input_filtered_search('target-branch:=feature')
expect(page).to have_issuable_counts(open: 0, closed: 0, all: 0)
expect(page).not_to have_content mr1.title
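Each filter above now spells out its operator, so the token reads `<field>:<operator><value>` instead of `<field>=<value>`; assuming GitLab's `input_filtered_search` helper from FilteredSearchHelpers, the pattern is:

# Sketch: explicit "=" operator inside the filtered-search token.
it 'filters merge requests by a label' do
  input_filtered_search('label:=~bug') # field "label", operator "=", value "~bug"

  expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
end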
diff --git a/spec/features/merge_requests/user_sorts_merge_requests_spec.rb b/spec/features/merge_requests/user_sorts_merge_requests_spec.rb
index 3c217786d43..5a84bcb0c44 100644
--- a/spec/features/merge_requests/user_sorts_merge_requests_spec.rb
+++ b/spec/features/merge_requests/user_sorts_merge_requests_spec.rb
@@ -10,10 +10,10 @@ describe 'User sorts merge requests' do
create(:merge_request_with_diffs, source_project: project, target_project: project, source_branch: 'merge-test')
end
- set(:user) { create(:user) }
- set(:group) { create(:group) }
- set(:group_member) { create(:group_member, :maintainer, user: user, group: group) }
- set(:project) { create(:project, :public, group: group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:group_member) { create(:group_member, :maintainer, user: user, group: group) }
+ let_it_be(:project) { create(:project, :public, group: group) }
before do
sign_in(user)
diff --git a/spec/features/merge_requests/user_views_open_merge_requests_spec.rb b/spec/features/merge_requests/user_views_open_merge_requests_spec.rb
index 932090bdbce..4aaa20f0455 100644
--- a/spec/features/merge_requests/user_views_open_merge_requests_spec.rb
+++ b/spec/features/merge_requests/user_views_open_merge_requests_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe 'User views open merge requests' do
- set(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
shared_examples_for 'shows merge requests' do
it 'shows merge requests' do
@@ -12,7 +12,7 @@ describe 'User views open merge requests' do
end
context 'when project is public' do
- set(:project) { create(:project, :public, :repository) }
+ let_it_be(:project) { create(:project, :public, :repository) }
context 'when not signed in' do
context "when the target branch is the project's default branch" do
@@ -114,7 +114,7 @@ describe 'User views open merge requests' do
context 'when project is internal' do
let!(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
- set(:project) { create(:project, :internal, :repository) }
+ let_it_be(:project) { create(:project, :internal, :repository) }
context 'when signed in' do
before do
diff --git a/spec/features/milestones/user_creates_milestone_spec.rb b/spec/features/milestones/user_creates_milestone_spec.rb
index 5c93ddcf6f8..368a2ddecdf 100644
--- a/spec/features/milestones/user_creates_milestone_spec.rb
+++ b/spec/features/milestones/user_creates_milestone_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe "User creates milestone", :js do
- set(:user) { create(:user) }
- set(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
before do
project.add_developer(user)
diff --git a/spec/features/milestones/user_edits_milestone_spec.rb b/spec/features/milestones/user_edits_milestone_spec.rb
index b41b8f3282f..be05685aff7 100644
--- a/spec/features/milestones/user_edits_milestone_spec.rb
+++ b/spec/features/milestones/user_edits_milestone_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe "User edits milestone", :js do
- set(:user) { create(:user) }
- set(:project) { create(:project) }
- set(:milestone) { create(:milestone, project: project, start_date: Date.today, due_date: 5.days.from_now) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:milestone) { create(:milestone, project: project, start_date: Date.today, due_date: 5.days.from_now) }
before do
project.add_developer(user)
diff --git a/spec/features/milestones/user_promotes_milestone_spec.rb b/spec/features/milestones/user_promotes_milestone_spec.rb
index 7678b6cbfa5..d14097e1ef4 100644
--- a/spec/features/milestones/user_promotes_milestone_spec.rb
+++ b/spec/features/milestones/user_promotes_milestone_spec.rb
@@ -3,10 +3,10 @@
require 'spec_helper'
describe 'User promotes milestone' do
- set(:group) { create(:group) }
- set(:user) { create(:user) }
- set(:project) { create(:project, namespace: group) }
- set(:milestone) { create(:milestone, project: project) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, namespace: group) }
+ let_it_be(:milestone) { create(:milestone, project: project) }
context 'when user can admin group milestones' do
before do
diff --git a/spec/features/milestones/user_views_milestone_spec.rb b/spec/features/milestones/user_views_milestone_spec.rb
index 71abb195ad1..cbc21dd02e5 100644
--- a/spec/features/milestones/user_views_milestone_spec.rb
+++ b/spec/features/milestones/user_views_milestone_spec.rb
@@ -3,10 +3,10 @@
require 'spec_helper'
describe "User views milestone" do
- set(:user) { create(:user) }
- set(:project) { create(:project) }
- set(:milestone) { create(:milestone, project: project) }
- set(:labels) { create_list(:label, 2, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:milestone) { create(:milestone, project: project) }
+ let_it_be(:labels) { create_list(:label, 2, project: project) }
before do
project.add_developer(user)
diff --git a/spec/features/milestones/user_views_milestones_spec.rb b/spec/features/milestones/user_views_milestones_spec.rb
index c91fe95aa77..e17797a8165 100644
--- a/spec/features/milestones/user_views_milestones_spec.rb
+++ b/spec/features/milestones/user_views_milestones_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe "User views milestones" do
- set(:user) { create(:user) }
- set(:project) { create(:project) }
- set(:milestone) { create(:milestone, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:milestone) { create(:milestone, project: project) }
before do
project.add_developer(user)
@@ -22,8 +22,8 @@ describe "User views milestones" do
end
context "with issues" do
- set(:issue) { create(:issue, project: project, milestone: milestone) }
- set(:closed_issue) { create(:closed_issue, project: project, milestone: milestone) }
+ let_it_be(:issue) { create(:issue, project: project, milestone: milestone) }
+ let_it_be(:closed_issue) { create(:closed_issue, project: project, milestone: milestone) }
it "opens milestone" do
click_link(milestone.title)
@@ -38,7 +38,7 @@ describe "User views milestones" do
end
context "with associated releases" do
- set(:first_release) { create(:release, project: project, name: "The first release", milestones: [milestone], released_at: Time.zone.parse('2019-10-07')) }
+ let_it_be(:first_release) { create(:release, project: project, name: "The first release", milestones: [milestone], released_at: Time.zone.parse('2019-10-07')) }
context "with a single associated release" do
it "shows the associated release" do
@@ -48,10 +48,10 @@ describe "User views milestones" do
end
context "with lots of associated releases" do
- set(:second_release) { create(:release, project: project, name: "The second release", milestones: [milestone], released_at: first_release.released_at + 1.day) }
- set(:third_release) { create(:release, project: project, name: "The third release", milestones: [milestone], released_at: second_release.released_at + 1.day) }
- set(:fourth_release) { create(:release, project: project, name: "The fourth release", milestones: [milestone], released_at: third_release.released_at + 1.day) }
- set(:fifth_release) { create(:release, project: project, name: "The fifth release", milestones: [milestone], released_at: fourth_release.released_at + 1.day) }
+ let_it_be(:second_release) { create(:release, project: project, name: "The second release", milestones: [milestone], released_at: first_release.released_at + 1.day) }
+ let_it_be(:third_release) { create(:release, project: project, name: "The third release", milestones: [milestone], released_at: second_release.released_at + 1.day) }
+ let_it_be(:fourth_release) { create(:release, project: project, name: "The fourth release", milestones: [milestone], released_at: third_release.released_at + 1.day) }
+ let_it_be(:fifth_release) { create(:release, project: project, name: "The fifth release", milestones: [milestone], released_at: fourth_release.released_at + 1.day) }
it "shows the associated releases and the truncation text" do
expect(page).to have_content("Releases #{fifth_release.name} • #{fourth_release.name} • #{third_release.name} • 2 more releases")
@@ -66,9 +66,9 @@ describe "User views milestones" do
end
describe "User views milestones with no MR" do
- set(:user) { create(:user) }
- set(:project) { create(:project, :merge_requests_disabled) }
- set(:milestone) { create(:milestone, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :merge_requests_disabled) }
+ let_it_be(:milestone) { create(:milestone, project: project) }
before do
project.add_developer(user)
diff --git a/spec/features/profiles/active_sessions_spec.rb b/spec/features/profiles/active_sessions_spec.rb
index bab6251a5d4..8f63ce2a197 100644
--- a/spec/features/profiles/active_sessions_spec.rb
+++ b/spec/features/profiles/active_sessions_spec.rb
@@ -11,81 +11,77 @@ describe 'Profile > Active Sessions', :clean_gitlab_redis_shared_state do
let(:admin) { create(:admin) }
- around do |example|
- Timecop.freeze(Time.zone.parse('2018-03-12 09:06')) do
- example.run
- end
- end
-
it 'User sees their active sessions' do
- Capybara::Session.new(:session1)
- Capybara::Session.new(:session2)
- Capybara::Session.new(:session3)
-
- # note: headers can only be set on the non-js (aka. rack-test) driver
- using_session :session1 do
- Capybara.page.driver.header(
- 'User-Agent',
- 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:58.0) Gecko/20100101 Firefox/58.0'
- )
-
- gitlab_sign_in(user)
- end
-
- # set an additional session on another device
- using_session :session2 do
- Capybara.page.driver.header(
- 'User-Agent',
- 'Mozilla/5.0 (iPhone; CPU iPhone OS 8_1_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Mobile/12B466 [FBDV/iPhone7,2]'
- )
-
- gitlab_sign_in(user)
- end
-
- # set an admin session impersonating the user
- using_session :session3 do
- Capybara.page.driver.header(
- 'User-Agent',
- 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.113 Safari/537.36'
- )
-
- gitlab_sign_in(admin)
-
- visit admin_user_path(user)
-
- click_link 'Impersonate'
- end
-
- using_session :session1 do
- visit profile_active_sessions_path
-
- expect(page).to(
- have_selector('ul.list-group li.list-group-item', { text: 'Signed in on',
- count: 2 }))
-
- expect(page).to have_content(
- '127.0.0.1 ' \
- 'This is your current session ' \
- 'Firefox on Ubuntu ' \
- 'Signed in on 12 Mar 09:06'
- )
-
- expect(page).to have_selector '[title="Desktop"]', count: 1
-
- expect(page).to have_content(
- '127.0.0.1 ' \
- 'Last accessed on 12 Mar 09:06 ' \
- 'Mobile Safari on iOS ' \
- 'Signed in on 12 Mar 09:06'
- )
-
- expect(page).to have_selector '[title="Smartphone"]', count: 1
-
- expect(page).not_to have_content('Chrome on Windows')
+ Timecop.freeze(Time.zone.parse('2018-03-12 09:06')) do
+ Capybara::Session.new(:session1)
+ Capybara::Session.new(:session2)
+ Capybara::Session.new(:session3)
+
+ # note: headers can only be set on the non-js (aka. rack-test) driver
+ using_session :session1 do
+ Capybara.page.driver.header(
+ 'User-Agent',
+ 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:58.0) Gecko/20100101 Firefox/58.0'
+ )
+
+ gitlab_sign_in(user)
+ end
+
+ # set an additional session on another device
+ using_session :session2 do
+ Capybara.page.driver.header(
+ 'User-Agent',
+ 'Mozilla/5.0 (iPhone; CPU iPhone OS 8_1_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Mobile/12B466 [FBDV/iPhone7,2]'
+ )
+
+ gitlab_sign_in(user)
+ end
+
+ # set an admin session impersonating the user
+ using_session :session3 do
+ Capybara.page.driver.header(
+ 'User-Agent',
+ 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.113 Safari/537.36'
+ )
+
+ gitlab_sign_in(admin)
+
+ visit admin_user_path(user)
+
+ click_link 'Impersonate'
+ end
+
+ using_session :session1 do
+ visit profile_active_sessions_path
+
+ expect(page).to(
+ have_selector('ul.list-group li.list-group-item', { text: 'Signed in on',
+ count: 2 }))
+
+ expect(page).to have_content(
+ '127.0.0.1 ' \
+ 'This is your current session ' \
+ 'Firefox on Ubuntu ' \
+ 'Signed in on 12 Mar 09:06'
+ )
+
+ expect(page).to have_selector '[title="Desktop"]', count: 1
+
+ expect(page).to have_content(
+ '127.0.0.1 ' \
+ 'Last accessed on 12 Mar 09:06 ' \
+ 'Mobile Safari on iOS ' \
+ 'Signed in on 12 Mar 09:06'
+ )
+
+ expect(page).to have_selector '[title="Smartphone"]', count: 1
+
+ expect(page).not_to have_content('Chrome on Windows')
+ end
end
end
- it 'User can revoke a session', :js, :redis_session_store do
+ it 'User can revoke a session', :js do
Capybara::Session.new(:session1)
Capybara::Session.new(:session2)
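The Timecop block above now wraps the whole multi-session walkthrough; a compact sketch of driving two named Capybara sessions with distinct User-Agent headers (header stubbing only works on the rack-test driver, and `gitlab_sign_in` is GitLab's own spec helper):

# Sketch: named sessions that each carry their own request headers.
it 'records sign-ins from two devices' do
  using_session :desktop do
    Capybara.page.driver.header('User-Agent', 'Firefox on Ubuntu (example)')
    gitlab_sign_in(user)
  end

  using_session :phone do
    Capybara.page.driver.header('User-Agent', 'Mobile Safari on iOS (example)')
    gitlab_sign_in(user)
  end
end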
diff --git a/spec/features/profiles/user_edit_profile_spec.rb b/spec/features/profiles/user_edit_profile_spec.rb
index 9839b3d6c80..171dfb353f0 100644
--- a/spec/features/profiles/user_edit_profile_spec.rb
+++ b/spec/features/profiles/user_edit_profile_spec.rb
@@ -15,6 +15,11 @@ describe 'User edit profile' do
wait_for_requests if respond_to?(:wait_for_requests)
end
+ def visit_user
+ visit user_path(user)
+ wait_for_requests
+ end
+
it 'changes user profile' do
fill_in 'user_skype', with: 'testskype'
fill_in 'user_linkedin', with: 'testlinkedin'
@@ -22,8 +27,8 @@ describe 'User edit profile' do
fill_in 'user_website_url', with: 'testurl'
fill_in 'user_location', with: 'Ukraine'
fill_in 'user_bio', with: 'I <3 GitLab'
+ fill_in 'user_job_title', with: 'Frontend Engineer'
fill_in 'user_organization', with: 'GitLab'
- select 'Data Analyst', from: 'user_role'
submit_settings
expect(user.reload).to have_attributes(
@@ -32,8 +37,8 @@ describe 'User edit profile' do
twitter: 'testtwitter',
website_url: 'testurl',
bio: 'I <3 GitLab',
- organization: 'GitLab',
- role: 'data_analyst'
+ job_title: 'Frontend Engineer',
+ organization: 'GitLab'
)
expect(find('#user_location').value).to eq 'Ukraine'
@@ -94,11 +99,6 @@ describe 'User edit profile' do
end
context 'user status', :js do
- def visit_user
- visit user_path(user)
- wait_for_requests
- end
-
def select_emoji(emoji_name, is_modal = false)
emoji_menu_class = is_modal ? '.js-modal-status-emoji-menu' : '.js-status-emoji-menu'
toggle_button = find('.js-toggle-emoji-menu')
@@ -381,4 +381,40 @@ describe 'User edit profile' do
end
end
end
+
+ context 'work information', :js do
+ context 'when job title and organization are entered' do
+ it "shows job title and organization on user's profile" do
+ fill_in 'user_job_title', with: 'Frontend Engineer'
+ fill_in 'user_organization', with: 'GitLab - work info test'
+ submit_settings
+
+ visit_user
+
+ expect(page).to have_content('Frontend Engineer at GitLab - work info test')
+ end
+ end
+
+ context 'when only job title is entered' do
+ it "shows only job title on user's profile" do
+ fill_in 'user_job_title', with: 'Frontend Engineer - work info test'
+ submit_settings
+
+ visit_user
+
+ expect(page).to have_content('Frontend Engineer - work info test')
+ end
+ end
+
+ context 'when only organization is entered' do
+ it "shows only organization on user's profile" do
+ fill_in 'user_organization', with: 'GitLab - work info test'
+ submit_settings
+
+ visit_user
+
+ expect(page).to have_content('GitLab - work info test')
+ end
+ end
+ end
end
diff --git a/spec/features/project_group_variables_spec.rb b/spec/features/project_group_variables_spec.rb
index c1f1c442937..242fc993718 100644
--- a/spec/features/project_group_variables_spec.rb
+++ b/spec/features/project_group_variables_spec.rb
@@ -24,6 +24,7 @@ describe 'Project group variables', :js do
sign_in(user)
project.add_maintainer(user)
group.add_owner(user)
+ stub_feature_flags(new_variables_ui: false)
end
it 'project in group shows inherited vars from ancestor group' do
diff --git a/spec/features/project_variables_spec.rb b/spec/features/project_variables_spec.rb
index 9e3f8a843a1..1452317c22b 100644
--- a/spec/features/project_variables_spec.rb
+++ b/spec/features/project_variables_spec.rb
@@ -12,7 +12,7 @@ describe 'Project variables', :js do
sign_in(user)
project.add_maintainer(user)
project.variables << variable
-
+ stub_feature_flags(new_variables_ui: false)
visit page_path
end
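Both variable specs above pin the legacy UI by turning the `new_variables_ui` flag off before visiting the page; the `stub_feature_flags` helper usage, sketched in isolation:

# Sketch: disable a feature flag for the duration of a spec.
before do
  stub_feature_flags(new_variables_ui: false) # boolean form toggles the flag for everyone
  visit page_path
end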
diff --git a/spec/features/projects/active_tabs_spec.rb b/spec/features/projects/active_tabs_spec.rb
index 41c0e583815..815cf3b9c58 100644
--- a/spec/features/projects/active_tabs_spec.rb
+++ b/spec/features/projects/active_tabs_spec.rb
@@ -7,8 +7,6 @@ describe 'Project active tab' do
let(:project) { create(:project, :repository) }
before do
- stub_feature_flags(analytics_pages_under_project_analytics_sidebar: { enabled: false, thing: project })
-
project.add_maintainer(user)
sign_in(user)
end
@@ -45,7 +43,7 @@ describe 'Project active tab' do
it_behaves_like 'page has active tab', 'Repository'
- %w(Files Commits Graph Compare Charts Branches Tags).each do |sub_menu|
+ %w(Files Commits Graph Compare Branches Tags).each do |sub_menu|
context "on project Repository/#{sub_menu}" do
before do
click_tab(sub_menu)
@@ -124,34 +122,23 @@ describe 'Project active tab' do
end
end
- context 'when `analytics_pages_under_project_analytics_sidebar` feature flag is enabled' do
+ context 'on project Analytics' do
before do
- stub_feature_flags(analytics_pages_under_project_analytics_sidebar: { enabled: true, thing: project })
+ visit charts_project_graph_path(project, 'master')
end
- context 'on project Analytics' do
- before do
- visit charts_project_graph_path(project, 'master')
- end
-
- context 'on project Analytics/Repository Analytics' do
- it_behaves_like 'page has active tab', _('Analytics')
- it_behaves_like 'page has active sub tab', _('Repository Analytics')
- end
+ context 'on project Analytics/Repository Analytics' do
+ it_behaves_like 'page has active tab', _('Analytics')
+ it_behaves_like 'page has active sub tab', _('Repository')
+ end
- context 'on project Analytics/Repository Analytics' do
- it_behaves_like 'page has active tab', _('Analytics')
- it_behaves_like 'page has active sub tab', _('Repository Analytics')
+ context 'on project Analytics/Cycle Analytics' do
+ before do
+ click_tab(_('CI / CD'))
end
- context 'on project Analytics/Cycle Analytics' do
- before do
- click_tab(_('CI / CD Analytics'))
- end
-
- it_behaves_like 'page has active tab', _('Analytics')
- it_behaves_like 'page has active sub tab', _('CI / CD Analytics')
- end
+ it_behaves_like 'page has active tab', _('Analytics')
+ it_behaves_like 'page has active sub tab', _('CI / CD')
end
end
end
diff --git a/spec/features/projects/artifacts/user_downloads_artifacts_spec.rb b/spec/features/projects/artifacts/user_downloads_artifacts_spec.rb
index fb70076fcf1..3cbf276c02d 100644
--- a/spec/features/projects/artifacts/user_downloads_artifacts_spec.rb
+++ b/spec/features/projects/artifacts/user_downloads_artifacts_spec.rb
@@ -3,9 +3,9 @@
require "spec_helper"
describe "User downloads artifacts" do
- set(:project) { create(:project, :repository, :public) }
- set(:pipeline) { create(:ci_empty_pipeline, status: :success, sha: project.commit.id, project: project) }
- set(:job) { create(:ci_build, :artifacts, :success, pipeline: pipeline) }
+ let_it_be(:project) { create(:project, :repository, :public) }
+ let_it_be(:pipeline) { create(:ci_empty_pipeline, status: :success, sha: project.commit.id, project: project) }
+ let_it_be(:job) { create(:ci_build, :artifacts, :success, pipeline: pipeline) }
shared_examples "downloading" do
it "downloads the zip" do
diff --git a/spec/features/projects/badges/pipeline_badge_spec.rb b/spec/features/projects/badges/pipeline_badge_spec.rb
index 5ddaf1e1591..b2f09a9d0b7 100644
--- a/spec/features/projects/badges/pipeline_badge_spec.rb
+++ b/spec/features/projects/badges/pipeline_badge_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe 'Pipeline Badge' do
- set(:project) { create(:project, :repository, :public) }
+ let_it_be(:project) { create(:project, :repository, :public) }
let(:ref) { project.default_branch }
context 'when the project has a pipeline' do
diff --git a/spec/features/projects/blobs/blob_show_spec.rb b/spec/features/projects/blobs/blob_show_spec.rb
index 0ff3e45c956..9fc70412975 100644
--- a/spec/features/projects/blobs/blob_show_spec.rb
+++ b/spec/features/projects/blobs/blob_show_spec.rb
@@ -308,6 +308,48 @@ describe 'File blob', :js do
end
end
+ context 'Jupyter Notebook file' do
+ before do
+ project.add_maintainer(project.creator)
+
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add Jupyter Notebook",
+ file_path: 'files/basic.ipynb',
+ file_content: project.repository.blob_at('add-ipython-files', 'files/ipython/basic.ipynb').data
+ ).execute
+
+ visit_blob('files/basic.ipynb')
+
+ wait_for_requests
+ end
+
+ it 'displays the blob' do
+ aggregate_failures do
+ # shows rendered notebook
+ expect(page).to have_selector('.js-notebook-viewer-mounted')
+
+ # does show a viewer switcher
+ expect(page).to have_selector('.js-blob-viewer-switcher')
+
+ # shows a disabled copy button
+ expect(page).to have_selector('.js-copy-blob-source-btn.disabled')
+
+ # shows a raw button
+ expect(page).to have_link('Open raw')
+
+ # shows a download button
+ expect(page).to have_link('Download')
+
+ # shows the rendered notebook
+ expect(page).to have_content('test')
+ end
+ end
+ end
+
context 'ISO file (stored in LFS)' do
context 'when LFS is enabled on the project' do
before do
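The notebook example above bundles its expectations in `aggregate_failures`, so one broken selector does not hide the rest; the RSpec feature in a minimal, standalone form:

# Sketch: aggregate_failures reports every failing expectation in the
# block instead of aborting at the first one.
it 'shows all blob affordances' do
  aggregate_failures do
    expect(page).to have_selector('.js-blob-viewer-switcher')
    expect(page).to have_link('Open raw')
    expect(page).to have_link('Download')
  end
end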
diff --git a/spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb b/spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb
new file mode 100644
index 00000000000..b23cea65b37
--- /dev/null
+++ b/spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'User follows pipeline suggest nudge spec when feature is enabled', :js do
+ let(:user) { create(:user, :admin) }
+ let(:project) { create(:project, :empty_repo) }
+
+ describe 'viewing the new blob page' do
+ before do
+ stub_feature_flags(suggest_pipeline: true)
+ sign_in(user)
+ end
+
+ context 'when the page is loaded from the link using the suggest_gitlab_ci_yml param' do
+ before do
+ visit namespace_project_new_blob_path(namespace_id: project.namespace, project_id: project, id: 'master', suggest_gitlab_ci_yml: 'true')
+ end
+
+ it 'pre-fills .gitlab-ci.yml for file name' do
+ file_name = page.find_by_id('file_name')
+
+ expect(file_name.value).to have_content('.gitlab-ci.yml')
+ end
+
+ it 'chooses the .gitlab-ci.yml Template Type' do
+ template_type = page.find(:css, '.template-type-selector .dropdown-toggle-text')
+
+ expect(template_type.text).to have_content('.gitlab-ci.yml')
+ end
+
+ it 'displays suggest_gitlab_ci_yml popover' do
+ popover_selector = '.suggest-gitlab-ci-yml'
+
+ expect(page).to have_css(popover_selector, visible: true)
+
+ page.within(popover_selector) do
+ expect(page).to have_content('1/2: Choose a template')
+ end
+ end
+ end
+
+ context 'when the page is visited without the param' do
+ before do
+ visit namespace_project_new_blob_path(namespace_id: project.namespace, project_id: project, id: 'master')
+ end
+
+ it 'does not pre-fill .gitlab-ci.yml for file name' do
+ file_name = page.find_by_id('file_name')
+
+ expect(file_name.value).not_to have_content('.gitlab-ci.yml')
+ end
+
+ it 'does not choose the .gitlab-ci.yml Template Type' do
+ template_type = page.find(:css, '.template-type-selector .dropdown-toggle-text')
+
+ expect(template_type.text).to have_content('Select a template type')
+ end
+
+ it 'does not display suggest_gitlab_ci_yml popover' do
+ popover_selector = '.b-popover.suggest-gitlab-ci-yml'
+
+ expect(page).not_to have_css(popover_selector, visible: true)
+ end
+ end
+ end
+end
diff --git a/spec/features/projects/branches/user_deletes_branch_spec.rb b/spec/features/projects/branches/user_deletes_branch_spec.rb
index ad63a75a149..184954c1c78 100644
--- a/spec/features/projects/branches/user_deletes_branch_spec.rb
+++ b/spec/features/projects/branches/user_deletes_branch_spec.rb
@@ -3,7 +3,7 @@
require "spec_helper"
describe "User deletes branch", :js do
- set(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
let(:project) { create(:project, :repository) }
before do
diff --git a/spec/features/projects/branches/user_views_branches_spec.rb b/spec/features/projects/branches/user_views_branches_spec.rb
index f3810611094..e127e784b94 100644
--- a/spec/features/projects/branches/user_views_branches_spec.rb
+++ b/spec/features/projects/branches/user_views_branches_spec.rb
@@ -3,8 +3,8 @@
require "spec_helper"
describe "User views branches" do
- set(:project) { create(:project, :repository) }
- set(:user) { project.owner }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { project.owner }
before do
sign_in(user)
@@ -23,7 +23,7 @@ describe "User views branches" do
end
context "protected branches" do
- set(:protected_branch) { create(:protected_branch, project: project) }
+ let_it_be(:protected_branch) { create(:protected_branch, project: project) }
before do
visit(project_protected_branches_path(project))
diff --git a/spec/features/projects/clusters/gcp_spec.rb b/spec/features/projects/clusters/gcp_spec.rb
index 0143461eadb..df786cf0818 100644
--- a/spec/features/projects/clusters/gcp_spec.rb
+++ b/spec/features/projects/clusters/gcp_spec.rb
@@ -119,7 +119,7 @@ describe 'Gcp Cluster', :js, :do_not_mock_admin_mode do
context 'when user disables the cluster' do
before do
page.find(:css, '.js-cluster-enable-toggle-area .js-project-feature-toggle').click
- page.within('#cluster-integration') { click_button 'Save changes' }
+ page.within('.js-cluster-integration-form') { click_button 'Save changes' }
end
it 'user sees the successful message' do
@@ -130,7 +130,7 @@ describe 'Gcp Cluster', :js, :do_not_mock_admin_mode do
context 'when user changes cluster parameters' do
before do
fill_in 'cluster_platform_kubernetes_attributes_namespace', with: 'my-namespace'
- page.within('#js-cluster-details') { click_button 'Save changes' }
+ page.within('.js-provider-details') { click_button 'Save changes' }
end
it 'user sees the successful message' do
@@ -141,6 +141,7 @@ describe 'Gcp Cluster', :js, :do_not_mock_admin_mode do
context 'when user destroys the cluster' do
before do
+ click_link 'Advanced Settings'
click_button 'Remove integration and resources'
fill_in 'confirm_cluster_name_input', with: cluster.name
click_button 'Remove integration'
@@ -200,7 +201,7 @@ describe 'Gcp Cluster', :js, :do_not_mock_admin_mode do
end
end
- context 'when third party offers are disabled' do
+ context 'when third party offers are disabled', :clean_gitlab_redis_shared_state do
let(:admin) { create(:admin) }
before do
diff --git a/spec/features/projects/clusters/user_spec.rb b/spec/features/projects/clusters/user_spec.rb
index 38efcf758e1..79676927fa2 100644
--- a/spec/features/projects/clusters/user_spec.rb
+++ b/spec/features/projects/clusters/user_spec.rb
@@ -41,7 +41,7 @@ describe 'User Cluster', :js do
it 'user sees a cluster details page' do
subject
- expect(page).to have_content('Kubernetes cluster integration')
+ expect(page).to have_content('GitLab Integration')
expect(page.find_field('cluster[name]').value).to eq('dev-cluster')
expect(page.find_field('cluster[platform_kubernetes_attributes][api_url]').value)
.to have_content('http://example.com')
@@ -79,7 +79,7 @@ describe 'User Cluster', :js do
context 'when user disables the cluster' do
before do
page.find(:css, '.js-cluster-enable-toggle-area .js-project-feature-toggle').click
- page.within('#cluster-integration') { click_button 'Save changes' }
+ page.within('.js-cluster-integration-form') { click_button 'Save changes' }
end
it 'user sees the successful message' do
@@ -91,7 +91,7 @@ describe 'User Cluster', :js do
before do
fill_in 'cluster_name', with: 'my-dev-cluster'
fill_in 'cluster_platform_kubernetes_attributes_namespace', with: 'my-namespace'
- page.within('#js-cluster-details') { click_button 'Save changes' }
+ page.within('.js-provider-details') { click_button 'Save changes' }
end
it 'user sees the successful message' do
@@ -103,6 +103,7 @@ describe 'User Cluster', :js do
context 'when user destroys the cluster' do
before do
+ click_link 'Advanced Settings'
click_button 'Remove integration and resources'
fill_in 'confirm_cluster_name_input', with: cluster.name
click_button 'Remove integration'
diff --git a/spec/features/projects/clusters_spec.rb b/spec/features/projects/clusters_spec.rb
index ad51533c42c..fc2de4df5ec 100644
--- a/spec/features/projects/clusters_spec.rb
+++ b/spec/features/projects/clusters_spec.rb
@@ -11,6 +11,7 @@ describe 'Clusters', :js do
before do
project.add_maintainer(user)
gitlab_sign_in(user)
+ stub_feature_flags(clusters_list_redesign: false)
end
context 'when user does not have a cluster and visits cluster index page' do
diff --git a/spec/features/projects/commit/user_views_user_status_on_commit_spec.rb b/spec/features/projects/commit/user_views_user_status_on_commit_spec.rb
index e78b7f7ae08..c07f6081d2c 100644
--- a/spec/features/projects/commit/user_views_user_status_on_commit_spec.rb
+++ b/spec/features/projects/commit/user_views_user_status_on_commit_spec.rb
@@ -5,8 +5,8 @@ require 'spec_helper'
describe 'Project > Commit > View user status' do
include RepoHelpers
- set(:project) { create(:project, :repository) }
- set(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
let(:commit_author) { create(:user, email: sample_commit.author_email) }
before do
diff --git a/spec/features/projects/container_registry_spec.rb b/spec/features/projects/container_registry_spec.rb
new file mode 100644
index 00000000000..de17d831fbd
--- /dev/null
+++ b/spec/features/projects/container_registry_spec.rb
@@ -0,0 +1,168 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Container Registry', :js do
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
+
+ let(:container_repository) do
+ create(:container_repository, name: 'my/image')
+ end
+
+ before do
+ sign_in(user)
+ project.add_developer(user)
+ stub_container_registry_config(enabled: true)
+ stub_container_registry_tags(repository: :any, tags: [])
+ end
+
+ describe 'Registry explorer is off' do
+ before do
+ stub_feature_flags(vue_container_registry_explorer: { enabled: false, thing: project.group })
+ end
+
+ it 'has a page title set' do
+ visit_container_registry
+
+ expect(page).to have_title _('Container Registry')
+ end
+
+ context 'when there are no image repositories' do
+ it 'user visits container registry main page' do
+ visit_container_registry
+
+ expect(page).to have_content _('no container images')
+ end
+ end
+
+ context 'when there are image repositories' do
+ before do
+ stub_container_registry_tags(repository: %r{my/image}, tags: %w[latest], with_manifest: true)
+ project.container_repositories << container_repository
+ end
+
+ it 'user wants to see multi-level container repository' do
+ visit_container_registry
+
+ expect(page).to have_content 'my/image'
+ end
+
+ it 'user removes entire container repository', :sidekiq_might_not_need_inline do
+ visit_container_registry
+
+ expect_any_instance_of(ContainerRepository).to receive(:delete_tags!).and_return(true)
+
+ click_on(class: 'js-remove-repo')
+ expect(find('.modal .modal-title')).to have_content _('Remove repository')
+ find('.modal .modal-footer .btn-danger').click
+ end
+
+ it 'user removes a specific tag from container repository' do
+ visit_container_registry
+
+ find('.js-toggle-repo').click
+ wait_for_requests
+
+ service = double('service')
+ expect(service).to receive(:execute).with(container_repository) { { status: :success } }
+ expect(Projects::ContainerRepository::DeleteTagsService).to receive(:new).with(container_repository.project, user, tags: ['latest']) { service }
+
+ click_on(class: 'js-delete-registry-row', visible: false)
+ expect(find('.modal .modal-title')).to have_content _('Remove tag')
+ find('.modal .modal-footer .btn-danger').click
+ end
+ end
+ end
+
+ describe 'Registry explorer is on' do
+ it 'has a page title set' do
+ visit_container_registry
+
+ expect(page).to have_title _('Container Registry')
+ end
+
+ context 'when there are no image repositories' do
+ it 'list page has no container title' do
+ visit_container_registry
+
+ expect(page).to have_content _('There are no container images stored for this project')
+ end
+
+ it 'list page has quickstart' do
+ visit_container_registry
+
+ expect(page).to have_content _('Quick Start')
+ end
+ end
+
+ context 'when there are image repositories' do
+ before do
+ stub_container_registry_tags(repository: %r{my/image}, tags: %w[latest], with_manifest: true)
+ project.container_repositories << container_repository
+ end
+
+ it 'list page has a list of images' do
+ visit_container_registry
+
+ expect(page).to have_content 'my/image'
+ end
+
+ it 'user removes entire container repository', :sidekiq_might_not_need_inline do
+ visit_container_registry
+
+ expect_any_instance_of(ContainerRepository).to receive(:delete_tags!).and_return(true)
+
+ find('[title="Remove repository"]').click
+ expect(find('.modal .modal-title')).to have_content _('Remove repository')
+ find('.modal .modal-footer .btn-danger').click
+ end
+
+ it 'navigates to repo details' do
+ visit_container_registry_details('my/image')
+
+ expect(page).to have_content 'latest'
+ end
+
+ describe 'image repo details' do
+ before do
+ stub_container_registry_tags(repository: %r{my/image}, tags: ('1'..'20').to_a, with_manifest: true)
+ visit_container_registry_details 'my/image'
+ end
+
+ it 'shows the details breadcrumb' do
+ expect(find('.breadcrumbs')).to have_link 'my/image'
+ end
+
+ it 'shows the image title' do
+ expect(page).to have_content 'my/image tags'
+ end
+
+ it 'user removes a specific tag from container repository' do
+ service = double('service')
+ expect(service).to receive(:execute).with(container_repository) { { status: :success } }
+ expect(Projects::ContainerRepository::DeleteTagsService).to receive(:new).with(container_repository.project, user, tags: ['1']) { service }
+
+ first('.js-delete-registry').click
+ expect(find('.modal .modal-title')).to have_content _('Remove tag')
+ find('.modal .modal-footer .btn-danger').click
+ end
+
+ it('pagination navigates to the second page') do
+ pagination = find('.gl-pagination')
+ pagination.click_link('2')
+ expect(page).to have_content '20'
+ end
+ end
+ end
+ end
+
+ def visit_container_registry
+ visit project_container_registry_index_path(project)
+ end
+
+ def visit_container_registry_details(name)
+ visit_container_registry
+ click_link(name)
+ end
+end
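The tag-removal examples in the new spec above verify wiring rather than behaviour: the delete service is replaced with a double, and only its construction and `execute` call are asserted. The same collaborator-stubbing pattern, trimmed to its core (class name and arguments come from the diff; the click targets are illustrative):

# Sketch: assert the page delegates deletion to the service without
# actually deleting anything.
it 'delegates tag removal to the delete service' do
  service = double('service')
  expect(service).to receive(:execute).with(container_repository) { { status: :success } }
  expect(Projects::ContainerRepository::DeleteTagsService)
    .to receive(:new).with(container_repository.project, user, tags: ['latest']) { service }

  first('.js-delete-registry').click
  find('.modal .modal-footer .btn-danger').click
end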
diff --git a/spec/features/projects/deploy_keys_spec.rb b/spec/features/projects/deploy_keys_spec.rb
index 0f3e7646673..03b7c54faf6 100644
--- a/spec/features/projects/deploy_keys_spec.rb
+++ b/spec/features/projects/deploy_keys_spec.rb
@@ -17,9 +17,9 @@ describe 'Project deploy keys', :js do
end
it 'removes association between project and deploy key' do
- visit project_settings_repository_path(project)
+ visit project_settings_ci_cd_path(project)
- page.within(find('.deploy-keys')) do
+ page.within(find('.qa-deploy-keys-settings')) do
expect(page).to have_selector('.deploy-key', count: 1)
accept_confirm { find('.ic-remove').click }
diff --git a/spec/features/projects/environments/environments_spec.rb b/spec/features/projects/environments/environments_spec.rb
index 12412e87a7b..d7f12411a93 100644
--- a/spec/features/projects/environments/environments_spec.rb
+++ b/spec/features/projects/environments/environments_spec.rb
@@ -37,6 +37,7 @@ describe 'Environments page', :js do
expect(page).to have_css('.environments-container')
expect(page.all('.environment-name').length).to eq(1)
+ expect(page.all('.ic-stop').length).to eq(1)
end
end
@@ -105,6 +106,7 @@ describe 'Environments page', :js do
expect(page).to have_css('.environments-container')
expect(page.all('.environment-name').length).to eq(1)
+ expect(page.all('.ic-stop').length).to eq(0)
end
end
end
diff --git a/spec/features/projects/labels/user_creates_labels_spec.rb b/spec/features/projects/labels/user_creates_labels_spec.rb
index 257e064ae3d..180cd8eff14 100644
--- a/spec/features/projects/labels/user_creates_labels_spec.rb
+++ b/spec/features/projects/labels/user_creates_labels_spec.rb
@@ -3,8 +3,8 @@
require "spec_helper"
describe "User creates labels" do
- set(:project) { create(:project_empty_repo, :public) }
- set(:user) { create(:user) }
+ let_it_be(:project) { create(:project_empty_repo, :public) }
+ let_it_be(:user) { create(:user) }
shared_examples_for "label creation" do
it "creates new label" do
@@ -66,7 +66,7 @@ describe "User creates labels" do
end
context "in another project" do
- set(:another_project) { create(:project_empty_repo, :public) }
+ let_it_be(:another_project) { create(:project_empty_repo, :public) }
before do
create(:label, project: project, title: "bug") # Create label for `project` (not `another_project`) project.
diff --git a/spec/features/projects/labels/user_edits_labels_spec.rb b/spec/features/projects/labels/user_edits_labels_spec.rb
index da33ae3af3a..add959ccda6 100644
--- a/spec/features/projects/labels/user_edits_labels_spec.rb
+++ b/spec/features/projects/labels/user_edits_labels_spec.rb
@@ -3,9 +3,9 @@
require "spec_helper"
describe "User edits labels" do
- set(:project) { create(:project_empty_repo, :public) }
- set(:label) { create(:label, project: project) }
- set(:user) { create(:user) }
+ let_it_be(:project) { create(:project_empty_repo, :public) }
+ let_it_be(:label) { create(:label, project: project) }
+ let_it_be(:user) { create(:user) }
before do
project.add_maintainer(user)
diff --git a/spec/features/projects/labels/user_promotes_label_spec.rb b/spec/features/projects/labels/user_promotes_label_spec.rb
index fdecafd4c50..cf7320d3cf9 100644
--- a/spec/features/projects/labels/user_promotes_label_spec.rb
+++ b/spec/features/projects/labels/user_promotes_label_spec.rb
@@ -3,10 +3,10 @@
require 'spec_helper'
describe 'User promotes label' do
- set(:group) { create(:group) }
- set(:user) { create(:user) }
- set(:project) { create(:project, namespace: group) }
- set(:label) { create(:label, project: project) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, namespace: group) }
+ let_it_be(:label) { create(:label, project: project) }
context 'when user can admin group labels' do
before do
diff --git a/spec/features/projects/labels/user_sees_links_to_issuables_spec.rb b/spec/features/projects/labels/user_sees_links_to_issuables_spec.rb
index 7a9b9e6eac2..f60e7e9703f 100644
--- a/spec/features/projects/labels/user_sees_links_to_issuables_spec.rb
+++ b/spec/features/projects/labels/user_sees_links_to_issuables_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe 'Projects > Labels > User sees links to issuables' do
- set(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
before do
label # creates the label
@@ -50,7 +50,7 @@ describe 'Projects > Labels > User sees links to issuables' do
end
context 'with a group label' do
- set(:group) { create(:group) }
+ let_it_be(:group) { create(:group) }
let(:label) { create(:group_label, group: group, title: 'bug') }
context 'when merge requests and issues are enabled for the project' do
diff --git a/spec/features/projects/labels/user_views_labels_spec.rb b/spec/features/projects/labels/user_views_labels_spec.rb
index a6f7968c535..7f70ac903d6 100644
--- a/spec/features/projects/labels/user_views_labels_spec.rb
+++ b/spec/features/projects/labels/user_views_labels_spec.rb
@@ -3,9 +3,8 @@
require "spec_helper"
describe "User views labels" do
- set(:project) { create(:project_empty_repo, :public) }
- set(:user) { create(:user) }
-
+ let_it_be(:project) { create(:project_empty_repo, :public) }
+ let_it_be(:user) { create(:user) }
let(:label_titles) { %w[bug enhancement feature] }
let!(:prioritized_label) { create(:label, project: project, title: 'prioritized-label-name', priority: 1) }
diff --git a/spec/features/projects/navbar_spec.rb b/spec/features/projects/navbar_spec.rb
index bcb05e1c718..2b8dfc4a5fa 100644
--- a/spec/features/projects/navbar_spec.rb
+++ b/spec/features/projects/navbar_spec.rb
@@ -3,102 +3,144 @@
require 'spec_helper'
describe 'Project navbar' do
- it_behaves_like 'verified navigation bar' do
- let(:user) { create(:user) }
- let(:project) { create(:project, :repository) }
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :repository) }
- let(:structure) do
- [
- {
- nav_item: _('Project overview'),
- nav_sub_items: [
- _('Details'),
- _('Activity'),
- _('Releases')
- ]
- },
- {
- nav_item: _('Repository'),
- nav_sub_items: [
- _('Files'),
- _('Commits'),
- _('Branches'),
- _('Tags'),
- _('Contributors'),
- _('Graph'),
- _('Compare'),
- (_('Locked Files') if Gitlab.ee?)
- ]
- },
- {
- nav_item: _('Issues'),
- nav_sub_items: [
- _('List'),
- _('Boards'),
- _('Labels'),
- _('Milestones')
- ]
- },
- {
- nav_item: _('Merge Requests'),
- nav_sub_items: []
- },
- {
- nav_item: _('CI / CD'),
- nav_sub_items: [
- _('Pipelines'),
- _('Jobs'),
- _('Artifacts'),
- _('Schedules')
- ]
- },
- {
- nav_item: _('Operations'),
- nav_sub_items: [
- _('Metrics'),
- _('Environments'),
- _('Error Tracking'),
- _('Serverless'),
- _('Kubernetes')
- ]
- },
- {
- nav_item: _('Analytics'),
- nav_sub_items: [
- _('CI / CD Analytics'),
- (_('Code Review') if Gitlab.ee?),
- _('Repository Analytics'),
- _('Value Stream Analytics')
- ]
- },
- {
- nav_item: _('Wiki'),
- nav_sub_items: []
- },
- {
- nav_item: _('Snippets'),
- nav_sub_items: []
- },
- {
- nav_item: _('Settings'),
- nav_sub_items: [
- _('General'),
- _('Members'),
- _('Integrations'),
- _('Repository'),
- _('CI / CD'),
- _('Operations'),
- (_('Audit Events') if Gitlab.ee?)
- ].compact
- }
+ let(:analytics_nav_item) do
+ {
+ nav_item: _('Analytics'),
+ nav_sub_items: [
+ _('CI / CD'),
+ (_('Code Review') if Gitlab.ee?),
+ _('Repository'),
+ _('Value Stream')
]
- end
+ }
+ end
- before do
- project.add_maintainer(user)
- sign_in(user)
+ let(:requirements_nav_item) do
+ {
+ nav_item: _('Requirements'),
+ nav_sub_items: [_('List')]
+ }
+ end
+ let(:structure) do
+ [
+ {
+ nav_item: _('Project overview'),
+ nav_sub_items: [
+ _('Details'),
+ _('Activity'),
+ _('Releases')
+ ]
+ },
+ {
+ nav_item: _('Repository'),
+ nav_sub_items: [
+ _('Files'),
+ _('Commits'),
+ _('Branches'),
+ _('Tags'),
+ _('Contributors'),
+ _('Graph'),
+ _('Compare'),
+ (_('Locked Files') if Gitlab.ee?)
+ ]
+ },
+ {
+ nav_item: _('Issues'),
+ nav_sub_items: [
+ _('List'),
+ _('Boards'),
+ _('Labels'),
+ _('Milestones')
+ ]
+ },
+ {
+ nav_item: _('Merge Requests'),
+ nav_sub_items: []
+ },
+ (requirements_nav_item if Gitlab.ee?),
+ {
+ nav_item: _('CI / CD'),
+ nav_sub_items: [
+ _('Pipelines'),
+ _('Jobs'),
+ _('Artifacts'),
+ _('Schedules')
+ ]
+ },
+ {
+ nav_item: _('Operations'),
+ nav_sub_items: [
+ _('Metrics'),
+ _('Environments'),
+ _('Error Tracking'),
+ _('Serverless'),
+ _('Logs'),
+ _('Kubernetes')
+ ]
+ },
+ analytics_nav_item,
+ {
+ nav_item: _('Wiki'),
+ nav_sub_items: []
+ },
+ {
+ nav_item: _('Snippets'),
+ nav_sub_items: []
+ },
+ {
+ nav_item: _('Settings'),
+ nav_sub_items: [
+ _('General'),
+ _('Members'),
+ _('Integrations'),
+ _('Webhooks'),
+ _('Repository'),
+ _('CI / CD'),
+ _('Operations'),
+ (_('Audit Events') if Gitlab.ee?)
+ ].compact
+ }
+ ]
+ end
+
+ before do
+ stub_licensed_features(requirements: false)
+ project.add_maintainer(user)
+ sign_in(user)
+ end
+
+ it_behaves_like 'verified navigation bar' do
+ before do
visit project_path(project)
end
end
+
+ if Gitlab.ee?
+ context 'when issues analytics is available' do
+ before do
+ stub_licensed_features(issues_analytics: true)
+
+ analytics_nav_item[:nav_sub_items] << _('Issues')
+ analytics_nav_item[:nav_sub_items].sort!
+
+ visit project_path(project)
+ end
+
+ it_behaves_like 'verified navigation bar'
+ end
+
+ context 'when requirements is available' do
+ before do
+ stub_licensed_features(requirements: true)
+
+ visit project_path(project)
+ end
+
+ it_behaves_like 'verified navigation bar'
+ end
+ end
end
diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb
index 63c0695fe95..561c0552007 100644
--- a/spec/features/projects/pipelines/pipeline_spec.rb
+++ b/spec/features/projects/pipelines/pipeline_spec.rb
@@ -356,7 +356,7 @@ describe 'Pipeline', :js do
end
end
- context 'test tabs' do
+ describe 'test tabs' do
let(:pipeline) { create(:ci_pipeline, :with_test_reports, project: project) }
before do
@@ -364,21 +364,31 @@ describe 'Pipeline', :js do
wait_for_requests
end
- it 'shows badge counter in Tests tab' do
- expect(pipeline.test_reports.total_count).to eq(4)
- expect(page.find('.js-test-report-badge-counter').text).to eq(pipeline.test_reports.total_count.to_s)
- end
+ context 'with test reports' do
+ it 'shows badge counter in Tests tab' do
+ expect(pipeline.test_reports.total_count).to eq(4)
+ expect(page.find('.js-test-report-badge-counter').text).to eq(pipeline.test_reports.total_count.to_s)
+ end
+
+ it 'does not call test_report.json endpoint by default', :js do
+ expect(page).to have_selector('.js-no-tests-to-show', visible: :all)
+ end
+
+ it 'does call test_report.json endpoint when tab is selected', :js do
+ find('.js-tests-tab-link').click
+ wait_for_requests
- it 'does not call test_report.json endpoint by default', :js do
- expect(page).to have_selector('.js-no-tests-to-show', visible: :all)
+ expect(page).to have_content('Test suites')
+ expect(page).to have_selector('.js-tests-detail', visible: :all)
+ end
end
- it 'does call test_report.json endpoint when tab is selected', :js do
- find('.js-tests-tab-link').click
- wait_for_requests
+ context 'without test reports' do
+ let(:pipeline) { create(:ci_pipeline, project: project) }
- expect(page).to have_content('Test suites')
- expect(page).to have_selector('.js-tests-detail', visible: :all)
+ it 'shows nothing' do
+ expect(page.find('.js-test-report-badge-counter', visible: :all).text).to eq("")
+ end
end
end
@@ -1077,8 +1087,6 @@ describe 'Pipeline', :js do
end
context 'when pipeline has configuration errors' do
- include_context 'pipeline builds'
-
let(:pipeline) do
create(:ci_pipeline,
:invalid,
@@ -1119,6 +1127,10 @@ describe 'Pipeline', :js do
%Q{span[title="#{pipeline.present.failure_reason}"]})
end
end
+
+ it 'contains a pipeline header with title' do
+ expect(page).to have_content "Pipeline ##{pipeline.id}"
+ end
end
context 'when pipeline is stuck' do
diff --git a/spec/features/projects/releases/user_views_edit_release_spec.rb b/spec/features/projects/releases/user_views_edit_release_spec.rb
new file mode 100644
index 00000000000..820e8277af3
--- /dev/null
+++ b/spec/features/projects/releases/user_views_edit_release_spec.rb
@@ -0,0 +1,112 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'User edits Release', :js do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:release) { create(:release, project: project, name: 'The first release' ) }
+ let_it_be(:user) { create(:user) }
+ let(:show_feature_flag) { true }
+
+ before do
+ stub_feature_flags(release_show_page: show_feature_flag)
+
+ project.add_developer(user)
+
+ gitlab_sign_in(user)
+
+ visit edit_project_release_path(project, release)
+
+ wait_for_requests
+ end
+
+ def fill_out_form_and_click(button_to_click)
+ fill_in 'Release title', with: 'Updated Release title'
+ fill_in 'Release notes', with: 'Updated Release notes'
+
+ click_link_or_button button_to_click
+
+ wait_for_all_requests
+ end
+
+ it 'renders the breadcrumbs' do
+ within('.breadcrumbs') do
+ expect(page).to have_content("#{project.creator.name} #{project.name} Edit Release")
+
+ expect(page).to have_link(project.creator.name, href: user_path(project.creator))
+ expect(page).to have_link(project.name, href: project_path(project))
+ expect(page).to have_link('Edit Release', href: edit_project_release_path(project, release))
+ end
+ end
+
+ it 'renders the edit Release form' do
+ expect(page).to have_content('Releases are based on Git tags. We recommend naming tags that fit within semantic versioning, for example v1.0, v2.0-pre.')
+
+ expect(find_field('Tag name', { disabled: true }).value).to eq(release.tag)
+ expect(find_field('Release title').value).to eq(release.name)
+ expect(find_field('Release notes').value).to eq(release.description)
+
+ expect(page).to have_button('Save changes')
+ expect(page).to have_link('Cancel')
+ end
+
+ it 'does not update the Release when "Cancel" is clicked' do
+ original_name = release.name
+ original_description = release.description
+
+ fill_out_form_and_click 'Cancel'
+
+ release.reload
+
+ expect(release.name).to eq(original_name)
+ expect(release.description).to eq(original_description)
+ end
+
+ it 'updates the Release when "Save changes" is clicked' do
+ fill_out_form_and_click 'Save changes'
+
+ release.reload
+
+ expect(release.name).to eq('Updated Release title')
+ expect(release.description).to eq('Updated Release notes')
+ end
+
+ context 'when the release_show_page feature flag is disabled' do
+ let(:show_feature_flag) { false }
+
+ it 'redirects to the main Releases page when "Cancel" is clicked' do
+ fill_out_form_and_click 'Cancel'
+
+ expect(page).to have_current_path(project_releases_path(project))
+ end
+
+ it 'redirects to the main Releases page when "Save changes" is clicked' do
+ fill_out_form_and_click 'Save changes'
+
+ expect(page).to have_current_path(project_releases_path(project))
+ end
+ end
+
+ context 'when the release_show_page feature flag is enabled' do
+ it 'redirects to the previous page when "Cancel" is clicked when the url includes a back_url query parameter' do
+ back_path = project_releases_path(project, params: { page: 2 })
+ visit edit_project_release_path(project, release, params: { back_url: back_path })
+
+ fill_out_form_and_click 'Cancel'
+
+ expect(page).to have_current_path(back_path)
+ end
+
+ it 'redirects to the main Releases page when "Cancel" is clicked when the url does not include a back_url query parameter' do
+ fill_out_form_and_click 'Cancel'
+
+ expect(page).to have_current_path(project_releases_path(project))
+ end
+
+ it 'redirects to the dedicated Release page when "Save changes" is clicked' do
+ fill_out_form_and_click 'Save changes'
+
+ expect(page).to have_current_path(project_release_path(project, release))
+ end
+ end
+end
diff --git a/spec/features/projects/releases/user_views_release_spec.rb b/spec/features/projects/releases/user_views_release_spec.rb
new file mode 100644
index 00000000000..6120acb4f1f
--- /dev/null
+++ b/spec/features/projects/releases/user_views_release_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'User views Release', :js do
+ let(:project) { create(:project, :repository) }
+ let(:release) { create(:release, project: project, name: 'The first release' ) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_developer(user)
+
+ gitlab_sign_in(user)
+
+ visit project_release_path(project, release)
+ end
+
+ it 'renders the breadcrumbs' do
+ within('.breadcrumbs') do
+ expect(page).to have_content("#{project.creator.name} #{project.name} Releases #{release.name}")
+
+ expect(page).to have_link(project.creator.name, href: user_path(project.creator))
+ expect(page).to have_link(project.name, href: project_path(project))
+ expect(page).to have_link('Releases', href: project_releases_path(project))
+ expect(page).to have_link(release.name, href: project_release_path(project, release))
+ end
+ end
+
+ it 'renders the release details' do
+ within('.release-block') do
+ expect(page).to have_content(release.name)
+ expect(page).to have_content(release.tag)
+ expect(page).to have_content(release.commit.short_id)
+ expect(page).to have_content(release.description)
+ end
+ end
+end
diff --git a/spec/features/projects/releases/user_views_releases_spec.rb b/spec/features/projects/releases/user_views_releases_spec.rb
index 4507d90576b..a4ba81ffeb9 100644
--- a/spec/features/projects/releases/user_views_releases_spec.rb
+++ b/spec/features/projects/releases/user_views_releases_spec.rb
@@ -24,16 +24,31 @@ describe 'User views releases', :js do
context 'when there is a link as an asset' do
let!(:release_link) { create(:release_link, release: release, url: url ) }
let(:url) { "#{project.web_url}/-/jobs/1/artifacts/download" }
+ let(:direct_asset_link) { Gitlab::Routing.url_helpers.project_release_url(project, release) << release_link.filepath }
it 'sees the link' do
visit project_releases_path(project)
page.within('.js-assets-list') do
- expect(page).to have_link release_link.name, href: release_link.url
+ expect(page).to have_link release_link.name, href: direct_asset_link
expect(page).not_to have_content('(external source)')
end
end
+ context 'when there is a link redirect' do
+ let!(:release_link) { create(:release_link, release: release, name: 'linux-amd64 binaries', filepath: '/binaries/linux-amd64', url: url) }
+ let(:url) { "#{project.web_url}/-/jobs/1/artifacts/download" }
+
+ it 'sees the link' do
+ visit project_releases_path(project)
+
+ page.within('.js-assets-list') do
+ expect(page).to have_link release_link.name, href: direct_asset_link
+ expect(page).not_to have_content('(external source)')
+ end
+ end
+ end
+
context 'when url points to external resource' do
let(:url) { 'http://google.com/download' }
diff --git a/spec/features/projects/services/user_views_services_spec.rb b/spec/features/projects/services/user_views_services_spec.rb
index d9358a40602..cf403a131b0 100644
--- a/spec/features/projects/services/user_views_services_spec.rb
+++ b/spec/features/projects/services/user_views_services_spec.rb
@@ -14,7 +14,7 @@ describe 'User views services' do
end
it 'shows the list of available services' do
- expect(page).to have_content('Project services')
+ expect(page).to have_content('Integrations')
expect(page).to have_content('Campfire')
expect(page).to have_content('HipChat')
expect(page).to have_content('Assembla')
diff --git a/spec/features/projects/settings/ci_cd_settings_spec.rb b/spec/features/projects/settings/ci_cd_settings_spec.rb
new file mode 100644
index 00000000000..8b9b1ac00c3
--- /dev/null
+++ b/spec/features/projects/settings/ci_cd_settings_spec.rb
@@ -0,0 +1,112 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Projects > Settings > CI / CD settings' do
+ let_it_be(:project) { create(:project_empty_repo) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:role) { :maintainer }
+
+ context 'Deploy tokens' do
+ let!(:deploy_token) { create(:deploy_token, projects: [project]) }
+
+ before do
+ project.add_role(user, role)
+ sign_in(user)
+ stub_container_registry_config(enabled: true)
+ visit project_settings_ci_cd_path(project)
+ end
+
+ it_behaves_like 'a deploy token in ci/cd settings' do
+ let(:entity_type) { 'project' }
+ end
+ end
+
+ context 'Deploy Keys', :js do
+ let_it_be(:private_deploy_key) { create(:deploy_key, title: 'private_deploy_key', public: false) }
+ let_it_be(:public_deploy_key) { create(:another_deploy_key, title: 'public_deploy_key', public: true) }
+ let(:new_ssh_key) { attributes_for(:key)[:key] }
+
+ before do
+ project.add_role(user, role)
+ sign_in(user)
+ end
+
+ it 'get list of keys' do
+ project.deploy_keys << private_deploy_key
+ project.deploy_keys << public_deploy_key
+
+ visit project_settings_ci_cd_path(project)
+
+ expect(page).to have_content('private_deploy_key')
+ expect(page).to have_content('public_deploy_key')
+ end
+
+ it 'add a new deploy key' do
+ visit project_settings_ci_cd_path(project)
+
+ fill_in 'deploy_key_title', with: 'new_deploy_key'
+ fill_in 'deploy_key_key', with: new_ssh_key
+ check 'deploy_key_deploy_keys_projects_attributes_0_can_push'
+ click_button 'Add key'
+
+ expect(page).to have_content('new_deploy_key')
+ expect(page).to have_content('Write access allowed')
+ end
+
+ it 'edit an existing deploy key' do
+ project.deploy_keys << private_deploy_key
+ visit project_settings_ci_cd_path(project)
+
+ find('.deploy-key', text: private_deploy_key.title).find('.ic-pencil').click
+
+ fill_in 'deploy_key_title', with: 'updated_deploy_key'
+ check 'deploy_key_deploy_keys_projects_attributes_0_can_push'
+ click_button 'Save changes'
+
+ expect(page).to have_content('updated_deploy_key')
+ expect(page).to have_content('Write access allowed')
+ end
+
+ it 'edit an existing public deploy key to be writable' do
+ project.deploy_keys << public_deploy_key
+ visit project_settings_ci_cd_path(project)
+
+ find('.deploy-key', text: public_deploy_key.title).find('.ic-pencil').click
+
+ check 'deploy_key_deploy_keys_projects_attributes_0_can_push'
+ click_button 'Save changes'
+
+ expect(page).to have_content('public_deploy_key')
+ expect(page).to have_content('Write access allowed')
+ end
+
+ it 'edit a deploy key from projects user has access to' do
+ project2 = create(:project_empty_repo)
+ project2.add_role(user, role)
+ project2.deploy_keys << private_deploy_key
+
+ visit project_settings_ci_cd_path(project)
+
+ find('.js-deployKeys-tab-available_project_keys').click
+
+ find('.deploy-key', text: private_deploy_key.title).find('.ic-pencil').click
+
+ fill_in 'deploy_key_title', with: 'updated_deploy_key'
+ click_button 'Save changes'
+
+ find('.js-deployKeys-tab-available_project_keys').click
+
+ expect(page).to have_content('updated_deploy_key')
+ end
+
+ it 'remove an existing deploy key' do
+ project.deploy_keys << private_deploy_key
+ visit project_settings_ci_cd_path(project)
+
+ accept_confirm { find('.deploy-key', text: private_deploy_key.title).find('.ic-remove').click }
+
+ expect(page).not_to have_content(private_deploy_key.title)
+ end
+ end
+end
diff --git a/spec/features/projects/settings/integration_settings_spec.rb b/spec/features/projects/settings/integration_settings_spec.rb
deleted file mode 100644
index de987b879eb..00000000000
--- a/spec/features/projects/settings/integration_settings_spec.rb
+++ /dev/null
@@ -1,144 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe 'Projects > Settings > Integration settings' do
- let(:project) { create(:project) }
- let(:user) { create(:user) }
- let(:role) { :developer }
- let(:integrations_path) { project_settings_integrations_path(project) }
-
- before do
- sign_in(user)
- project.add_role(user, role)
- end
-
- context 'for developer' do
- let(:role) { :developer }
-
- it 'to be disallowed to view' do
- visit integrations_path
-
- expect(page.status_code).to eq(404)
- end
- end
-
- context 'for maintainer' do
- let(:role) { :maintainer }
-
- context 'Webhooks' do
- let(:hook) { create(:project_hook, :all_events_enabled, enable_ssl_verification: true, project: project) }
- let(:url) { generate(:url) }
-
- it 'show list of webhooks' do
- hook
-
- visit integrations_path
-
- expect(page.status_code).to eq(200)
- expect(page).to have_content(hook.url)
- expect(page).to have_content('SSL Verification: enabled')
- expect(page).to have_content('Push events')
- expect(page).to have_content('Tag push events')
- expect(page).to have_content('Issues events')
- expect(page).to have_content('Confidential issues events')
- expect(page).to have_content('Note events')
- expect(page).to have_content('Merge requests events')
- expect(page).to have_content('Pipeline events')
- expect(page).to have_content('Wiki page events')
- end
-
- it 'create webhook' do
- visit integrations_path
-
- fill_in 'hook_url', with: url
- check 'Tag push events'
- fill_in 'hook_push_events_branch_filter', with: 'master'
- check 'Enable SSL verification'
- check 'Job events'
-
- click_button 'Add webhook'
-
- expect(page).to have_content(url)
- expect(page).to have_content('SSL Verification: enabled')
- expect(page).to have_content('Push events')
- expect(page).to have_content('Tag push events')
- expect(page).to have_content('Job events')
- end
-
- it 'edit existing webhook' do
- hook
- visit integrations_path
-
- click_link 'Edit'
- fill_in 'hook_url', with: url
- check 'Enable SSL verification'
- click_button 'Save changes'
-
- expect(page).to have_content 'SSL Verification: enabled'
- expect(page).to have_content(url)
- end
-
- it 'test existing webhook', :js do
- WebMock.stub_request(:post, hook.url)
- visit integrations_path
-
- find('.hook-test-button.dropdown').click
- click_link 'Push events'
-
- expect(current_path).to eq(integrations_path)
- end
-
- context 'delete existing webhook' do
- it 'from webhooks list page' do
- hook
- visit integrations_path
-
- expect { click_link 'Delete' }.to change(ProjectHook, :count).by(-1)
- end
-
- it 'from webhook edit page' do
- hook
- visit integrations_path
- click_link 'Edit'
-
- expect { click_link 'Delete' }.to change(ProjectHook, :count).by(-1)
- end
- end
- end
-
- context 'Webhook logs' do
- let(:hook) { create(:project_hook, project: project) }
- let(:hook_log) { create(:web_hook_log, web_hook: hook, internal_error_message: 'some error') }
-
- it 'show list of hook logs' do
- hook_log
- visit edit_project_hook_path(project, hook)
-
- expect(page).to have_content('Recent Deliveries')
- expect(page).to have_content(hook_log.url)
- end
-
- it 'show hook log details' do
- hook_log
- visit edit_project_hook_path(project, hook)
- click_link 'View details'
-
- expect(page).to have_content("POST #{hook_log.url}")
- expect(page).to have_content(hook_log.internal_error_message)
- expect(page).to have_content('Resend Request')
- end
-
- it 'retry hook log' do
- WebMock.stub_request(:post, hook.url)
-
- hook_log
- visit edit_project_hook_path(project, hook)
- click_link 'View details'
- click_link 'Resend Request'
-
- expect(current_path).to eq(edit_project_hook_path(project, hook))
- end
- end
- end
-end
diff --git a/spec/features/projects/settings/operations_settings_spec.rb b/spec/features/projects/settings/operations_settings_spec.rb
index d57401471ff..3c9102431e8 100644
--- a/spec/features/projects/settings/operations_settings_spec.rb
+++ b/spec/features/projects/settings/operations_settings_spec.rb
@@ -35,7 +35,7 @@ describe 'Projects > Settings > For a forked project', :js do
end
it 'renders form for incident management' do
- expect(page).to have_selector('h4', text: 'Incidents')
+ expect(page).to have_selector('h3', text: 'Incidents')
end
it 'sets correct default values' do
diff --git a/spec/features/projects/settings/pipelines_settings_spec.rb b/spec/features/projects/settings/pipelines_settings_spec.rb
index 23358d5cd67..87e467571e6 100644
--- a/spec/features/projects/settings/pipelines_settings_spec.rb
+++ b/spec/features/projects/settings/pipelines_settings_spec.rb
@@ -61,6 +61,28 @@ describe "Projects > Settings > Pipelines settings" do
expect(checkbox).to be_checked
end
+ it 'updates forward_deployment_enabled' do
+ visit project_settings_ci_cd_path(project)
+
+ checkbox = find_field('project_forward_deployment_enabled')
+ expect(checkbox).to be_checked
+
+ checkbox.set(false)
+
+ page.within '#js-general-pipeline-settings' do
+ click_on 'Save changes'
+ end
+
+ expect(page.status_code).to eq(200)
+
+ page.within '#js-general-pipeline-settings' do
+ expect(page).to have_button('Save changes', disabled: false)
+ end
+
+ checkbox = find_field('project_forward_deployment_enabled')
+ expect(checkbox).not_to be_checked
+ end
+
describe 'Auto DevOps' do
context 'when auto devops is turned on instance-wide' do
before do
diff --git a/spec/features/projects/settings/project_settings_spec.rb b/spec/features/projects/settings/project_settings_spec.rb
index b601866c96b..9fc91550667 100644
--- a/spec/features/projects/settings/project_settings_spec.rb
+++ b/spec/features/projects/settings/project_settings_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe 'Projects settings' do
- set(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
let(:user) { project.owner }
let(:panel) { find('.general-settings', match: :first) }
let(:button) { panel.find('.btn.js-settings-toggle') }
diff --git a/spec/features/projects/settings/registry_settings_spec.rb b/spec/features/projects/settings/registry_settings_spec.rb
index fc1a85c3efe..0613148172f 100644
--- a/spec/features/projects/settings/registry_settings_spec.rb
+++ b/spec/features/projects/settings/registry_settings_spec.rb
@@ -10,6 +10,7 @@ describe 'Project > Settings > CI/CD > Container registry tag expiration policy'
before do
sign_in(user)
stub_container_registry_config(enabled: true)
+ stub_feature_flags(new_variables_ui: false)
end
context 'as owner' do
diff --git a/spec/features/projects/settings/repository_settings_spec.rb b/spec/features/projects/settings/repository_settings_spec.rb
index 18031a40f15..9030cd6a648 100644
--- a/spec/features/projects/settings/repository_settings_spec.rb
+++ b/spec/features/projects/settings/repository_settings_spec.rb
@@ -25,122 +25,6 @@ describe 'Projects > Settings > Repository settings' do
context 'for maintainer' do
let(:role) { :maintainer }
- context 'Deploy Keys', :js do
- let(:private_deploy_key) { create(:deploy_key, title: 'private_deploy_key', public: false) }
- let(:public_deploy_key) { create(:another_deploy_key, title: 'public_deploy_key', public: true) }
- let(:new_ssh_key) { attributes_for(:key)[:key] }
-
- it 'get list of keys' do
- project.deploy_keys << private_deploy_key
- project.deploy_keys << public_deploy_key
-
- visit project_settings_repository_path(project)
-
- expect(page).to have_content('private_deploy_key')
- expect(page).to have_content('public_deploy_key')
- end
-
- it 'add a new deploy key' do
- visit project_settings_repository_path(project)
-
- fill_in 'deploy_key_title', with: 'new_deploy_key'
- fill_in 'deploy_key_key', with: new_ssh_key
- check 'deploy_key_deploy_keys_projects_attributes_0_can_push'
- click_button 'Add key'
-
- expect(page).to have_content('new_deploy_key')
- expect(page).to have_content('Write access allowed')
- end
-
- it 'edit an existing deploy key' do
- project.deploy_keys << private_deploy_key
- visit project_settings_repository_path(project)
-
- find('.deploy-key', text: private_deploy_key.title).find('.ic-pencil').click
-
- fill_in 'deploy_key_title', with: 'updated_deploy_key'
- check 'deploy_key_deploy_keys_projects_attributes_0_can_push'
- click_button 'Save changes'
-
- expect(page).to have_content('updated_deploy_key')
- expect(page).to have_content('Write access allowed')
- end
-
- it 'edit an existing public deploy key to be writable' do
- project.deploy_keys << public_deploy_key
- visit project_settings_repository_path(project)
-
- find('.deploy-key', text: public_deploy_key.title).find('.ic-pencil').click
-
- check 'deploy_key_deploy_keys_projects_attributes_0_can_push'
- click_button 'Save changes'
-
- expect(page).to have_content('public_deploy_key')
- expect(page).to have_content('Write access allowed')
- end
-
- it 'edit a deploy key from projects user has access to' do
- project2 = create(:project_empty_repo)
- project2.add_role(user, role)
- project2.deploy_keys << private_deploy_key
-
- visit project_settings_repository_path(project)
-
- find('.js-deployKeys-tab-available_project_keys').click
-
- find('.deploy-key', text: private_deploy_key.title).find('.ic-pencil').click
-
- fill_in 'deploy_key_title', with: 'updated_deploy_key'
- click_button 'Save changes'
-
- find('.js-deployKeys-tab-available_project_keys').click
-
- expect(page).to have_content('updated_deploy_key')
- end
-
- it 'remove an existing deploy key' do
- project.deploy_keys << private_deploy_key
- visit project_settings_repository_path(project)
-
- accept_confirm { find('.deploy-key', text: private_deploy_key.title).find('.ic-remove').click }
-
- expect(page).not_to have_content(private_deploy_key.title)
- end
- end
-
- context 'Deploy tokens' do
- let!(:deploy_token) { create(:deploy_token, projects: [project]) }
-
- before do
- stub_container_registry_config(enabled: true)
- visit project_settings_repository_path(project)
- end
-
- it 'view deploy tokens' do
- within('.deploy-tokens') do
- expect(page).to have_content(deploy_token.name)
- expect(page).to have_content('read_repository')
- expect(page).to have_content('read_registry')
- end
- end
-
- it 'add a new deploy token' do
- fill_in 'deploy_token_name', with: 'new_deploy_key'
- fill_in 'deploy_token_expires_at', with: (Date.today + 1.month).to_s
- fill_in 'deploy_token_username', with: 'deployer'
- check 'deploy_token_read_repository'
- check 'deploy_token_read_registry'
- click_button 'Create deploy token'
-
- expect(page).to have_content('Your new project deploy token has been created')
-
- within('.created-deploy-token-container') do
- expect(page).to have_selector("input[name='deploy-token-user'][value='deployer']")
- expect(page).to have_selector("input[name='deploy-token'][readonly='readonly']")
- end
- end
- end
-
context 'remote mirror settings' do
let(:user2) { create(:user) }
diff --git a/spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb b/spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb
index cd9299150b2..ac7788ba1fa 100644
--- a/spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb
+++ b/spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb
@@ -20,7 +20,7 @@ describe "User interacts with deploy keys", :js do
click_button("Enable")
expect(page).not_to have_selector(".fa-spinner")
- expect(current_path).to eq(project_settings_repository_path(project))
+ expect(current_path).to eq(project_settings_ci_cd_path(project))
find(".js-deployKeys-tab-enabled_keys").click
@@ -96,7 +96,7 @@ describe "User interacts with deploy keys", :js do
click_button("Add key")
- expect(current_path).to eq(project_settings_repository_path(project))
+ expect(current_path).to eq(project_settings_ci_cd_path(project))
page.within(".deploy-keys") do
expect(page).to have_content(DEPLOY_KEY_TITLE)
diff --git a/spec/features/projects/settings/user_sees_revoke_deploy_token_modal_spec.rb b/spec/features/projects/settings/user_sees_revoke_deploy_token_modal_spec.rb
index 3e9bfed1e47..a9253c20896 100644
--- a/spec/features/projects/settings/user_sees_revoke_deploy_token_modal_spec.rb
+++ b/spec/features/projects/settings/user_sees_revoke_deploy_token_modal_spec.rb
@@ -11,7 +11,7 @@ describe 'Repository Settings > User sees revoke deploy token modal', :js do
before do
project.add_role(user, role)
sign_in(user)
- visit(project_settings_repository_path(project))
+ visit(project_settings_ci_cd_path(project))
click_link('Revoke')
end
diff --git a/spec/features/projects/settings/webhooks_settings_spec.rb b/spec/features/projects/settings/webhooks_settings_spec.rb
new file mode 100644
index 00000000000..7e22117c63c
--- /dev/null
+++ b/spec/features/projects/settings/webhooks_settings_spec.rb
@@ -0,0 +1,143 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Projects > Settings > Webhook Settings' do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+ let(:webhooks_path) { project_hooks_path(project) }
+
+ before do
+ sign_in(user)
+ project.add_role(user, role)
+ end
+
+ context 'for developer' do
+ let(:role) { :developer }
+
+ it 'to be disallowed to view' do
+ visit webhooks_path
+
+ expect(page.status_code).to eq(404)
+ end
+ end
+
+ context 'for maintainer' do
+ let(:role) { :maintainer }
+
+ context 'Webhooks' do
+ let(:hook) { create(:project_hook, :all_events_enabled, enable_ssl_verification: true, project: project) }
+ let(:url) { generate(:url) }
+
+ it 'show list of webhooks' do
+ hook
+
+ visit webhooks_path
+
+ expect(page.status_code).to eq(200)
+ expect(page).to have_content(hook.url)
+ expect(page).to have_content('SSL Verification: enabled')
+ expect(page).to have_content('Push events')
+ expect(page).to have_content('Tag push events')
+ expect(page).to have_content('Issues events')
+ expect(page).to have_content('Confidential issues events')
+ expect(page).to have_content('Note events')
+ expect(page).to have_content('Merge requests events')
+ expect(page).to have_content('Pipeline events')
+ expect(page).to have_content('Wiki page events')
+ end
+
+ it 'create webhook' do
+ visit webhooks_path
+
+ fill_in 'hook_url', with: url
+ check 'Tag push events'
+ fill_in 'hook_push_events_branch_filter', with: 'master'
+ check 'Enable SSL verification'
+ check 'Job events'
+
+ click_button 'Add webhook'
+
+ expect(page).to have_content(url)
+ expect(page).to have_content('SSL Verification: enabled')
+ expect(page).to have_content('Push events')
+ expect(page).to have_content('Tag push events')
+ expect(page).to have_content('Job events')
+ end
+
+ it 'edit existing webhook' do
+ hook
+ visit webhooks_path
+
+ click_link 'Edit'
+ fill_in 'hook_url', with: url
+ check 'Enable SSL verification'
+ click_button 'Save changes'
+
+ expect(page).to have_content 'SSL Verification: enabled'
+ expect(page).to have_content(url)
+ end
+
+ it 'test existing webhook', :js do
+ WebMock.stub_request(:post, hook.url)
+ visit webhooks_path
+
+ find('.hook-test-button.dropdown').click
+ click_link 'Push events'
+
+ expect(current_path).to eq(webhooks_path)
+ end
+
+ context 'delete existing webhook' do
+ it 'from webhooks list page' do
+ hook
+ visit webhooks_path
+
+ expect { click_link 'Delete' }.to change(ProjectHook, :count).by(-1)
+ end
+
+ it 'from webhook edit page' do
+ hook
+ visit webhooks_path
+ click_link 'Edit'
+
+ expect { click_link 'Delete' }.to change(ProjectHook, :count).by(-1)
+ end
+ end
+ end
+
+ context 'Webhook logs' do
+ let(:hook) { create(:project_hook, project: project) }
+ let(:hook_log) { create(:web_hook_log, web_hook: hook, internal_error_message: 'some error') }
+
+ it 'show list of hook logs' do
+ hook_log
+ visit edit_project_hook_path(project, hook)
+
+ expect(page).to have_content('Recent Deliveries')
+ expect(page).to have_content(hook_log.url)
+ end
+
+ it 'show hook log details' do
+ hook_log
+ visit edit_project_hook_path(project, hook)
+ click_link 'View details'
+
+ expect(page).to have_content("POST #{hook_log.url}")
+ expect(page).to have_content(hook_log.internal_error_message)
+ expect(page).to have_content('Resend Request')
+ end
+
+ it 'retry hook log' do
+ WebMock.stub_request(:post, hook.url)
+
+ hook_log
+ visit edit_project_hook_path(project, hook)
+ click_link 'View details'
+ click_link 'Resend Request'
+
+ expect(current_path).to eq(edit_project_hook_path(project, hook))
+ end
+ end
+ end
+end
diff --git a/spec/features/projects/show/user_manages_notifications_spec.rb b/spec/features/projects/show/user_manages_notifications_spec.rb
index 851a09cf28a..0cd6743304e 100644
--- a/spec/features/projects/show/user_manages_notifications_spec.rb
+++ b/spec/features/projects/show/user_manages_notifications_spec.rb
@@ -7,7 +7,6 @@ describe 'Projects > Show > User manages notifications', :js do
before do
sign_in(project.owner)
- visit project_path(project)
end
def click_notifications_button
@@ -15,6 +14,7 @@ describe 'Projects > Show > User manages notifications', :js do
end
it 'changes the notification setting' do
+ visit project_path(project)
click_notifications_button
click_link 'On mention'
@@ -26,6 +26,7 @@ describe 'Projects > Show > User manages notifications', :js do
end
it 'changes the notification setting to disabled' do
+ visit project_path(project)
click_notifications_button
click_link 'Disabled'
@@ -50,11 +51,13 @@ describe 'Projects > Show > User manages notifications', :js do
:reassign_merge_request,
:merge_merge_request,
:failed_pipeline,
+ :fixed_pipeline,
:success_pipeline
]
end
it 'shows notification settings checkbox' do
+ visit project_path(project)
click_notifications_button
page.find('a[data-notification-level="custom"]').click
@@ -64,12 +67,27 @@ describe 'Projects > Show > User manages notifications', :js do
end
end
end
+
+ context 'when ci_pipeline_fixed_notifications is disabled' do
+ before do
+ stub_feature_flags(ci_pipeline_fixed_notifications: false)
+ end
+
+ it 'hides fixed_pipeline checkbox' do
+ visit project_path(project)
+ click_notifications_button
+ page.find('a[data-notification-level="custom"]').click
+
+ expect(page).not_to have_selector("input[name='notification_setting[fixed_pipeline]']")
+ end
+ end
end
context 'when project emails are disabled' do
let(:project) { create(:project, :public, :repository, emails_disabled: true) }
it 'is disabled' do
+ visit project_path(project)
expect(page).to have_selector('.notifications-btn.disabled', visible: true)
end
end
diff --git a/spec/features/projects/show/user_sees_git_instructions_spec.rb b/spec/features/projects/show/user_sees_git_instructions_spec.rb
index dde9490a5e1..0c486056329 100644
--- a/spec/features/projects/show/user_sees_git_instructions_spec.rb
+++ b/spec/features/projects/show/user_sees_git_instructions_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe 'Projects > Show > User sees Git instructions' do
- set(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
shared_examples_for 'redirects to the sign in page' do
it 'redirects to the sign in page' do
@@ -49,7 +49,7 @@ describe 'Projects > Show > User sees Git instructions' do
context 'when project is public' do
context 'when project has no repo' do
- set(:project) { create(:project, :public) }
+ let_it_be(:project) { create(:project, :public) }
before do
sign_in(project.owner)
@@ -60,7 +60,7 @@ describe 'Projects > Show > User sees Git instructions' do
end
context 'when project is empty' do
- set(:project) { create(:project_empty_repo, :public) }
+ let_it_be(:project) { create(:project_empty_repo, :public) }
context 'when not signed in' do
before do
@@ -98,7 +98,7 @@ describe 'Projects > Show > User sees Git instructions' do
end
context 'when project is not empty' do
- set(:project) { create(:project, :public, :repository) }
+ let_it_be(:project) { create(:project, :public, :repository) }
before do
visit(project_path(project))
@@ -141,7 +141,7 @@ describe 'Projects > Show > User sees Git instructions' do
end
context 'when project is internal' do
- set(:project) { create(:project, :internal, :repository) }
+ let_it_be(:project) { create(:project, :internal, :repository) }
context 'when not signed in' do
before do
@@ -163,7 +163,7 @@ describe 'Projects > Show > User sees Git instructions' do
end
context 'when project is private' do
- set(:project) { create(:project, :private) }
+ let_it_be(:project) { create(:project, :private) }
before do
visit(project_path(project))
diff --git a/spec/features/projects/show/user_sees_last_commit_ci_status_spec.rb b/spec/features/projects/show/user_sees_last_commit_ci_status_spec.rb
index cf1a679102c..c0fcd10f394 100644
--- a/spec/features/projects/show/user_sees_last_commit_ci_status_spec.rb
+++ b/spec/features/projects/show/user_sees_last_commit_ci_status_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe 'Projects > Show > User sees last commit CI status' do
- set(:project) { create(:project, :repository, :public) }
+ let_it_be(:project) { create(:project, :repository, :public) }
it 'shows the project README', :js do
project.enable_ci
@@ -14,7 +14,7 @@ describe 'Projects > Show > User sees last commit CI status' do
page.within '.commit-detail' do
expect(page).to have_content(project.commit.sha[0..6])
- expect(page).to have_selector('[aria-label="Commit: skipped"]')
+ expect(page).to have_selector('[aria-label="Pipeline: skipped"]')
end
end
end
diff --git a/spec/features/projects/show/user_sees_readme_spec.rb b/spec/features/projects/show/user_sees_readme_spec.rb
index 98906de4620..52745b06cd3 100644
--- a/spec/features/projects/show/user_sees_readme_spec.rb
+++ b/spec/features/projects/show/user_sees_readme_spec.rb
@@ -3,9 +3,8 @@
require 'spec_helper'
describe 'Projects > Show > User sees README' do
- set(:user) { create(:user) }
-
- set(:project) { create(:project, :repository, :public) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository, :public) }
it 'shows the project README', :js do
visit project_path(project)
diff --git a/spec/features/projects/snippets/create_snippet_spec.rb b/spec/features/projects/snippets/create_snippet_spec.rb
index 5a425fb5d27..b55a42e07a9 100644
--- a/spec/features/projects/snippets/create_snippet_spec.rb
+++ b/spec/features/projects/snippets/create_snippet_spec.rb
@@ -2,14 +2,13 @@
require 'spec_helper'
-describe 'Projects > Snippets > Create Snippet', :js do
- include DropzoneHelper
-
- let(:user) { create(:user) }
- let(:project) { create(:project, :public) }
+shared_examples_for 'snippet editor' do
+ before do
+ stub_feature_flags(monaco_snippets: flag)
+ end
def description_field
- find('.js-description-input input,textarea')
+ find('.js-description-input').find('input,textarea')
end
def fill_form
@@ -20,7 +19,8 @@ describe 'Projects > Snippets > Create Snippet', :js do
fill_in 'project_snippet_description', with: 'My Snippet **Description**'
page.within('.file-editor') do
- find('.ace_text-input', visible: false).send_keys('Hello World!')
+ el = flag == true ? find('.inputarea') : find('.ace_text-input', visible: false)
+ el.send_keys 'Hello World!'
end
end
@@ -32,7 +32,10 @@ describe 'Projects > Snippets > Create Snippet', :js do
visit project_snippets_path(project)
+ # Wait for the SVG to ensure the button location doesn't shift
+ within('.empty-state') { find('img.js-lazy-loaded') }
click_on('New snippet')
+ wait_for_requests
end
it 'shows collapsible description input' do
@@ -94,6 +97,29 @@ describe 'Projects > Snippets > Create Snippet', :js do
link = find('a.no-attachment-icon img[alt="banana_sample"]')['src']
expect(link).to match(%r{/#{Regexp.escape(project.full_path)}/uploads/\h{32}/banana_sample\.gif\z})
end
+
+ context 'when the git operation fails' do
+ let(:error) { 'This is a git error' }
+
+ before do
+ allow_next_instance_of(Snippets::CreateService) do |instance|
+ allow(instance).to receive(:create_commit).and_raise(StandardError, error)
+ end
+
+ fill_form
+
+ click_button('Create snippet')
+ wait_for_requests
+ end
+
+ it 'displays the error' do
+ expect(page).to have_content(error)
+ end
+
+ it 'renders new page' do
+ expect(page).to have_content('New Snippet')
+ end
+ end
end
context 'when a user is not authenticated' do
@@ -102,7 +128,7 @@ describe 'Projects > Snippets > Create Snippet', :js do
end
it 'shows a public snippet on the index page but not the New snippet button' do
- snippet = create(:project_snippet, :public, project: project)
+ snippet = create(:project_snippet, :public, :repository, project: project)
visit project_snippets_path(project)
@@ -111,3 +137,22 @@ describe 'Projects > Snippets > Create Snippet', :js do
end
end
end
+
+describe 'Projects > Snippets > Create Snippet', :js do
+ include DropzoneHelper
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public) }
+
+ context 'when using Monaco' do
+ it_behaves_like "snippet editor" do
+ let(:flag) { true }
+ end
+ end
+
+ context 'when using ACE' do
+ it_behaves_like "snippet editor" do
+ let(:flag) { false }
+ end
+ end
+end
diff --git a/spec/features/projects/snippets/user_comments_on_snippet_spec.rb b/spec/features/projects/snippets/user_comments_on_snippet_spec.rb
index 11707378996..9d11b55228b 100644
--- a/spec/features/projects/snippets/user_comments_on_snippet_spec.rb
+++ b/spec/features/projects/snippets/user_comments_on_snippet_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe 'Projects > Snippets > User comments on a snippet', :js do
- let(:project) { create(:project) }
- let!(:snippet) { create(:project_snippet, project: project, author: user) }
- let(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:snippet) { create(:project_snippet, :repository, project: project, author: user) }
before do
stub_feature_flags(snippets_vue: false)
diff --git a/spec/features/projects/snippets/user_updates_snippet_spec.rb b/spec/features/projects/snippets/user_updates_snippet_spec.rb
index 93a5b4a7262..f9628b37089 100644
--- a/spec/features/projects/snippets/user_updates_snippet_spec.rb
+++ b/spec/features/projects/snippets/user_updates_snippet_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe 'Projects > Snippets > User updates a snippet' do
- let(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, namespace: user.namespace) }
let!(:snippet) { create(:project_snippet, project: project, author: user) }
- let(:user) { create(:user) }
before do
stub_feature_flags(snippets_vue: false)
@@ -13,16 +13,33 @@ describe 'Projects > Snippets > User updates a snippet' do
sign_in(user)
visit(project_snippet_path(project, snippet))
- end
- it 'updates a snippet' do
page.within('.detail-page-header') do
first(:link, 'Edit').click
end
+ end
+ it 'updates a snippet' do
fill_in('project_snippet_title', with: 'Snippet new title')
click_button('Save')
expect(page).to have_content('Snippet new title')
end
+
+ context 'when the git operation fails' do
+ before do
+ allow_next_instance_of(Snippets::UpdateService) do |instance|
+ allow(instance).to receive(:create_commit).and_raise(StandardError)
+ end
+
+ fill_in('project_snippet_title', with: 'Snippet new title')
+
+ click_button('Save')
+ end
+
+ it 'renders edit page and displays the error' do
+ expect(page.find('.flash-container span').text).to eq('Error updating the snippet')
+ expect(page).to have_content('Edit Snippet')
+ end
+ end
end
diff --git a/spec/features/projects/sourcegraph_csp_spec.rb b/spec/features/projects/sourcegraph_csp_spec.rb
index 57d1e8e3034..f252d3cd027 100644
--- a/spec/features/projects/sourcegraph_csp_spec.rb
+++ b/spec/features/projects/sourcegraph_csp_spec.rb
@@ -5,94 +5,28 @@ require 'spec_helper'
describe 'Sourcegraph Content Security Policy' do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository, namespace: user.namespace) }
- let_it_be(:default_csp_values) { "'self' https://some-cdn.test" }
- let_it_be(:sourcegraph_url) { 'https://sourcegraph.test' }
- let(:sourcegraph_enabled) { true }
- subject do
- visit project_blob_path(project, File.join('master', 'README.md'))
-
- response_headers['Content-Security-Policy']
- end
-
- before do
- allow(Gitlab::CurrentSettings).to receive(:sourcegraph_url).and_return(sourcegraph_url)
- allow(Gitlab::CurrentSettings).to receive(:sourcegraph_enabled).and_return(sourcegraph_enabled)
-
- sign_in(user)
- end
-
- shared_context 'csp config' do |csp_rule|
+ shared_context 'disable feature' do
before do
- csp = ActionDispatch::ContentSecurityPolicy.new do |p|
- p.send(csp_rule, default_csp_values) if csp_rule
- end
-
- expect_next_instance_of(Projects::BlobController) do |controller|
- expect(controller).to receive(:current_content_security_policy).and_return(csp)
- end
+ allow(Gitlab::CurrentSettings).to receive(:sourcegraph_enabled).and_return(false)
end
end
- context 'when no CSP config' do
- include_context 'csp config', nil
+ it_behaves_like 'setting CSP', 'connect-src' do
+ let_it_be(:whitelisted_url) { 'https://sourcegraph.test' }
+ let_it_be(:extended_controller_class) { Projects::BlobController }
- it 'does not add CSP directives' do
- is_expected.to be_blank
- end
- end
-
- describe 'when a CSP config exists for connect-src' do
- include_context 'csp config', :connect_src
+ subject do
+ visit project_blob_path(project, File.join('master', 'README.md'))
- context 'when sourcegraph enabled' do
- it 'appends to connect-src' do
- is_expected.to eql("connect-src #{default_csp_values} #{sourcegraph_url}")
- end
+ response_headers['Content-Security-Policy']
end
- context 'when sourcegraph disabled' do
- let(:sourcegraph_enabled) { false }
-
- it 'keeps original connect-src' do
- is_expected.to eql("connect-src #{default_csp_values}")
- end
- end
- end
-
- describe 'when a CSP config exists for default-src but not connect-src' do
- include_context 'csp config', :default_src
-
- context 'when sourcegraph enabled' do
- it 'uses default-src values in connect-src' do
- is_expected.to eql("default-src #{default_csp_values}; connect-src #{default_csp_values} #{sourcegraph_url}")
- end
- end
-
- context 'when sourcegraph disabled' do
- let(:sourcegraph_enabled) { false }
-
- it 'does not add connect-src' do
- is_expected.to eql("default-src #{default_csp_values}")
- end
- end
- end
-
- describe 'when a CSP config exists for font-src but not connect-src' do
- include_context 'csp config', :font_src
-
- context 'when sourcegraph enabled' do
- it 'uses default-src values in connect-src' do
- is_expected.to eql("font-src #{default_csp_values}; connect-src #{sourcegraph_url}")
- end
- end
-
- context 'when sourcegraph disabled' do
- let(:sourcegraph_enabled) { false }
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:sourcegraph_url).and_return(whitelisted_url)
+ allow(Gitlab::CurrentSettings).to receive(:sourcegraph_enabled).and_return(true)
- it 'does not add connect-src' do
- is_expected.to eql("font-src #{default_csp_values}")
- end
+ sign_in(user)
end
end
end
diff --git a/spec/features/projects/tags/user_edits_tags_spec.rb b/spec/features/projects/tags/user_edits_tags_spec.rb
index b1cb7685f63..6388875a619 100644
--- a/spec/features/projects/tags/user_edits_tags_spec.rb
+++ b/spec/features/projects/tags/user_edits_tags_spec.rb
@@ -68,7 +68,7 @@ describe 'Project > Tags', :js do
end
end
- it 'shows "Attaching a file" message on uploading 1 file', :js do
+ it 'shows "Attaching a file" message on uploading 1 file', :js, :capybara_ignore_server_errors do
slow_requests do
dropzone_file([Rails.root.join('spec', 'fixtures', 'dk.png')], 0, false)
diff --git a/spec/features/projects/user_sees_user_popover_spec.rb b/spec/features/projects/user_sees_user_popover_spec.rb
index adbf9073d59..fafb3773866 100644
--- a/spec/features/projects/user_sees_user_popover_spec.rb
+++ b/spec/features/projects/user_sees_user_popover_spec.rb
@@ -3,8 +3,7 @@
require 'spec_helper'
describe 'User sees user popover', :js do
- set(:project) { create(:project, :repository) }
-
+ let_it_be(:project) { create(:project, :repository) }
let(:user) { project.creator }
let(:merge_request) do
create(:merge_request, source_project: project, target_project: project)
diff --git a/spec/features/projects/user_uses_shortcuts_spec.rb b/spec/features/projects/user_uses_shortcuts_spec.rb
index beed1c07e51..2d629ef538a 100644
--- a/spec/features/projects/user_uses_shortcuts_spec.rb
+++ b/spec/features/projects/user_uses_shortcuts_spec.rb
@@ -7,8 +7,6 @@ describe 'User uses shortcuts', :js do
let(:user) { create(:user) }
before do
- stub_feature_flags(analytics_pages_under_project_analytics_sidebar: { enabled: false, thing: project })
-
project.add_maintainer(user)
sign_in(user)
@@ -119,8 +117,8 @@ describe 'User uses shortcuts', :js do
find('body').native.send_key('g')
find('body').native.send_key('d')
- expect(page).to have_active_navigation('Repository')
- expect(page).to have_active_sub_navigation('Charts')
+ expect(page).to have_active_navigation(_('Analytics'))
+ expect(page).to have_active_sub_navigation(_('Repository'))
end
end
@@ -211,18 +209,4 @@ describe 'User uses shortcuts', :js do
expect(page).to have_active_navigation('Wiki')
end
end
-
- context 'when `analytics_pages_under_project_analytics_sidebar` feature flag is enabled' do
- before do
- stub_feature_flags(analytics_pages_under_project_analytics_sidebar: { enabled: true, thing: project })
- end
-
- it 'redirects to the repository charts page' do
- find('body').native.send_key('g')
- find('body').native.send_key('d')
-
- expect(page).to have_active_navigation(_('Analytics'))
- expect(page).to have_active_sub_navigation(_('Repository Analytics'))
- end
- end
end
diff --git a/spec/features/projects/wiki/markdown_preview_spec.rb b/spec/features/projects/wiki/markdown_preview_spec.rb
index 331ba58d067..7d18c0f7a14 100644
--- a/spec/features/projects/wiki/markdown_preview_spec.rb
+++ b/spec/features/projects/wiki/markdown_preview_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe 'Projects > Wiki > User previews markdown changes', :js do
- set(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
let(:project) { create(:project, :wiki_repo, namespace: user.namespace) }
let(:wiki_page) { create(:wiki_page, wiki: project.wiki, attrs: { title: 'home', content: '[some link](other-page)' }) }
let(:wiki_content) do
diff --git a/spec/features/projects/wiki/user_creates_wiki_page_spec.rb b/spec/features/projects/wiki/user_creates_wiki_page_spec.rb
index 7503c8aa52e..e67982bbd31 100644
--- a/spec/features/projects/wiki/user_creates_wiki_page_spec.rb
+++ b/spec/features/projects/wiki/user_creates_wiki_page_spec.rb
@@ -3,6 +3,8 @@
require "spec_helper"
describe "User creates wiki page" do
+ include WikiHelpers
+
let(:user) { create(:user) }
let(:wiki) { ProjectWiki.new(project, user) }
let(:project) { create(:project) }
@@ -14,9 +16,11 @@ describe "User creates wiki page" do
end
context "when wiki is empty" do
- before do
+ before do |example|
visit(project_wikis_path(project))
+ wait_for_svg_to_be_loaded(example)
+
click_link "Create your first page"
end
@@ -45,7 +49,7 @@ describe "User creates wiki page" do
expect(page).to have_content("Create New Page")
end
- it "shows non-escaped link in the pages list", :quarantine do
+ it "shows non-escaped link in the pages list" do
fill_in(:wiki_title, with: "one/two/three-test")
page.within(".wiki-form") do
@@ -163,7 +167,7 @@ describe "User creates wiki page" do
expect(page).to have_link('Link to Home', href: "/#{project.full_path}/-/wikis/home")
end
- it_behaves_like 'wiki file attachments', :quarantine
+ it_behaves_like 'wiki file attachments'
end
context "in a group namespace", :js do
@@ -175,7 +179,7 @@ describe "User creates wiki page" do
expect(page).to have_field("wiki[message]", with: "Create home")
end
- it "creates a page from the home page", :quarantine do
+ it "creates a page from the home page" do
page.within(".wiki-form") do
fill_in(:wiki_content, with: "My awesome wiki!")
diff --git a/spec/features/projects/wiki/user_updates_wiki_page_spec.rb b/spec/features/projects/wiki/user_updates_wiki_page_spec.rb
index d3a0c9b790b..9d9c83331fb 100644
--- a/spec/features/projects/wiki/user_updates_wiki_page_spec.rb
+++ b/spec/features/projects/wiki/user_updates_wiki_page_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
describe 'User updates wiki page' do
+ include WikiHelpers
+
let(:user) { create(:user) }
before do
@@ -11,8 +13,11 @@ describe 'User updates wiki page' do
end
context 'when wiki is empty' do
- before do
+ before do |example|
visit(project_wikis_path(project))
+
+ wait_for_svg_to_be_loaded(example)
+
click_link "Create your first page"
end
diff --git a/spec/features/projects/wiki/user_views_wiki_page_spec.rb b/spec/features/projects/wiki/user_views_wiki_page_spec.rb
index c7856342fb2..8a338756323 100644
--- a/spec/features/projects/wiki/user_views_wiki_page_spec.rb
+++ b/spec/features/projects/wiki/user_views_wiki_page_spec.rb
@@ -19,9 +19,12 @@ describe 'User views a wiki page' do
sign_in(user)
end
- context 'when wiki is empty' do
+ context 'when wiki is empty', :js do
before do
- visit(project_wikis_path(project))
+ visit project_wikis_path(project)
+
+ wait_for_svg_to_be_loaded
+
click_link "Create your first page"
fill_in(:wiki_title, with: 'one/two/three-test')
@@ -30,9 +33,11 @@ describe 'User views a wiki page' do
fill_in(:wiki_content, with: 'wiki content')
click_on('Create page')
end
+
+ expect(page).to have_content('Wiki was successfully updated.')
end
- it 'shows the history of a page that has a path', :js do
+ it 'shows the history of a page that has a path' do
expect(current_path).to include('one/two/three-test')
first(:link, text: 'three').click
@@ -45,7 +50,7 @@ describe 'User views a wiki page' do
end
end
- it 'shows an old version of a page', :js do
+ it 'shows an old version of a page' do
expect(current_path).to include('one/two/three-test')
expect(find('.wiki-pages')).to have_content('three')
@@ -59,8 +64,10 @@ describe 'User views a wiki page' do
expect(page).to have_content('Edit Page')
fill_in('Content', with: 'Updated Wiki Content')
-
click_on('Save changes')
+
+ expect(page).to have_content('Wiki was successfully updated.')
+
click_on('Page history')
page.within(:css, '.nav-text') do
@@ -129,6 +136,36 @@ describe 'User views a wiki page' do
end
end
+ context 'when a page has special characters in its title' do
+ let(:title) { '<foo> !@#$%^&*()[]{}=_+\'"\\|<>? <bar>' }
+
+ before do
+ wiki_page.update(title: title )
+ end
+
+ it 'preserves the special characters' do
+ visit(project_wiki_path(project, wiki_page))
+
+ expect(page).to have_css('.wiki-page-title', text: title)
+ expect(page).to have_css('.wiki-pages li', text: title)
+ end
+ end
+
+ context 'when a page has XSS in its title or content' do
+ let(:title) { '<script>alert("title")<script>' }
+
+ before do
+ wiki_page.update(title: title, content: 'foo <script>alert("content")</script> bar')
+ end
+
+ it 'safely displays the page' do
+ visit(project_wiki_path(project, wiki_page))
+
+ expect(page).to have_css('.wiki-page-title', text: title)
+ expect(page).to have_content('foo bar')
+ end
+ end
+
context 'when a page has XSS in its message' do
before do
wiki_page.update(message: '<script>alert(true)<script>', content: 'XSS update')
@@ -162,9 +199,12 @@ describe 'User views a wiki page' do
end
it 'opens a default wiki page', :js do
- visit(project_path(project))
+ visit project_path(project)
find('.shortcuts-wiki').click
+
+ wait_for_svg_to_be_loaded
+
click_link "Create your first page"
expect(page).to have_content('Create New Page')
diff --git a/spec/features/read_only_spec.rb b/spec/features/read_only_spec.rb
index 619d34ebed4..a33535a7b0b 100644
--- a/spec/features/read_only_spec.rb
+++ b/spec/features/read_only_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe 'read-only message' do
- set(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
before do
sign_in(user)
diff --git a/spec/features/reportable_note/snippets_spec.rb b/spec/features/reportable_note/snippets_spec.rb
index bd37675315f..a4e609ce40c 100644
--- a/spec/features/reportable_note/snippets_spec.rb
+++ b/spec/features/reportable_note/snippets_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe 'Reportable note on snippets', :js do
- let(:user) { create(:user) }
- let(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
before do
stub_feature_flags(snippets_vue: false)
@@ -13,8 +13,8 @@ describe 'Reportable note on snippets', :js do
end
describe 'on project snippet' do
- let(:snippet) { create(:project_snippet, :public, project: project, author: user) }
- let!(:note) { create(:note_on_project_snippet, noteable: snippet, project: project) }
+ let_it_be(:snippet) { create(:project_snippet, :public, :repository, project: project, author: user) }
+ let_it_be(:note) { create(:note_on_project_snippet, noteable: snippet, project: project) }
before do
visit project_snippet_path(project, snippet)
diff --git a/spec/features/security/project/internal_access_spec.rb b/spec/features/security/project/internal_access_spec.rb
index ed1dbe15d65..45b57b5cb1b 100644
--- a/spec/features/security/project/internal_access_spec.rb
+++ b/spec/features/security/project/internal_access_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe "Internal Project Access" do
include AccessMatchers
- set(:project) { create(:project, :internal, :repository) }
+ let_it_be(:project, reload: true) { create(:project, :internal, :repository) }
describe "Project should be internal" do
describe '#internal?' do
diff --git a/spec/features/security/project/private_access_spec.rb b/spec/features/security/project/private_access_spec.rb
index 97e6b3bd4ff..9aeb3ffbd43 100644
--- a/spec/features/security/project/private_access_spec.rb
+++ b/spec/features/security/project/private_access_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe "Private Project Access" do
include AccessMatchers
- set(:project) { create(:project, :private, :repository, public_builds: false) }
+ let_it_be(:project, reload: true) { create(:project, :private, :repository, public_builds: false) }
describe "Project should be private" do
describe '#private?' do
diff --git a/spec/features/security/project/public_access_spec.rb b/spec/features/security/project/public_access_spec.rb
index 24bbb8d9b9e..4d8c2c7822c 100644
--- a/spec/features/security/project/public_access_spec.rb
+++ b/spec/features/security/project/public_access_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe "Public Project Access" do
include AccessMatchers
- set(:project) { create(:project, :public, :repository) }
+ let_it_be(:project, reload: true) { create(:project, :public, :repository) }
describe "Project should be public" do
describe '#public?' do
diff --git a/spec/features/snippets/search_snippets_spec.rb b/spec/features/snippets/search_snippets_spec.rb
index dce790e5708..691716d3576 100644
--- a/spec/features/snippets/search_snippets_spec.rb
+++ b/spec/features/snippets/search_snippets_spec.rb
@@ -16,45 +16,4 @@ describe 'Search Snippets' do
expect(page).to have_link(public_snippet.title)
expect(page).to have_link(private_snippet.title)
end
-
- it 'User searches for snippet contents' do
- create(:personal_snippet,
- :public,
- title: 'Many lined snippet',
- content: <<-CONTENT.strip_heredoc
- |line one
- |line two
- |line three
- |line four
- |line five
- |line six
- |line seven
- |line eight
- |line nine
- |line ten
- |line eleven
- |line twelve
- |line thirteen
- |line fourteen
- CONTENT
- )
-
- sign_in create(:user)
- visit dashboard_snippets_path
- submit_search('line seven')
-
- expect(page).to have_content('line seven')
-
- # 3 lines before the matched line should be visible
- expect(page).to have_content('line six')
- expect(page).to have_content('line five')
- expect(page).to have_content('line four')
- expect(page).not_to have_content('line three')
-
- # 3 lines after the matched line should be visible
- expect(page).to have_content('line eight')
- expect(page).to have_content('line nine')
- expect(page).to have_content('line ten')
- expect(page).not_to have_content('line eleven')
- end
end
diff --git a/spec/features/snippets/spam_snippets_spec.rb b/spec/features/snippets/spam_snippets_spec.rb
index dac36ba2b28..efe1bdc963d 100644
--- a/spec/features/snippets/spam_snippets_spec.rb
+++ b/spec/features/snippets/spam_snippets_spec.rb
@@ -2,16 +2,15 @@
require 'spec_helper'
-describe 'User creates snippet', :js do
- let(:user) { create(:user) }
-
+shared_examples_for 'snippet editor' do
def description_field
- find('.js-description-input input,textarea')
+ find('.js-description-input').find('input,textarea')
end
before do
stub_feature_flags(allow_possible_spam: false)
stub_feature_flags(snippets_vue: false)
+ stub_feature_flags(monaco_snippets: flag)
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
Gitlab::CurrentSettings.update!(
@@ -33,7 +32,8 @@ describe 'User creates snippet', :js do
find('#personal_snippet_visibility_level_20').set(true)
page.within('.file-editor') do
- find('.ace_text-input', visible: false).send_keys 'Hello World!'
+ el = flag == true ? find('.inputarea') : find('.ace_text-input', visible: false)
+ el.send_keys 'Hello World!'
end
end
@@ -80,3 +80,19 @@ describe 'User creates snippet', :js do
end
end
end
+
+describe 'User creates snippet', :js do
+ let_it_be(:user) { create(:user) }
+
+ context 'when using Monaco' do
+ it_behaves_like "snippet editor" do
+ let(:flag) { true }
+ end
+ end
+
+ context 'when using ACE' do
+ it_behaves_like "snippet editor" do
+ let(:flag) { false }
+ end
+ end
+end
diff --git a/spec/features/snippets/user_creates_snippet_spec.rb b/spec/features/snippets/user_creates_snippet_spec.rb
index eb55613b954..f200355c6d2 100644
--- a/spec/features/snippets/user_creates_snippet_spec.rb
+++ b/spec/features/snippets/user_creates_snippet_spec.rb
@@ -2,19 +2,16 @@
require 'spec_helper'
-describe 'User creates snippet', :js do
- include DropzoneHelper
-
- let(:user) { create(:user) }
-
+shared_examples_for 'snippet editor' do
before do
stub_feature_flags(snippets_vue: false)
+ stub_feature_flags(monaco_snippets: flag)
sign_in(user)
visit new_snippet_path
end
def description_field
- find('.js-description-input input,textarea')
+ find('.js-description-input').find('input,textarea')
end
def fill_form
@@ -25,7 +22,8 @@ describe 'User creates snippet', :js do
fill_in 'personal_snippet_description', with: 'My Snippet **Description**'
page.within('.file-editor') do
- find('.ace_text-input', visible: false).send_keys 'Hello World!'
+ el = flag == true ? find('.inputarea') : find('.ace_text-input', visible: false)
+ el.send_keys 'Hello World!'
end
end
@@ -53,7 +51,7 @@ describe 'User creates snippet', :js do
page.within('#new_personal_snippet .md-preview-holder') do
expect(page).to have_content('My Snippet')
- link = find('a.no-attachment-icon img[alt="banana_sample"]')['src']
+ link = find('a.no-attachment-icon img.js-lazy-loaded[alt="banana_sample"]')['src']
expect(link).to match(%r{/uploads/-/system/user/#{user.id}/\h{32}/banana_sample\.gif\z})
# Adds a cache buster for checking if the image exists as Selenium is now handling the cached requests
@@ -73,13 +71,36 @@ describe 'User creates snippet', :js do
click_button('Create snippet')
wait_for_requests
- link = find('a.no-attachment-icon img[alt="banana_sample"]')['src']
+ link = find('a.no-attachment-icon img.js-lazy-loaded[alt="banana_sample"]')['src']
expect(link).to match(%r{/uploads/-/system/personal_snippet/#{Snippet.last.id}/\h{32}/banana_sample\.gif\z})
reqs = inspect_requests { visit("#{link}?ran=#{SecureRandom.base64(20)}") }
expect(reqs.first.status_code).to eq(200)
end
+ context 'when the git operation fails' do
+ let(:error) { 'This is a git error' }
+
+ before do
+ allow_next_instance_of(Snippets::CreateService) do |instance|
+ allow(instance).to receive(:create_commit).and_raise(StandardError, error)
+ end
+
+ fill_form
+
+ click_button('Create snippet')
+ wait_for_requests
+ end
+
+ it 'displays the error' do
+ expect(page).to have_content(error)
+ end
+
+ it 'renders new page' do
+ expect(page).to have_content('New Snippet')
+ end
+ end
+
it 'validation fails for the first time' do
fill_in 'personal_snippet_title', with: 'My Snippet Title'
click_button('Create snippet')
@@ -98,7 +119,7 @@ describe 'User creates snippet', :js do
expect(page).to have_selector('strong')
end
expect(page).to have_content('Hello World!')
- link = find('a.no-attachment-icon img[alt="banana_sample"]')['src']
+ link = find('a.no-attachment-icon img.js-lazy-loaded[alt="banana_sample"]')['src']
expect(link).to match(%r{/uploads/-/system/personal_snippet/#{Snippet.last.id}/\h{32}/banana_sample\.gif\z})
reqs = inspect_requests { visit("#{link}?ran=#{SecureRandom.base64(20)}") }
@@ -109,7 +130,8 @@ describe 'User creates snippet', :js do
fill_in 'personal_snippet_title', with: 'My Snippet Title'
page.within('.file-editor') do
find(:xpath, "//input[@id='personal_snippet_file_name']").set 'snippet+file+name'
- find('.ace_text-input', visible: false).send_keys 'Hello World!'
+ el = flag == true ? find('.inputarea') : find('.ace_text-input', visible: false)
+ el.send_keys 'Hello World!'
end
click_button 'Create snippet'
@@ -120,3 +142,21 @@ describe 'User creates snippet', :js do
expect(page).to have_content('Hello World!')
end
end
+
+describe 'User creates snippet', :js do
+ include DropzoneHelper
+
+ let_it_be(:user) { create(:user) }
+
+ context 'when using Monaco' do
+ it_behaves_like "snippet editor" do
+ let(:flag) { true }
+ end
+ end
+
+ context 'when using ACE' do
+ it_behaves_like "snippet editor" do
+ let(:flag) { false }
+ end
+ end
+end
diff --git a/spec/features/snippets/user_edits_snippet_spec.rb b/spec/features/snippets/user_edits_snippet_spec.rb
index 1d26660a4f6..b003e50aab7 100644
--- a/spec/features/snippets/user_edits_snippet_spec.rb
+++ b/spec/features/snippets/user_edits_snippet_spec.rb
@@ -8,7 +8,7 @@ describe 'User edits snippet', :js do
let(:file_name) { 'test.rb' }
let(:content) { 'puts "test"' }
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
let(:snippet) { create(:personal_snippet, :public, file_name: file_name, content: content, author: user) }
before do
@@ -58,4 +58,21 @@ describe 'User edits snippet', :js do
expect(page).to have_no_xpath("//i[@class='fa fa-lock']")
expect(page).to have_xpath("//i[@class='fa fa-globe']")
end
+
+ context 'when the git operation fails' do
+ before do
+ allow_next_instance_of(Snippets::UpdateService) do |instance|
+ allow(instance).to receive(:create_commit).and_raise(StandardError)
+ end
+
+ fill_in 'personal_snippet_title', with: 'New Snippet Title'
+
+ click_button('Save changes')
+ end
+
+ it 'renders edit page and displays the error' do
+ expect(page.find('.flash-container span').text).to eq('Error updating the snippet')
+ expect(page).to have_content('Edit Snippet')
+ end
+ end
end
diff --git a/spec/features/uploads/user_uploads_file_to_note_spec.rb b/spec/features/uploads/user_uploads_file_to_note_spec.rb
index 30b5cf267ae..570ecad41fa 100644
--- a/spec/features/uploads/user_uploads_file_to_note_spec.rb
+++ b/spec/features/uploads/user_uploads_file_to_note_spec.rb
@@ -22,7 +22,7 @@ describe 'User uploads file to note' do
end
end
- context 'uploading is in progress' do
+ context 'uploading is in progress', :capybara_ignore_server_errors do
it 'cancels uploading on clicking to "Cancel" button', :js do
slow_requests do
dropzone_file([Rails.root.join('spec', 'fixtures', 'dk.png')], 0, false)
diff --git a/spec/features/user_sorts_things_spec.rb b/spec/features/user_sorts_things_spec.rb
index 41f8f3761e8..8397854df27 100644
--- a/spec/features/user_sorts_things_spec.rb
+++ b/spec/features/user_sorts_things_spec.rb
@@ -10,10 +10,10 @@ describe "User sorts things" do
include Spec::Support::Helpers::Features::SortingHelpers
include DashboardHelper
- set(:project) { create(:project_empty_repo, :public) }
- set(:current_user) { create(:user) } # Using `current_user` instead of just `user` because of the hardoced call in `assigned_mrs_dashboard_path` which is used below.
- set(:issue) { create(:issue, project: project, author: current_user) }
- set(:merge_request) { create(:merge_request, target_project: project, source_project: project, author: current_user) }
+ let_it_be(:project) { create(:project_empty_repo, :public) }
+ let_it_be(:current_user) { create(:user) } # Using `current_user` instead of just `user` because of the hardcoded call in `assigned_mrs_dashboard_path` which is used below.
+ let_it_be(:issue) { create(:issue, project: project, author: current_user) }
+ let_it_be(:merge_request) { create(:merge_request, target_project: project, source_project: project, author: current_user) }
before do
project.add_developer(current_user)
diff --git a/spec/features/users/show_spec.rb b/spec/features/users/show_spec.rb
index 8c2b555305a..a45389a7ed5 100644
--- a/spec/features/users/show_spec.rb
+++ b/spec/features/users/show_spec.rb
@@ -26,6 +26,34 @@ describe 'User page' do
expect(page).not_to have_content("This user has a private profile")
end
+
+ context 'work information' do
+ subject { visit(user_path(user)) }
+
+ it 'shows job title and organization details' do
+ user.update(organization: 'GitLab - work info test', job_title: 'Frontend Engineer')
+
+ subject
+
+ expect(page).to have_content('Frontend Engineer at GitLab - work info test')
+ end
+
+ it 'shows job title' do
+ user.update(organization: nil, job_title: 'Frontend Engineer - work info test')
+
+ subject
+
+ expect(page).to have_content('Frontend Engineer - work info test')
+ end
+
+ it 'shows organization details' do
+ user.update(organization: 'GitLab - work info test', job_title: '')
+
+ subject
+
+ expect(page).to have_content('GitLab - work info test')
+ end
+ end
end
context 'with private profile' do
diff --git a/spec/finders/award_emojis_finder_spec.rb b/spec/finders/award_emojis_finder_spec.rb
index bdfd2a9a3f4..975722e780b 100644
--- a/spec/finders/award_emojis_finder_spec.rb
+++ b/spec/finders/award_emojis_finder_spec.rb
@@ -20,6 +20,11 @@ describe AwardEmojisFinder do
)
end
+ it 'does not raise an error if `name` is numeric' do
+ subject = described_class.new(issue_1, { name: 100 })
+ expect { subject.execute }.not_to raise_error
+ end
+
it 'raises an error if `awarded_by` is invalid' do
expectation = [ArgumentError, 'Invalid awarded_by param']
diff --git a/spec/finders/ci/jobs_finder_spec.rb b/spec/finders/ci/jobs_finder_spec.rb
new file mode 100644
index 00000000000..7083e8fbf43
--- /dev/null
+++ b/spec/finders/ci/jobs_finder_spec.rb
@@ -0,0 +1,89 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::JobsFinder, '#execute' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:admin) { create(:user, :admin) }
+ let_it_be(:project) { create(:project, :private, public_builds: false) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:job_1) { create(:ci_build) }
+ let_it_be(:job_2) { create(:ci_build, :running) }
+ let_it_be(:job_3) { create(:ci_build, :success, pipeline: pipeline) }
+
+ let(:params) { {} }
+
+ context 'no project' do
+ subject { described_class.new(current_user: admin, params: params).execute }
+
+ it 'returns all jobs' do
+ expect(subject).to match_array([job_1, job_2, job_3])
+ end
+
+ context 'non admin user' do
+ let(:admin) { user }
+
+ it 'returns no jobs' do
+ expect(subject).to be_empty
+ end
+ end
+
+ context 'without user' do
+ let(:admin) { nil }
+
+ it 'returns no jobs' do
+ expect(subject).to be_empty
+ end
+ end
+
+ context 'scope is present' do
+ let(:jobs) { [job_1, job_2, job_3] }
+
+ where(:scope, :index) do
+ [
+ ['pending', 0],
+ ['running', 1],
+ ['finished', 2]
+ ]
+ end
+
+ with_them do
+ let(:params) { { scope: scope } }
+
+ it { expect(subject).to match_array([jobs[index]]) }
+ end
+ end
+ end
+
+ context 'a project is present' do
+ subject { described_class.new(current_user: user, project: project, params: params).execute }
+
+ context 'user has access to the project' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it 'returns jobs for the specified project' do
+ expect(subject).to match_array([job_3])
+ end
+ end
+
+ context 'user has no access to project builds' do
+ before do
+ project.add_guest(user)
+ end
+
+ it 'returns no jobs' do
+ expect(subject).to be_empty
+ end
+ end
+
+ context 'without user' do
+ let(:user) { nil }
+
+ it 'returns no jobs' do
+ expect(subject).to be_empty
+ end
+ end
+ end
+end
diff --git a/spec/finders/ci/pipeline_schedules_finder_spec.rb b/spec/finders/ci/pipeline_schedules_finder_spec.rb
new file mode 100644
index 00000000000..5b5154ce834
--- /dev/null
+++ b/spec/finders/ci/pipeline_schedules_finder_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::PipelineSchedulesFinder do
+ let(:project) { create(:project) }
+
+ let!(:active_schedule) { create(:ci_pipeline_schedule, project: project) }
+ let!(:inactive_schedule) { create(:ci_pipeline_schedule, :inactive, project: project) }
+
+ subject { described_class.new(project).execute(params) }
+
+ describe "#execute" do
+ context 'when the scope is nil' do
+ let(:params) { { scope: nil } }
+
+ it 'selects all pipeline schedules' do
+ expect(subject.count).to be(2)
+ expect(subject).to include(active_schedule, inactive_schedule)
+ end
+ end
+
+ context 'when the scope is active' do
+ let(:params) { { scope: 'active' } }
+
+ it 'selects only active pipelines' do
+ expect(subject.count).to be(1)
+ expect(subject).to include(active_schedule)
+ expect(subject).not_to include(inactive_schedule)
+ end
+ end
+
+ context 'when the scope is inactive' do
+ let(:params) { { scope: 'inactive' } }
+
+ it 'selects only inactive pipelines' do
+ expect(subject.count).to be(1)
+ expect(subject).not_to include(active_schedule)
+ expect(subject).to include(inactive_schedule)
+ end
+ end
+ end
+end
diff --git a/spec/finders/ci/pipelines_finder_spec.rb b/spec/finders/ci/pipelines_finder_spec.rb
new file mode 100644
index 00000000000..6528093731e
--- /dev/null
+++ b/spec/finders/ci/pipelines_finder_spec.rb
@@ -0,0 +1,271 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::PipelinesFinder do
+ let(:project) { create(:project, :public, :repository) }
+ let(:current_user) { nil }
+ let(:params) { {} }
+
+ subject { described_class.new(project, current_user, params).execute }
+
+ describe "#execute" do
+ context 'when params is empty' do
+ let(:params) { {} }
+ let!(:pipelines) { create_list(:ci_pipeline, 2, project: project) }
+
+ it 'returns all pipelines' do
+ is_expected.to match_array(pipelines)
+ end
+ end
+
+ %w[running pending].each do |target|
+ context "when scope is #{target}" do
+ let(:params) { { scope: target } }
+ let!(:pipeline) { create(:ci_pipeline, project: project, status: target) }
+
+ it 'returns matched pipelines' do
+ is_expected.to eq([pipeline])
+ end
+ end
+ end
+
+ context 'when scope is finished' do
+ let(:params) { { scope: 'finished' } }
+ let!(:pipelines) do
+ [create(:ci_pipeline, project: project, status: 'success'),
+ create(:ci_pipeline, project: project, status: 'failed'),
+ create(:ci_pipeline, project: project, status: 'canceled')]
+ end
+
+ it 'returns matched pipelines' do
+ is_expected.to match_array(pipelines)
+ end
+ end
+
+ context 'when scope is branches or tags' do
+ let!(:pipeline_branch) { create(:ci_pipeline, project: project) }
+ let!(:pipeline_tag) { create(:ci_pipeline, project: project, ref: 'v1.0.0', tag: true) }
+
+ context 'when scope is branches' do
+ let(:params) { { scope: 'branches' } }
+
+ it 'returns matched pipelines' do
+ is_expected.to eq([pipeline_branch])
+ end
+ end
+
+ context 'when scope is tags' do
+ let(:params) { { scope: 'tags' } }
+
+ it 'returns matched pipelines' do
+ is_expected.to eq([pipeline_tag])
+ end
+ end
+ end
+
+ context 'when project has child pipelines' do
+ let!(:parent_pipeline) { create(:ci_pipeline, project: project) }
+ let!(:child_pipeline) { create(:ci_pipeline, project: project, source: :parent_pipeline) }
+
+ let!(:pipeline_source) do
+ create(:ci_sources_pipeline, pipeline: child_pipeline, source_pipeline: parent_pipeline)
+ end
+
+ it 'filters out child pipelines and shows only the parents' do
+ is_expected.to eq([parent_pipeline])
+ end
+ end
+
+ HasStatus::AVAILABLE_STATUSES.each do |target|
+ context "when status is #{target}" do
+ let(:params) { { status: target } }
+ let!(:pipeline) { create(:ci_pipeline, project: project, status: target) }
+
+ before do
+ exception_status = HasStatus::AVAILABLE_STATUSES - [target]
+ create(:ci_pipeline, project: project, status: exception_status.first)
+ end
+
+ it 'returns matched pipelines' do
+ is_expected.to eq([pipeline])
+ end
+ end
+ end
+
+ context 'when ref is specified' do
+ let!(:pipeline) { create(:ci_pipeline, project: project) }
+
+ context 'when ref exists' do
+ let(:params) { { ref: 'master' } }
+
+ it 'returns matched pipelines' do
+ is_expected.to eq([pipeline])
+ end
+ end
+
+ context 'when ref does not exist' do
+ let(:params) { { ref: 'invalid-ref' } }
+
+ it 'returns empty' do
+ is_expected.to be_empty
+ end
+ end
+ end
+
+ context 'when name is specified' do
+ let(:user) { create(:user) }
+ let!(:pipeline) { create(:ci_pipeline, project: project, user: user) }
+
+ context 'when name exists' do
+ let(:params) { { name: user.name } }
+
+ it 'returns matched pipelines' do
+ is_expected.to eq([pipeline])
+ end
+ end
+
+ context 'when name does not exist' do
+ let(:params) { { name: 'invalid-name' } }
+
+ it 'returns empty' do
+ is_expected.to be_empty
+ end
+ end
+ end
+
+ context 'when username is specified' do
+ let(:user) { create(:user) }
+ let!(:pipeline) { create(:ci_pipeline, project: project, user: user) }
+
+ context 'when username exists' do
+ let(:params) { { username: user.username } }
+
+ it 'returns matched pipelines' do
+ is_expected.to eq([pipeline])
+ end
+ end
+
+ context 'when username does not exist' do
+ let(:params) { { username: 'invalid-username' } }
+
+ it 'returns empty' do
+ is_expected.to be_empty
+ end
+ end
+ end
+
+ context 'when yaml_errors is specified' do
+ let!(:pipeline1) { create(:ci_pipeline, project: project, yaml_errors: 'Syntax error') }
+ let!(:pipeline2) { create(:ci_pipeline, project: project) }
+
+ context 'when yaml_errors is true' do
+ let(:params) { { yaml_errors: true } }
+
+ it 'returns matched pipelines' do
+ is_expected.to eq([pipeline1])
+ end
+ end
+
+ context 'when yaml_errors is false' do
+ let(:params) { { yaml_errors: false } }
+
+ it 'returns matched pipelines' do
+ is_expected.to eq([pipeline2])
+ end
+ end
+
+ context 'when yaml_errors is invalid' do
+ let(:params) { { yaml_errors: "invalid-yaml_errors" } }
+
+ it 'returns all pipelines' do
+ is_expected.to match_array([pipeline1, pipeline2])
+ end
+ end
+ end
+
+ context 'when updated_at filters are specified' do
+ let(:params) { { updated_before: 1.day.ago, updated_after: 3.days.ago } }
+ let!(:pipeline1) { create(:ci_pipeline, project: project, updated_at: 2.days.ago) }
+ let!(:pipeline2) { create(:ci_pipeline, project: project, updated_at: 4.days.ago) }
+ let!(:pipeline3) { create(:ci_pipeline, project: project, updated_at: 1.hour.ago) }
+
+ it 'returns pipelines with matched updated_at' do
+ is_expected.to match_array([pipeline1])
+ end
+ end
+
+ context 'when sha is specified' do
+ let!(:pipeline) { create(:ci_pipeline, project: project, sha: '97de212e80737a608d939f648d959671fb0a0142') }
+
+ context 'when sha exists' do
+ let(:params) { { sha: '97de212e80737a608d939f648d959671fb0a0142' } }
+
+ it 'returns matched pipelines' do
+ is_expected.to eq([pipeline])
+ end
+ end
+
+ context 'when sha does not exist' do
+ let(:params) { { sha: 'invalid-sha' } }
+
+ it 'returns empty' do
+ is_expected.to be_empty
+ end
+ end
+ end
+
+ context 'when the project has limited access to pipelines' do
+ let(:project) { create(:project, :private, :repository) }
+ let(:current_user) { create(:user) }
+ let!(:pipelines) { create_list(:ci_pipeline, 2, project: project) }
+
+ context 'when the user has access' do
+ before do
+ project.add_developer(current_user)
+ end
+
+ it 'is expected to return pipelines' do
+ is_expected.to contain_exactly(*pipelines)
+ end
+ end
+
+ context 'the user is not allowed to read pipelines' do
+ it 'returns empty' do
+ is_expected.to be_empty
+ end
+ end
+ end
+
+ describe 'ordering' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:params) { { order_by: order_by, sort: sort } }
+
+ let!(:pipeline_1) { create(:ci_pipeline, :scheduled, project: project, iid: 11, ref: 'master', created_at: Time.now, updated_at: Time.now, user: create(:user)) }
+ let!(:pipeline_2) { create(:ci_pipeline, :created, project: project, iid: 12, ref: 'feature', created_at: 1.day.ago, updated_at: 2.hours.ago, user: create(:user)) }
+ let!(:pipeline_3) { create(:ci_pipeline, :success, project: project, iid: 8, ref: 'patch', created_at: 2.days.ago, updated_at: 1.hour.ago, user: create(:user)) }
+
+ where(:order_by, :sort, :ordered_pipelines) do
+ 'id' | 'asc' | [:pipeline_1, :pipeline_2, :pipeline_3]
+ 'id' | 'desc' | [:pipeline_3, :pipeline_2, :pipeline_1]
+ 'ref' | 'asc' | [:pipeline_2, :pipeline_1, :pipeline_3]
+ 'ref' | 'desc' | [:pipeline_3, :pipeline_1, :pipeline_2]
+ 'status' | 'asc' | [:pipeline_2, :pipeline_1, :pipeline_3]
+ 'status' | 'desc' | [:pipeline_3, :pipeline_1, :pipeline_2]
+ 'updated_at' | 'asc' | [:pipeline_2, :pipeline_3, :pipeline_1]
+ 'updated_at' | 'desc' | [:pipeline_1, :pipeline_3, :pipeline_2]
+ 'user_id' | 'asc' | [:pipeline_1, :pipeline_2, :pipeline_3]
+ 'user_id' | 'desc' | [:pipeline_3, :pipeline_2, :pipeline_1]
+ 'invalid' | 'asc' | [:pipeline_1, :pipeline_2, :pipeline_3]
+ 'id' | 'err' | [:pipeline_3, :pipeline_2, :pipeline_1]
+ end
+
+ with_them do
+ it 'returns the pipelines ordered' do
+ expect(subject).to eq(ordered_pipelines.map { |name| public_send(name) })
+ end
+ end
+ end
+ end
+end
diff --git a/spec/finders/ci/pipelines_for_merge_request_finder_spec.rb b/spec/finders/ci/pipelines_for_merge_request_finder_spec.rb
new file mode 100644
index 00000000000..c49ac487519
--- /dev/null
+++ b/spec/finders/ci/pipelines_for_merge_request_finder_spec.rb
@@ -0,0 +1,160 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::PipelinesForMergeRequestFinder do
+ describe '#all' do
+ let(:merge_request) { create(:merge_request) }
+ let(:project) { merge_request.source_project }
+
+ subject { described_class.new(merge_request) }
+
+ shared_examples 'returning pipelines with proper ordering' do
+ let!(:all_pipelines) do
+ merge_request.all_commit_shas.map do |sha|
+ create(:ci_empty_pipeline,
+ project: project, sha: sha, ref: merge_request.source_branch)
+ end
+ end
+
+ it 'returns all pipelines' do
+ expect(subject.all).not_to be_empty
+ expect(subject.all).to eq(all_pipelines.reverse)
+ end
+ end
+
+ context 'with single merge_request_diffs' do
+ it_behaves_like 'returning pipelines with proper ordering'
+ end
+
+ context 'with multiple irrelevant merge_request_diffs' do
+ before do
+ merge_request.update(target_branch: 'v1.0.0')
+ end
+
+ it_behaves_like 'returning pipelines with proper ordering'
+ end
+
+ context 'with unsaved merge request' do
+ let(:merge_request) { build(:merge_request) }
+
+ let!(:pipeline) do
+ create(:ci_empty_pipeline, project: project,
+ sha: merge_request.diff_head_sha, ref: merge_request.source_branch)
+ end
+
+ it 'returns pipelines from diff_head_sha' do
+ expect(subject.all).to contain_exactly(pipeline)
+ end
+ end
+
+ context 'when pipelines exist for the branch and merge request' do
+ let(:source_ref) { 'feature' }
+ let(:target_ref) { 'master' }
+
+ let!(:branch_pipeline) do
+ create(:ci_pipeline, source: :push, project: project,
+ ref: source_ref, sha: shas.second)
+ end
+
+ let!(:tag_pipeline) do
+ create(:ci_pipeline, project: project, ref: source_ref, tag: true)
+ end
+
+ let!(:detached_merge_request_pipeline) do
+ create(:ci_pipeline, source: :merge_request_event, project: project,
+ ref: source_ref, sha: shas.second, merge_request: merge_request)
+ end
+
+ let(:merge_request) do
+ create(:merge_request, source_project: project, source_branch: source_ref,
+ target_project: project, target_branch: target_ref)
+ end
+
+ let(:project) { create(:project, :repository) }
+ let(:shas) { project.repository.commits(source_ref, limit: 2).map(&:id) }
+
+ before do
+ create(:merge_request_diff_commit,
+ merge_request_diff: merge_request.merge_request_diff,
+ sha: shas.second, relative_order: 1)
+ end
+
+ it 'returns merge request pipeline first' do
+ expect(subject.all).to eq([detached_merge_request_pipeline, branch_pipeline])
+ end
+
+ context 'when there are a branch pipeline and a merge request pipeline' do
+ let!(:branch_pipeline_2) do
+ create(:ci_pipeline, source: :push, project: project,
+ ref: source_ref, sha: shas.first)
+ end
+
+ let!(:detached_merge_request_pipeline_2) do
+ create(:ci_pipeline, source: :merge_request_event, project: project,
+ ref: source_ref, sha: shas.first, merge_request: merge_request)
+ end
+
+ it 'returns merge request pipelines first' do
+ expect(subject.all)
+ .to eq([detached_merge_request_pipeline_2,
+ detached_merge_request_pipeline,
+ branch_pipeline_2,
+ branch_pipeline])
+ end
+ end
+
+ context 'when there are multiple merge request pipelines from the same branch' do
+ let!(:branch_pipeline_2) do
+ create(:ci_pipeline, source: :push, project: project,
+ ref: source_ref, sha: shas.first)
+ end
+
+ let!(:detached_merge_request_pipeline_2) do
+ create(:ci_pipeline, source: :merge_request_event, project: project,
+ ref: source_ref, sha: shas.first, merge_request: merge_request_2)
+ end
+
+ let(:merge_request_2) do
+ create(:merge_request, source_project: project, source_branch: source_ref,
+ target_project: project, target_branch: 'stable')
+ end
+
+ before do
+ shas.each.with_index do |sha, index|
+ create(:merge_request_diff_commit,
+ merge_request_diff: merge_request_2.merge_request_diff,
+ sha: sha, relative_order: index)
+ end
+ end
+
+ it 'returns only related merge request pipelines' do
+ expect(subject.all)
+ .to eq([detached_merge_request_pipeline,
+ branch_pipeline_2,
+ branch_pipeline])
+
+ expect(described_class.new(merge_request_2).all)
+ .to eq([detached_merge_request_pipeline_2,
+ branch_pipeline_2,
+ branch_pipeline])
+ end
+ end
+
+ context 'when detached merge request pipeline is run on head ref of the merge request' do
+ let!(:detached_merge_request_pipeline) do
+ create(:ci_pipeline, source: :merge_request_event, project: project,
+ ref: merge_request.ref_path, sha: shas.second, merge_request: merge_request)
+ end
+
+ it 'sets the head ref of the merge request to the pipeline ref' do
+ expect(detached_merge_request_pipeline.ref).to match(%r{refs/merge-requests/\d+/head})
+ end
+
+ it 'includes the detached merge request pipeline even though the ref is custom path' do
+ expect(merge_request.all_pipelines).to include(detached_merge_request_pipeline)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/finders/ci/runner_jobs_finder_spec.rb b/spec/finders/ci/runner_jobs_finder_spec.rb
new file mode 100644
index 00000000000..a3245119291
--- /dev/null
+++ b/spec/finders/ci/runner_jobs_finder_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::RunnerJobsFinder do
+ let(:project) { create(:project) }
+ let(:runner) { create(:ci_runner, :instance) }
+
+ subject { described_class.new(runner, params).execute }
+
+ describe '#execute' do
+ context 'when params is empty' do
+ let(:params) { {} }
+ let!(:job) { create(:ci_build, runner: runner, project: project) }
+ let!(:job1) { create(:ci_build, project: project) }
+
+ it 'returns all jobs assigned to Runner' do
+ is_expected.to match_array(job)
+ is_expected.not_to match_array(job1)
+ end
+ end
+
+ context 'when params contains status' do
+ HasStatus::AVAILABLE_STATUSES.each do |target_status|
+ context "when status is #{target_status}" do
+ let(:params) { { status: target_status } }
+ let!(:job) { create(:ci_build, runner: runner, project: project, status: target_status) }
+
+ before do
+ exception_status = HasStatus::AVAILABLE_STATUSES - [target_status]
+ create(:ci_build, runner: runner, project: project, status: exception_status.first)
+ end
+
+ it 'returns matched job' do
+ is_expected.to eq([job])
+ end
+ end
+ end
+ end
+
+ context 'when order_by and sort are specified' do
+ context 'when order_by id and sort is asc' do
+ let(:params) { { order_by: 'id', sort: 'asc' } }
+ let!(:jobs) { create_list(:ci_build, 2, runner: runner, project: project, user: create(:user)) }
+
+ it 'sorts as id: :asc' do
+ is_expected.to eq(jobs.sort_by(&:id))
+ end
+ end
+ end
+
+ context 'when order_by is specified and sort is not specified' do
+ context 'when order_by id and sort is not specified' do
+ let(:params) { { order_by: 'id' } }
+ let!(:jobs) { create_list(:ci_build, 2, runner: runner, project: project, user: create(:user)) }
+
+ it 'sorts as id: :desc' do
+ is_expected.to eq(jobs.sort_by(&:id).reverse)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/finders/fork_targets_finder_spec.rb b/spec/finders/fork_targets_finder_spec.rb
new file mode 100644
index 00000000000..f8c03cdf9b3
--- /dev/null
+++ b/spec/finders/fork_targets_finder_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ForkTargetsFinder do
+ subject(:finder) { described_class.new(project, user) }
+
+ let(:project) { create(:project, namespace: create(:group)) }
+ let(:user) { create(:user) }
+ let!(:maintained_group) do
+ create(:group).tap { |g| g.add_maintainer(user) }
+ end
+ let!(:owned_group) do
+ create(:group).tap { |g| g.add_owner(user) }
+ end
+ let!(:developer_group) do
+ create(:group).tap { |g| g.add_developer(user) }
+ end
+ let!(:reporter_group) do
+ create(:group).tap { |g| g.add_reporter(user) }
+ end
+ let!(:guest_group) do
+ create(:group).tap { |g| g.add_guest(user) }
+ end
+
+ before do
+ project.namespace.add_owner(user)
+ end
+
+ describe '#execute' do
+ it 'returns all user manageable namespaces' do
+ expect(finder.execute).to match_array([user.namespace, maintained_group, owned_group, project.namespace])
+ end
+ end
+end
diff --git a/spec/finders/jobs_finder_spec.rb b/spec/finders/jobs_finder_spec.rb
deleted file mode 100644
index 01f9ec03c79..00000000000
--- a/spec/finders/jobs_finder_spec.rb
+++ /dev/null
@@ -1,89 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe JobsFinder, '#execute' do
- let_it_be(:user) { create(:user) }
- let_it_be(:admin) { create(:user, :admin) }
- let_it_be(:project) { create(:project, :private, public_builds: false) }
- let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
- let_it_be(:job_1) { create(:ci_build) }
- let_it_be(:job_2) { create(:ci_build, :running) }
- let_it_be(:job_3) { create(:ci_build, :success, pipeline: pipeline) }
-
- let(:params) { {} }
-
- context 'no project' do
- subject { described_class.new(current_user: admin, params: params).execute }
-
- it 'returns all jobs' do
- expect(subject).to match_array([job_1, job_2, job_3])
- end
-
- context 'non admin user' do
- let(:admin) { user }
-
- it 'returns no jobs' do
- expect(subject).to be_empty
- end
- end
-
- context 'without user' do
- let(:admin) { nil }
-
- it 'returns no jobs' do
- expect(subject).to be_empty
- end
- end
-
- context 'scope is present' do
- let(:jobs) { [job_1, job_2, job_3] }
-
- where(:scope, :index) do
- [
- ['pending', 0],
- ['running', 1],
- ['finished', 2]
- ]
- end
-
- with_them do
- let(:params) { { scope: scope } }
-
- it { expect(subject).to match_array([jobs[index]]) }
- end
- end
- end
-
- context 'a project is present' do
- subject { described_class.new(current_user: user, project: project, params: params).execute }
-
- context 'user has access to the project' do
- before do
- project.add_maintainer(user)
- end
-
- it 'returns jobs for the specified project' do
- expect(subject).to match_array([job_3])
- end
- end
-
- context 'user has no access to project builds' do
- before do
- project.add_guest(user)
- end
-
- it 'returns no jobs' do
- expect(subject).to be_empty
- end
- end
-
- context 'without user' do
- let(:user) { nil }
-
- it 'returns no jobs' do
- expect(subject).to be_empty
- end
- end
- end
-end
diff --git a/spec/finders/pipeline_schedules_finder_spec.rb b/spec/finders/pipeline_schedules_finder_spec.rb
deleted file mode 100644
index 8d0bde15e03..00000000000
--- a/spec/finders/pipeline_schedules_finder_spec.rb
+++ /dev/null
@@ -1,43 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe PipelineSchedulesFinder do
- let(:project) { create(:project) }
-
- let!(:active_schedule) { create(:ci_pipeline_schedule, project: project) }
- let!(:inactive_schedule) { create(:ci_pipeline_schedule, :inactive, project: project) }
-
- subject { described_class.new(project).execute(params) }
-
- describe "#execute" do
- context 'when the scope is nil' do
- let(:params) { { scope: nil } }
-
- it 'selects all pipeline schedules' do
- expect(subject.count).to be(2)
- expect(subject).to include(active_schedule, inactive_schedule)
- end
- end
-
- context 'when the scope is active' do
- let(:params) { { scope: 'active' } }
-
- it 'selects only active pipelines' do
- expect(subject.count).to be(1)
- expect(subject).to include(active_schedule)
- expect(subject).not_to include(inactive_schedule)
- end
- end
-
- context 'when the scope is inactve' do
- let(:params) { { scope: 'inactive' } }
-
- it 'selects only inactive pipelines' do
- expect(subject.count).to be(1)
- expect(subject).not_to include(active_schedule)
- expect(subject).to include(inactive_schedule)
- end
- end
- end
-end
diff --git a/spec/finders/pipelines_finder_spec.rb b/spec/finders/pipelines_finder_spec.rb
deleted file mode 100644
index 1dbf9491118..00000000000
--- a/spec/finders/pipelines_finder_spec.rb
+++ /dev/null
@@ -1,271 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe PipelinesFinder do
- let(:project) { create(:project, :public, :repository) }
- let(:current_user) { nil }
- let(:params) { {} }
-
- subject { described_class.new(project, current_user, params).execute }
-
- describe "#execute" do
- context 'when params is empty' do
- let(:params) { {} }
- let!(:pipelines) { create_list(:ci_pipeline, 2, project: project) }
-
- it 'returns all pipelines' do
- is_expected.to match_array(pipelines)
- end
- end
-
- %w[running pending].each do |target|
- context "when scope is #{target}" do
- let(:params) { { scope: target } }
- let!(:pipeline) { create(:ci_pipeline, project: project, status: target) }
-
- it 'returns matched pipelines' do
- is_expected.to eq([pipeline])
- end
- end
- end
-
- context 'when scope is finished' do
- let(:params) { { scope: 'finished' } }
- let!(:pipelines) do
- [create(:ci_pipeline, project: project, status: 'success'),
- create(:ci_pipeline, project: project, status: 'failed'),
- create(:ci_pipeline, project: project, status: 'canceled')]
- end
-
- it 'returns matched pipelines' do
- is_expected.to match_array(pipelines)
- end
- end
-
- context 'when scope is branches or tags' do
- let!(:pipeline_branch) { create(:ci_pipeline, project: project) }
- let!(:pipeline_tag) { create(:ci_pipeline, project: project, ref: 'v1.0.0', tag: true) }
-
- context 'when scope is branches' do
- let(:params) { { scope: 'branches' } }
-
- it 'returns matched pipelines' do
- is_expected.to eq([pipeline_branch])
- end
- end
-
- context 'when scope is tags' do
- let(:params) { { scope: 'tags' } }
-
- it 'returns matched pipelines' do
- is_expected.to eq([pipeline_tag])
- end
- end
- end
-
- context 'when project has child pipelines' do
- let!(:parent_pipeline) { create(:ci_pipeline, project: project) }
- let!(:child_pipeline) { create(:ci_pipeline, project: project, source: :parent_pipeline) }
-
- let!(:pipeline_source) do
- create(:ci_sources_pipeline, pipeline: child_pipeline, source_pipeline: parent_pipeline)
- end
-
- it 'filters out child pipelines and show only the parents' do
- is_expected.to eq([parent_pipeline])
- end
- end
-
- HasStatus::AVAILABLE_STATUSES.each do |target|
- context "when status is #{target}" do
- let(:params) { { status: target } }
- let!(:pipeline) { create(:ci_pipeline, project: project, status: target) }
-
- before do
- exception_status = HasStatus::AVAILABLE_STATUSES - [target]
- create(:ci_pipeline, project: project, status: exception_status.first)
- end
-
- it 'returns matched pipelines' do
- is_expected.to eq([pipeline])
- end
- end
- end
-
- context 'when ref is specified' do
- let!(:pipeline) { create(:ci_pipeline, project: project) }
-
- context 'when ref exists' do
- let(:params) { { ref: 'master' } }
-
- it 'returns matched pipelines' do
- is_expected.to eq([pipeline])
- end
- end
-
- context 'when ref does not exist' do
- let(:params) { { ref: 'invalid-ref' } }
-
- it 'returns empty' do
- is_expected.to be_empty
- end
- end
- end
-
- context 'when name is specified' do
- let(:user) { create(:user) }
- let!(:pipeline) { create(:ci_pipeline, project: project, user: user) }
-
- context 'when name exists' do
- let(:params) { { name: user.name } }
-
- it 'returns matched pipelines' do
- is_expected.to eq([pipeline])
- end
- end
-
- context 'when name does not exist' do
- let(:params) { { name: 'invalid-name' } }
-
- it 'returns empty' do
- is_expected.to be_empty
- end
- end
- end
-
- context 'when username is specified' do
- let(:user) { create(:user) }
- let!(:pipeline) { create(:ci_pipeline, project: project, user: user) }
-
- context 'when username exists' do
- let(:params) { { username: user.username } }
-
- it 'returns matched pipelines' do
- is_expected.to eq([pipeline])
- end
- end
-
- context 'when username does not exist' do
- let(:params) { { username: 'invalid-username' } }
-
- it 'returns empty' do
- is_expected.to be_empty
- end
- end
- end
-
- context 'when yaml_errors is specified' do
- let!(:pipeline1) { create(:ci_pipeline, project: project, yaml_errors: 'Syntax error') }
- let!(:pipeline2) { create(:ci_pipeline, project: project) }
-
- context 'when yaml_errors is true' do
- let(:params) { { yaml_errors: true } }
-
- it 'returns matched pipelines' do
- is_expected.to eq([pipeline1])
- end
- end
-
- context 'when yaml_errors is false' do
- let(:params) { { yaml_errors: false } }
-
- it 'returns matched pipelines' do
- is_expected.to eq([pipeline2])
- end
- end
-
- context 'when yaml_errors is invalid' do
- let(:params) { { yaml_errors: "invalid-yaml_errors" } }
-
- it 'returns all pipelines' do
- is_expected.to match_array([pipeline1, pipeline2])
- end
- end
- end
-
- context 'when updated_at filters are specified' do
- let(:params) { { updated_before: 1.day.ago, updated_after: 3.days.ago } }
- let!(:pipeline1) { create(:ci_pipeline, project: project, updated_at: 2.days.ago) }
- let!(:pipeline2) { create(:ci_pipeline, project: project, updated_at: 4.days.ago) }
- let!(:pipeline3) { create(:ci_pipeline, project: project, updated_at: 1.hour.ago) }
-
- it 'returns deployments with matched updated_at' do
- is_expected.to match_array([pipeline1])
- end
- end
-
- context 'when sha is specified' do
- let!(:pipeline) { create(:ci_pipeline, project: project, sha: '97de212e80737a608d939f648d959671fb0a0142') }
-
- context 'when sha exists' do
- let(:params) { { sha: '97de212e80737a608d939f648d959671fb0a0142' } }
-
- it 'returns matched pipelines' do
- is_expected.to eq([pipeline])
- end
- end
-
- context 'when sha does not exist' do
- let(:params) { { sha: 'invalid-sha' } }
-
- it 'returns empty' do
- is_expected.to be_empty
- end
- end
- end
-
- context 'when the project has limited access to pipelines' do
- let(:project) { create(:project, :private, :repository) }
- let(:current_user) { create(:user) }
- let!(:pipelines) { create_list(:ci_pipeline, 2, project: project) }
-
- context 'when the user has access' do
- before do
- project.add_developer(current_user)
- end
-
- it 'is expected to return pipelines' do
- is_expected.to contain_exactly(*pipelines)
- end
- end
-
- context 'the user is not allowed to read pipelines' do
- it 'returns empty' do
- is_expected.to be_empty
- end
- end
- end
-
- describe 'ordering' do
- using RSpec::Parameterized::TableSyntax
-
- let(:params) { { order_by: order_by, sort: sort } }
-
- let!(:pipeline_1) { create(:ci_pipeline, :scheduled, project: project, iid: 11, ref: 'master', created_at: Time.now, updated_at: Time.now, user: create(:user)) }
- let!(:pipeline_2) { create(:ci_pipeline, :created, project: project, iid: 12, ref: 'feature', created_at: 1.day.ago, updated_at: 2.hours.ago, user: create(:user)) }
- let!(:pipeline_3) { create(:ci_pipeline, :success, project: project, iid: 8, ref: 'patch', created_at: 2.days.ago, updated_at: 1.hour.ago, user: create(:user)) }
-
- where(:order_by, :sort, :ordered_pipelines) do
- 'id' | 'asc' | [:pipeline_1, :pipeline_2, :pipeline_3]
- 'id' | 'desc' | [:pipeline_3, :pipeline_2, :pipeline_1]
- 'ref' | 'asc' | [:pipeline_2, :pipeline_1, :pipeline_3]
- 'ref' | 'desc' | [:pipeline_3, :pipeline_1, :pipeline_2]
- 'status' | 'asc' | [:pipeline_2, :pipeline_1, :pipeline_3]
- 'status' | 'desc' | [:pipeline_3, :pipeline_1, :pipeline_2]
- 'updated_at' | 'asc' | [:pipeline_2, :pipeline_3, :pipeline_1]
- 'updated_at' | 'desc' | [:pipeline_1, :pipeline_3, :pipeline_2]
- 'user_id' | 'asc' | [:pipeline_1, :pipeline_2, :pipeline_3]
- 'user_id' | 'desc' | [:pipeline_3, :pipeline_2, :pipeline_1]
- 'invalid' | 'asc' | [:pipeline_1, :pipeline_2, :pipeline_3]
- 'id' | 'err' | [:pipeline_3, :pipeline_2, :pipeline_1]
- end
-
- with_them do
- it 'returns the pipelines ordered' do
- expect(subject).to eq(ordered_pipelines.map { |name| public_send(name) })
- end
- end
- end
- end
-end
diff --git a/spec/finders/projects/export_job_finder_spec.rb b/spec/finders/projects/export_job_finder_spec.rb
new file mode 100644
index 00000000000..31b68717d13
--- /dev/null
+++ b/spec/finders/projects/export_job_finder_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::ExportJobFinder do
+ let(:project) { create(:project) }
+ let(:project_export_job1) { create(:project_export_job, project: project) }
+ let(:project_export_job2) { create(:project_export_job, project: project) }
+
+ describe '#execute' do
+ subject { described_class.new(project, params).execute }
+
+ context 'when queried for a project' do
+ let(:params) { {} }
+
+ it 'scopes to the project' do
+ expect(subject).to contain_exactly(
+ project_export_job1, project_export_job2
+ )
+ end
+ end
+
+ context 'when queried by job id' do
+ let(:params) { { jid: project_export_job1.jid } }
+
+ it 'filters records' do
+ expect(subject).to contain_exactly(project_export_job1)
+ end
+ end
+
+ context 'when queried by status' do
+ let(:params) { { status: :started } }
+
+ before do
+ project_export_job2.start!
+ end
+
+ it 'filters records' do
+ expect(subject).to contain_exactly(project_export_job2)
+ end
+ end
+
+ context 'when queried by invalid status' do
+ let(:params) { { status: '1234ad' } }
+
+ it 'raises exception' do
+ expect { subject }.to raise_error(described_class::InvalidExportJobStatusError, 'Invalid export job status')
+ end
+ end
+ end
+end
diff --git a/spec/finders/runner_jobs_finder_spec.rb b/spec/finders/runner_jobs_finder_spec.rb
deleted file mode 100644
index c11f9182036..00000000000
--- a/spec/finders/runner_jobs_finder_spec.rb
+++ /dev/null
@@ -1,63 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe RunnerJobsFinder do
- let(:project) { create(:project) }
- let(:runner) { create(:ci_runner, :instance) }
-
- subject { described_class.new(runner, params).execute }
-
- describe '#execute' do
- context 'when params is empty' do
- let(:params) { {} }
- let!(:job) { create(:ci_build, runner: runner, project: project) }
- let!(:job1) { create(:ci_build, project: project) }
-
- it 'returns all jobs assigned to Runner' do
- is_expected.to match_array(job)
- is_expected.not_to match_array(job1)
- end
- end
-
- context 'when params contains status' do
- HasStatus::AVAILABLE_STATUSES.each do |target_status|
- context "when status is #{target_status}" do
- let(:params) { { status: target_status } }
- let!(:job) { create(:ci_build, runner: runner, project: project, status: target_status) }
-
- before do
- exception_status = HasStatus::AVAILABLE_STATUSES - [target_status]
- create(:ci_build, runner: runner, project: project, status: exception_status.first)
- end
-
- it 'returns matched job' do
- is_expected.to eq([job])
- end
- end
- end
- end
-
- context 'when order_by and sort are specified' do
- context 'when order_by id and sort is asc' do
- let(:params) { { order_by: 'id', sort: 'asc' } }
- let!(:jobs) { create_list(:ci_build, 2, runner: runner, project: project, user: create(:user)) }
-
- it 'sorts as id: :asc' do
- is_expected.to eq(jobs.sort_by(&:id))
- end
- end
- end
-
- context 'when order_by is specified and sort is not specified' do
- context 'when order_by id and sort is not specified' do
- let(:params) { { order_by: 'id' } }
- let!(:jobs) { create_list(:ci_build, 2, runner: runner, project: project, user: create(:user)) }
-
- it 'sorts as id: :desc' do
- is_expected.to eq(jobs.sort_by(&:id).reverse)
- end
- end
- end
- end
-end
diff --git a/spec/finders/serverless_domain_finder_spec.rb b/spec/finders/serverless_domain_finder_spec.rb
new file mode 100644
index 00000000000..c41f09535d3
--- /dev/null
+++ b/spec/finders/serverless_domain_finder_spec.rb
@@ -0,0 +1,103 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ServerlessDomainFinder do
+ let(:function_name) { 'test-function' }
+ let(:pages_domain_name) { 'serverless.gitlab.io' }
+ let(:valid_cluster_uuid) { 'aba1cdef123456f278' }
+ let(:invalid_cluster_uuid) { 'aba1cdef123456f178' }
+ let!(:environment) { create(:environment, name: 'test') }
+
+ let(:pages_domain) do
+ create(
+ :pages_domain,
+ :instance_serverless,
+ domain: pages_domain_name
+ )
+ end
+
+ let(:knative_with_ingress) do
+ create(
+ :clusters_applications_knative,
+ external_ip: '10.0.0.1'
+ )
+ end
+
+ let!(:serverless_domain_cluster) do
+ create(
+ :serverless_domain_cluster,
+ uuid: 'abcdef12345678',
+ pages_domain: pages_domain,
+ knative: knative_with_ingress
+ )
+ end
+
+ let(:valid_uri) { "https://#{function_name}-#{valid_cluster_uuid}#{"%x" % environment.id}-#{environment.slug}.#{pages_domain_name}" }
+ let(:valid_fqdn) { "#{function_name}-#{valid_cluster_uuid}#{"%x" % environment.id}-#{environment.slug}.#{pages_domain_name}" }
+ let(:invalid_uri) { "https://#{function_name}-#{invalid_cluster_uuid}#{"%x" % environment.id}-#{environment.slug}.#{pages_domain_name}" }
+
+ let(:valid_finder) { described_class.new(valid_uri) }
+ let(:invalid_finder) { described_class.new(invalid_uri) }
+
+ describe '#serverless?' do
+ context 'with a valid URI' do
+ subject { valid_finder.serverless? }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'with an invalid URI' do
+ subject { invalid_finder.serverless? }
+
+ it { is_expected.to be_falsy }
+ end
+ end
+
+ describe '#serverless_domain_cluster_uuid' do
+ context 'with a valid URI' do
+ subject { valid_finder.serverless_domain_cluster_uuid }
+
+ it { is_expected.to eq serverless_domain_cluster.uuid }
+ end
+
+ context 'with an invalid URI' do
+ subject { invalid_finder.serverless_domain_cluster_uuid }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '#execute' do
+ context 'with a valid URI' do
+ let(:serverless_domain) do
+ create(
+ :serverless_domain,
+ function_name: function_name,
+ serverless_domain_cluster: serverless_domain_cluster,
+ environment: environment
+ )
+ end
+
+ subject { valid_finder.execute }
+
+ it 'has the correct function_name' do
+ expect(subject.function_name).to eq function_name
+ end
+
+ it 'has the correct serverless_domain_cluster' do
+ expect(subject.serverless_domain_cluster).to eq serverless_domain_cluster
+ end
+
+ it 'has the correct environment' do
+ expect(subject.environment).to eq environment
+ end
+ end
+
+ context 'with an invalid URI' do
+ subject { invalid_finder.execute }
+
+ it { is_expected.to be_nil }
+ end
+ end
+end
diff --git a/spec/finders/snippets_finder_spec.rb b/spec/finders/snippets_finder_spec.rb
index 8f83cb77709..69e03c4c473 100644
--- a/spec/finders/snippets_finder_spec.rb
+++ b/spec/finders/snippets_finder_spec.rb
@@ -284,6 +284,17 @@ describe SnippetsFinder do
expect(described_class.new(user).execute).to contain_exactly(private_personal_snippet, internal_personal_snippet, public_personal_snippet)
end
end
+
+ context 'when project snippets are disabled' do
+ it 'returns quickly' do
+ disabled_snippets_project = create(:project, :snippets_disabled)
+ finder = described_class.new(user, project: disabled_snippets_project.id)
+
+ expect(finder).not_to receive(:init_collection)
+ expect(Snippet).to receive(:none).and_call_original
+ expect(finder.execute).to be_empty
+ end
+ end
end
it_behaves_like 'snippet visibility'
diff --git a/spec/fixtures/api/schemas/cluster_status.json b/spec/fixtures/api/schemas/cluster_status.json
index 29c56b5c820..6017ca9e2d5 100644
--- a/spec/fixtures/api/schemas/cluster_status.json
+++ b/spec/fixtures/api/schemas/cluster_status.json
@@ -39,9 +39,15 @@
"stack": { "type": ["string", "null"] },
"modsecurity_enabled": { "type": ["boolean", "null"] },
"update_available": { "type": ["boolean", "null"] },
- "can_uninstall": { "type": "boolean" }
+ "can_uninstall": { "type": "boolean" },
+ "available_domains": {
+ "type": "array",
+ "items": { "$ref": "#/definitions/domain" }
+ },
+ "pages_domain": { "type": [ { "$ref": "#/definitions/domain" }, "null"] }
},
"required" : [ "name", "status" ]
- }
+ },
+ "domain": { "id": "integer", "domain": "string" }
}
}
diff --git a/spec/fixtures/api/schemas/entities/issue_board.json b/spec/fixtures/api/schemas/entities/issue_board.json
index 09f66813c95..d7e3c45b13b 100644
--- a/spec/fixtures/api/schemas/entities/issue_board.json
+++ b/spec/fixtures/api/schemas/entities/issue_board.json
@@ -5,6 +5,7 @@
"iid": { "type": "integer" },
"title": { "type": "string" },
"confidential": { "type": "boolean" },
+ "closed": { "type": "boolean" },
"due_date": { "type": "date" },
"project_id": { "type": "integer" },
"relative_position": { "type": ["integer", "null"] },
diff --git a/spec/fixtures/api/schemas/entities/user.json b/spec/fixtures/api/schemas/entities/user.json
index 82d80b75cef..1e0c8885609 100644
--- a/spec/fixtures/api/schemas/entities/user.json
+++ b/spec/fixtures/api/schemas/entities/user.json
@@ -17,7 +17,8 @@
"path": { "type": "string" },
"name": { "type": "string" },
"username": { "type": "string" },
- "status_tooltip_html": { "$ref": "../types/nullable_string.json" }
+ "status_tooltip_html": { "$ref": "../types/nullable_string.json" },
+ "is_gitlab_employee": { "type": "boolean" }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/api/schemas/environment.json b/spec/fixtures/api/schemas/environment.json
index 321c495a575..84217a2a01c 100644
--- a/spec/fixtures/api/schemas/environment.json
+++ b/spec/fixtures/api/schemas/environment.json
@@ -26,7 +26,9 @@
"stop_path": { "type": "string" },
"cancel_auto_stop_path": { "type": "string" },
"folder_path": { "type": "string" },
- "project_path": { "type": "string" },
+ "logs_path": { "type": "string" },
+ "logs_api_path": { "type": "string" },
+ "enable_advanced_logs_querying": { "type": "boolean" },
"created_at": { "type": "string", "format": "date-time" },
"updated_at": { "type": "string", "format": "date-time" },
"auto_stop_at": { "type": "string", "format": "date-time" },
diff --git a/spec/fixtures/api/schemas/internal/serverless/lookup_path.json b/spec/fixtures/api/schemas/internal/serverless/lookup_path.json
new file mode 100644
index 00000000000..c20ea926587
--- /dev/null
+++ b/spec/fixtures/api/schemas/internal/serverless/lookup_path.json
@@ -0,0 +1,28 @@
+{
+ "type": "object",
+ "required": [
+ "source"
+ ],
+ "properties": {
+ "source": { "type": "object",
+ "required": ["type", "service", "cluster"],
+ "properties" : {
+ "type": { "type": "string", "enum": ["serverless"] },
+ "service": { "type": "string" },
+ "cluster": { "type": "object",
+ "required": ["hostname", "address", "port", "cert", "key"],
+ "properties": {
+ "hostname": { "type": "string" },
+ "address": { "type": "string" },
+ "port": { "type": "integer" },
+ "cert": { "type": "string" },
+ "key": { "type": "string" }
+ },
+ "additionalProperties": false
+ }
+ },
+ "additionalProperties": false
+ }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/internal/serverless/virtual_domain.json b/spec/fixtures/api/schemas/internal/serverless/virtual_domain.json
new file mode 100644
index 00000000000..50e899ef2f8
--- /dev/null
+++ b/spec/fixtures/api/schemas/internal/serverless/virtual_domain.json
@@ -0,0 +1,14 @@
+{
+ "type": "object",
+ "required": [
+ "lookup_paths",
+ "certificate",
+ "key"
+ ],
+ "properties": {
+ "certificate": { "type": ["string", "null"] },
+ "key": { "type": ["string", "null"] },
+ "lookup_paths": { "type": "array", "items": { "$ref": "lookup_path.json" } }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/commit/basic.json b/spec/fixtures/api/schemas/public_api/v4/commit/basic.json
index 9d99628a286..da99e99c692 100644
--- a/spec/fixtures/api/schemas/public_api/v4/commit/basic.json
+++ b/spec/fixtures/api/schemas/public_api/v4/commit/basic.json
@@ -12,7 +12,8 @@
"authored_date",
"committer_name",
"committer_email",
- "committed_date"
+ "committed_date",
+ "web_url"
],
"properties" : {
"id": { "type": ["string", "null"] },
@@ -32,6 +33,7 @@
"authored_date": { "type": "date" },
"committer_name": { "type": "string" },
"committer_email": { "type": "string" },
- "committed_date": { "type": "date" }
+ "committed_date": { "type": "date" },
+ "web_url": { "type": "string" }
}
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/deploy_token.json b/spec/fixtures/api/schemas/public_api/v4/deploy_token.json
new file mode 100644
index 00000000000..7cb9f136b0d
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/deploy_token.json
@@ -0,0 +1,33 @@
+{
+ "type": "object",
+ "required": [
+ "id",
+ "name",
+ "username",
+ "expires_at",
+ "scopes"
+ ],
+ "properties": {
+ "id": {
+ "type": "integer"
+ },
+ "name": {
+ "type": "string"
+ },
+ "username": {
+ "type": "string"
+ },
+ "expires_at": {
+ "type": "date"
+ },
+ "scopes": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "token": {
+ "type": "string"
+ }
+ }
+} \ No newline at end of file
diff --git a/spec/fixtures/api/schemas/public_api/v4/deploy_tokens.json b/spec/fixtures/api/schemas/public_api/v4/deploy_tokens.json
new file mode 100644
index 00000000000..71c30cb2a73
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/deploy_tokens.json
@@ -0,0 +1,6 @@
+{
+ "type": "array",
+ "items": {
+ "$ref": "deploy_token.json"
+ }
+} \ No newline at end of file
diff --git a/spec/fixtures/api/schemas/public_api/v4/merge_request_simple.json b/spec/fixtures/api/schemas/public_api/v4/merge_request_simple.json
new file mode 100644
index 00000000000..45507e3e400
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/merge_request_simple.json
@@ -0,0 +1,26 @@
+{
+ "type": "object",
+ "properties" : {
+ "properties" : {
+ "id": { "type": "integer" },
+ "iid": { "type": "integer" },
+ "project_id": { "type": "integer" },
+ "title": { "type": "string" },
+ "description": { "type": ["string", "null"] },
+ "state": { "type": "string" },
+ "created_at": { "type": "date" },
+ "updated_at": { "type": "date" },
+ "web_url": { "type": "uri" }
+ },
+ "required": [
+ "id", "iid", "project_id", "title", "description",
+ "state", "created_at", "updated_at", "web_url"
+ ],
+ "head_pipeline": {
+ "oneOf": [
+ { "type": "null" },
+ { "$ref": "pipeline/detail.json" }
+ ]
+ }
+ }
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/milestone_with_stats.json b/spec/fixtures/api/schemas/public_api/v4/milestone_with_stats.json
new file mode 100644
index 00000000000..e2475545ee9
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/milestone_with_stats.json
@@ -0,0 +1,30 @@
+{
+ "type": "object",
+ "properties" : {
+ "id": { "type": "integer" },
+ "iid": { "type": "integer" },
+ "project_id": { "type": ["integer", "null"] },
+ "group_id": { "type": ["integer", "null"] },
+ "title": { "type": "string" },
+ "description": { "type": ["string", "null"] },
+ "state": { "type": "string" },
+ "created_at": { "type": "date" },
+ "updated_at": { "type": "date" },
+ "start_date": { "type": "date" },
+ "due_date": { "type": "date" },
+ "web_url": { "type": "string" },
+ "issue_stats": {
+ "required": ["total", "closed"],
+ "properties": {
+ "total": { "type": "integer" },
+ "closed": { "type": "integer" }
+ },
+ "additionalProperties": false
+ }
+ },
+ "required": [
+ "id", "iid", "title", "description", "state",
+ "state", "created_at", "updated_at", "start_date", "due_date", "issue_stats"
+ ],
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/project/export_status.json b/spec/fixtures/api/schemas/public_api/v4/project/export_status.json
index 81c8815caf6..fd35ba34b49 100644
--- a/spec/fixtures/api/schemas/public_api/v4/project/export_status.json
+++ b/spec/fixtures/api/schemas/public_api/v4/project/export_status.json
@@ -13,9 +13,10 @@
"type": "string",
"enum": [
"none",
+ "queued",
"started",
"finished",
- "after_export_action"
+ "regeneration_in_progress"
]
}
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/release.json b/spec/fixtures/api/schemas/public_api/v4/release.json
index 46703c69dc0..a239be09919 100644
--- a/spec/fixtures/api/schemas/public_api/v4/release.json
+++ b/spec/fixtures/api/schemas/public_api/v4/release.json
@@ -17,7 +17,7 @@
},
"milestones": {
"type": "array",
- "items": { "$ref": "milestone.json" }
+ "items": { "$ref": "milestone_with_stats.json" }
},
"commit_path": { "type": "string" },
"tag_path": { "type": "string" },
diff --git a/spec/fixtures/api/schemas/public_api/v4/release/release_for_guest.json b/spec/fixtures/api/schemas/public_api/v4/release/release_for_guest.json
index bce74892059..1a1e92ac778 100644
--- a/spec/fixtures/api/schemas/public_api/v4/release/release_for_guest.json
+++ b/spec/fixtures/api/schemas/public_api/v4/release/release_for_guest.json
@@ -10,7 +10,7 @@
"upcoming_release": { "type": "boolean" },
"milestones": {
"type": "array",
- "items": { "$ref": "../milestone.json" }
+ "items": { "$ref": "../milestone_with_stats.json" }
},
"commit_path": { "type": "string" },
"tag_path": { "type": "string" },
diff --git a/spec/fixtures/api/schemas/release/link.json b/spec/fixtures/api/schemas/release/link.json
index 97347cb91cc..bf175be2bc0 100644
--- a/spec/fixtures/api/schemas/release/link.json
+++ b/spec/fixtures/api/schemas/release/link.json
@@ -4,7 +4,9 @@
"properties": {
"id": { "type": "integer" },
"name": { "type": "string" },
+ "filepath": { "type": "string" },
"url": { "type": "string" },
+ "direct_asset_url": { "type": "string" },
"external": { "type": "boolean" }
},
"additionalProperties": false
diff --git a/spec/fixtures/api/schemas/remote_mirror.json b/spec/fixtures/api/schemas/remote_mirror.json
index 416b0f080d9..87bde189db5 100644
--- a/spec/fixtures/api/schemas/remote_mirror.json
+++ b/spec/fixtures/api/schemas/remote_mirror.json
@@ -10,7 +10,7 @@
"last_successful_update_at",
"last_error",
"only_protected_branches"
- ],
+ ],
"properties": {
"id": { "type": "integer" },
"enabled": { "type": "boolean" },
@@ -20,7 +20,8 @@
"last_update_started_at": { "type": ["string", "null"] },
"last_successful_update_at": { "type": ["string", "null"] },
"last_error": { "type": ["string", "null"] },
- "only_protected_branches": { "type": "boolean" }
+ "only_protected_branches": { "type": "boolean" },
+ "keep_divergent_refs": { "type": ["boolean", "null"] }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/build.env.gz b/spec/fixtures/build.env.gz
new file mode 100644
index 00000000000..39ad1e17ffe
--- /dev/null
+++ b/spec/fixtures/build.env.gz
Binary files differ
diff --git a/spec/fixtures/ci_build_artifacts.zip b/spec/fixtures/ci_build_artifacts.zip
index dae976d918e..91ec9f7dcd0 100644
--- a/spec/fixtures/ci_build_artifacts.zip
+++ b/spec/fixtures/ci_build_artifacts.zip
Binary files differ
diff --git a/spec/fixtures/cobertura/coverage.xml b/spec/fixtures/cobertura/coverage.xml
new file mode 100644
index 00000000000..01e8085b8d8
--- /dev/null
+++ b/spec/fixtures/cobertura/coverage.xml
@@ -0,0 +1,43 @@
+<?xml version='1.0'?>
+<!DOCTYPE coverage SYSTEM "http://cobertura.sourceforge.net/xml/coverage-04.dtd">
+<!-- cobertura example file - generated by simplecov-cobertura - subset of gitlab-org/gitlab - manually modified -->
+<!-- Generated by simplecov-cobertura version 1.3.1 (https://github.com/dashingrocket/simplecov-cobertura) -->
+<coverage line-rate="0.5" branch-rate="0" lines-covered="73865" lines-valid="147397" branches-covered="0" branches-valid="0" complexity="0" version="0" timestamp="1577128350">
+ <sources>
+ <source>/tmp/projects/gitlab-ce/gitlab</source>
+ </sources>
+ <packages>
+ <package name="Controllers" line-rate="0.43" branch-rate="0" complexity="0">
+ <classes>
+ <class name="abuse_reports_controller" filename="app/controllers/abuse_reports_controller.rb" line-rate="0.3" branch-rate="0" complexity="0">
+ <methods/>
+ <lines>
+ <line number="3" branch="false" hits="1"/>
+ <line number="4" branch="false" hits="1"/>
+ <line number="6" branch="false" hits="1"/>
+ <line number="7" branch="false" hits="0"/>
+ <line number="8" branch="false" hits="0"/>
+ <line number="9" branch="false" hits="0"/>
+ <line number="12" branch="false" hits="1"/>
+ <line number="13" branch="false" hits="0"/>
+ <line number="14" branch="false" hits="0"/>
+ <line number="16" branch="false" hits="0"/>
+ <line number="17" branch="false" hits="0"/>
+ <line number="19" branch="false" hits="0"/>
+ <line number="20" branch="false" hits="0"/>
+ <line number="22" branch="false" hits="0"/>
+ <line number="26" branch="false" hits="1"/>
+ <line number="28" branch="false" hits="1"/>
+ <line number="29" branch="false" hits="0"/>
+ <line number="36" branch="false" hits="1"/>
+ <line number="37" branch="false" hits="0"/>
+ <line number="39" branch="false" hits="0"/>
+ <line number="40" branch="false" hits="0"/>
+ <line number="41" branch="false" hits="0"/>
+ <line number="42" branch="false" hits="0"/>
+ </lines>
+ </class>
+ </classes>
+ </package>
+ </packages>
+</coverage> \ No newline at end of file
diff --git a/spec/fixtures/cobertura/coverage.xml.gz b/spec/fixtures/cobertura/coverage.xml.gz
new file mode 100644
index 00000000000..1a5a5f02ced
--- /dev/null
+++ b/spec/fixtures/cobertura/coverage.xml.gz
Binary files differ
diff --git a/spec/fixtures/cobertura/coverage_gocov_xml.xml b/spec/fixtures/cobertura/coverage_gocov_xml.xml
new file mode 100644
index 00000000000..c4da14efb40
--- /dev/null
+++ b/spec/fixtures/cobertura/coverage_gocov_xml.xml
@@ -0,0 +1,216 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE coverage SYSTEM "http://cobertura.sourceforge.net/xml/coverage-04.dtd">
+<!-- cobertura example file - generated by gocov-xml - subset of gitlab-org/gitaly -->
+<coverage line-rate="0.7966102" branch-rate="0" lines-covered="47" lines-valid="59" branches-covered="0" branches-valid="0" complexity="0" version="" timestamp="1577127162320">
+ <packages>
+ <package name="gitlab.com/gitlab-org/gitaly/auth" line-rate="0.7966102" branch-rate="0" complexity="0" line-count="59" line-hits="47">
+ <classes>
+ <class name="-" filename="auth/rpccredentials.go" line-rate="0.2" branch-rate="0" complexity="0" line-count="5" line-hits="1">
+ <methods>
+ <method name="RPCCredentials" signature="" line-rate="1" branch-rate="0" complexity="0" line-count="0" line-hits="0">
+ <lines>
+ <line number="17" hits="1"></line>
+ </lines>
+ </method>
+ <method name="RPCCredentialsV2" signature="" line-rate="0" branch-rate="0" complexity="0" line-count="0" line-hits="0">
+ <lines>
+ <line number="34" hits="0"></line>
+ </lines>
+ </method>
+ <method name="hmacToken" signature="" line-rate="0" branch-rate="0" complexity="0" line-count="0" line-hits="0">
+ <lines>
+ <line number="52" hits="0"></line>
+ <line number="53" hits="0"></line>
+ <line number="55" hits="0"></line>
+ </lines>
+ </method>
+ </methods>
+ <lines>
+ <line number="17" hits="1"></line>
+ <line number="34" hits="0"></line>
+ <line number="52" hits="0"></line>
+ <line number="53" hits="0"></line>
+ <line number="55" hits="0"></line>
+ </lines>
+ </class>
+ <class name="rpcCredentials" filename="auth/rpccredentials.go" line-rate="0.5" branch-rate="0" complexity="0" line-count="2" line-hits="1">
+ <methods>
+ <method name="RequireTransportSecurity" signature="" line-rate="0" branch-rate="0" complexity="0" line-count="0" line-hits="0">
+ <lines>
+ <line number="24" hits="0"></line>
+ </lines>
+ </method>
+ <method name="GetRequestMetadata" signature="" line-rate="1" branch-rate="0" complexity="0" line-count="0" line-hits="0">
+ <lines>
+ <line number="27" hits="1"></line>
+ </lines>
+ </method>
+ </methods>
+ <lines>
+ <line number="24" hits="0"></line>
+ <line number="27" hits="1"></line>
+ </lines>
+ </class>
+ <class name="rpcCredentialsV2" filename="auth/rpccredentials.go" line-rate="0" branch-rate="0" complexity="0" line-count="3" line-hits="0">
+ <methods>
+ <method name="RequireTransportSecurity" signature="" line-rate="0" branch-rate="0" complexity="0" line-count="0" line-hits="0">
+ <lines>
+ <line number="41" hits="0"></line>
+ </lines>
+ </method>
+ <method name="GetRequestMetadata" signature="" line-rate="0" branch-rate="0" complexity="0" line-count="0" line-hits="0">
+ <lines>
+ <line number="44" hits="0"></line>
+ </lines>
+ </method>
+ <method name="hmacToken" signature="" line-rate="0" branch-rate="0" complexity="0" line-count="0" line-hits="0">
+ <lines>
+ <line number="48" hits="0"></line>
+ </lines>
+ </method>
+ </methods>
+ <lines>
+ <line number="41" hits="0"></line>
+ <line number="44" hits="0"></line>
+ <line number="48" hits="0"></line>
+ </lines>
+ </class>
+ <class name="-" filename="auth/token.go" line-rate="0.9183673" branch-rate="0" complexity="0" line-count="49" line-hits="45">
+ <methods>
+ <method name="init" signature="" line-rate="1" branch-rate="0" complexity="0" line-count="0" line-hits="0">
+ <lines>
+ <line number="38" hits="1"></line>
+ </lines>
+ </method>
+ <method name="CheckToken" signature="" line-rate="0.9285714" branch-rate="0" complexity="0" line-count="0" line-hits="0">
+ <lines>
+ <line number="52" hits="1"></line>
+ <line number="53" hits="0"></line>
+ <line number="56" hits="1"></line>
+ <line number="57" hits="1"></line>
+ <line number="58" hits="1"></line>
+ <line number="61" hits="1"></line>
+ <line number="63" hits="1"></line>
+ <line number="64" hits="1"></line>
+ <line number="65" hits="1"></line>
+ <line number="68" hits="1"></line>
+ <line number="69" hits="1"></line>
+ <line number="72" hits="1"></line>
+ <line number="73" hits="1"></line>
+ <line number="77" hits="1"></line>
+ </lines>
+ </method>
+ <method name="tokensEqual" signature="" line-rate="1" branch-rate="0" complexity="0" line-count="0" line-hits="0">
+ <lines>
+ <line number="81" hits="1"></line>
+ </lines>
+ </method>
+ <method name="ExtractAuthInfo" signature="" line-rate="0.90909094" branch-rate="0" complexity="0" line-count="0" line-hits="0">
+ <lines>
+ <line number="86" hits="1"></line>
+ <line number="88" hits="1"></line>
+ <line number="89" hits="1"></line>
+ <line number="92" hits="1"></line>
+ <line number="96" hits="1"></line>
+ <line number="97" hits="1"></line>
+ <line number="100" hits="1"></line>
+ <line number="101" hits="1"></line>
+ <line number="102" hits="1"></line>
+ <line number="103" hits="0"></line>
+ <line number="106" hits="1"></line>
+ </lines>
+ </method>
+ <method name="countV2Error" signature="" line-rate="1" branch-rate="0" complexity="0" line-count="0" line-hits="0">
+ <lines>
+ <line number="109" hits="1"></line>
+ </lines>
+ </method>
+ <method name="v2HmacInfoValid" signature="" line-rate="0.8888889" branch-rate="0" complexity="0" line-count="0" line-hits="0">
+ <lines>
+ <line number="112" hits="1"></line>
+ <line number="113" hits="1"></line>
+ <line number="114" hits="1"></line>
+ <line number="115" hits="1"></line>
+ <line number="118" hits="1"></line>
+ <line number="119" hits="1"></line>
+ <line number="120" hits="0"></line>
+ <line number="121" hits="0"></line>
+ <line number="124" hits="1"></line>
+ <line number="125" hits="1"></line>
+ <line number="126" hits="1"></line>
+ <line number="128" hits="1"></line>
+ <line number="129" hits="1"></line>
+ <line number="130" hits="1"></line>
+ <line number="133" hits="1"></line>
+ <line number="134" hits="1"></line>
+ <line number="135" hits="1"></line>
+ <line number="138" hits="1"></line>
+ </lines>
+ </method>
+ <method name="hmacSign" signature="" line-rate="1" branch-rate="0" complexity="0" line-count="0" line-hits="0">
+ <lines>
+ <line number="142" hits="1"></line>
+ <line number="143" hits="1"></line>
+ <line number="145" hits="1"></line>
+ </lines>
+ </method>
+ </methods>
+ <lines>
+ <line number="38" hits="1"></line>
+ <line number="52" hits="1"></line>
+ <line number="53" hits="0"></line>
+ <line number="56" hits="1"></line>
+ <line number="57" hits="1"></line>
+ <line number="58" hits="1"></line>
+ <line number="61" hits="1"></line>
+ <line number="63" hits="1"></line>
+ <line number="64" hits="1"></line>
+ <line number="65" hits="1"></line>
+ <line number="68" hits="1"></line>
+ <line number="69" hits="1"></line>
+ <line number="72" hits="1"></line>
+ <line number="73" hits="1"></line>
+ <line number="77" hits="1"></line>
+ <line number="81" hits="1"></line>
+ <line number="86" hits="1"></line>
+ <line number="88" hits="1"></line>
+ <line number="89" hits="1"></line>
+ <line number="92" hits="1"></line>
+ <line number="96" hits="1"></line>
+ <line number="97" hits="1"></line>
+ <line number="100" hits="1"></line>
+ <line number="101" hits="1"></line>
+ <line number="102" hits="1"></line>
+ <line number="103" hits="0"></line>
+ <line number="106" hits="1"></line>
+ <line number="109" hits="1"></line>
+ <line number="112" hits="1"></line>
+ <line number="113" hits="1"></line>
+ <line number="114" hits="1"></line>
+ <line number="115" hits="1"></line>
+ <line number="118" hits="1"></line>
+ <line number="119" hits="1"></line>
+ <line number="120" hits="0"></line>
+ <line number="121" hits="0"></line>
+ <line number="124" hits="1"></line>
+ <line number="125" hits="1"></line>
+ <line number="126" hits="1"></line>
+ <line number="128" hits="1"></line>
+ <line number="129" hits="1"></line>
+ <line number="130" hits="1"></line>
+ <line number="133" hits="1"></line>
+ <line number="134" hits="1"></line>
+ <line number="135" hits="1"></line>
+ <line number="138" hits="1"></line>
+ <line number="142" hits="1"></line>
+ <line number="143" hits="1"></line>
+ <line number="145" hits="1"></line>
+ </lines>
+ </class>
+ </classes>
+ </package>
+ </packages>
+ <sources>
+ <source>/tmp/projects/gitlab-ce/gitaly/src/gitlab.com/gitlab-org/gitaly</source>
+ </sources>
+</coverage>
diff --git a/spec/fixtures/cobertura/coverage_gocov_xml.xml.gz b/spec/fixtures/cobertura/coverage_gocov_xml.xml.gz
new file mode 100644
index 00000000000..e51dc50c2ed
--- /dev/null
+++ b/spec/fixtures/cobertura/coverage_gocov_xml.xml.gz
Binary files differ
diff --git a/spec/fixtures/cobertura/coverage_with_corrupted_data.xml b/spec/fixtures/cobertura/coverage_with_corrupted_data.xml
new file mode 100644
index 00000000000..ab0973eba28
--- /dev/null
+++ b/spec/fixtures/cobertura/coverage_with_corrupted_data.xml
@@ -0,0 +1,50 @@
+<?xml version="1.0" ?>
+<!DOCTYPE coverage SYSTEM "http://cobertura.sourceforge.net/xml/coverage-04.dtd">
+<!-- cobertura example file - generated by NYC - manually modified -->
+<coverage lines-valid="22" lines-covered="16" line-rate="0.7273000000000001" branches-valid="4" branches-covered="2" branch-rate="0.5" timestamp="1576756029756" complexity="0" version="0.1">
+ <sources>
+ <source>/tmp/projects/coverage-test</source>
+ </sources>
+ <packages>
+ <package name="coverage-test" line-rate="0.6842" branch-rate="0.5">
+ <classes>
+ <class name="index.js" filename="index.js" line-rate="0.6842" branch-rate="0.5">
+ <methods>
+ <method name="(anonymous_3)" hits="0" signature="()V">
+ <lines>
+ <line number="21" hits="0"/>
+ </lines>
+ </method>
+ </methods>
+ <lines>
+ <line number="21" hits="1" branch="false"/>
+ <line number="22" hits="0" branch="false"/>
+ <line number="25" hits="1" branch="true" condition-coverage="50% (1/2)"/>
+ <line number="26" hits="0" branch="false"/>
+ <line number="27" hits="0" branch="false"/>
+ <line number="28" hits="0" branch="false"/>
+ <line number="29" hits="0" branch="false"/>
+ </lines>
+ </class>
+ </classes>
+ </package>
+ <package name="coverage-test.lib.math" line-rate="1" branch-rate="1">
+ <classes>
+ <class name="add.js" filename="lib/math/add.js" line-rate="1" branch-rate="1">
+ <methods>
+ <method name="(anonymous_0)" hits="1" signature="()V">
+ <lines>
+ <line number="1" hits="1"/>
+ </lines>
+ </method>
+ </methods>
+ <lines>
+ <line null="test" hits="1" branch="false"/>
+ <line number="2" hits="1" branch="false"/>
+ <line number="3" hits="1" branch="false"/>
+ </lines>
+ </class>
+ </classes>
+ </package>
+ </packages>
+</coverage>
diff --git a/spec/fixtures/cobertura/coverage_with_corrupted_data.xml.gz b/spec/fixtures/cobertura/coverage_with_corrupted_data.xml.gz
new file mode 100644
index 00000000000..4d06c42ba0b
--- /dev/null
+++ b/spec/fixtures/cobertura/coverage_with_corrupted_data.xml.gz
Binary files differ
diff --git a/spec/fixtures/csv_gitlab_export.csv b/spec/fixtures/csv_gitlab_export.csv
new file mode 100644
index 00000000000..65422509eef
--- /dev/null
+++ b/spec/fixtures/csv_gitlab_export.csv
@@ -0,0 +1,5 @@
+Issue ID,URL,Title,State,Description,Author,Author Username,Assignee,Assignee Username,Confidential,Locked,Due Date,Created At (UTC),Updated At (UTC),Closed At (UTC),Milestone,Weight,Labels,Time Estimate,Time Spent,Epic ID,Epic Title
+1,http://localhost:3000/jashkenas/underscore/issues/1,Title,Open,,Elva Jerde,jamel,Tierra Effertz,aurora_hahn,No,No,,2020-01-17 10:36:26,2020-02-19 10:36:26,,v1.0,,"Brene,Cutlass,Escort,GVM",0,0,,
+3,http://localhost:3000/jashkenas/underscore/issues/3,Nihil impedit neque quos totam ut aut enim cupiditate doloribus molestiae.,Open,Omnis aliquid sint laudantium quam.,Marybeth Goodwin,rocio.blanda,Annemarie Von,reynalda_howe,No,No,,2020-01-23 10:36:26,2020-02-19 10:36:27,,v1.0,,"Brene,Cutlass,Escort,GVM",0,0,,
+34,http://localhost:3000/jashkenas/underscore/issues/34,Dismiss Cipher with no integrity,Open,,Marybeth Goodwin,rocio.blanda,"","",No,No,,2020-02-19 10:38:49,2020-02-19 10:38:49,,,,,0,0,,
+35,http://localhost:3000/jashkenas/underscore/issues/35,Test Title,Open,Test Description,Marybeth Goodwin,rocio.blanda,"","",No,No,,2020-02-19 10:38:49,2020-02-19 10:38:49,,,,,0,0,,
diff --git a/spec/fixtures/group_export_invalid_subrelations.tar.gz b/spec/fixtures/group_export_invalid_subrelations.tar.gz
new file mode 100644
index 00000000000..6a8f1453517
--- /dev/null
+++ b/spec/fixtures/group_export_invalid_subrelations.tar.gz
Binary files differ
diff --git a/spec/fixtures/lib/elasticsearch/logs_response.json b/spec/fixtures/lib/elasticsearch/logs_response.json
new file mode 100644
index 00000000000..7a733882089
--- /dev/null
+++ b/spec/fixtures/lib/elasticsearch/logs_response.json
@@ -0,0 +1,73 @@
+{
+ "took": 7087,
+ "timed_out": false,
+ "_shards": {
+ "total": 151,
+ "successful": 151,
+ "skipped": 0,
+ "failed": 0,
+ "failures": []
+ },
+ "hits": {
+ "total": 486924,
+ "max_score": null,
+ "hits": [
+ {
+ "_index": "filebeat-6.7.0-2019.10.25",
+ "_type": "doc",
+ "_id": "SkbxAW4BWzhswgK-C5-R",
+ "_score": null,
+ "_source": {
+ "message": "10.8.2.1 - - [25/Oct/2019:08:03:22 UTC] \"GET / HTTP/1.1\" 200 13",
+ "@timestamp": "2019-12-13T14:35:34.034Z"
+ },
+ "sort": [
+ 9999998,
+ 1571990602947
+ ]
+ },
+ {
+ "_index": "filebeat-6.7.0-2019.10.27",
+ "_type": "doc",
+ "_id": "wEigD24BWzhswgK-WUU2",
+ "_score": null,
+ "_source": {
+ "message": "10.8.2.1 - - [27/Oct/2019:23:49:54 UTC] \"GET / HTTP/1.1\" 200 13",
+ "@timestamp": "2019-12-13T14:35:35.034Z"
+ },
+ "sort": [
+ 9999949,
+ 1572220194500
+ ]
+ },
+ {
+ "_index": "filebeat-6.7.0-2019.11.04",
+ "_type": "doc",
+ "_id": "gE6uOG4BWzhswgK-M0x2",
+ "_score": null,
+ "_source": {
+ "message": "10.8.2.1 - - [04/Nov/2019:23:09:24 UTC] \"GET / HTTP/1.1\" 200 13",
+ "@timestamp": "2019-12-13T14:35:36.034Z"
+ },
+ "sort": [
+ 9999944,
+ 1572908964497
+ ]
+ },
+ {
+ "_index": "filebeat-6.7.0-2019.10.30",
+ "_type": "doc",
+ "_id": "0klPHW4BWzhswgK-nfCF",
+ "_score": null,
+ "_source": {
+ "message": "- -\u003e /",
+ "@timestamp": "2019-12-13T14:35:37.034Z"
+ },
+ "sort": [
+ 9999934,
+ 1572449784442
+ ]
+ }
+ ]
+ }
+}
diff --git a/spec/fixtures/lib/elasticsearch/query.json b/spec/fixtures/lib/elasticsearch/query.json
new file mode 100644
index 00000000000..565c871b1c7
--- /dev/null
+++ b/spec/fixtures/lib/elasticsearch/query.json
@@ -0,0 +1,39 @@
+{
+ "query": {
+ "bool": {
+ "must": [
+ {
+ "match_phrase": {
+ "kubernetes.pod.name": {
+ "query": "production-6866bc8974-m4sk4"
+ }
+ }
+ },
+ {
+ "match_phrase": {
+ "kubernetes.namespace": {
+ "query": "autodevops-deploy-9-production"
+ }
+ }
+ }
+ ]
+ }
+ },
+ "sort": [
+ {
+ "@timestamp": {
+ "order": "desc"
+ }
+ },
+ {
+ "offset": {
+ "order": "desc"
+ }
+ }
+ ],
+ "_source": [
+ "@timestamp",
+ "message"
+ ],
+ "size": 500
+}
diff --git a/spec/fixtures/lib/elasticsearch/query_with_container.json b/spec/fixtures/lib/elasticsearch/query_with_container.json
new file mode 100644
index 00000000000..21eac5d7dbe
--- /dev/null
+++ b/spec/fixtures/lib/elasticsearch/query_with_container.json
@@ -0,0 +1,46 @@
+{
+ "query": {
+ "bool": {
+ "must": [
+ {
+ "match_phrase": {
+ "kubernetes.pod.name": {
+ "query": "production-6866bc8974-m4sk4"
+ }
+ }
+ },
+ {
+ "match_phrase": {
+ "kubernetes.namespace": {
+ "query": "autodevops-deploy-9-production"
+ }
+ }
+ },
+ {
+ "match_phrase": {
+ "kubernetes.container.name": {
+ "query": "auto-deploy-app"
+ }
+ }
+ }
+ ]
+ }
+ },
+ "sort": [
+ {
+ "@timestamp": {
+ "order": "desc"
+ }
+ },
+ {
+ "offset": {
+ "order": "desc"
+ }
+ }
+ ],
+ "_source": [
+ "@timestamp",
+ "message"
+ ],
+ "size": 500
+}
diff --git a/spec/fixtures/lib/elasticsearch/query_with_cursor.json b/spec/fixtures/lib/elasticsearch/query_with_cursor.json
new file mode 100644
index 00000000000..1264fdb0322
--- /dev/null
+++ b/spec/fixtures/lib/elasticsearch/query_with_cursor.json
@@ -0,0 +1,43 @@
+{
+ "query": {
+ "bool": {
+ "must": [
+ {
+ "match_phrase": {
+ "kubernetes.pod.name": {
+ "query": "production-6866bc8974-m4sk4"
+ }
+ }
+ },
+ {
+ "match_phrase": {
+ "kubernetes.namespace": {
+ "query": "autodevops-deploy-9-production"
+ }
+ }
+ }
+ ]
+ }
+ },
+ "sort": [
+ {
+ "@timestamp": {
+ "order": "desc"
+ }
+ },
+ {
+ "offset": {
+ "order": "desc"
+ }
+ }
+ ],
+ "search_after": [
+ 9999934,
+ 1572449784442
+ ],
+ "_source": [
+ "@timestamp",
+ "message"
+ ],
+ "size": 500
+}
diff --git a/spec/fixtures/lib/elasticsearch/query_with_end_time.json b/spec/fixtures/lib/elasticsearch/query_with_end_time.json
new file mode 100644
index 00000000000..2859e6427d4
--- /dev/null
+++ b/spec/fixtures/lib/elasticsearch/query_with_end_time.json
@@ -0,0 +1,48 @@
+{
+ "query": {
+ "bool": {
+ "must": [
+ {
+ "match_phrase": {
+ "kubernetes.pod.name": {
+ "query": "production-6866bc8974-m4sk4"
+ }
+ }
+ },
+ {
+ "match_phrase": {
+ "kubernetes.namespace": {
+ "query": "autodevops-deploy-9-production"
+ }
+ }
+ }
+ ],
+ "filter": [
+ {
+ "range": {
+ "@timestamp": {
+ "lt": "2019-12-13T14:35:34.034Z"
+ }
+ }
+ }
+ ]
+ }
+ },
+ "sort": [
+ {
+ "@timestamp": {
+ "order": "desc"
+ }
+ },
+ {
+ "offset": {
+ "order": "desc"
+ }
+ }
+ ],
+ "_source": [
+ "@timestamp",
+ "message"
+ ],
+ "size": 500
+}
diff --git a/spec/fixtures/lib/elasticsearch/query_with_search.json b/spec/fixtures/lib/elasticsearch/query_with_search.json
new file mode 100644
index 00000000000..3c9bed047fa
--- /dev/null
+++ b/spec/fixtures/lib/elasticsearch/query_with_search.json
@@ -0,0 +1,48 @@
+{
+ "query": {
+ "bool": {
+ "must": [
+ {
+ "match_phrase": {
+ "kubernetes.pod.name": {
+ "query": "production-6866bc8974-m4sk4"
+ }
+ }
+ },
+ {
+ "match_phrase": {
+ "kubernetes.namespace": {
+ "query": "autodevops-deploy-9-production"
+ }
+ }
+ },
+ {
+ "simple_query_string": {
+ "query": "foo +bar ",
+ "fields": [
+ "message"
+ ],
+ "default_operator": "and"
+ }
+ }
+ ]
+ }
+ },
+ "sort": [
+ {
+ "@timestamp": {
+ "order": "desc"
+ }
+ },
+ {
+ "offset": {
+ "order": "desc"
+ }
+ }
+ ],
+ "_source": [
+ "@timestamp",
+ "message"
+ ],
+ "size": 500
+}
diff --git a/spec/fixtures/lib/elasticsearch/query_with_start_time.json b/spec/fixtures/lib/elasticsearch/query_with_start_time.json
new file mode 100644
index 00000000000..0c5cfca42f7
--- /dev/null
+++ b/spec/fixtures/lib/elasticsearch/query_with_start_time.json
@@ -0,0 +1,48 @@
+{
+ "query": {
+ "bool": {
+ "must": [
+ {
+ "match_phrase": {
+ "kubernetes.pod.name": {
+ "query": "production-6866bc8974-m4sk4"
+ }
+ }
+ },
+ {
+ "match_phrase": {
+ "kubernetes.namespace": {
+ "query": "autodevops-deploy-9-production"
+ }
+ }
+ }
+ ],
+ "filter": [
+ {
+ "range": {
+ "@timestamp": {
+ "gte": "2019-12-13T14:35:34.034Z"
+ }
+ }
+ }
+ ]
+ }
+ },
+ "sort": [
+ {
+ "@timestamp": {
+ "order": "desc"
+ }
+ },
+ {
+ "offset": {
+ "order": "desc"
+ }
+ }
+ ],
+ "_source": [
+ "@timestamp",
+ "message"
+ ],
+ "size": 500
+}
diff --git a/spec/fixtures/lib/elasticsearch/query_with_times.json b/spec/fixtures/lib/elasticsearch/query_with_times.json
new file mode 100644
index 00000000000..7108d42217e
--- /dev/null
+++ b/spec/fixtures/lib/elasticsearch/query_with_times.json
@@ -0,0 +1,49 @@
+{
+ "query": {
+ "bool": {
+ "must": [
+ {
+ "match_phrase": {
+ "kubernetes.pod.name": {
+ "query": "production-6866bc8974-m4sk4"
+ }
+ }
+ },
+ {
+ "match_phrase": {
+ "kubernetes.namespace": {
+ "query": "autodevops-deploy-9-production"
+ }
+ }
+ }
+ ],
+ "filter": [
+ {
+ "range": {
+ "@timestamp": {
+ "gte": "2019-12-13T14:35:34.034Z",
+ "lt": "2019-12-13T14:35:34.034Z"
+ }
+ }
+ }
+ ]
+ }
+ },
+ "sort": [
+ {
+ "@timestamp": {
+ "order": "desc"
+ }
+ },
+ {
+ "offset": {
+ "order": "desc"
+ }
+ }
+ ],
+ "_source": [
+ "@timestamp",
+ "message"
+ ],
+ "size": 500
+}
diff --git a/spec/fixtures/lib/gitlab/import_export/complex/project.json b/spec/fixtures/lib/gitlab/import_export/complex/project.json
index 4d6211a1251..72ed8b818d8 100644
--- a/spec/fixtures/lib/gitlab/import_export/complex/project.json
+++ b/spec/fixtures/lib/gitlab/import_export/complex/project.json
@@ -6318,7 +6318,7 @@
]
}
],
- "pipelines": [
+ "ci_pipelines": [
{
"id": 36,
"project_id": 5,
@@ -6452,7 +6452,7 @@
]
},
{
- "id": 37,
+ "id": 26,
"project_id": 5,
"ref": "master",
"sha": "048721d90c449b244b7b4c53a9186b04330174ec",
@@ -6744,7 +6744,7 @@
]
},
{
- "id": 40,
+ "id": 19,
"project_id": 5,
"ref": "master",
"sha": "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73",
@@ -6851,7 +6851,7 @@
]
},
{
- "id": 42,
+ "id": 20,
"project_id": 5,
"ref": "master",
"sha": "ce84140e8b878ce6e7c4d298c7202ff38170e3ac",
diff --git a/spec/fixtures/lib/gitlab/import_export/group/project.json b/spec/fixtures/lib/gitlab/import_export/group/project.json
index ce4fa1981ff..e8e1e53a86a 100644
--- a/spec/fixtures/lib/gitlab/import_export/group/project.json
+++ b/spec/fixtures/lib/gitlab/import_export/group/project.json
@@ -205,36 +205,40 @@
"iid": 1,
"group_id": 100
},
- "epic": {
- "id": 1,
- "group_id": 5,
- "author_id": 1,
- "assignee_id": null,
- "iid": 1,
- "updated_by_id": null,
- "last_edited_by_id": null,
- "lock_version": 0,
- "start_date": null,
- "end_date": null,
- "last_edited_at": null,
- "created_at": "2019-12-08T19:37:07.098Z",
- "updated_at": "2019-12-08T19:43:11.568Z",
- "title": "An epic",
- "description": null,
- "start_date_sourcing_milestone_id": null,
- "due_date_sourcing_milestone_id": null,
- "start_date_fixed": null,
- "due_date_fixed": null,
- "start_date_is_fixed": null,
- "due_date_is_fixed": null,
- "closed_by_id": null,
- "closed_at": null,
- "parent_id": null,
- "relative_position": null,
- "state_id": "opened",
- "start_date_sourcing_epic_id": null,
- "due_date_sourcing_epic_id": null,
- "milestone_id": null
+ "epic_issue": {
+ "id": 78,
+ "relative_position": 1073740323,
+ "epic": {
+ "id": 1,
+ "group_id": 5,
+ "author_id": 1,
+ "assignee_id": null,
+ "iid": 1,
+ "updated_by_id": null,
+ "last_edited_by_id": null,
+ "lock_version": 0,
+ "start_date": null,
+ "end_date": null,
+ "last_edited_at": null,
+ "created_at": "2019-12-08T19:37:07.098Z",
+ "updated_at": "2019-12-08T19:43:11.568Z",
+ "title": "An epic",
+ "description": null,
+ "start_date_sourcing_milestone_id": null,
+ "due_date_sourcing_milestone_id": null,
+ "start_date_fixed": null,
+ "due_date_fixed": null,
+ "start_date_is_fixed": null,
+ "due_date_is_fixed": null,
+ "closed_by_id": null,
+ "closed_at": null,
+ "parent_id": null,
+ "relative_position": null,
+ "state_id": "opened",
+ "start_date_sourcing_epic_id": null,
+ "due_date_sourcing_epic_id": null,
+ "milestone_id": null
+ }
}
}
],
diff --git a/spec/fixtures/lib/gitlab/import_export/invalid_json/project.json b/spec/fixtures/lib/gitlab/import_export/invalid_json/project.json
new file mode 100644
index 00000000000..83cb34eea91
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/import_export/invalid_json/project.json
@@ -0,0 +1,3 @@
+{
+ "invalid" json
+}
diff --git a/spec/fixtures/lib/gitlab/import_export/light/project.json b/spec/fixtures/lib/gitlab/import_export/light/project.json
index 2971ca0f0f8..51e2e9ac623 100644
--- a/spec/fixtures/lib/gitlab/import_export/light/project.json
+++ b/spec/fixtures/lib/gitlab/import_export/light/project.json
@@ -111,6 +111,28 @@
"active": false,
"properties": {},
"template": true,
+ "instance": false,
+ "push_events": true,
+ "issues_events": true,
+ "merge_requests_events": true,
+ "tag_push_events": true,
+ "note_events": true,
+ "job_events": true,
+ "type": "TeamcityService",
+ "category": "ci",
+ "default": false,
+ "wiki_page_events": true
+ },
+ {
+ "id": 101,
+ "title": "JetBrains TeamCity CI",
+ "project_id": 5,
+ "created_at": "2016-06-14T15:01:51.315Z",
+ "updated_at": "2016-06-14T15:01:51.315Z",
+ "active": false,
+ "properties": {},
+ "template": false,
+ "instance": true,
"push_events": true,
"issues_events": true,
"merge_requests_events": true,
diff --git a/spec/fixtures/lib/gitlab/import_export/multi_pipeline_ref_one_external_pr/project.json b/spec/fixtures/lib/gitlab/import_export/multi_pipeline_ref_one_external_pr/project.json
new file mode 100644
index 00000000000..5ca803cc11f
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/import_export/multi_pipeline_ref_one_external_pr/project.json
@@ -0,0 +1,144 @@
+{
+ "approvals_before_merge": 0,
+ "archived": false,
+ "auto_cancel_pending_pipelines": "enabled",
+ "autoclose_referenced_issues": true,
+ "boards": [],
+ "build_allow_git_fetch": true,
+ "build_coverage_regex": null,
+ "build_timeout": 3600,
+ "ci_cd_settings": {
+ "group_runners_enabled": true
+ },
+ "ci_config_path": null,
+ "ci_pipelines": [
+ {
+ "before_sha": "0000000000000000000000000000000000000000",
+ "committed_at": null,
+ "config_source": "repository_source",
+ "created_at": "2020-02-25T12:08:40.615Z",
+ "duration": 61,
+ "external_pull_request": {
+ "created_at": "2020-02-25T12:08:40.478Z",
+ "id": 59023,
+ "project_id": 17121868,
+ "pull_request_iid": 4,
+ "source_branch": "new-branch",
+ "source_repository": "liptonshmidt/dotfiles",
+ "source_sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
+ "status": "open",
+ "target_branch": "master",
+ "target_repository": "liptonshmidt/dotfiles",
+ "target_sha": "86ebe754fa12216e5c0d9d95890936e2fcc62392",
+ "updated_at": "2020-02-25T12:08:40.478Z"
+ },
+ "failure_reason": null,
+ "finished_at": "2020-02-25T12:09:44.464Z",
+ "id": 120842687,
+ "iid": 8,
+ "lock_version": 3,
+ "notes": [],
+ "project_id": 17121868,
+ "protected": false,
+ "ref": "new-branch",
+ "sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
+ "source": "external_pull_request_event",
+ "source_sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
+ "stages": [],
+ "started_at": "2020-02-25T12:08:42.511Z",
+ "status": "success",
+ "tag": false,
+ "target_sha": "86ebe754fa12216e5c0d9d95890936e2fcc62392",
+ "updated_at": "2020-02-25T12:09:44.473Z",
+ "user_id": 4087087,
+ "yaml_errors": null
+ },
+ {
+ "before_sha": "86ebe754fa12216e5c0d9d95890936e2fcc62392",
+ "committed_at": null,
+ "config_source": "repository_source",
+ "created_at": "2020-02-25T12:08:37.434Z",
+ "duration": 57,
+ "external_pull_request": {
+ "created_at": "2020-02-25T12:08:40.478Z",
+ "id": 59023,
+ "project_id": 17121868,
+ "pull_request_iid": 4,
+ "source_branch": "new-branch",
+ "source_repository": "liptonshmidt/dotfiles",
+ "source_sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
+ "status": "open",
+ "target_branch": "master",
+ "target_repository": "liptonshmidt/dotfiles",
+ "target_sha": "86ebe754fa12216e5c0d9d95890936e2fcc62392",
+ "updated_at": "2020-02-25T12:08:40.478Z"
+ },
+ "failure_reason": null,
+ "finished_at": "2020-02-25T12:09:36.557Z",
+ "id": 120842675,
+ "iid": 7,
+ "lock_version": 3,
+ "notes": [],
+ "project_id": 17121868,
+ "protected": false,
+ "ref": "new-branch",
+ "sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
+ "source": "external_pull_request_event",
+ "source_sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
+ "stages": [],
+ "started_at": "2020-02-25T12:08:38.682Z",
+ "status": "success",
+ "tag": false,
+ "target_sha": "86ebe754fa12216e5c0d9d95890936e2fcc62392",
+ "updated_at": "2020-02-25T12:09:36.565Z",
+ "user_id": 4087087,
+ "yaml_errors": null
+ }
+ ],
+ "custom_attributes": [],
+ "delete_error": null,
+ "description": "Vim, Tmux and others",
+ "disable_overriding_approvers_per_merge_request": null,
+ "external_authorization_classification_label": "",
+ "external_pull_requests": [
+ {
+ "created_at": "2020-02-25T12:08:40.478Z",
+ "id": 59023,
+ "project_id": 17121868,
+ "pull_request_iid": 4,
+ "source_branch": "new-branch",
+ "source_repository": "liptonshmidt/dotfiles",
+ "source_sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
+ "status": "open",
+ "target_branch": "master",
+ "target_repository": "liptonshmidt/dotfiles",
+ "target_sha": "86ebe754fa12216e5c0d9d95890936e2fcc62392",
+ "updated_at": "2020-02-25T12:08:40.478Z"
+ }
+ ],
+ "external_webhook_token": "D3mVYFzZkgZ5kMfcW_wx",
+ "issues": [],
+ "labels": [],
+ "milestones": [],
+ "pipeline_schedules": [],
+ "project_feature": {
+ "builds_access_level": 20,
+ "created_at": "2020-02-25T11:20:09.925Z",
+ "forking_access_level": 20,
+ "id": 17494715,
+ "issues_access_level": 0,
+ "merge_requests_access_level": 0,
+ "pages_access_level": 20,
+ "project_id": 17121868,
+ "repository_access_level": 20,
+ "snippets_access_level": 0,
+ "updated_at": "2020-02-25T11:20:10.376Z",
+ "wiki_access_level": 0
+ },
+ "public_builds": true,
+ "releases": [],
+ "shared_runners_enabled": true,
+ "snippets": [],
+ "triggers": [],
+ "visibility_level": 20
+}
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/axis.json b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/axis.json
new file mode 100644
index 00000000000..ed8fa58393f
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/axis.json
@@ -0,0 +1,10 @@
+{
+ "type": "object",
+ "required": [],
+ "properties": {
+ "name": { "type": "string" },
+ "precision": { "type": "number" },
+ "format": { "type": "string" }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panels.json b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panels.json
index a16f1ef592f..9f39e9c77cb 100644
--- a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panels.json
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panels.json
@@ -9,6 +9,7 @@
"title": { "type": "string" },
"type": { "type": "string" },
"y_label": { "type": "string" },
+ "y_axis": { "$ref": "axis.json" },
"weight": { "type": "number" },
"metrics": {
"type": "array",
diff --git a/spec/fixtures/sentry/issues_sample_response.json b/spec/fixtures/sentry/issues_sample_response.json
index 495562ac960..980e2fb3bd6 100644
--- a/spec/fixtures/sentry/issues_sample_response.json
+++ b/spec/fixtures/sentry/issues_sample_response.json
@@ -82,5 +82,47 @@
"name": "Internal"
},
"statusDetails": {}
+ },
+ {
+ "lastSeen": "2018-12-31T12:00:11Z",
+ "numComments": 0,
+ "userCount": 0,
+ "stats": {
+ "24h": [
+ [
+ 1546437600,
+ 0
+ ]
+ ]
+ },
+ "culprit": "sentry.tasks.reports.deliver_organization_user_report",
+ "title": "Service unknown",
+ "id": "12",
+ "assignedTo": null,
+ "logger": null,
+ "type": "error",
+ "annotations": [],
+ "metadata": {
+ "type": "gaierror",
+ "value": "Service unknown"
+ },
+ "status": "ignored",
+ "subscriptionDetails": null,
+ "isPublic": false,
+ "hasSeen": false,
+ "shortId": "INTERNAL-4",
+ "shareId": null,
+ "firstSeen": "2018-12-17T12:00:14Z",
+ "count": "70",
+ "permalink": "35.228.54.90/sentry/internal/issues/12/",
+ "level": "error",
+ "isSubscribed": true,
+ "isBookmarked": false,
+ "project": {
+ "slug": "internal",
+ "id": "1",
+ "name": "Internal"
+ },
+ "statusDetails": {}
}
]
diff --git a/spec/fixtures/trace/sample_trace b/spec/fixtures/trace/sample_trace
index d774d154496..e76712782be 100644
--- a/spec/fixtures/trace/sample_trace
+++ b/spec/fixtures/trace/sample_trace
@@ -1442,7 +1442,7 @@ TodoService
marks a single todo id as done
caches the number of todos of a user
-Gitlab::ImportExport::ProjectTreeSaver
+Gitlab::ImportExport::Project::TreeSaver
saves the project tree into a json object
saves project successfully
JSON
@@ -2772,8 +2772,6 @@ Service
should return false by default
#deprecation_message
should be empty by default
- .find_by_template
- returns service template
#api_field_names
filters out sensitive fields
diff --git a/spec/frontend/__mocks__/monaco-editor/index.js b/spec/frontend/__mocks__/monaco-editor/index.js
new file mode 100644
index 00000000000..18cc3a7c377
--- /dev/null
+++ b/spec/frontend/__mocks__/monaco-editor/index.js
@@ -0,0 +1,13 @@
+// NOTE:
+// These imports are pulled from 'monaco-editor/esm/vs/editor/editor.main.js'
+// We don't want to include 'monaco-editor/esm/vs/editor/edcore' because it causes
+// lots of compatibility issues with Jest
+// Issue: https://gitlab.com/gitlab-org/gitlab/-/issues/209863
+import 'monaco-editor/esm/vs/language/typescript/monaco.contribution';
+import 'monaco-editor/esm/vs/language/css/monaco.contribution';
+import 'monaco-editor/esm/vs/language/json/monaco.contribution';
+import 'monaco-editor/esm/vs/language/html/monaco.contribution';
+import 'monaco-editor/esm/vs/basic-languages/monaco.contribution';
+
+export * from 'monaco-editor/esm/vs/editor/editor.api';
+export default global.monaco;
diff --git a/spec/frontend/__mocks__/mousetrap/index.js b/spec/frontend/__mocks__/mousetrap/index.js
new file mode 100644
index 00000000000..63c92fa9a09
--- /dev/null
+++ b/spec/frontend/__mocks__/mousetrap/index.js
@@ -0,0 +1,6 @@
+/* global Mousetrap */
+// `mousetrap` uses AMD, which webpack understands but Jest does not
+// Thankfully it also writes to a global export so we can es6-ify it
+import 'mousetrap';
+
+export default Mousetrap;
diff --git a/spec/frontend/badges/components/badge_form_spec.js b/spec/frontend/badges/components/badge_form_spec.js
new file mode 100644
index 00000000000..d61bd29ca9d
--- /dev/null
+++ b/spec/frontend/badges/components/badge_form_spec.js
@@ -0,0 +1,197 @@
+import Vue from 'vue';
+import MockAdapter from 'axios-mock-adapter';
+import { mountComponentWithStore } from 'helpers/vue_mount_component_helper';
+import axios from '~/lib/utils/axios_utils';
+import store from '~/badges/store';
+import createEmptyBadge from '~/badges/empty_badge';
+import BadgeForm from '~/badges/components/badge_form.vue';
+import { DUMMY_IMAGE_URL, TEST_HOST } from 'helpers/test_constants';
+
+// avoid preview background process
+BadgeForm.methods.debouncedPreview = () => {};
+
+describe('BadgeForm component', () => {
+ const Component = Vue.extend(BadgeForm);
+ let axiosMock;
+ let vm;
+
+ beforeEach(() => {
+ setFixtures(`
+ <div id="dummy-element"></div>
+ `);
+
+ axiosMock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ axiosMock.restore();
+ });
+
+ describe('methods', () => {
+ beforeEach(() => {
+ vm = mountComponentWithStore(Component, {
+ el: '#dummy-element',
+ store,
+ props: {
+ isEditing: false,
+ },
+ });
+ });
+
+ describe('onCancel', () => {
+ it('calls stopEditing', () => {
+ jest.spyOn(vm, 'stopEditing').mockImplementation(() => {});
+
+ vm.onCancel();
+
+ expect(vm.stopEditing).toHaveBeenCalled();
+ });
+ });
+ });
+
+ const sharedSubmitTests = submitAction => {
+ const nameSelector = '#badge-name';
+ const imageUrlSelector = '#badge-image-url';
+ const findImageUrlElement = () => vm.$el.querySelector(imageUrlSelector);
+ const linkUrlSelector = '#badge-link-url';
+ const findLinkUrlElement = () => vm.$el.querySelector(linkUrlSelector);
+ const setValue = (inputElementSelector, value) => {
+ const inputElement = vm.$el.querySelector(inputElementSelector);
+ inputElement.value = value;
+ inputElement.dispatchEvent(new Event('input'));
+ };
+ const submitForm = () => {
+ const submitButton = vm.$el.querySelector('button[type="submit"]');
+ submitButton.click();
+ };
+ const expectInvalidInput = inputElementSelector => {
+ const inputElement = vm.$el.querySelector(inputElementSelector);
+
+ expect(inputElement.checkValidity()).toBe(false);
+ const feedbackElement = vm.$el.querySelector(`${inputElementSelector} + .invalid-feedback`);
+
+ expect(feedbackElement).toBeVisible();
+ };
+
+ beforeEach(done => {
+ jest.spyOn(vm, submitAction).mockReturnValue(Promise.resolve());
+ store.replaceState({
+ ...store.state,
+ badgeInAddForm: createEmptyBadge(),
+ badgeInEditForm: createEmptyBadge(),
+ isSaving: false,
+ });
+
+ Vue.nextTick()
+ .then(() => {
+ setValue(nameSelector, 'TestBadge');
+ setValue(linkUrlSelector, `${TEST_HOST}/link/url`);
+ setValue(imageUrlSelector, `${window.location.origin}${DUMMY_IMAGE_URL}`);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('returns immediately if imageUrl is empty', () => {
+ setValue(imageUrlSelector, '');
+
+ submitForm();
+
+ expectInvalidInput(imageUrlSelector);
+
+ expect(vm[submitAction]).not.toHaveBeenCalled();
+ });
+
+ it('returns immediately if imageUrl is malformed', () => {
+ setValue(imageUrlSelector, 'not-a-url');
+
+ submitForm();
+
+ expectInvalidInput(imageUrlSelector);
+
+ expect(vm[submitAction]).not.toHaveBeenCalled();
+ });
+
+ it('returns immediately if linkUrl is empty', () => {
+ setValue(linkUrlSelector, '');
+
+ submitForm();
+
+ expectInvalidInput(linkUrlSelector);
+
+ expect(vm[submitAction]).not.toHaveBeenCalled();
+ });
+
+ it('returns immediately if linkUrl is malformed', () => {
+ setValue(linkUrlSelector, 'not-a-url');
+
+ submitForm();
+
+ expectInvalidInput(linkUrlSelector);
+
+ expect(vm[submitAction]).not.toHaveBeenCalled();
+ });
+
+ it(`calls ${submitAction}`, () => {
+ submitForm();
+
+ expect(findImageUrlElement().checkValidity()).toBe(true);
+ expect(findLinkUrlElement().checkValidity()).toBe(true);
+ expect(vm[submitAction]).toHaveBeenCalled();
+ });
+ };
+
+ describe('if isEditing is false', () => {
+ beforeEach(() => {
+ vm = mountComponentWithStore(Component, {
+ el: '#dummy-element',
+ store,
+ props: {
+ isEditing: false,
+ },
+ });
+ });
+
+ it('renders one button', () => {
+ expect(vm.$el.querySelector('.row-content-block')).toBeNull();
+ const buttons = vm.$el.querySelectorAll('.form-group:last-of-type button');
+
+ expect(buttons.length).toBe(1);
+ const buttonAddElement = buttons[0];
+
+ expect(buttonAddElement).toBeVisible();
+ expect(buttonAddElement).toHaveText('Add badge');
+ });
+
+ sharedSubmitTests('addBadge');
+ });
+
+ describe('if isEditing is true', () => {
+ beforeEach(() => {
+ vm = mountComponentWithStore(Component, {
+ el: '#dummy-element',
+ store,
+ props: {
+ isEditing: true,
+ },
+ });
+ });
+
+ it('renders two buttons', () => {
+ const buttons = vm.$el.querySelectorAll('.row-content-block button');
+
+ expect(buttons.length).toBe(2);
+ const buttonSaveElement = buttons[0];
+
+ expect(buttonSaveElement).toBeVisible();
+ expect(buttonSaveElement).toHaveText('Save changes');
+ const buttonCancelElement = buttons[1];
+
+ expect(buttonCancelElement).toBeVisible();
+ expect(buttonCancelElement).toHaveText('Cancel');
+ });
+
+ sharedSubmitTests('saveBadge');
+ });
+});
diff --git a/spec/frontend/badges/components/badge_list_row_spec.js b/spec/frontend/badges/components/badge_list_row_spec.js
new file mode 100644
index 00000000000..31f0d850857
--- /dev/null
+++ b/spec/frontend/badges/components/badge_list_row_spec.js
@@ -0,0 +1,109 @@
+import Vue from 'vue';
+import { mountComponentWithStore } from 'helpers/vue_mount_component_helper';
+import { GROUP_BADGE, PROJECT_BADGE } from '~/badges/constants';
+import store from '~/badges/store';
+import BadgeListRow from '~/badges/components/badge_list_row.vue';
+import { createDummyBadge } from '../dummy_badge';
+
+describe('BadgeListRow component', () => {
+ const Component = Vue.extend(BadgeListRow);
+ let badge;
+ let vm;
+
+ beforeEach(() => {
+ setFixtures(`
+ <div id="delete-badge-modal" class="modal"></div>
+ <div id="dummy-element"></div>
+ `);
+ store.replaceState({
+ ...store.state,
+ kind: PROJECT_BADGE,
+ });
+ badge = createDummyBadge();
+ vm = mountComponentWithStore(Component, {
+ el: '#dummy-element',
+ store,
+ props: { badge },
+ });
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ it('renders the badge', () => {
+ const badgeElement = vm.$el.querySelector('.project-badge');
+
+ expect(badgeElement).not.toBeNull();
+ expect(badgeElement.getAttribute('src')).toBe(badge.renderedImageUrl);
+ });
+
+ it('renders the badge name', () => {
+ expect(vm.$el.innerText).toMatch(badge.name);
+ });
+
+ it('renders the badge link', () => {
+ expect(vm.$el.innerText).toMatch(badge.linkUrl);
+ });
+
+ it('renders the badge kind', () => {
+ expect(vm.$el.innerText).toMatch('Project Badge');
+ });
+
+ it('shows edit and delete buttons', () => {
+ const buttons = vm.$el.querySelectorAll('.table-button-footer button');
+
+ expect(buttons).toHaveLength(2);
+ const buttonEditElement = buttons[0];
+
+ expect(buttonEditElement).toBeVisible();
+ expect(buttonEditElement).toHaveSpriteIcon('pencil');
+ const buttonDeleteElement = buttons[1];
+
+ expect(buttonDeleteElement).toBeVisible();
+ expect(buttonDeleteElement).toHaveSpriteIcon('remove');
+ });
+
+  it('calls editBadge when clicking the edit button', () => {
+ jest.spyOn(vm, 'editBadge').mockImplementation(() => {});
+
+ const editButton = vm.$el.querySelector('.table-button-footer button:first-of-type');
+ editButton.click();
+
+ expect(vm.editBadge).toHaveBeenCalled();
+ });
+
+  it('calls updateBadgeInModal and shows modal when clicking the delete button', done => {
+ jest.spyOn(vm, 'updateBadgeInModal').mockImplementation(() => {});
+
+ const deleteButton = vm.$el.querySelector('.table-button-footer button:last-of-type');
+ deleteButton.click();
+
+ Vue.nextTick()
+ .then(() => {
+ expect(vm.updateBadgeInModal).toHaveBeenCalled();
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ describe('for a group badge', () => {
+ beforeEach(done => {
+ badge.kind = GROUP_BADGE;
+
+ Vue.nextTick()
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('renders the badge kind', () => {
+ expect(vm.$el.innerText).toMatch('Group Badge');
+ });
+
+ it('hides edit and delete buttons', () => {
+ const buttons = vm.$el.querySelectorAll('.table-button-footer button');
+
+ expect(buttons).toHaveLength(0);
+ });
+ });
+});
diff --git a/spec/frontend/badges/components/badge_list_spec.js b/spec/frontend/badges/components/badge_list_spec.js
new file mode 100644
index 00000000000..5ffc046eb97
--- /dev/null
+++ b/spec/frontend/badges/components/badge_list_spec.js
@@ -0,0 +1,95 @@
+import Vue from 'vue';
+import { mountComponentWithStore } from 'helpers/vue_mount_component_helper';
+import { GROUP_BADGE, PROJECT_BADGE } from '~/badges/constants';
+import store from '~/badges/store';
+import BadgeList from '~/badges/components/badge_list.vue';
+import { createDummyBadge } from '../dummy_badge';
+
+describe('BadgeList component', () => {
+ const Component = Vue.extend(BadgeList);
+ const numberOfDummyBadges = 3;
+ let vm;
+
+ beforeEach(() => {
+ setFixtures('<div id="dummy-element"></div>');
+ const badges = [];
+ for (let id = 0; id < numberOfDummyBadges; id += 1) {
+ badges.push({ id, ...createDummyBadge() });
+ }
+ store.replaceState({
+ ...store.state,
+ badges,
+ kind: PROJECT_BADGE,
+ isLoading: false,
+ });
+
+ // Can be removed once GlLoadingIcon no longer throws a warning
+ jest.spyOn(global.console, 'warn').mockImplementation(() => jest.fn());
+
+ vm = mountComponentWithStore(Component, {
+ el: '#dummy-element',
+ store,
+ });
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ it('renders a header with the badge count', () => {
+ const header = vm.$el.querySelector('.card-header');
+
+ expect(header).toHaveText(new RegExp(`Your badges\\s+${numberOfDummyBadges}`));
+ });
+
+ it('renders a row for each badge', () => {
+ const rows = vm.$el.querySelectorAll('.gl-responsive-table-row');
+
+ expect(rows).toHaveLength(numberOfDummyBadges);
+ });
+
+ it('renders a message if no badges exist', done => {
+ store.state.badges = [];
+
+ Vue.nextTick()
+ .then(() => {
+ expect(vm.$el.innerText).toMatch('This project has no badges');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('shows a loading icon when loading', done => {
+ store.state.isLoading = true;
+
+ Vue.nextTick()
+ .then(() => {
+ const loadingIcon = vm.$el.querySelector('.gl-spinner');
+
+ expect(loadingIcon).toBeVisible();
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ describe('for group badges', () => {
+ beforeEach(done => {
+ store.state.kind = GROUP_BADGE;
+
+ Vue.nextTick()
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('renders a message if no badges exist', done => {
+ store.state.badges = [];
+
+ Vue.nextTick()
+ .then(() => {
+ expect(vm.$el.innerText).toMatch('This group has no badges');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+});
diff --git a/spec/frontend/badges/components/badge_settings_spec.js b/spec/frontend/badges/components/badge_settings_spec.js
new file mode 100644
index 00000000000..8c3f1ea2749
--- /dev/null
+++ b/spec/frontend/badges/components/badge_settings_spec.js
@@ -0,0 +1,117 @@
+import Vue from 'vue';
+import { mountComponentWithStore } from 'helpers/vue_mount_component_helper';
+import store from '~/badges/store';
+import BadgeSettings from '~/badges/components/badge_settings.vue';
+import { createDummyBadge } from '../dummy_badge';
+
+describe('BadgeSettings component', () => {
+ const Component = Vue.extend(BadgeSettings);
+ let vm;
+
+ beforeEach(() => {
+ setFixtures(`
+ <div id="dummy-element"></div>
+ <button
+ id="dummy-modal-button"
+ type="button"
+ data-toggle="modal"
+ data-target="#delete-badge-modal"
+ >Show modal</button>
+ `);
+
+ // Can be removed once GlLoadingIcon no longer throws a warning
+ jest.spyOn(global.console, 'warn').mockImplementation(() => jest.fn());
+
+ vm = mountComponentWithStore(Component, {
+ el: '#dummy-element',
+ store,
+ });
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ it('displays modal if button is clicked', done => {
+ const badge = createDummyBadge();
+ store.state.badgeInModal = badge;
+ const modal = vm.$el.querySelector('#delete-badge-modal');
+ const button = document.getElementById('dummy-modal-button');
+
+ button.click();
+
+ Vue.nextTick()
+ .then(() => {
+ expect(modal.innerText).toMatch('Delete badge?');
+ const badgeElement = modal.querySelector('img.project-badge');
+ expect(badgeElement).not.toBe(null);
+ expect(badgeElement.getAttribute('src')).toBe(badge.renderedImageUrl);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('displays a form to add a badge', () => {
+ const form = vm.$el.querySelector('form:nth-of-type(2)');
+
+ expect(form).not.toBe(null);
+ const button = form.querySelector('.btn-success');
+
+ expect(button).not.toBe(null);
+ expect(button).toHaveText(/Add badge/);
+ });
+
+ it('displays badge list', () => {
+ const badgeListElement = vm.$el.querySelector('.card');
+
+ expect(badgeListElement).not.toBe(null);
+ expect(badgeListElement).toBeVisible();
+ expect(badgeListElement.innerText).toMatch('Your badges');
+ });
+
+ describe('when editing', () => {
+ beforeEach(done => {
+ store.state.isEditing = true;
+
+ Vue.nextTick()
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('displays a form to edit a badge', () => {
+ const form = vm.$el.querySelector('form:nth-of-type(1)');
+
+ expect(form).not.toBe(null);
+ const submitButton = form.querySelector('.btn-success');
+
+ expect(submitButton).not.toBe(null);
+ expect(submitButton).toHaveText(/Save changes/);
+ const cancelButton = form.querySelector('.btn-cancel');
+
+ expect(cancelButton).not.toBe(null);
+ expect(cancelButton).toHaveText(/Cancel/);
+ });
+
+ it('displays no badge list', () => {
+ const badgeListElement = vm.$el.querySelector('.card');
+
+ expect(badgeListElement).toBeHidden();
+ });
+ });
+
+ describe('methods', () => {
+ describe('onSubmitModal', () => {
+      it('triggers deleteBadge with the badge in the modal', () => {
+ jest.spyOn(vm, 'deleteBadge').mockImplementation(() => Promise.resolve());
+ const modal = vm.$el.querySelector('#delete-badge-modal');
+ const deleteButton = modal.querySelector('.btn-danger');
+
+ deleteButton.click();
+
+ const badge = store.state.badgeInModal;
+
+ expect(vm.deleteBadge).toHaveBeenCalledWith(badge);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/badges/components/badge_spec.js b/spec/frontend/badges/components/badge_spec.js
new file mode 100644
index 00000000000..43004004fb2
--- /dev/null
+++ b/spec/frontend/badges/components/badge_spec.js
@@ -0,0 +1,152 @@
+import Vue from 'vue';
+import mountComponent from 'helpers/vue_mount_component_helper';
+import { DUMMY_IMAGE_URL, TEST_HOST } from 'spec/test_constants';
+import Badge from '~/badges/components/badge.vue';
+
+describe('Badge component', () => {
+ const Component = Vue.extend(Badge);
+ const dummyProps = {
+ imageUrl: DUMMY_IMAGE_URL,
+ linkUrl: `${TEST_HOST}/badge/link/url`,
+ };
+ let vm;
+
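+ // The reload button is the last <button> rendered by the component, hence the last index below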
+ const findElements = () => {
+ const buttons = vm.$el.querySelectorAll('button');
+ return {
+ badgeImage: vm.$el.querySelector('img.project-badge'),
+ loadingIcon: vm.$el.querySelector('.gl-spinner'),
+ reloadButton: buttons[buttons.length - 1],
+ };
+ };
+
+ const createComponent = (props, el = null) => {
+ vm = mountComponent(Component, props, el);
+ const { badgeImage } = findElements();
+ return new Promise(resolve => {
+ badgeImage.addEventListener('load', resolve);
+ // Manually dispatch the load event, as it is not triggered automatically in the test environment
+ badgeImage.dispatchEvent(new Event('load'));
+ }).then(() => Vue.nextTick());
+ };
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ describe('watchers', () => {
+ describe('imageUrl', () => {
+ it('sets isLoading and resets numRetries and hasError', done => {
+ const props = { ...dummyProps };
+ createComponent(props)
+ .then(() => {
+ expect(vm.isLoading).toBe(false);
+ vm.hasError = true;
+ vm.numRetries = 42;
+
+ vm.imageUrl = `${props.imageUrl}#something/else`;
+
+ return Vue.nextTick();
+ })
+ .then(() => {
+ expect(vm.isLoading).toBe(true);
+ expect(vm.numRetries).toBe(0);
+ expect(vm.hasError).toBe(false);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+ });
+
+ describe('methods', () => {
+ beforeEach(done => {
+ createComponent({ ...dummyProps })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('onError resets isLoading and sets hasError', () => {
+ vm.hasError = false;
+ vm.isLoading = true;
+
+ vm.onError();
+
+ expect(vm.hasError).toBe(true);
+ expect(vm.isLoading).toBe(false);
+ });
+
+ it('onLoad sets isLoading', () => {
+ vm.isLoading = true;
+
+ vm.onLoad();
+
+ expect(vm.isLoading).toBe(false);
+ });
+
+ it('reloadImage resets isLoading and hasError and increases numRetries', () => {
+ vm.hasError = true;
+ vm.isLoading = false;
+ vm.numRetries = 0;
+
+ vm.reloadImage();
+
+ expect(vm.hasError).toBe(false);
+ expect(vm.isLoading).toBe(true);
+ expect(vm.numRetries).toBe(1);
+ });
+ });
+
+ describe('behavior', () => {
+ beforeEach(done => {
+ setFixtures('<div id="dummy-element"></div>');
+ createComponent({ ...dummyProps }, '#dummy-element')
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('shows a badge image after loading', () => {
+ expect(vm.isLoading).toBe(false);
+ expect(vm.hasError).toBe(false);
+ const { badgeImage, loadingIcon, reloadButton } = findElements();
+
+ expect(badgeImage).toBeVisible();
+ expect(loadingIcon).toBeHidden();
+ expect(reloadButton).toBeHidden();
+ expect(vm.$el.querySelector('.btn-group')).toBeHidden();
+ });
+
+ it('shows a loading icon when loading', done => {
+ vm.isLoading = true;
+
+ Vue.nextTick()
+ .then(() => {
+ const { badgeImage, loadingIcon, reloadButton } = findElements();
+
+ expect(badgeImage).toBeHidden();
+ expect(loadingIcon).toBeVisible();
+ expect(reloadButton).toBeHidden();
+ expect(vm.$el.querySelector('.btn-group')).toBeHidden();
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('shows an error and reload button if loading failed', done => {
+ vm.hasError = true;
+
+ Vue.nextTick()
+ .then(() => {
+ const { badgeImage, loadingIcon, reloadButton } = findElements();
+
+ expect(badgeImage).toBeHidden();
+ expect(loadingIcon).toBeHidden();
+ expect(reloadButton).toBeVisible();
+ expect(reloadButton).toHaveSpriteIcon('retry');
+ expect(vm.$el.innerText.trim()).toBe('No badge image');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+});
diff --git a/spec/javascripts/badges/dummy_badge.js b/spec/frontend/badges/dummy_badge.js
index a0dee89736e..a0dee89736e 100644
--- a/spec/javascripts/badges/dummy_badge.js
+++ b/spec/frontend/badges/dummy_badge.js
diff --git a/spec/frontend/badges/store/actions_spec.js b/spec/frontend/badges/store/actions_spec.js
new file mode 100644
index 00000000000..921c21cb55e
--- /dev/null
+++ b/spec/frontend/badges/store/actions_spec.js
@@ -0,0 +1,622 @@
+import MockAdapter from 'axios-mock-adapter';
+import { TEST_HOST } from 'spec/test_constants';
+import testAction from 'helpers/vuex_action_helper';
+import axios from '~/lib/utils/axios_utils';
+import actions, { transformBackendBadge } from '~/badges/store/actions';
+import mutationTypes from '~/badges/store/mutation_types';
+import createState from '~/badges/store/state';
+import { createDummyBadge, createDummyBadgeResponse } from '../dummy_badge';
+
+describe('Badges store actions', () => {
+ const dummyEndpointUrl = `${TEST_HOST}/badges/endpoint`;
+ const dummyBadges = [{ ...createDummyBadge(), id: 5 }, { ...createDummyBadge(), id: 6 }];
+
+ let axiosMock;
+ let badgeId;
+ let state;
+
+ beforeEach(() => {
+ axiosMock = new MockAdapter(axios);
+ state = {
+ ...createState(),
+ apiEndpointUrl: dummyEndpointUrl,
+ badges: dummyBadges,
+ };
+ badgeId = state.badges[0].id;
+ });
+
+ afterEach(() => {
+ axiosMock.restore();
+ });
+
+ describe('requestNewBadge', () => {
+ it('commits REQUEST_NEW_BADGE', done => {
+ testAction(
+ actions.requestNewBadge,
+ null,
+ state,
+ [{ type: mutationTypes.REQUEST_NEW_BADGE }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveNewBadge', () => {
+ it('commits RECEIVE_NEW_BADGE', done => {
+ const newBadge = createDummyBadge();
+ testAction(
+ actions.receiveNewBadge,
+ newBadge,
+ state,
+ [{ type: mutationTypes.RECEIVE_NEW_BADGE, payload: newBadge }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveNewBadgeError', () => {
+ it('commits RECEIVE_NEW_BADGE_ERROR', done => {
+ testAction(
+ actions.receiveNewBadgeError,
+ null,
+ state,
+ [{ type: mutationTypes.RECEIVE_NEW_BADGE_ERROR }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('addBadge', () => {
+ let badgeInAddForm;
+ let dispatch;
+ let endpointMock;
+
+ beforeEach(() => {
+ endpointMock = axiosMock.onPost(dummyEndpointUrl);
+ dispatch = jest.fn();
+ badgeInAddForm = createDummyBadge();
+ state = {
+ ...state,
+ badgeInAddForm,
+ };
+ });
+
+ it('dispatches requestNewBadge and receiveNewBadge for successful response', done => {
+ const dummyResponse = createDummyBadgeResponse();
+
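+ // The mocked reply doubles as an assertion point for the serialized request payload and the dispatches made so far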
+ endpointMock.replyOnce(req => {
+ expect(req.data).toBe(
+ JSON.stringify({
+ name: 'TestBadge',
+ image_url: badgeInAddForm.imageUrl,
+ link_url: badgeInAddForm.linkUrl,
+ }),
+ );
+
+ expect(dispatch.mock.calls).toEqual([['requestNewBadge']]);
+ dispatch.mockClear();
+ return [200, dummyResponse];
+ });
+
+ const dummyBadge = transformBackendBadge(dummyResponse);
+ actions
+ .addBadge({ state, dispatch })
+ .then(() => {
+ expect(dispatch.mock.calls).toEqual([['receiveNewBadge', dummyBadge]]);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('dispatches requestNewBadge and receiveNewBadgeError for error response', done => {
+ endpointMock.replyOnce(req => {
+ expect(req.data).toBe(
+ JSON.stringify({
+ name: 'TestBadge',
+ image_url: badgeInAddForm.imageUrl,
+ link_url: badgeInAddForm.linkUrl,
+ }),
+ );
+
+ expect(dispatch.mock.calls).toEqual([['requestNewBadge']]);
+ dispatch.mockClear();
+ return [500, ''];
+ });
+
+ actions
+ .addBadge({ state, dispatch })
+ .then(() => done.fail('Expected Ajax call to fail!'))
+ .catch(() => {
+ expect(dispatch.mock.calls).toEqual([['receiveNewBadgeError']]);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
+ describe('requestDeleteBadge', () => {
+ it('commits REQUEST_DELETE_BADGE', done => {
+ testAction(
+ actions.requestDeleteBadge,
+ badgeId,
+ state,
+ [{ type: mutationTypes.REQUEST_DELETE_BADGE, payload: badgeId }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveDeleteBadge', () => {
+ it('commits RECEIVE_DELETE_BADGE', done => {
+ testAction(
+ actions.receiveDeleteBadge,
+ badgeId,
+ state,
+ [{ type: mutationTypes.RECEIVE_DELETE_BADGE, payload: badgeId }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveDeleteBadgeError', () => {
+ it('commits RECEIVE_DELETE_BADGE_ERROR', done => {
+ testAction(
+ actions.receiveDeleteBadgeError,
+ badgeId,
+ state,
+ [{ type: mutationTypes.RECEIVE_DELETE_BADGE_ERROR, payload: badgeId }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('deleteBadge', () => {
+ let dispatch;
+ let endpointMock;
+
+ beforeEach(() => {
+ endpointMock = axiosMock.onDelete(`${dummyEndpointUrl}/${badgeId}`);
+ dispatch = jest.fn();
+ });
+
+ it('dispatches requestDeleteBadge and receiveDeleteBadge for successful response', done => {
+ endpointMock.replyOnce(() => {
+ expect(dispatch.mock.calls).toEqual([['requestDeleteBadge', badgeId]]);
+ dispatch.mockClear();
+ return [200, ''];
+ });
+
+ actions
+ .deleteBadge({ state, dispatch }, { id: badgeId })
+ .then(() => {
+ expect(dispatch.mock.calls).toEqual([['receiveDeleteBadge', badgeId]]);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('dispatches requestDeleteBadge and receiveDeleteBadgeError for error response', done => {
+ endpointMock.replyOnce(() => {
+ expect(dispatch.mock.calls).toEqual([['requestDeleteBadge', badgeId]]);
+ dispatch.mockClear();
+ return [500, ''];
+ });
+
+ actions
+ .deleteBadge({ state, dispatch }, { id: badgeId })
+ .then(() => done.fail('Expected Ajax call to fail!'))
+ .catch(() => {
+ expect(dispatch.mock.calls).toEqual([['receiveDeleteBadgeError', badgeId]]);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
+ describe('editBadge', () => {
+ it('commits START_EDITING', done => {
+ const dummyBadge = createDummyBadge();
+ testAction(
+ actions.editBadge,
+ dummyBadge,
+ state,
+ [{ type: mutationTypes.START_EDITING, payload: dummyBadge }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('requestLoadBadges', () => {
+ it('commits REQUEST_LOAD_BADGES', done => {
+ const dummyData = 'this is not real data';
+ testAction(
+ actions.requestLoadBadges,
+ dummyData,
+ state,
+ [{ type: mutationTypes.REQUEST_LOAD_BADGES, payload: dummyData }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveLoadBadges', () => {
+ it('commits RECEIVE_LOAD_BADGES', done => {
+ const badges = dummyBadges;
+ testAction(
+ actions.receiveLoadBadges,
+ badges,
+ state,
+ [{ type: mutationTypes.RECEIVE_LOAD_BADGES, payload: badges }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveLoadBadgesError', () => {
+ it('commits RECEIVE_LOAD_BADGES_ERROR', done => {
+ testAction(
+ actions.receiveLoadBadgesError,
+ null,
+ state,
+ [{ type: mutationTypes.RECEIVE_LOAD_BADGES_ERROR }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('loadBadges', () => {
+ let dispatch;
+ let endpointMock;
+
+ beforeEach(() => {
+ endpointMock = axiosMock.onGet(dummyEndpointUrl);
+ dispatch = jest.fn();
+ });
+
+ it('dispatches requestLoadBadges and receiveLoadBadges for successful response', done => {
+ const dummyData = 'this is just some data';
+ const dummyResponse = [
+ createDummyBadgeResponse(),
+ createDummyBadgeResponse(),
+ createDummyBadgeResponse(),
+ ];
+ endpointMock.replyOnce(() => {
+ expect(dispatch.mock.calls).toEqual([['requestLoadBadges', dummyData]]);
+ dispatch.mockClear();
+ return [200, dummyResponse];
+ });
+
+ actions
+ .loadBadges({ state, dispatch }, dummyData)
+ .then(() => {
+ const badges = dummyResponse.map(transformBackendBadge);
+
+ expect(dispatch.mock.calls).toEqual([['receiveLoadBadges', badges]]);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('dispatches requestLoadBadges and receiveLoadBadgesError for error response', done => {
+ const dummyData = 'this is just some data';
+ endpointMock.replyOnce(() => {
+ expect(dispatch.mock.calls).toEqual([['requestLoadBadges', dummyData]]);
+ dispatch.mockClear();
+ return [500, ''];
+ });
+
+ actions
+ .loadBadges({ state, dispatch }, dummyData)
+ .then(() => done.fail('Expected Ajax call to fail!'))
+ .catch(() => {
+ expect(dispatch.mock.calls).toEqual([['receiveLoadBadgesError']]);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
+ describe('requestRenderedBadge', () => {
+ it('commits REQUEST_RENDERED_BADGE', done => {
+ testAction(
+ actions.requestRenderedBadge,
+ null,
+ state,
+ [{ type: mutationTypes.REQUEST_RENDERED_BADGE }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveRenderedBadge', () => {
+ it('commits RECEIVE_RENDERED_BADGE', done => {
+ const dummyBadge = createDummyBadge();
+ testAction(
+ actions.receiveRenderedBadge,
+ dummyBadge,
+ state,
+ [{ type: mutationTypes.RECEIVE_RENDERED_BADGE, payload: dummyBadge }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveRenderedBadgeError', () => {
+ it('commits RECEIVE_RENDERED_BADGE_ERROR', done => {
+ testAction(
+ actions.receiveRenderedBadgeError,
+ null,
+ state,
+ [{ type: mutationTypes.RECEIVE_RENDERED_BADGE_ERROR }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('renderBadge', () => {
+ let dispatch;
+ let endpointMock;
+ let badgeInForm;
+
+ beforeEach(() => {
+ badgeInForm = createDummyBadge();
+ state = {
+ ...state,
+ badgeInAddForm: badgeInForm,
+ };
+ const urlParameters = [
+ `link_url=${encodeURIComponent(badgeInForm.linkUrl)}`,
+ `image_url=${encodeURIComponent(badgeInForm.imageUrl)}`,
+ ].join('&');
+ endpointMock = axiosMock.onGet(`${dummyEndpointUrl}/render?${urlParameters}`);
+ dispatch = jest.fn();
+ });
+
+ it('returns immediately if imageUrl is empty', done => {
+ jest.spyOn(axios, 'get').mockImplementation(() => {});
+ badgeInForm.imageUrl = '';
+
+ actions
+ .renderBadge({ state, dispatch })
+ .then(() => {
+ expect(axios.get).not.toHaveBeenCalled();
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('returns immediately if linkUrl is empty', done => {
+ jest.spyOn(axios, 'get').mockImplementation(() => {});
+ badgeInForm.linkUrl = '';
+
+ actions
+ .renderBadge({ state, dispatch })
+ .then(() => {
+ expect(axios.get).not.toHaveBeenCalled();
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('escapes user input', done => {
+ jest
+ .spyOn(axios, 'get')
+ .mockImplementation(() => Promise.resolve({ data: createDummyBadgeResponse() }));
+ badgeInForm.imageUrl = '&make-sandwich=true';
+ badgeInForm.linkUrl = '<script>I am dangerous!</script>';
+
+ actions
+ .renderBadge({ state, dispatch })
+ .then(() => {
+ expect(axios.get.mock.calls.length).toBe(1);
+ const url = axios.get.mock.calls[0][0];
+
+ expect(url).toMatch(new RegExp(`^${dummyEndpointUrl}/render?`));
+ expect(url).toMatch(
+ new RegExp('\\?link_url=%3Cscript%3EI%20am%20dangerous!%3C%2Fscript%3E&'),
+ );
+ expect(url).toMatch(new RegExp('&image_url=%26make-sandwich%3Dtrue$'));
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('dispatches requestRenderedBadge and receiveRenderedBadge for successful response', done => {
+ const dummyResponse = createDummyBadgeResponse();
+ endpointMock.replyOnce(() => {
+ expect(dispatch.mock.calls).toEqual([['requestRenderedBadge']]);
+ dispatch.mockClear();
+ return [200, dummyResponse];
+ });
+
+ actions
+ .renderBadge({ state, dispatch })
+ .then(() => {
+ const renderedBadge = transformBackendBadge(dummyResponse);
+
+ expect(dispatch.mock.calls).toEqual([['receiveRenderedBadge', renderedBadge]]);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('dispatches requestRenderedBadge and receiveRenderedBadgeError for error response', done => {
+ endpointMock.replyOnce(() => {
+ expect(dispatch.mock.calls).toEqual([['requestRenderedBadge']]);
+ dispatch.mockClear();
+ return [500, ''];
+ });
+
+ actions
+ .renderBadge({ state, dispatch })
+ .then(() => done.fail('Expected Ajax call to fail!'))
+ .catch(() => {
+ expect(dispatch.mock.calls).toEqual([['receiveRenderedBadgeError']]);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
+ describe('requestUpdatedBadge', () => {
+ it('commits REQUEST_UPDATED_BADGE', done => {
+ testAction(
+ actions.requestUpdatedBadge,
+ null,
+ state,
+ [{ type: mutationTypes.REQUEST_UPDATED_BADGE }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveUpdatedBadge', () => {
+ it('commits RECEIVE_UPDATED_BADGE', done => {
+ const updatedBadge = createDummyBadge();
+ testAction(
+ actions.receiveUpdatedBadge,
+ updatedBadge,
+ state,
+ [{ type: mutationTypes.RECEIVE_UPDATED_BADGE, payload: updatedBadge }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveUpdatedBadgeError', () => {
+ it('commits RECEIVE_UPDATED_BADGE_ERROR', done => {
+ testAction(
+ actions.receiveUpdatedBadgeError,
+ null,
+ state,
+ [{ type: mutationTypes.RECEIVE_UPDATED_BADGE_ERROR }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('saveBadge', () => {
+ let badgeInEditForm;
+ let dispatch;
+ let endpointMock;
+
+ beforeEach(() => {
+ badgeInEditForm = createDummyBadge();
+ state = {
+ ...state,
+ badgeInEditForm,
+ };
+ endpointMock = axiosMock.onPut(`${dummyEndpointUrl}/${badgeInEditForm.id}`);
+ dispatch = jest.fn();
+ });
+
+ it('dispatches requestUpdatedBadge and receiveUpdatedBadge for successful response', done => {
+ const dummyResponse = createDummyBadgeResponse();
+
+ endpointMock.replyOnce(req => {
+ expect(req.data).toBe(
+ JSON.stringify({
+ name: 'TestBadge',
+ image_url: badgeInEditForm.imageUrl,
+ link_url: badgeInEditForm.linkUrl,
+ }),
+ );
+
+ expect(dispatch.mock.calls).toEqual([['requestUpdatedBadge']]);
+ dispatch.mockClear();
+ return [200, dummyResponse];
+ });
+
+ const updatedBadge = transformBackendBadge(dummyResponse);
+ actions
+ .saveBadge({ state, dispatch })
+ .then(() => {
+ expect(dispatch.mock.calls).toEqual([['receiveUpdatedBadge', updatedBadge]]);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('dispatches requestUpdatedBadge and receiveUpdatedBadgeError for error response', done => {
+ endpointMock.replyOnce(req => {
+ expect(req.data).toBe(
+ JSON.stringify({
+ name: 'TestBadge',
+ image_url: badgeInEditForm.imageUrl,
+ link_url: badgeInEditForm.linkUrl,
+ }),
+ );
+
+ expect(dispatch.mock.calls).toEqual([['requestUpdatedBadge']]);
+ dispatch.mockClear();
+ return [500, ''];
+ });
+
+ actions
+ .saveBadge({ state, dispatch })
+ .then(() => done.fail('Expected Ajax call to fail!'))
+ .catch(() => {
+ expect(dispatch.mock.calls).toEqual([['receiveUpdatedBadgeError']]);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
+ describe('stopEditing', () => {
+ it('commits STOP_EDITING', done => {
+ testAction(
+ actions.stopEditing,
+ null,
+ state,
+ [{ type: mutationTypes.STOP_EDITING }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('updateBadgeInForm', () => {
+ it('commits UPDATE_BADGE_IN_FORM', done => {
+ const dummyBadge = createDummyBadge();
+ testAction(
+ actions.updateBadgeInForm,
+ dummyBadge,
+ state,
+ [{ type: mutationTypes.UPDATE_BADGE_IN_FORM, payload: dummyBadge }],
+ [],
+ done,
+ );
+ });
+
+ describe('updateBadgeInModal', () => {
+ it('commits UPDATE_BADGE_IN_MODAL', done => {
+ const dummyBadge = createDummyBadge();
+ testAction(
+ actions.updateBadgeInModal,
+ dummyBadge,
+ state,
+ [{ type: mutationTypes.UPDATE_BADGE_IN_MODAL, payload: dummyBadge }],
+ [],
+ done,
+ );
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/badges/store/mutations_spec.js b/spec/frontend/badges/store/mutations_spec.js
index 8d26f83339d..8d26f83339d 100644
--- a/spec/javascripts/badges/store/mutations_spec.js
+++ b/spec/frontend/badges/store/mutations_spec.js
diff --git a/spec/frontend/behaviors/quick_submit_spec.js b/spec/frontend/behaviors/quick_submit_spec.js
new file mode 100644
index 00000000000..2dc2bb198e8
--- /dev/null
+++ b/spec/frontend/behaviors/quick_submit_spec.js
@@ -0,0 +1,153 @@
+import $ from 'jquery';
+import '~/behaviors/quick_submit';
+
+describe('Quick Submit behavior', () => {
+ let testContext;
+
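+ // Default event simulates Enter (keyCode 13) with a modifier key held down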
+ const keydownEvent = (options = { keyCode: 13, metaKey: true }) => $.Event('keydown', options);
+
+ preloadFixtures('snippets/show.html');
+
+ beforeEach(() => {
+ loadFixtures('snippets/show.html');
+
+ testContext = {};
+
+ testContext.spies = {
+ submit: jest.fn(),
+ };
+
+ $('form').submit(e => {
+ // Prevent a form submit from moving us off the testing page
+ e.preventDefault();
+ // Explicitly call the spy so the tests can assert whether this handler was called
+ testContext.spies.submit();
+ });
+ testContext.textarea = $('.js-quick-submit textarea').first();
+ });
+
+ it('does not respond to other keyCodes', () => {
+ testContext.textarea.trigger(
+ keydownEvent({
+ keyCode: 32,
+ }),
+ );
+
+ expect(testContext.spies.submit).not.toHaveBeenCalled();
+ });
+
+ it('does not respond to Enter alone', () => {
+ testContext.textarea.trigger(
+ keydownEvent({
+ ctrlKey: false,
+ metaKey: false,
+ }),
+ );
+
+ expect(testContext.spies.submit).not.toHaveBeenCalled();
+ });
+
+ it('does not respond to repeated events', () => {
+ testContext.textarea.trigger(
+ keydownEvent({
+ repeat: true,
+ }),
+ );
+
+ expect(testContext.spies.submit).not.toHaveBeenCalled();
+ });
+
+ it('disables input of type submit', () => {
+ const submitButton = $('.js-quick-submit input[type=submit]');
+ testContext.textarea.trigger(keydownEvent());
+
+ expect(submitButton).toBeDisabled();
+ });
+
+ it('disables button of type submit', () => {
+ const submitButton = $('.js-quick-submit input[type=submit]');
+ testContext.textarea.trigger(keydownEvent());
+
+ expect(submitButton).toBeDisabled();
+ });
+
+ it('only clicks one submit', () => {
+ const existingSubmit = $('.js-quick-submit input[type=submit]');
+ // Add an extra submit button
+ const newSubmit = $('<button type="submit">Submit it</button>');
+ newSubmit.insertAfter(testContext.textarea);
+
+ const spies = {
+ oldClickSpy: jest.fn(),
+ newClickSpy: jest.fn(),
+ };
+ existingSubmit.on('click', () => {
+ spies.oldClickSpy();
+ });
+ newSubmit.on('click', () => {
+ spies.newClickSpy();
+ });
+
+ testContext.textarea.trigger(keydownEvent());
+
+ expect(spies.oldClickSpy).not.toHaveBeenCalled();
+ expect(spies.newClickSpy).toHaveBeenCalled();
+ });
+ // We cannot stub `navigator.userAgent` for CI's `rake karma` task, so we'll
+ // only run the tests that apply to the current platform
+ if (navigator.userAgent.match(/Macintosh/)) {
+ describe('In Macintosh', () => {
+ it('responds to Meta+Enter', () => {
+ testContext.textarea.trigger(keydownEvent());
+
+ expect(testContext.spies.submit).toHaveBeenCalled();
+ });
+
+ it('excludes other modifier keys', () => {
+ testContext.textarea.trigger(
+ keydownEvent({
+ altKey: true,
+ }),
+ );
+ testContext.textarea.trigger(
+ keydownEvent({
+ ctrlKey: true,
+ }),
+ );
+ testContext.textarea.trigger(
+ keydownEvent({
+ shiftKey: true,
+ }),
+ );
+
+ expect(testContext.spies.submit).not.toHaveBeenCalled();
+ });
+ });
+ } else {
+ it('responds to Ctrl+Enter', () => {
+ testContext.textarea.trigger(keydownEvent());
+
+ expect(testContext.spies.submit).toHaveBeenCalled();
+ });
+
+ it('excludes other modifier keys', () => {
+ testContext.textarea.trigger(
+ keydownEvent({
+ altKey: true,
+ }),
+ );
+ testContext.textarea.trigger(
+ keydownEvent({
+ metaKey: true,
+ }),
+ );
+ testContext.textarea.trigger(
+ keydownEvent({
+ shiftKey: true,
+ }),
+ );
+
+ expect(testContext.spies.submit).not.toHaveBeenCalled();
+ });
+ }
+});
diff --git a/spec/javascripts/blob/3d_viewer/mesh_object_spec.js b/spec/frontend/blob/3d_viewer/mesh_object_spec.js
index 60be285039f..60be285039f 100644
--- a/spec/javascripts/blob/3d_viewer/mesh_object_spec.js
+++ b/spec/frontend/blob/3d_viewer/mesh_object_spec.js
diff --git a/spec/frontend/blob/balsamiq/balsamiq_viewer_spec.js b/spec/frontend/blob/balsamiq/balsamiq_viewer_spec.js
new file mode 100644
index 00000000000..3b64e4910e2
--- /dev/null
+++ b/spec/frontend/blob/balsamiq/balsamiq_viewer_spec.js
@@ -0,0 +1,361 @@
+import sqljs from 'sql.js';
+import axios from '~/lib/utils/axios_utils';
+import BalsamiqViewer from '~/blob/balsamiq/balsamiq_viewer';
+import ClassSpecHelper from '../../helpers/class_spec_helper';
+
+jest.mock('sql.js');
+
+describe('BalsamiqViewer', () => {
+ const mockArrayBuffer = new ArrayBuffer(10);
+ let balsamiqViewer;
+ let viewer;
+
+ describe('class constructor', () => {
+ beforeEach(() => {
+ viewer = {};
+
+ balsamiqViewer = new BalsamiqViewer(viewer);
+ });
+
+ it('should set .viewer', () => {
+ expect(balsamiqViewer.viewer).toBe(viewer);
+ });
+ });
+
+ describe('loadFile', () => {
+ let bv;
+ const endpoint = 'endpoint';
+ const requestSuccess = Promise.resolve({
+ data: mockArrayBuffer,
+ status: 200,
+ });
+
+ beforeEach(() => {
+ viewer = {};
+ bv = new BalsamiqViewer(viewer);
+ });
+
+ it('should call `axios.get` on `endpoint` param with responseType set to `arraybuffer`', () => {
+ jest.spyOn(axios, 'get').mockReturnValue(requestSuccess);
+ jest.spyOn(bv, 'renderFile').mockReturnValue();
+
+ bv.loadFile(endpoint);
+
+ expect(axios.get).toHaveBeenCalledWith(
+ endpoint,
+ expect.objectContaining({
+ responseType: 'arraybuffer',
+ }),
+ );
+ });
+
+ it('should call `renderFile` on request success', done => {
+ jest.spyOn(axios, 'get').mockReturnValue(requestSuccess);
+ jest.spyOn(bv, 'renderFile').mockImplementation(() => {});
+
+ bv.loadFile(endpoint)
+ .then(() => {
+ expect(bv.renderFile).toHaveBeenCalledWith(mockArrayBuffer);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('should not call `renderFile` on request failure', done => {
+ jest.spyOn(axios, 'get').mockReturnValue(Promise.reject());
+ jest.spyOn(bv, 'renderFile').mockImplementation(() => {});
+
+ bv.loadFile(endpoint)
+ .then(() => {
+ done.fail('Expected loadFile to throw error!');
+ })
+ .catch(() => {
+ expect(bv.renderFile).not.toHaveBeenCalled();
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
+ describe('renderFile', () => {
+ let container;
+ let previews;
+
+ beforeEach(() => {
+ viewer = {
+ appendChild: jest.fn(),
+ };
+ previews = [document.createElement('ul'), document.createElement('ul')];
+
+ balsamiqViewer = {
+ initDatabase: jest.fn(),
+ getPreviews: jest.fn(),
+ renderPreview: jest.fn(),
+ };
+ balsamiqViewer.viewer = viewer;
+
+ balsamiqViewer.getPreviews.mockReturnValue(previews);
+ balsamiqViewer.renderPreview.mockImplementation(preview => preview);
+ viewer.appendChild.mockImplementation(containerElement => {
+ container = containerElement;
+ });
+
+ BalsamiqViewer.prototype.renderFile.call(balsamiqViewer, mockArrayBuffer);
+ });
+
+ it('should call .initDatabase', () => {
+ expect(balsamiqViewer.initDatabase).toHaveBeenCalledWith(mockArrayBuffer);
+ });
+
+ it('should call .getPreviews', () => {
+ expect(balsamiqViewer.getPreviews).toHaveBeenCalled();
+ });
+
+ it('should call .renderPreview for each preview', () => {
+ const allArgs = balsamiqViewer.renderPreview.mock.calls;
+
+ expect(allArgs.length).toBe(2);
+
+ previews.forEach((preview, i) => {
+ expect(allArgs[i][0]).toBe(preview);
+ });
+ });
+
+ it('should set the container HTML', () => {
+ expect(container.innerHTML).toBe('<ul></ul><ul></ul>');
+ });
+
+ it('should add inline preview classes', () => {
+ expect(container.classList[0]).toBe('list-inline');
+ expect(container.classList[1]).toBe('previews');
+ });
+
+ it('should call viewer.appendChild', () => {
+ expect(viewer.appendChild).toHaveBeenCalledWith(container);
+ });
+ });
+
+ describe('initDatabase', () => {
+ let uint8Array;
+ let data;
+
+ beforeEach(() => {
+ uint8Array = {};
+ data = 'data';
+ balsamiqViewer = {};
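+ // Stub the global Uint8Array constructor so the spec can assert it receives the raw data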
+ window.Uint8Array = jest.fn();
+ window.Uint8Array.mockReturnValue(uint8Array);
+
+ BalsamiqViewer.prototype.initDatabase.call(balsamiqViewer, data);
+ });
+
+ it('should instantiate Uint8Array', () => {
+ expect(window.Uint8Array).toHaveBeenCalledWith(data);
+ });
+
+ it('should call sqljs.Database', () => {
+ expect(sqljs.Database).toHaveBeenCalledWith(uint8Array);
+ });
+
+ it('should set .database', () => {
+ expect(balsamiqViewer.database).not.toBe(null);
+ });
+ });
+
+ describe('getPreviews', () => {
+ let database;
+ let thumbnails;
+ let getPreviews;
+
+ beforeEach(() => {
+ database = {
+ exec: jest.fn(),
+ };
+ thumbnails = [{ values: [0, 1, 2] }];
+
+ balsamiqViewer = {
+ database,
+ };
+
+ jest.spyOn(BalsamiqViewer, 'parsePreview').mockImplementation(preview => preview.toString());
+ database.exec.mockReturnValue(thumbnails);
+
+ getPreviews = BalsamiqViewer.prototype.getPreviews.call(balsamiqViewer);
+ });
+
+ it('should call database.exec', () => {
+ expect(database.exec).toHaveBeenCalledWith('SELECT * FROM thumbnails');
+ });
+
+ it('should call .parsePreview for each value', () => {
+ const allArgs = BalsamiqViewer.parsePreview.mock.calls;
+
+ expect(allArgs.length).toBe(3);
+
+ thumbnails[0].values.forEach((value, i) => {
+ expect(allArgs[i][0]).toBe(value);
+ });
+ });
+
+ it('should return an array of parsed values', () => {
+ expect(getPreviews).toEqual(['0', '1', '2']);
+ });
+ });
+
+ describe('getResource', () => {
+ let database;
+ let resourceID;
+ let resource;
+ let getResource;
+
+ beforeEach(() => {
+ database = {
+ exec: jest.fn(),
+ };
+ resourceID = 4;
+ resource = ['resource'];
+
+ balsamiqViewer = {
+ database,
+ };
+
+ database.exec.mockReturnValue(resource);
+
+ getResource = BalsamiqViewer.prototype.getResource.call(balsamiqViewer, resourceID);
+ });
+
+ it('should call database.exec', () => {
+ expect(database.exec).toHaveBeenCalledWith(
+ `SELECT * FROM resources WHERE id = '${resourceID}'`,
+ );
+ });
+
+ it('should return the selected resource', () => {
+ expect(getResource).toBe(resource[0]);
+ });
+ });
+
+ describe('renderPreview', () => {
+ let previewElement;
+ let innerHTML;
+ let preview;
+ let renderPreview;
+
+ beforeEach(() => {
+ innerHTML = '<a>innerHTML</a>';
+ previewElement = {
+ outerHTML: '<p>outerHTML</p>',
+ classList: {
+ add: jest.fn(),
+ },
+ };
+ preview = {};
+
+ balsamiqViewer = {
+ renderTemplate: jest.fn(),
+ };
+
+ jest.spyOn(document, 'createElement').mockReturnValue(previewElement);
+ balsamiqViewer.renderTemplate.mockReturnValue(innerHTML);
+
+ renderPreview = BalsamiqViewer.prototype.renderPreview.call(balsamiqViewer, preview);
+ });
+
+ it('should call classList.add', () => {
+ expect(previewElement.classList.add).toHaveBeenCalledWith('preview');
+ });
+
+ it('should call .renderTemplate', () => {
+ expect(balsamiqViewer.renderTemplate).toHaveBeenCalledWith(preview);
+ });
+
+ it('should set .innerHTML', () => {
+ expect(previewElement.innerHTML).toBe(innerHTML);
+ });
+
+ it('should return element', () => {
+ expect(renderPreview).toBe(previewElement);
+ });
+ });
+
+ describe('renderTemplate', () => {
+ let preview;
+ let name;
+ let resource;
+ let template;
+ let renderTemplate;
+
+ beforeEach(() => {
+ preview = { resourceID: 1, image: 'image' };
+ name = 'name';
+ resource = 'resource';
+ template = `
+ <div class="card">
+ <div class="card-header">name</div>
+ <div class="card-body">
+ <img class="img-thumbnail" src="data:image/png;base64,image"/>
+ </div>
+ </div>
+ `;
+
+ balsamiqViewer = {
+ getResource: jest.fn(),
+ };
+
+ jest.spyOn(BalsamiqViewer, 'parseTitle').mockReturnValue(name);
+ balsamiqViewer.getResource.mockReturnValue(resource);
+
+ renderTemplate = BalsamiqViewer.prototype.renderTemplate.call(balsamiqViewer, preview);
+ });
+
+ it('should call .getResource', () => {
+ expect(balsamiqViewer.getResource).toHaveBeenCalledWith(preview.resourceID);
+ });
+
+ it('should call .parseTitle', () => {
+ expect(BalsamiqViewer.parseTitle).toHaveBeenCalledWith(resource);
+ });
+
+ it('should return the template string', () => {
+ expect(renderTemplate.replace(/\s/g, '')).toEqual(template.replace(/\s/g, ''));
+ });
+ });
+
+ describe('parsePreview', () => {
+ let preview;
+ let parsePreview;
+
+ beforeEach(() => {
+ preview = ['{}', '{ "id": 1 }'];
+
+ jest.spyOn(JSON, 'parse');
+
+ parsePreview = BalsamiqViewer.parsePreview(preview);
+ });
+
+ ClassSpecHelper.itShouldBeAStaticMethod(BalsamiqViewer, 'parsePreview');
+
+ it('should return the parsed JSON', () => {
+ expect(parsePreview).toEqual(JSON.parse('{ "id": 1 }'));
+ });
+ });
+
+ describe('parseTitle', () => {
+ let title;
+ let parseTitle;
+
+ beforeEach(() => {
+ title = { values: [['{}', '{}', '{"name":"name"}']] };
+
+ jest.spyOn(JSON, 'parse');
+
+ parseTitle = BalsamiqViewer.parseTitle(title);
+ });
+
+ ClassSpecHelper.itShouldBeAStaticMethod(BalsamiqViewer, 'parseTitle');
+
+ it('should return the name value', () => {
+ expect(parseTitle).toBe('name');
+ });
+ });
+});
diff --git a/spec/frontend/blob/blob_file_dropzone_spec.js b/spec/frontend/blob/blob_file_dropzone_spec.js
new file mode 100644
index 00000000000..4e9a05418df
--- /dev/null
+++ b/spec/frontend/blob/blob_file_dropzone_spec.js
@@ -0,0 +1,50 @@
+import $ from 'jquery';
+import BlobFileDropzone from '~/blob/blob_file_dropzone';
+
+describe('BlobFileDropzone', () => {
+ preloadFixtures('blob/show.html');
+ let dropzone;
+ let replaceFileButton;
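+ // Stub the enable/disable jQuery helpers so their calls can be asserted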
+ const jQueryMock = {
+ enable: jest.fn(),
+ disable: jest.fn(),
+ };
+
+ beforeEach(() => {
+ loadFixtures('blob/show.html');
+ const form = $('.js-upload-blob-form');
+ // eslint-disable-next-line no-new
+ new BlobFileDropzone(form, 'POST');
+ dropzone = $('.js-upload-blob-form .dropzone').get(0).dropzone;
+ dropzone.processQueue = jest.fn();
+ replaceFileButton = $('#submit-all');
+ $.fn.extend(jQueryMock);
+ });
+
+ describe('submit button', () => {
+ it('requires file', () => {
+ jest.spyOn(window, 'alert').mockImplementation(() => {});
+
+ replaceFileButton.click();
+
+ expect(window.alert).toHaveBeenCalled();
+ });
+
+ it('is disabled while uploading', () => {
+ jest.spyOn(window, 'alert').mockImplementation(() => {});
+
+ const file = new File([], 'some-file.jpg');
+ const fakeEvent = $.Event('drop', {
+ dataTransfer: { files: [file] },
+ });
+
+ dropzone.listeners[0].events.drop(fakeEvent);
+
+ replaceFileButton.click();
+
+ expect(window.alert).not.toHaveBeenCalled();
+ expect(jQueryMock.enable).toHaveBeenCalled();
+ expect(dropzone.processQueue).toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/frontend/blob/components/__snapshots__/blob_edit_content_spec.js.snap b/spec/frontend/blob/components/__snapshots__/blob_edit_content_spec.js.snap
new file mode 100644
index 00000000000..0409b118222
--- /dev/null
+++ b/spec/frontend/blob/components/__snapshots__/blob_edit_content_spec.js.snap
@@ -0,0 +1,14 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Blob Header Editing rendering matches the snapshot 1`] = `
+<div
+ class="file-content code"
+>
+ <pre
+ data-editor-loading=""
+ id="editor"
+ >
+ Lorem ipsum dolor sit amet, consectetur adipiscing elit.
+ </pre>
+</div>
+`;
diff --git a/spec/frontend/blob/components/__snapshots__/blob_edit_header_spec.js.snap b/spec/frontend/blob/components/__snapshots__/blob_edit_header_spec.js.snap
new file mode 100644
index 00000000000..e47a7dcfa2a
--- /dev/null
+++ b/spec/frontend/blob/components/__snapshots__/blob_edit_header_spec.js.snap
@@ -0,0 +1,16 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Blob Header Editing rendering matches the snapshot 1`] = `
+<div
+ class="js-file-title file-title-flex-parent"
+>
+ <gl-form-input-stub
+ class="form-control js-snippet-file-name qa-snippet-file-name"
+ id="snippet_file_name"
+ name="snippet_file_name"
+ placeholder="Give your file a name to add code highlighting, e.g. example.rb for Ruby"
+ type="text"
+ value="foo.md"
+ />
+</div>
+`;
diff --git a/spec/frontend/blob/components/blob_edit_content_spec.js b/spec/frontend/blob/components/blob_edit_content_spec.js
new file mode 100644
index 00000000000..eff53fe7ce9
--- /dev/null
+++ b/spec/frontend/blob/components/blob_edit_content_spec.js
@@ -0,0 +1,81 @@
+import { shallowMount } from '@vue/test-utils';
+import BlobEditContent from '~/blob/components/blob_edit_content.vue';
+import { initEditorLite } from '~/blob/utils';
+import { nextTick } from 'vue';
+
+jest.mock('~/blob/utils', () => ({
+ initEditorLite: jest.fn(),
+}));
+
+describe('Blob Header Editing', () => {
+ let wrapper;
+ const value = 'Lorem ipsum dolor sit amet, consectetur adipiscing elit.';
+ const fileName = 'lorem.txt';
+
+ function createComponent() {
+ wrapper = shallowMount(BlobEditContent, {
+ propsData: {
+ value,
+ fileName,
+ },
+ });
+ }
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('rendering', () => {
+ it('matches the snapshot', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ it('renders content', () => {
+ expect(wrapper.text()).toContain(value);
+ });
+ });
+
+ describe('functionality', () => {
+ it('initialises Editor Lite', () => {
+ const el = wrapper.find({ ref: 'editor' }).element;
+ expect(initEditorLite).toHaveBeenCalledWith({
+ el,
+ blobPath: fileName,
+ blobContent: value,
+ });
+ });
+
+ it('reacts to the changes in fileName', () => {
+ wrapper.vm.editor = {
+ updateModelLanguage: jest.fn(),
+ };
+
+ const newFileName = 'ipsum.txt';
+
+ wrapper.setProps({
+ fileName: newFileName,
+ });
+
+ return nextTick().then(() => {
+ expect(wrapper.vm.editor.updateModelLanguage).toHaveBeenCalledWith(newFileName);
+ });
+ });
+
+ it('emits input event when the blob content is changed', () => {
+ const editorEl = wrapper.find({ ref: 'editor' });
+ wrapper.vm.editor = {
+ getValue: jest.fn().mockReturnValue(value),
+ };
+
+ editorEl.trigger('focusout');
+
+ return nextTick().then(() => {
+ expect(wrapper.emitted().input[0]).toEqual([value]);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/blob/components/blob_edit_header_spec.js b/spec/frontend/blob/components/blob_edit_header_spec.js
new file mode 100644
index 00000000000..db7d7d7d48d
--- /dev/null
+++ b/spec/frontend/blob/components/blob_edit_header_spec.js
@@ -0,0 +1,50 @@
+import { shallowMount } from '@vue/test-utils';
+import BlobEditHeader from '~/blob/components/blob_edit_header.vue';
+import { GlFormInput } from '@gitlab/ui';
+
+describe('Blob Header Editing', () => {
+ let wrapper;
+ const value = 'foo.md';
+
+ function createComponent() {
+ wrapper = shallowMount(BlobEditHeader, {
+ propsData: {
+ value,
+ },
+ });
+ }
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('rendering', () => {
+ it('matches the snapshot', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ it('contains a form input field', () => {
+ expect(wrapper.contains(GlFormInput)).toBe(true);
+ });
+ });
+
+ describe('functionality', () => {
+ it('emits input event when the blob name is changed', () => {
+ const inputComponent = wrapper.find(GlFormInput);
+ const newValue = 'bar.txt';
+
+ wrapper.setData({
+ name: newValue,
+ });
+ inputComponent.vm.$emit('change');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted().input[0]).toEqual([newValue]);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/blob/notebook/notebook_viever_spec.js b/spec/frontend/blob/notebook/notebook_viever_spec.js
new file mode 100644
index 00000000000..535d2bd544a
--- /dev/null
+++ b/spec/frontend/blob/notebook/notebook_viever_spec.js
@@ -0,0 +1,108 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlLoadingIcon } from '@gitlab/ui';
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
+import component from '~/blob/notebook/notebook_viewer.vue';
+import NotebookLab from '~/notebook/index.vue';
+import waitForPromises from 'helpers/wait_for_promises';
+
+describe('iPython notebook renderer', () => {
+ let wrapper;
+ let mock;
+
+ const endpoint = 'test';
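+ // Minimal notebook payload: one markdown cell and one code cell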
+ const mockNotebook = {
+ cells: [
+ {
+ cell_type: 'markdown',
+ source: ['# test'],
+ },
+ {
+ cell_type: 'code',
+ execution_count: 1,
+ source: ['def test(str)', ' return str'],
+ outputs: [],
+ },
+ ],
+ };
+
+ const mountComponent = () => {
+ wrapper = shallowMount(component, { propsData: { endpoint } });
+ };
+
+ const findLoading = () => wrapper.find(GlLoadingIcon);
+ const findNotebookLab = () => wrapper.find(NotebookLab);
+ const findLoadErrorMessage = () => wrapper.find({ ref: 'loadErrorMessage' });
+ const findParseErrorMessage = () => wrapper.find({ ref: 'parsingErrorMessage' });
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ mock.restore();
+ });
+
+ it('shows loading icon', () => {
+ mock.onGet(endpoint).reply(() => new Promise(() => {}));
+ mountComponent({ loadFile: jest.fn() });
+ expect(findLoading().exists()).toBe(true);
+ });
+
+ describe('successful response', () => {
+ beforeEach(() => {
+ mock.onGet(endpoint).reply(200, mockNotebook);
+ mountComponent();
+ return waitForPromises();
+ });
+
+ it('does not show loading icon', () => {
+ expect(findLoading().exists()).toBe(false);
+ });
+
+ it('renders the notebook', () => {
+ expect(findNotebookLab().exists()).toBe(true);
+ });
+ });
+
+ describe('error in JSON response', () => {
+ beforeEach(() => {
+ mock.onGet(endpoint).reply(() =>
+ // eslint-disable-next-line prefer-promise-reject-errors
+ Promise.reject({ status: 200 }),
+ );
+
+ mountComponent();
+ return waitForPromises();
+ });
+
+ it('does not show loading icon', () => {
+ expect(findLoading().exists()).toBe(false);
+ });
+
+ it('shows error message', () => {
+ expect(findParseErrorMessage().text()).toEqual('An error occurred while parsing the file.');
+ });
+ });
+
+ describe('error getting file', () => {
+ beforeEach(() => {
+ mock.onGet(endpoint).reply(500, '');
+
+ mountComponent();
+ return waitForPromises();
+ });
+
+ it('does not show loading icon', () => {
+ expect(findLoading().exists()).toBe(false);
+ });
+
+ it('shows error message', () => {
+ expect(findLoadErrorMessage().text()).toEqual(
+ 'An error occurred while loading the file. Please try again later.',
+ );
+ });
+ });
+});
diff --git a/spec/frontend/blob/pdf/pdf_viewer_spec.js b/spec/frontend/blob/pdf/pdf_viewer_spec.js
new file mode 100644
index 00000000000..0eea3aea639
--- /dev/null
+++ b/spec/frontend/blob/pdf/pdf_viewer_spec.js
@@ -0,0 +1,67 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlLoadingIcon } from '@gitlab/ui';
+
+import { FIXTURES_PATH } from 'spec/test_constants';
+import component from '~/blob/pdf/pdf_viewer.vue';
+import PdfLab from '~/pdf/index.vue';
+
+const testPDF = `${FIXTURES_PATH}/blob/pdf/test.pdf`;
+
+describe('PDF renderer', () => {
+ let wrapper;
+
+ const mountComponent = () => {
+ wrapper = shallowMount(component, {
+ propsData: {
+ pdf: testPDF,
+ },
+ });
+ };
+
+ const findLoading = () => wrapper.find(GlLoadingIcon);
+ const findPdfLab = () => wrapper.find(PdfLab);
+ const findLoadError = () => wrapper.find({ ref: 'loadError' });
+
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('shows loading icon', () => {
+ expect(findLoading().exists()).toBe(true);
+ });
+
+ describe('successful response', () => {
+ beforeEach(() => {
+ findPdfLab().vm.$emit('pdflabload');
+ });
+
+ it('does not show loading icon', () => {
+ expect(findLoading().exists()).toBe(false);
+ });
+
+ it('renders the PDF', () => {
+ expect(findPdfLab().exists()).toBe(true);
+ });
+ });
+
+ describe('error getting file', () => {
+ beforeEach(() => {
+ findPdfLab().vm.$emit('pdflaberror', 'foo');
+ });
+
+ it('does not show loading icon', () => {
+ expect(findLoading().exists()).toBe(false);
+ });
+
+ it('shows error message', () => {
+ expect(findLoadError().text()).toBe(
+ 'An error occurred while loading the file. Please try again later.',
+ );
+ });
+ });
+});
diff --git a/spec/frontend/blob/pipeline_tour_success_spec.js b/spec/frontend/blob/pipeline_tour_success_spec.js
new file mode 100644
index 00000000000..f6783b31a73
--- /dev/null
+++ b/spec/frontend/blob/pipeline_tour_success_spec.js
@@ -0,0 +1,40 @@
+import pipelineTourSuccess from '~/blob/pipeline_tour_success_modal.vue';
+import { shallowMount } from '@vue/test-utils';
+import Cookies from 'js-cookie';
+import { GlSprintf, GlModal } from '@gitlab/ui';
+
+describe('PipelineTourSuccessModal', () => {
+ let wrapper;
+ let cookieSpy;
+ const goToPipelinesPath = 'some_pipeline_path';
+ const commitCookie = 'some_cookie';
+
+ beforeEach(() => {
+ wrapper = shallowMount(pipelineTourSuccess, {
+ propsData: {
+ goToPipelinesPath,
+ commitCookie,
+ },
+ });
+
+ cookieSpy = jest.spyOn(Cookies, 'remove');
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('has expected structure', () => {
+ const modal = wrapper.find(GlModal);
+ const sprintf = modal.find(GlSprintf);
+
+ expect(modal.attributes('title')).toContain("That's it, well done!");
+ expect(sprintf.exists()).toBe(true);
+ });
+
+ it('calls to remove cookie', () => {
+ wrapper.vm.disableModalFromRenderingAgain();
+
+ expect(cookieSpy).toHaveBeenCalledWith(commitCookie);
+ });
+});
diff --git a/spec/frontend/blob/sketch/index_spec.js b/spec/frontend/blob/sketch/index_spec.js
new file mode 100644
index 00000000000..f5e9da21b2a
--- /dev/null
+++ b/spec/frontend/blob/sketch/index_spec.js
@@ -0,0 +1,92 @@
+import JSZip from 'jszip';
+import SketchLoader from '~/blob/sketch';
+
+jest.mock('jszip');
+
+describe('Sketch viewer', () => {
+ preloadFixtures('static/sketch_viewer.html');
+
+ beforeEach(() => {
+ loadFixtures('static/sketch_viewer.html');
+ window.URL = {
+ createObjectURL: jest.fn(() => 'http://foo/bar'),
+ };
+ });
+
+ afterEach(() => {
+ window.URL = {};
+ });
+
+ describe('with error message', () => {
+ beforeEach(done => {
+ jest.spyOn(SketchLoader.prototype, 'getZipFile').mockImplementation(
+ () =>
+ new Promise((resolve, reject) => {
+ reject();
+ done();
+ }),
+ );
+
+ return new SketchLoader(document.getElementById('js-sketch-viewer'));
+ });
+
+ it('renders error message', () => {
+ expect(document.querySelector('#js-sketch-viewer p')).not.toBeNull();
+
+ expect(document.querySelector('#js-sketch-viewer p').textContent.trim()).toContain(
+ 'Cannot show preview.',
+ );
+ });
+
+ it('removes the loading icon', () => {
+ expect(document.querySelector('.js-loading-icon')).toBeNull();
+ });
+ });
+
+ describe('success', () => {
+ beforeEach(done => {
+ const loadAsyncMock = {
+ files: {
+ 'previews/preview.png': {
+ async: jest.fn(),
+ },
+ },
+ };
+
+ loadAsyncMock.files['previews/preview.png'].async.mockImplementation(
+ () =>
+ new Promise(resolve => {
+ resolve('foo');
+ done();
+ }),
+ );
+
+ jest.spyOn(SketchLoader.prototype, 'getZipFile').mockResolvedValue();
+ jest.spyOn(JSZip, 'loadAsync').mockResolvedValue(loadAsyncMock);
+ return new SketchLoader(document.getElementById('js-sketch-viewer'));
+ });
+
+ it('does not render error message', () => {
+ expect(document.querySelector('#js-sketch-viewer p')).toBeNull();
+ });
+
+ it('removes the loading icon', () => {
+ expect(document.querySelector('.js-loading-icon')).toBeNull();
+ });
+
+ it('renders preview img', () => {
+ const img = document.querySelector('#js-sketch-viewer img');
+
+ expect(img).not.toBeNull();
+ expect(img.classList.contains('img-fluid')).toBeTruthy();
+ });
+
+ it('renders link to image', () => {
+ const img = document.querySelector('#js-sketch-viewer img');
+ const link = document.querySelector('#js-sketch-viewer a');
+
+ expect(link.href).toBe(img.src);
+ expect(link.target).toBe('_blank');
+ });
+ });
+});
diff --git a/spec/frontend/blob/suggest_gitlab_ci_yml/components/popover_spec.js b/spec/frontend/blob/suggest_gitlab_ci_yml/components/popover_spec.js
new file mode 100644
index 00000000000..43e92bdca5f
--- /dev/null
+++ b/spec/frontend/blob/suggest_gitlab_ci_yml/components/popover_spec.js
@@ -0,0 +1,100 @@
+import { shallowMount } from '@vue/test-utils';
+import Popover from '~/blob/suggest_gitlab_ci_yml/components/popover.vue';
+import Cookies from 'js-cookie';
+import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
+import * as utils from '~/lib/utils/common_utils';
+
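+// Replace scrollToElement with a mock while keeping the rest of common_utils intact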
+jest.mock('~/lib/utils/common_utils', () => ({
+ ...jest.requireActual('~/lib/utils/common_utils'),
+ scrollToElement: jest.fn(),
+}));
+
+const target = 'gitlab-ci-yml-selector';
+const dismissKey = 'suggest_gitlab_ci_yml_99';
+const defaultTrackLabel = 'suggest_gitlab_ci_yml';
+const commitTrackLabel = 'suggest_commit_first_project_gitlab_ci_yml';
+const humanAccess = 'owner';
+
+describe('Suggest gitlab-ci.yml Popover', () => {
+ let wrapper;
+
+ function createWrapper(trackLabel) {
+ wrapper = shallowMount(Popover, {
+ propsData: {
+ target,
+ trackLabel,
+ dismissKey,
+ humanAccess,
+ },
+ });
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('when no dismiss cookie is set', () => {
+ beforeEach(() => {
+ createWrapper(defaultTrackLabel);
+ });
+
+ it('sets popoverDismissed to false', () => {
+ expect(wrapper.vm.popoverDismissed).toEqual(false);
+ });
+ });
+
+ describe('when the dismiss cookie is set', () => {
+ beforeEach(() => {
+ Cookies.set(dismissKey, true);
+ createWrapper(defaultTrackLabel);
+ });
+
+ it('sets popoverDismissed to true', () => {
+ expect(wrapper.vm.popoverDismissed).toEqual(true);
+ });
+
+ afterEach(() => {
+ Cookies.remove(dismissKey);
+ });
+ });
+
+ describe('tracking', () => {
+ let trackingSpy;
+
+ beforeEach(() => {
+ createWrapper(commitTrackLabel);
+ trackingSpy = mockTracking('_category_', wrapper.element, jest.spyOn);
+ });
+
+ afterEach(() => {
+ unmockTracking();
+ });
+
+ it('sends a tracking event with the expected properties for the popover being viewed', () => {
+ const expectedCategory = undefined;
+ const expectedAction = undefined;
+ const expectedLabel = 'suggest_commit_first_project_gitlab_ci_yml';
+ const expectedProperty = 'owner';
+
+ document.body.dataset.page = 'projects:blob:new';
+
+ wrapper.vm.trackOnShow();
+
+ expect(trackingSpy).toHaveBeenCalledWith(expectedCategory, expectedAction, {
+ label: expectedLabel,
+ property: expectedProperty,
+ });
+ });
+ });
+
+ describe('when the popover is mounted with the trackLabel of the Confirm button popover at the bottom of the page', () => {
+ it('calls scrollToElement so that the Confirm button and popover will be in sight', () => {
+ const scrollToElementSpy = jest.spyOn(utils, 'scrollToElement');
+
+ createWrapper(commitTrackLabel);
+
+ expect(scrollToElementSpy).toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/frontend/blob/utils_spec.js b/spec/frontend/blob/utils_spec.js
new file mode 100644
index 00000000000..39a73aae444
--- /dev/null
+++ b/spec/frontend/blob/utils_spec.js
@@ -0,0 +1,95 @@
+import Editor from '~/editor/editor_lite';
+import * as utils from '~/blob/utils';
+
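+// Mock Editor Lite so the spec can observe createInstance calls without loading the real editor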
+const mockCreateMonacoInstance = jest.fn();
+jest.mock('~/editor/editor_lite', () => {
+ return jest.fn().mockImplementation(() => {
+ return { createInstance: mockCreateMonacoInstance };
+ });
+});
+
+const mockCreateAceInstance = jest.fn();
+global.ace = {
+ edit: mockCreateAceInstance,
+};
+
+describe('Blob utilities', () => {
+ beforeEach(() => {
+ Editor.mockClear();
+ });
+
+ describe('initEditorLite', () => {
+ let editorEl;
+ const blobPath = 'foo.txt';
+ const blobContent = 'Foo bar';
+
+ beforeEach(() => {
+ setFixtures('<div id="editor"></div>');
+ editorEl = document.getElementById('editor');
+ });
+
+ describe('Monaco editor', () => {
+ let origProp;
+
+ beforeEach(() => {
+ origProp = window.gon;
+ window.gon = {
+ features: {
+ monacoSnippets: true,
+ },
+ };
+ });
+
+ afterEach(() => {
+ window.gon = origProp;
+ });
+
+ it('initializes the Editor Lite', () => {
+ utils.initEditorLite({ el: editorEl });
+ expect(Editor).toHaveBeenCalled();
+ });
+
+ it('creates the instance with the passed parameters', () => {
+ utils.initEditorLite({ el: editorEl });
+ expect(mockCreateMonacoInstance.mock.calls[0]).toEqual([
+ {
+ el: editorEl,
+ blobPath: undefined,
+ blobContent: undefined,
+ },
+ ]);
+
+ utils.initEditorLite({ el: editorEl, blobPath, blobContent });
+ expect(mockCreateMonacoInstance.mock.calls[1]).toEqual([
+ {
+ el: editorEl,
+ blobPath,
+ blobContent,
+ },
+ ]);
+ });
+ });
+ describe('ACE editor', () => {
+ let origProp;
+
+ beforeEach(() => {
+ origProp = window.gon;
+ window.gon = {
+ features: {
+ monacoSnippets: false,
+ },
+ };
+ });
+
+ afterEach(() => {
+ window.gon = origProp;
+ });
+
+ it('does not initialize the Editor Lite', () => {
+ utils.initEditorLite({ el: editorEl });
+ expect(Editor).not.toHaveBeenCalled();
+ expect(mockCreateAceInstance).toHaveBeenCalledWith(editorEl);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/blob/viewer/index_spec.js b/spec/frontend/blob/viewer/index_spec.js
new file mode 100644
index 00000000000..7239f59c6fa
--- /dev/null
+++ b/spec/frontend/blob/viewer/index_spec.js
@@ -0,0 +1,179 @@
+/* eslint-disable no-new */
+
+import $ from 'jquery';
+import MockAdapter from 'axios-mock-adapter';
+import BlobViewer from '~/blob/viewer/index';
+import axios from '~/lib/utils/axios_utils';
+
+describe('Blob viewer', () => {
+ let blob;
+ let mock;
+
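+ // Stub jQuery's tooltip plugin so tooltip calls do not fail under Jest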
+ const jQueryMock = {
+ tooltip: jest.fn(),
+ };
+
+ preloadFixtures('snippets/show.html');
+
+ beforeEach(() => {
+ $.fn.extend(jQueryMock);
+ mock = new MockAdapter(axios);
+
+ loadFixtures('snippets/show.html');
+ $('#modal-upload-blob').remove();
+
+ blob = new BlobViewer();
+
+ mock.onGet('http://test.host/snippets/1.json?viewer=rich').reply(200, {
+ html: '<div>testing</div>',
+ });
+
+ mock.onGet('http://test.host/snippets/1.json?viewer=simple').reply(200, {
+ html: '<div>testing</div>',
+ });
+
+ jest.spyOn(axios, 'get');
+ });
+
+ afterEach(() => {
+ mock.restore();
+ window.location.hash = '';
+ });
+
+ it('loads source file after switching views', done => {
+ document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
+
+ setImmediate(() => {
+ expect(
+ document
+ .querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]')
+ .classList.contains('hidden'),
+ ).toBeFalsy();
+
+ done();
+ });
+ });
+
+ it('loads source file when line number is in hash', done => {
+ window.location.hash = '#L1';
+
+ new BlobViewer();
+
+ setImmediate(() => {
+ expect(
+ document
+ .querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]')
+ .classList.contains('hidden'),
+ ).toBeFalsy();
+
+ done();
+ });
+ });
+
+ it('does not reload file if already loaded', () => {
+ const asyncClick = () =>
+ new Promise(resolve => {
+ document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
+
+ setImmediate(resolve);
+ });
+
+ return asyncClick()
+ .then(() => asyncClick())
+ .then(() => {
+ expect(
+ document.querySelector('.blob-viewer[data-type="simple"]').getAttribute('data-loaded'),
+ ).toBe('true');
+ });
+ });
+
+ describe('copy blob button', () => {
+ let copyButton;
+
+ beforeEach(() => {
+ copyButton = document.querySelector('.js-copy-blob-source-btn');
+ });
+
+ it('disabled on load', () => {
+ expect(copyButton.classList.contains('disabled')).toBeTruthy();
+ });
+
+ it('has tooltip when disabled', () => {
+ expect(copyButton.getAttribute('title')).toBe(
+ 'Switch to the source to copy the file contents',
+ );
+ });
+
+ it('is blurred when clicked and disabled', () => {
+ jest.spyOn(copyButton, 'blur').mockImplementation(() => {});
+
+ copyButton.click();
+
+ expect(copyButton.blur).toHaveBeenCalled();
+ });
+
+ it('is not blurred when clicked and not disabled', () => {
+ jest.spyOn(copyButton, 'blur').mockImplementation(() => {});
+
+ copyButton.classList.remove('disabled');
+ copyButton.click();
+
+ expect(copyButton.blur).not.toHaveBeenCalled();
+ });
+
+ it('enables after switching to simple view', done => {
+ document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
+
+ setImmediate(() => {
+ expect(copyButton.classList.contains('disabled')).toBeFalsy();
+
+ done();
+ });
+ });
+
+ it('updates tooltip after switching to simple view', done => {
+ document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
+
+ setImmediate(() => {
+ expect(copyButton.getAttribute('title')).toBe('Copy file contents');
+
+ done();
+ });
+ });
+ });
+
+ describe('switchToViewer', () => {
+ it('removes active class from old viewer button', () => {
+ blob.switchToViewer('simple');
+
+ expect(
+ document.querySelector('.js-blob-viewer-switch-btn.active[data-viewer="rich"]'),
+ ).toBeNull();
+ });
+
+ it('adds active class to new viewer button', () => {
+ const simpleBtn = document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]');
+
+ jest.spyOn(simpleBtn, 'blur').mockImplementation(() => {});
+
+ blob.switchToViewer('simple');
+
+ expect(simpleBtn.classList.contains('active')).toBeTruthy();
+
+ expect(simpleBtn.blur).toHaveBeenCalled();
+ });
+
+ it('sends AJAX request when switching to simple view', () => {
+ blob.switchToViewer('simple');
+
+ expect(axios.get).toHaveBeenCalled();
+ });
+
+ it('does not send AJAX request when switching to rich view', () => {
+ blob.switchToViewer('simple');
+ blob.switchToViewer('rich');
+
+ expect(axios.get.mock.calls.length).toBe(1);
+ });
+ });
+});
diff --git a/spec/frontend/blob_edit/blob_bundle_spec.js b/spec/frontend/blob_edit/blob_bundle_spec.js
new file mode 100644
index 00000000000..be438781850
--- /dev/null
+++ b/spec/frontend/blob_edit/blob_bundle_spec.js
@@ -0,0 +1,31 @@
+import $ from 'jquery';
+import blobBundle from '~/blob_edit/blob_bundle';
+
+jest.mock('~/blob_edit/edit_blob');
+
+describe('BlobBundle', () => {
+ beforeEach(() => {
+ setFixtures(`
+ <div class="js-edit-blob-form" data-blob-filename="blah">
+ <button class="js-commit-button"></button>
+ <a class="btn btn-cancel" href="#"></a>
+ </div>`);
+ blobBundle();
+ });
+
+ it('sets the window beforeunload listener to a function returning a string', () => {
+ expect(window.onbeforeunload()).toBe('');
+ });
+
+ it('removes beforeunload listener if commit button is clicked', () => {
+ $('.js-commit-button').click();
+
+ expect(window.onbeforeunload).toBeNull();
+ });
+
+ it('removes beforeunload listener when cancel link is clicked', () => {
+ $('.btn.btn-cancel').click();
+
+ expect(window.onbeforeunload).toBeNull();
+ });
+});
diff --git a/spec/frontend/boards/boards_store_spec.js b/spec/frontend/boards/boards_store_spec.js
index 2dc9039bc9d..5c5315fd465 100644
--- a/spec/frontend/boards/boards_store_spec.js
+++ b/spec/frontend/boards/boards_store_spec.js
@@ -440,23 +440,6 @@ describe('boardsStore', () => {
});
});
- describe('allBoards', () => {
- const url = `${endpoints.boardsEndpoint}.json`;
-
- it('makes a request to fetch all boards', () => {
- axiosMock.onGet(url).replyOnce(200, dummyResponse);
- const expectedResponse = expect.objectContaining({ data: dummyResponse });
-
- return expect(boardsStore.allBoards()).resolves.toEqual(expectedResponse);
- });
-
- it('fails for error response', () => {
- axiosMock.onGet(url).replyOnce(500);
-
- return expect(boardsStore.allBoards()).rejects.toThrow();
- });
- });
-
describe('recentBoards', () => {
const url = `${endpoints.recentBoardsEndpoint}.json`;
diff --git a/spec/frontend/boards/components/boards_selector_spec.js b/spec/frontend/boards/components/boards_selector_spec.js
new file mode 100644
index 00000000000..b1ae86c2d3f
--- /dev/null
+++ b/spec/frontend/boards/components/boards_selector_spec.js
@@ -0,0 +1,210 @@
+import { nextTick } from 'vue';
+import { mount } from '@vue/test-utils';
+import { GlDropdown, GlLoadingIcon } from '@gitlab/ui';
+import { TEST_HOST } from 'spec/test_constants';
+import BoardsSelector from '~/boards/components/boards_selector.vue';
+import boardsStore from '~/boards/stores/boards_store';
+
+const throttleDuration = 1;
+
+function boardGenerator(n) {
+ return new Array(n).fill().map((board, index) => {
+ const id = `${index}`;
+ const name = `board${id}`;
+
+ return {
+ id,
+ name,
+ };
+ });
+}
+
+describe('BoardsSelector', () => {
+ let wrapper;
+ let allBoardsResponse;
+ let recentBoardsResponse;
+ const boards = boardGenerator(20);
+ const recentBoards = boardGenerator(5);
+
+ const fillSearchBox = filterTerm => {
+ const searchBox = wrapper.find({ ref: 'searchBox' });
+ const searchBoxInput = searchBox.find('input');
+ searchBoxInput.setValue(filterTerm);
+ searchBoxInput.trigger('input');
+ };
+
+ const getDropdownItems = () => wrapper.findAll('.js-dropdown-item');
+ const getDropdownHeaders = () => wrapper.findAll('.dropdown-bold-header');
+ const getLoadingIcon = () => wrapper.find(GlLoadingIcon);
+
+ beforeEach(() => {
+ const $apollo = {
+ queries: {
+ boards: {
+ loading: false,
+ },
+ },
+ };
+
+ boardsStore.setEndpoints({
+ boardsEndpoint: '',
+ recentBoardsEndpoint: '',
+ listsEndpoint: '',
+ bulkUpdatePath: '',
+ boardId: '',
+ });
+
+ allBoardsResponse = Promise.resolve({
+ data: {
+ group: {
+ boards: {
+ edges: boards.map(board => ({ node: board })),
+ },
+ },
+ },
+ });
+ recentBoardsResponse = Promise.resolve({
+ data: recentBoards,
+ });
+
+ boardsStore.allBoards = jest.fn(() => allBoardsResponse);
+ boardsStore.recentBoards = jest.fn(() => recentBoardsResponse);
+
+ wrapper = mount(BoardsSelector, {
+ propsData: {
+ throttleDuration,
+ currentBoard: {
+ id: 1,
+ name: 'Development',
+ milestone_id: null,
+ weight: null,
+ assignee_id: null,
+ labels: [],
+ },
+ milestonePath: `${TEST_HOST}/milestone/path`,
+ boardBaseUrl: `${TEST_HOST}/board/base/url`,
+ hasMissingBoards: false,
+ canAdminBoard: true,
+ multipleIssueBoardsAvailable: true,
+ labelsPath: `${TEST_HOST}/labels/path`,
+ projectId: 42,
+ groupId: 19,
+ scopedIssueBoardFeatureEnabled: true,
+ weights: [],
+ },
+ mocks: { $apollo },
+ attachToDocument: true,
+ });
+
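+    // Stub Apollo's addSmartQuery so that triggering a query only flips its loading flag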
+ wrapper.vm.$apollo.addSmartQuery = jest.fn((_, options) => {
+ wrapper.setData({
+ [options.loadingKey]: true,
+ });
+ });
+
+    // Emits the gl-dropdown show event to simulate the dropdown being opened at initialization time
+ wrapper.find(GlDropdown).vm.$emit('show');
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('loading', () => {
+ // we are testing loading state, so don't resolve responses until after the tests
+ afterEach(() => {
+ return Promise.all([allBoardsResponse, recentBoardsResponse]).then(() => nextTick());
+ });
+
+ it('shows loading spinner', () => {
+ expect(getDropdownHeaders()).toHaveLength(0);
+ expect(getDropdownItems()).toHaveLength(0);
+ expect(getLoadingIcon().exists()).toBe(true);
+ });
+ });
+
+ describe('loaded', () => {
+ beforeEach(() => {
+ return Promise.all([allBoardsResponse, recentBoardsResponse]).then(() => nextTick());
+ });
+
+ it('hides loading spinner', () => {
+ expect(getLoadingIcon().exists()).toBe(false);
+ });
+
+ describe('filtering', () => {
+ beforeEach(() => {
+ wrapper.setData({
+ boards,
+ });
+
+ return nextTick();
+ });
+
+ it('shows all boards without filtering', () => {
+ expect(getDropdownItems()).toHaveLength(boards.length + recentBoards.length);
+ });
+
+ it('shows only matching boards when filtering', () => {
+ const filterTerm = 'board1';
+ const expectedCount = boards.filter(board => board.name.includes(filterTerm)).length;
+
+ fillSearchBox(filterTerm);
+
+ return nextTick().then(() => {
+ expect(getDropdownItems()).toHaveLength(expectedCount);
+ });
+ });
+
+ it('shows message if there are no matching boards', () => {
+ fillSearchBox('does not exist');
+
+ return nextTick().then(() => {
+ expect(getDropdownItems()).toHaveLength(0);
+ expect(wrapper.text().includes('No matching boards found')).toBe(true);
+ });
+ });
+ });
+
+ describe('recent boards section', () => {
+      it('shows only when there are more than 10 boards', () => {
+ wrapper.setData({
+ boards,
+ });
+
+ return nextTick().then(() => {
+ expect(getDropdownHeaders()).toHaveLength(2);
+ });
+ });
+
+      it('does not show when there are fewer than 10 boards', () => {
+ wrapper.setData({
+ boards: boards.slice(0, 5),
+ });
+
+ return nextTick().then(() => {
+ expect(getDropdownHeaders()).toHaveLength(0);
+ });
+ });
+
+      it('does not show when the recentBoards API returns an empty array', () => {
+ wrapper.setData({
+ recentBoards: [],
+ });
+
+ return nextTick().then(() => {
+ expect(getDropdownHeaders()).toHaveLength(0);
+ });
+ });
+
+ it('does not show when search is active', () => {
+ fillSearchBox('Random string');
+
+ return nextTick().then(() => {
+ expect(getDropdownHeaders()).toHaveLength(0);
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/boards/components/issue_card_inner_scoped_label_spec.js b/spec/frontend/boards/components/issue_card_inner_scoped_label_spec.js
deleted file mode 100644
index 7389cb14ecb..00000000000
--- a/spec/frontend/boards/components/issue_card_inner_scoped_label_spec.js
+++ /dev/null
@@ -1,43 +0,0 @@
-import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
-import IssueCardInnerScopedLabel from '~/boards/components/issue_card_inner_scoped_label.vue';
-
-describe('IssueCardInnerScopedLabel Component', () => {
- let vm;
- const Component = Vue.extend(IssueCardInnerScopedLabel);
- const props = {
- label: { title: 'Foo::Bar', description: 'Some Random Description' },
- labelStyle: { background: 'white', color: 'black' },
- scopedLabelsDocumentationLink: '/docs-link',
- };
- const createComponent = () => mountComponent(Component, { ...props });
-
- beforeEach(() => {
- vm = createComponent();
- });
-
- afterEach(() => {
- vm.$destroy();
- });
-
- it('should render label title', () => {
- expect(vm.$el.querySelector('.color-label').textContent.trim()).toEqual('Foo::Bar');
- });
-
- it('should render question mark symbol', () => {
- expect(vm.$el.querySelector('.fa-question-circle')).not.toBeNull();
- });
-
- it('should render label style provided', () => {
- const node = vm.$el.querySelector('.color-label');
-
- expect(node.style.background).toEqual(props.labelStyle.background);
- expect(node.style.color).toEqual(props.labelStyle.color);
- });
-
- it('should render the docs link', () => {
- expect(vm.$el.querySelector('a.scoped-label').href).toContain(
- props.scopedLabelsDocumentationLink,
- );
- });
-});
diff --git a/spec/frontend/boards/components/issue_due_date_spec.js b/spec/frontend/boards/components/issue_due_date_spec.js
index 68e26b68f04..880859287e1 100644
--- a/spec/frontend/boards/components/issue_due_date_spec.js
+++ b/spec/frontend/boards/components/issue_due_date_spec.js
@@ -1,65 +1,78 @@
-import Vue from 'vue';
+import { shallowMount } from '@vue/test-utils';
import dateFormat from 'dateformat';
import IssueDueDate from '~/boards/components/issue_due_date.vue';
-import mountComponent from '../../helpers/vue_mount_component_helper';
+
+const createComponent = (dueDate = new Date(), closed = false) =>
+ shallowMount(IssueDueDate, {
+ propsData: {
+ closed,
+ date: dateFormat(dueDate, 'yyyy-mm-dd', true),
+ },
+ });
+
+const findTime = wrapper => wrapper.find('time');
describe('Issue Due Date component', () => {
- let vm;
+ let wrapper;
let date;
- const Component = Vue.extend(IssueDueDate);
- const createComponent = (dueDate = new Date()) =>
- mountComponent(Component, { date: dateFormat(dueDate, 'yyyy-mm-dd', true) });
beforeEach(() => {
date = new Date();
- vm = createComponent();
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
it('should render "Today" if the due date is today', () => {
- const timeContainer = vm.$el.querySelector('time');
+ wrapper = createComponent();
- expect(timeContainer.textContent.trim()).toEqual('Today');
+ expect(findTime(wrapper).text()).toBe('Today');
});
it('should render "Yesterday" if the due date is yesterday', () => {
date.setDate(date.getDate() - 1);
- vm = createComponent(date);
+ wrapper = createComponent(date);
- expect(vm.$el.querySelector('time').textContent.trim()).toEqual('Yesterday');
+ expect(findTime(wrapper).text()).toBe('Yesterday');
});
it('should render "Tomorrow" if the due date is one day from now', () => {
date.setDate(date.getDate() + 1);
- vm = createComponent(date);
+ wrapper = createComponent(date);
- expect(vm.$el.querySelector('time').textContent.trim()).toEqual('Tomorrow');
+ expect(findTime(wrapper).text()).toBe('Tomorrow');
});
it('should render day of the week if due date is one week away', () => {
date.setDate(date.getDate() + 5);
- vm = createComponent(date);
+ wrapper = createComponent(date);
- expect(vm.$el.querySelector('time').textContent.trim()).toEqual(dateFormat(date, 'dddd'));
+ expect(findTime(wrapper).text()).toBe(dateFormat(date, 'dddd'));
});
it('should render month and day for other dates', () => {
date.setDate(date.getDate() + 17);
- vm = createComponent(date);
+ wrapper = createComponent(date);
const today = new Date();
const isDueInCurrentYear = today.getFullYear() === date.getFullYear();
const format = isDueInCurrentYear ? 'mmm d' : 'mmm d, yyyy';
- expect(vm.$el.querySelector('time').textContent.trim()).toEqual(dateFormat(date, format));
+ expect(findTime(wrapper).text()).toBe(dateFormat(date, format));
+ });
+
+ it('should contain the correct `.text-danger` css class for overdue issue that is open', () => {
+ date.setDate(date.getDate() - 17);
+ wrapper = createComponent(date);
+
+ expect(findTime(wrapper).classes('text-danger')).toBe(true);
});
- it('should contain the correct `.text-danger` css class for overdue issue', () => {
+ it('should not contain the `.text-danger` css class for overdue issue that is closed', () => {
date.setDate(date.getDate() - 17);
- vm = createComponent(date);
+ const closed = true;
+ wrapper = createComponent(date, closed);
- expect(vm.$el.querySelector('time').classList.contains('text-danger')).toEqual(true);
+ expect(findTime(wrapper).classes('text-danger')).toBe(false);
});
});
diff --git a/spec/frontend/boards/issue_card_spec.js b/spec/frontend/boards/issue_card_spec.js
index 1fd2b417aba..09b5c664bee 100644
--- a/spec/frontend/boards/issue_card_spec.js
+++ b/spec/frontend/boards/issue_card_spec.js
@@ -8,6 +8,7 @@ import '~/boards/models/list';
import IssueCardInner from '~/boards/components/issue_card_inner.vue';
import { listObj } from '../../javascripts/boards/mock_data';
import store from '~/boards/stores';
+import { GlLabel } from '@gitlab/ui';
describe('Issue card component', () => {
const user = new ListAssignee({
@@ -20,7 +21,7 @@ describe('Issue card component', () => {
const label1 = new ListLabel({
id: 3,
title: 'testing 123',
- color: 'blue',
+ color: '#000CFF',
text_color: 'white',
description: 'test',
});
@@ -50,6 +51,9 @@ describe('Issue card component', () => {
rootPath: '/',
},
store,
+ stubs: {
+ GlLabel: true,
+ },
});
});
@@ -151,18 +155,17 @@ describe('Issue card component', () => {
describe('assignee default avatar', () => {
beforeEach(done => {
+ global.gon.default_avatar_url = 'default_avatar';
+
wrapper.setProps({
issue: {
...wrapper.props('issue'),
assignees: [
- new ListAssignee(
- {
- id: 1,
- name: 'testing 123',
- username: 'test',
- },
- 'default_avatar',
- ),
+ new ListAssignee({
+ id: 1,
+ name: 'testing 123',
+ username: 'test',
+ }),
],
},
});
@@ -170,6 +173,10 @@ describe('Issue card component', () => {
wrapper.vm.$nextTick(done);
});
+ afterEach(() => {
+ global.gon.default_avatar_url = null;
+ });
+
it('displays defaults avatar if users avatar is null', () => {
expect(wrapper.find('.board-card-assignee img').exists()).toBe(true);
expect(wrapper.find('.board-card-assignee img').attributes('src')).toBe(
@@ -290,25 +297,11 @@ describe('Issue card component', () => {
});
it('does not render list label but renders all other labels', () => {
- expect(wrapper.findAll('.badge').length).toBe(1);
- });
-
- it('renders label', () => {
- const nodes = wrapper.findAll('.badge').wrappers.map(label => label.attributes('title'));
-
- expect(nodes.includes(label1.description)).toBe(true);
- });
-
- it('sets label description as title', () => {
- expect(wrapper.find('.badge').attributes('title')).toContain(label1.description);
- });
-
- it('sets background color of button', () => {
- const nodes = wrapper
- .findAll('.badge')
- .wrappers.map(label => label.element.style.backgroundColor);
-
- expect(nodes.includes(label1.color)).toBe(true);
+ expect(wrapper.findAll(GlLabel).length).toBe(1);
+ const label = wrapper.find(GlLabel);
+ expect(label.props('title')).toEqual(label1.title);
+ expect(label.props('description')).toEqual(label1.description);
+ expect(label.props('backgroundColor')).toEqual(label1.color);
});
it('does not render label if label does not have an ID', done => {
@@ -321,7 +314,7 @@ describe('Issue card component', () => {
wrapper.vm
.$nextTick()
.then(() => {
- expect(wrapper.findAll('.badge').length).toBe(1);
+ expect(wrapper.findAll(GlLabel).length).toBe(1);
expect(wrapper.text()).not.toContain('closed');
done();
})
diff --git a/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js b/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
new file mode 100644
index 00000000000..058aca16ea0
--- /dev/null
+++ b/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
@@ -0,0 +1,99 @@
+import Vuex from 'vuex';
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { GlButton } from '@gitlab/ui';
+import CiVariableModal from '~/ci_variable_list/components/ci_variable_modal.vue';
+import createStore from '~/ci_variable_list/store';
+import mockData from '../services/mock_data';
+import ModalStub from '../stubs';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('Ci variable modal', () => {
+ let wrapper;
+ let store;
+
+ const createComponent = () => {
+ store = createStore();
+ wrapper = shallowMount(CiVariableModal, {
+ stubs: {
+ GlModal: ModalStub,
+ },
+ localVue,
+ store,
+ });
+ };
+
+ const findModal = () => wrapper.find(ModalStub);
+ const addOrUpdateButton = index =>
+ findModal()
+ .findAll(GlButton)
+ .at(index);
+ const deleteVariableButton = () =>
+ findModal()
+ .findAll(GlButton)
+ .at(1);
+
+ beforeEach(() => {
+ createComponent();
+ jest.spyOn(store, 'dispatch').mockImplementation();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+  it('button is disabled when no key/value pair is present', () => {
+ expect(addOrUpdateButton(1).attributes('disabled')).toBeTruthy();
+ });
+
+ describe('Adding a new variable', () => {
+ beforeEach(() => {
+ const [variable] = mockData.mockVariables;
+ store.state.variable = variable;
+ });
+
+    it('button is enabled when a key/value pair is present', () => {
+ expect(addOrUpdateButton(1).attributes('disabled')).toBeFalsy();
+ });
+
+ it('Add variable button dispatches addVariable action', () => {
+ addOrUpdateButton(1).vm.$emit('click');
+ expect(store.dispatch).toHaveBeenCalledWith('addVariable');
+ });
+
+ it('Clears the modal state once modal is hidden', () => {
+ findModal().vm.$emit('hidden');
+ expect(store.dispatch).toHaveBeenCalledWith('clearModal');
+ });
+ });
+
+ describe('Editing a variable', () => {
+ beforeEach(() => {
+ const [variable] = mockData.mockVariables;
+ store.state.variableBeingEdited = variable;
+ });
+
+ it('button text is Update variable when updating', () => {
+ expect(addOrUpdateButton(2).text()).toBe('Update variable');
+ });
+
+ it('Update variable button dispatches updateVariable with correct variable', () => {
+ addOrUpdateButton(2).vm.$emit('click');
+ expect(store.dispatch).toHaveBeenCalledWith(
+ 'updateVariable',
+ store.state.variableBeingEdited,
+ );
+ });
+
+ it('Resets the editing state once modal is hidden', () => {
+ findModal().vm.$emit('hidden');
+ expect(store.dispatch).toHaveBeenCalledWith('resetEditing');
+ });
+
+ it('dispatches deleteVariable with correct variable to delete', () => {
+ deleteVariableButton().vm.$emit('click');
+ expect(store.dispatch).toHaveBeenCalledWith('deleteVariable', mockData.mockVariables[0]);
+ });
+ });
+});
diff --git a/spec/frontend/ci_variable_list/components/ci_variable_popover_spec.js b/spec/frontend/ci_variable_list/components/ci_variable_popover_spec.js
new file mode 100644
index 00000000000..5d37f059161
--- /dev/null
+++ b/spec/frontend/ci_variable_list/components/ci_variable_popover_spec.js
@@ -0,0 +1,48 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlButton } from '@gitlab/ui';
+import CiVariablePopover from '~/ci_variable_list/components/ci_variable_popover.vue';
+import mockData from '../services/mock_data';
+
+describe('Ci Variable Popover', () => {
+ let wrapper;
+
+ const defaultProps = {
+ target: 'ci-variable-value-22',
+ value: mockData.mockPemCert,
+ tooltipText: 'Copy value',
+ };
+
+ const createComponent = (props = defaultProps) => {
+ wrapper = shallowMount(CiVariablePopover, {
+ propsData: { ...props },
+ });
+ };
+
+ const findButton = () => wrapper.find(GlButton);
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('displays max count plus ... when character count is over 95', () => {
+ expect(wrapper.text()).toHaveLength(98);
+ });
+
+ it('copies full value to clipboard', () => {
+ expect(findButton().attributes('data-clipboard-text')).toEqual(mockData.mockPemCert);
+ });
+
+ it('displays full value when count is less than max count', () => {
+ createComponent({
+ target: 'ci-variable-value-22',
+ value: 'test_variable_value',
+ tooltipText: 'Copy value',
+ });
+ expect(wrapper.text()).toEqual('test_variable_value');
+ });
+});
diff --git a/spec/frontend/ci_variable_list/components/ci_variable_settings_spec.js b/spec/frontend/ci_variable_list/components/ci_variable_settings_spec.js
new file mode 100644
index 00000000000..7dcd82eac5e
--- /dev/null
+++ b/spec/frontend/ci_variable_list/components/ci_variable_settings_spec.js
@@ -0,0 +1,39 @@
+import Vuex from 'vuex';
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import CiVariableSettings from '~/ci_variable_list/components/ci_variable_settings.vue';
+import createStore from '~/ci_variable_list/store';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('Ci variable settings', () => {
+ let wrapper;
+ let store;
+ let isGroup;
+
+ const createComponent = groupState => {
+ store = createStore();
+ store.state.isGroup = groupState;
+ jest.spyOn(store, 'dispatch').mockImplementation();
+ wrapper = shallowMount(CiVariableSettings, {
+ localVue,
+ store,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('dispatches fetchEnvironments when mounted', () => {
+ isGroup = false;
+ createComponent(isGroup);
+ expect(store.dispatch).toHaveBeenCalledWith('fetchEnvironments');
+ });
+
+  it('does not dispatch fetchEnvironments when in group context', () => {
+ isGroup = true;
+ createComponent(isGroup);
+ expect(store.dispatch).not.toHaveBeenCalled();
+ });
+});
diff --git a/spec/frontend/ci_variable_list/components/ci_variable_table_spec.js b/spec/frontend/ci_variable_list/components/ci_variable_table_spec.js
new file mode 100644
index 00000000000..36aeffe7798
--- /dev/null
+++ b/spec/frontend/ci_variable_list/components/ci_variable_table_spec.js
@@ -0,0 +1,84 @@
+import Vuex from 'vuex';
+import { createLocalVue, mount } from '@vue/test-utils';
+import { GlTable } from '@gitlab/ui';
+import CiVariableTable from '~/ci_variable_list/components/ci_variable_table.vue';
+import createStore from '~/ci_variable_list/store';
+import mockData from '../services/mock_data';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('Ci variable table', () => {
+ let wrapper;
+ let store;
+
+ const createComponent = () => {
+ store = createStore();
+ store.state.isGroup = true;
+ jest.spyOn(store, 'dispatch').mockImplementation();
+ wrapper = mount(CiVariableTable, {
+ attachToDocument: true,
+ localVue,
+ store,
+ });
+ };
+
+ const findRevealButton = () => wrapper.find({ ref: 'secret-value-reveal-button' });
+ const findEditButton = () => wrapper.find({ ref: 'edit-ci-variable' });
+ const findEmptyVariablesPlaceholder = () => wrapper.find({ ref: 'empty-variables' });
+ const findTable = () => wrapper.find(GlTable);
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('dispatches fetchVariables when mounted', () => {
+ expect(store.dispatch).toHaveBeenCalledWith('fetchVariables');
+ });
+
+ it('fields prop does not contain environment_scope if group', () => {
+ expect(findTable().props('fields')).not.toEqual(
+ expect.arrayContaining([
+ expect.objectContaining({
+ key: 'environment_scope',
+ label: 'Environment Scope',
+ }),
+ ]),
+ );
+ });
+
+ describe('Renders correct data', () => {
+ it('displays empty message when variables are not present', () => {
+ expect(findEmptyVariablesPlaceholder().exists()).toBe(true);
+ });
+
+    it('displays the correct number of variables and no empty message', () => {
+ store.state.variables = mockData.mockVariables;
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.findAll('.js-ci-variable-row').length).toBe(1);
+ expect(findEmptyVariablesPlaceholder().exists()).toBe(false);
+ });
+ });
+ });
+
+ describe('Table click actions', () => {
+ beforeEach(() => {
+ store.state.variables = mockData.mockVariables;
+ });
+
+ it('reveals secret values when button is clicked', () => {
+ findRevealButton().trigger('click');
+ expect(store.dispatch).toHaveBeenCalledWith('toggleValues', false);
+ });
+
+ it('dispatches editVariable with correct variable to edit', () => {
+ findEditButton().trigger('click');
+ expect(store.dispatch).toHaveBeenCalledWith('editVariable', mockData.mockVariables[0]);
+ });
+ });
+});
diff --git a/spec/frontend/ci_variable_list/services/mock_data.js b/spec/frontend/ci_variable_list/services/mock_data.js
new file mode 100644
index 00000000000..5e0fa55a20c
--- /dev/null
+++ b/spec/frontend/ci_variable_list/services/mock_data.js
@@ -0,0 +1,91 @@
+export default {
+ mockVariables: [
+ {
+ environment_scope: 'All environments',
+ id: 113,
+ key: 'test_var',
+ masked: false,
+ protected: false,
+ secret_value: 'test_val',
+ value: 'test_val',
+ variable_type: 'Var',
+ },
+ ],
+
+ mockVariablesApi: [
+ {
+ environment_scope: '*',
+ id: 113,
+ key: 'test_var',
+ masked: false,
+ protected: false,
+ secret_value: 'test_val',
+ value: 'test_val',
+ variable_type: 'env_var',
+ },
+ {
+ environment_scope: '*',
+ id: 114,
+ key: 'test_var_2',
+ masked: false,
+ protected: false,
+ secret_value: 'test_val_2',
+ value: 'test_val_2',
+ variable_type: 'file',
+ },
+ ],
+
+ mockVariablesDisplay: [
+ {
+ environment_scope: 'All',
+ id: 113,
+ key: 'test_var',
+ masked: false,
+ protected: false,
+ secret_value: 'test_val',
+ value: 'test_val',
+ variable_type: 'Var',
+ },
+ {
+ environment_scope: 'All',
+ id: 114,
+ key: 'test_var_2',
+ masked: false,
+ protected: false,
+ secret_value: 'test_val_2',
+ value: 'test_val_2',
+ variable_type: 'File',
+ },
+ ],
+
+ mockEnvironments: [
+ {
+ id: 28,
+ name: 'staging',
+ slug: 'staging',
+ external_url: 'https://staging.example.com',
+ state: 'available',
+ },
+ {
+ id: 29,
+ name: 'production',
+ slug: 'production',
+ external_url: 'https://production.example.com',
+ state: 'available',
+ },
+ ],
+
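+  // Dummy PEM certificate request, long enough to exercise value truncation in the variable popover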
+ mockPemCert: `-----BEGIN CERTIFICATE REQUEST-----
+ MIIB9TCCAWACAQAwgbgxGTAXBgNVBAoMEFF1b1ZhZGlzIExpbWl0ZWQxHDAaBgNV
+ BAsME0RvY3VtZW50IERlcGFydG1lbnQxOTA3BgNVBAMMMFdoeSBhcmUgeW91IGRl
+ Y29kaW5nIG1lPyAgVGhpcyBpcyBvbmx5IGEgdGVzdCEhITERMA8GA1UEBwwISGFt
+ aWx0b24xETAPBgNVBAgMCFBlbWJyb2tlMQswCQYDVQQGEwJCTTEPMA0GCSqGSIb3
+ DQEJARYAMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCJ9WRanG/fUvcfKiGl
+ EL4aRLjGt537mZ28UU9/3eiJeJznNSOuNLnF+hmabAu7H0LT4K7EdqfF+XUZW/2j
+ RKRYcvOUDGF9A7OjW7UfKk1In3+6QDCi7X34RE161jqoaJjrm/T18TOKcgkkhRzE
+ apQnIDm0Ea/HVzX/PiSOGuertwIDAQABMAsGCSqGSIb3DQEBBQOBgQBzMJdAV4QP
+ Awel8LzGx5uMOshezF/KfP67wJ93UW+N7zXY6AwPgoLj4Kjw+WtU684JL8Dtr9FX
+ ozakE+8p06BpxegR4BR3FMHf6p+0jQxUEAkAyb/mVgm66TyghDGC6/YkiKoZptXQ
+ 98TwDIK/39WEB/V607As+KoYazQG8drorw==
+ -----END CERTIFICATE REQUEST-----`,
+};
diff --git a/spec/frontend/ci_variable_list/store/actions_spec.js b/spec/frontend/ci_variable_list/store/actions_spec.js
new file mode 100644
index 00000000000..84455612f0c
--- /dev/null
+++ b/spec/frontend/ci_variable_list/store/actions_spec.js
@@ -0,0 +1,279 @@
+import Api from '~/api';
+import MockAdapter from 'axios-mock-adapter';
+import testAction from 'helpers/vuex_action_helper';
+import axios from '~/lib/utils/axios_utils';
+import createFlash from '~/flash';
+import getInitialState from '~/ci_variable_list/store/state';
+import * as actions from '~/ci_variable_list/store/actions';
+import * as types from '~/ci_variable_list/store/mutation_types';
+import mockData from '../services/mock_data';
+import { prepareDataForDisplay, prepareEnvironments } from '~/ci_variable_list/store/utils';
+
+jest.mock('~/api.js');
+jest.mock('~/flash.js');
+
+describe('CI variable list store actions', () => {
+ let mock;
+ let state;
+ const mockVariable = {
+ environment_scope: '*',
+ id: 63,
+ key: 'test_var',
+ masked: false,
+ protected: false,
+ value: 'test_val',
+ variable_type: 'env_var',
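+    // _destroy flags the variable for deletion when the payload is sent to the API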
+    _destroy: true,
+ };
+ const payloadError = new Error('Request failed with status code 500');
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ state = getInitialState();
+ state.endpoint = '/variables';
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('toggleValues', () => {
+ const valuesHidden = false;
+ it('commits TOGGLE_VALUES mutation', () => {
+ testAction(actions.toggleValues, valuesHidden, {}, [
+ {
+ type: types.TOGGLE_VALUES,
+ payload: valuesHidden,
+ },
+ ]);
+ });
+ });
+
+ describe('clearModal', () => {
+ it('commits CLEAR_MODAL mutation', () => {
+ testAction(actions.clearModal, {}, {}, [
+ {
+ type: types.CLEAR_MODAL,
+ },
+ ]);
+ });
+ });
+
+ describe('resetEditing', () => {
+ it('commits RESET_EDITING mutation', () => {
+ testAction(
+ actions.resetEditing,
+ {},
+ {},
+ [
+ {
+ type: types.RESET_EDITING,
+ },
+ ],
+ [{ type: 'fetchVariables' }],
+ );
+ });
+ });
+
+ describe('deleteVariable', () => {
+    it('dispatches the correct actions when a variable is successfully deleted', done => {
+ mock.onPatch(state.endpoint).reply(200);
+
+ testAction(
+ actions.deleteVariable,
+ mockVariable,
+ state,
+ [],
+ [
+ { type: 'requestDeleteVariable' },
+ { type: 'receiveDeleteVariableSuccess' },
+ { type: 'fetchVariables' },
+ ],
+ () => {
+ done();
+ },
+ );
+ });
+
+ it('should show flash error and set error in state on delete failure', done => {
+ mock.onPatch(state.endpoint).reply(500, '');
+
+ testAction(
+ actions.deleteVariable,
+ mockVariable,
+ state,
+ [],
+ [
+ { type: 'requestDeleteVariable' },
+ {
+ type: 'receiveDeleteVariableError',
+ payload: payloadError,
+ },
+ ],
+ () => {
+ expect(createFlash).toHaveBeenCalled();
+ done();
+ },
+ );
+ });
+ });
+
+ describe('updateVariable', () => {
+    it('dispatches the correct actions when a variable is successfully updated', done => {
+ mock.onPatch(state.endpoint).reply(200);
+
+ testAction(
+ actions.updateVariable,
+ mockVariable,
+ state,
+ [],
+ [
+ { type: 'requestUpdateVariable' },
+ { type: 'receiveUpdateVariableSuccess' },
+ { type: 'fetchVariables' },
+ ],
+ () => {
+ done();
+ },
+ );
+ });
+
+ it('should show flash error and set error in state on update failure', done => {
+ mock.onPatch(state.endpoint).reply(500, '');
+
+ testAction(
+ actions.updateVariable,
+ mockVariable,
+ state,
+ [],
+ [
+ { type: 'requestUpdateVariable' },
+ {
+ type: 'receiveUpdateVariableError',
+ payload: payloadError,
+ },
+ ],
+ () => {
+ expect(createFlash).toHaveBeenCalled();
+ done();
+ },
+ );
+ });
+ });
+
+ describe('addVariable', () => {
+    it('dispatches the correct actions when a variable is successfully added', done => {
+ mock.onPatch(state.endpoint).reply(200);
+
+ testAction(
+ actions.addVariable,
+ {},
+ state,
+ [],
+ [
+ { type: 'requestAddVariable' },
+ { type: 'receiveAddVariableSuccess' },
+ { type: 'fetchVariables' },
+ ],
+ () => {
+ done();
+ },
+ );
+ });
+
+ it('should show flash error and set error in state on add failure', done => {
+ mock.onPatch(state.endpoint).reply(500, '');
+
+ testAction(
+ actions.addVariable,
+ {},
+ state,
+ [],
+ [
+ { type: 'requestAddVariable' },
+ {
+ type: 'receiveAddVariableError',
+ payload: payloadError,
+ },
+ ],
+ () => {
+ expect(createFlash).toHaveBeenCalled();
+ done();
+ },
+ );
+ });
+ });
+
+ describe('fetchVariables', () => {
+    it('dispatches the correct actions on fetchVariables', done => {
+ mock.onGet(state.endpoint).reply(200, { variables: mockData.mockVariables });
+
+ testAction(
+ actions.fetchVariables,
+ {},
+ state,
+ [],
+ [
+ { type: 'requestVariables' },
+ {
+ type: 'receiveVariablesSuccess',
+ payload: prepareDataForDisplay(mockData.mockVariables),
+ },
+ ],
+ () => {
+ done();
+ },
+ );
+ });
+
+ it('should show flash error and set error in state on fetch variables failure', done => {
+ mock.onGet(state.endpoint).reply(500);
+
+ testAction(actions.fetchVariables, {}, state, [], [{ type: 'requestVariables' }], () => {
+ expect(createFlash).toHaveBeenCalledWith('There was an error fetching the variables.');
+ done();
+ });
+ });
+ });
+
+ describe('fetchEnvironments', () => {
+    it('dispatches the correct actions on fetchEnvironments', done => {
+ Api.environments = jest.fn().mockResolvedValue({ data: mockData.mockEnvironments });
+
+ testAction(
+ actions.fetchEnvironments,
+ {},
+ state,
+ [],
+ [
+ { type: 'requestEnvironments' },
+ {
+ type: 'receiveEnvironmentsSuccess',
+ payload: prepareEnvironments(mockData.mockEnvironments),
+ },
+ ],
+ () => {
+ done();
+ },
+ );
+ });
+
+ it('should show flash error and set error in state on fetch environments failure', done => {
+ Api.environments = jest.fn().mockRejectedValue();
+
+ testAction(
+ actions.fetchEnvironments,
+ {},
+ state,
+ [],
+ [{ type: 'requestEnvironments' }],
+ () => {
+ expect(createFlash).toHaveBeenCalledWith(
+ 'There was an error fetching the environments information.',
+ );
+ done();
+ },
+ );
+ });
+ });
+});
diff --git a/spec/frontend/ci_variable_list/store/mutations_spec.js b/spec/frontend/ci_variable_list/store/mutations_spec.js
new file mode 100644
index 00000000000..05513edff7b
--- /dev/null
+++ b/spec/frontend/ci_variable_list/store/mutations_spec.js
@@ -0,0 +1,64 @@
+import state from '~/ci_variable_list/store/state';
+import mutations from '~/ci_variable_list/store/mutations';
+import * as types from '~/ci_variable_list/store/mutation_types';
+
+describe('CI variable list mutations', () => {
+ let stateCopy;
+
+ beforeEach(() => {
+ stateCopy = state();
+ });
+
+ describe('TOGGLE_VALUES', () => {
+ it('should toggle state', () => {
+ const valuesHidden = false;
+
+ mutations[types.TOGGLE_VALUES](stateCopy, valuesHidden);
+
+ expect(stateCopy.valuesHidden).toEqual(valuesHidden);
+ });
+ });
+
+ describe('VARIABLE_BEING_EDITED', () => {
+ it('should set variable that is being edited', () => {
+ const variableBeingEdited = {
+ environment_scope: '*',
+ id: 63,
+ key: 'test_var',
+ masked: false,
+ protected: false,
+ value: 'test_val',
+ variable_type: 'env_var',
+ };
+
+ mutations[types.VARIABLE_BEING_EDITED](stateCopy, variableBeingEdited);
+
+ expect(stateCopy.variableBeingEdited).toEqual(variableBeingEdited);
+ });
+ });
+
+ describe('RESET_EDITING', () => {
+ it('should reset variableBeingEdited to null', () => {
+ mutations[types.RESET_EDITING](stateCopy);
+
+ expect(stateCopy.variableBeingEdited).toEqual(null);
+ });
+ });
+
+ describe('CLEAR_MODAL', () => {
+    it('should clear modal state', () => {
+ const modalState = {
+ variable_type: 'Var',
+ key: '',
+ secret_value: '',
+ protected: false,
+ masked: false,
+ environment_scope: 'All',
+ };
+
+ mutations[types.CLEAR_MODAL](stateCopy);
+
+ expect(stateCopy.variable).toEqual(modalState);
+ });
+ });
+});
diff --git a/spec/frontend/ci_variable_list/store/utils_spec.js b/spec/frontend/ci_variable_list/store/utils_spec.js
new file mode 100644
index 00000000000..5b10370324a
--- /dev/null
+++ b/spec/frontend/ci_variable_list/store/utils_spec.js
@@ -0,0 +1,49 @@
+import {
+ prepareDataForDisplay,
+ prepareEnvironments,
+ prepareDataForApi,
+} from '~/ci_variable_list/store/utils';
+import mockData from '../services/mock_data';
+
+describe('CI variables store utils', () => {
+ it('prepares ci variables for display', () => {
+ expect(prepareDataForDisplay(mockData.mockVariablesApi)).toStrictEqual(
+ mockData.mockVariablesDisplay,
+ );
+ });
+
+ it('prepares single ci variable for api', () => {
+ expect(prepareDataForApi(mockData.mockVariablesDisplay[0])).toStrictEqual({
+ environment_scope: '*',
+ id: 113,
+ key: 'test_var',
+ masked: 'false',
+ protected: 'false',
+ secret_value: 'test_val',
+ value: 'test_val',
+ variable_type: 'env_var',
+ });
+
+ expect(prepareDataForApi(mockData.mockVariablesDisplay[1])).toStrictEqual({
+ environment_scope: '*',
+ id: 114,
+ key: 'test_var_2',
+ masked: 'false',
+ protected: 'false',
+ secret_value: 'test_val_2',
+ value: 'test_val_2',
+ variable_type: 'file',
+ });
+ });
+
+ it('prepares single ci variable for delete', () => {
+ expect(prepareDataForApi(mockData.mockVariablesDisplay[0], true)).toHaveProperty(
+ '_destroy',
+ true,
+ );
+ });
+
+ it('prepares environments for display', () => {
+ expect(prepareEnvironments(mockData.mockEnvironments)).toStrictEqual(['staging', 'production']);
+ });
+});
diff --git a/spec/frontend/ci_variable_list/stubs.js b/spec/frontend/ci_variable_list/stubs.js
new file mode 100644
index 00000000000..5769d6190f6
--- /dev/null
+++ b/spec/frontend/ci_variable_list/stubs.js
@@ -0,0 +1,14 @@
+const ModalStub = {
+ name: 'glmodal-stub',
+ template: `
+ <div>
+ <slot></slot>
+ <slot name="modal-footer"></slot>
+ </div>
+ `,
+ methods: {
+ hide: jest.fn(),
+ },
+};
+
+export default ModalStub;
diff --git a/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap b/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap
index a35348d86ea..d4269bf14ba 100644
--- a/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap
+++ b/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap
@@ -9,9 +9,12 @@ exports[`Remove cluster confirmation modal renders splitbutton with modal includ
class="btn btn-danger"
type="button"
>
-
- Remove integration and resources
-
+ <span
+ class="gl-dropdown-toggle-text"
+ >
+ Remove integration and resources
+ </span>
+
<!---->
</button>
<button
diff --git a/spec/frontend/clusters/components/applications_spec.js b/spec/frontend/clusters/components/applications_spec.js
index c3336edfe59..3e25c825fe8 100644
--- a/spec/frontend/clusters/components/applications_spec.js
+++ b/spec/frontend/clusters/components/applications_spec.js
@@ -7,6 +7,7 @@ import { APPLICATIONS_MOCK_STATE } from '../services/mock_data';
import eventHub from '~/clusters/event_hub';
import KnativeDomainEditor from '~/clusters/components/knative_domain_editor.vue';
import CrossplaneProviderStack from '~/clusters/components/crossplane_provider_stack.vue';
+import IngressModsecuritySettings from '~/clusters/components/ingress_modsecurity_settings.vue';
describe('Applications', () => {
let vm;
@@ -156,6 +157,30 @@ describe('Applications', () => {
});
describe('Ingress application', () => {
+ describe('with nested component', () => {
+ const propsData = {
+ applications: {
+ ...APPLICATIONS_MOCK_STATE,
+ ingress: {
+ title: 'Ingress',
+ status: 'installed',
+ },
+ },
+ };
+
+ let wrapper;
+ beforeEach(() => {
+ wrapper = shallowMount(Applications, { propsData });
+ });
+ afterEach(() => {
+ wrapper.destroy();
+ });
+ it('renders IngressModsecuritySettings', () => {
+ const modsecuritySettings = wrapper.find(IngressModsecuritySettings);
+ expect(modsecuritySettings.exists()).toBe(true);
+ });
+ });
+
describe('when installed', () => {
describe('with ip address', () => {
it('renders ip address with a clipboard button', () => {
diff --git a/spec/frontend/clusters/components/ingress_modsecurity_settings_spec.js b/spec/frontend/clusters/components/ingress_modsecurity_settings_spec.js
new file mode 100644
index 00000000000..beb0721260b
--- /dev/null
+++ b/spec/frontend/clusters/components/ingress_modsecurity_settings_spec.js
@@ -0,0 +1,129 @@
+import { shallowMount } from '@vue/test-utils';
+import IngressModsecuritySettings from '~/clusters/components/ingress_modsecurity_settings.vue';
+import { APPLICATION_STATUS, INGRESS } from '~/clusters/constants';
+import { GlAlert, GlToggle } from '@gitlab/ui';
+import eventHub from '~/clusters/event_hub';
+
+const { UPDATING } = APPLICATION_STATUS;
+
+describe('IngressModsecuritySettings', () => {
+ let wrapper;
+
+ const defaultProps = {
+ modsecurity_enabled: false,
+ status: 'installable',
+ installed: false,
+ };
+
+ const createComponent = (props = defaultProps) => {
+ wrapper = shallowMount(IngressModsecuritySettings, {
+ propsData: {
+ ingress: {
+ ...defaultProps,
+ ...props,
+ },
+ },
+ });
+ };
+
+ const findSaveButton = () => wrapper.find('.btn-success');
+ const findCancelButton = () => wrapper.find('[variant="secondary"]');
+ const findModSecurityToggle = () => wrapper.find(GlToggle);
+
+ describe('when ingress is installed', () => {
+ beforeEach(() => {
+ createComponent({ installed: true, status: 'installed' });
+ jest.spyOn(eventHub, '$emit');
+ });
+
+ it('does not render save and cancel buttons', () => {
+ expect(findSaveButton().exists()).toBe(false);
+ expect(findCancelButton().exists()).toBe(false);
+ });
+
+ describe('with toggle changed by the user', () => {
+ beforeEach(() => {
+ findModSecurityToggle().vm.$emit('change');
+ });
+
+ it('renders both save and cancel buttons', () => {
+ expect(findSaveButton().exists()).toBe(true);
+ expect(findCancelButton().exists()).toBe(true);
+ });
+
+ describe('and the save changes button is clicked', () => {
+ beforeEach(() => {
+ findSaveButton().vm.$emit('click');
+ });
+
+        it('triggers save event and passes current modsecurity value', () => {
+ expect(eventHub.$emit).toHaveBeenCalledWith('updateApplication', {
+ id: INGRESS,
+ params: { modsecurity_enabled: false },
+ });
+ });
+ });
+
+ describe('and the cancel button is clicked', () => {
+ beforeEach(() => {
+ findCancelButton().vm.$emit('click');
+ });
+
+        it('triggers reset event and hides both cancel and save changes buttons', () => {
+ expect(eventHub.$emit).toHaveBeenCalledWith('resetIngressModSecurityEnabled', INGRESS);
+ expect(findSaveButton().exists()).toBe(false);
+ expect(findCancelButton().exists()).toBe(false);
+ });
+ });
+ });
+
+    it('propagates the set event with the current modsecurity value', () => {
+ wrapper.setData({ modSecurityEnabled: true });
+ return wrapper.vm.$nextTick().then(() => {
+ expect(eventHub.$emit).toHaveBeenCalledWith('setIngressModSecurityEnabled', {
+ id: INGRESS,
+ modSecurityEnabled: true,
+ });
+ });
+ });
+
+ describe(`when ingress status is ${UPDATING}`, () => {
+ beforeEach(() => {
+ createComponent({ installed: true, status: UPDATING });
+ });
+
+ it('renders loading spinner in save button', () => {
+ expect(findSaveButton().props('loading')).toBe(true);
+ });
+
+ it('renders disabled save button', () => {
+ expect(findSaveButton().props('disabled')).toBe(true);
+ });
+
+ it('renders save button with "Saving" label', () => {
+ expect(findSaveButton().text()).toBe('Saving');
+ });
+ });
+
+ describe('when ingress fails to update', () => {
+ beforeEach(() => {
+ createComponent({ updateFailed: true });
+ });
+
+      it('displays an error message', () => {
+ expect(wrapper.find(GlAlert).exists()).toBe(true);
+ });
+ });
+ });
+
+ describe('when ingress is not installed', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('does not render the save button', () => {
+ expect(findSaveButton().exists()).toBe(false);
+ expect(findModSecurityToggle().props('value')).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/clusters/services/mock_data.js b/spec/frontend/clusters/services/mock_data.js
index f0bcf5d980f..52d78ea1176 100644
--- a/spec/frontend/clusters/services/mock_data.js
+++ b/spec/frontend/clusters/services/mock_data.js
@@ -20,6 +20,7 @@ const CLUSTERS_MOCK_DATA = {
external_ip: null,
external_hostname: null,
can_uninstall: false,
+ modsecurity_enabled: false,
},
{
name: 'runner',
diff --git a/spec/frontend/clusters/stores/clusters_store_spec.js b/spec/frontend/clusters/stores/clusters_store_spec.js
index f2dbdd0638b..d3775c6cfba 100644
--- a/spec/frontend/clusters/stores/clusters_store_spec.js
+++ b/spec/frontend/clusters/stores/clusters_store_spec.js
@@ -81,8 +81,10 @@ describe('Clusters Store', () => {
externalIp: null,
externalHostname: null,
installed: false,
+ isEditingModSecurityEnabled: false,
installFailed: true,
uninstallable: false,
+ updateFailed: false,
uninstallSuccessful: false,
uninstallFailed: false,
validationError: null,
diff --git a/spec/frontend/clusters_list/components/clusters_spec.js b/spec/frontend/clusters_list/components/clusters_spec.js
new file mode 100644
index 00000000000..85c86b2c0a9
--- /dev/null
+++ b/spec/frontend/clusters_list/components/clusters_spec.js
@@ -0,0 +1,78 @@
+import Vuex from 'vuex';
+import { createLocalVue, mount } from '@vue/test-utils';
+import { GlTable, GlLoadingIcon } from '@gitlab/ui';
+import Clusters from '~/clusters_list/components/clusters.vue';
+import mockData from '../mock_data';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('Clusters', () => {
+ let wrapper;
+
+ const findTable = () => wrapper.find(GlTable);
+ const findLoader = () => wrapper.find(GlLoadingIcon);
+ const findStatuses = () => findTable().findAll('.js-status');
+
+ const mountComponent = _state => {
+ const state = { clusters: mockData, endpoint: 'some/endpoint', ..._state };
+ const store = new Vuex.Store({
+ state,
+ });
+
+ wrapper = mount(Clusters, { localVue, store });
+ };
+
+ beforeEach(() => {
+ mountComponent({ loading: false });
+ });
+
+ describe('clusters table', () => {
+ it('displays a loader instead of the table while loading', () => {
+ mountComponent({ loading: true });
+ expect(findLoader().exists()).toBe(true);
+ expect(findTable().exists()).toBe(false);
+ });
+
+ it('displays a table component', () => {
+      expect(findTable().exists()).toBe(true);
+ });
+
+ it('renders the correct table headers', () => {
+ const tableHeaders = wrapper.vm.$options.fields;
+ const headers = findTable().findAll('th');
+
+ expect(headers.length).toBe(tableHeaders.length);
+
+ tableHeaders.forEach((headerText, i) =>
+ expect(headers.at(i).text()).toEqual(headerText.label),
+ );
+ });
+
+ it('should stack on smaller devices', () => {
+ expect(findTable().classes()).toContain('b-table-stacked-md');
+ });
+ });
+
+ describe('cluster status', () => {
+ it.each`
+ statusName | className | lineNumber
+ ${'disabled'} | ${'disabled'} | ${0}
+ ${'unreachable'} | ${'bg-danger'} | ${1}
+ ${'authentication_failure'} | ${'bg-warning'} | ${2}
+ ${'deleting'} | ${null} | ${3}
+ ${'connected'} | ${'bg-success'} | ${4}
+ `('renders a status for each cluster', ({ statusName, className, lineNumber }) => {
+ const statuses = findStatuses();
+ const status = statuses.at(lineNumber);
+ if (statusName !== 'deleting') {
+ const statusIndicator = status.find('.cluster-status-indicator');
+ expect(statusIndicator.exists()).toBe(true);
+ expect(statusIndicator.classes()).toContain(className);
+ } else {
+ expect(status.find(GlLoadingIcon).exists()).toBe(true);
+ }
+ });
+ });
+});
diff --git a/spec/frontend/clusters_list/mock_data.js b/spec/frontend/clusters_list/mock_data.js
new file mode 100644
index 00000000000..5398975d81c
--- /dev/null
+++ b/spec/frontend/clusters_list/mock_data.js
@@ -0,0 +1,47 @@
+export default [
+ {
+ name: 'My Cluster 1',
+ environmentScope: '*',
+ size: '3',
+ clusterType: 'group_type',
+ status: 'disabled',
+ cpu: '6 (100% free)',
+ memory: '22.50 (30% free)',
+ },
+ {
+ name: 'My Cluster 2',
+ environmentScope: 'development',
+ size: '12',
+ clusterType: 'project_type',
+ status: 'unreachable',
+ cpu: '3 (50% free)',
+ memory: '11 (60% free)',
+ },
+ {
+ name: 'My Cluster 3',
+ environmentScope: 'development',
+ size: '12',
+ clusterType: 'project_type',
+ status: 'authentication_failure',
+ cpu: '1 (0% free)',
+ memory: '22 (33% free)',
+ },
+ {
+ name: 'My Cluster 4',
+ environmentScope: 'production',
+ size: '12',
+ clusterType: 'project_type',
+ status: 'deleting',
+ cpu: '6 (100% free)',
+ memory: '45 (15% free)',
+ },
+ {
+ name: 'My Cluster 5',
+ environmentScope: 'development',
+ size: '12',
+ clusterType: 'project_type',
+ status: 'connected',
+ cpu: '6 (100% free)',
+ memory: '20.12 (35% free)',
+ },
+];
diff --git a/spec/frontend/clusters_list/store/actions_spec.js b/spec/frontend/clusters_list/store/actions_spec.js
new file mode 100644
index 00000000000..e903200bf1d
--- /dev/null
+++ b/spec/frontend/clusters_list/store/actions_spec.js
@@ -0,0 +1,50 @@
+import MockAdapter from 'axios-mock-adapter';
+import flashError from '~/flash';
+import testAction from 'helpers/vuex_action_helper';
+import axios from '~/lib/utils/axios_utils';
+import * as types from '~/clusters_list/store/mutation_types';
+import * as actions from '~/clusters_list/store/actions';
+
+jest.mock('~/flash.js');
+
+describe('Clusters store actions', () => {
+ describe('fetchClusters', () => {
+ let mock;
+ const endpoint = '/clusters';
+ const clusters = [{ name: 'test' }];
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => mock.restore());
+
+ it('should commit SET_CLUSTERS_DATA with received response', done => {
+ mock.onGet().reply(200, clusters);
+
+ testAction(
+ actions.fetchClusters,
+ { endpoint },
+ {},
+ [
+ { type: types.SET_CLUSTERS_DATA, payload: clusters },
+ { type: types.SET_LOADING_STATE, payload: false },
+ ],
+ [],
+ () => done(),
+ );
+ });
+
+ it('should show flash on API error', done => {
+ mock.onGet().reply(400, 'Not Found');
+
+ testAction(actions.fetchClusters, { endpoint }, {}, [], [], () => {
+ expect(flashError).toHaveBeenCalledWith(expect.stringMatching('error'));
+ done();
+ });
+ });
+ });
+});
+
+// prevent babel-plugin-rewire from generating an invalid default during karma tests
+export default () => {};
diff --git a/spec/frontend/code_navigation/store/actions_spec.js b/spec/frontend/code_navigation/store/actions_spec.js
index 2230e0880bb..9c44480ca67 100644
--- a/spec/frontend/code_navigation/store/actions_spec.js
+++ b/spec/frontend/code_navigation/store/actions_spec.js
@@ -45,18 +45,20 @@ describe('Code navigation actions', () => {
describe('success', () => {
beforeEach(() => {
- mock.onGet(apiUrl).replyOnce(200, [
- {
- start_line: 0,
- start_char: 0,
- hover: { value: '123' },
- },
- {
- start_line: 1,
- start_char: 0,
- hover: null,
- },
- ]);
+ mock.onGet(apiUrl).replyOnce(200, {
+ index: [
+ {
+ start_line: 0,
+ start_char: 0,
+ hover: { value: '123' },
+ },
+ {
+ start_line: 1,
+ start_char: 0,
+ hover: null,
+ },
+ ],
+ });
});
it('commits REQUEST_DATA_SUCCESS with normalized data', done => {
diff --git a/spec/frontend/confirm_modal_spec.js b/spec/frontend/confirm_modal_spec.js
new file mode 100644
index 00000000000..89cfc3ef3a3
--- /dev/null
+++ b/spec/frontend/confirm_modal_spec.js
@@ -0,0 +1,126 @@
+import Vue from 'vue';
+import initConfirmModal from '~/confirm_modal';
+import { TEST_HOST } from 'helpers/test_constants';
+
+describe('ConfirmModal', () => {
+ const buttons = [
+ {
+ path: `${TEST_HOST}/1`,
+ method: 'delete',
+ modalAttributes: {
+ title: 'Remove tracking database entry',
+ message: 'Tracking database entry will be removed. Are you sure?',
+ okVariant: 'danger',
+ okTitle: 'Remove entry',
+ },
+ },
+ {
+ path: `${TEST_HOST}/1`,
+ method: 'post',
+ modalAttributes: {
+ title: 'Update tracking database entry',
+ message: 'Tracking database entry will be updated. Are you sure?',
+ okVariant: 'success',
+ okTitle: 'Update entry',
+ },
+ },
+ ];
+
+ beforeEach(() => {
+ const buttonContainer = document.createElement('div');
+
+ buttons.forEach(x => {
+ const button = document.createElement('button');
+ button.setAttribute('class', 'js-confirm-modal-button');
+ button.setAttribute('data-path', x.path);
+ button.setAttribute('data-method', x.method);
+ button.setAttribute('data-modal-attributes', JSON.stringify(x.modalAttributes));
+ button.innerHTML = 'Action';
+ buttonContainer.appendChild(button);
+ });
+
+ document.body.appendChild(buttonContainer);
+ });
+
+ afterEach(() => {
+ document.body.innerHTML = '';
+ });
+
+ const findJsHooks = () => document.querySelectorAll('.js-confirm-modal-button');
+ const findModal = () => document.querySelector('.gl-modal');
+ const findModalOkButton = (modal, variant) =>
+ modal.querySelector(`.modal-footer .btn-${variant}`);
+ const findModalCancelButton = modal => modal.querySelector('.modal-footer .btn-secondary');
+ const modalIsHidden = () => findModal().getAttribute('aria-hidden') === 'true';
+
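+  // Reads the rendered modal back into the same shape as the button fixtures for comparison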
+ const serializeModal = (modal, buttonIndex) => {
+ const { modalAttributes } = buttons[buttonIndex];
+
+ return {
+ path: modal.querySelector('form').action,
+ method: modal.querySelector('input[name="_method"]').value,
+ modalAttributes: {
+ title: modal.querySelector('.modal-title').innerHTML,
+ message: modal.querySelector('.modal-body div').innerHTML,
+ okVariant: [...findModalOkButton(modal, modalAttributes.okVariant).classList]
+ .find(x => x.match('btn-'))
+ .replace('btn-', ''),
+ okTitle: findModalOkButton(modal, modalAttributes.okVariant).innerHTML,
+ },
+ };
+ };
+
+ it('starts with only JsHooks', () => {
+ expect(findJsHooks()).toHaveLength(buttons.length);
+ expect(findModal()).not.toExist();
+ });
+
+ describe('when button clicked', () => {
+ beforeEach(() => {
+ initConfirmModal();
+ findJsHooks()
+ .item(0)
+ .click();
+ });
+
+ it('does not replace JsHook with GlModal', () => {
+ expect(findJsHooks()).toHaveLength(buttons.length);
+ });
+
+ describe('GlModal', () => {
+ it('is rendered', () => {
+ expect(findModal()).toExist();
+ expect(modalIsHidden()).toBe(false);
+ });
+
+ describe('Cancel Button', () => {
+ beforeEach(() => {
+ findModalCancelButton(findModal()).click();
+
+ return Vue.nextTick();
+ });
+
+ it('closes the modal', () => {
+ expect(modalIsHidden()).toBe(true);
+ });
+ });
+ });
+ });
+
+ describe.each`
+ index
+ ${0}
+ ${1}
+ `(`when multiple buttons exist`, ({ index }) => {
+ beforeEach(() => {
+ initConfirmModal();
+ findJsHooks()
+ .item(index)
+ .click();
+ });
+
+ it('correct props are passed to gl-modal', () => {
+ expect(serializeModal(findModal(), index)).toEqual(buttons[index]);
+ });
+ });
+});
diff --git a/spec/frontend/contributors/store/getters_spec.js b/spec/frontend/contributors/store/getters_spec.js
index 62ae9b36f87..e6342a669b7 100644
--- a/spec/frontend/contributors/store/getters_spec.js
+++ b/spec/frontend/contributors/store/getters_spec.js
@@ -29,33 +29,39 @@ describe('Contributors Store Getters', () => {
beforeAll(() => {
state.chartData = [
+ { author_name: 'John Smith', author_email: 'jawnnypoo@gmail.com', date: '2019-05-05' },
{ author_name: 'John', author_email: 'jawnnypoo@gmail.com', date: '2019-05-05' },
- { author_name: 'John', author_email: 'jawnnypoo@gmail.com', date: '2019-05-05' },
- { author_name: 'Carlson', author_email: 'jawnnypoo@gmail.com', date: '2019-03-03' },
- { author_name: 'Carlson', author_email: 'jawnnypoo@gmail.com', date: '2019-05-05' },
- { author_name: 'John', author_email: 'jawnnypoo@gmail.com', date: '2019-04-04' },
+ { author_name: 'Carlson', author_email: 'carlson123@gitlab.com', date: '2019-03-03' },
+ { author_name: 'Carlson', author_email: 'carlson123@gmail.com', date: '2019-05-05' },
{ author_name: 'John', author_email: 'jawnnypoo@gmail.com', date: '2019-04-04' },
+ { author_name: 'Johan', author_email: 'jawnnypoo@gmail.com', date: '2019-04-04' },
{ author_name: 'John', author_email: 'jawnnypoo@gmail.com', date: '2019-03-03' },
];
parsed = getters.parsedData(state);
});
- it('should group contributions by date ', () => {
+ it('should group contributions by date', () => {
expect(parsed.total).toMatchObject({ '2019-05-05': 3, '2019-03-03': 2, '2019-04-04': 2 });
});
- it('should group contributions by author ', () => {
- expect(parsed.byAuthor).toMatchObject({
- Carlson: {
- email: 'jawnnypoo@gmail.com',
- commits: 2,
+ it('should group contributions by email and use most recent name', () => {
+ expect(parsed.byAuthorEmail).toMatchObject({
+ 'carlson123@gmail.com': {
+ name: 'Carlson',
+ commits: 1,
dates: {
- '2019-03-03': 1,
'2019-05-05': 1,
},
},
- John: {
- email: 'jawnnypoo@gmail.com',
+ 'carlson123@gitlab.com': {
+ name: 'Carlson',
+ commits: 1,
+ dates: {
+ '2019-03-03': 1,
+ },
+ },
+ 'jawnnypoo@gmail.com': {
+ name: 'John Smith',
commits: 5,
dates: {
'2019-03-03': 1,
diff --git a/spec/frontend/create_cluster/components/cluster_form_dropdown_spec.js b/spec/frontend/create_cluster/components/cluster_form_dropdown_spec.js
index 292b8694fbc..14f2a527dfb 100644
--- a/spec/frontend/create_cluster/components/cluster_form_dropdown_spec.js
+++ b/spec/frontend/create_cluster/components/cluster_form_dropdown_spec.js
@@ -7,22 +7,22 @@ import DropdownButton from '~/vue_shared/components/dropdown/dropdown_button.vue
import DropdownSearchInput from '~/vue_shared/components/dropdown/dropdown_search_input.vue';
describe('ClusterFormDropdown', () => {
- let vm;
+ let wrapper;
const firstItem = { name: 'item 1', value: 1 };
const secondItem = { name: 'item 2', value: 2 };
const items = [firstItem, secondItem, { name: 'item 3', value: 3 }];
beforeEach(() => {
- vm = shallowMount(ClusterFormDropdown);
+ wrapper = shallowMount(ClusterFormDropdown);
});
- afterEach(() => vm.destroy());
+ afterEach(() => wrapper.destroy());
describe('when initial value is provided', () => {
it('sets selectedItem to initial value', () => {
- vm.setProps({ items, value: secondItem.value });
+ wrapper.setProps({ items, value: secondItem.value });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find(DropdownButton).props('toggleText')).toEqual(secondItem.name);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownButton).props('toggleText')).toEqual(secondItem.name);
});
});
});
@@ -31,28 +31,29 @@ describe('ClusterFormDropdown', () => {
it('displays placeholder text', () => {
const placeholder = 'placeholder';
- vm.setProps({ placeholder });
+ wrapper.setProps({ placeholder });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find(DropdownButton).props('toggleText')).toEqual(placeholder);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownButton).props('toggleText')).toEqual(placeholder);
});
});
});
describe('when an item is selected', () => {
beforeEach(() => {
- vm.setProps({ items });
+ wrapper.setProps({ items });
- return vm.vm.$nextTick().then(() => {
- vm.findAll('.js-dropdown-item')
+ return wrapper.vm.$nextTick().then(() => {
+ wrapper
+ .findAll('.js-dropdown-item')
.at(1)
.trigger('click');
- return vm.vm.$nextTick();
+ return wrapper.vm.$nextTick();
});
});
it('emits input event with selected item', () => {
- expect(vm.emitted('input')[0]).toEqual([secondItem.value]);
+ expect(wrapper.emitted('input')[0]).toEqual([secondItem.value]);
});
});
@@ -60,37 +61,54 @@ describe('ClusterFormDropdown', () => {
const value = [1];
beforeEach(() => {
- vm.setProps({ items, multiple: true, value });
- return vm.vm
+ wrapper.setProps({ items, multiple: true, value });
+ return wrapper.vm
.$nextTick()
.then(() => {
- vm.findAll('.js-dropdown-item')
+ wrapper
+ .findAll('.js-dropdown-item')
.at(0)
.trigger('click');
- return vm.vm.$nextTick();
+ return wrapper.vm.$nextTick();
})
.then(() => {
- vm.findAll('.js-dropdown-item')
+ wrapper
+ .findAll('.js-dropdown-item')
.at(1)
.trigger('click');
- return vm.vm.$nextTick();
+ return wrapper.vm.$nextTick();
});
});
it('emits input event with an array of selected items', () => {
- expect(vm.emitted('input')[1]).toEqual([[firstItem.value, secondItem.value]]);
+ expect(wrapper.emitted('input')[1]).toEqual([[firstItem.value, secondItem.value]]);
});
});
describe('when multiple items can be selected', () => {
beforeEach(() => {
- vm.setProps({ items, multiple: true, value: firstItem.value });
- return vm.vm.$nextTick();
+ wrapper.setProps({ items, multiple: true, value: firstItem.value });
+ return wrapper.vm.$nextTick();
});
it('displays a checked GlIcon next to the item', () => {
- expect(vm.find(GlIcon).is('.invisible')).toBe(false);
- expect(vm.find(GlIcon).props('name')).toBe('mobile-issue-close');
+ expect(wrapper.find(GlIcon).is('.invisible')).toBe(false);
+ expect(wrapper.find(GlIcon).props('name')).toBe('mobile-issue-close');
+ });
+ });
+
+ describe('when multiple values can be selected and initial value is null', () => {
+ it('emits input event with an array of a single selected item', () => {
+ wrapper.setProps({ items, multiple: true, value: null });
+
+ return wrapper.vm.$nextTick().then(() => {
+ wrapper
+ .findAll('.js-dropdown-item')
+ .at(0)
+ .trigger('click');
+
+ expect(wrapper.emitted('input')[0]).toEqual([[firstItem.value]]);
+ });
});
});
@@ -101,20 +119,20 @@ describe('ClusterFormDropdown', () => {
const currentValue = 1;
const customLabelItems = [{ [labelProperty]: label, value: currentValue }];
- vm.setProps({ labelProperty, items: customLabelItems, value: currentValue });
+ wrapper.setProps({ labelProperty, items: customLabelItems, value: currentValue });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find(DropdownButton).props('toggleText')).toEqual(label);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownButton).props('toggleText')).toEqual(label);
});
});
});
describe('when loading', () => {
it('dropdown button isLoading', () => {
- vm.setProps({ loading: true });
+ wrapper.setProps({ loading: true });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find(DropdownButton).props('isLoading')).toBe(true);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownButton).props('isLoading')).toBe(true);
});
});
});
@@ -123,20 +141,20 @@ describe('ClusterFormDropdown', () => {
it('uses loading text as toggle button text', () => {
const loadingText = 'loading text';
- vm.setProps({ loading: true, loadingText });
+ wrapper.setProps({ loading: true, loadingText });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find(DropdownButton).props('toggleText')).toEqual(loadingText);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownButton).props('toggleText')).toEqual(loadingText);
});
});
});
describe('when disabled', () => {
it('dropdown button isDisabled', () => {
- vm.setProps({ disabled: true });
+ wrapper.setProps({ disabled: true });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find(DropdownButton).props('isDisabled')).toBe(true);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownButton).props('isDisabled')).toBe(true);
});
});
});
@@ -145,20 +163,20 @@ describe('ClusterFormDropdown', () => {
it('uses disabled text as toggle button text', () => {
const disabledText = 'disabled text';
- vm.setProps({ disabled: true, disabledText });
+ wrapper.setProps({ disabled: true, disabledText });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find(DropdownButton).props('toggleText')).toBe(disabledText);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownButton).props('toggleText')).toBe(disabledText);
});
});
});
describe('when has errors', () => {
it('sets border-danger class selector to dropdown toggle', () => {
- vm.setProps({ hasErrors: true });
+ wrapper.setProps({ hasErrors: true });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find(DropdownButton).classes('border-danger')).toBe(true);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownButton).classes('border-danger')).toBe(true);
});
});
});
@@ -167,10 +185,10 @@ describe('ClusterFormDropdown', () => {
it('displays error message', () => {
const errorMessage = 'error message';
- vm.setProps({ hasErrors: true, errorMessage });
+ wrapper.setProps({ hasErrors: true, errorMessage });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find('.js-eks-dropdown-error-message').text()).toEqual(errorMessage);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find('.js-eks-dropdown-error-message').text()).toEqual(errorMessage);
});
});
});
@@ -179,10 +197,10 @@ describe('ClusterFormDropdown', () => {
it('displays empty text', () => {
const emptyText = 'error message';
- vm.setProps({ items: [], emptyText });
+ wrapper.setProps({ items: [], emptyText });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find('.js-empty-text').text()).toEqual(emptyText);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find('.js-empty-text').text()).toEqual(emptyText);
});
});
});
@@ -190,34 +208,36 @@ describe('ClusterFormDropdown', () => {
it('displays search field placeholder', () => {
const searchFieldPlaceholder = 'Placeholder';
- vm.setProps({ searchFieldPlaceholder });
+ wrapper.setProps({ searchFieldPlaceholder });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find(DropdownSearchInput).props('placeholderText')).toEqual(searchFieldPlaceholder);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownSearchInput).props('placeholderText')).toEqual(
+ searchFieldPlaceholder,
+ );
});
});
it('it filters results by search query', () => {
const searchQuery = secondItem.name;
- vm.setProps({ items });
- vm.setData({ searchQuery });
+ wrapper.setProps({ items });
+ wrapper.setData({ searchQuery });
- return vm.vm.$nextTick().then(() => {
- expect(vm.findAll('.js-dropdown-item').length).toEqual(1);
- expect(vm.find('.js-dropdown-item').text()).toEqual(secondItem.name);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.findAll('.js-dropdown-item').length).toEqual(1);
+ expect(wrapper.find('.js-dropdown-item').text()).toEqual(secondItem.name);
});
});
it('focuses dropdown search input when dropdown is displayed', () => {
- const dropdownEl = vm.find('.dropdown').element;
+ const dropdownEl = wrapper.find('.dropdown').element;
- expect(vm.find(DropdownSearchInput).props('focused')).toBe(false);
+ expect(wrapper.find(DropdownSearchInput).props('focused')).toBe(false);
$(dropdownEl).trigger('shown.bs.dropdown');
- return vm.vm.$nextTick(() => {
- expect(vm.find(DropdownSearchInput).props('focused')).toBe(true);
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find(DropdownSearchInput).props('focused')).toBe(true);
});
});
});
diff --git a/spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js b/spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js
index 25034dcf5ad..34d9ee733c4 100644
--- a/spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js
+++ b/spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js
@@ -13,6 +13,7 @@ localVue.use(Vuex);
describe('EksClusterConfigurationForm', () => {
let store;
let actions;
+ let getters;
let state;
let rolesState;
let regionsState;
@@ -29,8 +30,7 @@ describe('EksClusterConfigurationForm', () => {
let securityGroupsActions;
let vm;
- beforeEach(() => {
- state = eksClusterFormState();
+ const createStore = (config = {}) => {
actions = {
createCluster: jest.fn(),
setClusterName: jest.fn(),
@@ -64,29 +64,44 @@ describe('EksClusterConfigurationForm', () => {
securityGroupsActions = {
fetchItems: jest.fn(),
};
+ state = {
+ ...eksClusterFormState(),
+ ...config.initialState,
+ };
rolesState = {
...clusterDropdownStoreState(),
+ ...config.rolesState,
};
regionsState = {
...clusterDropdownStoreState(),
+ ...config.regionsState,
};
vpcsState = {
...clusterDropdownStoreState(),
+ ...config.vpcsState,
};
subnetsState = {
...clusterDropdownStoreState(),
+ ...config.subnetsState,
};
keyPairsState = {
...clusterDropdownStoreState(),
+ ...config.keyPairsState,
};
securityGroupsState = {
...clusterDropdownStoreState(),
+ ...config.securityGroupsState,
};
instanceTypesState = {
...clusterDropdownStoreState(),
+ ...config.instanceTypesState,
+ };
+ getters = {
+ subnetValid: config?.getters?.subnetValid || (() => false),
};
store = new Vuex.Store({
state,
+ getters,
actions,
modules: {
vpcs: {
@@ -125,9 +140,29 @@ describe('EksClusterConfigurationForm', () => {
},
},
});
- });
+ };
- beforeEach(() => {
+ const createValidStateStore = initialState => {
+ createStore({
+ initialState: {
+ clusterName: 'cluster name',
+ environmentScope: '*',
+ selectedRegion: 'region',
+ selectedRole: 'role',
+ selectedKeyPair: 'key pair',
+ selectedVpc: 'vpc',
+ selectedSubnet: ['subnet 1', 'subnet 2'],
+ selectedSecurityGroup: 'group',
+ selectedInstanceType: 'small-1',
+ ...initialState,
+ },
+ getters: {
+ subnetValid: () => true,
+ },
+ });
+ };
+
+ const buildWrapper = () => {
vm = shallowMount(EksClusterConfigurationForm, {
localVue,
store,
@@ -137,27 +172,17 @@ describe('EksClusterConfigurationForm', () => {
externalLinkIcon: '',
},
});
+ };
+
+ beforeEach(() => {
+ createStore();
+ buildWrapper();
});
afterEach(() => {
vm.destroy();
});
- const setAllConfigurationFields = () => {
- store.replaceState({
- ...state,
- clusterName: 'cluster name',
- environmentScope: '*',
- selectedRegion: 'region',
- selectedRole: 'role',
- selectedKeyPair: 'key pair',
- selectedVpc: 'vpc',
- selectedSubnet: 'subnet',
- selectedSecurityGroup: 'group',
- selectedInstanceType: 'small-1',
- });
- };
-
const findCreateClusterButton = () => vm.find('.js-create-cluster');
const findClusterNameInput = () => vm.find('[id=eks-cluster-name]');
const findEnvironmentScopeInput = () => vm.find('[id=eks-environment-scope]');
@@ -310,12 +335,29 @@ describe('EksClusterConfigurationForm', () => {
expect(findSubnetDropdown().props('items')).toBe(subnetsState.items);
});
- it('sets SubnetDropdown hasErrors to true when loading subnets fails', () => {
- subnetsState.loadingItemsError = new Error();
+ it('displays a validation error in the subnet dropdown when loading subnets fails', () => {
+ createStore({
+ subnetsState: {
+ loadingItemsError: new Error(),
+ },
+ });
+ buildWrapper();
- return Vue.nextTick().then(() => {
- expect(findSubnetDropdown().props('hasErrors')).toEqual(true);
+ expect(findSubnetDropdown().props('hasErrors')).toEqual(true);
+ });
+
+ it('displays a validation error in the subnet dropdown when a single subnet is selected', () => {
+ createStore({
+ initialState: {
+ selectedSubnet: ['subnet 1'],
+ },
});
+ buildWrapper();
+
+ expect(findSubnetDropdown().props('hasErrors')).toEqual(true);
+ expect(findSubnetDropdown().props('errorMessage')).toEqual(
+ 'You should select at least two subnets',
+ );
});
it('disables SecurityGroupDropdown when no vpc is selected', () => {
@@ -386,11 +428,7 @@ describe('EksClusterConfigurationForm', () => {
});
it('cleans selected subnet', () => {
- expect(actions.setSubnet).toHaveBeenCalledWith(
- expect.anything(),
- { subnet: null },
- undefined,
- );
+ expect(actions.setSubnet).toHaveBeenCalledWith(expect.anything(), { subnet: [] }, undefined);
});
it('cleans selected security group', () => {
@@ -464,11 +502,7 @@ describe('EksClusterConfigurationForm', () => {
});
it('cleans selected subnet', () => {
- expect(actions.setSubnet).toHaveBeenCalledWith(
- expect.anything(),
- { subnet: null },
- undefined,
- );
+ expect(actions.setSubnet).toHaveBeenCalledWith(expect.anything(), { subnet: [] }, undefined);
});
it('cleans selected security group', () => {
@@ -573,22 +607,19 @@ describe('EksClusterConfigurationForm', () => {
});
describe('when all cluster configuration fields are set', () => {
- beforeEach(() => {
- setAllConfigurationFields();
- });
-
it('enables create cluster button', () => {
+ createValidStateStore();
+ buildWrapper();
expect(findCreateClusterButton().props('disabled')).toBe(false);
});
});
describe('when at least one cluster configuration field is not set', () => {
beforeEach(() => {
- setAllConfigurationFields();
- store.replaceState({
- ...state,
- clusterName: '',
+ createValidStateStore({
+ clusterName: null,
});
+ buildWrapper();
});
it('disables create cluster button', () => {
@@ -596,13 +627,12 @@ describe('EksClusterConfigurationForm', () => {
});
});
- describe('when isCreatingCluster', () => {
+ describe('when is creating cluster', () => {
beforeEach(() => {
- setAllConfigurationFields();
- store.replaceState({
- ...state,
+ createValidStateStore({
isCreatingCluster: true,
});
+ buildWrapper();
});
it('sets create cluster button as loading', () => {
diff --git a/spec/frontend/create_cluster/eks_cluster/store/getters_spec.js b/spec/frontend/create_cluster/eks_cluster/store/getters_spec.js
new file mode 100644
index 00000000000..7c26aeb9b93
--- /dev/null
+++ b/spec/frontend/create_cluster/eks_cluster/store/getters_spec.js
@@ -0,0 +1,13 @@
+import { subnetValid } from '~/create_cluster/eks_cluster/store/getters';
+
+describe('EKS Cluster Store Getters', () => {
+ describe('subnetValid', () => {
+ it('returns true if there are 2 or more selected subnets', () => {
+ expect(subnetValid({ selectedSubnet: [1, 2] })).toBe(true);
+ });
+
+ it.each([[[]], [[1]]])('returns false if there are 1 or fewer selected subnets', subnets => {
+ expect(subnetValid({ selectedSubnet: subnets })).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/create_cluster/gke_cluster/components/gke_zone_dropdown_spec.js b/spec/frontend/create_cluster/gke_cluster/components/gke_zone_dropdown_spec.js
new file mode 100644
index 00000000000..c07e3f81964
--- /dev/null
+++ b/spec/frontend/create_cluster/gke_cluster/components/gke_zone_dropdown_spec.js
@@ -0,0 +1,101 @@
+import { shallowMount } from '@vue/test-utils';
+import GkeZoneDropdown from '~/create_cluster/gke_cluster/components/gke_zone_dropdown.vue';
+import DropdownHiddenInput from '~/vue_shared/components/dropdown/dropdown_hidden_input.vue';
+import DropdownButton from '~/vue_shared/components/dropdown/dropdown_button.vue';
+import { createStore } from '~/create_cluster/gke_cluster/store';
+import {
+ SET_PROJECT,
+ SET_ZONES,
+ SET_PROJECT_BILLING_STATUS,
+} from '~/create_cluster/gke_cluster/store/mutation_types';
+import { selectedZoneMock, selectedProjectMock, gapiZonesResponseMock } from '../mock_data';
+
+const propsData = {
+ fieldId: 'cluster_provider_gcp_attributes_gcp_zone',
+ fieldName: 'cluster[provider_gcp_attributes][gcp_zone]',
+};
+
+const LABELS = {
+ LOADING: 'Fetching zones',
+ DISABLED: 'Select project to choose zone',
+ DEFAULT: 'Select zone',
+};
+
+describe('GkeZoneDropdown', () => {
+ let store;
+ let wrapper;
+
+ beforeEach(() => {
+ store = createStore();
+ wrapper = shallowMount(GkeZoneDropdown, { propsData, store });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('toggleText', () => {
+ let dropdownButton;
+
+ beforeEach(() => {
+ dropdownButton = wrapper.find(DropdownButton);
+ });
+
+ it('returns disabled state toggle text', () => {
+ expect(dropdownButton.props('toggleText')).toBe(LABELS.DISABLED);
+ });
+
+ describe('isLoading', () => {
+ beforeEach(() => {
+ wrapper.setData({ isLoading: true });
+ return wrapper.vm.$nextTick();
+ });
+
+ it('returns loading toggle text', () => {
+ expect(dropdownButton.props('toggleText')).toBe(LABELS.LOADING);
+ });
+ });
+
+ describe('project is set', () => {
+ beforeEach(() => {
+ wrapper.vm.$store.commit(SET_PROJECT, selectedProjectMock);
+ wrapper.vm.$store.commit(SET_PROJECT_BILLING_STATUS, true);
+ return wrapper.vm.$nextTick();
+ });
+
+ it('returns default toggle text', () => {
+ expect(dropdownButton.props('toggleText')).toBe(LABELS.DEFAULT);
+ });
+ });
+
+ describe('zone is selected', () => {
+ beforeEach(() => {
+ wrapper.vm.setItem(selectedZoneMock);
+ return wrapper.vm.$nextTick();
+ });
+
+ it('returns selected zone name when a zone is selected', () => {
+ expect(dropdownButton.props('toggleText')).toBe(selectedZoneMock);
+ });
+ });
+ });
+
+ describe('selectItem', () => {
+ beforeEach(() => {
+ wrapper.vm.$store.commit(SET_ZONES, gapiZonesResponseMock.items);
+ return wrapper.vm.$nextTick();
+ });
+
+ it('reflects new value when dropdown item is clicked', () => {
+ const dropdown = wrapper.find(DropdownHiddenInput);
+
+ expect(dropdown.attributes('value')).toBe('');
+
+ wrapper.find('.dropdown-content button').trigger('click');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(dropdown.attributes('value')).toBe(selectedZoneMock);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/create_cluster/gke_cluster/helpers.js b/spec/frontend/create_cluster/gke_cluster/helpers.js
new file mode 100644
index 00000000000..52b43b82698
--- /dev/null
+++ b/spec/frontend/create_cluster/gke_cluster/helpers.js
@@ -0,0 +1,64 @@
+import {
+ gapiProjectsResponseMock,
+ gapiZonesResponseMock,
+ gapiMachineTypesResponseMock,
+} from './mock_data';
+
+const cloudbilling = {
+ projects: {
+ getBillingInfo: jest.fn(
+ () =>
+ new Promise(resolve => {
+ resolve({
+ result: { billingEnabled: true },
+ });
+ }),
+ ),
+ },
+};
+
+const cloudresourcemanager = {
+ projects: {
+ list: jest.fn(
+ () =>
+ new Promise(resolve => {
+ resolve({
+ result: { ...gapiProjectsResponseMock },
+ });
+ }),
+ ),
+ },
+};
+
+const compute = {
+ zones: {
+ list: jest.fn(
+ () =>
+ new Promise(resolve => {
+ resolve({
+ result: { ...gapiZonesResponseMock },
+ });
+ }),
+ ),
+ },
+ machineTypes: {
+ list: jest.fn(
+ () =>
+ new Promise(resolve => {
+ resolve({
+ result: { ...gapiMachineTypesResponseMock },
+ });
+ }),
+ ),
+ },
+};
+
+const gapi = {
+ client: {
+ cloudbilling,
+ cloudresourcemanager,
+ compute,
+ },
+};
+
+export { gapi as default };
diff --git a/spec/frontend/create_cluster/gke_cluster/stores/actions_spec.js b/spec/frontend/create_cluster/gke_cluster/stores/actions_spec.js
new file mode 100644
index 00000000000..8c3525207d6
--- /dev/null
+++ b/spec/frontend/create_cluster/gke_cluster/stores/actions_spec.js
@@ -0,0 +1,140 @@
+import testAction from 'helpers/vuex_action_helper';
+import * as actions from '~/create_cluster/gke_cluster/store/actions';
+import { createStore } from '~/create_cluster/gke_cluster/store';
+import gapi from '../helpers';
+import { selectedProjectMock, selectedZoneMock, selectedMachineTypeMock } from '../mock_data';
+
+describe('GCP Cluster Dropdown Store Actions', () => {
+ let store;
+
+ beforeEach(() => {
+ store = createStore();
+ });
+
+ describe('setProject', () => {
+ it('should set project', done => {
+ testAction(
+ actions.setProject,
+ selectedProjectMock,
+ { selectedProject: {} },
+ [{ type: 'SET_PROJECT', payload: selectedProjectMock }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('setZone', () => {
+ it('should set zone', done => {
+ testAction(
+ actions.setZone,
+ selectedZoneMock,
+ { selectedZone: '' },
+ [{ type: 'SET_ZONE', payload: selectedZoneMock }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('setMachineType', () => {
+ it('should set machine type', done => {
+ testAction(
+ actions.setMachineType,
+ selectedMachineTypeMock,
+ { selectedMachineType: '' },
+ [{ type: 'SET_MACHINE_TYPE', payload: selectedMachineTypeMock }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('setIsValidatingProjectBilling', () => {
+ it('should set isValidatingProjectBilling', done => {
+ testAction(
+ actions.setIsValidatingProjectBilling,
+ true,
+ { isValidatingProjectBilling: null },
+ [{ type: 'SET_IS_VALIDATING_PROJECT_BILLING', payload: true }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('async fetch methods', () => {
+ let originalGapi;
+
+ beforeAll(() => {
+ originalGapi = window.gapi;
+ window.gapi = gapi;
+ });
+
+ afterAll(() => {
+ window.gapi = originalGapi;
+ });
+
+ describe('fetchProjects', () => {
+ it('fetches projects from Google API', done => {
+ store
+ .dispatch('fetchProjects')
+ .then(() => {
+ expect(store.state.projects[0].projectId).toEqual(selectedProjectMock.projectId);
+ expect(store.state.projects[0].name).toEqual(selectedProjectMock.name);
+
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ describe('validateProjectBilling', () => {
+ it('checks project billing status from Google API', done => {
+ testAction(
+ actions.validateProjectBilling,
+ true,
+ {
+ selectedProject: selectedProjectMock,
+ selectedZone: '',
+ selectedMachineType: '',
+ projectHasBillingEnabled: null,
+ },
+ [
+ { type: 'SET_ZONE', payload: '' },
+ { type: 'SET_MACHINE_TYPE', payload: '' },
+ { type: 'SET_PROJECT_BILLING_STATUS', payload: true },
+ ],
+ [{ type: 'setIsValidatingProjectBilling', payload: false }],
+ done,
+ );
+ });
+ });
+
+ describe('fetchZones', () => {
+ it('fetches zones from Google API', done => {
+ store
+ .dispatch('fetchZones')
+ .then(() => {
+ expect(store.state.zones[0].name).toEqual(selectedZoneMock);
+
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ describe('fetchMachineTypes', () => {
+ it('fetches machine types from Google API', done => {
+ store
+ .dispatch('fetchMachineTypes')
+ .then(() => {
+ expect(store.state.machineTypes[0].name).toEqual(selectedMachineTypeMock);
+
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/create_cluster/gke_cluster/stores/getters_spec.js b/spec/frontend/create_cluster/gke_cluster/stores/getters_spec.js
index 39106c3f6ca..39106c3f6ca 100644
--- a/spec/javascripts/create_cluster/gke_cluster/stores/getters_spec.js
+++ b/spec/frontend/create_cluster/gke_cluster/stores/getters_spec.js
diff --git a/spec/javascripts/create_cluster/gke_cluster/stores/mutations_spec.js b/spec/frontend/create_cluster/gke_cluster/stores/mutations_spec.js
index 7ee6ff436e2..7ee6ff436e2 100644
--- a/spec/javascripts/create_cluster/gke_cluster/stores/mutations_spec.js
+++ b/spec/frontend/create_cluster/gke_cluster/stores/mutations_spec.js
diff --git a/spec/frontend/create_merge_request_dropdown_spec.js b/spec/frontend/create_merge_request_dropdown_spec.js
index 7d26f17ed23..bdf03853597 100644
--- a/spec/frontend/create_merge_request_dropdown_spec.js
+++ b/spec/frontend/create_merge_request_dropdown_spec.js
@@ -15,7 +15,7 @@ describe('CreateMergeRequestDropdown', () => {
<div id="dummy-wrapper-element">
<div class="available"></div>
<div class="unavailable">
- <div class="fa"></div>
+ <div class="spinner"></div>
<div class="text"></div>
</div>
<div class="js-ref"></div>
diff --git a/spec/frontend/diffs/components/app_spec.js b/spec/frontend/diffs/components/app_spec.js
new file mode 100644
index 00000000000..78e3ff4a60c
--- /dev/null
+++ b/spec/frontend/diffs/components/app_spec.js
@@ -0,0 +1,728 @@
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { GlLoadingIcon } from '@gitlab/ui';
+import MockAdapter from 'axios-mock-adapter';
+import { TEST_HOST } from 'spec/test_constants';
+import Mousetrap from 'mousetrap';
+import App from '~/diffs/components/app.vue';
+import NoChanges from '~/diffs/components/no_changes.vue';
+import DiffFile from '~/diffs/components/diff_file.vue';
+import CompareVersions from '~/diffs/components/compare_versions.vue';
+import HiddenFilesWarning from '~/diffs/components/hidden_files_warning.vue';
+import CommitWidget from '~/diffs/components/commit_widget.vue';
+import TreeList from '~/diffs/components/tree_list.vue';
+import { INLINE_DIFF_VIEW_TYPE, PARALLEL_DIFF_VIEW_TYPE } from '~/diffs/constants';
+import createDiffsStore from '../create_diffs_store';
+import axios from '~/lib/utils/axios_utils';
+import diffsMockData from '../mock_data/merge_request_diffs';
+
+const mergeRequestDiff = { version_index: 1 };
+const TEST_ENDPOINT = `${TEST_HOST}/diff/endpoint`;
+
+describe('diffs/components/app', () => {
+ const oldMrTabs = window.mrTabs;
+ let store;
+ let wrapper;
+ let mock;
+
+ function createComponent(props = {}, extendStore = () => {}) {
+ const localVue = createLocalVue();
+
+ localVue.use(Vuex);
+
+ store = createDiffsStore();
+ store.state.diffs.isLoading = false;
+
+ extendStore(store);
+
+ wrapper = shallowMount(localVue.extend(App), {
+ localVue,
+ propsData: {
+ endpoint: TEST_ENDPOINT,
+ endpointMetadata: `${TEST_HOST}/diff/endpointMetadata`,
+ endpointBatch: `${TEST_HOST}/diff/endpointBatch`,
+ endpointCoverage: `${TEST_HOST}/diff/endpointCoverage`,
+ projectPath: 'namespace/project',
+ currentUser: {},
+ changesEmptyStateIllustration: '',
+ dismissEndpoint: '',
+ showSuggestPopover: true,
+ ...props,
+ },
+ store,
+ methods: {
+ isLatestVersion() {
+ return true;
+ },
+ },
+ });
+ }
+
+ function getOppositeViewType(currentViewType) {
+ return currentViewType === INLINE_DIFF_VIEW_TYPE
+ ? PARALLEL_DIFF_VIEW_TYPE
+ : INLINE_DIFF_VIEW_TYPE;
+ }
+
+ beforeEach(() => {
+ // setup globals (needed for component to mount :/)
+ window.mrTabs = {
+ resetViewContainer: jest.fn(),
+ };
+ window.mrTabs.expandViewContainer = jest.fn();
+ mock = new MockAdapter(axios);
+ mock.onGet(TEST_ENDPOINT).reply(200, {});
+ });
+
+ afterEach(() => {
+ // reset globals
+ window.mrTabs = oldMrTabs;
+
+ // reset component
+ wrapper.destroy();
+
+ mock.restore();
+ });
+
+ describe('fetch diff methods', () => {
+ beforeEach(done => {
+ const fetchResolver = () => {
+ store.state.diffs.retrievingBatches = false;
+ store.state.notes.discussions = 'test';
+ return Promise.resolve({ real_size: 100 });
+ };
+ jest.spyOn(window, 'requestIdleCallback').mockImplementation(fn => fn());
+ createComponent();
+ jest.spyOn(wrapper.vm, 'fetchDiffFiles').mockImplementation(fetchResolver);
+ jest.spyOn(wrapper.vm, 'fetchDiffFilesMeta').mockImplementation(fetchResolver);
+ jest.spyOn(wrapper.vm, 'fetchDiffFilesBatch').mockImplementation(fetchResolver);
+ jest.spyOn(wrapper.vm, 'fetchCoverageFiles').mockImplementation(fetchResolver);
+ jest.spyOn(wrapper.vm, 'setDiscussions').mockImplementation(() => {});
+ jest.spyOn(wrapper.vm, 'startRenderDiffsQueue').mockImplementation(() => {});
+ jest.spyOn(wrapper.vm, 'unwatchDiscussions').mockImplementation(() => {});
+ jest.spyOn(wrapper.vm, 'unwatchRetrievingBatches').mockImplementation(() => {});
+ store.state.diffs.retrievingBatches = true;
+ store.state.diffs.diffFiles = [];
+ wrapper.vm.$nextTick(done);
+ });
+
+ describe('when the diff view type changes and it should load a single diff view style', () => {
+ const noLinesDiff = {
+ highlighted_diff_lines: [],
+ parallel_diff_lines: [],
+ };
+ const parallelLinesDiff = {
+ highlighted_diff_lines: [],
+ parallel_diff_lines: ['line'],
+ };
+ const inlineLinesDiff = {
+ highlighted_diff_lines: ['line'],
+ parallel_diff_lines: [],
+ };
+ const fullDiff = {
+ highlighted_diff_lines: ['line'],
+ parallel_diff_lines: ['line'],
+ };
+
+ function expectFetchToOccur({
+ vueInstance,
+ done = () => {},
+ batch = false,
+ existingFiles = 1,
+ } = {}) {
+ vueInstance.$nextTick(() => {
+ expect(vueInstance.diffFiles.length).toEqual(existingFiles);
+
+ if (!batch) {
+ expect(vueInstance.fetchDiffFiles).toHaveBeenCalled();
+ expect(vueInstance.fetchDiffFilesBatch).not.toHaveBeenCalled();
+ } else {
+ expect(vueInstance.fetchDiffFiles).not.toHaveBeenCalled();
+ expect(vueInstance.fetchDiffFilesBatch).toHaveBeenCalled();
+ }
+
+ done();
+ });
+ }
+
+ beforeEach(() => {
+ wrapper.vm.glFeatures.singleMrDiffView = true;
+ });
+
+ it('fetches diffs if it has none', done => {
+ wrapper.vm.isLatestVersion = () => false;
+
+ store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
+
+ expectFetchToOccur({ vueInstance: wrapper.vm, batch: false, existingFiles: 0, done });
+ });
+
+ it('fetches diffs if it has both view styles, but no lines in either', done => {
+ wrapper.vm.isLatestVersion = () => false;
+
+ store.state.diffs.diffFiles.push(noLinesDiff);
+ store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
+
+ expectFetchToOccur({ vueInstance: wrapper.vm, done });
+ });
+
+ it('fetches diffs if it only has inline view style', done => {
+ wrapper.vm.isLatestVersion = () => false;
+
+ store.state.diffs.diffFiles.push(inlineLinesDiff);
+ store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
+
+ expectFetchToOccur({ vueInstance: wrapper.vm, done });
+ });
+
+ it('fetches diffs if it only has parallel view style', done => {
+ wrapper.vm.isLatestVersion = () => false;
+
+ store.state.diffs.diffFiles.push(parallelLinesDiff);
+ store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
+
+ expectFetchToOccur({ vueInstance: wrapper.vm, done });
+ });
+
+ it('fetches batch diffs if it has none', done => {
+ wrapper.vm.glFeatures.diffsBatchLoad = true;
+
+ store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
+
+ expectFetchToOccur({ vueInstance: wrapper.vm, batch: true, existingFiles: 0, done });
+ });
+
+ it('fetches batch diffs if it has both view styles, but no lines in either', done => {
+ wrapper.vm.glFeatures.diffsBatchLoad = true;
+
+ store.state.diffs.diffFiles.push(noLinesDiff);
+ store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
+
+ expectFetchToOccur({ vueInstance: wrapper.vm, batch: true, done });
+ });
+
+ it('fetches batch diffs if it only has inline view style', done => {
+ wrapper.vm.glFeatures.diffsBatchLoad = true;
+
+ store.state.diffs.diffFiles.push(inlineLinesDiff);
+ store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
+
+ expectFetchToOccur({ vueInstance: wrapper.vm, batch: true, done });
+ });
+
+ it('fetches batch diffs if it only has parallel view style', done => {
+ wrapper.vm.glFeatures.diffsBatchLoad = true;
+
+ store.state.diffs.diffFiles.push(parallelLinesDiff);
+ store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
+
+ expectFetchToOccur({ vueInstance: wrapper.vm, batch: true, done });
+ });
+
+ it('does not fetch diffs if it has already fetched both styles of diff', () => {
+ wrapper.vm.glFeatures.diffsBatchLoad = false;
+
+ store.state.diffs.diffFiles.push(fullDiff);
+ store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
+
+ expect(wrapper.vm.diffFiles.length).toEqual(1);
+ expect(wrapper.vm.fetchDiffFiles).not.toHaveBeenCalled();
+ expect(wrapper.vm.fetchDiffFilesBatch).not.toHaveBeenCalled();
+ });
+
+ it('does not fetch batch diffs if it has already fetched both styles of diff', () => {
+ wrapper.vm.glFeatures.diffsBatchLoad = true;
+
+ store.state.diffs.diffFiles.push(fullDiff);
+ store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
+
+ expect(wrapper.vm.diffFiles.length).toEqual(1);
+ expect(wrapper.vm.fetchDiffFiles).not.toHaveBeenCalled();
+ expect(wrapper.vm.fetchDiffFilesBatch).not.toHaveBeenCalled();
+ });
+ });
+
+ it('calls fetchDiffFiles if diffsBatchLoad is not enabled', done => {
+ expect(wrapper.vm.diffFilesLength).toEqual(0);
+ wrapper.vm.glFeatures.diffsBatchLoad = false;
+ wrapper.vm.fetchData(false);
+
+ expect(wrapper.vm.fetchDiffFiles).toHaveBeenCalled();
+ setImmediate(() => {
+ expect(wrapper.vm.startRenderDiffsQueue).toHaveBeenCalled();
+ expect(wrapper.vm.fetchDiffFilesMeta).not.toHaveBeenCalled();
+ expect(wrapper.vm.fetchDiffFilesBatch).not.toHaveBeenCalled();
+ expect(wrapper.vm.fetchCoverageFiles).toHaveBeenCalled();
+ expect(wrapper.vm.unwatchDiscussions).toHaveBeenCalled();
+ expect(wrapper.vm.diffFilesLength).toEqual(100);
+ expect(wrapper.vm.unwatchRetrievingBatches).toHaveBeenCalled();
+
+ done();
+ });
+ });
+
+ it('calls batch methods if diffsBatchLoad is enabled, and not latest version', done => {
+ expect(wrapper.vm.diffFilesLength).toEqual(0);
+ wrapper.vm.glFeatures.diffsBatchLoad = true;
+ wrapper.vm.isLatestVersion = () => false;
+ wrapper.vm.fetchData(false);
+
+ expect(wrapper.vm.fetchDiffFiles).not.toHaveBeenCalled();
+ setImmediate(() => {
+ expect(wrapper.vm.startRenderDiffsQueue).toHaveBeenCalled();
+ expect(wrapper.vm.fetchDiffFilesMeta).toHaveBeenCalled();
+ expect(wrapper.vm.fetchDiffFilesBatch).toHaveBeenCalled();
+ expect(wrapper.vm.fetchCoverageFiles).toHaveBeenCalled();
+ expect(wrapper.vm.unwatchDiscussions).toHaveBeenCalled();
+ expect(wrapper.vm.diffFilesLength).toEqual(100);
+ expect(wrapper.vm.unwatchRetrievingBatches).toHaveBeenCalled();
+ done();
+ });
+ });
+
+ it('calls batch methods if diffsBatchLoad is enabled, and latest version', done => {
+ expect(wrapper.vm.diffFilesLength).toEqual(0);
+ wrapper.vm.glFeatures.diffsBatchLoad = true;
+ wrapper.vm.fetchData(false);
+
+ expect(wrapper.vm.fetchDiffFiles).not.toHaveBeenCalled();
+ setImmediate(() => {
+ expect(wrapper.vm.startRenderDiffsQueue).toHaveBeenCalled();
+ expect(wrapper.vm.fetchDiffFilesMeta).toHaveBeenCalled();
+ expect(wrapper.vm.fetchDiffFilesBatch).toHaveBeenCalled();
+ expect(wrapper.vm.fetchCoverageFiles).toHaveBeenCalled();
+ expect(wrapper.vm.unwatchDiscussions).toHaveBeenCalled();
+ expect(wrapper.vm.diffFilesLength).toEqual(100);
+ expect(wrapper.vm.unwatchRetrievingBatches).toHaveBeenCalled();
+ done();
+ });
+ });
+ });
+
+ it('adds container-limiting classes when showFileTree is false with inline diffs', () => {
+ createComponent({}, ({ state }) => {
+ state.diffs.showTreeList = false;
+ state.diffs.isParallelView = false;
+ });
+
+ expect(wrapper.contains('.container-limited.limit-container-width')).toBe(true);
+ });
+
+ it('does not add container-limiting classes when showFileTree is true with inline diffs', () => {
+ createComponent({}, ({ state }) => {
+ state.diffs.showTreeList = true;
+ state.diffs.isParallelView = false;
+ });
+
+ expect(wrapper.contains('.container-limited.limit-container-width')).toBe(false);
+ });
+
+ it('does not add container-limiting classes when isFluidLayout', () => {
+ createComponent({ isFluidLayout: true }, ({ state }) => {
+ state.diffs.isParallelView = false;
+ });
+
+ expect(wrapper.contains('.container-limited.limit-container-width')).toBe(false);
+ });
+
+ it('displays loading icon on loading', () => {
+ createComponent({}, ({ state }) => {
+ state.diffs.isLoading = true;
+ });
+
+ expect(wrapper.contains(GlLoadingIcon)).toBe(true);
+ });
+
+ it('displays loading icon on batch loading', () => {
+ createComponent({}, ({ state }) => {
+ state.diffs.isBatchLoading = true;
+ });
+
+ expect(wrapper.contains(GlLoadingIcon)).toBe(true);
+ });
+
+ it('displays diffs container when not loading', () => {
+ createComponent();
+
+ expect(wrapper.contains(GlLoadingIcon)).toBe(false);
+ expect(wrapper.contains('#diffs')).toBe(true);
+ });
+
+ it('does not show commit info', () => {
+ createComponent();
+
+ expect(wrapper.contains('.blob-commit-info')).toBe(false);
+ });
+
+ describe('row highlighting', () => {
+ beforeEach(() => {
+ window.location.hash = 'ABC_123';
+ });
+
+ it('sets highlighted row if hash exists in location object', done => {
+ createComponent({
+ shouldShow: true,
+ });
+
+ // Component uses $nextTick so we wait until that has finished
+ setImmediate(() => {
+ expect(store.state.diffs.highlightedRow).toBe('ABC_123');
+
+ done();
+ });
+ });
+
+ it('marks current diff file based on currently highlighted row', () => {
+ createComponent({
+ shouldShow: true,
+ });
+
+ // Component uses $nextTick so we wait until that has finished
+ return wrapper.vm.$nextTick().then(() => {
+ expect(store.state.diffs.currentDiffFileId).toBe('ABC');
+ });
+ });
+ });
+
+ describe('resizable', () => {
+ afterEach(() => {
+ localStorage.removeItem('mr_tree_list_width');
+ });
+
+ it('sets initial width when no localStorage has been set', () => {
+ createComponent();
+
+ expect(wrapper.vm.treeWidth).toEqual(320);
+ });
+
+ it('sets initial width to localStorage size', () => {
+ localStorage.setItem('mr_tree_list_width', '200');
+
+ createComponent();
+
+ expect(wrapper.vm.treeWidth).toEqual(200);
+ });
+
+ it('sets width of tree list', () => {
+ createComponent();
+
+ expect(wrapper.find('.js-diff-tree-list').element.style.width).toEqual('320px');
+ });
+ });
+
+ it('marks current diff file based on currently highlighted row', done => {
+ createComponent({
+ shouldShow: true,
+ });
+
+ // Component uses $nextTick so we wait until that has finished
+ setImmediate(() => {
+ expect(store.state.diffs.currentDiffFileId).toBe('ABC');
+
+ done();
+ });
+ });
+
+ describe('empty state', () => {
+ it('renders empty state when no diff files exist', () => {
+ createComponent();
+
+ expect(wrapper.contains(NoChanges)).toBe(true);
+ });
+
+ it('does not render empty state when diff files exist', () => {
+ createComponent({}, ({ state }) => {
+ state.diffs.diffFiles.push({
+ id: 1,
+ });
+ });
+
+ expect(wrapper.contains(NoChanges)).toBe(false);
+ expect(wrapper.findAll(DiffFile).length).toBe(1);
+ });
+
+ it('does not render empty state when versions match', () => {
+ createComponent({}, ({ state }) => {
+ state.diffs.startVersion = mergeRequestDiff;
+ state.diffs.mergeRequestDiff = mergeRequestDiff;
+ });
+
+ expect(wrapper.contains(NoChanges)).toBe(false);
+ });
+ });
+
+ describe('keyboard shortcut navigation', () => {
+ const mappings = {
+ '[': -1,
+ k: -1,
+ ']': +1,
+ j: +1,
+ };
+ let spy;
+
+ describe('visible app', () => {
+ beforeEach(() => {
+ spy = jest.fn();
+
+ createComponent({
+ shouldShow: true,
+ });
+ wrapper.setMethods({
+ jumpToFile: spy,
+ });
+ });
+
+ it.each(Object.keys(mappings))(
+ 'calls `jumpToFile()` with correct parameter whenever pre-defined %s is pressed',
+ key => {
+ return wrapper.vm.$nextTick().then(() => {
+ expect(spy).not.toHaveBeenCalled();
+
+ Mousetrap.trigger(key);
+
+ expect(spy).toHaveBeenCalledWith(mappings[key]);
+ });
+ },
+ );
+
+ it('does not call `jumpToFile()` when unknown key is pressed', done => {
+ wrapper.vm
+ .$nextTick()
+ .then(() => {
+ Mousetrap.trigger('d');
+
+ expect(spy).not.toHaveBeenCalled();
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
+ describe('hidden app', () => {
+ beforeEach(() => {
+ spy = jest.fn();
+
+ createComponent({
+ shouldShow: false,
+ });
+ wrapper.setMethods({
+ jumpToFile: spy,
+ });
+ });
+
+ it('stops calling `jumpToFile()` when application is hidden', done => {
+ wrapper.vm
+ .$nextTick()
+ .then(() => {
+ Object.keys(mappings).forEach(key => {
+ Mousetrap.trigger(key);
+
+ expect(spy).not.toHaveBeenCalled();
+ });
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+ });
+
+ describe('jumpToFile', () => {
+ let spy;
+
+ beforeEach(() => {
+ spy = jest.fn();
+
+ createComponent({}, () => {
+ store.state.diffs.diffFiles = [
+ { file_hash: '111', file_path: '111.js' },
+ { file_hash: '222', file_path: '222.js' },
+ { file_hash: '333', file_path: '333.js' },
+ ];
+ });
+
+ wrapper.setMethods({
+ scrollToFile: spy,
+ });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('jumps to next and previous files in the list', done => {
+ wrapper.vm
+ .$nextTick()
+ .then(() => {
+ wrapper.vm.jumpToFile(+1);
+
+ expect(spy.mock.calls[spy.mock.calls.length - 1]).toEqual(['222.js']);
+ store.state.diffs.currentDiffFileId = '222';
+ wrapper.vm.jumpToFile(+1);
+
+ expect(spy.mock.calls[spy.mock.calls.length - 1]).toEqual(['333.js']);
+ store.state.diffs.currentDiffFileId = '333';
+ wrapper.vm.jumpToFile(-1);
+
+ expect(spy.mock.calls[spy.mock.calls.length - 1]).toEqual(['222.js']);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('does not jump to next file from the last one', done => {
+ wrapper.vm
+ .$nextTick()
+ .then(() => {
+ store.state.diffs.currentDiffFileId = '333';
+
+ expect(wrapper.vm.currentDiffIndex).toEqual(2);
+
+ wrapper.vm.jumpToFile(+1);
+
+ expect(wrapper.vm.currentDiffIndex).toEqual(2);
+ expect(spy).not.toHaveBeenCalled();
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('does not jump to previous file from the first one', done => {
+ wrapper.vm
+ .$nextTick()
+ .then(() => {
+ expect(wrapper.vm.currentDiffIndex).toEqual(0);
+
+ wrapper.vm.jumpToFile(-1);
+
+ expect(wrapper.vm.currentDiffIndex).toEqual(0);
+ expect(spy).not.toHaveBeenCalled();
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
+ describe('diffs', () => {
+ it('should render compare versions component', () => {
+ createComponent({}, ({ state }) => {
+ state.diffs.mergeRequestDiffs = diffsMockData;
+ state.diffs.targetBranchName = 'target-branch';
+ state.diffs.mergeRequestDiff = mergeRequestDiff;
+ });
+
+ expect(wrapper.contains(CompareVersions)).toBe(true);
+ expect(wrapper.find(CompareVersions).props()).toEqual(
+ expect.objectContaining({
+ targetBranch: {
+ branchName: 'target-branch',
+ versionIndex: -1,
+ path: '',
+ },
+ mergeRequestDiffs: diffsMockData,
+ mergeRequestDiff,
+ }),
+ );
+ });
+
+ it('should render hidden files warning if render overflow warning is present', () => {
+ createComponent({}, ({ state }) => {
+ state.diffs.renderOverflowWarning = true;
+ state.diffs.realSize = '5';
+ state.diffs.plainDiffPath = 'plain diff path';
+ state.diffs.emailPatchPath = 'email patch path';
+ state.diffs.size = 1;
+ });
+
+ expect(wrapper.contains(HiddenFilesWarning)).toBe(true);
+ expect(wrapper.find(HiddenFilesWarning).props()).toEqual(
+ expect.objectContaining({
+ total: '5',
+ plainDiffPath: 'plain diff path',
+ emailPatchPath: 'email patch path',
+ visible: 1,
+ }),
+ );
+ });
+
+ it('should display commit widget if store has a commit', () => {
+ createComponent({}, () => {
+ store.state.diffs.commit = {
+ author: 'John Doe',
+ };
+ });
+
+ expect(wrapper.contains(CommitWidget)).toBe(true);
+ });
+
+ it('should display diff file if there are diff files', () => {
+ createComponent({}, ({ state }) => {
+ state.diffs.diffFiles.push({ sha: '123' });
+ });
+
+ expect(wrapper.contains(DiffFile)).toBe(true);
+ });
+
+ it('should render tree list', () => {
+ createComponent();
+
+ expect(wrapper.find(TreeList).exists()).toBe(true);
+ });
+ });
+
+ describe('hideTreeListIfJustOneFile', () => {
+ let toggleShowTreeList;
+
+ beforeEach(() => {
+ toggleShowTreeList = jest.fn();
+ });
+
+ afterEach(() => {
+ localStorage.removeItem('mr_tree_show');
+ });
+
+ it('calls toggleShowTreeList when only 1 file', () => {
+ createComponent({}, ({ state }) => {
+ state.diffs.diffFiles.push({ sha: '123' });
+ });
+
+ wrapper.setMethods({
+ toggleShowTreeList,
+ });
+
+ wrapper.vm.hideTreeListIfJustOneFile();
+
+ expect(toggleShowTreeList).toHaveBeenCalledWith(false);
+ });
+
+ it('does not call toggleShowTreeList when more than 1 file', () => {
+ createComponent({}, ({ state }) => {
+ state.diffs.diffFiles.push({ sha: '123' });
+ state.diffs.diffFiles.push({ sha: '124' });
+ });
+
+ wrapper.setMethods({
+ toggleShowTreeList,
+ });
+
+ wrapper.vm.hideTreeListIfJustOneFile();
+
+ expect(toggleShowTreeList).not.toHaveBeenCalled();
+ });
+
+ it('does not call toggleShowTreeList when localStorage is set', () => {
+ localStorage.setItem('mr_tree_show', 'true');
+
+ createComponent({}, ({ state }) => {
+ state.diffs.diffFiles.push({ sha: '123' });
+ });
+
+ wrapper.setMethods({
+ toggleShowTreeList,
+ });
+
+ wrapper.vm.hideTreeListIfJustOneFile();
+
+ expect(toggleShowTreeList).not.toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/frontend/diffs/components/commit_item_spec.js b/spec/frontend/diffs/components/commit_item_spec.js
new file mode 100644
index 00000000000..61bab77964e
--- /dev/null
+++ b/spec/frontend/diffs/components/commit_item_spec.js
@@ -0,0 +1,181 @@
+import { mount } from '@vue/test-utils';
+import { TEST_HOST } from 'helpers/test_constants';
+import { trimText } from 'helpers/text_helper';
+import { getTimeago } from '~/lib/utils/datetime_utility';
+import Component from '~/diffs/components/commit_item.vue';
+import CommitPipelineStatus from '~/projects/tree/components/commit_pipeline_status_component.vue';
+import getDiffWithCommit from '../mock_data/diff_with_commit';
+
+jest.mock('~/user_popovers');
+
+const TEST_AUTHOR_NAME = 'test';
+const TEST_AUTHOR_EMAIL = 'test+test@gitlab.com';
+const TEST_AUTHOR_GRAVATAR = `${TEST_HOST}/avatar/test?s=40`;
+const TEST_SIGNATURE_HTML = '<a>Legit commit</a>';
+const TEST_PIPELINE_STATUS_PATH = `${TEST_HOST}/pipeline/status`;
+
+describe('diffs/components/commit_item', () => {
+ let wrapper;
+
+ const timeago = getTimeago();
+ const { commit } = getDiffWithCommit();
+
+ const getTitleElement = () => wrapper.find('.commit-row-message.item-title');
+ const getDescElement = () => wrapper.find('pre.commit-row-description');
+ const getDescExpandElement = () =>
+ wrapper.find('.commit-content .text-expander.js-toggle-button');
+ const getShaElement = () => wrapper.find('.commit-sha-group');
+ const getAvatarElement = () => wrapper.find('.user-avatar-link');
+ const getCommitterElement = () => wrapper.find('.committer');
+ const getCommitActionsElement = () => wrapper.find('.commit-actions');
+ const getCommitPipelineStatus = () => wrapper.find(CommitPipelineStatus);
+
+ const defaultProps = {
+ commit: getDiffWithCommit().commit,
+ };
+ const mountComponent = (propsData = defaultProps) => {
+ wrapper = mount(Component, {
+ propsData,
+ stubs: {
+ CommitPipelineStatus: true,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('default state', () => {
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ it('renders commit title', () => {
+ const titleElement = getTitleElement();
+
+ expect(titleElement.attributes('href')).toBe(commit.commit_url);
+ expect(titleElement.text()).toBe(commit.title_html);
+ });
+
+ // https://gitlab.com/gitlab-org/gitlab/-/issues/209776
+ // eslint-disable-next-line jest/no-disabled-tests
+ it.skip('renders commit description', () => {
+ const descElement = getDescElement();
+ const descExpandElement = getDescExpandElement();
+
+ const expected = commit.description_html.replace(/&#x000A;/g, '');
+
+ expect(trimText(descElement.text())).toEqual(trimText(expected));
+ expect(descExpandElement.exists()).toBe(true);
+ });
+
+ it('renders commit sha', () => {
+ const shaElement = getShaElement();
+ const labelElement = shaElement.find('.label');
+ const buttonElement = shaElement.find('button');
+
+ expect(labelElement.text()).toEqual(commit.short_id);
+ expect(buttonElement.props('text')).toBe(commit.id);
+ });
+
+ it('renders author avatar', () => {
+ const avatarElement = getAvatarElement();
+ const imgElement = avatarElement.find('img');
+
+ expect(avatarElement.attributes('href')).toBe(commit.author.web_url);
+ expect(imgElement.classes()).toContain('s40');
+ expect(imgElement.attributes('alt')).toBe(commit.author.name);
+ expect(imgElement.attributes('src')).toBe(commit.author.avatar_url);
+ });
+
+ it('renders committer text', () => {
+ const committerElement = getCommitterElement();
+ const nameElement = committerElement.find('a');
+
+ const expectTimeText = timeago.format(commit.authored_date);
+ const expectedText = `${commit.author.name} authored ${expectTimeText}`;
+
+ expect(trimText(committerElement.text())).toEqual(expectedText);
+ expect(nameElement.attributes('href')).toBe(commit.author.web_url);
+ expect(nameElement.text()).toBe(commit.author.name);
+ expect(nameElement.classes()).toContain('js-user-link');
+ expect(nameElement.attributes('data-user-id')).toEqual(commit.author.id.toString());
+ });
+ });
+
+ describe('without commit description', () => {
+ beforeEach(() => {
+ mountComponent({ defaultProps, commit: { ...defaultProps.commit, description_html: '' } });
+ });
+
+ it('hides description', () => {
+ const descElement = getDescElement();
+ const descExpandElement = getDescExpandElement();
+
+ expect(descElement.exists()).toBeFalsy();
+ expect(descExpandElement.exists()).toBeFalsy();
+ });
+ });
+
+ describe('with no matching user', () => {
+ beforeEach(() => {
+ mountComponent({
+ defaultProps,
+ commit: {
+ ...defaultProps.commit,
+ author: null,
+ author_email: TEST_AUTHOR_EMAIL,
+ author_name: TEST_AUTHOR_NAME,
+ author_gravatar_url: TEST_AUTHOR_GRAVATAR,
+ },
+ });
+ });
+
+ it('renders author avatar', () => {
+ const avatarElement = getAvatarElement();
+ const imgElement = avatarElement.find('img');
+
+ expect(avatarElement.attributes('href')).toBe(`mailto:${TEST_AUTHOR_EMAIL}`);
+ expect(imgElement.attributes('alt')).toBe(TEST_AUTHOR_NAME);
+ expect(imgElement.attributes('src')).toBe(TEST_AUTHOR_GRAVATAR);
+ });
+
+ it('renders committer text', () => {
+ const committerElement = getCommitterElement();
+ const nameElement = committerElement.find('a');
+
+ expect(nameElement.attributes('href')).toBe(`mailto:${TEST_AUTHOR_EMAIL}`);
+ expect(nameElement.text()).toBe(TEST_AUTHOR_NAME);
+ });
+ });
+
+ describe('with signature', () => {
+ beforeEach(() => {
+ mountComponent({
+ defaultProps,
+ commit: { ...defaultProps.commit, signature_html: TEST_SIGNATURE_HTML },
+ });
+ });
+
+ it('renders signature html', () => {
+ const actionsElement = getCommitActionsElement();
+
+ expect(actionsElement.html()).toContain(TEST_SIGNATURE_HTML);
+ });
+ });
+
+ describe('with pipeline status', () => {
+ beforeEach(() => {
+ mountComponent({
+ defaultProps,
+ commit: { ...defaultProps.commit, pipeline_status_path: TEST_PIPELINE_STATUS_PATH },
+ });
+ });
+
+ it('renders pipeline status', () => {
+ expect(getCommitPipelineStatus().exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/diffs/components/compare_versions_dropdown_spec.js b/spec/frontend/diffs/components/compare_versions_dropdown_spec.js
new file mode 100644
index 00000000000..5033bdd9044
--- /dev/null
+++ b/spec/frontend/diffs/components/compare_versions_dropdown_spec.js
@@ -0,0 +1,179 @@
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import CompareVersionsDropdown from '~/diffs/components/compare_versions_dropdown.vue';
+import diffsMockData from '../mock_data/merge_request_diffs';
+import TimeAgo from '~/vue_shared/components/time_ago_tooltip.vue';
+import { TEST_HOST } from 'helpers/test_constants';
+
+const localVue = createLocalVue();
+const targetBranch = { branchName: 'tmp-wine-dev', versionIndex: -1 };
+const startVersion = { version_index: 4 };
+const mergeRequestVersion = {
+ version_path: '123',
+};
+const baseVersionPath = '/gnuwget/wget2/-/merge_requests/6/diffs?diff_id=37';
+
+describe('CompareVersionsDropdown', () => {
+ let wrapper;
+
+ const findSelectedVersion = () => wrapper.find('.dropdown-menu-toggle');
+ const findVersionsListElements = () => wrapper.findAll('li');
+ const findLinkElement = index =>
+ findVersionsListElements()
+ .at(index)
+ .find('a');
+ const findLastLink = () => findLinkElement(findVersionsListElements().length - 1);
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(localVue.extend(CompareVersionsDropdown), {
+ localVue,
+ propsData: { ...props },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('selected version name', () => {
+ it('shows latest version when latest is selected', () => {
+ createComponent({
+ mergeRequestVersion,
+ startVersion,
+ otherVersions: diffsMockData,
+ });
+
+ expect(findSelectedVersion().text()).toBe('latest version');
+ });
+
+ it('shows target branch name for base branch', () => {
+ createComponent({
+ targetBranch,
+ });
+
+ expect(findSelectedVersion().text()).toBe('tmp-wine-dev');
+ });
+
+ it('shows correct version for non-base and non-latest branches', () => {
+ createComponent({
+ startVersion,
+ targetBranch,
+ });
+
+ expect(findSelectedVersion().text()).toBe(`version ${startVersion.version_index}`);
+ });
+ });
+
+ describe('target versions list', () => {
+ it('should have the same length as otherVersions if merge request version is present', () => {
+ createComponent({
+ mergeRequestVersion,
+ otherVersions: diffsMockData,
+ });
+
+ expect(findVersionsListElements().length).toEqual(diffsMockData.length);
+ });
+
+ it('should have otherVersions length plus 1 if no merge request version is present', () => {
+ createComponent({
+ targetBranch,
+ otherVersions: diffsMockData,
+ });
+
+ expect(findVersionsListElements().length).toEqual(diffsMockData.length + 1);
+ });
+
+ it('should have base branch link as active on base branch', () => {
+ createComponent({
+ targetBranch,
+ otherVersions: diffsMockData,
+ });
+
+ expect(findLastLink().classes()).toContain('is-active');
+ });
+
+ it('should have correct branch link as active if start version present', () => {
+ createComponent({
+ targetBranch,
+ startVersion,
+ otherVersions: diffsMockData,
+ });
+
+ expect(findLinkElement(0).classes()).toContain('is-active');
+ });
+
+ it('should render a correct base version link', () => {
+ createComponent({
+ baseVersionPath,
+ otherVersions: diffsMockData.slice(1),
+ targetBranch,
+ });
+
+ expect(findLastLink().attributes('href')).toEqual(baseVersionPath);
+ expect(findLastLink().text()).toContain('(base)');
+ expect(findLastLink().text()).not.toContain('(HEAD)');
+ });
+
+ it('should render a correct head version link', () => {
+ Object.defineProperty(window, 'location', {
+ writable: true,
+ value: { href: `${TEST_HOST}?diff_head=true` },
+ });
+
+ createComponent({
+ baseVersionPath,
+ otherVersions: diffsMockData.slice(1),
+ targetBranch,
+ });
+
+ expect(findLastLink().attributes('href')).toEqual(baseVersionPath);
+ expect(findLastLink().text()).not.toContain('(base)');
+ expect(findLastLink().text()).toContain('(HEAD)');
+ });
+
+ it('should not render commits count if no showCommitsCount is passed', () => {
+ createComponent({
+ otherVersions: diffsMockData,
+ targetBranch,
+ });
+
+ const commitsCount = diffsMockData[0].commits_count;
+
+ expect(findLinkElement(0).text()).not.toContain(`${commitsCount} commit`);
+ });
+
+ it('should render correct commits count if showCommitsCount is passed', () => {
+ createComponent({
+ otherVersions: diffsMockData,
+ targetBranch,
+ showCommitCount: true,
+ });
+
+ const commitsCount = diffsMockData[0].commits_count;
+
+ expect(findLinkElement(0).text()).toContain(`${commitsCount} commit`);
+ });
+
+ it('should render correct commit sha', () => {
+ createComponent({
+ otherVersions: diffsMockData,
+ targetBranch,
+ });
+
+ const commitShaElement = findLinkElement(0).find('.commit-sha');
+
+ expect(commitShaElement.text()).toBe(diffsMockData[0].short_commit_sha);
+ });
+
+ it('should render correct time-ago', () => {
+ createComponent({
+ otherVersions: diffsMockData,
+ targetBranch,
+ });
+
+ const timeAgoElement = findLinkElement(0).find(TimeAgo);
+
+ expect(timeAgoElement.exists()).toBe(true);
+ expect(timeAgoElement.props('time')).toBe(diffsMockData[0].created_at);
+ });
+ });
+});
diff --git a/spec/frontend/diffs/components/diff_file_row_spec.js b/spec/frontend/diffs/components/diff_file_row_spec.js
index 9b7a16d0cb5..856622b89cb 100644
--- a/spec/frontend/diffs/components/diff_file_row_spec.js
+++ b/spec/frontend/diffs/components/diff_file_row_spec.js
@@ -44,12 +44,14 @@ describe('Diff File Row component', () => {
level: 4,
file: {},
hideFileStats: false,
+ showTooltip: true,
});
expect(wrapper.find(ChangedFileIcon).props()).toEqual(
expect.objectContaining({
file: {},
size: 16,
+ showTooltip: true,
}),
);
});
diff --git a/spec/frontend/diffs/components/tree_list_spec.js b/spec/frontend/diffs/components/tree_list_spec.js
new file mode 100644
index 00000000000..f78c5f25ee7
--- /dev/null
+++ b/spec/frontend/diffs/components/tree_list_spec.js
@@ -0,0 +1,138 @@
+import Vuex from 'vuex';
+import { mount, createLocalVue } from '@vue/test-utils';
+import TreeList from '~/diffs/components/tree_list.vue';
+import createStore from '~/diffs/store/modules';
+
+describe('Diffs tree list component', () => {
+ let wrapper;
+ const getFileRows = () => wrapper.findAll('.file-row');
+ const localVue = createLocalVue();
+ localVue.use(Vuex);
+
+ const createComponent = state => {
+ const store = new Vuex.Store({
+ modules: {
+ diffs: createStore(),
+ },
+ });
+
+ // Setup initial state
+ store.state.diffs.diffFiles.push('test');
+ store.state.diffs = {
+ addedLines: 10,
+ removedLines: 20,
+ ...store.state.diffs,
+ ...state,
+ };
+
+ wrapper = mount(TreeList, {
+ store,
+ localVue,
+ propsData: { hideFileStats: false },
+ });
+ };
+
+ beforeEach(() => {
+ localStorage.removeItem('mr_diff_tree_list');
+
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders empty text', () => {
+ expect(wrapper.text()).toContain('No files found');
+ });
+
+ describe('with files', () => {
+ beforeEach(() => {
+ const treeEntries = {
+ 'index.js': {
+ addedLines: 0,
+ changed: true,
+ deleted: false,
+ fileHash: 'test',
+ key: 'index.js',
+ name: 'index.js',
+ path: 'app/index.js',
+ removedLines: 0,
+ tempFile: true,
+ type: 'blob',
+ parentPath: 'app',
+ },
+ app: {
+ key: 'app',
+ path: 'app',
+ name: 'app',
+ type: 'tree',
+ tree: [],
+ },
+ };
+
+ createComponent({
+ treeEntries,
+ tree: [treeEntries['index.js'], treeEntries.app],
+ });
+
+ return wrapper.vm.$nextTick();
+ });
+
+ it('renders tree', () => {
+ expect(getFileRows()).toHaveLength(2);
+ expect(
+ getFileRows()
+ .at(0)
+ .text(),
+ ).toContain('index.js');
+ expect(
+ getFileRows()
+ .at(1)
+ .text(),
+ ).toContain('app');
+ });
+
+ it('hides file stats', () => {
+ wrapper.setProps({ hideFileStats: true });
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find('.file-row-stats').exists()).toBe(false);
+ });
+ });
+
+ it('calls toggleTreeOpen when clicking folder', () => {
+ jest.spyOn(wrapper.vm.$store, 'dispatch').mockReturnValue(undefined);
+
+ getFileRows()
+ .at(1)
+ .trigger('click');
+
+ expect(wrapper.vm.$store.dispatch).toHaveBeenCalledWith('diffs/toggleTreeOpen', 'app');
+ });
+
+ it('calls scrollToFile when clicking blob', () => {
+ jest.spyOn(wrapper.vm.$store, 'dispatch').mockReturnValue(undefined);
+
+ wrapper.find('.file-row').trigger('click');
+
+ expect(wrapper.vm.$store.dispatch).toHaveBeenCalledWith('diffs/scrollToFile', 'app/index.js');
+ });
+
+ it('renders as file list when renderTreeList is false', () => {
+ wrapper.vm.$store.state.diffs.renderTreeList = false;
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(getFileRows()).toHaveLength(1);
+ });
+ });
+
+ it('renders file paths when renderTreeList is false', () => {
+ wrapper.vm.$store.state.diffs.renderTreeList = false;
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find('.file-row').text()).toContain('index.js');
+ });
+ });
+ });
+});
diff --git a/spec/frontend/diffs/create_diffs_store.js b/spec/frontend/diffs/create_diffs_store.js
new file mode 100644
index 00000000000..aacde99964c
--- /dev/null
+++ b/spec/frontend/diffs/create_diffs_store.js
@@ -0,0 +1,15 @@
+import Vue from 'vue';
+import Vuex from 'vuex';
+import diffsModule from '~/diffs/store/modules';
+import notesModule from '~/notes/stores/modules';
+
+Vue.use(Vuex);
+
+export default function createDiffsStore() {
+ return new Vuex.Store({
+ modules: {
+ diffs: diffsModule(),
+ notes: notesModule(),
+ },
+ });
+}
diff --git a/spec/frontend/environments/environments_app_spec.js b/spec/frontend/environments/environments_app_spec.js
new file mode 100644
index 00000000000..f3d2bd2462e
--- /dev/null
+++ b/spec/frontend/environments/environments_app_spec.js
@@ -0,0 +1,168 @@
+import { mount, shallowMount } from '@vue/test-utils';
+import axios from '~/lib/utils/axios_utils';
+import MockAdapter from 'axios-mock-adapter';
+import Container from '~/environments/components/container.vue';
+import EmptyState from '~/environments/components/empty_state.vue';
+import EnvironmentsApp from '~/environments/components/environments_app.vue';
+import { environment, folder } from './mock_data';
+
+describe('Environment', () => {
+ let mock;
+ let wrapper;
+
+ const mockData = {
+ endpoint: 'environments.json',
+ canCreateEnvironment: true,
+ canReadEnvironment: true,
+ newEnvironmentPath: 'environments/new',
+ helpPagePath: 'help',
+ canaryDeploymentFeatureId: 'canary_deployment',
+ showCanaryDeploymentCallout: true,
+ userCalloutsPath: '/callouts',
+ lockPromotionSvgPath: '/assets/illustrations/lock-promotion.svg',
+ helpCanaryDeploymentsPath: 'help/canary-deployments',
+ };
+
+ const mockRequest = (response, body) => {
+ mock.onGet(mockData.endpoint).reply(response, body, {
+ 'X-nExt-pAge': '2',
+ 'x-page': '1',
+ 'X-Per-Page': '1',
+ 'X-Prev-Page': '',
+ 'X-TOTAL': '37',
+ 'X-Total-Pages': '2',
+ });
+ };
+
+ const createWrapper = (shallow = false) => {
+ const fn = shallow ? shallowMount : mount;
+ wrapper = fn(EnvironmentsApp, { propsData: mockData });
+ return axios.waitForAll();
+ };
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ mock.restore();
+ });
+
+ describe('successful request', () => {
+ describe('without environments', () => {
+ beforeEach(() => {
+ mockRequest(200, { environments: [] });
+ return createWrapper(true);
+ });
+
+ it('should render the empty state', () => {
+ expect(wrapper.find(EmptyState).exists()).toBe(true);
+ });
+
+ describe('when it is possible to enable a review app', () => {
+ beforeEach(() => {
+ mockRequest(200, { environments: [], review_app: { can_setup_review_app: true } });
+ return createWrapper();
+ });
+
+ it('should render the enable review app button', () => {
+ expect(wrapper.find('.js-enable-review-app-button').text()).toContain(
+ 'Enable review app',
+ );
+ });
+ });
+ });
+
+ describe('with paginated environments', () => {
+ const environmentList = [environment];
+
+ beforeEach(() => {
+ mockRequest(200, {
+ environments: environmentList,
+ stopped_count: 1,
+ available_count: 0,
+ });
+ return createWrapper();
+ });
+
+    it('should render a container table with environments', () => {
+ const containerTable = wrapper.find(Container);
+
+ expect(containerTable.exists()).toBe(true);
+ expect(containerTable.props('environments').length).toEqual(environmentList.length);
+ expect(containerTable.find('.environment-name').text()).toEqual(environmentList[0].name);
+ });
+
+ describe('pagination', () => {
+ it('should render pagination', () => {
+ expect(wrapper.findAll('.gl-pagination li').length).toEqual(9);
+ });
+
+ it('should make an API request when page is clicked', () => {
+ jest.spyOn(wrapper.vm, 'updateContent').mockImplementation(() => {});
+
+ wrapper.find('.gl-pagination li:nth-child(3) .page-link').trigger('click');
+ expect(wrapper.vm.updateContent).toHaveBeenCalledWith({ scope: 'available', page: '2' });
+ });
+
+ it('should make an API request when using tabs', () => {
+ jest.spyOn(wrapper.vm, 'updateContent').mockImplementation(() => {});
+ wrapper.find('.js-environments-tab-stopped').trigger('click');
+ expect(wrapper.vm.updateContent).toHaveBeenCalledWith({ scope: 'stopped', page: '1' });
+ });
+ });
+ });
+ });
+
+ describe('unsuccessful request', () => {
+ beforeEach(() => {
+ mockRequest(500, {});
+ return createWrapper(true);
+ });
+
+ it('should render empty state', () => {
+ expect(wrapper.find(EmptyState).exists()).toBe(true);
+ });
+ });
+
+ describe('expandable folders', () => {
+ beforeEach(() => {
+ mockRequest(200, {
+ environments: [folder],
+ stopped_count: 1,
+ available_count: 0,
+ });
+
+ mock.onGet(environment.folder_path).reply(200, { environments: [environment] });
+
+ return createWrapper().then(() => {
+ // open folder
+ wrapper.find('.folder-name').trigger('click');
+ return axios.waitForAll();
+ });
+ });
+
+ it('should open a closed folder', () => {
+ expect(wrapper.find('.folder-icon.ic-chevron-right').exists()).toBe(false);
+ });
+
+ it('should close an opened folder', () => {
+ expect(wrapper.find('.folder-icon.ic-chevron-down').exists()).toBe(true);
+
+ // close folder
+ wrapper.find('.folder-name').trigger('click');
+      return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find('.folder-icon.ic-chevron-down').exists()).toBe(false);
+ });
+ });
+
+ it('should show children environments', () => {
+ expect(wrapper.findAll('.js-child-row').length).toEqual(1);
+ });
+
+ it('should show a button to show all environments', () => {
+ expect(wrapper.find('.text-center > a.btn').text()).toContain('Show all');
+ });
+ });
+});
diff --git a/spec/frontend/error_tracking/components/error_details_spec.js b/spec/frontend/error_tracking/components/error_details_spec.js
index ca3431920fe..72b0466a1f0 100644
--- a/spec/frontend/error_tracking/components/error_details_spec.js
+++ b/spec/frontend/error_tracking/components/error_details_spec.js
@@ -1,8 +1,16 @@
import { createLocalVue, shallowMount } from '@vue/test-utils';
import Vuex from 'vuex';
import { __ } from '~/locale';
-import { GlLoadingIcon, GlLink, GlBadge, GlFormInput, GlAlert, GlSprintf } from '@gitlab/ui';
-import LoadingButton from '~/vue_shared/components/loading_button.vue';
+import createFlash from '~/flash';
+import {
+ GlButton,
+ GlLoadingIcon,
+ GlLink,
+ GlBadge,
+ GlFormInput,
+ GlAlert,
+ GlSprintf,
+} from '@gitlab/ui';
import Stacktrace from '~/error_tracking/components/stacktrace.vue';
import ErrorDetails from '~/error_tracking/components/error_details.vue';
import {
@@ -11,6 +19,8 @@ import {
errorStatus,
} from '~/error_tracking/components/constants';
+jest.mock('~/flash');
+
const localVue = createLocalVue();
localVue.use(Vuex);
@@ -28,7 +38,7 @@ describe('ErrorDetails', () => {
function mountComponent() {
wrapper = shallowMount(ErrorDetails, {
- stubs: { LoadingButton, GlSprintf },
+ stubs: { GlButton, GlSprintf },
localVue,
store,
mocks,
@@ -42,18 +52,6 @@ describe('ErrorDetails', () => {
csrfToken: 'fakeToken',
},
});
- wrapper.setData({
- error: {
- id: 'gid://gitlab/Gitlab::ErrorTracking::DetailedError/129381',
- sentryId: 129381,
- title: 'Issue title',
- externalUrl: 'http://sentry.gitlab.net/gitlab',
- firstSeen: '2017-05-26T13:32:48Z',
- lastSeen: '2018-05-26T13:32:48Z',
- count: 12,
- userCount: 2,
- },
- });
}
beforeEach(() => {
@@ -71,6 +69,7 @@ describe('ErrorDetails', () => {
const state = {
stacktraceData: {},
loadingStacktrace: true,
+ errorStatus: '',
};
store = new Vuex.Store({
@@ -92,6 +91,7 @@ describe('ErrorDetails', () => {
error: {
loading: true,
stopPolling: jest.fn(),
+ setOptions: jest.fn(),
},
},
},
@@ -116,10 +116,61 @@ describe('ErrorDetails', () => {
});
});
+ describe('sentry response timeout', () => {
+ const initTime = 300000;
+ const endTime = initTime + 10000;
+
+ beforeEach(() => {
+ mocks.$apollo.queries.error.loading = false;
+ jest.spyOn(Date, 'now').mockReturnValue(initTime);
+ mountComponent();
+ });
+
+ it('when before timeout, still shows loading', () => {
+ Date.now.mockReturnValue(endTime - 1);
+
+ wrapper.vm.onNoApolloResult();
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(createFlash).not.toHaveBeenCalled();
+ expect(mocks.$apollo.queries.error.stopPolling).not.toHaveBeenCalled();
+ });
+ });
+
+ it('when timeout is hit and no apollo result, stops loading and shows flash', () => {
+ Date.now.mockReturnValue(endTime + 1);
+
+ wrapper.vm.onNoApolloResult();
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.find(GlLink).exists()).toBe(false);
+ expect(createFlash).toHaveBeenCalledWith(
+ 'Could not connect to Sentry. Refresh the page to try again.',
+ 'warning',
+ );
+ expect(mocks.$apollo.queries.error.stopPolling).toHaveBeenCalled();
+ });
+ });
+ });
+
describe('Error details', () => {
beforeEach(() => {
mocks.$apollo.queries.error.loading = false;
mountComponent();
+ wrapper.setData({
+ error: {
+ id: 'gid://gitlab/Gitlab::ErrorTracking::DetailedError/129381',
+ sentryId: 129381,
+ title: 'Issue title',
+ externalUrl: 'http://sentry.gitlab.net/gitlab',
+ firstSeen: '2017-05-26T13:32:48Z',
+ lastSeen: '2018-05-26T13:32:48Z',
+ count: 12,
+ userCount: 2,
+ },
+ });
});
it('should show Sentry error details without stacktrace', () => {
@@ -127,7 +178,7 @@ describe('ErrorDetails', () => {
expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
expect(wrapper.find(Stacktrace).exists()).toBe(false);
expect(wrapper.find(GlBadge).exists()).toBe(false);
- expect(wrapper.findAll('button').length).toBe(3);
+ expect(wrapper.findAll(GlButton).length).toBe(3);
});
describe('unsafe chars for culprit field', () => {
@@ -225,10 +276,6 @@ describe('ErrorDetails', () => {
});
describe('When a user clicks the create issue button', () => {
- beforeEach(() => {
- mountComponent();
- });
-
it('should send sentry_issue_identifier', () => {
const sentryErrorIdInput = findInput(
'issue[sentry_issue_attributes][sentry_issue_identifier]',
@@ -248,7 +295,7 @@ describe('ErrorDetails', () => {
it('should submit the form', () => {
window.HTMLFormElement.prototype.submit = () => {};
const submitSpy = jest.spyOn(wrapper.vm.$refs.sentryIssueForm, 'submit');
- wrapper.find('[data-qa-selector="create_issue_button"]').trigger('click');
+ wrapper.find('[data-qa-selector="create_issue_button"]').vm.$emit('click');
expect(submitSpy).toHaveBeenCalled();
submitSpy.mockRestore();
});
@@ -268,7 +315,8 @@ describe('ErrorDetails', () => {
describe('when error is unresolved', () => {
beforeEach(() => {
store.state.details.errorStatus = errorStatus.UNRESOLVED;
- mountComponent();
+
+ return wrapper.vm.$nextTick();
});
it('displays Ignore and Resolve buttons', () => {
@@ -277,14 +325,14 @@ describe('ErrorDetails', () => {
});
it('marks error as ignored when ignore button is clicked', () => {
- findUpdateIgnoreStatusButton().trigger('click');
+ findUpdateIgnoreStatusButton().vm.$emit('click');
expect(actions.updateIgnoreStatus.mock.calls[0][1]).toEqual(
expect.objectContaining({ status: errorStatus.IGNORED }),
);
});
it('marks error as resolved when resolve button is clicked', () => {
- findUpdateResolveStatusButton().trigger('click');
+ findUpdateResolveStatusButton().vm.$emit('click');
expect(actions.updateResolveStatus.mock.calls[0][1]).toEqual(
expect.objectContaining({ status: errorStatus.RESOLVED }),
);
@@ -294,7 +342,8 @@ describe('ErrorDetails', () => {
describe('when error is ignored', () => {
beforeEach(() => {
store.state.details.errorStatus = errorStatus.IGNORED;
- mountComponent();
+
+ return wrapper.vm.$nextTick();
});
it('displays Undo Ignore and Resolve buttons', () => {
@@ -303,14 +352,14 @@ describe('ErrorDetails', () => {
});
it('marks error as unresolved when ignore button is clicked', () => {
- findUpdateIgnoreStatusButton().trigger('click');
+ findUpdateIgnoreStatusButton().vm.$emit('click');
expect(actions.updateIgnoreStatus.mock.calls[0][1]).toEqual(
expect.objectContaining({ status: errorStatus.UNRESOLVED }),
);
});
it('marks error as resolved when resolve button is clicked', () => {
- findUpdateResolveStatusButton().trigger('click');
+ findUpdateResolveStatusButton().vm.$emit('click');
expect(actions.updateResolveStatus.mock.calls[0][1]).toEqual(
expect.objectContaining({ status: errorStatus.RESOLVED }),
);
@@ -320,7 +369,8 @@ describe('ErrorDetails', () => {
describe('when error is resolved', () => {
beforeEach(() => {
store.state.details.errorStatus = errorStatus.RESOLVED;
- mountComponent();
+
+ return wrapper.vm.$nextTick();
});
it('displays Ignore and Unresolve buttons', () => {
@@ -329,14 +379,14 @@ describe('ErrorDetails', () => {
});
it('marks error as ignored when ignore button is clicked', () => {
- findUpdateIgnoreStatusButton().trigger('click');
+ findUpdateIgnoreStatusButton().vm.$emit('click');
expect(actions.updateIgnoreStatus.mock.calls[0][1]).toEqual(
expect.objectContaining({ status: errorStatus.IGNORED }),
);
});
it('marks error as unresolved when unresolve button is clicked', () => {
- findUpdateResolveStatusButton().trigger('click');
+ findUpdateResolveStatusButton().vm.$emit('click');
expect(actions.updateResolveStatus.mock.calls[0][1]).toEqual(
expect.objectContaining({ status: errorStatus.UNRESOLVED }),
);
diff --git a/spec/frontend/error_tracking/components/error_tracking_list_spec.js b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
index f852a3091aa..cd6dd5c7519 100644
--- a/spec/frontend/error_tracking/components/error_tracking_list_spec.js
+++ b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
@@ -1,6 +1,6 @@
import { createLocalVue, mount } from '@vue/test-utils';
import Vuex from 'vuex';
-import { GlEmptyState, GlLoadingIcon, GlFormInput, GlPagination } from '@gitlab/ui';
+import { GlEmptyState, GlLoadingIcon, GlFormInput, GlPagination, GlDropdown } from '@gitlab/ui';
import stubChildren from 'helpers/stub_children';
import ErrorTrackingList from '~/error_tracking/components/error_tracking_list.vue';
import errorsList from './list_mock.json';
@@ -15,9 +15,19 @@ describe('ErrorTrackingList', () => {
const findErrorListTable = () => wrapper.find('table');
const findErrorListRows = () => wrapper.findAll('tbody tr');
- const findSortDropdown = () => wrapper.find('.sort-dropdown');
+ const dropdownsArray = () => wrapper.findAll(GlDropdown);
const findRecentSearchesDropdown = () =>
- wrapper.find('.filtered-search-history-dropdown-wrapper');
+ dropdownsArray()
+ .at(0)
+ .find(GlDropdown);
+ const findStatusFilterDropdown = () =>
+ dropdownsArray()
+ .at(1)
+ .find(GlDropdown);
+ const findSortDropdown = () =>
+ dropdownsArray()
+ .at(2)
+ .find(GlDropdown);
const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
const findPagination = () => wrapper.find(GlPagination);
@@ -60,6 +70,7 @@ describe('ErrorTrackingList', () => {
fetchPaginatedResults: jest.fn(),
updateStatus: jest.fn(),
removeIgnoredResolvedErrors: jest.fn(),
+ filterByStatus: jest.fn(),
};
const state = {
@@ -167,10 +178,16 @@ describe('ErrorTrackingList', () => {
});
it('it sorts by fields', () => {
- const findSortItem = () => wrapper.find('.dropdown-item');
+ const findSortItem = () => findSortDropdown().find('.dropdown-item');
findSortItem().trigger('click');
expect(actions.sortByField).toHaveBeenCalled();
});
+
+ it('it filters by status', () => {
+ const findStatusFilter = () => findStatusFilterDropdown().find('.dropdown-item');
+ findStatusFilter().trigger('click');
+ expect(actions.filterByStatus).toHaveBeenCalled();
+ });
});
});
@@ -215,7 +232,7 @@ describe('ErrorTrackingList', () => {
expect(wrapper.find(GlEmptyState).exists()).toBe(true);
expect(findLoadingIcon().exists()).toBe(false);
expect(findErrorListTable().exists()).toBe(false);
- expect(findSortDropdown().exists()).toBe(false);
+ expect(dropdownsArray().length).toBe(0);
});
});
diff --git a/spec/frontend/error_tracking/store/list/actions_spec.js b/spec/frontend/error_tracking/store/list/actions_spec.js
index 54fdde88818..3cb740bf05d 100644
--- a/spec/frontend/error_tracking/store/list/actions_spec.js
+++ b/spec/frontend/error_tracking/store/list/actions_spec.js
@@ -88,6 +88,20 @@ describe('error tracking actions', () => {
});
});
+ describe('filterByStatus', () => {
+ it('should search errors by status', () => {
+ const status = 'ignored';
+
+ testAction(
+ actions.filterByStatus,
+ status,
+ {},
+ [{ type: types.SET_STATUS_FILTER, payload: status }],
+ [{ type: 'stopPolling' }, { type: 'startPolling' }],
+ );
+ });
+ });
+
describe('sortByField', () => {
it('should search by query', () => {
const field = 'frequency';
diff --git a/spec/frontend/error_tracking/store/list/mutation_spec.js b/spec/frontend/error_tracking/store/list/mutation_spec.js
index 65f11aeeda1..a326a6c55c0 100644
--- a/spec/frontend/error_tracking/store/list/mutation_spec.js
+++ b/spec/frontend/error_tracking/store/list/mutation_spec.js
@@ -6,6 +6,7 @@ const ADD_RECENT_SEARCH = mutations[types.ADD_RECENT_SEARCH];
const CLEAR_RECENT_SEARCHES = mutations[types.CLEAR_RECENT_SEARCHES];
const LOAD_RECENT_SEARCHES = mutations[types.LOAD_RECENT_SEARCHES];
const REMOVE_IGNORED_RESOLVED_ERRORS = mutations[types.REMOVE_IGNORED_RESOLVED_ERRORS];
+const SET_STATUS_FILTER = mutations[types.SET_STATUS_FILTER];
describe('Error tracking mutations', () => {
describe('SET_ERRORS', () => {
@@ -139,5 +140,15 @@ describe('Error tracking mutations', () => {
expect(state.errors).not.toContain(ignoredError);
});
});
+
+ describe('SET_STATUS_FILTER', () => {
+ it('sets the filter to ignored, resolved or unresolved', () => {
+ state.statusFilter = 'unresolved';
+
+ SET_STATUS_FILTER(state, 'ignored');
+
+ expect(state.statusFilter).toBe('ignored');
+ });
+ });
});
});
diff --git a/spec/frontend/fixtures/groups.rb b/spec/frontend/fixtures/groups.rb
index 237fc711594..2421b67a130 100644
--- a/spec/frontend/fixtures/groups.rb
+++ b/spec/frontend/fixtures/groups.rb
@@ -15,6 +15,7 @@ describe 'Groups (JavaScript fixtures)', type: :controller do
end
before do
+ stub_feature_flags(new_variables_ui: false)
group.add_maintainer(admin)
sign_in(admin)
end
diff --git a/spec/frontend/fixtures/labels.rb b/spec/frontend/fixtures/labels.rb
index e4d66dbcd0a..e5a0501ac03 100644
--- a/spec/frontend/fixtures/labels.rb
+++ b/spec/frontend/fixtures/labels.rb
@@ -41,6 +41,23 @@ describe 'Labels (JavaScript fixtures)' do
end
end
+ describe API::Helpers::LabelHelpers, type: :request do
+ include JavaScriptFixturesHelpers
+ include ApiHelpers
+
+ let(:user) { create(:user) }
+
+ before do
+ group.add_owner(user)
+ end
+
+ it 'api/group_labels.json' do
+ get api("/groups/#{group.id}/labels", user)
+
+ expect(response).to be_successful
+ end
+ end
+
describe Projects::LabelsController, '(JavaScript fixtures)', type: :controller do
render_views
diff --git a/spec/frontend/fixtures/metrics_dashboard.rb b/spec/frontend/fixtures/metrics_dashboard.rb
new file mode 100644
index 00000000000..f0c741af37d
--- /dev/null
+++ b/spec/frontend/fixtures/metrics_dashboard.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe MetricsDashboard, '(JavaScript fixtures)', type: :controller do
+ include JavaScriptFixturesHelpers
+ include MetricsDashboardHelpers
+
+ let(:user) { create(:user) }
+ let(:project) { project_with_dashboard('.gitlab/dashboards/test.yml') }
+ let(:environment) { create(:environment, project: project) }
+ let(:params) { { environment: environment } }
+
+ before(:all) do
+ clean_frontend_fixtures('metrics_dashboard/')
+ end
+
+ controller(::ApplicationController) do
+ include MetricsDashboard
+ end
+
+ before do
+ sign_in(user)
+ project.add_maintainer(user)
+
+ allow(controller).to receive(:project).and_return(project)
+ allow(controller)
+ .to receive(:metrics_dashboard_params)
+ .and_return(params)
+ end
+
+ after do
+ remove_repository(project)
+ end
+
+ it 'metrics_dashboard/environment_metrics_dashboard.json' do
+ routes.draw { get "metrics_dashboard" => "anonymous#metrics_dashboard" }
+ response = get :metrics_dashboard, format: :json
+ expect(response).to be_successful
+ end
+end
diff --git a/spec/frontend/fixtures/projects.rb b/spec/frontend/fixtures/projects.rb
index af5b70fbbeb..ff21dbaebe8 100644
--- a/spec/frontend/fixtures/projects.rb
+++ b/spec/frontend/fixtures/projects.rb
@@ -20,6 +20,7 @@ describe 'Projects (JavaScript fixtures)', type: :controller do
end
before do
+ stub_feature_flags(new_variables_ui: false)
project.add_maintainer(admin)
sign_in(admin)
allow(SecureRandom).to receive(:hex).and_return('securerandomhex:thereisnospoon')
diff --git a/spec/frontend/fixtures/static/notebook_viewer.html b/spec/frontend/fixtures/static/notebook_viewer.html
deleted file mode 100644
index 4bbb7bf1094..00000000000
--- a/spec/frontend/fixtures/static/notebook_viewer.html
+++ /dev/null
@@ -1 +0,0 @@
-<div class="file-content" data-endpoint="/test" id="js-notebook-viewer"></div>
diff --git a/spec/frontend/fixtures/static/pdf_viewer.html b/spec/frontend/fixtures/static/pdf_viewer.html
deleted file mode 100644
index 350d35a262f..00000000000
--- a/spec/frontend/fixtures/static/pdf_viewer.html
+++ /dev/null
@@ -1 +0,0 @@
-<div class="file-content" data-endpoint="/test" id="js-pdf-viewer"></div>
diff --git a/spec/frontend/frequent_items/components/frequent_items_list_item_spec.js b/spec/frontend/frequent_items/components/frequent_items_list_item_spec.js
new file mode 100644
index 00000000000..ab5784b8f7a
--- /dev/null
+++ b/spec/frontend/frequent_items/components/frequent_items_list_item_spec.js
@@ -0,0 +1,101 @@
+import { shallowMount } from '@vue/test-utils';
+import { trimText } from 'helpers/text_helper';
+import frequentItemsListItemComponent from '~/frequent_items/components/frequent_items_list_item.vue';
+import { mockProject } from '../mock_data'; // can also use 'mockGroup', but not useful to test here
+
+describe('FrequentItemsListItemComponent', () => {
+ let wrapper;
+
+ const findTitle = () => wrapper.find({ ref: 'frequentItemsItemTitle' });
+ const findAvatar = () => wrapper.find({ ref: 'frequentItemsItemAvatar' });
+ const findAllTitles = () => wrapper.findAll({ ref: 'frequentItemsItemTitle' });
+ const findNamespace = () => wrapper.find({ ref: 'frequentItemsItemNamespace' });
+ const findAllAnchors = () => wrapper.findAll('a');
+ const findAllNamespace = () => wrapper.findAll({ ref: 'frequentItemsItemNamespace' });
+ const findAvatarContainer = () => wrapper.findAll({ ref: 'frequentItemsItemAvatarContainer' });
+ const findAllMetadataContainers = () =>
+ wrapper.findAll({ ref: 'frequentItemsItemMetadataContainer' });
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(frequentItemsListItemComponent, {
+ propsData: {
+ itemId: mockProject.id,
+ itemName: mockProject.name,
+ namespace: mockProject.namespace,
+ webUrl: mockProject.webUrl,
+ avatarUrl: mockProject.avatarUrl,
+ ...props,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('computed', () => {
+ describe('highlightedItemName', () => {
+      it('should enclose the part of the project name that matches the `matcher` prop in <b> & </b>', () => {
+ createComponent({ matcher: 'lab' });
+
+ expect(findTitle().element.innerHTML).toContain('<b>L</b><b>a</b><b>b</b>');
+ });
+
+ it('should return project name as it is if `matcher` is not available', () => {
+ createComponent({ matcher: null });
+
+ expect(trimText(findTitle().text())).toBe(mockProject.name);
+ });
+ });
+
+ describe('truncatedNamespace', () => {
+ it('should truncate project name from namespace string', () => {
+ createComponent({ namespace: 'platform / nokia-3310' });
+
+ expect(trimText(findNamespace().text())).toBe('platform');
+ });
+
+ it('should truncate namespace string from the middle if it includes more than two groups in path', () => {
+ createComponent({
+ namespace: 'platform / hardware / broadcom / Wifi Group / Mobile Chipset / nokia-3310',
+ });
+
+ expect(trimText(findNamespace().text())).toBe('platform / ... / Mobile Chipset');
+ });
+ });
+ });
+
+ describe('template', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('should render avatar if avatarUrl is present', () => {
+ wrapper.setProps({ avatarUrl: 'path/to/avatar.png' });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(findAvatar().exists()).toBe(true);
+ });
+ });
+
+ it('should not render avatar if avatarUrl is not present', () => {
+ expect(findAvatar().exists()).toBe(false);
+ });
+
+ it('renders root element with the right classes', () => {
+ expect(wrapper.classes('frequent-items-list-item-container')).toBe(true);
+ });
+
+ it.each`
+ name | selector | expected
+ ${'anchor'} | ${findAllAnchors} | ${1}
+ ${'avatar container'} | ${findAvatarContainer} | ${1}
+ ${'metadata container'} | ${findAllMetadataContainers} | ${1}
+ ${'title'} | ${findAllTitles} | ${1}
+ ${'namespace'} | ${findAllNamespace} | ${1}
+ `('should render $expected $name', ({ selector, expected }) => {
+ expect(selector()).toHaveLength(expected);
+ });
+ });
+});
diff --git a/spec/frontend/frequent_items/components/frequent_items_list_spec.js b/spec/frontend/frequent_items/components/frequent_items_list_spec.js
new file mode 100644
index 00000000000..238fd508053
--- /dev/null
+++ b/spec/frontend/frequent_items/components/frequent_items_list_spec.js
@@ -0,0 +1,101 @@
+import { mount } from '@vue/test-utils';
+import frequentItemsListComponent from '~/frequent_items/components/frequent_items_list.vue';
+import frequentItemsListItemComponent from '~/frequent_items/components/frequent_items_list_item.vue';
+import { mockFrequentProjects } from '../mock_data';
+
+describe('FrequentItemsListComponent', () => {
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ wrapper = mount(frequentItemsListComponent, {
+ propsData: {
+ namespace: 'projects',
+ items: mockFrequentProjects,
+ isFetchFailed: false,
+ hasSearchQuery: false,
+ matcher: 'lab',
+ ...props,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('computed', () => {
+ describe('isListEmpty', () => {
+      it('should return `true` or `false` representing whether `items` is empty, with projects', () => {
+ createComponent({
+ items: [],
+ });
+
+ expect(wrapper.vm.isListEmpty).toBe(true);
+
+ wrapper.setProps({
+ items: mockFrequentProjects,
+ });
+
+ expect(wrapper.vm.isListEmpty).toBe(false);
+ });
+ });
+
+ describe('fetched item messages', () => {
+      it('should return appropriate empty list message based on value of `isFetchFailed` prop with projects', () => {
+ createComponent({
+ isFetchFailed: true,
+ });
+
+ expect(wrapper.vm.listEmptyMessage).toBe(
+ 'This feature requires browser localStorage support',
+ );
+
+ wrapper.setProps({
+ isFetchFailed: false,
+ });
+
+ expect(wrapper.vm.listEmptyMessage).toBe('Projects you visit often will appear here');
+ });
+ });
+
+ describe('searched item messages', () => {
+      it('should return appropriate empty list message based on value of `isFetchFailed` prop when a search query is present, with projects', () => {
+ createComponent({
+ hasSearchQuery: true,
+ isFetchFailed: true,
+ });
+
+ expect(wrapper.vm.listEmptyMessage).toBe('Something went wrong on our end.');
+
+ wrapper.setProps({
+ isFetchFailed: false,
+ });
+
+ expect(wrapper.vm.listEmptyMessage).toBe('Sorry, no projects matched your search');
+ });
+ });
+ });
+
+ describe('template', () => {
+ it('should render component element with list of projects', () => {
+ createComponent();
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.classes('frequent-items-list-container')).toBe(true);
+ expect(wrapper.findAll({ ref: 'frequentItemsList' })).toHaveLength(1);
+ expect(wrapper.findAll(frequentItemsListItemComponent)).toHaveLength(5);
+ });
+ });
+
+ it('should render component element with empty message', () => {
+ createComponent({
+ items: [],
+ });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.vm.$el.querySelectorAll('li.section-empty')).toHaveLength(1);
+ expect(wrapper.findAll(frequentItemsListItemComponent)).toHaveLength(0);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/frequent_items/mock_data.js b/spec/frontend/frequent_items/mock_data.js
new file mode 100644
index 00000000000..5cd4cddd877
--- /dev/null
+++ b/spec/frontend/frequent_items/mock_data.js
@@ -0,0 +1,57 @@
+import { TEST_HOST } from 'helpers/test_constants';
+
+export const mockFrequentProjects = [
+ {
+ id: 1,
+ name: 'GitLab Community Edition',
+ namespace: 'gitlab-org / gitlab-ce',
+ webUrl: `${TEST_HOST}/gitlab-org/gitlab-foss`,
+ avatarUrl: null,
+ frequency: 1,
+ lastAccessedOn: Date.now(),
+ },
+ {
+ id: 2,
+ name: 'GitLab CI',
+ namespace: 'gitlab-org / gitlab-ci',
+ webUrl: `${TEST_HOST}/gitlab-org/gitlab-ci`,
+ avatarUrl: null,
+ frequency: 9,
+ lastAccessedOn: Date.now(),
+ },
+ {
+ id: 3,
+ name: 'Typeahead.Js',
+ namespace: 'twitter / typeahead-js',
+ webUrl: `${TEST_HOST}/twitter/typeahead-js`,
+ avatarUrl: '/uploads/-/system/project/avatar/7/TWBS.png',
+ frequency: 2,
+ lastAccessedOn: Date.now(),
+ },
+ {
+ id: 4,
+ name: 'Intel',
+ namespace: 'platform / hardware / bsp / intel',
+ webUrl: `${TEST_HOST}/platform/hardware/bsp/intel`,
+ avatarUrl: null,
+ frequency: 3,
+ lastAccessedOn: Date.now(),
+ },
+ {
+ id: 5,
+ name: 'v4.4',
+ namespace: 'platform / hardware / bsp / kernel / common / v4.4',
+ webUrl: `${TEST_HOST}/platform/hardware/bsp/kernel/common/v4.4`,
+ avatarUrl: null,
+ frequency: 8,
+ lastAccessedOn: Date.now(),
+ },
+];
+
+export const mockProject = {
+ id: 1,
+ name: 'GitLab Community Edition',
+ namespace: 'gitlab-org / gitlab-ce',
+ webUrl: `${TEST_HOST}/gitlab-org/gitlab-foss`,
+ avatarUrl: null,
+};
diff --git a/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap b/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap
index 3d56bef4b33..09977ecc7a3 100644
--- a/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap
+++ b/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap
@@ -8,13 +8,13 @@ exports[`grafana integration component default state to match the default snapsh
<div
class="settings-header"
>
- <h4
- class="js-section-header"
+ <h3
+ class="js-section-header h4"
>
Grafana Authentication
- </h4>
+ </h3>
<gl-button-stub
class="js-settings-toggle"
diff --git a/spec/frontend/helpers/dom_shims/form_element.js b/spec/frontend/helpers/dom_shims/form_element.js
new file mode 100644
index 00000000000..46ef0374848
--- /dev/null
+++ b/spec/frontend/helpers/dom_shims/form_element.js
@@ -0,0 +1 @@
+HTMLFormElement.prototype.submit = jest.fn();
diff --git a/spec/frontend/helpers/dom_shims/get_client_rects.js b/spec/frontend/helpers/dom_shims/get_client_rects.js
index d740c1bf154..7ba60dd7936 100644
--- a/spec/frontend/helpers/dom_shims/get_client_rects.js
+++ b/spec/frontend/helpers/dom_shims/get_client_rects.js
@@ -8,14 +8,16 @@ function hasHiddenStyle(node) {
return false;
}
-function createDefaultClientRect() {
+function createDefaultClientRect(node) {
+ const { outerWidth: width, outerHeight: height } = node;
+
return {
- bottom: 0,
- height: 0,
+ bottom: height,
+ height,
left: 0,
- right: 0,
+ right: width,
top: 0,
- width: 0,
+ width,
x: 0,
y: 0,
};
@@ -46,5 +48,5 @@ window.Element.prototype.getClientRects = function getClientRects() {
return [];
}
- return [createDefaultClientRect()];
+ return [createDefaultClientRect(node)];
};
diff --git a/spec/frontend/helpers/dom_shims/image_element_properties.js b/spec/frontend/helpers/dom_shims/image_element_properties.js
new file mode 100644
index 00000000000..525246e6ade
--- /dev/null
+++ b/spec/frontend/helpers/dom_shims/image_element_properties.js
@@ -0,0 +1,12 @@
+Object.defineProperty(global.HTMLImageElement.prototype, 'src', {
+ get() {
+ return this.$_jest_src;
+ },
+ set(val) {
+ this.$_jest_src = val;
+
+ if (this.onload) {
+ this.onload();
+ }
+ },
+});
diff --git a/spec/frontend/helpers/dom_shims/index.js b/spec/frontend/helpers/dom_shims/index.js
index 63850b62ff7..bcd5da0ce48 100644
--- a/spec/frontend/helpers/dom_shims/index.js
+++ b/spec/frontend/helpers/dom_shims/index.js
@@ -1,4 +1,8 @@
import './element_scroll_into_view';
+import './form_element';
import './get_client_rects';
import './inner_text';
import './window_scroll_to';
+import './scroll_by';
+import './size_properties';
+import './image_element_properties';
diff --git a/spec/frontend/helpers/dom_shims/scroll_by.js b/spec/frontend/helpers/dom_shims/scroll_by.js
new file mode 100644
index 00000000000..90387e51765
--- /dev/null
+++ b/spec/frontend/helpers/dom_shims/scroll_by.js
@@ -0,0 +1,7 @@
+window.scrollX = 0;
+window.scrollY = 0;
+
+window.scrollBy = (x, y) => {
+ window.scrollX += x;
+ window.scrollY += y;
+};
diff --git a/spec/frontend/helpers/dom_shims/size_properties.js b/spec/frontend/helpers/dom_shims/size_properties.js
new file mode 100644
index 00000000000..a2d5940bd1e
--- /dev/null
+++ b/spec/frontend/helpers/dom_shims/size_properties.js
@@ -0,0 +1,19 @@
+const convertFromStyle = style => {
+ if (style.match(/[0-9](px|rem)/g)) {
+ return Number(style.replace(/[^0-9]/g, ''));
+ }
+
+ return 0;
+};
+
+Object.defineProperty(global.HTMLElement.prototype, 'offsetWidth', {
+ get() {
+ return convertFromStyle(this.style.width || '0px');
+ },
+});
+
+Object.defineProperty(global.HTMLElement.prototype, 'offsetHeight', {
+ get() {
+ return convertFromStyle(this.style.height || '0px');
+ },
+});
diff --git a/spec/frontend/helpers/tracking_helper.js b/spec/frontend/helpers/tracking_helper.js
index 68c1bd2dbca..bd3bd24028c 100644
--- a/spec/frontend/helpers/tracking_helper.js
+++ b/spec/frontend/helpers/tracking_helper.js
@@ -8,7 +8,7 @@ let handlers;
export function mockTracking(category = '_category_', documentOverride, spyMethod) {
document = documentOverride || window.document;
window.snowplow = () => {};
- Tracking.bindDocument(category, document);
+ handlers = Tracking.bindDocument(category, document);
return spyMethod ? spyMethod(Tracking, 'event') : null;
}
diff --git a/spec/frontend/ide/components/commit_sidebar/editor_header_spec.js b/spec/frontend/ide/components/commit_sidebar/editor_header_spec.js
index 054e7492429..a25aba61516 100644
--- a/spec/frontend/ide/components/commit_sidebar/editor_header_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/editor_header_spec.js
@@ -14,7 +14,6 @@ describe('IDE commit editor header', () => {
const findDiscardModal = () => wrapper.find({ ref: 'discardModal' });
const findDiscardButton = () => wrapper.find({ ref: 'discardButton' });
- const findActionButton = () => wrapper.find({ ref: 'actionButton' });
beforeEach(() => {
f = file('file');
@@ -28,9 +27,7 @@ describe('IDE commit editor header', () => {
},
});
- jest.spyOn(wrapper.vm, 'stageChange').mockImplementation();
- jest.spyOn(wrapper.vm, 'unstageChange').mockImplementation();
- jest.spyOn(wrapper.vm, 'discardFileChanges').mockImplementation();
+ jest.spyOn(wrapper.vm, 'discardChanges').mockImplementation();
});
afterEach(() => {
@@ -38,8 +35,8 @@ describe('IDE commit editor header', () => {
wrapper = null;
});
- it('renders button to discard & stage', () => {
- expect(wrapper.vm.$el.querySelectorAll('.btn').length).toBe(2);
+ it('renders button to discard', () => {
+ expect(wrapper.vm.$el.querySelectorAll('.btn')).toHaveLength(1);
});
describe('discard button', () => {
@@ -60,23 +57,7 @@ describe('IDE commit editor header', () => {
it('calls discardFileChanges if dialog result is confirmed', () => {
modal.vm.$emit('ok');
- expect(wrapper.vm.discardFileChanges).toHaveBeenCalledWith(f.path);
- });
- });
-
- describe('stage/unstage button', () => {
- it('unstages the file if it was already staged', () => {
- f.staged = true;
-
- findActionButton().trigger('click');
-
- expect(wrapper.vm.unstageChange).toHaveBeenCalledWith(f.path);
- });
-
- it('stages the file if it was not staged', () => {
- findActionButton().trigger('click');
-
- expect(wrapper.vm.stageChange).toHaveBeenCalledWith(f.path);
+ expect(wrapper.vm.discardChanges).toHaveBeenCalledWith(f.path);
});
});
});
diff --git a/spec/frontend/ide/components/pipelines/__snapshots__/list_spec.js.snap b/spec/frontend/ide/components/pipelines/__snapshots__/list_spec.js.snap
index 177cd4559ca..efa58a4a47b 100644
--- a/spec/frontend/ide/components/pipelines/__snapshots__/list_spec.js.snap
+++ b/spec/frontend/ide/components/pipelines/__snapshots__/list_spec.js.snap
@@ -8,6 +8,7 @@ exports[`IDE pipelines list when loaded renders empty state when no latestPipeli
<empty-state-stub
cansetci="true"
+ class="mb-auto mt-auto"
emptystatesvgpath="http://test.host"
helppagepath="http://test.host"
/>
diff --git a/spec/frontend/ide/components/preview/clientside_spec.js b/spec/frontend/ide/components/preview/clientside_spec.js
index c7d5ea9c513..0cde6fb6060 100644
--- a/spec/frontend/ide/components/preview/clientside_spec.js
+++ b/spec/frontend/ide/components/preview/clientside_spec.js
@@ -16,6 +16,17 @@ const dummyPackageJson = () => ({
main: 'index.js',
}),
});
+const expectedSandpackOptions = () => ({
+ files: {},
+ entry: '/index.js',
+ showOpenInCodeSandbox: true,
+});
+const expectedSandpackSettings = () => ({
+ fileResolver: {
+ isFile: expect.any(Function),
+ readFile: expect.any(Function),
+ },
+});
describe('IDE clientside preview', () => {
let wrapper;
@@ -87,6 +98,46 @@ describe('IDE clientside preview', () => {
it('creates sandpack manager', () => {
expect(smooshpack.Manager).toHaveBeenCalledWith(
'#ide-preview',
+ expectedSandpackOptions(),
+ expectedSandpackSettings(),
+ );
+ });
+
+ it('pings usage', () => {
+ expect(storeClientsideActions.pingUsage).toHaveBeenCalledTimes(1);
+ });
+ });
+
+ describe('with codesandboxBundlerUrl', () => {
+ const TEST_BUNDLER_URL = 'https://test.gitlab-static.test';
+
+ beforeEach(() => {
+ createComponent({
+ getters: { packageJson: dummyPackageJson },
+ state: { codesandboxBundlerUrl: TEST_BUNDLER_URL },
+ });
+
+ return waitForCalls();
+ });
+
+ it('creates sandpack manager with bundlerURL', () => {
+ expect(smooshpack.Manager).toHaveBeenCalledWith('#ide-preview', expectedSandpackOptions(), {
+ ...expectedSandpackSettings(),
+ bundlerURL: TEST_BUNDLER_URL,
+ });
+ });
+ });
+
+ describe('with codesandboxBundlerURL', () => {
+ beforeEach(() => {
+ createComponent({ getters: { packageJson: dummyPackageJson } });
+
+ return waitForCalls();
+ });
+
+ it('creates sandpack manager', () => {
+ expect(smooshpack.Manager).toHaveBeenCalledWith(
+ '#ide-preview',
{
files: {},
entry: '/index.js',
@@ -100,10 +151,6 @@ describe('IDE clientside preview', () => {
},
);
});
-
- it('pings usage', () => {
- expect(storeClientsideActions.pingUsage).toHaveBeenCalledTimes(1);
- });
});
describe('computed', () => {
diff --git a/spec/frontend/ide/utils_spec.js b/spec/frontend/ide/utils_spec.js
index 9d7926a4d06..44eae7eacbe 100644
--- a/spec/frontend/ide/utils_spec.js
+++ b/spec/frontend/ide/utils_spec.js
@@ -1,8 +1,66 @@
import { commitItemIconMap } from '~/ide/constants';
-import { getCommitIconMap } from '~/ide/utils';
+import { getCommitIconMap, isTextFile } from '~/ide/utils';
import { decorateData } from '~/ide/stores/utils';
describe('WebIDE utils', () => {
+ describe('isTextFile', () => {
+ it('returns false for known binary types', () => {
+ expect(isTextFile('file content', 'image/png', 'my.png')).toBeFalsy();
+ // mime types are case insensitive
+ expect(isTextFile('file content', 'IMAGE/PNG', 'my.png')).toBeFalsy();
+ });
+
+ it('returns true for known text types', () => {
+ expect(isTextFile('file content', 'text/plain', 'my.txt')).toBeTruthy();
+ // mime types are case insensitive
+ expect(isTextFile('file content', 'TEXT/PLAIN', 'my.txt')).toBeTruthy();
+ });
+
+ it('returns true for file extensions that Monaco supports syntax highlighting for', () => {
+ // test based on both MIME and extension
+ expect(isTextFile('{"éêė":"value"}', 'application/json', 'my.json')).toBeTruthy();
+ expect(isTextFile('{"éêė":"value"}', 'application/json', '.tsconfig')).toBeTruthy();
+ expect(isTextFile('SELECT "éêė" from tablename', 'application/sql', 'my.sql')).toBeTruthy();
+ });
+
+    it('returns true irrespective of whether the mimes, extensions or file names are lowercase or uppercase', () => {
+ expect(isTextFile('{"éêė":"value"}', 'application/json', 'MY.JSON')).toBeTruthy();
+ expect(isTextFile('SELECT "éêė" from tablename', 'application/sql', 'MY.SQL')).toBeTruthy();
+ expect(
+ isTextFile('var code = "something"', 'application/javascript', 'Gruntfile'),
+ ).toBeTruthy();
+ expect(
+ isTextFile(
+ 'MAINTAINER Александр "alexander11354322283@me.com"',
+ 'application/octet-stream',
+ 'dockerfile',
+ ),
+ ).toBeTruthy();
+ });
+
+    it('returns false if the filename is the same as the expected extension', () => {
+ expect(isTextFile('SELECT "éêė" from tablename', 'application/sql', 'sql')).toBeFalsy();
+ });
+
+ it('returns true for ASCII only content for unknown types', () => {
+ expect(isTextFile('plain text', 'application/x-new-type', 'hello.mytype')).toBeTruthy();
+ });
+
+ it('returns true for relevant filenames', () => {
+ expect(
+ isTextFile(
+ 'MAINTAINER Александр "alexander11354322283@me.com"',
+ 'application/octet-stream',
+ 'Dockerfile',
+ ),
+ ).toBeTruthy();
+ });
+
+ it('returns false for non-ASCII content for unknown types', () => {
+ expect(isTextFile('{"éêė":"value"}', 'application/octet-stream', 'my.random')).toBeFalsy();
+ });
+ });
+
const createFile = (name = 'name', id = name, type = '', parent = null) =>
decorateData({
id,
diff --git a/spec/frontend/issuables_list/components/issuables_list_app_spec.js b/spec/frontend/issuables_list/components/issuables_list_app_spec.js
index eafc4d83d87..6b680af354e 100644
--- a/spec/frontend/issuables_list/components/issuables_list_app_spec.js
+++ b/spec/frontend/issuables_list/components/issuables_list_app_spec.js
@@ -12,12 +12,21 @@ import { PAGE_SIZE, PAGE_SIZE_MANUAL, RELATIVE_POSITION } from '~/issuables_list
jest.mock('~/flash', () => jest.fn());
jest.mock('~/issuables_list/eventhub');
+jest.mock('~/lib/utils/common_utils', () => ({
+ ...jest.requireActual('~/lib/utils/common_utils'),
+ scrollToElement: () => {},
+}));
const TEST_LOCATION = `${TEST_HOST}/issues`;
const TEST_ENDPOINT = '/issues';
const TEST_CREATE_ISSUES_PATH = '/createIssue';
const TEST_EMPTY_SVG_PATH = '/emptySvg';
+const setUrl = query => {
+ window.location.href = `${TEST_LOCATION}${query}`;
+ window.location.search = query;
+};
+
const MOCK_ISSUES = Array(PAGE_SIZE_MANUAL)
.fill(0)
.map((_, i) => ({
@@ -267,8 +276,6 @@ describe('Issuables list component', () => {
});
describe('with query params in window.location', () => {
- const query =
- '?assignee_username=root&author_username=root&confidential=yes&label_name%5B%5D=Aquapod&label_name%5B%5D=Astro&milestone_title=v3.0&my_reaction_emoji=airplane&scope=all&sort=priority&state=opened&utf8=%E2%9C%93&weight=0';
const expectedFilters = {
assignee_username: 'root',
author_username: 'root',
@@ -284,32 +291,73 @@ describe('Issuables list component', () => {
sort: 'desc',
};
- beforeEach(() => {
- window.location.href = `${TEST_LOCATION}${query}`;
- window.location.search = query;
- setupApiMock(() => [200, MOCK_ISSUES.slice(0)]);
- factory({ sortKey: 'milestone_due_desc' });
- return waitForPromises();
- });
+ describe('when page is not present in params', () => {
+ const query =
+ '?assignee_username=root&author_username=root&confidential=yes&label_name%5B%5D=Aquapod&label_name%5B%5D=Astro&milestone_title=v3.0&my_reaction_emoji=airplane&scope=all&sort=priority&state=opened&utf8=%E2%9C%93&weight=0';
- it('applies filters and sorts', () => {
- expect(wrapper.vm.hasFilters).toBe(true);
- expect(wrapper.vm.filters).toEqual(expectedFilters);
+ beforeEach(() => {
+ setUrl(query);
- expect(apiSpy).toHaveBeenCalledWith(
- expect.objectContaining({
- params: {
- ...expectedFilters,
- with_labels_details: true,
- page: 1,
- per_page: PAGE_SIZE,
- },
- }),
- );
+ setupApiMock(() => [200, MOCK_ISSUES.slice(0)]);
+ factory({ sortKey: 'milestone_due_desc' });
+
+ return waitForPromises();
+ });
+
+ afterEach(() => {
+ apiSpy.mockClear();
+ });
+
+ it('applies filters and sorts', () => {
+ expect(wrapper.vm.hasFilters).toBe(true);
+ expect(wrapper.vm.filters).toEqual(expectedFilters);
+
+ expect(apiSpy).toHaveBeenCalledWith(
+ expect.objectContaining({
+ params: {
+ ...expectedFilters,
+ with_labels_details: true,
+ page: 1,
+ per_page: PAGE_SIZE,
+ },
+ }),
+ );
+ });
+
+ it('passes the base url to issuable', () => {
+ expect(findFirstIssuable().props('baseUrl')).toBe(TEST_LOCATION);
+ });
});
- it('passes the base url to issuable', () => {
- expect(findFirstIssuable().props('baseUrl')).toEqual(TEST_LOCATION);
+ describe('when page is present in the param', () => {
+ const query =
+ '?assignee_username=root&author_username=root&confidential=yes&label_name%5B%5D=Aquapod&label_name%5B%5D=Astro&milestone_title=v3.0&my_reaction_emoji=airplane&scope=all&sort=priority&state=opened&utf8=%E2%9C%93&weight=0&page=3';
+
+ beforeEach(() => {
+ setUrl(query);
+
+ setupApiMock(() => [200, MOCK_ISSUES.slice(0)]);
+ factory({ sortKey: 'milestone_due_desc' });
+
+ return waitForPromises();
+ });
+
+ afterEach(() => {
+ apiSpy.mockClear();
+ });
+
+ it('applies filters and sorts', () => {
+ expect(apiSpy).toHaveBeenCalledWith(
+ expect.objectContaining({
+ params: {
+ ...expectedFilters,
+ with_labels_details: true,
+ page: 3,
+ per_page: PAGE_SIZE,
+ },
+ }),
+ );
+ });
});
});
@@ -322,7 +370,7 @@ describe('Issuables list component', () => {
});
it('passes the base url to issuable', () => {
- expect(findFirstIssuable().props('baseUrl')).toEqual(TEST_LOCATION);
+ expect(findFirstIssuable().props('baseUrl')).toBe(TEST_LOCATION);
});
});
@@ -402,4 +450,47 @@ describe('Issuables list component', () => {
});
});
});
+
+ describe('when paginates', () => {
+ const newPage = 3;
+
+ beforeEach(() => {
+ window.history.pushState = jest.fn();
+ setupApiMock(() => [
+ 200,
+ MOCK_ISSUES.slice(0, PAGE_SIZE),
+ {
+ 'x-total': 100,
+ 'x-page': 2,
+ },
+ ]);
+
+ factory();
+
+ return waitForPromises();
+ });
+
+ afterEach(() => {
+ // reset to original value
+ window.history.pushState.mockRestore();
+ });
+
+ it('calls window.history.pushState one time', () => {
+ // Trigger pagination
+ wrapper.find(GlPagination).vm.$emit('input', newPage);
+
+ expect(window.history.pushState).toHaveBeenCalledTimes(1);
+ });
+
+ it('sets params in the url', () => {
+ // Trigger pagination
+ wrapper.find(GlPagination).vm.$emit('input', newPage);
+
+ expect(window.history.pushState).toHaveBeenCalledWith(
+ {},
+ '',
+ `${TEST_LOCATION}?state=opened&order_by=priority&sort=asc&page=${newPage}`,
+ );
+ });
+ });
});
diff --git a/spec/frontend/labels_select_spec.js b/spec/frontend/labels_select_spec.js
index d54e0eab845..5f48bad4970 100644
--- a/spec/frontend/labels_select_spec.js
+++ b/spec/frontend/labels_select_spec.js
@@ -23,6 +23,16 @@ const mockScopedLabels = [
},
];
+const mockScopedLabels2 = [
+ {
+ id: 28,
+ title: 'Foo::Bar2',
+ description: 'Foobar2',
+ color: '#FFFFFF',
+ text_color: '#000000',
+ },
+];
+
describe('LabelsSelect', () => {
describe('getLabelTemplate', () => {
describe('when normal label is present', () => {
@@ -41,29 +51,29 @@ describe('LabelsSelect', () => {
});
it('generated label item template has correct label URL', () => {
- expect($labelEl.attr('href')).toBe('/foo/bar?label_name[]=Foo%20Label');
+ expect($labelEl.find('a').attr('href')).toBe('/foo/bar?label_name[]=Foo%20Label');
});
it('generated label item template has correct label title', () => {
- expect($labelEl.find('span.label').text()).toBe(label.title);
+ expect($labelEl.find('span.gl-label-text').text()).toBe(label.title);
});
it('generated label item template has label description as title attribute', () => {
- expect($labelEl.find('span.label').attr('title')).toBe(label.description);
+ expect($labelEl.find('a').attr('title')).toBe(label.description);
});
it('generated label item template has correct label styles', () => {
- expect($labelEl.find('span.label').attr('style')).toBe(
+ expect($labelEl.find('span.gl-label-text').attr('style')).toBe(
`background-color: ${label.color}; color: ${label.text_color};`,
);
});
- it('generated label item has a badge class', () => {
- expect($labelEl.find('span').hasClass('badge')).toEqual(true);
+ it('generated label item has a gl-label-text class', () => {
+ expect($labelEl.find('span').hasClass('gl-label-text')).toEqual(true);
});
- it('generated label item template does not have scoped-label class', () => {
- expect($labelEl.find('.scoped-label')).toHaveLength(0);
+ it('generated label item template does not have gl-label-icon class', () => {
+ expect($labelEl.find('.gl-label-icon')).toHaveLength(0);
});
});
@@ -87,29 +97,65 @@ describe('LabelsSelect', () => {
});
it('generated label item template has correct label title', () => {
- expect($labelEl.find('span.label').text()).toBe(label.title);
+ const scopedTitle = label.title.split('::');
+ expect($labelEl.find('span.gl-label-text').text()).toContain(scopedTitle[0]);
+ expect($labelEl.find('span.gl-label-text').text()).toContain(scopedTitle[1]);
});
it('generated label item template has html flag as true', () => {
- expect($labelEl.find('span.label').attr('data-html')).toBe('true');
+ expect($labelEl.find('a').attr('data-html')).toBe('true');
});
it('generated label item template has question icon', () => {
expect($labelEl.find('i.fa-question-circle')).toHaveLength(1);
});
- it('generated label item template has scoped-label class', () => {
- expect($labelEl.find('.scoped-label')).toHaveLength(1);
+ it('generated label item template has gl-label-icon class', () => {
+ expect($labelEl.find('.gl-label-icon')).toHaveLength(1);
});
it('generated label item template has correct label styles', () => {
- expect($labelEl.find('span.label').attr('style')).toBe(
+ expect($labelEl.find('span.gl-label-text').attr('style')).toBe(
`background-color: ${label.color}; color: ${label.text_color};`,
);
+ expect(
+ $labelEl
+ .find('span.gl-label-text')
+ .last()
+ .attr('style'),
+ ).toBe(`color: ${label.color};`);
});
it('generated label item has a badge class', () => {
- expect($labelEl.find('span').hasClass('badge')).toEqual(true);
+ expect($labelEl.find('span').hasClass('gl-label-text')).toEqual(true);
+ });
+ });
+
+ describe('when scoped label is present, with text color not white', () => {
+ const label = mockScopedLabels2[0];
+ let $labelEl;
+
+ beforeEach(() => {
+ $labelEl = $(
+ LabelsSelect.getLabelTemplate({
+ labels: mockScopedLabels2,
+ issueUpdateURL: mockUrl,
+ enableScopedLabels: true,
+ scopedLabelsDocumentationLink: 'docs-link',
+ }),
+ );
+ });
+
+ it('generated label item template has correct label styles', () => {
+ expect($labelEl.find('span.gl-label-text').attr('style')).toBe(
+ `background-color: ${label.color}; color: ${label.text_color};`,
+ );
+ expect(
+ $labelEl
+ .find('span.gl-label-text')
+ .last()
+ .attr('style'),
+ ).toBe(`color: ${label.text_color};`);
});
});
});
diff --git a/spec/frontend/lib/utils/common_utils_spec.js b/spec/frontend/lib/utils/common_utils_spec.js
new file mode 100644
index 00000000000..6ba8f58086a
--- /dev/null
+++ b/spec/frontend/lib/utils/common_utils_spec.js
@@ -0,0 +1,1001 @@
+import * as commonUtils from '~/lib/utils/common_utils';
+
+describe('common_utils', () => {
+ describe('parseUrl', () => {
+ it('returns an anchor tag with url', () => {
+ expect(commonUtils.parseUrl('/some/absolute/url').pathname).toContain('some/absolute/url');
+ });
+
+ it('url is escaped', () => {
+ // IE11 will return a relative pathname while other browsers will return a full pathname.
+      // parseUrl uses an anchor element for parsing a url. With relative urls, the anchor
+ // element will create an absolute url relative to the current execution context.
+ // The JavaScript test suite is executed at '/' which will lead to an absolute url
+ // starting with '/'.
+ expect(commonUtils.parseUrl('" test="asf"').pathname).toContain('/%22%20test=%22asf%22');
+ });
+ });
+
+ describe('parseUrlPathname', () => {
+ it('returns an absolute url when given an absolute url', () => {
+ expect(commonUtils.parseUrlPathname('/some/absolute/url')).toEqual('/some/absolute/url');
+ });
+
+ it('returns an absolute url when given a relative url', () => {
+ expect(commonUtils.parseUrlPathname('some/relative/url')).toEqual('/some/relative/url');
+ });
+ });
+
+ describe('urlParamsToArray', () => {
+ it('returns empty array for empty querystring', () => {
+ expect(commonUtils.urlParamsToArray('')).toEqual([]);
+ });
+
+ it('should decode params', () => {
+ expect(commonUtils.urlParamsToArray('?label_name%5B%5D=test')[0]).toBe('label_name[]=test');
+ });
+
+ it('should remove the question mark from the search params', () => {
+ const paramsArray = commonUtils.urlParamsToArray('?test=thing');
+
+ expect(paramsArray[0][0]).not.toBe('?');
+ });
+ });
+
+ describe('urlParamsToObject', () => {
+ it('parses path for label with trailing +', () => {
+ expect(commonUtils.urlParamsToObject('label_name[]=label%2B', {})).toEqual({
+ label_name: ['label+'],
+ });
+ });
+
+ it('parses path for milestone with trailing +', () => {
+ expect(commonUtils.urlParamsToObject('milestone_title=A%2B', {})).toEqual({
+ milestone_title: 'A+',
+ });
+ });
+
+ it('parses path for search terms with spaces', () => {
+ expect(commonUtils.urlParamsToObject('search=two+words', {})).toEqual({
+ search: 'two words',
+ });
+ });
+ });
+
+ describe('handleLocationHash', () => {
+ beforeEach(() => {
+ jest.spyOn(window.document, 'getElementById');
+ });
+
+ afterEach(() => {
+ window.history.pushState({}, null, '');
+ });
+
+ function expectGetElementIdToHaveBeenCalledWith(elementId) {
+ expect(window.document.getElementById).toHaveBeenCalledWith(elementId);
+ }
+
+ it('decodes hash parameter', () => {
+ window.history.pushState({}, null, '#random-hash');
+ commonUtils.handleLocationHash();
+
+ expectGetElementIdToHaveBeenCalledWith('random-hash');
+ expectGetElementIdToHaveBeenCalledWith('user-content-random-hash');
+ });
+
+ it('decodes hash parameter with accented characters', () => {
+ window.history.pushState({}, null, '#definição');
+ commonUtils.handleLocationHash();
+
+ expectGetElementIdToHaveBeenCalledWith('definição');
+ expectGetElementIdToHaveBeenCalledWith('user-content-definição');
+ });
+
+ it('decodes percent-encoded hash parameter with accented characters', () => {
+ window.history.pushState({}, null, '#defini%C3%A7%C3%A3o');
+ commonUtils.handleLocationHash();
+
+ expectGetElementIdToHaveBeenCalledWith('definição');
+ expectGetElementIdToHaveBeenCalledWith('user-content-definição');
+ });
+
+ it('scrolls element into view', () => {
+ document.body.innerHTML += `
+ <div id="parent">
+ <div style="height: 2000px;"></div>
+ <div id="test" style="height: 2000px;"></div>
+ </div>
+ `;
+
+ window.history.pushState({}, null, '#test');
+ commonUtils.handleLocationHash();
+
+ expectGetElementIdToHaveBeenCalledWith('test');
+
+ expect(window.scrollY).toBe(document.getElementById('test').offsetTop);
+
+ document.getElementById('parent').remove();
+ });
+
+ it('scrolls user content element into view', () => {
+ document.body.innerHTML += `
+ <div id="parent">
+ <div style="height: 2000px;"></div>
+ <div id="user-content-test" style="height: 2000px;"></div>
+ </div>
+ `;
+
+ window.history.pushState({}, null, '#test');
+ commonUtils.handleLocationHash();
+
+ expectGetElementIdToHaveBeenCalledWith('test');
+ expectGetElementIdToHaveBeenCalledWith('user-content-test');
+
+ expect(window.scrollY).toBe(document.getElementById('user-content-test').offsetTop);
+
+ document.getElementById('parent').remove();
+ });
+
+ it('scrolls to element with offset from navbar', () => {
+ jest.spyOn(window, 'scrollBy');
+ document.body.innerHTML += `
+ <div id="parent">
+ <div class="navbar-gitlab" style="position: fixed; top: 0; height: 50px;"></div>
+ <div style="height: 2000px; margin-top: 50px;"></div>
+ <div id="user-content-test" style="height: 2000px;"></div>
+ </div>
+ `;
+
+ window.history.pushState({}, null, '#test');
+ commonUtils.handleLocationHash();
+ jest.advanceTimersByTime(1);
+
+ expectGetElementIdToHaveBeenCalledWith('test');
+ expectGetElementIdToHaveBeenCalledWith('user-content-test');
+
+ expect(window.scrollY).toBe(document.getElementById('user-content-test').offsetTop - 50);
+ expect(window.scrollBy).toHaveBeenCalledWith(0, -50);
+
+ document.getElementById('parent').remove();
+ });
+ });
+
+ describe('historyPushState', () => {
+ afterEach(() => {
+ window.history.replaceState({}, null, null);
+ });
+
+ it('should call pushState with the correct path', () => {
+ jest.spyOn(window.history, 'pushState').mockImplementation(() => {});
+
+ commonUtils.historyPushState('newpath?page=2');
+
+ expect(window.history.pushState).toHaveBeenCalled();
+ expect(window.history.pushState.mock.calls[0][2]).toContain('newpath?page=2');
+ });
+ });
+
+ describe('parseQueryStringIntoObject', () => {
+ it('should return object with query parameters', () => {
+ expect(commonUtils.parseQueryStringIntoObject('scope=all&page=2')).toEqual({
+ scope: 'all',
+ page: '2',
+ });
+
+ expect(commonUtils.parseQueryStringIntoObject('scope=all')).toEqual({ scope: 'all' });
+ expect(commonUtils.parseQueryStringIntoObject()).toEqual({});
+ });
+ });
+
+ describe('objectToQueryString', () => {
+ it('returns an empty string when `params` is undefined, null or an empty string', () => {
+ expect(commonUtils.objectToQueryString()).toBe('');
+ expect(commonUtils.objectToQueryString('')).toBe('');
+ });
+
+ it('returns query string with values of `params`', () => {
+ const singleQueryParams = { foo: true };
+ const multipleQueryParams = { foo: true, bar: true };
+
+ expect(commonUtils.objectToQueryString(singleQueryParams)).toBe('foo=true');
+ expect(commonUtils.objectToQueryString(multipleQueryParams)).toBe('foo=true&bar=true');
+ });
+ });
+
+ describe('buildUrlWithCurrentLocation', () => {
+ it('should build a url with the current location and given parameters', () => {
+ expect(commonUtils.buildUrlWithCurrentLocation()).toEqual(window.location.pathname);
+ expect(commonUtils.buildUrlWithCurrentLocation('?page=2')).toEqual(
+ `${window.location.pathname}?page=2`,
+ );
+ });
+ });
+
+ describe('debounceByAnimationFrame', () => {
+ it('debounces a function to allow a maximum of one call per animation frame', done => {
+ const spy = jest.fn();
+ const debouncedSpy = commonUtils.debounceByAnimationFrame(spy);
+ window.requestAnimationFrame(() => {
+ debouncedSpy();
+ debouncedSpy();
+ window.requestAnimationFrame(() => {
+ expect(spy).toHaveBeenCalledTimes(1);
+ done();
+ });
+ });
+ });
+ });
+
+ describe('getParameterByName', () => {
+ beforeEach(() => {
+ window.history.pushState({}, null, '?scope=all&p=2');
+ });
+
+ afterEach(() => {
+ window.history.replaceState({}, null, null);
+ });
+
+ it('should return valid parameter', () => {
+ const value = commonUtils.getParameterByName('scope');
+
+ expect(commonUtils.getParameterByName('p')).toEqual('2');
+ expect(value).toBe('all');
+ });
+
+ it('should return invalid parameter', () => {
+ const value = commonUtils.getParameterByName('fakeParameter');
+
+ expect(value).toBe(null);
+ });
+
+ it('should return valid parameters if a URL is provided', () => {
+ let value = commonUtils.getParameterByName('foo', 'http://cocteau.twins/?foo=bar');
+
+ expect(value).toBe('bar');
+
+ value = commonUtils.getParameterByName('manan', 'http://cocteau.twins/?foo=bar&manan=canchu');
+
+ expect(value).toBe('canchu');
+ });
+ });
+
+ describe('normalizeHeaders', () => {
+ it('should uppercase all the header keys to keep them consistent', () => {
+ const apiHeaders = {
+ 'X-Something-Workhorse': { workhorse: 'ok' },
+ 'x-something-nginx': { nginx: 'ok' },
+ };
+
+ const normalized = commonUtils.normalizeHeaders(apiHeaders);
+
+ const WORKHORSE = 'X-SOMETHING-WORKHORSE';
+ const NGINX = 'X-SOMETHING-NGINX';
+
+ expect(normalized[WORKHORSE].workhorse).toBe('ok');
+ expect(normalized[NGINX].nginx).toBe('ok');
+ });
+ });
+
+ describe('normalizeCRLFHeaders', () => {
+ const testContext = {};
+ beforeEach(() => {
+ testContext.CRLFHeaders =
+ 'a-header: a-value\nAnother-Header: ANOTHER-VALUE\nLaSt-HeAdEr: last-VALUE';
+ jest.spyOn(String.prototype, 'split');
+ testContext.normalizeCRLFHeaders = commonUtils.normalizeCRLFHeaders(testContext.CRLFHeaders);
+ });
+
+ it('should split by newline', () => {
+ expect(String.prototype.split).toHaveBeenCalledWith('\n');
+ });
+
+ it('should split by colon+space for each header', () => {
+ expect(String.prototype.split.mock.calls.filter(args => args[0] === ': ').length).toBe(3);
+ });
+
+ it('should return a normalized headers object', () => {
+ expect(testContext.normalizeCRLFHeaders).toEqual({
+ 'A-HEADER': 'a-value',
+ 'ANOTHER-HEADER': 'ANOTHER-VALUE',
+ 'LAST-HEADER': 'last-VALUE',
+ });
+ });
+ });
+
+ describe('parseIntPagination', () => {
+ it('should parse all string values to integers and return a pagination object', () => {
+ const pagination = {
+ 'X-PER-PAGE': 10,
+ 'X-PAGE': 2,
+ 'X-TOTAL': 30,
+ 'X-TOTAL-PAGES': 3,
+ 'X-NEXT-PAGE': 3,
+ 'X-PREV-PAGE': 1,
+ };
+
+ const expectedPagination = {
+ perPage: 10,
+ page: 2,
+ total: 30,
+ totalPages: 3,
+ nextPage: 3,
+ previousPage: 1,
+ };
+
+ expect(commonUtils.parseIntPagination(pagination)).toEqual(expectedPagination);
+ });
+ });
+
+ describe('isMetaClick', () => {
+ it('should identify meta click on Windows/Linux', () => {
+ const e = {
+ metaKey: false,
+ ctrlKey: true,
+ which: 1,
+ };
+
+ expect(commonUtils.isMetaClick(e)).toBe(true);
+ });
+
+ it('should identify meta click on macOS', () => {
+ const e = {
+ metaKey: true,
+ ctrlKey: false,
+ which: 1,
+ };
+
+ expect(commonUtils.isMetaClick(e)).toBe(true);
+ });
+
+ it('should identify middle-click or mouse-wheel click as a meta click', () => {
+ const e = {
+ metaKey: false,
+ ctrlKey: false,
+ which: 2,
+ };
+
+ expect(commonUtils.isMetaClick(e)).toBe(true);
+ });
+ });
+
+ describe('parseBoolean', () => {
+ const { parseBoolean } = commonUtils;
+
+ it('returns true for "true"', () => {
+ expect(parseBoolean('true')).toEqual(true);
+ });
+
+ it('returns false for "false"', () => {
+ expect(parseBoolean('false')).toEqual(false);
+ });
+
+ it('returns false for "something"', () => {
+ expect(parseBoolean('something')).toEqual(false);
+ });
+
+ it('returns false for null', () => {
+ expect(parseBoolean(null)).toEqual(false);
+ });
+
+ it('is idempotent', () => {
+ const input = ['true', 'false', 'something', null];
+ input.forEach(value => {
+ const result = parseBoolean(value);
+
+ expect(parseBoolean(result)).toBe(result);
+ });
+ });
+ });
+
+ describe('backOff', () => {
+ beforeEach(() => {
+ // shortcut our timeouts otherwise these tests will take a long time to finish
+ jest.spyOn(window, 'setTimeout').mockImplementation(cb => setImmediate(cb, 0));
+ });
+
+ it('resolves the promise from the callback', done => {
+ const expectedResponseValue = 'Success!';
+ commonUtils
+ .backOff((next, stop) =>
+ new Promise(resolve => {
+ resolve(expectedResponseValue);
+ })
+ .then(resp => {
+ stop(resp);
+ })
+ .catch(done.fail),
+ )
+ .then(respBackoff => {
+ expect(respBackoff).toBe(expectedResponseValue);
+ done();
+ })
+ .catch(done.fail);
+ });
+
+ it('catches the rejected promise from the callback', done => {
+ const errorMessage = 'Mistakes were made!';
+ commonUtils
+ .backOff((next, stop) => {
+ new Promise((resolve, reject) => {
+ reject(new Error(errorMessage));
+ })
+ .then(resp => {
+ stop(resp);
+ })
+ .catch(err => stop(err));
+ })
+ .catch(errBackoffResp => {
+ expect(errBackoffResp instanceof Error).toBe(true);
+ expect(errBackoffResp.message).toBe(errorMessage);
+ done();
+ });
+ });
+
+ it('resolves the promise correctly after retrying a third time', done => {
+ let numberOfCalls = 1;
+ const expectedResponseValue = 'Success!';
+ commonUtils
+ .backOff((next, stop) =>
+ Promise.resolve(expectedResponseValue)
+ .then(resp => {
+ if (numberOfCalls < 3) {
+ numberOfCalls += 1;
+ next();
+ } else {
+ stop(resp);
+ }
+ })
+ .catch(done.fail),
+ )
+ .then(respBackoff => {
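+ // collect the delay passed to each retry; backOff doubled it from 2s to 4s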
+ const timeouts = window.setTimeout.mock.calls.map(([, timeout]) => timeout);
+
+ expect(timeouts).toEqual([2000, 4000]);
+ expect(respBackoff).toBe(expectedResponseValue);
+ done();
+ })
+ .catch(done.fail);
+ });
+
+ it('rejects the backOff promise after timing out', done => {
+ commonUtils
+ .backOff(next => next(), 64000)
+ .catch(errBackoffResp => {
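+ // the retry delays double from 2s and cap at 32s until the 64s budget is exhausted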
+ const timeouts = window.setTimeout.mock.calls.map(([, timeout]) => timeout);
+
+ expect(timeouts).toEqual([2000, 4000, 8000, 16000, 32000, 32000]);
+ expect(errBackoffResp instanceof Error).toBe(true);
+ expect(errBackoffResp.message).toBe('BACKOFF_TIMEOUT');
+ done();
+ });
+ });
+ });
+
+ describe('setFavicon', () => {
+ beforeEach(() => {
+ const favicon = document.createElement('link');
+ favicon.setAttribute('id', 'favicon');
+ favicon.setAttribute('href', 'default/favicon');
+ favicon.setAttribute('data-default-href', 'default/favicon');
+ document.body.appendChild(favicon);
+ });
+
+ afterEach(() => {
+ document.body.removeChild(document.getElementById('favicon'));
+ });
+
+ it('should set page favicon to provided favicon', () => {
+ const faviconPath = '//custom_favicon';
+ commonUtils.setFavicon(faviconPath);
+
+ expect(document.getElementById('favicon').getAttribute('href')).toEqual(faviconPath);
+ });
+ });
+
+ describe('resetFavicon', () => {
+ beforeEach(() => {
+ const favicon = document.createElement('link');
+ favicon.setAttribute('id', 'favicon');
+ favicon.setAttribute('data-original-href', 'default/favicon');
+ document.body.appendChild(favicon);
+ });
+
+ afterEach(() => {
+ document.body.removeChild(document.getElementById('favicon'));
+ });
+
+ it('should reset page favicon to the default icon', () => {
+ const favicon = document.getElementById('favicon');
+ favicon.setAttribute('href', 'new/favicon');
+ commonUtils.resetFavicon();
+
+ expect(document.getElementById('favicon').getAttribute('href')).toEqual('default/favicon');
+ });
+ });
+
+ describe('spriteIcon', () => {
+ let beforeGon;
+
+ beforeEach(() => {
+ window.gon = window.gon || {};
+ beforeGon = Object.assign({}, window.gon);
+ window.gon.sprite_icons = 'icons.svg';
+ });
+
+ afterEach(() => {
+ window.gon = beforeGon;
+ });
+
+ it('should return the svg for a linked icon', () => {
+ expect(commonUtils.spriteIcon('test')).toEqual(
+ '<svg ><use xlink:href="icons.svg#test" /></svg>',
+ );
+ });
+
+ it('should set svg className when passed', () => {
+ expect(commonUtils.spriteIcon('test', 'fa fa-test')).toEqual(
+ '<svg class="fa fa-test"><use xlink:href="icons.svg#test" /></svg>',
+ );
+ });
+ });
+
+ describe('convertObjectProps*', () => {
+ const mockConversionFunction = prop => `${prop}_converted`;
+ const isEmptyObject = obj =>
+ typeof obj === 'object' && obj !== null && Object.keys(obj).length === 0;
+
+ const mockObjects = {
+ convertObjectProps: {
+ obj: {
+ id: 1,
+ group_name: 'GitLab.org',
+ absolute_web_url: 'https://gitlab.com/gitlab-org/',
+ },
+ objNested: {
+ project_name: 'GitLab CE',
+ group_name: 'GitLab.org',
+ license_type: 'MIT',
+ tech_stack: {
+ backend: 'Ruby',
+ frontend_framework: 'Vue',
+ database: 'PostgreSQL',
+ },
+ },
+ },
+ convertObjectPropsToCamelCase: {
+ obj: {
+ id: 1,
+ group_name: 'GitLab.org',
+ absolute_web_url: 'https://gitlab.com/gitlab-org/',
+ },
+ objNested: {
+ project_name: 'GitLab CE',
+ group_name: 'GitLab.org',
+ license_type: 'MIT',
+ tech_stack: {
+ backend: 'Ruby',
+ frontend_framework: 'Vue',
+ database: 'PostgreSQL',
+ },
+ },
+ },
+ convertObjectPropsToSnakeCase: {
+ obj: {
+ id: 1,
+ groupName: 'GitLab.org',
+ absoluteWebUrl: 'https://gitlab.com/gitlab-org/',
+ },
+ objNested: {
+ projectName: 'GitLab CE',
+ groupName: 'GitLab.org',
+ licenseType: 'MIT',
+ techStack: {
+ backend: 'Ruby',
+ frontendFramework: 'Vue',
+ database: 'PostgreSQL',
+ },
+ },
+ },
+ };
+
+ describe('convertObjectProps', () => {
+ it('returns an empty object if `conversionFunction` parameter is not a function', () => {
+ const result = commonUtils.convertObjectProps(null, mockObjects.convertObjectProps.obj);
+
+ expect(isEmptyObject(result)).toBeTruthy();
+ });
+ });
+
+ describe.each`
+ functionName | mockObj | mockObjNested
+ ${'convertObjectProps'} | ${mockObjects.convertObjectProps.obj} | ${mockObjects.convertObjectProps.objNested}
+ ${'convertObjectPropsToCamelCase'} | ${mockObjects.convertObjectPropsToCamelCase.obj} | ${mockObjects.convertObjectPropsToCamelCase.objNested}
+ ${'convertObjectPropsToSnakeCase'} | ${mockObjects.convertObjectPropsToSnakeCase.obj} | ${mockObjects.convertObjectPropsToSnakeCase.objNested}
+ `('$functionName', ({ functionName, mockObj, mockObjNested }) => {
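+ // convertObjectProps takes the conversion callback as its first argument, so wrap it here to
+ // give all three functions the same (obj, options) signature.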
+ const testFunction =
+ functionName === 'convertObjectProps'
+ ? (obj, options = {}) =>
+ commonUtils.convertObjectProps(mockConversionFunction, obj, options)
+ : commonUtils[functionName];
+
+ it('returns an empty object if `obj` parameter is null, undefined or an empty object', () => {
+ expect(isEmptyObject(testFunction(null))).toBeTruthy();
+ expect(isEmptyObject(testFunction())).toBeTruthy();
+ expect(isEmptyObject(testFunction({}))).toBeTruthy();
+ });
+
+ it('converts object properties', () => {
+ const expected = {
+ convertObjectProps: {
+ id_converted: 1,
+ group_name_converted: 'GitLab.org',
+ absolute_web_url_converted: 'https://gitlab.com/gitlab-org/',
+ },
+ convertObjectPropsToCamelCase: {
+ id: 1,
+ groupName: 'GitLab.org',
+ absoluteWebUrl: 'https://gitlab.com/gitlab-org/',
+ },
+ convertObjectPropsToSnakeCase: {
+ id: 1,
+ group_name: 'GitLab.org',
+ absolute_web_url: 'https://gitlab.com/gitlab-org/',
+ },
+ };
+
+ expect(testFunction(mockObj)).toEqual(expected[functionName]);
+ });
+
+ it('does not deep-convert by default', () => {
+ const expected = {
+ convertObjectProps: {
+ project_name_converted: 'GitLab CE',
+ group_name_converted: 'GitLab.org',
+ license_type_converted: 'MIT',
+ tech_stack_converted: {
+ backend: 'Ruby',
+ frontend_framework: 'Vue',
+ database: 'PostgreSQL',
+ },
+ },
+ convertObjectPropsToCamelCase: {
+ projectName: 'GitLab CE',
+ groupName: 'GitLab.org',
+ licenseType: 'MIT',
+ techStack: {
+ backend: 'Ruby',
+ frontend_framework: 'Vue',
+ database: 'PostgreSQL',
+ },
+ },
+ convertObjectPropsToSnakeCase: {
+ project_name: 'GitLab CE',
+ group_name: 'GitLab.org',
+ license_type: 'MIT',
+ tech_stack: {
+ backend: 'Ruby',
+ frontendFramework: 'Vue',
+ database: 'PostgreSQL',
+ },
+ },
+ };
+
+ expect(testFunction(mockObjNested)).toEqual(expected[functionName]);
+ });
+
+ describe('with options', () => {
+ describe('when options.deep is true', () => {
+ const expected = {
+ convertObjectProps: {
+ project_name_converted: 'GitLab CE',
+ group_name_converted: 'GitLab.org',
+ license_type_converted: 'MIT',
+ tech_stack_converted: {
+ backend_converted: 'Ruby',
+ frontend_framework_converted: 'Vue',
+ database_converted: 'PostgreSQL',
+ },
+ },
+ convertObjectPropsToCamelCase: {
+ projectName: 'GitLab CE',
+ groupName: 'GitLab.org',
+ licenseType: 'MIT',
+ techStack: {
+ backend: 'Ruby',
+ frontendFramework: 'Vue',
+ database: 'PostgreSQL',
+ },
+ },
+ convertObjectPropsToSnakeCase: {
+ project_name: 'GitLab CE',
+ group_name: 'GitLab.org',
+ license_type: 'MIT',
+ tech_stack: {
+ backend: 'Ruby',
+ frontend_framework: 'Vue',
+ database: 'PostgreSQL',
+ },
+ },
+ };
+
+ it('converts nested objects', () => {
+ expect(testFunction(mockObjNested, { deep: true })).toEqual(expected[functionName]);
+ });
+
+ it('converts array of nested objects', () => {
+ expect(testFunction([mockObjNested], { deep: true })).toEqual([expected[functionName]]);
+ });
+
+ it('converts array with child arrays', () => {
+ expect(testFunction([[mockObjNested]], { deep: true })).toEqual([
+ [expected[functionName]],
+ ]);
+ });
+ });
+
+ describe('when options.dropKeys is provided', () => {
+ it('discards properties mentioned in `dropKeys` array', () => {
+ const expected = {
+ convertObjectProps: {
+ project_name_converted: 'GitLab CE',
+ license_type_converted: 'MIT',
+ tech_stack_converted: {
+ backend: 'Ruby',
+ frontend_framework: 'Vue',
+ database: 'PostgreSQL',
+ },
+ },
+ convertObjectPropsToCamelCase: {
+ projectName: 'GitLab CE',
+ licenseType: 'MIT',
+ techStack: {
+ backend: 'Ruby',
+ frontend_framework: 'Vue',
+ database: 'PostgreSQL',
+ },
+ },
+ convertObjectPropsToSnakeCase: {
+ project_name: 'GitLab CE',
+ license_type: 'MIT',
+ tech_stack: {
+ backend: 'Ruby',
+ frontendFramework: 'Vue',
+ database: 'PostgreSQL',
+ },
+ },
+ };
+
+ const dropKeys = {
+ convertObjectProps: ['group_name'],
+ convertObjectPropsToCamelCase: ['group_name'],
+ convertObjectPropsToSnakeCase: ['groupName'],
+ };
+
+ expect(
+ testFunction(mockObjNested, {
+ dropKeys: dropKeys[functionName],
+ }),
+ ).toEqual(expected[functionName]);
+ });
+
+ it('discards properties mentioned in `dropKeys` array when `deep` is true', () => {
+ const expected = {
+ convertObjectProps: {
+ project_name_converted: 'GitLab CE',
+ license_type_converted: 'MIT',
+ tech_stack_converted: {
+ backend_converted: 'Ruby',
+ frontend_framework_converted: 'Vue',
+ },
+ },
+ convertObjectPropsToCamelCase: {
+ projectName: 'GitLab CE',
+ licenseType: 'MIT',
+ techStack: {
+ backend: 'Ruby',
+ frontendFramework: 'Vue',
+ },
+ },
+ convertObjectPropsToSnakeCase: {
+ project_name: 'GitLab CE',
+ license_type: 'MIT',
+ tech_stack: {
+ backend: 'Ruby',
+ frontend_framework: 'Vue',
+ },
+ },
+ };
+
+ const dropKeys = {
+ convertObjectProps: ['group_name', 'database'],
+ convertObjectPropsToCamelCase: ['group_name', 'database'],
+ convertObjectPropsToSnakeCase: ['groupName', 'database'],
+ };
+
+ expect(
+ testFunction(mockObjNested, {
+ dropKeys: dropKeys[functionName],
+ deep: true,
+ }),
+ ).toEqual(expected[functionName]);
+ });
+ });
+
+ describe('when options.ignoreKeyNames is provided', () => {
+ it('leaves properties mentioned in `ignoreKeyNames` array intact', () => {
+ const expected = {
+ convertObjectProps: {
+ project_name_converted: 'GitLab CE',
+ group_name: 'GitLab.org',
+ license_type_converted: 'MIT',
+ tech_stack_converted: {
+ backend: 'Ruby',
+ frontend_framework: 'Vue',
+ database: 'PostgreSQL',
+ },
+ },
+ convertObjectPropsToCamelCase: {
+ projectName: 'GitLab CE',
+ group_name: 'GitLab.org',
+ licenseType: 'MIT',
+ techStack: {
+ backend: 'Ruby',
+ frontend_framework: 'Vue',
+ database: 'PostgreSQL',
+ },
+ },
+ convertObjectPropsToSnakeCase: {
+ project_name: 'GitLab CE',
+ groupName: 'GitLab.org',
+ license_type: 'MIT',
+ tech_stack: {
+ backend: 'Ruby',
+ frontendFramework: 'Vue',
+ database: 'PostgreSQL',
+ },
+ },
+ };
+
+ const ignoreKeyNames = {
+ convertObjectProps: ['group_name'],
+ convertObjectPropsToCamelCase: ['group_name'],
+ convertObjectPropsToSnakeCase: ['groupName'],
+ };
+
+ expect(
+ testFunction(mockObjNested, {
+ ignoreKeyNames: ignoreKeyNames[functionName],
+ }),
+ ).toEqual(expected[functionName]);
+ });
+
+ it('leaves properties mentioned in `ignoreKeyNames` array intact when `deep` is true', () => {
+ const expected = {
+ convertObjectProps: {
+ project_name_converted: 'GitLab CE',
+ group_name: 'GitLab.org',
+ license_type_converted: 'MIT',
+ tech_stack_converted: {
+ backend_converted: 'Ruby',
+ frontend_framework: 'Vue',
+ database_converted: 'PostgreSQL',
+ },
+ },
+ convertObjectPropsToCamelCase: {
+ projectName: 'GitLab CE',
+ group_name: 'GitLab.org',
+ licenseType: 'MIT',
+ techStack: {
+ backend: 'Ruby',
+ frontend_framework: 'Vue',
+ database: 'PostgreSQL',
+ },
+ },
+ convertObjectPropsToSnakeCase: {
+ project_name: 'GitLab CE',
+ groupName: 'GitLab.org',
+ license_type: 'MIT',
+ tech_stack: {
+ backend: 'Ruby',
+ frontendFramework: 'Vue',
+ database: 'PostgreSQL',
+ },
+ },
+ };
+
+ const ignoreKeyNames = {
+ convertObjectProps: ['group_name', 'frontend_framework'],
+ convertObjectPropsToCamelCase: ['group_name', 'frontend_framework'],
+ convertObjectPropsToSnakeCase: ['groupName', 'frontendFramework'],
+ };
+
+ expect(
+ testFunction(mockObjNested, {
+ deep: true,
+ ignoreKeyNames: ignoreKeyNames[functionName],
+ }),
+ ).toEqual(expected[functionName]);
+ });
+ });
+ });
+ });
+ });
+
+ describe('roundOffFloat', () => {
+ it('Rounds off decimal places of a float number with provided precision', () => {
+ expect(commonUtils.roundOffFloat(3.141592, 3)).toBeCloseTo(3.142);
+ });
+
+ it('Rounds off a float number to a whole number when provided precision is zero', () => {
+ expect(commonUtils.roundOffFloat(3.141592, 0)).toBeCloseTo(3);
+ expect(commonUtils.roundOffFloat(3.5, 0)).toBeCloseTo(4);
+ });
+
+ it('Rounds off a float number to the nearest 10, 100, 1000 and so on when provided precision is below 0', () => {
+ expect(commonUtils.roundOffFloat(34567.14159, -1)).toBeCloseTo(34570);
+ expect(commonUtils.roundOffFloat(34567.14159, -2)).toBeCloseTo(34600);
+ expect(commonUtils.roundOffFloat(34567.14159, -3)).toBeCloseTo(35000);
+ expect(commonUtils.roundOffFloat(34567.14159, -4)).toBeCloseTo(30000);
+ expect(commonUtils.roundOffFloat(34567.14159, -5)).toBeCloseTo(0);
+ });
+ });
+
+ describe('searchBy', () => {
+ const searchSpace = {
+ iid: 1,
+ reference: '&1',
+ title: 'Error omnis quos consequatur ullam a vitae sed omnis libero cupiditate.',
+ url: '/groups/gitlab-org/-/epics/1',
+ };
+
+ it('returns null when `query` or `searchSpace` params are empty/undefined', () => {
+ expect(commonUtils.searchBy('omnis', null)).toBeNull();
+ expect(commonUtils.searchBy('', searchSpace)).toBeNull();
+ expect(commonUtils.searchBy()).toBeNull();
+ });
+
+ it('returns object with matching props based on `query` & `searchSpace` params', () => {
+ // String `omnis` is found only in `title` prop so return just that
+ expect(commonUtils.searchBy('omnis', searchSpace)).toEqual(
+ expect.objectContaining({
+ title: searchSpace.title,
+ }),
+ );
+
+ // String `1` is found in both `iid` and `reference` props so return both
+ expect(commonUtils.searchBy('1', searchSpace)).toEqual(
+ expect.objectContaining({
+ iid: searchSpace.iid,
+ reference: searchSpace.reference,
+ }),
+ );
+
+ // String `/epics/1` is found in `url` prop so return just that
+ expect(commonUtils.searchBy('/epics/1', searchSpace)).toEqual(
+ expect.objectContaining({
+ url: searchSpace.url,
+ }),
+ );
+ });
+ });
+
+ describe('isScopedLabel', () => {
+ it('returns true when `::` is present in title', () => {
+ expect(commonUtils.isScopedLabel({ title: 'foo::bar' })).toBe(true);
+ });
+
+ it('returns false when `::` is not present', () => {
+ expect(commonUtils.isScopedLabel({ title: 'foobar' })).toBe(false);
+ });
+ });
+
+ describe('getDashPath', () => {
+ it('returns the path following /-/', () => {
+ expect(commonUtils.getDashPath('/some/-/url-with-dashes-/')).toEqual('url-with-dashes-/');
+ });
+
+ it('returns null when no path follows /-/', () => {
+ expect(commonUtils.getDashPath('/some/url')).toEqual(null);
+ });
+ });
+});
diff --git a/spec/frontend/lib/utils/datetime_utility_spec.js b/spec/frontend/lib/utils/datetime_utility_spec.js
index e584150ba70..f6878c7c920 100644
--- a/spec/frontend/lib/utils/datetime_utility_spec.js
+++ b/spec/frontend/lib/utils/datetime_utility_spec.js
@@ -1,4 +1,6 @@
import { __, s__ } from '~/locale';
+import $ from 'jquery';
+import '~/commons/bootstrap';
import * as datetimeUtility from '~/lib/utils/datetime_utility';
describe('Date time utils', () => {
@@ -472,6 +474,23 @@ describe('getDateInFuture', () => {
});
});
+describe('isValidDate', () => {
+ it.each`
+ valueToCheck | isValid
+ ${new Date()} | ${true}
+ ${new Date('December 17, 1995 03:24:00')} | ${true}
+ ${new Date('1995-12-17T03:24:00')} | ${true}
+ ${new Date('foo')} | ${false}
+ ${5} | ${false}
+ ${''} | ${false}
+ ${false} | ${false}
+ ${undefined} | ${false}
+ ${null} | ${false}
+ `('returns $isValid when called with $valueToCheck', ({ valueToCheck, isValid }) => {
+ expect(datetimeUtility.isValidDate(valueToCheck)).toBe(isValid);
+ });
+});
+
describe('getDatesInRange', () => {
it('returns an empty array if 1st or 2nd argument is not a Date object', () => {
const d1 = new Date('2019-01-01');
@@ -563,3 +582,23 @@ describe('approximateDuration', () => {
expect(datetimeUtility.approximateDuration(seconds)).toBe(approximation);
});
});
+
+describe('localTimeAgo', () => {
+ beforeEach(() => {
+ document.body.innerHTML = `<time title="some time" datetime="2020-02-18T22:22:32Z">1 hour ago</time>`;
+ });
+
+ it.each`
+ timeagoArg | title | dataOriginalTitle
+ ${false} | ${'some time'} | ${null}
+ ${true} | ${''} | ${'Feb 18, 2020 10:22pm GMT+0000'}
+ `('sets title and data-original-title as expected when timeago is $timeagoArg', ({ timeagoArg, title, dataOriginalTitle }) => {
+ const element = document.querySelector('time');
+ datetimeUtility.localTimeAgo($(element), timeagoArg);
+
+ jest.runAllTimers();
+
+ expect(element.getAttribute('data-original-title')).toBe(dataOriginalTitle);
+ expect(element.getAttribute('title')).toBe(title);
+ });
+});
diff --git a/spec/frontend/lib/utils/icon_utils_spec.js b/spec/frontend/lib/utils/icon_utils_spec.js
index 816d634ad15..f798dc6744d 100644
--- a/spec/frontend/lib/utils/icon_utils_spec.js
+++ b/spec/frontend/lib/utils/icon_utils_spec.js
@@ -1,10 +1,14 @@
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
-import * as iconUtils from '~/lib/utils/icon_utils';
+import { clearSvgIconPathContentCache, getSvgIconPathContent } from '~/lib/utils/icon_utils';
describe('Icon utils', () => {
describe('getSvgIconPathContent', () => {
let spriteIcons;
+ let axiosMock;
+ const mockName = 'mockIconName';
+ const mockPath = 'mockPath';
+ const mockIcons = `<svg><symbol id="${mockName}"><path d="${mockPath}"/></symbol></svg>`;
beforeAll(() => {
spriteIcons = gon.sprite_icons;
@@ -15,45 +19,63 @@ describe('Icon utils', () => {
gon.sprite_icons = spriteIcons;
});
- let axiosMock;
- let mockEndpoint;
- const mockName = 'mockIconName';
- const mockPath = 'mockPath';
- const getIcon = () => iconUtils.getSvgIconPathContent(mockName);
-
beforeEach(() => {
axiosMock = new MockAdapter(axios);
- mockEndpoint = axiosMock.onGet(gon.sprite_icons);
});
afterEach(() => {
axiosMock.restore();
+ clearSvgIconPathContentCache();
});
- it('extracts svg icon path content from sprite icons', () => {
- mockEndpoint.replyOnce(
- 200,
- `<svg><symbol id="${mockName}"><path d="${mockPath}"/></symbol></svg>`,
- );
-
- return getIcon().then(path => {
- expect(path).toBe(mockPath);
+ describe('when the icons can be loaded', () => {
+ beforeEach(() => {
+ axiosMock.onGet(gon.sprite_icons).reply(200, mockIcons);
});
- });
- it('returns null if icon path content does not exist', () => {
- mockEndpoint.replyOnce(200, ``);
+ it('extracts svg icon path content from sprite icons', () => {
+ return getSvgIconPathContent(mockName).then(path => {
+ expect(path).toBe(mockPath);
+ });
+ });
- return getIcon().then(path => {
- expect(path).toBe(null);
+ it('returns null if icon path content does not exist', () => {
+ return getSvgIconPathContent('missing-icon').then(path => {
+ expect(path).toBe(null);
+ });
});
});
- it('returns null if an http error occurs', () => {
- mockEndpoint.replyOnce(500);
+ describe('when the icons cannot be loaded on the first 2 tries', () => {
+ beforeEach(() => {
+ axiosMock
+ .onGet(gon.sprite_icons)
+ .replyOnce(500)
+ .onGet(gon.sprite_icons)
+ .replyOnce(500)
+ .onGet(gon.sprite_icons)
+ .reply(200, mockIcons);
+ });
+
+ it('returns null', () => {
+ return getSvgIconPathContent(mockName).then(path => {
+ expect(path).toBe(null);
+ });
+ });
- return getIcon().then(path => {
- expect(path).toBe(null);
+ it('extracts svg icon path content, after 2 attempts', () => {
+ return getSvgIconPathContent(mockName)
+ .then(path1 => {
+ expect(path1).toBe(null);
+ return getSvgIconPathContent(mockName);
+ })
+ .then(path2 => {
+ expect(path2).toBe(null);
+ return getSvgIconPathContent(mockName);
+ })
+ .then(path3 => {
+ expect(path3).toBe(mockPath);
+ });
});
});
});
diff --git a/spec/frontend/lib/utils/mock_data.js b/spec/frontend/lib/utils/mock_data.js
new file mode 100644
index 00000000000..c466b0cd1ed
--- /dev/null
+++ b/spec/frontend/lib/utils/mock_data.js
@@ -0,0 +1,8 @@
+export const faviconDataUrl =
+ 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAMAAABEpIrGAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAACcFBMVEX////iQyniQyniQyniQyniQyniQyniQyniQynhRiriQyniQyniQyniQyniQyniQyniQyniQyniQyniQyniQyniQyniQyniQyniQyniQyniQyniRCniQyniQyniQyniQyniQyniQyniQyniQyniQyniQyniQyniQyniQyniQynhQiniQiniQiniQinhQinpUSjqUSjqTyjqTyjqTyjlSCniRCniQynjRCjqTyjsZSjrWyj8oib9kSb8pyb9pib8oyb8fyb3ZSb4Zib8fCb8oyb8oyb8oyb8pCb8cSbiQyn7bCb8cib8oyb8oSb8bSbtVSjpTij8nyb8oyb8oyb8lCb2Yyf3ZCf8mCb8oyb8oyb8oyb8iib8bSbiRCn8gyb8oyb8eCbpTinrUSj8oyb8oyb8oyb8pSb8bib4Zif0YCf8byb8oyb8oyb8oyb7oib8oyb8nCbjRSn9bib8ayb8nib8oyb8oyb8oyb8kSbpTyjpTyj8jib8oyb8oyb8oyb8fib0Xyf2ZSb8gCb8oyb6pSb8oyb8dib+cCbgQCnjRSn8cCb8oib8oyb8oyb8oybqUCjnSyn8bCb8oyb8oyb8oyb8myb2YyfyXyf8oyb8oyb8hibhQSn+bib8iSb8oyb8qCb+fSbmSSnqTyj8oib9pCb1YifxXyf7pSb8oCb8pCb+mCb0fCf8pSb7hSXvcSjiQyniQinqTyj9kCb9bib9byb+cCbqUSjiRCnsVCj+cSb8pib8bCb8bSbgQCn7bCb8bibjRSn8oyb8ayb8oib8aib8pCbjRCn8pybhQinhQSn8pSb7ayb7aSb6aib8eib///8IbM+7AAAAr3RSTlMBA3NtX2vT698HGQcRLwWLiXnv++3V+eEd/R8HE2V/Y5HjyefdFw99YWfJ+/3nwQP78/HvX1VTQ/kdA2HzbQXj9fX79/3DGf379/33T/v99/f7ba33+/f1+9/18/v59V339flzF/H9+fX3/fMhBwOh9/v5/fmvBV/z+fP3Awnp9/f38+UFgff7+/37+4c77/f7/flFz/f59dFr7/v98Wnr+/f3I5/197EDBU1ZAwUD8/kLUwAAAAFiS0dEAIgFHUgAAAAHdElNRQfhBQoLHiBV6/1lAAACHUlEQVQ4y41TZXsTQRCe4FAIUigN7m7FXY+iLRQKBG2x4g7BjhZ3Le7uMoEkFJprwyQk0CC/iZnNhUZaHt4vt6/szO7cHcD/wFKjZrJWq3YMq1M3eVc9rFzXR2yQkuA3RGxkjZLGiEk9miA2tURJs1RsnhhokYYtzaU13WZDbBVnW1sjo43J2vI6tZ0lLtFeAh1M0lECneI7dGYtrUtk3RUVIKaEJR25qw27yT0s3W0qEHuPlB4RradivXo7GX36xnbo51SQ+fWHARmCgYMGDxkaxbD3SssYPmIkwKgPLrfA87EETTg/fVaSa/SYsQDjSsd7DcGEsr+BieVKmaRNBsjUtClTfUI900y/5Mt05c8oJQKYSURZ2UqYFa0w283M588JEM2BuRwI5EqT8nmmXzZf4l8XsGNfCIv4QcHFklhiBpaqAsuC4tghj+ySyOdjeJYrP7RCCuR/E5tWAqxaLcmCNSyujdxjHZdbn8UHoA0bN/GoNm8hjQJb/ZzYpo6w3TB27JRduxxqrA7YzbWCezixN8RD2Oc2/Ptlfx7o5uT1A4XMiwzj4HfEikNe7+Ew0ZGjeuW70eEYaeHjxomTiKd++E4XnKGz8d+HDufOB3Ky3RcwdNF1qZiKLyf/B44r2tWf15wV143cwI2qfi8dbtKtX6Hbd+6G74EDqkTm/QcPH/0ufFyNLXjy9NnzF9Xb8BJevYY38C+8fZcg/AF3QTYemVkCwwAAACV0RVh0ZGF0ZTpjcmVhdGUAMjAxNy0wNS0xMFQxMTozMDozMiswMjowMMzup8UAAAAldEVYdGRhdGU6bW9kaWZ5ADIwMTctMDUtMTBUMTE6MzA6MzIrMDI6MDC9sx95AAAAAElFTkSuQmCC';
+
+export const overlayDataUrl =
+ 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAA85JREFUWAntVllIVGEUPv/9b46O41KplYN7PeRkti8TjQlhCUGh3MmeQugpIsGKAi2soIcIooiohxYKK2daqDAlIpIiWwxtQaJcaHE0d5tMrbn37z9XRqfR0TvVW56Hudf//uec72zfEWBCJjIwkYGJDPzvGSD/KgExN3Oi2Q+2DJgSDYQEMwItVGH1iZGmJw/Si1y+/PwVAMYYib22MYc/8hVQFgKDEfYoId0KYzagAQebsos/ewMZoeB9wdffcTYpQSaCTWHKoqSQaDk7zkIt0+aCUR8BelEHrf3dUNv9AcqbnsHtT5UKB/hTASh0SLYjnjb/CIDRJi0XiFAaJOpCD8zLpdb4NB66b1OfelthX815dtdRRfiti2aAXLvVLiMQ6olGyztGDkSo4JGGXk8/QFdGpYzpHG2GBQTDhtgVhPEaVbbVpvI6GJz22rv4TcAfrYI1x7Rj5MWWAppomKFVVb2302SFzUkZHAbkG+0b1+Gh77yNYjrmqnWTrLBLRxdvBWv8qlFujH/kYjJYyvLkj71t78zAUvzMAMnHhpN4zf9UREJhd8omyssxu1IgazQDwDnHUcNuH6vhPIE1fmuBzHt74Hn7W89jWGtcAjoaIDOFrdcMYJBkgOCoaRF0Lj0oglddDbCj6tRvKjphEpgjkzEQs2YAKsNxMzjn3nKurhzK+Ly7xe28ua8TwgMMcHJZnvvT0BPtEEKM4tDJ+C8GvIIk4ylINIXVZ0EUKJxYuh3mhCeokbudl6TtVc88dfBdLwbyaWB6zQCYQJpBYSrDGQxBQ/ZWRM2B+VNmQnVnHWx7elyNuL2/R336co7KyJR8CL9oLgEuFlREevWUkEl6uGwpVEG4FBm0OEf9N10NMgPlvWYAuNVwsWDKvcUNYsHUWTCZ13ysyFEXe6TO6aC8CUr9IiK+A05TQrc8yjwmxARHeeMAPlfQJw+AQRwu0YhL/GDXi9NwufG+S8dYkuYMqIb4SsWthotlNMOUCOM6r+G9cqXxPmd1dqrBav/o1zJy2l5/NUjJA/VORwYuFnOUaTQcPs9wMqwV++Xv8oADxKAcZ8nLPr8AoGW+xR6HSqYk3GodAz2QNj0V+Gr26dT9ASNH5239Pf0gktVNWZca8ZvfAFBprWS6hSu1pqt++Y0PD+WIwDAhIWQGtzvSHDbcodfFUFB9hg1Gjs5LXqIdFL+acFBl+FddqYwdxsWC3I70OvgfUaA65zhq2O2c8VxYcyIGFTVlXegYtvCXANCQZJMobjVcLMjtSK/IcEgyOOe8Ve5w7ryKDefp2P3+C/5ohv8HZmVLAAAAAElFTkSuQmCC';
+
+export const faviconWithOverlayDataUrl =
+ 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAGtElEQVRYR8WXf3CT9R3H35/nSdIQIktrCf0RStI0FYRjVBAccxTq5MDBKUoz4ZyjbPO87q4yBsPDMdExTjlvIsdQexyI0oMBeuKhdjsNHhwcMgpjIlublLIm/UlJKZSSJs/z/e6+T5v0CQ22wB/7/pPck8/383l9fj6fEOec8H88NAjAS1LwknsFSVLU8WXd1rtm85LUeKnwGQKzjj3s33azvsEAAEIlnn8ByHL4/Pa7BgAQLCm8QOBOh88vDQkQeMxjMkcQEYKqYsyJWWPhgs/80TsFafzROJtkNIXFfYI0pfXqPeennjqlxPUNikBoTuEmEF+lCRBV3G0aQiWFrwH8d30AWJubGdiEfZzdGqDEEwbICnADQGGHry7zTr0X94IlnnMACggwAWh0+PxOvb5EBGqmTTNkj7ySxWS62C+g5Usm1Zn95YXG24UQ+r5n75Li6Ux4LBkyc7/4t5YSLSr6Lgg9UvBLcKocMEYKON/gGB3YoA/bcGFCczzLQdieLE9bHL66FakBSjzCU0cSAHDa4at7aLhG9XLBEk8zAVnxZxyIEhBy+PwFgwAafpxvNzK5NZUhrX28JA07Cl6SmtvcOUwm4ZAouHj7ad+jMrN1dqb3iG7oS4EYPh2etQS+XiesC8TQ3ZD3yZJsHuUPgbMcI+ej5v3ncv5PasNlk1p7JJnzJL+I0/O5h+u0VCdqIDi78AQRHuirft3hYJzQPvawPydVdPI+/OnTnNNKBjYVXHRa8rFFGeb4w1he0wZ7d/84IXTEhxzxUsgitB2LPFGwvgGUfLSeZUpEXqEqrIdz0nr4iHOUfeOccb/tNMtutzWHPeWcJc0aMxm5lkxYDGloj1zB+Sv/RXXTSXzaeBwSY3j+bHNv2bdtMYCbpHtRkNFd36xFQN3tXkZhvgP1fdPi5kMEXL4oIXKVAA58M8aCVQs84BYLXi5aDq+zGJTqYr+i4PV2vHxmJ/7WUoOn2i/jz6yhW7JjrdSV8U4fQFV+I2Q4UIsedMCSSlcsgp72WtnSajOhzDsBNtsYfFD8e+Rbs4fdIG98uw9vnj+AX7FWvk4NHZOXXphF/INx2SpJIU2L8L4GDAoMwlP9kWSg6awcKVs83tyUnY5Dj75+W8bjutae3o5d9X/HTiWAuUtOS6RUOR8Hp48TxjgU/AMSeKJ1Ej/tMWXG1sxwGt98sBxe5+xhe64XVLiK2Z9XwNgdRLXyzQsC4ENwelIHAFxDBOdh1qdCdNLCoon8RnY+HZ6/+TtzPhTZweAxlJ94C5VqoI2U3a7rACzJjQqgBd24CGscos1kxPQZ38fqSU/jhQkDvN9lrKG7FeUnNuPVKcvwYOb4hGgvi2HSx8vwRKyJkVLl+hk43gdBAcfADBD1cA4RXIdZ1EN1Zjqem+DGoUc2oigjMUlvaV8YL/1qPVpuhOG+JwdH5m1Okn3m6Eacaz3V2jeI9uTbVYY6AKOSKw8MX0MBg2lXjh3r3Hk4s7ASdrMtSWxnoBpZIzIwP3e69lxv3Gay4q/F6zDJ5kq6s6amEnsafJ0Db8P9JKkx1w5wPJuY36IToojgNMzb8rLwmsuB2kW7YDWMSCgTg+YXx9+AQZKxdUaFZiju+a2Mi8uvnH0f2/2f9g4AVE4z4LlTilrlehag9xIpEam4jO4DXfdaV97nwtH5byW137VYD5Yc2YAz4YAGIYx2RLq0z1Sex8l//fUWfBI83jh4Kd1PEuAwqVGjWEwSS+nJJmt0sWu86d0frMQCR/LbWQ8hDAxlXMgUV69Q67ubv0q5FUNAlHKmVLnXE/gfREpUiaQHqAizXbO0UN98BMTSo39Cw7UW7E2Rc728qJGHP68ASbQyNYCQTkAUzCSwQ+CwvSjnsQPGLOnI/C0YO3Lwxq5yhhtqb1KNpGqT1TXvigJU0jh33xpAf7NymoGNDJ9sJtPkYuNkqTh7KnY8vGaoeZPy93+GA1joe4kzzv/SVLqvYngA/dFgVfnlb8tjtm6Ux+I39y/Gqone24IQM+GxL15UO3q7WrhsnhJatCs8PAC9md3OrPK0goaDyEj7uXsuXi0qg4HkIUGE52XHNqmXIl0RGOiHoUV7xb+v5K14SC39At79Ximdhc8ekjImuiyjsXryUszLnY40yThIhSi4bbUHsbfBJ6ZKE5dpQdz4HQOgf2a8tLvklY+M6cuvSnJummxSZ46+X+7biMzaRnSu84IauNYsE5HCOX+HDCPWi7DrKW8/BTcVZ2UN8Me57kc5448TaCYR5XJwC0BtHMwPjs/SgAP1pfuCqSL8Pxhr/wunLWAOAAAAAElFTkSuQmCC';
diff --git a/spec/frontend/lib/utils/text_utility_spec.js b/spec/frontend/lib/utils/text_utility_spec.js
index 803b3629524..dc8f6c64136 100644
--- a/spec/frontend/lib/utils/text_utility_spec.js
+++ b/spec/frontend/lib/utils/text_utility_spec.js
@@ -94,8 +94,27 @@ describe('text_utility', () => {
});
describe('convertToCamelCase', () => {
- it('converts snake_case string to camelCase string', () => {
- expect(textUtils.convertToCamelCase('snake_case')).toBe('snakeCase');
+ it.each`
+ txt | result
+ ${'a_snake_cased_string'} | ${'aSnakeCasedString'}
+ ${'_leading_underscore'} | ${'_leadingUnderscore'}
+ ${'__leading_underscores'} | ${'__leadingUnderscores'}
+ ${'trailing_underscore_'} | ${'trailingUnderscore_'}
+ ${'trailing_underscores__'} | ${'trailingUnderscores__'}
+ `('converts string "$txt" to "$result"', ({ txt, result }) => {
+ expect(textUtils.convertToCamelCase(txt)).toBe(result);
+ });
+
+ it.each`
+ txt
+ ${'__withoutMiddleUnderscores__'}
+ ${''}
+ ${'with spaces'}
+ ${'with\nnew\r\nlines'}
+ ${'_'}
+ ${'___'}
+ `('does not modify string "$txt"', ({ txt }) => {
+ expect(textUtils.convertToCamelCase(txt)).toBe(txt);
});
});
diff --git a/spec/frontend/lib/utils/unit_format/formatter_factory_spec.js b/spec/frontend/lib/utils/unit_format/formatter_factory_spec.js
new file mode 100644
index 00000000000..26b942c3567
--- /dev/null
+++ b/spec/frontend/lib/utils/unit_format/formatter_factory_spec.js
@@ -0,0 +1,276 @@
+import {
+ numberFormatter,
+ suffixFormatter,
+ scaledSIFormatter,
+ scaledBinaryFormatter,
+} from '~/lib/utils/unit_format/formatter_factory';
+
+describe('unit_format/formatter_factory', () => {
+ describe('numberFormatter', () => {
+ let formatNumber;
+ beforeEach(() => {
+ formatNumber = numberFormatter();
+ });
+
+ it('formats a integer', () => {
+ expect(formatNumber(1)).toBe('1');
+ expect(formatNumber(100)).toBe('100');
+ expect(formatNumber(1000)).toBe('1,000');
+ expect(formatNumber(10000)).toBe('10,000');
+ expect(formatNumber(1000000)).toBe('1,000,000');
+ });
+
+ it('formats a floating point number', () => {
+ expect(formatNumber(0.1)).toBe('0.1');
+ expect(formatNumber(0.1, 0)).toBe('0');
+ expect(formatNumber(0.1, 2)).toBe('0.10');
+ expect(formatNumber(0.1, 3)).toBe('0.100');
+
+ expect(formatNumber(12.345)).toBe('12.345');
+ expect(formatNumber(12.345, 2)).toBe('12.35');
+ expect(formatNumber(12.345, 4)).toBe('12.3450');
+ });
+
+ it('formats a large integer with a length limit', () => {
+ expect(formatNumber(10 ** 7, undefined)).toBe('10,000,000');
+ expect(formatNumber(10 ** 7, undefined, 9)).toBe('1.00e+7');
+ expect(formatNumber(10 ** 7, undefined, 10)).toBe('10,000,000');
+ });
+ });
+
+ describe('suffixFormatter', () => {
+ let formatSuffix;
+ beforeEach(() => {
+ formatSuffix = suffixFormatter('pop.', undefined);
+ });
+
+ it('formats an integer', () => {
+ expect(formatSuffix(1)).toBe('1pop.');
+ expect(formatSuffix(100)).toBe('100pop.');
+ expect(formatSuffix(1000)).toBe('1,000pop.');
+ expect(formatSuffix(10000)).toBe('10,000pop.');
+ expect(formatSuffix(1000000)).toBe('1,000,000pop.');
+ });
+
+ it('formats a floating point number', () => {
+ expect(formatSuffix(0.1)).toBe('0.1pop.');
+ expect(formatSuffix(0.1, 0)).toBe('0pop.');
+ expect(formatSuffix(0.1, 2)).toBe('0.10pop.');
+ expect(formatSuffix(0.1, 3)).toBe('0.100pop.');
+
+ expect(formatSuffix(12.345)).toBe('12.345pop.');
+ expect(formatSuffix(12.345, 2)).toBe('12.35pop.');
+ expect(formatSuffix(12.345, 4)).toBe('12.3450pop.');
+ });
+
+ it('formats a negative integer', () => {
+ expect(formatSuffix(-1)).toBe('-1pop.');
+ expect(formatSuffix(-100)).toBe('-100pop.');
+ expect(formatSuffix(-1000)).toBe('-1,000pop.');
+ expect(formatSuffix(-10000)).toBe('-10,000pop.');
+ expect(formatSuffix(-1000000)).toBe('-1,000,000pop.');
+ });
+
+ it('formats a negative floating point number', () => {
+ expect(formatSuffix(-0.1)).toBe('-0.1pop.');
+ expect(formatSuffix(-0.1, 0)).toBe('-0pop.');
+ expect(formatSuffix(-0.1, 2)).toBe('-0.10pop.');
+ expect(formatSuffix(-0.1, 3)).toBe('-0.100pop.');
+
+ expect(formatSuffix(-12.345)).toBe('-12.345pop.');
+ expect(formatSuffix(-12.345, 2)).toBe('-12.35pop.');
+ expect(formatSuffix(-12.345, 4)).toBe('-12.3450pop.');
+ });
+
+ it('formats a large integer', () => {
+ expect(formatSuffix(10 ** 7)).toBe('10,000,000pop.');
+ expect(formatSuffix(10 ** 10)).toBe('10,000,000,000pop.');
+ });
+
+ it('formats a large integer with a length limit', () => {
+ expect(formatSuffix(10 ** 7, undefined, 10)).toBe('1.00e+7pop.');
+ expect(formatSuffix(10 ** 10, undefined, 10)).toBe('1.00e+10pop.');
+ });
+ });
+
+ describe('scaledSIFormatter', () => {
+ describe('scaled format', () => {
+ let formatDecimalBytes;
+
+ beforeEach(() => {
+ formatDecimalBytes = scaledSIFormatter('B');
+ });
+
+ it('formats bytes', () => {
+ expect(formatDecimalBytes(12.345)).toBe('12.345B');
+ expect(formatDecimalBytes(12.345, 0)).toBe('12B');
+ expect(formatDecimalBytes(12.345, 1)).toBe('12.3B');
+ expect(formatDecimalBytes(12.345, 2)).toBe('12.35B');
+ });
+
+ it('formats bytes in a decimal scale', () => {
+ expect(formatDecimalBytes(1)).toBe('1B');
+ expect(formatDecimalBytes(10)).toBe('10B');
+ expect(formatDecimalBytes(10 ** 2)).toBe('100B');
+ expect(formatDecimalBytes(10 ** 3)).toBe('1kB');
+ expect(formatDecimalBytes(10 ** 4)).toBe('10kB');
+ expect(formatDecimalBytes(10 ** 5)).toBe('100kB');
+ expect(formatDecimalBytes(10 ** 6)).toBe('1MB');
+ expect(formatDecimalBytes(10 ** 7)).toBe('10MB');
+ expect(formatDecimalBytes(10 ** 8)).toBe('100MB');
+ expect(formatDecimalBytes(10 ** 9)).toBe('1GB');
+ expect(formatDecimalBytes(10 ** 10)).toBe('10GB');
+ expect(formatDecimalBytes(10 ** 11)).toBe('100GB');
+ });
+ });
+
+ describe('scaled format with offset', () => {
+ let formatGigaBytes;
+
+ beforeEach(() => {
+ // formats gigabytes
+ formatGigaBytes = scaledSIFormatter('B', 3);
+ });
+
+ it('formats floating point numbers', () => {
+ expect(formatGigaBytes(12.345)).toBe('12.345GB');
+ expect(formatGigaBytes(12.345, 0)).toBe('12GB');
+ expect(formatGigaBytes(12.345, 1)).toBe('12.3GB');
+ expect(formatGigaBytes(12.345, 2)).toBe('12.35GB');
+ });
+
+ it('formats large numbers scaled', () => {
+ expect(formatGigaBytes(1)).toBe('1GB');
+ expect(formatGigaBytes(1, 1)).toBe('1.0GB');
+ expect(formatGigaBytes(10)).toBe('10GB');
+ expect(formatGigaBytes(10 ** 2)).toBe('100GB');
+ expect(formatGigaBytes(10 ** 3)).toBe('1TB');
+ expect(formatGigaBytes(10 ** 4)).toBe('10TB');
+ expect(formatGigaBytes(10 ** 5)).toBe('100TB');
+ expect(formatGigaBytes(10 ** 6)).toBe('1PB');
+ expect(formatGigaBytes(10 ** 7)).toBe('10PB');
+ expect(formatGigaBytes(10 ** 8)).toBe('100PB');
+ expect(formatGigaBytes(10 ** 9)).toBe('1EB');
+ });
+
+ it('formatting of too large numbers is not supported', () => {
+ // formatting YB is out of range
+ expect(() => scaledSIFormatter('B', 9)).toThrow();
+ });
+ });
+
+ describe('scaled format with negative offset', () => {
+ let formatMilligrams;
+
+ beforeEach(() => {
+ formatMilligrams = scaledSIFormatter('g', -1);
+ });
+
+ it('formats floating point numbers', () => {
+ expect(formatMilligrams(1.0)).toBe('1mg');
+ expect(formatMilligrams(12.345)).toBe('12.345mg');
+ expect(formatMilligrams(12.345, 0)).toBe('12mg');
+ expect(formatMilligrams(12.345, 1)).toBe('12.3mg');
+ expect(formatMilligrams(12.345, 2)).toBe('12.35mg');
+ });
+
+ it('formats large numbers scaled', () => {
+ expect(formatMilligrams(10)).toBe('10mg');
+ expect(formatMilligrams(10 ** 2)).toBe('100mg');
+ expect(formatMilligrams(10 ** 3)).toBe('1g');
+ expect(formatMilligrams(10 ** 4)).toBe('10g');
+ expect(formatMilligrams(10 ** 5)).toBe('100g');
+ expect(formatMilligrams(10 ** 6)).toBe('1kg');
+ expect(formatMilligrams(10 ** 7)).toBe('10kg');
+ expect(formatMilligrams(10 ** 8)).toBe('100kg');
+ });
+
+ it('formats negative numbers scaled', () => {
+ expect(formatMilligrams(-12.345)).toBe('-12.345mg');
+ expect(formatMilligrams(-12.345, 0)).toBe('-12mg');
+ expect(formatMilligrams(-12.345, 1)).toBe('-12.3mg');
+ expect(formatMilligrams(-12.345, 2)).toBe('-12.35mg');
+
+ expect(formatMilligrams(-10)).toBe('-10mg');
+ expect(formatMilligrams(-100)).toBe('-100mg');
+ expect(formatMilligrams(-(10 ** 4))).toBe('-10g');
+ });
+ });
+ });
+
+ describe('scaledBinaryFormatter', () => {
+ describe('scaled format', () => {
+ let formatScaledBin;
+
+ beforeEach(() => {
+ formatScaledBin = scaledBinaryFormatter('B');
+ });
+
+ it('formats bytes', () => {
+ expect(formatScaledBin(12.345)).toBe('12.345B');
+ expect(formatScaledBin(12.345, 0)).toBe('12B');
+ expect(formatScaledBin(12.345, 1)).toBe('12.3B');
+ expect(formatScaledBin(12.345, 2)).toBe('12.35B');
+ });
+
+ it('formats bytes in a binary scale', () => {
+ expect(formatScaledBin(1)).toBe('1B');
+ expect(formatScaledBin(10)).toBe('10B');
+ expect(formatScaledBin(100)).toBe('100B');
+ expect(formatScaledBin(1000)).toBe('1,000B');
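+ // values only scale at multiples of 1,024: 10,000 B is 10,000 / 1,024 ≈ 9.766 KiB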
+ expect(formatScaledBin(10000)).toBe('9.766KiB');
+
+ expect(formatScaledBin(1 * 1024)).toBe('1KiB');
+ expect(formatScaledBin(10 * 1024)).toBe('10KiB');
+ expect(formatScaledBin(100 * 1024)).toBe('100KiB');
+
+ expect(formatScaledBin(1 * 1024 ** 2)).toBe('1MiB');
+ expect(formatScaledBin(10 * 1024 ** 2)).toBe('10MiB');
+ expect(formatScaledBin(100 * 1024 ** 2)).toBe('100MiB');
+
+ expect(formatScaledBin(1 * 1024 ** 3)).toBe('1GiB');
+ expect(formatScaledBin(10 * 1024 ** 3)).toBe('10GiB');
+ expect(formatScaledBin(100 * 1024 ** 3)).toBe('100GiB');
+ });
+ });
+
+ describe('scaled format with offset', () => {
+ let formatGibibytes;
+
+ beforeEach(() => {
+ formatGibibytes = scaledBinaryFormatter('B', 3);
+ });
+
+ it('formats floating point numbers', () => {
+ expect(formatGibibytes(12.888)).toBe('12.888GiB');
+ expect(formatGibibytes(12.888, 0)).toBe('13GiB');
+ expect(formatGibibytes(12.888, 1)).toBe('12.9GiB');
+ expect(formatGibibytes(12.888, 2)).toBe('12.89GiB');
+ });
+
+ it('formats large numbers scaled', () => {
+ expect(formatGibibytes(1)).toBe('1GiB');
+ expect(formatGibibytes(10)).toBe('10GiB');
+ expect(formatGibibytes(100)).toBe('100GiB');
+ expect(formatGibibytes(1000)).toBe('1,000GiB');
+
+ expect(formatGibibytes(1 * 1024)).toBe('1TiB');
+ expect(formatGibibytes(10 * 1024)).toBe('10TiB');
+ expect(formatGibibytes(100 * 1024)).toBe('100TiB');
+
+ expect(formatGibibytes(1 * 1024 ** 2)).toBe('1PiB');
+ expect(formatGibibytes(10 * 1024 ** 2)).toBe('10PiB');
+ expect(formatGibibytes(100 * 1024 ** 2)).toBe('100PiB');
+
+ expect(formatGibibytes(1 * 1024 ** 3)).toBe('1EiB');
+ expect(formatGibibytes(10 * 1024 ** 3)).toBe('10EiB');
+ expect(formatGibibytes(100 * 1024 ** 3)).toBe('100EiB');
+ });
+
+ it('formatting of too large numbers is not supported', () => {
+ // formatting YB is out of range
+ expect(() => scaledBinaryFormatter('B', 9)).toThrow();
+ });
+ });
+ });
+});
diff --git a/spec/frontend/lib/utils/unit_format/index_spec.js b/spec/frontend/lib/utils/unit_format/index_spec.js
new file mode 100644
index 00000000000..5b2fdf1f02b
--- /dev/null
+++ b/spec/frontend/lib/utils/unit_format/index_spec.js
@@ -0,0 +1,157 @@
+import { getFormatter, SUPPORTED_FORMATS } from '~/lib/utils/unit_format';
+
+describe('unit_format', () => {
+ describe('when a supported format is provided, the returned function formats', () => {
+ it('numbers, by default', () => {
+ expect(getFormatter()(1)).toBe('1');
+ });
+
+ it('numbers', () => {
+ const formatNumber = getFormatter(SUPPORTED_FORMATS.number);
+
+ expect(formatNumber(1)).toBe('1');
+ expect(formatNumber(100)).toBe('100');
+ expect(formatNumber(1000)).toBe('1,000');
+ expect(formatNumber(10000)).toBe('10,000');
+ expect(formatNumber(1000000)).toBe('1,000,000');
+ });
+
+ it('percent', () => {
+ const formatPercent = getFormatter(SUPPORTED_FORMATS.percent);
+
+ expect(formatPercent(1)).toBe('100%');
+ expect(formatPercent(1, 2)).toBe('100.00%');
+
+ expect(formatPercent(0.1)).toBe('10%');
+ expect(formatPercent(0.5)).toBe('50%');
+
+ expect(formatPercent(0.888888)).toBe('89%');
+ expect(formatPercent(0.888888, 2)).toBe('88.89%');
+ expect(formatPercent(0.888888, 5)).toBe('88.88880%');
+
+ expect(formatPercent(2)).toBe('200%');
+ expect(formatPercent(10)).toBe('1,000%');
+ });
+
+ it('percentunit', () => {
+ const formatPercentHundred = getFormatter(SUPPORTED_FORMATS.percentHundred);
+
+ expect(formatPercentHundred(1)).toBe('1%');
+ expect(formatPercentHundred(1, 2)).toBe('1.00%');
+
+ expect(formatPercentHundred(88.8888)).toBe('89%');
+ expect(formatPercentHundred(88.8888, 2)).toBe('88.89%');
+ expect(formatPercentHundred(88.8888, 5)).toBe('88.88880%');
+
+ expect(formatPercentHundred(100)).toBe('100%');
+ expect(formatPercentHundred(100, 2)).toBe('100.00%');
+
+ expect(formatPercentHundred(200)).toBe('200%');
+ expect(formatPercentHundred(1000)).toBe('1,000%');
+ });
+
+ it('seconds', () => {
+ expect(getFormatter(SUPPORTED_FORMATS.seconds)(1)).toBe('1s');
+ });
+
+ it('milliseconds', () => {
+ const formatMilliseconds = getFormatter(SUPPORTED_FORMATS.milliseconds);
+
+ expect(formatMilliseconds(1)).toBe('1ms');
+ expect(formatMilliseconds(100)).toBe('100ms');
+ expect(formatMilliseconds(1000)).toBe('1,000ms');
+ expect(formatMilliseconds(10000)).toBe('10,000ms');
+ expect(formatMilliseconds(1000000)).toBe('1,000,000ms');
+ });
+
+ it('decimalBytes', () => {
+ const formatDecimalBytes = getFormatter(SUPPORTED_FORMATS.decimalBytes);
+
+ expect(formatDecimalBytes(1)).toBe('1B');
+ expect(formatDecimalBytes(1, 1)).toBe('1.0B');
+
+ expect(formatDecimalBytes(10)).toBe('10B');
+ expect(formatDecimalBytes(10 ** 2)).toBe('100B');
+ expect(formatDecimalBytes(10 ** 3)).toBe('1kB');
+ expect(formatDecimalBytes(10 ** 4)).toBe('10kB');
+ expect(formatDecimalBytes(10 ** 5)).toBe('100kB');
+ expect(formatDecimalBytes(10 ** 6)).toBe('1MB');
+ expect(formatDecimalBytes(10 ** 7)).toBe('10MB');
+ expect(formatDecimalBytes(10 ** 8)).toBe('100MB');
+ expect(formatDecimalBytes(10 ** 9)).toBe('1GB');
+ expect(formatDecimalBytes(10 ** 10)).toBe('10GB');
+ expect(formatDecimalBytes(10 ** 11)).toBe('100GB');
+ });
+
+ it('kilobytes', () => {
+ expect(getFormatter(SUPPORTED_FORMATS.kilobytes)(1)).toBe('1kB');
+ expect(getFormatter(SUPPORTED_FORMATS.kilobytes)(1, 1)).toBe('1.0kB');
+ });
+
+ it('megabytes', () => {
+ expect(getFormatter(SUPPORTED_FORMATS.megabytes)(1)).toBe('1MB');
+ expect(getFormatter(SUPPORTED_FORMATS.megabytes)(1, 1)).toBe('1.0MB');
+ });
+
+ it('gigabytes', () => {
+ expect(getFormatter(SUPPORTED_FORMATS.gigabytes)(1)).toBe('1GB');
+ expect(getFormatter(SUPPORTED_FORMATS.gigabytes)(1, 1)).toBe('1.0GB');
+ });
+
+ it('terabytes', () => {
+ expect(getFormatter(SUPPORTED_FORMATS.terabytes)(1)).toBe('1TB');
+ expect(getFormatter(SUPPORTED_FORMATS.terabytes)(1, 1)).toBe('1.0TB');
+ });
+
+ it('petabytes', () => {
+ expect(getFormatter(SUPPORTED_FORMATS.petabytes)(1)).toBe('1PB');
+ expect(getFormatter(SUPPORTED_FORMATS.petabytes)(1, 1)).toBe('1.0PB');
+ });
+
+ it('bytes', () => {
+ const formatBytes = getFormatter(SUPPORTED_FORMATS.bytes);
+
+ expect(formatBytes(1)).toBe('1B');
+ expect(formatBytes(1, 1)).toBe('1.0B');
+
+ expect(formatBytes(10)).toBe('10B');
+ expect(formatBytes(100)).toBe('100B');
+ expect(formatBytes(1000)).toBe('1,000B');
+
+ expect(formatBytes(1 * 1024)).toBe('1KiB');
+ expect(formatBytes(1 * 1024 ** 2)).toBe('1MiB');
+ expect(formatBytes(1 * 1024 ** 3)).toBe('1GiB');
+ });
+
+ it('kibibytes', () => {
+ expect(getFormatter(SUPPORTED_FORMATS.kibibytes)(1)).toBe('1KiB');
+ expect(getFormatter(SUPPORTED_FORMATS.kibibytes)(1, 1)).toBe('1.0KiB');
+ });
+
+ it('mebibytes', () => {
+ expect(getFormatter(SUPPORTED_FORMATS.mebibytes)(1)).toBe('1MiB');
+ expect(getFormatter(SUPPORTED_FORMATS.mebibytes)(1, 1)).toBe('1.0MiB');
+ });
+
+ it('gibibytes', () => {
+ expect(getFormatter(SUPPORTED_FORMATS.gibibytes)(1)).toBe('1GiB');
+ expect(getFormatter(SUPPORTED_FORMATS.gibibytes)(1, 1)).toBe('1.0GiB');
+ });
+
+ it('tebibytes', () => {
+ expect(getFormatter(SUPPORTED_FORMATS.tebibytes)(1)).toBe('1TiB');
+ expect(getFormatter(SUPPORTED_FORMATS.tebibytes)(1, 1)).toBe('1.0TiB');
+ });
+
+ it('pebibytes', () => {
+ expect(getFormatter(SUPPORTED_FORMATS.pebibytes)(1)).toBe('1PiB');
+ expect(getFormatter(SUPPORTED_FORMATS.pebibytes)(1, 1)).toBe('1.0PiB');
+ });
+ });
+
+ describe('when an unsupported format is provided', () => {
+ it('getFormatter throws an error', () => {
+ expect(() => getFormatter('not-supported')(1)).toThrow();
+ });
+ });
+});
diff --git a/spec/frontend/logs/components/environment_logs_spec.js b/spec/frontend/logs/components/environment_logs_spec.js
new file mode 100644
index 00000000000..c638b4c05f9
--- /dev/null
+++ b/spec/frontend/logs/components/environment_logs_spec.js
@@ -0,0 +1,428 @@
+import Vue from 'vue';
+import { GlSprintf, GlDropdown, GlDropdownItem, GlSearchBoxByClick } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import DateTimePicker from '~/vue_shared/components/date_time_picker/date_time_picker.vue';
+import EnvironmentLogs from '~/logs/components/environment_logs.vue';
+
+import { createStore } from '~/logs/stores';
+import { scrollDown } from '~/lib/utils/scroll_utils';
+import {
+ mockEnvName,
+ mockEnvironments,
+ mockPods,
+ mockLogsResult,
+ mockTrace,
+ mockPodName,
+ mockSearch,
+ mockEnvironmentsEndpoint,
+ mockDocumentationPath,
+} from '../mock_data';
+
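+// scroll_utils is auto-mocked so calls to scrollDown can be asserted without real scrolling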
+jest.mock('~/lib/utils/scroll_utils');
+
+const module = 'environmentLogs';
+
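+// lodash/throttle is replaced with a pass-through so throttled handlers run immediately in tests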
+jest.mock('lodash/throttle', () =>
+ jest.fn(func => {
+ return func;
+ }),
+);
+
+describe('EnvironmentLogs', () => {
+ let EnvironmentLogsComponent;
+ let store;
+ let dispatch;
+ let wrapper;
+ let state;
+
+ const propsData = {
+ environmentName: mockEnvName,
+ environmentsPath: mockEnvironmentsEndpoint,
+ clusterApplicationsDocumentationPath: mockDocumentationPath,
+ };
+
+ const updateControlBtnsMock = jest.fn();
+
+ const findEnvironmentsDropdown = () => wrapper.find('.js-environments-dropdown');
+ const findPodsDropdown = () => wrapper.find('.js-pods-dropdown');
+ const findSearchBar = () => wrapper.find('.js-logs-search');
+ const findTimeRangePicker = () => wrapper.find({ ref: 'dateTimePicker' });
+ const findInfoAlert = () => wrapper.find('.js-elasticsearch-alert');
+ const findLogControlButtons = () => wrapper.find({ name: 'log-control-buttons-stub' });
+
+ const findInfiniteScroll = () => wrapper.find({ ref: 'infiniteScroll' });
+ const findLogTrace = () => wrapper.find('.js-log-trace');
+ const findLogFooter = () => wrapper.find({ ref: 'logFooter' });
+ const getInfiniteScrollAttr = attr => parseInt(findInfiniteScroll().attributes(attr), 10);
+
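+ // store.dispatch is mocked below, so these helpers mutate state the way the real actions would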
+ const mockSetInitData = () => {
+ state.pods.options = mockPods;
+ state.environments.current = mockEnvName;
+ [state.pods.current] = state.pods.options;
+
+ state.logs.lines = [];
+ };
+
+ const mockShowPodLogs = () => {
+ state.pods.options = mockPods;
+ [state.pods.current] = mockPods;
+
+ state.logs.lines = mockLogsResult;
+ };
+
+ const mockFetchEnvs = () => {
+ state.environments.options = mockEnvironments;
+ };
+
+ const initWrapper = () => {
+ wrapper = shallowMount(EnvironmentLogsComponent, {
+ propsData,
+ store,
+ stubs: {
+ LogControlButtons: {
+ name: 'log-control-buttons-stub',
+ template: '<div/>',
+ methods: {
+ update: updateControlBtnsMock,
+ },
+ props: {
+ scrollDownButtonDisabled: false,
+ },
+ },
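+ // stub GlInfiniteScroll with a plain wrapper that renders its slots so child content stays reachable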
+ GlInfiniteScroll: {
+ name: 'gl-infinite-scroll',
+ template: `
+ <div>
+ <slot name="header"></slot>
+ <slot name="items"></slot>
+ <slot></slot>
+ </div>
+ `,
+ },
+ GlSprintf,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ store = createStore();
+ state = store.state.environmentLogs;
+ EnvironmentLogsComponent = Vue.extend(EnvironmentLogs);
+
+ jest.spyOn(store, 'dispatch').mockResolvedValue();
+
+ dispatch = store.dispatch;
+ });
+
+ afterEach(() => {
+ store.dispatch.mockReset();
+
+ if (wrapper) {
+ wrapper.destroy();
+ }
+ });
+
+ it('displays UI elements', () => {
+ initWrapper();
+
+ expect(wrapper.isVueInstance()).toBe(true);
+ expect(wrapper.isEmpty()).toBe(false);
+
+ // top bar
+ expect(findEnvironmentsDropdown().is(GlDropdown)).toBe(true);
+ expect(findPodsDropdown().is(GlDropdown)).toBe(true);
+ expect(findLogControlButtons().exists()).toBe(true);
+
+ expect(findSearchBar().exists()).toBe(true);
+ expect(findSearchBar().is(GlSearchBoxByClick)).toBe(true);
+ expect(findTimeRangePicker().exists()).toBe(true);
+ expect(findTimeRangePicker().is(DateTimePicker)).toBe(true);
+
+ // log trace
+ expect(findInfiniteScroll().exists()).toBe(true);
+ expect(findLogTrace().exists()).toBe(true);
+ });
+
+ it('initializes data when mounted', () => {
+ initWrapper();
+
+ expect(dispatch).toHaveBeenCalledWith(`${module}/setInitData`, {
+ timeRange: expect.objectContaining({
+ default: true,
+ }),
+ environmentName: mockEnvName,
+ podName: null,
+ });
+
+ expect(dispatch).toHaveBeenCalledWith(`${module}/fetchEnvironments`, mockEnvironmentsEndpoint);
+ });
+
+ describe('loading state', () => {
+ beforeEach(() => {
+ state.pods.options = [];
+
+ state.logs.lines = [];
+ state.logs.isLoading = true;
+
+ state.environments = {
+ options: [],
+ isLoading: true,
+ };
+
+ initWrapper();
+ });
+
+ it('displays a disabled environments dropdown', () => {
+ expect(findEnvironmentsDropdown().attributes('disabled')).toBe('true');
+ expect(findEnvironmentsDropdown().findAll(GlDropdownItem).length).toBe(0);
+ });
+
+ it('displays a disabled pods dropdown', () => {
+ expect(findPodsDropdown().attributes('disabled')).toBe('true');
+ expect(findPodsDropdown().findAll(GlDropdownItem).length).toBe(0);
+ });
+
+ it('displays a disabled search bar', () => {
+ expect(findSearchBar().exists()).toBe(true);
+ expect(findSearchBar().attributes('disabled')).toBe('true');
+ });
+
+ it('displays a disabled time window dropdown', () => {
+ expect(findTimeRangePicker().attributes('disabled')).toBe('true');
+ });
+
+ it('does not update buttons state', () => {
+ expect(updateControlBtnsMock).not.toHaveBeenCalled();
+ });
+
+ it('shows an infinite scroll with height and no content', () => {
+ expect(getInfiniteScrollAttr('max-list-height')).toBeGreaterThan(0);
+ expect(getInfiniteScrollAttr('fetched-items')).toBe(0);
+ });
+
+ it('shows an infinite scroll container with equal height and max-height', () => {
+ const height = getInfiniteScrollAttr('max-list-height');
+
+ expect(height).toEqual(expect.any(Number));
+ expect(findInfiniteScroll().attributes('style')).toMatch(`height: ${height}px;`);
+ });
+
+ it('shows a logs trace', () => {
+ expect(findLogTrace().text()).toBe('');
+ expect(
+ findLogTrace()
+ .find('.js-build-loader-animation')
+ .isVisible(),
+ ).toBe(true);
+ });
+ });
+
+ describe('k8s environment', () => {
+ beforeEach(() => {
+ state.pods.options = [];
+
+ state.logs.lines = [];
+ state.logs.isLoading = false;
+
+ state.environments = {
+ options: mockEnvironments,
+ current: 'staging',
+ isLoading: false,
+ };
+
+ initWrapper();
+ });
+
+ it('displays a disabled time window dropdown', () => {
+ expect(findTimeRangePicker().attributes('disabled')).toBe('true');
+ });
+
+ it('displays a disabled search bar', () => {
+ expect(findSearchBar().attributes('disabled')).toBe('true');
+ });
+
+ it('displays an alert to upgrade to ES', () => {
+ expect(findInfoAlert().exists()).toBe(true);
+ });
+ });
+
+ describe('state with data', () => {
+ beforeEach(() => {
+ dispatch.mockImplementation(actionName => {
+ if (actionName === `${module}/setInitData`) {
+ mockSetInitData();
+ } else if (actionName === `${module}/showPodLogs`) {
+ mockShowPodLogs();
+ } else if (actionName === `${module}/fetchEnvironments`) {
+ mockFetchEnvs();
+ mockShowPodLogs();
+ }
+ });
+
+ initWrapper();
+ });
+
+ afterEach(() => {
+ scrollDown.mockReset();
+ updateControlBtnsMock.mockReset();
+ });
+
+ it('displays an enabled search bar', () => {
+ expect(findSearchBar().attributes('disabled')).toBeFalsy();
+
+ // input a query and click `search`
+ findSearchBar().vm.$emit('input', mockSearch);
+ findSearchBar().vm.$emit('submit');
+
+ expect(dispatch).toHaveBeenCalledWith(`${module}/setInitData`, expect.any(Object));
+ expect(dispatch).toHaveBeenCalledWith(`${module}/setSearch`, mockSearch);
+ });
+
+ it('displays an enabled time window dropdown', () => {
+ expect(findTimeRangePicker().attributes('disabled')).toBeFalsy();
+ });
+
+ it('does not display an alert to upgrade to ES', () => {
+ expect(findInfoAlert().exists()).toBe(false);
+ });
+
+ it('populates environments dropdown', () => {
+ const items = findEnvironmentsDropdown().findAll(GlDropdownItem);
+ expect(findEnvironmentsDropdown().props('text')).toBe(mockEnvName);
+ expect(items.length).toBe(mockEnvironments.length);
+ mockEnvironments.forEach((env, i) => {
+ const item = items.at(i);
+ expect(item.text()).toBe(env.name);
+ });
+ });
+
+ it('populates pods dropdown', () => {
+ const items = findPodsDropdown().findAll(GlDropdownItem);
+
+ expect(findPodsDropdown().props('text')).toBe(mockPodName);
+ expect(items.length).toBe(mockPods.length);
+ mockPods.forEach((pod, i) => {
+ const item = items.at(i);
+ expect(item.text()).toBe(pod);
+ });
+ });
+
+ it('shows an infinite scroll with height and content', () => {
+ expect(getInfiniteScrollAttr('max-list-height')).toBeGreaterThan(0);
+ expect(getInfiniteScrollAttr('fetched-items')).toBe(mockTrace.length);
+ });
+
+ it('populates logs trace', () => {
+ const trace = findLogTrace();
+ expect(trace.text().split('\n').length).toBe(mockTrace.length);
+ expect(trace.text().split('\n')).toEqual(mockTrace);
+ });
+
+ it('populates footer', () => {
+ const footer = findLogFooter().text();
+
+ expect(footer).toContain(`${mockLogsResult.length} results`);
+ });
+
+ describe('when user clicks', () => {
+ it('environment name, trace is refreshed', () => {
+ const items = findEnvironmentsDropdown().findAll(GlDropdownItem);
+ const index = 1; // any env
+
+ expect(dispatch).not.toHaveBeenCalledWith(`${module}/showEnvironment`, expect.anything());
+
+ items.at(index).vm.$emit('click');
+
+ expect(dispatch).toHaveBeenCalledWith(
+ `${module}/showEnvironment`,
+ mockEnvironments[index].name,
+ );
+ });
+
+ it('pod name, trace is refreshed', () => {
+ const items = findPodsDropdown().findAll(GlDropdownItem);
+ const index = 2; // any pod
+
+ expect(dispatch).not.toHaveBeenCalledWith(`${module}/showPodLogs`, expect.anything());
+
+ items.at(index).vm.$emit('click');
+
+ expect(dispatch).toHaveBeenCalledWith(`${module}/showPodLogs`, mockPods[index]);
+ });
+
+ it('refresh button, trace is refreshed', () => {
+ expect(dispatch).not.toHaveBeenCalledWith(`${module}/showPodLogs`, expect.anything());
+
+ findLogControlButtons().vm.$emit('refresh');
+
+ expect(dispatch).toHaveBeenCalledWith(`${module}/showPodLogs`, mockPodName);
+ });
+ });
+ });
+
+ describe('listeners', () => {
+ beforeEach(() => {
+ initWrapper();
+ });
+
+ it('attaches listeners in components', () => {
+ expect(findInfiniteScroll().vm.$listeners).toEqual({
+ topReached: expect.any(Function),
+ scroll: expect.any(Function),
+ });
+ });
+
+ it('`topReached` fetches more logs when not loading', () => {
+ expect(store.dispatch).not.toHaveBeenCalledWith(`${module}/fetchMoreLogsPrepend`, undefined);
+
+ findInfiniteScroll().vm.$emit('topReached');
+
+ expect(store.dispatch).toHaveBeenCalledWith(`${module}/fetchMoreLogsPrepend`, undefined);
+ });
+
+ it('`topReached` does not fetch more logs when already loading', () => {
+ state.logs.isLoading = true;
+ findInfiniteScroll().vm.$emit('topReached');
+
+ expect(store.dispatch).not.toHaveBeenCalledWith(`${module}/fetchMoreLogsPrepend`, undefined);
+ });
+
+ it('`scroll` on a scrollable target results in enabled scroll buttons', () => {
+ const target = { scrollTop: 10, clientHeight: 10, scrollHeight: 21 };
+
+ state.logs.isLoading = true;
+ findInfiniteScroll().vm.$emit('scroll', { target });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(findLogControlButtons().props('scrollDownButtonDisabled')).toEqual(false);
+ });
+ });
+
+ it('`scroll` on a non-scrollable target results in disabled scroll buttons', () => {
+ const target = { scrollTop: 10, clientHeight: 10, scrollHeight: 20 };
+
+ state.logs.isLoading = true;
+ findInfiniteScroll().vm.$emit('scroll', { target });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(findLogControlButtons().props('scrollDownButtonDisabled')).toEqual(true);
+ });
+ });
+
+ it('`scroll` on no target results in disabled scroll buttons', () => {
+ state.logs.isLoading = true;
+ findInfiniteScroll().vm.$emit('scroll', { target: undefined });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(findLogControlButtons().props('scrollDownButtonDisabled')).toEqual(true);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/logs/components/log_control_buttons_spec.js b/spec/frontend/logs/components/log_control_buttons_spec.js
new file mode 100644
index 00000000000..38e568f569f
--- /dev/null
+++ b/spec/frontend/logs/components/log_control_buttons_spec.js
@@ -0,0 +1,92 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlButton } from '@gitlab/ui';
+import LogControlButtons from '~/logs/components/log_control_buttons.vue';
+
+describe('LogControlButtons', () => {
+ let wrapper;
+
+ const findScrollToTop = () => wrapper.find('.js-scroll-to-top');
+ const findScrollToBottom = () => wrapper.find('.js-scroll-to-bottom');
+ const findRefreshBtn = () => wrapper.find('.js-refresh-log');
+
+ const initWrapper = opts => {
+ wrapper = shallowMount(LogControlButtons, {
+ listeners: {
+ scrollUp: () => {},
+ scrollDown: () => {},
+ },
+ ...opts,
+ });
+ };
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ }
+ });
+
+ it('displays UI elements', () => {
+ initWrapper();
+
+ expect(wrapper.isVueInstance()).toBe(true);
+ expect(wrapper.isEmpty()).toBe(false);
+
+ expect(findScrollToTop().is(GlButton)).toBe(true);
+ expect(findScrollToBottom().is(GlButton)).toBe(true);
+ expect(findRefreshBtn().is(GlButton)).toBe(true);
+ });
+
+ it('emits a `refresh` event on click on `refresh` button', () => {
+ initWrapper();
+
+ // An `undefined` value means no event was emitted
+ expect(wrapper.emitted('refresh')).toBe(undefined);
+
+ findRefreshBtn().vm.$emit('click');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted('refresh')).toHaveLength(1);
+ });
+ });
+
+ describe('when scrolling actions are enabled', () => {
+ beforeEach(() => {
+ // mock scrolled to the middle of a long page
+ initWrapper();
+ return wrapper.vm.$nextTick();
+ });
+
+ it('click on "scroll to top" scrolls up', () => {
+ expect(findScrollToTop().is('[disabled]')).toBe(false);
+
+ findScrollToTop().vm.$emit('click');
+
+ expect(wrapper.emitted('scrollUp')).toHaveLength(1);
+ });
+
+ it('click on "scroll to bottom" scrolls down', () => {
+ expect(findScrollToBottom().is('[disabled]')).toBe(false);
+
+ findScrollToBottom().vm.$emit('click');
+
+ expect(wrapper.emitted('scrollDown')).toHaveLength(1);
+ });
+ });
+
+ describe('when scrolling actions are disabled', () => {
+ beforeEach(() => {
+ initWrapper({ listeners: {} });
+ return wrapper.vm.$nextTick();
+ });
+
+ it('buttons are hidden', () => {
+ return wrapper.vm.$nextTick(() => {
+ expect(findScrollToTop().exists()).toBe(false);
+ expect(findScrollToBottom().exists()).toBe(false);
+ // This should be enabled when gitlab-ui contains:
+ // https://gitlab.com/gitlab-org/gitlab-ui/-/merge_requests/1149
+ // expect(findScrollToBottom().is('[disabled]')).toBe(true);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/logs/mock_data.js b/spec/frontend/logs/mock_data.js
new file mode 100644
index 00000000000..1a84d6edd12
--- /dev/null
+++ b/spec/frontend/logs/mock_data.js
@@ -0,0 +1,53 @@
+const mockProjectPath = 'root/autodevops-deploy';
+
+export const mockEnvName = 'production';
+export const mockEnvironmentsEndpoint = `${mockProjectPath}/environments.json`;
+export const mockEnvId = '99';
+export const mockDocumentationPath = '/documentation.md';
+export const mockLogsEndpoint = '/dummy_logs_path.json';
+export const mockCursor = 'MOCK_CURSOR';
+export const mockNextCursor = 'MOCK_NEXT_CURSOR';
+
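+// helper to build a mock environment; `advancedQuerying` sets the `enable_advanced_logs_querying` flag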
+const makeMockEnvironment = (id, name, advancedQuerying) => ({
+ id,
+ project_path: mockProjectPath,
+ name,
+ logs_api_path: mockLogsEndpoint,
+ enable_advanced_logs_querying: advancedQuerying,
+});
+
+export const mockEnvironment = makeMockEnvironment(mockEnvId, mockEnvName, true);
+export const mockEnvironments = [
+ mockEnvironment,
+ makeMockEnvironment(101, 'staging', false),
+ makeMockEnvironment(102, 'review/a-feature', false),
+];
+
+export const mockPodName = 'production-764c58d697-aaaaa';
+export const mockPods = [
+ mockPodName,
+ 'production-764c58d697-bbbbb',
+ 'production-764c58d697-ccccc',
+ 'production-764c58d697-ddddd',
+];
+
+export const mockLogsResult = [
+ { timestamp: '2019-12-13T13:43:18.2760123Z', message: 'Log 1' },
+ { timestamp: '2019-12-13T13:43:18.2760123Z', message: 'Log 2' },
+ { timestamp: '2019-12-13T13:43:26.8420123Z', message: 'Log 3' },
+];
+
+export const mockTrace = [
+ 'Dec 13 13:43:18.276Z | Log 1',
+ 'Dec 13 13:43:18.276Z | Log 2',
+ 'Dec 13 13:43:26.842Z | Log 3',
+];
+
+export const mockResponse = {
+ pod_name: mockPodName,
+ pods: mockPods,
+ logs: mockLogsResult,
+ cursor: mockNextCursor,
+};
+
+export const mockSearch = 'foo +bar';
diff --git a/spec/frontend/logs/stores/actions_spec.js b/spec/frontend/logs/stores/actions_spec.js
new file mode 100644
index 00000000000..cad2501c67c
--- /dev/null
+++ b/spec/frontend/logs/stores/actions_spec.js
@@ -0,0 +1,398 @@
+import MockAdapter from 'axios-mock-adapter';
+
+import testAction from 'helpers/vuex_action_helper';
+import * as types from '~/logs/stores/mutation_types';
+import { convertToFixedRange } from '~/lib/utils/datetime_range';
+import logsPageState from '~/logs/stores/state';
+import {
+ setInitData,
+ setSearch,
+ showPodLogs,
+ fetchEnvironments,
+ fetchLogs,
+ fetchMoreLogsPrepend,
+} from '~/logs/stores/actions';
+
+import { defaultTimeRange } from '~/vue_shared/constants';
+
+import axios from '~/lib/utils/axios_utils';
+import flash from '~/flash';
+
+import {
+ mockPodName,
+ mockEnvironmentsEndpoint,
+ mockEnvironments,
+ mockPods,
+ mockLogsResult,
+ mockEnvName,
+ mockSearch,
+ mockLogsEndpoint,
+ mockResponse,
+ mockCursor,
+ mockNextCursor,
+} from '../mock_data';
+
+jest.mock('~/flash');
+jest.mock('~/lib/utils/datetime_range');
+jest.mock('~/logs/utils');
+
+const mockDefaultRange = {
+ start: '2020-01-10T18:00:00.000Z',
+ end: '2020-01-10T10:00:00.000Z',
+};
+const mockFixedRange = {
+ start: '2020-01-09T18:06:20.000Z',
+ end: '2020-01-09T18:36:20.000Z',
+};
+const mockRollingRange = {
+ duration: 120,
+};
+const mockRollingRangeAsFixed = {
+ start: '2020-01-10T18:00:00.000Z',
+ end: '2020-01-10T17:58:00.000Z',
+};
+
+describe('Logs Store actions', () => {
+ let state;
+ let mock;
+
+ const latestGetParams = () => mock.history.get[mock.history.get.length - 1].params;
+
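+ // convertToFixedRange is mocked to translate each known mock range into its fixed { start, end } equivalent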
+ convertToFixedRange.mockImplementation(range => {
+ if (range === defaultTimeRange) {
+ return { ...mockDefaultRange };
+ }
+ if (range === mockFixedRange) {
+ return { ...mockFixedRange };
+ }
+ if (range === mockRollingRange) {
+ return { ...mockRollingRangeAsFixed };
+ }
+ throw new Error('Invalid time range');
+ });
+
+ beforeEach(() => {
+ state = logsPageState();
+ });
+
+ afterEach(() => {
+ flash.mockClear();
+ });
+
+ describe('setInitData', () => {
+ it('should commit environment and pod name mutation', () =>
+ testAction(
+ setInitData,
+ { timeRange: mockFixedRange, environmentName: mockEnvName, podName: mockPodName },
+ state,
+ [
+ { type: types.SET_TIME_RANGE, payload: mockFixedRange },
+ { type: types.SET_PROJECT_ENVIRONMENT, payload: mockEnvName },
+ { type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
+ ],
+ ));
+ });
+
+ describe('setSearch', () => {
+ it('should commit search mutation', () =>
+ testAction(
+ setSearch,
+ mockSearch,
+ state,
+ [{ type: types.SET_SEARCH, payload: mockSearch }],
+ [{ type: 'fetchLogs' }],
+ ));
+ });
+
+ describe('showPodLogs', () => {
+ it('should commit pod name', () =>
+ testAction(
+ showPodLogs,
+ mockPodName,
+ state,
+ [{ type: types.SET_CURRENT_POD_NAME, payload: mockPodName }],
+ [{ type: 'fetchLogs' }],
+ ));
+ });
+
+ describe('fetchEnvironments', () => {
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ it('should commit RECEIVE_ENVIRONMENTS_DATA_SUCCESS mutation on correct data', () => {
+ mock.onGet(mockEnvironmentsEndpoint).replyOnce(200, { environments: mockEnvironments });
+ return testAction(
+ fetchEnvironments,
+ mockEnvironmentsEndpoint,
+ state,
+ [
+ { type: types.REQUEST_ENVIRONMENTS_DATA },
+ { type: types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS, payload: mockEnvironments },
+ ],
+ [{ type: 'fetchLogs' }],
+ );
+ });
+
+ it('should commit RECEIVE_ENVIRONMENTS_DATA_ERROR on an error response', () => {
+ mock.onGet(mockEnvironmentsEndpoint).replyOnce(500);
+ return testAction(
+ fetchEnvironments,
+ mockEnvironmentsEndpoint,
+ state,
+ [
+ { type: types.REQUEST_ENVIRONMENTS_DATA },
+ { type: types.RECEIVE_ENVIRONMENTS_DATA_ERROR },
+ ],
+ [],
+ () => {
+ expect(flash).toHaveBeenCalledTimes(1);
+ },
+ );
+ });
+ });
+
+ describe('when the backend responds successfully', () => {
+ let expectedMutations;
+ let expectedActions;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ mock.onGet(mockLogsEndpoint).reply(200, mockResponse);
+ mock.onGet(mockLogsEndpoint).replyOnce(202); // mock reactive cache
+
+ state.environments.options = mockEnvironments;
+ state.environments.current = mockEnvName;
+ });
+
+ afterEach(() => {
+ mock.reset();
+ });
+
+ describe('fetchLogs', () => {
+ beforeEach(() => {
+ expectedMutations = [
+ { type: types.REQUEST_PODS_DATA },
+ { type: types.REQUEST_LOGS_DATA },
+ { type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
+ { type: types.RECEIVE_PODS_DATA_SUCCESS, payload: mockPods },
+ {
+ type: types.RECEIVE_LOGS_DATA_SUCCESS,
+ payload: { logs: mockLogsResult, cursor: mockNextCursor },
+ },
+ ];
+
+ expectedActions = [];
+ });
+
+ it('should commit logs and pod data when a pod name is defined', () => {
+ state.pods.current = mockPodName;
+
+ return testAction(fetchLogs, null, state, expectedMutations, expectedActions, () => {
+ expect(latestGetParams()).toMatchObject({
+ pod_name: mockPodName,
+ });
+ });
+ });
+
+ it('should commit logs and pod data when a pod name is defined and a non-default date range is set', () => {
+ state.pods.current = mockPodName;
+ state.timeRange.current = mockFixedRange;
+ state.logs.cursor = mockCursor;
+
+ return testAction(fetchLogs, null, state, expectedMutations, expectedActions, () => {
+ expect(latestGetParams()).toEqual({
+ pod_name: mockPodName,
+ start: mockFixedRange.start,
+ end: mockFixedRange.end,
+ cursor: mockCursor,
+ });
+ });
+ });
+
+ it('should commit logs and pod data when a pod name and search are set and the date range is invalid', () => {
+ state.pods.current = mockPodName;
+ state.search = mockSearch;
+ state.timeRange.current = 'INVALID_TIME_RANGE';
+
+ return testAction(fetchLogs, null, state, expectedMutations, expectedActions, () => {
+ expect(latestGetParams()).toEqual({
+ pod_name: mockPodName,
+ search: mockSearch,
+ });
+ // Warning about time ranges was issued
+ expect(flash).toHaveBeenCalledTimes(1);
+ expect(flash).toHaveBeenCalledWith(expect.any(String), 'warning');
+ });
+ });
+
+ it('should commit logs and pod data when no pod name is defined', () => {
+ state.timeRange.current = mockDefaultRange;
+
+ return testAction(fetchLogs, null, state, expectedMutations, expectedActions, () => {
+ expect(latestGetParams()).toEqual({});
+ });
+ });
+ });
+
+ describe('fetchMoreLogsPrepend', () => {
+ beforeEach(() => {
+ expectedMutations = [
+ { type: types.REQUEST_LOGS_DATA_PREPEND },
+ {
+ type: types.RECEIVE_LOGS_DATA_PREPEND_SUCCESS,
+ payload: { logs: mockLogsResult, cursor: mockNextCursor },
+ },
+ ];
+
+ expectedActions = [];
+ });
+
+ it('should commit logs and pod data when a pod name is defined', () => {
+ state.pods.current = mockPodName;
+
+ expectedActions = [];
+
+ return testAction(
+ fetchMoreLogsPrepend,
+ null,
+ state,
+ expectedMutations,
+ expectedActions,
+ () => {
+ expect(latestGetParams()).toMatchObject({
+ pod_name: mockPodName,
+ });
+ },
+ );
+ });
+
+ it('should commit logs and pod data when a pod name is defined and a non-default date range is set', () => {
+ state.pods.current = mockPodName;
+ state.timeRange.current = mockFixedRange;
+ state.logs.cursor = mockCursor;
+
+ return testAction(
+ fetchMoreLogsPrepend,
+ null,
+ state,
+ expectedMutations,
+ expectedActions,
+ () => {
+ expect(latestGetParams()).toEqual({
+ pod_name: mockPodName,
+ start: mockFixedRange.start,
+ end: mockFixedRange.end,
+ cursor: mockCursor,
+ });
+ },
+ );
+ });
+
+ it('should commit logs and pod data when a pod name and search are set and the date range is invalid', () => {
+ state.pods.current = mockPodName;
+ state.search = mockSearch;
+ state.timeRange.current = 'INVALID_TIME_RANGE';
+
+ return testAction(
+ fetchMoreLogsPrepend,
+ null,
+ state,
+ expectedMutations,
+ expectedActions,
+ () => {
+ expect(latestGetParams()).toEqual({
+ pod_name: mockPodName,
+ search: mockSearch,
+ });
+ // Warning about time ranges was issued
+ expect(flash).toHaveBeenCalledTimes(1);
+ expect(flash).toHaveBeenCalledWith(expect.any(String), 'warning');
+ },
+ );
+ });
+
+ it('should commit logs and pod data when no pod name is defined', () => {
+ state.timeRange.current = mockDefaultRange;
+
+ return testAction(
+ fetchMoreLogsPrepend,
+ null,
+ state,
+ expectedMutations,
+ expectedActions,
+ () => {
+ expect(latestGetParams()).toEqual({});
+ },
+ );
+ });
+
+ it('should not commit logs or pod data when it has reached the end', () => {
+ state.logs.isComplete = true;
+ state.logs.cursor = null;
+
+ return testAction(
+ fetchMoreLogsPrepend,
+ null,
+ state,
+ [], // no mutations done
+ [], // no actions dispatched
+ () => {
+ expect(mock.history.get).toHaveLength(0);
+ },
+ );
+ });
+ });
+ });
+
+ describe('when the backend responds with an error', () => {
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ mock.onGet(mockLogsEndpoint).reply(500);
+ });
+
+ afterEach(() => {
+ mock.reset();
+ });
+
+ it('fetchLogs should commit logs and pod errors', () => {
+ state.environments.options = mockEnvironments;
+ state.environments.current = mockEnvName;
+
+ return testAction(
+ fetchLogs,
+ null,
+ state,
+ [
+ { type: types.REQUEST_PODS_DATA },
+ { type: types.REQUEST_LOGS_DATA },
+ { type: types.RECEIVE_PODS_DATA_ERROR },
+ { type: types.RECEIVE_LOGS_DATA_ERROR },
+ ],
+ [],
+ () => {
+ expect(mock.history.get[0].url).toBe(mockLogsEndpoint);
+ },
+ );
+ });
+
+ it('fetchMoreLogsPrepend should commit logs and pod errors', () => {
+ state.environments.options = mockEnvironments;
+ state.environments.current = mockEnvName;
+
+ return testAction(
+ fetchMoreLogsPrepend,
+ null,
+ state,
+ [
+ { type: types.REQUEST_LOGS_DATA_PREPEND },
+ { type: types.RECEIVE_LOGS_DATA_PREPEND_ERROR },
+ ],
+ [],
+ () => {
+ expect(mock.history.get[0].url).toBe(mockLogsEndpoint);
+ },
+ );
+ });
+ });
+});
diff --git a/spec/frontend/logs/stores/getters_spec.js b/spec/frontend/logs/stores/getters_spec.js
new file mode 100644
index 00000000000..fdce575fa97
--- /dev/null
+++ b/spec/frontend/logs/stores/getters_spec.js
@@ -0,0 +1,40 @@
+import * as getters from '~/logs/stores/getters';
+import logsPageState from '~/logs/stores/state';
+
+import { mockLogsResult, mockTrace } from '../mock_data';
+
+describe('Logs Store getters', () => {
+ let state;
+
+ beforeEach(() => {
+ state = logsPageState();
+ });
+
+ describe('trace', () => {
+ describe('when state is initialized', () => {
+ it('returns an empty string', () => {
+ expect(getters.trace(state)).toEqual('');
+ });
+ });
+
+ describe('when state logs are empty', () => {
+ beforeEach(() => {
+ state.logs.lines = [];
+ });
+
+ it('returns an empty string', () => {
+ expect(getters.trace(state)).toEqual('');
+ });
+ });
+
+ describe('when state logs are set', () => {
+ beforeEach(() => {
+ state.logs.lines = mockLogsResult;
+ });
+
+ it('returns the logs trace joined by new lines', () => {
+ expect(getters.trace(state)).toEqual(mockTrace.join('\n'));
+ });
+ });
+ });
+});
diff --git a/spec/frontend/logs/stores/mutations_spec.js b/spec/frontend/logs/stores/mutations_spec.js
new file mode 100644
index 00000000000..eae838a31d4
--- /dev/null
+++ b/spec/frontend/logs/stores/mutations_spec.js
@@ -0,0 +1,259 @@
+import mutations from '~/logs/stores/mutations';
+import * as types from '~/logs/stores/mutation_types';
+
+import logsPageState from '~/logs/stores/state';
+import {
+ mockEnvName,
+ mockEnvironments,
+ mockPods,
+ mockPodName,
+ mockLogsResult,
+ mockSearch,
+ mockCursor,
+ mockNextCursor,
+} from '../mock_data';
+
+describe('Logs Store Mutations', () => {
+ let state;
+
+ beforeEach(() => {
+ state = logsPageState();
+ });
+
+ it('ensures mutation types are correctly named', () => {
+ Object.keys(types).forEach(k => {
+ expect(k).toEqual(types[k]);
+ });
+ });
+
+ describe('SET_PROJECT_ENVIRONMENT', () => {
+ it('sets the environment', () => {
+ mutations[types.SET_PROJECT_ENVIRONMENT](state, mockEnvName);
+ expect(state.environments.current).toEqual(mockEnvName);
+ });
+ });
+
+ describe('SET_SEARCH', () => {
+ it('sets the search', () => {
+ mutations[types.SET_SEARCH](state, mockSearch);
+ expect(state.search).toEqual(mockSearch);
+ });
+ });
+
+ describe('REQUEST_ENVIRONMENTS_DATA', () => {
+ it('clears environments and starts loading', () => {
+ mutations[types.REQUEST_ENVIRONMENTS_DATA](state);
+ expect(state.environments.options).toEqual([]);
+ expect(state.environments.isLoading).toEqual(true);
+ });
+ });
+
+ describe('RECEIVE_ENVIRONMENTS_DATA_SUCCESS', () => {
+ it('receives environments data and stores it as options', () => {
+ expect(state.environments.options).toEqual([]);
+
+ mutations[types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS](state, mockEnvironments);
+
+ expect(state.environments.options).toEqual(mockEnvironments);
+ expect(state.environments.isLoading).toEqual(false);
+ });
+ });
+
+ describe('RECEIVE_ENVIRONMENTS_DATA_ERROR', () => {
+ it('captures an error loading environments', () => {
+ mutations[types.RECEIVE_ENVIRONMENTS_DATA_ERROR](state);
+
+ expect(state.environments).toEqual({
+ options: [],
+ isLoading: false,
+ current: null,
+ });
+ });
+ });
+
+ describe('REQUEST_LOGS_DATA', () => {
+ it('starts loading for logs', () => {
+ mutations[types.REQUEST_LOGS_DATA](state);
+
+ expect(state.timeRange.current).toEqual({
+ start: expect.any(String),
+ end: expect.any(String),
+ });
+
+ expect(state.logs).toEqual({
+ lines: [],
+ cursor: null,
+ isLoading: true,
+ isComplete: false,
+ });
+ });
+ });
+
+ describe('RECEIVE_LOGS_DATA_SUCCESS', () => {
+ it('receives logs lines and cursor', () => {
+ mutations[types.RECEIVE_LOGS_DATA_SUCCESS](state, {
+ logs: mockLogsResult,
+ cursor: mockCursor,
+ });
+
+ expect(state.logs).toEqual({
+ lines: mockLogsResult,
+ isLoading: false,
+ cursor: mockCursor,
+ isComplete: false,
+ });
+ });
+
+ it('receives logs lines and a null cursor to indicate the end', () => {
+ mutations[types.RECEIVE_LOGS_DATA_SUCCESS](state, {
+ logs: mockLogsResult,
+ cursor: null,
+ });
+
+ expect(state.logs).toEqual({
+ lines: mockLogsResult,
+ isLoading: false,
+ cursor: null,
+ isComplete: true,
+ });
+ });
+ });
+
+ describe('RECEIVE_LOGS_DATA_ERROR', () => {
+ it('receives log data error and stops loading', () => {
+ mutations[types.RECEIVE_LOGS_DATA_ERROR](state);
+
+ expect(state.logs).toEqual({
+ lines: [],
+ isLoading: false,
+ cursor: null,
+ isComplete: false,
+ });
+ });
+ });
+
+ describe('REQUEST_LOGS_DATA_PREPEND', () => {
+ it('starts loading for a prepend request', () => {
+ mutations[types.REQUEST_LOGS_DATA_PREPEND](state);
+
+ expect(state.logs.isLoading).toBe(true);
+ });
+ });
+
+ describe('RECEIVE_LOGS_DATA_PREPEND_SUCCESS', () => {
+ it('receives logs lines and cursor', () => {
+ mutations[types.RECEIVE_LOGS_DATA_PREPEND_SUCCESS](state, {
+ logs: mockLogsResult,
+ cursor: mockCursor,
+ });
+
+ expect(state.logs).toEqual({
+ lines: mockLogsResult,
+ isLoading: false,
+ cursor: mockCursor,
+ isComplete: false,
+ });
+ });
+
+ it('receives additional logs lines and a new cursor', () => {
+ mutations[types.RECEIVE_LOGS_DATA_PREPEND_SUCCESS](state, {
+ logs: mockLogsResult,
+ cursor: mockCursor,
+ });
+
+ mutations[types.RECEIVE_LOGS_DATA_PREPEND_SUCCESS](state, {
+ logs: mockLogsResult,
+ cursor: mockNextCursor,
+ });
+
+ expect(state.logs).toEqual({
+ lines: [...mockLogsResult, ...mockLogsResult],
+ isLoading: false,
+ cursor: mockNextCursor,
+ isComplete: false,
+ });
+ });
+
+ it('receives logs lines and a null cursor to indicate the logs are complete', () => {
+ mutations[types.RECEIVE_LOGS_DATA_PREPEND_SUCCESS](state, {
+ logs: mockLogsResult,
+ cursor: null,
+ });
+
+ expect(state.logs).toEqual({
+ lines: mockLogsResult,
+ isLoading: false,
+ cursor: null,
+ isComplete: true,
+ });
+ });
+ });
+
+ describe('RECEIVE_LOGS_DATA_PREPEND_ERROR', () => {
+ it('stops loading when the prepend request fails', () => {
+ mutations[types.RECEIVE_LOGS_DATA_PREPEND_ERROR](state);
+
+ expect(state.logs.isLoading).toBe(false);
+ });
+ });
+
+ describe('SET_CURRENT_POD_NAME', () => {
+ it('sets the current pod name', () => {
+ mutations[types.SET_CURRENT_POD_NAME](state, mockPodName);
+
+ expect(state.pods.current).toEqual(mockPodName);
+ });
+ });
+
+ describe('SET_TIME_RANGE', () => {
+ it('sets a default range', () => {
+ expect(state.timeRange.selected).toEqual(expect.any(Object));
+ expect(state.timeRange.current).toEqual(expect.any(Object));
+ });
+
+ it('sets a time range', () => {
+ const mockRange = {
+ start: '2020-01-10T18:00:00.000Z',
+ end: '2020-01-10T10:00:00.000Z',
+ };
+ mutations[types.SET_TIME_RANGE](state, mockRange);
+
+ expect(state.timeRange.selected).toEqual(mockRange);
+ expect(state.timeRange.current).toEqual(mockRange);
+ });
+ });
+
+ describe('REQUEST_PODS_DATA', () => {
+ it('clears pods options', () => {
+ mutations[types.REQUEST_PODS_DATA](state);
+
+ expect(state.pods).toEqual(
+ expect.objectContaining({
+ options: [],
+ }),
+ );
+ });
+ });
+ describe('RECEIVE_PODS_DATA_SUCCESS', () => {
+ it('stores pods options on success', () => {
+ mutations[types.RECEIVE_PODS_DATA_SUCCESS](state, mockPods);
+
+ expect(state.pods).toEqual(
+ expect.objectContaining({
+ options: mockPods,
+ }),
+ );
+ });
+ });
+ describe('RECEIVE_PODS_DATA_ERROR', () => {
+ it('clears pods options on error', () => {
+ mutations[types.RECEIVE_PODS_DATA_ERROR](state);
+
+ expect(state.pods).toEqual(
+ expect.objectContaining({
+ options: [],
+ }),
+ );
+ });
+ });
+});
diff --git a/spec/frontend/logs/utils_spec.js b/spec/frontend/logs/utils_spec.js
new file mode 100644
index 00000000000..986fe320363
--- /dev/null
+++ b/spec/frontend/logs/utils_spec.js
@@ -0,0 +1,38 @@
+import { getTimeRange } from '~/logs/utils';
+
+describe('logs/utils', () => {
+ describe('getTimeRange', () => {
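+ // 1577836800000 ms corresponds to 2020-01-01T00:00:00.000Z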
+ const nowTimestamp = 1577836800000;
+ const nowString = '2020-01-01T00:00:00.000Z';
+
+ beforeEach(() => {
+ jest.spyOn(Date, 'now').mockImplementation(() => nowTimestamp);
+ });
+
+ afterEach(() => {
+ Date.now.mockRestore();
+ });
+
+ it('returns a fixed range that ends now for the given duration in seconds', () => {
+ expect(getTimeRange(0)).toEqual({
+ start: '2020-01-01T00:00:00.000Z',
+ end: nowString,
+ });
+
+ expect(getTimeRange(60 * 30)).toEqual({
+ start: '2019-12-31T23:30:00.000Z',
+ end: nowString,
+ });
+
+ expect(getTimeRange(60 * 60 * 24 * 7 * 1)).toEqual({
+ start: '2019-12-25T00:00:00.000Z',
+ end: nowString,
+ });
+
+ expect(getTimeRange(60 * 60 * 24 * 7 * 4)).toEqual({
+ start: '2019-12-04T00:00:00.000Z',
+ end: nowString,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/mocks/ce/diffs/workers/tree_worker.js b/spec/frontend/mocks/ce/diffs/workers/tree_worker.js
new file mode 100644
index 00000000000..a33ddbbfe63
--- /dev/null
+++ b/spec/frontend/mocks/ce/diffs/workers/tree_worker.js
@@ -0,0 +1,8 @@
+/* eslint-disable class-methods-use-this */
+export default class TreeWorkerMock {
+ addEventListener() {}
+
+ terminate() {}
+
+ postMessage() {}
+}
diff --git a/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap b/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
index c705270343b..e37043e5d4d 100644
--- a/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
+++ b/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
@@ -72,17 +72,33 @@ exports[`Dashboard template matches the default snapshot 1`] = `
</gl-form-group-stub>
<gl-form-group-stub
- class="col-sm-6 col-md-6 col-lg-4"
+ class="col-sm-auto col-md-auto col-lg-auto"
label="Show last"
label-for="monitor-time-window-dropdown"
label-size="sm"
>
<date-time-picker-stub
+ customenabled="true"
options="[object Object],[object Object],[object Object],[object Object],[object Object],[object Object],[object Object]"
value="[object Object]"
/>
</gl-form-group-stub>
+ <gl-form-group-stub
+ class="col-sm-2 col-md-2 col-lg-1 refresh-dashboard-button"
+ >
+ <gl-button-stub
+ size="md"
+ title="Reload this page"
+ variant="default"
+ >
+ <icon-stub
+ name="repeat"
+ size="16"
+ />
+ </gl-button-stub>
+ </gl-form-group-stub>
+
<!---->
</div>
</div>
diff --git a/spec/frontend/monitoring/components/charts/anomaly_spec.js b/spec/frontend/monitoring/components/charts/anomaly_spec.js
index cea22d075ec..e2d001c3058 100644
--- a/spec/frontend/monitoring/components/charts/anomaly_spec.js
+++ b/spec/frontend/monitoring/components/charts/anomaly_spec.js
@@ -11,7 +11,6 @@ import {
} from '../../mock_data';
import MonitorTimeSeriesChart from '~/monitoring/components/charts/time_series.vue';
-const mockWidgets = 'mockWidgets';
const mockProjectPath = `${TEST_HOST}${mockProjectDir}`;
jest.mock('~/lib/utils/icon_utils'); // mock getSvgIconPathContent
@@ -35,9 +34,6 @@ describe('Anomaly chart component', () => {
const setupAnomalyChart = props => {
wrapper = shallowMount(Anomaly, {
propsData: { ...props },
- slots: {
- default: mockWidgets,
- },
});
};
const findTimeSeries = () => wrapper.find(MonitorTimeSeriesChart);
diff --git a/spec/frontend/monitoring/components/charts/column_spec.js b/spec/frontend/monitoring/components/charts/column_spec.js
index d6a96ffbd65..f368cb7916c 100644
--- a/spec/frontend/monitoring/components/charts/column_spec.js
+++ b/spec/frontend/monitoring/components/charts/column_spec.js
@@ -6,56 +6,75 @@ jest.mock('~/lib/utils/icon_utils', () => ({
getSvgIconPathContent: jest.fn().mockResolvedValue('mockSvgPathContent'),
}));
+const yAxisName = 'Y-axis mock name';
+const yAxisFormat = 'bytes';
+const yAxisPrecision = 3;
+const dataValues = [
+ [1495700554.925, '8.0390625'],
+ [1495700614.925, '8.0390625'],
+ [1495700674.925, '8.0390625'],
+];
+
describe('Column component', () => {
- let columnChart;
+ let wrapper;
+
+ const findChart = () => wrapper.find(GlColumnChart);
+ const chartProps = prop => findChart().props(prop);
beforeEach(() => {
- columnChart = shallowMount(ColumnChart, {
+ wrapper = shallowMount(ColumnChart, {
propsData: {
graphData: {
+ yAxis: {
+ name: yAxisName,
+ format: yAxisFormat,
+ precision: yAxisPrecision,
+ },
metrics: [
{
- x_label: 'Time',
- y_label: 'Usage',
result: [
{
metric: {},
- values: [
- [1495700554.925, '8.0390625'],
- [1495700614.925, '8.0390625'],
- [1495700674.925, '8.0390625'],
- ],
+ values: dataValues,
},
],
},
],
},
- containerWidth: 100,
},
});
});
afterEach(() => {
- columnChart.destroy();
+ wrapper.destroy();
});
describe('wrapped components', () => {
describe('GitLab UI column chart', () => {
- let glColumnChart;
+ it('is a Vue instance', () => {
+ expect(findChart().isVueInstance()).toBe(true);
+ });
- beforeEach(() => {
- glColumnChart = columnChart.find(GlColumnChart);
+ it('receives data properties needed for proper chart render', () => {
+ expect(chartProps('data').values).toEqual(dataValues);
});
- it('is a Vue instance', () => {
- expect(glColumnChart.isVueInstance()).toBe(true);
+ it('passes the y axis name correctly', () => {
+ expect(chartProps('yAxisTitle')).toBe(yAxisName);
});
- it('receives data properties needed for proper chart render', () => {
- const props = glColumnChart.props();
+ it('passes the y axis configuration correctly', () => {
+ expect(chartProps('option').yAxis).toMatchObject({
+ name: yAxisName,
+ axisLabel: {
+ formatter: expect.any(Function),
+ },
+ scale: false,
+ });
+ });
- expect(props.data).toBe(columnChart.vm.chartData);
- expect(props.option).toBe(columnChart.vm.chartOptions);
+ it('passes a dataZoom configuration', () => {
+ expect(chartProps('option').dataZoom).toBeDefined();
});
});
});
diff --git a/spec/frontend/monitoring/components/charts/empty_chart_spec.js b/spec/frontend/monitoring/components/charts/empty_chart_spec.js
index bbfca27dc5a..d755ed7c104 100644
--- a/spec/frontend/monitoring/components/charts/empty_chart_spec.js
+++ b/spec/frontend/monitoring/components/charts/empty_chart_spec.js
@@ -13,14 +13,6 @@ describe('Empty Chart component', () => {
});
});
- afterEach(() => {
- emptyChart.destroy();
- });
-
- it('render the chart title', () => {
- expect(emptyChart.find({ ref: 'graphTitle' }).text()).toBe(graphTitle);
- });
-
describe('Computed props', () => {
it('sets the height for the svg container', () => {
expect(emptyChart.vm.svgContainerStyle.height).toBe('300px');
diff --git a/spec/frontend/monitoring/components/charts/options_spec.js b/spec/frontend/monitoring/components/charts/options_spec.js
new file mode 100644
index 00000000000..d219a6627bf
--- /dev/null
+++ b/spec/frontend/monitoring/components/charts/options_spec.js
@@ -0,0 +1,60 @@
+import { SUPPORTED_FORMATS } from '~/lib/utils/unit_format';
+import { getYAxisOptions, getTooltipFormatter } from '~/monitoring/components/charts/options';
+
+describe('options spec', () => {
+ describe('getYAxisOptions', () => {
+ it('default options', () => {
+ const options = getYAxisOptions();
+
+ expect(options).toMatchObject({
+ name: expect.any(String),
+ axisLabel: {
+ formatter: expect.any(Function),
+ },
+ scale: true,
+ boundaryGap: [expect.any(Number), expect.any(Number)],
+ });
+
+ expect(options.name).not.toHaveLength(0);
+ });
+
+ it('name options', () => {
+ const yAxisName = 'My axis values';
+ const options = getYAxisOptions({
+ name: yAxisName,
+ });
+
+ expect(options).toMatchObject({
+ name: yAxisName,
+ nameLocation: 'center',
+ nameGap: expect.any(Number),
+ });
+ });
+
+ it('formatter options', () => {
+ const options = getYAxisOptions({
+ format: SUPPORTED_FORMATS.bytes,
+ });
+
+ expect(options.axisLabel.formatter).toEqual(expect.any(Function));
+ expect(options.axisLabel.formatter(1)).toBe('1.00B');
+ });
+ });
+
+ describe('getTooltipFormatter', () => {
+ it('default format', () => {
+ const formatter = getTooltipFormatter();
+
+ expect(formatter).toEqual(expect.any(Function));
+ expect(formatter(1)).toBe('1.000');
+ });
+
+ it('defined format', () => {
+ const formatter = getTooltipFormatter({
+ format: SUPPORTED_FORMATS.bytes,
+ });
+
+ expect(formatter(1)).toBe('1.000B');
+ });
+ });
+});
diff --git a/spec/frontend/monitoring/components/charts/time_series_spec.js b/spec/frontend/monitoring/components/charts/time_series_spec.js
index a911b925b66..02b59d46c71 100644
--- a/spec/frontend/monitoring/components/charts/time_series_spec.js
+++ b/spec/frontend/monitoring/components/charts/time_series_spec.js
@@ -2,6 +2,7 @@ import { shallowMount } from '@vue/test-utils';
import { setTestTimeout } from 'helpers/timeout';
import { GlLink } from '@gitlab/ui';
import { GlAreaChart, GlLineChart, GlChartSeriesLabel } from '@gitlab/ui/dist/charts';
+import { cloneDeep } from 'lodash';
import { shallowWrapperContainsSlotText } from 'helpers/vue_test_utils_helper';
import { chartColorValues } from '~/monitoring/constants';
import { createStore } from '~/monitoring/stores';
@@ -9,17 +10,21 @@ import TimeSeries from '~/monitoring/components/charts/time_series.vue';
import * as types from '~/monitoring/stores/mutation_types';
import {
deploymentData,
- metricsDashboardPayload,
- mockedQueryResultPayload,
+ mockedQueryResultFixture,
+ metricsDashboardViewModel,
mockProjectDir,
mockHost,
} from '../../mock_data';
import * as iconUtils from '~/lib/utils/icon_utils';
-
-const mockWidgets = 'mockWidgets';
+import { getJSONFixture } from '../../../helpers/fixtures';
const mockSvgPathContent = 'mockSvgPathContent';
+const metricsDashboardFixture = getJSONFixture(
+ 'metrics_dashboard/environment_metrics_dashboard.json',
+);
+const metricsDashboardPayload = metricsDashboardFixture.dashboard;
+
jest.mock('lodash/throttle', () =>
// this throttle mock executes immediately
jest.fn(func => {
@@ -34,514 +39,539 @@ jest.mock('~/lib/utils/icon_utils', () => ({
describe('Time series component', () => {
let mockGraphData;
- let makeTimeSeriesChart;
let store;
- beforeEach(() => {
- setTestTimeout(1000);
-
- store = createStore();
-
- store.commit(
- `monitoringDashboard/${types.RECEIVE_METRICS_DATA_SUCCESS}`,
- metricsDashboardPayload,
- );
-
- store.commit(`monitoringDashboard/${types.RECEIVE_DEPLOYMENTS_DATA_SUCCESS}`, deploymentData);
+ const makeTimeSeriesChart = (graphData, type) =>
+ shallowMount(TimeSeries, {
+ propsData: {
+ graphData: { ...graphData, type },
+ deploymentData: store.state.monitoringDashboard.deploymentData,
+ projectPath: `${mockHost}${mockProjectDir}`,
+ },
+ store,
+ });
- // Mock data contains 2 panel groups, with 1 and 2 panels respectively
- store.commit(
- `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
- mockedQueryResultPayload,
- );
+ describe('With a single time series', () => {
+ beforeEach(() => {
+ setTestTimeout(1000);
- // Pick the second panel group and the first panel in it
- [mockGraphData] = store.state.monitoringDashboard.dashboard.panel_groups[1].panels;
+ store = createStore();
- makeTimeSeriesChart = (graphData, type) =>
- shallowMount(TimeSeries, {
- propsData: {
- graphData: { ...graphData, type },
- deploymentData: store.state.monitoringDashboard.deploymentData,
- projectPath: `${mockHost}${mockProjectDir}`,
- },
- slots: {
- default: mockWidgets,
- },
- store,
- });
- });
+ store.commit(
+ `monitoringDashboard/${types.RECEIVE_METRICS_DATA_SUCCESS}`,
+ metricsDashboardPayload,
+ );
- describe('general functions', () => {
- let timeSeriesChart;
+ store.commit(`monitoringDashboard/${types.RECEIVE_DEPLOYMENTS_DATA_SUCCESS}`, deploymentData);
- const findChart = () => timeSeriesChart.find({ ref: 'chart' });
-
- beforeEach(done => {
- timeSeriesChart = makeTimeSeriesChart(mockGraphData, 'area-chart');
- timeSeriesChart.vm.$nextTick(done);
- });
-
- it('renders chart title', () => {
- expect(timeSeriesChart.find('.js-graph-title').text()).toBe(mockGraphData.title);
+ store.commit(
+ `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
+ mockedQueryResultFixture,
+ );
+ // dashboard is a dynamically generated fixture and stored at environment_metrics_dashboard.json
+ [mockGraphData] = store.state.monitoringDashboard.dashboard.panelGroups[1].panels;
});
- it('contains graph widgets from slot', () => {
- expect(timeSeriesChart.find('.js-graph-widgets').text()).toBe(mockWidgets);
- });
+ describe('general functions', () => {
+ let timeSeriesChart;
- it('allows user to override max value label text using prop', () => {
- timeSeriesChart.setProps({ legendMaxText: 'legendMaxText' });
+ const findChart = () => timeSeriesChart.find({ ref: 'chart' });
- return timeSeriesChart.vm.$nextTick().then(() => {
- expect(timeSeriesChart.props().legendMaxText).toBe('legendMaxText');
+ beforeEach(done => {
+ timeSeriesChart = makeTimeSeriesChart(mockGraphData, 'area-chart');
+ timeSeriesChart.vm.$nextTick(done);
});
- });
- it('allows user to override average value label text using prop', () => {
- timeSeriesChart.setProps({ legendAverageText: 'averageText' });
+ it('allows user to override max value label text using prop', () => {
+ timeSeriesChart.setProps({ legendMaxText: 'legendMaxText' });
- return timeSeriesChart.vm.$nextTick().then(() => {
- expect(timeSeriesChart.props().legendAverageText).toBe('averageText');
+ return timeSeriesChart.vm.$nextTick().then(() => {
+ expect(timeSeriesChart.props().legendMaxText).toBe('legendMaxText');
+ });
});
- });
- describe('events', () => {
- describe('datazoom', () => {
- let eChartMock;
- let startValue;
- let endValue;
-
- beforeEach(done => {
- eChartMock = {
- handlers: {},
- getOption: () => ({
- dataZoom: [
- {
- startValue,
- endValue,
- },
- ],
- }),
- off: jest.fn(eChartEvent => {
- delete eChartMock.handlers[eChartEvent];
- }),
- on: jest.fn((eChartEvent, fn) => {
- eChartMock.handlers[eChartEvent] = fn;
- }),
- };
-
- timeSeriesChart = makeTimeSeriesChart(mockGraphData);
- timeSeriesChart.vm.$nextTick(() => {
- findChart().vm.$emit('created', eChartMock);
- done();
- });
- });
+ it('allows user to override average value label text using prop', () => {
+ timeSeriesChart.setProps({ legendAverageText: 'averageText' });
- it('handles datazoom event from chart', () => {
- startValue = 1577836800000; // 2020-01-01T00:00:00.000Z
- endValue = 1577840400000; // 2020-01-01T01:00:00.000Z
- eChartMock.handlers.datazoom();
-
- expect(timeSeriesChart.emitted('datazoom')).toHaveLength(1);
- expect(timeSeriesChart.emitted('datazoom')[0]).toEqual([
- {
- start: new Date(startValue).toISOString(),
- end: new Date(endValue).toISOString(),
- },
- ]);
+ return timeSeriesChart.vm.$nextTick().then(() => {
+ expect(timeSeriesChart.props().legendAverageText).toBe('averageText');
});
});
- });
- describe('methods', () => {
- describe('formatTooltipText', () => {
- let mockDate;
- let mockCommitUrl;
- let generateSeriesData;
+ describe('events', () => {
+ describe('datazoom', () => {
+ let eChartMock;
+ let startValue;
+ let endValue;
- beforeEach(() => {
- mockDate = deploymentData[0].created_at;
- mockCommitUrl = deploymentData[0].commitUrl;
- generateSeriesData = type => ({
- seriesData: [
+ beforeEach(done => {
+ eChartMock = {
+ handlers: {},
+ getOption: () => ({
+ dataZoom: [
+ {
+ startValue,
+ endValue,
+ },
+ ],
+ }),
+ off: jest.fn(eChartEvent => {
+ delete eChartMock.handlers[eChartEvent];
+ }),
+ on: jest.fn((eChartEvent, fn) => {
+ eChartMock.handlers[eChartEvent] = fn;
+ }),
+ };
+
+ timeSeriesChart = makeTimeSeriesChart(mockGraphData);
+ timeSeriesChart.vm.$nextTick(() => {
+ findChart().vm.$emit('created', eChartMock);
+ done();
+ });
+ });
+
+ it('handles datazoom event from chart', () => {
+ startValue = 1577836800000; // 2020-01-01T00:00:00.000Z
+ endValue = 1577840400000; // 2020-01-01T01:00:00.000Z
+ eChartMock.handlers.datazoom();
+
+ expect(timeSeriesChart.emitted('datazoom')).toHaveLength(1);
+ expect(timeSeriesChart.emitted('datazoom')[0]).toEqual([
{
- seriesName: timeSeriesChart.vm.chartData[0].name,
- componentSubType: type,
- value: [mockDate, 5.55555],
- dataIndex: 0,
+ start: new Date(startValue).toISOString(),
+ end: new Date(endValue).toISOString(),
},
- ],
- value: mockDate,
+ ]);
});
});
+ });
- it('does not throw error if data point is outside the zoom range', () => {
- const seriesDataWithoutValue = generateSeriesData('line');
- expect(
- timeSeriesChart.vm.formatTooltipText({
- ...seriesDataWithoutValue,
- seriesData: seriesDataWithoutValue.seriesData.map(data => ({
- ...data,
- value: undefined,
- })),
- }),
- ).toBeUndefined();
- });
+ describe('methods', () => {
+ describe('formatTooltipText', () => {
+ let mockDate;
+ let mockCommitUrl;
+ let generateSeriesData;
- describe('when series is of line type', () => {
- beforeEach(done => {
- timeSeriesChart.vm.formatTooltipText(generateSeriesData('line'));
- timeSeriesChart.vm.$nextTick(done);
+ beforeEach(() => {
+ mockDate = deploymentData[0].created_at;
+ mockCommitUrl = deploymentData[0].commitUrl;
+ generateSeriesData = type => ({
+ seriesData: [
+ {
+ seriesName: timeSeriesChart.vm.chartData[0].name,
+ componentSubType: type,
+ value: [mockDate, 5.55555],
+ dataIndex: 0,
+ },
+ ],
+ value: mockDate,
+ });
});
- it('formats tooltip title', () => {
- expect(timeSeriesChart.vm.tooltip.title).toBe('16 Jul 2019, 10:14AM');
+ it('does not throw error if data point is outside the zoom range', () => {
+ const seriesDataWithoutValue = generateSeriesData('line');
+ expect(
+ timeSeriesChart.vm.formatTooltipText({
+ ...seriesDataWithoutValue,
+ seriesData: seriesDataWithoutValue.seriesData.map(data => ({
+ ...data,
+ value: undefined,
+ })),
+ }),
+ ).toBeUndefined();
});
- it('formats tooltip content', () => {
- const name = 'Pod average';
- const value = '5.556';
- const dataIndex = 0;
- const seriesLabel = timeSeriesChart.find(GlChartSeriesLabel);
+ describe('when series is of line type', () => {
+ beforeEach(done => {
+ timeSeriesChart.vm.formatTooltipText(generateSeriesData('line'));
+ timeSeriesChart.vm.$nextTick(done);
+ });
- expect(seriesLabel.vm.color).toBe('');
- expect(shallowWrapperContainsSlotText(seriesLabel, 'default', name)).toBe(true);
- expect(timeSeriesChart.vm.tooltip.content).toEqual([
- { name, value, dataIndex, color: undefined },
- ]);
+ it('formats tooltip title', () => {
+ expect(timeSeriesChart.vm.tooltip.title).toBe('16 Jul 2019, 10:14AM');
+ });
- expect(
- shallowWrapperContainsSlotText(
- timeSeriesChart.find(GlAreaChart),
- 'tooltipContent',
- value,
- ),
- ).toBe(true);
+ it('formats tooltip content', () => {
+ const name = 'Status Code';
+ const value = '5.556';
+ const dataIndex = 0;
+ const seriesLabel = timeSeriesChart.find(GlChartSeriesLabel);
+
+ expect(seriesLabel.vm.color).toBe('');
+ expect(shallowWrapperContainsSlotText(seriesLabel, 'default', name)).toBe(true);
+ expect(timeSeriesChart.vm.tooltip.content).toEqual([
+ { name, value, dataIndex, color: undefined },
+ ]);
+
+ expect(
+ shallowWrapperContainsSlotText(
+ timeSeriesChart.find(GlAreaChart),
+ 'tooltipContent',
+ value,
+ ),
+ ).toBe(true);
+ });
});
- });
- describe('when series is of scatter type, for deployments', () => {
- beforeEach(() => {
- timeSeriesChart.vm.formatTooltipText(generateSeriesData('scatter'));
- });
+ describe('when series is of scatter type, for deployments', () => {
+ beforeEach(() => {
+ timeSeriesChart.vm.formatTooltipText(generateSeriesData('scatter'));
+ });
- it('formats tooltip title', () => {
- expect(timeSeriesChart.vm.tooltip.title).toBe('16 Jul 2019, 10:14AM');
- });
+ it('formats tooltip title', () => {
+ expect(timeSeriesChart.vm.tooltip.title).toBe('16 Jul 2019, 10:14AM');
+ });
- it('formats tooltip sha', () => {
- expect(timeSeriesChart.vm.tooltip.sha).toBe('f5bcd1d9');
- });
+ it('formats tooltip sha', () => {
+ expect(timeSeriesChart.vm.tooltip.sha).toBe('f5bcd1d9');
+ });
- it('formats tooltip commit url', () => {
- expect(timeSeriesChart.vm.tooltip.commitUrl).toBe(mockCommitUrl);
+ it('formats tooltip commit url', () => {
+ expect(timeSeriesChart.vm.tooltip.commitUrl).toBe(mockCommitUrl);
+ });
});
});
- });
- describe('setSvg', () => {
- const mockSvgName = 'mockSvgName';
+ describe('setSvg', () => {
+ const mockSvgName = 'mockSvgName';
- beforeEach(done => {
- timeSeriesChart.vm.setSvg(mockSvgName);
- timeSeriesChart.vm.$nextTick(done);
- });
+ beforeEach(done => {
+ timeSeriesChart.vm.setSvg(mockSvgName);
+ timeSeriesChart.vm.$nextTick(done);
+ });
- it('gets svg path content', () => {
- expect(iconUtils.getSvgIconPathContent).toHaveBeenCalledWith(mockSvgName);
- });
+ it('gets svg path content', () => {
+ expect(iconUtils.getSvgIconPathContent).toHaveBeenCalledWith(mockSvgName);
+ });
- it('sets svg path content', () => {
- timeSeriesChart.vm.$nextTick(() => {
- expect(timeSeriesChart.vm.svgs[mockSvgName]).toBe(`path://${mockSvgPathContent}`);
+ it('sets svg path content', () => {
+ timeSeriesChart.vm.$nextTick(() => {
+ expect(timeSeriesChart.vm.svgs[mockSvgName]).toBe(`path://${mockSvgPathContent}`);
+ });
});
- });
- it('contains an svg object within an array to properly render icon', () => {
- timeSeriesChart.vm.$nextTick(() => {
- expect(timeSeriesChart.vm.chartOptions.dataZoom).toEqual([
- {
- handleIcon: `path://${mockSvgPathContent}`,
- },
- ]);
+ it('contains an svg object within an array to properly render icon', () => {
+ timeSeriesChart.vm.$nextTick(() => {
+ expect(timeSeriesChart.vm.chartOptions.dataZoom).toEqual([
+ {
+ handleIcon: `path://${mockSvgPathContent}`,
+ },
+ ]);
+ });
});
});
- });
- describe('onResize', () => {
- const mockWidth = 233;
+ describe('onResize', () => {
+ const mockWidth = 233;
- beforeEach(() => {
- jest.spyOn(Element.prototype, 'getBoundingClientRect').mockImplementation(() => ({
- width: mockWidth,
- }));
- timeSeriesChart.vm.onResize();
- });
+ beforeEach(() => {
+ jest.spyOn(Element.prototype, 'getBoundingClientRect').mockImplementation(() => ({
+ width: mockWidth,
+ }));
+ timeSeriesChart.vm.onResize();
+ });
- it('sets area chart width', () => {
- expect(timeSeriesChart.vm.width).toBe(mockWidth);
+ it('sets area chart width', () => {
+ expect(timeSeriesChart.vm.width).toBe(mockWidth);
+ });
});
});
- });
- describe('computed', () => {
- const getChartOptions = () => findChart().props('option');
+ describe('computed', () => {
+ const getChartOptions = () => findChart().props('option');
- describe('chartData', () => {
- let chartData;
- const seriesData = () => chartData[0];
+ describe('chartData', () => {
+ let chartData;
+ const seriesData = () => chartData[0];
- beforeEach(() => {
- ({ chartData } = timeSeriesChart.vm);
- });
+ beforeEach(() => {
+ ({ chartData } = timeSeriesChart.vm);
+ });
- it('utilizes all data points', () => {
- const { values } = mockGraphData.metrics[0].result[0];
+ it('utilizes all data points', () => {
+ const { values } = mockGraphData.metrics[0].result[0];
- expect(chartData.length).toBe(1);
- expect(seriesData().data.length).toBe(values.length);
- });
+ expect(chartData.length).toBe(1);
+ expect(seriesData().data.length).toBe(values.length);
+ });
- it('creates valid data', () => {
- const { data } = seriesData();
+ it('creates valid data', () => {
+ const { data } = seriesData();
- expect(
- data.filter(
- ([time, value]) => new Date(time).getTime() > 0 && typeof value === 'number',
- ).length,
- ).toBe(data.length);
- });
+ expect(
+ data.filter(
+ ([time, value]) => new Date(time).getTime() > 0 && typeof value === 'number',
+ ).length,
+ ).toBe(data.length);
+ });
- it('formats line width correctly', () => {
- expect(chartData[0].lineStyle.width).toBe(2);
- });
+ it('formats line width correctly', () => {
+ expect(chartData[0].lineStyle.width).toBe(2);
+ });
- it('formats line color correctly', () => {
- expect(chartData[0].lineStyle.color).toBe(chartColorValues[0]);
+ it('formats line color correctly', () => {
+ expect(chartData[0].lineStyle.color).toBe(chartColorValues[0]);
+ });
});
- });
- describe('chartOptions', () => {
- describe('are extended by `option`', () => {
- const mockSeriesName = 'Extra series 1';
- const mockOption = {
- option1: 'option1',
- option2: 'option2',
- };
-
- it('arbitrary options', () => {
- timeSeriesChart.setProps({
- option: mockOption,
- });
+ describe('chartOptions', () => {
+ describe('are extended by `option`', () => {
+ const mockSeriesName = 'Extra series 1';
+ const mockOption = {
+ option1: 'option1',
+ option2: 'option2',
+ };
- return timeSeriesChart.vm.$nextTick().then(() => {
- expect(getChartOptions()).toEqual(expect.objectContaining(mockOption));
- });
- });
+ it('arbitrary options', () => {
+ timeSeriesChart.setProps({
+ option: mockOption,
+ });
- it('additional series', () => {
- timeSeriesChart.setProps({
- option: {
- series: [
- {
- name: mockSeriesName,
- },
- ],
- },
+ return timeSeriesChart.vm.$nextTick().then(() => {
+ expect(getChartOptions()).toEqual(expect.objectContaining(mockOption));
+ });
});
- return timeSeriesChart.vm.$nextTick().then(() => {
- const optionSeries = getChartOptions().series;
+ it('additional series', () => {
+ timeSeriesChart.setProps({
+ option: {
+ series: [
+ {
+ name: mockSeriesName,
+ },
+ ],
+ },
+ });
+
+ return timeSeriesChart.vm.$nextTick().then(() => {
+ const optionSeries = getChartOptions().series;
- expect(optionSeries.length).toEqual(2);
- expect(optionSeries[0].name).toEqual(mockSeriesName);
+ expect(optionSeries.length).toEqual(2);
+ expect(optionSeries[0].name).toEqual(mockSeriesName);
+ });
});
- });
- it('additional y axis data', () => {
- const mockCustomYAxisOption = {
- name: 'Custom y axis label',
- axisLabel: {
- formatter: jest.fn(),
- },
- };
+ it('additional y-axis data', () => {
+ const mockCustomYAxisOption = {
+ name: 'Custom y-axis label',
+ axisLabel: {
+ formatter: jest.fn(),
+ },
+ };
- timeSeriesChart.setProps({
- option: {
- yAxis: mockCustomYAxisOption,
- },
- });
+ timeSeriesChart.setProps({
+ option: {
+ yAxis: mockCustomYAxisOption,
+ },
+ });
- return timeSeriesChart.vm.$nextTick().then(() => {
- const { yAxis } = getChartOptions();
+ return timeSeriesChart.vm.$nextTick().then(() => {
+ const { yAxis } = getChartOptions();
- expect(yAxis[0]).toMatchObject(mockCustomYAxisOption);
+ expect(yAxis[0]).toMatchObject(mockCustomYAxisOption);
+ });
});
- });
- it('additional x axis data', () => {
- const mockCustomXAxisOption = {
- name: 'Custom x axis label',
- };
+ it('additional x axis data', () => {
+ const mockCustomXAxisOption = {
+ name: 'Custom x axis label',
+ };
- timeSeriesChart.setProps({
- option: {
- xAxis: mockCustomXAxisOption,
- },
- });
+ timeSeriesChart.setProps({
+ option: {
+ xAxis: mockCustomXAxisOption,
+ },
+ });
- return timeSeriesChart.vm.$nextTick().then(() => {
- const { xAxis } = getChartOptions();
+ return timeSeriesChart.vm.$nextTick().then(() => {
+ const { xAxis } = getChartOptions();
- expect(xAxis).toMatchObject(mockCustomXAxisOption);
+ expect(xAxis).toMatchObject(mockCustomXAxisOption);
+ });
});
});
- });
- describe('yAxis formatter', () => {
- let dataFormatter;
- let deploymentFormatter;
+ describe('yAxis formatter', () => {
+ let dataFormatter;
+ let deploymentFormatter;
- beforeEach(() => {
- dataFormatter = getChartOptions().yAxis[0].axisLabel.formatter;
- deploymentFormatter = getChartOptions().yAxis[1].axisLabel.formatter;
- });
+ beforeEach(() => {
+ dataFormatter = getChartOptions().yAxis[0].axisLabel.formatter;
+ deploymentFormatter = getChartOptions().yAxis[1].axisLabel.formatter;
+ });
- it('rounds to 3 decimal places', () => {
- expect(dataFormatter(0.88888)).toBe('0.889');
- });
+ it('formats and rounds to 2 decimal places', () => {
+ expect(dataFormatter(0.88888)).toBe('0.89');
+ });
- it('deployment formatter is set as is required to display a tooltip', () => {
- expect(deploymentFormatter).toEqual(expect.any(Function));
+    it('deployment formatter is set, as required to display a tooltip', () => {
+ expect(deploymentFormatter).toEqual(expect.any(Function));
+ });
});
});
- });
- describe('deploymentSeries', () => {
- it('utilizes deployment data', () => {
- expect(timeSeriesChart.vm.deploymentSeries.yAxisIndex).toBe(1); // same as deployment y axis
- expect(timeSeriesChart.vm.deploymentSeries.data).toEqual([
- ['2019-07-16T10:14:25.589Z', expect.any(Number)],
- ['2019-07-16T11:14:25.589Z', expect.any(Number)],
- ['2019-07-16T12:14:25.589Z', expect.any(Number)],
- ]);
+ describe('deploymentSeries', () => {
+ it('utilizes deployment data', () => {
+ expect(timeSeriesChart.vm.deploymentSeries.yAxisIndex).toBe(1); // same as deployment y axis
+ expect(timeSeriesChart.vm.deploymentSeries.data).toEqual([
+ ['2019-07-16T10:14:25.589Z', expect.any(Number)],
+ ['2019-07-16T11:14:25.589Z', expect.any(Number)],
+ ['2019-07-16T12:14:25.589Z', expect.any(Number)],
+ ]);
- expect(timeSeriesChart.vm.deploymentSeries.symbolSize).toBe(14);
+ expect(timeSeriesChart.vm.deploymentSeries.symbolSize).toBe(14);
+ });
});
- });
- describe('yAxisLabel', () => {
- it('y axis is configured correctly', () => {
- const { yAxis } = getChartOptions();
+ describe('yAxisLabel', () => {
+ it('y-axis is configured correctly', () => {
+ const { yAxis } = getChartOptions();
- expect(yAxis).toHaveLength(2);
+ expect(yAxis).toHaveLength(2);
- const [dataAxis, deploymentAxis] = yAxis;
+ const [dataAxis, deploymentAxis] = yAxis;
- expect(dataAxis.boundaryGap).toHaveLength(2);
- expect(dataAxis.scale).toBe(true);
+ expect(dataAxis.boundaryGap).toHaveLength(2);
+ expect(dataAxis.scale).toBe(true);
- expect(deploymentAxis.show).toBe(false);
- expect(deploymentAxis.min).toEqual(expect.any(Number));
- expect(deploymentAxis.max).toEqual(expect.any(Number));
- expect(deploymentAxis.min).toBeLessThan(deploymentAxis.max);
- });
+ expect(deploymentAxis.show).toBe(false);
+ expect(deploymentAxis.min).toEqual(expect.any(Number));
+ expect(deploymentAxis.max).toEqual(expect.any(Number));
+ expect(deploymentAxis.min).toBeLessThan(deploymentAxis.max);
+ });
- it('constructs a label for the chart y-axis', () => {
- const { yAxis } = getChartOptions();
+ it('constructs a label for the chart y-axis', () => {
+ const { yAxis } = getChartOptions();
- expect(yAxis[0].name).toBe('Memory Used per Pod');
+ expect(yAxis[0].name).toBe('Requests / Sec');
+ });
});
});
- });
- afterEach(() => {
- timeSeriesChart.destroy();
+ afterEach(() => {
+ timeSeriesChart.destroy();
+ });
});
- });
- describe('wrapped components', () => {
- const glChartComponents = [
- {
- chartType: 'area-chart',
- component: GlAreaChart,
- },
- {
- chartType: 'line-chart',
- component: GlLineChart,
- },
- ];
+ describe('wrapped components', () => {
+ const glChartComponents = [
+ {
+ chartType: 'area-chart',
+ component: GlAreaChart,
+ },
+ {
+ chartType: 'line-chart',
+ component: GlLineChart,
+ },
+ ];
- glChartComponents.forEach(dynamicComponent => {
- describe(`GitLab UI: ${dynamicComponent.chartType}`, () => {
- let timeSeriesAreaChart;
- const findChartComponent = () => timeSeriesAreaChart.find(dynamicComponent.component);
+ glChartComponents.forEach(dynamicComponent => {
+ describe(`GitLab UI: ${dynamicComponent.chartType}`, () => {
+ let timeSeriesAreaChart;
+ const findChartComponent = () => timeSeriesAreaChart.find(dynamicComponent.component);
- beforeEach(done => {
- timeSeriesAreaChart = makeTimeSeriesChart(mockGraphData, dynamicComponent.chartType);
- timeSeriesAreaChart.vm.$nextTick(done);
- });
+ beforeEach(done => {
+ timeSeriesAreaChart = makeTimeSeriesChart(mockGraphData, dynamicComponent.chartType);
+ timeSeriesAreaChart.vm.$nextTick(done);
+ });
- afterEach(() => {
- timeSeriesAreaChart.destroy();
- });
+ afterEach(() => {
+ timeSeriesAreaChart.destroy();
+ });
- it('is a Vue instance', () => {
- expect(findChartComponent().exists()).toBe(true);
- expect(findChartComponent().isVueInstance()).toBe(true);
- });
+ it('is a Vue instance', () => {
+ expect(findChartComponent().exists()).toBe(true);
+ expect(findChartComponent().isVueInstance()).toBe(true);
+ });
- it('receives data properties needed for proper chart render', () => {
- const props = findChartComponent().props();
+ it('receives data properties needed for proper chart render', () => {
+ const props = findChartComponent().props();
- expect(props.data).toBe(timeSeriesAreaChart.vm.chartData);
- expect(props.option).toBe(timeSeriesAreaChart.vm.chartOptions);
- expect(props.formatTooltipText).toBe(timeSeriesAreaChart.vm.formatTooltipText);
- expect(props.thresholds).toBe(timeSeriesAreaChart.vm.thresholds);
- });
+ expect(props.data).toBe(timeSeriesAreaChart.vm.chartData);
+ expect(props.option).toBe(timeSeriesAreaChart.vm.chartOptions);
+ expect(props.formatTooltipText).toBe(timeSeriesAreaChart.vm.formatTooltipText);
+ expect(props.thresholds).toBe(timeSeriesAreaChart.vm.thresholds);
+ });
- it('recieves a tooltip title', done => {
- const mockTitle = 'mockTitle';
- timeSeriesAreaChart.vm.tooltip.title = mockTitle;
+      it('receives a tooltip title', done => {
+ const mockTitle = 'mockTitle';
+ timeSeriesAreaChart.vm.tooltip.title = mockTitle;
- timeSeriesAreaChart.vm.$nextTick(() => {
- expect(
- shallowWrapperContainsSlotText(findChartComponent(), 'tooltipTitle', mockTitle),
- ).toBe(true);
- done();
+ timeSeriesAreaChart.vm.$nextTick(() => {
+ expect(
+ shallowWrapperContainsSlotText(findChartComponent(), 'tooltipTitle', mockTitle),
+ ).toBe(true);
+ done();
+ });
});
- });
- describe('when tooltip is showing deployment data', () => {
- const mockSha = 'mockSha';
- const commitUrl = `${mockProjectDir}/-/commit/${mockSha}`;
+ describe('when tooltip is showing deployment data', () => {
+ const mockSha = 'mockSha';
+ const commitUrl = `${mockProjectDir}/-/commit/${mockSha}`;
- beforeEach(done => {
- timeSeriesAreaChart.vm.tooltip.isDeployment = true;
- timeSeriesAreaChart.vm.$nextTick(done);
- });
+ beforeEach(done => {
+ timeSeriesAreaChart.vm.tooltip.isDeployment = true;
+ timeSeriesAreaChart.vm.$nextTick(done);
+ });
- it('uses deployment title', () => {
- expect(
- shallowWrapperContainsSlotText(findChartComponent(), 'tooltipTitle', 'Deployed'),
- ).toBe(true);
- });
+ it('uses deployment title', () => {
+ expect(
+ shallowWrapperContainsSlotText(findChartComponent(), 'tooltipTitle', 'Deployed'),
+ ).toBe(true);
+ });
- it('renders clickable commit sha in tooltip content', done => {
- timeSeriesAreaChart.vm.tooltip.sha = mockSha;
- timeSeriesAreaChart.vm.tooltip.commitUrl = commitUrl;
+ it('renders clickable commit sha in tooltip content', done => {
+ timeSeriesAreaChart.vm.tooltip.sha = mockSha;
+ timeSeriesAreaChart.vm.tooltip.commitUrl = commitUrl;
- timeSeriesAreaChart.vm.$nextTick(() => {
- const commitLink = timeSeriesAreaChart.find(GlLink);
+ timeSeriesAreaChart.vm.$nextTick(() => {
+ const commitLink = timeSeriesAreaChart.find(GlLink);
- expect(shallowWrapperContainsSlotText(commitLink, 'default', mockSha)).toBe(true);
- expect(commitLink.attributes('href')).toEqual(commitUrl);
- done();
+ expect(shallowWrapperContainsSlotText(commitLink, 'default', mockSha)).toBe(true);
+ expect(commitLink.attributes('href')).toEqual(commitUrl);
+ done();
+ });
});
});
});
});
});
});
+
+ describe('with multiple time series', () => {
+ describe('General functions', () => {
+ let timeSeriesChart;
+
+ beforeEach(done => {
+ store = createStore();
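+        // panels[3] of the fixture view model is the multi-metric panel; each metric gets the same mocked result so every series has data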
+ const graphData = cloneDeep(metricsDashboardViewModel.panelGroups[0].panels[3]);
+ graphData.metrics.forEach(metric =>
+ Object.assign(metric, { result: mockedQueryResultFixture.result }),
+ );
+
+ timeSeriesChart = makeTimeSeriesChart(graphData, 'area-chart');
+ timeSeriesChart.vm.$nextTick(done);
+ });
+
+ afterEach(() => {
+ timeSeriesChart.destroy();
+ });
+
+ describe('computed', () => {
+ let chartData;
+
+ beforeEach(() => {
+ ({ chartData } = timeSeriesChart.vm);
+ });
+
+ it('should contain different colors for each time series', () => {
+ expect(chartData[0].lineStyle.color).toBe('#1f78d1');
+ expect(chartData[1].lineStyle.color).toBe('#1aaa55');
+ expect(chartData[2].lineStyle.color).toBe('#fc9403');
+ expect(chartData[3].lineStyle.color).toBe('#6d49cb');
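+        // the color palette wraps around, so the fifth series reuses the first color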
+ expect(chartData[4].lineStyle.color).toBe('#1f78d1');
+ });
+ });
+ });
+ });
});
diff --git a/spec/frontend/monitoring/components/dashboard_spec.js b/spec/frontend/monitoring/components/dashboard_spec.js
index 15c82242262..b9d838085a1 100644
--- a/spec/frontend/monitoring/components/dashboard_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_spec.js
@@ -6,6 +6,7 @@ import axios from '~/lib/utils/axios_utils';
import statusCodes from '~/lib/utils/http_status';
import { metricStates } from '~/monitoring/constants';
import Dashboard from '~/monitoring/components/dashboard.vue';
+import { getJSONFixture } from '../../../../spec/frontend/helpers/fixtures';
import DateTimePicker from '~/vue_shared/components/date_time_picker/date_time_picker.vue';
import DashboardsDropdown from '~/monitoring/components/dashboards_dropdown.vue';
@@ -15,14 +16,19 @@ import { createStore } from '~/monitoring/stores';
import * as types from '~/monitoring/stores/mutation_types';
import { setupComponentStore, propsData } from '../init_utils';
import {
- metricsDashboardPayload,
- mockedQueryResultPayload,
+ metricsDashboardViewModel,
environmentData,
dashboardGitResponse,
+ mockedQueryResultFixture,
} from '../mock_data';
const localVue = createLocalVue();
-const expectedPanelCount = 2;
+const expectedPanelCount = 4;
+
+const metricsDashboardFixture = getJSONFixture(
+ 'metrics_dashboard/environment_metrics_dashboard.json',
+);
+const metricsDashboardPayload = metricsDashboardFixture.dashboard;
describe('Dashboard', () => {
let store;
@@ -195,7 +201,7 @@ describe('Dashboard', () => {
);
wrapper.vm.$store.commit(
`monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
- mockedQueryResultPayload,
+ mockedQueryResultFixture,
);
return wrapper.vm.$nextTick().then(() => {
@@ -213,6 +219,19 @@ describe('Dashboard', () => {
});
});
+ it('renders the refresh dashboard button', () => {
+ createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });
+
+ setupComponentStore(wrapper);
+
+ return wrapper.vm.$nextTick().then(() => {
+ const refreshBtn = wrapper.findAll({ ref: 'refreshDashboardBtn' });
+
+ expect(refreshBtn).toHaveLength(1);
+ expect(refreshBtn.is(GlButton)).toBe(true);
+ });
+ });
+
describe('when one of the metrics is missing', () => {
beforeEach(() => {
createShallowWrapper({ hasMetrics: true });
@@ -366,7 +385,7 @@ describe('Dashboard', () => {
it('metrics can be swapped', () => {
const firstDraggable = findDraggables().at(0);
- const mockMetrics = [...metricsDashboardPayload.panel_groups[1].panels];
+ const mockMetrics = [...metricsDashboardViewModel.panelGroups[0].panels];
const firstTitle = mockMetrics[0].title;
const secondTitle = mockMetrics[1].title;
@@ -376,7 +395,7 @@ describe('Dashboard', () => {
firstDraggable.vm.$emit('input', mockMetrics);
return wrapper.vm.$nextTick(() => {
- const { panels } = wrapper.vm.dashboard.panel_groups[1];
+ const { panels } = wrapper.vm.dashboard.panelGroups[0];
expect(panels[1].title).toEqual(firstTitle);
expect(panels[0].title).toEqual(secondTitle);
diff --git a/spec/frontend/monitoring/components/dashboard_url_time_spec.js b/spec/frontend/monitoring/components/dashboard_url_time_spec.js
index 161c64dd74b..bf5a11a536e 100644
--- a/spec/frontend/monitoring/components/dashboard_url_time_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_url_time_spec.js
@@ -7,7 +7,7 @@ import { mockProjectDir } from '../mock_data';
import Dashboard from '~/monitoring/components/dashboard.vue';
import { createStore } from '~/monitoring/stores';
-import { defaultTimeRange } from '~/monitoring/constants';
+import { defaultTimeRange } from '~/vue_shared/constants';
import { propsData } from '../init_utils';
jest.mock('~/flash');
diff --git a/spec/frontend/monitoring/components/panel_type_spec.js b/spec/frontend/monitoring/components/panel_type_spec.js
index 0d79babf386..058c201d325 100644
--- a/spec/frontend/monitoring/components/panel_type_spec.js
+++ b/spec/frontend/monitoring/components/panel_type_spec.js
@@ -74,6 +74,18 @@ describe('Panel Type component', () => {
glEmptyChart = wrapper.find(EmptyChart);
});
+ it('renders the chart title', () => {
+ expect(wrapper.find({ ref: 'graphTitle' }).text()).toBe(graphDataNoResult.title);
+ });
+
+  it('does not render the download csv link', () => {
+ expect(wrapper.find({ ref: 'downloadCsvLink' }).exists()).toBe(false);
+ });
+
+ it('does not contain graph widgets', () => {
+ expect(wrapper.find('.js-graph-widgets').exists()).toBe(false);
+ });
+
it('is a Vue instance', () => {
expect(glEmptyChart.isVueInstance()).toBe(true);
});
@@ -97,6 +109,15 @@ describe('Panel Type component', () => {
wrapper.destroy();
});
+ it('renders the chart title', () => {
+ expect(wrapper.find({ ref: 'graphTitle' }).text()).toBe(graphDataPrometheusQueryRange.title);
+ });
+
+ it('contains graph widgets', () => {
+ expect(wrapper.find('.js-graph-widgets').exists()).toBe(true);
+ expect(wrapper.find({ ref: 'downloadCsvLink' }).exists()).toBe(true);
+ });
+
it('sets no clipboard copy link on dropdown by default', () => {
expect(findCopyLink().exists()).toBe(false);
});
@@ -127,6 +148,82 @@ describe('Panel Type component', () => {
});
});
+ describe('Edit custom metric dropdown item', () => {
+ const findEditCustomMetricLink = () => wrapper.find({ ref: 'editMetricLink' });
+
+ beforeEach(() => {
+ createWrapper({
+ graphData: {
+ ...graphDataPrometheusQueryRange,
+ },
+ });
+
+ return wrapper.vm.$nextTick();
+ });
+
+ it('is not present if the panel is not a custom metric', () => {
+ expect(findEditCustomMetricLink().exists()).toBe(false);
+ });
+
+ it('is present when the panel contains an edit_path property', () => {
+ wrapper.setProps({
+ graphData: {
+ ...graphDataPrometheusQueryRange,
+ metrics: [
+ {
+ ...graphDataPrometheusQueryRange.metrics[0],
+ edit_path: '/root/kubernetes-gke-project/prometheus/metrics/23/edit',
+ },
+ ],
+ },
+ });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(findEditCustomMetricLink().exists()).toBe(true);
+ });
+ });
+
+ it('shows an "Edit metric" link for a panel with a single metric', () => {
+ wrapper.setProps({
+ graphData: {
+ ...graphDataPrometheusQueryRange,
+ metrics: [
+ {
+ ...graphDataPrometheusQueryRange.metrics[0],
+ edit_path: '/root/kubernetes-gke-project/prometheus/metrics/23/edit',
+ },
+ ],
+ },
+ });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(findEditCustomMetricLink().text()).toBe('Edit metric');
+ });
+ });
+
+ it('shows an "Edit metrics" link for a panel with multiple metrics', () => {
+ wrapper.setProps({
+ graphData: {
+ ...graphDataPrometheusQueryRange,
+ metrics: [
+ {
+ ...graphDataPrometheusQueryRange.metrics[0],
+ edit_path: '/root/kubernetes-gke-project/prometheus/metrics/23/edit',
+ },
+ {
+ ...graphDataPrometheusQueryRange.metrics[0],
+ edit_path: '/root/kubernetes-gke-project/prometheus/metrics/23/edit',
+ },
+ ],
+ },
+ });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(findEditCustomMetricLink().text()).toBe('Edit metrics');
+ });
+ });
+ });
+
describe('View Logs dropdown item', () => {
const mockLogsPath = '/path/to/logs';
const mockTimeRange = { duration: { seconds: 120 } };
diff --git a/spec/frontend/monitoring/embed/embed_spec.js b/spec/frontend/monitoring/embed/embed_spec.js
index 3bb70a02bd9..850092c4a72 100644
--- a/spec/frontend/monitoring/embed/embed_spec.js
+++ b/spec/frontend/monitoring/embed/embed_spec.js
@@ -69,8 +69,8 @@ describe('Embed', () => {
describe('metrics are available', () => {
beforeEach(() => {
- store.state.monitoringDashboard.dashboard.panel_groups = groups;
- store.state.monitoringDashboard.dashboard.panel_groups[0].panels = metricsData;
+ store.state.monitoringDashboard.dashboard.panelGroups = groups;
+ store.state.monitoringDashboard.dashboard.panelGroups[0].panels = metricsData;
metricsWithDataGetter.mockReturnValue(metricsWithData);
diff --git a/spec/frontend/monitoring/embed/mock_data.js b/spec/frontend/monitoring/embed/mock_data.js
index 1dc31846034..da8eb8c0fc4 100644
--- a/spec/frontend/monitoring/embed/mock_data.js
+++ b/spec/frontend/monitoring/embed/mock_data.js
@@ -1,4 +1,4 @@
-export const metricsWithData = [15, 16];
+export const metricsWithData = ['15_metric_a', '16_metric_b'];
export const groups = [
{
@@ -7,41 +7,12 @@ export const groups = [
title: 'Memory Usage (Total)',
type: 'area-chart',
y_label: 'Total Memory Used',
- weight: 4,
- metrics: [
- {
- id: 'system_metrics_kubernetes_container_memory_total',
- metric_id: 15,
- },
- ],
- },
- {
- title: 'Core Usage (Total)',
- type: 'area-chart',
- y_label: 'Total Cores',
- weight: 3,
- metrics: [
- {
- id: 'system_metrics_kubernetes_container_cores_total',
- metric_id: 16,
- },
- ],
+ metrics: null,
},
],
},
];
-export const metrics = [
- {
- id: 'system_metrics_kubernetes_container_memory_total',
- metric_id: 15,
- },
- {
- id: 'system_metrics_kubernetes_container_cores_total',
- metric_id: 16,
- },
-];
-
const result = [
{
values: [
@@ -60,7 +31,7 @@ export const metricsData = [
{
metrics: [
{
- metric_id: 15,
+ metricId: '15_metric_a',
result,
},
],
@@ -68,7 +39,7 @@ export const metricsData = [
{
metrics: [
{
- metric_id: 16,
+ metricId: '16_metric_b',
result,
},
],
diff --git a/spec/frontend/monitoring/mock_data.js b/spec/frontend/monitoring/mock_data.js
index 4d83933f2b8..c98b6a9592f 100644
--- a/spec/frontend/monitoring/mock_data.js
+++ b/spec/frontend/monitoring/mock_data.js
@@ -1,3 +1,5 @@
+import { mapToDashboardViewModel } from '~/monitoring/stores/utils';
+
// This import path needs to be relative for now because this mock data is used in
// Karma specs too, where the helpers/test_constants alias can not be resolved
import { TEST_HOST } from '../helpers/test_constants';
@@ -240,95 +242,75 @@ export const metricsNewGroupsAPIResponse = [
},
];
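+// Prometheus-style query_range result: a single series of [unix timestamp, value string] pairs, shared by the mocked query payloads below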
+const metricsResult = [
+ {
+ metric: {},
+ values: [
+ [1563272065.589, '10.396484375'],
+ [1563272125.589, '10.333984375'],
+ [1563272185.589, '10.333984375'],
+ [1563272245.589, '10.333984375'],
+ [1563272305.589, '10.333984375'],
+ [1563272365.589, '10.333984375'],
+ [1563272425.589, '10.38671875'],
+ [1563272485.589, '10.333984375'],
+ [1563272545.589, '10.333984375'],
+ [1563272605.589, '10.333984375'],
+ [1563272665.589, '10.333984375'],
+ [1563272725.589, '10.333984375'],
+ [1563272785.589, '10.396484375'],
+ [1563272845.589, '10.333984375'],
+ [1563272905.589, '10.333984375'],
+ [1563272965.589, '10.3984375'],
+ [1563273025.589, '10.337890625'],
+ [1563273085.589, '10.34765625'],
+ [1563273145.589, '10.337890625'],
+ [1563273205.589, '10.337890625'],
+ [1563273265.589, '10.337890625'],
+ [1563273325.589, '10.337890625'],
+ [1563273385.589, '10.337890625'],
+ [1563273445.589, '10.337890625'],
+ [1563273505.589, '10.337890625'],
+ [1563273565.589, '10.337890625'],
+ [1563273625.589, '10.337890625'],
+ [1563273685.589, '10.337890625'],
+ [1563273745.589, '10.337890625'],
+ [1563273805.589, '10.337890625'],
+ [1563273865.589, '10.390625'],
+ [1563273925.589, '10.390625'],
+ ],
+ },
+];
+
export const mockedEmptyResult = {
metricId: '1_response_metrics_nginx_ingress_throughput_status_code',
result: [],
};
+export const mockedEmptyThroughputResult = {
+ metricId: 'undefined_response_metrics_nginx_ingress_16_throughput_status_code',
+ result: [],
+};
+
export const mockedQueryResultPayload = {
- metricId: '17_system_metrics_kubernetes_container_memory_average',
- result: [
- {
- metric: {},
- values: [
- [1563272065.589, '10.396484375'],
- [1563272125.589, '10.333984375'],
- [1563272185.589, '10.333984375'],
- [1563272245.589, '10.333984375'],
- [1563272305.589, '10.333984375'],
- [1563272365.589, '10.333984375'],
- [1563272425.589, '10.38671875'],
- [1563272485.589, '10.333984375'],
- [1563272545.589, '10.333984375'],
- [1563272605.589, '10.333984375'],
- [1563272665.589, '10.333984375'],
- [1563272725.589, '10.333984375'],
- [1563272785.589, '10.396484375'],
- [1563272845.589, '10.333984375'],
- [1563272905.589, '10.333984375'],
- [1563272965.589, '10.3984375'],
- [1563273025.589, '10.337890625'],
- [1563273085.589, '10.34765625'],
- [1563273145.589, '10.337890625'],
- [1563273205.589, '10.337890625'],
- [1563273265.589, '10.337890625'],
- [1563273325.589, '10.337890625'],
- [1563273385.589, '10.337890625'],
- [1563273445.589, '10.337890625'],
- [1563273505.589, '10.337890625'],
- [1563273565.589, '10.337890625'],
- [1563273625.589, '10.337890625'],
- [1563273685.589, '10.337890625'],
- [1563273745.589, '10.337890625'],
- [1563273805.589, '10.337890625'],
- [1563273865.589, '10.390625'],
- [1563273925.589, '10.390625'],
- ],
- },
- ],
+ metricId: '12_system_metrics_kubernetes_container_memory_total',
+ result: metricsResult,
};
export const mockedQueryResultPayloadCoresTotal = {
metricId: '13_system_metrics_kubernetes_container_cores_total',
- result: [
- {
- metric: {},
- values: [
- [1563272065.589, '9.396484375'],
- [1563272125.589, '9.333984375'],
- [1563272185.589, '9.333984375'],
- [1563272245.589, '9.333984375'],
- [1563272305.589, '9.333984375'],
- [1563272365.589, '9.333984375'],
- [1563272425.589, '9.38671875'],
- [1563272485.589, '9.333984375'],
- [1563272545.589, '9.333984375'],
- [1563272605.589, '9.333984375'],
- [1563272665.589, '9.333984375'],
- [1563272725.589, '9.333984375'],
- [1563272785.589, '9.396484375'],
- [1563272845.589, '9.333984375'],
- [1563272905.589, '9.333984375'],
- [1563272965.589, '9.3984375'],
- [1563273025.589, '9.337890625'],
- [1563273085.589, '9.34765625'],
- [1563273145.589, '9.337890625'],
- [1563273205.589, '9.337890625'],
- [1563273265.589, '9.337890625'],
- [1563273325.589, '9.337890625'],
- [1563273385.589, '9.337890625'],
- [1563273445.589, '9.337890625'],
- [1563273505.589, '9.337890625'],
- [1563273565.589, '9.337890625'],
- [1563273625.589, '9.337890625'],
- [1563273685.589, '9.337890625'],
- [1563273745.589, '9.337890625'],
- [1563273805.589, '9.337890625'],
- [1563273865.589, '9.390625'],
- [1563273925.589, '9.390625'],
- ],
- },
- ],
+ result: metricsResult,
+};
+
+export const mockedQueryResultFixture = {
+ // First metric in fixture `metrics_dashboard/environment_metrics_dashboard.json`
+ metricId: 'undefined_response_metrics_nginx_ingress_throughput_status_code',
+ result: metricsResult,
+};
+
+export const mockedQueryResultFixtureStatusCode = {
+ metricId: 'undefined_response_metrics_nginx_ingress_latency_pod_average',
+ result: metricsResult,
};
const extraEnvironmentData = new Array(15).fill(null).map((_, idx) => ({
@@ -378,146 +360,149 @@ export const environmentData = [
},
].concat(extraEnvironmentData);
-export const metricsDashboardResponse = {
- dashboard: {
- dashboard: 'Environment metrics',
- priority: 1,
- panel_groups: [
- {
- group: 'System metrics (Kubernetes)',
- priority: 5,
- panels: [
- {
- title: 'Memory Usage (Total)',
- type: 'area-chart',
- y_label: 'Total Memory Used',
- weight: 4,
- metrics: [
- {
- id: 'system_metrics_kubernetes_container_memory_total',
- query_range:
- 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) /1024/1024/1024',
- label: 'Total',
- unit: 'GB',
- metric_id: 12,
- prometheus_endpoint_path: 'http://test',
- },
- ],
- },
- {
- title: 'Core Usage (Total)',
- type: 'area-chart',
- y_label: 'Total Cores',
- weight: 3,
- metrics: [
- {
- id: 'system_metrics_kubernetes_container_cores_total',
- query_range:
- 'avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job)',
- label: 'Total',
- unit: 'cores',
- metric_id: 13,
- },
- ],
- },
- {
- title: 'Memory Usage (Pod average)',
- type: 'line-chart',
- y_label: 'Memory Used per Pod',
- weight: 2,
- metrics: [
- {
- id: 'system_metrics_kubernetes_container_memory_average',
- query_range:
- 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024',
- label: 'Pod average',
- unit: 'MB',
- metric_id: 14,
- },
- ],
- },
- ],
- },
- ],
- },
- status: 'success',
-};
-
export const metricsDashboardPayload = {
dashboard: 'Environment metrics',
+ priority: 1,
panel_groups: [
{
- group: 'Response metrics (NGINX Ingress VTS)',
- priority: 10,
+ group: 'System metrics (Kubernetes)',
+ priority: 5,
panels: [
{
+ title: 'Memory Usage (Total)',
+ type: 'area-chart',
+ y_label: 'Total Memory Used',
+ weight: 4,
+ y_axis: {
+ format: 'megabytes',
+ },
metrics: [
{
- id: 'response_metrics_nginx_ingress_throughput_status_code',
- label: 'Status Code',
- metric_id: 1,
- prometheus_endpoint_path:
- '/root/autodevops-deploy/environments/32/prometheus/api/v1/query_range?query=sum%28rate%28nginx_upstream_responses_total%7Bupstream%3D~%22%25%7Bkube_namespace%7D-%25%7Bci_environment_slug%7D-.%2A%22%7D%5B2m%5D%29%29+by+%28status_code%29',
+ id: 'system_metrics_kubernetes_container_memory_total',
query_range:
- 'sum(rate(nginx_upstream_responses_total{upstream=~"%{kube_namespace}-%{ci_environment_slug}-.*"}[2m])) by (status_code)',
- unit: 'req / sec',
+ 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) /1000/1000',
+ label: 'Total',
+ unit: 'MB',
+ metric_id: 12,
+ prometheus_endpoint_path: 'http://test',
},
],
- title: 'Throughput',
+ },
+ {
+ title: 'Core Usage (Total)',
type: 'area-chart',
- weight: 1,
- y_label: 'Requests / Sec',
+ y_label: 'Total Cores',
+ weight: 3,
+ metrics: [
+ {
+ id: 'system_metrics_kubernetes_container_cores_total',
+ query_range:
+ 'avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job)',
+ label: 'Total',
+ unit: 'cores',
+ metric_id: 13,
+ },
+ ],
},
- ],
- },
- {
- group: 'System metrics (Kubernetes)',
- priority: 5,
- panels: [
{
title: 'Memory Usage (Pod average)',
- type: 'area-chart',
+ type: 'line-chart',
y_label: 'Memory Used per Pod',
weight: 2,
metrics: [
{
id: 'system_metrics_kubernetes_container_memory_average',
query_range:
- 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024',
+ 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024',
label: 'Pod average',
unit: 'MB',
- metric_id: 17,
- prometheus_endpoint_path:
- '/root/autodevops-deploy/environments/32/prometheus/api/v1/query_range?query=avg%28sum%28container_memory_usage_bytes%7Bcontainer_name%21%3D%22POD%22%2Cpod_name%3D~%22%5E%25%7Bci_environment_slug%7D-%28%5B%5Ec%5D.%2A%7Cc%28%5B%5Ea%5D%7Ca%28%5B%5En%5D%7Cn%28%5B%5Ea%5D%7Ca%28%5B%5Er%5D%7Cr%5B%5Ey%5D%29%29%29%29.%2A%7C%29-%28.%2A%29%22%2Cnamespace%3D%22%25%7Bkube_namespace%7D%22%7D%29+by+%28job%29%29+without+%28job%29+%2F+count%28avg%28container_memory_usage_bytes%7Bcontainer_name%21%3D%22POD%22%2Cpod_name%3D~%22%5E%25%7Bci_environment_slug%7D-%28%5B%5Ec%5D.%2A%7Cc%28%5B%5Ea%5D%7Ca%28%5B%5En%5D%7Cn%28%5B%5Ea%5D%7Ca%28%5B%5Er%5D%7Cr%5B%5Ey%5D%29%29%29%29.%2A%7C%29-%28.%2A%29%22%2Cnamespace%3D%22%25%7Bkube_namespace%7D%22%7D%29+without+%28job%29%29+%2F1024%2F1024',
- appearance: {
- line: {
- width: 2,
- },
- },
+ metric_id: 14,
},
],
},
{
- title: 'Core Usage (Total)',
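+        // panel with five metrics, used by the multiple time series specs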
+ title: 'memories',
type: 'area-chart',
- y_label: 'Total Cores',
- weight: 3,
+ y_label: 'memories',
metrics: [
{
- id: 'system_metrics_kubernetes_container_cores_total',
+ id: 'metric_of_ages_1000',
+ label: 'memory_1000',
+ unit: 'count',
+ prometheus_endpoint_path: '/root',
+ metric_id: 20,
+ },
+ {
+ id: 'metric_of_ages_1001',
+ label: 'memory_1000',
+ unit: 'count',
+ prometheus_endpoint_path: '/root',
+ metric_id: 21,
+ },
+ {
+ id: 'metric_of_ages_1002',
+ label: 'memory_1000',
+ unit: 'count',
+ prometheus_endpoint_path: '/root',
+ metric_id: 22,
+ },
+ {
+ id: 'metric_of_ages_1003',
+ label: 'memory_1000',
+ unit: 'count',
+ prometheus_endpoint_path: '/root',
+ metric_id: 23,
+ },
+ {
+ id: 'metric_of_ages_1004',
+ label: 'memory_1004',
+ unit: 'count',
+ prometheus_endpoint_path: '/root',
+ metric_id: 24,
+ },
+ ],
+ },
+ ],
+ },
+ {
+ group: 'Response metrics (NGINX Ingress VTS)',
+ priority: 10,
+ panels: [
+ {
+ metrics: [
+ {
+ id: 'response_metrics_nginx_ingress_throughput_status_code',
+ label: 'Status Code',
+ metric_id: 1,
+ prometheus_endpoint_path:
+ '/root/autodevops-deploy/environments/32/prometheus/api/v1/query_range?query=sum%28rate%28nginx_upstream_responses_total%7Bupstream%3D~%22%25%7Bkube_namespace%7D-%25%7Bci_environment_slug%7D-.%2A%22%7D%5B2m%5D%29%29+by+%28status_code%29',
query_range:
- 'avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job)',
- label: 'Total',
- unit: 'cores',
- metric_id: 13,
+ 'sum(rate(nginx_upstream_responses_total{upstream=~"%{kube_namespace}-%{ci_environment_slug}-.*"}[2m])) by (status_code)',
+ unit: 'req / sec',
},
],
+ title: 'Throughput',
+ type: 'area-chart',
+ weight: 1,
+ y_label: 'Requests / Sec',
},
],
},
],
};
+/**
+ * Mock of the metrics_dashboard.json response
+ */
+export const metricsDashboardResponse = {
+ all_dashboards: [],
+ dashboard: metricsDashboardPayload,
+ metrics_data: {},
+ status: 'success',
+};
+
+export const metricsDashboardViewModel = mapToDashboardViewModel(metricsDashboardPayload);
+
const customDashboardsData = new Array(30).fill(null).map((_, idx) => ({
default: false,
display_name: `Custom Dashboard ${idx}`,
@@ -539,6 +524,12 @@ export const dashboardGitResponse = [
...customDashboardsData,
];
+export const mockDashboardsErrorResponse = {
+ all_dashboards: customDashboardsData,
+ message: "Each 'panel_group' must define an array :panels",
+ status: 'error',
+};
+
export const graphDataPrometheusQuery = {
title: 'Super Chart A2',
type: 'single-stat',
diff --git a/spec/frontend/monitoring/shared/prometheus_header_spec.js b/spec/frontend/monitoring/shared/prometheus_header_spec.js
deleted file mode 100644
index b216bfb72d8..00000000000
--- a/spec/frontend/monitoring/shared/prometheus_header_spec.js
+++ /dev/null
@@ -1,26 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import PrometheusHeader from '~/monitoring/components/shared/prometheus_header.vue';
-
-describe('Prometheus Header component', () => {
- let prometheusHeader;
-
- beforeEach(() => {
- prometheusHeader = shallowMount(PrometheusHeader, {
- propsData: {
- graphTitle: 'graph header',
- },
- });
- });
-
- afterEach(() => {
- prometheusHeader.destroy();
- });
-
- describe('Prometheus header component', () => {
- it('should show a title', () => {
- const title = prometheusHeader.find({ ref: 'title' }).text();
-
- expect(title).toBe('graph header');
- });
- });
-});
diff --git a/spec/frontend/monitoring/store/actions_spec.js b/spec/frontend/monitoring/store/actions_spec.js
index 11d3109fcd1..ba41a75ceec 100644
--- a/spec/frontend/monitoring/store/actions_spec.js
+++ b/spec/frontend/monitoring/store/actions_spec.js
@@ -3,7 +3,7 @@ import testAction from 'helpers/vuex_action_helper';
import Tracking from '~/tracking';
import axios from '~/lib/utils/axios_utils';
import statusCodes from '~/lib/utils/http_status';
-import { backOff } from '~/lib/utils/common_utils';
+import * as commonUtils from '~/lib/utils/common_utils';
import createFlash from '~/flash';
import store from '~/monitoring/stores';
@@ -28,11 +28,11 @@ import {
deploymentData,
environmentData,
metricsDashboardResponse,
- metricsDashboardPayload,
+ metricsDashboardViewModel,
dashboardGitResponse,
+ mockDashboardsErrorResponse,
} from '../mock_data';
-jest.mock('~/lib/utils/common_utils');
jest.mock('~/flash');
const resetStore = str => {
@@ -44,14 +44,17 @@ const resetStore = str => {
};
describe('Monitoring store actions', () => {
+ const { convertObjectPropsToCamelCase } = commonUtils;
+
let mock;
+
beforeEach(() => {
mock = new MockAdapter(axios);
// Mock `backOff` function to remove exponential algorithm delay.
jest.useFakeTimers();
- backOff.mockImplementation(callback => {
+ jest.spyOn(commonUtils, 'backOff').mockImplementation(callback => {
const q = new Promise((resolve, reject) => {
const stop = arg => (arg instanceof Error ? reject(arg) : resolve(arg));
const next = () => callback(next, stop);
@@ -69,7 +72,7 @@ describe('Monitoring store actions', () => {
resetStore(store);
mock.reset();
- backOff.mockReset();
+ commonUtils.backOff.mockReset();
createFlash.mockReset();
});
@@ -115,7 +118,6 @@ describe('Monitoring store actions', () => {
afterEach(() => {
resetStore(store);
- jest.restoreAllMocks();
});
it('setting SET_ENVIRONMENTS_FILTER should dispatch fetchEnvironmentsData', () => {
@@ -256,9 +258,11 @@ describe('Monitoring store actions', () => {
describe('fetchDashboard', () => {
let dispatch;
let state;
+ let commit;
const response = metricsDashboardResponse;
beforeEach(() => {
dispatch = jest.fn();
+ commit = jest.fn();
state = storeState();
state.dashboardEndpoint = '/dashboard';
});
@@ -269,6 +273,7 @@ describe('Monitoring store actions', () => {
fetchDashboard(
{
state,
+ commit,
dispatch,
},
params,
@@ -286,19 +291,21 @@ describe('Monitoring store actions', () => {
describe('on failure', () => {
let result;
- let errorResponse;
beforeEach(() => {
const params = {};
result = () => {
- mock.onGet(state.dashboardEndpoint).replyOnce(500, errorResponse);
- return fetchDashboard({ state, dispatch }, params);
+ mock.onGet(state.dashboardEndpoint).replyOnce(500, mockDashboardsErrorResponse);
+ return fetchDashboard({ state, commit, dispatch }, params);
};
});
it('dispatches a failure action', done => {
- errorResponse = {};
result()
.then(() => {
+ expect(commit).toHaveBeenCalledWith(
+ types.SET_ALL_DASHBOARDS,
+ mockDashboardsErrorResponse.all_dashboards,
+ );
expect(dispatch).toHaveBeenCalledWith(
'receiveMetricsDashboardFailure',
new Error('Request failed with status code 500'),
@@ -310,15 +317,15 @@ describe('Monitoring store actions', () => {
});
it('dispatches a failure action when a message is returned', done => {
- const message = 'Something went wrong with Prometheus!';
- errorResponse = { message };
result()
.then(() => {
expect(dispatch).toHaveBeenCalledWith(
'receiveMetricsDashboardFailure',
new Error('Request failed with status code 500'),
);
- expect(createFlash).toHaveBeenCalledWith(expect.stringContaining(message));
+ expect(createFlash).toHaveBeenCalledWith(
+ expect.stringContaining(mockDashboardsErrorResponse.message),
+ );
done();
})
.catch(done.fail);
@@ -365,6 +372,7 @@ describe('Monitoring store actions', () => {
);
expect(commit).toHaveBeenCalledWith(
types.RECEIVE_METRICS_DATA_SUCCESS,
+
metricsDashboardResponse.dashboard,
);
expect(dispatch).toHaveBeenCalledWith('fetchPrometheusMetrics', params);
@@ -443,8 +451,11 @@ describe('Monitoring store actions', () => {
.catch(done.fail);
});
it('dispatches fetchPrometheusMetric for each panel query', done => {
- state.dashboard.panel_groups = metricsDashboardResponse.dashboard.panel_groups;
- const [metric] = state.dashboard.panel_groups[0].panels[0].metrics;
+ state.dashboard.panelGroups = convertObjectPropsToCamelCase(
+ metricsDashboardResponse.dashboard.panel_groups,
+ );
+
+ const [metric] = state.dashboard.panelGroups[0].panels[0].metrics;
const getters = {
metricsWithData: () => [metric.id],
};
@@ -473,16 +484,16 @@ describe('Monitoring store actions', () => {
});
it('dispatches fetchPrometheusMetric for each panel query, handles an error', done => {
- state.dashboard.panel_groups = metricsDashboardResponse.dashboard.panel_groups;
- const metric = state.dashboard.panel_groups[0].panels[0].metrics[0];
+ state.dashboard.panelGroups = metricsDashboardViewModel.panelGroups;
+ const metric = state.dashboard.panelGroups[0].panels[0].metrics[0];
- // Mock having one out of three metrics failing
+ // Mock having one out of four metrics failing
dispatch.mockRejectedValueOnce(new Error('Error fetching this metric'));
dispatch.mockResolvedValue();
fetchPrometheusMetrics({ state, commit, dispatch }, params)
.then(() => {
- expect(dispatch).toHaveBeenCalledTimes(3);
+ expect(dispatch).toHaveBeenCalledTimes(9); // one per metric
expect(dispatch).toHaveBeenCalledWith('fetchPrometheusMetric', {
metric,
params,
@@ -508,7 +519,12 @@ describe('Monitoring store actions', () => {
beforeEach(() => {
state = storeState();
[metric] = metricsDashboardResponse.dashboard.panel_groups[0].panels[0].metrics;
- [data] = metricsDashboardPayload.panel_groups[0].panels[0].metrics;
+ metric = convertObjectPropsToCamelCase(metric, { deep: true });
+
+ data = {
+ metricId: metric.metricId,
+ result: [1582065167.353, 5, 1582065599.353],
+ };
});
it('commits result', done => {
@@ -522,13 +538,13 @@ describe('Monitoring store actions', () => {
{
type: types.REQUEST_METRIC_RESULT,
payload: {
- metricId: metric.metric_id,
+ metricId: metric.metricId,
},
},
{
type: types.RECEIVE_METRIC_RESULT_SUCCESS,
payload: {
- metricId: metric.metric_id,
+ metricId: metric.metricId,
result: data.result,
},
},
@@ -556,13 +572,13 @@ describe('Monitoring store actions', () => {
{
type: types.REQUEST_METRIC_RESULT,
payload: {
- metricId: metric.metric_id,
+ metricId: metric.metricId,
},
},
{
type: types.RECEIVE_METRIC_RESULT_SUCCESS,
payload: {
- metricId: metric.metric_id,
+ metricId: metric.metricId,
result: data.result,
},
},
@@ -592,13 +608,13 @@ describe('Monitoring store actions', () => {
{
type: types.REQUEST_METRIC_RESULT,
payload: {
- metricId: metric.metric_id,
+ metricId: metric.metricId,
},
},
{
type: types.RECEIVE_METRIC_RESULT_FAILURE,
payload: {
- metricId: metric.metric_id,
+ metricId: metric.metricId,
error,
},
},
diff --git a/spec/frontend/monitoring/store/getters_spec.js b/spec/frontend/monitoring/store/getters_spec.js
index 263050b462f..5a14ffc03f2 100644
--- a/spec/frontend/monitoring/store/getters_spec.js
+++ b/spec/frontend/monitoring/store/getters_spec.js
@@ -4,11 +4,16 @@ import * as types from '~/monitoring/stores/mutation_types';
import { metricStates } from '~/monitoring/constants';
import {
environmentData,
- metricsDashboardPayload,
- mockedEmptyResult,
- mockedQueryResultPayload,
- mockedQueryResultPayloadCoresTotal,
+ mockedEmptyThroughputResult,
+ mockedQueryResultFixture,
+ mockedQueryResultFixtureStatusCode,
} from '../mock_data';
+import { getJSONFixture } from '../../helpers/fixtures';
+
+const metricsDashboardFixture = getJSONFixture(
+ 'metrics_dashboard/environment_metrics_dashboard.json',
+);
+const metricsDashboardPayload = metricsDashboardFixture.dashboard;
describe('Monitoring store Getters', () => {
describe('getMetricStates', () => {
@@ -32,7 +37,7 @@ describe('Monitoring store Getters', () => {
it('when dashboard has no panel groups, returns empty', () => {
setupState({
dashboard: {
- panel_groups: [],
+ panelGroups: [],
},
});
@@ -43,10 +48,10 @@ describe('Monitoring store Getters', () => {
let groups;
beforeEach(() => {
setupState({
- dashboard: { panel_groups: [] },
+ dashboard: { panelGroups: [] },
});
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
- groups = state.dashboard.panel_groups;
+ groups = state.dashboard.panelGroups;
});
it('no loaded metric returns empty', () => {
@@ -55,14 +60,14 @@ describe('Monitoring store Getters', () => {
it('on an empty metric with no result, returns NO_DATA', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedEmptyResult);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedEmptyThroughputResult);
expect(getMetricStates()).toEqual([metricStates.NO_DATA]);
});
it('on a metric with a result, returns OK', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayload);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
expect(getMetricStates()).toEqual([metricStates.OK]);
});
@@ -78,14 +83,14 @@ describe('Monitoring store Getters', () => {
it('on multiple metrics with results, returns OK', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayloadCoresTotal);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixtureStatusCode);
expect(getMetricStates()).toEqual([metricStates.OK]);
// Filtered by groups
- expect(getMetricStates(state.dashboard.panel_groups[0].key)).toEqual([]);
- expect(getMetricStates(state.dashboard.panel_groups[1].key)).toEqual([metricStates.OK]);
+ expect(getMetricStates(state.dashboard.panelGroups[1].key)).toEqual([metricStates.OK]);
+ expect(getMetricStates(state.dashboard.panelGroups[2].key)).toEqual([]);
});
it('on multiple metrics errors', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
@@ -94,10 +99,10 @@ describe('Monitoring store Getters', () => {
metricId: groups[0].panels[0].metrics[0].metricId,
});
mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
- metricId: groups[1].panels[0].metrics[0].metricId,
+ metricId: groups[0].panels[0].metrics[0].metricId,
});
mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
- metricId: groups[1].panels[1].metrics[0].metricId,
+ metricId: groups[1].panels[0].metrics[0].metricId,
});
// Entire dashboard fails
@@ -110,21 +115,21 @@ describe('Monitoring store Getters', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
      // A success in 1 group
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayload);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
// An error in 2 groups
mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
- metricId: groups[0].panels[0].metrics[0].metricId,
+ metricId: groups[1].panels[1].metrics[0].metricId,
});
mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
- metricId: groups[1].panels[1].metrics[0].metricId,
+ metricId: groups[2].panels[0].metrics[0].metricId,
});
expect(getMetricStates()).toEqual([metricStates.OK, metricStates.UNKNOWN_ERROR]);
- expect(getMetricStates(groups[0].key)).toEqual([metricStates.UNKNOWN_ERROR]);
expect(getMetricStates(groups[1].key)).toEqual([
metricStates.OK,
metricStates.UNKNOWN_ERROR,
]);
+ expect(getMetricStates(groups[2].key)).toEqual([metricStates.UNKNOWN_ERROR]);
});
});
});
@@ -154,7 +159,7 @@ describe('Monitoring store Getters', () => {
it('when dashboard has no panel groups, returns empty', () => {
setupState({
dashboard: {
- panel_groups: [],
+ panelGroups: [],
},
});
@@ -164,7 +169,7 @@ describe('Monitoring store Getters', () => {
describe('when the dashboard is set', () => {
beforeEach(() => {
setupState({
- dashboard: { panel_groups: [] },
+ dashboard: { panelGroups: [] },
});
});
@@ -176,42 +181,42 @@ describe('Monitoring store Getters', () => {
it('an empty metric, returns empty', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedEmptyResult);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedEmptyThroughputResult);
expect(metricsWithData()).toEqual([]);
});
it('a metric with results, it returns a metric', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayload);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
- expect(metricsWithData()).toEqual([mockedQueryResultPayload.metricId]);
+ expect(metricsWithData()).toEqual([mockedQueryResultFixture.metricId]);
});
it('multiple metrics with results, it return multiple metrics', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayloadCoresTotal);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixtureStatusCode);
expect(metricsWithData()).toEqual([
- mockedQueryResultPayload.metricId,
- mockedQueryResultPayloadCoresTotal.metricId,
+ mockedQueryResultFixture.metricId,
+ mockedQueryResultFixtureStatusCode.metricId,
]);
});
it('multiple metrics with results, it returns metrics filtered by group', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayloadCoresTotal);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixtureStatusCode);
- // First group has no metrics
- expect(metricsWithData(state.dashboard.panel_groups[0].key)).toEqual([]);
-
- // Second group has metrics
- expect(metricsWithData(state.dashboard.panel_groups[1].key)).toEqual([
- mockedQueryResultPayload.metricId,
- mockedQueryResultPayloadCoresTotal.metricId,
+ // First group has metrics
+ expect(metricsWithData(state.dashboard.panelGroups[1].key)).toEqual([
+ mockedQueryResultFixture.metricId,
+ mockedQueryResultFixtureStatusCode.metricId,
]);
+
+ // Second group has no metrics
+ expect(metricsWithData(state.dashboard.panelGroups[2].key)).toEqual([]);
});
});
});
diff --git a/spec/frontend/monitoring/store/mutations_spec.js b/spec/frontend/monitoring/store/mutations_spec.js
index d9aebafb9ec..5a79b8ef49c 100644
--- a/spec/frontend/monitoring/store/mutations_spec.js
+++ b/spec/frontend/monitoring/store/mutations_spec.js
@@ -4,12 +4,14 @@ import mutations from '~/monitoring/stores/mutations';
import * as types from '~/monitoring/stores/mutation_types';
import state from '~/monitoring/stores/state';
import { metricStates } from '~/monitoring/constants';
-import {
- metricsDashboardPayload,
- deploymentData,
- metricsDashboardResponse,
- dashboardGitResponse,
-} from '../mock_data';
+
+import { deploymentData, dashboardGitResponse } from '../mock_data';
+import { getJSONFixture } from '../../helpers/fixtures';
+
+const metricsDashboardFixture = getJSONFixture(
+ 'metrics_dashboard/environment_metrics_dashboard.json',
+);
+const metricsDashboardPayload = metricsDashboardFixture.dashboard;
describe('Monitoring mutations', () => {
let stateCopy;
@@ -17,42 +19,46 @@ describe('Monitoring mutations', () => {
beforeEach(() => {
stateCopy = state();
});
+
describe('RECEIVE_METRICS_DATA_SUCCESS', () => {
let payload;
- const getGroups = () => stateCopy.dashboard.panel_groups;
+ const getGroups = () => stateCopy.dashboard.panelGroups;
beforeEach(() => {
- stateCopy.dashboard.panel_groups = [];
+ stateCopy.dashboard.panelGroups = [];
payload = metricsDashboardPayload;
});
it('adds a key to the group', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, payload);
const groups = getGroups();
- expect(groups[0].key).toBe('response-metrics-nginx-ingress-vts-0');
- expect(groups[1].key).toBe('system-metrics-kubernetes-1');
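+      // group keys are slugified group names suffixed with the group index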
+ expect(groups[0].key).toBe('system-metrics-kubernetes-0');
+ expect(groups[1].key).toBe('response-metrics-nginx-ingress-vts-1');
+ expect(groups[2].key).toBe('response-metrics-nginx-ingress-2');
});
it('normalizes values', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, payload);
- const expectedLabel = 'Pod average';
- const { label, query_range } = getGroups()[1].panels[0].metrics[0];
+ const expectedLabel = 'Pod average (MB)';
+
+ const { label, queryRange } = getGroups()[0].panels[2].metrics[0];
expect(label).toEqual(expectedLabel);
- expect(query_range.length).toBeGreaterThan(0);
+ expect(queryRange.length).toBeGreaterThan(0);
});
- it('contains two groups, with panels with a metric each', () => {
+    it('contains six groups, with panels containing one metric each', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, payload);
const groups = getGroups();
expect(groups).toBeDefined();
- expect(groups).toHaveLength(2);
+ expect(groups).toHaveLength(6);
- expect(groups[0].panels).toHaveLength(1);
+ expect(groups[0].panels).toHaveLength(7);
expect(groups[0].panels[0].metrics).toHaveLength(1);
+ expect(groups[0].panels[1].metrics).toHaveLength(1);
+ expect(groups[0].panels[2].metrics).toHaveLength(1);
- expect(groups[1].panels).toHaveLength(2);
+ expect(groups[1].panels).toHaveLength(3);
expect(groups[1].panels[0].metrics).toHaveLength(1);
- expect(groups[1].panels[1].metrics).toHaveLength(1);
});
it('assigns metrics a metric id', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, payload);
@@ -60,10 +66,13 @@ describe('Monitoring mutations', () => {
const groups = getGroups();
expect(groups[0].panels[0].metrics[0].metricId).toEqual(
- '1_response_metrics_nginx_ingress_throughput_status_code',
+ 'undefined_system_metrics_kubernetes_container_memory_total',
);
expect(groups[1].panels[0].metrics[0].metricId).toEqual(
- '17_system_metrics_kubernetes_container_memory_average',
+ 'undefined_response_metrics_nginx_ingress_throughput_status_code',
+ );
+ expect(groups[2].panels[0].metrics[0].metricId).toEqual(
+ 'undefined_response_metrics_nginx_ingress_16_throughput_status_code',
);
});
});
@@ -123,14 +132,14 @@ describe('Monitoring mutations', () => {
});
describe('Individual panel/metric results', () => {
- const metricId = '12_system_metrics_kubernetes_container_memory_total';
+ const metricId = 'undefined_response_metrics_nginx_ingress_throughput_status_code';
const result = [
{
values: [[0, 1], [1, 1], [1, 3]],
},
];
- const { dashboard } = metricsDashboardResponse;
- const getMetric = () => stateCopy.dashboard.panel_groups[0].panels[0].metrics[0];
+ const dashboard = metricsDashboardPayload;
+ const getMetric = () => stateCopy.dashboard.panelGroups[1].panels[0].metrics[0];
describe('REQUEST_METRIC_RESULT', () => {
beforeEach(() => {
diff --git a/spec/frontend/monitoring/store/utils_spec.js b/spec/frontend/monitoring/store/utils_spec.js
index d322d45457e..1e5bbc9c113 100644
--- a/spec/frontend/monitoring/store/utils_spec.js
+++ b/spec/frontend/monitoring/store/utils_spec.js
@@ -1,27 +1,263 @@
+import { SUPPORTED_FORMATS } from '~/lib/utils/unit_format';
import {
- normalizeMetric,
uniqMetricsId,
parseEnvironmentsResponse,
removeLeadingSlash,
+ mapToDashboardViewModel,
} from '~/monitoring/stores/utils';
const projectPath = 'gitlab-org/gitlab-test';
-describe('normalizeMetric', () => {
- [
- { args: [], expected: 'undefined_undefined' },
- { args: [undefined], expected: 'undefined_undefined' },
- { args: [{ id: 'something' }], expected: 'undefined_something' },
- { args: [{ id: 45 }], expected: 'undefined_45' },
- { args: [{ metric_id: 5 }], expected: '5_undefined' },
- { args: [{ metric_id: 'something' }], expected: 'something_undefined' },
- {
- args: [{ metric_id: 5, id: 'system_metrics_kubernetes_container_memory_total' }],
- expected: '5_system_metrics_kubernetes_container_memory_total',
- },
- ].forEach(({ args, expected }) => {
- it(`normalizes metric to "${expected}" with args=${JSON.stringify(args)}`, () => {
- expect(normalizeMetric(...args)).toEqual({ metric_id: expected, metricId: expected });
+describe('mapToDashboardViewModel', () => {
+ it('maps an empty dashboard', () => {
+ expect(mapToDashboardViewModel({})).toEqual({
+ dashboard: '',
+ panelGroups: [],
+ });
+ });
+
+ it('maps a simple dashboard', () => {
+ const response = {
+ dashboard: 'Dashboard Name',
+ panel_groups: [
+ {
+ group: 'Group 1',
+ panels: [
+ {
+ title: 'Title A',
+ type: 'chart-type',
+ y_label: 'Y Label A',
+ metrics: [],
+ },
+ ],
+ },
+ ],
+ };
+
+ expect(mapToDashboardViewModel(response)).toEqual({
+ dashboard: 'Dashboard Name',
+ panelGroups: [
+ {
+ group: 'Group 1',
+ key: 'group-1-0',
+ panels: [
+ {
+ title: 'Title A',
+ type: 'chart-type',
+ y_label: 'Y Label A',
+ yAxis: {
+ name: 'Y Label A',
+ format: 'number',
+ precision: 2,
+ },
+ metrics: [],
+ },
+ ],
+ },
+ ],
+ });
+ });
+
+ describe('panel groups mapping', () => {
+ it('key', () => {
+ const response = {
+ dashboard: 'Dashboard Name',
+ panel_groups: [
+ {
+ group: 'Group A',
+ },
+ {
+ group: 'Group B',
+ },
+ {
+ group: '',
+ unsupported_property: 'This should be removed',
+ },
+ ],
+ };
+
+ expect(mapToDashboardViewModel(response).panelGroups).toEqual([
+ {
+ group: 'Group A',
+ key: 'group-a-0',
+ panels: [],
+ },
+ {
+ group: 'Group B',
+ key: 'group-b-1',
+ panels: [],
+ },
+ {
+ group: '',
+ key: 'default-2',
+ panels: [],
+ },
+ ]);
+ });
+ });
+
+ describe('panel mapping', () => {
+ const panelTitle = 'Panel Title';
+ const yAxisName = 'Y Axis Name';
+
+ let dashboard;
+
+ const setupWithPanel = panel => {
+ dashboard = {
+ panel_groups: [
+ {
+ panels: [panel],
+ },
+ ],
+ };
+ };
+
+ const getMappedPanel = () => mapToDashboardViewModel(dashboard).panelGroups[0].panels[0];
+
+ it('group y_axis defaults', () => {
+ setupWithPanel({
+ title: panelTitle,
+ });
+
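+      // with no y_label or y_axis given, the panel falls back to an empty label and a "number" format with a precision of 2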
+ expect(getMappedPanel()).toEqual({
+ title: panelTitle,
+ y_label: '',
+ yAxis: {
+ name: '',
+ format: SUPPORTED_FORMATS.number,
+ precision: 2,
+ },
+ metrics: [],
+ });
+ });
+
+ it('panel with y_axis.name', () => {
+ setupWithPanel({
+ y_axis: {
+ name: yAxisName,
+ },
+ });
+
+ expect(getMappedPanel().y_label).toBe(yAxisName);
+ expect(getMappedPanel().yAxis.name).toBe(yAxisName);
+ });
+
+ it('panel with y_axis.name and y_label, displays y_axis.name', () => {
+ setupWithPanel({
+ y_label: 'Ignored Y Label',
+ y_axis: {
+ name: yAxisName,
+ },
+ });
+
+ expect(getMappedPanel().y_label).toBe(yAxisName);
+ expect(getMappedPanel().yAxis.name).toBe(yAxisName);
+ });
+
+ it('group y_label', () => {
+ setupWithPanel({
+ y_label: yAxisName,
+ });
+
+ expect(getMappedPanel().y_label).toBe(yAxisName);
+ expect(getMappedPanel().yAxis.name).toBe(yAxisName);
+ });
+
+ it('group y_axis format and precision', () => {
+ setupWithPanel({
+ title: panelTitle,
+ y_axis: {
+ precision: 0,
+ format: SUPPORTED_FORMATS.bytes,
+ },
+ });
+
+ expect(getMappedPanel().yAxis.format).toBe(SUPPORTED_FORMATS.bytes);
+ expect(getMappedPanel().yAxis.precision).toBe(0);
+ });
+
+ it('group y_axis unsupported format defaults to number', () => {
+ setupWithPanel({
+ title: panelTitle,
+ y_axis: {
+ format: 'invalid_format',
+ },
+ });
+
+ expect(getMappedPanel().yAxis.format).toBe(SUPPORTED_FORMATS.number);
+ });
+ });
+
+ describe('metrics mapping', () => {
+ const defaultLabel = 'Panel Label';
+ const dashboardWithMetric = (metric, label = defaultLabel) => ({
+ panel_groups: [
+ {
+ panels: [
+ {
+ y_label: label,
+ metrics: [metric],
+ },
+ ],
+ },
+ ],
+ });
+
+ const getMappedMetric = dashboard => {
+ return mapToDashboardViewModel(dashboard).panelGroups[0].panels[0].metrics[0];
+ };
+
+ it('creates a metric', () => {
+ const dashboard = dashboardWithMetric({});
+
+ expect(getMappedMetric(dashboard)).toEqual({
+ label: expect.any(String),
+ metricId: expect.any(String),
+ });
+ });
+
+ it('creates a metric with a correct id', () => {
+ const dashboard = dashboardWithMetric({
+ id: 'http_responses',
+ metric_id: 1,
+ });
+
+ expect(getMappedMetric(dashboard).metricId).toEqual('1_http_responses');
+ });
+
+ it('creates a metric with a default label', () => {
+ const dashboard = dashboardWithMetric({});
+
+ expect(getMappedMetric(dashboard)).toMatchObject({
+ label: defaultLabel,
+ });
+ });
+
+ it('creates a metric with an endpoint and query', () => {
+ const dashboard = dashboardWithMetric({
+ prometheus_endpoint_path: 'http://test',
+ query_range: 'http_responses',
+ });
+
+ expect(getMappedMetric(dashboard)).toMatchObject({
+ prometheusEndpointPath: 'http://test',
+ queryRange: 'http_responses',
+ });
+ });
+
+ it('creates a metric with an ad-hoc property', () => {
+ // This behavior is deprecated and should be removed
+ // https://gitlab.com/gitlab-org/gitlab/issues/207198
+
+ const dashboard = dashboardWithMetric({
+ x_label: 'Another label',
+      unknown_option: 'unknown_data',
+ });
+
+ expect(getMappedMetric(dashboard)).toMatchObject({
+ x_label: 'Another label',
+      unknown_option: 'unknown_data',
+ });
});
});
});
diff --git a/spec/frontend/notes/components/__snapshots__/discussion_jump_to_next_button_spec.js.snap b/spec/frontend/notes/components/__snapshots__/discussion_jump_to_next_button_spec.js.snap
index 2f4c114dd3d..b1a718d58b5 100644
--- a/spec/frontend/notes/components/__snapshots__/discussion_jump_to_next_button_spec.js.snap
+++ b/spec/frontend/notes/components/__snapshots__/discussion_jump_to_next_button_spec.js.snap
@@ -7,6 +7,9 @@ exports[`JumpToNextDiscussionButton matches the snapshot 1`] = `
>
<button
class="btn btn-default discussion-next-btn"
+ data-track-event="click_button"
+ data-track-label="mr_next_unresolved_thread"
+ data-track-property="click_next_unresolved_thread"
title="Jump to next unresolved thread"
>
<icon-stub
diff --git a/spec/frontend/notes/components/discussion_counter_spec.js b/spec/frontend/notes/components/discussion_counter_spec.js
new file mode 100644
index 00000000000..c9375df07e8
--- /dev/null
+++ b/spec/frontend/notes/components/discussion_counter_spec.js
@@ -0,0 +1,91 @@
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import notesModule from '~/notes/stores/modules';
+import DiscussionCounter from '~/notes/components/discussion_counter.vue';
+import { noteableDataMock, discussionMock, notesDataMock, userDataMock } from '../mock_data';
+import * as types from '~/notes/stores/mutation_types';
+
+describe('DiscussionCounter component', () => {
+ let store;
+ let wrapper;
+ const localVue = createLocalVue();
+
+ localVue.use(Vuex);
+
+ beforeEach(() => {
+ window.mrTabs = {};
+ const { state, getters, mutations, actions } = notesModule();
+
+ store = new Vuex.Store({
+ state: {
+ ...state,
+ userData: userDataMock,
+ },
+ getters,
+ mutations,
+ actions,
+ });
+ store.dispatch('setNoteableData', {
+ ...noteableDataMock,
+ create_issue_to_resolve_discussions_path: '/test',
+ });
+ store.dispatch('setNotesData', notesDataMock);
+ });
+
+ afterEach(() => {
+ wrapper.vm.$destroy();
+ wrapper = null;
+ });
+
+ describe('has no discussions', () => {
+ it('does not render', () => {
+ wrapper = shallowMount(DiscussionCounter, { store, localVue });
+
+ expect(wrapper.find({ ref: 'discussionCounter' }).exists()).toBe(false);
+ });
+ });
+
+ describe('has no resolvable discussions', () => {
+ it('does not render', () => {
+ store.commit(types.SET_INITIAL_DISCUSSIONS, [{ ...discussionMock, resolvable: false }]);
+ store.dispatch('updateResolvableDiscussionsCounts');
+ wrapper = shallowMount(DiscussionCounter, { store, localVue });
+
+ expect(wrapper.find({ ref: 'discussionCounter' }).exists()).toBe(false);
+ });
+ });
+
+ describe('has resolvable discussions', () => {
+ const updateStore = (note = {}) => {
+ discussionMock.notes[0] = { ...discussionMock.notes[0], ...note };
+ store.commit(types.SET_INITIAL_DISCUSSIONS, [discussionMock]);
+ store.dispatch('updateResolvableDiscussionsCounts');
+ };
+
+ afterEach(() => {
+ delete discussionMock.notes[0].resolvable;
+ delete discussionMock.notes[0].resolved;
+ });
+
+ it('renders', () => {
+ updateStore();
+ wrapper = shallowMount(DiscussionCounter, { store, localVue });
+
+ expect(wrapper.find({ ref: 'discussionCounter' }).exists()).toBe(true);
+ });
+
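+    // Each row asserts the jump button, active state, icon, and number of [role="group"] elements for the given resolved state.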
+ it.each`
+ title | resolved | hasNextBtn | isActive | icon | groupLength
+ ${'hasNextButton'} | ${false} | ${true} | ${false} | ${'check-circle'} | ${2}
+ ${'allResolved'} | ${true} | ${false} | ${true} | ${'check-circle-filled'} | ${0}
+ `('renders correctly if $title', ({ resolved, hasNextBtn, isActive, icon, groupLength }) => {
+ updateStore({ resolvable: true, resolved });
+ wrapper = shallowMount(DiscussionCounter, { store, localVue });
+
+ expect(wrapper.find(`.has-next-btn`).exists()).toBe(hasNextBtn);
+ expect(wrapper.find(`.is-active`).exists()).toBe(isActive);
+ expect(wrapper.find({ name: icon }).exists()).toBe(true);
+      expect(wrapper.findAll('[role="group"]').length).toBe(groupLength);
+ });
+ });
+});
diff --git a/spec/frontend/notes/components/discussion_jump_to_next_button_spec.js b/spec/frontend/notes/components/discussion_jump_to_next_button_spec.js
index a00dd445c4f..183966cf435 100644
--- a/spec/frontend/notes/components/discussion_jump_to_next_button_spec.js
+++ b/spec/frontend/notes/components/discussion_jump_to_next_button_spec.js
@@ -1,11 +1,21 @@
import { shallowMount } from '@vue/test-utils';
import JumpToNextDiscussionButton from '~/notes/components/discussion_jump_to_next_button.vue';
+import { mockTracking } from '../../helpers/tracking_helper';
describe('JumpToNextDiscussionButton', () => {
+ const fromDiscussionId = 'abc123';
let wrapper;
+ let trackingSpy;
+ let jumpFn;
beforeEach(() => {
- wrapper = shallowMount(JumpToNextDiscussionButton);
+ jumpFn = jest.fn();
+ wrapper = shallowMount(JumpToNextDiscussionButton, {
+ propsData: { fromDiscussionId },
+ });
+ wrapper.setMethods({ jumpToNextRelativeDiscussion: jumpFn });
+
+ trackingSpy = mockTracking('_category_', wrapper.element, jest.spyOn);
});
afterEach(() => {
@@ -15,4 +25,19 @@ describe('JumpToNextDiscussionButton', () => {
it('matches the snapshot', () => {
expect(wrapper.vm.$el).toMatchSnapshot();
});
+
+ it('calls jumpToNextRelativeDiscussion when clicked', () => {
+ wrapper.find({ ref: 'button' }).trigger('click');
+
+ expect(jumpFn).toHaveBeenCalledWith(fromDiscussionId);
+ });
+
+ it('sends the correct tracking event when clicked', () => {
+ wrapper.find({ ref: 'button' }).trigger('click');
+
+ expect(trackingSpy).toHaveBeenCalledWith('_category_', 'click_button', {
+ label: 'mr_next_unresolved_thread',
+ property: 'click_next_unresolved_thread',
+ });
+ });
});
diff --git a/spec/frontend/notes/components/discussion_keyboard_navigator_spec.js b/spec/frontend/notes/components/discussion_keyboard_navigator_spec.js
index 74e827784ec..e932133b869 100644
--- a/spec/frontend/notes/components/discussion_keyboard_navigator_spec.js
+++ b/spec/frontend/notes/components/discussion_keyboard_navigator_spec.js
@@ -1,84 +1,53 @@
/* global Mousetrap */
import 'mousetrap';
import { shallowMount, createLocalVue } from '@vue/test-utils';
-import Vuex from 'vuex';
import DiscussionKeyboardNavigator from '~/notes/components/discussion_keyboard_navigator.vue';
-import notesModule from '~/notes/stores/modules';
-
-const localVue = createLocalVue();
-localVue.use(Vuex);
-
-const NEXT_ID = 'abc123';
-const PREV_ID = 'def456';
-const NEXT_DIFF_ID = 'abc123_diff';
-const PREV_DIFF_ID = 'def456_diff';
describe('notes/components/discussion_keyboard_navigator', () => {
- let storeOptions;
- let wrapper;
- let store;
+ const localVue = createLocalVue();
- const createComponent = (options = {}) => {
- store = new Vuex.Store(storeOptions);
+ let wrapper;
+ let jumpToNextDiscussion;
+ let jumpToPreviousDiscussion;
+ const createComponent = () => {
wrapper = shallowMount(DiscussionKeyboardNavigator, {
- localVue,
- store,
- ...options,
+ mixins: [
+ localVue.extend({
+ methods: {
+ jumpToNextDiscussion,
+ jumpToPreviousDiscussion,
+ },
+ }),
+ ],
});
-
- wrapper.vm.jumpToDiscussion = jest.fn();
};
beforeEach(() => {
- const notes = notesModule();
-
- notes.getters.nextUnresolvedDiscussionId = () => (currId, isDiff) =>
- isDiff ? NEXT_DIFF_ID : NEXT_ID;
- notes.getters.previousUnresolvedDiscussionId = () => (currId, isDiff) =>
- isDiff ? PREV_DIFF_ID : PREV_ID;
- notes.getters.getDiscussion = () => id => ({ id });
-
- storeOptions = {
- modules: {
- notes,
- },
- };
+ jumpToNextDiscussion = jest.fn();
+ jumpToPreviousDiscussion = jest.fn();
});
afterEach(() => {
wrapper.destroy();
- storeOptions = null;
- store = null;
+ wrapper = null;
});
- describe.each`
- currentAction | expectedNextId | expectedPrevId
- ${'diffs'} | ${NEXT_DIFF_ID} | ${PREV_DIFF_ID}
- ${'show'} | ${NEXT_ID} | ${PREV_ID}
- `('when isDiffView is $isDiffView', ({ currentAction, expectedNextId, expectedPrevId }) => {
+ describe('on mount', () => {
beforeEach(() => {
- window.mrTabs = { currentAction };
createComponent();
});
- afterEach(() => delete window.mrTabs);
it('calls jumpToNextDiscussion when pressing `n`', () => {
Mousetrap.trigger('n');
- expect(wrapper.vm.jumpToDiscussion).toHaveBeenCalledWith(
- expect.objectContaining({ id: expectedNextId }),
- );
- expect(wrapper.vm.currentDiscussionId).toEqual(expectedNextId);
+ expect(jumpToNextDiscussion).toHaveBeenCalled();
});
it('calls jumpToPreviousDiscussion when pressing `p`', () => {
Mousetrap.trigger('p');
- expect(wrapper.vm.jumpToDiscussion).toHaveBeenCalledWith(
- expect.objectContaining({ id: expectedPrevId }),
- );
- expect(wrapper.vm.currentDiscussionId).toEqual(expectedPrevId);
+ expect(jumpToPreviousDiscussion).toHaveBeenCalled();
});
});
@@ -99,13 +68,13 @@ describe('notes/components/discussion_keyboard_navigator', () => {
it('does not call jumpToNextDiscussion when pressing `n`', () => {
Mousetrap.trigger('n');
- expect(wrapper.vm.jumpToDiscussion).not.toHaveBeenCalled();
+ expect(jumpToNextDiscussion).not.toHaveBeenCalled();
});
     it('does not call jumpToPreviousDiscussion when pressing `p`', () => {
Mousetrap.trigger('p');
- expect(wrapper.vm.jumpToDiscussion).not.toHaveBeenCalled();
+ expect(jumpToPreviousDiscussion).not.toHaveBeenCalled();
});
});
});
diff --git a/spec/frontend/notes/components/note_app_spec.js b/spec/frontend/notes/components/note_app_spec.js
index a51c7c57f6c..2d0cca18647 100644
--- a/spec/frontend/notes/components/note_app_spec.js
+++ b/spec/frontend/notes/components/note_app_spec.js
@@ -5,7 +5,6 @@ import { mount } from '@vue/test-utils';
import { setTestTimeout } from 'helpers/timeout';
import axios from '~/lib/utils/axios_utils';
import NotesApp from '~/notes/components/notes_app.vue';
-import service from '~/notes/services/notes_service';
import createStore from '~/notes/stores';
import '~/behaviors/markdown/render_gfm';
// TODO: use generated fixture (https://gitlab.com/gitlab-org/gitlab-foss/issues/62491)
@@ -192,7 +191,6 @@ describe('note_app', () => {
describe('individual note', () => {
beforeEach(() => {
axiosMock.onAny().reply(mockData.getIndividualNoteResponse);
- jest.spyOn(service, 'updateNote');
wrapper = mountComponent();
return waitForDiscussionsRequest().then(() => {
wrapper.find('.js-note-edit').trigger('click');
@@ -203,18 +201,18 @@ describe('note_app', () => {
expect(wrapper.find('.js-vue-issue-note-form').exists()).toBe(true);
});
- it('calls the service to update the note', () => {
+ it('calls the store action to update the note', () => {
+ jest.spyOn(axios, 'put').mockImplementation(() => Promise.resolve({ data: {} }));
wrapper.find('.js-vue-issue-note-form').value = 'this is a note';
wrapper.find('.js-vue-issue-save').trigger('click');
- expect(service.updateNote).toHaveBeenCalled();
+ expect(axios.put).toHaveBeenCalled();
});
});
describe('discussion note', () => {
beforeEach(() => {
axiosMock.onAny().reply(mockData.getDiscussionNoteResponse);
- jest.spyOn(service, 'updateNote');
wrapper = mountComponent();
return waitForDiscussionsRequest().then(() => {
wrapper.find('.js-note-edit').trigger('click');
@@ -226,10 +224,11 @@ describe('note_app', () => {
});
it('updates the note and resets the edit form', () => {
+ jest.spyOn(axios, 'put').mockImplementation(() => Promise.resolve({ data: {} }));
wrapper.find('.js-vue-issue-note-form').value = 'this is a note';
wrapper.find('.js-vue-issue-save').trigger('click');
- expect(service.updateNote).toHaveBeenCalled();
+ expect(axios.put).toHaveBeenCalled();
});
});
});
diff --git a/spec/frontend/notes/helpers.js b/spec/frontend/notes/helpers.js
new file mode 100644
index 00000000000..3f349b40ba5
--- /dev/null
+++ b/spec/frontend/notes/helpers.js
@@ -0,0 +1,12 @@
+// eslint-disable-next-line import/prefer-default-export
+export const resetStore = store => {
+ store.replaceState({
+ notes: [],
+ targetNoteHash: null,
+ lastFetchedAt: null,
+
+ notesData: {},
+ userData: {},
+ noteableData: {},
+ });
+};
diff --git a/spec/frontend/notes/mixins/discussion_navigation_spec.js b/spec/frontend/notes/mixins/discussion_navigation_spec.js
new file mode 100644
index 00000000000..4e5325b8bc3
--- /dev/null
+++ b/spec/frontend/notes/mixins/discussion_navigation_spec.js
@@ -0,0 +1,178 @@
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import * as utils from '~/lib/utils/common_utils';
+import discussionNavigation from '~/notes/mixins/discussion_navigation';
+import eventHub from '~/notes/event_hub';
+import notesModule from '~/notes/stores/modules';
+import { setHTMLFixture } from 'helpers/fixtures';
+
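+// Discussion factory; only even-indexed discussions are resolvable (see createDiscussions below).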
+const discussion = (id, index) => ({
+ id,
+ resolvable: index % 2 === 0,
+ active: true,
+ notes: [{}],
+ diff_discussion: true,
+});
+const createDiscussions = () => [...'abcde'].map(discussion);
+const createComponent = () => ({
+ mixins: [discussionNavigation],
+ render() {
+ return this.$slots.default;
+ },
+});
+
+describe('Discussion navigation mixin', () => {
+ const localVue = createLocalVue();
+ localVue.use(Vuex);
+
+ let wrapper;
+ let store;
+ let expandDiscussion;
+
+ beforeEach(() => {
+ setHTMLFixture(
+ [...'abcde']
+ .map(
+ id =>
+ `<ul class="notes" data-discussion-id="${id}"></ul>
+ <div class="discussion" data-discussion-id="${id}"></div>`,
+ )
+ .join(''),
+ );
+
+ jest.spyOn(utils, 'scrollToElement');
+
+ expandDiscussion = jest.fn();
+ const { actions, ...notesRest } = notesModule();
+ store = new Vuex.Store({
+ modules: {
+ notes: {
+ ...notesRest,
+ actions: { ...actions, expandDiscussion },
+ },
+ },
+ });
+ store.state.notes.discussions = createDiscussions();
+
+ wrapper = shallowMount(createComponent(), { store, localVue });
+ });
+
+ afterEach(() => {
+ wrapper.vm.$destroy();
+ jest.clearAllMocks();
+ });
+
+ const findDiscussion = (selector, id) =>
+ document.querySelector(`${selector}[data-discussion-id="${id}"]`);
+
+ describe('cycle through discussions', () => {
+ beforeEach(() => {
+ // eslint-disable-next-line new-cap
+ window.mrTabs = { eventHub: new localVue(), tabShown: jest.fn() };
+ });
+
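+    // The expected targets are the resolvable discussions ('a', 'c', 'e'); 'b' and 'd' are skipped.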
+ describe.each`
+ fn | args | currentId | expected
+ ${'jumpToNextDiscussion'} | ${[]} | ${null} | ${'a'}
+ ${'jumpToNextDiscussion'} | ${[]} | ${'a'} | ${'c'}
+ ${'jumpToNextDiscussion'} | ${[]} | ${'e'} | ${'a'}
+ ${'jumpToPreviousDiscussion'} | ${[]} | ${null} | ${'e'}
+ ${'jumpToPreviousDiscussion'} | ${[]} | ${'e'} | ${'c'}
+ ${'jumpToPreviousDiscussion'} | ${[]} | ${'c'} | ${'a'}
+ ${'jumpToNextRelativeDiscussion'} | ${[null]} | ${null} | ${'a'}
+ ${'jumpToNextRelativeDiscussion'} | ${['a']} | ${null} | ${'c'}
+ ${'jumpToNextRelativeDiscussion'} | ${['e']} | ${'c'} | ${'a'}
+ `('$fn (args = $args, currentId = $currentId)', ({ fn, args, currentId, expected }) => {
+ beforeEach(() => {
+ store.state.notes.currentDiscussionId = currentId;
+ });
+
+ describe('on `show` active tab', () => {
+ beforeEach(() => {
+ window.mrTabs.currentAction = 'show';
+ wrapper.vm[fn](...args);
+ });
+
+ it('sets current discussion', () => {
+ expect(store.state.notes.currentDiscussionId).toEqual(expected);
+ });
+
+ it('expands discussion', () => {
+ expect(expandDiscussion).toHaveBeenCalled();
+ });
+
+ it('scrolls to element', () => {
+ expect(utils.scrollToElement).toHaveBeenCalledWith(
+ findDiscussion('div.discussion', expected),
+ );
+ });
+ });
+
+ describe('on `diffs` active tab', () => {
+ beforeEach(() => {
+ window.mrTabs.currentAction = 'diffs';
+ wrapper.vm[fn](...args);
+ });
+
+ it('sets current discussion', () => {
+ expect(store.state.notes.currentDiscussionId).toEqual(expected);
+ });
+
+ it('expands discussion', () => {
+ expect(expandDiscussion).toHaveBeenCalled();
+ });
+
+ it('scrolls when scrollToDiscussion is emitted', () => {
+ expect(utils.scrollToElement).not.toHaveBeenCalled();
+
+ eventHub.$emit('scrollToDiscussion');
+
+ expect(utils.scrollToElement).toHaveBeenCalledWith(findDiscussion('ul.notes', expected));
+ });
+ });
+
+ describe('on `other` active tab', () => {
+ beforeEach(() => {
+ window.mrTabs.currentAction = 'other';
+ wrapper.vm[fn](...args);
+ });
+
+ it('sets current discussion', () => {
+ expect(store.state.notes.currentDiscussionId).toEqual(expected);
+ });
+
+ it('does not expand discussion yet', () => {
+ expect(expandDiscussion).not.toHaveBeenCalled();
+ });
+
+ it('shows mrTabs', () => {
+ expect(window.mrTabs.tabShown).toHaveBeenCalledWith('show');
+ });
+
+ describe('when tab is changed', () => {
+ beforeEach(() => {
+ window.mrTabs.eventHub.$emit('MergeRequestTabChange');
+
+ jest.runAllTimers();
+ });
+
+ it('expands discussion', () => {
+ expect(expandDiscussion).toHaveBeenCalledWith(
+ expect.anything(),
+ {
+ discussionId: expected,
+ },
+ undefined,
+ );
+ });
+
+ it('scrolls to discussion', () => {
+ expect(utils.scrollToElement).toHaveBeenCalledWith(
+ findDiscussion('div.discussion', expected),
+ );
+ });
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/notes/old_notes_spec.js b/spec/frontend/notes/old_notes_spec.js
index 5f7a5d57cd8..49b887b21b4 100644
--- a/spec/frontend/notes/old_notes_spec.js
+++ b/spec/frontend/notes/old_notes_spec.js
@@ -28,7 +28,10 @@ window.gl = window.gl || {};
gl.utils = gl.utils || {};
gl.utils.disableButtonIfEmptyField = () => {};
-describe('Old Notes (~/notes.js)', () => {
+// the following test is unreliable and fails in master 2-3 times a day
+// see https://gitlab.com/gitlab-org/gitlab/issues/206906#note_290602581
+// eslint-disable-next-line jest/no-disabled-tests
+describe.skip('Old Notes (~/notes.js)', () => {
beforeEach(() => {
jest.useFakeTimers();
loadFixtures(fixture);
diff --git a/spec/frontend/notes/stores/actions_spec.js b/spec/frontend/notes/stores/actions_spec.js
new file mode 100644
index 00000000000..40b0134e12e
--- /dev/null
+++ b/spec/frontend/notes/stores/actions_spec.js
@@ -0,0 +1,905 @@
+import { TEST_HOST } from 'spec/test_constants';
+import AxiosMockAdapter from 'axios-mock-adapter';
+import Api from '~/api';
+import Flash from '~/flash';
+import * as actions from '~/notes/stores/actions';
+import * as mutationTypes from '~/notes/stores/mutation_types';
+import * as notesConstants from '~/notes/constants';
+import createStore from '~/notes/stores';
+import mrWidgetEventHub from '~/vue_merge_request_widget/event_hub';
+import testAction from '../../helpers/vuex_action_helper';
+import { resetStore } from '../helpers';
+import {
+ discussionMock,
+ notesDataMock,
+ userDataMock,
+ noteableDataMock,
+ individualNote,
+} from '../mock_data';
+import axios from '~/lib/utils/axios_utils';
+
+const TEST_ERROR_MESSAGE = 'Test error message';
+jest.mock('~/flash');
+
+describe('Actions Notes Store', () => {
+ let commit;
+ let dispatch;
+ let state;
+ let store;
+ let axiosMock;
+
+ beforeEach(() => {
+ store = createStore();
+ commit = jest.fn();
+ dispatch = jest.fn();
+ state = {};
+ axiosMock = new AxiosMockAdapter(axios);
+ });
+
+ afterEach(() => {
+ resetStore(store);
+ axiosMock.restore();
+ });
+
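+  // testAction(action, payload, state, expectedMutations, expectedActions, done) asserts the mutations and actions triggered by a store action.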
+ describe('setNotesData', () => {
+ it('should set received notes data', done => {
+ testAction(
+ actions.setNotesData,
+ notesDataMock,
+ { notesData: {} },
+ [{ type: 'SET_NOTES_DATA', payload: notesDataMock }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('setNoteableData', () => {
+ it('should set received issue data', done => {
+ testAction(
+ actions.setNoteableData,
+ noteableDataMock,
+ { noteableData: {} },
+ [{ type: 'SET_NOTEABLE_DATA', payload: noteableDataMock }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('setUserData', () => {
+ it('should set received user data', done => {
+ testAction(
+ actions.setUserData,
+ userDataMock,
+ { userData: {} },
+ [{ type: 'SET_USER_DATA', payload: userDataMock }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('setLastFetchedAt', () => {
+ it('should set received timestamp', done => {
+ testAction(
+ actions.setLastFetchedAt,
+ 'timestamp',
+ { lastFetchedAt: {} },
+ [{ type: 'SET_LAST_FETCHED_AT', payload: 'timestamp' }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('setInitialNotes', () => {
+ it('should set initial notes', done => {
+ testAction(
+ actions.setInitialNotes,
+ [individualNote],
+ { notes: [] },
+ [{ type: 'SET_INITIAL_DISCUSSIONS', payload: [individualNote] }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('setTargetNoteHash', () => {
+ it('should set target note hash', done => {
+ testAction(
+ actions.setTargetNoteHash,
+ 'hash',
+ { notes: [] },
+ [{ type: 'SET_TARGET_NOTE_HASH', payload: 'hash' }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('toggleDiscussion', () => {
+ it('should toggle discussion', done => {
+ testAction(
+ actions.toggleDiscussion,
+ { discussionId: discussionMock.id },
+ { notes: [discussionMock] },
+ [{ type: 'TOGGLE_DISCUSSION', payload: { discussionId: discussionMock.id } }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('expandDiscussion', () => {
+ it('should expand discussion', done => {
+ testAction(
+ actions.expandDiscussion,
+ { discussionId: discussionMock.id },
+ { notes: [discussionMock] },
+ [{ type: 'EXPAND_DISCUSSION', payload: { discussionId: discussionMock.id } }],
+ [{ type: 'diffs/renderFileForDiscussionId', payload: discussionMock.id }],
+ done,
+ );
+ });
+ });
+
+ describe('collapseDiscussion', () => {
+ it('should commit collapse discussion', done => {
+ testAction(
+ actions.collapseDiscussion,
+ { discussionId: discussionMock.id },
+ { notes: [discussionMock] },
+ [{ type: 'COLLAPSE_DISCUSSION', payload: { discussionId: discussionMock.id } }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('async methods', () => {
+ beforeEach(() => {
+ axiosMock.onAny().reply(200, {});
+ });
+
+ describe('closeIssue', () => {
+ it('sets state as closed', done => {
+ store
+ .dispatch('closeIssue', { notesData: { closeIssuePath: '' } })
+ .then(() => {
+ expect(store.state.noteableData.state).toEqual('closed');
+ expect(store.state.isToggleStateButtonLoading).toEqual(false);
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ describe('reopenIssue', () => {
+ it('sets state as reopened', done => {
+ store
+ .dispatch('reopenIssue', { notesData: { reopenIssuePath: '' } })
+ .then(() => {
+ expect(store.state.noteableData.state).toEqual('reopened');
+ expect(store.state.isToggleStateButtonLoading).toEqual(false);
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+ });
+
+ describe('emitStateChangedEvent', () => {
+ it('emits an event on the document', () => {
+ document.addEventListener('issuable_vue_app:change', event => {
+ expect(event.detail.data).toEqual({ id: '1', state: 'closed' });
+ expect(event.detail.isClosed).toEqual(false);
+ });
+
+ store.dispatch('emitStateChangedEvent', { id: '1', state: 'closed' });
+ });
+ });
+
+ describe('toggleStateButtonLoading', () => {
+ it('should set loading as true', done => {
+ testAction(
+ actions.toggleStateButtonLoading,
+ true,
+ {},
+ [{ type: 'TOGGLE_STATE_BUTTON_LOADING', payload: true }],
+ [],
+ done,
+ );
+ });
+
+ it('should set loading as false', done => {
+ testAction(
+ actions.toggleStateButtonLoading,
+ false,
+ {},
+ [{ type: 'TOGGLE_STATE_BUTTON_LOADING', payload: false }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('toggleIssueLocalState', () => {
+ it('sets issue state as closed', done => {
+ testAction(actions.toggleIssueLocalState, 'closed', {}, [{ type: 'CLOSE_ISSUE' }], [], done);
+ });
+
+ it('sets issue state as reopened', done => {
+ testAction(
+ actions.toggleIssueLocalState,
+ 'reopened',
+ {},
+ [{ type: 'REOPEN_ISSUE' }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('poll', () => {
+ jest.useFakeTimers();
+
+ beforeEach(done => {
+ jest.spyOn(axios, 'get');
+
+ store
+ .dispatch('setNotesData', notesDataMock)
+ .then(done)
+ .catch(done.fail);
+ });
+
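+    // The mocked 'poll-interval' header (1000ms) schedules the next request; advancing the fake timers triggers a second fetch with the last fetched timestamp.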
+ it('calls service with last fetched state', done => {
+ axiosMock
+ .onAny()
+ .reply(200, { notes: [], last_fetched_at: '123456' }, { 'poll-interval': '1000' });
+
+ store
+ .dispatch('poll')
+ .then(() => new Promise(resolve => requestAnimationFrame(resolve)))
+ .then(() => {
+ expect(axios.get).toHaveBeenCalled();
+ expect(store.state.lastFetchedAt).toBe('123456');
+
+ jest.advanceTimersByTime(1500);
+ })
+ .then(
+ () =>
+ new Promise(resolve => {
+ requestAnimationFrame(resolve);
+ }),
+ )
+ .then(() => {
+ expect(axios.get.mock.calls.length).toBe(2);
+ expect(axios.get.mock.calls[axios.get.mock.calls.length - 1][1].headers).toEqual({
+ 'X-Last-Fetched-At': '123456',
+ });
+ })
+ .then(() => store.dispatch('stopPolling'))
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
+ describe('setNotesFetchedState', () => {
+ it('should set notes fetched state', done => {
+ testAction(
+ actions.setNotesFetchedState,
+ true,
+ {},
+ [{ type: 'SET_NOTES_FETCHED_STATE', payload: true }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('removeNote', () => {
+ const endpoint = `${TEST_HOST}/note`;
+
+ beforeEach(() => {
+ axiosMock.onDelete(endpoint).replyOnce(200, {});
+
+ document.body.setAttribute('data-page', '');
+ });
+
+ afterEach(() => {
+ axiosMock.restore();
+
+ document.body.setAttribute('data-page', '');
+ });
+
+ it('commits DELETE_NOTE and dispatches updateMergeRequestWidget', done => {
+ const note = { path: endpoint, id: 1 };
+
+ testAction(
+ actions.removeNote,
+ note,
+ store.state,
+ [
+ {
+ type: 'DELETE_NOTE',
+ payload: note,
+ },
+ ],
+ [
+ {
+ type: 'updateMergeRequestWidget',
+ },
+ {
+ type: 'updateResolvableDiscussionsCounts',
+ },
+ ],
+ done,
+ );
+ });
+
+ it('dispatches removeDiscussionsFromDiff on merge request page', done => {
+ const note = { path: endpoint, id: 1 };
+
+ document.body.setAttribute('data-page', 'projects:merge_requests:show');
+
+ testAction(
+ actions.removeNote,
+ note,
+ store.state,
+ [
+ {
+ type: 'DELETE_NOTE',
+ payload: note,
+ },
+ ],
+ [
+ {
+ type: 'updateMergeRequestWidget',
+ },
+ {
+ type: 'updateResolvableDiscussionsCounts',
+ },
+ {
+ type: 'diffs/removeDiscussionsFromDiff',
+ },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('deleteNote', () => {
+ const endpoint = `${TEST_HOST}/note`;
+
+ beforeEach(() => {
+ axiosMock.onDelete(endpoint).replyOnce(200, {});
+
+ document.body.setAttribute('data-page', '');
+ });
+
+ afterEach(() => {
+ axiosMock.restore();
+
+ document.body.setAttribute('data-page', '');
+ });
+
+ it('dispatches removeNote', done => {
+ const note = { path: endpoint, id: 1 };
+
+ testAction(
+ actions.deleteNote,
+ note,
+ {},
+ [],
+ [
+ {
+ type: 'removeNote',
+ payload: {
+ id: 1,
+ path: 'http://test.host/note',
+ },
+ },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('createNewNote', () => {
+ describe('success', () => {
+ const res = {
+ id: 1,
+ valid: true,
+ };
+
+ beforeEach(() => {
+ axiosMock.onAny().reply(200, res);
+ });
+
+ it('commits ADD_NEW_NOTE and dispatches updateMergeRequestWidget', done => {
+ testAction(
+ actions.createNewNote,
+ { endpoint: `${gl.TEST_HOST}`, data: {} },
+ store.state,
+ [
+ {
+ type: 'ADD_NEW_NOTE',
+ payload: res,
+ },
+ ],
+ [
+ {
+ type: 'updateMergeRequestWidget',
+ },
+ {
+ type: 'startTaskList',
+ },
+ {
+ type: 'updateResolvableDiscussionsCounts',
+ },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('error', () => {
+ const res = {
+ errors: ['error'],
+ };
+
+ beforeEach(() => {
+ axiosMock.onAny().replyOnce(200, res);
+ });
+
+ it('does not commit ADD_NEW_NOTE or dispatch updateMergeRequestWidget', done => {
+ testAction(
+ actions.createNewNote,
+ { endpoint: `${gl.TEST_HOST}`, data: {} },
+ store.state,
+ [],
+ [],
+ done,
+ );
+ });
+ });
+ });
+
+ describe('toggleResolveNote', () => {
+ const res = {
+ resolved: true,
+ };
+
+ beforeEach(() => {
+ axiosMock.onAny().reply(200, res);
+ });
+
+ describe('as note', () => {
+ it('commits UPDATE_NOTE and dispatches updateMergeRequestWidget', done => {
+ testAction(
+ actions.toggleResolveNote,
+ { endpoint: `${gl.TEST_HOST}`, isResolved: true, discussion: false },
+ store.state,
+ [
+ {
+ type: 'UPDATE_NOTE',
+ payload: res,
+ },
+ ],
+ [
+ {
+ type: 'updateResolvableDiscussionsCounts',
+ },
+ {
+ type: 'updateMergeRequestWidget',
+ },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('as discussion', () => {
+ it('commits UPDATE_DISCUSSION and dispatches updateMergeRequestWidget', done => {
+ testAction(
+ actions.toggleResolveNote,
+ { endpoint: `${gl.TEST_HOST}`, isResolved: true, discussion: true },
+ store.state,
+ [
+ {
+ type: 'UPDATE_DISCUSSION',
+ payload: res,
+ },
+ ],
+ [
+ {
+ type: 'updateResolvableDiscussionsCounts',
+ },
+ {
+ type: 'updateMergeRequestWidget',
+ },
+ ],
+ done,
+ );
+ });
+ });
+ });
+
+ describe('updateMergeRequestWidget', () => {
+ it('calls mrWidget checkStatus', () => {
+ jest.spyOn(mrWidgetEventHub, '$emit').mockImplementation(() => {});
+
+ actions.updateMergeRequestWidget();
+
+ expect(mrWidgetEventHub.$emit).toHaveBeenCalledWith('mr.discussion.updated');
+ });
+ });
+
+ describe('setCommentsDisabled', () => {
+ it('should set comments disabled state', done => {
+ testAction(
+ actions.setCommentsDisabled,
+ true,
+ null,
+ [{ type: 'DISABLE_COMMENTS', payload: true }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('updateResolvableDiscussionsCounts', () => {
+ it('commits UPDATE_RESOLVABLE_DISCUSSIONS_COUNTS', done => {
+ testAction(
+ actions.updateResolvableDiscussionsCounts,
+ null,
+ {},
+ [{ type: 'UPDATE_RESOLVABLE_DISCUSSIONS_COUNTS' }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('convertToDiscussion', () => {
+ it('commits CONVERT_TO_DISCUSSION with noteId', done => {
+ const noteId = 'dummy-note-id';
+ testAction(
+ actions.convertToDiscussion,
+ noteId,
+ {},
+ [{ type: 'CONVERT_TO_DISCUSSION', payload: noteId }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('updateOrCreateNotes', () => {
+ it('Updates existing note', () => {
+ const note = { id: 1234 };
+ const getters = { notesById: { 1234: note } };
+
+ actions.updateOrCreateNotes({ commit, state, getters, dispatch }, [note]);
+
+ expect(commit.mock.calls).toEqual([[mutationTypes.UPDATE_NOTE, note]]);
+ });
+
+    it('Creates a new note if none exists', () => {
+ const note = { id: 1234 };
+ const getters = { notesById: {} };
+ actions.updateOrCreateNotes({ commit, state, getters, dispatch }, [note]);
+
+ expect(commit.mock.calls).toEqual([[mutationTypes.ADD_NEW_NOTE, note]]);
+ });
+
+ describe('Discussion notes', () => {
+ let note;
+ let getters;
+
+ beforeEach(() => {
+ note = { id: 1234 };
+ getters = { notesById: {} };
+ });
+
+ it('Adds a reply to an existing discussion', () => {
+ state = { discussions: [note] };
+ const discussionNote = {
+ ...note,
+ type: notesConstants.DISCUSSION_NOTE,
+ discussion_id: 1234,
+ };
+
+ actions.updateOrCreateNotes({ commit, state, getters, dispatch }, [discussionNote]);
+
+ expect(commit.mock.calls).toEqual([
+ [mutationTypes.ADD_NEW_REPLY_TO_DISCUSSION, discussionNote],
+ ]);
+ });
+
+ it('fetches discussions for diff notes', () => {
+ state = { discussions: [], notesData: { discussionsPath: 'Hello world' } };
+ const diffNote = { ...note, type: notesConstants.DIFF_NOTE, discussion_id: 1234 };
+
+ actions.updateOrCreateNotes({ commit, state, getters, dispatch }, [diffNote]);
+
+ expect(dispatch.mock.calls).toEqual([
+ ['fetchDiscussions', { path: state.notesData.discussionsPath }],
+ ]);
+ });
+
+ it('Adds a new note', () => {
+ state = { discussions: [] };
+ const discussionNote = {
+ ...note,
+ type: notesConstants.DISCUSSION_NOTE,
+ discussion_id: 1234,
+ };
+
+ actions.updateOrCreateNotes({ commit, state, getters, dispatch }, [discussionNote]);
+
+ expect(commit.mock.calls).toEqual([[mutationTypes.ADD_NEW_NOTE, discussionNote]]);
+ });
+ });
+ });
+
+ describe('replyToDiscussion', () => {
+ const payload = { endpoint: TEST_HOST, data: {} };
+
+    it('updates the discussion if the response contains a discussion', done => {
+ const discussion = { notes: [] };
+ axiosMock.onAny().reply(200, { discussion });
+
+ testAction(
+ actions.replyToDiscussion,
+ payload,
+ {
+ notesById: {},
+ },
+ [{ type: mutationTypes.UPDATE_DISCUSSION, payload: discussion }],
+ [
+ { type: 'updateMergeRequestWidget' },
+ { type: 'startTaskList' },
+ { type: 'updateResolvableDiscussionsCounts' },
+ ],
+ done,
+ );
+ });
+
+ it('adds a reply to a discussion', done => {
+ const res = {};
+ axiosMock.onAny().reply(200, res);
+
+ testAction(
+ actions.replyToDiscussion,
+ payload,
+ {
+ notesById: {},
+ },
+ [{ type: mutationTypes.ADD_NEW_REPLY_TO_DISCUSSION, payload: res }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('removeConvertedDiscussion', () => {
+ it('commits CONVERT_TO_DISCUSSION with noteId', done => {
+ const noteId = 'dummy-id';
+ testAction(
+ actions.removeConvertedDiscussion,
+ noteId,
+ {},
+ [{ type: 'REMOVE_CONVERTED_DISCUSSION', payload: noteId }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('resolveDiscussion', () => {
+ let getters;
+ let discussionId;
+
+ beforeEach(() => {
+ discussionId = discussionMock.id;
+ state.discussions = [discussionMock];
+ getters = {
+ isDiscussionResolved: () => false,
+ };
+ });
+
+ it('when unresolved, dispatches action', done => {
+ testAction(
+ actions.resolveDiscussion,
+ { discussionId },
+ { ...state, ...getters },
+ [],
+ [
+ {
+ type: 'toggleResolveNote',
+ payload: {
+ endpoint: discussionMock.resolve_path,
+ isResolved: false,
+ discussion: true,
+ },
+ },
+ ],
+ done,
+ );
+ });
+
+ it('when resolved, does nothing', done => {
+ getters.isDiscussionResolved = id => id === discussionId;
+
+ testAction(
+ actions.resolveDiscussion,
+ { discussionId },
+ { ...state, ...getters },
+ [],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('saveNote', () => {
+ const flashContainer = {};
+ const payload = { endpoint: TEST_HOST, data: { 'note[note]': 'some text' }, flashContainer };
+
+ describe('if response contains errors', () => {
+ const res = { errors: { something: ['went wrong'] } };
+ const error = { message: 'Unprocessable entity', response: { data: res } };
+
+ it('throws an error', done => {
+ actions
+ .saveNote(
+ {
+ commit() {},
+ dispatch: () => Promise.reject(error),
+ },
+ payload,
+ )
+ .then(() => done.fail('Expected error to be thrown!'))
+ .catch(err => {
+ expect(err).toBe(error);
+ expect(Flash).not.toHaveBeenCalled();
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
+ describe('if response contains errors.base', () => {
+ const res = { errors: { base: ['something went wrong'] } };
+ const error = { message: 'Unprocessable entity', response: { data: res } };
+
+ it('sets flash alert using errors.base message', done => {
+ actions
+ .saveNote(
+ {
+ commit() {},
+ dispatch: () => Promise.reject(error),
+ },
+ { ...payload, flashContainer },
+ )
+ .then(resp => {
+ expect(resp.hasFlash).toBe(true);
+ expect(Flash).toHaveBeenCalledWith(
+ 'Your comment could not be submitted because something went wrong',
+ 'alert',
+ flashContainer,
+ );
+ })
+ .catch(() => done.fail('Expected success response!'))
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
+ describe('if response contains no errors', () => {
+ const res = { valid: true };
+
+ it('returns the response', done => {
+ actions
+ .saveNote(
+ {
+ commit() {},
+ dispatch: () => Promise.resolve(res),
+ },
+ payload,
+ )
+ .then(data => {
+ expect(data).toBe(res);
+ expect(Flash).not.toHaveBeenCalled();
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+ });
+
+ describe('submitSuggestion', () => {
+ const discussionId = 'discussion-id';
+ const noteId = 'note-id';
+ const suggestionId = 'suggestion-id';
+ let flashContainer;
+
+ beforeEach(() => {
+ jest.spyOn(Api, 'applySuggestion').mockReturnValue(Promise.resolve());
+ dispatch.mockReturnValue(Promise.resolve());
+ flashContainer = {};
+ });
+
+ const testSubmitSuggestion = (done, expectFn) => {
+ actions
+ .submitSuggestion(
+ { commit, dispatch },
+ { discussionId, noteId, suggestionId, flashContainer },
+ )
+ .then(expectFn)
+ .then(done)
+ .catch(done.fail);
+ };
+
+    it('when the service succeeds, commits and resolves the discussion', done => {
+ testSubmitSuggestion(done, () => {
+ expect(commit.mock.calls).toEqual([
+ [mutationTypes.APPLY_SUGGESTION, { discussionId, noteId, suggestionId }],
+ ]);
+
+ expect(dispatch.mock.calls).toEqual([['resolveDiscussion', { discussionId }]]);
+ expect(Flash).not.toHaveBeenCalled();
+ });
+ });
+
+    it('when the service fails, flashes an error message', done => {
+ const response = { response: { data: { message: TEST_ERROR_MESSAGE } } };
+
+ Api.applySuggestion.mockReturnValue(Promise.reject(response));
+
+ testSubmitSuggestion(done, () => {
+ expect(commit).not.toHaveBeenCalled();
+ expect(dispatch).not.toHaveBeenCalled();
+ expect(Flash).toHaveBeenCalledWith(`${TEST_ERROR_MESSAGE}.`, 'alert', flashContainer);
+ });
+ });
+
+    it('when resolving the discussion fails, fails gracefully', done => {
+ dispatch.mockReturnValue(Promise.reject());
+
+ testSubmitSuggestion(done, () => {
+ expect(Flash).not.toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe('filterDiscussion', () => {
+ const path = 'some-discussion-path';
+ const filter = 0;
+
+ beforeEach(() => {
+ dispatch.mockReturnValue(new Promise(() => {}));
+ });
+
+ it('fetches discussions with filter and persistFilter false', () => {
+ actions.filterDiscussion({ dispatch }, { path, filter, persistFilter: false });
+
+ expect(dispatch.mock.calls).toEqual([
+ ['setLoadingState', true],
+ ['fetchDiscussions', { path, filter, persistFilter: false }],
+ ]);
+ });
+
+ it('fetches discussions with filter and persistFilter true', () => {
+ actions.filterDiscussion({ dispatch }, { path, filter, persistFilter: true });
+
+ expect(dispatch.mock.calls).toEqual([
+ ['setLoadingState', true],
+ ['fetchDiscussions', { path, filter, persistFilter: true }],
+ ]);
+ });
+ });
+});
diff --git a/spec/frontend/notes/stores/getters_spec.js b/spec/frontend/notes/stores/getters_spec.js
index 83417bd70ef..602e4c70741 100644
--- a/spec/frontend/notes/stores/getters_spec.js
+++ b/spec/frontend/notes/stores/getters_spec.js
@@ -35,6 +35,7 @@ describe('Getters Notes Store', () => {
notesData: notesDataMock,
userData: userDataMock,
noteableData: noteableDataMock,
+ descriptionVersions: 'descriptionVersions',
};
});
@@ -385,4 +386,10 @@ describe('Getters Notes Store', () => {
expect(getters.getDiscussion(state)('1')).toEqual({ id: '1' });
});
});
+
+ describe('descriptionVersions', () => {
+ it('should return `descriptionVersions`', () => {
+ expect(getters.descriptionVersions(state)).toEqual('descriptionVersions');
+ });
+ });
});
diff --git a/spec/frontend/notes/stores/mutation_spec.js b/spec/frontend/notes/stores/mutation_spec.js
index 46b4081f6f6..ee772afbc03 100644
--- a/spec/frontend/notes/stores/mutation_spec.js
+++ b/spec/frontend/notes/stores/mutation_spec.js
@@ -8,6 +8,7 @@ import {
userDataMock,
noteableDataMock,
individualNote,
+ notesWithDescriptionChanges,
} from '../mock_data';
const RESOLVED_NOTE = { resolvable: true, resolved: true };
@@ -579,4 +580,27 @@ describe('Notes Store mutations', () => {
expect(state.convertedDisscussionIds).not.toContain(discussion.id);
});
});
+
+ describe('RECEIVE_DESCRIPTION_VERSION', () => {
+ const descriptionVersion = notesWithDescriptionChanges[0].notes[0].note;
+ const versionId = notesWithDescriptionChanges[0].notes[0].id;
+ const state = {};
+
+ it('adds a descriptionVersion', () => {
+ mutations.RECEIVE_DESCRIPTION_VERSION(state, { descriptionVersion, versionId });
+ expect(state.descriptionVersions[versionId]).toBe(descriptionVersion);
+ });
+ });
+
+ describe('RECEIVE_DELETE_DESCRIPTION_VERSION', () => {
+ const descriptionVersion = notesWithDescriptionChanges[0].notes[0].note;
+ const versionId = notesWithDescriptionChanges[0].notes[0].id;
+ const state = { descriptionVersions: { [versionId]: descriptionVersion } };
+ const deleted = 'Deleted';
+
+ it('updates descriptionVersion to "Deleted"', () => {
+ mutations.RECEIVE_DELETE_DESCRIPTION_VERSION(state, { [versionId]: deleted });
+ expect(state.descriptionVersions[versionId]).toBe(deleted);
+ });
+ });
});
diff --git a/spec/frontend/pages/projects/shared/permissions/components/project_feature_settings_spec.js b/spec/frontend/pages/projects/shared/permissions/components/project_feature_settings_spec.js
new file mode 100644
index 00000000000..8ab5426a005
--- /dev/null
+++ b/spec/frontend/pages/projects/shared/permissions/components/project_feature_settings_spec.js
@@ -0,0 +1,124 @@
+import { mount, shallowMount } from '@vue/test-utils';
+
+import projectFeatureSetting from '~/pages/projects/shared/permissions/components/project_feature_setting.vue';
+import projectFeatureToggle from '~/vue_shared/components/toggle_button.vue';
+
+describe('Project Feature Settings', () => {
+ const defaultProps = {
+ name: 'Test',
+ options: [[1, 1], [2, 2], [3, 3], [4, 4], [5, 5]],
+ value: 1,
+ disabledInput: false,
+ };
+ let wrapper;
+
+ const mountComponent = customProps => {
+ const propsData = { ...defaultProps, ...customProps };
+ return shallowMount(projectFeatureSetting, { propsData });
+ };
+
+ beforeEach(() => {
+ wrapper = mountComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('Hidden name input', () => {
+ it('should set the hidden name input if the name exists', () => {
+ expect(wrapper.find({ name: 'Test' }).props().value).toBe(1);
+ });
+
+ it('should not set the hidden name input if the name does not exist', () => {
+ wrapper.setProps({ name: null });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find({ name: 'Test' }).exists()).toBe(false);
+ });
+ });
+ });
+
+ describe('Feature toggle', () => {
+ it('should enable the feature toggle if the value is not 0', () => {
+ expect(wrapper.find(projectFeatureToggle).props().value).toBe(true);
+ });
+
+ it('should enable the feature toggle if the value is less than 0', () => {
+ wrapper.setProps({ value: -1 });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find(projectFeatureToggle).props().value).toBe(true);
+ });
+ });
+
+ it('should disable the feature toggle if the value is 0', () => {
+ wrapper.setProps({ value: 0 });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find(projectFeatureToggle).props().value).toBe(false);
+ });
+ });
+
+ it('should disable the feature toggle if disabledInput is set', () => {
+ wrapper.setProps({ disabledInput: true });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find(projectFeatureToggle).props().disabledInput).toBe(true);
+ });
+ });
+
+ it('should emit a change event when the feature toggle changes', () => {
+ // Needs to be fully mounted to be able to trigger the click event on the internal button
+ wrapper = mount(projectFeatureSetting, { propsData: defaultProps });
+
+ expect(wrapper.emitted().change).toBeUndefined();
+ wrapper
+ .find(projectFeatureToggle)
+ .find('button')
+ .trigger('click');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted().change.length).toBe(1);
+ expect(wrapper.emitted().change[0]).toEqual([0]);
+ });
+ });
+ });
+
+ describe('Project repo select', () => {
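+    // The select is expected to be disabled when the input is disabled, the value is 0, or only one option is available.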
+ it.each`
+ disabledInput | value | options | isDisabled
+ ${true} | ${0} | ${[[1, 1]]} | ${true}
+ ${true} | ${1} | ${[[1, 1], [2, 2], [3, 3]]} | ${true}
+ ${false} | ${0} | ${[[1, 1], [2, 2], [3, 3]]} | ${true}
+ ${false} | ${1} | ${[[1, 1]]} | ${true}
+ ${false} | ${1} | ${[[1, 1], [2, 2], [3, 3]]} | ${false}
+ `(
+ 'should set disabled to $isDisabled when disabledInput is $disabledInput, the value is $value and options are $options',
+ ({ disabledInput, value, options, isDisabled }) => {
+ wrapper.setProps({ disabledInput, value, options });
+
+ return wrapper.vm.$nextTick(() => {
+ if (isDisabled) {
+ expect(wrapper.find('select').attributes().disabled).toEqual('disabled');
+ } else {
+ expect(wrapper.find('select').attributes().disabled).toBeUndefined();
+ }
+ });
+ },
+ );
+
+ it('should emit the change when a new option is selected', () => {
+ expect(wrapper.emitted().change).toBeUndefined();
+ wrapper
+ .findAll('option')
+ .at(1)
+ .trigger('change');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted().change.length).toBe(1);
+ expect(wrapper.emitted().change[0]).toEqual([2]);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/pages/projects/shared/permissions/components/project_setting_row_spec.js b/spec/frontend/pages/projects/shared/permissions/components/project_setting_row_spec.js
new file mode 100644
index 00000000000..7cbcbdcdd1f
--- /dev/null
+++ b/spec/frontend/pages/projects/shared/permissions/components/project_setting_row_spec.js
@@ -0,0 +1,63 @@
+import { shallowMount } from '@vue/test-utils';
+
+import projectSettingRow from '~/pages/projects/shared/permissions/components/project_setting_row.vue';
+
+describe('Project Setting Row', () => {
+ let wrapper;
+
+ const mountComponent = (customProps = {}) => {
+ const propsData = { ...customProps };
+ return shallowMount(projectSettingRow, { propsData });
+ };
+
+ beforeEach(() => {
+ wrapper = mountComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('should show the label if it is set', () => {
+ wrapper.setProps({ label: 'Test label' });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find('label').text()).toEqual('Test label');
+ });
+ });
+
+ it('should hide the label if it is not set', () => {
+ expect(wrapper.find('label').exists()).toBe(false);
+ });
+
+ it('should show the help icon with the correct help path if it is set', () => {
+ wrapper.setProps({ label: 'Test label', helpPath: '/123' });
+
+ return wrapper.vm.$nextTick(() => {
+ const link = wrapper.find('a');
+
+ expect(link.exists()).toBe(true);
+ expect(link.attributes().href).toEqual('/123');
+ });
+ });
+
+ it('should hide the help icon if no help path is set', () => {
+ wrapper.setProps({ label: 'Test label' });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find('a').exists()).toBe(false);
+ });
+ });
+
+ it('should show the help text if it is set', () => {
+ wrapper.setProps({ helpText: 'Test text' });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find('span').text()).toEqual('Test text');
+ });
+ });
+
+  it('should hide the help text if it is not set', () => {
+ expect(wrapper.find('span').exists()).toBe(false);
+ });
+});
diff --git a/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js b/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
new file mode 100644
index 00000000000..c304dfd2048
--- /dev/null
+++ b/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
@@ -0,0 +1,434 @@
+import { shallowMount } from '@vue/test-utils';
+
+import settingsPanel from '~/pages/projects/shared/permissions/components/settings_panel.vue';
+import {
+ featureAccessLevel,
+ visibilityLevelDescriptions,
+ visibilityOptions,
+} from '~/pages/projects/shared/permissions/constants';
+
+const defaultProps = {
+ currentSettings: {
+ visibilityLevel: 10,
+ requestAccessEnabled: true,
+ issuesAccessLevel: 20,
+ repositoryAccessLevel: 20,
+ forkingAccessLevel: 20,
+ mergeRequestsAccessLevel: 20,
+ buildsAccessLevel: 20,
+ wikiAccessLevel: 20,
+ snippetsAccessLevel: 20,
+ pagesAccessLevel: 10,
+ containerRegistryEnabled: true,
+ lfsEnabled: true,
+ emailsDisabled: false,
+ packagesEnabled: true,
+ },
+ canDisableEmails: true,
+ canChangeVisibilityLevel: true,
+ allowedVisibilityOptions: [0, 10, 20],
+ visibilityHelpPath: '/help/public_access/public_access',
+ registryAvailable: false,
+ registryHelpPath: '/help/user/packages/container_registry/index',
+ lfsAvailable: true,
+ lfsHelpPath: '/help/workflow/lfs/manage_large_binaries_with_git_lfs',
+ pagesAvailable: true,
+ pagesAccessControlEnabled: false,
+ pagesAccessControlForced: false,
+ pagesHelpPath: '/help/user/project/pages/introduction#gitlab-pages-access-control-core',
+ packagesAvailable: false,
+ packagesHelpPath: '/help/user/packages/index',
+};
+
+describe('Settings Panel', () => {
+ let wrapper;
+
+ const mountComponent = customProps => {
+ const propsData = { ...defaultProps, ...customProps };
+ return shallowMount(settingsPanel, { propsData });
+ };
+
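+  // Remounts the panel with the given settings merged over defaultProps.currentSettings.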
+ const overrideCurrentSettings = (currentSettingsProps, extraProps = {}) => {
+ return mountComponent({
+ ...extraProps,
+ currentSettings: {
+ ...defaultProps.currentSettings,
+ ...currentSettingsProps,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ wrapper = mountComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('Project Visibility', () => {
+ it('should set the project visibility help path', () => {
+ expect(wrapper.find({ ref: 'project-visibility-settings' }).props().helpPath).toBe(
+ defaultProps.visibilityHelpPath,
+ );
+ });
+
+ it('should not disable the visibility level dropdown', () => {
+ wrapper.setProps({ canChangeVisibilityLevel: true });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(
+ wrapper.find('[name="project[visibility_level]"]').attributes().disabled,
+ ).toBeUndefined();
+ });
+ });
+
+ it('should disable the visibility level dropdown', () => {
+ wrapper.setProps({ canChangeVisibilityLevel: false });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find('[name="project[visibility_level]"]').attributes().disabled).toBe(
+ 'disabled',
+ );
+ });
+ });
+
+ it.each`
+ option | allowedOptions | disabled
+ ${visibilityOptions.PRIVATE} | ${[visibilityOptions.PRIVATE, visibilityOptions.INTERNAL, visibilityOptions.PUBLIC]} | ${false}
+ ${visibilityOptions.PRIVATE} | ${[visibilityOptions.INTERNAL, visibilityOptions.PUBLIC]} | ${true}
+ ${visibilityOptions.INTERNAL} | ${[visibilityOptions.PRIVATE, visibilityOptions.INTERNAL, visibilityOptions.PUBLIC]} | ${false}
+ ${visibilityOptions.INTERNAL} | ${[visibilityOptions.PRIVATE, visibilityOptions.PUBLIC]} | ${true}
+ ${visibilityOptions.PUBLIC} | ${[visibilityOptions.PRIVATE, visibilityOptions.INTERNAL, visibilityOptions.PUBLIC]} | ${false}
+ ${visibilityOptions.PUBLIC} | ${[visibilityOptions.PRIVATE, visibilityOptions.INTERNAL]} | ${true}
+ `(
+ 'sets disabled to $disabled for the visibility option $option when given $allowedOptions',
+ ({ option, allowedOptions, disabled }) => {
+ wrapper.setProps({ allowedVisibilityOptions: allowedOptions });
+
+ return wrapper.vm.$nextTick(() => {
+ const attributeValue = wrapper
+ .find(`[name="project[visibility_level]"] option[value="${option}"]`)
+ .attributes().disabled;
+
+ if (disabled) {
+ expect(attributeValue).toBe('disabled');
+ } else {
+ expect(attributeValue).toBeUndefined();
+ }
+ });
+ },
+ );
+
+ it('should set the visibility level description based upon the selected visibility level', () => {
+ wrapper.find('[name="project[visibility_level]"]').setValue(visibilityOptions.INTERNAL);
+
+ expect(wrapper.find({ ref: 'project-visibility-settings' }).text()).toContain(
+ visibilityLevelDescriptions[visibilityOptions.INTERNAL],
+ );
+ });
+
+ it('should show the request access checkbox if the visibility level is not private', () => {
+ wrapper = overrideCurrentSettings({ visibilityLevel: visibilityOptions.INTERNAL });
+
+ expect(wrapper.find('[name="project[request_access_enabled]"]').exists()).toBe(true);
+ });
+
+ it('should not show the request access checkbox if the visibility level is private', () => {
+ wrapper = overrideCurrentSettings({ visibilityLevel: visibilityOptions.PRIVATE });
+
+ expect(wrapper.find('[name="project[request_access_enabled]"]').exists()).toBe(false);
+ });
+ });
+
+ describe('Repository', () => {
+ it('should set the repository help text when the visibility level is set to private', () => {
+ wrapper = overrideCurrentSettings({ visibilityLevel: visibilityOptions.PRIVATE });
+
+ expect(wrapper.find({ ref: 'repository-settings' }).props().helpText).toEqual(
+ 'View and edit files in this project',
+ );
+ });
+
+ it('should set the repository help text with a read access warning when the visibility level is set to non-private', () => {
+ wrapper = overrideCurrentSettings({ visibilityLevel: visibilityOptions.PUBLIC });
+
+ expect(wrapper.find({ ref: 'repository-settings' }).props().helpText).toEqual(
+ 'View and edit files in this project. Non-project members will only have read access',
+ );
+ });
+ });
+
+ describe('Merge requests', () => {
+ it('should enable the merge requests access level input when the repository is enabled', () => {
+ wrapper = overrideCurrentSettings({ repositoryAccessLevel: featureAccessLevel.EVERYONE });
+
+ expect(
+ wrapper
+ .find('[name="project[project_feature_attributes][merge_requests_access_level]"]')
+ .props().disabledInput,
+ ).toEqual(false);
+ });
+
+ it('should disable the merge requests access level input when the repository is disabled', () => {
+ wrapper = overrideCurrentSettings({ repositoryAccessLevel: featureAccessLevel.NOT_ENABLED });
+
+ expect(
+ wrapper
+ .find('[name="project[project_feature_attributes][merge_requests_access_level]"]')
+ .props().disabledInput,
+ ).toEqual(true);
+ });
+ });
+
+ describe('Forks', () => {
+ it('should enable the forking access level input when the repository is enabled', () => {
+ wrapper = overrideCurrentSettings({ repositoryAccessLevel: featureAccessLevel.EVERYONE });
+
+ expect(
+ wrapper.find('[name="project[project_feature_attributes][forking_access_level]"]').props()
+ .disabledInput,
+ ).toEqual(false);
+ });
+
+ it('should disable the forking access level input when the repository is disabled', () => {
+ wrapper = overrideCurrentSettings({ repositoryAccessLevel: featureAccessLevel.NOT_ENABLED });
+
+ expect(
+ wrapper.find('[name="project[project_feature_attributes][forking_access_level]"]').props()
+ .disabledInput,
+ ).toEqual(true);
+ });
+ });
+
+ describe('Pipelines', () => {
+ it('should enable the builds access level input when the repository is enabled', () => {
+ wrapper = overrideCurrentSettings({ repositoryAccessLevel: featureAccessLevel.EVERYONE });
+
+ expect(
+ wrapper.find('[name="project[project_feature_attributes][builds_access_level]"]').props()
+ .disabledInput,
+ ).toEqual(false);
+ });
+
+ it('should disable the builds access level input when the repository is disabled', () => {
+ wrapper = overrideCurrentSettings({ repositoryAccessLevel: featureAccessLevel.NOT_ENABLED });
+
+ expect(
+ wrapper.find('[name="project[project_feature_attributes][builds_access_level]"]').props()
+ .disabledInput,
+ ).toEqual(true);
+ });
+ });
+
+ describe('Container registry', () => {
+ it('should show the container registry settings if the registry is available', () => {
+ wrapper.setProps({ registryAvailable: true });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find({ ref: 'container-registry-settings' }).exists()).toBe(true);
+ });
+ });
+
+ it('should hide the container registry settings if the registry is not available', () => {
+ wrapper.setProps({ registryAvailable: false });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find({ ref: 'container-registry-settings' }).exists()).toBe(false);
+ });
+ });
+
+ it('should set the container registry settings help path', () => {
+ wrapper.setProps({ registryAvailable: true });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find({ ref: 'container-registry-settings' }).props().helpPath).toBe(
+ defaultProps.registryHelpPath,
+ );
+ });
+ });
+
+ it('should show the container registry public note if the visibility level is public and the registry is available', () => {
+ wrapper = overrideCurrentSettings(
+ { visibilityLevel: visibilityOptions.PUBLIC },
+ { registryAvailable: true },
+ );
+
+ expect(wrapper.find({ ref: 'container-registry-settings' }).text()).toContain(
+ 'Note: the container registry is always visible when a project is public',
+ );
+ });
+
+ it('should hide the container registry public note if the visibility level is private and the registry is available', () => {
+ wrapper = overrideCurrentSettings(
+ { visibilityLevel: visibilityOptions.PRIVATE },
+ { registryAvailable: true },
+ );
+
+ expect(wrapper.find({ ref: 'container-registry-settings' }).text()).not.toContain(
+ 'Note: the container registry is always visible when a project is public',
+ );
+ });
+
+ it('should enable the container registry input when the repository is enabled', () => {
+ wrapper = overrideCurrentSettings(
+ { repositoryAccessLevel: featureAccessLevel.EVERYONE },
+ { registryAvailable: true },
+ );
+
+ expect(
+ wrapper.find('[name="project[container_registry_enabled]"]').props().disabledInput,
+ ).toEqual(false);
+ });
+
+ it('should disable the container registry input when the repository is disabled', () => {
+ wrapper = overrideCurrentSettings(
+ { repositoryAccessLevel: featureAccessLevel.NOT_ENABLED },
+ { registryAvailable: true },
+ );
+
+ expect(
+ wrapper.find('[name="project[container_registry_enabled]"]').props().disabledInput,
+ ).toEqual(true);
+ });
+ });
+
+ describe('Git Large File Storage', () => {
+ it('should show the LFS settings if LFS is available', () => {
+ wrapper.setProps({ lfsAvailable: true });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find({ ref: 'git-lfs-settings' }).exists()).toEqual(true);
+ });
+ });
+
+ it('should hide the LFS settings if LFS is not available', () => {
+ wrapper.setProps({ lfsAvailable: false });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find({ ref: 'git-lfs-settings' }).exists()).toEqual(false);
+ });
+ });
+
+ it('should set the LFS settings help path', () => {
+ expect(wrapper.find({ ref: 'git-lfs-settings' }).props().helpPath).toBe(
+ defaultProps.lfsHelpPath,
+ );
+ });
+
+ it('should enable the LFS input when the repository is enabled', () => {
+ wrapper = overrideCurrentSettings(
+ { repositoryAccessLevel: featureAccessLevel.EVERYONE },
+ { lfsAvailable: true },
+ );
+
+ expect(wrapper.find('[name="project[lfs_enabled]"]').props().disabledInput).toEqual(false);
+ });
+
+ it('should disable the LFS input when the repository is disabled', () => {
+ wrapper = overrideCurrentSettings(
+ { repositoryAccessLevel: featureAccessLevel.NOT_ENABLED },
+ { lfsAvailable: true },
+ );
+
+ expect(wrapper.find('[name="project[lfs_enabled]"]').props().disabledInput).toEqual(true);
+ });
+ });
+
+ describe('Packages', () => {
+ it('should show the packages settings if packages are available', () => {
+ wrapper.setProps({ packagesAvailable: true });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find({ ref: 'package-settings' }).exists()).toEqual(true);
+ });
+ });
+
+ it('should hide the packages settings if packages are not available', () => {
+ wrapper.setProps({ packagesAvailable: false });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find({ ref: 'package-settings' }).exists()).toEqual(false);
+ });
+ });
+
+ it('should set the package settings help path', () => {
+ wrapper.setProps({ packagesAvailable: true });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find({ ref: 'package-settings' }).props().helpPath).toBe(
+ defaultProps.packagesHelpPath,
+ );
+ });
+ });
+
+ it('should enable the packages input when the repository is enabled', () => {
+ wrapper = overrideCurrentSettings(
+ { repositoryAccessLevel: featureAccessLevel.EVERYONE },
+ { packagesAvailable: true },
+ );
+
+ expect(wrapper.find('[name="project[packages_enabled]"]').props().disabledInput).toEqual(
+ false,
+ );
+ });
+
+ it('should disable the packages input when the repository is disabled', () => {
+ wrapper = overrideCurrentSettings(
+ { repositoryAccessLevel: featureAccessLevel.NOT_ENABLED },
+ { packagesAvailable: true },
+ );
+
+ expect(wrapper.find('[name="project[packages_enabled]"]').props().disabledInput).toEqual(
+ true,
+ );
+ });
+ });
+
+ describe('Pages', () => {
+ it.each`
+ pagesAvailable | pagesAccessControlEnabled | visibility
+ ${true} | ${true} | ${'show'}
+ ${true} | ${false} | ${'hide'}
+ ${false} | ${true} | ${'hide'}
+ ${false} | ${false} | ${'hide'}
+ `(
+ 'should $visibility the page settings if pagesAvailable is $pagesAvailable and pagesAccessControlEnabled is $pagesAccessControlEnabled',
+ ({ pagesAvailable, pagesAccessControlEnabled, visibility }) => {
+ wrapper.setProps({ pagesAvailable, pagesAccessControlEnabled });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find({ ref: 'pages-settings' }).exists()).toBe(visibility === 'show');
+ });
+ },
+ );
+
+ it('should set the pages settings help path', () => {
+ wrapper.setProps({ pagesAvailable: true, pagesAccessControlEnabled: true });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find({ ref: 'pages-settings' }).props().helpPath).toBe(
+ defaultProps.pagesHelpPath,
+ );
+ });
+ });
+ });
+
+ describe('Email notifications', () => {
+    it('should show the disable email notifications input if emails can be disabled', () => {
+ wrapper.setProps({ canDisableEmails: true });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find({ ref: 'email-settings' }).exists()).toBe(true);
+ });
+ });
+
+ it('should hide the disable email notifications input if emails cannot be disabled', () => {
+ wrapper.setProps({ canDisableEmails: false });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find({ ref: 'email-settings' }).exists()).toBe(false);
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/projects/project_import_gitlab_project_spec.js b/spec/frontend/projects/project_import_gitlab_project_spec.js
index 3c94934699d..3c94934699d 100644
--- a/spec/javascripts/projects/project_import_gitlab_project_spec.js
+++ b/spec/frontend/projects/project_import_gitlab_project_spec.js
diff --git a/spec/javascripts/projects/project_new_spec.js b/spec/frontend/projects/project_new_spec.js
index 7c6ff90aff6..7c6ff90aff6 100644
--- a/spec/javascripts/projects/project_new_spec.js
+++ b/spec/frontend/projects/project_new_spec.js
diff --git a/spec/frontend/registry/explorer/components/project_policy_alert_spec.js b/spec/frontend/registry/explorer/components/project_policy_alert_spec.js
new file mode 100644
index 00000000000..89c37e55398
--- /dev/null
+++ b/spec/frontend/registry/explorer/components/project_policy_alert_spec.js
@@ -0,0 +1,132 @@
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { GlSprintf, GlAlert, GlLink } from '@gitlab/ui';
+import * as dateTimeUtils from '~/lib/utils/datetime_utility';
+import component from '~/registry/explorer/components/project_policy_alert.vue';
+import {
+ EXPIRATION_POLICY_ALERT_TITLE,
+ EXPIRATION_POLICY_ALERT_PRIMARY_BUTTON,
+} from '~/registry/explorer/constants';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('Project Policy Alert', () => {
+ let wrapper;
+ let store;
+
+ const defaultState = {
+ config: {
+ expirationPolicy: {
+ enabled: true,
+ },
+ settingsPath: 'foo',
+ expirationPolicyHelpPagePath: 'bar',
+ },
+ images: [],
+ isLoading: false,
+ };
+
+ const findAlert = () => wrapper.find(GlAlert);
+ const findLink = () => wrapper.find(GlLink);
+
+ const createComponent = (state = defaultState) => {
+ store = new Vuex.Store({
+ state,
+ });
+ wrapper = shallowMount(component, {
+ localVue,
+ store,
+ stubs: {
+ GlSprintf,
+ },
+ });
+ };
+
+ const documentationExpectation = () => {
+    it('contains a documentation link', () => {
+ createComponent();
+ expect(findLink().attributes('href')).toBe(defaultState.config.expirationPolicyHelpPagePath);
+ expect(findLink().text()).toBe('documentation');
+ });
+ };
+
+ beforeEach(() => {
+ jest.spyOn(dateTimeUtils, 'approximateDuration').mockReturnValue('1 day');
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('is hidden', () => {
+ it('when expiration policy does not exist', () => {
+ createComponent({ config: {} });
+ expect(findAlert().exists()).toBe(false);
+ });
+
+    it('when expiration policy exists but is disabled', () => {
+ createComponent({
+ ...defaultState,
+ config: {
+ expirationPolicy: {
+ enabled: false,
+ },
+ },
+ });
+ expect(findAlert().exists()).toBe(false);
+ });
+ });
+
+ describe('is visible', () => {
+ it('when expiration policy exists and is enabled', () => {
+ createComponent();
+ expect(findAlert().exists()).toBe(true);
+ });
+ });
+
+ describe('full info alert', () => {
+ beforeEach(() => {
+ createComponent({ ...defaultState, images: [1] });
+ });
+
+ it('has a primary button', () => {
+ const alert = findAlert();
+ expect(alert.props('primaryButtonText')).toBe(EXPIRATION_POLICY_ALERT_PRIMARY_BUTTON);
+ expect(alert.props('primaryButtonLink')).toBe(defaultState.config.settingsPath);
+ });
+
+ it('has a title', () => {
+ const alert = findAlert();
+ expect(alert.props('title')).toBe(EXPIRATION_POLICY_ALERT_TITLE);
+ });
+
+ it('has the full message', () => {
+ expect(findAlert().html()).toContain('<strong>1 day</strong>');
+ });
+
+ documentationExpectation();
+ });
+
+ describe('compact info alert', () => {
+ beforeEach(() => {
+ createComponent({ ...defaultState, images: [] });
+ });
+
+ it('does not have a button', () => {
+ const alert = findAlert();
+ expect(alert.props('primaryButtonText')).toBe(null);
+ });
+
+ it('does not have a title', () => {
+ const alert = findAlert();
+ expect(alert.props('title')).toBe(null);
+ });
+
+ it('has the short message', () => {
+ expect(findAlert().html()).not.toContain('<strong>1 day</strong>');
+ });
+
+ documentationExpectation();
+ });
+});
diff --git a/spec/frontend/registry/explorer/pages/details_spec.js b/spec/frontend/registry/explorer/pages/details_spec.js
index 48f3b0f9b65..15c6b36af03 100644
--- a/spec/frontend/registry/explorer/pages/details_spec.js
+++ b/spec/frontend/registry/explorer/pages/details_spec.js
@@ -1,5 +1,5 @@
import { mount } from '@vue/test-utils';
-import { GlTable, GlPagination, GlLoadingIcon } from '@gitlab/ui';
+import { GlTable, GlPagination, GlSkeletonLoader } from '@gitlab/ui';
import Tracking from '~/tracking';
import stubChildren from 'helpers/stub_children';
import component from '~/registry/explorer/pages/details.vue';
@@ -14,8 +14,7 @@ describe('Details Page', () => {
const findDeleteModal = () => wrapper.find(GlModal);
const findPagination = () => wrapper.find(GlPagination);
- const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
- const findTagsTable = () => wrapper.find(GlTable);
+ const findSkeletonLoader = () => wrapper.find(GlSkeletonLoader);
const findMainCheckbox = () => wrapper.find({ ref: 'mainCheckbox' });
const findFirstRowItem = ref => wrapper.find({ ref });
const findBulkDeleteButton = () => wrapper.find({ ref: 'bulkDeleteButton' });
@@ -33,7 +32,7 @@ describe('Details Page', () => {
...stubChildren(component),
GlModal,
GlSprintf: false,
- GlTable: false,
+ GlTable,
},
mocks: {
$route: {
@@ -53,18 +52,19 @@ describe('Details Page', () => {
});
describe('when isLoading is true', () => {
- beforeAll(() => store.commit(SET_MAIN_LOADING, true));
+ beforeEach(() => {
+ store.dispatch('receiveTagsListSuccess', { ...tagsListResponse, data: [] });
+ store.commit(SET_MAIN_LOADING, true);
+ });
afterAll(() => store.commit(SET_MAIN_LOADING, false));
- it('has a loading icon', () => {
- expect(findLoadingIcon().exists()).toBe(true);
+ it('has a skeleton loader', () => {
+ expect(findSkeletonLoader().exists()).toBe(true);
});
- it('does not have a main content', () => {
- expect(findTagsTable().exists()).toBe(false);
- expect(findPagination().exists()).toBe(false);
- expect(findDeleteModal().exists()).toBe(false);
+ it('does not have list items', () => {
+ expect(findFirstRowItem('rowCheckbox').exists()).toBe(false);
});
});
@@ -219,7 +219,7 @@ describe('Details Page', () => {
dispatchSpy.mockResolvedValue();
wrapper.setData({ currentPage: 2 });
expect(store.dispatch).toHaveBeenCalledWith('requestTagsList', {
- id: wrapper.vm.$route.params.id,
+ params: wrapper.vm.$route.params.id,
pagination: { page: 2 },
});
});
diff --git a/spec/frontend/registry/explorer/pages/list_spec.js b/spec/frontend/registry/explorer/pages/list_spec.js
index f463dc49035..91c3c242ed4 100644
--- a/spec/frontend/registry/explorer/pages/list_spec.js
+++ b/spec/frontend/registry/explorer/pages/list_spec.js
@@ -1,6 +1,6 @@
import VueRouter from 'vue-router';
import { shallowMount, createLocalVue } from '@vue/test-utils';
-import { GlPagination, GlLoadingIcon, GlSprintf } from '@gitlab/ui';
+import { GlPagination, GlSkeletonLoader, GlSprintf } from '@gitlab/ui';
import Tracking from '~/tracking';
import component from '~/registry/explorer/pages/list.vue';
import store from '~/registry/explorer/stores/';
@@ -17,7 +17,7 @@ describe('List Page', () => {
const findDeleteBtn = () => wrapper.find({ ref: 'deleteImageButton' });
const findDeleteModal = () => wrapper.find(GlModal);
- const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
+ const findSkeletonLoader = () => wrapper.find(GlSkeletonLoader);
const findImagesList = () => wrapper.find({ ref: 'imagesList' });
const findRowItems = () => wrapper.findAll({ ref: 'rowItem' });
const findEmptyState = () => wrapper.find(GlEmptyState);
@@ -71,7 +71,7 @@ describe('List Page', () => {
});
it('should not show the loading or default state', () => {
- expect(findLoadingIcon().exists()).toBe(false);
+ expect(findSkeletonLoader().exists()).toBe(false);
expect(findImagesList().exists()).toBe(false);
});
});
@@ -81,8 +81,8 @@ describe('List Page', () => {
afterAll(() => store.commit(SET_MAIN_LOADING, false));
- it('shows the loading icon', () => {
- expect(findLoadingIcon().exists()).toBe(true);
+ it('shows the skeleton loader', () => {
+ expect(findSkeletonLoader().exists()).toBe(true);
});
it('imagesList is not visible', () => {
diff --git a/spec/frontend/registry/explorer/stores/actions_spec.js b/spec/frontend/registry/explorer/stores/actions_spec.js
index a3fb29c0eb9..3e22621058e 100644
--- a/spec/frontend/registry/explorer/stores/actions_spec.js
+++ b/spec/frontend/registry/explorer/stores/actions_spec.js
@@ -180,10 +180,7 @@ describe('Actions RegistryExplorer Store', () => {
{
tagsPagination: {},
},
- [
- { type: types.SET_MAIN_LOADING, payload: true },
- { type: types.SET_MAIN_LOADING, payload: false },
- ],
+ [{ type: types.SET_MAIN_LOADING, payload: true }],
[
{
type: 'requestTagsList',
@@ -220,13 +217,11 @@ describe('Actions RegistryExplorer Store', () => {
});
describe('request delete multiple tags', () => {
- const id = 1;
- const params = window.btoa(JSON.stringify({ id }));
- const projectPath = 'project-path';
- const url = `${projectPath}/registry/repository/${id}/tags/bulk_destroy`;
+ const url = `project-path/registry/repository/foo/tags`;
+ const params = window.btoa(JSON.stringify({ tags_path: `${url}?format=json` }));
it('successfully performs the delete request', done => {
- mock.onDelete(url).replyOnce(200);
+ mock.onDelete(`${url}/bulk_destroy`).replyOnce(200);
testAction(
actions.requestDeleteTags,
@@ -235,15 +230,9 @@ describe('Actions RegistryExplorer Store', () => {
params,
},
{
- config: {
- projectPath,
- },
tagsPagination: {},
},
- [
- { type: types.SET_MAIN_LOADING, payload: true },
- { type: types.SET_MAIN_LOADING, payload: false },
- ],
+ [{ type: types.SET_MAIN_LOADING, payload: true }],
[
{
type: 'requestTagsList',
@@ -267,9 +256,6 @@ describe('Actions RegistryExplorer Store', () => {
params,
},
{
- config: {
- projectPath,
- },
tagsPagination: {},
},
[
diff --git a/spec/frontend/registry/explorer/stores/getters_spec.js b/spec/frontend/registry/explorer/stores/getters_spec.js
new file mode 100644
index 00000000000..c224f076d30
--- /dev/null
+++ b/spec/frontend/registry/explorer/stores/getters_spec.js
@@ -0,0 +1,34 @@
+import * as getters from '~/registry/explorer/stores/getters';
+
+describe('Getters RegistryExplorer store', () => {
+ let state;
+ const tags = ['foo', 'bar'];
+
+ describe('tags', () => {
+ describe('when isLoading is false', () => {
+ beforeEach(() => {
+ state = {
+ tags,
+ isLoading: false,
+ };
+ });
+
+ it('returns tags', () => {
+ expect(getters.tags(state)).toEqual(state.tags);
+ });
+ });
+
+ describe('when isLoading is true', () => {
+ beforeEach(() => {
+ state = {
+ tags,
+ isLoading: true,
+ };
+ });
+
+ it('returns empty array', () => {
+ expect(getters.tags(state)).toEqual([]);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/registry/explorer/stores/mutations_spec.js b/spec/frontend/registry/explorer/stores/mutations_spec.js
index 43f6a95db10..1d5055c02d2 100644
--- a/spec/frontend/registry/explorer/stores/mutations_spec.js
+++ b/spec/frontend/registry/explorer/stores/mutations_spec.js
@@ -10,9 +10,12 @@ describe('Mutations Registry Explorer Store', () => {
describe('SET_INITIAL_STATE', () => {
it('should set the initial state', () => {
- const payload = { endpoint: 'foo', isGroupPage: true };
+ const payload = { endpoint: 'foo', isGroupPage: true, expirationPolicy: { foo: 'bar' } };
const expectedState = { ...mockState, config: payload };
- mutations[types.SET_INITIAL_STATE](mockState, payload);
+ mutations[types.SET_INITIAL_STATE](mockState, {
+ ...payload,
+ expirationPolicy: JSON.stringify(payload.expirationPolicy),
+ });
expect(mockState).toEqual(expectedState);
});
diff --git a/spec/frontend/registry/list/components/__snapshots__/project_empty_state_spec.js.snap b/spec/frontend/registry/list/components/__snapshots__/project_empty_state_spec.js.snap
index 426bc5c0e6c..c072950f3e2 100644
--- a/spec/frontend/registry/list/components/__snapshots__/project_empty_state_spec.js.snap
+++ b/spec/frontend/registry/list/components/__snapshots__/project_empty_state_spec.js.snap
@@ -89,6 +89,8 @@ exports[`Registry Project Empty state to match the default snapshot 1`] = `
title="Copy login command"
type="button"
>
+ <!---->
+
<svg
class="gl-icon s16"
>
@@ -126,6 +128,8 @@ exports[`Registry Project Empty state to match the default snapshot 1`] = `
title="Copy build command"
type="button"
>
+ <!---->
+
<svg
class="gl-icon s16"
>
@@ -155,6 +159,8 @@ exports[`Registry Project Empty state to match the default snapshot 1`] = `
title="Copy push command"
type="button"
>
+ <!---->
+
<svg
class="gl-icon s16"
>
diff --git a/spec/frontend/registry/settings/components/registry_settings_app_spec.js b/spec/frontend/registry/settings/components/registry_settings_app_spec.js
index e9ba65e4387..c83cc0c00dd 100644
--- a/spec/frontend/registry/settings/components/registry_settings_app_spec.js
+++ b/spec/frontend/registry/settings/components/registry_settings_app_spec.js
@@ -44,15 +44,6 @@ describe('Registry Settings App', () => {
expect(store.dispatch).toHaveBeenCalledWith('fetchSettings');
});
- it('show a toast if fetchSettings fails', () => {
- mountComponent({ dispatchMock: 'mockRejectedValue' });
- return wrapper.vm.$nextTick().then(() =>
- expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(FETCH_SETTINGS_ERROR_MESSAGE, {
- type: 'error',
- }),
- );
- });
-
it('renders the setting form', () => {
mountComponent();
expect(findSettingsComponent().exists()).toBe(true);
@@ -68,7 +59,23 @@ describe('Registry Settings App', () => {
});
it('shows an alert', () => {
- expect(findAlert().exists()).toBe(true);
+ expect(findAlert().html()).toContain(
+ 'Currently, the Container Registry tag expiration feature is not available',
+ );
+ });
+ });
+
+ describe('fetchSettingsError', () => {
+ beforeEach(() => {
+ mountComponent({ dispatchMock: 'mockRejectedValue' });
+ });
+
+ it('the form is hidden', () => {
+ expect(findSettingsComponent().exists()).toBe(false);
+ });
+
+ it('shows an alert', () => {
+ expect(findAlert().html()).toContain(FETCH_SETTINGS_ERROR_MESSAGE);
});
});
});
diff --git a/spec/frontend/releases/components/app_edit_spec.js b/spec/frontend/releases/components/app_edit_spec.js
index cb940facbd6..ac4b2b9124f 100644
--- a/spec/frontend/releases/components/app_edit_spec.js
+++ b/spec/frontend/releases/components/app_edit_spec.js
@@ -1,30 +1,27 @@
import Vuex from 'vuex';
import { mount } from '@vue/test-utils';
import ReleaseEditApp from '~/releases/components/app_edit.vue';
-import { release } from '../mock_data';
-import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+import { release as originalRelease } from '../mock_data';
+import * as commonUtils from '~/lib/utils/common_utils';
+import { BACK_URL_PARAM } from '~/releases/constants';
describe('Release edit component', () => {
let wrapper;
- let releaseClone;
+ let release;
let actions;
let state;
- beforeEach(() => {
- gon.api_version = 'v4';
-
- releaseClone = JSON.parse(JSON.stringify(convertObjectPropsToCamelCase(release)));
-
+ const factory = () => {
state = {
- release: releaseClone,
+ release,
markdownDocsPath: 'path/to/markdown/docs',
updateReleaseApiDocsPath: 'path/to/update/release/api/docs',
+ releasesPagePath: 'path/to/releases/page',
};
actions = {
fetchRelease: jest.fn(),
updateRelease: jest.fn(),
- navigateToReleasesPage: jest.fn(),
};
const store = new Vuex.Store({
@@ -40,58 +37,99 @@ describe('Release edit component', () => {
wrapper = mount(ReleaseEditApp, {
store,
});
+ };
- return wrapper.vm.$nextTick();
- });
+ beforeEach(() => {
+ gon.api_version = 'v4';
- it('calls fetchRelease when the component is created', () => {
- expect(actions.fetchRelease).toHaveBeenCalledTimes(1);
+ release = commonUtils.convertObjectPropsToCamelCase(originalRelease, { deep: true });
});
- it('renders the description text at the top of the page', () => {
- expect(wrapper.find('.js-subtitle-text').text()).toBe(
- 'Releases are based on Git tags. We recommend naming tags that fit within semantic versioning, for example v1.0, v2.0-pre.',
- );
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
});
- it('renders the correct tag name in the "Tag name" field', () => {
- expect(wrapper.find('#git-ref').element.value).toBe(releaseClone.tagName);
- });
+ describe(`basic functionality tests: all tests unrelated to the "${BACK_URL_PARAM}" parameter`, () => {
+ beforeEach(() => {
+ factory();
+ });
- it('renders the correct help text under the "Tag name" field', () => {
- const helperText = wrapper.find('#tag-name-help');
- const helperTextLink = helperText.find('a');
- const helperTextLinkAttrs = helperTextLink.attributes();
-
- expect(helperText.text()).toBe(
- 'Changing a Release tag is only supported via Releases API. More information',
- );
- expect(helperTextLink.text()).toBe('More information');
- expect(helperTextLinkAttrs.href).toBe(state.updateReleaseApiDocsPath);
- expect(helperTextLinkAttrs.rel).toContain('noopener');
- expect(helperTextLinkAttrs.rel).toContain('noreferrer');
- expect(helperTextLinkAttrs.target).toBe('_blank');
- });
+ it('calls fetchRelease when the component is created', () => {
+ expect(actions.fetchRelease).toHaveBeenCalledTimes(1);
+ });
- it('renders the correct release title in the "Release title" field', () => {
- expect(wrapper.find('#release-title').element.value).toBe(releaseClone.name);
- });
+ it('renders the description text at the top of the page', () => {
+ expect(wrapper.find('.js-subtitle-text').text()).toBe(
+ 'Releases are based on Git tags. We recommend naming tags that fit within semantic versioning, for example v1.0, v2.0-pre.',
+ );
+ });
- it('renders the release notes in the "Release notes" textarea', () => {
- expect(wrapper.find('#release-notes').element.value).toBe(releaseClone.description);
- });
+ it('renders the correct tag name in the "Tag name" field', () => {
+ expect(wrapper.find('#git-ref').element.value).toBe(release.tagName);
+ });
+
+ it('renders the correct help text under the "Tag name" field', () => {
+ const helperText = wrapper.find('#tag-name-help');
+ const helperTextLink = helperText.find('a');
+ const helperTextLinkAttrs = helperTextLink.attributes();
+
+ expect(helperText.text()).toBe(
+ 'Changing a Release tag is only supported via Releases API. More information',
+ );
+ expect(helperTextLink.text()).toBe('More information');
+ expect(helperTextLinkAttrs).toEqual(
+ expect.objectContaining({
+ href: state.updateReleaseApiDocsPath,
+ rel: 'noopener noreferrer',
+ target: '_blank',
+ }),
+ );
+ });
+
+ it('renders the correct release title in the "Release title" field', () => {
+ expect(wrapper.find('#release-title').element.value).toBe(release.name);
+ });
+
+ it('renders the release notes in the "Release notes" textarea', () => {
+ expect(wrapper.find('#release-notes').element.value).toBe(release.description);
+ });
+
+ it('renders the "Save changes" button as type="submit"', () => {
+ expect(wrapper.find('.js-submit-button').attributes('type')).toBe('submit');
+ });
- it('renders the "Save changes" button as type="submit"', () => {
- expect(wrapper.find('.js-submit-button').attributes('type')).toBe('submit');
+ it('calls updateRelease when the form is submitted', () => {
+ wrapper.find('form').trigger('submit');
+ expect(actions.updateRelease).toHaveBeenCalledTimes(1);
+ });
});
- it('calls updateRelease when the form is submitted', () => {
- wrapper.find('form').trigger('submit');
- expect(actions.updateRelease).toHaveBeenCalledTimes(1);
+ describe(`when the URL does not contain a "${BACK_URL_PARAM}" parameter`, () => {
+ beforeEach(() => {
+ factory();
+ });
+
+      it('renders a "Cancel" button with an href pointing to the main Releases page', () => {
+ const cancelButton = wrapper.find('.js-cancel-button');
+ expect(cancelButton.attributes().href).toBe(state.releasesPagePath);
+ });
});
- it('calls navigateToReleasesPage when the "Cancel" button is clicked', () => {
- wrapper.find('.js-cancel-button').vm.$emit('click');
- expect(actions.navigateToReleasesPage).toHaveBeenCalledTimes(1);
+ describe(`when the URL contains a "${BACK_URL_PARAM}" parameter`, () => {
+ const backUrl = 'https://example.gitlab.com/back/url';
+
+ beforeEach(() => {
+ commonUtils.getParameterByName = jest
+ .fn()
+ .mockImplementation(paramToGet => ({ [BACK_URL_PARAM]: backUrl }[paramToGet]));
+
+ factory();
+ });
+
+    it(`renders a "Cancel" button with an href pointing to the "${BACK_URL_PARAM}" URL parameter`, () => {
+ const cancelButton = wrapper.find('.js-cancel-button');
+ expect(cancelButton.attributes().href).toBe(backUrl);
+ });
});
});
diff --git a/spec/frontend/releases/components/app_show_spec.js b/spec/frontend/releases/components/app_show_spec.js
new file mode 100644
index 00000000000..3dc9964c25c
--- /dev/null
+++ b/spec/frontend/releases/components/app_show_spec.js
@@ -0,0 +1,61 @@
+import Vuex from 'vuex';
+import { shallowMount } from '@vue/test-utils';
+import ReleaseShowApp from '~/releases/components/app_show.vue';
+import { release as originalRelease } from '../mock_data';
+import { GlSkeletonLoading } from '@gitlab/ui';
+import ReleaseBlock from '~/releases/components/release_block.vue';
+import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+
+describe('Release show component', () => {
+ let wrapper;
+ let release;
+ let actions;
+
+ beforeEach(() => {
+ release = convertObjectPropsToCamelCase(originalRelease);
+ });
+
+ const factory = state => {
+ actions = {
+ fetchRelease: jest.fn(),
+ };
+
+ const store = new Vuex.Store({
+ modules: {
+ detail: {
+ namespaced: true,
+ actions,
+ state,
+ },
+ },
+ });
+
+ wrapper = shallowMount(ReleaseShowApp, { store });
+ };
+
+ const findLoadingSkeleton = () => wrapper.find(GlSkeletonLoading);
+ const findReleaseBlock = () => wrapper.find(ReleaseBlock);
+
+ it('calls fetchRelease when the component is created', () => {
+ factory({ release });
+ expect(actions.fetchRelease).toHaveBeenCalledTimes(1);
+ });
+
+ it('shows a loading skeleton and hides the release block while the API call is in progress', () => {
+ factory({ isFetchingRelease: true });
+ expect(findLoadingSkeleton().exists()).toBe(true);
+ expect(findReleaseBlock().exists()).toBe(false);
+ });
+
+ it('hides the loading skeleton and shows the release block when the API call finishes successfully', () => {
+ factory({ isFetchingRelease: false });
+ expect(findLoadingSkeleton().exists()).toBe(false);
+ expect(findReleaseBlock().exists()).toBe(true);
+ });
+
+ it('hides both the loading skeleton and the release block when the API call fails', () => {
+ factory({ fetchError: new Error('Uh oh') });
+ expect(findLoadingSkeleton().exists()).toBe(false);
+ expect(findReleaseBlock().exists()).toBe(false);
+ });
+});
diff --git a/spec/frontend/releases/components/evidence_block_spec.js b/spec/frontend/releases/components/evidence_block_spec.js
index 7b896575965..c76a0e04dce 100644
--- a/spec/frontend/releases/components/evidence_block_spec.js
+++ b/spec/frontend/releases/components/evidence_block_spec.js
@@ -2,12 +2,14 @@ import { mount } from '@vue/test-utils';
import { GlLink } from '@gitlab/ui';
import { truncateSha } from '~/lib/utils/text_utility';
import Icon from '~/vue_shared/components/icon.vue';
-import { release } from '../mock_data';
+import { release as originalRelease } from '../mock_data';
import EvidenceBlock from '~/releases/components/evidence_block.vue';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
+import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
describe('Evidence Block', () => {
let wrapper;
+ let release;
const factory = (options = {}) => {
wrapper = mount(EvidenceBlock, {
@@ -16,6 +18,8 @@ describe('Evidence Block', () => {
};
beforeEach(() => {
+ release = convertObjectPropsToCamelCase(originalRelease, { deep: true });
+
factory({
propsData: {
release,
@@ -32,7 +36,7 @@ describe('Evidence Block', () => {
});
  it('renders the title for the download link', () => {
- expect(wrapper.find(GlLink).text()).toBe(`${release.tag_name}-evidence.json`);
+ expect(wrapper.find(GlLink).text()).toBe(`${release.tagName}-evidence.json`);
});
it('renders the correct hover text for the download', () => {
@@ -40,19 +44,19 @@ describe('Evidence Block', () => {
});
it('renders the correct file link for download', () => {
- expect(wrapper.find(GlLink).attributes().download).toBe(`${release.tag_name}-evidence.json`);
+ expect(wrapper.find(GlLink).attributes().download).toBe(`${release.tagName}-evidence.json`);
});
describe('sha text', () => {
it('renders the short sha initially', () => {
- expect(wrapper.find('.js-short').text()).toBe(truncateSha(release.evidence_sha));
+ expect(wrapper.find('.js-short').text()).toBe(truncateSha(release.evidenceSha));
});
it('renders the long sha after expansion', () => {
wrapper.find('.js-text-expander-prepend').trigger('click');
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.find('.js-expanded').text()).toBe(release.evidence_sha);
+ expect(wrapper.find('.js-expanded').text()).toBe(release.evidenceSha);
});
});
});
@@ -63,12 +67,12 @@ describe('Evidence Block', () => {
});
it('renders the correct hover text', () => {
- expect(wrapper.find(ClipboardButton).attributes('title')).toBe('Copy commit SHA');
+ expect(wrapper.find(ClipboardButton).attributes('title')).toBe('Copy evidence SHA');
});
it('copies the sha', () => {
expect(wrapper.find(ClipboardButton).attributes('data-clipboard-text')).toBe(
- release.evidence_sha,
+ release.evidenceSha,
);
});
});
diff --git a/spec/frontend/releases/components/release_block_footer_spec.js b/spec/frontend/releases/components/release_block_footer_spec.js
index 4125d5c7e74..c63637c4cae 100644
--- a/spec/frontend/releases/components/release_block_footer_spec.js
+++ b/spec/frontend/releases/components/release_block_footer_spec.js
@@ -24,7 +24,7 @@ describe('Release block footer', () => {
const factory = (props = {}) => {
wrapper = mount(ReleaseBlockFooter, {
propsData: {
- ...convertObjectPropsToCamelCase(releaseClone),
+ ...convertObjectPropsToCamelCase(releaseClone, { deep: true }),
...props,
},
});
diff --git a/spec/frontend/releases/components/release_block_header_spec.js b/spec/frontend/releases/components/release_block_header_spec.js
index 157df15ff3c..9c6cbc86d3c 100644
--- a/spec/frontend/releases/components/release_block_header_spec.js
+++ b/spec/frontend/releases/components/release_block_header_spec.js
@@ -1,9 +1,10 @@
import { shallowMount } from '@vue/test-utils';
-import { cloneDeep, merge } from 'lodash';
+import { merge } from 'lodash';
import { GlLink } from '@gitlab/ui';
import ReleaseBlockHeader from '~/releases/components/release_block_header.vue';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import { release as originalRelease } from '../mock_data';
+import { BACK_URL_PARAM } from '~/releases/constants';
describe('Release block header', () => {
let wrapper;
@@ -18,9 +19,7 @@ describe('Release block header', () => {
};
beforeEach(() => {
- release = convertObjectPropsToCamelCase(cloneDeep(originalRelease), {
- ignoreKeyNames: ['_links'],
- });
+ release = convertObjectPropsToCamelCase(originalRelease, { deep: true });
});
afterEach(() => {
@@ -29,6 +28,7 @@ describe('Release block header', () => {
const findHeader = () => wrapper.find('h2');
const findHeaderLink = () => findHeader().find(GlLink);
+ const findEditButton = () => wrapper.find('.js-edit-button');
describe('when _links.self is provided', () => {
beforeEach(() => {
@@ -53,4 +53,39 @@ describe('Release block header', () => {
expect(findHeaderLink().exists()).toBe(false);
});
});
+
+ describe('when _links.edit_url is provided', () => {
+ const currentUrl = 'https://example.gitlab.com/path';
+
+ beforeEach(() => {
+ Object.defineProperty(window, 'location', {
+ writable: true,
+ value: {
+ href: currentUrl,
+ },
+ });
+
+ factory();
+ });
+
+ it('renders an edit button', () => {
+ expect(findEditButton().exists()).toBe(true);
+ });
+
+ it('renders the edit button with the correct href', () => {
+ const expectedQueryParam = `${BACK_URL_PARAM}=${encodeURIComponent(currentUrl)}`;
+ const expectedUrl = `${release._links.editUrl}?${expectedQueryParam}`;
+ expect(findEditButton().attributes().href).toBe(expectedUrl);
+ });
+ });
+
+  describe('when _links.edit_url is missing', () => {
+ beforeEach(() => {
+ factory({ _links: { editUrl: null } });
+ });
+
+ it('does not render an edit button', () => {
+ expect(findEditButton().exists()).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/releases/components/release_block_milestone_info_spec.js b/spec/frontend/releases/components/release_block_milestone_info_spec.js
index 5a3204a4ce2..0e79c45b337 100644
--- a/spec/frontend/releases/components/release_block_milestone_info_spec.js
+++ b/spec/frontend/releases/components/release_block_milestone_info_spec.js
@@ -2,29 +2,29 @@ import { mount } from '@vue/test-utils';
import { GlProgressBar, GlLink, GlBadge, GlButton } from '@gitlab/ui';
import { trimText } from 'helpers/text_helper';
import ReleaseBlockMilestoneInfo from '~/releases/components/release_block_milestone_info.vue';
-import { milestones } from '../mock_data';
+import { milestones as originalMilestones } from '../mock_data';
import { MAX_MILESTONES_TO_DISPLAY } from '~/releases/constants';
+import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
describe('Release block milestone info', () => {
let wrapper;
- let milestonesClone;
+ let milestones;
- const factory = milestonesProp => {
+ const factory = props => {
wrapper = mount(ReleaseBlockMilestoneInfo, {
- propsData: {
- milestones: milestonesProp,
- },
+ propsData: props,
});
return wrapper.vm.$nextTick();
};
beforeEach(() => {
- milestonesClone = JSON.parse(JSON.stringify(milestones));
+ milestones = convertObjectPropsToCamelCase(originalMilestones, { deep: true });
});
afterEach(() => {
wrapper.destroy();
+ wrapper = null;
});
const milestoneProgressBarContainer = () => wrapper.find('.js-milestone-progress-bar-container');
@@ -32,7 +32,7 @@ describe('Release block milestone info', () => {
const issuesContainer = () => wrapper.find('.js-issues-container');
describe('with default props', () => {
- beforeEach(() => factory(milestonesClone));
+ beforeEach(() => factory({ milestones }));
it('renders the correct percentage', () => {
expect(milestoneProgressBarContainer().text()).toContain('41% complete');
@@ -53,13 +53,13 @@ describe('Release block milestone info', () => {
it('renders a list of links to all associated milestones', () => {
expect(trimText(milestoneListContainer().text())).toContain('Milestones 13.6 • 13.5');
- milestonesClone.forEach((m, i) => {
+ milestones.forEach((m, i) => {
const milestoneLink = milestoneListContainer()
.findAll(GlLink)
.at(i);
expect(milestoneLink.text()).toBe(m.title);
- expect(milestoneLink.attributes('href')).toBe(m.web_url);
+ expect(milestoneLink.attributes('href')).toBe(m.webUrl);
expect(milestoneLink.attributes('title')).toBe(m.description);
});
});
@@ -84,7 +84,7 @@ describe('Release block milestone info', () => {
beforeEach(() => {
lotsOfMilestones = [];
- const template = milestonesClone[0];
+ const template = milestones[0];
for (let i = 0; i < MAX_MILESTONES_TO_DISPLAY + 10; i += 1) {
lotsOfMilestones.push({
@@ -101,7 +101,7 @@ describe('Release block milestone info', () => {
.map(m => m.title)
.join(' • ');
- return factory(lotsOfMilestones);
+ return factory({ milestones: lotsOfMilestones });
});
const clickShowMoreFewerButton = () => {
@@ -148,16 +148,16 @@ describe('Release block milestone info', () => {
/** Ensures we don't have any issues with dividing by zero when computing percentages */
describe('when all issue counts are zero', () => {
beforeEach(() => {
- milestonesClone = milestonesClone.map(m => ({
+ milestones = milestones.map(m => ({
...m,
- issue_stats: {
- ...m.issue_stats,
- opened: 0,
+ issueStats: {
+ ...m.issueStats,
+ total: 0,
closed: 0,
},
}));
- return factory(milestonesClone);
+ return factory({ milestones });
});
expectAllZeros();
@@ -165,14 +165,77 @@ describe('Release block milestone info', () => {
describe('if the API response is missing the "issue_stats" property', () => {
beforeEach(() => {
- milestonesClone = milestonesClone.map(m => ({
+ milestones = milestones.map(m => ({
...m,
- issue_stats: undefined,
+ issueStats: undefined,
}));
- return factory(milestonesClone);
+ return factory({ milestones });
});
expectAllZeros();
});
+
+ describe('Issue links', () => {
+ const findOpenIssuesLink = () => wrapper.find({ ref: 'openIssuesLink' });
+ const findOpenIssuesText = () => wrapper.find({ ref: 'openIssuesText' });
+ const findClosedIssuesLink = () => wrapper.find({ ref: 'closedIssuesLink' });
+ const findClosedIssuesText = () => wrapper.find({ ref: 'closedIssuesText' });
+
+ describe('when openIssuePath is provided', () => {
+ const openIssuesPath = '/path/to/open/issues';
+
+ beforeEach(() => {
+ return factory({ milestones, openIssuesPath });
+ });
+
+ it('renders the open issues as a link', () => {
+ expect(findOpenIssuesLink().exists()).toBe(true);
+ expect(findOpenIssuesText().exists()).toBe(false);
+ });
+
+ it('renders the open issues link with the correct href', () => {
+ expect(findOpenIssuesLink().attributes().href).toBe(openIssuesPath);
+ });
+ });
+
+ describe('when openIssuePath is not provided', () => {
+ beforeEach(() => {
+ return factory({ milestones });
+ });
+
+ it('renders the open issues as plain text', () => {
+ expect(findOpenIssuesLink().exists()).toBe(false);
+ expect(findOpenIssuesText().exists()).toBe(true);
+ });
+ });
+
+ describe('when closedIssuePath is provided', () => {
+ const closedIssuesPath = '/path/to/closed/issues';
+
+ beforeEach(() => {
+ return factory({ milestones, closedIssuesPath });
+ });
+
+ it('renders the closed issues as a link', () => {
+ expect(findClosedIssuesLink().exists()).toBe(true);
+ expect(findClosedIssuesText().exists()).toBe(false);
+ });
+
+ it('renders the closed issues link with the correct href', () => {
+ expect(findClosedIssuesLink().attributes().href).toBe(closedIssuesPath);
+ });
+ });
+
+ describe('when closedIssuePath is not provided', () => {
+ beforeEach(() => {
+ return factory({ milestones });
+ });
+
+ it('renders the closed issues as plain text', () => {
+ expect(findClosedIssuesLink().exists()).toBe(false);
+ expect(findClosedIssuesText().exists()).toBe(true);
+ });
+ });
+ });
});
diff --git a/spec/frontend/releases/components/release_block_spec.js b/spec/frontend/releases/components/release_block_spec.js
index aba1b8aff41..7ea2379ea35 100644
--- a/spec/frontend/releases/components/release_block_spec.js
+++ b/spec/frontend/releases/components/release_block_spec.js
@@ -5,24 +5,15 @@ import EvidenceBlock from '~/releases/components/evidence_block.vue';
import ReleaseBlock from '~/releases/components/release_block.vue';
import ReleaseBlockFooter from '~/releases/components/release_block_footer.vue';
import timeagoMixin from '~/vue_shared/mixins/timeago';
-import { release } from '../mock_data';
+import { release as originalRelease } from '../mock_data';
import Icon from '~/vue_shared/components/icon.vue';
-import { scrollToElement } from '~/lib/utils/common_utils';
-
-let mockLocationHash;
-jest.mock('~/lib/utils/url_utility', () => ({
- __esModule: true,
- getLocationHash: jest.fn().mockImplementation(() => mockLocationHash),
-}));
-
-jest.mock('~/lib/utils/common_utils', () => ({
- __esModule: true,
- scrollToElement: jest.fn(),
-}));
+import * as commonUtils from '~/lib/utils/common_utils';
+import { BACK_URL_PARAM } from '~/releases/constants';
+import * as urlUtility from '~/lib/utils/url_utility';
describe('Release block', () => {
let wrapper;
- let releaseClone;
+ let release;
const factory = (releaseProp, featureFlags = {}) => {
wrapper = mount(ReleaseBlock, {
@@ -45,7 +36,7 @@ describe('Release block', () => {
beforeEach(() => {
jest.spyOn($.fn, 'renderGFM');
- releaseClone = JSON.parse(JSON.stringify(release));
+ release = commonUtils.convertObjectPropsToCamelCase(originalRelease, { deep: true });
});
afterEach(() => {
@@ -59,9 +50,11 @@ describe('Release block', () => {
expect(wrapper.attributes().id).toBe('v0.3');
});
- it('renders an edit button that links to the "Edit release" page', () => {
+ it(`renders an edit button that links to the "Edit release" page with a "${BACK_URL_PARAM}" parameter`, () => {
expect(editButton().exists()).toBe(true);
- expect(editButton().attributes('href')).toBe(release._links.edit_url);
+ expect(editButton().attributes('href')).toBe(
+ `${release._links.editUrl}?${BACK_URL_PARAM}=${encodeURIComponent(window.location.href)}`,
+ );
});
it('renders release name', () => {
@@ -74,7 +67,7 @@ describe('Release block', () => {
});
it('renders release date', () => {
- expect(wrapper.text()).toContain(timeagoMixin.methods.timeFormatted(release.released_at));
+ expect(wrapper.text()).toContain(timeagoMixin.methods.timeFormatted(release.releasedAt));
});
it('renders number of assets provided', () => {
@@ -99,7 +92,7 @@ describe('Release block', () => {
expect(wrapper.findAll('.js-assets-list li').length).toEqual(release.assets.links.length);
expect(wrapper.find('.js-assets-list li a').attributes().href).toEqual(
- first(release.assets.links).url,
+ first(release.assets.links).directAssetUrl,
);
expect(wrapper.find('.js-assets-list li a').text()).toContain(
@@ -129,109 +122,104 @@ describe('Release block', () => {
});
it('renders commit sha', () => {
- releaseClone.commit_path = '/commit/example';
+ release.commitPath = '/commit/example';
- return factory(releaseClone).then(() => {
- expect(wrapper.text()).toContain(release.commit.short_id);
+ return factory(release).then(() => {
+ expect(wrapper.text()).toContain(release.commit.shortId);
expect(wrapper.find('a[href="/commit/example"]').exists()).toBe(true);
});
});
it('renders tag name', () => {
- releaseClone.tag_path = '/tag/example';
+ release.tagPath = '/tag/example';
- return factory(releaseClone).then(() => {
- expect(wrapper.text()).toContain(release.tag_name);
+ return factory(release).then(() => {
+ expect(wrapper.text()).toContain(release.tagName);
expect(wrapper.find('a[href="/tag/example"]').exists()).toBe(true);
});
});
- it("does not render an edit button if release._links.edit_url isn't a string", () => {
- delete releaseClone._links;
-
- return factory(releaseClone).then(() => {
- expect(editButton().exists()).toBe(false);
- });
- });
-
it('does not render the milestone list if no milestones are associated to the release', () => {
- delete releaseClone.milestones;
+ delete release.milestones;
- return factory(releaseClone).then(() => {
+ return factory(release).then(() => {
expect(milestoneListLabel().exists()).toBe(false);
});
});
it('renders upcoming release badge', () => {
- releaseClone.upcoming_release = true;
+ release.upcomingRelease = true;
- return factory(releaseClone).then(() => {
+ return factory(release).then(() => {
expect(wrapper.text()).toContain('Upcoming Release');
});
});
- it('slugifies the tag_name before setting it as the elements ID', () => {
- releaseClone.tag_name = 'a dangerous tag name <script>alert("hello")</script>';
+ it('slugifies the tagName before setting it as the elements ID', () => {
+ release.tagName = 'a dangerous tag name <script>alert("hello")</script>';
- return factory(releaseClone).then(() => {
+ return factory(release).then(() => {
expect(wrapper.attributes().id).toBe('a-dangerous-tag-name-script-alert-hello-script');
});
});
describe('evidence block', () => {
it('renders the evidence block when the evidence is available and the feature flag is true', () =>
- factory(releaseClone, { releaseEvidenceCollection: true }).then(() =>
+ factory(release, { releaseEvidenceCollection: true }).then(() =>
expect(wrapper.find(EvidenceBlock).exists()).toBe(true),
));
it('does not render the evidence block when the evidence is available but the feature flag is false', () =>
- factory(releaseClone, { releaseEvidenceCollection: true }).then(() =>
+ factory(release, { releaseEvidenceCollection: true }).then(() =>
expect(wrapper.find(EvidenceBlock).exists()).toBe(true),
));
it('does not render the evidence block when there is no evidence', () => {
- releaseClone.evidence_sha = null;
+ release.evidenceSha = null;
- return factory(releaseClone).then(() => {
+ return factory(release).then(() => {
expect(wrapper.find(EvidenceBlock).exists()).toBe(false);
});
});
});
describe('anchor scrolling', () => {
+ let locationHash;
+
beforeEach(() => {
- scrollToElement.mockClear();
+ commonUtils.scrollToElement = jest.fn();
+ urlUtility.getLocationHash = jest.fn().mockImplementation(() => locationHash);
});
const hasTargetBlueBackground = () => wrapper.classes('bg-line-target-blue');
it('does not attempt to scroll the page if no anchor tag is included in the URL', () => {
- mockLocationHash = '';
+ locationHash = '';
return factory(release).then(() => {
- expect(scrollToElement).not.toHaveBeenCalled();
+ expect(commonUtils.scrollToElement).not.toHaveBeenCalled();
});
});
it("does not attempt to scroll the page if the anchor tag doesn't match the release's tag name", () => {
- mockLocationHash = 'v0.4';
+ locationHash = 'v0.4';
return factory(release).then(() => {
- expect(scrollToElement).not.toHaveBeenCalled();
+ expect(commonUtils.scrollToElement).not.toHaveBeenCalled();
});
});
it("attempts to scroll itself into view if the anchor tag matches the release's tag name", () => {
- mockLocationHash = release.tag_name;
+ locationHash = release.tagName;
return factory(release).then(() => {
- expect(scrollToElement).toHaveBeenCalledTimes(1);
+ expect(commonUtils.scrollToElement).toHaveBeenCalledTimes(1);
- expect(scrollToElement).toHaveBeenCalledWith(wrapper.element);
+ expect(commonUtils.scrollToElement).toHaveBeenCalledWith(wrapper.element);
});
});
it('renders with a light blue background if it is the target of the anchor', () => {
- mockLocationHash = release.tag_name;
+ locationHash = release.tagName;
return factory(release).then(() => {
expect(hasTargetBlueBackground()).toBe(true);
@@ -239,7 +227,7 @@ describe('Release block', () => {
});
it('does not render with a light blue background if it is not the target of the anchor', () => {
- mockLocationHash = '';
+ locationHash = '';
return factory(release).then(() => {
expect(hasTargetBlueBackground()).toBe(false);
@@ -275,16 +263,16 @@ describe('Release block', () => {
expect(milestoneLink.text()).toBe(milestone.title);
- expect(milestoneLink.attributes('href')).toBe(milestone.web_url);
+ expect(milestoneLink.attributes('href')).toBe(milestone.webUrl);
expect(milestoneLink.attributes('title')).toBe(milestone.description);
});
});
it('renders the label as "Milestone" if only a single milestone is passed in', () => {
- releaseClone.milestones = releaseClone.milestones.slice(0, 1);
+ release.milestones = release.milestones.slice(0, 1);
- return factory(releaseClone, { releaseIssueSummary: false }).then(() => {
+ return factory(release, { releaseIssueSummary: false }).then(() => {
expect(
milestoneListLabel()
.find('.js-label-text')
diff --git a/spec/frontend/releases/mock_data.js b/spec/frontend/releases/mock_data.js
index c2d3f00550a..85e6bab71ba 100644
--- a/spec/frontend/releases/mock_data.js
+++ b/spec/frontend/releases/mock_data.js
@@ -12,7 +12,7 @@ export const milestones = [
start_date: '2019-08-31',
web_url: 'http://0.0.0.0:3001/root/release-test/-/milestones/2',
issue_stats: {
- opened: 14,
+ total: 33,
closed: 19,
},
},
@@ -29,7 +29,7 @@ export const milestones = [
start_date: '2019-08-19',
web_url: 'http://0.0.0.0:3001/root/release-test/-/milestones/1',
issue_stats: {
- opened: 18,
+ total: 21,
closed: 3,
},
},
@@ -96,6 +96,7 @@ export const release = {
id: 1,
name: 'my link',
url: 'https://google.com',
+ direct_asset_url: 'https://redirected.google.com',
external: true,
},
{
@@ -103,6 +104,7 @@ export const release = {
name: 'my second link',
url:
'https://gitlab.com/gitlab-org/gitlab-foss/-/jobs/artifacts/v11.6.0-rc4/download?job=rspec-mysql+41%2F50',
+ direct_asset_url: 'https://redirected.google.com',
external: false,
},
],
diff --git a/spec/frontend/releases/stores/modules/detail/actions_spec.js b/spec/frontend/releases/stores/modules/detail/actions_spec.js
index 0c2763822c9..88346083f5a 100644
--- a/spec/frontend/releases/stores/modules/detail/actions_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/actions_spec.js
@@ -1,13 +1,14 @@
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
+import { cloneDeep, merge } from 'lodash';
import * as actions from '~/releases/stores/modules/detail/actions';
import * as types from '~/releases/stores/modules/detail/mutation_types';
-import { release } from '../../../mock_data';
-import state from '~/releases/stores/modules/detail/state';
+import { release as originalRelease } from '../../../mock_data';
+import createState from '~/releases/stores/modules/detail/state';
import createFlash from '~/flash';
-import { redirectTo } from '~/lib/utils/url_utility';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+import { redirectTo } from '~/lib/utils/url_utility';
jest.mock('~/flash', () => jest.fn());
@@ -17,14 +18,14 @@ jest.mock('~/lib/utils/url_utility', () => ({
}));
describe('Release detail actions', () => {
- let stateClone;
- let releaseClone;
+ let state;
+ let release;
let mock;
let error;
beforeEach(() => {
- stateClone = state();
- releaseClone = JSON.parse(JSON.stringify(release));
+ state = createState();
+ release = cloneDeep(originalRelease);
mock = new MockAdapter(axios);
gon.api_version = 'v4';
error = { message: 'An error occurred' };
@@ -39,7 +40,7 @@ describe('Release detail actions', () => {
it(`commits ${types.SET_INITIAL_STATE} with the provided object`, () => {
const initialState = {};
- return testAction(actions.setInitialState, initialState, stateClone, [
+ return testAction(actions.setInitialState, initialState, state, [
{ type: types.SET_INITIAL_STATE, payload: initialState },
]);
});
@@ -47,19 +48,19 @@ describe('Release detail actions', () => {
describe('requestRelease', () => {
it(`commits ${types.REQUEST_RELEASE}`, () =>
- testAction(actions.requestRelease, undefined, stateClone, [{ type: types.REQUEST_RELEASE }]));
+ testAction(actions.requestRelease, undefined, state, [{ type: types.REQUEST_RELEASE }]));
});
describe('receiveReleaseSuccess', () => {
it(`commits ${types.RECEIVE_RELEASE_SUCCESS}`, () =>
- testAction(actions.receiveReleaseSuccess, releaseClone, stateClone, [
- { type: types.RECEIVE_RELEASE_SUCCESS, payload: releaseClone },
+ testAction(actions.receiveReleaseSuccess, release, state, [
+ { type: types.RECEIVE_RELEASE_SUCCESS, payload: release },
]));
});
describe('receiveReleaseError', () => {
it(`commits ${types.RECEIVE_RELEASE_ERROR}`, () =>
- testAction(actions.receiveReleaseError, error, stateClone, [
+ testAction(actions.receiveReleaseError, error, state, [
{ type: types.RECEIVE_RELEASE_ERROR, payload: error },
]));
@@ -77,24 +78,24 @@ describe('Release detail actions', () => {
let getReleaseUrl;
beforeEach(() => {
- stateClone.projectId = '18';
- stateClone.tagName = 'v1.3';
- getReleaseUrl = `/api/v4/projects/${stateClone.projectId}/releases/${stateClone.tagName}`;
+ state.projectId = '18';
+ state.tagName = 'v1.3';
+ getReleaseUrl = `/api/v4/projects/${state.projectId}/releases/${state.tagName}`;
});
it(`dispatches requestRelease and receiveReleaseSuccess with the camel-case'd release object`, () => {
- mock.onGet(getReleaseUrl).replyOnce(200, releaseClone);
+ mock.onGet(getReleaseUrl).replyOnce(200, release);
return testAction(
actions.fetchRelease,
undefined,
- stateClone,
+ state,
[],
[
{ type: 'requestRelease' },
{
type: 'receiveReleaseSuccess',
- payload: convertObjectPropsToCamelCase(releaseClone, { deep: true }),
+ payload: convertObjectPropsToCamelCase(release, { deep: true }),
},
],
);
@@ -106,7 +107,7 @@ describe('Release detail actions', () => {
return testAction(
actions.fetchRelease,
undefined,
- stateClone,
+ state,
[],
[{ type: 'requestRelease' }, { type: 'receiveReleaseError', payload: expect.anything() }],
);
@@ -116,7 +117,7 @@ describe('Release detail actions', () => {
describe('updateReleaseTitle', () => {
it(`commits ${types.UPDATE_RELEASE_TITLE} with the updated release title`, () => {
const newTitle = 'The new release title';
- return testAction(actions.updateReleaseTitle, newTitle, stateClone, [
+ return testAction(actions.updateReleaseTitle, newTitle, state, [
{ type: types.UPDATE_RELEASE_TITLE, payload: newTitle },
]);
});
@@ -125,7 +126,7 @@ describe('Release detail actions', () => {
describe('updateReleaseNotes', () => {
it(`commits ${types.UPDATE_RELEASE_NOTES} with the updated release notes`, () => {
const newReleaseNotes = 'The new release notes';
- return testAction(actions.updateReleaseNotes, newReleaseNotes, stateClone, [
+ return testAction(actions.updateReleaseNotes, newReleaseNotes, state, [
{ type: types.UPDATE_RELEASE_NOTES, payload: newReleaseNotes },
]);
});
@@ -133,25 +134,40 @@ describe('Release detail actions', () => {
describe('requestUpdateRelease', () => {
it(`commits ${types.REQUEST_UPDATE_RELEASE}`, () =>
- testAction(actions.requestUpdateRelease, undefined, stateClone, [
+ testAction(actions.requestUpdateRelease, undefined, state, [
{ type: types.REQUEST_UPDATE_RELEASE },
]));
});
describe('receiveUpdateReleaseSuccess', () => {
it(`commits ${types.RECEIVE_UPDATE_RELEASE_SUCCESS}`, () =>
- testAction(
- actions.receiveUpdateReleaseSuccess,
- undefined,
- stateClone,
- [{ type: types.RECEIVE_UPDATE_RELEASE_SUCCESS }],
- [{ type: 'navigateToReleasesPage' }],
- ));
+ testAction(actions.receiveUpdateReleaseSuccess, undefined, { ...state, featureFlags: {} }, [
+ { type: types.RECEIVE_UPDATE_RELEASE_SUCCESS },
+ ]));
+
+    describe('when the releaseShowPage feature flag is enabled', () => {
+      it('redirects to the release page', () => {
+        const rootState = { featureFlags: { releaseShowPage: true } };
+        const updatedState = merge({}, state, {
+          releasesPagePath: 'path/to/releases/page',
+          release: {
+            _links: {
+              self: 'path/to/self',
+            },
+          },
+        });
+
+        actions.receiveUpdateReleaseSuccess({ commit: jest.fn(), state: updatedState, rootState });
+
+        expect(redirectTo).toHaveBeenCalledTimes(1);
+        expect(redirectTo).toHaveBeenCalledWith(updatedState.release._links.self);
+      });
+    });
+
+ describe('when the releaseShowPage feature flag is disabled', () => {});
});
describe('receiveUpdateReleaseError', () => {
it(`commits ${types.RECEIVE_UPDATE_RELEASE_ERROR}`, () =>
- testAction(actions.receiveUpdateReleaseError, error, stateClone, [
+ testAction(actions.receiveUpdateReleaseError, error, state, [
{ type: types.RECEIVE_UPDATE_RELEASE_ERROR, payload: error },
]));
@@ -169,10 +185,10 @@ describe('Release detail actions', () => {
let getReleaseUrl;
beforeEach(() => {
- stateClone.release = releaseClone;
- stateClone.projectId = '18';
- stateClone.tagName = 'v1.3';
- getReleaseUrl = `/api/v4/projects/${stateClone.projectId}/releases/${stateClone.tagName}`;
+ state.release = release;
+ state.projectId = '18';
+ state.tagName = 'v1.3';
+ getReleaseUrl = `/api/v4/projects/${state.projectId}/releases/${state.tagName}`;
});
it(`dispatches requestUpdateRelease and receiveUpdateReleaseSuccess`, () => {
@@ -181,7 +197,7 @@ describe('Release detail actions', () => {
return testAction(
actions.updateRelease,
undefined,
- stateClone,
+ state,
[],
[{ type: 'requestUpdateRelease' }, { type: 'receiveUpdateReleaseSuccess' }],
);
@@ -193,7 +209,7 @@ describe('Release detail actions', () => {
return testAction(
actions.updateRelease,
undefined,
- stateClone,
+ state,
[],
[
{ type: 'requestUpdateRelease' },
@@ -202,16 +218,4 @@ describe('Release detail actions', () => {
);
});
});
-
- describe('navigateToReleasesPage', () => {
- it(`calls redirectTo() with the URL to the releases page`, () => {
- const releasesPagePath = 'path/to/releases/page';
- stateClone.releasesPagePath = releasesPagePath;
-
- actions.navigateToReleasesPage({ state: stateClone });
-
- expect(redirectTo).toHaveBeenCalledTimes(1);
- expect(redirectTo).toHaveBeenCalledWith(releasesPagePath);
- });
- });
});
diff --git a/spec/frontend/reports/store/utils_spec.js b/spec/frontend/reports/store/utils_spec.js
index 0d9a8dd4585..9ae456658dc 100644
--- a/spec/frontend/reports/store/utils_spec.js
+++ b/spec/frontend/reports/store/utils_spec.js
@@ -30,9 +30,7 @@ describe('Reports store utils', () => {
const data = { failed: 3, total: 10 };
const result = utils.summaryTextBuilder(name, data);
- expect(result).toBe(
- 'Test summary contained 3 failed/error test results out of 10 total tests',
- );
+ expect(result).toBe('Test summary contained 3 failed out of 10 total tests');
});
it('should render text for multiple errored results', () => {
@@ -40,9 +38,7 @@ describe('Reports store utils', () => {
const data = { errored: 7, total: 10 };
const result = utils.summaryTextBuilder(name, data);
- expect(result).toBe(
- 'Test summary contained 7 failed/error test results out of 10 total tests',
- );
+ expect(result).toBe('Test summary contained 7 errors out of 10 total tests');
});
it('should render text for multiple fixed results', () => {
@@ -59,7 +55,7 @@ describe('Reports store utils', () => {
const result = utils.summaryTextBuilder(name, data);
expect(result).toBe(
- 'Test summary contained 3 failed/error test results and 4 fixed test results out of 10 total tests',
+ 'Test summary contained 3 failed and 4 fixed test results out of 10 total tests',
);
});
@@ -69,18 +65,17 @@ describe('Reports store utils', () => {
const result = utils.summaryTextBuilder(name, data);
expect(result).toBe(
- 'Test summary contained 1 failed/error test result and 1 fixed test result out of 10 total tests',
+ 'Test summary contained 1 failed and 1 fixed test result out of 10 total tests',
);
});
it('should render text for singular failed, errored, and fixed results', () => {
- // these will be singular when the copy is updated
const name = 'Test summary';
const data = { failed: 1, errored: 1, resolved: 1, total: 10 };
const result = utils.summaryTextBuilder(name, data);
expect(result).toBe(
- 'Test summary contained 2 failed/error test results and 1 fixed test result out of 10 total tests',
+ 'Test summary contained 1 failed, 1 error and 1 fixed test result out of 10 total tests',
);
});
@@ -90,7 +85,7 @@ describe('Reports store utils', () => {
const result = utils.summaryTextBuilder(name, data);
expect(result).toBe(
- 'Test summary contained 5 failed/error test results and 4 fixed test results out of 10 total tests',
+ 'Test summary contained 2 failed, 3 errors and 4 fixed test results out of 10 total tests',
);
});
});
@@ -117,7 +112,7 @@ describe('Reports store utils', () => {
const data = { failed: 3, total: 10 };
const result = utils.reportTextBuilder(name, data);
- expect(result).toBe('Rspec found 3 failed/error test results out of 10 total tests');
+ expect(result).toBe('Rspec found 3 failed out of 10 total tests');
});
it('should render text for multiple errored results', () => {
@@ -125,7 +120,7 @@ describe('Reports store utils', () => {
const data = { errored: 7, total: 10 };
const result = utils.reportTextBuilder(name, data);
- expect(result).toBe('Rspec found 7 failed/error test results out of 10 total tests');
+ expect(result).toBe('Rspec found 7 errors out of 10 total tests');
});
it('should render text for multiple fixed results', () => {
@@ -141,9 +136,7 @@ describe('Reports store utils', () => {
const data = { failed: 3, resolved: 4, total: 10 };
const result = utils.reportTextBuilder(name, data);
- expect(result).toBe(
- 'Rspec found 3 failed/error test results and 4 fixed test results out of 10 total tests',
- );
+ expect(result).toBe('Rspec found 3 failed and 4 fixed test results out of 10 total tests');
});
it('should render text for a singular fixed, and a singular failed result', () => {
@@ -151,19 +144,16 @@ describe('Reports store utils', () => {
const data = { failed: 1, resolved: 1, total: 10 };
const result = utils.reportTextBuilder(name, data);
- expect(result).toBe(
- 'Rspec found 1 failed/error test result and 1 fixed test result out of 10 total tests',
- );
+ expect(result).toBe('Rspec found 1 failed and 1 fixed test result out of 10 total tests');
});
it('should render text for singular failed, errored, and fixed results', () => {
- // these will be singular when the copy is updated
const name = 'Rspec';
const data = { failed: 1, errored: 1, resolved: 1, total: 10 };
const result = utils.reportTextBuilder(name, data);
expect(result).toBe(
- 'Rspec found 2 failed/error test results and 1 fixed test result out of 10 total tests',
+ 'Rspec found 1 failed, 1 error and 1 fixed test result out of 10 total tests',
);
});
@@ -173,7 +163,7 @@ describe('Reports store utils', () => {
const result = utils.reportTextBuilder(name, data);
expect(result).toBe(
- 'Rspec found 5 failed/error test results and 4 fixed test results out of 10 total tests',
+ 'Rspec found 2 failed, 3 errors and 4 fixed test results out of 10 total tests',
);
});
});
diff --git a/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap b/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap
index 1497539a0c1..491fc20c40e 100644
--- a/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap
+++ b/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap
@@ -68,10 +68,10 @@ exports[`Repository last commit component renders commit widget 1`] = `
<gl-link-stub
class="js-commit-pipeline"
href="https://test.com/pipeline"
- title="Commit: failed"
+ title="Pipeline: failed"
>
<ci-icon-stub
- aria-label="Commit: failed"
+ aria-label="Pipeline: failed"
cssclasses=""
size="24"
status="[object Object]"
@@ -174,10 +174,10 @@ exports[`Repository last commit component renders the signature HTML as returned
<gl-link-stub
class="js-commit-pipeline"
href="https://test.com/pipeline"
- title="Commit: failed"
+ title="Pipeline: failed"
>
<ci-icon-stub
- aria-label="Commit: failed"
+ aria-label="Pipeline: failed"
cssclasses=""
size="24"
status="[object Object]"
diff --git a/spec/frontend/search_spec.js b/spec/frontend/search_spec.js
new file mode 100644
index 00000000000..af93fa88f72
--- /dev/null
+++ b/spec/frontend/search_spec.js
@@ -0,0 +1,42 @@
+import $ from 'jquery';
+import Api from '~/api';
+import Search from '~/pages/search/show/search';
+
+jest.mock('~/api');
+
+describe('Search', () => {
+ const fixturePath = 'search/show.html';
+ const searchTerm = 'some search';
+ const fillDropdownInput = dropdownSelector => {
+ const dropdownElement = document.querySelector(dropdownSelector).parentNode;
+ const inputElement = dropdownElement.querySelector('.dropdown-input-field');
+ inputElement.value = searchTerm;
+ return inputElement;
+ };
+
+ preloadFixtures(fixturePath);
+
+ beforeEach(() => {
+ loadFixtures(fixturePath);
+ new Search(); // eslint-disable-line no-new
+ });
+
+ it('requests groups from backend when filtering', () => {
+ jest.spyOn(Api, 'groups').mockImplementation(term => {
+ expect(term).toBe(searchTerm);
+ });
+
+ const inputElement = fillDropdownInput('.js-search-group-dropdown');
+
+ $(inputElement).trigger('input');
+ });
+
+ it('requests projects from backend when filtering', () => {
+ jest.spyOn(Api, 'projects').mockImplementation(term => {
+ expect(term).toBe(searchTerm);
+ });
+ const inputElement = fillDropdownInput('.js-search-project-dropdown');
+
+ $(inputElement).trigger('input');
+ });
+});
diff --git a/spec/frontend/self_monitor/components/__snapshots__/self_monitor_spec.js.snap b/spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap
index 955716ccbca..955716ccbca 100644
--- a/spec/frontend/self_monitor/components/__snapshots__/self_monitor_spec.js.snap
+++ b/spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap
diff --git a/spec/frontend/self_monitor/components/self_monitor_form_spec.js b/spec/frontend/self_monitor/components/self_monitor_form_spec.js
new file mode 100644
index 00000000000..6532c6ed2c7
--- /dev/null
+++ b/spec/frontend/self_monitor/components/self_monitor_form_spec.js
@@ -0,0 +1,89 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlButton } from '@gitlab/ui';
+import SelfMonitor from '~/self_monitor/components/self_monitor_form.vue';
+import { createStore } from '~/self_monitor/store';
+
+describe('self monitor component', () => {
+ let wrapper;
+ let store;
+
+ describe('When the self monitor project has not been created', () => {
+ beforeEach(() => {
+ store = createStore({
+ projectEnabled: false,
+ selfMonitoringProjectExists: false,
+ createSelfMonitoringProjectPath: '/create',
+ deleteSelfMonitoringProjectPath: '/delete',
+ });
+ });
+
+ afterEach(() => {
+ if (wrapper.destroy) {
+ wrapper.destroy();
+ }
+ });
+
+ describe('default state', () => {
+ it('to match the default snapshot', () => {
+ wrapper = shallowMount(SelfMonitor, { store });
+
+ expect(wrapper.element).toMatchSnapshot();
+ });
+ });
+
+ it('renders header text', () => {
+ wrapper = shallowMount(SelfMonitor, { store });
+
+ expect(wrapper.find('.js-section-header').text()).toBe('Self monitoring');
+ });
+
+ describe('expand/collapse button', () => {
+ it('renders as an expand button by default', () => {
+ wrapper = shallowMount(SelfMonitor, { store });
+
+ const button = wrapper.find(GlButton);
+
+ expect(button.text()).toBe('Expand');
+ });
+ });
+
+ describe('sub-header', () => {
+ it('renders descriptive text', () => {
+ wrapper = shallowMount(SelfMonitor, { store });
+
+ expect(wrapper.find('.js-section-sub-header').text()).toContain(
+ 'Enable or disable instance self monitoring',
+ );
+ });
+ });
+
+ describe('settings-content', () => {
+ it('renders the form description without a link', () => {
+ wrapper = shallowMount(SelfMonitor, { store });
+
+ expect(wrapper.vm.selfMonitoringFormText).toContain(
+ 'Enabling this feature creates a project that can be used to monitor the health of your instance.',
+ );
+ });
+
+ it('renders the form description with a link', () => {
+ store = createStore({
+ projectEnabled: true,
+ selfMonitoringProjectExists: true,
+ createSelfMonitoringProjectPath: '/create',
+ deleteSelfMonitoringProjectPath: '/delete',
+ selfMonitoringProjectFullPath: 'instance-administrators-random/gitlab-self-monitoring',
+ });
+
+ wrapper = shallowMount(SelfMonitor, { store });
+
+ expect(
+ wrapper
+ .find({ ref: 'selfMonitoringFormText' })
+ .find('a')
+ .attributes('href'),
+ ).toEqual('http://localhost/instance-administrators-random/gitlab-self-monitoring');
+ });
+ });
+ });
+});
diff --git a/spec/frontend/self_monitor/components/self_monitor_spec.js b/spec/frontend/self_monitor/components/self_monitor_spec.js
deleted file mode 100644
index 50b97ae914d..00000000000
--- a/spec/frontend/self_monitor/components/self_monitor_spec.js
+++ /dev/null
@@ -1,83 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import { GlButton } from '@gitlab/ui';
-import SelfMonitor from '~/self_monitor/components/self_monitor_form.vue';
-import { createStore } from '~/self_monitor/store';
-
-describe('self monitor component', () => {
- let wrapper;
- let store;
-
- describe('When the self monitor project has not been created', () => {
- beforeEach(() => {
- store = createStore({
- projectEnabled: false,
- selfMonitoringProjectExists: false,
- createSelfMonitoringProjectPath: '/create',
- deleteSelfMonitoringProjectPath: '/delete',
- });
- });
-
- afterEach(() => {
- if (wrapper.destroy) {
- wrapper.destroy();
- }
- });
-
- describe('default state', () => {
- it('to match the default snapshot', () => {
- wrapper = shallowMount(SelfMonitor, { store });
-
- expect(wrapper.element).toMatchSnapshot();
- });
- });
-
- it('renders header text', () => {
- wrapper = shallowMount(SelfMonitor, { store });
-
- expect(wrapper.find('.js-section-header').text()).toBe('Self monitoring');
- });
-
- describe('expand/collapse button', () => {
- it('renders as an expand button by default', () => {
- wrapper = shallowMount(SelfMonitor, { store });
-
- const button = wrapper.find(GlButton);
-
- expect(button.text()).toBe('Expand');
- });
- });
-
- describe('sub-header', () => {
- it('renders descriptive text', () => {
- wrapper = shallowMount(SelfMonitor, { store });
-
- expect(wrapper.find('.js-section-sub-header').text()).toContain(
- 'Enable or disable instance self monitoring',
- );
- });
- });
-
- describe('settings-content', () => {
- it('renders the form description without a link', () => {
- wrapper = shallowMount(SelfMonitor, { store });
-
- expect(wrapper.vm.selfMonitoringFormText).toContain(
- 'Enabling this feature creates a project that can be used to monitor the health of your instance.',
- );
- });
-
- it('renders the form description with a link', () => {
- store = createStore({
- projectEnabled: true,
- selfMonitoringProjectExists: true,
- createSelfMonitoringProjectPath: '/create',
- deleteSelfMonitoringProjectPath: '/delete',
- });
-
- wrapper = shallowMount(SelfMonitor, { store });
-
- expect(wrapper.vm.selfMonitoringFormText).toContain('<a href="http://localhost/">');
- });
- });
- });
-});
diff --git a/spec/frontend/sidebar/assignee_title_spec.js b/spec/frontend/sidebar/assignee_title_spec.js
new file mode 100644
index 00000000000..92fabaa664e
--- /dev/null
+++ b/spec/frontend/sidebar/assignee_title_spec.js
@@ -0,0 +1,116 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlLoadingIcon } from '@gitlab/ui';
+import { mockTracking, triggerEvent } from 'helpers/tracking_helper';
+import Component from '~/sidebar/components/assignees/assignee_title.vue';
+
+describe('AssigneeTitle component', () => {
+ let wrapper;
+
+ const createComponent = props => {
+ return shallowMount(Component, {
+ propsData: {
+ numberOfAssignees: 0,
+ editable: false,
+ ...props,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('assignee title', () => {
+ it('renders assignee', () => {
+ wrapper = createComponent({
+ numberOfAssignees: 1,
+ editable: false,
+ });
+
+ expect(wrapper.vm.$el.innerText.trim()).toEqual('Assignee');
+ });
+
+ it('renders 2 assignees', () => {
+ wrapper = createComponent({
+ numberOfAssignees: 2,
+ editable: false,
+ });
+
+ expect(wrapper.vm.$el.innerText.trim()).toEqual('2 Assignees');
+ });
+ });
+
+ describe('gutter toggle', () => {
+ it('does not show toggle by default', () => {
+ wrapper = createComponent({
+ numberOfAssignees: 2,
+ editable: false,
+ });
+
+ expect(wrapper.vm.$el.querySelector('.gutter-toggle')).toBeNull();
+ });
+
+ it('shows toggle when showToggle is true', () => {
+ wrapper = createComponent({
+ numberOfAssignees: 2,
+ editable: false,
+ showToggle: true,
+ });
+
+ expect(wrapper.vm.$el.querySelector('.gutter-toggle')).toEqual(expect.any(Object));
+ });
+ });
+
+ it('does not render spinner by default', () => {
+ wrapper = createComponent({
+ numberOfAssignees: 0,
+ editable: false,
+ });
+
+ expect(wrapper.find(GlLoadingIcon).exists()).toBeFalsy();
+ });
+
+ it('renders spinner when loading', () => {
+ wrapper = createComponent({
+ loading: true,
+ numberOfAssignees: 0,
+ editable: false,
+ });
+
+ expect(wrapper.find(GlLoadingIcon).exists()).toBeTruthy();
+ });
+
+ it('does not render edit link when not editable', () => {
+ wrapper = createComponent({
+ numberOfAssignees: 0,
+ editable: false,
+ });
+
+ expect(wrapper.vm.$el.querySelector('.edit-link')).toBeNull();
+ });
+
+ it('renders edit link when editable', () => {
+ wrapper = createComponent({
+ numberOfAssignees: 0,
+ editable: true,
+ });
+
+ expect(wrapper.vm.$el.querySelector('.edit-link')).not.toBeNull();
+ });
+
+ it('tracks the event when edit is clicked', () => {
+ wrapper = createComponent({
+ numberOfAssignees: 0,
+ editable: true,
+ });
+
+ const spy = mockTracking('_category_', wrapper.element, jest.spyOn);
+ triggerEvent('.js-sidebar-dropdown-toggle');
+
+ expect(spy).toHaveBeenCalledWith('_category_', 'click_edit_button', {
+ label: 'right_sidebar',
+ property: 'assignee',
+ });
+ });
+});
diff --git a/spec/frontend/sidebar/mock_data.js b/spec/frontend/sidebar/mock_data.js
index 3ee97b978fd..3dde40260eb 100644
--- a/spec/frontend/sidebar/mock_data.js
+++ b/spec/frontend/sidebar/mock_data.js
@@ -178,8 +178,17 @@ const RESPONSE_MAP = {
},
};
+const graphQlResponseData = {
+ project: {
+ issue: {
+ healthStatus: 'onTrack',
+ },
+ },
+};
+
const mockData = {
responseMap: RESPONSE_MAP,
+ graphQlResponseData,
mediator: {
endpoint: '/gitlab-org/gitlab-shell/issues/5.json?serializer=sidebar_extras',
toggleSubscriptionEndpoint: '/gitlab-org/gitlab-shell/issues/5/toggle_subscription',
@@ -195,6 +204,7 @@ const mockData = {
},
rootPath: '/',
fullPath: '/gitlab-org/gitlab-shell',
+ iid: 1,
},
time: {
time_estimate: 3600,
diff --git a/spec/frontend/snippet/snippet_bundle_spec.js b/spec/frontend/snippet/snippet_bundle_spec.js
new file mode 100644
index 00000000000..12d20d5cd85
--- /dev/null
+++ b/spec/frontend/snippet/snippet_bundle_spec.js
@@ -0,0 +1,94 @@
+import Editor from '~/editor/editor_lite';
+import { initEditor } from '~/snippet/snippet_bundle';
+import { setHTMLFixture } from 'helpers/fixtures';
+
+jest.mock('~/editor/editor_lite', () => jest.fn());
+
+describe('Snippet editor', () => {
+ describe('Monaco editor for Snippets', () => {
+ let oldGon;
+ let editorEl;
+ let contentEl;
+ let fileNameEl;
+ let form;
+
+ const mockName = 'foo.bar';
+ const mockContent = 'Foo Bar';
+ const updatedMockContent = 'New Foo Bar';
+
+ const mockEditor = {
+ createInstance: jest.fn(),
+ updateModelLanguage: jest.fn(),
+ getValue: jest.fn().mockReturnValueOnce(updatedMockContent),
+ };
+ Editor.mockImplementation(() => mockEditor);
+
+ function setUpFixture(name, content) {
+ setHTMLFixture(`
+ <div class="snippet-form-holder">
+ <form>
+ <input class="js-snippet-file-name" type="text" value="${name}">
+ <input class="snippet-file-content" type="hidden" value="${content}">
+ <pre id="editor"></pre>
+ </form>
+ </div>
+ `);
+ }
+
+ function bootstrap(name = '', content = '') {
+ setUpFixture(name, content);
+ editorEl = document.getElementById('editor');
+ contentEl = document.querySelector('.snippet-file-content');
+ fileNameEl = document.querySelector('.js-snippet-file-name');
+ form = document.querySelector('.snippet-form-holder form');
+
+ initEditor();
+ }
+
+ function createEvent(name) {
+ return new Event(name, {
+ view: window,
+ bubbles: true,
+ cancelable: true,
+ });
+ }
+
+ beforeEach(() => {
+ oldGon = window.gon;
+ window.gon = { features: { monacoSnippets: true } };
+ bootstrap(mockName, mockContent);
+ });
+
+ afterEach(() => {
+ window.gon = oldGon;
+ });
+
+ it('correctly initializes Editor', () => {
+ expect(mockEditor.createInstance).toHaveBeenCalledWith({
+ el: editorEl,
+ blobPath: mockName,
+ blobContent: mockContent,
+ });
+ });
+
+ it('listens to file name changes and updates syntax highlighting of code', () => {
+ expect(mockEditor.updateModelLanguage).not.toHaveBeenCalled();
+
+ const event = createEvent('change');
+
+ fileNameEl.value = updatedMockContent;
+ fileNameEl.dispatchEvent(event);
+
+ expect(mockEditor.updateModelLanguage).toHaveBeenCalledWith(updatedMockContent);
+ });
+
+    it('listens to the form submit event and populates the hidden field with the most recent version of the content', () => {
+ expect(contentEl.value).toBe(mockContent);
+
+ const event = createEvent('submit');
+
+ form.dispatchEvent(event);
+ expect(contentEl.value).toBe(updatedMockContent);
+ });
+ });
+});
diff --git a/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap b/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap
new file mode 100644
index 00000000000..b1bbe2a9710
--- /dev/null
+++ b/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap
@@ -0,0 +1,24 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Snippet Blob Edit component rendering matches the snapshot 1`] = `
+<div
+ class="form-group file-editor"
+>
+ <label>
+ File
+ </label>
+
+ <div
+ class="file-holder snippet"
+ >
+ <blob-header-edit-stub
+ value="lorem.txt"
+ />
+
+ <blob-content-edit-stub
+ filename="lorem.txt"
+ value="Lorem ipsum dolor sit amet, consectetur adipiscing elit."
+ />
+ </div>
+</div>
+`;
diff --git a/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap b/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
new file mode 100644
index 00000000000..3c3f9764f64
--- /dev/null
+++ b/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
@@ -0,0 +1,48 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Snippet Description Edit component rendering matches the snapshot 1`] = `
+<div
+ class="form-group js-description-input"
+>
+ <label>
+ Description (optional)
+ </label>
+
+ <div
+ class="js-collapsible-input"
+ >
+ <div
+ class="js-collapsed d-none"
+ >
+ <gl-form-input-stub
+ class="form-control"
+ data-qa-selector="description_placeholder"
+ placeholder="Optionally add a description about what your snippet does or how to use it…"
+ />
+ </div>
+
+ <markdown-field-stub
+ addspacingclasses="true"
+ canattachfile="true"
+ class="js-expanded"
+ enableautocomplete="true"
+ helppagepath=""
+ markdowndocspath="help/"
+ markdownpreviewpath="foo/"
+ note="[object Object]"
+ quickactionsdocspath=""
+ textareavalue=""
+ >
+ <textarea
+ aria-label="Description"
+ class="note-textarea js-gfm-input js-autosize markdown-area
+ qa-description-textarea"
+ data-supports-quick-actions="false"
+ dir="auto"
+ id="snippet-description"
+ placeholder="Write a comment or drag your files here…"
+ />
+ </markdown-field-stub>
+ </div>
+</div>
+`;
diff --git a/spec/frontend/snippets/components/__snapshots__/snippet_visibility_edit_spec.js.snap b/spec/frontend/snippets/components/__snapshots__/snippet_visibility_edit_spec.js.snap
new file mode 100644
index 00000000000..4f1d46dffef
--- /dev/null
+++ b/spec/frontend/snippets/components/__snapshots__/snippet_visibility_edit_spec.js.snap
@@ -0,0 +1,94 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Snippet Visibility Edit component rendering matches the snapshot 1`] = `
+<div
+ class="form-group"
+>
+ <label>
+
+ Visibility level
+
+ <gl-link-stub
+ href="/foo/bar"
+ target="_blank"
+ >
+ <gl-icon-stub
+ name="question"
+ size="12"
+ />
+ </gl-link-stub>
+ </label>
+
+ <gl-form-group-stub
+ id="visibility-level-setting"
+ >
+ <gl-form-radio-group-stub
+ checked="0"
+ disabledfield="disabled"
+ htmlfield="html"
+ options=""
+ stacked=""
+ textfield="text"
+ valuefield="value"
+ >
+ <gl-form-radio-stub
+ class="mb-3"
+ value="0"
+ >
+ <div
+ class="d-flex align-items-center"
+ >
+ <gl-icon-stub
+ name="lock"
+ size="16"
+ />
+
+ <span
+ class="font-weight-bold ml-1"
+ >
+ Private
+ </span>
+ </div>
+ </gl-form-radio-stub>
+ <gl-form-radio-stub
+ class="mb-3"
+ value="1"
+ >
+ <div
+ class="d-flex align-items-center"
+ >
+ <gl-icon-stub
+ name="shield"
+ size="16"
+ />
+
+ <span
+ class="font-weight-bold ml-1"
+ >
+ Internal
+ </span>
+ </div>
+ </gl-form-radio-stub>
+ <gl-form-radio-stub
+ class="mb-3"
+ value="2"
+ >
+ <div
+ class="d-flex align-items-center"
+ >
+ <gl-icon-stub
+ name="earth"
+ size="16"
+ />
+
+ <span
+ class="font-weight-bold ml-1"
+ >
+ Public
+ </span>
+ </div>
+ </gl-form-radio-stub>
+ </gl-form-radio-group-stub>
+ </gl-form-group-stub>
+</div>
+`;
diff --git a/spec/frontend/snippets/components/snippet_blob_edit_spec.js b/spec/frontend/snippets/components/snippet_blob_edit_spec.js
new file mode 100644
index 00000000000..42b49c50c75
--- /dev/null
+++ b/spec/frontend/snippets/components/snippet_blob_edit_spec.js
@@ -0,0 +1,40 @@
+import SnippetBlobEdit from '~/snippets/components/snippet_blob_edit.vue';
+import BlobHeaderEdit from '~/blob/components/blob_edit_header.vue';
+import BlobContentEdit from '~/blob/components/blob_edit_content.vue';
+import { shallowMount } from '@vue/test-utils';
+
+jest.mock('~/blob/utils', () => jest.fn());
+
+describe('Snippet Blob Edit component', () => {
+ let wrapper;
+ const content = 'Lorem ipsum dolor sit amet, consectetur adipiscing elit.';
+ const fileName = 'lorem.txt';
+
+ function createComponent() {
+ wrapper = shallowMount(SnippetBlobEdit, {
+ propsData: {
+ content,
+ fileName,
+ },
+ });
+ }
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('rendering', () => {
+ it('matches the snapshot', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ it('renders required components', () => {
+ expect(wrapper.contains(BlobHeaderEdit)).toBe(true);
+ expect(wrapper.contains(BlobContentEdit)).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/snippets/components/snippet_description_edit_spec.js b/spec/frontend/snippets/components/snippet_description_edit_spec.js
new file mode 100644
index 00000000000..167489dc004
--- /dev/null
+++ b/spec/frontend/snippets/components/snippet_description_edit_spec.js
@@ -0,0 +1,52 @@
+import SnippetDescriptionEdit from '~/snippets/components/snippet_description_edit.vue';
+import { shallowMount } from '@vue/test-utils';
+
+describe('Snippet Description Edit component', () => {
+ let wrapper;
+ const defaultDescription = 'Lorem ipsum dolor sit amet, consectetur adipiscing elit.';
+ const markdownPreviewPath = 'foo/';
+ const markdownDocsPath = 'help/';
+
+ function createComponent(description = defaultDescription) {
+ wrapper = shallowMount(SnippetDescriptionEdit, {
+ propsData: {
+ description,
+ markdownPreviewPath,
+ markdownDocsPath,
+ },
+ });
+ }
+
+ function isHidden(sel) {
+ return wrapper.find(sel).classes('d-none');
+ }
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('rendering', () => {
+ it('matches the snapshot', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ it('renders the field expanded when description exists', () => {
+      expect(isHidden('.js-collapsed')).toBe(true);
+      expect(isHidden('.js-expanded')).toBe(false);
+ });
+
+ it('renders the field collapsed if there is no description yet', () => {
+ createComponent('');
+
+ expect(isHidden('.js-collapsed')).toBe(false);
+ expect(isHidden('.js-expanded')).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/snippets/components/snippet_title_spec.js b/spec/frontend/snippets/components/snippet_title_spec.js
index a7efa4ae341..b49b2008610 100644
--- a/spec/frontend/snippets/components/snippet_title_spec.js
+++ b/spec/frontend/snippets/components/snippet_title_spec.js
@@ -6,10 +6,12 @@ describe('Snippet header component', () => {
let wrapper;
const title = 'The property of Thor';
const description = 'Do not touch this hammer';
+ const descriptionHtml = `<h2>${description}</h2>`;
const snippet = {
snippet: {
title,
description,
+ descriptionHtml,
},
};
@@ -35,7 +37,7 @@ describe('Snippet header component', () => {
it('renders snippets title and description', () => {
createComponent();
expect(wrapper.text().trim()).toContain(title);
- expect(wrapper.text().trim()).toContain(description);
+ expect(wrapper.find('.js-snippet-description').element.innerHTML).toBe(descriptionHtml);
});
it('does not render recent changes time stamp if there were no updates', () => {
diff --git a/spec/frontend/snippets/components/snippet_visibility_edit_spec.js b/spec/frontend/snippets/components/snippet_visibility_edit_spec.js
new file mode 100644
index 00000000000..5104d742bb3
--- /dev/null
+++ b/spec/frontend/snippets/components/snippet_visibility_edit_spec.js
@@ -0,0 +1,94 @@
+import SnippetVisibilityEdit from '~/snippets/components/snippet_visibility_edit.vue';
+import { GlFormRadio } from '@gitlab/ui';
+import { SNIPPET_VISIBILITY } from '~/snippets/constants';
+import { mount, shallowMount } from '@vue/test-utils';
+
+describe('Snippet Visibility Edit component', () => {
+ let wrapper;
+ let radios;
+ const defaultHelpLink = '/foo/bar';
+ const defaultVisibilityLevel = '0';
+
+ function findElements(sel) {
+ return wrapper.findAll(sel);
+ }
+
+ function createComponent(
+ {
+ helpLink = defaultHelpLink,
+ isProjectSnippet = false,
+ visibilityLevel = defaultVisibilityLevel,
+ } = {},
+ deep = false,
+ ) {
+ const method = deep ? mount : shallowMount;
+ wrapper = method.call(this, SnippetVisibilityEdit, {
+ propsData: {
+ helpLink,
+ isProjectSnippet,
+ visibilityLevel,
+ },
+ });
+ radios = findElements(GlFormRadio);
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('rendering', () => {
+ it('matches the snapshot', () => {
+ createComponent();
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ it.each`
+ label | value
+ ${SNIPPET_VISIBILITY.private.label} | ${`0`}
+ ${SNIPPET_VISIBILITY.internal.label} | ${`1`}
+ ${SNIPPET_VISIBILITY.public.label} | ${`2`}
+ `('should render correct $label label', ({ label, value }) => {
+ createComponent();
+ const radio = radios.at(parseInt(value, 10));
+
+ expect(radio.attributes('value')).toBe(value);
+ expect(radio.text()).toContain(label);
+ });
+
+ describe('rendered help-text', () => {
+ it.each`
+ description | value | label
+ ${SNIPPET_VISIBILITY.private.description} | ${`0`} | ${SNIPPET_VISIBILITY.private.label}
+ ${SNIPPET_VISIBILITY.internal.description} | ${`1`} | ${SNIPPET_VISIBILITY.internal.label}
+ ${SNIPPET_VISIBILITY.public.description} | ${`2`} | ${SNIPPET_VISIBILITY.public.label}
+ `('should render correct $label description', ({ description, value }) => {
+ createComponent({}, true);
+
+ const help = findElements('.help-text').at(parseInt(value, 10));
+
+ expect(help.text()).toBe(description);
+ });
+
+ it('renders correct Private description for a project snippet', () => {
+ createComponent({ isProjectSnippet: true }, true);
+
+ const helpText = findElements('.help-text')
+ .at(0)
+ .text();
+
+ expect(helpText).not.toContain(SNIPPET_VISIBILITY.private.description);
+ expect(helpText).toBe(SNIPPET_VISIBILITY.private.description_project);
+ });
+ });
+ });
+
+ describe('functionality', () => {
+ it('pre-selects correct option in the list', () => {
+ const pos = 1;
+
+ createComponent({ visibilityLevel: `${pos}` }, true);
+ const radio = radios.at(pos);
+ expect(radio.find('input[type="radio"]').element.checked).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/tracking_spec.js b/spec/frontend/tracking_spec.js
index c3c52844c2c..30a8e138df2 100644
--- a/spec/frontend/tracking_spec.js
+++ b/spec/frontend/tracking_spec.js
@@ -226,6 +226,14 @@ describe('Tracking', () => {
};
});
+ it('calls the event method with no category or action defined', () => {
+ mixin.trackingCategory = mixin.trackingCategory();
+ mixin.trackingOptions = mixin.trackingOptions();
+
+ mixin.track();
+ expect(eventSpy).toHaveBeenCalledWith(undefined, undefined, {});
+ });
+
it('calls the event method', () => {
mixin.trackingCategory = mixin.trackingCategory();
mixin.trackingOptions = mixin.trackingOptions();
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_suggest_pipeline_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_suggest_pipeline_spec.js
index 77293a5b187..8b0253dc01a 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_suggest_pipeline_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_suggest_pipeline_spec.js
@@ -1,16 +1,24 @@
import { mount } from '@vue/test-utils';
import { GlLink } from '@gitlab/ui';
import suggestPipelineComponent from '~/vue_merge_request_widget/components/mr_widget_suggest_pipeline.vue';
+import stubChildren from 'helpers/stub_children';
+import PipelineTourState from '~/vue_merge_request_widget/components/states/mr_widget_pipeline_tour.vue';
import MrWidgetIcon from '~/vue_merge_request_widget/components/mr_widget_icon.vue';
+import { mockTracking, triggerEvent, unmockTracking } from 'helpers/tracking_helper';
describe('MRWidgetHeader', () => {
let wrapper;
const pipelinePath = '/foo/bar/add/pipeline/path';
+ const pipelineSvgPath = '/foo/bar/pipeline/svg/path';
+ const humanAccess = 'maintainer';
const iconName = 'status_notfound';
beforeEach(() => {
wrapper = mount(suggestPipelineComponent, {
- propsData: { pipelinePath },
+ propsData: { pipelinePath, pipelineSvgPath, humanAccess },
+ stubs: {
+ ...stubChildren(PipelineTourState),
+ },
});
});
@@ -22,30 +30,47 @@ describe('MRWidgetHeader', () => {
it('renders add pipeline file link', () => {
const link = wrapper.find(GlLink);
- return wrapper.vm.$nextTick().then(() => {
- expect(link.exists()).toBe(true);
- expect(link.attributes().href).toBe(pipelinePath);
- });
+ expect(link.exists()).toBe(true);
+ expect(link.attributes().href).toBe(pipelinePath);
});
it('renders the expected text', () => {
const messageText = /\s*No pipeline\s*Add the .gitlab-ci.yml file\s*to create one./;
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.text()).toMatch(messageText);
- });
+ expect(wrapper.text()).toMatch(messageText);
});
it('renders widget icon', () => {
const icon = wrapper.find(MrWidgetIcon);
- return wrapper.vm.$nextTick().then(() => {
- expect(icon.exists()).toBe(true);
- expect(icon.props()).toEqual(
- expect.objectContaining({
- name: iconName,
- }),
- );
+ expect(icon.exists()).toBe(true);
+ expect(icon.props()).toEqual(
+ expect.objectContaining({
+ name: iconName,
+ }),
+ );
+ });
+
+ describe('tracking', () => {
+ let spy;
+
+ beforeEach(() => {
+ spy = mockTracking('_category_', wrapper.element, jest.spyOn);
+ });
+
+ afterEach(() => {
+ unmockTracking();
+ });
+
+    it('sends an event when the ok button is clicked', () => {
+ const link = wrapper.find(GlLink);
+ triggerEvent(link.element);
+
+ expect(spy).toHaveBeenCalledWith('_category_', 'click_link', {
+ label: 'no_pipeline_noticed',
+ property: humanAccess,
+ value: '30',
+ });
});
});
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_pipeline_tour_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_pipeline_tour_spec.js
new file mode 100644
index 00000000000..e8f95e099cc
--- /dev/null
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_pipeline_tour_spec.js
@@ -0,0 +1,143 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlPopover } from '@gitlab/ui';
+import Cookies from 'js-cookie';
+import { mockTracking, triggerEvent, unmockTracking } from 'helpers/tracking_helper';
+import pipelineTourState from '~/vue_merge_request_widget/components/states/mr_widget_pipeline_tour.vue';
+import { popoverProps, cookieKey } from './pipeline_tour_mock_data';
+
+describe('MRWidgetPipelineTour', () => {
+ let wrapper;
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('template', () => {
+ describe(`when ${cookieKey} cookie is set`, () => {
+ beforeEach(() => {
+ Cookies.set(cookieKey, true);
+ wrapper = shallowMount(pipelineTourState, {
+ propsData: popoverProps,
+ });
+ });
+
+ it('does not render the popover', () => {
+ const popover = wrapper.find(GlPopover);
+
+ expect(popover.exists()).toBe(false);
+ });
+
+ describe('tracking', () => {
+ let trackingSpy;
+
+ beforeEach(() => {
+ trackingSpy = mockTracking('_category_', wrapper.element, jest.spyOn);
+ });
+
+ afterEach(() => {
+ unmockTracking();
+ });
+ it('does not call tracking', () => {
+ expect(trackingSpy).not.toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe(`when ${cookieKey} cookie is not set`, () => {
+ const findOkBtn = () => wrapper.find({ ref: 'ok' });
+ const findDismissBtn = () => wrapper.find({ ref: 'no-thanks' });
+
+ beforeEach(() => {
+ Cookies.remove(cookieKey);
+ wrapper = shallowMount(pipelineTourState, {
+ propsData: popoverProps,
+ });
+ });
+
+ it('renders the popover', () => {
+ const popover = wrapper.find(GlPopover);
+
+ expect(popover.exists()).toBe(true);
+ });
+
+ it('renders the show me how button', () => {
+ const button = findOkBtn();
+
+ expect(button.exists()).toBe(true);
+ expect(button.attributes().category).toBe('primary');
+ });
+
+ it('renders the dismiss button', () => {
+ const button = findDismissBtn();
+
+ expect(button.exists()).toBe(true);
+ expect(button.attributes().category).toBe('secondary');
+ });
+
+ it('renders the empty pipelines image', () => {
+ const image = wrapper.find('img');
+
+ expect(image.exists()).toBe(true);
+ expect(image.attributes().src).toBe(popoverProps.pipelineSvgPath);
+ });
+
+ describe('tracking', () => {
+ let trackingSpy;
+
+ beforeEach(() => {
+ trackingSpy = mockTracking('_category_', wrapper.element, jest.spyOn);
+ });
+
+ afterEach(() => {
+ unmockTracking();
+ });
+
+        it('sends an event for basic view of popover', () => {
+ document.body.dataset.page = 'projects:merge_requests:show';
+
+ wrapper.vm.trackOnShow();
+
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, undefined, {
+ label: popoverProps.trackLabel,
+ property: popoverProps.humanAccess,
+ });
+ });
+
+        it('sends an event when the ok button is clicked', () => {
+ const okBtn = findOkBtn();
+ triggerEvent(okBtn.element);
+
+ expect(trackingSpy).toHaveBeenCalledWith('_category_', 'click_button', {
+ label: popoverProps.trackLabel,
+ property: popoverProps.humanAccess,
+ value: '10',
+ });
+ });
+
+        it('sends an event when the dismiss button is clicked', () => {
+ const dismissBtn = findDismissBtn();
+ triggerEvent(dismissBtn.element);
+
+ expect(trackingSpy).toHaveBeenCalledWith('_category_', 'click_button', {
+ label: popoverProps.trackLabel,
+ property: popoverProps.humanAccess,
+ value: '20',
+ });
+ });
+ });
+
+      describe('dismissPopover', () => {
+        it('updates popoverDismissed', () => {
+          const button = findDismissBtn();
+          const popover = wrapper.find(GlPopover);
+          button.vm.$emit('click');
+
+          return wrapper.vm.$nextTick().then(() => {
+            expect(Cookies.get(cookieKey)).toBe('true');
+            expect(popover.exists()).toBe(false);
+          });
+        });
+      });
+    });
+ });
+});
diff --git a/spec/frontend/vue_mr_widget/components/states/pipeline_tour_mock_data.js b/spec/frontend/vue_mr_widget/components/states/pipeline_tour_mock_data.js
new file mode 100644
index 00000000000..39bc89e459c
--- /dev/null
+++ b/spec/frontend/vue_mr_widget/components/states/pipeline_tour_mock_data.js
@@ -0,0 +1,10 @@
+export const popoverProps = {
+ pipelinePath: '/foo/bar/add/pipeline/path',
+ pipelineSvgPath: 'assets/illustrations/something.svg',
+ humanAccess: 'maintainer',
+ popoverTarget: 'suggest-popover',
+ popoverContainer: 'suggest-pipeline',
+ trackLabel: 'some_tracking_label',
+};
+
+export const cookieKey = 'suggest_pipeline_dismissed';
diff --git a/spec/frontend/vue_mr_widget/deployment/deployment_action_button_spec.js b/spec/frontend/vue_mr_widget/deployment/deployment_action_button_spec.js
new file mode 100644
index 00000000000..1b14ee694fe
--- /dev/null
+++ b/spec/frontend/vue_mr_widget/deployment/deployment_action_button_spec.js
@@ -0,0 +1,124 @@
+import { mount } from '@vue/test-utils';
+import { GlIcon, GlLoadingIcon, GlButton } from '@gitlab/ui';
+import DeploymentActionButton from '~/vue_merge_request_widget/components/deployment/deployment_action_button.vue';
+import {
+ CREATED,
+ RUNNING,
+ DEPLOYING,
+ REDEPLOYING,
+} from '~/vue_merge_request_widget/components/deployment/constants';
+import { actionButtonMocks } from './deployment_mock_data';
+
+const baseProps = {
+ actionsConfiguration: actionButtonMocks[DEPLOYING],
+ actionInProgress: null,
+ computedDeploymentStatus: CREATED,
+};
+
+describe('Deployment action button', () => {
+ let wrapper;
+
+ const factory = (options = {}) => {
+ wrapper = mount(DeploymentActionButton, {
+ ...options,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when passed only icon', () => {
+ beforeEach(() => {
+ factory({
+ propsData: baseProps,
+ slots: { default: ['<gl-icon name="stop" />'] },
+ stubs: {
+ 'gl-icon': GlIcon,
+ },
+ });
+ });
+
+ it('renders slot correctly', () => {
+ expect(wrapper.find(GlIcon).exists()).toBe(true);
+ });
+ });
+
+ describe('when passed multiple items', () => {
+ beforeEach(() => {
+ factory({
+ propsData: baseProps,
+ slots: {
+ default: ['<gl-icon name="play" />', `<span>${actionButtonMocks[DEPLOYING]}</span>`],
+ },
+ stubs: {
+ 'gl-icon': GlIcon,
+ },
+ });
+ });
+
+ it('renders slot correctly', () => {
+ expect(wrapper.find(GlIcon).exists()).toBe(true);
+ expect(wrapper.text()).toContain(actionButtonMocks[DEPLOYING]);
+ });
+ });
+
+ describe('when its action is in progress', () => {
+ beforeEach(() => {
+ factory({
+ propsData: {
+ ...baseProps,
+ actionInProgress: actionButtonMocks[DEPLOYING].actionName,
+ },
+ });
+ });
+
+ it('is disabled and shows the loading icon', () => {
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.find(GlButton).props('disabled')).toBe(true);
+ });
+ });
+
+ describe('when another action is in progress', () => {
+ beforeEach(() => {
+ factory({
+ propsData: {
+ ...baseProps,
+ actionInProgress: actionButtonMocks[REDEPLOYING].actionName,
+ },
+ });
+ });
+ it('is disabled and does not show the loading icon', () => {
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.find(GlButton).props('disabled')).toBe(true);
+ });
+ });
+
+ describe('when action status is running', () => {
+ beforeEach(() => {
+ factory({
+ propsData: {
+ ...baseProps,
+ actionInProgress: actionButtonMocks[REDEPLOYING].actionName,
+ computedDeploymentStatus: RUNNING,
+ },
+ });
+ });
+ it('is disabled and does not show the loading icon', () => {
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.find(GlButton).props('disabled')).toBe(true);
+ });
+ });
+
+ describe('when no action is in progress', () => {
+ beforeEach(() => {
+ factory({
+ propsData: baseProps,
+ });
+ });
+ it('is not disabled nor does it show the loading icon', () => {
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.find(GlButton).props('disabled')).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/vue_mr_widget/deployment/deployment_actions_spec.js b/spec/frontend/vue_mr_widget/deployment/deployment_actions_spec.js
new file mode 100644
index 00000000000..6449272e6ed
--- /dev/null
+++ b/spec/frontend/vue_mr_widget/deployment/deployment_actions_spec.js
@@ -0,0 +1,220 @@
+import { mount } from '@vue/test-utils';
+import createFlash from '~/flash';
+import { visitUrl } from '~/lib/utils/url_utility';
+import MRWidgetService from '~/vue_merge_request_widget/services/mr_widget_service';
+import DeploymentActions from '~/vue_merge_request_widget/components/deployment/deployment_actions.vue';
+import {
+ CREATED,
+ MANUAL_DEPLOY,
+ FAILED,
+ DEPLOYING,
+ REDEPLOYING,
+ STOPPING,
+} from '~/vue_merge_request_widget/components/deployment/constants';
+import {
+ actionButtonMocks,
+ deploymentMockData,
+ playDetails,
+ retryDetails,
+} from './deployment_mock_data';
+
+jest.mock('~/flash');
+jest.mock('~/lib/utils/url_utility');
+
+describe('DeploymentAction component', () => {
+ let wrapper;
+ let executeActionSpy;
+
+ const factory = (options = {}) => {
+ // This destroys any wrappers created before a nested call to factory reassigns it
+ if (wrapper && wrapper.destroy) {
+ wrapper.destroy();
+ }
+
+ wrapper = mount(DeploymentActions, {
+ ...options,
+ provide: { glFeatures: { deployFromFooter: true } },
+ });
+ };
+
+ const findStopButton = () => wrapper.find('.js-stop-env');
+ const findDeployButton = () => wrapper.find('.js-manual-deploy-action');
+ const findRedeployButton = () => wrapper.find('.js-manual-redeploy-action');
+
+ beforeEach(() => {
+ executeActionSpy = jest.spyOn(MRWidgetService, 'executeInlineAction');
+
+ factory({
+ propsData: {
+ computedDeploymentStatus: CREATED,
+ deployment: deploymentMockData,
+ showVisualReviewApp: false,
+ },
+ });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('actions do not appear when conditions are unmet', () => {
+ describe('when there is no stop_url', () => {
+ beforeEach(() => {
+ factory({
+ propsData: {
+ computedDeploymentStatus: CREATED,
+ deployment: {
+ ...deploymentMockData,
+ stop_url: null,
+ },
+ showVisualReviewApp: false,
+ },
+ });
+ });
+
+ it('the stop button does not appear', () => {
+ expect(findStopButton().exists()).toBe(false);
+ });
+ });
+
+ describe('when there is no play_path in details', () => {
+ it('the manual deploy button does not appear', () => {
+ expect(findDeployButton().exists()).toBe(false);
+ });
+ });
+
+ describe('when there is no retry_path in details', () => {
+ it('the manual redeploy button does not appear', () => {
+ expect(findRedeployButton().exists()).toBe(false);
+ });
+ });
+ });
+
+ describe('when conditions are met', () => {
+ describe.each`
+ configConst | computedDeploymentStatus | displayConditionChanges | finderFn | endpoint
+ ${STOPPING} | ${CREATED} | ${{}} | ${findStopButton} | ${deploymentMockData.stop_url}
+ ${DEPLOYING} | ${MANUAL_DEPLOY} | ${playDetails} | ${findDeployButton} | ${playDetails.playable_build.play_path}
+ ${REDEPLOYING} | ${FAILED} | ${retryDetails} | ${findRedeployButton} | ${retryDetails.playable_build.retry_path}
+ `(
+ '$configConst action',
+ ({ configConst, computedDeploymentStatus, displayConditionChanges, finderFn, endpoint }) => {
+ describe(`${configConst} action`, () => {
+ const confirmAction = () => {
+ jest.spyOn(window, 'confirm').mockReturnValueOnce(true);
+ finderFn().trigger('click');
+ };
+
+ const rejectAction = () => {
+ jest.spyOn(window, 'confirm').mockReturnValueOnce(false);
+ finderFn().trigger('click');
+ };
+
+ beforeEach(() => {
+ factory({
+ propsData: {
+ computedDeploymentStatus,
+ deployment: {
+ ...deploymentMockData,
+ details: displayConditionChanges,
+ },
+ showVisualReviewApp: false,
+ },
+ });
+ });
+
+ it('the button is rendered', () => {
+ expect(finderFn().exists()).toBe(true);
+ });
+
+ describe('when clicked', () => {
+ describe('should show a confirm dialog but not call executeInlineAction when declined', () => {
+ beforeEach(() => {
+ executeActionSpy.mockResolvedValueOnce();
+ rejectAction();
+ });
+
+ it('should show the confirm dialog', () => {
+ expect(window.confirm).toHaveBeenCalled();
+ expect(window.confirm).toHaveBeenCalledWith(
+ actionButtonMocks[configConst].confirmMessage,
+ );
+ });
+
+ it('should not execute the action', () => {
+ expect(MRWidgetService.executeInlineAction).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('should show a confirm dialog and call executeInlineAction when accepted', () => {
+ beforeEach(() => {
+ executeActionSpy.mockResolvedValueOnce();
+ confirmAction();
+ });
+
+ it('should show the confirm dialog', () => {
+ expect(window.confirm).toHaveBeenCalled();
+ expect(window.confirm).toHaveBeenCalledWith(
+ actionButtonMocks[configConst].confirmMessage,
+ );
+ });
+
+ it('should execute the action with expected URL', () => {
+ expect(MRWidgetService.executeInlineAction).toHaveBeenCalled();
+ expect(MRWidgetService.executeInlineAction).toHaveBeenCalledWith(endpoint);
+ });
+
+ it('should not throw an error', () => {
+ expect(createFlash).not.toHaveBeenCalled();
+ });
+
+ describe('response includes redirect_url', () => {
+ const url = '/root/example';
+ beforeEach(() => {
+ executeActionSpy.mockResolvedValueOnce({
+ data: { redirect_url: url },
+ });
+ confirmAction();
+ });
+
+ it('calls visit url with the redirect_url', () => {
+ expect(visitUrl).toHaveBeenCalled();
+ expect(visitUrl).toHaveBeenCalledWith(url);
+ });
+ });
+
+ describe('it should call the executeAction method ', () => {
+ beforeEach(() => {
+ jest.spyOn(wrapper.vm, 'executeAction').mockImplementation();
+ confirmAction();
+ });
+
+ it('calls with the expected arguments', () => {
+ expect(wrapper.vm.executeAction).toHaveBeenCalled();
+ expect(wrapper.vm.executeAction).toHaveBeenCalledWith(
+ endpoint,
+ actionButtonMocks[configConst],
+ );
+ });
+ });
+
+ describe('when executeInlineAction errors', () => {
+ beforeEach(() => {
+ executeActionSpy.mockRejectedValueOnce();
+ confirmAction();
+ });
+
+ it('should call createFlash with error message', () => {
+ expect(createFlash).toHaveBeenCalled();
+ expect(createFlash).toHaveBeenCalledWith(
+ actionButtonMocks[configConst].errorMessage,
+ );
+ });
+ });
+ });
+ });
+ });
+ },
+ );
+ });
+});
diff --git a/spec/frontend/vue_mr_widget/deployment/deployment_mock_data.js b/spec/frontend/vue_mr_widget/deployment/deployment_mock_data.js
index f8f4cb627dd..ff29022b75d 100644
--- a/spec/frontend/vue_mr_widget/deployment/deployment_mock_data.js
+++ b/spec/frontend/vue_mr_widget/deployment/deployment_mock_data.js
@@ -1,4 +1,33 @@
-import { SUCCESS } from '~/vue_merge_request_widget/components/deployment/constants';
+import {
+ DEPLOYING,
+ REDEPLOYING,
+ SUCCESS,
+ STOPPING,
+} from '~/vue_merge_request_widget/components/deployment/constants';
+
+const actionButtonMocks = {
+ [STOPPING]: {
+ actionName: STOPPING,
+ buttonText: 'Stop environment',
+ busyText: 'This environment is being deployed',
+ confirmMessage: 'Are you sure you want to stop this environment?',
+ errorMessage: 'Something went wrong while stopping this environment. Please try again.',
+ },
+ [DEPLOYING]: {
+ actionName: DEPLOYING,
+ buttonText: 'Deploy',
+ busyText: 'This environment is being deployed',
+ confirmMessage: 'Are you sure you want to deploy this environment?',
+ errorMessage: 'Something went wrong while deploying this environment. Please try again.',
+ },
+ [REDEPLOYING]: {
+ actionName: REDEPLOYING,
+ buttonText: 'Re-deploy',
+ busyText: 'This environment is being re-deployed',
+ confirmMessage: 'Are you sure you want to re-deploy this environment?',
+ errorMessage: 'Something went wrong while deploying this environment. Please try again.',
+ },
+};
const deploymentMockData = {
id: 15,
@@ -29,4 +58,16 @@ const deploymentMockData = {
],
};
-export default deploymentMockData;
+const playDetails = {
+ playable_build: {
+ play_path: '/root/test-deployments/-/jobs/1131/play',
+ },
+};
+
+const retryDetails = {
+ playable_build: {
+ retry_path: '/root/test-deployments/-/jobs/1131/retry',
+ },
+};
+
+export { actionButtonMocks, deploymentMockData, playDetails, retryDetails };
diff --git a/spec/frontend/vue_mr_widget/deployment/deployment_spec.js b/spec/frontend/vue_mr_widget/deployment/deployment_spec.js
index ec7be6b64fc..ce395de3b5d 100644
--- a/spec/frontend/vue_mr_widget/deployment/deployment_spec.js
+++ b/spec/frontend/vue_mr_widget/deployment/deployment_spec.js
@@ -2,7 +2,6 @@ import { mount } from '@vue/test-utils';
import DeploymentComponent from '~/vue_merge_request_widget/components/deployment/deployment.vue';
import DeploymentInfo from '~/vue_merge_request_widget/components/deployment/deployment_info.vue';
import DeploymentViewButton from '~/vue_merge_request_widget/components/deployment/deployment_view_button.vue';
-import DeploymentStopButton from '~/vue_merge_request_widget/components/deployment/deployment_stop_button.vue';
import {
CREATED,
RUNNING,
@@ -10,15 +9,7 @@ import {
FAILED,
CANCELED,
} from '~/vue_merge_request_widget/components/deployment/constants';
-import deploymentMockData from './deployment_mock_data';
-
-const deployDetail = {
- playable_build: {
- retry_path: '/root/test-deployments/-/jobs/1131/retry',
- play_path: '/root/test-deployments/-/jobs/1131/play',
- },
- isManual: true,
-};
+import { deploymentMockData, playDetails, retryDetails } from './deployment_mock_data';
describe('Deployment component', () => {
let wrapper;
@@ -30,6 +21,7 @@ describe('Deployment component', () => {
}
wrapper = mount(DeploymentComponent, {
...options,
+ provide: { glFeatures: { deployFromFooter: true } },
});
};
@@ -53,28 +45,39 @@ describe('Deployment component', () => {
describe('status message and buttons', () => {
const noActions = [];
const noDetails = { isManual: false };
- const deployGroup = [DeploymentViewButton, DeploymentStopButton];
+ const deployDetail = {
+ ...playDetails,
+ isManual: true,
+ };
+
+ const retryDetail = {
+ ...retryDetails,
+ isManual: true,
+ };
+ const defaultGroup = ['.js-deploy-url', '.js-stop-env'];
+ const manualDeployGroup = ['.js-manual-deploy-action', ...defaultGroup];
+ const manualRedeployGroup = ['.js-manual-redeploy-action', ...defaultGroup];
describe.each`
status | previous | deploymentDetails | text | actionButtons
- ${CREATED} | ${true} | ${deployDetail} | ${'Can be manually deployed to'} | ${deployGroup}
- ${CREATED} | ${true} | ${noDetails} | ${'Will deploy to'} | ${deployGroup}
+ ${CREATED} | ${true} | ${deployDetail} | ${'Can be manually deployed to'} | ${manualDeployGroup}
+ ${CREATED} | ${true} | ${noDetails} | ${'Will deploy to'} | ${defaultGroup}
${CREATED} | ${false} | ${deployDetail} | ${'Can be manually deployed to'} | ${noActions}
${CREATED} | ${false} | ${noDetails} | ${'Will deploy to'} | ${noActions}
- ${RUNNING} | ${true} | ${deployDetail} | ${'Deploying to'} | ${deployGroup}
- ${RUNNING} | ${true} | ${noDetails} | ${'Deploying to'} | ${deployGroup}
+ ${RUNNING} | ${true} | ${deployDetail} | ${'Deploying to'} | ${defaultGroup}
+ ${RUNNING} | ${true} | ${noDetails} | ${'Deploying to'} | ${defaultGroup}
${RUNNING} | ${false} | ${deployDetail} | ${'Deploying to'} | ${noActions}
${RUNNING} | ${false} | ${noDetails} | ${'Deploying to'} | ${noActions}
- ${SUCCESS} | ${true} | ${deployDetail} | ${'Deployed to'} | ${deployGroup}
- ${SUCCESS} | ${true} | ${noDetails} | ${'Deployed to'} | ${deployGroup}
- ${SUCCESS} | ${false} | ${deployDetail} | ${'Deployed to'} | ${deployGroup}
- ${SUCCESS} | ${false} | ${noDetails} | ${'Deployed to'} | ${deployGroup}
- ${FAILED} | ${true} | ${deployDetail} | ${'Failed to deploy to'} | ${deployGroup}
- ${FAILED} | ${true} | ${noDetails} | ${'Failed to deploy to'} | ${deployGroup}
- ${FAILED} | ${false} | ${deployDetail} | ${'Failed to deploy to'} | ${noActions}
+ ${SUCCESS} | ${true} | ${deployDetail} | ${'Deployed to'} | ${defaultGroup}
+ ${SUCCESS} | ${true} | ${noDetails} | ${'Deployed to'} | ${defaultGroup}
+ ${SUCCESS} | ${false} | ${deployDetail} | ${'Deployed to'} | ${defaultGroup}
+ ${SUCCESS} | ${false} | ${noDetails} | ${'Deployed to'} | ${defaultGroup}
+ ${FAILED} | ${true} | ${retryDetail} | ${'Failed to deploy to'} | ${manualRedeployGroup}
+ ${FAILED} | ${true} | ${noDetails} | ${'Failed to deploy to'} | ${defaultGroup}
+ ${FAILED} | ${false} | ${retryDetail} | ${'Failed to deploy to'} | ${noActions}
${FAILED} | ${false} | ${noDetails} | ${'Failed to deploy to'} | ${noActions}
- ${CANCELED} | ${true} | ${deployDetail} | ${'Canceled deployment to'} | ${deployGroup}
- ${CANCELED} | ${true} | ${noDetails} | ${'Canceled deployment to'} | ${deployGroup}
+ ${CANCELED} | ${true} | ${deployDetail} | ${'Canceled deployment to'} | ${defaultGroup}
+ ${CANCELED} | ${true} | ${noDetails} | ${'Canceled deployment to'} | ${defaultGroup}
${CANCELED} | ${false} | ${deployDetail} | ${'Canceled deployment to'} | ${noActions}
${CANCELED} | ${false} | ${noDetails} | ${'Canceled deployment to'} | ${noActions}
`(
@@ -112,7 +115,7 @@ describe('Deployment component', () => {
if (actionButtons.length > 0) {
describe('renders the expected button group', () => {
actionButtons.forEach(button => {
- it(`renders ${button.name}`, () => {
+ it(`renders ${button}`, () => {
expect(wrapper.find(button).exists()).toBe(true);
});
});
@@ -121,8 +124,8 @@ describe('Deployment component', () => {
if (actionButtons.length === 0) {
describe('does not render the button group', () => {
- [DeploymentViewButton, DeploymentStopButton].forEach(button => {
- it(`does not render ${button.name}`, () => {
+ defaultGroup.forEach(button => {
+ it(`does not render ${button}`, () => {
expect(wrapper.find(button).exists()).toBe(false);
});
});
@@ -144,10 +147,6 @@ describe('Deployment component', () => {
describe('hasExternalUrls', () => {
describe('when deployment has both external_url_formatted and external_url', () => {
- it('should return true', () => {
- expect(wrapper.vm.hasExternalUrls).toEqual(true);
- });
-
it('should render the View Button', () => {
expect(wrapper.find(DeploymentViewButton).exists()).toBe(true);
});
@@ -163,10 +162,6 @@ describe('Deployment component', () => {
});
});
- it('should return false', () => {
- expect(wrapper.vm.hasExternalUrls).toEqual(false);
- });
-
it('should not render the View Button', () => {
expect(wrapper.find(DeploymentViewButton).exists()).toBe(false);
});
@@ -182,10 +177,6 @@ describe('Deployment component', () => {
});
});
- it('should return false', () => {
- expect(wrapper.vm.hasExternalUrls).toEqual(false);
- });
-
it('should not render the View Button', () => {
expect(wrapper.find(DeploymentViewButton).exists()).toBe(false);
});
diff --git a/spec/frontend/vue_mr_widget/deployment/deployment_view_button_spec.js b/spec/frontend/vue_mr_widget/deployment/deployment_view_button_spec.js
index 5e0f38459b0..a12757d4cce 100644
--- a/spec/frontend/vue_mr_widget/deployment/deployment_view_button_spec.js
+++ b/spec/frontend/vue_mr_widget/deployment/deployment_view_button_spec.js
@@ -1,7 +1,7 @@
import { mount } from '@vue/test-utils';
import DeploymentViewButton from '~/vue_merge_request_widget/components/deployment/deployment_view_button.vue';
import ReviewAppLink from '~/vue_merge_request_widget/components/review_app_link.vue';
-import deploymentMockData from './deployment_mock_data';
+import { deploymentMockData } from './deployment_mock_data';
const appButtonText = {
text: 'View app',
diff --git a/spec/frontend/vue_mr_widget/mock_data.js b/spec/frontend/vue_mr_widget/mock_data.js
new file mode 100644
index 00000000000..8ed153658fd
--- /dev/null
+++ b/spec/frontend/vue_mr_widget/mock_data.js
@@ -0,0 +1,319 @@
+import { SUCCESS } from '~/vue_merge_request_widget/components/deployment/constants';
+
+export default {
+ id: 132,
+ iid: 22,
+ assignee_id: null,
+ author_id: 1,
+ description: '',
+ lock_version: null,
+ milestone_id: null,
+ position: 0,
+ state: 'merged',
+ title: 'Update README.md',
+ updated_by_id: null,
+ created_at: '2017-04-07T12:27:26.718Z',
+ updated_at: '2017-04-07T15:39:25.852Z',
+ time_estimate: 0,
+ total_time_spent: 0,
+ human_access: 'Maintainer',
+ human_time_estimate: null,
+ human_total_time_spent: null,
+ in_progress_merge_commit_sha: null,
+ merge_commit_sha: '53027d060246c8f47e4a9310fb332aa52f221775',
+ short_merge_commit_sha: '53027d06',
+ merge_error: null,
+ merge_params: {
+ force_remove_source_branch: null,
+ },
+ merge_status: 'can_be_merged',
+ merge_user_id: null,
+ pipelines_empty_svg_path: '/path/to/svg',
+ source_branch: 'daaaa',
+ source_branch_link: 'daaaa',
+ source_project_id: 19,
+ source_project_full_path: '/group1/project1',
+ target_branch: 'master',
+ target_project_id: 19,
+ target_project_full_path: '/group2/project2',
+ merge_request_add_ci_config_path: '/group2/project2/new/pipeline',
+ new_project_pipeline_path: '/group2/project2/pipelines/new',
+ metrics: {
+ merged_by: {
+ name: 'Administrator',
+ username: 'root',
+ id: 1,
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ web_url: 'http://localhost:3000/root',
+ },
+ merged_at: '2017-04-07T15:39:25.696Z',
+ closed_by: null,
+ closed_at: null,
+ },
+ author: {
+ name: 'Administrator',
+ username: 'root',
+ id: 1,
+ state: 'active',
+ avatar_url: 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ web_url: 'http://localhost:3000/root',
+ },
+ merge_user: null,
+ diff_head_sha: '104096c51715e12e7ae41f9333e9fa35b73f385d',
+ diff_head_commit_short_id: '104096c5',
+ default_merge_commit_message:
+ "Merge branch 'daaaa' into 'master'\n\nUpdate README.md\n\nSee merge request !22",
+ pipeline: {
+ id: 172,
+ user: {
+ name: 'Administrator',
+ username: 'root',
+ id: 1,
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ web_url: 'http://localhost:3000/root',
+ },
+ active: false,
+ coverage: '92.16',
+ path: '/root/acets-app/pipelines/172',
+ details: {
+ status: {
+ icon: 'status_success',
+ favicon: 'favicon_status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ has_details: true,
+ details_path: '/root/acets-app/pipelines/172',
+ },
+ duration: null,
+ finished_at: '2017-04-07T14:00:14.256Z',
+ stages: [
+ {
+ name: 'build',
+ title: 'build: failed',
+ status: {
+ icon: 'status_failed',
+ favicon: 'favicon_status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ has_details: true,
+ details_path: '/root/acets-app/pipelines/172#build',
+ },
+ path: '/root/acets-app/pipelines/172#build',
+ dropdown_path: '/root/acets-app/pipelines/172/stage.json?stage=build',
+ },
+ {
+ name: 'review',
+ title: 'review: skipped',
+ status: {
+ icon: 'status_skipped',
+ favicon: 'favicon_status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ has_details: true,
+ details_path: '/root/acets-app/pipelines/172#review',
+ },
+ path: '/root/acets-app/pipelines/172#review',
+ dropdown_path: '/root/acets-app/pipelines/172/stage.json?stage=review',
+ },
+ ],
+ artifacts: [],
+ manual_actions: [
+ {
+ name: 'stop_review',
+ path: '/root/acets-app/builds/1427/play',
+ playable: false,
+ },
+ ],
+ },
+ flags: {
+ latest: false,
+ triggered: false,
+ stuck: false,
+ yaml_errors: false,
+ retryable: true,
+ cancelable: false,
+ merge_request_pipeline: false,
+ detached_merge_request_pipeline: true,
+ },
+ ref: {
+ name: 'daaaa',
+ path: '/root/acets-app/tree/daaaa',
+ tag: false,
+ branch: true,
+ },
+ merge_request: {
+ iid: 1,
+ path: '/root/detached-merge-request-pipelines/-/merge_requests/1',
+ title: 'Update README.md',
+ source_branch: 'feature-1',
+ source_branch_path: '/root/detached-merge-request-pipelines/branches/feature-1',
+ target_branch: 'master',
+ target_branch_path: '/root/detached-merge-request-pipelines/branches/master',
+ },
+ commit: {
+ id: '104096c51715e12e7ae41f9333e9fa35b73f385d',
+ short_id: '104096c5',
+ title: 'Update README.md',
+ created_at: '2017-04-07T15:27:18.000+03:00',
+ parent_ids: ['2396536178668d8930c29d904e53bd4d06228b32'],
+ message: 'Update README.md',
+ author_name: 'Administrator',
+ author_email: 'admin@example.com',
+ authored_date: '2017-04-07T15:27:18.000+03:00',
+ committer_name: 'Administrator',
+ committer_email: 'admin@example.com',
+ committed_date: '2017-04-07T15:27:18.000+03:00',
+ author: {
+ name: 'Administrator',
+ username: 'root',
+ id: 1,
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ web_url: 'http://localhost:3000/root',
+ },
+ author_gravatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ commit_url:
+ 'http://localhost:3000/root/acets-app/commit/104096c51715e12e7ae41f9333e9fa35b73f385d',
+ commit_path: '/root/acets-app/commit/104096c51715e12e7ae41f9333e9fa35b73f385d',
+ },
+ retry_path: '/root/acets-app/pipelines/172/retry',
+ created_at: '2017-04-07T12:27:19.520Z',
+ updated_at: '2017-04-07T15:28:44.800Z',
+ },
+ pipelineCoverageDelta: '15.25',
+ work_in_progress: false,
+ source_branch_exists: false,
+ mergeable_discussions_state: true,
+ conflicts_can_be_resolved_in_ui: false,
+ branch_missing: true,
+ commits_count: 1,
+ has_conflicts: false,
+ can_be_merged: true,
+ has_ci: true,
+ ci_status: 'success',
+ pipeline_status_path: '/root/acets-app/-/merge_requests/22/pipeline_status',
+ issues_links: {
+ closing: '',
+ mentioned_but_not_closing: '',
+ },
+ current_user: {
+ can_resolve_conflicts: true,
+ can_remove_source_branch: false,
+ can_revert_on_current_merge_request: true,
+ can_cherry_pick_on_current_merge_request: true,
+ },
+ target_branch_path: '/root/acets-app/branches/master',
+ source_branch_path: '/root/acets-app/branches/daaaa',
+ conflict_resolution_ui_path: '/root/acets-app/-/merge_requests/22/conflicts',
+ remove_wip_path: '/root/acets-app/-/merge_requests/22/remove_wip',
+ cancel_auto_merge_path: '/root/acets-app/-/merge_requests/22/cancel_auto_merge',
+ create_issue_to_resolve_discussions_path:
+ '/root/acets-app/-/issues/new?merge_request_to_resolve_discussions_of=22',
+ merge_path: '/root/acets-app/-/merge_requests/22/merge',
+ cherry_pick_in_fork_path:
+ '/root/acets-app/forks?continue%5Bnotice%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+has+been+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.+Try+to+revert+this+commit+again.&continue%5Bnotice_now%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+is+being+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.&continue%5Bto%5D=%2Froot%2Facets-app%2Fmerge_requests%2F22&namespace_key=1',
+ revert_in_fork_path:
+ '/root/acets-app/forks?continue%5Bnotice%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+has+been+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.+Try+to+cherry-pick+this+commit+again.&continue%5Bnotice_now%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+is+being+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.&continue%5Bto%5D=%2Froot%2Facets-app%2Fmerge_requests%2F22&namespace_key=1',
+ email_patches_path: '/root/acets-app/-/merge_requests/22.patch',
+ plain_diff_path: '/root/acets-app/-/merge_requests/22.diff',
+ merge_request_basic_path: '/root/acets-app/-/merge_requests/22.json?serializer=basic',
+ merge_request_widget_path: '/root/acets-app/-/merge_requests/22/widget.json',
+ merge_request_cached_widget_path: '/cached.json',
+ merge_check_path: '/root/acets-app/-/merge_requests/22/merge_check',
+ ci_environments_status_url: '/root/acets-app/-/merge_requests/22/ci_environments_status',
+ project_archived: false,
+ default_merge_commit_message_with_description:
+ "Merge branch 'daaaa' into 'master'\n\nUpdate README.md\n\nSee merge request !22",
+ default_squash_commit_message: 'Test squash commit message',
+ diverged_commits_count: 0,
+ only_allow_merge_if_pipeline_succeeds: false,
+ commit_change_content_path: '/root/acets-app/-/merge_requests/22/commit_change_content',
+ merge_commit_path:
+ 'http://localhost:3000/root/acets-app/commit/53027d060246c8f47e4a9310fb332aa52f221775',
+ troubleshooting_docs_path: 'help',
+ merge_request_pipelines_docs_path: '/help/ci/merge_request_pipelines/index.md',
+ merge_train_when_pipeline_succeeds_docs_path:
+ '/help/ci/merge_request_pipelines/pipelines_for_merged_results/merge_trains/#startadd-to-merge-train-when-pipeline-succeeds',
+ squash: true,
+ visual_review_app_available: true,
+ merge_trains_enabled: true,
+ merge_trains_count: 3,
+ merge_train_index: 1,
+};
+
+export const mockStore = {
+ pipeline: {
+ id: 0,
+ details: {
+ status: {
+ details_path: '/root/review-app-tester/pipelines/66',
+ favicon:
+            '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ group: 'success-with-warnings',
+ has_details: true,
+ icon: 'status_warning',
+ illustration: null,
+ label: 'passed with warnings',
+ text: 'passed',
+ tooltip: 'passed',
+ },
+ },
+ flags: {},
+ ref: {},
+ },
+ mergePipeline: {
+ id: 1,
+ details: {
+ status: {
+ details_path: '/root/review-app-tester/pipelines/66',
+ favicon:
+            '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ group: 'success-with-warnings',
+ has_details: true,
+ icon: 'status_warning',
+ illustration: null,
+ label: 'passed with warnings',
+ text: 'passed',
+ tooltip: 'passed',
+ },
+ },
+ flags: {},
+ ref: {},
+ },
+ targetBranch: 'target-branch',
+ sourceBranch: 'source-branch',
+ sourceBranchLink: 'source-branch-link',
+ deployments: [
+ {
+ id: 0,
+ name: 'bogus',
+ external_url: 'https://fake.com',
+ external_url_formatted: 'https://fake.com',
+ status: SUCCESS,
+ },
+ {
+ id: 1,
+ name: 'bogus-docs',
+ external_url: 'https://fake.com',
+ external_url_formatted: 'https://fake.com',
+ status: SUCCESS,
+ },
+ ],
+ postMergeDeployments: [
+ { id: 0, name: 'prod', status: SUCCESS },
+ { id: 1, name: 'prod-docs', status: SUCCESS },
+ ],
+ troubleshootingDocsPath: 'troubleshooting-docs-path',
+ ciStatus: 'ci-status',
+ hasCI: true,
+ exposedArtifactsPath: 'exposed_artifacts.json',
+};
diff --git a/spec/frontend/vue_mr_widget/mr_widget_options_spec.js b/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
new file mode 100644
index 00000000000..ef95cb1b8f2
--- /dev/null
+++ b/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
@@ -0,0 +1,858 @@
+import Vue from 'vue';
+import MockAdapter from 'axios-mock-adapter';
+import mountComponent from 'helpers/vue_mount_component_helper';
+import axios from '~/lib/utils/axios_utils';
+import mrWidgetOptions from '~/vue_merge_request_widget/mr_widget_options.vue';
+import eventHub from '~/vue_merge_request_widget/event_hub';
+import notify from '~/lib/utils/notify';
+import SmartInterval from '~/smart_interval';
+import { stateKey } from '~/vue_merge_request_widget/stores/state_maps';
+import mockData from './mock_data';
+import { faviconDataUrl, overlayDataUrl } from '../lib/utils/mock_data';
+import { SUCCESS } from '~/vue_merge_request_widget/components/deployment/constants';
+
+jest.mock('~/smart_interval');
+
+const returnPromise = data =>
+ new Promise(resolve => {
+ resolve({
+ data,
+ });
+ });
+
+describe('mrWidgetOptions', () => {
+ let vm;
+ let mock;
+ let MrWidgetOptions;
+
+ const COLLABORATION_MESSAGE = 'Allows commits from members who can merge to the target branch';
+
+ beforeEach(() => {
+ // Prevent component mounting
+ delete mrWidgetOptions.el;
+
+ gl.mrWidgetData = { ...mockData };
+ gon.features = { asyncMrWidget: true };
+
+ mock = new MockAdapter(axios);
+ mock.onGet(mockData.merge_request_widget_path).reply(() => [200, { ...mockData }]);
+ mock.onGet(mockData.merge_request_cached_widget_path).reply(() => [200, { ...mockData }]);
+
+ MrWidgetOptions = Vue.extend(mrWidgetOptions);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ vm.$destroy();
+ vm = null;
+
+ gl.mrWidgetData = {};
+ gon.features = {};
+ });
+
+ const createComponent = () => {
+ if (vm) {
+ vm.$destroy();
+ }
+
+ vm = mountComponent(MrWidgetOptions, {
+ mrData: { ...mockData },
+ });
+
+ return axios.waitForAll();
+ };
+
+ describe('default', () => {
+ beforeEach(() => {
+ return createComponent();
+ });
+
+ describe('data', () => {
+ it('should instantiate Store and Service', () => {
+ expect(vm.mr).toBeDefined();
+ expect(vm.service).toBeDefined();
+ });
+ });
+
+ describe('computed', () => {
+ describe('componentName', () => {
+ it('should return merged component', () => {
+ expect(vm.componentName).toEqual('mr-widget-merged');
+ });
+
+ it('should return conflicts component', () => {
+ vm.mr.state = 'conflicts';
+
+ expect(vm.componentName).toEqual('mr-widget-conflicts');
+ });
+ });
+
+ describe('shouldRenderMergeHelp', () => {
+ it('should return false for the initial merged state', () => {
+ expect(vm.shouldRenderMergeHelp).toBeFalsy();
+ });
+
+ it('should return true for a state which requires help widget', () => {
+ vm.mr.state = 'conflicts';
+
+ expect(vm.shouldRenderMergeHelp).toBeTruthy();
+ });
+ });
+
+ describe('shouldRenderPipelines', () => {
+ it('should return true when hasCI is true', () => {
+ vm.mr.hasCI = true;
+
+ expect(vm.shouldRenderPipelines).toBeTruthy();
+ });
+
+ it('should return false when hasCI is false', () => {
+ vm.mr.hasCI = false;
+
+ expect(vm.shouldRenderPipelines).toBeFalsy();
+ });
+ });
+
+ describe('shouldRenderRelatedLinks', () => {
+ it('should return false for the initial data', () => {
+ expect(vm.shouldRenderRelatedLinks).toBeFalsy();
+ });
+
+ it('should return true if there is relatedLinks in MR', () => {
+ Vue.set(vm.mr, 'relatedLinks', {});
+
+ expect(vm.shouldRenderRelatedLinks).toBeTruthy();
+ });
+ });
+
+ describe('shouldRenderSourceBranchRemovalStatus', () => {
+ beforeEach(() => {
+ vm.mr.state = 'readyToMerge';
+ });
+
+ it('should return true when cannot remove source branch and branch will be removed', () => {
+ vm.mr.canRemoveSourceBranch = false;
+ vm.mr.shouldRemoveSourceBranch = true;
+
+ expect(vm.shouldRenderSourceBranchRemovalStatus).toEqual(true);
+ });
+
+ it('should return false when can remove source branch and branch will be removed', () => {
+ vm.mr.canRemoveSourceBranch = true;
+ vm.mr.shouldRemoveSourceBranch = true;
+
+ expect(vm.shouldRenderSourceBranchRemovalStatus).toEqual(false);
+ });
+
+ it('should return false when cannot remove source branch and branch will not be removed', () => {
+ vm.mr.canRemoveSourceBranch = false;
+ vm.mr.shouldRemoveSourceBranch = false;
+
+ expect(vm.shouldRenderSourceBranchRemovalStatus).toEqual(false);
+ });
+
+ it('should return false when in merged state', () => {
+ vm.mr.canRemoveSourceBranch = false;
+ vm.mr.shouldRemoveSourceBranch = true;
+ vm.mr.state = 'merged';
+
+ expect(vm.shouldRenderSourceBranchRemovalStatus).toEqual(false);
+ });
+
+ it('should return false when in nothing to merge state', () => {
+ vm.mr.canRemoveSourceBranch = false;
+ vm.mr.shouldRemoveSourceBranch = true;
+ vm.mr.state = 'nothingToMerge';
+
+ expect(vm.shouldRenderSourceBranchRemovalStatus).toEqual(false);
+ });
+ });
+
+ describe('shouldRenderCollaborationStatus', () => {
+ describe('when collaboration is allowed', () => {
+ beforeEach(() => {
+ vm.mr.allowCollaboration = true;
+ });
+
+ describe('when merge request is opened', () => {
+ beforeEach(done => {
+ vm.mr.isOpen = true;
+ vm.$nextTick(done);
+ });
+
+ it('should render collaboration status', () => {
+ expect(vm.$el.textContent).toContain(COLLABORATION_MESSAGE);
+ });
+ });
+
+ describe('when merge request is not opened', () => {
+ beforeEach(done => {
+ vm.mr.isOpen = false;
+ vm.$nextTick(done);
+ });
+
+ it('should not render collaboration status', () => {
+ expect(vm.$el.textContent).not.toContain(COLLABORATION_MESSAGE);
+ });
+ });
+ });
+
+ describe('when collaboration is not allowed', () => {
+ beforeEach(() => {
+ vm.mr.allowCollaboration = false;
+ });
+
+ describe('when merge request is opened', () => {
+ beforeEach(done => {
+ vm.mr.isOpen = true;
+ vm.$nextTick(done);
+ });
+
+ it('should not render collaboration status', () => {
+ expect(vm.$el.textContent).not.toContain(COLLABORATION_MESSAGE);
+ });
+ });
+ });
+ });
+
+ describe('showMergePipelineForkWarning', () => {
+ describe('when the source project and target project are the same', () => {
+ beforeEach(done => {
+ Vue.set(vm.mr, 'mergePipelinesEnabled', true);
+ Vue.set(vm.mr, 'sourceProjectId', 1);
+ Vue.set(vm.mr, 'targetProjectId', 1);
+ vm.$nextTick(done);
+ });
+
+ it('should be false', () => {
+ expect(vm.showMergePipelineForkWarning).toEqual(false);
+ });
+ });
+
+ describe('when merge pipelines are not enabled', () => {
+ beforeEach(done => {
+ Vue.set(vm.mr, 'mergePipelinesEnabled', false);
+ Vue.set(vm.mr, 'sourceProjectId', 1);
+ Vue.set(vm.mr, 'targetProjectId', 2);
+ vm.$nextTick(done);
+ });
+
+ it('should be false', () => {
+ expect(vm.showMergePipelineForkWarning).toEqual(false);
+ });
+ });
+
+ describe('when merge pipelines are enabled _and_ the source project and target project are different', () => {
+ beforeEach(done => {
+ Vue.set(vm.mr, 'mergePipelinesEnabled', true);
+ Vue.set(vm.mr, 'sourceProjectId', 1);
+ Vue.set(vm.mr, 'targetProjectId', 2);
+ vm.$nextTick(done);
+ });
+
+ it('should be true', () => {
+ expect(vm.showMergePipelineForkWarning).toEqual(true);
+ });
+ });
+ });
+ });
+
+ describe('methods', () => {
+ describe('checkStatus', () => {
+ let cb;
+ let isCbExecuted;
+
+ beforeEach(() => {
+ jest.spyOn(vm.service, 'checkStatus').mockReturnValue(returnPromise(mockData));
+ jest.spyOn(vm.mr, 'setData').mockImplementation(() => {});
+ jest.spyOn(vm, 'handleNotification').mockImplementation(() => {});
+
+ isCbExecuted = false;
+ cb = () => {
+ isCbExecuted = true;
+ };
+ });
+
+ it('should not tell service to check status if document is not visible', () => {
+ Object.defineProperty(document, 'visibilityState', {
+ value: 'hidden',
+ configurable: true,
+ });
+ vm.checkStatus(cb);
+
+ return vm.$nextTick().then(() => {
+ expect(vm.service.checkStatus).not.toHaveBeenCalled();
+ expect(vm.mr.setData).not.toHaveBeenCalled();
+ expect(vm.handleNotification).not.toHaveBeenCalled();
+ expect(isCbExecuted).toBeFalsy();
+ Object.defineProperty(document, 'visibilityState', {
+ value: 'visible',
+ configurable: true,
+ });
+ });
+ });
+
+ it('should tell service to check status if document is visible', () => {
+ vm.checkStatus(cb);
+
+ return vm.$nextTick().then(() => {
+ expect(vm.service.checkStatus).toHaveBeenCalled();
+ expect(vm.mr.setData).toHaveBeenCalled();
+ expect(vm.handleNotification).toHaveBeenCalledWith(mockData);
+ expect(isCbExecuted).toBeTruthy();
+ });
+ });
+ });
+
+ describe('initPolling', () => {
+ it('should call SmartInterval', () => {
+ vm.initPolling();
+
+ expect(SmartInterval).toHaveBeenCalledWith(
+ expect.objectContaining({
+ callback: vm.checkStatus,
+ }),
+ );
+ });
+ });
+
+ describe('initDeploymentsPolling', () => {
+ it('should call SmartInterval', () => {
+ vm.initDeploymentsPolling();
+
+ expect(SmartInterval).toHaveBeenCalledWith(
+ expect.objectContaining({
+ callback: vm.fetchPreMergeDeployments,
+ }),
+ );
+ });
+ });
+
+ describe('fetchDeployments', () => {
+ it('should fetch deployments', () => {
+ jest
+ .spyOn(vm.service, 'fetchDeployments')
+ .mockReturnValue(returnPromise([{ id: 1, status: SUCCESS }]));
+
+ vm.fetchPreMergeDeployments();
+
+ return vm.$nextTick().then(() => {
+ expect(vm.service.fetchDeployments).toHaveBeenCalled();
+ expect(vm.mr.deployments.length).toEqual(1);
+ expect(vm.mr.deployments[0].id).toBe(1);
+ });
+ });
+ });
+
+ describe('fetchActionsContent', () => {
+ it('should fetch content of Cherry Pick and Revert modals', () => {
+ jest
+ .spyOn(vm.service, 'fetchMergeActionsContent')
+ .mockReturnValue(returnPromise('hello world'));
+
+ vm.fetchActionsContent();
+
+ return vm.$nextTick().then(() => {
+ expect(vm.service.fetchMergeActionsContent).toHaveBeenCalled();
+ expect(document.body.textContent).toContain('hello world');
+ });
+ });
+ });
+
+ describe('bindEventHubListeners', () => {
+ it.each`
+ event | method | methodArgs
+ ${'MRWidgetUpdateRequested'} | ${'checkStatus'} | ${x => [x]}
+ ${'MRWidgetRebaseSuccess'} | ${'checkStatus'} | ${x => [x, true]}
+ ${'FetchActionsContent'} | ${'fetchActionsContent'} | ${() => []}
+ ${'EnablePolling'} | ${'resumePolling'} | ${() => []}
+ ${'DisablePolling'} | ${'stopPolling'} | ${() => []}
+ `('should bind to $event', ({ event, method, methodArgs }) => {
+ jest.spyOn(vm, method).mockImplementation();
+
+ const eventArg = {};
+ eventHub.$emit(event, eventArg);
+
+ expect(vm[method]).toHaveBeenCalledWith(...methodArgs(eventArg));
+ });
+
+ it('should bind to SetBranchRemoveFlag', () => {
+ expect(vm.mr.isRemovingSourceBranch).toBe(false);
+
+ eventHub.$emit('SetBranchRemoveFlag', [true]);
+
+ expect(vm.mr.isRemovingSourceBranch).toBe(true);
+ });
+
+ it('should bind to FailedToMerge', () => {
+ vm.mr.state = '';
+ vm.mr.mergeError = '';
+
+ const mergeError = 'Something bad happened!';
+ eventHub.$emit('FailedToMerge', mergeError);
+
+ expect(vm.mr.state).toBe('failedToMerge');
+ expect(vm.mr.mergeError).toBe(mergeError);
+ });
+
+ it('should bind to UpdateWidgetData', () => {
+ jest.spyOn(vm.mr, 'setData').mockImplementation();
+
+ const data = { ...mockData };
+ eventHub.$emit('UpdateWidgetData', data);
+
+ expect(vm.mr.setData).toHaveBeenCalledWith(data);
+ });
+ });
+
+ describe('setFavicon', () => {
+ let faviconElement;
+
+ beforeEach(() => {
+ const favicon = document.createElement('link');
+ favicon.setAttribute('id', 'favicon');
+ favicon.setAttribute('data-original-href', faviconDataUrl);
+ document.body.appendChild(favicon);
+
+ faviconElement = document.getElementById('favicon');
+ });
+
+ afterEach(() => {
+ document.body.removeChild(document.getElementById('favicon'));
+ });
+
+ it('should call setFavicon method', done => {
+ vm.mr.ciStatusFaviconPath = overlayDataUrl;
+ vm.setFaviconHelper()
+ .then(() => {
+ /*
+            It would be better if we could mock commonUtils.setFaviconURL
+ with a spy and test that it was called. We are doing the following
+ tests as a proxy to show that the function has been called
+ */
+ expect(faviconElement.getAttribute('href')).not.toEqual(null);
+ expect(faviconElement.getAttribute('href')).not.toEqual(overlayDataUrl);
+ expect(faviconElement.getAttribute('href')).not.toEqual(faviconDataUrl);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('should not call setFavicon when there is no ciStatusFaviconPath', done => {
+ vm.mr.ciStatusFaviconPath = null;
+ vm.setFaviconHelper()
+ .then(() => {
+ expect(faviconElement.getAttribute('href')).toEqual(null);
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ describe('handleNotification', () => {
+ const data = {
+ ci_status: 'running',
+ title: 'title',
+ pipeline: { details: { status: { label: 'running-label' } } },
+ };
+
+ beforeEach(() => {
+ jest.spyOn(notify, 'notifyMe').mockImplementation(() => {});
+
+ vm.mr.ciStatus = 'failed';
+ vm.mr.gitlabLogo = 'logo.png';
+ });
+
+ it('should call notifyMe', () => {
+ vm.handleNotification(data);
+
+ expect(notify.notifyMe).toHaveBeenCalledWith(
+ 'Pipeline running-label',
+ 'Pipeline running-label for "title"',
+ 'logo.png',
+ );
+ });
+
+ it('should not call notifyMe if the status has not changed', () => {
+ vm.mr.ciStatus = data.ci_status;
+
+ vm.handleNotification(data);
+
+ expect(notify.notifyMe).not.toHaveBeenCalled();
+ });
+
+ it('should not notify if no pipeline provided', () => {
+ vm.handleNotification({
+ ...data,
+ pipeline: undefined,
+ });
+
+ expect(notify.notifyMe).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('resumePolling', () => {
+      it('should call resume on pollingInterval', () => {
+ jest.spyOn(vm.pollingInterval, 'resume').mockImplementation(() => {});
+
+ vm.resumePolling();
+
+ expect(vm.pollingInterval.resume).toHaveBeenCalled();
+ });
+ });
+
+ describe('stopPolling', () => {
+ it('should call stopTimer on pollingInterval', () => {
+ jest.spyOn(vm.pollingInterval, 'stopTimer').mockImplementation(() => {});
+
+ vm.stopPolling();
+
+ expect(vm.pollingInterval.stopTimer).toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe('rendering relatedLinks', () => {
+ beforeEach(done => {
+ vm.mr.relatedLinks = {
+ assignToMe: null,
+ closing: `
+ <a class="close-related-link" href="#">
+ Close
+ </a>
+ `,
+ mentioned: '',
+ };
+ Vue.nextTick(done);
+ });
+
+ it('renders if there are relatedLinks', () => {
+ expect(vm.$el.querySelector('.close-related-link')).toBeDefined();
+ });
+
+ it('does not render if state is nothingToMerge', done => {
+ vm.mr.state = stateKey.nothingToMerge;
+ Vue.nextTick(() => {
+ expect(vm.$el.querySelector('.close-related-link')).toBeNull();
+ done();
+ });
+ });
+ });
+
+ describe('rendering source branch removal status', () => {
+ it('renders when user cannot remove branch and branch should be removed', done => {
+ vm.mr.canRemoveSourceBranch = false;
+ vm.mr.shouldRemoveSourceBranch = true;
+ vm.mr.state = 'readyToMerge';
+
+ vm.$nextTick(() => {
+ const tooltip = vm.$el.querySelector('.fa-question-circle');
+
+ expect(vm.$el.textContent).toContain('Deletes source branch');
+ expect(tooltip.getAttribute('data-original-title')).toBe(
+ 'A user with write access to the source branch selected this option',
+ );
+
+ done();
+ });
+ });
+
+ it('does not render in merged state', done => {
+ vm.mr.canRemoveSourceBranch = false;
+ vm.mr.shouldRemoveSourceBranch = true;
+ vm.mr.state = 'merged';
+
+ vm.$nextTick(() => {
+ expect(vm.$el.textContent).toContain('The source branch has been deleted');
+ expect(vm.$el.textContent).not.toContain('Deletes source branch');
+
+ done();
+ });
+ });
+ });
+
+ describe('rendering deployments', () => {
+ const changes = [
+ {
+ path: 'index.html',
+ external_url: 'http://root-master-patch-91341.volatile-watch.surge.sh/index.html',
+ },
+ {
+ path: 'imgs/gallery.html',
+ external_url: 'http://root-master-patch-91341.volatile-watch.surge.sh/imgs/gallery.html',
+ },
+ {
+ path: 'about/',
+ external_url: 'http://root-master-patch-91341.volatile-watch.surge.sh/about/',
+ },
+ ];
+ const deploymentMockData = {
+ id: 15,
+ name: 'review/diplo',
+ url: '/root/acets-review-apps/environments/15',
+ stop_url: '/root/acets-review-apps/environments/15/stop',
+ metrics_url: '/root/acets-review-apps/environments/15/deployments/1/metrics',
+ metrics_monitoring_url: '/root/acets-review-apps/environments/15/metrics',
+ external_url: 'http://diplo.',
+ external_url_formatted: 'diplo.',
+ deployed_at: '2017-03-22T22:44:42.258Z',
+ deployed_at_formatted: 'Mar 22, 2017 10:44pm',
+ changes,
+ status: SUCCESS,
+ };
+
+ beforeEach(done => {
+ vm.mr.deployments.push(
+ {
+ ...deploymentMockData,
+ },
+ {
+ ...deploymentMockData,
+ id: deploymentMockData.id + 1,
+ },
+ );
+
+ vm.$nextTick(done);
+ });
+
+ it('renders multiple deployments', () => {
+ expect(vm.$el.querySelectorAll('.deploy-heading').length).toBe(2);
+ });
+
+    it('renders dropdown with multiple file changes', () => {
+ expect(
+ vm.$el
+ .querySelector('.js-mr-wigdet-deployment-dropdown')
+ .querySelectorAll('.js-filtered-dropdown-result').length,
+ ).toEqual(changes.length);
+ });
+ });
+
+ describe('pipeline for target branch after merge', () => {
+ describe('with information for target branch pipeline', () => {
+ beforeEach(done => {
+ vm.mr.state = 'merged';
+ vm.mr.mergePipeline = {
+ id: 127,
+ user: {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ state: 'active',
+ avatar_url: null,
+ web_url: 'http://localhost:3000/root',
+ status_tooltip_html: null,
+ path: '/root',
+ },
+ active: true,
+ coverage: null,
+ source: 'push',
+ created_at: '2018-10-22T11:41:35.186Z',
+ updated_at: '2018-10-22T11:41:35.433Z',
+ path: '/root/ci-web-terminal/pipelines/127',
+ flags: {
+ latest: true,
+ stuck: true,
+ auto_devops: false,
+ yaml_errors: false,
+ retryable: false,
+ cancelable: true,
+ failure_reason: false,
+ },
+ details: {
+ status: {
+ icon: 'status_pending',
+ text: 'pending',
+ label: 'pending',
+ group: 'pending',
+ tooltip: 'pending',
+ has_details: true,
+ details_path: '/root/ci-web-terminal/pipelines/127',
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_pending-5bdf338420e5221ca24353b6bff1c9367189588750632e9a871b7af09ff6a2ae.png',
+ },
+ duration: null,
+ finished_at: null,
+ stages: [
+ {
+ name: 'test',
+ title: 'test: pending',
+ status: {
+ icon: 'status_pending',
+ text: 'pending',
+ label: 'pending',
+ group: 'pending',
+ tooltip: 'pending',
+ has_details: true,
+ details_path: '/root/ci-web-terminal/pipelines/127#test',
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_pending-5bdf338420e5221ca24353b6bff1c9367189588750632e9a871b7af09ff6a2ae.png',
+ },
+ path: '/root/ci-web-terminal/pipelines/127#test',
+ dropdown_path: '/root/ci-web-terminal/pipelines/127/stage.json?stage=test',
+ },
+ ],
+ artifacts: [],
+ manual_actions: [],
+ scheduled_actions: [],
+ },
+ ref: {
+ name: 'master',
+ path: '/root/ci-web-terminal/commits/master',
+ tag: false,
+ branch: true,
+ },
+ commit: {
+ id: 'aa1939133d373c94879becb79d91828a892ee319',
+ short_id: 'aa193913',
+ title: "Merge branch 'master-test' into 'master'",
+ created_at: '2018-10-22T11:41:33.000Z',
+ parent_ids: [
+ '4622f4dd792468993003caf2e3be978798cbe096',
+ '76598df914cdfe87132d0c3c40f80db9fa9396a4',
+ ],
+ message:
+ "Merge branch 'master-test' into 'master'\n\nUpdate .gitlab-ci.yml\n\nSee merge request root/ci-web-terminal!1",
+ author_name: 'Administrator',
+ author_email: 'admin@example.com',
+ authored_date: '2018-10-22T11:41:33.000Z',
+ committer_name: 'Administrator',
+ committer_email: 'admin@example.com',
+ committed_date: '2018-10-22T11:41:33.000Z',
+ author: {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ state: 'active',
+ avatar_url: null,
+ web_url: 'http://localhost:3000/root',
+ status_tooltip_html: null,
+ path: '/root',
+ },
+ author_gravatar_url: null,
+ commit_url:
+ 'http://localhost:3000/root/ci-web-terminal/commit/aa1939133d373c94879becb79d91828a892ee319',
+ commit_path: '/root/ci-web-terminal/commit/aa1939133d373c94879becb79d91828a892ee319',
+ },
+ cancel_path: '/root/ci-web-terminal/pipelines/127/cancel',
+ };
+ vm.$nextTick(done);
+ });
+
+ it('renders pipeline block', () => {
+ expect(vm.$el.querySelector('.js-post-merge-pipeline')).not.toBeNull();
+ });
+
+ describe('with post merge deployments', () => {
+ beforeEach(done => {
+ vm.mr.postMergeDeployments = [
+ {
+ id: 15,
+ name: 'review/diplo',
+ url: '/root/acets-review-apps/environments/15',
+ stop_url: '/root/acets-review-apps/environments/15/stop',
+ metrics_url: '/root/acets-review-apps/environments/15/deployments/1/metrics',
+ metrics_monitoring_url: '/root/acets-review-apps/environments/15/metrics',
+ external_url: 'http://diplo.',
+ external_url_formatted: 'diplo.',
+ deployed_at: '2017-03-22T22:44:42.258Z',
+ deployed_at_formatted: 'Mar 22, 2017 10:44pm',
+ changes: [
+ {
+ path: 'index.html',
+ external_url:
+ 'http://root-master-patch-91341.volatile-watch.surge.sh/index.html',
+ },
+ {
+ path: 'imgs/gallery.html',
+ external_url:
+ 'http://root-master-patch-91341.volatile-watch.surge.sh/imgs/gallery.html',
+ },
+ {
+ path: 'about/',
+ external_url: 'http://root-master-patch-91341.volatile-watch.surge.sh/about/',
+ },
+ ],
+ status: 'success',
+ },
+ ];
+
+ vm.$nextTick(done);
+ });
+
+ it('renders post deployment information', () => {
+ expect(vm.$el.querySelector('.js-post-deployment')).not.toBeNull();
+ });
+ });
+ });
+
+ describe('without information for target branch pipeline', () => {
+ beforeEach(done => {
+ vm.mr.state = 'merged';
+
+ vm.$nextTick(done);
+ });
+
+ it('does not render pipeline block', () => {
+ expect(vm.$el.querySelector('.js-post-merge-pipeline')).toBeNull();
+ });
+ });
+
+ describe('when state is not merged', () => {
+ beforeEach(done => {
+ vm.mr.state = 'archived';
+
+ vm.$nextTick(done);
+ });
+
+ it('does not render pipeline block', () => {
+ expect(vm.$el.querySelector('.js-post-merge-pipeline')).toBeNull();
+ });
+
+ it('does not render post deployment information', () => {
+ expect(vm.$el.querySelector('.js-post-deployment')).toBeNull();
+ });
+ });
+ });
+
+ it('should not suggest pipelines', () => {
+ vm.mr.mergeRequestAddCiConfigPath = null;
+
+ expect(vm.shouldSuggestPipelines).toBeFalsy();
+ });
+ });
+
+ describe('given suggestPipeline feature flag is enabled', () => {
+ beforeEach(() => {
+ // This is needed because some grandchildren Bootstrap components throw warnings
+ // https://gitlab.com/gitlab-org/gitlab/issues/208458
+ jest.spyOn(console, 'warn').mockImplementation();
+
+ gon.features = { suggestPipeline: true };
+ return createComponent();
+ });
+
+ it('should suggest pipelines when none exist', () => {
+ vm.mr.mergeRequestAddCiConfigPath = 'some/path';
+ vm.mr.hasCI = false;
+
+ expect(vm.shouldSuggestPipelines).toBeTruthy();
+ });
+
+ it('should not suggest pipelines when they exist', () => {
+ vm.mr.mergeRequestAddCiConfigPath = null;
+ vm.mr.hasCI = false;
+
+ expect(vm.shouldSuggestPipelines).toBeFalsy();
+ });
+
+    it('should not suggest pipelines when hasCI is true', () => {
+ vm.mr.mergeRequestAddCiConfigPath = 'some/path';
+ vm.mr.hasCI = true;
+
+ expect(vm.shouldSuggestPipelines).toBeFalsy();
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/__snapshots__/expand_button_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/expand_button_spec.js.snap
index 2abcc53bf14..1f54405928b 100644
--- a/spec/frontend/vue_shared/components/__snapshots__/expand_button_spec.js.snap
+++ b/spec/frontend/vue_shared/components/__snapshots__/expand_button_spec.js.snap
@@ -8,6 +8,8 @@ exports[`Expand button on click when short text is provided renders button after
style="display: none;"
type="button"
>
+ <!---->
+
<svg
aria-hidden="true"
class="s12 ic-ellipsis_h"
@@ -32,6 +34,8 @@ exports[`Expand button on click when short text is provided renders button after
style=""
type="button"
>
+ <!---->
+
<svg
aria-hidden="true"
class="s12 ic-ellipsis_h"
@@ -51,6 +55,8 @@ exports[`Expand button when short text is provided renders button before text 1`
class="btn js-text-expander-prepend text-expander btn-blank btn-secondary btn-md"
type="button"
>
+ <!---->
+
<svg
aria-hidden="true"
class="s12 ic-ellipsis_h"
@@ -75,6 +81,8 @@ exports[`Expand button when short text is provided renders button before text 1`
style="display: none;"
type="button"
>
+ <!---->
+
<svg
aria-hidden="true"
class="s12 ic-ellipsis_h"
diff --git a/spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js b/spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js
index 17ea78b5826..ce3f289eb6e 100644
--- a/spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js
+++ b/spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js
@@ -1,14 +1,19 @@
import { shallowMount } from '@vue/test-utils';
import RichViewer from '~/vue_shared/components/blob_viewers/rich_viewer.vue';
+import { handleBlobRichViewer } from '~/blob/viewer';
+
+jest.mock('~/blob/viewer');
describe('Blob Rich Viewer component', () => {
let wrapper;
const content = '<h1 id="markdown">Foo Bar</h1>';
+ const defaultType = 'markdown';
- function createComponent() {
+ function createComponent(type = defaultType) {
wrapper = shallowMount(RichViewer, {
propsData: {
content,
+ type,
},
});
}
@@ -24,4 +29,8 @@ describe('Blob Rich Viewer component', () => {
it('renders the passed content without transformations', () => {
expect(wrapper.html()).toContain(content);
});
+
+ it('queries for advanced viewer', () => {
+ expect(handleBlobRichViewer).toHaveBeenCalledWith(expect.anything(), defaultType);
+ });
});
diff --git a/spec/frontend/vue_shared/components/blob_viewers/simple_viewer_spec.js b/spec/frontend/vue_shared/components/blob_viewers/simple_viewer_spec.js
index d12bfc5c686..79195aa1350 100644
--- a/spec/frontend/vue_shared/components/blob_viewers/simple_viewer_spec.js
+++ b/spec/frontend/vue_shared/components/blob_viewers/simple_viewer_spec.js
@@ -10,6 +10,7 @@ describe('Blob Simple Viewer component', () => {
wrapper = shallowMount(SimpleViewer, {
propsData: {
content,
+ type: 'text',
},
});
}
diff --git a/spec/frontend/vue_shared/components/changed_file_icon_spec.js b/spec/frontend/vue_shared/components/changed_file_icon_spec.js
index 8258eb8204c..03519a6f803 100644
--- a/spec/frontend/vue_shared/components/changed_file_icon_spec.js
+++ b/spec/frontend/vue_shared/components/changed_file_icon_spec.js
@@ -5,6 +5,7 @@ import Icon from '~/vue_shared/components/icon.vue';
const changedFile = () => ({ changed: true });
const stagedFile = () => ({ changed: true, staged: true });
const newFile = () => ({ changed: true, tempFile: true });
+const deletedFile = () => ({ changed: false, tempFile: false, staged: false, deleted: true });
const unchangedFile = () => ({ changed: false, tempFile: false, staged: false, deleted: false });
describe('Changed file icon', () => {
@@ -54,10 +55,11 @@ describe('Changed file icon', () => {
});
describe.each`
- file | iconName | tooltipText | desc
- ${changedFile()} | ${'file-modified'} | ${'Unstaged modification'} | ${'with file changed'}
- ${stagedFile()} | ${'file-modified-solid'} | ${'Staged modification'} | ${'with file staged'}
- ${newFile()} | ${'file-addition'} | ${'Unstaged addition'} | ${'with file new'}
+ file | iconName | tooltipText | desc
+ ${changedFile()} | ${'file-modified'} | ${'Modified'} | ${'with file changed'}
+ ${stagedFile()} | ${'file-modified-solid'} | ${'Modified'} | ${'with file staged'}
+ ${newFile()} | ${'file-addition'} | ${'Added'} | ${'with file new'}
+ ${deletedFile()} | ${'file-deletion'} | ${'Deleted'} | ${'with file deleted'}
`('$desc', ({ file, iconName, tooltipText }) => {
beforeEach(() => {
factory({ file });
diff --git a/spec/frontend/vue_shared/components/confirm_modal_spec.js b/spec/frontend/vue_shared/components/confirm_modal_spec.js
new file mode 100644
index 00000000000..7bccd6f1a64
--- /dev/null
+++ b/spec/frontend/vue_shared/components/confirm_modal_spec.js
@@ -0,0 +1,120 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlModal } from '@gitlab/ui';
+import { TEST_HOST } from 'helpers/test_constants';
+import ConfirmModal from '~/vue_shared/components/confirm_modal.vue';
+
+jest.mock('~/lib/utils/csrf', () => ({ token: 'test-csrf-token' }));
+
+describe('vue_shared/components/confirm_modal', () => {
+ const MOCK_MODAL_DATA = {
+ path: `${TEST_HOST}/1`,
+ method: 'delete',
+ modalAttributes: {
+ title: 'Are you sure?',
+ message: 'This will remove item 1',
+ okVariant: 'danger',
+ okTitle: 'Remove item',
+ },
+ };
+
+ const defaultProps = {
+ selector: '.test-button',
+ };
+
+ const actionSpies = {
+ openModal: jest.fn(),
+ closeModal: jest.fn(),
+ };
+
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(ConfirmModal, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ methods: {
+ ...actionSpies,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findModal = () => wrapper.find(GlModal);
+ const findForm = () => wrapper.find('form');
+ const findFormData = () =>
+ findForm()
+ .findAll('input')
+ .wrappers.map(x => ({ name: x.attributes('name'), value: x.attributes('value') }));
+
+ describe('template', () => {
+ describe('when modal data is set', () => {
+ beforeEach(() => {
+ createComponent();
+ wrapper.vm.modalAttributes = MOCK_MODAL_DATA.modalAttributes;
+ });
+
+      it('renders GlModal with data', () => {
+ expect(findModal().exists()).toBeTruthy();
+ expect(findModal().attributes()).toEqual(
+ expect.objectContaining({
+ oktitle: MOCK_MODAL_DATA.modalAttributes.okTitle,
+ okvariant: MOCK_MODAL_DATA.modalAttributes.okVariant,
+ }),
+ );
+ });
+ });
+ });
+
+ describe('methods', () => {
+ describe('submitModal', () => {
+ beforeEach(() => {
+ createComponent();
+ wrapper.vm.path = MOCK_MODAL_DATA.path;
+ wrapper.vm.method = MOCK_MODAL_DATA.method;
+ });
+
+ it('does not submit form', () => {
+ expect(findForm().element.submit).not.toHaveBeenCalled();
+ });
+
+ describe('when modal submitted', () => {
+ beforeEach(() => {
+ findModal().vm.$emit('primary');
+ });
+
+ it('submits form', () => {
+ expect(findFormData()).toEqual([
+ { name: '_method', value: MOCK_MODAL_DATA.method },
+ { name: 'authenticity_token', value: 'test-csrf-token' },
+ ]);
+ expect(findForm().element.submit).toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe('closeModal', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('does not close modal', () => {
+ expect(actionSpies.closeModal).not.toHaveBeenCalled();
+ });
+
+ describe('when modal closed', () => {
+ beforeEach(() => {
+ findModal().vm.$emit('cancel');
+ });
+
+ it('closes modal', () => {
+ expect(actionSpies.closeModal).toHaveBeenCalled();
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js b/spec/frontend/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js
new file mode 100644
index 00000000000..f364f374887
--- /dev/null
+++ b/spec/frontend/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js
@@ -0,0 +1,194 @@
+import Vue from 'vue';
+import { compileToFunctions } from 'vue-template-compiler';
+import { mount } from '@vue/test-utils';
+
+import { GREEN_BOX_IMAGE_URL, RED_BOX_IMAGE_URL } from 'spec/test_constants';
+import imageDiffViewer from '~/vue_shared/components/diff_viewer/viewers/image_diff_viewer.vue';
+
+describe('ImageDiffViewer', () => {
+ const requiredProps = {
+ diffMode: 'replaced',
+ newPath: GREEN_BOX_IMAGE_URL,
+ oldPath: RED_BOX_IMAGE_URL,
+ };
+ const allProps = {
+ ...requiredProps,
+ oldSize: 2048,
+ newSize: 1024,
+ };
+ let wrapper;
+ let vm;
+
+ function createComponent(props) {
+ const ImageDiffViewer = Vue.extend(imageDiffViewer);
+ wrapper = mount(ImageDiffViewer, { propsData: props });
+ vm = wrapper.vm;
+ }
+
+ const triggerEvent = (eventName, el = vm.$el, clientX = 0) => {
+ const event = new MouseEvent(eventName, {
+ bubbles: true,
+ cancelable: true,
+ view: window,
+ detail: 1,
+ screenX: clientX,
+ clientX,
+ });
+
+ // JSDOM does not implement experimental APIs
+ event.pageX = clientX;
+
+ el.dispatchEvent(event);
+ };
+
+ const dragSlider = (sliderElement, doc, dragPixel) => {
+ triggerEvent('mousedown', sliderElement);
+ triggerEvent('mousemove', doc.body, dragPixel);
+ triggerEvent('mouseup', doc.body);
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders image diff for replaced', done => {
+ createComponent({ ...allProps });
+
+ vm.$nextTick(() => {
+ const metaInfoElements = vm.$el.querySelectorAll('.image-info');
+
+ expect(vm.$el.querySelector('.added img').getAttribute('src')).toBe(GREEN_BOX_IMAGE_URL);
+
+ expect(vm.$el.querySelector('.deleted img').getAttribute('src')).toBe(RED_BOX_IMAGE_URL);
+
+ expect(vm.$el.querySelector('.view-modes-menu li.active').textContent.trim()).toBe('2-up');
+ expect(vm.$el.querySelector('.view-modes-menu li:nth-child(2)').textContent.trim()).toBe(
+ 'Swipe',
+ );
+
+ expect(vm.$el.querySelector('.view-modes-menu li:nth-child(3)').textContent.trim()).toBe(
+ 'Onion skin',
+ );
+
+ expect(metaInfoElements.length).toBe(2);
+ expect(metaInfoElements[0]).toHaveText('2.00 KiB');
+ expect(metaInfoElements[1]).toHaveText('1.00 KiB');
+
+ done();
+ });
+ });
+
+ it('renders image diff for new', done => {
+ createComponent({ ...allProps, diffMode: 'new', oldPath: '' });
+
+ setImmediate(() => {
+ const metaInfoElement = vm.$el.querySelector('.image-info');
+
+ expect(vm.$el.querySelector('.added img').getAttribute('src')).toBe(GREEN_BOX_IMAGE_URL);
+ expect(metaInfoElement).toHaveText('1.00 KiB');
+
+ done();
+ });
+ });
+
+ it('renders image diff for deleted', done => {
+ createComponent({ ...allProps, diffMode: 'deleted', newPath: '' });
+
+ setImmediate(() => {
+ const metaInfoElement = vm.$el.querySelector('.image-info');
+
+ expect(vm.$el.querySelector('.deleted img').getAttribute('src')).toBe(RED_BOX_IMAGE_URL);
+ expect(metaInfoElement).toHaveText('2.00 KiB');
+
+ done();
+ });
+ });
+
+ it('renders image diff for renamed', done => {
+ vm = new Vue({
+ components: {
+ imageDiffViewer,
+ },
+ data: {
+ ...allProps,
+ diffMode: 'renamed',
+ },
+ ...compileToFunctions(`
+ <image-diff-viewer
+ :diff-mode="diffMode"
+ :new-path="newPath"
+ :old-path="oldPath"
+ :new-size="newSize"
+ :old-size="oldSize"
+ >
+ <span slot="image-overlay" class="overlay">test</span>
+ </image-diff-viewer>
+ `),
+ }).$mount();
+
+ setImmediate(() => {
+ const metaInfoElement = vm.$el.querySelector('.image-info');
+
+ expect(vm.$el.querySelector('img').getAttribute('src')).toBe(GREEN_BOX_IMAGE_URL);
+ expect(vm.$el.querySelector('.overlay')).not.toBe(null);
+
+ expect(metaInfoElement).toHaveText('2.00 KiB');
+
+ done();
+ });
+ });
+
+ describe('swipeMode', () => {
+ beforeEach(done => {
+ createComponent({ ...requiredProps });
+
+ setImmediate(() => {
+ done();
+ });
+ });
+
+ it('switches to Swipe Mode', done => {
+ vm.$el.querySelector('.view-modes-menu li:nth-child(2)').click();
+
+ vm.$nextTick(() => {
+ expect(vm.$el.querySelector('.view-modes-menu li.active').textContent.trim()).toBe('Swipe');
+ done();
+ });
+ });
+ });
+
+ describe('onionSkin', () => {
+ beforeEach(done => {
+ createComponent({ ...requiredProps });
+
+ setImmediate(() => {
+ done();
+ });
+ });
+
+ it('switches to Onion Skin Mode', done => {
+ vm.$el.querySelector('.view-modes-menu li:nth-child(3)').click();
+
+ vm.$nextTick(() => {
+ expect(vm.$el.querySelector('.view-modes-menu li.active').textContent.trim()).toBe(
+ 'Onion skin',
+ );
+ done();
+ });
+ });
+
+ it('has working drag handler', done => {
+ vm.$el.querySelector('.view-modes-menu li:nth-child(3)').click();
+
+ vm.$nextTick(() => {
+ dragSlider(vm.$el.querySelector('.dragger'), document, 20);
+
+ vm.$nextTick(() => {
+ expect(vm.$el.querySelector('.dragger').style.left).toBe('20px');
+ expect(vm.$el.querySelector('.added.frame').style.opacity).toBe('0.2');
+ done();
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/gl_mentions_spec.js b/spec/frontend/vue_shared/components/gl_mentions_spec.js
new file mode 100644
index 00000000000..32fc055a77d
--- /dev/null
+++ b/spec/frontend/vue_shared/components/gl_mentions_spec.js
@@ -0,0 +1,34 @@
+import { shallowMount } from '@vue/test-utils';
+import Tribute from 'tributejs';
+import GlMentions from '~/vue_shared/components/gl_mentions.vue';
+
+describe('GlMentions', () => {
+ let wrapper;
+
+ describe('Tribute', () => {
+ const mentions = '/gitlab-org/gitlab-test/-/autocomplete_sources/members?type=Issue&type_id=1';
+
+ beforeEach(() => {
+ wrapper = shallowMount(GlMentions, {
+ propsData: {
+ dataSources: {
+ mentions,
+ },
+ },
+ slots: {
+ default: ['<input/>'],
+ },
+ });
+ });
+
+ it('is set to tribute instance variable', () => {
+ expect(wrapper.vm.tribute instanceof Tribute).toBe(true);
+ });
+
+ it('contains the slot input element', () => {
+ wrapper.find('input').setValue('@');
+
+ expect(wrapper.vm.tribute.current.element).toBe(wrapper.find('input').element);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/issue/related_issuable_mock_data.js b/spec/frontend/vue_shared/components/issue/related_issuable_mock_data.js
new file mode 100644
index 00000000000..5f69d761fdf
--- /dev/null
+++ b/spec/frontend/vue_shared/components/issue/related_issuable_mock_data.js
@@ -0,0 +1,121 @@
+export const defaultProps = {
+ endpoint: '/foo/bar/issues/1/related_issues',
+ currentNamespacePath: 'foo',
+ currentProjectPath: 'bar',
+};
+
+export const issuable1 = {
+ id: 200,
+ epicIssueId: 1,
+ confidential: false,
+ reference: 'foo/bar#123',
+ displayReference: '#123',
+ title: 'some title',
+ path: '/foo/bar/issues/123',
+ relationPath: '/foo/bar/issues/123/relation',
+ state: 'opened',
+ linkType: 'relates_to',
+ dueDate: '2010-11-22',
+ weight: 5,
+};
+
+export const issuable2 = {
+ id: 201,
+ epicIssueId: 2,
+ confidential: false,
+ reference: 'foo/bar#124',
+ displayReference: '#124',
+ title: 'some other thing',
+ path: '/foo/bar/issues/124',
+ relationPath: '/foo/bar/issues/124/relation',
+ state: 'opened',
+ linkType: 'blocks',
+};
+
+export const issuable3 = {
+ id: 202,
+ epicIssueId: 3,
+ confidential: false,
+ reference: 'foo/bar#125',
+ displayReference: '#125',
+ title: 'some other other thing',
+ path: '/foo/bar/issues/125',
+ relationPath: '/foo/bar/issues/125/relation',
+ state: 'opened',
+ linkType: 'is_blocked_by',
+};
+
+export const issuable4 = {
+ id: 203,
+ epicIssueId: 4,
+ confidential: false,
+ reference: 'foo/bar#126',
+ displayReference: '#126',
+ title: 'some other other other thing',
+ path: '/foo/bar/issues/126',
+ relationPath: '/foo/bar/issues/126/relation',
+ state: 'opened',
+};
+
+export const issuable5 = {
+ id: 204,
+ epicIssueId: 5,
+ confidential: false,
+ reference: 'foo/bar#127',
+ displayReference: '#127',
+ title: 'some other other other thing',
+ path: '/foo/bar/issues/127',
+ relationPath: '/foo/bar/issues/127/relation',
+ state: 'opened',
+};
+
+export const defaultMilestone = {
+ id: 1,
+ state: 'active',
+ title: 'Milestone title',
+ start_date: '2018-01-01',
+ due_date: '2019-12-31',
+};
+
+export const defaultAssignees = [
+ {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ state: 'active',
+ avatar_url: `${gl.TEST_HOST}`,
+ web_url: `${gl.TEST_HOST}/root`,
+ status_tooltip_html: null,
+ path: '/root',
+ },
+ {
+ id: 13,
+ name: 'Brooks Beatty',
+ username: 'brynn_champlin',
+ state: 'active',
+ avatar_url: `${gl.TEST_HOST}`,
+ web_url: `${gl.TEST_HOST}/brynn_champlin`,
+ status_tooltip_html: null,
+ path: '/brynn_champlin',
+ },
+ {
+ id: 6,
+ name: 'Bryce Turcotte',
+ username: 'melynda',
+ state: 'active',
+ avatar_url: `${gl.TEST_HOST}`,
+ web_url: `${gl.TEST_HOST}/melynda`,
+ status_tooltip_html: null,
+ path: '/melynda',
+ },
+ {
+ id: 20,
+ name: 'Conchita Eichmann',
+ username: 'juliana_gulgowski',
+ state: 'active',
+ avatar_url: `${gl.TEST_HOST}`,
+ web_url: `${gl.TEST_HOST}/juliana_gulgowski`,
+ status_tooltip_html: null,
+ path: '/juliana_gulgowski',
+ },
+];
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_title_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_title_spec.js
index 2fffb31acf5..5cbbb99eaef 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_title_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_title_spec.js
@@ -1,39 +1,38 @@
-import Vue from 'vue';
-
-import mountComponent from 'helpers/vue_mount_component_helper';
+import { shallowMount } from '@vue/test-utils';
+import { GlLoadingIcon } from '@gitlab/ui';
import dropdownTitleComponent from '~/vue_shared/components/sidebar/labels_select/dropdown_title.vue';
-const createComponent = (canEdit = true) => {
- const Component = Vue.extend(dropdownTitleComponent);
-
- return mountComponent(Component, {
- canEdit,
+const createComponent = (canEdit = true) =>
+ shallowMount(dropdownTitleComponent, {
+ propsData: {
+ canEdit,
+ },
});
-};
describe('DropdownTitleComponent', () => {
- let vm;
+ let wrapper;
beforeEach(() => {
- vm = createComponent();
+ wrapper = createComponent();
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
+ wrapper = null;
});
describe('template', () => {
it('renders title text', () => {
- expect(vm.$el.classList.contains('title', 'hide-collapsed')).toBe(true);
- expect(vm.$el.innerText.trim()).toContain('Labels');
+ expect(wrapper.vm.$el.classList.contains('title', 'hide-collapsed')).toBe(true);
+ expect(wrapper.vm.$el.innerText.trim()).toContain('Labels');
});
it('renders spinner icon element', () => {
- expect(vm.$el.querySelector('.fa-spinner.fa-spin.block-loading')).not.toBeNull();
+ expect(wrapper.find(GlLoadingIcon)).not.toBeNull();
});
it('renders `Edit` button element', () => {
- const editBtnEl = vm.$el.querySelector('button.edit-link.js-sidebar-dropdown-toggle');
+ const editBtnEl = wrapper.vm.$el.querySelector('button.edit-link.js-sidebar-dropdown-toggle');
expect(editBtnEl).not.toBeNull();
expect(editBtnEl.innerText.trim()).toBe('Edit');
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_value_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_value_spec.js
index 54ad96073c8..06355c0dd65 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_value_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_value_spec.js
@@ -1,31 +1,26 @@
import { mount } from '@vue/test-utils';
-import { hexToRgb } from '~/lib/utils/color_utils';
import DropdownValueComponent from '~/vue_shared/components/sidebar/labels_select/dropdown_value.vue';
-import DropdownValueScopedLabel from '~/vue_shared/components/sidebar/labels_select/dropdown_value_scoped_label.vue';
+import { GlLabel } from '@gitlab/ui';
import {
mockConfig,
mockLabels,
} from '../../../../../javascripts/vue_shared/components/sidebar/labels_select/mock_data';
-const labelStyles = {
- textColor: '#FFFFFF',
- color: '#BADA55',
-};
const createComponent = (
labels = mockLabels,
labelFilterBasePath = mockConfig.labelFilterBasePath,
-) => {
- labels.forEach(label => Object.assign(label, labelStyles));
-
- return mount(DropdownValueComponent, {
+) =>
+ mount(DropdownValueComponent, {
propsData: {
labels,
labelFilterBasePath,
enableScopedLabels: true,
},
+ stubs: {
+ GlLabel: true,
+ },
});
-};
describe('DropdownValueComponent', () => {
let vm;
@@ -56,24 +51,17 @@ describe('DropdownValueComponent', () => {
describe('methods', () => {
describe('labelFilterUrl', () => {
it('returns URL string starting with labelFilterBasePath and encoded label.title', () => {
- expect(vm.find(DropdownValueScopedLabel).props('labelFilterUrl')).toBe(
- '/gitlab-org/my-project/issues?label_name[]=Foo%3A%3ABar',
+ expect(vm.find(GlLabel).props('target')).toBe(
+ '/gitlab-org/my-project/issues?label_name[]=Foo%20Label',
);
});
});
- describe('labelStyle', () => {
- it('returns object with `color` & `backgroundColor` properties from label.textColor & label.color', () => {
- expect(vm.find(DropdownValueScopedLabel).props('labelStyle')).toEqual({
- color: labelStyles.textColor,
- backgroundColor: labelStyles.color,
- });
- });
- });
-
describe('showScopedLabels', () => {
it('returns true if the label is scoped label', () => {
- expect(vm.findAll(DropdownValueScopedLabel).length).toEqual(1);
+ const labels = vm.findAll(GlLabel);
+ expect(labels.length).toEqual(2);
+ expect(labels.at(1).props('scoped')).toBe(true);
});
});
});
@@ -95,33 +83,10 @@ describe('DropdownValueComponent', () => {
vmEmptyLabels.destroy();
});
- it('renders label element with filter URL', () => {
- expect(vm.find('a').attributes('href')).toBe(
- '/gitlab-org/my-project/issues?label_name[]=Foo%20Label',
- );
- });
-
- it('renders label element and styles based on label details', () => {
- const labelEl = vm.find('a span.badge.color-label');
+ it('renders DropdownValueComponent element', () => {
+ const labelEl = vm.find(GlLabel);
expect(labelEl.exists()).toBe(true);
- expect(labelEl.attributes('style')).toContain(
- `background-color: rgb(${hexToRgb(labelStyles.color).join(', ')});`,
- );
- expect(labelEl.text().trim()).toBe(mockLabels[0].title);
- });
-
- describe('label is of scoped-label type', () => {
- it('renders a scoped-label-wrapper span to incorporate 2 anchors', () => {
- expect(vm.find('span.scoped-label-wrapper').exists()).toBe(true);
- });
-
- it('renders anchor tag containing question icon', () => {
- const anchor = vm.find('.scoped-label-wrapper a.scoped-label');
-
- expect(anchor.exists()).toBe(true);
- expect(anchor.find('i.fa-question-circle').exists()).toBe(true);
- });
});
});
});
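
The labels_select_vue specs added below all follow the same setup: Vuex is installed on a localVue copy (so the global Vue constructor stays untouched), a fresh store is built from a module factory, and the store is seeded through the `setInitialState` action before mounting with `shallowMount`. The factory itself is not included in this commit; the following is only a rough sketch of the shape it plausibly returns, inferred from how `new Vuex.Store(labelsSelectModule())` and the store files imported by these specs are used.

// Sketch, not part of the diff: a plausible shape for the store module factory.
// The real file is not shown in this commit, so treat the details as assumptions.
import * as actions from './actions';
import * as getters from './getters';
import mutations from './mutations';
import state from './state';

export default () => ({
  state: state(), // fresh state per component instance
  actions,
  mutations,
  getters,
});
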
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_button_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_button_spec.js
new file mode 100644
index 00000000000..d996f48f9cc
--- /dev/null
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_button_spec.js
@@ -0,0 +1,55 @@
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+
+import { GlIcon } from '@gitlab/ui';
+import DropdownButton from '~/vue_shared/components/sidebar/labels_select_vue/dropdown_button.vue';
+
+import labelSelectModule from '~/vue_shared/components/sidebar/labels_select_vue/store';
+
+import { mockConfig } from './mock_data';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+const createComponent = (initialState = mockConfig) => {
+ const store = new Vuex.Store(labelSelectModule());
+
+ store.dispatch('setInitialState', initialState);
+
+ return shallowMount(DropdownButton, {
+ localVue,
+ store,
+ });
+};
+
+describe('DropdownButton', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('template', () => {
+ it('renders component container element', () => {
+ expect(wrapper.is('gl-button-stub')).toBe(true);
+ });
+
+ it('renders button text element', () => {
+ const dropdownTextEl = wrapper.find('.dropdown-toggle-text');
+
+ expect(dropdownTextEl.exists()).toBe(true);
+ expect(dropdownTextEl.text()).toBe('Label');
+ });
+
+ it('renders chevron icon element', () => {
+ const iconEl = wrapper.find(GlIcon);
+
+ expect(iconEl.exists()).toBe(true);
+ expect(iconEl.props('name')).toBe('chevron-down');
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_create_view_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_create_view_spec.js
new file mode 100644
index 00000000000..9bc01d8723f
--- /dev/null
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_create_view_spec.js
@@ -0,0 +1,223 @@
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+
+import { GlButton, GlIcon, GlFormInput, GlLink, GlLoadingIcon } from '@gitlab/ui';
+import DropdownContentsCreateView from '~/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_create_view.vue';
+
+import labelSelectModule from '~/vue_shared/components/sidebar/labels_select_vue/store';
+
+import { mockConfig, mockSuggestedColors } from './mock_data';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+const createComponent = (initialState = mockConfig) => {
+ const store = new Vuex.Store(labelSelectModule());
+
+ store.dispatch('setInitialState', initialState);
+
+ return shallowMount(DropdownContentsCreateView, {
+ localVue,
+ store,
+ });
+};
+
+describe('DropdownContentsCreateView', () => {
+ let wrapper;
+ const colors = Object.keys(mockSuggestedColors).map(color => ({
+ [color]: mockSuggestedColors[color],
+ }));
+
+ beforeEach(() => {
+ gon.suggested_label_colors = mockSuggestedColors;
+ wrapper = createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('computed', () => {
+ describe('disableCreate', () => {
+      it('returns `true` when label title and color are not defined', () => {
+ expect(wrapper.vm.disableCreate).toBe(true);
+ });
+
+ it('returns `true` when `labelCreateInProgress` is true', () => {
+ wrapper.setData({
+ labelTitle: 'Foo',
+ selectedColor: '#ff0000',
+ });
+ wrapper.vm.$store.dispatch('requestCreateLabel');
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.vm.disableCreate).toBe(true);
+ });
+ });
+
+      it('returns `false` when label title and color are defined and a create request is not already in progress', () => {
+ wrapper.setData({
+ labelTitle: 'Foo',
+ selectedColor: '#ff0000',
+ });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.vm.disableCreate).toBe(false);
+ });
+ });
+ });
+
+ describe('suggestedColors', () => {
+ it('returns array of color objects containing color code and name', () => {
+ colors.forEach((color, index) => {
+ expect(wrapper.vm.suggestedColors[index]).toEqual(expect.objectContaining(color));
+ });
+ });
+ });
+ });
+
+ describe('methods', () => {
+ describe('getColorCode', () => {
+ it('returns color code from color object', () => {
+ expect(wrapper.vm.getColorCode(colors[0])).toBe(Object.keys(colors[0]).pop());
+ });
+ });
+
+ describe('getColorName', () => {
+ it('returns color name from color object', () => {
+ expect(wrapper.vm.getColorName(colors[0])).toBe(Object.values(colors[0]).pop());
+ });
+ });
+
+ describe('handleColorClick', () => {
+ it('sets provided `color` param to `selectedColor` prop', () => {
+ wrapper.vm.handleColorClick(colors[0]);
+
+ expect(wrapper.vm.selectedColor).toBe(Object.keys(colors[0]).pop());
+ });
+ });
+
+ describe('handleCreateClick', () => {
+ it('calls action `createLabel` with object containing `labelTitle` & `selectedColor`', () => {
+ jest.spyOn(wrapper.vm, 'createLabel').mockImplementation();
+ wrapper.setData({
+ labelTitle: 'Foo',
+ selectedColor: '#ff0000',
+ });
+
+ wrapper.vm.handleCreateClick();
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.vm.createLabel).toHaveBeenCalledWith(
+ expect.objectContaining({
+ title: 'Foo',
+ color: '#ff0000',
+ }),
+ );
+ });
+ });
+ });
+ });
+
+ describe('template', () => {
+ it('renders component container element with class "labels-select-contents-create"', () => {
+ expect(wrapper.attributes('class')).toContain('labels-select-contents-create');
+ });
+
+ it('renders dropdown back button element', () => {
+ const backBtnEl = wrapper
+ .find('.dropdown-title')
+ .findAll(GlButton)
+ .at(0);
+
+ expect(backBtnEl.exists()).toBe(true);
+ expect(backBtnEl.attributes('aria-label')).toBe('Go back');
+ expect(backBtnEl.find(GlIcon).props('name')).toBe('arrow-left');
+ });
+
+ it('renders dropdown title element', () => {
+ const headerEl = wrapper.find('.dropdown-title > span');
+
+ expect(headerEl.exists()).toBe(true);
+ expect(headerEl.text()).toBe('Create label');
+ });
+
+ it('renders dropdown close button element', () => {
+ const closeBtnEl = wrapper
+ .find('.dropdown-title')
+ .findAll(GlButton)
+ .at(1);
+
+ expect(closeBtnEl.exists()).toBe(true);
+ expect(closeBtnEl.attributes('aria-label')).toBe('Close');
+ expect(closeBtnEl.find(GlIcon).props('name')).toBe('close');
+ });
+
+ it('renders label title input element', () => {
+ const titleInputEl = wrapper.find('.dropdown-input').find(GlFormInput);
+
+ expect(titleInputEl.exists()).toBe(true);
+ expect(titleInputEl.attributes('placeholder')).toBe('Name new label');
+ expect(titleInputEl.attributes('autofocus')).toBe('true');
+ });
+
+ it('renders color block element for all suggested colors', () => {
+ const colorBlocksEl = wrapper.find('.dropdown-content').findAll(GlLink);
+
+ colorBlocksEl.wrappers.forEach((colorBlock, index) => {
+ expect(colorBlock.attributes('style')).toContain('background-color');
+ expect(colorBlock.attributes('title')).toBe(Object.values(colors[index]).pop());
+ });
+ });
+
+ it('renders color input element', () => {
+ wrapper.setData({
+ selectedColor: '#ff0000',
+ });
+
+ return wrapper.vm.$nextTick(() => {
+ const colorPreviewEl = wrapper.find(
+ '.color-input-container > .dropdown-label-color-preview',
+ );
+ const colorInputEl = wrapper.find('.color-input-container').find(GlFormInput);
+
+ expect(colorPreviewEl.exists()).toBe(true);
+ expect(colorPreviewEl.attributes('style')).toContain('background-color');
+ expect(colorInputEl.exists()).toBe(true);
+ expect(colorInputEl.attributes('placeholder')).toBe('Use custom color #FF0000');
+ expect(colorInputEl.attributes('value')).toBe('#ff0000');
+ });
+ });
+
+ it('renders create button element', () => {
+ const createBtnEl = wrapper
+ .find('.dropdown-actions')
+ .findAll(GlButton)
+ .at(0);
+
+ expect(createBtnEl.exists()).toBe(true);
+ expect(createBtnEl.text()).toContain('Create');
+ });
+
+ it('shows gl-loading-icon within create button element when `labelCreateInProgress` is `true`', () => {
+ wrapper.vm.$store.dispatch('requestCreateLabel');
+
+ return wrapper.vm.$nextTick(() => {
+ const loadingIconEl = wrapper.find('.dropdown-actions').find(GlLoadingIcon);
+
+ expect(loadingIconEl.exists()).toBe(true);
+ expect(loadingIconEl.isVisible()).toBe(true);
+ });
+ });
+
+ it('renders cancel button element', () => {
+ const cancelBtnEl = wrapper
+ .find('.dropdown-actions')
+ .findAll(GlButton)
+ .at(1);
+
+ expect(cancelBtnEl.exists()).toBe(true);
+ expect(cancelBtnEl.text()).toContain('Cancel');
+ });
+ });
+});
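
A pattern worth noting in the spec above: after `wrapper.setData(...)` or a store dispatch, assertions against computed values or rendered output are wrapped in `wrapper.vm.$nextTick(...)` and the resulting promise is returned from the test. Vue applies reactive updates asynchronously, so the assertion has to wait one tick, and returning the promise makes Jest wait for it. A minimal illustration of the same pattern, reusing the `wrapper` set up in that spec:

// Sketch, not part of the diff: the wait-a-tick pattern used throughout these specs.
it('enables the create button once a title and color are set', () => {
  wrapper.setData({ labelTitle: 'Foo', selectedColor: '#ff0000' });

  // Reactive updates land on the next tick; returning the promise lets Jest wait for it.
  return wrapper.vm.$nextTick(() => {
    expect(wrapper.vm.disableCreate).toBe(false);
  });
});
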
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js
new file mode 100644
index 00000000000..487b917852e
--- /dev/null
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js
@@ -0,0 +1,265 @@
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+
+import { GlButton, GlLoadingIcon, GlIcon, GlSearchBoxByType, GlLink } from '@gitlab/ui';
+import { UP_KEY_CODE, DOWN_KEY_CODE, ENTER_KEY_CODE, ESC_KEY_CODE } from '~/lib/utils/keycodes';
+import DropdownContentsLabelsView from '~/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view.vue';
+
+import defaultState from '~/vue_shared/components/sidebar/labels_select_vue/store/state';
+import mutations from '~/vue_shared/components/sidebar/labels_select_vue/store/mutations';
+import * as actions from '~/vue_shared/components/sidebar/labels_select_vue/store/actions';
+import * as getters from '~/vue_shared/components/sidebar/labels_select_vue/store/getters';
+
+import { mockConfig, mockLabels, mockRegularLabel } from './mock_data';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+const createComponent = (initialState = mockConfig) => {
+ const store = new Vuex.Store({
+ getters,
+ mutations,
+ state: {
+ ...defaultState(),
+ footerCreateLabelTitle: 'Create label',
+ footerManageLabelTitle: 'Manage labels',
+ },
+ actions: {
+ ...actions,
+ fetchLabels: jest.fn(),
+ },
+ });
+
+ store.dispatch('setInitialState', initialState);
+ store.dispatch('receiveLabelsSuccess', mockLabels);
+
+ return shallowMount(DropdownContentsLabelsView, {
+ localVue,
+ store,
+ });
+};
+
+describe('DropdownContentsLabelsView', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('computed', () => {
+ describe('visibleLabels', () => {
+ it('returns matching labels filtered with `searchKey`', () => {
+ wrapper.setData({
+ searchKey: 'bug',
+ });
+
+ expect(wrapper.vm.visibleLabels.length).toBe(1);
+ expect(wrapper.vm.visibleLabels[0].title).toBe('Bug');
+ });
+
+ it('returns all labels when `searchKey` is empty', () => {
+ wrapper.setData({
+ searchKey: '',
+ });
+
+ expect(wrapper.vm.visibleLabels.length).toBe(mockLabels.length);
+ });
+ });
+ });
+
+ describe('methods', () => {
+ describe('getDropdownLabelBoxStyle', () => {
+ it('returns an object containing `backgroundColor` based on provided `label` param', () => {
+ expect(wrapper.vm.getDropdownLabelBoxStyle(mockRegularLabel)).toEqual(
+ expect.objectContaining({
+ backgroundColor: mockRegularLabel.color,
+ }),
+ );
+ });
+ });
+
+ describe('isLabelSelected', () => {
+ it('returns true when provided `label` param is one of the selected labels', () => {
+ expect(wrapper.vm.isLabelSelected(mockRegularLabel)).toBe(true);
+ });
+
+ it('returns false when provided `label` param is not one of the selected labels', () => {
+ expect(wrapper.vm.isLabelSelected(mockLabels[2])).toBe(false);
+ });
+ });
+
+ describe('handleKeyDown', () => {
+ it('decreases `currentHighlightItem` value by 1 when Up arrow key is pressed', () => {
+ wrapper.setData({
+ currentHighlightItem: 1,
+ });
+
+ wrapper.vm.handleKeyDown({
+ keyCode: UP_KEY_CODE,
+ });
+
+ expect(wrapper.vm.currentHighlightItem).toBe(0);
+ });
+
+ it('increases `currentHighlightItem` value by 1 when Down arrow key is pressed', () => {
+ wrapper.setData({
+ currentHighlightItem: 1,
+ });
+
+ wrapper.vm.handleKeyDown({
+ keyCode: DOWN_KEY_CODE,
+ });
+
+ expect(wrapper.vm.currentHighlightItem).toBe(2);
+ });
+
+ it('calls action `updateSelectedLabels` with currently highlighted label when Enter key is pressed', () => {
+ jest.spyOn(wrapper.vm, 'updateSelectedLabels').mockImplementation();
+ wrapper.setData({
+ currentHighlightItem: 1,
+ });
+
+ wrapper.vm.handleKeyDown({
+ keyCode: ENTER_KEY_CODE,
+ });
+
+ expect(wrapper.vm.updateSelectedLabels).toHaveBeenCalledWith([
+ {
+ ...mockLabels[1],
+ set: true,
+ },
+ ]);
+ });
+
+ it('calls action `toggleDropdownContents` when Esc key is pressed', () => {
+ jest.spyOn(wrapper.vm, 'toggleDropdownContents').mockImplementation();
+ wrapper.setData({
+ currentHighlightItem: 1,
+ });
+
+ wrapper.vm.handleKeyDown({
+ keyCode: ESC_KEY_CODE,
+ });
+
+ expect(wrapper.vm.toggleDropdownContents).toHaveBeenCalled();
+ });
+
+ it('calls action `scrollIntoViewIfNeeded` in next tick when any key is pressed', () => {
+ jest.spyOn(wrapper.vm, 'scrollIntoViewIfNeeded').mockImplementation();
+ wrapper.setData({
+ currentHighlightItem: 1,
+ });
+
+ wrapper.vm.handleKeyDown({
+ keyCode: DOWN_KEY_CODE,
+ });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.vm.scrollIntoViewIfNeeded).toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe('handleLabelClick', () => {
+ it('calls action `updateSelectedLabels` with provided `label` param', () => {
+ jest.spyOn(wrapper.vm, 'updateSelectedLabels').mockImplementation();
+
+ wrapper.vm.handleLabelClick(mockRegularLabel);
+
+ expect(wrapper.vm.updateSelectedLabels).toHaveBeenCalledWith([mockRegularLabel]);
+ });
+ });
+ });
+
+ describe('template', () => {
+ it('renders component container element with class `labels-select-contents-list`', () => {
+ expect(wrapper.attributes('class')).toContain('labels-select-contents-list');
+ });
+
+ it('renders gl-loading-icon component when `labelsFetchInProgress` prop is true', () => {
+ wrapper.vm.$store.dispatch('requestLabels');
+
+ return wrapper.vm.$nextTick(() => {
+ const loadingIconEl = wrapper.find(GlLoadingIcon);
+
+ expect(loadingIconEl.exists()).toBe(true);
+ expect(loadingIconEl.attributes('class')).toContain('labels-fetch-loading');
+ });
+ });
+
+ it('renders dropdown title element', () => {
+ const titleEl = wrapper.find('.dropdown-title > span');
+
+ expect(titleEl.exists()).toBe(true);
+ expect(titleEl.text()).toBe('Assign labels');
+ });
+
+ it('renders dropdown close button element', () => {
+ const closeButtonEl = wrapper.find('.dropdown-title').find(GlButton);
+
+ expect(closeButtonEl.exists()).toBe(true);
+ expect(closeButtonEl.find(GlIcon).exists()).toBe(true);
+ expect(closeButtonEl.find(GlIcon).props('name')).toBe('close');
+ });
+
+ it('renders label search input element', () => {
+ const searchInputEl = wrapper.find(GlSearchBoxByType);
+
+ expect(searchInputEl.exists()).toBe(true);
+ expect(searchInputEl.attributes('autofocus')).toBe('true');
+ });
+
+ it('renders label elements for all labels', () => {
+ const labelsEl = wrapper.findAll('.dropdown-content li');
+ const labelItemEl = labelsEl.at(0).find(GlLink);
+
+ expect(labelsEl.length).toBe(mockLabels.length);
+ expect(labelItemEl.exists()).toBe(true);
+ expect(labelItemEl.find(GlIcon).props('name')).toBe('mobile-issue-close');
+ expect(labelItemEl.find('.dropdown-label-box').attributes('style')).toBe(
+ 'background-color: rgb(186, 218, 85);',
+ );
+ expect(labelItemEl.find(GlLink).text()).toContain(mockLabels[0].title);
+ });
+
+ it('renders label element with "is-focused" when value of `currentHighlightItem` is more than -1', () => {
+ wrapper.setData({
+ currentHighlightItem: 0,
+ });
+
+ return wrapper.vm.$nextTick(() => {
+ const labelsEl = wrapper.findAll('.dropdown-content li');
+ const labelItemEl = labelsEl.at(0).find(GlLink);
+
+ expect(labelItemEl.attributes('class')).toContain('is-focused');
+ });
+ });
+
+    it('renders element containing "No matching results" when `searchKey` does not match any label', () => {
+ wrapper.setData({
+ searchKey: 'abc',
+ });
+
+ return wrapper.vm.$nextTick(() => {
+ const noMatchEl = wrapper.find('.dropdown-content li');
+
+ expect(noMatchEl.exists()).toBe(true);
+ expect(noMatchEl.text()).toContain('No matching results');
+ });
+ });
+
+ it('renders footer list items', () => {
+ const createLabelBtn = wrapper.find('.dropdown-footer').find(GlButton);
+ const manageLabelsLink = wrapper.find('.dropdown-footer').find(GlLink);
+
+ expect(createLabelBtn.exists()).toBe(true);
+ expect(createLabelBtn.text()).toBe('Create label');
+ expect(manageLabelsLink.exists()).toBe(true);
+ expect(manageLabelsLink.text()).toBe('Manage labels');
+ });
+ });
+});
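
The keydown tests above pin down the expected behaviour of `handleKeyDown` without showing the component source, which is not part of this spec file. A `methods` fragment consistent with those expectations could look roughly like the sketch below; treat it as an illustration rather than the actual implementation.

// Sketch, not part of the diff: behaviour implied by the keydown specs above.
handleKeyDown(e) {
  if (e.keyCode === UP_KEY_CODE) {
    this.currentHighlightItem -= 1;
  } else if (e.keyCode === DOWN_KEY_CODE) {
    this.currentHighlightItem += 1;
  } else if (e.keyCode === ENTER_KEY_CODE) {
    // select the currently highlighted label
    this.updateSelectedLabels([{ ...this.visibleLabels[this.currentHighlightItem], set: true }]);
  } else if (e.keyCode === ESC_KEY_CODE) {
    this.toggleDropdownContents();
  }

  // keep the highlighted row visible in the scrollable list
  this.$nextTick(() => this.scrollIntoViewIfNeeded());
},
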
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_spec.js
new file mode 100644
index 00000000000..bb462acf11c
--- /dev/null
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_spec.js
@@ -0,0 +1,54 @@
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+
+import DropdownContents from '~/vue_shared/components/sidebar/labels_select_vue/dropdown_contents.vue';
+
+import labelsSelectModule from '~/vue_shared/components/sidebar/labels_select_vue/store';
+
+import { mockConfig } from './mock_data';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+const createComponent = (initialState = mockConfig) => {
+ const store = new Vuex.Store(labelsSelectModule());
+
+ store.dispatch('setInitialState', initialState);
+
+ return shallowMount(DropdownContents, {
+ localVue,
+ store,
+ });
+};
+
+describe('DropdownContent', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('computed', () => {
+ describe('dropdownContentsView', () => {
+ it('returns string "dropdown-contents-create-view" when `showDropdownContentsCreateView` prop is `true`', () => {
+ wrapper.vm.$store.dispatch('toggleDropdownContentsCreateView');
+
+ expect(wrapper.vm.dropdownContentsView).toBe('dropdown-contents-create-view');
+ });
+
+ it('returns string "dropdown-contents-labels-view" when `showDropdownContentsCreateView` prop is `false`', () => {
+ expect(wrapper.vm.dropdownContentsView).toBe('dropdown-contents-labels-view');
+ });
+ });
+ });
+
+ describe('template', () => {
+ it('renders component container element with class `labels-select-dropdown-contents`', () => {
+ expect(wrapper.attributes('class')).toContain('labels-select-dropdown-contents');
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_title_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_title_spec.js
new file mode 100644
index 00000000000..c1d9be7393c
--- /dev/null
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_title_spec.js
@@ -0,0 +1,61 @@
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+
+import { GlButton, GlLoadingIcon } from '@gitlab/ui';
+import DropdownTitle from '~/vue_shared/components/sidebar/labels_select_vue/dropdown_title.vue';
+
+import labelsSelectModule from '~/vue_shared/components/sidebar/labels_select_vue/store';
+
+import { mockConfig } from './mock_data';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+const createComponent = (initialState = mockConfig) => {
+ const store = new Vuex.Store(labelsSelectModule());
+
+ store.dispatch('setInitialState', initialState);
+
+ return shallowMount(DropdownTitle, {
+ localVue,
+ store,
+ propsData: {
+ labelsSelectInProgress: false,
+ },
+ });
+};
+
+describe('DropdownTitle', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('template', () => {
+ it('renders component container element with string "Labels"', () => {
+ expect(wrapper.text()).toContain('Labels');
+ });
+
+ it('renders edit link', () => {
+ const editBtnEl = wrapper.find(GlButton);
+
+ expect(editBtnEl.exists()).toBe(true);
+ expect(editBtnEl.text()).toBe('Edit');
+ });
+
+ it('renders loading icon element when `labelsSelectInProgress` prop is true', () => {
+ wrapper.setProps({
+ labelsSelectInProgress: true,
+ });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find(GlLoadingIcon).isVisible()).toBe(true);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_value_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_value_spec.js
new file mode 100644
index 00000000000..70311f8235f
--- /dev/null
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_value_spec.js
@@ -0,0 +1,84 @@
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+
+import { GlLabel } from '@gitlab/ui';
+import DropdownValue from '~/vue_shared/components/sidebar/labels_select_vue/dropdown_value.vue';
+
+import labelsSelectModule from '~/vue_shared/components/sidebar/labels_select_vue/store';
+
+import { mockConfig, mockRegularLabel, mockScopedLabel } from './mock_data';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+const createComponent = (initialState = mockConfig, slots = {}) => {
+ const store = new Vuex.Store(labelsSelectModule());
+
+ store.dispatch('setInitialState', initialState);
+
+ return shallowMount(DropdownValue, {
+ localVue,
+ store,
+ slots,
+ });
+};
+
+describe('DropdownValue', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('methods', () => {
+ describe('labelFilterUrl', () => {
+ it('returns a label filter URL based on provided label param', () => {
+ expect(wrapper.vm.labelFilterUrl(mockRegularLabel)).toBe(
+ '/gitlab-org/my-project/issues?label_name[]=Foo%20Label',
+ );
+ });
+ });
+
+ describe('scopedLabel', () => {
+ it('returns `true` when provided label param is a scoped label', () => {
+ expect(wrapper.vm.scopedLabel(mockScopedLabel)).toBe(true);
+ });
+
+ it('returns `false` when provided label param is a regular label', () => {
+ expect(wrapper.vm.scopedLabel(mockRegularLabel)).toBe(false);
+ });
+ });
+ });
+
+ describe('template', () => {
+ it('renders class `has-labels` on component container element when `selectedLabels` is not empty', () => {
+ expect(wrapper.attributes('class')).toContain('has-labels');
+ });
+
+ it('renders element containing `None` when `selectedLabels` is empty', () => {
+ const wrapperNoLabels = createComponent(
+ {
+ ...mockConfig,
+ selectedLabels: [],
+ },
+ {
+ default: 'None',
+ },
+ );
+ const noneEl = wrapperNoLabels.find('span.text-secondary');
+
+ expect(noneEl.exists()).toBe(true);
+ expect(noneEl.text()).toBe('None');
+
+ wrapperNoLabels.destroy();
+ });
+
+ it('renders labels when `selectedLabels` is not empty', () => {
+ expect(wrapper.findAll(GlLabel).length).toBe(2);
+ });
+ });
+});
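
The two method specs above fix the contract for `labelFilterUrl` and `scopedLabel` without including the component itself. Based on the mock data (a scoped label titled `Foo::Bar` and a `labelsFilterBasePath` of `/gitlab-org/my-project/issues`), a `methods` fragment along these lines would satisfy them; the `::` convention is an assumption here, not something this diff states.

// Sketch, not part of the diff: methods consistent with the expectations above.
labelFilterUrl(label) {
  return `${this.labelsFilterBasePath}?label_name[]=${encodeURIComponent(label.title)}`;
},
scopedLabel(label) {
  return this.allowScopedLabels && label.title.includes('::');
},
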
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js
new file mode 100644
index 00000000000..126fd5438c4
--- /dev/null
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js
@@ -0,0 +1,127 @@
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+
+import LabelsSelectRoot from '~/vue_shared/components/sidebar/labels_select_vue/labels_select_root.vue';
+import DropdownTitle from '~/vue_shared/components/sidebar/labels_select_vue/dropdown_title.vue';
+import DropdownValue from '~/vue_shared/components/sidebar/labels_select_vue/dropdown_value.vue';
+import DropdownValueCollapsed from '~/vue_shared/components/sidebar/labels_select/dropdown_value_collapsed.vue';
+import DropdownButton from '~/vue_shared/components/sidebar/labels_select_vue/dropdown_button.vue';
+import DropdownContents from '~/vue_shared/components/sidebar/labels_select_vue/dropdown_contents.vue';
+
+import labelsSelectModule from '~/vue_shared/components/sidebar/labels_select_vue/store';
+
+import { mockConfig } from './mock_data';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+const createComponent = (config = mockConfig, slots = {}) =>
+ shallowMount(LabelsSelectRoot, {
+ localVue,
+ slots,
+ store: new Vuex.Store(labelsSelectModule()),
+ propsData: config,
+ });
+
+describe('LabelsSelectRoot', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('methods', () => {
+ describe('handleVuexActionDispatch', () => {
+      it('calls `handleDropdownClose` when param `action.type` is `toggleDropdownContents` and state has `showDropdownButton` & `showDropdownContents` props set to `false`', () => {
+ jest.spyOn(wrapper.vm, 'handleDropdownClose').mockImplementation();
+
+ wrapper.vm.handleVuexActionDispatch(
+ { type: 'toggleDropdownContents' },
+ {
+ showDropdownButton: false,
+ showDropdownContents: false,
+ labels: [{ id: 1 }, { id: 2, touched: true }],
+ },
+ );
+
+ expect(wrapper.vm.handleDropdownClose).toHaveBeenCalledWith(
+ expect.arrayContaining([
+ {
+ id: 2,
+ touched: true,
+ },
+ ]),
+ );
+ });
+ });
+
+ describe('handleDropdownClose', () => {
+ it('emits `updateSelectedLabels` & `onDropdownClose` events on component when provided `labels` param is not empty', () => {
+ wrapper.vm.handleDropdownClose([{ id: 1 }, { id: 2 }]);
+
+ expect(wrapper.emitted().updateSelectedLabels).toBeTruthy();
+ expect(wrapper.emitted().onDropdownClose).toBeTruthy();
+ });
+
+ it('emits only `onDropdownClose` event on component when provided `labels` param is empty', () => {
+ wrapper.vm.handleDropdownClose([]);
+
+ expect(wrapper.emitted().updateSelectedLabels).toBeFalsy();
+ expect(wrapper.emitted().onDropdownClose).toBeTruthy();
+ });
+ });
+
+ describe('handleCollapsedValueClick', () => {
+ it('emits `toggleCollapse` event on component', () => {
+ wrapper.vm.handleCollapsedValueClick();
+
+ expect(wrapper.emitted().toggleCollapse).toBeTruthy();
+ });
+ });
+ });
+
+ describe('template', () => {
+ it('renders component with classes `labels-select-wrapper position-relative`', () => {
+ expect(wrapper.attributes('class')).toContain('labels-select-wrapper position-relative');
+ });
+
+ it('renders `dropdown-value-collapsed` component when `allowLabelCreate` prop is `true`', () => {
+ expect(wrapper.find(DropdownValueCollapsed).exists()).toBe(true);
+ });
+
+ it('renders `dropdown-title` component', () => {
+ expect(wrapper.find(DropdownTitle).exists()).toBe(true);
+ });
+
+ it('renders `dropdown-value` component with slot when `showDropdownButton` prop is `false`', () => {
+ const wrapperDropdownValue = createComponent(mockConfig, {
+ default: 'None',
+ });
+
+ const valueComp = wrapperDropdownValue.find(DropdownValue);
+
+ expect(valueComp.exists()).toBe(true);
+ expect(valueComp.text()).toBe('None');
+
+ wrapperDropdownValue.destroy();
+ });
+
+ it('renders `dropdown-button` component when `showDropdownButton` prop is `true`', () => {
+ wrapper.vm.$store.dispatch('toggleDropdownButton');
+
+ expect(wrapper.find(DropdownButton).exists()).toBe(true);
+ });
+
+    it('renders `dropdown-contents` component when `showDropdownButton` & `showDropdownContents` props are `true`', () => {
+ wrapper.vm.$store.dispatch('toggleDropdownContents');
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find(DropdownContents).exists()).toBe(true);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/mock_data.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/mock_data.js
new file mode 100644
index 00000000000..a863cddbaee
--- /dev/null
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/mock_data.js
@@ -0,0 +1,66 @@
+export const mockRegularLabel = {
+ id: 26,
+ title: 'Foo Label',
+ description: 'Foobar',
+ color: '#BADA55',
+ textColor: '#FFFFFF',
+};
+
+export const mockScopedLabel = {
+ id: 27,
+ title: 'Foo::Bar',
+ description: 'Foobar',
+ color: '#0033CC',
+ textColor: '#FFFFFF',
+};
+
+export const mockLabels = [
+ mockRegularLabel,
+ mockScopedLabel,
+ {
+ id: 28,
+ title: 'Bug',
+ description: 'Label for bugs',
+ color: '#FF0000',
+ textColor: '#FFFFFF',
+ },
+];
+
+export const mockConfig = {
+ allowLabelEdit: true,
+ allowLabelCreate: true,
+ allowScopedLabels: true,
+ labelsListTitle: 'Assign labels',
+ labelsCreateTitle: 'Create label',
+ dropdownOnly: false,
+ selectedLabels: [mockRegularLabel, mockScopedLabel],
+ labelsSelectInProgress: false,
+ labelsFetchPath: '/gitlab-org/my-project/-/labels.json',
+ labelsManagePath: '/gitlab-org/my-project/-/labels',
+ labelsFilterBasePath: '/gitlab-org/my-project/issues',
+ scopedLabelsDocumentationPath: '/help/user/project/labels.md#scoped-labels-premium',
+};
+
+export const mockSuggestedColors = {
+ '#0033CC': 'UA blue',
+ '#428BCA': 'Moderate blue',
+ '#44AD8E': 'Lime green',
+ '#A8D695': 'Feijoa',
+ '#5CB85C': 'Slightly desaturated green',
+ '#69D100': 'Bright green',
+ '#004E00': 'Very dark lime green',
+ '#34495E': 'Very dark desaturated blue',
+ '#7F8C8D': 'Dark grayish cyan',
+ '#A295D6': 'Slightly desaturated blue',
+ '#5843AD': 'Dark moderate blue',
+ '#8E44AD': 'Dark moderate violet',
+ '#FFECDB': 'Very pale orange',
+ '#AD4363': 'Dark moderate pink',
+ '#D10069': 'Strong pink',
+ '#CC0033': 'Strong red',
+ '#FF0000': 'Pure red',
+ '#D9534F': 'Soft red',
+ '#D1D100': 'Strong yellow',
+ '#F0AD4E': 'Soft orange',
+ '#AD8D43': 'Dark moderate orange',
+};
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js
new file mode 100644
index 00000000000..6e2363ba96f
--- /dev/null
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js
@@ -0,0 +1,276 @@
+import MockAdapter from 'axios-mock-adapter';
+
+import defaultState from '~/vue_shared/components/sidebar/labels_select_vue/store/state';
+import * as types from '~/vue_shared/components/sidebar/labels_select_vue/store/mutation_types';
+import * as actions from '~/vue_shared/components/sidebar/labels_select_vue/store/actions';
+
+import testAction from 'helpers/vuex_action_helper';
+import axios from '~/lib/utils/axios_utils';
+
+describe('LabelsSelect Actions', () => {
+ let state;
+ const mockInitialState = {
+ labels: [],
+ selectedLabels: [],
+ };
+
+ beforeEach(() => {
+ state = Object.assign({}, defaultState());
+ });
+
+ describe('setInitialState', () => {
+ it('sets initial store state', done => {
+ testAction(
+ actions.setInitialState,
+ mockInitialState,
+ state,
+ [{ type: types.SET_INITIAL_STATE, payload: mockInitialState }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('toggleDropdownButton', () => {
+ it('toggles dropdown button', done => {
+ testAction(
+ actions.toggleDropdownButton,
+ {},
+ state,
+ [{ type: types.TOGGLE_DROPDOWN_BUTTON }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('toggleDropdownContents', () => {
+ it('toggles dropdown contents', done => {
+ testAction(
+ actions.toggleDropdownContents,
+ {},
+ state,
+ [{ type: types.TOGGLE_DROPDOWN_CONTENTS }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('toggleDropdownContentsCreateView', () => {
+ it('toggles dropdown create view', done => {
+ testAction(
+ actions.toggleDropdownContentsCreateView,
+ {},
+ state,
+ [{ type: types.TOGGLE_DROPDOWN_CONTENTS_CREATE_VIEW }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('requestLabels', () => {
+ it('sets value of `state.labelsFetchInProgress` to `true`', done => {
+ testAction(actions.requestLabels, {}, state, [{ type: types.REQUEST_LABELS }], [], done);
+ });
+ });
+
+ describe('receiveLabelsSuccess', () => {
+ it('sets provided labels to `state.labels`', done => {
+ const labels = [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }];
+
+ testAction(
+ actions.receiveLabelsSuccess,
+ labels,
+ state,
+ [{ type: types.RECEIVE_SET_LABELS_SUCCESS, payload: labels }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveLabelsFailure', () => {
+ beforeEach(() => {
+ setFixtures('<div class="flash-container"></div>');
+ });
+
+ it('sets value `state.labelsFetchInProgress` to `false`', done => {
+ testAction(
+ actions.receiveLabelsFailure,
+ {},
+ state,
+ [{ type: types.RECEIVE_SET_LABELS_FAILURE }],
+ [],
+ done,
+ );
+ });
+
+ it('shows flash error', () => {
+ actions.receiveLabelsFailure({ commit: () => {} });
+
+ expect(document.querySelector('.flash-container .flash-text').innerText.trim()).toBe(
+ 'Error fetching labels.',
+ );
+ });
+ });
+
+ describe('fetchLabels', () => {
+ let mock;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ state.labelsFetchPath = 'labels.json';
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('on success', () => {
+ it('dispatches `requestLabels` & `receiveLabelsSuccess` actions', done => {
+ const labels = [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }];
+ mock.onGet(/labels.json/).replyOnce(200, labels);
+
+ testAction(
+ actions.fetchLabels,
+ {},
+ state,
+ [],
+ [{ type: 'requestLabels' }, { type: 'receiveLabelsSuccess', payload: labels }],
+ done,
+ );
+ });
+ });
+
+ describe('on failure', () => {
+ it('dispatches `requestLabels` & `receiveLabelsFailure` actions', done => {
+ mock.onGet(/labels.json/).replyOnce(500, {});
+
+ testAction(
+ actions.fetchLabels,
+ {},
+ state,
+ [],
+ [{ type: 'requestLabels' }, { type: 'receiveLabelsFailure' }],
+ done,
+ );
+ });
+ });
+ });
+
+ describe('requestCreateLabel', () => {
+ it('sets value `state.labelCreateInProgress` to `true`', done => {
+ testAction(
+ actions.requestCreateLabel,
+ {},
+ state,
+ [{ type: types.REQUEST_CREATE_LABEL }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveCreateLabelSuccess', () => {
+ it('sets value `state.labelCreateInProgress` to `false`', done => {
+ testAction(
+ actions.receiveCreateLabelSuccess,
+ {},
+ state,
+ [{ type: types.RECEIVE_CREATE_LABEL_SUCCESS }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveCreateLabelFailure', () => {
+ beforeEach(() => {
+ setFixtures('<div class="flash-container"></div>');
+ });
+
+ it('sets value `state.labelCreateInProgress` to `false`', done => {
+ testAction(
+ actions.receiveCreateLabelFailure,
+ {},
+ state,
+ [{ type: types.RECEIVE_CREATE_LABEL_FAILURE }],
+ [],
+ done,
+ );
+ });
+
+ it('shows flash error', () => {
+ actions.receiveCreateLabelFailure({ commit: () => {} });
+
+ expect(document.querySelector('.flash-container .flash-text').innerText.trim()).toBe(
+ 'Error creating label.',
+ );
+ });
+ });
+
+ describe('createLabel', () => {
+ let mock;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ state.labelsManagePath = 'labels.json';
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('on success', () => {
+ it('dispatches `requestCreateLabel`, `receiveCreateLabelSuccess` & `toggleDropdownContentsCreateView` actions', done => {
+ const label = { id: 1 };
+ mock.onPost(/labels.json/).replyOnce(200, label);
+
+ testAction(
+ actions.createLabel,
+ {},
+ state,
+ [],
+ [
+ { type: 'requestCreateLabel' },
+ { type: 'receiveCreateLabelSuccess' },
+ { type: 'toggleDropdownContentsCreateView' },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('on failure', () => {
+ it('dispatches `requestCreateLabel` & `receiveCreateLabelFailure` actions', done => {
+ mock.onPost(/labels.json/).replyOnce(500, {});
+
+ testAction(
+ actions.createLabel,
+ {},
+ state,
+ [],
+ [{ type: 'requestCreateLabel' }, { type: 'receiveCreateLabelFailure' }],
+ done,
+ );
+ });
+ });
+ });
+
+ describe('updateSelectedLabels', () => {
+ it('updates `state.labels` based on provided `labels` param', done => {
+ const labels = [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }];
+
+ testAction(
+ actions.updateSelectedLabels,
+ labels,
+ state,
+ [{ type: types.UPDATE_SELECTED_LABELS, payload: { labels } }],
+ [],
+ done,
+ );
+ });
+ });
+});
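
Every case above goes through `testAction` from `helpers/vuex_action_helper`, which runs an action with stubbed `commit`/`dispatch` functions and compares what was committed and dispatched against the expected arrays. GitLab's real helper does more (promise handling, stricter count checks); the sketch below only illustrates the `(action, payload, state, expectedMutations, expectedActions, done)` contract relied on here.

// Sketch, not part of the diff: a minimal stand-in for helpers/vuex_action_helper.
const testAction = (action, payload, state, expectedMutations, expectedActions, done) => {
  const committed = [];
  const dispatched = [];

  const commit = (type, data) =>
    committed.push(data !== undefined ? { type, payload: data } : { type });
  const dispatch = (type, data) =>
    dispatched.push(data !== undefined ? { type, payload: data } : { type });

  // run the action, wait for any returned promise, then compare the recorded calls
  Promise.resolve(action({ commit, dispatch, state }, payload)).then(() => {
    expect(committed).toEqual(expectedMutations);
    expect(dispatched).toEqual(expectedActions);
    done();
  });
};
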
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/getters_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/getters_spec.js
new file mode 100644
index 00000000000..bfceaa0828b
--- /dev/null
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/getters_spec.js
@@ -0,0 +1,31 @@
+import * as getters from '~/vue_shared/components/sidebar/labels_select_vue/store/getters';
+
+describe('LabelsSelect Getters', () => {
+ describe('dropdownButtonText', () => {
+ it('returns string "Label" when state.labels has no selected labels', () => {
+ const labels = [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }];
+
+ expect(getters.dropdownButtonText({ labels })).toBe('Label');
+ });
+
+ it('returns label title when state.labels has only 1 label', () => {
+ const labels = [{ id: 1, title: 'Foobar', set: true }];
+
+ expect(getters.dropdownButtonText({ labels })).toBe('Foobar');
+ });
+
+ it('returns first label title and remaining labels count when state.labels has more than 1 label', () => {
+ const labels = [{ id: 1, title: 'Foo', set: true }, { id: 2, title: 'Bar', set: true }];
+
+ expect(getters.dropdownButtonText({ labels })).toBe('Foo +1 more');
+ });
+ });
+
+ describe('selectedLabelsList', () => {
+ it('returns array of IDs of all labels within `state.selectedLabels`', () => {
+ const selectedLabels = [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }];
+
+ expect(getters.selectedLabelsList({ selectedLabels })).toEqual([1, 2, 3, 4]);
+ });
+ });
+});
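
The store getters under test are not included in this commit. Implementations consistent with the three `dropdownButtonText` cases above (no selection, a single selected label, multiple selected labels) and with `selectedLabelsList` could look roughly like this; treat it as an illustration only.

// Sketch, not part of the diff: getters consistent with the expectations above.
export const dropdownButtonText = state => {
  const selected = state.labels.filter(label => label.set);

  if (!selected.length) return 'Label';
  if (selected.length === 1) return selected[0].title;
  return `${selected[0].title} +${selected.length - 1} more`;
};

export const selectedLabelsList = state => state.selectedLabels.map(label => label.id);
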
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js
new file mode 100644
index 00000000000..f6ca98fcc71
--- /dev/null
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js
@@ -0,0 +1,172 @@
+import mutations from '~/vue_shared/components/sidebar/labels_select_vue/store/mutations';
+import * as types from '~/vue_shared/components/sidebar/labels_select_vue/store/mutation_types';
+
+describe('LabelsSelect Mutations', () => {
+ describe(`${types.SET_INITIAL_STATE}`, () => {
+ it('initializes provided props to store state', () => {
+ const state = {};
+ mutations[types.SET_INITIAL_STATE](state, {
+ labels: 'foo',
+ });
+
+ expect(state.labels).toEqual('foo');
+ });
+ });
+
+ describe(`${types.TOGGLE_DROPDOWN_BUTTON}`, () => {
+ it('toggles value of `state.showDropdownButton`', () => {
+ const state = {
+ showDropdownButton: false,
+ };
+ mutations[types.TOGGLE_DROPDOWN_BUTTON](state);
+
+ expect(state.showDropdownButton).toBe(true);
+ });
+ });
+
+ describe(`${types.TOGGLE_DROPDOWN_CONTENTS}`, () => {
+ it('toggles value of `state.showDropdownButton` when `state.dropdownOnly` is false', () => {
+ const state = {
+ dropdownOnly: false,
+ showDropdownButton: false,
+ };
+ mutations[types.TOGGLE_DROPDOWN_CONTENTS](state);
+
+ expect(state.showDropdownButton).toBe(true);
+ });
+
+ it('toggles value of `state.showDropdownContents`', () => {
+ const state = {
+ showDropdownContents: false,
+ };
+ mutations[types.TOGGLE_DROPDOWN_CONTENTS](state);
+
+ expect(state.showDropdownContents).toBe(true);
+ });
+
+ it('sets value of `state.showDropdownContentsCreateView` to `false` when `showDropdownContents` is true', () => {
+ const state = {
+ showDropdownContents: false,
+ showDropdownContentsCreateView: true,
+ };
+ mutations[types.TOGGLE_DROPDOWN_CONTENTS](state);
+
+ expect(state.showDropdownContentsCreateView).toBe(false);
+ });
+ });
+
+ describe(`${types.TOGGLE_DROPDOWN_CONTENTS_CREATE_VIEW}`, () => {
+ it('toggles value of `state.showDropdownContentsCreateView`', () => {
+ const state = {
+ showDropdownContentsCreateView: false,
+ };
+ mutations[types.TOGGLE_DROPDOWN_CONTENTS_CREATE_VIEW](state);
+
+ expect(state.showDropdownContentsCreateView).toBe(true);
+ });
+ });
+
+ describe(`${types.REQUEST_LABELS}`, () => {
+ it('sets value of `state.labelsFetchInProgress` to true', () => {
+ const state = {
+ labelsFetchInProgress: false,
+ };
+ mutations[types.REQUEST_LABELS](state);
+
+ expect(state.labelsFetchInProgress).toBe(true);
+ });
+ });
+
+ describe(`${types.RECEIVE_SET_LABELS_SUCCESS}`, () => {
+ const selectedLabels = [{ id: 2 }, { id: 4 }];
+ const labels = [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }];
+
+ it('sets value of `state.labelsFetchInProgress` to false', () => {
+ const state = {
+ selectedLabels,
+ labelsFetchInProgress: true,
+ };
+ mutations[types.RECEIVE_SET_LABELS_SUCCESS](state, labels);
+
+ expect(state.labelsFetchInProgress).toBe(false);
+ });
+
+ it('sets provided `labels` to `state.labels` along with `set` prop based on `state.selectedLabels`', () => {
+ const selectedLabelIds = selectedLabels.map(label => label.id);
+ const state = {
+ selectedLabels,
+ labelsFetchInProgress: true,
+ };
+ mutations[types.RECEIVE_SET_LABELS_SUCCESS](state, labels);
+
+ state.labels.forEach(label => {
+ if (selectedLabelIds.includes(label.id)) {
+ expect(label.set).toBe(true);
+ }
+ });
+ });
+ });
+
+ describe(`${types.RECEIVE_SET_LABELS_FAILURE}`, () => {
+ it('sets value of `state.labelsFetchInProgress` to false', () => {
+ const state = {
+ labelsFetchInProgress: true,
+ };
+ mutations[types.RECEIVE_SET_LABELS_FAILURE](state);
+
+ expect(state.labelsFetchInProgress).toBe(false);
+ });
+ });
+
+ describe(`${types.REQUEST_CREATE_LABEL}`, () => {
+ it('sets value of `state.labelCreateInProgress` to true', () => {
+ const state = {
+ labelCreateInProgress: false,
+ };
+ mutations[types.REQUEST_CREATE_LABEL](state);
+
+ expect(state.labelCreateInProgress).toBe(true);
+ });
+ });
+
+ describe(`${types.RECEIVE_CREATE_LABEL_SUCCESS}`, () => {
+ it('sets value of `state.labelCreateInProgress` to false', () => {
+ const state = {
+ labelCreateInProgress: false,
+ };
+ mutations[types.RECEIVE_CREATE_LABEL_SUCCESS](state);
+
+ expect(state.labelCreateInProgress).toBe(false);
+ });
+ });
+
+ describe(`${types.RECEIVE_CREATE_LABEL_FAILURE}`, () => {
+ it('sets value of `state.labelCreateInProgress` to false', () => {
+ const state = {
+ labelCreateInProgress: false,
+ };
+ mutations[types.RECEIVE_CREATE_LABEL_FAILURE](state);
+
+ expect(state.labelCreateInProgress).toBe(false);
+ });
+ });
+
+ describe(`${types.UPDATE_SELECTED_LABELS}`, () => {
+ const labels = [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }];
+
+ it('updates `state.labels` to include `touched` and `set` props based on provided `labels` param', () => {
+ const updatedLabelIds = [2, 4];
+ const state = {
+ labels,
+ };
+ mutations[types.UPDATE_SELECTED_LABELS](state, { labels });
+
+ state.labels.forEach(label => {
+ if (updatedLabelIds.includes(label.id)) {
+ expect(label.touched).toBe(true);
+ expect(label.set).toBe(true);
+ }
+ });
+ });
+ });
+});
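
As with the getters, the mutations file itself is not part of this diff. For the most involved case, `TOGGLE_DROPDOWN_CONTENTS`, a mutation consistent with the three expectations above (toggle the button unless `dropdownOnly`, toggle the contents, and fall back to the labels list view whenever the contents are shown) would be roughly:

// Sketch, not part of the diff: one entry of the mutations object, matching the
// TOGGLE_DROPDOWN_CONTENTS specs above.
[types.TOGGLE_DROPDOWN_CONTENTS](state) {
  if (!state.dropdownOnly) {
    state.showDropdownButton = !state.showDropdownButton;
  }

  state.showDropdownContents = !state.showDropdownContents;

  // always reopen on the labels list view, not the create view
  if (state.showDropdownContents) {
    state.showDropdownContentsCreateView = false;
  }
},
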
diff --git a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
index a8bbc80d2df..a2e2d2447d5 100644
--- a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
+++ b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
@@ -1,4 +1,4 @@
-import { GlSkeletonLoading } from '@gitlab/ui';
+import { GlSkeletonLoading, GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import UserPopover from '~/vue_shared/components/user_popover/user_popover.vue';
import Icon from '~/vue_shared/components/icon.vue';
@@ -11,6 +11,7 @@ const DEFAULT_PROPS = {
location: 'Vienna',
bio: null,
organization: null,
+ jobTitle: null,
status: null,
},
};
@@ -39,6 +40,9 @@ describe('User Popover Component', () => {
target: findTarget(),
...props,
},
+ stubs: {
+ 'gl-sprintf': GlSprintf,
+ },
...options,
});
};
@@ -56,6 +60,7 @@ describe('User Popover Component', () => {
location: null,
bio: null,
organization: null,
+ jobTitle: null,
status: null,
},
},
@@ -85,51 +90,125 @@ describe('User Popover Component', () => {
});
describe('job data', () => {
- it('should show only bio if no organization is available', () => {
- const user = { ...DEFAULT_PROPS.user, bio: 'Engineer' };
+ const findWorkInformation = () => wrapper.find({ ref: 'workInformation' });
+ const findBio = () => wrapper.find({ ref: 'bio' });
+
+ it('should show only bio if organization and job title are not available', () => {
+ const user = { ...DEFAULT_PROPS.user, bio: 'My super interesting bio' };
createWrapper({ user });
- expect(wrapper.text()).toContain('Engineer');
+ expect(findBio().text()).toBe('My super interesting bio');
+ expect(findWorkInformation().exists()).toBe(false);
});
- it('should show only organization if no bio is available', () => {
+ it('should show only organization if job title is not available', () => {
const user = { ...DEFAULT_PROPS.user, organization: 'GitLab' };
createWrapper({ user });
- expect(wrapper.text()).toContain('GitLab');
+ expect(findWorkInformation().text()).toBe('GitLab');
+ });
+
+ it('should show only job title if organization is not available', () => {
+ const user = { ...DEFAULT_PROPS.user, jobTitle: 'Frontend Engineer' };
+
+ createWrapper({ user });
+
+ expect(findWorkInformation().text()).toBe('Frontend Engineer');
+ });
+
+ it('should show organization and job title if they are both available', () => {
+ const user = {
+ ...DEFAULT_PROPS.user,
+ organization: 'GitLab',
+ jobTitle: 'Frontend Engineer',
+ };
+
+ createWrapper({ user });
+
+ expect(findWorkInformation().text()).toBe('Frontend Engineer at GitLab');
+ });
+
+ it('should display bio and job info in separate lines', () => {
+ const user = {
+ ...DEFAULT_PROPS.user,
+ bio: 'My super interesting bio',
+ organization: 'GitLab',
+ };
+
+ createWrapper({ user });
+
+ expect(findBio().text()).toBe('My super interesting bio');
+ expect(findWorkInformation().text()).toBe('GitLab');
});
- it('should display bio and organization in separate lines', () => {
- const user = { ...DEFAULT_PROPS.user, bio: 'Engineer', organization: 'GitLab' };
+ it('should not encode special characters in bio', () => {
+ const user = {
+ ...DEFAULT_PROPS.user,
+ bio: 'I like <html> & CSS',
+ };
createWrapper({ user });
- expect(wrapper.find('.js-bio').text()).toContain('Engineer');
- expect(wrapper.find('.js-organization').text()).toContain('GitLab');
+ expect(findBio().text()).toBe('I like <html> & CSS');
});
- it('should not encode special characters in bio and organization', () => {
+ it('should not encode special characters in organization', () => {
const user = {
...DEFAULT_PROPS.user,
- bio: 'Manager & Team Lead',
organization: 'Me & my <funky> Company',
};
createWrapper({ user });
- expect(wrapper.find('.js-bio').text()).toContain('Manager & Team Lead');
- expect(wrapper.find('.js-organization').text()).toContain('Me & my <funky> Company');
+ expect(findWorkInformation().text()).toBe('Me & my <funky> Company');
+ });
+
+ it('should not encode special characters in job title', () => {
+ const user = {
+ ...DEFAULT_PROPS.user,
+ jobTitle: 'Manager & Team Lead',
+ };
+
+ createWrapper({ user });
+
+ expect(findWorkInformation().text()).toBe('Manager & Team Lead');
+ });
+
+ it('should not encode special characters when both job title and organization are set', () => {
+ const user = {
+ ...DEFAULT_PROPS.user,
+ jobTitle: 'Manager & Team Lead',
+ organization: 'Me & my <funky> Company',
+ };
+
+ createWrapper({ user });
+
+ expect(findWorkInformation().text()).toBe('Manager & Team Lead at Me & my <funky> Company');
});
it('shows icon for bio', () => {
+ const user = {
+ ...DEFAULT_PROPS.user,
+ bio: 'My super interesting bio',
+ };
+
+ createWrapper({ user });
+
expect(wrapper.findAll(Icon).filter(icon => icon.props('name') === 'profile').length).toEqual(
1,
);
});
it('shows icon for organization', () => {
+ const user = {
+ ...DEFAULT_PROPS.user,
+ organization: 'GitLab',
+ };
+
+ createWrapper({ user });
+
expect(wrapper.findAll(Icon).filter(icon => icon.props('name') === 'work').length).toEqual(1);
});
});
diff --git a/spec/frontend/wikis_spec.js b/spec/frontend/wikis_spec.js
index b2475488d97..1d17c8b0777 100644
--- a/spec/frontend/wikis_spec.js
+++ b/spec/frontend/wikis_spec.js
@@ -8,11 +8,21 @@ describe('Wikis', () => {
}">
<input type="text" id="wiki_title" value="My title" />
<input type="text" id="wiki_message" />
- </form>`;
+ <select class="form-control select-control" name="wiki[format]" id="wiki_format">
+ <option value="markdown">Markdown</option>
+ <option selected="selected" value="rdoc">RDoc</option>
+ <option value="asciidoc">AsciiDoc</option>
+ <option value="org">Org</option>
+ </select>
+ <code class="js-markup-link-example">{Link title}[link:page-slug]</code>
+ </form>
+ `;
let wikis;
let titleInput;
let messageInput;
+ let changeFormatSelect;
+ let linkExample;
describe('when the wiki page is being created', () => {
const formHtmlFixture = editFormHtmlFixture({ newPage: true });
@@ -22,6 +32,8 @@ describe('Wikis', () => {
titleInput = document.getElementById('wiki_title');
messageInput = document.getElementById('wiki_message');
+ changeFormatSelect = document.querySelector('#wiki_format');
+ linkExample = document.querySelector('.js-markup-link-example');
wikis = new Wikis();
});
@@ -69,6 +81,19 @@ describe('Wikis', () => {
expect(messageInput.value).toEqual('Update My title');
});
+
+ it.each`
+ value | text
+ ${'markdown'} | ${'[Link Title](page-slug)'}
+ ${'rdoc'} | ${'{Link title}[link:page-slug]'}
+ ${'asciidoc'} | ${'link:page-slug[Link title]'}
+ ${'org'} | ${'[[page-slug]]'}
+ `('updates a message when value=$value is selected', ({ value, text }) => {
+ changeFormatSelect.value = value;
+ changeFormatSelect.dispatchEvent(new Event('change'));
+
+ expect(linkExample.innerHTML).toBe(text);
+ });
});
});
});
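
The new `it.each` block above uses Jest's tagged-template table: one test is generated per row, `$value` is interpolated into the title, and each row is destructured into the callback. The handler it exercises lives in the `Wikis` class, which is not shown in this commit; the wiring it implies is roughly the sketch below, with the lookup strings taken from the expected values in the table and everything else an assumption.

// Sketch, not part of the diff: roughly what the Wikis class is expected to wire up.
const MARKUP_LINK_EXAMPLES = {
  markdown: '[Link Title](page-slug)',
  rdoc: '{Link title}[link:page-slug]',
  asciidoc: 'link:page-slug[Link title]',
  org: '[[page-slug]]',
};

const formatSelect = document.querySelector('#wiki_format');
const linkExample = document.querySelector('.js-markup-link-example');

// swap the link example whenever the wiki format changes
formatSelect.addEventListener('change', e => {
  linkExample.innerHTML = MARKUP_LINK_EXAMPLES[e.target.value];
});
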
diff --git a/spec/graphql/features/authorization_spec.rb b/spec/graphql/features/authorization_spec.rb
index 5ef1bced179..44f47e6c739 100644
--- a/spec/graphql/features/authorization_spec.rb
+++ b/spec/graphql/features/authorization_spec.rb
@@ -5,8 +5,7 @@ require 'spec_helper'
describe 'Gitlab::Graphql::Authorization' do
include GraphqlHelpers
- set(:user) { create(:user) }
-
+ let_it_be(:user) { create(:user) }
let(:permission_single) { :foo }
let(:permission_collection) { [:foo, :bar] }
let(:test_object) { double(name: 'My name') }
diff --git a/spec/graphql/gitlab_schema_spec.rb b/spec/graphql/gitlab_schema_spec.rb
index 2ec477fc494..d0eb0475879 100644
--- a/spec/graphql/gitlab_schema_spec.rb
+++ b/spec/graphql/gitlab_schema_spec.rb
@@ -3,6 +3,7 @@
require 'spec_helper'
describe GitlabSchema do
+ let_it_be(:implementations) { GraphQL::Relay::BaseConnection::CONNECTION_IMPLEMENTATIONS }
let(:user) { build :user }
it 'uses batch loading' do
@@ -33,12 +34,30 @@ describe GitlabSchema do
expect(described_class.query).to eq(::Types::QueryType.to_graphql)
end
- it 'paginates active record relations using `Gitlab::Graphql::Connections::KeysetConnection`' do
- connection = GraphQL::Relay::BaseConnection::CONNECTION_IMPLEMENTATIONS[ActiveRecord::Relation.name]
+ it 'paginates active record relations using `Connections::Keyset::Connection`' do
+ connection = implementations[ActiveRecord::Relation.name]
expect(connection).to eq(Gitlab::Graphql::Connections::Keyset::Connection)
end
+ it 'paginates ExternallyPaginatedArray using `Connections::ExternallyPaginatedArrayConnection`' do
+ connection = implementations[Gitlab::Graphql::ExternallyPaginatedArray.name]
+
+ expect(connection).to eq(Gitlab::Graphql::Connections::ExternallyPaginatedArrayConnection)
+ end
+
+ it 'paginates FilterableArray using `Connections::FilterableArrayConnection`' do
+ connection = implementations[Gitlab::Graphql::FilterableArray.name]
+
+ expect(connection).to eq(Gitlab::Graphql::Connections::FilterableArrayConnection)
+ end
+
+ it 'paginates OffsetActiveRecordRelation using `Pagination::OffsetActiveRecordRelationConnection`' do
+ connection = implementations[Gitlab::Graphql::Pagination::Relations::OffsetActiveRecordRelation.name]
+
+ expect(connection).to eq(Gitlab::Graphql::Pagination::OffsetActiveRecordRelationConnection)
+ end
+
describe '.execute' do
context 'for different types of users' do
context 'when no context' do
diff --git a/spec/graphql/mutations/concerns/mutations/resolves_group_spec.rb b/spec/graphql/mutations/concerns/mutations/resolves_group_spec.rb
index fcc717f83a2..51d3c4f5d6b 100644
--- a/spec/graphql/mutations/concerns/mutations/resolves_group_spec.rb
+++ b/spec/graphql/mutations/concerns/mutations/resolves_group_spec.rb
@@ -11,12 +11,12 @@ describe Mutations::ResolvesGroup do
let(:context) { double }
- subject(:mutation) { mutation_class.new(object: nil, context: context) }
+ subject(:mutation) { mutation_class.new(object: nil, context: context, field: nil) }
it 'uses the GroupsResolver to resolve groups by path' do
group = create(:group)
- expect(Resolvers::GroupResolver).to receive(:new).with(object: nil, context: context).and_call_original
+ expect(Resolvers::GroupResolver).to receive(:new).with(object: nil, context: context, field: nil).and_call_original
expect(mutation.resolve_group(full_path: group.full_path).sync).to eq(group)
end
end
diff --git a/spec/graphql/mutations/concerns/mutations/resolves_issuable_spec.rb b/spec/graphql/mutations/concerns/mutations/resolves_issuable_spec.rb
new file mode 100644
index 00000000000..69db8d016d7
--- /dev/null
+++ b/spec/graphql/mutations/concerns/mutations/resolves_issuable_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Mutations::ResolvesIssuable do
+ let(:mutation_class) do
+ Class.new(Mutations::BaseMutation) do
+ include Mutations::ResolvesIssuable
+ end
+ end
+
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+ let(:context) { { current_user: user } }
+ let(:mutation) { mutation_class.new(object: nil, context: context, field: nil) }
+
+ shared_examples 'resolving an issuable' do |type|
+ context 'when user has access' do
+ let(:source) { type == :merge_request ? 'source_project' : 'project' }
+ let(:issuable) { create(type, author: user, "#{source}" => project) }
+
+ subject { mutation.resolve_issuable(type: type, parent_path: project.full_path, iid: issuable.iid) }
+
+ before do
+ project.add_developer(user)
+ end
+
+ it 'resolves issuable by iid' do
+ result = type == :merge_request ? subject.sync : subject
+ expect(result).to eq(issuable)
+ end
+
+ it 'uses the correct Resolver to resolve issuable' do
+ resolver_class = "Resolvers::#{type.to_s.classify.pluralize}Resolver".constantize
+ resolved_project = mutation.resolve_project(full_path: project.full_path)
+
+ allow(mutation).to receive(:resolve_project)
+ .with(full_path: project.full_path)
+ .and_return(resolved_project)
+
+ expect(resolver_class).to receive(:new)
+ .with(object: resolved_project, context: context, field: nil)
+ .and_call_original
+
+ subject
+ end
+
+ it 'uses the ResolvesProject to resolve project' do
+ expect(Resolvers::ProjectResolver).to receive(:new)
+ .with(object: nil, context: context, field: nil)
+ .and_call_original
+
+ subject
+ end
+
+ it 'returns nil if issuable is not found' do
+ result = mutation.resolve_issuable(type: type, parent_path: project.full_path, iid: "100")
+ result = type == :merge_request ? result.sync : result
+
+ expect(result).to be_nil
+ end
+ end
+ end
+
+ context 'with issues' do
+ it_behaves_like 'resolving an issuable', :issue
+ end
+
+ context 'with merge requests' do
+ it_behaves_like 'resolving an issuable', :merge_request
+ end
+end
diff --git a/spec/graphql/mutations/concerns/mutations/resolves_project_spec.rb b/spec/graphql/mutations/concerns/mutations/resolves_project_spec.rb
index 918e5fb016e..b5c349f6284 100644
--- a/spec/graphql/mutations/concerns/mutations/resolves_project_spec.rb
+++ b/spec/graphql/mutations/concerns/mutations/resolves_project_spec.rb
@@ -11,12 +11,12 @@ describe Mutations::ResolvesProject do
let(:context) { double }
- subject(:mutation) { mutation_class.new(object: nil, context: context) }
+ subject(:mutation) { mutation_class.new(object: nil, context: context, field: nil) }
it 'uses the ProjectsResolver to resolve projects by path' do
project = create(:project)
- expect(Resolvers::ProjectResolver).to receive(:new).with(object: nil, context: context).and_call_original
+ expect(Resolvers::ProjectResolver).to receive(:new).with(object: nil, context: context, field: nil).and_call_original
expect(mutation.resolve_project(full_path: project.full_path).sync).to eq(project)
end
end
diff --git a/spec/graphql/mutations/issues/set_confidential_spec.rb b/spec/graphql/mutations/issues/set_confidential_spec.rb
index a8f1fcdf7f1..6031953c869 100644
--- a/spec/graphql/mutations/issues/set_confidential_spec.rb
+++ b/spec/graphql/mutations/issues/set_confidential_spec.rb
@@ -6,7 +6,7 @@ describe Mutations::Issues::SetConfidential do
let(:issue) { create(:issue) }
let(:user) { create(:user) }
- subject(:mutation) { described_class.new(object: nil, context: { current_user: user }) }
+ subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
describe '#resolve' do
let(:confidential) { true }
diff --git a/spec/graphql/mutations/issues/set_due_date_spec.rb b/spec/graphql/mutations/issues/set_due_date_spec.rb
index b45a7b460cd..73ba11fc551 100644
--- a/spec/graphql/mutations/issues/set_due_date_spec.rb
+++ b/spec/graphql/mutations/issues/set_due_date_spec.rb
@@ -6,7 +6,7 @@ describe Mutations::Issues::SetDueDate do
let(:issue) { create(:issue) }
let(:user) { create(:user) }
- subject(:mutation) { described_class.new(object: nil, context: { current_user: user }) }
+ subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
describe '#resolve' do
let(:due_date) { 2.days.since }
diff --git a/spec/graphql/mutations/issues/update_spec.rb b/spec/graphql/mutations/issues/update_spec.rb
new file mode 100644
index 00000000000..83bd3041cbf
--- /dev/null
+++ b/spec/graphql/mutations/issues/update_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Mutations::Issues::Update do
+ let(:issue) { create(:issue) }
+ let(:user) { create(:user) }
+ let(:expected_attributes) do
+ {
+ title: 'new title',
+ description: 'new description',
+ confidential: true,
+ due_date: Date.tomorrow
+ }
+ end
+ let(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
+ let(:mutated_issue) { subject[:issue] }
+
+ describe '#resolve' do
+ let(:mutation_params) do
+ {
+ project_path: issue.project.full_path,
+ iid: issue.iid
+ }.merge(expected_attributes)
+ end
+
+ subject { mutation.resolve(mutation_params) }
+
+ it 'raises an error if the resource is not accessible to the user' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+
+ context 'when the user can update the issue' do
+ before do
+ issue.project.add_developer(user)
+ end
+
+ it 'updates issue with correct values' do
+ subject
+
+ expect(issue.reload).to have_attributes(expected_attributes)
+ end
+
+ context 'when iid does not exist' do
+ it 'raises resource not available error' do
+ mutation_params[:iid] = 99999
+
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/graphql/mutations/merge_requests/set_assignees_spec.rb b/spec/graphql/mutations/merge_requests/set_assignees_spec.rb
index 2033ab57a0d..d88c5db05c9 100644
--- a/spec/graphql/mutations/merge_requests/set_assignees_spec.rb
+++ b/spec/graphql/mutations/merge_requests/set_assignees_spec.rb
@@ -6,7 +6,7 @@ describe Mutations::MergeRequests::SetAssignees do
let(:merge_request) { create(:merge_request) }
let(:user) { create(:user) }
- subject(:mutation) { described_class.new(object: nil, context: { current_user: user }) }
+ subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
describe '#resolve' do
let(:assignee) { create(:user) }
diff --git a/spec/graphql/mutations/merge_requests/set_labels_spec.rb b/spec/graphql/mutations/merge_requests/set_labels_spec.rb
index f7c04a57f68..f58f35eb6f3 100644
--- a/spec/graphql/mutations/merge_requests/set_labels_spec.rb
+++ b/spec/graphql/mutations/merge_requests/set_labels_spec.rb
@@ -6,7 +6,7 @@ describe Mutations::MergeRequests::SetLabels do
let(:merge_request) { create(:merge_request) }
let(:user) { create(:user) }
- subject(:mutation) { described_class.new(object: nil, context: { current_user: user }) }
+ subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
describe '#resolve' do
let(:label) { create(:label, project: merge_request.project) }
diff --git a/spec/graphql/mutations/merge_requests/set_locked_spec.rb b/spec/graphql/mutations/merge_requests/set_locked_spec.rb
index d35430abff1..12ae1314f22 100644
--- a/spec/graphql/mutations/merge_requests/set_locked_spec.rb
+++ b/spec/graphql/mutations/merge_requests/set_locked_spec.rb
@@ -6,7 +6,7 @@ describe Mutations::MergeRequests::SetLocked do
let(:merge_request) { create(:merge_request) }
let(:user) { create(:user) }
- subject(:mutation) { described_class.new(object: nil, context: { current_user: user }) }
+ subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
describe '#resolve' do
let(:locked) { true }
diff --git a/spec/graphql/mutations/merge_requests/set_milestone_spec.rb b/spec/graphql/mutations/merge_requests/set_milestone_spec.rb
index d79b0a995d7..ad7f2df0842 100644
--- a/spec/graphql/mutations/merge_requests/set_milestone_spec.rb
+++ b/spec/graphql/mutations/merge_requests/set_milestone_spec.rb
@@ -6,7 +6,7 @@ describe Mutations::MergeRequests::SetMilestone do
let(:merge_request) { create(:merge_request) }
let(:user) { create(:user) }
- subject(:mutation) { described_class.new(object: nil, context: { current_user: user }) }
+ subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
describe '#resolve' do
let(:milestone) { create(:milestone, project: merge_request.project) }
diff --git a/spec/graphql/mutations/merge_requests/set_subscription_spec.rb b/spec/graphql/mutations/merge_requests/set_subscription_spec.rb
index 286de6c0c97..a28bab363f3 100644
--- a/spec/graphql/mutations/merge_requests/set_subscription_spec.rb
+++ b/spec/graphql/mutations/merge_requests/set_subscription_spec.rb
@@ -7,7 +7,7 @@ describe Mutations::MergeRequests::SetSubscription do
let(:project) { merge_request.project }
let(:user) { create(:user) }
- subject(:mutation) { described_class.new(object: nil, context: { current_user: user }) }
+ subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
describe '#resolve' do
let(:subscribe) { true }
diff --git a/spec/graphql/mutations/merge_requests/set_wip_spec.rb b/spec/graphql/mutations/merge_requests/set_wip_spec.rb
index 490994c4577..9f0adcf117a 100644
--- a/spec/graphql/mutations/merge_requests/set_wip_spec.rb
+++ b/spec/graphql/mutations/merge_requests/set_wip_spec.rb
@@ -6,7 +6,7 @@ describe Mutations::MergeRequests::SetWip do
let(:merge_request) { create(:merge_request) }
let(:user) { create(:user) }
- subject(:mutation) { described_class.new(object: nil, context: { current_user: user }) }
+ subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
describe '#resolve' do
let(:wip) { true }
diff --git a/spec/graphql/mutations/todos/mark_all_done_spec.rb b/spec/graphql/mutations/todos/mark_all_done_spec.rb
index cce69d0dcdc..98b22a3e761 100644
--- a/spec/graphql/mutations/todos/mark_all_done_spec.rb
+++ b/spec/graphql/mutations/todos/mark_all_done_spec.rb
@@ -48,6 +48,6 @@ describe Mutations::Todos::MarkAllDone do
end
def mutation_for(user)
- described_class.new(object: nil, context: { current_user: user })
+ described_class.new(object: nil, context: { current_user: user }, field: nil)
end
end
diff --git a/spec/graphql/mutations/todos/mark_done_spec.rb b/spec/graphql/mutations/todos/mark_done_spec.rb
index ff61ef76db6..059ef3c8eee 100644
--- a/spec/graphql/mutations/todos/mark_done_spec.rb
+++ b/spec/graphql/mutations/todos/mark_done_spec.rb
@@ -14,7 +14,7 @@ describe Mutations::Todos::MarkDone do
let_it_be(:other_user_todo) { create(:todo, user: other_user, author: author, state: :pending) }
- let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }) }
+ let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }, field: nil) }
describe '#resolve' do
it 'marks a single todo as done' do
diff --git a/spec/graphql/mutations/todos/restore_many_spec.rb b/spec/graphql/mutations/todos/restore_many_spec.rb
index 7821ce35a08..8f4a8985f9e 100644
--- a/spec/graphql/mutations/todos/restore_many_spec.rb
+++ b/spec/graphql/mutations/todos/restore_many_spec.rb
@@ -12,7 +12,7 @@ describe Mutations::Todos::RestoreMany do
let_it_be(:other_user_todo) { create(:todo, user: other_user, author: author, state: :done) }
- let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }) }
+ let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }, field: nil) }
describe '#resolve' do
it 'restores a single todo' do
diff --git a/spec/graphql/mutations/todos/restore_spec.rb b/spec/graphql/mutations/todos/restore_spec.rb
index 76a2d4ffffd..1637acc2fb5 100644
--- a/spec/graphql/mutations/todos/restore_spec.rb
+++ b/spec/graphql/mutations/todos/restore_spec.rb
@@ -12,7 +12,7 @@ describe Mutations::Todos::Restore do
let_it_be(:other_user_todo) { create(:todo, user: other_user, author: author, state: :done) }
- let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }) }
+ let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }, field: nil) }
describe '#resolve' do
it 'restores a single todo' do
diff --git a/spec/graphql/resolvers/boards_resolver_spec.rb b/spec/graphql/resolvers/boards_resolver_spec.rb
index ab77dfa8fc3..02d6f808118 100644
--- a/spec/graphql/resolvers/boards_resolver_spec.rb
+++ b/spec/graphql/resolvers/boards_resolver_spec.rb
@@ -45,6 +45,21 @@ describe Resolvers::BoardsResolver do
expect(resolve_boards).to eq [board1]
end
end
+
+ context 'when querying for a single board' do
+ let(:board1) { create(:board, name: 'One', resource_parent: board_parent) }
+
+ it 'returns specified board' do
+ expect(resolve_boards(args: { id: global_id_of(board1) })).to eq [board1]
+ end
+
+ it 'returns no boards if the board is not found' do
+ outside_parent = create(board_parent.class.name.underscore.to_sym)
+ outside_board = create(:board, name: 'outside board', resource_parent: outside_parent)
+
+ expect(resolve_boards(args: { id: global_id_of(outside_board) })).to eq Board.none
+ end
+ end
end
describe '#resolve' do
diff --git a/spec/graphql/resolvers/concerns/resolves_pipelines_spec.rb b/spec/graphql/resolvers/concerns/resolves_pipelines_spec.rb
index b59561ebdd4..03ff1e11d85 100644
--- a/spec/graphql/resolvers/concerns/resolves_pipelines_spec.rb
+++ b/spec/graphql/resolvers/concerns/resolves_pipelines_spec.rb
@@ -17,11 +17,11 @@ describe ResolvesPipelines do
let(:current_user) { create(:user) }
- set(:project) { create(:project, :private) }
- set(:pipeline) { create(:ci_pipeline, project: project) }
- set(:failed_pipeline) { create(:ci_pipeline, :failed, project: project) }
- set(:ref_pipeline) { create(:ci_pipeline, project: project, ref: 'awesome-feature') }
- set(:sha_pipeline) { create(:ci_pipeline, project: project, sha: 'deadbeef') }
+ let_it_be(:project) { create(:project, :private) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:failed_pipeline) { create(:ci_pipeline, :failed, project: project) }
+ let_it_be(:ref_pipeline) { create(:ci_pipeline, project: project, ref: 'awesome-feature') }
+ let_it_be(:sha_pipeline) { create(:ci_pipeline, project: project, sha: 'deadbeef') }
before do
project.add_developer(current_user)
diff --git a/spec/graphql/resolvers/group_resolver_spec.rb b/spec/graphql/resolvers/group_resolver_spec.rb
index 7dec9ac1aa5..70b1102d363 100644
--- a/spec/graphql/resolvers/group_resolver_spec.rb
+++ b/spec/graphql/resolvers/group_resolver_spec.rb
@@ -5,8 +5,8 @@ require 'spec_helper'
describe Resolvers::GroupResolver do
include GraphqlHelpers
- set(:group1) { create(:group) }
- set(:group2) { create(:group) }
+ let_it_be(:group1) { create(:group) }
+ let_it_be(:group2) { create(:group) }
describe '#resolve' do
it 'batch-resolves groups by full path' do
diff --git a/spec/graphql/resolvers/issues_resolver_spec.rb b/spec/graphql/resolvers/issues_resolver_spec.rb
index 9e75a6cad60..7cfef9b4cc7 100644
--- a/spec/graphql/resolvers/issues_resolver_spec.rb
+++ b/spec/graphql/resolvers/issues_resolver_spec.rb
@@ -8,11 +8,13 @@ describe Resolvers::IssuesResolver do
let(:current_user) { create(:user) }
context "with a project" do
- set(:project) { create(:project) }
- set(:issue1) { create(:issue, project: project, state: :opened, created_at: 3.hours.ago, updated_at: 3.hours.ago) }
- set(:issue2) { create(:issue, project: project, state: :closed, title: 'foo', created_at: 1.hour.ago, updated_at: 1.hour.ago, closed_at: 1.hour.ago) }
- set(:label1) { create(:label, project: project) }
- set(:label2) { create(:label, project: project) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:milestone) { create(:milestone, project: project) }
+ let_it_be(:assignee) { create(:user) }
+ let_it_be(:issue1) { create(:issue, project: project, state: :opened, created_at: 3.hours.ago, updated_at: 3.hours.ago, milestone: milestone) }
+ let_it_be(:issue2) { create(:issue, project: project, state: :closed, title: 'foo', created_at: 1.hour.ago, updated_at: 1.hour.ago, closed_at: 1.hour.ago, assignees: [assignee]) }
+ let_it_be(:label1) { create(:label, project: project) }
+ let_it_be(:label2) { create(:label, project: project) }
before do
project.add_developer(current_user)
@@ -31,6 +33,26 @@ describe Resolvers::IssuesResolver do
expect(resolve_issues(state: 'closed')).to contain_exactly(issue2)
end
+ it 'filters by milestone' do
+ expect(resolve_issues(milestone_title: milestone.title)).to contain_exactly(issue1)
+ end
+
+ it 'filters by assignee_username' do
+ expect(resolve_issues(assignee_username: assignee.username)).to contain_exactly(issue2)
+ end
+
+ it 'filters by assignee_id' do
+ expect(resolve_issues(assignee_id: assignee.id)).to contain_exactly(issue2)
+ end
+
+ it 'filters by any assignee' do
+ expect(resolve_issues(assignee_id: IssuableFinder::FILTER_ANY)).to contain_exactly(issue2)
+ end
+
+ it 'filters by no assignee' do
+ expect(resolve_issues(assignee_id: IssuableFinder::FILTER_NONE)).to contain_exactly(issue1)
+ end
+
it 'filters by labels' do
expect(resolve_issues(label_name: [label1.title])).to contain_exactly(issue1, issue2)
expect(resolve_issues(label_name: [label1.title, label2.title])).to contain_exactly(issue2)
diff --git a/spec/graphql/resolvers/merge_request_pipelines_resolver_spec.rb b/spec/graphql/resolvers/merge_request_pipelines_resolver_spec.rb
index b8bdfc36ba7..02c6409a9a6 100644
--- a/spec/graphql/resolvers/merge_request_pipelines_resolver_spec.rb
+++ b/spec/graphql/resolvers/merge_request_pipelines_resolver_spec.rb
@@ -5,8 +5,8 @@ require 'spec_helper'
describe Resolvers::MergeRequestPipelinesResolver do
include GraphqlHelpers
- set(:merge_request) { create(:merge_request) }
- set(:pipeline) do
+ let_it_be(:merge_request) { create(:merge_request) }
+ let_it_be(:pipeline) do
create(
:ci_pipeline,
project: merge_request.source_project,
@@ -14,8 +14,8 @@ describe Resolvers::MergeRequestPipelinesResolver do
sha: merge_request.diff_head_sha
)
end
- set(:other_project_pipeline) { create(:ci_pipeline, project: merge_request.source_project) }
- set(:other_pipeline) { create(:ci_pipeline) }
+ let_it_be(:other_project_pipeline) { create(:ci_pipeline, project: merge_request.source_project) }
+ let_it_be(:other_pipeline) { create(:ci_pipeline) }
let(:current_user) { create(:user) }
before do
diff --git a/spec/graphql/resolvers/merge_requests_resolver_spec.rb b/spec/graphql/resolvers/merge_requests_resolver_spec.rb
index fe167a6ae3e..0bd5043d855 100644
--- a/spec/graphql/resolvers/merge_requests_resolver_spec.rb
+++ b/spec/graphql/resolvers/merge_requests_resolver_spec.rb
@@ -5,16 +5,13 @@ require 'spec_helper'
describe Resolvers::MergeRequestsResolver do
include GraphqlHelpers
- set(:project) { create(:project, :repository) }
- set(:merge_request_1) { create(:merge_request, :simple, source_project: project, target_project: project) }
- set(:merge_request_2) { create(:merge_request, :rebased, source_project: project, target_project: project) }
-
- set(:other_project) { create(:project, :repository) }
- set(:other_merge_request) { create(:merge_request, source_project: other_project, target_project: other_project) }
-
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:merge_request_1) { create(:merge_request, :simple, source_project: project, target_project: project) }
+ let_it_be(:merge_request_2) { create(:merge_request, :rebased, source_project: project, target_project: project) }
+ let_it_be(:other_project) { create(:project, :repository) }
+ let_it_be(:other_merge_request) { create(:merge_request, source_project: other_project, target_project: other_project) }
let(:iid_1) { merge_request_1.iid }
let(:iid_2) { merge_request_2.iid }
-
let(:other_iid) { other_merge_request.iid }
describe '#resolve' do
diff --git a/spec/graphql/resolvers/project_pipelines_resolver_spec.rb b/spec/graphql/resolvers/project_pipelines_resolver_spec.rb
index f312a118c96..2a14796fdfa 100644
--- a/spec/graphql/resolvers/project_pipelines_resolver_spec.rb
+++ b/spec/graphql/resolvers/project_pipelines_resolver_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
describe Resolvers::ProjectPipelinesResolver do
include GraphqlHelpers
- set(:project) { create(:project) }
- set(:pipeline) { create(:ci_pipeline, project: project) }
- set(:other_pipeline) { create(:ci_pipeline) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:other_pipeline) { create(:ci_pipeline) }
let(:current_user) { create(:user) }
before do
diff --git a/spec/graphql/resolvers/project_resolver_spec.rb b/spec/graphql/resolvers/project_resolver_spec.rb
index 860f8b4abb8..e9e38353156 100644
--- a/spec/graphql/resolvers/project_resolver_spec.rb
+++ b/spec/graphql/resolvers/project_resolver_spec.rb
@@ -5,10 +5,9 @@ require 'spec_helper'
describe Resolvers::ProjectResolver do
include GraphqlHelpers
- set(:project1) { create(:project) }
- set(:project2) { create(:project) }
-
- set(:other_project) { create(:project) }
+ let_it_be(:project1) { create(:project) }
+ let_it_be(:project2) { create(:project) }
+ let_it_be(:other_project) { create(:project) }
describe '#resolve' do
it 'batch-resolves projects by full path' do
diff --git a/spec/graphql/resolvers/projects/snippets_resolver_spec.rb b/spec/graphql/resolvers/projects/snippets_resolver_spec.rb
index eef891bf984..6d301b1c742 100644
--- a/spec/graphql/resolvers/projects/snippets_resolver_spec.rb
+++ b/spec/graphql/resolvers/projects/snippets_resolver_spec.rb
@@ -75,6 +75,16 @@ describe Resolvers::Projects::SnippetsResolver do
expect(resolve_snippets(context: { current_user: other_user }, args: { ids: project_snippet.to_global_id })).to be_empty
end
end
+
+ context 'when project snippets are disabled' do
+ it 'raises an error' do
+ disabled_snippet_project = create(:project, :snippets_disabled)
+ disabled_snippet_project.add_developer(current_user)
+
+ expect(SnippetsFinder).not_to receive(:new)
+ expect { resolve_snippets(obj: disabled_snippet_project) }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
end
def resolve_snippets(args: {}, context: { current_user: current_user }, obj: project)
diff --git a/spec/graphql/types/base_field_spec.rb b/spec/graphql/types/base_field_spec.rb
index 1f82f316aa7..2547d39bcb2 100644
--- a/spec/graphql/types/base_field_spec.rb
+++ b/spec/graphql/types/base_field_spec.rb
@@ -46,7 +46,15 @@ describe Types::BaseField do
expect(field.to_graphql.complexity).to eq 12
end
- context 'when field has a resolver proc' do
+ context 'when field has a resolver' do
+ context 'when a valid complexity is already set' do
+ let(:field) { described_class.new(name: 'test', type: GraphQL::STRING_TYPE.connection_type, resolver_class: resolver, complexity: 2, max_page_size: 100, null: true) }
+
+ it 'uses this complexity' do
+ expect(field.to_graphql.complexity).to eq 2
+ end
+ end
+
context 'and is a connection' do
let(:field) { described_class.new(name: 'test', type: GraphQL::STRING_TYPE.connection_type, resolver_class: resolver, max_page_size: 100, null: true) }
@@ -59,6 +67,17 @@ describe Types::BaseField do
expect(field.to_graphql.complexity.call({}, { first: 1 }, 2)).to eq 2
expect(field.to_graphql.complexity.call({}, { first: 1, foo: true }, 2)).to eq 4
end
+
+ context 'when graphql_resolver_complexity is disabled' do
+ before do
+ stub_feature_flags(graphql_resolver_complexity: false)
+ end
+
+ it 'sets default field complexity' do
+ expect(field.to_graphql.complexity.call({}, {}, 2)).to eq 1
+ expect(field.to_graphql.complexity.call({}, { first: 50 }, 2)).to eq 1
+ end
+ end
end
context 'and is not a connection' do
@@ -155,7 +174,7 @@ describe Types::BaseField do
let(:flag) { :test_flag }
it 'prepends the description' do
- expect(field.description). to eq 'Test description. Available only when feature flag test_flag is enabled.'
+ expect(field.description). to eq 'Test description. Available only when feature flag `test_flag` is enabled.'
end
context 'falsey feature_flag values' do
diff --git a/spec/graphql/types/board_type_spec.rb b/spec/graphql/types/board_type_spec.rb
new file mode 100644
index 00000000000..9d18065bbcd
--- /dev/null
+++ b/spec/graphql/types/board_type_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe GitlabSchema.types['Board'] do
+ it { expect(described_class.graphql_name).to eq('Board') }
+
+ it { expect(described_class).to require_graphql_authorizations(:read_board) }
+
+ it 'has specific fields' do
+ expected_fields = %w[id name]
+
+ is_expected.to include_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/group_type_spec.rb b/spec/graphql/types/group_type_spec.rb
index 6a0028f6529..240dd9fa5e2 100644
--- a/spec/graphql/types/group_type_spec.rb
+++ b/spec/graphql/types/group_type_spec.rb
@@ -16,9 +16,17 @@ describe GitlabSchema.types['Group'] do
web_url avatar_url share_with_group_lock project_creation_level
subgroup_creation_level require_two_factor_authentication
two_factor_grace_period auto_devops_enabled emails_disabled
- mentions_disabled parent
+ mentions_disabled parent boards
]
is_expected.to include_graphql_fields(*expected_fields)
end
+
+ describe 'boards field' do
+ subject { described_class.fields['boards'] }
+
+ it 'returns boards' do
+ is_expected.to have_graphql_type(Types::BoardType.connection_type)
+ end
+ end
end
diff --git a/spec/graphql/types/project_type_spec.rb b/spec/graphql/types/project_type_spec.rb
index ac2d2d6f7f0..9c6d1e3f35c 100644
--- a/spec/graphql/types/project_type_spec.rb
+++ b/spec/graphql/types/project_type_spec.rb
@@ -24,6 +24,7 @@ describe GitlabSchema.types['Project'] do
namespace group statistics repository merge_requests merge_request issues
issue pipelines removeSourceBranchAfterMerge sentryDetailedError snippets
grafanaIntegration autocloseReferencedIssues suggestion_commit_message environments
+ boards
]
is_expected.to include_graphql_fields(*expected_fields)
@@ -77,4 +78,10 @@ describe GitlabSchema.types['Project'] do
it { is_expected.to have_graphql_type(Types::EnvironmentType.connection_type) }
it { is_expected.to have_graphql_resolver(Resolvers::EnvironmentsResolver) }
end
+
+ describe 'boards field' do
+ subject { described_class.fields['boards'] }
+
+ it { is_expected.to have_graphql_type(Types::BoardType.connection_type) }
+ end
end
diff --git a/spec/graphql/types/snippet_type_spec.rb b/spec/graphql/types/snippet_type_spec.rb
index a06d372f668..97573f8c46b 100644
--- a/spec/graphql/types/snippet_type_spec.rb
+++ b/spec/graphql/types/snippet_type_spec.rb
@@ -16,4 +16,47 @@ describe GitlabSchema.types['Snippet'] do
describe 'authorizations' do
it { expect(described_class).to require_graphql_authorizations(:read_snippet) }
end
+
+ describe '#blob' do
+ let_it_be(:user) { create(:user) }
+ let(:query_blob) { subject.dig('data', 'snippets', 'edges')[0]['node']['blob'] }
+ let(:query) do
+ %(
+ {
+ snippets {
+ edges {
+ node {
+ blob {
+ name
+ path
+ }
+ }
+ }
+ }
+ }
+ )
+ end
+
+ subject { GitlabSchema.execute(query, context: { current_user: user }).as_json }
+
+ context 'when snippet has repository' do
+ let!(:snippet) { create(:personal_snippet, :repository, :public, author: user) }
+ let(:blob) { snippet.blobs.first }
+
+ it 'returns blob from the repository' do
+ expect(query_blob['name']).to eq blob.name
+ expect(query_blob['path']).to eq blob.path
+ end
+ end
+
+ context 'when snippet does not have a repository' do
+ let!(:snippet) { create(:personal_snippet, :public, author: user) }
+ let(:blob) { snippet.blob }
+
+ it 'returns SnippetBlob type' do
+ expect(query_blob['name']).to eq blob.name
+ expect(query_blob['path']).to eq blob.path
+ end
+ end
+ end
end
diff --git a/spec/helpers/auth_helper_spec.rb b/spec/helpers/auth_helper_spec.rb
index 9179019cd6a..1764a2bbc3c 100644
--- a/spec/helpers/auth_helper_spec.rb
+++ b/spec/helpers/auth_helper_spec.rb
@@ -56,7 +56,7 @@ describe AuthHelper do
describe 'any_form_based_providers_enabled?' do
before do
- allow(Gitlab::Auth::LDAP::Config).to receive(:enabled?).and_return(true)
+ allow(Gitlab::Auth::Ldap::Config).to receive(:enabled?).and_return(true)
end
it 'detects form-based providers' do
@@ -154,4 +154,34 @@ describe AuthHelper do
expect(helper.unlink_provider_allowed?(provider)).to eq 'policy_unlink_result'
end
end
+
+ describe '#provider_has_icon?' do
+ it 'returns true for defined providers' do
+ expect(helper.provider_has_icon?(described_class::PROVIDERS_WITH_ICONS.sample)).to eq true
+ end
+
+ it 'returns false for undefined providers' do
+ expect(helper.provider_has_icon?('test')).to be_falsey
+ end
+
+ context 'when provider is defined by config' do
+ before do
+ allow(Gitlab::Auth::OAuth::Provider).to receive(:icon_for).with('test').and_return('icon')
+ end
+
+ it 'returns true' do
+ expect(helper.provider_has_icon?('test')).to be_truthy
+ end
+ end
+
+ context 'when provider is not defined by config' do
+ before do
+ allow(Gitlab::Auth::OAuth::Provider).to receive(:icon_for).with('test').and_return(nil)
+ end
+
+ it 'returns false' do
+ expect(helper.provider_has_icon?('test')).to be_falsey
+ end
+ end
+ end
end
diff --git a/spec/helpers/auto_devops_helper_spec.rb b/spec/helpers/auto_devops_helper_spec.rb
index 5d42a80aae3..d06548f1595 100644
--- a/spec/helpers/auto_devops_helper_spec.rb
+++ b/spec/helpers/auto_devops_helper_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe AutoDevopsHelper do
- set(:project) { create(:project) }
- set(:user) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project) }
+ let_it_be(:user) { create(:user) }
describe '.show_auto_devops_callout?' do
let(:allowed) { true }
diff --git a/spec/helpers/award_emoji_helper_spec.rb b/spec/helpers/award_emoji_helper_spec.rb
index 975f32edd42..3dee466a80c 100644
--- a/spec/helpers/award_emoji_helper_spec.rb
+++ b/spec/helpers/award_emoji_helper_spec.rb
@@ -64,7 +64,7 @@ describe AwardEmojiHelper do
it 'returns correct url' do
@project = issue.project
- expected_url = "/#{@project.namespace.path}/#{@project.path}/issues/#{issue.iid}/toggle_award_emoji"
+ expected_url = "/#{@project.namespace.path}/#{@project.path}/-/issues/#{issue.iid}/toggle_award_emoji"
expect(subject).to eq(expected_url)
end
diff --git a/spec/helpers/blob_helper_spec.rb b/spec/helpers/blob_helper_spec.rb
index a9f4b03eba5..2631c219222 100644
--- a/spec/helpers/blob_helper_spec.rb
+++ b/spec/helpers/blob_helper_spec.rb
@@ -27,7 +27,7 @@ describe BlobHelper do
end
describe "#edit_blob_link" do
- let(:namespace) { create(:namespace, name: 'gitlab' )}
+ let(:namespace) { create(:namespace, name: 'gitlab') }
let(:project) { create(:project, :repository, namespace: namespace) }
before do
@@ -202,6 +202,95 @@ describe BlobHelper do
end
end
end
+
+ describe '#show_suggest_pipeline_creation_celebration?' do
+ let(:current_user) { create(:user) }
+
+ before do
+ assign(:project, project)
+ assign(:blob, blob)
+ assign(:commit, double('Commit', sha: 'whatever'))
+ helper.request.cookies["suggest_gitlab_ci_yml_commit_#{project.id}"] = 'true'
+ allow(helper).to receive(:current_user).and_return(current_user)
+ end
+
+ context 'when file is a pipeline config file' do
+ let(:data) { File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml')) }
+ let(:blob) { fake_blob(path: Gitlab::FileDetector::PATTERNS[:gitlab_ci], data: data) }
+
+ context 'experiment enabled' do
+ before do
+ allow(helper).to receive(:experiment_enabled?).and_return(true)
+ end
+
+ it 'is true' do
+ expect(helper.show_suggest_pipeline_creation_celebration?).to be_truthy
+ end
+
+ context 'file is invalid format' do
+ let(:data) { 'foo' }
+
+ it 'is false' do
+ expect(helper.show_suggest_pipeline_creation_celebration?).to be_falsey
+ end
+ end
+
+ context 'does not use the default ci config' do
+ before do
+ project.ci_config_path = 'something_bad'
+ end
+
+ it 'is false' do
+ expect(helper.show_suggest_pipeline_creation_celebration?).to be_falsey
+ end
+ end
+
+ context 'does not have the needed cookie' do
+ before do
+ helper.request.cookies.delete "suggest_gitlab_ci_yml_commit_#{project.id}"
+ end
+
+ it 'is false' do
+ expect(helper.show_suggest_pipeline_creation_celebration?).to be_falsey
+ end
+ end
+ end
+
+ context 'experiment disabled' do
+ before do
+ allow(helper).to receive(:experiment_enabled?).and_return(false)
+ end
+
+ it 'is false' do
+ expect(helper.show_suggest_pipeline_creation_celebration?).to be_falsey
+ end
+ end
+ end
+
+ context 'when file is not a pipeline config file' do
+ let(:blob) { fake_blob(path: 'LICENSE') }
+
+ context 'experiment enabled' do
+ before do
+ allow(helper).to receive(:experiment_enabled?).and_return(true)
+ end
+
+ it 'is false' do
+ expect(helper.show_suggest_pipeline_creation_celebration?).to be_falsey
+ end
+ end
+ end
+ end
+ end
+
+ describe 'suggest_pipeline_commit_cookie_name' do
+ let(:project) { create(:project) }
+
+ it 'uses project id to make up the cookie name' do
+ assign(:project, project)
+
+ expect(helper.suggest_pipeline_commit_cookie_name).to eq "suggest_gitlab_ci_yml_commit_#{project.id}"
+ end
end
describe '#ide_edit_path' do
diff --git a/spec/helpers/boards_helper_spec.rb b/spec/helpers/boards_helper_spec.rb
index 8a4446b7f59..f5e5285554c 100644
--- a/spec/helpers/boards_helper_spec.rb
+++ b/spec/helpers/boards_helper_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe BoardsHelper do
- set(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
describe '#build_issue_link_base' do
context 'project board' do
@@ -11,7 +11,7 @@ describe BoardsHelper do
@project = project
@board = create(:board, project: @project)
- expect(build_issue_link_base).to eq("/#{@project.namespace.path}/#{@project.path}/issues")
+ expect(build_issue_link_base).to eq("/#{@project.namespace.path}/#{@project.path}/-/issues")
end
end
diff --git a/spec/helpers/broadcast_messages_helper_spec.rb b/spec/helpers/broadcast_messages_helper_spec.rb
index 7e181e429d7..58cc03a9446 100644
--- a/spec/helpers/broadcast_messages_helper_spec.rb
+++ b/spec/helpers/broadcast_messages_helper_spec.rb
@@ -14,7 +14,7 @@ describe BroadcastMessagesHelper do
context 'when last broadcast message is hidden' do
before do
- helper.request.cookies["hide_broadcast_notification_message_#{broadcast_message_2.id}"] = 'true'
+ helper.request.cookies["hide_broadcast_message_#{broadcast_message_2.id}"] = 'true'
end
it { is_expected.to eq broadcast_message_1 }
@@ -27,8 +27,13 @@ describe BroadcastMessagesHelper do
end
describe 'broadcast_message' do
+ let_it_be(:user) { create(:user) }
let(:current_broadcast_message) { BroadcastMessage.new(message: 'Current Message') }
+ before do
+ allow(helper).to receive(:current_user).and_return(user)
+ end
+
it 'returns nil when no current message' do
expect(helper.broadcast_message(nil)).to be_nil
end
diff --git a/spec/helpers/environments_helper_spec.rb b/spec/helpers/environments_helper_spec.rb
index b72fbc9fd3c..37713a04844 100644
--- a/spec/helpers/environments_helper_spec.rb
+++ b/spec/helpers/environments_helper_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe EnvironmentsHelper do
- set(:user) { create(:user) }
- set(:project) { create(:project, :repository) }
- set(:environment) { create(:environment, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project, :repository) }
+ let_it_be(:environment) { create(:environment, project: project) }
describe '#metrics_data' do
before do
diff --git a/spec/helpers/events_helper_spec.rb b/spec/helpers/events_helper_spec.rb
index 062fa8f106e..61229127770 100644
--- a/spec/helpers/events_helper_spec.rb
+++ b/spec/helpers/events_helper_spec.rb
@@ -110,7 +110,7 @@ describe EventsHelper do
it 'returns a project issue url' do
event.target = create(:note_on_issue, note: 'nice work')
- expect(subject).to eq("#{project_base_url}/issues/#{event.note_target.iid}#note_#{event.target.id}")
+ expect(subject).to eq("#{project_base_url}/-/issues/#{event.note_target.iid}#note_#{event.target.id}")
end
it 'returns a merge request url' do
diff --git a/spec/helpers/form_helper_spec.rb b/spec/helpers/form_helper_spec.rb
index 68aa0137cd5..6698d8970e7 100644
--- a/spec/helpers/form_helper_spec.rb
+++ b/spec/helpers/form_helper_spec.rb
@@ -39,6 +39,25 @@ describe FormHelper do
end
end
+ it 'renders messages truncated if requested' do
+ model = double(errors: errors_stub('Error 1', 'Error 2'))
+ model.errors.add(:title, 'is truncated')
+ model.errors.add(:base, 'Error 3')
+
+ expect(model.class).to receive(:human_attribute_name) do |attribute|
+ attribute.to_s.capitalize
+ end
+
+ errors = helper.form_errors(model, truncate: :title)
+
+ aggregate_failures do
+ expect(errors).to include('<li>Error 1</li>')
+ expect(errors).to include('<li>Error 2</li>')
+ expect(errors).to include('<li><span class="str-truncated-100">Title is truncated</span></li>')
+ expect(errors).to include('<li>Error 3</li>')
+ end
+ end
+
def errors_stub(*messages)
ActiveModel::Errors.new(double).tap do |errors|
messages.each { |msg| errors.add(:base, msg) }
diff --git a/spec/helpers/issuables_helper_spec.rb b/spec/helpers/issuables_helper_spec.rb
index a39110d0aa7..7971eb8849d 100644
--- a/spec/helpers/issuables_helper_spec.rb
+++ b/spec/helpers/issuables_helper_spec.rb
@@ -183,8 +183,8 @@ describe IssuablesHelper do
@project = issue.project
expected_data = {
- endpoint: "/#{@project.full_path}/issues/#{issue.iid}",
- updateEndpoint: "/#{@project.full_path}/issues/#{issue.iid}.json",
+ endpoint: "/#{@project.full_path}/-/issues/#{issue.iid}",
+ updateEndpoint: "/#{@project.full_path}/-/issues/#{issue.iid}.json",
canUpdate: true,
canDestroy: true,
issuableRef: "##{issue.iid}",
diff --git a/spec/helpers/issues_helper_spec.rb b/spec/helpers/issues_helper_spec.rb
index a394812f8f0..dad740d3b80 100644
--- a/spec/helpers/issues_helper_spec.rb
+++ b/spec/helpers/issues_helper_spec.rb
@@ -200,7 +200,7 @@ describe IssuesHelper do
shared_examples 'successfully displays link to issue and with css class' do |action|
it 'returns link' do
- link = "<a class=\"#{css_class}\" href=\"/#{new_issue.project.full_path}/issues/#{new_issue.iid}\">(#{action})</a>"
+ link = "<a class=\"#{css_class}\" href=\"/#{new_issue.project.full_path}/-/issues/#{new_issue.iid}\">(#{action})</a>"
expect(helper.issue_closed_link(issue, user, css_class: css_class)).to match(link)
end
diff --git a/spec/helpers/labels_helper_spec.rb b/spec/helpers/labels_helper_spec.rb
index 7ad554fd618..ec70041d51f 100644
--- a/spec/helpers/labels_helper_spec.rb
+++ b/spec/helpers/labels_helper_spec.rb
@@ -35,7 +35,7 @@ describe LabelsHelper do
end
context 'with a group label' do
- set(:group) { create(:group) }
+ let_it_be(:group) { create(:group) }
let(:label) { create(:group_label, group: group, title: 'bug') }
context 'when asking for an issue link' do
@@ -56,7 +56,7 @@ describe LabelsHelper do
context 'without subject' do
it "uses the label's project" do
- expect(link_to_label(label_presenter)).to match %r{<a href="/#{label.project.full_path}/issues\?label_name%5B%5D=#{label.name}">.*</a>}
+ expect(link_to_label(label_presenter)).to match %r{<a.*href="/#{label.project.full_path}/-/issues\?label_name%5B%5D=#{label.name}".*>.*</a>}m
end
end
@@ -65,7 +65,7 @@ describe LabelsHelper do
let(:subject) { build(:project, namespace: namespace, name: 'bar3') }
it 'links to project issues page' do
- expect(link_to_label(label_presenter)).to match %r{<a href="/foo3/bar3/issues\?label_name%5B%5D=#{label.name}">.*</a>}
+ expect(link_to_label(label_presenter)).to match %r{<a.*href="/foo3/bar3/-/issues\?label_name%5B%5D=#{label.name}".*>.*</a>}m
end
end
@@ -73,23 +73,15 @@ describe LabelsHelper do
let(:subject) { build(:group, name: 'bar') }
it 'links to group issues page' do
- expect(link_to_label(label_presenter)).to match %r{<a href="/groups/bar/-/issues\?label_name%5B%5D=#{label.name}">.*</a>}
+ expect(link_to_label(label_presenter)).to match %r{<a.*href="/groups/bar/-/issues\?label_name%5B%5D=#{label.name}".*>.*</a>}m
end
end
context 'with a type argument' do
- ['issue', :issue].each do |type|
+ ['issue', :issue, 'merge_request', :merge_request].each do |type|
context "set to #{type}" do
it 'links to correct page' do
- expect(link_to_label(label_presenter, type: type)).to match %r{<a href="/#{label.project.full_path}/#{type.to_s.pluralize}\?label_name%5B%5D=#{label.name}">.*</a>}
- end
- end
- end
-
- ['merge_request', :merge_request].each do |type|
- context "set to #{type}" do
- it 'links to correct page' do
- expect(link_to_label(label_presenter, type: type)).to match %r{<a href="/#{label.project.full_path}/-/#{type.to_s.pluralize}\?label_name%5B%5D=#{label.name}">.*</a>}
+ expect(link_to_label(label_presenter, type: type)).to match %r{<a.*href="/#{label.project.full_path}/-/#{type.to_s.pluralize}\?label_name%5B%5D=#{label.name}".*>.*</a>}m
end
end
end
@@ -113,7 +105,7 @@ describe LabelsHelper do
context 'without block' do
it 'uses render_colored_label as the link content' do
expect(self).to receive(:render_colored_label)
- .with(label_presenter, tooltip: true).and_return('Foo')
+ .with(label_presenter).and_return('Foo')
expect(link_to_label(label_presenter)).to match('Foo')
end
end
@@ -135,7 +127,7 @@ describe LabelsHelper do
end
describe 'create_label_title' do
- set(:group) { create(:group) }
+ let_it_be(:group) { create(:group) }
context 'with a group as subject' do
it 'returns "Create group label"' do
@@ -144,7 +136,7 @@ describe LabelsHelper do
end
context 'with a project as subject' do
- set(:project) { create(:project, namespace: group) }
+ let_it_be(:project) { create(:project, namespace: group) }
it 'returns "Create project label"' do
expect(create_label_title(project)).to eq _('Create project label')
@@ -159,7 +151,7 @@ describe LabelsHelper do
end
describe 'manage_labels_title' do
- set(:group) { create(:group) }
+ let_it_be(:group) { create(:group) }
context 'with a group as subject' do
it 'returns "Manage group labels"' do
@@ -168,7 +160,7 @@ describe LabelsHelper do
end
context 'with a project as subject' do
- set(:project) { create(:project, namespace: group) }
+ let_it_be(:project) { create(:project, namespace: group) }
it 'returns "Manage project labels"' do
expect(manage_labels_title(project)).to eq _('Manage project labels')
@@ -183,7 +175,7 @@ describe LabelsHelper do
end
describe 'view_labels_title' do
- set(:group) { create(:group) }
+ let_it_be(:group) { create(:group) }
context 'with a group as subject' do
it 'returns "View group labels"' do
@@ -192,7 +184,7 @@ describe LabelsHelper do
end
context 'with a project as subject' do
- set(:project) { create(:project, namespace: group) }
+ let_it_be(:project) { create(:project, namespace: group) }
it 'returns "View project labels"' do
expect(view_labels_title(project)).to eq _('View project labels')
diff --git a/spec/helpers/markup_helper_spec.rb b/spec/helpers/markup_helper_spec.rb
index d7cc8afe9c5..33347f20de8 100644
--- a/spec/helpers/markup_helper_spec.rb
+++ b/spec/helpers/markup_helper_spec.rb
@@ -3,15 +3,15 @@
require 'spec_helper'
describe MarkupHelper do
- set(:project) { create(:project, :repository) }
- set(:user) do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) do
user = create(:user, username: 'gfm')
project.add_maintainer(user)
user
end
- set(:issue) { create(:issue, project: project) }
- set(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
- set(:snippet) { create(:project_snippet, project: project) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
+ let_it_be(:snippet) { create(:project_snippet, project: project) }
let(:commit) { project.commit }
before do
@@ -45,8 +45,8 @@ describe MarkupHelper do
describe "override default project" do
let(:actual) { issue.to_reference }
- set(:second_project) { create(:project, :public) }
- set(:second_issue) { create(:issue, project: second_project) }
+ let_it_be(:second_project) { create(:project, :public) }
+ let_it_be(:second_issue) { create(:issue, project: second_project) }
it 'links to the issue' do
expected = urls.project_issue_path(second_project, second_issue)
@@ -57,7 +57,7 @@ describe MarkupHelper do
describe 'uploads' do
let(:text) { "![ImageTest](/uploads/test.png)" }
- set(:group) { create(:group) }
+ let_it_be(:group) { create(:group) }
subject { helper.markdown(text) }
@@ -79,7 +79,7 @@ describe MarkupHelper do
end
describe "with a group in the context" do
- set(:project_in_group) { create(:project, group: group) }
+ let_it_be(:project_in_group) { create(:project, group: group) }
before do
helper.instance_variable_set(:@group, group)
@@ -114,7 +114,7 @@ describe MarkupHelper do
let(:requested_path) { nil }
it 'returns the link to the image path as a relative path' do
- expanded_path = "/#{project.full_path}/master/./#{image_file}"
+ expanded_path = "/#{project.full_path}/-/blob/master/./#{image_file}"
expect(subject.css('a')[0].attr('href')).to eq(expanded_path)
end
@@ -531,17 +531,30 @@ describe MarkupHelper do
it 'preserves style attribute for a label that can be accessed by current_user' do
project = create(:project, :public)
+ label = create_and_format_label(project)
- expect(create_and_format_label(project)).to match(/span class=.*style=.*/)
+ expect(label).to match(/span class=.*style=.*/)
+ expect(label).to include('data-html="true"')
end
it 'does not style a label that can not be accessed by current_user' do
project = create(:project, :private)
+ label = create_and_format_label(project)
- expect(create_and_format_label(project)).to eq("<p>#{label_title}</p>")
+ expect(label).to include("~label_1")
+ expect(label).not_to match(/span class=.*style=.*/)
end
end
+ it 'keeps whitelisted tags' do
+ html = '<a><i></i></a> <strong>strong</strong><em>em</em><b>b</b>'
+
+ object = create_object(html)
+ result = first_line_in_markdown(object, attribute, 100, project: project)
+
+ expect(result).to include(html)
+ end
+
it 'truncates Markdown properly' do
object = create_object("@#{user.username}, can you look at this?\nHello world\n")
actual = first_line_in_markdown(object, attribute, 100, project: project)
diff --git a/spec/helpers/notes_helper_spec.rb b/spec/helpers/notes_helper_spec.rb
index 1dc7f4e98ab..f074a918e7f 100644
--- a/spec/helpers/notes_helper_spec.rb
+++ b/spec/helpers/notes_helper_spec.rb
@@ -272,7 +272,7 @@ describe NotesHelper do
let(:note) { create(:note_on_issue, noteable: issue, project: project) }
it 'returns the noteable url with an anchor to the note' do
- expect(noteable_note_url(note)).to match("/#{project.namespace.path}/#{project.path}/issues/#{issue.iid}##{dom_id(note)}")
+ expect(noteable_note_url(note)).to match("/#{project.namespace.path}/#{project.path}/-/issues/#{issue.iid}##{dom_id(note)}")
end
end
diff --git a/spec/helpers/notifications_helper_spec.rb b/spec/helpers/notifications_helper_spec.rb
index 2384c87b377..d8dcce203fe 100644
--- a/spec/helpers/notifications_helper_spec.rb
+++ b/spec/helpers/notifications_helper_spec.rb
@@ -21,6 +21,7 @@ describe NotificationsHelper do
describe '#notification_event_name' do
it { expect(notification_event_name(:success_pipeline)).to match('Successful pipeline') }
it { expect(notification_event_name(:failed_pipeline)).to match('Failed pipeline') }
+ it { expect(notification_event_name(:fixed_pipeline)).to match('Fixed pipeline') }
end
describe '#notification_icon_level' do
diff --git a/spec/helpers/projects/error_tracking_helper_spec.rb b/spec/helpers/projects/error_tracking_helper_spec.rb
index 38a6ef6826b..008d749a002 100644
--- a/spec/helpers/projects/error_tracking_helper_spec.rb
+++ b/spec/helpers/projects/error_tracking_helper_spec.rb
@@ -5,8 +5,8 @@ require 'spec_helper'
describe Projects::ErrorTrackingHelper do
include Gitlab::Routing.url_helpers
- set(:project) { create(:project) }
- set(:current_user) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project) }
+ let_it_be(:current_user) { create(:user) }
describe '#error_tracking_data' do
let(:can_enable_error_tracking) { true }
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index 37bc2b382cb..17e3f8f9c06 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -994,4 +994,56 @@ describe ProjectsHelper do
it { is_expected.to eq(grafana_integration.enabled) }
end
end
+
+ describe '#project_license_name(project)' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:repository) { project.repository }
+
+ subject { project_license_name(project) }
+
+ context 'gitaly is working appropriately' do
+ it 'returns the license name' do
+ license = Licensee::License.new('mit')
+ allow(repository).to receive(:license).and_return(license)
+
+ expect(subject).to eq(license.name)
+ end
+ end
+
+ context 'gitaly is unreachable' do
+ shared_examples 'returns nil and tracks exception' do
+ it { is_expected.to be_nil }
+
+ it 'tracks the exception' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ an_instance_of(exception)
+ )
+
+ subject
+ end
+ end
+
+ before do
+ allow(repository).to receive(:license).and_raise(exception)
+ end
+
+ context "Gitlab::Git::CommandError" do
+ let(:exception) { Gitlab::Git::CommandError }
+
+ it_behaves_like 'returns nil and tracks exception'
+ end
+
+ context "GRPC::Unavailable" do
+ let(:exception) { GRPC::Unavailable }
+
+ it_behaves_like 'returns nil and tracks exception'
+ end
+
+ context "GRPC::DeadlineExceeded" do
+ let(:exception) { GRPC::DeadlineExceeded }
+
+ it_behaves_like 'returns nil and tracks exception'
+ end
+ end
+ end
end
diff --git a/spec/helpers/releases_helper_spec.rb b/spec/helpers/releases_helper_spec.rb
index 3f56c189642..d9d6a324f09 100644
--- a/spec/helpers/releases_helper_spec.rb
+++ b/spec/helpers/releases_helper_spec.rb
@@ -18,16 +18,31 @@ describe ReleasesHelper do
context 'url helpers' do
let(:project) { build(:project, namespace: create(:group)) }
let(:release) { create(:release, project: project) }
+ let(:user) { create(:user) }
+ let(:can_user_create_release) { false }
+ let(:common_keys) { [:project_id, :illustration_path, :documentation_path] }
before do
helper.instance_variable_set(:@project, project)
helper.instance_variable_set(:@release, release)
+ allow(helper).to receive(:current_user).and_return(user)
+ allow(helper).to receive(:can?)
+ .with(user, :create_release, project)
+ .and_return(can_user_create_release)
end
describe '#data_for_releases_page' do
- it 'has the needed data to display release blocks' do
- keys = %i(project_id illustration_path documentation_path)
- expect(helper.data_for_releases_page.keys).to eq(keys)
+ it 'includes the required data for displaying release blocks' do
+ expect(helper.data_for_releases_page.keys).to contain_exactly(*common_keys)
+ end
+
+ context 'when the user is allowed to create a new release' do
+ let(:can_user_create_release) { true }
+
+ it 'includes new_release_path' do
+ expect(helper.data_for_releases_page.keys).to contain_exactly(*common_keys, :new_release_path)
+ expect(helper.data_for_releases_page[:new_release_path]).to eq(new_project_tag_path(project))
+ end
end
end
diff --git a/spec/helpers/submodule_helper_spec.rb b/spec/helpers/submodule_helper_spec.rb
index d229753a0f0..db0836c8550 100644
--- a/spec/helpers/submodule_helper_spec.rb
+++ b/spec/helpers/submodule_helper_spec.rb
@@ -23,14 +23,30 @@ describe SubmoduleHelper do
it 'detects ssh on standard port' do
allow(Gitlab.config.gitlab_shell).to receive(:ssh_port).and_return(22) # set this just to be sure
allow(Gitlab.config.gitlab_shell).to receive(:ssh_path_prefix).and_return(Settings.send(:build_gitlab_shell_ssh_path_prefix))
- stub_url([config.user, '@', config.host, ':gitlab-org/gitlab-foss.git'].join(''))
+ stub_url([config.ssh_user, '@', config.host, ':gitlab-org/gitlab-foss.git'].join(''))
+ expect(subject).to eq([namespace_project_path('gitlab-org', 'gitlab-foss'), namespace_project_tree_path('gitlab-org', 'gitlab-foss', 'hash')])
+ end
+
+ it 'detects ssh on standard port without a username' do
+ allow(Gitlab.config.gitlab_shell).to receive(:ssh_port).and_return(22) # set this just to be sure
+ allow(Gitlab.config.gitlab_shell).to receive(:ssh_user).and_return('')
+ allow(Gitlab.config.gitlab_shell).to receive(:ssh_path_prefix).and_return(Settings.send(:build_gitlab_shell_ssh_path_prefix))
+ stub_url([config.host, ':gitlab-org/gitlab-foss.git'].join(''))
expect(subject).to eq([namespace_project_path('gitlab-org', 'gitlab-foss'), namespace_project_tree_path('gitlab-org', 'gitlab-foss', 'hash')])
end
it 'detects ssh on non-standard port' do
allow(Gitlab.config.gitlab_shell).to receive(:ssh_port).and_return(2222)
allow(Gitlab.config.gitlab_shell).to receive(:ssh_path_prefix).and_return(Settings.send(:build_gitlab_shell_ssh_path_prefix))
- stub_url(['ssh://', config.user, '@', config.host, ':2222/gitlab-org/gitlab-foss.git'].join(''))
+ stub_url(['ssh://', config.ssh_user, '@', config.host, ':2222/gitlab-org/gitlab-foss.git'].join(''))
+ expect(subject).to eq([namespace_project_path('gitlab-org', 'gitlab-foss'), namespace_project_tree_path('gitlab-org', 'gitlab-foss', 'hash')])
+ end
+
+ it 'detects ssh on non-standard port without a username' do
+ allow(Gitlab.config.gitlab_shell).to receive(:ssh_port).and_return(2222)
+ allow(Gitlab.config.gitlab_shell).to receive(:ssh_user).and_return('')
+ allow(Gitlab.config.gitlab_shell).to receive(:ssh_path_prefix).and_return(Settings.send(:build_gitlab_shell_ssh_path_prefix))
+ stub_url(['ssh://', config.host, ':2222/gitlab-org/gitlab-foss.git'].join(''))
expect(subject).to eq([namespace_project_path('gitlab-org', 'gitlab-foss'), namespace_project_tree_path('gitlab-org', 'gitlab-foss', 'hash')])
end
@@ -65,6 +81,33 @@ describe SubmoduleHelper do
end
end
+ context 'submodule on gist.github.com' do
+ it 'detects ssh' do
+ stub_url('git@gist.github.com:gitlab-org/gitlab-foss.git')
+ is_expected.to eq(['https://gist.github.com/gitlab-org/gitlab-foss', 'https://gist.github.com/gitlab-org/gitlab-foss/hash'])
+ end
+
+ it 'detects http' do
+ stub_url('http://gist.github.com/gitlab-org/gitlab-foss.git')
+ is_expected.to eq(['https://gist.github.com/gitlab-org/gitlab-foss', 'https://gist.github.com/gitlab-org/gitlab-foss/hash'])
+ end
+
+ it 'detects https' do
+ stub_url('https://gist.github.com/gitlab-org/gitlab-foss.git')
+ is_expected.to eq(['https://gist.github.com/gitlab-org/gitlab-foss', 'https://gist.github.com/gitlab-org/gitlab-foss/hash'])
+ end
+
+ it 'handles urls with no .git on the end' do
+ stub_url('http://gist.github.com/gitlab-org/gitlab-foss')
+ is_expected.to eq(['https://gist.github.com/gitlab-org/gitlab-foss', 'https://gist.github.com/gitlab-org/gitlab-foss/hash'])
+ end
+
+ it 'returns original with non-standard url' do
+ stub_url('http://gist.github.com/another/gitlab-org/gitlab-foss.git')
+ is_expected.to eq([repo.submodule_url_for, nil])
+ end
+ end
+
context 'submodule on github.com' do
it 'detects ssh' do
stub_url('git@github.com:gitlab-org/gitlab-foss.git')
diff --git a/spec/helpers/users_helper_spec.rb b/spec/helpers/users_helper_spec.rb
index 8479f8509f5..893d5cde24a 100644
--- a/spec/helpers/users_helper_spec.rb
+++ b/spec/helpers/users_helper_spec.rb
@@ -178,4 +178,42 @@ describe UsersHelper do
end
end
end
+
+ describe '#work_information' do
+ subject { helper.work_information(user) }
+
+ context 'when both job_title and organization are present' do
+ let(:user) { build(:user, organization: 'GitLab', job_title: 'Frontend Engineer') }
+
+ it 'returns job title concatenated with organization' do
+ is_expected.to eq('Frontend Engineer at GitLab')
+ end
+ end
+
+ context 'when only organization is present' do
+ let(:user) { build(:user, organization: 'GitLab') }
+
+ it "returns organization" do
+ is_expected.to eq('GitLab')
+ end
+ end
+
+ context 'when only job_title is present' do
+ let(:user) { build(:user, job_title: 'Frontend Engineer') }
+
+ it 'returns job title' do
+ is_expected.to eq('Frontend Engineer')
+ end
+ end
+
+ context 'when neither organization nor job_title are present' do
+ it { is_expected.to be_nil }
+ end
+
+ context 'when user parameter is nil' do
+ let(:user) { nil }
+
+ it { is_expected.to be_nil }
+ end
+ end
end
diff --git a/spec/helpers/visibility_level_helper_spec.rb b/spec/helpers/visibility_level_helper_spec.rb
index df338fac228..debe4401308 100644
--- a/spec/helpers/visibility_level_helper_spec.rb
+++ b/spec/helpers/visibility_level_helper_spec.rb
@@ -59,7 +59,7 @@ describe VisibilityLevelHelper do
describe "#project_visibility_level_description" do
it "describes private projects" do
expect(project_visibility_level_description(Gitlab::VisibilityLevel::PRIVATE))
- .to eq _('Project access must be granted explicitly to each user.')
+ .to eq _('Project access must be granted explicitly to each user. If this project is part of a group, access will be granted to members of the group.')
end
it "describes public projects" do
diff --git a/spec/initializers/100_patch_omniauth_saml_spec.rb b/spec/initializers/100_patch_omniauth_saml_spec.rb
new file mode 100644
index 00000000000..c4d20f79af0
--- /dev/null
+++ b/spec/initializers/100_patch_omniauth_saml_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'OmniAuth::Strategies::SAML', type: :strategy do
+ let(:idp_sso_target_url) { 'https://login.example.com/idp' }
+ let(:strategy) { [OmniAuth::Strategies::SAML, { idp_sso_target_url: idp_sso_target_url }] }
+
+ describe 'POST /users/auth/saml' do
+ it 'redirects to the provider login page' do
+ post '/users/auth/saml'
+
+ expect(last_response).to redirect_to(/\A#{Regexp.quote(idp_sso_target_url)}/)
+ end
+
+ it 'stores request ID during request phase' do
+ request_id = double
+ allow_next_instance_of(OneLogin::RubySaml::Authrequest) do |instance|
+ allow(instance).to receive(:uuid).and_return(request_id)
+ end
+
+ post '/users/auth/saml'
+ expect(session['last_authn_request_id']).to eq(request_id)
+ end
+ end
+end
diff --git a/spec/initializers/lograge_spec.rb b/spec/initializers/lograge_spec.rb
index 15165c6db98..0068b894474 100644
--- a/spec/initializers/lograge_spec.rb
+++ b/spec/initializers/lograge_spec.rb
@@ -17,7 +17,7 @@ describe 'lograge', type: :request do
end
let(:limited_params) do
- large_params.slice(:a, :b).map { |k, v| { key: k.to_s, value: v } } + ['...']
+ large_params.slice(:a, :b).map { |k, v| { key: k.to_s, value: v } } + [{ key: 'truncated', value: '...' }]
end
context 'for API requests' do
diff --git a/spec/javascripts/badges/components/badge_form_spec.js b/spec/javascripts/badges/components/badge_form_spec.js
deleted file mode 100644
index c7aa7fa63b1..00000000000
--- a/spec/javascripts/badges/components/badge_form_spec.js
+++ /dev/null
@@ -1,192 +0,0 @@
-import Vue from 'vue';
-import MockAdapter from 'axios-mock-adapter';
-import { mountComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
-import axios from '~/lib/utils/axios_utils';
-import store from '~/badges/store';
-import createEmptyBadge from '~/badges/empty_badge';
-import BadgeForm from '~/badges/components/badge_form.vue';
-import { DUMMY_IMAGE_URL, TEST_HOST } from '../../test_constants';
-
-// avoid preview background process
-BadgeForm.methods.debouncedPreview = () => {};
-
-describe('BadgeForm component', () => {
- const Component = Vue.extend(BadgeForm);
- let axiosMock;
- let vm;
-
- beforeEach(() => {
- setFixtures(`
- <div id="dummy-element"></div>
- `);
-
- axiosMock = new MockAdapter(axios);
- });
-
- afterEach(() => {
- vm.$destroy();
- axiosMock.restore();
- });
-
- describe('methods', () => {
- beforeEach(() => {
- vm = mountComponentWithStore(Component, {
- el: '#dummy-element',
- store,
- props: {
- isEditing: false,
- },
- });
- });
-
- describe('onCancel', () => {
- it('calls stopEditing', () => {
- spyOn(vm, 'stopEditing');
-
- vm.onCancel();
-
- expect(vm.stopEditing).toHaveBeenCalled();
- });
- });
- });
-
- const sharedSubmitTests = submitAction => {
- const nameSelector = '#badge-name';
- const imageUrlSelector = '#badge-image-url';
- const findImageUrlElement = () => vm.$el.querySelector(imageUrlSelector);
- const linkUrlSelector = '#badge-link-url';
- const findLinkUrlElement = () => vm.$el.querySelector(linkUrlSelector);
- const setValue = (inputElementSelector, value) => {
- const inputElement = vm.$el.querySelector(inputElementSelector);
- inputElement.value = value;
- inputElement.dispatchEvent(new Event('input'));
- };
- const submitForm = () => {
- const submitButton = vm.$el.querySelector('button[type="submit"]');
- submitButton.click();
- };
- const expectInvalidInput = inputElementSelector => {
- const inputElement = vm.$el.querySelector(inputElementSelector);
-
- expect(inputElement).toBeMatchedBy(':invalid');
- const feedbackElement = vm.$el.querySelector(`${inputElementSelector} + .invalid-feedback`);
-
- expect(feedbackElement).toBeVisible();
- };
-
- beforeEach(() => {
- spyOn(vm, submitAction).and.returnValue(Promise.resolve());
- store.replaceState({
- ...store.state,
- badgeInAddForm: createEmptyBadge(),
- badgeInEditForm: createEmptyBadge(),
- isSaving: false,
- });
-
- setValue(nameSelector, 'TestBadge');
- setValue(linkUrlSelector, `${TEST_HOST}/link/url`);
- setValue(imageUrlSelector, `${window.location.origin}${DUMMY_IMAGE_URL}`);
- });
-
- it('returns immediately if imageUrl is empty', () => {
- setValue(imageUrlSelector, '');
-
- submitForm();
-
- expectInvalidInput(imageUrlSelector);
-
- expect(vm[submitAction]).not.toHaveBeenCalled();
- });
-
- it('returns immediately if imageUrl is malformed', () => {
- setValue(imageUrlSelector, 'not-a-url');
-
- submitForm();
-
- expectInvalidInput(imageUrlSelector);
-
- expect(vm[submitAction]).not.toHaveBeenCalled();
- });
-
- it('returns immediately if linkUrl is empty', () => {
- setValue(linkUrlSelector, '');
-
- submitForm();
-
- expectInvalidInput(linkUrlSelector);
-
- expect(vm[submitAction]).not.toHaveBeenCalled();
- });
-
- it('returns immediately if linkUrl is malformed', () => {
- setValue(linkUrlSelector, 'not-a-url');
-
- submitForm();
-
- expectInvalidInput(linkUrlSelector);
-
- expect(vm[submitAction]).not.toHaveBeenCalled();
- });
-
- it(`calls ${submitAction}`, () => {
- submitForm();
-
- expect(findImageUrlElement()).toBeMatchedBy(':valid');
- expect(findLinkUrlElement()).toBeMatchedBy(':valid');
- expect(vm[submitAction]).toHaveBeenCalled();
- });
- };
-
- describe('if isEditing is false', () => {
- beforeEach(() => {
- vm = mountComponentWithStore(Component, {
- el: '#dummy-element',
- store,
- props: {
- isEditing: false,
- },
- });
- });
-
- it('renders one button', () => {
- expect(vm.$el.querySelector('.row-content-block')).toBeNull();
- const buttons = vm.$el.querySelectorAll('.form-group:last-of-type button');
-
- expect(buttons.length).toBe(1);
- const buttonAddElement = buttons[0];
-
- expect(buttonAddElement).toBeVisible();
- expect(buttonAddElement).toHaveText('Add badge');
- });
-
- sharedSubmitTests('addBadge');
- });
-
- describe('if isEditing is true', () => {
- beforeEach(() => {
- vm = mountComponentWithStore(Component, {
- el: '#dummy-element',
- store,
- props: {
- isEditing: true,
- },
- });
- });
-
- it('renders two buttons', () => {
- const buttons = vm.$el.querySelectorAll('.row-content-block button');
-
- expect(buttons.length).toBe(2);
- const buttonSaveElement = buttons[0];
-
- expect(buttonSaveElement).toBeVisible();
- expect(buttonSaveElement).toHaveText('Save changes');
- const buttonCancelElement = buttons[1];
-
- expect(buttonCancelElement).toBeVisible();
- expect(buttonCancelElement).toHaveText('Cancel');
- });
-
- sharedSubmitTests('saveBadge');
- });
-});
diff --git a/spec/javascripts/badges/components/badge_list_row_spec.js b/spec/javascripts/badges/components/badge_list_row_spec.js
deleted file mode 100644
index d1434737085..00000000000
--- a/spec/javascripts/badges/components/badge_list_row_spec.js
+++ /dev/null
@@ -1,106 +0,0 @@
-import $ from 'jquery';
-import Vue from 'vue';
-import { mountComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
-import { GROUP_BADGE, PROJECT_BADGE } from '~/badges/constants';
-import store from '~/badges/store';
-import BadgeListRow from '~/badges/components/badge_list_row.vue';
-import { createDummyBadge } from '../dummy_badge';
-
-describe('BadgeListRow component', () => {
- const Component = Vue.extend(BadgeListRow);
- let badge;
- let vm;
-
- beforeEach(() => {
- setFixtures(`
- <div id="delete-badge-modal" class="modal"></div>
- <div id="dummy-element"></div>
- `);
- store.replaceState({
- ...store.state,
- kind: PROJECT_BADGE,
- });
- badge = createDummyBadge();
- vm = mountComponentWithStore(Component, {
- el: '#dummy-element',
- store,
- props: { badge },
- });
- });
-
- afterEach(() => {
- vm.$destroy();
- });
-
- it('renders the badge', () => {
- const badgeElement = vm.$el.querySelector('.project-badge');
-
- expect(badgeElement).not.toBeNull();
- expect(badgeElement.getAttribute('src')).toBe(badge.renderedImageUrl);
- });
-
- it('renders the badge name', () => {
- expect(vm.$el).toContainText(badge.name);
- });
-
- it('renders the badge link', () => {
- expect(vm.$el).toContainText(badge.linkUrl);
- });
-
- it('renders the badge kind', () => {
- expect(vm.$el).toContainText('Project Badge');
- });
-
- it('shows edit and delete buttons', () => {
- const buttons = vm.$el.querySelectorAll('.table-button-footer button');
-
- expect(buttons).toHaveLength(2);
- const buttonEditElement = buttons[0];
-
- expect(buttonEditElement).toBeVisible();
- expect(buttonEditElement).toHaveSpriteIcon('pencil');
- const buttonDeleteElement = buttons[1];
-
- expect(buttonDeleteElement).toBeVisible();
- expect(buttonDeleteElement).toHaveSpriteIcon('remove');
- });
-
-  it('calls editBadge when clicking the edit button', () => {
- spyOn(vm, 'editBadge');
-
- const editButton = vm.$el.querySelector('.table-button-footer button:first-of-type');
- editButton.click();
-
- expect(vm.editBadge).toHaveBeenCalled();
- });
-
-  it('calls updateBadgeInModal and shows modal when clicking the delete button', done => {
- spyOn(vm, 'updateBadgeInModal');
- $('#delete-badge-modal').on('shown.bs.modal', () => done());
-
- const deleteButton = vm.$el.querySelector('.table-button-footer button:last-of-type');
- deleteButton.click();
-
- expect(vm.updateBadgeInModal).toHaveBeenCalled();
- });
-
- describe('for a group badge', () => {
- beforeEach(done => {
- badge.kind = GROUP_BADGE;
-
- Vue.nextTick()
- .then(done)
- .catch(done.fail);
- });
-
- it('renders the badge kind', () => {
- expect(vm.$el).toContainText('Group Badge');
- });
-
- it('hides edit and delete buttons', () => {
- const buttons = vm.$el.querySelectorAll('.table-button-footer button');
-
- expect(buttons).toHaveLength(0);
- });
- });
-});
diff --git a/spec/javascripts/badges/components/badge_list_spec.js b/spec/javascripts/badges/components/badge_list_spec.js
deleted file mode 100644
index 3af194454e3..00000000000
--- a/spec/javascripts/badges/components/badge_list_spec.js
+++ /dev/null
@@ -1,91 +0,0 @@
-import Vue from 'vue';
-import { mountComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
-import { GROUP_BADGE, PROJECT_BADGE } from '~/badges/constants';
-import store from '~/badges/store';
-import BadgeList from '~/badges/components/badge_list.vue';
-import { createDummyBadge } from '../dummy_badge';
-
-describe('BadgeList component', () => {
- const Component = Vue.extend(BadgeList);
- const numberOfDummyBadges = 3;
- let vm;
-
- beforeEach(() => {
- setFixtures('<div id="dummy-element"></div>');
- const badges = [];
- for (let id = 0; id < numberOfDummyBadges; id += 1) {
- badges.push({ id, ...createDummyBadge() });
- }
- store.replaceState({
- ...store.state,
- badges,
- kind: PROJECT_BADGE,
- isLoading: false,
- });
- vm = mountComponentWithStore(Component, {
- el: '#dummy-element',
- store,
- });
- });
-
- afterEach(() => {
- vm.$destroy();
- });
-
- it('renders a header with the badge count', () => {
- const header = vm.$el.querySelector('.card-header');
-
- expect(header).toHaveText(new RegExp(`Your badges\\s+${numberOfDummyBadges}`));
- });
-
- it('renders a row for each badge', () => {
- const rows = vm.$el.querySelectorAll('.gl-responsive-table-row');
-
- expect(rows).toHaveLength(numberOfDummyBadges);
- });
-
- it('renders a message if no badges exist', done => {
- store.state.badges = [];
-
- Vue.nextTick()
- .then(() => {
- expect(vm.$el).toContainText('This project has no badges');
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('shows a loading icon when loading', done => {
- store.state.isLoading = true;
-
- Vue.nextTick()
- .then(() => {
- const loadingIcon = vm.$el.querySelector('.gl-spinner');
-
- expect(loadingIcon).toBeVisible();
- })
- .then(done)
- .catch(done.fail);
- });
-
- describe('for group badges', () => {
- beforeEach(done => {
- store.state.kind = GROUP_BADGE;
-
- Vue.nextTick()
- .then(done)
- .catch(done.fail);
- });
-
- it('renders a message if no badges exist', done => {
- store.state.badges = [];
-
- Vue.nextTick()
- .then(() => {
- expect(vm.$el).toContainText('This group has no badges');
- })
- .then(done)
- .catch(done.fail);
- });
- });
-});
diff --git a/spec/javascripts/badges/components/badge_settings_spec.js b/spec/javascripts/badges/components/badge_settings_spec.js
deleted file mode 100644
index 479a905661b..00000000000
--- a/spec/javascripts/badges/components/badge_settings_spec.js
+++ /dev/null
@@ -1,118 +0,0 @@
-import $ from 'jquery';
-import Vue from 'vue';
-import { mountComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
-import store from '~/badges/store';
-import BadgeSettings from '~/badges/components/badge_settings.vue';
-import { createDummyBadge } from '../dummy_badge';
-
-describe('BadgeSettings component', () => {
- const Component = Vue.extend(BadgeSettings);
- let vm;
-
- beforeEach(() => {
- setFixtures(`
- <div id="dummy-element"></div>
- <button
- id="dummy-modal-button"
- type="button"
- data-toggle="modal"
- data-target="#delete-badge-modal"
- >Show modal</button>
- `);
- vm = mountComponentWithStore(Component, {
- el: '#dummy-element',
- store,
- });
- });
-
- afterEach(() => {
- vm.$destroy();
- });
-
- it('displays modal if button is clicked', done => {
- const badge = createDummyBadge();
- store.state.badgeInModal = badge;
- const modal = vm.$el.querySelector('#delete-badge-modal');
- const button = document.getElementById('dummy-modal-button');
-
- $(modal).on('shown.bs.modal', () => {
- expect(modal).toContainText('Delete badge?');
- const badgeElement = modal.querySelector('img.project-badge');
-
- expect(badgeElement).not.toBe(null);
- expect(badgeElement.getAttribute('src')).toBe(badge.renderedImageUrl);
-
- done();
- });
-
- Vue.nextTick()
- .then(() => {
- button.click();
- })
- .catch(done.fail);
- });
-
- it('displays a form to add a badge', () => {
- const form = vm.$el.querySelector('form:nth-of-type(2)');
-
- expect(form).not.toBe(null);
- const button = form.querySelector('.btn-success');
-
- expect(button).not.toBe(null);
- expect(button).toHaveText(/Add badge/);
- });
-
- it('displays badge list', () => {
- const badgeListElement = vm.$el.querySelector('.card');
-
- expect(badgeListElement).not.toBe(null);
- expect(badgeListElement).toBeVisible();
- expect(badgeListElement).toContainText('Your badges');
- });
-
- describe('when editing', () => {
- beforeEach(done => {
- store.state.isEditing = true;
-
- Vue.nextTick()
- .then(done)
- .catch(done.fail);
- });
-
- it('displays a form to edit a badge', () => {
- const form = vm.$el.querySelector('form:nth-of-type(1)');
-
- expect(form).not.toBe(null);
- const submitButton = form.querySelector('.btn-success');
-
- expect(submitButton).not.toBe(null);
- expect(submitButton).toHaveText(/Save changes/);
- const cancelButton = form.querySelector('.btn-cancel');
-
- expect(cancelButton).not.toBe(null);
- expect(cancelButton).toHaveText(/Cancel/);
- });
-
- it('displays no badge list', () => {
- const badgeListElement = vm.$el.querySelector('.card');
-
- expect(badgeListElement).toBeHidden();
- });
- });
-
- describe('methods', () => {
- describe('onSubmitModal', () => {
-      it('triggers deleteBadge with the badge shown in the modal', () => {
- spyOn(vm, 'deleteBadge').and.callFake(() => Promise.resolve());
- const modal = vm.$el.querySelector('#delete-badge-modal');
- const deleteButton = modal.querySelector('.btn-danger');
-
- deleteButton.click();
-
- const badge = store.state.badgeInModal;
-
- expect(vm.deleteBadge).toHaveBeenCalledWith(badge);
- });
- });
- });
-});
diff --git a/spec/javascripts/badges/components/badge_spec.js b/spec/javascripts/badges/components/badge_spec.js
deleted file mode 100644
index 14490b1bbd1..00000000000
--- a/spec/javascripts/badges/components/badge_spec.js
+++ /dev/null
@@ -1,150 +0,0 @@
-import Vue from 'vue';
-import mountComponent from 'spec/helpers/vue_mount_component_helper';
-import { DUMMY_IMAGE_URL, TEST_HOST } from 'spec/test_constants';
-import Badge from '~/badges/components/badge.vue';
-
-describe('Badge component', () => {
- const Component = Vue.extend(Badge);
- const dummyProps = {
- imageUrl: DUMMY_IMAGE_URL,
- linkUrl: `${TEST_HOST}/badge/link/url`,
- };
- let vm;
-
- const findElements = () => {
- const buttons = vm.$el.querySelectorAll('button');
- return {
- badgeImage: vm.$el.querySelector('img.project-badge'),
- loadingIcon: vm.$el.querySelector('.gl-spinner'),
- reloadButton: buttons[buttons.length - 1],
- };
- };
-
- const createComponent = (props, el = null) => {
- vm = mountComponent(Component, props, el);
- const { badgeImage } = findElements();
- return new Promise(resolve => badgeImage.addEventListener('load', resolve)).then(() =>
- Vue.nextTick(),
- );
- };
-
- afterEach(() => {
- vm.$destroy();
- });
-
- describe('watchers', () => {
- describe('imageUrl', () => {
- it('sets isLoading and resets numRetries and hasError', done => {
- const props = { ...dummyProps };
- createComponent(props)
- .then(() => {
- expect(vm.isLoading).toBe(false);
- vm.hasError = true;
- vm.numRetries = 42;
-
- vm.imageUrl = `${props.imageUrl}#something/else`;
-
- return Vue.nextTick();
- })
- .then(() => {
- expect(vm.isLoading).toBe(true);
- expect(vm.numRetries).toBe(0);
- expect(vm.hasError).toBe(false);
- })
- .then(done)
- .catch(done.fail);
- });
- });
- });
-
- describe('methods', () => {
- beforeEach(done => {
- createComponent({ ...dummyProps })
- .then(done)
- .catch(done.fail);
- });
-
- it('onError resets isLoading and sets hasError', () => {
- vm.hasError = false;
- vm.isLoading = true;
-
- vm.onError();
-
- expect(vm.hasError).toBe(true);
- expect(vm.isLoading).toBe(false);
- });
-
- it('onLoad sets isLoading', () => {
- vm.isLoading = true;
-
- vm.onLoad();
-
- expect(vm.isLoading).toBe(false);
- });
-
- it('reloadImage resets isLoading and hasError and increases numRetries', () => {
- vm.hasError = true;
- vm.isLoading = false;
- vm.numRetries = 0;
-
- vm.reloadImage();
-
- expect(vm.hasError).toBe(false);
- expect(vm.isLoading).toBe(true);
- expect(vm.numRetries).toBe(1);
- });
- });
-
- describe('behavior', () => {
- beforeEach(done => {
- setFixtures('<div id="dummy-element"></div>');
- createComponent({ ...dummyProps }, '#dummy-element')
- .then(done)
- .catch(done.fail);
- });
-
- it('shows a badge image after loading', () => {
- expect(vm.isLoading).toBe(false);
- expect(vm.hasError).toBe(false);
- const { badgeImage, loadingIcon, reloadButton } = findElements();
-
- expect(badgeImage).toBeVisible();
- expect(loadingIcon).toBeHidden();
- expect(reloadButton).toBeHidden();
- expect(vm.$el.innerText).toBe('');
- });
-
- it('shows a loading icon when loading', done => {
- vm.isLoading = true;
-
- Vue.nextTick()
- .then(() => {
- const { badgeImage, loadingIcon, reloadButton } = findElements();
-
- expect(badgeImage).toBeHidden();
- expect(loadingIcon).toBeVisible();
- expect(reloadButton).toBeHidden();
- expect(vm.$el.innerText).toBe('');
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('shows an error and reload button if loading failed', done => {
- vm.hasError = true;
-
- Vue.nextTick()
- .then(() => {
- const { badgeImage, loadingIcon, reloadButton } = findElements();
-
- expect(badgeImage).toBeHidden();
- expect(loadingIcon).toBeHidden();
- expect(reloadButton).toBeVisible();
- expect(reloadButton).toHaveSpriteIcon('retry');
- expect(vm.$el.innerText.trim()).toBe('No badge image');
- })
- .then(done)
- .catch(done.fail);
- });
- });
-});
diff --git a/spec/javascripts/badges/store/actions_spec.js b/spec/javascripts/badges/store/actions_spec.js
deleted file mode 100644
index d92155d59b5..00000000000
--- a/spec/javascripts/badges/store/actions_spec.js
+++ /dev/null
@@ -1,618 +0,0 @@
-import MockAdapter from 'axios-mock-adapter';
-import { TEST_HOST } from 'spec/test_constants';
-import testAction from 'spec/helpers/vuex_action_helper';
-import axios from '~/lib/utils/axios_utils';
-import actions, { transformBackendBadge } from '~/badges/store/actions';
-import mutationTypes from '~/badges/store/mutation_types';
-import createState from '~/badges/store/state';
-import { createDummyBadge, createDummyBadgeResponse } from '../dummy_badge';
-
-describe('Badges store actions', () => {
- const dummyEndpointUrl = `${TEST_HOST}/badges/endpoint`;
- const dummyBadges = [{ ...createDummyBadge(), id: 5 }, { ...createDummyBadge(), id: 6 }];
-
- let axiosMock;
- let badgeId;
- let state;
-
- beforeEach(() => {
- axiosMock = new MockAdapter(axios);
- state = {
- ...createState(),
- apiEndpointUrl: dummyEndpointUrl,
- badges: dummyBadges,
- };
- badgeId = state.badges[0].id;
- });
-
- afterEach(() => {
- axiosMock.restore();
- });
-
- describe('requestNewBadge', () => {
- it('commits REQUEST_NEW_BADGE', done => {
- testAction(
- actions.requestNewBadge,
- null,
- state,
- [{ type: mutationTypes.REQUEST_NEW_BADGE }],
- [],
- done,
- );
- });
- });
-
- describe('receiveNewBadge', () => {
- it('commits RECEIVE_NEW_BADGE', done => {
- const newBadge = createDummyBadge();
- testAction(
- actions.receiveNewBadge,
- newBadge,
- state,
- [{ type: mutationTypes.RECEIVE_NEW_BADGE, payload: newBadge }],
- [],
- done,
- );
- });
- });
-
- describe('receiveNewBadgeError', () => {
- it('commits RECEIVE_NEW_BADGE_ERROR', done => {
- testAction(
- actions.receiveNewBadgeError,
- null,
- state,
- [{ type: mutationTypes.RECEIVE_NEW_BADGE_ERROR }],
- [],
- done,
- );
- });
- });
-
- describe('addBadge', () => {
- let badgeInAddForm;
- let dispatch;
- let endpointMock;
-
- beforeEach(() => {
- endpointMock = axiosMock.onPost(dummyEndpointUrl);
- dispatch = jasmine.createSpy('dispatch');
- badgeInAddForm = createDummyBadge();
- state = {
- ...state,
- badgeInAddForm,
- };
- });
-
- it('dispatches requestNewBadge and receiveNewBadge for successful response', done => {
- const dummyResponse = createDummyBadgeResponse();
-
- endpointMock.replyOnce(req => {
- expect(req.data).toBe(
- JSON.stringify({
- name: 'TestBadge',
- image_url: badgeInAddForm.imageUrl,
- link_url: badgeInAddForm.linkUrl,
- }),
- );
-
- expect(dispatch.calls.allArgs()).toEqual([['requestNewBadge']]);
- dispatch.calls.reset();
- return [200, dummyResponse];
- });
-
- const dummyBadge = transformBackendBadge(dummyResponse);
- actions
- .addBadge({ state, dispatch })
- .then(() => {
- expect(dispatch.calls.allArgs()).toEqual([['receiveNewBadge', dummyBadge]]);
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('dispatches requestNewBadge and receiveNewBadgeError for error response', done => {
- endpointMock.replyOnce(req => {
- expect(req.data).toBe(
- JSON.stringify({
- name: 'TestBadge',
- image_url: badgeInAddForm.imageUrl,
- link_url: badgeInAddForm.linkUrl,
- }),
- );
-
- expect(dispatch.calls.allArgs()).toEqual([['requestNewBadge']]);
- dispatch.calls.reset();
- return [500, ''];
- });
-
- actions
- .addBadge({ state, dispatch })
- .then(() => done.fail('Expected Ajax call to fail!'))
- .catch(() => {
- expect(dispatch.calls.allArgs()).toEqual([['receiveNewBadgeError']]);
- })
- .then(done)
- .catch(done.fail);
- });
- });
-
- describe('requestDeleteBadge', () => {
- it('commits REQUEST_DELETE_BADGE', done => {
- testAction(
- actions.requestDeleteBadge,
- badgeId,
- state,
- [{ type: mutationTypes.REQUEST_DELETE_BADGE, payload: badgeId }],
- [],
- done,
- );
- });
- });
-
- describe('receiveDeleteBadge', () => {
- it('commits RECEIVE_DELETE_BADGE', done => {
- testAction(
- actions.receiveDeleteBadge,
- badgeId,
- state,
- [{ type: mutationTypes.RECEIVE_DELETE_BADGE, payload: badgeId }],
- [],
- done,
- );
- });
- });
-
- describe('receiveDeleteBadgeError', () => {
- it('commits RECEIVE_DELETE_BADGE_ERROR', done => {
- testAction(
- actions.receiveDeleteBadgeError,
- badgeId,
- state,
- [{ type: mutationTypes.RECEIVE_DELETE_BADGE_ERROR, payload: badgeId }],
- [],
- done,
- );
- });
- });
-
- describe('deleteBadge', () => {
- let dispatch;
- let endpointMock;
-
- beforeEach(() => {
- endpointMock = axiosMock.onDelete(`${dummyEndpointUrl}/${badgeId}`);
- dispatch = jasmine.createSpy('dispatch');
- });
-
- it('dispatches requestDeleteBadge and receiveDeleteBadge for successful response', done => {
- endpointMock.replyOnce(() => {
- expect(dispatch.calls.allArgs()).toEqual([['requestDeleteBadge', badgeId]]);
- dispatch.calls.reset();
- return [200, ''];
- });
-
- actions
- .deleteBadge({ state, dispatch }, { id: badgeId })
- .then(() => {
- expect(dispatch.calls.allArgs()).toEqual([['receiveDeleteBadge', badgeId]]);
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('dispatches requestDeleteBadge and receiveDeleteBadgeError for error response', done => {
- endpointMock.replyOnce(() => {
- expect(dispatch.calls.allArgs()).toEqual([['requestDeleteBadge', badgeId]]);
- dispatch.calls.reset();
- return [500, ''];
- });
-
- actions
- .deleteBadge({ state, dispatch }, { id: badgeId })
- .then(() => done.fail('Expected Ajax call to fail!'))
- .catch(() => {
- expect(dispatch.calls.allArgs()).toEqual([['receiveDeleteBadgeError', badgeId]]);
- })
- .then(done)
- .catch(done.fail);
- });
- });
-
- describe('editBadge', () => {
- it('commits START_EDITING', done => {
- const dummyBadge = createDummyBadge();
- testAction(
- actions.editBadge,
- dummyBadge,
- state,
- [{ type: mutationTypes.START_EDITING, payload: dummyBadge }],
- [],
- done,
- );
- });
- });
-
- describe('requestLoadBadges', () => {
- it('commits REQUEST_LOAD_BADGES', done => {
- const dummyData = 'this is not real data';
- testAction(
- actions.requestLoadBadges,
- dummyData,
- state,
- [{ type: mutationTypes.REQUEST_LOAD_BADGES, payload: dummyData }],
- [],
- done,
- );
- });
- });
-
- describe('receiveLoadBadges', () => {
- it('commits RECEIVE_LOAD_BADGES', done => {
- const badges = dummyBadges;
- testAction(
- actions.receiveLoadBadges,
- badges,
- state,
- [{ type: mutationTypes.RECEIVE_LOAD_BADGES, payload: badges }],
- [],
- done,
- );
- });
- });
-
- describe('receiveLoadBadgesError', () => {
- it('commits RECEIVE_LOAD_BADGES_ERROR', done => {
- testAction(
- actions.receiveLoadBadgesError,
- null,
- state,
- [{ type: mutationTypes.RECEIVE_LOAD_BADGES_ERROR }],
- [],
- done,
- );
- });
- });
-
- describe('loadBadges', () => {
- let dispatch;
- let endpointMock;
-
- beforeEach(() => {
- endpointMock = axiosMock.onGet(dummyEndpointUrl);
- dispatch = jasmine.createSpy('dispatch');
- });
-
- it('dispatches requestLoadBadges and receiveLoadBadges for successful response', done => {
- const dummyData = 'this is just some data';
-      const dummyResponse = [
- createDummyBadgeResponse(),
- createDummyBadgeResponse(),
- createDummyBadgeResponse(),
- ];
- endpointMock.replyOnce(() => {
- expect(dispatch.calls.allArgs()).toEqual([['requestLoadBadges', dummyData]]);
- dispatch.calls.reset();
-        return [200, dummyResponse];
- });
-
- actions
- .loadBadges({ state, dispatch }, dummyData)
- .then(() => {
-          const badges = dummyResponse.map(transformBackendBadge);
-
- expect(dispatch.calls.allArgs()).toEqual([['receiveLoadBadges', badges]]);
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('dispatches requestLoadBadges and receiveLoadBadgesError for error response', done => {
- const dummyData = 'this is just some data';
- endpointMock.replyOnce(() => {
- expect(dispatch.calls.allArgs()).toEqual([['requestLoadBadges', dummyData]]);
- dispatch.calls.reset();
- return [500, ''];
- });
-
- actions
- .loadBadges({ state, dispatch }, dummyData)
- .then(() => done.fail('Expected Ajax call to fail!'))
- .catch(() => {
- expect(dispatch.calls.allArgs()).toEqual([['receiveLoadBadgesError']]);
- })
- .then(done)
- .catch(done.fail);
- });
- });
-
- describe('requestRenderedBadge', () => {
- it('commits REQUEST_RENDERED_BADGE', done => {
- testAction(
- actions.requestRenderedBadge,
- null,
- state,
- [{ type: mutationTypes.REQUEST_RENDERED_BADGE }],
- [],
- done,
- );
- });
- });
-
- describe('receiveRenderedBadge', () => {
- it('commits RECEIVE_RENDERED_BADGE', done => {
- const dummyBadge = createDummyBadge();
- testAction(
- actions.receiveRenderedBadge,
- dummyBadge,
- state,
- [{ type: mutationTypes.RECEIVE_RENDERED_BADGE, payload: dummyBadge }],
- [],
- done,
- );
- });
- });
-
- describe('receiveRenderedBadgeError', () => {
- it('commits RECEIVE_RENDERED_BADGE_ERROR', done => {
- testAction(
- actions.receiveRenderedBadgeError,
- null,
- state,
- [{ type: mutationTypes.RECEIVE_RENDERED_BADGE_ERROR }],
- [],
- done,
- );
- });
- });
-
- describe('renderBadge', () => {
- let dispatch;
- let endpointMock;
- let badgeInForm;
-
- beforeEach(() => {
- badgeInForm = createDummyBadge();
- state = {
- ...state,
- badgeInAddForm: badgeInForm,
- };
- const urlParameters = [
- `link_url=${encodeURIComponent(badgeInForm.linkUrl)}`,
- `image_url=${encodeURIComponent(badgeInForm.imageUrl)}`,
- ].join('&');
- endpointMock = axiosMock.onGet(`${dummyEndpointUrl}/render?${urlParameters}`);
- dispatch = jasmine.createSpy('dispatch');
- });
-
- it('returns immediately if imageUrl is empty', done => {
- spyOn(axios, 'get');
- badgeInForm.imageUrl = '';
-
- actions
- .renderBadge({ state, dispatch })
- .then(() => {
- expect(axios.get).not.toHaveBeenCalled();
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('returns immediately if linkUrl is empty', done => {
- spyOn(axios, 'get');
- badgeInForm.linkUrl = '';
-
- actions
- .renderBadge({ state, dispatch })
- .then(() => {
- expect(axios.get).not.toHaveBeenCalled();
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('escapes user input', done => {
- spyOn(axios, 'get').and.callFake(() => Promise.resolve({ data: createDummyBadgeResponse() }));
- badgeInForm.imageUrl = '&make-sandwich=true';
- badgeInForm.linkUrl = '<script>I am dangerous!</script>';
-
- actions
- .renderBadge({ state, dispatch })
- .then(() => {
- expect(axios.get.calls.count()).toBe(1);
- const url = axios.get.calls.argsFor(0)[0];
-
- expect(url).toMatch(`^${dummyEndpointUrl}/render?`);
- expect(url).toMatch('\\?link_url=%3Cscript%3EI%20am%20dangerous!%3C%2Fscript%3E&');
- expect(url).toMatch('&image_url=%26make-sandwich%3Dtrue$');
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('dispatches requestRenderedBadge and receiveRenderedBadge for successful response', done => {
-      const dummyResponse = createDummyBadgeResponse();
- endpointMock.replyOnce(() => {
- expect(dispatch.calls.allArgs()).toEqual([['requestRenderedBadge']]);
- dispatch.calls.reset();
-        return [200, dummyResponse];
- });
-
- actions
- .renderBadge({ state, dispatch })
- .then(() => {
-          const renderedBadge = transformBackendBadge(dummyResponse);
-
- expect(dispatch.calls.allArgs()).toEqual([['receiveRenderedBadge', renderedBadge]]);
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('dispatches requestRenderedBadge and receiveRenderedBadgeError for error response', done => {
- endpointMock.replyOnce(() => {
- expect(dispatch.calls.allArgs()).toEqual([['requestRenderedBadge']]);
- dispatch.calls.reset();
- return [500, ''];
- });
-
- actions
- .renderBadge({ state, dispatch })
- .then(() => done.fail('Expected Ajax call to fail!'))
- .catch(() => {
- expect(dispatch.calls.allArgs()).toEqual([['receiveRenderedBadgeError']]);
- })
- .then(done)
- .catch(done.fail);
- });
- });
-
- describe('requestUpdatedBadge', () => {
- it('commits REQUEST_UPDATED_BADGE', done => {
- testAction(
- actions.requestUpdatedBadge,
- null,
- state,
- [{ type: mutationTypes.REQUEST_UPDATED_BADGE }],
- [],
- done,
- );
- });
- });
-
- describe('receiveUpdatedBadge', () => {
- it('commits RECEIVE_UPDATED_BADGE', done => {
- const updatedBadge = createDummyBadge();
- testAction(
- actions.receiveUpdatedBadge,
- updatedBadge,
- state,
- [{ type: mutationTypes.RECEIVE_UPDATED_BADGE, payload: updatedBadge }],
- [],
- done,
- );
- });
- });
-
- describe('receiveUpdatedBadgeError', () => {
- it('commits RECEIVE_UPDATED_BADGE_ERROR', done => {
- testAction(
- actions.receiveUpdatedBadgeError,
- null,
- state,
- [{ type: mutationTypes.RECEIVE_UPDATED_BADGE_ERROR }],
- [],
- done,
- );
- });
- });
-
- describe('saveBadge', () => {
- let badgeInEditForm;
- let dispatch;
- let endpointMock;
-
- beforeEach(() => {
- badgeInEditForm = createDummyBadge();
- state = {
- ...state,
- badgeInEditForm,
- };
- endpointMock = axiosMock.onPut(`${dummyEndpointUrl}/${badgeInEditForm.id}`);
- dispatch = jasmine.createSpy('dispatch');
- });
-
- it('dispatches requestUpdatedBadge and receiveUpdatedBadge for successful response', done => {
- const dummyResponse = createDummyBadgeResponse();
-
- endpointMock.replyOnce(req => {
- expect(req.data).toBe(
- JSON.stringify({
- name: 'TestBadge',
- image_url: badgeInEditForm.imageUrl,
- link_url: badgeInEditForm.linkUrl,
- }),
- );
-
- expect(dispatch.calls.allArgs()).toEqual([['requestUpdatedBadge']]);
- dispatch.calls.reset();
- return [200, dummyResponse];
- });
-
- const updatedBadge = transformBackendBadge(dummyResponse);
- actions
- .saveBadge({ state, dispatch })
- .then(() => {
- expect(dispatch.calls.allArgs()).toEqual([['receiveUpdatedBadge', updatedBadge]]);
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('dispatches requestUpdatedBadge and receiveUpdatedBadgeError for error response', done => {
- endpointMock.replyOnce(req => {
- expect(req.data).toBe(
- JSON.stringify({
- name: 'TestBadge',
- image_url: badgeInEditForm.imageUrl,
- link_url: badgeInEditForm.linkUrl,
- }),
- );
-
- expect(dispatch.calls.allArgs()).toEqual([['requestUpdatedBadge']]);
- dispatch.calls.reset();
- return [500, ''];
- });
-
- actions
- .saveBadge({ state, dispatch })
- .then(() => done.fail('Expected Ajax call to fail!'))
- .catch(() => {
- expect(dispatch.calls.allArgs()).toEqual([['receiveUpdatedBadgeError']]);
- })
- .then(done)
- .catch(done.fail);
- });
- });
-
- describe('stopEditing', () => {
- it('commits STOP_EDITING', done => {
- testAction(
- actions.stopEditing,
- null,
- state,
- [{ type: mutationTypes.STOP_EDITING }],
- [],
- done,
- );
- });
- });
-
- describe('updateBadgeInForm', () => {
- it('commits UPDATE_BADGE_IN_FORM', done => {
- const dummyBadge = createDummyBadge();
- testAction(
- actions.updateBadgeInForm,
- dummyBadge,
- state,
- [{ type: mutationTypes.UPDATE_BADGE_IN_FORM, payload: dummyBadge }],
- [],
- done,
- );
- });
-
- describe('updateBadgeInModal', () => {
- it('commits UPDATE_BADGE_IN_MODAL', done => {
- const dummyBadge = createDummyBadge();
- testAction(
- actions.updateBadgeInModal,
- dummyBadge,
- state,
- [{ type: mutationTypes.UPDATE_BADGE_IN_MODAL, payload: dummyBadge }],
- [],
- done,
- );
- });
- });
- });
-});
diff --git a/spec/javascripts/behaviors/quick_submit_spec.js b/spec/javascripts/behaviors/quick_submit_spec.js
deleted file mode 100644
index 7af8c984841..00000000000
--- a/spec/javascripts/behaviors/quick_submit_spec.js
+++ /dev/null
@@ -1,143 +0,0 @@
-import $ from 'jquery';
-import '~/behaviors/quick_submit';
-
-describe('Quick Submit behavior', function() {
- const keydownEvent = (options = { keyCode: 13, metaKey: true }) => $.Event('keydown', options);
-
- preloadFixtures('snippets/show.html');
-
- beforeEach(() => {
- loadFixtures('snippets/show.html');
- $('form').submit(e => {
- // Prevent a form submit from moving us off the testing page
- e.preventDefault();
- });
- this.spies = {
- submit: spyOnEvent('form', 'submit'),
- };
-
- this.textarea = $('.js-quick-submit textarea').first();
- });
-
- afterEach(() => {
- // Undo what we did to the shared <body>
- $('body').removeAttr('data-page');
- });
-
- it('does not respond to other keyCodes', () => {
- this.textarea.trigger(
- keydownEvent({
- keyCode: 32,
- }),
- );
-
- expect(this.spies.submit).not.toHaveBeenTriggered();
- });
-
- it('does not respond to Enter alone', () => {
- this.textarea.trigger(
- keydownEvent({
- ctrlKey: false,
- metaKey: false,
- }),
- );
-
- expect(this.spies.submit).not.toHaveBeenTriggered();
- });
-
- it('does not respond to repeated events', () => {
- this.textarea.trigger(
- keydownEvent({
- repeat: true,
- }),
- );
-
- expect(this.spies.submit).not.toHaveBeenTriggered();
- });
-
- it('disables input of type submit', () => {
- const submitButton = $('.js-quick-submit input[type=submit]');
- this.textarea.trigger(keydownEvent());
-
- expect(submitButton).toBeDisabled();
- });
-
- it('disables button of type submit', () => {
-    const submitButton = $('.js-quick-submit button[type=submit]');
- this.textarea.trigger(keydownEvent());
-
- expect(submitButton).toBeDisabled();
- });
-
- it('only clicks one submit', () => {
- const existingSubmit = $('.js-quick-submit input[type=submit]');
- // Add an extra submit button
- const newSubmit = $('<button type="submit">Submit it</button>');
- newSubmit.insertAfter(this.textarea);
-
- const oldClick = spyOnEvent(existingSubmit, 'click');
- const newClick = spyOnEvent(newSubmit, 'click');
-
- this.textarea.trigger(keydownEvent());
-
- expect(oldClick).not.toHaveBeenTriggered();
- expect(newClick).toHaveBeenTriggered();
- });
- // We cannot stub `navigator.userAgent` for CI's `rake karma` task, so we'll
- // only run the tests that apply to the current platform
- if (navigator.userAgent.match(/Macintosh/)) {
- describe('In Macintosh', () => {
- it('responds to Meta+Enter', () => {
- this.textarea.trigger(keydownEvent());
-
- expect(this.spies.submit).toHaveBeenTriggered();
- });
-
- it('excludes other modifier keys', () => {
- this.textarea.trigger(
- keydownEvent({
- altKey: true,
- }),
- );
- this.textarea.trigger(
- keydownEvent({
- ctrlKey: true,
- }),
- );
- this.textarea.trigger(
- keydownEvent({
- shiftKey: true,
- }),
- );
-
- expect(this.spies.submit).not.toHaveBeenTriggered();
- });
- });
- } else {
- it('responds to Ctrl+Enter', () => {
- this.textarea.trigger(keydownEvent());
-
- expect(this.spies.submit).toHaveBeenTriggered();
- });
-
- it('excludes other modifier keys', () => {
- this.textarea.trigger(
- keydownEvent({
- altKey: true,
- }),
- );
- this.textarea.trigger(
- keydownEvent({
- metaKey: true,
- }),
- );
- this.textarea.trigger(
- keydownEvent({
- shiftKey: true,
- }),
- );
-
- expect(this.spies.submit).not.toHaveBeenTriggered();
- });
- }
-});
diff --git a/spec/javascripts/blob/balsamiq/balsamiq_viewer_browser_spec.js b/spec/javascripts/blob/balsamiq/balsamiq_viewer_browser_spec.js
new file mode 100644
index 00000000000..4e06e5c12fc
--- /dev/null
+++ b/spec/javascripts/blob/balsamiq/balsamiq_viewer_browser_spec.js
@@ -0,0 +1,59 @@
+// This file can't be migrated to Jest because it relies on the browser to perform integration tests:
+// see: https://gitlab.com/gitlab-org/gitlab/-/issues/194207#note_301878738
+import { FIXTURES_PATH } from 'spec/test_constants';
+import BalsamiqViewer from '~/blob/balsamiq/balsamiq_viewer';
+
+const bmprPath = `${FIXTURES_PATH}/blob/balsamiq/test.bmpr`;
+
+describe('Balsamiq integration spec', () => {
+ let container;
+ let endpoint;
+ let balsamiqViewer;
+
+ preloadFixtures('static/balsamiq_viewer.html');
+
+ beforeEach(() => {
+ loadFixtures('static/balsamiq_viewer.html');
+
+ container = document.getElementById('js-balsamiq-viewer');
+ balsamiqViewer = new BalsamiqViewer(container);
+ });
+
+ describe('successful response', () => {
+ beforeEach(done => {
+ endpoint = bmprPath;
+
+ balsamiqViewer
+ .loadFile(endpoint)
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('does not show loading icon', () => {
+ expect(document.querySelector('.loading')).toBeNull();
+ });
+
+ it('renders the balsamiq previews', () => {
+ expect(document.querySelectorAll('.previews .preview').length).not.toEqual(0);
+ });
+ });
+
+ describe('error getting file', () => {
+ beforeEach(done => {
+ endpoint = 'invalid/path/to/file.bmpr';
+
+ balsamiqViewer
+ .loadFile(endpoint)
+ .then(done.fail, null)
+ .catch(done);
+ });
+
+ it('does not show loading icon', () => {
+ expect(document.querySelector('.loading')).toBeNull();
+ });
+
+ it('does not render the balsamiq previews', () => {
+ expect(document.querySelectorAll('.previews .preview').length).toEqual(0);
+ });
+ });
+});
diff --git a/spec/javascripts/blob/balsamiq/balsamiq_viewer_integration_spec.js b/spec/javascripts/blob/balsamiq/balsamiq_viewer_integration_spec.js
deleted file mode 100644
index 0c2b7b7392d..00000000000
--- a/spec/javascripts/blob/balsamiq/balsamiq_viewer_integration_spec.js
+++ /dev/null
@@ -1,57 +0,0 @@
-import { FIXTURES_PATH } from 'spec/test_constants';
-import BalsamiqViewer from '~/blob/balsamiq/balsamiq_viewer';
-
-const bmprPath = `${FIXTURES_PATH}/blob/balsamiq/test.bmpr`;
-
-describe('Balsamiq integration spec', () => {
- let container;
- let endpoint;
- let balsamiqViewer;
-
- preloadFixtures('static/balsamiq_viewer.html');
-
- beforeEach(() => {
- loadFixtures('static/balsamiq_viewer.html');
-
- container = document.getElementById('js-balsamiq-viewer');
- balsamiqViewer = new BalsamiqViewer(container);
- });
-
- describe('successful response', () => {
- beforeEach(done => {
- endpoint = bmprPath;
-
- balsamiqViewer
- .loadFile(endpoint)
- .then(done)
- .catch(done.fail);
- });
-
- it('does not show loading icon', () => {
- expect(document.querySelector('.loading')).toBeNull();
- });
-
- it('renders the balsamiq previews', () => {
- expect(document.querySelectorAll('.previews .preview').length).not.toEqual(0);
- });
- });
-
- describe('error getting file', () => {
- beforeEach(done => {
- endpoint = 'invalid/path/to/file.bmpr';
-
- balsamiqViewer
- .loadFile(endpoint)
- .then(done.fail, null)
- .catch(done);
- });
-
- it('does not show loading icon', () => {
- expect(document.querySelector('.loading')).toBeNull();
- });
-
- it('does not render the balsamiq previews', () => {
- expect(document.querySelectorAll('.previews .preview').length).toEqual(0);
- });
- });
-});
diff --git a/spec/javascripts/blob/balsamiq/balsamiq_viewer_spec.js b/spec/javascripts/blob/balsamiq/balsamiq_viewer_spec.js
deleted file mode 100644
index d175c8ba853..00000000000
--- a/spec/javascripts/blob/balsamiq/balsamiq_viewer_spec.js
+++ /dev/null
@@ -1,351 +0,0 @@
-import sqljs from 'sql.js';
-import axios from '~/lib/utils/axios_utils';
-import BalsamiqViewer from '~/blob/balsamiq/balsamiq_viewer';
-import ClassSpecHelper from '../../helpers/class_spec_helper';
-
-describe('BalsamiqViewer', () => {
- const mockArrayBuffer = new ArrayBuffer(10);
- let balsamiqViewer;
- let viewer;
-
- describe('class constructor', () => {
- beforeEach(() => {
- viewer = {};
-
- balsamiqViewer = new BalsamiqViewer(viewer);
- });
-
- it('should set .viewer', () => {
- expect(balsamiqViewer.viewer).toBe(viewer);
- });
- });
-
- describe('loadFile', () => {
- let bv;
- const endpoint = 'endpoint';
- const requestSuccess = Promise.resolve({
- data: mockArrayBuffer,
- status: 200,
- });
-
- beforeEach(() => {
- viewer = {};
- bv = new BalsamiqViewer(viewer);
- });
-
-    it('should call `axios.get` on `endpoint` param with responseType set to `arraybuffer`', () => {
- spyOn(axios, 'get').and.returnValue(requestSuccess);
- spyOn(bv, 'renderFile').and.stub();
-
- bv.loadFile(endpoint);
-
- expect(axios.get).toHaveBeenCalledWith(
- endpoint,
- jasmine.objectContaining({
- responseType: 'arraybuffer',
- }),
- );
- });
-
- it('should call `renderFile` on request success', done => {
- spyOn(axios, 'get').and.returnValue(requestSuccess);
- spyOn(bv, 'renderFile').and.callFake(() => {});
-
- bv.loadFile(endpoint)
- .then(() => {
- expect(bv.renderFile).toHaveBeenCalledWith(mockArrayBuffer);
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('should not call `renderFile` on request failure', done => {
- spyOn(axios, 'get').and.returnValue(Promise.reject());
- spyOn(bv, 'renderFile');
-
- bv.loadFile(endpoint)
- .then(() => {
- done.fail('Expected loadFile to throw error!');
- })
- .catch(() => {
- expect(bv.renderFile).not.toHaveBeenCalled();
- })
- .then(done)
- .catch(done.fail);
- });
- });
-
- describe('renderFile', () => {
- let container;
- let previews;
-
- beforeEach(() => {
- viewer = jasmine.createSpyObj('viewer', ['appendChild']);
- previews = [document.createElement('ul'), document.createElement('ul')];
-
- balsamiqViewer = jasmine.createSpyObj('balsamiqViewer', [
- 'initDatabase',
- 'getPreviews',
- 'renderPreview',
- ]);
- balsamiqViewer.viewer = viewer;
-
- balsamiqViewer.getPreviews.and.returnValue(previews);
- balsamiqViewer.renderPreview.and.callFake(preview => preview);
- viewer.appendChild.and.callFake(containerElement => {
- container = containerElement;
- });
-
- BalsamiqViewer.prototype.renderFile.call(balsamiqViewer, mockArrayBuffer);
- });
-
- it('should call .initDatabase', () => {
- expect(balsamiqViewer.initDatabase).toHaveBeenCalledWith(mockArrayBuffer);
- });
-
- it('should call .getPreviews', () => {
- expect(balsamiqViewer.getPreviews).toHaveBeenCalled();
- });
-
- it('should call .renderPreview for each preview', () => {
- const allArgs = balsamiqViewer.renderPreview.calls.allArgs();
-
- expect(allArgs.length).toBe(2);
-
- previews.forEach((preview, i) => {
- expect(allArgs[i][0]).toBe(preview);
- });
- });
-
- it('should set the container HTML', () => {
- expect(container.innerHTML).toBe('<ul></ul><ul></ul>');
- });
-
- it('should add inline preview classes', () => {
- expect(container.classList[0]).toBe('list-inline');
- expect(container.classList[1]).toBe('previews');
- });
-
- it('should call viewer.appendChild', () => {
- expect(viewer.appendChild).toHaveBeenCalledWith(container);
- });
- });
-
- describe('initDatabase', () => {
- let database;
- let uint8Array;
- let data;
-
- beforeEach(() => {
- uint8Array = {};
- database = {};
- data = 'data';
-
- balsamiqViewer = {};
-
- spyOn(window, 'Uint8Array').and.returnValue(uint8Array);
- spyOn(sqljs, 'Database').and.returnValue(database);
-
- BalsamiqViewer.prototype.initDatabase.call(balsamiqViewer, data);
- });
-
- it('should instantiate Uint8Array', () => {
- expect(window.Uint8Array).toHaveBeenCalledWith(data);
- });
-
- it('should call sqljs.Database', () => {
- expect(sqljs.Database).toHaveBeenCalledWith(uint8Array);
- });
-
- it('should set .database', () => {
- expect(balsamiqViewer.database).toBe(database);
- });
- });
-
- describe('getPreviews', () => {
- let database;
- let thumbnails;
- let getPreviews;
-
- beforeEach(() => {
- database = jasmine.createSpyObj('database', ['exec']);
- thumbnails = [{ values: [0, 1, 2] }];
-
- balsamiqViewer = {
- database,
- };
-
- spyOn(BalsamiqViewer, 'parsePreview').and.callFake(preview => preview.toString());
- database.exec.and.returnValue(thumbnails);
-
- getPreviews = BalsamiqViewer.prototype.getPreviews.call(balsamiqViewer);
- });
-
- it('should call database.exec', () => {
- expect(database.exec).toHaveBeenCalledWith('SELECT * FROM thumbnails');
- });
-
- it('should call .parsePreview for each value', () => {
- const allArgs = BalsamiqViewer.parsePreview.calls.allArgs();
-
- expect(allArgs.length).toBe(3);
-
- thumbnails[0].values.forEach((value, i) => {
- expect(allArgs[i][0]).toBe(value);
- });
- });
-
- it('should return an array of parsed values', () => {
- expect(getPreviews).toEqual(['0', '1', '2']);
- });
- });
-
- describe('getResource', () => {
- let database;
- let resourceID;
- let resource;
- let getResource;
-
- beforeEach(() => {
- database = jasmine.createSpyObj('database', ['exec']);
- resourceID = 4;
- resource = ['resource'];
-
- balsamiqViewer = {
- database,
- };
-
- database.exec.and.returnValue(resource);
-
- getResource = BalsamiqViewer.prototype.getResource.call(balsamiqViewer, resourceID);
- });
-
- it('should call database.exec', () => {
- expect(database.exec).toHaveBeenCalledWith(
- `SELECT * FROM resources WHERE id = '${resourceID}'`,
- );
- });
-
- it('should return the selected resource', () => {
- expect(getResource).toBe(resource[0]);
- });
- });
-
- describe('renderPreview', () => {
- let previewElement;
- let innerHTML;
- let preview;
- let renderPreview;
-
- beforeEach(() => {
- innerHTML = '<a>innerHTML</a>';
- previewElement = {
- outerHTML: '<p>outerHTML</p>',
- classList: jasmine.createSpyObj('classList', ['add']),
- };
- preview = {};
-
- balsamiqViewer = jasmine.createSpyObj('balsamiqViewer', ['renderTemplate']);
-
- spyOn(document, 'createElement').and.returnValue(previewElement);
- balsamiqViewer.renderTemplate.and.returnValue(innerHTML);
-
- renderPreview = BalsamiqViewer.prototype.renderPreview.call(balsamiqViewer, preview);
- });
-
- it('should call classList.add', () => {
- expect(previewElement.classList.add).toHaveBeenCalledWith('preview');
- });
-
- it('should call .renderTemplate', () => {
- expect(balsamiqViewer.renderTemplate).toHaveBeenCalledWith(preview);
- });
-
- it('should set .innerHTML', () => {
- expect(previewElement.innerHTML).toBe(innerHTML);
- });
-
- it('should return element', () => {
- expect(renderPreview).toBe(previewElement);
- });
- });
-
- describe('renderTemplate', () => {
- let preview;
- let name;
- let resource;
- let template;
- let renderTemplate;
-
- beforeEach(() => {
- preview = { resourceID: 1, image: 'image' };
- name = 'name';
- resource = 'resource';
- template = `
- <div class="card">
- <div class="card-header">name</div>
- <div class="card-body">
- <img class="img-thumbnail" src="data:image/png;base64,image"/>
- </div>
- </div>
- `;
-
- balsamiqViewer = jasmine.createSpyObj('balsamiqViewer', ['getResource']);
-
- spyOn(BalsamiqViewer, 'parseTitle').and.returnValue(name);
- balsamiqViewer.getResource.and.returnValue(resource);
-
- renderTemplate = BalsamiqViewer.prototype.renderTemplate.call(balsamiqViewer, preview);
- });
-
- it('should call .getResource', () => {
- expect(balsamiqViewer.getResource).toHaveBeenCalledWith(preview.resourceID);
- });
-
- it('should call .parseTitle', () => {
- expect(BalsamiqViewer.parseTitle).toHaveBeenCalledWith(resource);
- });
-
- it('should return the template string', function() {
- expect(renderTemplate.replace(/\s/g, '')).toEqual(template.replace(/\s/g, ''));
- });
- });
-
- describe('parsePreview', () => {
- let preview;
- let parsePreview;
-
- beforeEach(() => {
- preview = ['{}', '{ "id": 1 }'];
-
- spyOn(JSON, 'parse').and.callThrough();
-
- parsePreview = BalsamiqViewer.parsePreview(preview);
- });
-
- ClassSpecHelper.itShouldBeAStaticMethod(BalsamiqViewer, 'parsePreview');
-
- it('should return the parsed JSON', () => {
- expect(parsePreview).toEqual(JSON.parse('{ "id": 1 }'));
- });
- });
-
- describe('parseTitle', () => {
- let title;
- let parseTitle;
-
- beforeEach(() => {
- title = { values: [['{}', '{}', '{"name":"name"}']] };
-
- spyOn(JSON, 'parse').and.callThrough();
-
- parseTitle = BalsamiqViewer.parseTitle(title);
- });
-
- ClassSpecHelper.itShouldBeAStaticMethod(BalsamiqViewer, 'parsePreview');
-
- it('should return the name value', () => {
- expect(parseTitle).toBe('name');
- });
- });
-});
diff --git a/spec/javascripts/blob/blob_file_dropzone_spec.js b/spec/javascripts/blob/blob_file_dropzone_spec.js
deleted file mode 100644
index fe03775ec4d..00000000000
--- a/spec/javascripts/blob/blob_file_dropzone_spec.js
+++ /dev/null
@@ -1,39 +0,0 @@
-import $ from 'jquery';
-import BlobFileDropzone from '~/blob/blob_file_dropzone';
-
-describe('BlobFileDropzone', function() {
- preloadFixtures('blob/show.html');
-
- beforeEach(() => {
- loadFixtures('blob/show.html');
- const form = $('.js-upload-blob-form');
- this.blobFileDropzone = new BlobFileDropzone(form, 'POST');
- this.dropzone = $('.js-upload-blob-form .dropzone').get(0).dropzone;
- this.replaceFileButton = $('#submit-all');
- });
-
- describe('submit button', () => {
- it('requires file', () => {
- spyOn(window, 'alert');
-
- this.replaceFileButton.click();
-
- expect(window.alert).toHaveBeenCalled();
- });
-
- it('is disabled while uploading', () => {
- spyOn(window, 'alert');
-
- const file = new File([], 'some-file.jpg');
- const fakeEvent = $.Event('drop', {
- dataTransfer: { files: [file] },
- });
-
- this.dropzone.listeners[0].events.drop(fakeEvent);
- this.replaceFileButton.click();
-
- expect(window.alert).not.toHaveBeenCalled();
- expect(this.replaceFileButton.is(':disabled')).toEqual(true);
- });
- });
-});
diff --git a/spec/javascripts/blob/notebook/index_spec.js b/spec/javascripts/blob/notebook/index_spec.js
deleted file mode 100644
index db6ca5bd22d..00000000000
--- a/spec/javascripts/blob/notebook/index_spec.js
+++ /dev/null
@@ -1,130 +0,0 @@
-import MockAdapter from 'axios-mock-adapter';
-import axios from '~/lib/utils/axios_utils';
-import renderNotebook from '~/blob/notebook';
-
-describe('iPython notebook renderer', () => {
- preloadFixtures('static/notebook_viewer.html');
-
- beforeEach(() => {
- loadFixtures('static/notebook_viewer.html');
- });
-
- it('shows loading icon', () => {
- renderNotebook();
-
- expect(document.querySelector('.loading')).not.toBeNull();
- });
-
- describe('successful response', () => {
- let mock;
-
- beforeEach(done => {
- mock = new MockAdapter(axios);
- mock.onGet('/test').reply(200, {
- cells: [
- {
- cell_type: 'markdown',
- source: ['# test'],
- },
- {
- cell_type: 'code',
- execution_count: 1,
- source: ['def test(str)', ' return str'],
- outputs: [],
- },
- ],
- });
-
- renderNotebook();
-
- setTimeout(() => {
- done();
- });
- });
-
- afterEach(() => {
- mock.restore();
- });
-
- it('does not show loading icon', () => {
- expect(document.querySelector('.loading')).toBeNull();
- });
-
- it('renders the notebook', () => {
- expect(document.querySelector('.md')).not.toBeNull();
- });
-
- it('renders the markdown cell', () => {
- expect(document.querySelector('h1')).not.toBeNull();
-
- expect(document.querySelector('h1').textContent.trim()).toBe('test');
- });
-
- it('highlights code', () => {
- expect(document.querySelector('.token')).not.toBeNull();
-
- expect(document.querySelector('.language-python')).not.toBeNull();
- });
- });
-
- describe('error in JSON response', () => {
- let mock;
-
- beforeEach(done => {
- mock = new MockAdapter(axios);
- mock.onGet('/test').reply(() =>
- // eslint-disable-next-line prefer-promise-reject-errors
- Promise.reject({ status: 200, data: '{ "cells": [{"cell_type": "markdown"} }' }),
- );
-
- renderNotebook();
-
- setTimeout(() => {
- done();
- });
- });
-
- afterEach(() => {
- mock.restore();
- });
-
- it('does not show loading icon', () => {
- expect(document.querySelector('.loading')).toBeNull();
- });
-
- it('shows error message', () => {
- expect(document.querySelector('.md').textContent.trim()).toBe(
- 'An error occurred while parsing the file.',
- );
- });
- });
-
- describe('error getting file', () => {
- let mock;
-
- beforeEach(done => {
- mock = new MockAdapter(axios);
- mock.onGet('/test').reply(500, '');
-
- renderNotebook();
-
- setTimeout(() => {
- done();
- });
- });
-
- afterEach(() => {
- mock.restore();
- });
-
- it('does not show loading icon', () => {
- expect(document.querySelector('.loading')).toBeNull();
- });
-
- it('shows error message', () => {
- expect(document.querySelector('.md').textContent.trim()).toBe(
- 'An error occurred while loading the file. Please try again later.',
- );
- });
- });
-});
diff --git a/spec/javascripts/blob/pdf/index_spec.js b/spec/javascripts/blob/pdf/index_spec.js
deleted file mode 100644
index 66769a8aa47..00000000000
--- a/spec/javascripts/blob/pdf/index_spec.js
+++ /dev/null
@@ -1,72 +0,0 @@
-import { FIXTURES_PATH } from 'spec/test_constants';
-import renderPDF from '~/blob/pdf';
-
-const testPDF = `${FIXTURES_PATH}/blob/pdf/test.pdf`;
-
-describe('PDF renderer', () => {
- let viewer;
- let app;
-
- const checkLoaded = done => {
- if (app.loading) {
- setTimeout(() => {
- checkLoaded(done);
- }, 100);
- } else {
- done();
- }
- };
-
- preloadFixtures('static/pdf_viewer.html');
-
- beforeEach(() => {
- loadFixtures('static/pdf_viewer.html');
- viewer = document.getElementById('js-pdf-viewer');
- viewer.dataset.endpoint = testPDF;
- });
-
- it('shows loading icon', () => {
- renderPDF();
-
- expect(document.querySelector('.loading')).not.toBeNull();
- });
-
- describe('successful response', () => {
- beforeEach(done => {
- app = renderPDF();
-
- checkLoaded(done);
- });
-
- it('does not show loading icon', () => {
- expect(document.querySelector('.loading')).toBeNull();
- });
-
- it('renders the PDF', () => {
- expect(document.querySelector('.pdf-viewer')).not.toBeNull();
- });
-
- it('renders the PDF page', () => {
- expect(document.querySelector('.pdf-page')).not.toBeNull();
- });
- });
-
- describe('error getting file', () => {
- beforeEach(done => {
- viewer.dataset.endpoint = 'invalid/path/to/file.pdf';
- app = renderPDF();
-
- checkLoaded(done);
- });
-
- it('does not show loading icon', () => {
- expect(document.querySelector('.loading')).toBeNull();
- });
-
- it('shows error message', () => {
- expect(document.querySelector('.md').textContent.trim()).toBe(
- 'An error occurred while loading the file. Please try again later.',
- );
- });
- });
-});
diff --git a/spec/javascripts/blob/sketch/index_spec.js b/spec/javascripts/blob/sketch/index_spec.js
deleted file mode 100644
index 3d3129e10da..00000000000
--- a/spec/javascripts/blob/sketch/index_spec.js
+++ /dev/null
@@ -1,120 +0,0 @@
-/* eslint-disable no-new, promise/catch-or-return */
-import JSZip from 'jszip';
-import SketchLoader from '~/blob/sketch';
-
-describe('Sketch viewer', () => {
- const generateZipFileArrayBuffer = (zipFile, resolve, done) => {
- zipFile.generateAsync({ type: 'arrayBuffer' }).then(content => {
- resolve(content);
-
- setTimeout(() => {
- done();
- }, 100);
- });
- };
-
- preloadFixtures('static/sketch_viewer.html');
-
- beforeEach(() => {
- loadFixtures('static/sketch_viewer.html');
- });
-
- describe('with error message', () => {
- beforeEach(done => {
- spyOn(SketchLoader.prototype, 'getZipFile').and.callFake(
- () =>
- new Promise((resolve, reject) => {
- reject();
-
- setTimeout(() => {
- done();
- });
- }),
- );
-
- new SketchLoader(document.getElementById('js-sketch-viewer'));
- });
-
- it('renders error message', () => {
- expect(document.querySelector('#js-sketch-viewer p')).not.toBeNull();
-
- expect(document.querySelector('#js-sketch-viewer p').textContent.trim()).toContain(
- 'Cannot show preview.',
- );
- });
-
- it('removes the loading icon', () => {
- expect(document.querySelector('.js-loading-icon')).toBeNull();
- });
- });
-
- describe('success', () => {
- beforeEach(done => {
- spyOn(SketchLoader.prototype, 'getZipFile').and.callFake(
- () =>
- new Promise(resolve => {
- const zipFile = new JSZip();
- zipFile
- .folder('previews')
- .file(
- 'preview.png',
- 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAMAAAAoyzS7AAAAA1BMVEUAAACnej3aAAAAAXRSTlMAQObYZgAAAA1JREFUeNoBAgD9/wAAAAIAAVMrnDAAAAAASUVORK5CYII=',
- {
- base64: true,
- },
- );
-
- generateZipFileArrayBuffer(zipFile, resolve, done);
- }),
- );
-
- new SketchLoader(document.getElementById('js-sketch-viewer'));
- });
-
- it('does not render error message', () => {
- expect(document.querySelector('#js-sketch-viewer p')).toBeNull();
- });
-
- it('removes the loading icon', () => {
- expect(document.querySelector('.js-loading-icon')).toBeNull();
- });
-
- it('renders preview img', () => {
- const img = document.querySelector('#js-sketch-viewer img');
-
- expect(img).not.toBeNull();
- expect(img.classList.contains('img-fluid')).toBeTruthy();
- });
-
- it('renders link to image', () => {
- const img = document.querySelector('#js-sketch-viewer img');
- const link = document.querySelector('#js-sketch-viewer a');
-
- expect(link.href).toBe(img.src);
- expect(link.target).toBe('_blank');
- });
- });
-
- describe('incorrect file', () => {
- beforeEach(done => {
- spyOn(SketchLoader.prototype, 'getZipFile').and.callFake(
- () =>
- new Promise(resolve => {
- const zipFile = new JSZip();
-
- generateZipFileArrayBuffer(zipFile, resolve, done);
- }),
- );
-
- new SketchLoader(document.getElementById('js-sketch-viewer'));
- });
-
- it('renders error message', () => {
- expect(document.querySelector('#js-sketch-viewer p')).not.toBeNull();
-
- expect(document.querySelector('#js-sketch-viewer p').textContent.trim()).toContain(
- 'Cannot show preview.',
- );
- });
- });
-});
diff --git a/spec/javascripts/blob/viewer/index_spec.js b/spec/javascripts/blob/viewer/index_spec.js
deleted file mode 100644
index 06c06613887..00000000000
--- a/spec/javascripts/blob/viewer/index_spec.js
+++ /dev/null
@@ -1,180 +0,0 @@
-/* eslint-disable no-new */
-
-import $ from 'jquery';
-import MockAdapter from 'axios-mock-adapter';
-import BlobViewer from '~/blob/viewer/index';
-import axios from '~/lib/utils/axios_utils';
-
-describe('Blob viewer', () => {
- let blob;
- let mock;
-
- preloadFixtures('snippets/show.html');
-
- beforeEach(() => {
- mock = new MockAdapter(axios);
-
- loadFixtures('snippets/show.html');
- $('#modal-upload-blob').remove();
-
- blob = new BlobViewer();
-
- mock.onGet('http://test.host/snippets/1.json?viewer=rich').reply(200, {
- html: '<div>testing</div>',
- });
-
- mock.onGet('http://test.host/snippets/1.json?viewer=simple').reply(200, {
- html: '<div>testing</div>',
- });
-
- spyOn(axios, 'get').and.callThrough();
- });
-
- afterEach(() => {
- mock.restore();
- window.location.hash = '';
- });
-
- it('loads source file after switching views', done => {
- document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
-
- setTimeout(() => {
- expect(
- document
- .querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]')
- .classList.contains('hidden'),
- ).toBeFalsy();
-
- done();
- });
- });
-
- it('loads source file when line number is in hash', done => {
- window.location.hash = '#L1';
-
- new BlobViewer();
-
- setTimeout(() => {
- expect(
- document
- .querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]')
- .classList.contains('hidden'),
- ).toBeFalsy();
-
- done();
- });
- });
-
- it('does not reload the file if already loaded', done => {
- const asyncClick = () =>
- new Promise(resolve => {
- document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
-
- setTimeout(resolve);
- });
-
- asyncClick()
- .then(() => asyncClick())
- .then(() => {
- expect(
- document.querySelector('.blob-viewer[data-type="simple"]').getAttribute('data-loaded'),
- ).toBe('true');
-
- done();
- })
- .catch(() => {
- fail();
- done();
- });
- });
-
- describe('copy blob button', () => {
- let copyButton;
-
- beforeEach(() => {
- copyButton = document.querySelector('.js-copy-blob-source-btn');
- });
-
- it('disabled on load', () => {
- expect(copyButton.classList.contains('disabled')).toBeTruthy();
- });
-
- it('has tooltip when disabled', () => {
- expect(copyButton.getAttribute('data-original-title')).toBe(
- 'Switch to the source to copy the file contents',
- );
- });
-
- it('is blurred when clicked and disabled', () => {
- spyOn(copyButton, 'blur');
-
- copyButton.click();
-
- expect(copyButton.blur).toHaveBeenCalled();
- });
-
- it('is not blurred when clicked and not disabled', () => {
- spyOn(copyButton, 'blur');
-
- copyButton.classList.remove('disabled');
- copyButton.click();
-
- expect(copyButton.blur).not.toHaveBeenCalled();
- });
-
- it('enables after switching to simple view', done => {
- document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
-
- setTimeout(() => {
- expect(copyButton.classList.contains('disabled')).toBeFalsy();
-
- done();
- });
- });
-
- it('updates tooltip after switching to simple view', done => {
- document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
-
- setTimeout(() => {
- expect(copyButton.getAttribute('data-original-title')).toBe('Copy file contents');
-
- done();
- });
- });
- });
-
- describe('switchToViewer', () => {
- it('removes active class from old viewer button', () => {
- blob.switchToViewer('simple');
-
- expect(
- document.querySelector('.js-blob-viewer-switch-btn.active[data-viewer="rich"]'),
- ).toBeNull();
- });
-
- it('adds active class to new viewer button', () => {
- const simpleBtn = document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]');
-
- spyOn(simpleBtn, 'blur');
-
- blob.switchToViewer('simple');
-
- expect(simpleBtn.classList.contains('active')).toBeTruthy();
-
- expect(simpleBtn.blur).toHaveBeenCalled();
- });
-
- it('sends AJAX request when switching to simple view', () => {
- blob.switchToViewer('simple');
-
- expect(axios.get).toHaveBeenCalled();
- });
-
- it('does not send AJAX request when switching to rich view', () => {
- blob.switchToViewer('simple');
- blob.switchToViewer('rich');
-
- expect(axios.get.calls.count()).toBe(1);
- });
- });
-});
diff --git a/spec/javascripts/blob_edit/blob_bundle_spec.js b/spec/javascripts/blob_edit/blob_bundle_spec.js
deleted file mode 100644
index 06c6a603155..00000000000
--- a/spec/javascripts/blob_edit/blob_bundle_spec.js
+++ /dev/null
@@ -1,30 +0,0 @@
-import $ from 'jquery';
-import blobBundle from '~/blob_edit/blob_bundle';
-
-describe('BlobBundle', () => {
- beforeEach(() => {
- spyOnDependency(blobBundle, 'EditBlob').and.stub();
- setFixtures(`
- <div class="js-edit-blob-form" data-blob-filename="blah">
- <button class="js-commit-button"></button>
- <a class="btn btn-cancel" href="#"></a>
- </div>`);
- blobBundle();
- });
-
- it('sets the window beforeunload listener to a function returning a string', () => {
- expect(window.onbeforeunload()).toBe('');
- });
-
- it('removes beforeunload listener if commit button is clicked', () => {
- $('.js-commit-button').click();
-
- expect(window.onbeforeunload).toBeNull();
- });
-
- it('removes beforeunload listener when cancel link is clicked', () => {
- $('.btn.btn-cancel').click();
-
- expect(window.onbeforeunload).toBeNull();
- });
-});
diff --git a/spec/javascripts/boards/board_card_spec.js b/spec/javascripts/boards/board_card_spec.js
index 72367377929..2b0eee8b95d 100644
--- a/spec/javascripts/boards/board_card_spec.js
+++ b/spec/javascripts/boards/board_card_spec.js
@@ -32,7 +32,7 @@ describe('Board card', () => {
const label1 = new ListLabel({
id: 3,
title: 'testing 123',
- color: 'blue',
+ color: '#000cff',
text_color: 'white',
description: 'test',
});
@@ -155,12 +155,6 @@ describe('Board card', () => {
expect(boardsStore.detail.issue).toEqual({});
});
- it('does not set detail issue if button is clicked', () => {
- triggerEvent('mouseup', vm.$el.querySelector('button'));
-
- expect(boardsStore.detail.issue).toEqual({});
- });
-
it('does not set detail issue if img is clicked', done => {
vm.issue.assignees = [
new ListAssignee({
diff --git a/spec/javascripts/boards/components/boards_selector_spec.js b/spec/javascripts/boards/components/boards_selector_spec.js
deleted file mode 100644
index 16ec3b801cd..00000000000
--- a/spec/javascripts/boards/components/boards_selector_spec.js
+++ /dev/null
@@ -1,203 +0,0 @@
-import Vue from 'vue';
-import mountComponent from 'spec/helpers/vue_mount_component_helper';
-import { TEST_HOST } from 'spec/test_constants';
-import BoardsSelector from '~/boards/components/boards_selector.vue';
-import boardsStore from '~/boards/stores/boards_store';
-
-const throttleDuration = 1;
-
-function boardGenerator(n) {
- return new Array(n).fill().map((board, id) => {
- const name = `board${id}`;
-
- return {
- id,
- name,
- };
- });
-}
-
-describe('BoardsSelector', () => {
- let vm;
- let allBoardsResponse;
- let recentBoardsResponse;
- let fillSearchBox;
- const boards = boardGenerator(20);
- const recentBoards = boardGenerator(5);
-
- beforeEach(done => {
- setFixtures('<div class="js-boards-selector"></div>');
- window.gl = window.gl || {};
-
- boardsStore.setEndpoints({
- boardsEndpoint: '',
- recentBoardsEndpoint: '',
- listsEndpoint: '',
- bulkUpdatePath: '',
- boardId: '',
- });
-
- allBoardsResponse = Promise.resolve({
- data: boards,
- });
- recentBoardsResponse = Promise.resolve({
- data: recentBoards,
- });
-
- spyOn(boardsStore, 'allBoards').and.returnValue(allBoardsResponse);
- spyOn(boardsStore, 'recentBoards').and.returnValue(recentBoardsResponse);
-
- const Component = Vue.extend(BoardsSelector);
- vm = mountComponent(
- Component,
- {
- throttleDuration,
- currentBoard: {
- id: 1,
- name: 'Development',
- milestone_id: null,
- weight: null,
- assignee_id: null,
- labels: [],
- },
- milestonePath: `${TEST_HOST}/milestone/path`,
- boardBaseUrl: `${TEST_HOST}/board/base/url`,
- hasMissingBoards: false,
- canAdminBoard: true,
- multipleIssueBoardsAvailable: true,
- labelsPath: `${TEST_HOST}/labels/path`,
- projectId: 42,
- groupId: 19,
- scopedIssueBoardFeatureEnabled: true,
- weights: [],
- },
- document.querySelector('.js-boards-selector'),
- );
-
- // Emits the gl-dropdown show event to simulate the dropdown being opened at initialization time
- vm.$children[0].$emit('show');
-
- Promise.all([allBoardsResponse, recentBoardsResponse])
- .then(() => vm.$nextTick())
- .then(done)
- .catch(done.fail);
-
- fillSearchBox = filterTerm => {
- const { searchBox } = vm.$refs;
- const searchBoxInput = searchBox.$el.querySelector('input');
- searchBoxInput.value = filterTerm;
- searchBoxInput.dispatchEvent(new Event('input'));
- };
- });
-
- afterEach(() => {
- vm.$destroy();
- });
-
- describe('filtering', () => {
- it('shows all boards without filtering', done => {
- vm.$nextTick()
- .then(() => {
- const dropdownItem = vm.$el.querySelectorAll('.js-dropdown-item');
-
- expect(dropdownItem.length).toBe(boards.length + recentBoards.length);
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('shows only matching boards when filtering', done => {
- const filterTerm = 'board1';
- const expectedCount = boards.filter(board => board.name.includes(filterTerm)).length;
-
- fillSearchBox(filterTerm);
-
- vm.$nextTick()
- .then(() => {
- const dropdownItems = vm.$el.querySelectorAll('.js-dropdown-item');
-
- expect(dropdownItems.length).toBe(expectedCount);
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('shows message if there are no matching boards', done => {
- fillSearchBox('does not exist');
-
- vm.$nextTick()
- .then(() => {
- const dropdownItems = vm.$el.querySelectorAll('.js-dropdown-item');
-
- expect(dropdownItems.length).toBe(0);
- expect(vm.$el).toContainText('No matching boards found');
- })
- .then(done)
- .catch(done.fail);
- });
- });
-
- describe('recent boards section', () => {
- it('shows only when there are more than 10 boards', done => {
- vm.$nextTick()
- .then(() => {
- const headerEls = vm.$el.querySelectorAll('.dropdown-bold-header');
-
- const expectedCount = 2; // Recent + All
-
- expect(expectedCount).toBe(headerEls.length);
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('does not show when there are fewer than 10 boards', done => {
- spyOn(vm, 'initScrollFade');
- spyOn(vm, 'setScrollFade');
-
- vm.$nextTick()
- .then(() => {
- vm.boards = vm.boards.slice(0, 5);
- })
- .then(vm.$nextTick)
- .then(() => {
- const headerEls = vm.$el.querySelectorAll('.dropdown-bold-header');
- const expectedCount = 0;
-
- expect(expectedCount).toBe(headerEls.length);
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('does not show when recentBoards api returns empty array', done => {
- vm.$nextTick()
- .then(() => {
- vm.recentBoards = [];
- })
- .then(vm.$nextTick)
- .then(() => {
- const headerEls = vm.$el.querySelectorAll('.dropdown-bold-header');
- const expectedCount = 0;
-
- expect(expectedCount).toBe(headerEls.length);
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('does not show when search is active', done => {
- fillSearchBox('Random string');
-
- vm.$nextTick()
- .then(() => {
- const headerEls = vm.$el.querySelectorAll('.dropdown-bold-header');
- const expectedCount = 0;
-
- expect(expectedCount).toBe(headerEls.length);
- })
- .then(done)
- .catch(done.fail);
- });
- });
-});
diff --git a/spec/javascripts/boards/list_spec.js b/spec/javascripts/boards/list_spec.js
index c340b62730c..7385bfb0e5f 100644
--- a/spec/javascripts/boards/list_spec.js
+++ b/spec/javascripts/boards/list_spec.js
@@ -4,7 +4,6 @@
/* global ListLabel */
import MockAdapter from 'axios-mock-adapter';
-import _ from 'underscore';
import axios from '~/lib/utils/axios_utils';
import '~/boards/models/label';
import '~/boards/models/assignee';
@@ -40,7 +39,7 @@ describe('List model', () => {
list = new List({
title: 'test',
label: {
- id: _.random(10000),
+ id: 1,
title: 'test',
color: 'red',
text_color: 'white',
@@ -96,8 +95,8 @@ describe('List model', () => {
const listDup = new List(listObjDuplicate);
const issue = new ListIssue({
title: 'Testing',
- id: _.random(10000),
- iid: _.random(10000),
+ id: 1,
+ iid: 1,
confidential: false,
labels: [list.label, listDup.label],
assignees: [],
@@ -129,8 +128,8 @@ describe('List model', () => {
list.issues.push(
new ListIssue({
title: 'Testing',
- id: _.random(10000) + i,
- iid: _.random(10000) + i,
+ id: i,
+ iid: i,
confidential: false,
labels: [list.label],
assignees: [],
@@ -151,7 +150,7 @@ describe('List model', () => {
list.issues.push(
new ListIssue({
title: 'Testing',
- id: _.random(10000),
+ id: 1,
confidential: false,
labels: [list.label],
assignees: [],
@@ -192,7 +191,7 @@ describe('List model', () => {
list.issues.push(
new ListIssue({
title: 'Testing',
- id: _.random(10000),
+ id: 1,
confidential: false,
labels: [new ListLabel(list.label)],
assignees: [],
@@ -200,7 +199,7 @@ describe('List model', () => {
);
const dummyIssue = new ListIssue({
title: 'new issue',
- id: _.random(10000),
+ id: 2,
confidential: false,
labels: [new ListLabel(list.label)],
assignees: [user],
diff --git a/spec/javascripts/collapsed_sidebar_todo_spec.js b/spec/javascripts/collapsed_sidebar_todo_spec.js
index f75d63c8f57..f2eb08fa198 100644
--- a/spec/javascripts/collapsed_sidebar_todo_spec.js
+++ b/spec/javascripts/collapsed_sidebar_todo_spec.js
@@ -1,5 +1,5 @@
/* eslint-disable no-new */
-import _ from 'underscore';
+import { clone } from 'lodash';
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import Sidebar from '~/right_sidebar';
@@ -24,13 +24,13 @@ describe('Issuable right sidebar collapsed todo toggle', () => {
mock = new MockAdapter(axios);
mock.onPost(`${gl.TEST_HOST}/frontend-fixtures/issues-project/todos`).reply(() => {
- const response = _.clone(todoData);
+ const response = clone(todoData);
return [200, response];
});
mock.onDelete(/(.*)\/dashboard\/todos\/\d+$/).reply(() => {
- const response = _.clone(todoData);
+ const response = clone(todoData);
delete response.delete_path;
return [200, response];
diff --git a/spec/javascripts/create_cluster/.eslintrc.yml b/spec/javascripts/create_cluster/.eslintrc.yml
deleted file mode 100644
index 14e318a2f3e..00000000000
--- a/spec/javascripts/create_cluster/.eslintrc.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-rules:
- # https://gitlab.com/gitlab-org/gitlab/issues/33025
- promise/no-nesting: off
diff --git a/spec/javascripts/create_cluster/gke_cluster/components/gke_zone_dropdown_spec.js b/spec/javascripts/create_cluster/gke_cluster/components/gke_zone_dropdown_spec.js
deleted file mode 100644
index b2a7443422a..00000000000
--- a/spec/javascripts/create_cluster/gke_cluster/components/gke_zone_dropdown_spec.js
+++ /dev/null
@@ -1,94 +0,0 @@
-import Vue from 'vue';
-import { mountComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
-import GkeZoneDropdown from '~/create_cluster/gke_cluster/components/gke_zone_dropdown.vue';
-import { createStore } from '~/create_cluster/gke_cluster/store';
-import {
- SET_PROJECT,
- SET_ZONES,
- SET_PROJECT_BILLING_STATUS,
-} from '~/create_cluster/gke_cluster/store/mutation_types';
-import { selectedZoneMock, selectedProjectMock, gapiZonesResponseMock } from '../mock_data';
-
-const componentConfig = {
- fieldId: 'cluster_provider_gcp_attributes_gcp_zone',
- fieldName: 'cluster[provider_gcp_attributes][gcp_zone]',
-};
-
-const LABELS = {
- LOADING: 'Fetching zones',
- DISABLED: 'Select project to choose zone',
- DEFAULT: 'Select zone',
-};
-
-const createComponent = (store, props = componentConfig) => {
- const Component = Vue.extend(GkeZoneDropdown);
-
- return mountComponentWithStore(Component, {
- el: null,
- props,
- store,
- });
-};
-
-describe('GkeZoneDropdown', () => {
- let vm;
- let store;
-
- beforeEach(() => {
- store = createStore();
- vm = createComponent(store);
- });
-
- afterEach(() => {
- vm.$destroy();
- });
-
- describe('toggleText', () => {
- it('returns disabled state toggle text', () => {
- expect(vm.toggleText).toBe(LABELS.DISABLED);
- });
-
- it('returns loading toggle text', () => {
- vm.isLoading = true;
-
- expect(vm.toggleText).toBe(LABELS.LOADING);
- });
-
- it('returns default toggle text', () => {
- expect(vm.toggleText).toBe(LABELS.DISABLED);
-
- vm.$store.commit(SET_PROJECT, selectedProjectMock);
- vm.$store.commit(SET_PROJECT_BILLING_STATUS, true);
-
- expect(vm.toggleText).toBe(LABELS.DEFAULT);
- });
-
- it('returns selected zone name if a zone is selected', () => {
- vm.setItem(selectedZoneMock);
-
- expect(vm.toggleText).toBe(selectedZoneMock);
- });
- });
-
- describe('selectItem', () => {
- it('reflects new value when dropdown item is clicked', done => {
- expect(vm.$el.querySelector('input').value).toBe('');
- vm.$store.commit(SET_ZONES, gapiZonesResponseMock.items);
-
- return vm
- .$nextTick()
- .then(() => {
- vm.$el.querySelector('.dropdown-content button').click();
-
- return vm
- .$nextTick()
- .then(() => {
- expect(vm.$el.querySelector('input').value).toBe(selectedZoneMock);
- done();
- })
- .catch(done.fail);
- })
- .catch(done.fail);
- });
- });
-});
diff --git a/spec/javascripts/create_cluster/gke_cluster/helpers.js b/spec/javascripts/create_cluster/gke_cluster/helpers.js
deleted file mode 100644
index 6df511e9157..00000000000
--- a/spec/javascripts/create_cluster/gke_cluster/helpers.js
+++ /dev/null
@@ -1,49 +0,0 @@
-import {
- gapiProjectsResponseMock,
- gapiZonesResponseMock,
- gapiMachineTypesResponseMock,
-} from './mock_data';
-
-// eslint-disable-next-line import/prefer-default-export
-export const gapi = () => ({
- client: {
- cloudbilling: {
- projects: {
- getBillingInfo: () =>
- new Promise(resolve => {
- resolve({
- result: { billingEnabled: true },
- });
- }),
- },
- },
- cloudresourcemanager: {
- projects: {
- list: () =>
- new Promise(resolve => {
- resolve({
- result: { ...gapiProjectsResponseMock },
- });
- }),
- },
- },
- compute: {
- zones: {
- list: () =>
- new Promise(resolve => {
- resolve({
- result: { ...gapiZonesResponseMock },
- });
- }),
- },
- machineTypes: {
- list: () =>
- new Promise(resolve => {
- resolve({
- result: { ...gapiMachineTypesResponseMock },
- });
- }),
- },
- },
- },
-});
diff --git a/spec/javascripts/create_cluster/gke_cluster/mock_data.js b/spec/javascripts/create_cluster/gke_cluster/mock_data.js
deleted file mode 100644
index d9f5dbc636f..00000000000
--- a/spec/javascripts/create_cluster/gke_cluster/mock_data.js
+++ /dev/null
@@ -1,75 +0,0 @@
-export const emptyProjectMock = {
- projectId: '',
- name: '',
-};
-
-export const selectedProjectMock = {
- projectId: 'gcp-project-123',
- name: 'gcp-project',
-};
-
-export const selectedZoneMock = 'us-central1-a';
-
-export const selectedMachineTypeMock = 'n1-standard-2';
-
-export const gapiProjectsResponseMock = {
- projects: [
- {
- projectNumber: '1234',
- projectId: 'gcp-project-123',
- lifecycleState: 'ACTIVE',
- name: 'gcp-project',
- createTime: '2017-12-16T01:48:29.129Z',
- parent: {
- type: 'organization',
- id: '12345',
- },
- },
- ],
-};
-
-export const gapiZonesResponseMock = {
- kind: 'compute#zoneList',
- id: 'projects/gitlab-internal-153318/zones',
- items: [
- {
- kind: 'compute#zone',
- id: '2000',
- creationTimestamp: '1969-12-31T16:00:00.000-08:00',
- name: 'us-central1-a',
- description: 'us-central1-a',
- status: 'UP',
- region:
- 'https://www.googleapis.com/compute/v1/projects/gitlab-internal-153318/regions/us-central1',
- selfLink:
- 'https://www.googleapis.com/compute/v1/projects/gitlab-internal-153318/zones/us-central1-a',
- availableCpuPlatforms: ['Intel Skylake', 'Intel Broadwell', 'Intel Sandy Bridge'],
- },
- ],
- selfLink: 'https://www.googleapis.com/compute/v1/projects/gitlab-internal-153318/zones',
-};
-
-export const gapiMachineTypesResponseMock = {
- kind: 'compute#machineTypeList',
- id: 'projects/gitlab-internal-153318/zones/us-central1-a/machineTypes',
- items: [
- {
- kind: 'compute#machineType',
- id: '3002',
- creationTimestamp: '1969-12-31T16:00:00.000-08:00',
- name: 'n1-standard-2',
- description: '2 vCPUs, 7.5 GB RAM',
- guestCpus: 2,
- memoryMb: 7680,
- imageSpaceGb: 10,
- maximumPersistentDisks: 64,
- maximumPersistentDisksSizeGb: '65536',
- zone: 'us-central1-a',
- selfLink:
- 'https://www.googleapis.com/compute/v1/projects/gitlab-internal-153318/zones/us-central1-a/machineTypes/n1-standard-2',
- isSharedCpu: false,
- },
- ],
- selfLink:
- 'https://www.googleapis.com/compute/v1/projects/gitlab-internal-153318/zones/us-central1-a/machineTypes',
-};
diff --git a/spec/javascripts/create_cluster/gke_cluster/stores/actions_spec.js b/spec/javascripts/create_cluster/gke_cluster/stores/actions_spec.js
deleted file mode 100644
index 7ceaeace82f..00000000000
--- a/spec/javascripts/create_cluster/gke_cluster/stores/actions_spec.js
+++ /dev/null
@@ -1,139 +0,0 @@
-import testAction from 'spec/helpers/vuex_action_helper';
-import * as actions from '~/create_cluster/gke_cluster/store/actions';
-import { createStore } from '~/create_cluster/gke_cluster/store';
-import { gapi } from '../helpers';
-import { selectedProjectMock, selectedZoneMock, selectedMachineTypeMock } from '../mock_data';
-
-describe('GCP Cluster Dropdown Store Actions', () => {
- let store;
-
- beforeEach(() => {
- store = createStore();
- });
-
- describe('setProject', () => {
- it('should set project', done => {
- testAction(
- actions.setProject,
- selectedProjectMock,
- { selectedProject: {} },
- [{ type: 'SET_PROJECT', payload: selectedProjectMock }],
- [],
- done,
- );
- });
- });
-
- describe('setZone', () => {
- it('should set zone', done => {
- testAction(
- actions.setZone,
- selectedZoneMock,
- { selectedZone: '' },
- [{ type: 'SET_ZONE', payload: selectedZoneMock }],
- [],
- done,
- );
- });
- });
-
- describe('setMachineType', () => {
- it('should set machine type', done => {
- testAction(
- actions.setMachineType,
- selectedMachineTypeMock,
- { selectedMachineType: '' },
- [{ type: 'SET_MACHINE_TYPE', payload: selectedMachineTypeMock }],
- [],
- done,
- );
- });
- });
-
- describe('setIsValidatingProjectBilling', () => {
- it('should set isValidatingProjectBilling', done => {
- testAction(
- actions.setIsValidatingProjectBilling,
- true,
- { isValidatingProjectBilling: null },
- [{ type: 'SET_IS_VALIDATING_PROJECT_BILLING', payload: true }],
- [],
- done,
- );
- });
- });
-
- describe('async fetch methods', () => {
- let originalGapi;
- beforeAll(() => {
- originalGapi = window.gapi;
- window.gapi = gapi();
- });
-
- afterAll(() => {
- window.gapi = originalGapi;
- });
-
- describe('fetchProjects', () => {
- it('fetches projects from Google API', done => {
- store
- .dispatch('fetchProjects')
- .then(() => {
- expect(store.state.projects[0].projectId).toEqual(selectedProjectMock.projectId);
- expect(store.state.projects[0].name).toEqual(selectedProjectMock.name);
-
- done();
- })
- .catch(done.fail);
- });
- });
-
- describe('validateProjectBilling', () => {
- it('checks project billing status from Google API', done => {
- testAction(
- actions.validateProjectBilling,
- true,
- {
- selectedProject: selectedProjectMock,
- selectedZone: '',
- selectedMachineType: '',
- projectHasBillingEnabled: null,
- },
- [
- { type: 'SET_ZONE', payload: '' },
- { type: 'SET_MACHINE_TYPE', payload: '' },
- { type: 'SET_PROJECT_BILLING_STATUS', payload: true },
- ],
- [{ type: 'setIsValidatingProjectBilling', payload: false }],
- done,
- );
- });
- });
-
- describe('fetchZones', () => {
- it('fetches zones from Google API', done => {
- store
- .dispatch('fetchZones')
- .then(() => {
- expect(store.state.zones[0].name).toEqual(selectedZoneMock);
-
- done();
- })
- .catch(done.fail);
- });
- });
-
- describe('fetchMachineTypes', () => {
- it('fetches machine types from Google API', done => {
- store
- .dispatch('fetchMachineTypes')
- .then(() => {
- expect(store.state.machineTypes[0].name).toEqual(selectedMachineTypeMock);
-
- done();
- })
- .catch(done.fail);
- });
- });
- });
-});
diff --git a/spec/javascripts/diffs/components/app_spec.js b/spec/javascripts/diffs/components/app_spec.js
deleted file mode 100644
index 5f97182489e..00000000000
--- a/spec/javascripts/diffs/components/app_spec.js
+++ /dev/null
@@ -1,718 +0,0 @@
-import Vuex from 'vuex';
-import { shallowMount, createLocalVue } from '@vue/test-utils';
-import { GlLoadingIcon } from '@gitlab/ui';
-import { TEST_HOST } from 'spec/test_constants';
-import Mousetrap from 'mousetrap';
-import App from '~/diffs/components/app.vue';
-import NoChanges from '~/diffs/components/no_changes.vue';
-import DiffFile from '~/diffs/components/diff_file.vue';
-import CompareVersions from '~/diffs/components/compare_versions.vue';
-import HiddenFilesWarning from '~/diffs/components/hidden_files_warning.vue';
-import CommitWidget from '~/diffs/components/commit_widget.vue';
-import TreeList from '~/diffs/components/tree_list.vue';
-import { INLINE_DIFF_VIEW_TYPE, PARALLEL_DIFF_VIEW_TYPE } from '~/diffs/constants';
-import createDiffsStore from '../create_diffs_store';
-import diffsMockData from '../mock_data/merge_request_diffs';
-
-const mergeRequestDiff = { version_index: 1 };
-
-describe('diffs/components/app', () => {
- const oldMrTabs = window.mrTabs;
- let store;
- let wrapper;
-
- function createComponent(props = {}, extendStore = () => {}) {
- const localVue = createLocalVue();
-
- localVue.use(Vuex);
-
- store = createDiffsStore();
- store.state.diffs.isLoading = false;
-
- extendStore(store);
-
- wrapper = shallowMount(localVue.extend(App), {
- localVue,
- propsData: {
- endpoint: `${TEST_HOST}/diff/endpoint`,
- endpointMetadata: `${TEST_HOST}/diff/endpointMetadata`,
- endpointBatch: `${TEST_HOST}/diff/endpointBatch`,
- projectPath: 'namespace/project',
- currentUser: {},
- changesEmptyStateIllustration: '',
- dismissEndpoint: '',
- showSuggestPopover: true,
- ...props,
- },
- store,
- methods: {
- isLatestVersion() {
- return true;
- },
- },
- });
- }
-
- function getOppositeViewType(currentViewType) {
- return currentViewType === INLINE_DIFF_VIEW_TYPE
- ? PARALLEL_DIFF_VIEW_TYPE
- : INLINE_DIFF_VIEW_TYPE;
- }
-
- beforeEach(() => {
- // setup globals (needed for component to mount :/)
- window.mrTabs = jasmine.createSpyObj('mrTabs', ['resetViewContainer']);
- window.mrTabs.expandViewContainer = jasmine.createSpy();
- });
-
- afterEach(() => {
- // reset globals
- window.mrTabs = oldMrTabs;
-
- // reset component
- wrapper.destroy();
- });
-
- describe('fetch diff methods', () => {
- beforeEach(done => {
- const fetchResolver = () => {
- store.state.diffs.retrievingBatches = false;
- store.state.notes.discussions = 'test';
- return Promise.resolve({ real_size: 100 });
- };
- spyOn(window, 'requestIdleCallback').and.callFake(fn => fn());
- createComponent();
- spyOn(wrapper.vm, 'fetchDiffFiles').and.callFake(fetchResolver);
- spyOn(wrapper.vm, 'fetchDiffFilesMeta').and.callFake(fetchResolver);
- spyOn(wrapper.vm, 'fetchDiffFilesBatch').and.callFake(fetchResolver);
- spyOn(wrapper.vm, 'setDiscussions');
- spyOn(wrapper.vm, 'startRenderDiffsQueue');
- spyOn(wrapper.vm, 'unwatchDiscussions');
- spyOn(wrapper.vm, 'unwatchRetrievingBatches');
- store.state.diffs.retrievingBatches = true;
- store.state.diffs.diffFiles = [];
- wrapper.vm.$nextTick(done);
- });
-
- describe('when the diff view type changes and it should load a single diff view style', () => {
- const noLinesDiff = {
- highlighted_diff_lines: [],
- parallel_diff_lines: [],
- };
- const parallelLinesDiff = {
- highlighted_diff_lines: [],
- parallel_diff_lines: ['line'],
- };
- const inlineLinesDiff = {
- highlighted_diff_lines: ['line'],
- parallel_diff_lines: [],
- };
- const fullDiff = {
- highlighted_diff_lines: ['line'],
- parallel_diff_lines: ['line'],
- };
-
- function expectFetchToOccur({
- vueInstance,
- done = () => {},
- batch = false,
- existingFiles = 1,
- } = {}) {
- vueInstance.$nextTick(() => {
- expect(vueInstance.diffFiles.length).toEqual(existingFiles);
-
- if (!batch) {
- expect(vueInstance.fetchDiffFiles).toHaveBeenCalled();
- expect(vueInstance.fetchDiffFilesBatch).not.toHaveBeenCalled();
- } else {
- expect(vueInstance.fetchDiffFiles).not.toHaveBeenCalled();
- expect(vueInstance.fetchDiffFilesBatch).toHaveBeenCalled();
- }
-
- done();
- });
- }
-
- beforeEach(() => {
- wrapper.vm.glFeatures.singleMrDiffView = true;
- });
-
- it('fetches diffs if it has none', done => {
- wrapper.vm.isLatestVersion = () => false;
-
- store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
-
- expectFetchToOccur({ vueInstance: wrapper.vm, batch: false, existingFiles: 0, done });
- });
-
- it('fetches diffs if it has both view styles, but no lines in either', done => {
- wrapper.vm.isLatestVersion = () => false;
-
- store.state.diffs.diffFiles.push(noLinesDiff);
- store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
-
- expectFetchToOccur({ vueInstance: wrapper.vm, done });
- });
-
- it('fetches diffs if it only has inline view style', done => {
- wrapper.vm.isLatestVersion = () => false;
-
- store.state.diffs.diffFiles.push(inlineLinesDiff);
- store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
-
- expectFetchToOccur({ vueInstance: wrapper.vm, done });
- });
-
- it('fetches diffs if it only has parallel view style', done => {
- wrapper.vm.isLatestVersion = () => false;
-
- store.state.diffs.diffFiles.push(parallelLinesDiff);
- store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
-
- expectFetchToOccur({ vueInstance: wrapper.vm, done });
- });
-
- it('fetches batch diffs if it has none', done => {
- wrapper.vm.glFeatures.diffsBatchLoad = true;
-
- store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
-
- expectFetchToOccur({ vueInstance: wrapper.vm, batch: true, existingFiles: 0, done });
- });
-
- it('fetches batch diffs if it has both view styles, but no lines in either', done => {
- wrapper.vm.glFeatures.diffsBatchLoad = true;
-
- store.state.diffs.diffFiles.push(noLinesDiff);
- store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
-
- expectFetchToOccur({ vueInstance: wrapper.vm, batch: true, done });
- });
-
- it('fetches batch diffs if it only has inline view style', done => {
- wrapper.vm.glFeatures.diffsBatchLoad = true;
-
- store.state.diffs.diffFiles.push(inlineLinesDiff);
- store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
-
- expectFetchToOccur({ vueInstance: wrapper.vm, batch: true, done });
- });
-
- it('fetches batch diffs if it only has parallel view style', done => {
- wrapper.vm.glFeatures.diffsBatchLoad = true;
-
- store.state.diffs.diffFiles.push(parallelLinesDiff);
- store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
-
- expectFetchToOccur({ vueInstance: wrapper.vm, batch: true, done });
- });
-
- it('does not fetch diffs if it has already fetched both styles of diff', () => {
- wrapper.vm.glFeatures.diffsBatchLoad = false;
-
- store.state.diffs.diffFiles.push(fullDiff);
- store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
-
- expect(wrapper.vm.diffFiles.length).toEqual(1);
- expect(wrapper.vm.fetchDiffFiles).not.toHaveBeenCalled();
- expect(wrapper.vm.fetchDiffFilesBatch).not.toHaveBeenCalled();
- });
-
- it('does not fetch batch diffs if it has already fetched both styles of diff', () => {
- wrapper.vm.glFeatures.diffsBatchLoad = true;
-
- store.state.diffs.diffFiles.push(fullDiff);
- store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
-
- expect(wrapper.vm.diffFiles.length).toEqual(1);
- expect(wrapper.vm.fetchDiffFiles).not.toHaveBeenCalled();
- expect(wrapper.vm.fetchDiffFilesBatch).not.toHaveBeenCalled();
- });
- });
-
- it('calls fetchDiffFiles if diffsBatchLoad is not enabled', done => {
- expect(wrapper.vm.diffFilesLength).toEqual(0);
- wrapper.vm.glFeatures.diffsBatchLoad = false;
- wrapper.vm.fetchData(false);
-
- expect(wrapper.vm.fetchDiffFiles).toHaveBeenCalled();
- setTimeout(() => {
- expect(wrapper.vm.startRenderDiffsQueue).toHaveBeenCalled();
- expect(wrapper.vm.fetchDiffFilesMeta).not.toHaveBeenCalled();
- expect(wrapper.vm.fetchDiffFilesBatch).not.toHaveBeenCalled();
- expect(wrapper.vm.unwatchDiscussions).toHaveBeenCalled();
- expect(wrapper.vm.diffFilesLength).toEqual(100);
- expect(wrapper.vm.unwatchRetrievingBatches).toHaveBeenCalled();
-
- done();
- });
- });
-
- it('calls batch methods if diffsBatchLoad is enabled, and not latest version', done => {
- expect(wrapper.vm.diffFilesLength).toEqual(0);
- wrapper.vm.glFeatures.diffsBatchLoad = true;
- wrapper.vm.isLatestVersion = () => false;
- wrapper.vm.fetchData(false);
-
- expect(wrapper.vm.fetchDiffFiles).not.toHaveBeenCalled();
- setTimeout(() => {
- expect(wrapper.vm.startRenderDiffsQueue).toHaveBeenCalled();
- expect(wrapper.vm.fetchDiffFilesMeta).toHaveBeenCalled();
- expect(wrapper.vm.fetchDiffFilesBatch).toHaveBeenCalled();
- expect(wrapper.vm.unwatchDiscussions).toHaveBeenCalled();
- expect(wrapper.vm.diffFilesLength).toEqual(100);
- expect(wrapper.vm.unwatchRetrievingBatches).toHaveBeenCalled();
- done();
- });
- });
-
- it('calls batch methods if diffsBatchLoad is enabled, and latest version', done => {
- expect(wrapper.vm.diffFilesLength).toEqual(0);
- wrapper.vm.glFeatures.diffsBatchLoad = true;
- wrapper.vm.fetchData(false);
-
- expect(wrapper.vm.fetchDiffFiles).not.toHaveBeenCalled();
- setTimeout(() => {
- expect(wrapper.vm.startRenderDiffsQueue).toHaveBeenCalled();
- expect(wrapper.vm.fetchDiffFilesMeta).toHaveBeenCalled();
- expect(wrapper.vm.fetchDiffFilesBatch).toHaveBeenCalled();
- expect(wrapper.vm.unwatchDiscussions).toHaveBeenCalled();
- expect(wrapper.vm.diffFilesLength).toEqual(100);
- expect(wrapper.vm.unwatchRetrievingBatches).toHaveBeenCalled();
- done();
- });
- });
- });
-
- it('adds container-limiting classes when showFileTree is false with inline diffs', () => {
- createComponent({}, ({ state }) => {
- state.diffs.showTreeList = false;
- state.diffs.isParallelView = false;
- });
-
- expect(wrapper.contains('.container-limited.limit-container-width')).toBe(true);
- });
-
- it('does not add container-limiting classes when showFileTree is true with inline diffs', () => {
- createComponent({}, ({ state }) => {
- state.diffs.showTreeList = true;
- state.diffs.isParallelView = false;
- });
-
- expect(wrapper.contains('.container-limited.limit-container-width')).toBe(false);
- });
-
- it('does not add container-limiting classes when isFluidLayout', () => {
- createComponent({ isFluidLayout: true }, ({ state }) => {
- state.diffs.isParallelView = false;
- });
-
- expect(wrapper.contains('.container-limited.limit-container-width')).toBe(false);
- });
-
- it('displays loading icon on loading', () => {
- createComponent({}, ({ state }) => {
- state.diffs.isLoading = true;
- });
-
- expect(wrapper.contains(GlLoadingIcon)).toBe(true);
- });
-
- it('displays loading icon on batch loading', () => {
- createComponent({}, ({ state }) => {
- state.diffs.isBatchLoading = true;
- });
-
- expect(wrapper.contains(GlLoadingIcon)).toBe(true);
- });
-
- it('displays diffs container when not loading', () => {
- createComponent();
-
- expect(wrapper.contains(GlLoadingIcon)).toBe(false);
- expect(wrapper.contains('#diffs')).toBe(true);
- });
-
- it('does not show commit info', () => {
- createComponent();
-
- expect(wrapper.contains('.blob-commit-info')).toBe(false);
- });
-
- describe('row highlighting', () => {
- beforeEach(() => {
- window.location.hash = 'ABC_123';
- });
-
- it('sets highlighted row if hash exists in location object', done => {
- createComponent({
- shouldShow: true,
- });
-
- // Component uses $nextTick so we wait until that has finished
- setTimeout(() => {
- expect(store.state.diffs.highlightedRow).toBe('ABC_123');
-
- done();
- });
- });
-
- it('marks current diff file based on currently highlighted row', done => {
- createComponent({
- shouldShow: true,
- });
-
- // Component uses $nextTick so we wait until that has finished
- setTimeout(() => {
- expect(store.state.diffs.currentDiffFileId).toBe('ABC');
-
- done();
- });
- });
- });
-
- describe('resizable', () => {
- afterEach(() => {
- localStorage.removeItem('mr_tree_list_width');
- });
-
- it('sets initial width when no localStorage has been set', () => {
- createComponent();
-
- expect(wrapper.vm.treeWidth).toEqual(320);
- });
-
- it('sets initial width to localStorage size', () => {
- localStorage.setItem('mr_tree_list_width', '200');
-
- createComponent();
-
- expect(wrapper.vm.treeWidth).toEqual(200);
- });
-
- it('sets width of tree list', () => {
- createComponent();
-
- expect(wrapper.find('.js-diff-tree-list').element.style.width).toEqual('320px');
- });
- });
-
- it('marks current diff file based on currently highlighted row', done => {
- createComponent({
- shouldShow: true,
- });
-
- // Component uses $nextTick so we wait until that has finished
- setTimeout(() => {
- expect(store.state.diffs.currentDiffFileId).toBe('ABC');
-
- done();
- });
- });
-
- describe('empty state', () => {
- it('renders empty state when no diff files exist', () => {
- createComponent();
-
- expect(wrapper.contains(NoChanges)).toBe(true);
- });
-
- it('does not render empty state when diff files exist', () => {
- createComponent({}, ({ state }) => {
- state.diffs.diffFiles.push({
- id: 1,
- });
- });
-
- expect(wrapper.contains(NoChanges)).toBe(false);
- expect(wrapper.findAll(DiffFile).length).toBe(1);
- });
-
- it('does not render empty state when versions match', () => {
- createComponent({}, ({ state }) => {
- state.diffs.startVersion = mergeRequestDiff;
- state.diffs.mergeRequestDiff = mergeRequestDiff;
- });
-
- expect(wrapper.contains(NoChanges)).toBe(false);
- });
- });
-
- describe('keyboard shortcut navigation', () => {
- const mappings = {
- '[': -1,
- k: -1,
- ']': +1,
- j: +1,
- };
- let spy;
-
- describe('visible app', () => {
- beforeEach(() => {
- spy = jasmine.createSpy('spy');
-
- createComponent({
- shouldShow: true,
- });
- wrapper.setMethods({
- jumpToFile: spy,
- });
- });
-
- it('calls `jumpToFile()` with correct parameter whenever pre-defined key is pressed', done => {
- wrapper.vm
- .$nextTick()
- .then(() => {
- Object.keys(mappings).forEach(function(key) {
- Mousetrap.trigger(key);
-
- expect(spy.calls.mostRecent().args).toEqual([mappings[key]]);
- });
-
- expect(spy.calls.count()).toEqual(Object.keys(mappings).length);
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('does not call `jumpToFile()` when unknown key is pressed', done => {
- wrapper.vm
- .$nextTick()
- .then(() => {
- Mousetrap.trigger('d');
-
- expect(spy).not.toHaveBeenCalled();
- })
- .then(done)
- .catch(done.fail);
- });
- });
-
- describe('hidden app', () => {
- beforeEach(() => {
- spy = jasmine.createSpy('spy');
-
- createComponent({
- shouldShow: false,
- });
- wrapper.setMethods({
- jumpToFile: spy,
- });
- });
-
- it('does not call `jumpToFile()` when the application is hidden', done => {
- wrapper.vm
- .$nextTick()
- .then(() => {
- Object.keys(mappings).forEach(function(key) {
- Mousetrap.trigger(key);
-
- expect(spy).not.toHaveBeenCalled();
- });
- })
- .then(done)
- .catch(done.fail);
- });
- });
- });
-
- describe('jumpToFile', () => {
- let spy;
-
- beforeEach(() => {
- spy = jasmine.createSpy();
-
- createComponent({}, () => {
- store.state.diffs.diffFiles = [
- { file_hash: '111', file_path: '111.js' },
- { file_hash: '222', file_path: '222.js' },
- { file_hash: '333', file_path: '333.js' },
- ];
- });
-
- wrapper.setMethods({
- scrollToFile: spy,
- });
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('jumps to next and previous files in the list', done => {
- wrapper.vm
- .$nextTick()
- .then(() => {
- wrapper.vm.jumpToFile(+1);
-
- expect(spy.calls.mostRecent().args).toEqual(['222.js']);
- store.state.diffs.currentDiffFileId = '222';
- wrapper.vm.jumpToFile(+1);
-
- expect(spy.calls.mostRecent().args).toEqual(['333.js']);
- store.state.diffs.currentDiffFileId = '333';
- wrapper.vm.jumpToFile(-1);
-
- expect(spy.calls.mostRecent().args).toEqual(['222.js']);
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('does not jump to the next file from the last one', done => {
- wrapper.vm
- .$nextTick()
- .then(() => {
- store.state.diffs.currentDiffFileId = '333';
-
- expect(wrapper.vm.currentDiffIndex).toEqual(2);
-
- wrapper.vm.jumpToFile(+1);
-
- expect(wrapper.vm.currentDiffIndex).toEqual(2);
- expect(spy).not.toHaveBeenCalled();
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('does not jump to the previous file from the first one', done => {
- wrapper.vm
- .$nextTick()
- .then(() => {
- expect(wrapper.vm.currentDiffIndex).toEqual(0);
-
- wrapper.vm.jumpToFile(-1);
-
- expect(wrapper.vm.currentDiffIndex).toEqual(0);
- expect(spy).not.toHaveBeenCalled();
- })
- .then(done)
- .catch(done.fail);
- });
- });
-
- describe('diffs', () => {
- it('should render compare versions component', () => {
- createComponent({}, ({ state }) => {
- state.diffs.mergeRequestDiffs = diffsMockData;
- state.diffs.targetBranchName = 'target-branch';
- state.diffs.mergeRequestDiff = mergeRequestDiff;
- });
-
- expect(wrapper.contains(CompareVersions)).toBe(true);
- expect(wrapper.find(CompareVersions).props()).toEqual(
- jasmine.objectContaining({
- targetBranch: {
- branchName: 'target-branch',
- versionIndex: -1,
- path: '',
- },
- mergeRequestDiffs: diffsMockData,
- mergeRequestDiff,
- }),
- );
- });
-
- it('should render hidden files warning if render overflow warning is present', () => {
- createComponent({}, ({ state }) => {
- state.diffs.renderOverflowWarning = true;
- state.diffs.realSize = '5';
- state.diffs.plainDiffPath = 'plain diff path';
- state.diffs.emailPatchPath = 'email patch path';
- state.diffs.size = 1;
- });
-
- expect(wrapper.contains(HiddenFilesWarning)).toBe(true);
- expect(wrapper.find(HiddenFilesWarning).props()).toEqual(
- jasmine.objectContaining({
- total: '5',
- plainDiffPath: 'plain diff path',
- emailPatchPath: 'email patch path',
- visible: 1,
- }),
- );
- });
-
- it('should display commit widget if store has a commit', () => {
- createComponent({}, () => {
- store.state.diffs.commit = {
- author: 'John Doe',
- };
- });
-
- expect(wrapper.contains(CommitWidget)).toBe(true);
- });
-
- it('should display diff file if there are diff files', () => {
- createComponent({}, ({ state }) => {
- state.diffs.diffFiles.push({ sha: '123' });
- });
-
- expect(wrapper.contains(DiffFile)).toBe(true);
- });
-
- it('should render tree list', () => {
- createComponent();
-
- expect(wrapper.find(TreeList).exists()).toBe(true);
- });
- });
-
- describe('hideTreeListIfJustOneFile', () => {
- let toggleShowTreeList;
-
- beforeEach(() => {
- toggleShowTreeList = jasmine.createSpy('toggleShowTreeList');
- });
-
- afterEach(() => {
- localStorage.removeItem('mr_tree_show');
- });
-
- it('calls toggleShowTreeList when only 1 file', () => {
- createComponent({}, ({ state }) => {
- state.diffs.diffFiles.push({ sha: '123' });
- });
-
- wrapper.setMethods({
- toggleShowTreeList,
- });
-
- wrapper.vm.hideTreeListIfJustOneFile();
-
- expect(toggleShowTreeList).toHaveBeenCalledWith(false);
- });
-
- it('does not call toggleShowTreeList when more than 1 file', () => {
- createComponent({}, ({ state }) => {
- state.diffs.diffFiles.push({ sha: '123' });
- state.diffs.diffFiles.push({ sha: '124' });
- });
-
- wrapper.setMethods({
- toggleShowTreeList,
- });
-
- wrapper.vm.hideTreeListIfJustOneFile();
-
- expect(toggleShowTreeList).not.toHaveBeenCalled();
- });
-
- it('does not call toggleShowTreeList when localStorage is set', () => {
- localStorage.setItem('mr_tree_show', 'true');
-
- createComponent({}, ({ state }) => {
- state.diffs.diffFiles.push({ sha: '123' });
- });
-
- wrapper.setMethods({
- toggleShowTreeList,
- });
-
- wrapper.vm.hideTreeListIfJustOneFile();
-
- expect(toggleShowTreeList).not.toHaveBeenCalled();
- });
- });
-});
diff --git a/spec/javascripts/diffs/components/commit_item_spec.js b/spec/javascripts/diffs/components/commit_item_spec.js
deleted file mode 100644
index dc3fb16eb40..00000000000
--- a/spec/javascripts/diffs/components/commit_item_spec.js
+++ /dev/null
@@ -1,166 +0,0 @@
-import Vue from 'vue';
-import { TEST_HOST } from 'spec/test_constants';
-import mountComponent from 'spec/helpers/vue_mount_component_helper';
-import { trimText } from 'spec/helpers/text_helper';
-import { getTimeago } from '~/lib/utils/datetime_utility';
-import CommitItem from '~/diffs/components/commit_item.vue';
-import getDiffWithCommit from '../mock_data/diff_with_commit';
-
-const TEST_AUTHOR_NAME = 'test';
-const TEST_AUTHOR_EMAIL = 'test+test@gitlab.com';
-const TEST_AUTHOR_GRAVATAR = `${TEST_HOST}/avatar/test?s=40`;
-const TEST_SIGNATURE_HTML = '<a>Legit commit</a>';
-const TEST_PIPELINE_STATUS_PATH = `${TEST_HOST}/pipeline/status`;
-
-const getTitleElement = vm => vm.$el.querySelector('.commit-row-message.item-title');
-const getDescElement = vm => vm.$el.querySelector('pre.commit-row-description');
-const getDescExpandElement = vm =>
- vm.$el.querySelector('.commit-content .text-expander.js-toggle-button');
-const getShaElement = vm => vm.$el.querySelector('.commit-sha-group');
-const getAvatarElement = vm => vm.$el.querySelector('.user-avatar-link');
-const getCommitterElement = vm => vm.$el.querySelector('.committer');
-const getCommitActionsElement = vm => vm.$el.querySelector('.commit-actions');
-
-describe('diffs/components/commit_item', () => {
- const Component = Vue.extend(CommitItem);
- const timeago = getTimeago();
- const { commit } = getDiffWithCommit();
-
- let vm;
-
- beforeEach(() => {
- vm = mountComponent(Component, {
- commit: getDiffWithCommit().commit,
- });
- });
-
- it('renders commit title', () => {
- const titleElement = getTitleElement(vm);
-
- expect(titleElement).toHaveAttr('href', commit.commit_url);
- expect(titleElement).toHaveText(commit.title_html);
- });
-
- it('renders commit description', () => {
- const descElement = getDescElement(vm);
- const descExpandElement = getDescExpandElement(vm);
-
- const expected = commit.description_html.replace(/&#x000A;/g, '');
-
- expect(trimText(descElement.innerHTML)).toEqual(trimText(expected));
- expect(descExpandElement).not.toBeNull();
- });
-
- it('renders commit sha', () => {
- const shaElement = getShaElement(vm);
- const labelElement = shaElement.querySelector('.label');
- const buttonElement = shaElement.querySelector('button');
-
- expect(labelElement.textContent).toEqual(commit.short_id);
- expect(buttonElement).toHaveData('clipboard-text', commit.id);
- });
-
- it('renders author avatar', () => {
- const avatarElement = getAvatarElement(vm);
- const imgElement = avatarElement.querySelector('img');
-
- expect(avatarElement).toHaveAttr('href', commit.author.web_url);
- expect(imgElement).toHaveClass('s40');
- expect(imgElement).toHaveAttr('alt', commit.author.name);
- expect(imgElement).toHaveAttr('src', commit.author.avatar_url);
- });
-
- it('renders committer text', () => {
- const committerElement = getCommitterElement(vm);
- const nameElement = committerElement.querySelector('a');
-
- const expectTimeText = timeago.format(commit.authored_date);
- const expectedText = `${commit.author.name} authored ${expectTimeText}`;
-
- expect(trimText(committerElement.textContent)).toEqual(expectedText);
- expect(nameElement).toHaveAttr('href', commit.author.web_url);
- expect(nameElement).toHaveText(commit.author.name);
- expect(nameElement).toHaveClass('js-user-link');
- expect(nameElement.dataset.userId).toEqual(commit.author.id.toString());
- });
-
- describe('without commit description', () => {
- beforeEach(done => {
- vm.commit.description_html = '';
-
- vm.$nextTick()
- .then(done)
- .catch(done.fail);
- });
-
- it('hides description', () => {
- const descElement = getDescElement(vm);
- const descExpandElement = getDescExpandElement(vm);
-
- expect(descElement).toBeNull();
- expect(descExpandElement).toBeNull();
- });
- });
-
- describe('with no matching user', () => {
- beforeEach(done => {
- vm.commit.author = null;
- vm.commit.author_email = TEST_AUTHOR_EMAIL;
- vm.commit.author_name = TEST_AUTHOR_NAME;
- vm.commit.author_gravatar_url = TEST_AUTHOR_GRAVATAR;
-
- vm.$nextTick()
- .then(done)
- .catch(done.fail);
- });
-
- it('renders author avatar', () => {
- const avatarElement = getAvatarElement(vm);
- const imgElement = avatarElement.querySelector('img');
-
- expect(avatarElement).toHaveAttr('href', `mailto:${TEST_AUTHOR_EMAIL}`);
- expect(imgElement).toHaveAttr('alt', TEST_AUTHOR_NAME);
- expect(imgElement).toHaveAttr('src', TEST_AUTHOR_GRAVATAR);
- });
-
- it('renders committer text', () => {
- const committerElement = getCommitterElement(vm);
- const nameElement = committerElement.querySelector('a');
-
- expect(nameElement).toHaveAttr('href', `mailto:${TEST_AUTHOR_EMAIL}`);
- expect(nameElement).toHaveText(TEST_AUTHOR_NAME);
- });
- });
-
- describe('with signature', () => {
- beforeEach(done => {
- vm.commit.signature_html = TEST_SIGNATURE_HTML;
-
- vm.$nextTick()
- .then(done)
- .catch(done.fail);
- });
-
- it('renders signature html', () => {
- const actionsElement = getCommitActionsElement(vm);
-
- expect(actionsElement).toContainHtml(TEST_SIGNATURE_HTML);
- });
- });
-
- describe('with pipeline status', () => {
- beforeEach(done => {
- vm.commit.pipeline_status_path = TEST_PIPELINE_STATUS_PATH;
-
- vm.$nextTick()
- .then(done)
- .catch(done.fail);
- });
-
- it('renders pipeline status', () => {
- const actionsElement = getCommitActionsElement(vm);
-
- expect(actionsElement).toContainElement('.ci-status-link');
- });
- });
-});
diff --git a/spec/javascripts/diffs/components/compare_versions_dropdown_spec.js b/spec/javascripts/diffs/components/compare_versions_dropdown_spec.js
deleted file mode 100644
index e0686901483..00000000000
--- a/spec/javascripts/diffs/components/compare_versions_dropdown_spec.js
+++ /dev/null
@@ -1,160 +0,0 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
-import CompareVersionsDropdown from '~/diffs/components/compare_versions_dropdown.vue';
-import diffsMockData from '../mock_data/merge_request_diffs';
-import TimeAgo from '~/vue_shared/components/time_ago_tooltip.vue';
-
-const localVue = createLocalVue();
-const targetBranch = { branchName: 'tmp-wine-dev', versionIndex: -1 };
-const startVersion = { version_index: 4 };
-const mergeRequestVersion = {
- version_path: '123',
-};
-const baseVersionPath = '/gnuwget/wget2/-/merge_requests/6/diffs?diff_id=37';
-
-describe('CompareVersionsDropdown', () => {
- let wrapper;
-
- const findSelectedVersion = () => wrapper.find('.dropdown-menu-toggle');
- const findVersionsListElements = () => wrapper.findAll('li');
- const findLinkElement = index =>
- findVersionsListElements()
- .at(index)
- .find('a');
- const findLastLink = () => findLinkElement(findVersionsListElements().length - 1);
-
- const createComponent = (props = {}) => {
- wrapper = shallowMount(localVue.extend(CompareVersionsDropdown), {
- localVue,
- propsData: { ...props },
- });
- };
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- describe('selected version name', () => {
- it('shows latest version when latest is selected', () => {
- createComponent({
- mergeRequestVersion,
- startVersion,
- otherVersions: diffsMockData,
- });
-
- expect(findSelectedVersion().text()).toBe('latest version');
- });
-
- it('shows target branch name for base branch', () => {
- createComponent({
- targetBranch,
- });
-
- expect(findSelectedVersion().text()).toBe('tmp-wine-dev');
- });
-
- it('shows correct version for non-base and non-latest branches', () => {
- createComponent({
- startVersion,
- targetBranch,
- });
-
- expect(findSelectedVersion().text()).toBe(`version ${startVersion.version_index}`);
- });
- });
-
- describe('target versions list', () => {
- it('should have the same length as otherVersions if merge request version is present', () => {
- createComponent({
- mergeRequestVersion,
- otherVersions: diffsMockData,
- });
-
- expect(findVersionsListElements().length).toEqual(diffsMockData.length);
- });
-
- it('should have the length of otherVersions plus 1 if no merge request version is present', () => {
- createComponent({
- targetBranch,
- otherVersions: diffsMockData,
- });
-
- expect(findVersionsListElements().length).toEqual(diffsMockData.length + 1);
- });
-
- it('should have base branch link as active on base branch', () => {
- createComponent({
- targetBranch,
- otherVersions: diffsMockData,
- });
-
- expect(findLastLink().classes()).toContain('is-active');
- });
-
- it('should have correct branch link as active if start version present', () => {
- createComponent({
- targetBranch,
- startVersion,
- otherVersions: diffsMockData,
- });
-
- expect(findLinkElement(0).classes()).toContain('is-active');
- });
-
- it('should render a correct base version link', () => {
- createComponent({
- baseVersionPath,
- otherVersions: diffsMockData.slice(1),
- targetBranch,
- });
-
- expect(findLastLink().attributes('href')).toEqual(baseVersionPath);
- expect(findLastLink().text()).toContain('(base)');
- });
-
- it('should not render the commits count if showCommitCount is not passed', () => {
- createComponent({
- otherVersions: diffsMockData,
- targetBranch,
- });
-
- const commitsCount = diffsMockData[0].commits_count;
-
- expect(findLinkElement(0).text()).not.toContain(`${commitsCount} commit`);
- });
-
- it('should render the correct commits count if showCommitCount is passed', () => {
- createComponent({
- otherVersions: diffsMockData,
- targetBranch,
- showCommitCount: true,
- });
-
- const commitsCount = diffsMockData[0].commits_count;
-
- expect(findLinkElement(0).text()).toContain(`${commitsCount} commit`);
- });
-
- it('should render correct commit sha', () => {
- createComponent({
- otherVersions: diffsMockData,
- targetBranch,
- });
-
- const commitShaElement = findLinkElement(0).find('.commit-sha');
-
- expect(commitShaElement.text()).toBe(diffsMockData[0].short_commit_sha);
- });
-
- it('should render correct time-ago', () => {
- createComponent({
- otherVersions: diffsMockData,
- targetBranch,
- });
-
- const timeAgoElement = findLinkElement(0).find(TimeAgo);
-
- expect(timeAgoElement.exists()).toBe(true);
- expect(timeAgoElement.props('time')).toBe(diffsMockData[0].created_at);
- });
- });
-});
diff --git a/spec/javascripts/diffs/components/diff_expansion_cell_spec.js b/spec/javascripts/diffs/components/diff_expansion_cell_spec.js
index e8ff6778512..9a5048d9332 100644
--- a/spec/javascripts/diffs/components/diff_expansion_cell_spec.js
+++ b/spec/javascripts/diffs/components/diff_expansion_cell_spec.js
@@ -1,64 +1,229 @@
import Vue from 'vue';
+import { cloneDeep } from 'lodash';
import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
import { createStore } from '~/mr_notes/stores';
import DiffExpansionCell from '~/diffs/components/diff_expansion_cell.vue';
+import { getPreviousLineIndex } from '~/diffs/store/utils';
+import { INLINE_DIFF_VIEW_TYPE, PARALLEL_DIFF_VIEW_TYPE } from '~/diffs/constants';
import diffFileMockData from '../mock_data/diff_file';
const EXPAND_UP_CLASS = '.js-unfold';
const EXPAND_DOWN_CLASS = '.js-unfold-down';
const EXPAND_ALL_CLASS = '.js-unfold-all';
+const LINE_TO_USE = 5;
+const lineSources = {
+ [INLINE_DIFF_VIEW_TYPE]: 'highlighted_diff_lines',
+ [PARALLEL_DIFF_VIEW_TYPE]: 'parallel_diff_lines',
+};
+const lineHandlers = {
+ [INLINE_DIFF_VIEW_TYPE]: line => line,
+ [PARALLEL_DIFF_VIEW_TYPE]: line => line.right || line.left,
+};
+
+function makeLoadMoreLinesPayload({
+ sinceLine,
+ toLine,
+ oldLineNumber,
+ diffViewType,
+ fileHash,
+ nextLineNumbers = {},
+ unfold = false,
+ bottom = false,
+ isExpandDown = false,
+}) {
+ return {
+ endpoint: 'contextLinesPath',
+ params: {
+ since: sinceLine,
+ to: toLine,
+ offset: toLine + 1 - oldLineNumber,
+ view: diffViewType,
+ unfold,
+ bottom,
+ },
+ lineNumbers: {
+ oldLineNumber,
+ newLineNumber: toLine + 1,
+ },
+ nextLineNumbers,
+ fileHash,
+ isExpandDown,
+ };
+}
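+// Editor's note (illustrative comment, not part of the original patch): this helper builds
+// the payload the specs expect 'diffs/loadMoreLines' to be dispatched with. `offset` is
+// derived as toLine + 1 - oldLineNumber, and `newLineNumber` always points one line past
+// `toLine`. For example, sinceLine: 1, toLine: 20, oldLineNumber: 0 yields
+// { since: 1, to: 20, offset: 21 } with newLineNumber: 21.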
+
+function getLine(file, type, index) {
+ const source = lineSources[type];
+ const handler = lineHandlers[type];
+
+ return handler(file[source][index]);
+}
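+
+// Editor's note (illustrative comment, not part of the original patch): getLine resolves a
+// row per view type using the lineSources/lineHandlers maps above. For the inline view it
+// returns highlighted_diff_lines[index] directly; for the parallel view it reads
+// parallel_diff_lines[index] and prefers the right side, falling back to the left.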
describe('DiffExpansionCell', () => {
- const matchLine = diffFileMockData.highlighted_diff_lines[5];
+ let mockFile;
+ let mockLine;
+ let store;
+ let vm;
+
+ beforeEach(() => {
+ mockFile = cloneDeep(diffFileMockData);
+ mockLine = getLine(mockFile, INLINE_DIFF_VIEW_TYPE, LINE_TO_USE);
+ store = createStore();
+ store.state.diffs.diffFiles = [mockFile];
+ spyOn(store, 'dispatch').and.returnValue(Promise.resolve());
+ });
const createComponent = (options = {}) => {
const cmp = Vue.extend(DiffExpansionCell);
const defaults = {
- fileHash: diffFileMockData.file_hash,
+ fileHash: mockFile.file_hash,
contextLinesPath: 'contextLinesPath',
- line: matchLine,
+ line: mockLine,
isTop: false,
isBottom: false,
};
const props = Object.assign({}, defaults, options);
- return createComponentWithStore(cmp, createStore(), props).$mount();
+ vm = createComponentWithStore(cmp, store, props).$mount();
};
+ const findExpandUp = () => vm.$el.querySelector(EXPAND_UP_CLASS);
+ const findExpandDown = () => vm.$el.querySelector(EXPAND_DOWN_CLASS);
+ const findExpandAll = () => vm.$el.querySelector(EXPAND_ALL_CLASS);
+
describe('top row', () => {
it('should have "expand up" and "show all" option', () => {
- const vm = createComponent({
+ createComponent({
isTop: true,
});
- const el = vm.$el;
- expect(el.querySelector(EXPAND_UP_CLASS)).not.toBe(null);
- expect(el.querySelector(EXPAND_DOWN_CLASS)).toBe(null);
- expect(el.querySelector(EXPAND_ALL_CLASS)).not.toBe(null);
+ expect(findExpandUp()).not.toBe(null);
+ expect(findExpandDown()).toBe(null);
+ expect(findExpandAll()).not.toBe(null);
});
});
describe('middle row', () => {
it('should have "expand down", "show all", "expand up" option', () => {
- const vm = createComponent();
- const el = vm.$el;
+ createComponent();
- expect(el.querySelector(EXPAND_UP_CLASS)).not.toBe(null);
- expect(el.querySelector(EXPAND_DOWN_CLASS)).not.toBe(null);
- expect(el.querySelector(EXPAND_ALL_CLASS)).not.toBe(null);
+ expect(findExpandUp()).not.toBe(null);
+ expect(findExpandDown()).not.toBe(null);
+ expect(findExpandAll()).not.toBe(null);
});
});
describe('bottom row', () => {
it('should have "expand down" and "show all" option', () => {
- const vm = createComponent({
+ createComponent({
isBottom: true,
});
- const el = vm.$el;
- expect(el.querySelector(EXPAND_UP_CLASS)).toBe(null);
- expect(el.querySelector(EXPAND_DOWN_CLASS)).not.toBe(null);
- expect(el.querySelector(EXPAND_ALL_CLASS)).not.toBe(null);
+ expect(findExpandUp()).toBe(null);
+ expect(findExpandDown()).not.toBe(null);
+ expect(findExpandAll()).not.toBe(null);
+ });
+ });
+
+ describe('any row', () => {
+ [
+ { diffViewType: INLINE_DIFF_VIEW_TYPE, file: { parallel_diff_lines: [] } },
+ { diffViewType: PARALLEL_DIFF_VIEW_TYPE, file: { highlighted_diff_lines: [] } },
+ ].forEach(({ diffViewType, file }) => {
+ describe(`with diffViewType (${diffViewType})`, () => {
+ beforeEach(() => {
+ mockLine = getLine(mockFile, diffViewType, LINE_TO_USE);
+ store.state.diffs.diffFiles = [{ ...mockFile, ...file }];
+ store.state.diffs.diffViewType = diffViewType;
+ });
+
+ it('does not initially dispatch anything', () => {
+ expect(store.dispatch).not.toHaveBeenCalled();
+ });
+
+ it('on expand all clicked, dispatch loadMoreLines', () => {
+ const oldLineNumber = mockLine.meta_data.old_pos;
+ const newLineNumber = mockLine.meta_data.new_pos;
+ const previousIndex = getPreviousLineIndex(diffViewType, mockFile, {
+ oldLineNumber,
+ newLineNumber,
+ });
+
+ createComponent();
+
+ findExpandAll().click();
+
+ expect(store.dispatch).toHaveBeenCalledWith(
+ 'diffs/loadMoreLines',
+ makeLoadMoreLinesPayload({
+ fileHash: mockFile.file_hash,
+ toLine: newLineNumber - 1,
+ sinceLine: previousIndex,
+ oldLineNumber,
+ diffViewType,
+ }),
+ );
+ });
+
+ it('on expand up clicked, dispatch loadMoreLines', () => {
+ mockLine.meta_data.old_pos = 200;
+ mockLine.meta_data.new_pos = 200;
+
+ const oldLineNumber = mockLine.meta_data.old_pos;
+ const newLineNumber = mockLine.meta_data.new_pos;
+
+ createComponent();
+
+ findExpandUp().click();
+
+ expect(store.dispatch).toHaveBeenCalledWith(
+ 'diffs/loadMoreLines',
+ makeLoadMoreLinesPayload({
+ fileHash: mockFile.file_hash,
+ toLine: newLineNumber - 1,
+ sinceLine: 179,
+ oldLineNumber,
+ diffViewType,
+ unfold: true,
+ }),
+ );
+ });
+
+ it('on expand down clicked, dispatch loadMoreLines', () => {
+ mockFile[lineSources[diffViewType]][LINE_TO_USE + 1] = cloneDeep(
+ mockFile[lineSources[diffViewType]][LINE_TO_USE],
+ );
+ const nextLine = getLine(mockFile, diffViewType, LINE_TO_USE + 1);
+
+ nextLine.meta_data.old_pos = 300;
+ nextLine.meta_data.new_pos = 300;
+ mockLine.meta_data.old_pos = 200;
+ mockLine.meta_data.new_pos = 200;
+
+ createComponent();
+
+ findExpandDown().click();
+
+ expect(store.dispatch).toHaveBeenCalledWith('diffs/loadMoreLines', {
+ endpoint: 'contextLinesPath',
+ params: {
+ since: 1,
+ to: 21, // the load amount, plus 1 line
+ offset: 0,
+ view: diffViewType,
+ unfold: true,
+ bottom: true,
+ },
+ lineNumbers: {
+ // when expanding down, these are based on the previous line, 0, in this case
+ oldLineNumber: 0,
+ newLineNumber: 0,
+ },
+ nextLineNumbers: { old_line: 200, new_line: 200 },
+ fileHash: mockFile.file_hash,
+ isExpandDown: true,
+ });
+ });
+ });
});
});
});
diff --git a/spec/javascripts/diffs/components/diff_file_spec.js b/spec/javascripts/diffs/components/diff_file_spec.js
index eab4f4fb17f..e2b64a5418e 100644
--- a/spec/javascripts/diffs/components/diff_file_spec.js
+++ b/spec/javascripts/diffs/components/diff_file_spec.js
@@ -23,6 +23,9 @@ describe('DiffFile', () => {
vm.$destroy();
});
+ const findDiffContent = () => vm.$el.querySelector('.diff-content');
+ const isVisible = el => el.style.display !== 'none';
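+ // Editor's note (illustrative comment, not in the original patch): checking only the inline
+ // display style is enough here, because the updated assertions below expect the collapsed
+ // state to hide .diff-content rather than remove it from the DOM.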
+
describe('template', () => {
it('should render component with file header, file content components', done => {
const el = vm.$el;
@@ -69,13 +72,13 @@ describe('DiffFile', () => {
describe('collapsed', () => {
it('should not have file content', done => {
- expect(vm.$el.querySelectorAll('.diff-content').length).toEqual(1);
+ expect(isVisible(findDiffContent())).toBe(true);
expect(vm.isCollapsed).toEqual(false);
vm.isCollapsed = true;
vm.file.renderIt = true;
vm.$nextTick(() => {
- expect(vm.$el.querySelectorAll('.diff-content').length).toEqual(0);
+ expect(isVisible(findDiffContent())).toBe(false);
done();
});
diff --git a/spec/javascripts/diffs/components/inline_diff_table_row_spec.js b/spec/javascripts/diffs/components/inline_diff_table_row_spec.js
index 67443e9aecc..392893eb695 100644
--- a/spec/javascripts/diffs/components/inline_diff_table_row_spec.js
+++ b/spec/javascripts/diffs/components/inline_diff_table_row_spec.js
@@ -12,6 +12,7 @@ describe('InlineDiffTableRow', () => {
vm = createComponentWithStore(Vue.extend(InlineDiffTableRow), createStore(), {
line: thisLine,
fileHash: diffFileMockData.file_hash,
+ filePath: diffFileMockData.file_path,
contextLinesPath: 'contextLinesPath',
isHighlighted: false,
}).$mount();
@@ -39,4 +40,64 @@ describe('InlineDiffTableRow', () => {
.then(done)
.catch(done.fail);
});
+
+ describe('sets coverage title and class', () => {
+ it('for lines with coverage', done => {
+ vm.$nextTick()
+ .then(() => {
+ const name = diffFileMockData.file_path;
+ const line = thisLine.new_line;
+
+ vm.$store.state.diffs.coverageFiles = { files: { [name]: { [line]: 5 } } };
+
+ return vm.$nextTick();
+ })
+ .then(() => {
+ const coverage = vm.$el.querySelector('.line-coverage');
+
+ expect(coverage.title).toContain('Test coverage: 5 hits');
+ expect(coverage.classList).toContain('coverage');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('for lines without coverage', done => {
+ vm.$nextTick()
+ .then(() => {
+ const name = diffFileMockData.file_path;
+ const line = thisLine.new_line;
+
+ vm.$store.state.diffs.coverageFiles = { files: { [name]: { [line]: 0 } } };
+
+ return vm.$nextTick();
+ })
+ .then(() => {
+ const coverage = vm.$el.querySelector('.line-coverage');
+
+ expect(coverage.title).toContain('No test coverage');
+ expect(coverage.classList).toContain('no-coverage');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('for unknown lines', done => {
+ vm.$nextTick()
+ .then(() => {
+ vm.$store.state.diffs.coverageFiles = {};
+
+ return vm.$nextTick();
+ })
+ .then(() => {
+ const coverage = vm.$el.querySelector('.line-coverage');
+
+ expect(coverage.title).not.toContain('Coverage');
+ expect(coverage.classList).not.toContain('coverage');
+ expect(coverage.classList).not.toContain('no-coverage');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
});
diff --git a/spec/javascripts/diffs/components/parallel_diff_table_row_spec.js b/spec/javascripts/diffs/components/parallel_diff_table_row_spec.js
index 32c947bbd8e..4e69382ba03 100644
--- a/spec/javascripts/diffs/components/parallel_diff_table_row_spec.js
+++ b/spec/javascripts/diffs/components/parallel_diff_table_row_spec.js
@@ -14,6 +14,7 @@ describe('ParallelDiffTableRow', () => {
vm = createComponentWithStore(Vue.extend(ParallelDiffTableRow), createStore(), {
line: thisLine,
fileHash: diffFileMockData.file_hash,
+ filePath: diffFileMockData.file_path,
contextLinesPath: 'contextLinesPath',
isHighlighted: false,
}).$mount();
@@ -52,6 +53,7 @@ describe('ParallelDiffTableRow', () => {
vm = createComponentWithStore(Vue.extend(ParallelDiffTableRow), createStore(), {
line: thisLine,
fileHash: diffFileMockData.file_hash,
+ filePath: diffFileMockData.file_path,
contextLinesPath: 'contextLinesPath',
isHighlighted: false,
}).$mount();
@@ -81,5 +83,65 @@ describe('ParallelDiffTableRow', () => {
.then(done)
.catch(done.fail);
});
+
+ describe('sets coverage title and class', () => {
+ it('for lines with coverage', done => {
+ vm.$nextTick()
+ .then(() => {
+ const name = diffFileMockData.file_path;
+ const line = rightLine.new_line;
+
+ vm.$store.state.diffs.coverageFiles = { files: { [name]: { [line]: 5 } } };
+
+ return vm.$nextTick();
+ })
+ .then(() => {
+ const coverage = vm.$el.querySelector('.line-coverage.right-side');
+
+ expect(coverage.title).toContain('Test coverage: 5 hits');
+ expect(coverage.classList).toContain('coverage');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('for lines without coverage', done => {
+ vm.$nextTick()
+ .then(() => {
+ const name = diffFileMockData.file_path;
+ const line = rightLine.new_line;
+
+ vm.$store.state.diffs.coverageFiles = { files: { [name]: { [line]: 0 } } };
+
+ return vm.$nextTick();
+ })
+ .then(() => {
+ const coverage = vm.$el.querySelector('.line-coverage.right-side');
+
+ expect(coverage.title).toContain('No test coverage');
+ expect(coverage.classList).toContain('no-coverage');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('for unknown lines', done => {
+ vm.$nextTick()
+ .then(() => {
+ vm.$store.state.diffs.coverageFiles = {};
+
+ return vm.$nextTick();
+ })
+ .then(() => {
+ const coverage = vm.$el.querySelector('.line-coverage.right-side');
+
+ expect(coverage.title).not.toContain('Coverage');
+ expect(coverage.classList).not.toContain('coverage');
+ expect(coverage.classList).not.toContain('no-coverage');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
});
});
diff --git a/spec/javascripts/diffs/components/tree_list_spec.js b/spec/javascripts/diffs/components/tree_list_spec.js
deleted file mode 100644
index 0a6e433551c..00000000000
--- a/spec/javascripts/diffs/components/tree_list_spec.js
+++ /dev/null
@@ -1,126 +0,0 @@
-import Vue from 'vue';
-import Vuex from 'vuex';
-import { mountComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
-import TreeList from '~/diffs/components/tree_list.vue';
-import createStore from '~/diffs/store/modules';
-
-describe('Diffs tree list component', () => {
- let Component;
- let vm;
-
- beforeAll(() => {
- Component = Vue.extend(TreeList);
- });
-
- beforeEach(() => {
- Vue.use(Vuex);
-
- const store = new Vuex.Store({
- modules: {
- diffs: createStore(),
- },
- });
-
- // Setup initial state
- store.state.diffs.addedLines = 10;
- store.state.diffs.removedLines = 20;
- store.state.diffs.diffFiles.push('test');
-
- localStorage.removeItem('mr_diff_tree_list');
-
- vm = mountComponentWithStore(Component, { store, props: { hideFileStats: false } });
- });
-
- afterEach(() => {
- vm.$destroy();
- });
-
- it('renders empty text', () => {
- expect(vm.$el.textContent).toContain('No files found');
- });
-
- describe('with files', () => {
- beforeEach(done => {
- Object.assign(vm.$store.state.diffs.treeEntries, {
- 'index.js': {
- addedLines: 0,
- changed: true,
- deleted: false,
- fileHash: 'test',
- key: 'index.js',
- name: 'index.js',
- path: 'app/index.js',
- removedLines: 0,
- tempFile: true,
- type: 'blob',
- parentPath: 'app',
- },
- app: {
- key: 'app',
- path: 'app',
- name: 'app',
- type: 'tree',
- tree: [],
- },
- });
- vm.$store.state.diffs.tree = [
- vm.$store.state.diffs.treeEntries['index.js'],
- vm.$store.state.diffs.treeEntries.app,
- ];
-
- vm.$nextTick(done);
- });
-
- it('renders tree', () => {
- expect(vm.$el.querySelectorAll('.file-row').length).toBe(2);
- expect(vm.$el.querySelectorAll('.file-row')[0].textContent).toContain('index.js');
- expect(vm.$el.querySelectorAll('.file-row')[1].textContent).toContain('app');
- });
-
- it('hides file stats', done => {
- vm.hideFileStats = true;
-
- vm.$nextTick(() => {
- expect(vm.$el.querySelector('.file-row-stats')).toBe(null);
-
- done();
- });
- });
-
- it('calls toggleTreeOpen when clicking folder', () => {
- spyOn(vm.$store, 'dispatch').and.stub();
-
- vm.$el.querySelectorAll('.file-row')[1].click();
-
- expect(vm.$store.dispatch).toHaveBeenCalledWith('diffs/toggleTreeOpen', 'app');
- });
-
- it('calls scrollToFile when clicking blob', () => {
- spyOn(vm.$store, 'dispatch').and.stub();
-
- vm.$el.querySelector('.file-row').click();
-
- expect(vm.$store.dispatch).toHaveBeenCalledWith('diffs/scrollToFile', 'app/index.js');
- });
-
- it('renders as file list when renderTreeList is false', done => {
- vm.$store.state.diffs.renderTreeList = false;
-
- vm.$nextTick(() => {
- expect(vm.$el.querySelectorAll('.file-row').length).toBe(1);
-
- done();
- });
- });
-
- it('renders file paths when renderTreeList is false', done => {
- vm.$store.state.diffs.renderTreeList = false;
-
- vm.$nextTick(() => {
- expect(vm.$el.querySelector('.file-row').textContent).toContain('index.js');
-
- done();
- });
- });
- });
-});
diff --git a/spec/javascripts/diffs/create_diffs_store.js b/spec/javascripts/diffs/create_diffs_store.js
index aacde99964c..cfefd4238b8 100644
--- a/spec/javascripts/diffs/create_diffs_store.js
+++ b/spec/javascripts/diffs/create_diffs_store.js
@@ -1,15 +1 @@
-import Vue from 'vue';
-import Vuex from 'vuex';
-import diffsModule from '~/diffs/store/modules';
-import notesModule from '~/notes/stores/modules';
-
-Vue.use(Vuex);
-
-export default function createDiffsStore() {
- return new Vuex.Store({
- modules: {
- diffs: diffsModule(),
- notes: notesModule(),
- },
- });
-}
+export { default } from '../../frontend/diffs/create_diffs_store';
diff --git a/spec/javascripts/diffs/mock_data/diff_discussions.js b/spec/javascripts/diffs/mock_data/diff_discussions.js
index a9b00634104..dc25dd1647a 100644
--- a/spec/javascripts/diffs/mock_data/diff_discussions.js
+++ b/spec/javascripts/diffs/mock_data/diff_discussions.js
@@ -64,7 +64,7 @@ export default {
resolve_path:
'/gitlab-org/gitlab-test/-/merge_requests/20/discussions/6b232e05bea388c6b043ccc243ba505faac04ea8/resolve',
resolve_with_issue_path:
- '/gitlab-org/gitlab-test/issues/new?discussion_to_resolve=6b232e05bea388c6b043ccc243ba505faac04ea8&merge_request_to_resolve_discussions_of=20',
+ '/gitlab-org/gitlab-test/-/issues/new?discussion_to_resolve=6b232e05bea388c6b043ccc243ba505faac04ea8&merge_request_to_resolve_discussions_of=20',
},
{
id: '1753',
@@ -117,7 +117,7 @@ export default {
resolve_path:
'/gitlab-org/gitlab-test/-/merge_requests/20/discussions/6b232e05bea388c6b043ccc243ba505faac04ea8/resolve',
resolve_with_issue_path:
- '/gitlab-org/gitlab-test/issues/new?discussion_to_resolve=6b232e05bea388c6b043ccc243ba505faac04ea8&merge_request_to_resolve_discussions_of=20',
+ '/gitlab-org/gitlab-test/-/issues/new?discussion_to_resolve=6b232e05bea388c6b043ccc243ba505faac04ea8&merge_request_to_resolve_discussions_of=20',
},
{
id: '1754',
@@ -160,7 +160,7 @@ export default {
resolve_path:
'/gitlab-org/gitlab-test/-/merge_requests/20/discussions/6b232e05bea388c6b043ccc243ba505faac04ea8/resolve',
resolve_with_issue_path:
- '/gitlab-org/gitlab-test/issues/new?discussion_to_resolve=6b232e05bea388c6b043ccc243ba505faac04ea8&merge_request_to_resolve_discussions_of=20',
+ '/gitlab-org/gitlab-test/-/issues/new?discussion_to_resolve=6b232e05bea388c6b043ccc243ba505faac04ea8&merge_request_to_resolve_discussions_of=20',
},
{
id: '1755',
@@ -203,7 +203,7 @@ export default {
resolve_path:
'/gitlab-org/gitlab-test/-/merge_requests/20/discussions/6b232e05bea388c6b043ccc243ba505faac04ea8/resolve',
resolve_with_issue_path:
- '/gitlab-org/gitlab-test/issues/new?discussion_to_resolve=6b232e05bea388c6b043ccc243ba505faac04ea8&merge_request_to_resolve_discussions_of=20',
+ '/gitlab-org/gitlab-test/-/issues/new?discussion_to_resolve=6b232e05bea388c6b043ccc243ba505faac04ea8&merge_request_to_resolve_discussions_of=20',
},
{
id: '1756',
@@ -246,7 +246,7 @@ export default {
resolve_path:
'/gitlab-org/gitlab-test/-/merge_requests/20/discussions/6b232e05bea388c6b043ccc243ba505faac04ea8/resolve',
resolve_with_issue_path:
- '/gitlab-org/gitlab-test/issues/new?discussion_to_resolve=6b232e05bea388c6b043ccc243ba505faac04ea8&merge_request_to_resolve_discussions_of=20',
+ '/gitlab-org/gitlab-test/-/issues/new?discussion_to_resolve=6b232e05bea388c6b043ccc243ba505faac04ea8&merge_request_to_resolve_discussions_of=20',
},
],
individual_note: false,
@@ -255,7 +255,7 @@ export default {
resolve_path:
'/gitlab-org/gitlab-test/-/merge_requests/20/discussions/6b232e05bea388c6b043ccc243ba505faac04ea8/resolve',
resolve_with_issue_path:
- '/gitlab-org/gitlab-test/issues/new?discussion_to_resolve=6b232e05bea388c6b043ccc243ba505faac04ea8&merge_request_to_resolve_discussions_of=20',
+ '/gitlab-org/gitlab-test/-/issues/new?discussion_to_resolve=6b232e05bea388c6b043ccc243ba505faac04ea8&merge_request_to_resolve_discussions_of=20',
diff_file: {
submodule: false,
submodule_link: null,
diff --git a/spec/javascripts/diffs/store/actions_spec.js b/spec/javascripts/diffs/store/actions_spec.js
index ff17d8ec158..7363a213847 100644
--- a/spec/javascripts/diffs/store/actions_spec.js
+++ b/spec/javascripts/diffs/store/actions_spec.js
@@ -12,6 +12,7 @@ import actions, {
fetchDiffFiles,
fetchDiffFilesBatch,
fetchDiffFilesMeta,
+ fetchCoverageFiles,
assignDiscussionsToDiff,
removeDiscussionsFromDiff,
startRenderDiffsQueue,
@@ -73,6 +74,7 @@ describe('DiffsStoreActions', () => {
const endpoint = '/diffs/set/endpoint';
const endpointMetadata = '/diffs/set/endpoint/metadata';
const endpointBatch = '/diffs/set/endpoint/batch';
+ const endpointCoverage = '/diffs/set/coverage_reports';
const projectPath = '/root/project';
const dismissEndpoint = '/-/user_callouts';
const showSuggestPopover = false;
@@ -84,6 +86,7 @@ describe('DiffsStoreActions', () => {
endpoint,
endpointBatch,
endpointMetadata,
+ endpointCoverage,
projectPath,
dismissEndpoint,
showSuggestPopover,
@@ -93,6 +96,7 @@ describe('DiffsStoreActions', () => {
endpoint: '',
endpointBatch: '',
endpointMetadata: '',
+ endpointCoverage: '',
projectPath: '',
dismissEndpoint: '',
showSuggestPopover: true,
@@ -105,6 +109,7 @@ describe('DiffsStoreActions', () => {
endpoint,
endpointMetadata,
endpointBatch,
+ endpointCoverage,
projectPath,
dismissEndpoint,
showSuggestPopover,
@@ -318,6 +323,44 @@ describe('DiffsStoreActions', () => {
});
});
+ describe('fetchCoverageFiles', () => {
+ let mock;
+ const endpointCoverage = '/fetch';
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => mock.restore());
+
+ it('should commit SET_COVERAGE_DATA with received response', done => {
+ const data = { files: { 'app.js': { '1': 0, '2': 1 } } };
+
+ mock.onGet(endpointCoverage).reply(200, { data });
+
+ testAction(
+ fetchCoverageFiles,
+ {},
+ { endpointCoverage },
+ [{ type: types.SET_COVERAGE_DATA, payload: { data } }],
+ [],
+ done,
+ );
+ });
+
+ it('should show flash on API error', done => {
+ const flashSpy = spyOnDependency(actions, 'createFlash');
+
+ mock.onGet(endpointCoverage).reply(400);
+
+ testAction(fetchCoverageFiles, {}, { endpointCoverage }, [], [], () => {
+ expect(flashSpy).toHaveBeenCalledTimes(1);
+ expect(flashSpy).toHaveBeenCalledWith(jasmine.stringMatching('Something went wrong'));
+ done();
+ });
+ });
+ });
+
describe('setHighlightedRow', () => {
it('should mark currently selected diff and set lineHash and fileHash of highlightedRow', () => {
testAction(setHighlightedRow, 'ABC_123', {}, [
diff --git a/spec/javascripts/diffs/store/getters_spec.js b/spec/javascripts/diffs/store/getters_spec.js
index 9e628fdd540..ca47f51cb15 100644
--- a/spec/javascripts/diffs/store/getters_spec.js
+++ b/spec/javascripts/diffs/store/getters_spec.js
@@ -282,4 +282,34 @@ describe('Diffs Module Getters', () => {
expect(getters.currentDiffIndex(localState)).toEqual(0);
});
});
+
+ describe('fileLineCoverage', () => {
+ beforeEach(() => {
+ Object.assign(localState.coverageFiles, { files: { 'app.js': { '1': 0, '2': 5 } } });
+ });
+
+ it('returns empty object when no coverage data is available', () => {
+ Object.assign(localState.coverageFiles, {});
+
+ expect(getters.fileLineCoverage(localState)('test.js', 2)).toEqual({});
+ });
+
+ it('returns empty object when unknown filename is passed', () => {
+ expect(getters.fileLineCoverage(localState)('test.js', 2)).toEqual({});
+ });
+
+ it('returns no-coverage info when correct filename and line is passed', () => {
+ expect(getters.fileLineCoverage(localState)('app.js', 1)).toEqual({
+ text: 'No test coverage',
+ class: 'no-coverage',
+ });
+ });
+
+ it('returns coverage info when correct filename and line is passed', () => {
+ expect(getters.fileLineCoverage(localState)('app.js', 2)).toEqual({
+ text: 'Test coverage: 5 hits',
+ class: 'coverage',
+ });
+ });
+ });
});
diff --git a/spec/javascripts/diffs/store/mutations_spec.js b/spec/javascripts/diffs/store/mutations_spec.js
index cb89a89e216..c36aff39aa9 100644
--- a/spec/javascripts/diffs/store/mutations_spec.js
+++ b/spec/javascripts/diffs/store/mutations_spec.js
@@ -123,6 +123,17 @@ describe('DiffsStoreMutations', () => {
});
});
+ describe('SET_COVERAGE_DATA', () => {
+ it('should set coverage data properly', () => {
+ const state = { coverageFiles: {} };
+ const coverage = { 'app.js': { '1': 0, '2': 1 } };
+
+ mutations[types.SET_COVERAGE_DATA](state, coverage);
+
+ expect(state.coverageFiles).toEqual(coverage);
+ });
+ });
+
describe('SET_DIFF_VIEW_TYPE', () => {
it('should set diff view type properly', () => {
const state = {};
@@ -167,7 +178,7 @@ describe('DiffsStoreMutations', () => {
highlighted_diff_lines: [],
parallel_diff_lines: [],
};
- const state = { diffFiles: [diffFile] };
+ const state = { diffFiles: [diffFile], diffViewType: 'viewType' };
const lines = [{ old_line: 1, new_line: 1 }];
const findDiffFileSpy = spyOnDependency(mutations, 'findDiffFile').and.returnValue(diffFile);
@@ -195,6 +206,7 @@ describe('DiffsStoreMutations', () => {
expect(addContextLinesSpy).toHaveBeenCalledWith({
inlineLines: diffFile.highlighted_diff_lines,
parallelLines: diffFile.parallel_diff_lines,
+ diffViewType: 'viewType',
contextLines: options.contextLines,
bottom: options.params.bottom,
lineNumbers: options.lineNumbers,
diff --git a/spec/javascripts/diffs/store/utils_spec.js b/spec/javascripts/diffs/store/utils_spec.js
index 051820cedfa..223c4d7e40b 100644
--- a/spec/javascripts/diffs/store/utils_spec.js
+++ b/spec/javascripts/diffs/store/utils_spec.js
@@ -1,3 +1,4 @@
+import { clone } from 'lodash';
import * as utils from '~/diffs/store/utils';
import {
LINE_POSITION_LEFT,
@@ -8,6 +9,7 @@ import {
NEW_LINE_TYPE,
OLD_LINE_TYPE,
MATCH_LINE_TYPE,
+ INLINE_DIFF_VIEW_TYPE,
PARALLEL_DIFF_VIEW_TYPE,
} from '~/diffs/constants';
import { MERGE_REQUEST_NOTEABLE_TYPE } from '~/notes/constants';
@@ -47,7 +49,38 @@ describe('DiffsStoreUtils', () => {
describe('findIndexInParallelLines', () => {
it('should return correct index for given line numbers', () => {
- expectSet(utils.findIndexInParallelLines, getDiffFileMock().parallel_diff_lines, {});
+ expectSet(utils.findIndexInParallelLines, getDiffFileMock().parallel_diff_lines, []);
+ });
+ });
+ });
+
+ describe('getPreviousLineIndex', () => {
+ [
+ { diffViewType: INLINE_DIFF_VIEW_TYPE, file: { parallel_diff_lines: [] } },
+ { diffViewType: PARALLEL_DIFF_VIEW_TYPE, file: { highlighted_diff_lines: [] } },
+ ].forEach(({ diffViewType, file }) => {
+ describe(`with diffViewType (${diffViewType}) in split diffs`, () => {
+ let diffFile;
+
+ beforeEach(() => {
+ diffFile = { ...clone(diffFileMockData), ...file };
+ });
+
+ it('should return the correct previous line number', () => {
+ const emptyLines =
+ diffViewType === INLINE_DIFF_VIEW_TYPE
+ ? diffFile.parallel_diff_lines
+ : diffFile.highlighted_diff_lines;
+
+ // This expectation asserts that we cannot possibly be using the opposite view type lines in the next expectation
+ expect(emptyLines.length).toBe(0);
+ expect(
+ utils.getPreviousLineIndex(diffViewType, diffFile, {
+ oldLineNumber: 3,
+ newLineNumber: 5,
+ }),
+ ).toBe(4);
+ });
});
});
});
@@ -80,44 +113,59 @@ describe('DiffsStoreUtils', () => {
});
describe('addContextLines', () => {
- it('should add context lines', () => {
- const diffFile = getDiffFileMock();
- const inlineLines = diffFile.highlighted_diff_lines;
- const parallelLines = diffFile.parallel_diff_lines;
- const lineNumbers = { oldLineNumber: 3, newLineNumber: 5 };
- const contextLines = [{ lineNumber: 42, line_code: '123' }];
- const options = { inlineLines, parallelLines, contextLines, lineNumbers };
- const inlineIndex = utils.findIndexInInlineLines(inlineLines, lineNumbers);
- const parallelIndex = utils.findIndexInParallelLines(parallelLines, lineNumbers);
- const normalizedParallelLine = {
- left: options.contextLines[0],
- right: options.contextLines[0],
- line_code: '123',
- };
-
- utils.addContextLines(options);
-
- expect(inlineLines[inlineIndex]).toEqual(contextLines[0]);
- expect(parallelLines[parallelIndex]).toEqual(normalizedParallelLine);
- });
-
- it('should add context lines properly with bottom parameter', () => {
- const diffFile = getDiffFileMock();
- const inlineLines = diffFile.highlighted_diff_lines;
- const parallelLines = diffFile.parallel_diff_lines;
- const lineNumbers = { oldLineNumber: 3, newLineNumber: 5 };
- const contextLines = [{ lineNumber: 42, line_code: '123' }];
- const options = { inlineLines, parallelLines, contextLines, lineNumbers, bottom: true };
- const normalizedParallelLine = {
- left: options.contextLines[0],
- right: options.contextLines[0],
- line_code: '123',
- };
-
- utils.addContextLines(options);
+ [INLINE_DIFF_VIEW_TYPE, PARALLEL_DIFF_VIEW_TYPE].forEach(diffViewType => {
+ it(`should add context lines for ${diffViewType}`, () => {
+ const diffFile = getDiffFileMock();
+ const inlineLines = diffFile.highlighted_diff_lines;
+ const parallelLines = diffFile.parallel_diff_lines;
+ const lineNumbers = { oldLineNumber: 3, newLineNumber: 5 };
+ const contextLines = [{ lineNumber: 42, line_code: '123' }];
+ const options = { inlineLines, parallelLines, contextLines, lineNumbers, diffViewType };
+ const inlineIndex = utils.findIndexInInlineLines(inlineLines, lineNumbers);
+ const parallelIndex = utils.findIndexInParallelLines(parallelLines, lineNumbers);
+ const normalizedParallelLine = {
+ left: options.contextLines[0],
+ right: options.contextLines[0],
+ line_code: '123',
+ };
+
+ utils.addContextLines(options);
+
+ if (diffViewType === INLINE_DIFF_VIEW_TYPE) {
+ expect(inlineLines[inlineIndex]).toEqual(contextLines[0]);
+ } else {
+ expect(parallelLines[parallelIndex]).toEqual(normalizedParallelLine);
+ }
+ });
- expect(inlineLines[inlineLines.length - 1]).toEqual(contextLines[0]);
- expect(parallelLines[parallelLines.length - 1]).toEqual(normalizedParallelLine);
+ it(`should add context lines properly with bottom parameter for ${diffViewType}`, () => {
+ const diffFile = getDiffFileMock();
+ const inlineLines = diffFile.highlighted_diff_lines;
+ const parallelLines = diffFile.parallel_diff_lines;
+ const lineNumbers = { oldLineNumber: 3, newLineNumber: 5 };
+ const contextLines = [{ lineNumber: 42, line_code: '123' }];
+ const options = {
+ inlineLines,
+ parallelLines,
+ contextLines,
+ lineNumbers,
+ bottom: true,
+ diffViewType,
+ };
+ const normalizedParallelLine = {
+ left: options.contextLines[0],
+ right: options.contextLines[0],
+ line_code: '123',
+ };
+
+ utils.addContextLines(options);
+
+ if (diffViewType === INLINE_DIFF_VIEW_TYPE) {
+ expect(inlineLines[inlineLines.length - 1]).toEqual(contextLines[0]);
+ } else {
+ expect(parallelLines[parallelLines.length - 1]).toEqual(normalizedParallelLine);
+ }
+ });
});
});
diff --git a/spec/javascripts/dirty_submit/dirty_submit_form_spec.js b/spec/javascripts/dirty_submit/dirty_submit_form_spec.js
index b1017e0c4f0..2907d038390 100644
--- a/spec/javascripts/dirty_submit/dirty_submit_form_spec.js
+++ b/spec/javascripts/dirty_submit/dirty_submit_form_spec.js
@@ -1,4 +1,4 @@
-import _ from 'underscore';
+import { range as rge } from 'lodash';
import DirtySubmitForm from '~/dirty_submit/dirty_submit_form';
import { getInputValue, setInputValue, createForm } from './helper';
@@ -82,7 +82,7 @@ describe('DirtySubmitForm', () => {
const { form, input } = createForm();
const updateDirtyInputSpy = spyOn(new DirtySubmitForm(form), 'updateDirtyInput');
- _.range(10).forEach(i => {
+ rge(10).forEach(i => {
setInputValue(input, `change ${i}`, false);
});
@@ -93,7 +93,7 @@ describe('DirtySubmitForm', () => {
it('does not throttle updates when rapid changes are made to different form elements', () => {
const form = document.createElement('form');
- const range = _.range(10);
+ const range = rge(10);
range.forEach(i => {
form.innerHTML += `<input type="text" name="input-${i}" class="js-input-${i}"/>`;
});
diff --git a/spec/javascripts/editor/editor_lite_spec.js b/spec/javascripts/editor/editor_lite_spec.js
index 154daccf82d..106264aa13f 100644
--- a/spec/javascripts/editor/editor_lite_spec.js
+++ b/spec/javascripts/editor/editor_lite_spec.js
@@ -1,5 +1,6 @@
import { editor as monacoEditor, Uri } from 'monaco-editor';
import Editor from '~/editor/editor_lite';
+import { DEFAULT_THEME, themes } from '~/ide/lib/themes';
describe('Base editor', () => {
let editorEl;
@@ -108,4 +109,52 @@ describe('Base editor', () => {
expect(editor.model.getLanguageIdentifier().language).toEqual('plaintext');
});
});
+
+ describe('syntax highlighting theme', () => {
+ let themeDefineSpy;
+ let themeSetSpy;
+ let defaultScheme;
+
+ beforeEach(() => {
+ themeDefineSpy = spyOn(monacoEditor, 'defineTheme');
+ themeSetSpy = spyOn(monacoEditor, 'setTheme');
+ defaultScheme = window.gon.user_color_scheme;
+ });
+
+ afterEach(() => {
+ window.gon.user_color_scheme = defaultScheme;
+ });
+
+ it('sets default syntax highlighting theme', () => {
+ const expectedTheme = themes.find(t => t.name === DEFAULT_THEME);
+
+ editor = new Editor();
+
+ expect(themeDefineSpy).toHaveBeenCalledWith(DEFAULT_THEME, expectedTheme.data);
+ expect(themeSetSpy).toHaveBeenCalledWith(DEFAULT_THEME);
+ });
+
+ it('sets correct theme if it is set in users preferences', () => {
+ const expectedTheme = themes.find(t => t.name !== DEFAULT_THEME);
+
+ expect(expectedTheme.name).not.toBe(DEFAULT_THEME);
+
+ window.gon.user_color_scheme = expectedTheme.name;
+ editor = new Editor();
+
+ expect(themeDefineSpy).toHaveBeenCalledWith(expectedTheme.name, expectedTheme.data);
+ expect(themeSetSpy).toHaveBeenCalledWith(expectedTheme.name);
+ });
+
+ it('falls back to default theme if a selected one is not supported yet', () => {
+ const name = 'non-existent-theme';
+ const nonExistentTheme = { name };
+
+ window.gon.user_color_scheme = nonExistentTheme.name;
+ editor = new Editor();
+
+ expect(themeDefineSpy).not.toHaveBeenCalled();
+ expect(themeSetSpy).toHaveBeenCalledWith(DEFAULT_THEME);
+ });
+ });
});
diff --git a/spec/javascripts/environments/environments_app_spec.js b/spec/javascripts/environments/environments_app_spec.js
deleted file mode 100644
index 6c05b609923..00000000000
--- a/spec/javascripts/environments/environments_app_spec.js
+++ /dev/null
@@ -1,279 +0,0 @@
-import Vue from 'vue';
-import MockAdapter from 'axios-mock-adapter';
-import mountComponent from 'spec/helpers/vue_mount_component_helper';
-import axios from '~/lib/utils/axios_utils';
-import environmentsComponent from '~/environments/components/environments_app.vue';
-import { environment, folder } from './mock_data';
-
-describe('Environment', () => {
- const mockData = {
- endpoint: 'environments.json',
- canCreateEnvironment: true,
- canReadEnvironment: true,
- newEnvironmentPath: 'environments/new',
- helpPagePath: 'help',
- canaryDeploymentFeatureId: 'canary_deployment',
- showCanaryDeploymentCallout: true,
- userCalloutsPath: '/callouts',
- lockPromotionSvgPath: '/assets/illustrations/lock-promotion.svg',
- helpCanaryDeploymentsPath: 'help/canary-deployments',
- };
-
- let EnvironmentsComponent;
- let component;
- let mock;
-
- beforeEach(() => {
- mock = new MockAdapter(axios);
-
- EnvironmentsComponent = Vue.extend(environmentsComponent);
- });
-
- afterEach(() => {
- component.$destroy();
- mock.restore();
- });
-
- describe('successful request', () => {
- describe('without environments', () => {
- beforeEach(done => {
- mock.onGet(mockData.endpoint).reply(200, { environments: [] });
-
- component = mountComponent(EnvironmentsComponent, mockData);
-
- setTimeout(() => {
- done();
- }, 0);
- });
-
- it('should render the empty state', () => {
- expect(component.$el.querySelector('.js-new-environment-button').textContent).toContain(
- 'New environment',
- );
-
- expect(component.$el.querySelector('.js-blank-state-title').textContent).toContain(
- "You don't have any environments right now",
- );
- });
-
- describe('when it is possible to enable a review app', () => {
- beforeEach(done => {
- mock
- .onGet(mockData.endpoint)
- .reply(200, { environments: [], review_app: { can_setup_review_app: true } });
-
- component = mountComponent(EnvironmentsComponent, mockData);
-
- setTimeout(() => {
- done();
- }, 0);
- });
-
- it('should render the enable review app button', () => {
- expect(component.$el.querySelector('.js-enable-review-app-button').textContent).toContain(
- 'Enable review app',
- );
- });
- });
- });
-
- describe('with paginated environments', () => {
- beforeEach(done => {
- mock.onGet(mockData.endpoint).reply(
- 200,
- {
- environments: [environment],
- stopped_count: 1,
- available_count: 0,
- },
- {
- 'X-nExt-pAge': '2',
- 'x-page': '1',
- 'X-Per-Page': '1',
- 'X-Prev-Page': '',
- 'X-TOTAL': '37',
- 'X-Total-Pages': '2',
- },
- );
-
- component = mountComponent(EnvironmentsComponent, mockData);
-
- setTimeout(() => {
- done();
- }, 0);
- });
-
- it('should render a table with environments', () => {
- expect(component.$el.querySelectorAll('table')).not.toBeNull();
- expect(component.$el.querySelector('.environment-name').textContent.trim()).toEqual(
- environment.name,
- );
- });
-
- describe('pagination', () => {
- it('should render pagination', () => {
- expect(component.$el.querySelectorAll('.gl-pagination li').length).toEqual(9);
- });
-
- it('should make an API request when page is clicked', done => {
- spyOn(component, 'updateContent');
- setTimeout(() => {
- component.$el.querySelector('.gl-pagination li:nth-child(3) .page-link').click();
-
- expect(component.updateContent).toHaveBeenCalledWith({ scope: 'available', page: '2' });
- done();
- }, 0);
- });
-
- it('should make an API request when using tabs', done => {
- setTimeout(() => {
- spyOn(component, 'updateContent');
- component.$el.querySelector('.js-environments-tab-stopped').click();
-
- expect(component.updateContent).toHaveBeenCalledWith({ scope: 'stopped', page: '1' });
- done();
- }, 0);
- });
- });
- });
- });
-
- describe('unsuccessfull request', () => {
- beforeEach(done => {
- mock.onGet(mockData.endpoint).reply(500, {});
-
- component = mountComponent(EnvironmentsComponent, mockData);
-
- setTimeout(() => {
- done();
- }, 0);
- });
-
- it('should render empty state', () => {
- expect(component.$el.querySelector('.js-blank-state-title').textContent).toContain(
- "You don't have any environments right now",
- );
- });
- });
-
- describe('expandable folders', () => {
- beforeEach(() => {
- mock.onGet(mockData.endpoint).reply(
- 200,
- {
- environments: [folder],
- stopped_count: 0,
- available_count: 1,
- },
- {
- 'X-nExt-pAge': '2',
- 'x-page': '1',
- 'X-Per-Page': '1',
- 'X-Prev-Page': '',
- 'X-TOTAL': '37',
- 'X-Total-Pages': '2',
- },
- );
-
- mock.onGet(environment.folder_path).reply(200, { environments: [environment] });
-
- component = mountComponent(EnvironmentsComponent, mockData);
- });
-
- it('should open a closed folder', done => {
- setTimeout(() => {
- component.$el.querySelector('.folder-name').click();
-
- Vue.nextTick(() => {
- expect(component.$el.querySelector('.folder-icon.ic-chevron-right')).toBe(null);
- done();
- });
- }, 0);
- });
-
- it('should close an opened folder', done => {
- setTimeout(() => {
- // open folder
- component.$el.querySelector('.folder-name').click();
-
- Vue.nextTick(() => {
- // close folder
- component.$el.querySelector('.folder-name').click();
-
- Vue.nextTick(() => {
- expect(component.$el.querySelector('.folder-icon.ic-chevron-down')).toBe(null);
- done();
- });
- });
- }, 0);
- });
-
- it('should show children environments and a button to show all environments', done => {
- setTimeout(() => {
- // open folder
- component.$el.querySelector('.folder-name').click();
-
- Vue.nextTick(() => {
- // wait for next async request
- setTimeout(() => {
- expect(component.$el.querySelectorAll('.js-child-row').length).toEqual(1);
- expect(component.$el.querySelector('.text-center > a.btn').textContent).toContain(
- 'Show all',
- );
- done();
- });
- });
- }, 0);
- });
- });
-
- describe('methods', () => {
- beforeEach(() => {
- mock.onGet(mockData.endpoint).reply(
- 200,
- {
- environments: [],
- stopped_count: 0,
- available_count: 1,
- },
- {},
- );
-
- component = mountComponent(EnvironmentsComponent, mockData);
- spyOn(window.history, 'pushState').and.stub();
- });
-
- describe('updateContent', () => {
- it('should set given parameters', done => {
- component
- .updateContent({ scope: 'stopped', page: '3' })
- .then(() => {
- expect(component.page).toEqual('3');
- expect(component.scope).toEqual('stopped');
- expect(component.requestData.scope).toEqual('stopped');
- expect(component.requestData.page).toEqual('3');
- done();
- })
- .catch(done.fail);
- });
- });
-
- describe('onChangeTab', () => {
- it('should set page to 1', () => {
- spyOn(component, 'updateContent');
- component.onChangeTab('stopped');
-
- expect(component.updateContent).toHaveBeenCalledWith({ scope: 'stopped', page: '1' });
- });
- });
-
- describe('onChangePage', () => {
- it('should update page and keep scope', () => {
- spyOn(component, 'updateContent');
- component.onChangePage(4);
-
- expect(component.updateContent).toHaveBeenCalledWith({ scope: component.scope, page: '4' });
- });
- });
- });
-});
diff --git a/spec/javascripts/environments/mock_data.js b/spec/javascripts/environments/mock_data.js
deleted file mode 100644
index a8be3706b79..00000000000
--- a/spec/javascripts/environments/mock_data.js
+++ /dev/null
@@ -1,66 +0,0 @@
-export const environmentsList = [
- {
- name: 'DEV',
- size: 1,
- id: 7,
- state: 'available',
- external_url: null,
- environment_type: null,
- last_deployment: null,
- has_stop_action: false,
- environment_path: '/root/review-app/environments/7',
- stop_path: '/root/review-app/environments/7/stop',
- created_at: '2017-01-31T10:53:46.894Z',
- updated_at: '2017-01-31T10:53:46.894Z',
- },
- {
- folderName: 'build',
- size: 5,
- id: 12,
- name: 'build/update-README',
- state: 'available',
- external_url: null,
- environment_type: 'build',
- last_deployment: null,
- has_stop_action: false,
- environment_path: '/root/review-app/environments/12',
- stop_path: '/root/review-app/environments/12/stop',
- created_at: '2017-02-01T19:42:18.400Z',
- updated_at: '2017-02-01T19:42:18.400Z',
- },
-];
-
-export const environment = {
- name: 'DEV',
- size: 1,
- latest: {
- id: 7,
- name: 'DEV',
- state: 'available',
- external_url: null,
- environment_type: null,
- last_deployment: null,
- has_stop_action: false,
- environment_path: '/root/review-app/environments/7',
- stop_path: '/root/review-app/environments/7/stop',
- created_at: '2017-01-31T10:53:46.894Z',
- updated_at: '2017-01-31T10:53:46.894Z',
- folder_path: '/root/review-app/environments/7',
- },
-};
-
-export const folder = {
- folderName: 'build',
- size: 5,
- id: 12,
- name: 'build/update-README',
- state: 'available',
- external_url: null,
- environment_type: 'build',
- last_deployment: null,
- has_stop_action: false,
- environment_path: '/root/review-app/environments/12',
- stop_path: '/root/review-app/environments/12/stop',
- created_at: '2017-02-01T19:42:18.400Z',
- updated_at: '2017-02-01T19:42:18.400Z',
-};
diff --git a/spec/javascripts/filtered_search/visual_token_value_spec.js b/spec/javascripts/filtered_search/visual_token_value_spec.js
index a039e280028..4469ade1874 100644
--- a/spec/javascripts/filtered_search/visual_token_value_spec.js
+++ b/spec/javascripts/filtered_search/visual_token_value_spec.js
@@ -1,4 +1,4 @@
-import _ from 'underscore';
+import { escape as esc } from 'lodash';
import VisualTokenValue from '~/filtered_search/visual_token_value';
import AjaxCache from '~/lib/utils/ajax_cache';
import UsersCache from '~/lib/utils/users_cache';
@@ -121,7 +121,7 @@ describe('Filtered Search Visual Tokens', () => {
expect(tokenValueElement.innerText.trim()).toBe(dummyUser.name);
tokenValueElement.querySelector('.avatar').remove();
- expect(tokenValueElement.innerHTML.trim()).toBe(_.escape(dummyUser.name));
+ expect(tokenValueElement.innerHTML.trim()).toBe(esc(dummyUser.name));
})
.then(done)
.catch(done.fail);
diff --git a/spec/javascripts/frequent_items/components/frequent_items_list_item_spec.js b/spec/javascripts/frequent_items/components/frequent_items_list_item_spec.js
deleted file mode 100644
index e3f05e89a2d..00000000000
--- a/spec/javascripts/frequent_items/components/frequent_items_list_item_spec.js
+++ /dev/null
@@ -1,94 +0,0 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
-import { trimText } from 'spec/helpers/text_helper';
-import frequentItemsListItemComponent from '~/frequent_items/components/frequent_items_list_item.vue';
-import { mockProject } from '../mock_data'; // can also use 'mockGroup', but not useful to test here
-
-const localVue = createLocalVue();
-
-describe('FrequentItemsListItemComponent', () => {
- let wrapper;
-
- const createComponent = (props = {}) => {
- wrapper = shallowMount(localVue.extend(frequentItemsListItemComponent), {
- propsData: {
- itemId: mockProject.id,
- itemName: mockProject.name,
- namespace: mockProject.namespace,
- webUrl: mockProject.webUrl,
- avatarUrl: mockProject.avatarUrl,
- ...props,
- },
- localVue,
- });
- };
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- describe('computed', () => {
- describe('hasAvatar', () => {
- it('should return `true` or `false` if whether avatar is present or not', () => {
- createComponent({ avatarUrl: 'path/to/avatar.png' });
-
- expect(wrapper.vm.hasAvatar).toBe(true);
- });
-
- it('should return `false` if avatar is not present', () => {
- createComponent({ avatarUrl: null });
-
- expect(wrapper.vm.hasAvatar).toBe(false);
- });
- });
-
- describe('highlightedItemName', () => {
- it('should enclose part of project name in <b> & </b> which matches with `matcher` prop', () => {
- createComponent({ matcher: 'lab' });
-
- expect(wrapper.find('.js-frequent-items-item-title').html()).toContain(
- '<b>L</b><b>a</b><b>b</b>',
- );
- });
-
- it('should return project name as it is if `matcher` is not available', () => {
- createComponent({ matcher: null });
-
- expect(trimText(wrapper.find('.js-frequent-items-item-title').text())).toBe(
- mockProject.name,
- );
- });
- });
-
- describe('truncatedNamespace', () => {
- it('should truncate project name from namespace string', () => {
- createComponent({ namespace: 'platform / nokia-3310' });
-
- expect(trimText(wrapper.find('.js-frequent-items-item-namespace').text())).toBe('platform');
- });
-
- it('should truncate namespace string from the middle if it includes more than two groups in path', () => {
- createComponent({
- namespace: 'platform / hardware / broadcom / Wifi Group / Mobile Chipset / nokia-3310',
- });
-
- expect(trimText(wrapper.find('.js-frequent-items-item-namespace').text())).toBe(
- 'platform / ... / Mobile Chipset',
- );
- });
- });
- });
-
- describe('template', () => {
- it('should render component element', () => {
- createComponent();
-
- expect(wrapper.classes()).toContain('frequent-items-list-item-container');
- expect(wrapper.findAll('a').length).toBe(1);
- expect(wrapper.findAll('.frequent-items-item-avatar-container').length).toBe(1);
- expect(wrapper.findAll('.frequent-items-item-metadata-container').length).toBe(1);
- expect(wrapper.findAll('.frequent-items-item-title').length).toBe(1);
- expect(wrapper.findAll('.frequent-items-item-namespace').length).toBe(1);
- });
- });
-});
diff --git a/spec/javascripts/frequent_items/components/frequent_items_list_spec.js b/spec/javascripts/frequent_items/components/frequent_items_list_spec.js
deleted file mode 100644
index 3fcd79480cc..00000000000
--- a/spec/javascripts/frequent_items/components/frequent_items_list_spec.js
+++ /dev/null
@@ -1,90 +0,0 @@
-import Vue from 'vue';
-import mountComponent from 'spec/helpers/vue_mount_component_helper';
-import frequentItemsListComponent from '~/frequent_items/components/frequent_items_list.vue';
-import { mockFrequentProjects } from '../mock_data';
-
-const createComponent = (namespace = 'projects') => {
- const Component = Vue.extend(frequentItemsListComponent);
-
- return mountComponent(Component, {
- namespace,
- items: mockFrequentProjects,
- isFetchFailed: false,
- hasSearchQuery: false,
- matcher: 'lab',
- });
-};
-
-describe('FrequentItemsListComponent', () => {
- let vm;
-
- beforeEach(() => {
- vm = createComponent();
- });
-
- afterEach(() => {
- vm.$destroy();
- });
-
- describe('computed', () => {
- describe('isListEmpty', () => {
- it('should return `true` or `false` representing whether if `items` is empty or not with projects', () => {
- vm.items = [];
-
- expect(vm.isListEmpty).toBe(true);
-
- vm.items = mockFrequentProjects;
-
- expect(vm.isListEmpty).toBe(false);
- });
- });
-
- describe('fetched item messages', () => {
- it('should return appropriate empty list message based on value of `localStorageFailed` prop with projects', () => {
- vm.isFetchFailed = true;
-
- expect(vm.listEmptyMessage).toBe('This feature requires browser localStorage support');
-
- vm.isFetchFailed = false;
-
- expect(vm.listEmptyMessage).toBe('Projects you visit often will appear here');
- });
- });
-
- describe('searched item messages', () => {
- it('should return appropriate empty list message based on value of `searchFailed` prop with projects', () => {
- vm.hasSearchQuery = true;
- vm.isFetchFailed = true;
-
- expect(vm.listEmptyMessage).toBe('Something went wrong on our end.');
-
- vm.isFetchFailed = false;
-
- expect(vm.listEmptyMessage).toBe('Sorry, no projects matched your search');
- });
- });
- });
-
- describe('template', () => {
- it('should render component element with list of projects', done => {
- vm.items = mockFrequentProjects;
-
- Vue.nextTick(() => {
- expect(vm.$el.classList.contains('frequent-items-list-container')).toBe(true);
- expect(vm.$el.querySelectorAll('ul.list-unstyled').length).toBe(1);
- expect(vm.$el.querySelectorAll('li.frequent-items-list-item-container').length).toBe(5);
- done();
- });
- });
-
- it('should render component element with empty message', done => {
- vm.items = [];
-
- Vue.nextTick(() => {
- expect(vm.$el.querySelectorAll('li.section-empty').length).toBe(1);
- expect(vm.$el.querySelectorAll('li.frequent-items-list-item-container').length).toBe(0);
- done();
- });
- });
- });
-});
diff --git a/spec/javascripts/groups/mock_data.js b/spec/javascripts/groups/mock_data.js
index 2fdc844f3d9..380dda9f7b1 100644
--- a/spec/javascripts/groups/mock_data.js
+++ b/spec/javascripts/groups/mock_data.js
@@ -14,7 +14,8 @@ export const GROUP_VISIBILITY_TYPE = {
export const PROJECT_VISIBILITY_TYPE = {
public: 'Public - The project can be accessed without any authentication.',
internal: 'Internal - The project can be accessed by any logged in user.',
- private: 'Private - Project access must be granted explicitly to each user.',
+ private:
+ 'Private - Project access must be granted explicitly to each user. If this project is part of a group, access will be granted to members of the group.',
};
export const VISIBILITY_TYPE_ICON = {
diff --git a/spec/javascripts/helpers/tracking_helper.js b/spec/javascripts/helpers/tracking_helper.js
index 68c1bd2dbca..ea322de46f4 100644
--- a/spec/javascripts/helpers/tracking_helper.js
+++ b/spec/javascripts/helpers/tracking_helper.js
@@ -1,25 +1,5 @@
-import Tracking from '~/tracking';
+// No new code should be added to this file. Instead, modify the
+// file this one re-exports from. For more detail about why, see:
+// https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/31349
-export default Tracking;
-
-let document;
-let handlers;
-
-export function mockTracking(category = '_category_', documentOverride, spyMethod) {
- document = documentOverride || window.document;
- window.snowplow = () => {};
- Tracking.bindDocument(category, document);
- return spyMethod ? spyMethod(Tracking, 'event') : null;
-}
-
-export function unmockTracking() {
- window.snowplow = undefined;
- handlers.forEach(event => document.removeEventListener(event.name, event.func));
-}
-
-export function triggerEvent(selectorOrEl, eventName = 'click') {
- const event = new Event(eventName, { bubbles: true });
- const el = typeof selectorOrEl === 'string' ? document.querySelector(selectorOrEl) : selectorOrEl;
-
- el.dispatchEvent(event);
-}
+export * from '../../frontend/helpers/tracking_helper';
diff --git a/spec/javascripts/ide/components/commit_sidebar/form_spec.js b/spec/javascripts/ide/components/commit_sidebar/form_spec.js
index 5cb804938ed..f5d1a9de59c 100644
--- a/spec/javascripts/ide/components/commit_sidebar/form_spec.js
+++ b/spec/javascripts/ide/components/commit_sidebar/form_spec.js
@@ -52,7 +52,7 @@ describe('IDE commit form', () => {
vm.$store.state.stagedFiles.push('test');
vm.$nextTick(() => {
- expect(vm.$el.querySelector('p').textContent).toContain('1 staged and 1 unstaged changes');
+ expect(vm.$el.querySelector('p').textContent).toContain('1 changed file');
done();
});
});
diff --git a/spec/javascripts/ide/components/file_row_extra_spec.js b/spec/javascripts/ide/components/file_row_extra_spec.js
index f498d8251c8..9fd014b50ef 100644
--- a/spec/javascripts/ide/components/file_row_extra_spec.js
+++ b/spec/javascripts/ide/components/file_row_extra_spec.js
@@ -41,30 +41,20 @@ describe('IDE extra file row component', () => {
describe('folderChangesTooltip', () => {
it('returns undefined when changes count is 0', () => {
- expect(vm.folderChangesTooltip).toBe(undefined);
- });
-
- it('returns unstaged changes text', () => {
- changesCount = 1;
- unstagedFilesCount = 1;
-
- expect(vm.folderChangesTooltip).toBe('1 unstaged change');
- });
+ changesCount = 0;
- it('returns staged changes text', () => {
- changesCount = 1;
- stagedFilesCount = 1;
-
- expect(vm.folderChangesTooltip).toBe('1 staged change');
+ expect(vm.folderChangesTooltip).toBe(undefined);
});
- it('returns staged and unstaged changes text', () => {
- changesCount = 1;
- stagedFilesCount = 1;
- unstagedFilesCount = 1;
+ [{ input: 1, output: '1 changed file' }, { input: 2, output: '2 changed files' }].forEach(
+ ({ input, output }) => {
+ it('returns changed files count if changes count is not 0', () => {
+ changesCount = input;
- expect(vm.folderChangesTooltip).toBe('1 staged and 1 unstaged changes');
- });
+ expect(vm.folderChangesTooltip).toBe(output);
+ });
+ },
+ );
});
describe('show tree changes count', () => {
diff --git a/spec/javascripts/ide/components/repo_commit_section_spec.js b/spec/javascripts/ide/components/repo_commit_section_spec.js
index 917eb1438bd..0ba8c86a036 100644
--- a/spec/javascripts/ide/components/repo_commit_section_spec.js
+++ b/spec/javascripts/ide/components/repo_commit_section_spec.js
@@ -30,19 +30,13 @@ describe('RepoCommitSection', () => {
const files = [file('file1'), file('file2')].map(f =>
Object.assign(f, {
type: 'blob',
+ content: 'original content',
}),
);
vm.$store.state.rightPanelCollapsed = false;
vm.$store.state.currentBranch = 'master';
- vm.$store.state.changedFiles = [...files];
- vm.$store.state.changedFiles.forEach(f =>
- Object.assign(f, {
- changed: true,
- content: 'changedFile testing',
- }),
- );
-
+ vm.$store.state.changedFiles = [];
vm.$store.state.stagedFiles = [{ ...files[0] }, { ...files[1] }];
vm.$store.state.stagedFiles.forEach(f =>
Object.assign(f, {
@@ -51,7 +45,7 @@ describe('RepoCommitSection', () => {
}),
);
- vm.$store.state.changedFiles.forEach(f => {
+ files.forEach(f => {
vm.$store.state.entries[f.path] = f;
});
@@ -96,7 +90,7 @@ describe('RepoCommitSection', () => {
const changedFileElements = [...vm.$el.querySelectorAll('.multi-file-commit-list > li')];
const allFiles = vm.$store.state.changedFiles.concat(vm.$store.state.stagedFiles);
- expect(changedFileElements.length).toEqual(4);
+ expect(changedFileElements).toHaveLength(2);
changedFileElements.forEach((changedFile, i) => {
expect(changedFile.textContent.trim()).toContain(allFiles[i].path);
diff --git a/spec/javascripts/issue_show/components/app_spec.js b/spec/javascripts/issue_show/components/app_spec.js
index fdd6f4e6470..f11d4f5ac33 100644
--- a/spec/javascripts/issue_show/components/app_spec.js
+++ b/spec/javascripts/issue_show/components/app_spec.js
@@ -40,17 +40,19 @@ describe('Issuable output', () => {
const IssuableDescriptionComponent = Vue.extend(issuableApp);
mock = new MockAdapter(axios);
- mock.onGet('/gitlab-org/gitlab-shell/issues/9/realtime_changes/realtime_changes').reply(() => {
- const res = Promise.resolve([200, REALTIME_REQUEST_STACK[realtimeRequestCount]]);
- realtimeRequestCount += 1;
- return res;
- });
+ mock
+ .onGet('/gitlab-org/gitlab-shell/-/issues/9/realtime_changes/realtime_changes')
+ .reply(() => {
+ const res = Promise.resolve([200, REALTIME_REQUEST_STACK[realtimeRequestCount]]);
+ realtimeRequestCount += 1;
+ return res;
+ });
vm = new IssuableDescriptionComponent({
propsData: {
canUpdate: true,
canDestroy: true,
- endpoint: '/gitlab-org/gitlab-shell/issues/9/realtime_changes',
+ endpoint: '/gitlab-org/gitlab-shell/-/issues/9/realtime_changes',
updateEndpoint: gl.TEST_HOST,
issuableRef: '#1',
initialTitleHtml: '',
diff --git a/spec/javascripts/labels_issue_sidebar_spec.js b/spec/javascripts/labels_issue_sidebar_spec.js
index 9d05bdeee20..94e833ec83b 100644
--- a/spec/javascripts/labels_issue_sidebar_spec.js
+++ b/spec/javascripts/labels_issue_sidebar_spec.js
@@ -2,7 +2,7 @@
import $ from 'jquery';
import MockAdapter from 'axios-mock-adapter';
-import _ from 'underscore';
+import { shuffle } from 'lodash';
import axios from '~/lib/utils/axios_utils';
import IssuableContext from '~/issuable_context';
import LabelsSelect from '~/labels_select';
@@ -27,7 +27,7 @@ function testLabelClicks(labelOrder, done) {
expect(labelsInDropdown.length).toBe(10);
const arrayOfLabels = labelsInDropdown.get();
- const randomArrayOfLabels = _.shuffle(arrayOfLabels);
+ const randomArrayOfLabels = shuffle(arrayOfLabels);
randomArrayOfLabels.forEach((label, i) => {
if (i < saveLabelCount) {
$(label).click();
diff --git a/spec/javascripts/lib/utils/browser_spec.js b/spec/javascripts/lib/utils/browser_spec.js
new file mode 100644
index 00000000000..6b1074a3b4f
--- /dev/null
+++ b/spec/javascripts/lib/utils/browser_spec.js
@@ -0,0 +1,175 @@
+/**
+ * This file should only contain browser specific specs.
+ * If you need to add or update a spec, please see spec/frontend/lib/utils/*.js
+ * https://gitlab.com/gitlab-org/gitlab/issues/194242#note_292137135
+ * https://gitlab.com/groups/gitlab-org/-/epics/895#what-if-theres-a-karma-spec-which-is-simply-unmovable-to-jest-ie-it-is-dependent-on-a-running-browser-environment
+ */
+
+import MockAdapter from 'axios-mock-adapter';
+import { GlBreakpointInstance as breakpointInstance } from '@gitlab/ui/dist/utils';
+import axios from '~/lib/utils/axios_utils';
+import * as commonUtils from '~/lib/utils/common_utils';
+import { faviconDataUrl, overlayDataUrl, faviconWithOverlayDataUrl } from './mock_data';
+
+const PIXEL_TOLERANCE = 0.2;
+
+/**
+ * Loads a data URL as the src of an
+ * {@link https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement/Image|Image}
+ * and resolves to that Image once loaded.
+ *
+ * @param url
+ * @returns {Promise}
+ */
+const urlToImage = url =>
+ new Promise(resolve => {
+ const img = new Image();
+ img.onload = function() {
+ resolve(img);
+ };
+ img.src = url;
+ });
+
+describe('common_utils browser specific specs', () => {
+ describe('contentTop', () => {
+ it('does not add height for fileTitle or compareVersionsHeader if screen is too small', () => {
+ spyOn(breakpointInstance, 'isDesktop').and.returnValue(false);
+
+ setFixtures(`
+ <div class="diff-file file-title-flex-parent">
+ blah blah blah
+ </div>
+ <div class="mr-version-controls">
+ more blah blah blah
+ </div>
+ `);
+
+ expect(commonUtils.contentTop()).toBe(0);
+ });
+
+    it('adds height for fileTitle and compareVersionsHeader if screen is large enough', () => {
+ spyOn(breakpointInstance, 'isDesktop').and.returnValue(true);
+
+ setFixtures(`
+ <div class="diff-file file-title-flex-parent">
+ blah blah blah
+ </div>
+ <div class="mr-version-controls">
+ more blah blah blah
+ </div>
+ `);
+
+ expect(commonUtils.contentTop()).toBe(18);
+ });
+ });
+
+ describe('createOverlayIcon', () => {
+ it('should return the favicon with the overlay', done => {
+ commonUtils
+ .createOverlayIcon(faviconDataUrl, overlayDataUrl)
+ .then(url => Promise.all([urlToImage(url), urlToImage(faviconWithOverlayDataUrl)]))
+ .then(([actual, expected]) => {
+ expect(actual).toImageDiffEqual(expected, PIXEL_TOLERANCE);
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ describe('setFaviconOverlay', () => {
+ beforeEach(() => {
+ const favicon = document.createElement('link');
+ favicon.setAttribute('id', 'favicon');
+ favicon.setAttribute('data-original-href', faviconDataUrl);
+ document.body.appendChild(favicon);
+ });
+
+ afterEach(() => {
+ document.body.removeChild(document.getElementById('favicon'));
+ });
+
+ it('should set page favicon to provided favicon overlay', done => {
+ commonUtils
+ .setFaviconOverlay(overlayDataUrl)
+ .then(() => document.getElementById('favicon').getAttribute('href'))
+ .then(url => Promise.all([urlToImage(url), urlToImage(faviconWithOverlayDataUrl)]))
+ .then(([actual, expected]) => {
+ expect(actual).toImageDiffEqual(expected, PIXEL_TOLERANCE);
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ describe('setCiStatusFavicon', () => {
+ const BUILD_URL = `${gl.TEST_HOST}/frontend-fixtures/builds-project/-/jobs/1/status.json`;
+ let mock;
+
+ beforeEach(() => {
+ const favicon = document.createElement('link');
+ favicon.setAttribute('id', 'favicon');
+ favicon.setAttribute('href', 'null');
+ favicon.setAttribute('data-original-href', faviconDataUrl);
+ document.body.appendChild(favicon);
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ document.body.removeChild(document.getElementById('favicon'));
+ });
+
+ it('should reset favicon in case of error', done => {
+ mock.onGet(BUILD_URL).replyOnce(500);
+
+ commonUtils.setCiStatusFavicon(BUILD_URL).catch(() => {
+ const favicon = document.getElementById('favicon');
+
+ expect(favicon.getAttribute('href')).toEqual(faviconDataUrl);
+ done();
+ });
+ });
+
+ it('should set page favicon to CI status favicon based on provided status', done => {
+ mock.onGet(BUILD_URL).reply(200, {
+ favicon: overlayDataUrl,
+ });
+
+ commonUtils
+ .setCiStatusFavicon(BUILD_URL)
+ .then(() => document.getElementById('favicon').getAttribute('href'))
+ .then(url => Promise.all([urlToImage(url), urlToImage(faviconWithOverlayDataUrl)]))
+ .then(([actual, expected]) => {
+ expect(actual).toImageDiffEqual(expected, PIXEL_TOLERANCE);
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ describe('isInViewport', () => {
+ let el;
+
+ beforeEach(() => {
+ el = document.createElement('div');
+ });
+
+ afterEach(() => {
+ document.body.removeChild(el);
+ });
+
+ it('returns true when provided `el` is in viewport', () => {
+ el.setAttribute('style', `position: absolute; right: ${window.innerWidth + 0.2};`);
+ document.body.appendChild(el);
+
+ expect(commonUtils.isInViewport(el)).toBe(true);
+ });
+
+ it('returns false when provided `el` is not in viewport', () => {
+ el.setAttribute('style', 'position: absolute; top: -1000px; left: -1000px;');
+ document.body.appendChild(el);
+
+ expect(commonUtils.isInViewport(el)).toBe(false);
+ });
+ });
+});
diff --git a/spec/javascripts/lib/utils/common_utils_spec.js b/spec/javascripts/lib/utils/common_utils_spec.js
deleted file mode 100644
index 504d4a3e01a..00000000000
--- a/spec/javascripts/lib/utils/common_utils_spec.js
+++ /dev/null
@@ -1,981 +0,0 @@
-import MockAdapter from 'axios-mock-adapter';
-import { GlBreakpointInstance as breakpointInstance } from '@gitlab/ui/dist/utils';
-import axios from '~/lib/utils/axios_utils';
-import * as commonUtils from '~/lib/utils/common_utils';
-import { faviconDataUrl, overlayDataUrl, faviconWithOverlayDataUrl } from './mock_data';
-
-const PIXEL_TOLERANCE = 0.2;
-
-/**
- * Loads a data URL as the src of an
- * {@link https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement/Image|Image}
- * and resolves to that Image once loaded.
- *
- * @param url
- * @returns {Promise}
- */
-const urlToImage = url =>
- new Promise(resolve => {
- const img = new Image();
- img.onload = function() {
- resolve(img);
- };
- img.src = url;
- });
-
-describe('common_utils', () => {
- describe('parseUrl', () => {
- it('returns an anchor tag with url', () => {
- expect(commonUtils.parseUrl('/some/absolute/url').pathname).toContain('some/absolute/url');
- });
-
- it('url is escaped', () => {
- // IE11 will return a relative pathname while other browsers will return a full pathname.
- // parseUrl uses an anchor element for parsing an url. With relative urls, the anchor
- // element will create an absolute url relative to the current execution context.
- // The JavaScript test suite is executed at '/' which will lead to an absolute url
- // starting with '/'.
- expect(commonUtils.parseUrl('" test="asf"').pathname).toContain('/%22%20test=%22asf%22');
- });
- });
-
- describe('parseUrlPathname', () => {
- it('returns an absolute url when given an absolute url', () => {
- expect(commonUtils.parseUrlPathname('/some/absolute/url')).toEqual('/some/absolute/url');
- });
-
- it('returns an absolute url when given a relative url', () => {
- expect(commonUtils.parseUrlPathname('some/relative/url')).toEqual('/some/relative/url');
- });
- });
-
- describe('urlParamsToArray', () => {
- it('returns empty array for empty querystring', () => {
- expect(commonUtils.urlParamsToArray('')).toEqual([]);
- });
-
- it('should decode params', () => {
- expect(commonUtils.urlParamsToArray('?label_name%5B%5D=test')[0]).toBe('label_name[]=test');
- });
-
- it('should remove the question mark from the search params', () => {
- const paramsArray = commonUtils.urlParamsToArray('?test=thing');
-
- expect(paramsArray[0][0]).not.toBe('?');
- });
- });
-
- describe('urlParamsToObject', () => {
- it('parses path for label with trailing +', () => {
- expect(commonUtils.urlParamsToObject('label_name[]=label%2B', {})).toEqual({
- label_name: ['label+'],
- });
- });
-
- it('parses path for milestone with trailing +', () => {
- expect(commonUtils.urlParamsToObject('milestone_title=A%2B', {})).toEqual({
- milestone_title: 'A+',
- });
- });
-
- it('parses path for search terms with spaces', () => {
- expect(commonUtils.urlParamsToObject('search=two+words', {})).toEqual({
- search: 'two words',
- });
- });
- });
-
- describe('handleLocationHash', () => {
- beforeEach(() => {
- spyOn(window.document, 'getElementById').and.callThrough();
- jasmine.clock().install();
- });
-
- afterEach(() => {
- window.history.pushState({}, null, '');
- jasmine.clock().uninstall();
- });
-
- function expectGetElementIdToHaveBeenCalledWith(elementId) {
- expect(window.document.getElementById).toHaveBeenCalledWith(elementId);
- }
-
- it('decodes hash parameter', () => {
- window.history.pushState({}, null, '#random-hash');
- commonUtils.handleLocationHash();
-
- expectGetElementIdToHaveBeenCalledWith('random-hash');
- expectGetElementIdToHaveBeenCalledWith('user-content-random-hash');
- });
-
- it('decodes cyrillic hash parameter', () => {
- window.history.pushState({}, null, '#definição');
- commonUtils.handleLocationHash();
-
- expectGetElementIdToHaveBeenCalledWith('definição');
- expectGetElementIdToHaveBeenCalledWith('user-content-definição');
- });
-
- it('decodes encoded cyrillic hash parameter', () => {
- window.history.pushState({}, null, '#defini%C3%A7%C3%A3o');
- commonUtils.handleLocationHash();
-
- expectGetElementIdToHaveBeenCalledWith('definição');
- expectGetElementIdToHaveBeenCalledWith('user-content-definição');
- });
-
- it('scrolls element into view', () => {
- document.body.innerHTML += `
- <div id="parent">
- <div style="height: 2000px;"></div>
- <div id="test" style="height: 2000px;"></div>
- </div>
- `;
-
- window.history.pushState({}, null, '#test');
- commonUtils.handleLocationHash();
-
- expectGetElementIdToHaveBeenCalledWith('test');
-
- expect(window.scrollY).toBe(document.getElementById('test').offsetTop);
-
- document.getElementById('parent').remove();
- });
-
- it('scrolls user content element into view', () => {
- document.body.innerHTML += `
- <div id="parent">
- <div style="height: 2000px;"></div>
- <div id="user-content-test" style="height: 2000px;"></div>
- </div>
- `;
-
- window.history.pushState({}, null, '#test');
- commonUtils.handleLocationHash();
-
- expectGetElementIdToHaveBeenCalledWith('test');
- expectGetElementIdToHaveBeenCalledWith('user-content-test');
-
- expect(window.scrollY).toBe(document.getElementById('user-content-test').offsetTop);
-
- document.getElementById('parent').remove();
- });
-
- it('scrolls to element with offset from navbar', () => {
- spyOn(window, 'scrollBy').and.callThrough();
- document.body.innerHTML += `
- <div id="parent">
- <div class="navbar-gitlab" style="position: fixed; top: 0; height: 50px;"></div>
- <div style="height: 2000px; margin-top: 50px;"></div>
- <div id="user-content-test" style="height: 2000px;"></div>
- </div>
- `;
-
- window.history.pushState({}, null, '#test');
- commonUtils.handleLocationHash();
- jasmine.clock().tick(1);
-
- expectGetElementIdToHaveBeenCalledWith('test');
- expectGetElementIdToHaveBeenCalledWith('user-content-test');
-
- expect(window.scrollY).toBe(document.getElementById('user-content-test').offsetTop - 50);
- expect(window.scrollBy).toHaveBeenCalledWith(0, -50);
-
- document.getElementById('parent').remove();
- });
- });
-
- describe('historyPushState', () => {
- afterEach(() => {
- window.history.replaceState({}, null, null);
- });
-
- it('should call pushState with the correct path', () => {
- spyOn(window.history, 'pushState');
-
- commonUtils.historyPushState('newpath?page=2');
-
- expect(window.history.pushState).toHaveBeenCalled();
- expect(window.history.pushState.calls.allArgs()[0][2]).toContain('newpath?page=2');
- });
- });
-
- describe('parseQueryStringIntoObject', () => {
- it('should return object with query parameters', () => {
- expect(commonUtils.parseQueryStringIntoObject('scope=all&page=2')).toEqual({
- scope: 'all',
- page: '2',
- });
-
- expect(commonUtils.parseQueryStringIntoObject('scope=all')).toEqual({ scope: 'all' });
- expect(commonUtils.parseQueryStringIntoObject()).toEqual({});
- });
- });
-
- describe('objectToQueryString', () => {
- it('returns empty string when `param` is undefined, null or empty string', () => {
- expect(commonUtils.objectToQueryString()).toBe('');
- expect(commonUtils.objectToQueryString('')).toBe('');
- });
-
- it('returns query string with values of `params`', () => {
- const singleQueryParams = { foo: true };
- const multipleQueryParams = { foo: true, bar: true };
-
- expect(commonUtils.objectToQueryString(singleQueryParams)).toBe('foo=true');
- expect(commonUtils.objectToQueryString(multipleQueryParams)).toBe('foo=true&bar=true');
- });
- });
-
- describe('buildUrlWithCurrentLocation', () => {
- it('should build an url with current location and given parameters', () => {
- expect(commonUtils.buildUrlWithCurrentLocation()).toEqual(window.location.pathname);
- expect(commonUtils.buildUrlWithCurrentLocation('?page=2')).toEqual(
- `${window.location.pathname}?page=2`,
- );
- });
- });
-
- describe('debounceByAnimationFrame', () => {
- it('debounces a function to allow a maximum of one call per animation frame', done => {
- const spy = jasmine.createSpy('spy');
- const debouncedSpy = commonUtils.debounceByAnimationFrame(spy);
- window.requestAnimationFrame(() => {
- debouncedSpy();
- debouncedSpy();
- window.requestAnimationFrame(() => {
- expect(spy).toHaveBeenCalledTimes(1);
- done();
- });
- });
- });
- });
-
- describe('getParameterByName', () => {
- beforeEach(() => {
- window.history.pushState({}, null, '?scope=all&p=2');
- });
-
- afterEach(() => {
- window.history.replaceState({}, null, null);
- });
-
- it('should return valid parameter', () => {
- const value = commonUtils.getParameterByName('scope');
-
- expect(commonUtils.getParameterByName('p')).toEqual('2');
- expect(value).toBe('all');
- });
-
- it('should return invalid parameter', () => {
- const value = commonUtils.getParameterByName('fakeParameter');
-
- expect(value).toBe(null);
- });
-
-    it('should return valid parameters if URL is provided', () => {
- let value = commonUtils.getParameterByName('foo', 'http://cocteau.twins/?foo=bar');
-
- expect(value).toBe('bar');
-
- value = commonUtils.getParameterByName('manan', 'http://cocteau.twins/?foo=bar&manan=canchu');
-
- expect(value).toBe('canchu');
- });
- });
-
- describe('normalizedHeaders', () => {
- it('should upperCase all the header keys to keep them consistent', () => {
- const apiHeaders = {
- 'X-Something-Workhorse': { workhorse: 'ok' },
- 'x-something-nginx': { nginx: 'ok' },
- };
-
- const normalized = commonUtils.normalizeHeaders(apiHeaders);
-
- const WORKHORSE = 'X-SOMETHING-WORKHORSE';
- const NGINX = 'X-SOMETHING-NGINX';
-
- expect(normalized[WORKHORSE].workhorse).toBe('ok');
- expect(normalized[NGINX].nginx).toBe('ok');
- });
- });
-
- describe('normalizeCRLFHeaders', () => {
- beforeEach(function() {
- this.CLRFHeaders =
- 'a-header: a-value\nAnother-Header: ANOTHER-VALUE\nLaSt-HeAdEr: last-VALUE';
- spyOn(String.prototype, 'split').and.callThrough();
- this.normalizeCRLFHeaders = commonUtils.normalizeCRLFHeaders(this.CLRFHeaders);
- });
-
- it('should split by newline', function() {
- expect(String.prototype.split).toHaveBeenCalledWith('\n');
- });
-
- it('should split by colon+space for each header', function() {
- expect(String.prototype.split.calls.allArgs().filter(args => args[0] === ': ').length).toBe(
- 3,
- );
- });
-
- it('should return a normalized headers object', function() {
- expect(this.normalizeCRLFHeaders).toEqual({
- 'A-HEADER': 'a-value',
- 'ANOTHER-HEADER': 'ANOTHER-VALUE',
- 'LAST-HEADER': 'last-VALUE',
- });
- });
- });
-
- describe('parseIntPagination', () => {
- it('should parse to integers all string values and return pagination object', () => {
- const pagination = {
- 'X-PER-PAGE': 10,
- 'X-PAGE': 2,
- 'X-TOTAL': 30,
- 'X-TOTAL-PAGES': 3,
- 'X-NEXT-PAGE': 3,
- 'X-PREV-PAGE': 1,
- };
-
- const expectedPagination = {
- perPage: 10,
- page: 2,
- total: 30,
- totalPages: 3,
- nextPage: 3,
- previousPage: 1,
- };
-
- expect(commonUtils.parseIntPagination(pagination)).toEqual(expectedPagination);
- });
- });
-
- describe('isMetaClick', () => {
- it('should identify meta click on Windows/Linux', () => {
- const e = {
- metaKey: false,
- ctrlKey: true,
- which: 1,
- };
-
- expect(commonUtils.isMetaClick(e)).toBe(true);
- });
-
- it('should identify meta click on macOS', () => {
- const e = {
- metaKey: true,
- ctrlKey: false,
- which: 1,
- };
-
- expect(commonUtils.isMetaClick(e)).toBe(true);
- });
-
- it('should identify as meta click on middle-click or Mouse-wheel click', () => {
- const e = {
- metaKey: false,
- ctrlKey: false,
- which: 2,
- };
-
- expect(commonUtils.isMetaClick(e)).toBe(true);
- });
- });
-
- describe('contentTop', () => {
- it('does not add height for fileTitle or compareVersionsHeader if screen is too small', () => {
- spyOn(breakpointInstance, 'isDesktop').and.returnValue(false);
-
- setFixtures(`
- <div class="diff-file file-title-flex-parent">
- blah blah blah
- </div>
- <div class="mr-version-controls">
- more blah blah blah
- </div>
- `);
-
- expect(commonUtils.contentTop()).toBe(0);
- });
-
-    it('adds height for fileTitle and compareVersionsHeader if screen is large enough', () => {
- spyOn(breakpointInstance, 'isDesktop').and.returnValue(true);
-
- setFixtures(`
- <div class="diff-file file-title-flex-parent">
- blah blah blah
- </div>
- <div class="mr-version-controls">
- more blah blah blah
- </div>
- `);
-
- expect(commonUtils.contentTop()).toBe(18);
- });
- });
-
- describe('parseBoolean', () => {
- const { parseBoolean } = commonUtils;
-
- it('returns true for "true"', () => {
- expect(parseBoolean('true')).toEqual(true);
- });
-
- it('returns false for "false"', () => {
- expect(parseBoolean('false')).toEqual(false);
- });
-
- it('returns false for "something"', () => {
- expect(parseBoolean('something')).toEqual(false);
- });
-
- it('returns false for null', () => {
- expect(parseBoolean(null)).toEqual(false);
- });
-
- it('is idempotent', () => {
- const input = ['true', 'false', 'something', null];
- input.forEach(value => {
- const result = parseBoolean(value);
-
- expect(parseBoolean(result)).toBe(result);
- });
- });
- });
-
- describe('backOff', () => {
- beforeEach(() => {
- // shortcut our timeouts otherwise these tests will take a long time to finish
- const origSetTimeout = window.setTimeout;
- spyOn(window, 'setTimeout').and.callFake(cb => origSetTimeout(cb, 0));
- });
-
- it('solves the promise from the callback', done => {
- const expectedResponseValue = 'Success!';
- commonUtils
- .backOff((next, stop) =>
- new Promise(resolve => {
- resolve(expectedResponseValue);
- })
- .then(resp => {
- stop(resp);
- })
- .catch(done.fail),
- )
- .then(respBackoff => {
- expect(respBackoff).toBe(expectedResponseValue);
- done();
- })
- .catch(done.fail);
- });
-
- it('catches the rejected promise from the callback ', done => {
- const errorMessage = 'Mistakes were made!';
- commonUtils
- .backOff((next, stop) => {
- new Promise((resolve, reject) => {
- reject(new Error(errorMessage));
- })
- .then(resp => {
- stop(resp);
- })
- .catch(err => stop(err));
- })
- .catch(errBackoffResp => {
- expect(errBackoffResp instanceof Error).toBe(true);
- expect(errBackoffResp.message).toBe(errorMessage);
- done();
- });
- });
-
- it('solves the promise correctly after retrying a third time', done => {
- let numberOfCalls = 1;
- const expectedResponseValue = 'Success!';
- commonUtils
- .backOff((next, stop) =>
- Promise.resolve(expectedResponseValue)
- .then(resp => {
- if (numberOfCalls < 3) {
- numberOfCalls += 1;
- next();
- } else {
- stop(resp);
- }
- })
- .catch(done.fail),
- )
- .then(respBackoff => {
- const timeouts = window.setTimeout.calls.allArgs().map(([, timeout]) => timeout);
-
- expect(timeouts).toEqual([2000, 4000]);
- expect(respBackoff).toBe(expectedResponseValue);
- done();
- })
- .catch(done.fail);
- });
-
- it('rejects the backOff promise after timing out', done => {
- commonUtils
- .backOff(next => next(), 64000)
- .catch(errBackoffResp => {
- const timeouts = window.setTimeout.calls.allArgs().map(([, timeout]) => timeout);
-
- expect(timeouts).toEqual([2000, 4000, 8000, 16000, 32000, 32000]);
- expect(errBackoffResp instanceof Error).toBe(true);
- expect(errBackoffResp.message).toBe('BACKOFF_TIMEOUT');
- done();
- });
- });
- });
-
- describe('setFavicon', () => {
- beforeEach(() => {
- const favicon = document.createElement('link');
- favicon.setAttribute('id', 'favicon');
- favicon.setAttribute('href', 'default/favicon');
- favicon.setAttribute('data-default-href', 'default/favicon');
- document.body.appendChild(favicon);
- });
-
- afterEach(() => {
- document.body.removeChild(document.getElementById('favicon'));
- });
-
- it('should set page favicon to provided favicon', () => {
- const faviconPath = '//custom_favicon';
- commonUtils.setFavicon(faviconPath);
-
- expect(document.getElementById('favicon').getAttribute('href')).toEqual(faviconPath);
- });
- });
-
- describe('resetFavicon', () => {
- beforeEach(() => {
- const favicon = document.createElement('link');
- favicon.setAttribute('id', 'favicon');
- favicon.setAttribute('data-original-href', 'default/favicon');
- document.body.appendChild(favicon);
- });
-
- afterEach(() => {
- document.body.removeChild(document.getElementById('favicon'));
- });
-
- it('should reset page favicon to the default icon', () => {
- const favicon = document.getElementById('favicon');
- favicon.setAttribute('href', 'new/favicon');
- commonUtils.resetFavicon();
-
- expect(document.getElementById('favicon').getAttribute('href')).toEqual('default/favicon');
- });
- });
-
- describe('createOverlayIcon', () => {
- it('should return the favicon with the overlay', done => {
- commonUtils
- .createOverlayIcon(faviconDataUrl, overlayDataUrl)
- .then(url => Promise.all([urlToImage(url), urlToImage(faviconWithOverlayDataUrl)]))
- .then(([actual, expected]) => {
- expect(actual).toImageDiffEqual(expected, PIXEL_TOLERANCE);
- done();
- })
- .catch(done.fail);
- });
- });
-
- describe('setFaviconOverlay', () => {
- beforeEach(() => {
- const favicon = document.createElement('link');
- favicon.setAttribute('id', 'favicon');
- favicon.setAttribute('data-original-href', faviconDataUrl);
- document.body.appendChild(favicon);
- });
-
- afterEach(() => {
- document.body.removeChild(document.getElementById('favicon'));
- });
-
- it('should set page favicon to provided favicon overlay', done => {
- commonUtils
- .setFaviconOverlay(overlayDataUrl)
- .then(() => document.getElementById('favicon').getAttribute('href'))
- .then(url => Promise.all([urlToImage(url), urlToImage(faviconWithOverlayDataUrl)]))
- .then(([actual, expected]) => {
- expect(actual).toImageDiffEqual(expected, PIXEL_TOLERANCE);
- done();
- })
- .catch(done.fail);
- });
- });
-
- describe('setCiStatusFavicon', () => {
- const BUILD_URL = `${gl.TEST_HOST}/frontend-fixtures/builds-project/-/jobs/1/status.json`;
- let mock;
-
- beforeEach(() => {
- const favicon = document.createElement('link');
- favicon.setAttribute('id', 'favicon');
- favicon.setAttribute('href', 'null');
- favicon.setAttribute('data-original-href', faviconDataUrl);
- document.body.appendChild(favicon);
- mock = new MockAdapter(axios);
- });
-
- afterEach(() => {
- mock.restore();
- document.body.removeChild(document.getElementById('favicon'));
- });
-
- it('should reset favicon in case of error', done => {
- mock.onGet(BUILD_URL).replyOnce(500);
-
- commonUtils.setCiStatusFavicon(BUILD_URL).catch(() => {
- const favicon = document.getElementById('favicon');
-
- expect(favicon.getAttribute('href')).toEqual(faviconDataUrl);
- done();
- });
- });
-
- it('should set page favicon to CI status favicon based on provided status', done => {
- mock.onGet(BUILD_URL).reply(200, {
- favicon: overlayDataUrl,
- });
-
- commonUtils
- .setCiStatusFavicon(BUILD_URL)
- .then(() => document.getElementById('favicon').getAttribute('href'))
- .then(url => Promise.all([urlToImage(url), urlToImage(faviconWithOverlayDataUrl)]))
- .then(([actual, expected]) => {
- expect(actual).toImageDiffEqual(expected, PIXEL_TOLERANCE);
- done();
- })
- .catch(done.fail);
- });
- });
-
- describe('spriteIcon', () => {
- let beforeGon;
-
- beforeEach(() => {
- window.gon = window.gon || {};
- beforeGon = Object.assign({}, window.gon);
- window.gon.sprite_icons = 'icons.svg';
- });
-
- afterEach(() => {
- window.gon = beforeGon;
- });
-
- it('should return the svg for a linked icon', () => {
- expect(commonUtils.spriteIcon('test')).toEqual(
- '<svg ><use xlink:href="icons.svg#test" /></svg>',
- );
- });
-
- it('should set svg className when passed', () => {
- expect(commonUtils.spriteIcon('test', 'fa fa-test')).toEqual(
- '<svg class="fa fa-test"><use xlink:href="icons.svg#test" /></svg>',
- );
- });
- });
-
- describe('convertObjectPropsToCamelCase', () => {
- it('returns new object with camelCase property names by converting object with snake_case names', () => {
- const snakeRegEx = /(_\w)/g;
- const mockObj = {
- id: 1,
- group_name: 'GitLab.org',
- absolute_web_url: 'https://gitlab.com/gitlab-org/',
- };
- const mappings = {
- id: 'id',
- groupName: 'group_name',
- absoluteWebUrl: 'absolute_web_url',
- };
-
- const convertedObj = commonUtils.convertObjectPropsToCamelCase(mockObj);
-
- Object.keys(convertedObj).forEach(prop => {
- expect(snakeRegEx.test(prop)).toBeFalsy();
- expect(convertedObj[prop]).toBe(mockObj[mappings[prop]]);
- });
- });
-
- it('return empty object if method is called with null or undefined', () => {
- expect(Object.keys(commonUtils.convertObjectPropsToCamelCase(null)).length).toBe(0);
- expect(Object.keys(commonUtils.convertObjectPropsToCamelCase()).length).toBe(0);
- expect(Object.keys(commonUtils.convertObjectPropsToCamelCase({})).length).toBe(0);
- });
-
- it('does not deep-convert by default', () => {
- const obj = {
- snake_key: {
- child_snake_key: 'value',
- },
- };
-
- expect(commonUtils.convertObjectPropsToCamelCase(obj)).toEqual({
- snakeKey: {
- child_snake_key: 'value',
- },
- });
- });
-
- describe('convertObjectPropsToSnakeCase', () => {
- it('converts each object key to snake case', () => {
- const obj = {
- some: 'some',
- 'cool object': 'cool object',
- likeThisLongOne: 'likeThisLongOne',
- };
-
- expect(commonUtils.convertObjectPropsToSnakeCase(obj)).toEqual({
- some: 'some',
- cool_object: 'cool object',
- like_this_long_one: 'likeThisLongOne',
- });
- });
-
- it('returns an empty object if there are no keys', () => {
- ['', {}, [], null].forEach(badObj => {
- expect(commonUtils.convertObjectPropsToSnakeCase(badObj)).toEqual({});
- });
- });
- });
-
- describe('with options', () => {
- const objWithoutChildren = {
- project_name: 'GitLab CE',
- group_name: 'GitLab.org',
- license_type: 'MIT',
- };
-
- const objWithChildren = {
- project_name: 'GitLab CE',
- group_name: 'GitLab.org',
- license_type: 'MIT',
- tech_stack: {
- backend: 'Ruby',
- frontend_framework: 'Vue',
- database: 'PostgreSQL',
- },
- };
-
- describe('when options.deep is true', () => {
- it('converts object with child objects', () => {
- const obj = {
- snake_key: {
- child_snake_key: 'value',
- },
- };
-
- expect(commonUtils.convertObjectPropsToCamelCase(obj, { deep: true })).toEqual({
- snakeKey: {
- childSnakeKey: 'value',
- },
- });
- });
-
- it('converts array with child objects', () => {
- const arr = [
- {
- child_snake_key: 'value',
- },
- ];
-
- expect(commonUtils.convertObjectPropsToCamelCase(arr, { deep: true })).toEqual([
- {
- childSnakeKey: 'value',
- },
- ]);
- });
-
- it('converts array with child arrays', () => {
- const arr = [
- [
- {
- child_snake_key: 'value',
- },
- ],
- ];
-
- expect(commonUtils.convertObjectPropsToCamelCase(arr, { deep: true })).toEqual([
- [
- {
- childSnakeKey: 'value',
- },
- ],
- ]);
- });
- });
-
- describe('when options.dropKeys is provided', () => {
- it('discards properties mentioned in `dropKeys` array', () => {
- expect(
- commonUtils.convertObjectPropsToCamelCase(objWithoutChildren, {
- dropKeys: ['group_name'],
- }),
- ).toEqual({
- projectName: 'GitLab CE',
- licenseType: 'MIT',
- });
- });
-
- it('discards properties mentioned in `dropKeys` array when `deep` is true', () => {
- expect(
- commonUtils.convertObjectPropsToCamelCase(objWithChildren, {
- deep: true,
- dropKeys: ['group_name', 'database'],
- }),
- ).toEqual({
- projectName: 'GitLab CE',
- licenseType: 'MIT',
- techStack: {
- backend: 'Ruby',
- frontendFramework: 'Vue',
- },
- });
- });
- });
-
- describe('when options.ignoreKeyNames is provided', () => {
- it('leaves properties mentioned in `ignoreKeyNames` array intact', () => {
- expect(
- commonUtils.convertObjectPropsToCamelCase(objWithoutChildren, {
- ignoreKeyNames: ['group_name'],
- }),
- ).toEqual({
- projectName: 'GitLab CE',
- licenseType: 'MIT',
- group_name: 'GitLab.org',
- });
- });
-
- it('leaves properties mentioned in `ignoreKeyNames` array intact when `deep` is true', () => {
- expect(
- commonUtils.convertObjectPropsToCamelCase(objWithChildren, {
- deep: true,
- ignoreKeyNames: ['group_name', 'frontend_framework'],
- }),
- ).toEqual({
- projectName: 'GitLab CE',
- group_name: 'GitLab.org',
- licenseType: 'MIT',
- techStack: {
- backend: 'Ruby',
- frontend_framework: 'Vue',
- database: 'PostgreSQL',
- },
- });
- });
- });
- });
- });
-
- describe('roundOffFloat', () => {
- it('Rounds off decimal places of a float number with provided precision', () => {
- expect(commonUtils.roundOffFloat(3.141592, 3)).toBeCloseTo(3.142);
- });
-
- it('Rounds off a float number to a whole number when provided precision is zero', () => {
- expect(commonUtils.roundOffFloat(3.141592, 0)).toBeCloseTo(3);
- expect(commonUtils.roundOffFloat(3.5, 0)).toBeCloseTo(4);
- });
-
- it('Rounds off float number to nearest 0, 10, 100, 1000 and so on when provided precision is below 0', () => {
- expect(commonUtils.roundOffFloat(34567.14159, -1)).toBeCloseTo(34570);
- expect(commonUtils.roundOffFloat(34567.14159, -2)).toBeCloseTo(34600);
- expect(commonUtils.roundOffFloat(34567.14159, -3)).toBeCloseTo(35000);
- expect(commonUtils.roundOffFloat(34567.14159, -4)).toBeCloseTo(30000);
- expect(commonUtils.roundOffFloat(34567.14159, -5)).toBeCloseTo(0);
- });
- });
-
- describe('isInViewport', () => {
- let el;
-
- beforeEach(() => {
- el = document.createElement('div');
- });
-
- afterEach(() => {
- document.body.removeChild(el);
- });
-
- it('returns true when provided `el` is in viewport', () => {
- el.setAttribute('style', `position: absolute; right: ${window.innerWidth + 0.2};`);
- document.body.appendChild(el);
-
- expect(commonUtils.isInViewport(el)).toBe(true);
- });
-
- it('returns false when provided `el` is not in viewport', () => {
- el.setAttribute('style', 'position: absolute; top: -1000px; left: -1000px;');
- document.body.appendChild(el);
-
- expect(commonUtils.isInViewport(el)).toBe(false);
- });
- });
-
- describe('searchBy', () => {
- const searchSpace = {
- iid: 1,
- reference: '&1',
- title: 'Error omnis quos consequatur ullam a vitae sed omnis libero cupiditate.',
- url: '/groups/gitlab-org/-/epics/1',
- };
-
- it('returns null when `query` or `searchSpace` params are empty/undefined', () => {
- expect(commonUtils.searchBy('omnis', null)).toBeNull();
- expect(commonUtils.searchBy('', searchSpace)).toBeNull();
- expect(commonUtils.searchBy()).toBeNull();
- });
-
- it('returns object with matching props based on `query` & `searchSpace` params', () => {
- // String `omnis` is found only in `title` prop so return just that
- expect(commonUtils.searchBy('omnis', searchSpace)).toEqual(
- jasmine.objectContaining({
- title: searchSpace.title,
- }),
- );
-
- // String `1` is found in both `iid` and `reference` props so return both
- expect(commonUtils.searchBy('1', searchSpace)).toEqual(
- jasmine.objectContaining({
- iid: searchSpace.iid,
- reference: searchSpace.reference,
- }),
- );
-
- // String `/epics/1` is found in `url` prop so return just that
- expect(commonUtils.searchBy('/epics/1', searchSpace)).toEqual(
- jasmine.objectContaining({
- url: searchSpace.url,
- }),
- );
- });
- });
-
- describe('isScopedLabel', () => {
- it('returns true when `::` is present in title', () => {
- expect(commonUtils.isScopedLabel({ title: 'foo::bar' })).toBe(true);
- });
-
- it('returns false when `::` is not present', () => {
- expect(commonUtils.isScopedLabel({ title: 'foobar' })).toBe(false);
- });
- });
-
- describe('getDashPath', () => {
- it('returns the path following /-/', () => {
- expect(commonUtils.getDashPath('/some/-/url-with-dashes-/')).toEqual('url-with-dashes-/');
- });
-
- it('returns null when no path follows /-/', () => {
- expect(commonUtils.getDashPath('/some/url')).toEqual(null);
- });
- });
-});
diff --git a/spec/javascripts/lib/utils/mock_data.js b/spec/javascripts/lib/utils/mock_data.js
index c466b0cd1ed..c2f79a32377 100644
--- a/spec/javascripts/lib/utils/mock_data.js
+++ b/spec/javascripts/lib/utils/mock_data.js
@@ -1,8 +1 @@
-export const faviconDataUrl =
- 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAMAAABEpIrGAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAACcFBMVEX////iQyniQyniQyniQyniQyniQyniQyniQynhRiriQyniQyniQyniQyniQyniQyniQyniQyniQyniQyniQyniQyniQyniQyniQyniQyniQyniRCniQyniQyniQyniQyniQyniQyniQyniQyniQyniQyniQyniQyniQyniQynhQiniQiniQiniQinhQinpUSjqUSjqTyjqTyjqTyjlSCniRCniQynjRCjqTyjsZSjrWyj8oib9kSb8pyb9pib8oyb8fyb3ZSb4Zib8fCb8oyb8oyb8oyb8pCb8cSbiQyn7bCb8cib8oyb8oSb8bSbtVSjpTij8nyb8oyb8oyb8lCb2Yyf3ZCf8mCb8oyb8oyb8oyb8iib8bSbiRCn8gyb8oyb8eCbpTinrUSj8oyb8oyb8oyb8pSb8bib4Zif0YCf8byb8oyb8oyb8oyb7oib8oyb8nCbjRSn9bib8ayb8nib8oyb8oyb8oyb8kSbpTyjpTyj8jib8oyb8oyb8oyb8fib0Xyf2ZSb8gCb8oyb6pSb8oyb8dib+cCbgQCnjRSn8cCb8oib8oyb8oyb8oybqUCjnSyn8bCb8oyb8oyb8oyb8myb2YyfyXyf8oyb8oyb8hibhQSn+bib8iSb8oyb8qCb+fSbmSSnqTyj8oib9pCb1YifxXyf7pSb8oCb8pCb+mCb0fCf8pSb7hSXvcSjiQyniQinqTyj9kCb9bib9byb+cCbqUSjiRCnsVCj+cSb8pib8bCb8bSbgQCn7bCb8bibjRSn8oyb8ayb8oib8aib8pCbjRCn8pybhQinhQSn8pSb7ayb7aSb6aib8eib///8IbM+7AAAAr3RSTlMBA3NtX2vT698HGQcRLwWLiXnv++3V+eEd/R8HE2V/Y5HjyefdFw99YWfJ+/3nwQP78/HvX1VTQ/kdA2HzbQXj9fX79/3DGf379/33T/v99/f7ba33+/f1+9/18/v59V339flzF/H9+fX3/fMhBwOh9/v5/fmvBV/z+fP3Awnp9/f38+UFgff7+/37+4c77/f7/flFz/f59dFr7/v98Wnr+/f3I5/197EDBU1ZAwUD8/kLUwAAAAFiS0dEAIgFHUgAAAAHdElNRQfhBQoLHiBV6/1lAAACHUlEQVQ4y41TZXsTQRCe4FAIUigN7m7FXY+iLRQKBG2x4g7BjhZ3Le7uMoEkFJprwyQk0CC/iZnNhUZaHt4vt6/szO7cHcD/wFKjZrJWq3YMq1M3eVc9rFzXR2yQkuA3RGxkjZLGiEk9miA2tURJs1RsnhhokYYtzaU13WZDbBVnW1sjo43J2vI6tZ0lLtFeAh1M0lECneI7dGYtrUtk3RUVIKaEJR25qw27yT0s3W0qEHuPlB4RradivXo7GX36xnbo51SQ+fWHARmCgYMGDxkaxbD3SssYPmIkwKgPLrfA87EETTg/fVaSa/SYsQDjSsd7DcGEsr+BieVKmaRNBsjUtClTfUI900y/5Mt05c8oJQKYSURZ2UqYFa0w283M588JEM2BuRwI5EqT8nmmXzZf4l8XsGNfCIv4QcHFklhiBpaqAsuC4tghj+ySyOdjeJYrP7RCCuR/E5tWAqxaLcmCNSyujdxjHZdbn8UHoA0bN/GoNm8hjQJb/ZzYpo6w3TB27JRduxxqrA7YzbWCezixN8RD2Oc2/Ptlfx7o5uT1A4XMiwzj4HfEikNe7+Ew0ZGjeuW70eEYaeHjxomTiKd++E4XnKGz8d+HDufOB3Ky3RcwdNF1qZiKLyf/B44r2tWf15wV143cwI2qfi8dbtKtX6Hbd+6G74EDqkTm/QcPH/0ufFyNLXjy9NnzF9Xb8BJevYY38C+8fZcg/AF3QTYemVkCwwAAACV0RVh0ZGF0ZTpjcmVhdGUAMjAxNy0wNS0xMFQxMTozMDozMiswMjowMMzup8UAAAAldEVYdGRhdGU6bW9kaWZ5ADIwMTctMDUtMTBUMTE6MzA6MzIrMDI6MDC9sx95AAAAAElFTkSuQmCC';
-
-export const overlayDataUrl =
- 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAA85JREFUWAntVllIVGEUPv/9b46O41KplYN7PeRkti8TjQlhCUGh3MmeQugpIsGKAi2soIcIooiohxYKK2daqDAlIpIiWwxtQaJcaHE0d5tMrbn37z9XRqfR0TvVW56Hudf//uec72zfEWBCJjIwkYGJDPzvGSD/KgExN3Oi2Q+2DJgSDYQEMwItVGH1iZGmJw/Si1y+/PwVAMYYib22MYc/8hVQFgKDEfYoId0KYzagAQebsos/ewMZoeB9wdffcTYpQSaCTWHKoqSQaDk7zkIt0+aCUR8BelEHrf3dUNv9AcqbnsHtT5UKB/hTASh0SLYjnjb/CIDRJi0XiFAaJOpCD8zLpdb4NB66b1OfelthX815dtdRRfiti2aAXLvVLiMQ6olGyztGDkSo4JGGXk8/QFdGpYzpHG2GBQTDhtgVhPEaVbbVpvI6GJz22rv4TcAfrYI1x7Rj5MWWAppomKFVVb2302SFzUkZHAbkG+0b1+Gh77yNYjrmqnWTrLBLRxdvBWv8qlFujH/kYjJYyvLkj71t78zAUvzMAMnHhpN4zf9UREJhd8omyssxu1IgazQDwDnHUcNuH6vhPIE1fmuBzHt74Hn7W89jWGtcAjoaIDOFrdcMYJBkgOCoaRF0Lj0oglddDbCj6tRvKjphEpgjkzEQs2YAKsNxMzjn3nKurhzK+Ly7xe28ua8TwgMMcHJZnvvT0BPtEEKM4tDJ+C8GvIIk4ylINIXVZ0EUKJxYuh3mhCeokbudl6TtVc88dfBdLwbyaWB6zQCYQJpBYSrDGQxBQ/ZWRM2B+VNmQnVnHWx7elyNuL2/R336co7KyJR8CL9oLgEuFlREevWUkEl6uGwpVEG4FBm0OEf9N10NMgPlvWYAuNVwsWDKvcUNYsHUWTCZ13ysyFEXe6TO6aC8CUr9IiK+A05TQrc8yjwmxARHeeMAPlfQJw+AQRwu0YhL/GDXi9NwufG+S8dYkuYMqIb4SsWthotlNMOUCOM6r+G9cqXxPmd1dqrBav/o1zJy2l5/NUjJA/VORwYuFnOUaTQcPs9wMqwV++Xv8oADxKAcZ8nLPr8AoGW+xR6HSqYk3GodAz2QNj0V+Gr26dT9ASNH5239Pf0gktVNWZca8ZvfAFBprWS6hSu1pqt++Y0PD+WIwDAhIWQGtzvSHDbcodfFUFB9hg1Gjs5LXqIdFL+acFBl+FddqYwdxsWC3I70OvgfUaA65zhq2O2c8VxYcyIGFTVlXegYtvCXANCQZJMobjVcLMjtSK/IcEgyOOe8Ve5w7ryKDefp2P3+C/5ohv8HZmVLAAAAAElFTkSuQmCC';
-
-export const faviconWithOverlayDataUrl =
- 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAGtElEQVRYR8WXf3CT9R3H35/nSdIQIktrCf0RStI0FYRjVBAccxTq5MDBKUoz4ZyjbPO87q4yBsPDMdExTjlvIsdQexyI0oMBeuKhdjsNHhwcMgpjIlublLIm/UlJKZSSJs/z/e6+T5v0CQ22wB/7/pPck8/383l9fj6fEOec8H88NAjAS1LwknsFSVLU8WXd1rtm85LUeKnwGQKzjj3s33azvsEAAEIlnn8ByHL4/Pa7BgAQLCm8QOBOh88vDQkQeMxjMkcQEYKqYsyJWWPhgs/80TsFafzROJtkNIXFfYI0pfXqPeennjqlxPUNikBoTuEmEF+lCRBV3G0aQiWFrwH8d30AWJubGdiEfZzdGqDEEwbICnADQGGHry7zTr0X94IlnnMACggwAWh0+PxOvb5EBGqmTTNkj7ySxWS62C+g5Usm1Zn95YXG24UQ+r5n75Li6Ux4LBkyc7/4t5YSLSr6Lgg9UvBLcKocMEYKON/gGB3YoA/bcGFCczzLQdieLE9bHL66FakBSjzCU0cSAHDa4at7aLhG9XLBEk8zAVnxZxyIEhBy+PwFgwAafpxvNzK5NZUhrX28JA07Cl6SmtvcOUwm4ZAouHj7ad+jMrN1dqb3iG7oS4EYPh2etQS+XiesC8TQ3ZD3yZJsHuUPgbMcI+ej5v3ncv5PasNlk1p7JJnzJL+I0/O5h+u0VCdqIDi78AQRHuirft3hYJzQPvawPydVdPI+/OnTnNNKBjYVXHRa8rFFGeb4w1he0wZ7d/84IXTEhxzxUsgitB2LPFGwvgGUfLSeZUpEXqEqrIdz0nr4iHOUfeOccb/tNMtutzWHPeWcJc0aMxm5lkxYDGloj1zB+Sv/RXXTSXzaeBwSY3j+bHNv2bdtMYCbpHtRkNFd36xFQN3tXkZhvgP1fdPi5kMEXL4oIXKVAA58M8aCVQs84BYLXi5aDq+zGJTqYr+i4PV2vHxmJ/7WUoOn2i/jz6yhW7JjrdSV8U4fQFV+I2Q4UIsedMCSSlcsgp72WtnSajOhzDsBNtsYfFD8e+Rbs4fdIG98uw9vnj+AX7FWvk4NHZOXXphF/INx2SpJIU2L8L4GDAoMwlP9kWSg6awcKVs83tyUnY5Dj75+W8bjutae3o5d9X/HTiWAuUtOS6RUOR8Hp48TxjgU/AMSeKJ1Ej/tMWXG1sxwGt98sBxe5+xhe64XVLiK2Z9XwNgdRLXyzQsC4ENwelIHAFxDBOdh1qdCdNLCoon8RnY+HZ6/+TtzPhTZweAxlJ94C5VqoI2U3a7rACzJjQqgBd24CGscos1kxPQZ38fqSU/jhQkDvN9lrKG7FeUnNuPVKcvwYOb4hGgvi2HSx8vwRKyJkVLl+hk43gdBAcfADBD1cA4RXIdZ1EN1Zjqem+DGoUc2oigjMUlvaV8YL/1qPVpuhOG+JwdH5m1Okn3m6Eacaz3V2jeI9uTbVYY6AKOSKw8MX0MBg2lXjh3r3Hk4s7ASdrMtSWxnoBpZIzIwP3e69lxv3Gay4q/F6zDJ5kq6s6amEnsafJ0Db8P9JKkx1w5wPJuY36IToojgNMzb8rLwmsuB2kW7YDWMSCgTg+YXx9+AQZKxdUaFZiju+a2Mi8uvnH0f2/2f9g4AVE4z4LlTilrlehag9xIpEam4jO4DXfdaV97nwtH5byW137VYD5Yc2YAz4YAGIYx2RLq0z1Sex8l//fUWfBI83jh4Kd1PEuAwqVGjWEwSS+nJJmt0sWu86d0frMQCR/LbWQ8hDAxlXMgUV69Q67ubv0q5FUNAlHKmVLnXE/gfREpUiaQHqAizXbO0UN98BMTSo39Cw7UW7E2Rc728qJGHP68ASbQyNYCQTkAUzCSwQ+CwvSjnsQPGLOnI/C0YO3Lwxq5yhhtqb1KNpGqT1TXvigJU0jh33xpAf7NymoGNDJ9sJtPkYuNkqTh7KnY8vGaoeZPy93+GA1joe4kzzv/SVLqvYngA/dFgVfnlb8tjtm6Ux+I39y/Gqone24IQM+GxL15UO3q7WrhsnhJatCs8PAC9md3OrPK0goaDyEj7uXsuXi0qg4HkIUGE52XHNqmXIl0RGOiHoUV7xb+v5K14SC39At79Ximdhc8ekjImuiyjsXryUszLnY40yThIhSi4bbUHsbfBJ6ZKE5dpQdz4HQOgf2a8tLvklY+M6cuvSnJummxSZ46+X+7biMzaRnSu84IauNYsE5HCOX+HDCPWi7DrKW8/BTcVZ2UN8Me57kc5448TaCYR5XJwC0BtHMwPjs/SgAP1pfuCqSL8Pxhr/wunLWAOAAAAAElFTkSuQmCC';
+export * from '../../../frontend/lib/utils/mock_data.js';
diff --git a/spec/javascripts/monitoring/components/dashboard_resize_spec.js b/spec/javascripts/monitoring/components/dashboard_resize_spec.js
index 2422934f4b3..6a35069ccff 100644
--- a/spec/javascripts/monitoring/components/dashboard_resize_spec.js
+++ b/spec/javascripts/monitoring/components/dashboard_resize_spec.js
@@ -112,7 +112,7 @@ describe('Dashboard', () => {
setupComponentStore(component);
return Vue.nextTick().then(() => {
- [, promPanel] = component.$el.querySelectorAll('.prometheus-panel');
+ [promPanel] = component.$el.querySelectorAll('.prometheus-panel');
promGroup = promPanel.querySelector('.prometheus-graph-group');
panelToggle = promPanel.querySelector('.js-graph-group-toggle');
chart = promGroup.querySelector('.position-relative svg');
diff --git a/spec/javascripts/notes/components/discussion_counter_spec.js b/spec/javascripts/notes/components/discussion_counter_spec.js
deleted file mode 100644
index 9c7aed43a3b..00000000000
--- a/spec/javascripts/notes/components/discussion_counter_spec.js
+++ /dev/null
@@ -1,90 +0,0 @@
-import Vue from 'vue';
-import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
-import createStore from '~/notes/stores';
-import DiscussionCounter from '~/notes/components/discussion_counter.vue';
-import { noteableDataMock, discussionMock, notesDataMock } from '../mock_data';
-
-describe('DiscussionCounter component', () => {
- let store;
- let vm;
- const notes = { currentDiscussionId: null };
-
- beforeEach(() => {
- window.mrTabs = {};
-
- const Component = Vue.extend(DiscussionCounter);
-
- store = createStore();
- store.dispatch('setNoteableData', noteableDataMock);
- store.dispatch('setNotesData', notesDataMock);
-
- vm = createComponentWithStore(Component, store);
- });
-
- afterEach(() => {
- vm.$destroy();
- });
-
- describe('methods', () => {
- describe('jumpToNextDiscussion', () => {
- it('expands unresolved discussion', () => {
- window.mrTabs.currentAction = 'show';
-
- spyOn(vm, 'expandDiscussion').and.stub();
- const discussions = [
- {
- ...discussionMock,
- id: discussionMock.id,
- notes: [{ ...discussionMock.notes[0], resolvable: true, resolved: true }],
- resolved: true,
- },
- {
- ...discussionMock,
- id: discussionMock.id + 1,
- notes: [{ ...discussionMock.notes[0], resolvable: true, resolved: false }],
- resolved: false,
- },
- ];
- const firstDiscussionId = discussionMock.id + 1;
- store.replaceState({
- ...store.state,
- discussions,
- notes,
- });
- vm.jumpToNextDiscussion();
-
- expect(vm.expandDiscussion).toHaveBeenCalledWith({ discussionId: firstDiscussionId });
- });
-
- it('jumps to next unresolved discussion from diff tab if all diff discussions are resolved', () => {
- window.mrTabs.currentAction = 'diff';
- spyOn(vm, 'switchToDiscussionsTabAndJumpTo').and.stub();
-
- const unresolvedId = discussionMock.id + 1;
- const discussions = [
- {
- ...discussionMock,
- id: discussionMock.id,
- diff_discussion: true,
- notes: [{ ...discussionMock.notes[0], resolvable: true, resolved: true }],
- resolved: true,
- },
- {
- ...discussionMock,
- id: unresolvedId,
- notes: [{ ...discussionMock.notes[0], resolvable: true, resolved: false }],
- resolved: false,
- },
- ];
- store.replaceState({
- ...store.state,
- discussions,
- notes,
- });
- vm.jumpToNextDiscussion();
-
- expect(vm.switchToDiscussionsTabAndJumpTo).toHaveBeenCalledWith(unresolvedId);
- });
- });
- });
-});
diff --git a/spec/javascripts/notes/components/noteable_note_spec.js b/spec/javascripts/notes/components/noteable_note_spec.js
index 5fbac7faefd..1906dae7800 100644
--- a/spec/javascripts/notes/components/noteable_note_spec.js
+++ b/spec/javascripts/notes/components/noteable_note_spec.js
@@ -1,4 +1,4 @@
-import _ from 'underscore';
+import { escape } from 'lodash';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import createStore from '~/notes/stores';
import issueNote from '~/notes/components/noteable_note.vue';
@@ -98,7 +98,7 @@ describe('issue_note', () => {
setTimeout(() => {
expect(alertSpy).not.toHaveBeenCalled();
- expect(wrapper.vm.note.note_html).toEqual(_.escape(noteBody));
+ expect(wrapper.vm.note.note_html).toEqual(escape(noteBody));
done();
}, 0);
});
diff --git a/spec/javascripts/notes/helpers.js b/spec/javascripts/notes/helpers.js
index 3f349b40ba5..7bcba609311 100644
--- a/spec/javascripts/notes/helpers.js
+++ b/spec/javascripts/notes/helpers.js
@@ -1,12 +1 @@
-// eslint-disable-next-line import/prefer-default-export
-export const resetStore = store => {
- store.replaceState({
- notes: [],
- targetNoteHash: null,
- lastFetchedAt: null,
-
- notesData: {},
- userData: {},
- noteableData: {},
- });
-};
+export * from '../../frontend/notes/helpers.js';
diff --git a/spec/javascripts/notes/stores/actions_spec.js b/spec/javascripts/notes/stores/actions_spec.js
deleted file mode 100644
index ec1f1392845..00000000000
--- a/spec/javascripts/notes/stores/actions_spec.js
+++ /dev/null
@@ -1,911 +0,0 @@
-import $ from 'jquery';
-import { TEST_HOST } from 'spec/test_constants';
-import AxiosMockAdapter from 'axios-mock-adapter';
-import Api from '~/api';
-import actionsModule, * as actions from '~/notes/stores/actions';
-import * as mutationTypes from '~/notes/stores/mutation_types';
-import * as notesConstants from '~/notes/constants';
-import createStore from '~/notes/stores';
-import mrWidgetEventHub from '~/vue_merge_request_widget/event_hub';
-import testAction from '../../helpers/vuex_action_helper';
-import { resetStore } from '../helpers';
-import {
- discussionMock,
- notesDataMock,
- userDataMock,
- noteableDataMock,
- individualNote,
-} from '../mock_data';
-import axios from '~/lib/utils/axios_utils';
-
-const TEST_ERROR_MESSAGE = 'Test error message';
-
-describe('Actions Notes Store', () => {
- let commit;
- let dispatch;
- let state;
- let store;
- let flashSpy;
- let axiosMock;
-
- beforeEach(() => {
- store = createStore();
- commit = jasmine.createSpy('commit');
- dispatch = jasmine.createSpy('dispatch');
- state = {};
- flashSpy = spyOnDependency(actionsModule, 'Flash');
- axiosMock = new AxiosMockAdapter(axios);
- });
-
- afterEach(() => {
- resetStore(store);
- axiosMock.restore();
- });
-
- describe('setNotesData', () => {
- it('should set received notes data', done => {
- testAction(
- actions.setNotesData,
- notesDataMock,
- { notesData: {} },
- [{ type: 'SET_NOTES_DATA', payload: notesDataMock }],
- [],
- done,
- );
- });
- });
-
- describe('setNoteableData', () => {
- it('should set received issue data', done => {
- testAction(
- actions.setNoteableData,
- noteableDataMock,
- { noteableData: {} },
- [{ type: 'SET_NOTEABLE_DATA', payload: noteableDataMock }],
- [],
- done,
- );
- });
- });
-
- describe('setUserData', () => {
- it('should set received user data', done => {
- testAction(
- actions.setUserData,
- userDataMock,
- { userData: {} },
- [{ type: 'SET_USER_DATA', payload: userDataMock }],
- [],
- done,
- );
- });
- });
-
- describe('setLastFetchedAt', () => {
- it('should set received timestamp', done => {
- testAction(
- actions.setLastFetchedAt,
- 'timestamp',
- { lastFetchedAt: {} },
- [{ type: 'SET_LAST_FETCHED_AT', payload: 'timestamp' }],
- [],
- done,
- );
- });
- });
-
- describe('setInitialNotes', () => {
- it('should set initial notes', done => {
- testAction(
- actions.setInitialNotes,
- [individualNote],
- { notes: [] },
- [{ type: 'SET_INITIAL_DISCUSSIONS', payload: [individualNote] }],
- [],
- done,
- );
- });
- });
-
- describe('setTargetNoteHash', () => {
- it('should set target note hash', done => {
- testAction(
- actions.setTargetNoteHash,
- 'hash',
- { notes: [] },
- [{ type: 'SET_TARGET_NOTE_HASH', payload: 'hash' }],
- [],
- done,
- );
- });
- });
-
- describe('toggleDiscussion', () => {
- it('should toggle discussion', done => {
- testAction(
- actions.toggleDiscussion,
- { discussionId: discussionMock.id },
- { notes: [discussionMock] },
- [{ type: 'TOGGLE_DISCUSSION', payload: { discussionId: discussionMock.id } }],
- [],
- done,
- );
- });
- });
-
- describe('expandDiscussion', () => {
- it('should expand discussion', done => {
- testAction(
- actions.expandDiscussion,
- { discussionId: discussionMock.id },
- { notes: [discussionMock] },
- [{ type: 'EXPAND_DISCUSSION', payload: { discussionId: discussionMock.id } }],
- [{ type: 'diffs/renderFileForDiscussionId', payload: discussionMock.id }],
- done,
- );
- });
- });
-
- describe('collapseDiscussion', () => {
- it('should commit collapse discussion', done => {
- testAction(
- actions.collapseDiscussion,
- { discussionId: discussionMock.id },
- { notes: [discussionMock] },
- [{ type: 'COLLAPSE_DISCUSSION', payload: { discussionId: discussionMock.id } }],
- [],
- done,
- );
- });
- });
-
- describe('async methods', () => {
- beforeEach(() => {
- axiosMock.onAny().reply(200, {});
- });
-
- describe('closeIssue', () => {
- it('sets state as closed', done => {
- store
- .dispatch('closeIssue', { notesData: { closeIssuePath: '' } })
- .then(() => {
- expect(store.state.noteableData.state).toEqual('closed');
- expect(store.state.isToggleStateButtonLoading).toEqual(false);
- done();
- })
- .catch(done.fail);
- });
- });
-
- describe('reopenIssue', () => {
- it('sets state as reopened', done => {
- store
- .dispatch('reopenIssue', { notesData: { reopenIssuePath: '' } })
- .then(() => {
- expect(store.state.noteableData.state).toEqual('reopened');
- expect(store.state.isToggleStateButtonLoading).toEqual(false);
- done();
- })
- .catch(done.fail);
- });
- });
- });
-
- describe('emitStateChangedEvent', () => {
- it('emits an event on the document', () => {
- document.addEventListener('issuable_vue_app:change', event => {
- expect(event.detail.data).toEqual({ id: '1', state: 'closed' });
- expect(event.detail.isClosed).toEqual(false);
- });
-
- store.dispatch('emitStateChangedEvent', { id: '1', state: 'closed' });
- });
- });
-
- describe('toggleStateButtonLoading', () => {
- it('should set loading as true', done => {
- testAction(
- actions.toggleStateButtonLoading,
- true,
- {},
- [{ type: 'TOGGLE_STATE_BUTTON_LOADING', payload: true }],
- [],
- done,
- );
- });
-
- it('should set loading as false', done => {
- testAction(
- actions.toggleStateButtonLoading,
- false,
- {},
- [{ type: 'TOGGLE_STATE_BUTTON_LOADING', payload: false }],
- [],
- done,
- );
- });
- });
-
- describe('toggleIssueLocalState', () => {
- it('sets issue state as closed', done => {
- testAction(actions.toggleIssueLocalState, 'closed', {}, [{ type: 'CLOSE_ISSUE' }], [], done);
- });
-
- it('sets issue state as reopened', done => {
- testAction(
- actions.toggleIssueLocalState,
- 'reopened',
- {},
- [{ type: 'REOPEN_ISSUE' }],
- [],
- done,
- );
- });
- });
-
- describe('poll', () => {
- beforeEach(done => {
- jasmine.clock().install();
-
- spyOn(axios, 'get').and.callThrough();
-
- store
- .dispatch('setNotesData', notesDataMock)
- .then(done)
- .catch(done.fail);
- });
-
- afterEach(() => {
- jasmine.clock().uninstall();
- });
-
- it('calls service with last fetched state', done => {
- axiosMock
- .onAny()
- .reply(200, { notes: [], last_fetched_at: '123456' }, { 'poll-interval': '1000' });
-
- store
- .dispatch('poll')
- .then(() => new Promise(resolve => requestAnimationFrame(resolve)))
- .then(() => {
- expect(axios.get).toHaveBeenCalled();
- expect(store.state.lastFetchedAt).toBe('123456');
-
- jasmine.clock().tick(1500);
- })
- .then(
- () =>
- new Promise(resolve => {
- requestAnimationFrame(resolve);
- }),
- )
- .then(() => {
- expect(axios.get.calls.count()).toBe(2);
- expect(axios.get.calls.mostRecent().args[1].headers).toEqual({
- 'X-Last-Fetched-At': '123456',
- });
- })
- .then(() => store.dispatch('stopPolling'))
- .then(done)
- .catch(done.fail);
- });
- });
-
- describe('setNotesFetchedState', () => {
- it('should set notes fetched state', done => {
- testAction(
- actions.setNotesFetchedState,
- true,
- {},
- [{ type: 'SET_NOTES_FETCHED_STATE', payload: true }],
- [],
- done,
- );
- });
- });
-
- describe('removeNote', () => {
- const endpoint = `${TEST_HOST}/note`;
-
- beforeEach(() => {
- axiosMock.onDelete(endpoint).replyOnce(200, {});
-
- $('body').attr('data-page', '');
- });
-
- afterEach(() => {
- axiosMock.restore();
-
- $('body').attr('data-page', '');
- });
-
- it('commits DELETE_NOTE and dispatches updateMergeRequestWidget', done => {
- const note = { path: endpoint, id: 1 };
-
- testAction(
- actions.removeNote,
- note,
- store.state,
- [
- {
- type: 'DELETE_NOTE',
- payload: note,
- },
- ],
- [
- {
- type: 'updateMergeRequestWidget',
- },
- {
- type: 'updateResolvableDiscussionsCounts',
- },
- ],
- done,
- );
- });
-
- it('dispatches removeDiscussionsFromDiff on merge request page', done => {
- const note = { path: endpoint, id: 1 };
-
- $('body').attr('data-page', 'projects:merge_requests:show');
-
- testAction(
- actions.removeNote,
- note,
- store.state,
- [
- {
- type: 'DELETE_NOTE',
- payload: note,
- },
- ],
- [
- {
- type: 'updateMergeRequestWidget',
- },
- {
- type: 'updateResolvableDiscussionsCounts',
- },
- {
- type: 'diffs/removeDiscussionsFromDiff',
- },
- ],
- done,
- );
- });
- });
-
- describe('deleteNote', () => {
- const endpoint = `${TEST_HOST}/note`;
-
- beforeEach(() => {
- axiosMock.onDelete(endpoint).replyOnce(200, {});
-
- $('body').attr('data-page', '');
- });
-
- afterEach(() => {
- axiosMock.restore();
-
- $('body').attr('data-page', '');
- });
-
- it('dispatches removeNote', done => {
- const note = { path: endpoint, id: 1 };
-
- testAction(
- actions.deleteNote,
- note,
- {},
- [],
- [
- {
- type: 'removeNote',
- payload: {
- id: 1,
- path: 'http://test.host/note',
- },
- },
- ],
- done,
- );
- });
- });
-
- describe('createNewNote', () => {
- describe('success', () => {
- const res = {
- id: 1,
- valid: true,
- };
-
- beforeEach(() => {
- axiosMock.onAny().reply(200, res);
- });
-
- it('commits ADD_NEW_NOTE and dispatches updateMergeRequestWidget', done => {
- testAction(
- actions.createNewNote,
- { endpoint: `${gl.TEST_HOST}`, data: {} },
- store.state,
- [
- {
- type: 'ADD_NEW_NOTE',
- payload: res,
- },
- ],
- [
- {
- type: 'updateMergeRequestWidget',
- },
- {
- type: 'startTaskList',
- },
- {
- type: 'updateResolvableDiscussionsCounts',
- },
- ],
- done,
- );
- });
- });
-
- describe('error', () => {
- const res = {
- errors: ['error'],
- };
-
- beforeEach(() => {
- axiosMock.onAny().replyOnce(200, res);
- });
-
- it('does not commit ADD_NEW_NOTE or dispatch updateMergeRequestWidget', done => {
- testAction(
- actions.createNewNote,
- { endpoint: `${gl.TEST_HOST}`, data: {} },
- store.state,
- [],
- [],
- done,
- );
- });
- });
- });
-
- describe('toggleResolveNote', () => {
- const res = {
- resolved: true,
- };
-
- beforeEach(() => {
- axiosMock.onAny().reply(200, res);
- });
-
- describe('as note', () => {
- it('commits UPDATE_NOTE and dispatches updateMergeRequestWidget', done => {
- testAction(
- actions.toggleResolveNote,
- { endpoint: `${gl.TEST_HOST}`, isResolved: true, discussion: false },
- store.state,
- [
- {
- type: 'UPDATE_NOTE',
- payload: res,
- },
- ],
- [
- {
- type: 'updateResolvableDiscussionsCounts',
- },
- {
- type: 'updateMergeRequestWidget',
- },
- ],
- done,
- );
- });
- });
-
- describe('as discussion', () => {
- it('commits UPDATE_DISCUSSION and dispatches updateMergeRequestWidget', done => {
- testAction(
- actions.toggleResolveNote,
- { endpoint: `${gl.TEST_HOST}`, isResolved: true, discussion: true },
- store.state,
- [
- {
- type: 'UPDATE_DISCUSSION',
- payload: res,
- },
- ],
- [
- {
- type: 'updateResolvableDiscussionsCounts',
- },
- {
- type: 'updateMergeRequestWidget',
- },
- ],
- done,
- );
- });
- });
- });
-
- describe('updateMergeRequestWidget', () => {
- it('calls mrWidget checkStatus', () => {
- spyOn(mrWidgetEventHub, '$emit');
-
- actions.updateMergeRequestWidget();
-
- expect(mrWidgetEventHub.$emit).toHaveBeenCalledWith('mr.discussion.updated');
- });
- });
-
- describe('setCommentsDisabled', () => {
- it('should set comments disabled state', done => {
- testAction(
- actions.setCommentsDisabled,
- true,
- null,
- [{ type: 'DISABLE_COMMENTS', payload: true }],
- [],
- done,
- );
- });
- });
-
- describe('updateResolvableDiscussionsCounts', () => {
- it('commits UPDATE_RESOLVABLE_DISCUSSIONS_COUNTS', done => {
- testAction(
- actions.updateResolvableDiscussionsCounts,
- null,
- {},
- [{ type: 'UPDATE_RESOLVABLE_DISCUSSIONS_COUNTS' }],
- [],
- done,
- );
- });
- });
-
- describe('convertToDiscussion', () => {
- it('commits CONVERT_TO_DISCUSSION with noteId', done => {
- const noteId = 'dummy-note-id';
- testAction(
- actions.convertToDiscussion,
- noteId,
- {},
- [{ type: 'CONVERT_TO_DISCUSSION', payload: noteId }],
- [],
- done,
- );
- });
- });
-
- describe('updateOrCreateNotes', () => {
- it('Updates existing note', () => {
- const note = { id: 1234 };
- const getters = { notesById: { 1234: note } };
-
- actions.updateOrCreateNotes({ commit, state, getters, dispatch }, [note]);
-
- expect(commit.calls.allArgs()).toEqual([[mutationTypes.UPDATE_NOTE, note]]);
- });
-
-    it('Creates a new note if none exists', () => {
- const note = { id: 1234 };
- const getters = { notesById: {} };
- actions.updateOrCreateNotes({ commit, state, getters, dispatch }, [note]);
-
- expect(commit.calls.allArgs()).toEqual([[mutationTypes.ADD_NEW_NOTE, note]]);
- });
-
- describe('Discussion notes', () => {
- let note;
- let getters;
-
- beforeEach(() => {
- note = { id: 1234 };
- getters = { notesById: {} };
- });
-
- it('Adds a reply to an existing discussion', () => {
- state = { discussions: [note] };
- const discussionNote = {
- ...note,
- type: notesConstants.DISCUSSION_NOTE,
- discussion_id: 1234,
- };
-
- actions.updateOrCreateNotes({ commit, state, getters, dispatch }, [discussionNote]);
-
- expect(commit.calls.allArgs()).toEqual([
- [mutationTypes.ADD_NEW_REPLY_TO_DISCUSSION, discussionNote],
- ]);
- });
-
- it('fetches discussions for diff notes', () => {
- state = { discussions: [], notesData: { discussionsPath: 'Hello world' } };
- const diffNote = { ...note, type: notesConstants.DIFF_NOTE, discussion_id: 1234 };
-
- actions.updateOrCreateNotes({ commit, state, getters, dispatch }, [diffNote]);
-
- expect(dispatch.calls.allArgs()).toEqual([
- ['fetchDiscussions', { path: state.notesData.discussionsPath }],
- ]);
- });
-
- it('Adds a new note', () => {
- state = { discussions: [] };
- const discussionNote = {
- ...note,
- type: notesConstants.DISCUSSION_NOTE,
- discussion_id: 1234,
- };
-
- actions.updateOrCreateNotes({ commit, state, getters, dispatch }, [discussionNote]);
-
- expect(commit.calls.allArgs()).toEqual([[mutationTypes.ADD_NEW_NOTE, discussionNote]]);
- });
- });
- });
-
- describe('replyToDiscussion', () => {
- const payload = { endpoint: TEST_HOST, data: {} };
-
-    it('updates discussion if response contains discussion', done => {
- const discussion = { notes: [] };
- axiosMock.onAny().reply(200, { discussion });
-
- testAction(
- actions.replyToDiscussion,
- payload,
- {
- notesById: {},
- },
- [{ type: mutationTypes.UPDATE_DISCUSSION, payload: discussion }],
- [
- { type: 'updateMergeRequestWidget' },
- { type: 'startTaskList' },
- { type: 'updateResolvableDiscussionsCounts' },
- ],
- done,
- );
- });
-
- it('adds a reply to a discussion', done => {
- const res = {};
- axiosMock.onAny().reply(200, res);
-
- testAction(
- actions.replyToDiscussion,
- payload,
- {
- notesById: {},
- },
- [{ type: mutationTypes.ADD_NEW_REPLY_TO_DISCUSSION, payload: res }],
- [],
- done,
- );
- });
- });
-
- describe('removeConvertedDiscussion', () => {
- it('commits CONVERT_TO_DISCUSSION with noteId', done => {
- const noteId = 'dummy-id';
- testAction(
- actions.removeConvertedDiscussion,
- noteId,
- {},
- [{ type: 'REMOVE_CONVERTED_DISCUSSION', payload: noteId }],
- [],
- done,
- );
- });
- });
-
- describe('resolveDiscussion', () => {
- let getters;
- let discussionId;
-
- beforeEach(() => {
- discussionId = discussionMock.id;
- state.discussions = [discussionMock];
- getters = {
- isDiscussionResolved: () => false,
- };
- });
-
- it('when unresolved, dispatches action', done => {
- testAction(
- actions.resolveDiscussion,
- { discussionId },
- { ...state, ...getters },
- [],
- [
- {
- type: 'toggleResolveNote',
- payload: {
- endpoint: discussionMock.resolve_path,
- isResolved: false,
- discussion: true,
- },
- },
- ],
- done,
- );
- });
-
- it('when resolved, does nothing', done => {
- getters.isDiscussionResolved = id => id === discussionId;
-
- testAction(
- actions.resolveDiscussion,
- { discussionId },
- { ...state, ...getters },
- [],
- [],
- done,
- );
- });
- });
-
- describe('saveNote', () => {
- const flashContainer = {};
- const payload = { endpoint: TEST_HOST, data: { 'note[note]': 'some text' }, flashContainer };
-
- describe('if response contains errors', () => {
- const res = { errors: { something: ['went wrong'] } };
- const error = { message: 'Unprocessable entity', response: { data: res } };
-
- it('throws an error', done => {
- actions
- .saveNote(
- {
- commit() {},
- dispatch: () => Promise.reject(error),
- },
- payload,
- )
- .then(() => done.fail('Expected error to be thrown!'))
- .catch(err => {
- expect(err).toBe(error);
- expect(flashSpy).not.toHaveBeenCalled();
- })
- .then(done)
- .catch(done.fail);
- });
- });
-
- describe('if response contains errors.base', () => {
- const res = { errors: { base: ['something went wrong'] } };
- const error = { message: 'Unprocessable entity', response: { data: res } };
-
- it('sets flash alert using errors.base message', done => {
- actions
- .saveNote(
- {
- commit() {},
- dispatch: () => Promise.reject(error),
- },
- { ...payload, flashContainer },
- )
- .then(resp => {
- expect(resp.hasFlash).toBe(true);
- expect(flashSpy).toHaveBeenCalledWith(
- 'Your comment could not be submitted because something went wrong',
- 'alert',
- flashContainer,
- );
- })
- .catch(() => done.fail('Expected success response!'))
- .then(done)
- .catch(done.fail);
- });
- });
-
- describe('if response contains no errors', () => {
- const res = { valid: true };
-
- it('returns the response', done => {
- actions
- .saveNote(
- {
- commit() {},
- dispatch: () => Promise.resolve(res),
- },
- payload,
- )
- .then(data => {
- expect(data).toBe(res);
- expect(flashSpy).not.toHaveBeenCalled();
- })
- .then(done)
- .catch(done.fail);
- });
- });
- });
-
- describe('submitSuggestion', () => {
- const discussionId = 'discussion-id';
- const noteId = 'note-id';
- const suggestionId = 'suggestion-id';
- let flashContainer;
-
- beforeEach(() => {
- spyOn(Api, 'applySuggestion');
- dispatch.and.returnValue(Promise.resolve());
- Api.applySuggestion.and.returnValue(Promise.resolve());
- flashContainer = {};
- });
-
- const testSubmitSuggestion = (done, expectFn) => {
- actions
- .submitSuggestion(
- { commit, dispatch },
- { discussionId, noteId, suggestionId, flashContainer },
- )
- .then(expectFn)
- .then(done)
- .catch(done.fail);
- };
-
-    it('when service succeeds, commits and resolves discussion', done => {
- testSubmitSuggestion(done, () => {
- expect(commit.calls.allArgs()).toEqual([
- [mutationTypes.APPLY_SUGGESTION, { discussionId, noteId, suggestionId }],
- ]);
-
- expect(dispatch.calls.allArgs()).toEqual([['resolveDiscussion', { discussionId }]]);
- expect(flashSpy).not.toHaveBeenCalled();
- });
- });
-
- it('when service fails, flashes error message', done => {
- const response = { response: { data: { message: TEST_ERROR_MESSAGE } } };
-
- Api.applySuggestion.and.returnValue(Promise.reject(response));
-
- testSubmitSuggestion(done, () => {
- expect(commit).not.toHaveBeenCalled();
- expect(dispatch).not.toHaveBeenCalled();
- expect(flashSpy).toHaveBeenCalledWith(`${TEST_ERROR_MESSAGE}.`, 'alert', flashContainer);
- });
- });
-
-    it('when resolving the discussion fails, fails gracefully', done => {
- dispatch.and.returnValue(Promise.reject());
-
- testSubmitSuggestion(done, () => {
- expect(flashSpy).not.toHaveBeenCalled();
- });
- });
- });
-
- describe('filterDiscussion', () => {
- const path = 'some-discussion-path';
- const filter = 0;
-
- beforeEach(() => {
- dispatch.and.returnValue(new Promise(() => {}));
- });
-
- it('fetches discussions with filter and persistFilter false', () => {
- actions.filterDiscussion({ dispatch }, { path, filter, persistFilter: false });
-
- expect(dispatch.calls.allArgs()).toEqual([
- ['setLoadingState', true],
- ['fetchDiscussions', { path, filter, persistFilter: false }],
- ]);
- });
-
- it('fetches discussions with filter and persistFilter true', () => {
- actions.filterDiscussion({ dispatch }, { path, filter, persistFilter: true });
-
- expect(dispatch.calls.allArgs()).toEqual([
- ['setLoadingState', true],
- ['fetchDiscussions', { path, filter, persistFilter: true }],
- ]);
- });
- });
-});
diff --git a/spec/javascripts/pdf/index_spec.js b/spec/javascripts/pdf/index_spec.js
index e14f1b27f6c..39cd4dacd70 100644
--- a/spec/javascripts/pdf/index_spec.js
+++ b/spec/javascripts/pdf/index_spec.js
@@ -1,13 +1,10 @@
import Vue from 'vue';
-import { GlobalWorkerOptions } from 'pdfjs-dist/build/pdf';
-import workerSrc from 'pdfjs-dist/build/pdf.worker.min';
import { FIXTURES_PATH } from 'spec/test_constants';
import PDFLab from '~/pdf/index.vue';
const pdf = `${FIXTURES_PATH}/blob/pdf/test.pdf`;
-GlobalWorkerOptions.workerSrc = workerSrc;
const Component = Vue.extend(PDFLab);
describe('PDF component', () => {
diff --git a/spec/javascripts/pdf/page_spec.js b/spec/javascripts/pdf/page_spec.js
index bb2294e8d18..cc2cc204ee3 100644
--- a/spec/javascripts/pdf/page_spec.js
+++ b/spec/javascripts/pdf/page_spec.js
@@ -1,6 +1,5 @@
import Vue from 'vue';
-import pdfjsLib from 'pdfjs-dist/build/pdf';
-import workerSrc from 'pdfjs-dist/build/pdf.worker.min';
+import pdfjsLib from 'pdfjs-dist/webpack';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
import { FIXTURES_PATH } from 'spec/test_constants';
@@ -14,7 +13,6 @@ describe('Page component', () => {
let testPage;
beforeEach(done => {
- pdfjsLib.GlobalWorkerOptions.workerSrc = workerSrc;
pdfjsLib
.getDocument(testPDF)
.promise.then(pdf => pdf.getPage(1))
diff --git a/spec/javascripts/releases/components/app_index_spec.js b/spec/javascripts/releases/components/app_index_spec.js
index bcf062f357a..020937d07e5 100644
--- a/spec/javascripts/releases/components/app_index_spec.js
+++ b/spec/javascripts/releases/components/app_index_spec.js
@@ -1,4 +1,4 @@
-import _ from 'underscore';
+import { range as rge } from 'lodash';
import Vue from 'vue';
import { mountComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
import app from '~/releases/components/app_index.vue';
@@ -12,6 +12,8 @@ import {
release,
releases,
} from '../mock_data';
+import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+import waitForPromises from 'spec/helpers/wait_for_promises';
describe('Releases App ', () => {
const Component = Vue.extend(app);
@@ -21,13 +23,16 @@ describe('Releases App ', () => {
const props = {
projectId: 'gitlab-ce',
- documentationLink: 'help/releases',
+ documentationPath: 'help/releases',
illustrationPath: 'illustration/path',
};
beforeEach(() => {
- store = createStore({ list: listModule });
- releasesPagination = _.range(21).map(index => ({ ...release, tag_name: `${index}.00` }));
+ store = createStore({ modules: { list: listModule } });
+ releasesPagination = rge(21).map(index => ({
+ ...convertObjectPropsToCamelCase(release, { deep: true }),
+ tagName: `${index}.00`,
+ }));
});
afterEach(() => {
@@ -47,9 +52,9 @@ describe('Releases App ', () => {
expect(vm.$el.querySelector('.js-success-state')).toBeNull();
expect(vm.$el.querySelector('.gl-pagination')).toBeNull();
- setTimeout(() => {
- done();
- }, 0);
+ waitForPromises()
+ .then(done)
+ .catch(done.fail);
});
});
@@ -62,14 +67,16 @@ describe('Releases App ', () => {
});
it('renders success state', done => {
- setTimeout(() => {
- expect(vm.$el.querySelector('.js-loading')).toBeNull();
- expect(vm.$el.querySelector('.js-empty-state')).toBeNull();
- expect(vm.$el.querySelector('.js-success-state')).not.toBeNull();
- expect(vm.$el.querySelector('.gl-pagination')).toBeNull();
-
- done();
- }, 0);
+ waitForPromises()
+ .then(() => {
+ expect(vm.$el.querySelector('.js-loading')).toBeNull();
+ expect(vm.$el.querySelector('.js-empty-state')).toBeNull();
+ expect(vm.$el.querySelector('.js-success-state')).not.toBeNull();
+ expect(vm.$el.querySelector('.gl-pagination')).toBeNull();
+
+ done();
+ })
+ .catch(done.fail);
});
});
@@ -82,14 +89,16 @@ describe('Releases App ', () => {
});
it('renders success state', done => {
- setTimeout(() => {
- expect(vm.$el.querySelector('.js-loading')).toBeNull();
- expect(vm.$el.querySelector('.js-empty-state')).toBeNull();
- expect(vm.$el.querySelector('.js-success-state')).not.toBeNull();
- expect(vm.$el.querySelector('.gl-pagination')).not.toBeNull();
-
- done();
- }, 0);
+ waitForPromises()
+ .then(() => {
+ expect(vm.$el.querySelector('.js-loading')).toBeNull();
+ expect(vm.$el.querySelector('.js-empty-state')).toBeNull();
+ expect(vm.$el.querySelector('.js-success-state')).not.toBeNull();
+ expect(vm.$el.querySelector('.gl-pagination')).not.toBeNull();
+
+ done();
+ })
+ .catch(done.fail);
});
});
@@ -100,14 +109,76 @@ describe('Releases App ', () => {
});
it('renders empty state', done => {
- setTimeout(() => {
- expect(vm.$el.querySelector('.js-loading')).toBeNull();
- expect(vm.$el.querySelector('.js-empty-state')).not.toBeNull();
- expect(vm.$el.querySelector('.js-success-state')).toBeNull();
- expect(vm.$el.querySelector('.gl-pagination')).toBeNull();
-
- done();
- }, 0);
+ waitForPromises()
+ .then(() => {
+ expect(vm.$el.querySelector('.js-loading')).toBeNull();
+ expect(vm.$el.querySelector('.js-empty-state')).not.toBeNull();
+ expect(vm.$el.querySelector('.js-success-state')).toBeNull();
+ expect(vm.$el.querySelector('.gl-pagination')).toBeNull();
+
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ describe('"New release" button', () => {
+ const findNewReleaseButton = () => vm.$el.querySelector('.js-new-release-btn');
+
+ beforeEach(() => {
+ spyOn(api, 'releases').and.returnValue(Promise.resolve({ data: [], headers: {} }));
+ });
+
+ const factory = additionalProps => {
+ vm = mountComponentWithStore(Component, {
+ props: {
+ ...props,
+ ...additionalProps,
+ },
+ store,
+ });
+ };
+
+ describe('when the user is allowed to create a new Release', () => {
+ const newReleasePath = 'path/to/new/release';
+
+ beforeEach(() => {
+ factory({ newReleasePath });
+ });
+
+ it('renders the "New release" button', done => {
+ waitForPromises()
+ .then(() => {
+ expect(findNewReleaseButton()).not.toBeNull();
+
+ done();
+ })
+ .catch(done.fail);
+ });
+
+ it('renders the "New release" button with the correct href', done => {
+ waitForPromises()
+ .then(() => {
+ expect(findNewReleaseButton().getAttribute('href')).toBe(newReleasePath);
+
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ describe('when the user is not allowed to create a new Release', () => {
+ beforeEach(() => factory());
+
+ it('does not render the "New release" button', done => {
+ waitForPromises()
+ .then(() => {
+ expect(findNewReleaseButton()).toBeNull();
+
+ done();
+ })
+ .catch(done.fail);
+ });
});
});
});
diff --git a/spec/javascripts/releases/stores/modules/list/actions_spec.js b/spec/javascripts/releases/stores/modules/list/actions_spec.js
index 037c9d8d54a..bf85e18997b 100644
--- a/spec/javascripts/releases/stores/modules/list/actions_spec.js
+++ b/spec/javascripts/releases/stores/modules/list/actions_spec.js
@@ -8,16 +8,18 @@ import {
import state from '~/releases/stores/modules/list/state';
import * as types from '~/releases/stores/modules/list/mutation_types';
import api from '~/api';
-import { parseIntPagination } from '~/lib/utils/common_utils';
-import { pageInfoHeadersWithoutPagination, releases } from '../../../mock_data';
+import { parseIntPagination, convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+import { pageInfoHeadersWithoutPagination, releases as originalReleases } from '../../../mock_data';
describe('Releases State actions', () => {
let mockedState;
let pageInfo;
+ let releases;
beforeEach(() => {
mockedState = state();
pageInfo = parseIntPagination(pageInfoHeadersWithoutPagination);
+ releases = convertObjectPropsToCamelCase(originalReleases, { deep: true });
});
describe('requestReleases', () => {
diff --git a/spec/javascripts/reports/components/grouped_test_reports_app_spec.js b/spec/javascripts/reports/components/grouped_test_reports_app_spec.js
index 154aa881d2d..bafc47c952a 100644
--- a/spec/javascripts/reports/components/grouped_test_reports_app_spec.js
+++ b/spec/javascripts/reports/components/grouped_test_reports_app_spec.js
@@ -84,12 +84,10 @@ describe('Grouped Test Reports App', () => {
setTimeout(() => {
expect(vm.$el.querySelector('.gl-spinner')).toBeNull();
expect(vm.$el.querySelector('.js-code-text').textContent.trim()).toEqual(
- 'Test summary contained 2 failed/error test results out of 11 total tests',
+ 'Test summary contained 2 failed out of 11 total tests',
);
- expect(vm.$el.textContent).toContain(
- 'rspec:pg found 2 failed/error test results out of 8 total tests',
- );
+ expect(vm.$el.textContent).toContain('rspec:pg found 2 failed out of 8 total tests');
expect(vm.$el.textContent).toContain('New');
expect(vm.$el.textContent).toContain(
@@ -112,12 +110,10 @@ describe('Grouped Test Reports App', () => {
setTimeout(() => {
expect(vm.$el.querySelector('.gl-spinner')).toBeNull();
expect(vm.$el.querySelector('.js-code-text').textContent.trim()).toEqual(
- 'Test summary contained 2 failed/error test results out of 11 total tests',
+ 'Test summary contained 2 errors out of 11 total tests',
);
- expect(vm.$el.textContent).toContain(
- 'karma found 2 failed/error test results out of 3 total tests',
- );
+ expect(vm.$el.textContent).toContain('karma found 2 errors out of 3 total tests');
expect(vm.$el.textContent).toContain('New');
expect(vm.$el.textContent).toContain(
@@ -140,17 +136,15 @@ describe('Grouped Test Reports App', () => {
setTimeout(() => {
expect(vm.$el.querySelector('.gl-spinner')).toBeNull();
expect(vm.$el.querySelector('.js-code-text').textContent.trim()).toEqual(
- 'Test summary contained 2 failed/error test results and 2 fixed test results out of 11 total tests',
+ 'Test summary contained 2 failed and 2 fixed test results out of 11 total tests',
);
expect(vm.$el.textContent).toContain(
- 'rspec:pg found 1 failed/error test result and 2 fixed test results out of 8 total tests',
+ 'rspec:pg found 1 failed and 2 fixed test results out of 8 total tests',
);
expect(vm.$el.textContent).toContain('New');
- expect(vm.$el.textContent).toContain(
- ' java ant found 1 failed/error test result out of 3 total tests',
- );
+ expect(vm.$el.textContent).toContain(' java ant found 1 failed out of 3 total tests');
done();
}, 0);
});
diff --git a/spec/javascripts/search_spec.js b/spec/javascripts/search_spec.js
deleted file mode 100644
index 32f60508fa3..00000000000
--- a/spec/javascripts/search_spec.js
+++ /dev/null
@@ -1,41 +0,0 @@
-import $ from 'jquery';
-import Api from '~/api';
-import Search from '~/pages/search/show/search';
-
-describe('Search', () => {
- const fixturePath = 'search/show.html';
- const searchTerm = 'some search';
- const fillDropdownInput = dropdownSelector => {
- const dropdownElement = document.querySelector(dropdownSelector).parentNode;
- const inputElement = dropdownElement.querySelector('.dropdown-input-field');
- inputElement.value = searchTerm;
- return inputElement;
- };
-
- preloadFixtures(fixturePath);
-
- beforeEach(() => {
- loadFixtures(fixturePath);
- new Search(); // eslint-disable-line no-new
- });
-
- it('requests groups from backend when filtering', done => {
- spyOn(Api, 'groups').and.callFake(term => {
- expect(term).toBe(searchTerm);
- done();
- });
- const inputElement = fillDropdownInput('.js-search-group-dropdown');
-
- $(inputElement).trigger('input');
- });
-
- it('requests projects from backend when filtering', done => {
- spyOn(Api, 'projects').and.callFake(term => {
- expect(term).toBe(searchTerm);
- done();
- });
- const inputElement = fillDropdownInput('.js-search-project-dropdown');
-
- $(inputElement).trigger('input');
- });
-});
diff --git a/spec/javascripts/sidebar/assignee_title_spec.js b/spec/javascripts/sidebar/assignee_title_spec.js
deleted file mode 100644
index 0496e280a21..00000000000
--- a/spec/javascripts/sidebar/assignee_title_spec.js
+++ /dev/null
@@ -1,123 +0,0 @@
-import Vue from 'vue';
-import { mockTracking, triggerEvent } from 'spec/helpers/tracking_helper';
-import AssigneeTitle from '~/sidebar/components/assignees/assignee_title.vue';
-
-describe('AssigneeTitle component', () => {
- let component;
- let AssigneeTitleComponent;
-
- beforeEach(() => {
- AssigneeTitleComponent = Vue.extend(AssigneeTitle);
- });
-
- describe('assignee title', () => {
- it('renders assignee', () => {
- component = new AssigneeTitleComponent({
- propsData: {
- numberOfAssignees: 1,
- editable: false,
- },
- }).$mount();
-
- expect(component.$el.innerText.trim()).toEqual('Assignee');
- });
-
- it('renders 2 assignees', () => {
- component = new AssigneeTitleComponent({
- propsData: {
- numberOfAssignees: 2,
- editable: false,
- },
- }).$mount();
-
- expect(component.$el.innerText.trim()).toEqual('2 Assignees');
- });
- });
-
- describe('gutter toggle', () => {
- it('does not show toggle by default', () => {
- component = new AssigneeTitleComponent({
- propsData: {
- numberOfAssignees: 2,
- editable: false,
- },
- }).$mount();
-
- expect(component.$el.querySelector('.gutter-toggle')).toBeNull();
- });
-
- it('shows toggle when showToggle is true', () => {
- component = new AssigneeTitleComponent({
- propsData: {
- numberOfAssignees: 2,
- editable: false,
- showToggle: true,
- },
- }).$mount();
-
- expect(component.$el.querySelector('.gutter-toggle')).toEqual(jasmine.any(Object));
- });
- });
-
- it('does not render spinner by default', () => {
- component = new AssigneeTitleComponent({
- propsData: {
- numberOfAssignees: 0,
- editable: false,
- },
- }).$mount();
-
- expect(component.$el.querySelector('.fa')).toBeNull();
- });
-
- it('renders spinner when loading', () => {
- component = new AssigneeTitleComponent({
- propsData: {
- loading: true,
- numberOfAssignees: 0,
- editable: false,
- },
- }).$mount();
-
- expect(component.$el.querySelector('.fa')).not.toBeNull();
- });
-
- it('does not render edit link when not editable', () => {
- component = new AssigneeTitleComponent({
- propsData: {
- numberOfAssignees: 0,
- editable: false,
- },
- }).$mount();
-
- expect(component.$el.querySelector('.edit-link')).toBeNull();
- });
-
- it('renders edit link when editable', () => {
- component = new AssigneeTitleComponent({
- propsData: {
- numberOfAssignees: 0,
- editable: true,
- },
- }).$mount();
-
- expect(component.$el.querySelector('.edit-link')).not.toBeNull();
- });
-
- it('tracks the event when edit is clicked', () => {
- component = new AssigneeTitleComponent({
- propsData: {
- numberOfAssignees: 0,
- editable: true,
- },
- }).$mount();
-
- const spy = mockTracking('_category_', component.$el, spyOn);
- triggerEvent('.js-sidebar-dropdown-toggle');
-
- expect(spy).toHaveBeenCalledWith('_category_', 'click_edit_button', {
- label: 'right_sidebar',
- property: 'assignee',
- });
- });
-});
diff --git a/spec/javascripts/sidebar/lock/lock_issue_sidebar_spec.js b/spec/javascripts/sidebar/lock/lock_issue_sidebar_spec.js
index 0a3f0d6901f..5296908afe2 100644
--- a/spec/javascripts/sidebar/lock/lock_issue_sidebar_spec.js
+++ b/spec/javascripts/sidebar/lock/lock_issue_sidebar_spec.js
@@ -83,4 +83,17 @@ describe('LockIssueSidebar', () => {
done();
});
});
+
+ it('does not display the edit form when opened from collapsed state if not editable', done => {
+ expect(vm2.isLockDialogOpen).toBe(false);
+
+ vm2.$el.querySelector('.sidebar-collapsed-icon').click();
+
+ Vue.nextTick()
+ .then(() => {
+ expect(vm2.isLockDialogOpen).toBe(false);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
});
diff --git a/spec/javascripts/sidebar/sidebar_mediator_spec.js b/spec/javascripts/sidebar/sidebar_mediator_spec.js
index b0412105e3f..2aa30fd1cc6 100644
--- a/spec/javascripts/sidebar/sidebar_mediator_spec.js
+++ b/spec/javascripts/sidebar/sidebar_mediator_spec.js
@@ -2,7 +2,7 @@ import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import SidebarMediator from '~/sidebar/sidebar_mediator';
import SidebarStore from '~/sidebar/stores/sidebar_store';
-import SidebarService from '~/sidebar/services/sidebar_service';
+import SidebarService, { gqClient } from '~/sidebar/services/sidebar_service';
import Mock from './mock_data';
const { mediator: mediatorMockData } = Mock;
@@ -44,12 +44,18 @@ describe('Sidebar mediator', function() {
it('fetches the data', done => {
const mockData = Mock.responseMap.GET[mediatorMockData.endpoint];
mock.onGet(mediatorMockData.endpoint).reply(200, mockData);
+
+ const mockGraphQlData = Mock.graphQlResponseData;
+ spyOn(gqClient, 'query').and.returnValue({
+ data: mockGraphQlData,
+ });
+
spyOn(this.mediator, 'processFetchedData').and.callThrough();
this.mediator
.fetch()
.then(() => {
- expect(this.mediator.processFetchedData).toHaveBeenCalledWith(mockData);
+ expect(this.mediator.processFetchedData).toHaveBeenCalledWith(mockData, mockGraphQlData);
})
.then(done)
.catch(done.fail);
diff --git a/spec/javascripts/smart_interval_spec.js b/spec/javascripts/smart_interval_spec.js
index 0f579bb32cc..0dc9ee9d79a 100644
--- a/spec/javascripts/smart_interval_spec.js
+++ b/spec/javascripts/smart_interval_spec.js
@@ -1,5 +1,5 @@
import $ from 'jquery';
-import _ from 'underscore';
+import { assignIn } from 'lodash';
import waitForPromises from 'spec/helpers/wait_for_promises';
import SmartInterval from '~/smart_interval';
@@ -21,7 +21,7 @@ describe('SmartInterval', function() {
};
if (config) {
- _.extend(defaultParams, config);
+ assignIn(defaultParams, config);
}
return new SmartInterval(defaultParams);
diff --git a/spec/javascripts/vue_mr_widget/components/deployment_stop_button_spec.js b/spec/javascripts/vue_mr_widget/components/deployment_stop_button_spec.js
deleted file mode 100644
index 6a6d8279c2c..00000000000
--- a/spec/javascripts/vue_mr_widget/components/deployment_stop_button_spec.js
+++ /dev/null
@@ -1,95 +0,0 @@
-import Vue from 'vue';
-import deploymentStopComponent from '~/vue_merge_request_widget/components/deployment/deployment_stop_button.vue';
-import { SUCCESS } from '~/vue_merge_request_widget/components/deployment/constants';
-import MRWidgetService from '~/vue_merge_request_widget/services/mr_widget_service';
-import mountComponent from '../../helpers/vue_mount_component_helper';
-
-describe('Deployment component', () => {
- const Component = Vue.extend(deploymentStopComponent);
- let deploymentMockData;
-
- beforeEach(() => {
- deploymentMockData = {
- id: 15,
- name: 'review/diplo',
- url: '/root/review-apps/environments/15',
- stop_url: '/root/review-apps/environments/15/stop',
- metrics_url: '/root/review-apps/environments/15/deployments/1/metrics',
- metrics_monitoring_url: '/root/review-apps/environments/15/metrics',
- external_url: 'http://gitlab.com.',
- external_url_formatted: 'gitlab',
- deployed_at: '2017-03-22T22:44:42.258Z',
- deployed_at_formatted: 'Mar 22, 2017 10:44pm',
- deployment_manual_actions: [],
- status: SUCCESS,
- changes: [
- {
- path: 'index.html',
- external_url: 'http://root-master-patch-91341.volatile-watch.surge.sh/index.html',
- },
- {
- path: 'imgs/gallery.html',
- external_url: 'http://root-master-patch-91341.volatile-watch.surge.sh/imgs/gallery.html',
- },
- {
- path: 'about/',
- external_url: 'http://root-master-patch-91341.volatile-watch.surge.sh/about/',
- },
- ],
- };
- });
-
- let vm;
-
- afterEach(() => {
- vm.$destroy();
- });
-
- describe('', () => {
- beforeEach(() => {
- vm = mountComponent(Component, {
- stopUrl: deploymentMockData.stop_url,
- isDeployInProgress: false,
- });
- });
-
- describe('stopEnvironment', () => {
- const url = '/foo/bar';
- const returnPromise = () =>
- new Promise(resolve => {
- resolve({
- data: {
- redirect_url: url,
- },
- });
- });
- const mockStopEnvironment = () => {
- vm.stopEnvironment(deploymentMockData);
- return vm;
- };
-
- it('should show a confirm dialog and call service.stopEnvironment when confirmed', done => {
- spyOn(window, 'confirm').and.returnValue(true);
- spyOn(MRWidgetService, 'stopEnvironment').and.returnValue(returnPromise(true));
- const visitUrl = spyOnDependency(deploymentStopComponent, 'visitUrl').and.returnValue(true);
- vm = mockStopEnvironment();
-
- expect(window.confirm).toHaveBeenCalled();
- expect(MRWidgetService.stopEnvironment).toHaveBeenCalledWith(deploymentMockData.stop_url);
- setTimeout(() => {
- expect(visitUrl).toHaveBeenCalledWith(url);
- done();
- }, 333);
- });
-
-      it('should show a confirm dialog but should not call the service if the dialog is rejected', () => {
- spyOn(window, 'confirm').and.returnValue(false);
- spyOn(MRWidgetService, 'stopEnvironment').and.returnValue(returnPromise(false));
- vm = mockStopEnvironment();
-
- expect(window.confirm).toHaveBeenCalled();
- expect(MRWidgetService.stopEnvironment).not.toHaveBeenCalled();
- });
- });
- });
-});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
index 6c44ffc6ec9..d396f2d9271 100644
--- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
@@ -145,34 +145,30 @@ describe('ReadyToMerge', () => {
});
});
- describe('mergeButtonClass', () => {
- const defaultClass = 'btn btn-sm btn-success accept-merge-request';
- const failedClass = `${defaultClass} btn-danger`;
- const inActionClass = `${defaultClass} btn-info`;
-
+ describe('mergeButtonVariant', () => {
it('defaults to success class', () => {
Vue.set(vm.mr, 'availableAutoMergeStrategies', []);
- expect(vm.mergeButtonClass).toEqual(defaultClass);
+ expect(vm.mergeButtonVariant).toEqual('success');
});
it('returns success class for success status', () => {
Vue.set(vm.mr, 'availableAutoMergeStrategies', []);
Vue.set(vm.mr, 'pipeline', true);
- expect(vm.mergeButtonClass).toEqual(defaultClass);
+ expect(vm.mergeButtonVariant).toEqual('success');
});
it('returns info class for pending status', () => {
Vue.set(vm.mr, 'availableAutoMergeStrategies', [MTWPS_MERGE_STRATEGY]);
- expect(vm.mergeButtonClass).toEqual(inActionClass);
+ expect(vm.mergeButtonVariant).toEqual('info');
});
- it('returns failed class for failed status', () => {
+ it('returns danger class for failed status', () => {
vm.mr.hasCI = true;
- expect(vm.mergeButtonClass).toEqual(failedClass);
+ expect(vm.mergeButtonVariant).toEqual('danger');
});
});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_wip_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_wip_spec.js
index 5844dad42ff..9153231b974 100644
--- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_wip_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_wip_spec.js
@@ -43,7 +43,7 @@ describe('Wip', () => {
is_new_mr_data: true,
};
- describe('removeWIP', () => {
+ describe('handleRemoveWIP', () => {
it('should make a request to service and handle response', done => {
const vm = createComponent();
@@ -57,7 +57,7 @@ describe('Wip', () => {
}),
);
- vm.removeWIP();
+ vm.handleRemoveWIP();
setTimeout(() => {
expect(vm.isMakingRequest).toBeTruthy();
expect(eventHub.$emit).toHaveBeenCalledWith('UpdateWidgetData', mrObj);
diff --git a/spec/javascripts/vue_mr_widget/mock_data.js b/spec/javascripts/vue_mr_widget/mock_data.js
index 2eaba46cdce..7783fcb6f93 100644
--- a/spec/javascripts/vue_mr_widget/mock_data.js
+++ b/spec/javascripts/vue_mr_widget/mock_data.js
@@ -1,317 +1,2 @@
-import { SUCCESS } from '~/vue_merge_request_widget/components/deployment/constants';
-
-export default {
- id: 132,
- iid: 22,
- assignee_id: null,
- author_id: 1,
- description: '',
- lock_version: null,
- milestone_id: null,
- position: 0,
- state: 'merged',
- title: 'Update README.md',
- updated_by_id: null,
- created_at: '2017-04-07T12:27:26.718Z',
- updated_at: '2017-04-07T15:39:25.852Z',
- time_estimate: 0,
- total_time_spent: 0,
- human_access: 'Maintainer',
- human_time_estimate: null,
- human_total_time_spent: null,
- in_progress_merge_commit_sha: null,
- merge_commit_sha: '53027d060246c8f47e4a9310fb332aa52f221775',
- short_merge_commit_sha: '53027d06',
- merge_error: null,
- merge_params: {
- force_remove_source_branch: null,
- },
- merge_status: 'can_be_merged',
- merge_user_id: null,
- source_branch: 'daaaa',
- source_branch_link: 'daaaa',
- source_project_id: 19,
- source_project_full_path: '/group1/project1',
- target_branch: 'master',
- target_project_id: 19,
- target_project_full_path: '/group2/project2',
- merge_request_add_ci_config_path: '/group2/project2/new/pipeline',
- metrics: {
- merged_by: {
- name: 'Administrator',
- username: 'root',
- id: 1,
- state: 'active',
- avatar_url:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- web_url: 'http://localhost:3000/root',
- },
- merged_at: '2017-04-07T15:39:25.696Z',
- closed_by: null,
- closed_at: null,
- },
- author: {
- name: 'Administrator',
- username: 'root',
- id: 1,
- state: 'active',
- avatar_url: 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- web_url: 'http://localhost:3000/root',
- },
- merge_user: null,
- diff_head_sha: '104096c51715e12e7ae41f9333e9fa35b73f385d',
- diff_head_commit_short_id: '104096c5',
- default_merge_commit_message:
- "Merge branch 'daaaa' into 'master'\n\nUpdate README.md\n\nSee merge request !22",
- pipeline: {
- id: 172,
- user: {
- name: 'Administrator',
- username: 'root',
- id: 1,
- state: 'active',
- avatar_url:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- web_url: 'http://localhost:3000/root',
- },
- active: false,
- coverage: '92.16',
- path: '/root/acets-app/pipelines/172',
- details: {
- status: {
- icon: 'status_success',
- favicon: 'favicon_status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- has_details: true,
- details_path: '/root/acets-app/pipelines/172',
- },
- duration: null,
- finished_at: '2017-04-07T14:00:14.256Z',
- stages: [
- {
- name: 'build',
- title: 'build: failed',
- status: {
- icon: 'status_failed',
- favicon: 'favicon_status_failed',
- text: 'failed',
- label: 'failed',
- group: 'failed',
- has_details: true,
- details_path: '/root/acets-app/pipelines/172#build',
- },
- path: '/root/acets-app/pipelines/172#build',
- dropdown_path: '/root/acets-app/pipelines/172/stage.json?stage=build',
- },
- {
- name: 'review',
- title: 'review: skipped',
- status: {
- icon: 'status_skipped',
- favicon: 'favicon_status_skipped',
- text: 'skipped',
- label: 'skipped',
- group: 'skipped',
- has_details: true,
- details_path: '/root/acets-app/pipelines/172#review',
- },
- path: '/root/acets-app/pipelines/172#review',
- dropdown_path: '/root/acets-app/pipelines/172/stage.json?stage=review',
- },
- ],
- artifacts: [],
- manual_actions: [
- {
- name: 'stop_review',
- path: '/root/acets-app/builds/1427/play',
- playable: false,
- },
- ],
- },
- flags: {
- latest: false,
- triggered: false,
- stuck: false,
- yaml_errors: false,
- retryable: true,
- cancelable: false,
- merge_request_pipeline: false,
- detached_merge_request_pipeline: true,
- },
- ref: {
- name: 'daaaa',
- path: '/root/acets-app/tree/daaaa',
- tag: false,
- branch: true,
- },
- merge_request: {
- iid: 1,
- path: '/root/detached-merge-request-pipelines/-/merge_requests/1',
- title: 'Update README.md',
- source_branch: 'feature-1',
- source_branch_path: '/root/detached-merge-request-pipelines/branches/feature-1',
- target_branch: 'master',
- target_branch_path: '/root/detached-merge-request-pipelines/branches/master',
- },
- commit: {
- id: '104096c51715e12e7ae41f9333e9fa35b73f385d',
- short_id: '104096c5',
- title: 'Update README.md',
- created_at: '2017-04-07T15:27:18.000+03:00',
- parent_ids: ['2396536178668d8930c29d904e53bd4d06228b32'],
- message: 'Update README.md',
- author_name: 'Administrator',
- author_email: 'admin@example.com',
- authored_date: '2017-04-07T15:27:18.000+03:00',
- committer_name: 'Administrator',
- committer_email: 'admin@example.com',
- committed_date: '2017-04-07T15:27:18.000+03:00',
- author: {
- name: 'Administrator',
- username: 'root',
- id: 1,
- state: 'active',
- avatar_url:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- web_url: 'http://localhost:3000/root',
- },
- author_gravatar_url:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- commit_url:
- 'http://localhost:3000/root/acets-app/commit/104096c51715e12e7ae41f9333e9fa35b73f385d',
- commit_path: '/root/acets-app/commit/104096c51715e12e7ae41f9333e9fa35b73f385d',
- },
- retry_path: '/root/acets-app/pipelines/172/retry',
- created_at: '2017-04-07T12:27:19.520Z',
- updated_at: '2017-04-07T15:28:44.800Z',
- },
- pipelineCoverageDelta: '15.25',
- work_in_progress: false,
- source_branch_exists: false,
- mergeable_discussions_state: true,
- conflicts_can_be_resolved_in_ui: false,
- branch_missing: true,
- commits_count: 1,
- has_conflicts: false,
- can_be_merged: true,
- has_ci: true,
- ci_status: 'success',
- pipeline_status_path: '/root/acets-app/-/merge_requests/22/pipeline_status',
- issues_links: {
- closing: '',
- mentioned_but_not_closing: '',
- },
- current_user: {
- can_resolve_conflicts: true,
- can_remove_source_branch: false,
- can_revert_on_current_merge_request: true,
- can_cherry_pick_on_current_merge_request: true,
- },
- target_branch_path: '/root/acets-app/branches/master',
- source_branch_path: '/root/acets-app/branches/daaaa',
- conflict_resolution_ui_path: '/root/acets-app/-/merge_requests/22/conflicts',
- remove_wip_path: '/root/acets-app/-/merge_requests/22/remove_wip',
- cancel_auto_merge_path: '/root/acets-app/-/merge_requests/22/cancel_auto_merge',
- create_issue_to_resolve_discussions_path:
- '/root/acets-app/issues/new?merge_request_to_resolve_discussions_of=22',
- merge_path: '/root/acets-app/-/merge_requests/22/merge',
- cherry_pick_in_fork_path:
- '/root/acets-app/forks?continue%5Bnotice%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+has+been+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.+Try+to+revert+this+commit+again.&continue%5Bnotice_now%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+is+being+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.&continue%5Bto%5D=%2Froot%2Facets-app%2Fmerge_requests%2F22&namespace_key=1',
- revert_in_fork_path:
- '/root/acets-app/forks?continue%5Bnotice%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+has+been+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.+Try+to+cherry-pick+this+commit+again.&continue%5Bnotice_now%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+is+being+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.&continue%5Bto%5D=%2Froot%2Facets-app%2Fmerge_requests%2F22&namespace_key=1',
- email_patches_path: '/root/acets-app/-/merge_requests/22.patch',
- plain_diff_path: '/root/acets-app/-/merge_requests/22.diff',
- merge_request_basic_path: '/root/acets-app/-/merge_requests/22.json?serializer=basic',
- merge_request_widget_path: '/root/acets-app/-/merge_requests/22/widget.json',
- merge_request_cached_widget_path: '/cached.json',
- merge_check_path: '/root/acets-app/-/merge_requests/22/merge_check',
- ci_environments_status_url: '/root/acets-app/-/merge_requests/22/ci_environments_status',
- project_archived: false,
- default_merge_commit_message_with_description:
- "Merge branch 'daaaa' into 'master'\n\nUpdate README.md\n\nSee merge request !22",
- default_squash_commit_message: 'Test squash commit message',
- diverged_commits_count: 0,
- only_allow_merge_if_pipeline_succeeds: false,
- commit_change_content_path: '/root/acets-app/-/merge_requests/22/commit_change_content',
- merge_commit_path:
- 'http://localhost:3000/root/acets-app/commit/53027d060246c8f47e4a9310fb332aa52f221775',
- troubleshooting_docs_path: 'help',
- merge_request_pipelines_docs_path: '/help/ci/merge_request_pipelines/index.md',
- merge_train_when_pipeline_succeeds_docs_path:
- '/help/ci/merge_request_pipelines/pipelines_for_merged_results/merge_trains/#startadd-to-merge-train-when-pipeline-succeeds',
- squash: true,
- visual_review_app_available: true,
- merge_trains_enabled: true,
- merge_trains_count: 3,
- merge_train_index: 1,
-};
-
-export const mockStore = {
- pipeline: {
- id: 0,
- details: {
- status: {
- details_path: '/root/review-app-tester/pipelines/66',
- favicon:
-          '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
- group: 'success-with-warnings',
- has_details: true,
- icon: 'status_warning',
- illustration: null,
- label: 'passed with warnings',
- text: 'passed',
- tooltip: 'passed',
- },
- },
- flags: {},
- ref: {},
- },
- mergePipeline: {
- id: 1,
- details: {
- status: {
- details_path: '/root/review-app-tester/pipelines/66',
- favicon:
-          '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
- group: 'success-with-warnings',
- has_details: true,
- icon: 'status_warning',
- illustration: null,
- label: 'passed with warnings',
- text: 'passed',
- tooltip: 'passed',
- },
- },
- flags: {},
- ref: {},
- },
- targetBranch: 'target-branch',
- sourceBranch: 'source-branch',
- sourceBranchLink: 'source-branch-link',
- deployments: [
- {
- id: 0,
- name: 'bogus',
- external_url: 'https://fake.com',
- external_url_formatted: 'https://fake.com',
- status: SUCCESS,
- },
- {
- id: 1,
- name: 'bogus-docs',
- external_url: 'https://fake.com',
- external_url_formatted: 'https://fake.com',
- status: SUCCESS,
- },
- ],
- postMergeDeployments: [
- { id: 0, name: 'prod', status: SUCCESS },
- { id: 1, name: 'prod-docs', status: SUCCESS },
- ],
- troubleshootingDocsPath: 'troubleshooting-docs-path',
- ciStatus: 'ci-status',
- hasCI: true,
- exposedArtifactsPath: 'exposed_artifacts.json',
-};
+export { default } from '../../frontend/vue_mr_widget/mock_data';
+export * from '../../frontend/vue_mr_widget/mock_data';
diff --git a/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js b/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js
deleted file mode 100644
index 35c1495be58..00000000000
--- a/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js
+++ /dev/null
@@ -1,855 +0,0 @@
-import Vue from 'vue';
-import MockAdapter from 'axios-mock-adapter';
-import mountComponent from 'spec/helpers/vue_mount_component_helper';
-import axios from '~/lib/utils/axios_utils';
-import mrWidgetOptions from '~/vue_merge_request_widget/mr_widget_options.vue';
-import eventHub from '~/vue_merge_request_widget/event_hub';
-import notify from '~/lib/utils/notify';
-import { stateKey } from '~/vue_merge_request_widget/stores/state_maps';
-import mockData from './mock_data';
-import { faviconDataUrl, overlayDataUrl } from '../lib/utils/mock_data';
-import { SUCCESS } from '~/vue_merge_request_widget/components/deployment/constants';
-
-const returnPromise = data =>
- new Promise(resolve => {
- resolve({
- data,
- });
- });
-
-describe('mrWidgetOptions', () => {
- let vm;
- let mock;
- let MrWidgetOptions;
-
- const COLLABORATION_MESSAGE = 'Allows commits from members who can merge to the target branch';
-
- beforeEach(() => {
- // Prevent component mounting
- delete mrWidgetOptions.el;
-
- gl.mrWidgetData = { ...mockData };
- gon.features = { asyncMrWidget: true };
-
- mock = new MockAdapter(axios);
- mock.onGet(mockData.merge_request_widget_path).reply(() => [200, { ...mockData }]);
- mock.onGet(mockData.merge_request_cached_widget_path).reply(() => [200, { ...mockData }]);
-
- MrWidgetOptions = Vue.extend(mrWidgetOptions);
- vm = mountComponent(MrWidgetOptions, {
- mrData: { ...mockData },
- });
- });
-
- afterEach(() => {
- vm.$destroy();
- mock.restore();
- gl.mrWidgetData = {};
- gon.features = {};
- });
-
- describe('data', () => {
- it('should instantiate Store and Service', () => {
- expect(vm.mr).toBeDefined();
- expect(vm.service).toBeDefined();
- });
- });
-
- describe('computed', () => {
- describe('componentName', () => {
- it('should return merged component', () => {
- expect(vm.componentName).toEqual('mr-widget-merged');
- });
-
- it('should return conflicts component', () => {
- vm.mr.state = 'conflicts';
-
- expect(vm.componentName).toEqual('mr-widget-conflicts');
- });
- });
-
- describe('shouldRenderMergeHelp', () => {
- it('should return false for the initial merged state', () => {
- expect(vm.shouldRenderMergeHelp).toBeFalsy();
- });
-
- it('should return true for a state which requires help widget', () => {
- vm.mr.state = 'conflicts';
-
- expect(vm.shouldRenderMergeHelp).toBeTruthy();
- });
- });
-
- describe('shouldRenderPipelines', () => {
- it('should return true when hasCI is true', () => {
- vm.mr.hasCI = true;
-
- expect(vm.shouldRenderPipelines).toBeTruthy();
- });
-
- it('should return false when hasCI is false', () => {
- vm.mr.hasCI = false;
-
- expect(vm.shouldRenderPipelines).toBeFalsy();
- });
- });
-
- describe('shouldSuggestPipelines', () => {
- describe('given suggestPipeline feature flag is enabled', () => {
- beforeEach(() => {
- gon.features = { suggestPipeline: true };
- vm = mountComponent(MrWidgetOptions, {
- mrData: { ...mockData },
- });
- });
-
- afterEach(() => {
- gon.features = {};
- });
-
- it('should suggest pipelines when none exist', () => {
- vm.mr.mergeRequestAddCiConfigPath = 'some/path';
- vm.mr.hasCI = false;
-
- expect(vm.shouldSuggestPipelines).toBeTruthy();
- });
-
- it('should not suggest pipelines when they exist', () => {
- vm.mr.mergeRequestAddCiConfigPath = null;
- vm.mr.hasCI = false;
-
- expect(vm.shouldSuggestPipelines).toBeFalsy();
- });
-
-        it('should not suggest pipelines when hasCI is true', () => {
- vm.mr.mergeRequestAddCiConfigPath = 'some/path';
- vm.mr.hasCI = true;
-
- expect(vm.shouldSuggestPipelines).toBeFalsy();
- });
- });
-
- describe('given suggestPipeline feature flag is not enabled', () => {
- beforeEach(() => {
- gon.features = { suggestPipeline: false };
- vm = mountComponent(MrWidgetOptions, {
- mrData: { ...mockData },
- });
- });
-
- afterEach(() => {
- gon.features = {};
- });
-
- it('should not suggest pipelines', () => {
- vm.mr.mergeRequestAddCiConfigPath = null;
-
- expect(vm.shouldSuggestPipelines).toBeFalsy();
- });
- });
- });
-
- describe('shouldRenderRelatedLinks', () => {
- it('should return false for the initial data', () => {
- expect(vm.shouldRenderRelatedLinks).toBeFalsy();
- });
-
- it('should return true if there is relatedLinks in MR', () => {
- Vue.set(vm.mr, 'relatedLinks', {});
-
- expect(vm.shouldRenderRelatedLinks).toBeTruthy();
- });
- });
-
- describe('shouldRenderSourceBranchRemovalStatus', () => {
- beforeEach(() => {
- vm.mr.state = 'readyToMerge';
- });
-
- it('should return true when cannot remove source branch and branch will be removed', () => {
- vm.mr.canRemoveSourceBranch = false;
- vm.mr.shouldRemoveSourceBranch = true;
-
- expect(vm.shouldRenderSourceBranchRemovalStatus).toEqual(true);
- });
-
- it('should return false when can remove source branch and branch will be removed', () => {
- vm.mr.canRemoveSourceBranch = true;
- vm.mr.shouldRemoveSourceBranch = true;
-
- expect(vm.shouldRenderSourceBranchRemovalStatus).toEqual(false);
- });
-
- it('should return false when cannot remove source branch and branch will not be removed', () => {
- vm.mr.canRemoveSourceBranch = false;
- vm.mr.shouldRemoveSourceBranch = false;
-
- expect(vm.shouldRenderSourceBranchRemovalStatus).toEqual(false);
- });
-
- it('should return false when in merged state', () => {
- vm.mr.canRemoveSourceBranch = false;
- vm.mr.shouldRemoveSourceBranch = true;
- vm.mr.state = 'merged';
-
- expect(vm.shouldRenderSourceBranchRemovalStatus).toEqual(false);
- });
-
- it('should return false when in nothing to merge state', () => {
- vm.mr.canRemoveSourceBranch = false;
- vm.mr.shouldRemoveSourceBranch = true;
- vm.mr.state = 'nothingToMerge';
-
- expect(vm.shouldRenderSourceBranchRemovalStatus).toEqual(false);
- });
- });
-
- describe('shouldRenderCollaborationStatus', () => {
- describe('when collaboration is allowed', () => {
- beforeEach(() => {
- vm.mr.allowCollaboration = true;
- });
-
- describe('when merge request is opened', () => {
- beforeEach(done => {
- vm.mr.isOpen = true;
- vm.$nextTick(done);
- });
-
- it('should render collaboration status', () => {
- expect(vm.$el.textContent).toContain(COLLABORATION_MESSAGE);
- });
- });
-
- describe('when merge request is not opened', () => {
- beforeEach(done => {
- vm.mr.isOpen = false;
- vm.$nextTick(done);
- });
-
- it('should not render collaboration status', () => {
- expect(vm.$el.textContent).not.toContain(COLLABORATION_MESSAGE);
- });
- });
- });
-
- describe('when collaboration is not allowed', () => {
- beforeEach(() => {
- vm.mr.allowCollaboration = false;
- });
-
- describe('when merge request is opened', () => {
- beforeEach(done => {
- vm.mr.isOpen = true;
- vm.$nextTick(done);
- });
-
- it('should not render collaboration status', () => {
- expect(vm.$el.textContent).not.toContain(COLLABORATION_MESSAGE);
- });
- });
- });
- });
-
- describe('showMergePipelineForkWarning', () => {
- describe('when the source project and target project are the same', () => {
- beforeEach(done => {
- Vue.set(vm.mr, 'mergePipelinesEnabled', true);
- Vue.set(vm.mr, 'sourceProjectId', 1);
- Vue.set(vm.mr, 'targetProjectId', 1);
- vm.$nextTick(done);
- });
-
- it('should be false', () => {
- expect(vm.showMergePipelineForkWarning).toEqual(false);
- });
- });
-
- describe('when merge pipelines are not enabled', () => {
- beforeEach(done => {
- Vue.set(vm.mr, 'mergePipelinesEnabled', false);
- Vue.set(vm.mr, 'sourceProjectId', 1);
- Vue.set(vm.mr, 'targetProjectId', 2);
- vm.$nextTick(done);
- });
-
- it('should be false', () => {
- expect(vm.showMergePipelineForkWarning).toEqual(false);
- });
- });
-
- describe('when merge pipelines are enabled _and_ the source project and target project are different', () => {
- beforeEach(done => {
- Vue.set(vm.mr, 'mergePipelinesEnabled', true);
- Vue.set(vm.mr, 'sourceProjectId', 1);
- Vue.set(vm.mr, 'targetProjectId', 2);
- vm.$nextTick(done);
- });
-
- it('should be true', () => {
- expect(vm.showMergePipelineForkWarning).toEqual(true);
- });
- });
- });
- });
-
- describe('methods', () => {
- describe('checkStatus', () => {
- it('should tell service to check status', done => {
- spyOn(vm.service, 'checkStatus').and.returnValue(returnPromise(mockData));
- spyOn(vm.mr, 'setData');
- spyOn(vm, 'handleNotification');
-
- let isCbExecuted = false;
- const cb = () => {
- isCbExecuted = true;
- };
-
- vm.checkStatus(cb);
-
- setTimeout(() => {
- expect(vm.service.checkStatus).toHaveBeenCalled();
- expect(vm.mr.setData).toHaveBeenCalled();
- expect(vm.handleNotification).toHaveBeenCalledWith(mockData);
- expect(isCbExecuted).toBeTruthy();
- done();
- }, 333);
- });
- });
-
- describe('initPolling', () => {
- it('should call SmartInterval', () => {
- spyOn(vm, 'checkStatus').and.returnValue(Promise.resolve());
- jasmine.clock().install();
- vm.initPolling();
-
- expect(vm.checkStatus).not.toHaveBeenCalled();
-
- jasmine.clock().tick(10000);
-
- expect(vm.pollingInterval).toBeDefined();
- expect(vm.checkStatus).toHaveBeenCalled();
-
- jasmine.clock().uninstall();
- });
- });
-
- describe('initDeploymentsPolling', () => {
- it('should call SmartInterval', () => {
- spyOn(vm, 'fetchDeployments').and.returnValue(Promise.resolve());
- vm.initDeploymentsPolling();
-
- expect(vm.deploymentsInterval).toBeDefined();
- expect(vm.fetchDeployments).toHaveBeenCalled();
- });
- });
-
- describe('fetchDeployments', () => {
- it('should fetch deployments', done => {
- spyOn(vm.service, 'fetchDeployments').and.returnValue(
- returnPromise([{ id: 1, status: SUCCESS }]),
- );
-
- vm.fetchPreMergeDeployments();
-
- setTimeout(() => {
- expect(vm.service.fetchDeployments).toHaveBeenCalled();
- expect(vm.mr.deployments.length).toEqual(1);
- expect(vm.mr.deployments[0].id).toBe(1);
- done();
- });
- });
- });
-
- describe('fetchActionsContent', () => {
- it('should fetch content of Cherry Pick and Revert modals', done => {
- spyOn(vm.service, 'fetchMergeActionsContent').and.returnValue(returnPromise('hello world'));
-
- vm.fetchActionsContent();
-
- setTimeout(() => {
- expect(vm.service.fetchMergeActionsContent).toHaveBeenCalled();
- expect(document.body.textContent).toContain('hello world');
- done();
- }, 333);
- });
- });
-
- describe('bindEventHubListeners', () => {
- it('should bind eventHub listeners', done => {
- spyOn(vm, 'checkStatus').and.returnValue(() => {});
- spyOn(vm.service, 'checkStatus').and.returnValue(returnPromise(mockData));
- spyOn(vm, 'fetchActionsContent');
- spyOn(vm.mr, 'setData');
- spyOn(vm, 'resumePolling');
- spyOn(vm, 'stopPolling');
- spyOn(eventHub, '$on').and.callThrough();
-
- setTimeout(() => {
- eventHub.$emit('SetBranchRemoveFlag', ['flag']);
-
- expect(vm.mr.isRemovingSourceBranch).toEqual('flag');
-
- eventHub.$emit('FailedToMerge');
-
- expect(vm.mr.state).toEqual('failedToMerge');
-
- eventHub.$emit('UpdateWidgetData', mockData);
-
- expect(vm.mr.setData).toHaveBeenCalledWith(mockData);
-
- eventHub.$emit('EnablePolling');
-
- expect(vm.resumePolling).toHaveBeenCalled();
-
- eventHub.$emit('DisablePolling');
-
- expect(vm.stopPolling).toHaveBeenCalled();
-
- const listenersWithServiceRequest = {
- MRWidgetUpdateRequested: true,
- FetchActionsContent: true,
- };
-
- const allArgs = eventHub.$on.calls.allArgs();
- allArgs.forEach(params => {
- const eventName = params[0];
- const callback = params[1];
-
- if (listenersWithServiceRequest[eventName]) {
- listenersWithServiceRequest[eventName] = callback;
- }
- });
-
- listenersWithServiceRequest.MRWidgetUpdateRequested();
-
- expect(vm.checkStatus).toHaveBeenCalled();
-
- listenersWithServiceRequest.FetchActionsContent();
-
- expect(vm.fetchActionsContent).toHaveBeenCalled();
-
- done();
- });
- });
- });
-
- describe('setFavicon', () => {
- let faviconElement;
-
- beforeEach(() => {
- const favicon = document.createElement('link');
- favicon.setAttribute('id', 'favicon');
- favicon.setAttribute('data-original-href', faviconDataUrl);
- document.body.appendChild(favicon);
-
- faviconElement = document.getElementById('favicon');
- });
-
- afterEach(() => {
- document.body.removeChild(document.getElementById('favicon'));
- });
-
- it('should call setFavicon method', done => {
- vm.mr.ciStatusFaviconPath = overlayDataUrl;
- vm.setFaviconHelper()
- .then(() => {
- /*
-            It would be better if we could mock commonUtils.setFaviconURL
- with a spy and test that it was called. We are doing the following
- tests as a proxy to show that the function has been called
- */
- expect(faviconElement.getAttribute('href')).not.toEqual(null);
- expect(faviconElement.getAttribute('href')).not.toEqual(overlayDataUrl);
- expect(faviconElement.getAttribute('href')).not.toEqual(faviconDataUrl);
- done();
- })
- .catch(done.fail);
- });
-
- it('should not call setFavicon when there is no ciStatusFaviconPath', done => {
- vm.mr.ciStatusFaviconPath = null;
- vm.setFaviconHelper()
- .then(() => {
- expect(faviconElement.getAttribute('href')).toEqual(null);
- done();
- })
- .catch(done.fail);
- });
- });
-
- describe('handleNotification', () => {
- const data = {
- ci_status: 'running',
- title: 'title',
- pipeline: { details: { status: { label: 'running-label' } } },
- };
-
- beforeEach(() => {
- spyOn(notify, 'notifyMe');
-
- vm.mr.ciStatus = 'failed';
- vm.mr.gitlabLogo = 'logo.png';
- });
-
- it('should call notifyMe', () => {
- vm.handleNotification(data);
-
- expect(notify.notifyMe).toHaveBeenCalledWith(
- 'Pipeline running-label',
- 'Pipeline running-label for "title"',
- 'logo.png',
- );
- });
-
- it('should not call notifyMe if the status has not changed', () => {
- vm.mr.ciStatus = data.ci_status;
-
- vm.handleNotification(data);
-
- expect(notify.notifyMe).not.toHaveBeenCalled();
- });
-
- it('should not notify if no pipeline provided', () => {
- vm.handleNotification({
- ...data,
- pipeline: undefined,
- });
-
- expect(notify.notifyMe).not.toHaveBeenCalled();
- });
- });
-
- describe('resumePolling', () => {
-      it('should call resume on pollingInterval', done => {
- setTimeout(() => {
- spyOn(vm.pollingInterval, 'resume');
-
- vm.resumePolling();
-
- expect(vm.pollingInterval.resume).toHaveBeenCalled();
-
- done();
- });
- });
- });
-
- describe('stopPolling', () => {
- it('should call stopTimer on pollingInterval', done => {
- setTimeout(() => {
- spyOn(vm.pollingInterval, 'stopTimer');
-
- vm.stopPolling();
-
- expect(vm.pollingInterval.stopTimer).toHaveBeenCalled();
-
- done();
- });
- });
- });
- });
-
- describe('rendering relatedLinks', () => {
- beforeEach(done => {
- vm.mr.relatedLinks = {
- assignToMe: null,
- closing: `
- <a class="close-related-link" href="#">
- Close
- </a>
- `,
- mentioned: '',
- };
- Vue.nextTick(done);
- });
-
- it('renders if there are relatedLinks', () => {
- expect(vm.$el.querySelector('.close-related-link')).toBeDefined();
- });
-
- it('does not render if state is nothingToMerge', done => {
- vm.mr.state = stateKey.nothingToMerge;
- Vue.nextTick(() => {
- expect(vm.$el.querySelector('.close-related-link')).toBeNull();
- done();
- });
- });
- });
-
- describe('rendering source branch removal status', () => {
- it('renders when user cannot remove branch and branch should be removed', done => {
- vm.mr.canRemoveSourceBranch = false;
- vm.mr.shouldRemoveSourceBranch = true;
- vm.mr.state = 'readyToMerge';
-
- vm.$nextTick(() => {
- const tooltip = vm.$el.querySelector('.fa-question-circle');
-
- expect(vm.$el.textContent).toContain('Deletes source branch');
- expect(tooltip.getAttribute('data-original-title')).toBe(
- 'A user with write access to the source branch selected this option',
- );
-
- done();
- });
- });
-
- it('does not render in merged state', done => {
- vm.mr.canRemoveSourceBranch = false;
- vm.mr.shouldRemoveSourceBranch = true;
- vm.mr.state = 'merged';
-
- vm.$nextTick(() => {
- expect(vm.$el.textContent).toContain('The source branch has been deleted');
- expect(vm.$el.textContent).not.toContain('Deletes source branch');
-
- done();
- });
- });
- });
-
- describe('rendering deployments', () => {
- const changes = [
- {
- path: 'index.html',
- external_url: 'http://root-master-patch-91341.volatile-watch.surge.sh/index.html',
- },
- {
- path: 'imgs/gallery.html',
- external_url: 'http://root-master-patch-91341.volatile-watch.surge.sh/imgs/gallery.html',
- },
- {
- path: 'about/',
- external_url: 'http://root-master-patch-91341.volatile-watch.surge.sh/about/',
- },
- ];
- const deploymentMockData = {
- id: 15,
- name: 'review/diplo',
- url: '/root/acets-review-apps/environments/15',
- stop_url: '/root/acets-review-apps/environments/15/stop',
- metrics_url: '/root/acets-review-apps/environments/15/deployments/1/metrics',
- metrics_monitoring_url: '/root/acets-review-apps/environments/15/metrics',
- external_url: 'http://diplo.',
- external_url_formatted: 'diplo.',
- deployed_at: '2017-03-22T22:44:42.258Z',
- deployed_at_formatted: 'Mar 22, 2017 10:44pm',
- changes,
- status: SUCCESS,
- };
-
- beforeEach(done => {
- vm.mr.deployments.push(
- {
- ...deploymentMockData,
- },
- {
- ...deploymentMockData,
- id: deploymentMockData.id + 1,
- },
- );
-
- vm.$nextTick(done);
- });
-
- it('renders multiple deployments', () => {
- expect(vm.$el.querySelectorAll('.deploy-heading').length).toBe(2);
- });
-
- it('renders dropdown with multiple file changes', () => {
- expect(
- vm.$el
- .querySelector('.js-mr-wigdet-deployment-dropdown')
- .querySelectorAll('.js-filtered-dropdown-result').length,
- ).toEqual(changes.length);
- });
- });
-
- describe('pipeline for target branch after merge', () => {
- describe('with information for target branch pipeline', () => {
- beforeEach(done => {
- vm.mr.state = 'merged';
- vm.mr.mergePipeline = {
- id: 127,
- user: {
- id: 1,
- name: 'Administrator',
- username: 'root',
- state: 'active',
- avatar_url: null,
- web_url: 'http://localhost:3000/root',
- status_tooltip_html: null,
- path: '/root',
- },
- active: true,
- coverage: null,
- source: 'push',
- created_at: '2018-10-22T11:41:35.186Z',
- updated_at: '2018-10-22T11:41:35.433Z',
- path: '/root/ci-web-terminal/pipelines/127',
- flags: {
- latest: true,
- stuck: true,
- auto_devops: false,
- yaml_errors: false,
- retryable: false,
- cancelable: true,
- failure_reason: false,
- },
- details: {
- status: {
- icon: 'status_pending',
- text: 'pending',
- label: 'pending',
- group: 'pending',
- tooltip: 'pending',
- has_details: true,
- details_path: '/root/ci-web-terminal/pipelines/127',
- illustration: null,
- favicon:
- '/assets/ci_favicons/favicon_status_pending-5bdf338420e5221ca24353b6bff1c9367189588750632e9a871b7af09ff6a2ae.png',
- },
- duration: null,
- finished_at: null,
- stages: [
- {
- name: 'test',
- title: 'test: pending',
- status: {
- icon: 'status_pending',
- text: 'pending',
- label: 'pending',
- group: 'pending',
- tooltip: 'pending',
- has_details: true,
- details_path: '/root/ci-web-terminal/pipelines/127#test',
- illustration: null,
- favicon:
- '/assets/ci_favicons/favicon_status_pending-5bdf338420e5221ca24353b6bff1c9367189588750632e9a871b7af09ff6a2ae.png',
- },
- path: '/root/ci-web-terminal/pipelines/127#test',
- dropdown_path: '/root/ci-web-terminal/pipelines/127/stage.json?stage=test',
- },
- ],
- artifacts: [],
- manual_actions: [],
- scheduled_actions: [],
- },
- ref: {
- name: 'master',
- path: '/root/ci-web-terminal/commits/master',
- tag: false,
- branch: true,
- },
- commit: {
- id: 'aa1939133d373c94879becb79d91828a892ee319',
- short_id: 'aa193913',
- title: "Merge branch 'master-test' into 'master'",
- created_at: '2018-10-22T11:41:33.000Z',
- parent_ids: [
- '4622f4dd792468993003caf2e3be978798cbe096',
- '76598df914cdfe87132d0c3c40f80db9fa9396a4',
- ],
- message:
- "Merge branch 'master-test' into 'master'\n\nUpdate .gitlab-ci.yml\n\nSee merge request root/ci-web-terminal!1",
- author_name: 'Administrator',
- author_email: 'admin@example.com',
- authored_date: '2018-10-22T11:41:33.000Z',
- committer_name: 'Administrator',
- committer_email: 'admin@example.com',
- committed_date: '2018-10-22T11:41:33.000Z',
- author: {
- id: 1,
- name: 'Administrator',
- username: 'root',
- state: 'active',
- avatar_url: null,
- web_url: 'http://localhost:3000/root',
- status_tooltip_html: null,
- path: '/root',
- },
- author_gravatar_url: null,
- commit_url:
- 'http://localhost:3000/root/ci-web-terminal/commit/aa1939133d373c94879becb79d91828a892ee319',
- commit_path: '/root/ci-web-terminal/commit/aa1939133d373c94879becb79d91828a892ee319',
- },
- cancel_path: '/root/ci-web-terminal/pipelines/127/cancel',
- };
- vm.$nextTick(done);
- });
-
- it('renders pipeline block', () => {
- expect(vm.$el.querySelector('.js-post-merge-pipeline')).not.toBeNull();
- });
-
- describe('with post merge deployments', () => {
- beforeEach(done => {
- vm.mr.postMergeDeployments = [
- {
- id: 15,
- name: 'review/diplo',
- url: '/root/acets-review-apps/environments/15',
- stop_url: '/root/acets-review-apps/environments/15/stop',
- metrics_url: '/root/acets-review-apps/environments/15/deployments/1/metrics',
- metrics_monitoring_url: '/root/acets-review-apps/environments/15/metrics',
- external_url: 'http://diplo.',
- external_url_formatted: 'diplo.',
- deployed_at: '2017-03-22T22:44:42.258Z',
- deployed_at_formatted: 'Mar 22, 2017 10:44pm',
- changes: [
- {
- path: 'index.html',
- external_url: 'http://root-master-patch-91341.volatile-watch.surge.sh/index.html',
- },
- {
- path: 'imgs/gallery.html',
- external_url:
- 'http://root-master-patch-91341.volatile-watch.surge.sh/imgs/gallery.html',
- },
- {
- path: 'about/',
- external_url: 'http://root-master-patch-91341.volatile-watch.surge.sh/about/',
- },
- ],
- status: 'success',
- },
- ];
-
- vm.$nextTick(done);
- });
-
- it('renders post deployment information', () => {
- expect(vm.$el.querySelector('.js-post-deployment')).not.toBeNull();
- });
- });
- });
-
- describe('without information for target branch pipeline', () => {
- beforeEach(done => {
- vm.mr.state = 'merged';
-
- vm.$nextTick(done);
- });
-
- it('does not render pipeline block', () => {
- expect(vm.$el.querySelector('.js-post-merge-pipeline')).toBeNull();
- });
- });
-
- describe('when state is not merged', () => {
- beforeEach(done => {
- vm.mr.state = 'archived';
-
- vm.$nextTick(done);
- });
-
- it('does not render pipeline block', () => {
- expect(vm.$el.querySelector('.js-post-merge-pipeline')).toBeNull();
- });
-
- it('does not render post deployment information', () => {
- expect(vm.$el.querySelector('.js-post-deployment')).toBeNull();
- });
- });
- });
-});
diff --git a/spec/javascripts/vue_mr_widget/stores/mr_widget_store_spec.js b/spec/javascripts/vue_mr_widget/stores/mr_widget_store_spec.js
index 272f6cad5fc..1906585af7b 100644
--- a/spec/javascripts/vue_mr_widget/stores/mr_widget_store_spec.js
+++ b/spec/javascripts/vue_mr_widget/stores/mr_widget_store_spec.js
@@ -96,5 +96,17 @@ describe('MergeRequestStore', () => {
expect(store.humanAccess).toEqual('Maintainer');
});
+
+ it('should set pipelinesEmptySvgPath', () => {
+ store.setData({ ...mockData });
+
+ expect(store.pipelinesEmptySvgPath).toBe('/path/to/svg');
+ });
+
+ it('should set newPipelinePath', () => {
+ store.setData({ ...mockData });
+
+ expect(store.newPipelinePath).toBe('/group2/project2/pipelines/new');
+ });
});
});
diff --git a/spec/javascripts/vue_shared/components/content_viewer/content_viewer_spec.js b/spec/javascripts/vue_shared/components/content_viewer/content_viewer_spec.js
index e2a1ed931f1..fbe9337ecf4 100644
--- a/spec/javascripts/vue_shared/components/content_viewer/content_viewer_spec.js
+++ b/spec/javascripts/vue_shared/components/content_viewer/content_viewer_spec.js
@@ -1,6 +1,7 @@
import Vue from 'vue';
import MockAdapter from 'axios-mock-adapter';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
+import waitForPromises from 'spec/helpers/wait_for_promises';
import { GREEN_BOX_IMAGE_URL } from 'spec/test_constants';
import axios from '~/lib/utils/axios_utils';
import contentViewer from '~/vue_shared/components/content_viewer/content_viewer.vue';
@@ -22,7 +23,7 @@ describe('ContentViewer', () => {
it('markdown preview renders + loads rendered markdown from server', done => {
mock = new MockAdapter(axios);
- mock.onPost(`${gon.relative_url_root}/testproject/preview_markdown`).reply(200, {
+ mock.onPost(`${gon.relative_url_root}/testproject/preview_markdown`).replyOnce(200, {
body: '<b>testing</b>',
});
@@ -33,13 +34,12 @@ describe('ContentViewer', () => {
type: 'markdown',
});
- const previewContainer = vm.$el.querySelector('.md-previewer');
-
- setTimeout(() => {
- expect(previewContainer.textContent).toContain('testing');
-
- done();
- });
+ waitForPromises()
+ .then(() => {
+ expect(vm.$el.querySelector('.md-previewer').textContent).toContain('testing');
+ })
+ .then(done)
+ .catch(done.fail);
});
it('renders image preview', done => {
@@ -49,11 +49,12 @@ describe('ContentViewer', () => {
type: 'image',
});
- setTimeout(() => {
- expect(vm.$el.querySelector('img').getAttribute('src')).toBe(GREEN_BOX_IMAGE_URL);
-
- done();
- });
+ vm.$nextTick()
+ .then(() => {
+ expect(vm.$el.querySelector('img').getAttribute('src')).toBe(GREEN_BOX_IMAGE_URL);
+ })
+ .then(done)
+ .catch(done.fail);
});
it('renders fallback download control', done => {
@@ -62,18 +63,19 @@ describe('ContentViewer', () => {
fileSize: 1024,
});
- setTimeout(() => {
- expect(
- vm.$el
- .querySelector('.file-info')
- .textContent.trim()
- .replace(/\s+/, ' '),
- ).toEqual('test.abc (1.00 KiB)');
-
- expect(vm.$el.querySelector('.btn.btn-default').textContent.trim()).toEqual('Download');
-
- done();
- });
+ vm.$nextTick()
+ .then(() => {
+ expect(
+ vm.$el
+ .querySelector('.file-info')
+ .textContent.trim()
+ .replace(/\s+/, ' '),
+ ).toEqual('test.abc (1.00 KiB)');
+
+ expect(vm.$el.querySelector('.btn.btn-default').textContent.trim()).toEqual('Download');
+ })
+ .then(done)
+ .catch(done.fail);
});
it('renders fallback download control for file with a data URL path properly', done => {
@@ -82,13 +84,14 @@ describe('ContentViewer', () => {
filePath: 'somepath/test.abc',
});
- setTimeout(() => {
- expect(vm.$el.querySelector('.file-info').textContent.trim()).toEqual('test.abc');
- expect(vm.$el.querySelector('.btn.btn-default')).toHaveAttr('download', 'test.abc');
- expect(vm.$el.querySelector('.btn.btn-default').textContent.trim()).toEqual('Download');
-
- done();
- });
+ vm.$nextTick()
+ .then(() => {
+ expect(vm.$el.querySelector('.file-info').textContent.trim()).toEqual('test.abc');
+ expect(vm.$el.querySelector('.btn.btn-default')).toHaveAttr('download', 'test.abc');
+ expect(vm.$el.querySelector('.btn.btn-default').textContent.trim()).toEqual('Download');
+ })
+ .then(done)
+ .catch(done.fail);
});
it('markdown preview receives the file path as a parameter', done => {
@@ -106,14 +109,15 @@ describe('ContentViewer', () => {
filePath: 'foo/test.md',
});
- setTimeout(() => {
- expect(axios.post).toHaveBeenCalledWith(
- `${gon.relative_url_root}/testproject/preview_markdown`,
- { path: 'foo/test.md', text: '* Test' },
- jasmine.any(Object),
- );
-
- done();
- });
+ vm.$nextTick()
+ .then(() => {
+ expect(axios.post).toHaveBeenCalledWith(
+ `${gon.relative_url_root}/testproject/preview_markdown`,
+ { path: 'foo/test.md', text: '* Test' },
+ jasmine.any(Object),
+ );
+ })
+ .then(done)
+ .catch(done.fail);
});
});
diff --git a/spec/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js b/spec/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js
deleted file mode 100644
index 81f194395ef..00000000000
--- a/spec/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js
+++ /dev/null
@@ -1,176 +0,0 @@
-import Vue from 'vue';
-import mountComponent from 'spec/helpers/vue_mount_component_helper';
-import { GREEN_BOX_IMAGE_URL, RED_BOX_IMAGE_URL } from 'spec/test_constants';
-import imageDiffViewer from '~/vue_shared/components/diff_viewer/viewers/image_diff_viewer.vue';
-
-describe('ImageDiffViewer', () => {
- const requiredProps = {
- diffMode: 'replaced',
- newPath: GREEN_BOX_IMAGE_URL,
- oldPath: RED_BOX_IMAGE_URL,
- };
- let vm;
-
- function createComponent(props) {
- const ImageDiffViewer = Vue.extend(imageDiffViewer);
- vm = mountComponent(ImageDiffViewer, props);
- }
-
- const triggerEvent = (eventName, el = vm.$el, clientX = 0) => {
- const event = document.createEvent('MouseEvents');
- event.initMouseEvent(
- eventName,
- true,
- true,
- window,
- 1,
- clientX,
- 0,
- clientX,
- 0,
- false,
- false,
- false,
- false,
- 0,
- null,
- );
-
- el.dispatchEvent(event);
- };
-
- const dragSlider = (sliderElement, dragPixel = 20) => {
- triggerEvent('mousedown', sliderElement);
- triggerEvent('mousemove', document.body, dragPixel);
- triggerEvent('mouseup', document.body);
- };
-
- afterEach(() => {
- vm.$destroy();
- });
-
- it('renders image diff for replaced', done => {
- createComponent(requiredProps);
-
- setTimeout(() => {
- expect(vm.$el.querySelector('.added img').getAttribute('src')).toBe(GREEN_BOX_IMAGE_URL);
-
- expect(vm.$el.querySelector('.deleted img').getAttribute('src')).toBe(RED_BOX_IMAGE_URL);
-
- expect(vm.$el.querySelector('.view-modes-menu li.active').textContent.trim()).toBe('2-up');
- expect(vm.$el.querySelector('.view-modes-menu li:nth-child(2)').textContent.trim()).toBe(
- 'Swipe',
- );
-
- expect(vm.$el.querySelector('.view-modes-menu li:nth-child(3)').textContent.trim()).toBe(
- 'Onion skin',
- );
-
- done();
- });
- });
-
- it('renders image diff for new', done => {
- createComponent(
- Object.assign({}, requiredProps, {
- diffMode: 'new',
- oldPath: '',
- }),
- );
-
- setTimeout(() => {
- expect(vm.$el.querySelector('.added img').getAttribute('src')).toBe(GREEN_BOX_IMAGE_URL);
-
- done();
- });
- });
-
- it('renders image diff for deleted', done => {
- createComponent(
- Object.assign({}, requiredProps, {
- diffMode: 'deleted',
- newPath: '',
- }),
- );
-
- setTimeout(() => {
- expect(vm.$el.querySelector('.deleted img').getAttribute('src')).toBe(RED_BOX_IMAGE_URL);
-
- done();
- });
- });
-
- it('renders image diff for renamed', done => {
- vm = new Vue({
- components: {
- imageDiffViewer,
- },
- template: `
- <image-diff-viewer diff-mode="renamed" new-path="${GREEN_BOX_IMAGE_URL}" old-path="">
- <span slot="image-overlay" class="overlay">test</span>
- </image-diff-viewer>
- `,
- }).$mount();
-
- setTimeout(() => {
- expect(vm.$el.querySelector('img').getAttribute('src')).toBe(GREEN_BOX_IMAGE_URL);
- expect(vm.$el.querySelector('.overlay')).not.toBe(null);
-
- done();
- });
- });
-
- describe('swipeMode', () => {
- beforeEach(done => {
- createComponent(requiredProps);
-
- setTimeout(() => {
- done();
- });
- });
-
- it('switches to Swipe Mode', done => {
- vm.$el.querySelector('.view-modes-menu li:nth-child(2)').click();
-
- vm.$nextTick(() => {
- expect(vm.$el.querySelector('.view-modes-menu li.active').textContent.trim()).toBe('Swipe');
- done();
- });
- });
- });
-
- describe('onionSkin', () => {
- beforeEach(done => {
- createComponent(requiredProps);
-
- setTimeout(() => {
- done();
- });
- });
-
- it('switches to Onion Skin Mode', done => {
- vm.$el.querySelector('.view-modes-menu li:nth-child(3)').click();
-
- vm.$nextTick(() => {
- expect(vm.$el.querySelector('.view-modes-menu li.active').textContent.trim()).toBe(
- 'Onion skin',
- );
- done();
- });
- });
-
- it('has working drag handler', done => {
- vm.$el.querySelector('.view-modes-menu li:nth-child(3)').click();
-
- vm.$nextTick(() => {
- dragSlider(vm.$el.querySelector('.dragger'));
-
- vm.$nextTick(() => {
- expect(vm.$el.querySelector('.dragger').style.left).toBe('20px');
- expect(vm.$el.querySelector('.added.frame').style.opacity).toBe('0.2');
- done();
- });
- });
- });
- });
-});
diff --git a/spec/javascripts/vue_shared/components/issue/related_issuable_mock_data.js b/spec/javascripts/vue_shared/components/issue/related_issuable_mock_data.js
index 92080cb9bd5..3c42f0c2aa9 100644
--- a/spec/javascripts/vue_shared/components/issue/related_issuable_mock_data.js
+++ b/spec/javascripts/vue_shared/components/issue/related_issuable_mock_data.js
@@ -1,114 +1 @@
-export const defaultProps = {
- endpoint: '/foo/bar/issues/1/related_issues',
- currentNamespacePath: 'foo',
- currentProjectPath: 'bar',
-};
-
-export const issuable1 = {
- id: 200,
- epicIssueId: 1,
- confidential: false,
- reference: 'foo/bar#123',
- displayReference: '#123',
- title: 'some title',
- path: '/foo/bar/issues/123',
- state: 'opened',
- linkType: 'relates_to',
-};
-
-export const issuable2 = {
- id: 201,
- epicIssueId: 2,
- confidential: false,
- reference: 'foo/bar#124',
- displayReference: '#124',
- title: 'some other thing',
- path: '/foo/bar/issues/124',
- state: 'opened',
- linkType: 'blocks',
-};
-
-export const issuable3 = {
- id: 202,
- epicIssueId: 3,
- confidential: false,
- reference: 'foo/bar#125',
- displayReference: '#125',
- title: 'some other other thing',
- path: '/foo/bar/issues/125',
- state: 'opened',
- linkType: 'is_blocked_by',
-};
-
-export const issuable4 = {
- id: 203,
- epicIssueId: 4,
- confidential: false,
- reference: 'foo/bar#126',
- displayReference: '#126',
- title: 'some other other other thing',
- path: '/foo/bar/issues/126',
- state: 'opened',
-};
-
-export const issuable5 = {
- id: 204,
- epicIssueId: 5,
- confidential: false,
- reference: 'foo/bar#127',
- displayReference: '#127',
- title: 'some other other other thing',
- path: '/foo/bar/issues/127',
- state: 'opened',
-};
-
-export const defaultMilestone = {
- id: 1,
- state: 'active',
- title: 'Milestone title',
- start_date: '2018-01-01',
- due_date: '2019-12-31',
-};
-
-export const defaultAssignees = [
- {
- id: 1,
- name: 'Administrator',
- username: 'root',
- state: 'active',
- avatar_url: `${gl.TEST_HOST}`,
- web_url: `${gl.TEST_HOST}/root`,
- status_tooltip_html: null,
- path: '/root',
- },
- {
- id: 13,
- name: 'Brooks Beatty',
- username: 'brynn_champlin',
- state: 'active',
- avatar_url: `${gl.TEST_HOST}`,
- web_url: `${gl.TEST_HOST}/brynn_champlin`,
- status_tooltip_html: null,
- path: '/brynn_champlin',
- },
- {
- id: 6,
- name: 'Bryce Turcotte',
- username: 'melynda',
- state: 'active',
- avatar_url: `${gl.TEST_HOST}`,
- web_url: `${gl.TEST_HOST}/melynda`,
- status_tooltip_html: null,
- path: '/melynda',
- },
- {
- id: 20,
- name: 'Conchita Eichmann',
- username: 'juliana_gulgowski',
- state: 'active',
- avatar_url: `${gl.TEST_HOST}`,
- web_url: `${gl.TEST_HOST}/juliana_gulgowski`,
- status_tooltip_html: null,
- path: '/juliana_gulgowski',
- },
-];
+export * from '../../../../frontend/vue_shared/components/issue/related_issuable_mock_data';
diff --git a/spec/lib/api/entities/release_spec.rb b/spec/lib/api/entities/release_spec.rb
index 729a69347cb..f0bbaa35efe 100644
--- a/spec/lib/api/entities/release_spec.rb
+++ b/spec/lib/api/entities/release_spec.rb
@@ -4,13 +4,14 @@ require 'spec_helper'
describe API::Entities::Release do
let_it_be(:project) { create(:project) }
- let_it_be(:release) { create(:release, :with_evidence, project: project) }
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
let(:entity) { described_class.new(release, current_user: user) }
- subject { entity.as_json }
-
describe 'evidence' do
+ let(:release) { create(:release, :with_evidence, project: project) }
+
+ subject { entity.as_json }
+
context 'when the current user can download code' do
it 'exposes the evidence sha and the json path' do
allow(Ability).to receive(:allowed?).and_call_original
@@ -37,4 +38,27 @@ describe API::Entities::Release do
end
end
end
+
+ describe 'description_html' do
+ let(:issue) { create(:issue, :confidential, project: project) }
+ let(:issue_path) { Gitlab::Routing.url_helpers.project_issue_path(project, issue) }
+ let(:issue_title) { 'title="%s"' % issue.title }
+ let(:release) { create(:release, project: project, description: "Now shipping #{issue.to_reference}") }
+
+ subject(:description_html) { entity.as_json[:description_html] }
+
+ it 'renders special references if current user has access' do
+ project.add_reporter(user)
+
+ expect(description_html).to include(issue_path)
+ expect(description_html).to include(issue_title)
+ end
+
+ it 'does not render special references if current user has no access' do
+ project.add_guest(user)
+
+ expect(description_html).not_to include(issue_path)
+ expect(description_html).not_to include(issue_title)
+ end
+ end
end
diff --git a/spec/lib/api/helpers/custom_validators_spec.rb b/spec/lib/api/helpers/custom_validators_spec.rb
index 1ebce2ab5c4..66b86d0a055 100644
--- a/spec/lib/api/helpers/custom_validators_spec.rb
+++ b/spec/lib/api/helpers/custom_validators_spec.rb
@@ -24,7 +24,70 @@ describe API::Helpers::CustomValidators do
context 'invalid parameters' do
it 'raises a validation error' do
- expect_validation_error({ 'test' => 'some_value' })
+ expect_validation_error('test' => 'some_value')
+ end
+ end
+ end
+
+ describe API::Helpers::CustomValidators::GitSha do
+ let(:sha) { RepoHelpers.sample_commit.id }
+ let(:short_sha) { sha[0, Gitlab::Git::Commit::MIN_SHA_LENGTH] }
+ let(:too_short_sha) { sha[0, Gitlab::Git::Commit::MIN_SHA_LENGTH - 1] }
+
+ subject do
+ described_class.new(['test'], {}, false, scope.new)
+ end
+
+ context 'valid sha' do
+ it 'does not raise a validation error' do
+ expect_no_validation_error('test' => sha)
+ expect_no_validation_error('test' => short_sha)
+ end
+ end
+
+ context 'empty params' do
+ it 'raises a validation error' do
+ expect_validation_error('test' => nil)
+ expect_validation_error('test' => '')
+ end
+ end
+
+ context 'invalid sha' do
+ it 'raises a validation error' do
+ expect_validation_error('test' => "#{sha}2") # Sha length > 40
+ expect_validation_error('test' => 'somestring')
+ expect_validation_error('test' => too_short_sha) # sha length < MIN_SHA_LENGTH (7)
+ end
+ end
+ end
+
+ describe API::Helpers::CustomValidators::FilePath do
+ subject do
+ described_class.new(['test'], {}, false, scope.new)
+ end
+
+ context 'valid file path' do
+ it 'does not raise a validation error' do
+ expect_no_validation_error('test' => './foo')
+ expect_no_validation_error('test' => './bar.rb')
+ expect_no_validation_error('test' => 'foo%2Fbar%2Fnew%2Ffile.rb')
+ expect_no_validation_error('test' => 'foo%2Fbar%2Fnew')
+ expect_no_validation_error('test' => 'foo%252Fbar%252Fnew%252Ffile.rb')
+ end
+ end
+
+ context 'invalid file path' do
+ it 'raises a validation error' do
+ expect_validation_error('test' => '../foo')
+ expect_validation_error('test' => '../')
+ expect_validation_error('test' => 'foo/../../bar')
+ expect_validation_error('test' => 'foo/../')
+ expect_validation_error('test' => 'foo/..')
+ expect_validation_error('test' => '../')
+ expect_validation_error('test' => '..\\')
+ expect_validation_error('test' => '..\/')
+ expect_validation_error('test' => '%2e%2e%2f')
+ expect_validation_error('test' => '/etc/passwd')
end
end
end
@@ -36,12 +99,12 @@ describe API::Helpers::CustomValidators do
context 'valid parameters' do
it 'does not raise a validation error' do
- expect_no_validation_error({ 'test' => 2 })
- expect_no_validation_error({ 'test' => 100 })
- expect_no_validation_error({ 'test' => 'None' })
- expect_no_validation_error({ 'test' => 'Any' })
- expect_no_validation_error({ 'test' => 'none' })
- expect_no_validation_error({ 'test' => 'any' })
+ expect_no_validation_error('test' => 2)
+ expect_no_validation_error('test' => 100)
+ expect_no_validation_error('test' => 'None')
+ expect_no_validation_error('test' => 'Any')
+ expect_no_validation_error('test' => 'none')
+ expect_no_validation_error('test' => 'any')
end
end
@@ -59,18 +122,18 @@ describe API::Helpers::CustomValidators do
context 'valid parameters' do
it 'does not raise a validation error' do
- expect_no_validation_error({ 'test' => [] })
- expect_no_validation_error({ 'test' => [1, 2, 3] })
- expect_no_validation_error({ 'test' => 'None' })
- expect_no_validation_error({ 'test' => 'Any' })
- expect_no_validation_error({ 'test' => 'none' })
- expect_no_validation_error({ 'test' => 'any' })
+ expect_no_validation_error('test' => [])
+ expect_no_validation_error('test' => [1, 2, 3])
+ expect_no_validation_error('test' => 'None')
+ expect_no_validation_error('test' => 'Any')
+ expect_no_validation_error('test' => 'none')
+ expect_no_validation_error('test' => 'any')
end
end
context 'invalid parameters' do
it 'raises a validation error' do
- expect_validation_error({ 'test' => 'some_other_string' })
+ expect_validation_error('test' => 'some_other_string')
end
end
end
diff --git a/spec/lib/backup/manager_spec.rb b/spec/lib/backup/manager_spec.rb
index 06ad0557e37..b86e92d5969 100644
--- a/spec/lib/backup/manager_spec.rb
+++ b/spec/lib/backup/manager_spec.rb
@@ -214,6 +214,30 @@ describe Backup::Manager do
end
end
+ describe 'verify_backup_version' do
+ context 'on version mismatch' do
+ let(:gitlab_version) { Gitlab::VERSION }
+
+ it 'stops the process' do
+ allow(YAML).to receive(:load_file)
+ .and_return({ gitlab_version: "not #{gitlab_version}" })
+
+ expect { subject.verify_backup_version }.to raise_error SystemExit
+ end
+ end
+
+ context 'on version match' do
+ let(:gitlab_version) { Gitlab::VERSION }
+
+ it 'does nothing' do
+ allow(YAML).to receive(:load_file)
+ .and_return({ gitlab_version: gitlab_version })
+
+ expect { subject.verify_backup_version }.not_to raise_error
+ end
+ end
+ end
+
describe '#unpack' do
context 'when there are no backup files in the directory' do
before do
@@ -292,6 +316,26 @@ describe Backup::Manager do
expect(progress).to have_received(:puts).with(a_string_matching('done'))
end
end
+
+ context 'when there is a non-tarred backup in the directory' do
+ before do
+ allow(Dir).to receive(:glob).and_return(
+ [
+ 'backup_information.yml'
+ ]
+ )
+ allow(File).to receive(:exist?).and_return(true)
+ end
+
+ it 'selects the non-tarred backup to restore from' do
+ expect(Kernel).not_to receive(:system)
+
+ subject.unpack
+
+ expect(progress).to have_received(:puts)
+ .with(a_string_matching('Non tarred backup found '))
+ end
+ end
end
describe '#upload' do
@@ -329,9 +373,7 @@ describe Backup::Manager do
.with(hash_including(key: backup_filename, public: false))
.and_return(true)
- Dir.chdir(Gitlab.config.backup.path) do
- subject.upload
- end
+ subject.upload
end
it 'adds the DIRECTORY environment variable if present' do
@@ -341,9 +383,7 @@ describe Backup::Manager do
.with(hash_including(key: "daily/#{backup_filename}", public: false))
.and_return(true)
- Dir.chdir(Gitlab.config.backup.path) do
- subject.upload
- end
+ subject.upload
end
end
@@ -373,9 +413,7 @@ describe Backup::Manager do
.with(hash_excluding(public: false))
.and_return(true)
- Dir.chdir(Gitlab.config.backup.path) do
- subject.upload
- end
+ subject.upload
end
end
end
diff --git a/spec/lib/backup/repository_spec.rb b/spec/lib/backup/repository_spec.rb
index 2ac1b0d2583..e0afa256581 100644
--- a/spec/lib/backup/repository_spec.rb
+++ b/spec/lib/backup/repository_spec.rb
@@ -50,9 +50,9 @@ describe Backup::Repository do
describe 'command failure' do
before do
- allow_next_instance_of(Gitlab::Shell) do |instance|
- allow(instance).to receive(:create_repository).and_return(false)
- end
+ # Allow us to set expectations on the project directly
+ expect(Project).to receive(:find_each).and_yield(project)
+ expect(project.repository).to receive(:create_repository) { raise 'Fail in tests' }
end
context 'hashed storage' do
diff --git a/spec/lib/banzai/filter/broadcast_message_placeholders_filter_spec.rb b/spec/lib/banzai/filter/broadcast_message_placeholders_filter_spec.rb
new file mode 100644
index 00000000000..1a90abc12d9
--- /dev/null
+++ b/spec/lib/banzai/filter/broadcast_message_placeholders_filter_spec.rb
@@ -0,0 +1,89 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Banzai::Filter::BroadcastMessagePlaceholdersFilter do
+ include FilterSpecHelper
+
+ subject { filter(text, current_user: user, broadcast_message_placeholders: true).to_html }
+
+ describe 'when current user is set' do
+ let_it_be(:user) { create(:user, email: "helloworld@example.com", name: "GitLab Tanunki :)") }
+
+ context 'replaces placeholder in text' do
+ let(:text) { 'Email: {{email}}' }
+
+ it { expect(subject).to eq("Email: #{user.email}") }
+ end
+
+ context 'replaces placeholder when they are in a link' do
+ let(:text) { '<a href="http://example.com?email={{email}}">link</a>' }
+
+ it { expect(subject).to eq("<a href=\"http://example.com?email=helloworld%40example.com\">link</a>") }
+ end
+
+ context 'replaces placeholder when they are in an escaped link' do
+ let(:text) { '<a href="http://example.com?name=%7B%7Bname%7D%7D">link</a>' }
+
+ it { expect(subject).to eq("<a href=\"http://example.com?name=GitLab+Tanunki+%3A%29\">link</a>") }
+ end
+
+ context 'works with empty text' do
+ let(:text) {" "}
+
+ it { expect(subject).to eq(" ") }
+ end
+
+ context 'replaces multiple placeholders in a given text' do
+ let(:text) { "{{email}} {{name}}" }
+
+ it { expect(subject).to eq("#{user.email} #{user.name}") }
+ end
+
+ context 'available placeholders' do
+ context 'replaces the email of the user' do
+ let(:text) { "{{email}}"}
+
+ it { expect(subject).to eq(user.email) }
+ end
+
+ context 'replaces the name of the user' do
+ let(:text) { "{{name}}"}
+
+ it { expect(subject).to eq(user.name) }
+ end
+
+ context 'replaces the ID of the user' do
+ let(:text) { "{{user_id}}" }
+
+ it { expect(subject).to eq(user.id.to_s) }
+ end
+
+ context 'replaces the username of the user' do
+ let(:text) { "{{username}}" }
+
+ it { expect(subject).to eq(user.username) }
+ end
+
+ context 'replaces the instance_id' do
+ before do
+ stub_application_setting(uuid: '123')
+ end
+
+ let(:text) { "{{instance_id}}" }
+
+ it { expect(subject).to eq(Gitlab::CurrentSettings.uuid) }
+ end
+ end
+ end
+
+ describe 'when there is no current user set' do
+ let(:user) { nil }
+
+ context 'replaces placeholder with empty string' do
+ let(:text) { "Email: {{email}}" }
+
+ it { expect(subject).to eq("Email: ") }
+ end
+ end
+end
diff --git a/spec/lib/banzai/filter/inline_grafana_metrics_filter_spec.rb b/spec/lib/banzai/filter/inline_grafana_metrics_filter_spec.rb
index fd6f8816b63..28bf5bd3e92 100644
--- a/spec/lib/banzai/filter/inline_grafana_metrics_filter_spec.rb
+++ b/spec/lib/banzai/filter/inline_grafana_metrics_filter_spec.rb
@@ -8,39 +8,31 @@ describe Banzai::Filter::InlineGrafanaMetricsFilter do
let_it_be(:project) { create(:project) }
let_it_be(:grafana_integration) { create(:grafana_integration, project: project) }
- let(:input) { %(<a href="#{url}">example</a>) }
+ let(:input) { %(<a href="#{trigger_url}">example</a>) }
let(:doc) { filter(input) }
+ let(:embed_url) { doc.at_css('.js-render-metrics')['data-dashboard-url'] }
- let(:url) { grafana_integration.grafana_url + dashboard_path }
let(:dashboard_path) do
'/d/XDaNK6amz/gitlab-omnibus-redis' \
- '?from=1570397739557&to=1570484139557' \
- '&var-instance=All&panelId=14'
+ '?from=1570397739557&panelId=14' \
+ '&to=1570484139557&var-instance=All'
end
- it 'appends a metrics charts placeholder with dashboard url after metrics links' do
- node = doc.at_css('.js-render-metrics')
- expect(node).to be_present
-
- dashboard_url = urls.project_grafana_api_metrics_dashboard_url(
+ let(:trigger_url) { grafana_integration.grafana_url + dashboard_path }
+ let(:dashboard_url) do
+ urls.project_grafana_api_metrics_dashboard_url(
project,
+ grafana_url: trigger_url,
embedded: true,
- grafana_url: url,
start: "2019-10-06T21:35:39Z",
end: "2019-10-07T21:35:39Z"
)
-
- expect(node.attribute('data-dashboard-url').to_s).to eq(dashboard_url)
end
- context 'when the dashboard link is part of a paragraph' do
- let(:paragraph) { %(This is an <a href="#{url}">example</a> of metrics.) }
- let(:input) { %(<p>#{paragraph}</p>) }
+ it_behaves_like 'a metrics embed filter'
- it 'appends the charts placeholder after the enclosing paragraph' do
- expect(unescape(doc.at_css('p').to_s)).to include(paragraph)
- expect(doc.at_css('.js-render-metrics')).to be_present
- end
+ around do |example|
+ Timecop.freeze(Time.utc(2019, 3, 17, 13, 10)) { example.run }
end
context 'when grafana is not configured' do
@@ -53,11 +45,50 @@ describe Banzai::Filter::InlineGrafanaMetricsFilter do
end
end
- context 'when parameters are missing' do
+ context 'when "panelId" parameter is missing' do
+ let(:dashboard_path) { '/d/XDaNK6amz/gitlab-omnibus-redis?from=1570397739557&to=1570484139557' }
+
+ it_behaves_like 'a metrics embed filter'
+ end
+
+ context 'when time window parameters are missing' do
+ let(:dashboard_path) { '/d/XDaNK6amz/gitlab-omnibus-redis?panelId=16' }
+
+ it 'sets the window to the last 8 hrs' do
+ expect(embed_url).to include(
+ 'from%3D1552799400000', 'to%3D1552828200000',
+ 'start=2019-03-17T05%3A10%3A00Z', 'end=2019-03-17T13%3A10%3A00Z'
+ )
+ end
+ end
+
+ context 'when "to" parameter is missing' do
+ let(:dashboard_path) { '/d/XDaNK6amz/gitlab-omnibus-redis?panelId=16&from=1570397739557' }
+
+ it 'sets "to" to 8 hrs after "from"' do
+ expect(embed_url).to include(
+ 'from%3D1570397739557', 'to%3D1570426539000',
+ 'start=2019-10-06T21%3A35%3A39Z', 'end=2019-10-07T05%3A35%3A39Z'
+ )
+ end
+ end
+
+ context 'when "from" parameter is missing' do
+ let(:dashboard_path) { '/d/XDaNK6amz/gitlab-omnibus-redis?panelId=16&to=1570484139557' }
+
+ it 'sets "from" to 8 hrs before "to"' do
+ expect(embed_url).to include(
+ 'from%3D1570455339000', 'to%3D1570484139557',
+ 'start=2019-10-07T13%3A35%3A39Z', 'end=2019-10-07T21%3A35%3A39Z'
+ )
+ end
+ end
+
+ context 'when no parameters are provided' do
let(:dashboard_path) { '/d/XDaNK6amz/gitlab-omnibus-redis' }
- it 'leaves the markdown unchanged' do
- expect(unescape(doc.to_s)).to eq(input)
+ it 'inserts a placeholder' do
+ expect(embed_url).to be_present
end
end
diff --git a/spec/lib/banzai/filter/inline_metrics_filter_spec.rb b/spec/lib/banzai/filter/inline_metrics_filter_spec.rb
index 66bbcbf7292..1546a5e88ed 100644
--- a/spec/lib/banzai/filter/inline_metrics_filter_spec.rb
+++ b/spec/lib/banzai/filter/inline_metrics_filter_spec.rb
@@ -5,66 +5,31 @@ require 'spec_helper'
describe Banzai::Filter::InlineMetricsFilter do
include FilterSpecHelper
- let(:input) { %(<a href="#{url}">example</a>) }
- let(:doc) { filter(input) }
-
- context 'when the document has an external link' do
- let(:url) { 'https://foo.com' }
-
- it 'leaves regular non-metrics links unchanged' do
- expect(doc.to_s).to eq(input)
- end
- end
-
- context 'when the document has a metrics dashboard link' do
- let(:params) { ['foo', 'bar', 12] }
- let(:url) { urls.metrics_namespace_project_environment_url(*params) }
-
- it 'leaves the original link unchanged' do
- expect(doc.at_css('a').to_s).to eq(input)
- end
-
- it 'appends a metrics charts placeholder with dashboard url after metrics links' do
- node = doc.at_css('.js-render-metrics')
- expect(node).to be_present
-
- dashboard_url = urls.metrics_dashboard_namespace_project_environment_url(*params, embedded: true)
- expect(node.attribute('data-dashboard-url').to_s).to eq(dashboard_url)
+ let(:params) { ['foo', 'bar', 12] }
+ let(:query_params) { {} }
+
+ let(:trigger_url) { urls.metrics_namespace_project_environment_url(*params, query_params) }
+ let(:dashboard_url) { urls.metrics_dashboard_namespace_project_environment_url(*params, **query_params, embedded: true) }
+
+ it_behaves_like 'a metrics embed filter'
+
+ context 'with query params specified' do
+ let(:query_params) do
+ {
+ dashboard: 'config/prometheus/common_metrics.yml',
+ group: 'System metrics (Kubernetes)',
+ title: 'Core Usage (Pod Average)',
+ y_label: 'Cores per Pod'
+ }
end
- context 'when the metrics dashboard link is part of a paragraph' do
- let(:paragraph) { %(This is an <a href="#{url}">example</a> of metrics.) }
- let(:input) { %(<p>#{paragraph}</p>) }
-
- it 'appends the charts placeholder after the enclosing paragraph' do
- expect(doc.at_css('p').to_s).to include(paragraph)
- expect(doc.at_css('.js-render-metrics')).to be_present
- end
- end
-
- context 'with dashboard params specified' do
- let(:params) do
- [
- 'foo',
- 'bar',
- 12,
- {
- embedded: true,
- dashboard: 'config/prometheus/common_metrics.yml',
- group: 'System metrics (Kubernetes)',
- title: 'Core Usage (Pod Average)',
- y_label: 'Cores per Pod'
- }
- ]
- end
+ it_behaves_like 'a metrics embed filter'
+ end
- it 'appends a metrics charts placeholder with dashboard url after metrics links' do
- node = doc.at_css('.js-render-metrics')
- expect(node).to be_present
+ it 'leaves links to other dashboards unchanged' do
+ url = urls.namespace_project_grafana_api_metrics_dashboard_url('foo', 'bar')
+ input = %(<a href="#{url}">example</a>)
- dashboard_url = urls.metrics_dashboard_namespace_project_environment_url(*params)
- expect(node.attribute('data-dashboard-url').to_s).to eq(dashboard_url)
- end
- end
+ expect(filter(input).to_s).to eq(input)
end
end
diff --git a/spec/lib/banzai/filter/inline_metrics_redactor_filter_spec.rb b/spec/lib/banzai/filter/inline_metrics_redactor_filter_spec.rb
index e2615ea5069..9d8cd729958 100644
--- a/spec/lib/banzai/filter/inline_metrics_redactor_filter_spec.rb
+++ b/spec/lib/banzai/filter/inline_metrics_redactor_filter_spec.rb
@@ -5,8 +5,7 @@ require 'spec_helper'
describe Banzai::Filter::InlineMetricsRedactorFilter do
include FilterSpecHelper
- set(:project) { create(:project) }
-
+ let_it_be(:project) { create(:project) }
let(:url) { urls.metrics_dashboard_project_environment_url(project, 1, embedded: true) }
let(:input) { %(<a href="#{url}">example</a>) }
let(:doc) { filter(input) }
@@ -18,41 +17,16 @@ describe Banzai::Filter::InlineMetricsRedactorFilter do
end
context 'with a metrics charts placeholder' do
- shared_examples_for 'a supported metrics dashboard url' do
- context 'no user is logged in' do
- it 'redacts the placeholder' do
- expect(doc.to_s).to be_empty
- end
- end
-
- context 'the user does not have permission do see charts' do
- let(:doc) { filter(input, current_user: build(:user)) }
-
- it 'redacts the placeholder' do
- expect(doc.to_s).to be_empty
- end
- end
-
- context 'the user has requisite permissions' do
- let(:user) { create(:user) }
- let(:doc) { filter(input, current_user: user) }
-
- it 'leaves the placeholder' do
- project.add_maintainer(user)
-
- expect(doc.to_s).to eq input
- end
- end
- end
-
let(:input) { %(<div class="js-render-metrics" data-dashboard-url="#{url}"></div>) }
- it_behaves_like 'a supported metrics dashboard url'
+ it_behaves_like 'redacts the embed placeholder'
+ it_behaves_like 'retains the embed placeholder when applicable'
context 'for a grafana dashboard' do
let(:url) { urls.project_grafana_api_metrics_dashboard_url(project, embedded: true) }
- it_behaves_like 'a supported metrics dashboard url'
+ it_behaves_like 'redacts the embed placeholder'
+ it_behaves_like 'retains the embed placeholder when applicable'
end
context 'the user has requisite permissions' do
diff --git a/spec/lib/banzai/filter/label_reference_filter_spec.rb b/spec/lib/banzai/filter/label_reference_filter_spec.rb
index 82df5064896..5a672de13d7 100644
--- a/spec/lib/banzai/filter/label_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/label_reference_filter_spec.rb
@@ -28,7 +28,7 @@ describe Banzai::Filter::LabelReferenceFilter do
it 'includes default classes' do
doc = reference_filter("Label #{reference}")
- expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-label has-tooltip'
+ expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-label has-tooltip gl-link gl-label-link'
end
it 'includes a data-project attribute' do
@@ -66,12 +66,12 @@ describe Banzai::Filter::LabelReferenceFilter do
describe 'label span element' do
it 'includes default classes' do
doc = reference_filter("Label #{reference}")
- expect(doc.css('a span').first.attr('class')).to eq 'badge color-label has-tooltip'
+ expect(doc.css('a span').first.attr('class')).to include 'gl-label-text'
end
it 'includes a style attribute' do
doc = reference_filter("Label #{reference}")
- expect(doc.css('a span').first.attr('style')).to match(/\Abackground-color: #\h{6}; color: #\h{6}\z/)
+ expect(doc.css('a span').first.attr('style')).to match(/\Abackground-color: #\h{6}\z/)
end
end
@@ -85,7 +85,7 @@ describe Banzai::Filter::LabelReferenceFilter do
it 'links with adjacent text' do
doc = reference_filter("Label (#{reference}.)")
- expect(doc.to_html).to match(%r(\(<a.+><span.+>#{label.name}</span></a>\.\)))
+ expect(doc.to_html).to match(%r(\(<span.+><a.+><span.+>#{label.name}</span></a></span>\.\)))
end
it 'ignores invalid label IDs' do
@@ -109,7 +109,7 @@ describe Banzai::Filter::LabelReferenceFilter do
it 'links with adjacent text' do
doc = reference_filter("Label (#{reference}).")
- expect(doc.to_html).to match(%r(\(<a.+><span.+>#{label.name}</span></a>\)\.))
+ expect(doc.to_html).to match(%r(\(<span.+><a.+><span.+>#{label.name}</span></a></span>\)\.))
end
it 'ignores invalid label names' do
@@ -133,7 +133,7 @@ describe Banzai::Filter::LabelReferenceFilter do
it 'links with adjacent text' do
doc = reference_filter("Label (#{reference}).")
- expect(doc.to_html).to match(%r(\(<a.+><span.+>#{label.name}</span></a>\)\.))
+ expect(doc.to_html).to match(%r(\(<span.+><a.+><span.+>#{label.name}</span></a></span>\)\.))
end
it 'ignores invalid label names' do
@@ -158,7 +158,7 @@ describe Banzai::Filter::LabelReferenceFilter do
it 'does not include trailing punctuation', :aggregate_failures do
['.', ', ok?', '...', '?', '!', ': is that ok?'].each do |trailing_punctuation|
doc = filter("Label #{reference}#{trailing_punctuation}")
- expect(doc.to_html).to match(%r(<a.+><span.+>\?g\.fm&amp;</span></a>#{Regexp.escape(trailing_punctuation)}))
+ expect(doc.to_html).to match(%r(<span.+><a.+><span.+>\?g\.fm&amp;</span></a></span>#{Regexp.escape(trailing_punctuation)}))
end
end
@@ -184,7 +184,7 @@ describe Banzai::Filter::LabelReferenceFilter do
it 'links with adjacent text' do
doc = reference_filter("Label (#{reference}.)")
- expect(doc.to_html).to match(%r(\(<a.+><span.+>#{label.name}</span></a>\.\)))
+ expect(doc.to_html).to match(%r(\(<span.+><a.+><span.+>#{label.name}</span></a></span>\.\)))
end
it 'ignores invalid label names' do
@@ -208,7 +208,7 @@ describe Banzai::Filter::LabelReferenceFilter do
it 'links with adjacent text' do
doc = reference_filter("Label (#{reference}.)")
- expect(doc.to_html).to match(%r(\(<a.+><span.+>#{label.name}</span></a>\.\)))
+ expect(doc.to_html).to match(%r(\(<span.+><a.+><span.+>#{label.name}</span></a></span>\.\)))
end
it 'ignores invalid label names' do
@@ -232,7 +232,7 @@ describe Banzai::Filter::LabelReferenceFilter do
it 'links with adjacent text' do
doc = reference_filter("Label (#{reference}.)")
- expect(doc.to_html).to match(%r(\(<a.+><span.+>g\.fm &amp; references\?</span></a>\.\)))
+ expect(doc.to_html).to match(%r(\(<span.+><a.+><span.+>g\.fm &amp; references\?</span></a></span>\.\)))
end
it 'ignores invalid label names' do
@@ -320,7 +320,7 @@ describe Banzai::Filter::LabelReferenceFilter do
it 'links with adjacent text' do
doc = reference_filter("Label (#{reference}.)")
- expect(doc.to_html).to match(%r(\(<a.+>Label</a>\.\)))
+ expect(doc.to_html).to match(%r(\(<span.+><a.+>Label</a></span>\.\)))
end
it 'includes a data-project attribute' do
@@ -358,7 +358,7 @@ describe Banzai::Filter::LabelReferenceFilter do
it 'links with adjacent text' do
doc = reference_filter("Label (#{reference}.)")
- expect(doc.to_html).to match(%r(\(<a.+><span.+>#{group_label.name}</span></a>\.\)))
+ expect(doc.to_html).to match(%r(\(<span.+><a.+><span.+>#{group_label.name}</span></a></span>\.\)))
end
it 'ignores invalid label names' do
@@ -381,7 +381,7 @@ describe Banzai::Filter::LabelReferenceFilter do
it 'links with adjacent text' do
doc = reference_filter("Label (#{reference}.)")
- expect(doc.to_html).to match(%r(\(<a.+><span.+>#{group_label.name}</span></a>\.\)))
+ expect(doc.to_html).to match(%r(\(<span.+><a.+><span.+>#{group_label.name}</span></a></span>\.\)))
end
it 'ignores invalid label names' do
diff --git a/spec/lib/banzai/filter/repository_link_filter_spec.rb b/spec/lib/banzai/filter/repository_link_filter_spec.rb
index cf73c77ecb8..460c76acd78 100644
--- a/spec/lib/banzai/filter/repository_link_filter_spec.rb
+++ b/spec/lib/banzai/filter/repository_link_filter_spec.rb
@@ -145,10 +145,38 @@ describe Banzai::Filter::RepositoryLinkFilter do
it 'ignores ref if commit is passed' do
doc = filter(link('non/existent.file'), commit: project.commit('empty-branch') )
expect(doc.at_css('a')['href'])
- .to eq "/#{project_path}/#{ref}/non/existent.file" # non-existent files have no leading blob/raw/tree
+ .to eq "/#{project_path}/-/blob/#{ref}/non/existent.file"
end
shared_examples :valid_repository do
+ it 'handles Gitaly unavailable exceptions gracefully' do
+ allow_next_instance_of(Gitlab::GitalyClient::BlobService) do |blob_service|
+ allow(blob_service).to receive(:get_blob_types).and_raise(GRPC::Unavailable)
+ end
+
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ an_instance_of(GRPC::Unavailable), project_id: project.id
+ )
+ doc = ""
+ expect { doc = filter(link('doc/api/README.md')) }.not_to raise_error
+ expect(doc.at_css('a')['href'])
+ .to eq "/#{project_path}/-/blob/#{ref}/doc/api/README.md"
+ end
+
+ it 'handles Gitaly timeout exceptions gracefully' do
+ allow_next_instance_of(Gitlab::GitalyClient::BlobService) do |blob_service|
+ allow(blob_service).to receive(:get_blob_types).and_raise(GRPC::DeadlineExceeded)
+ end
+
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ an_instance_of(GRPC::DeadlineExceeded), project_id: project.id
+ )
+ doc = ""
+ expect { doc = filter(link('doc/api/README.md')) }.not_to raise_error
+ expect(doc.at_css('a')['href'])
+ .to eq "/#{project_path}/-/blob/#{ref}/doc/api/README.md"
+ end
+
it 'rebuilds absolute URL for a file in the repo' do
doc = filter(link('/doc/api/README.md'))
expect(doc.at_css('a')['href'])
@@ -173,6 +201,12 @@ describe Banzai::Filter::RepositoryLinkFilter do
.to eq "/#{project_path}/-/blob/#{ref}/doc/api/README.md"
end
+ it 'rebuilds relative URL for a missing file in the repo' do
+ doc = filter(link('missing-file'))
+ expect(doc.at_css('a')['href'])
+ .to eq "/#{project_path}/-/blob/#{ref}/missing-file"
+ end
+
it 'rebuilds relative URL for a file in the repo with leading ./' do
doc = filter(link('./doc/api/README.md'))
expect(doc.at_css('a')['href'])
diff --git a/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb b/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb
index 47ea273ef3a..448422b15e3 100644
--- a/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb
@@ -121,7 +121,7 @@ describe Banzai::Pipeline::GfmPipeline do
end
describe 'emoji in references' do
- set(:project) { create(:project, :public) }
+ let_it_be(:project) { create(:project, :public) }
let(:emoji) { '💯' }
it 'renders a label reference with emoji inside' do
diff --git a/spec/lib/event_filter_spec.rb b/spec/lib/event_filter_spec.rb
index 21b8f726425..e35698f6030 100644
--- a/spec/lib/event_filter_spec.rb
+++ b/spec/lib/event_filter_spec.rb
@@ -18,17 +18,16 @@ describe EventFilter do
end
describe '#apply_filter' do
- set(:public_project) { create(:project, :public) }
-
- set(:push_event) { create(:push_event, project: public_project) }
- set(:merged_event) { create(:event, :merged, project: public_project, target: public_project) }
- set(:created_event) { create(:event, :created, project: public_project, target: create(:issue, project: public_project)) }
- set(:updated_event) { create(:event, :updated, project: public_project, target: create(:issue, project: public_project)) }
- set(:closed_event) { create(:event, :closed, project: public_project, target: create(:issue, project: public_project)) }
- set(:reopened_event) { create(:event, :reopened, project: public_project, target: create(:issue, project: public_project)) }
- set(:comments_event) { create(:event, :commented, project: public_project, target: public_project) }
- set(:joined_event) { create(:event, :joined, project: public_project, target: public_project) }
- set(:left_event) { create(:event, :left, project: public_project, target: public_project) }
+ let_it_be(:public_project) { create(:project, :public) }
+ let_it_be(:push_event) { create(:push_event, project: public_project) }
+ let_it_be(:merged_event) { create(:event, :merged, project: public_project, target: public_project) }
+ let_it_be(:created_event) { create(:event, :created, project: public_project, target: create(:issue, project: public_project)) }
+ let_it_be(:updated_event) { create(:event, :updated, project: public_project, target: create(:issue, project: public_project)) }
+ let_it_be(:closed_event) { create(:event, :closed, project: public_project, target: create(:issue, project: public_project)) }
+ let_it_be(:reopened_event) { create(:event, :reopened, project: public_project, target: create(:issue, project: public_project)) }
+ let_it_be(:comments_event) { create(:event, :commented, project: public_project, target: public_project) }
+ let_it_be(:joined_event) { create(:event, :joined, project: public_project, target: public_project) }
+ let_it_be(:left_event) { create(:event, :left, project: public_project, target: public_project) }
let(:filtered_events) { described_class.new(filter).apply_filter(Event.all) }
diff --git a/spec/lib/feature_spec.rb b/spec/lib/feature_spec.rb
index 2525dd17b89..2890b8d4f3b 100644
--- a/spec/lib/feature_spec.rb
+++ b/spec/lib/feature_spec.rb
@@ -42,7 +42,7 @@ describe Feature do
.once
.and_call_original
- expect(Gitlab::ThreadMemoryCache.cache_backend)
+ expect(Gitlab::ProcessMemoryCache.cache_backend)
.to receive(:fetch)
.once
.with('flipper:persisted_names', expires_in: 1.minute)
@@ -146,7 +146,15 @@ describe Feature do
expect(described_class.enabled?(:enabled_feature_flag)).to be_truthy
end
- it { expect(described_class.l1_cache_backend).to eq(Gitlab::ThreadMemoryCache.cache_backend) }
+ context 'with USE_THREAD_MEMORY_CACHE defined' do
+ before do
+ stub_env('USE_THREAD_MEMORY_CACHE', '1')
+ end
+
+ it { expect(described_class.l1_cache_backend).to eq(Gitlab::ThreadMemoryCache.cache_backend) }
+ end
+
+ it { expect(described_class.l1_cache_backend).to eq(Gitlab::ProcessMemoryCache.cache_backend) }
it { expect(described_class.l2_cache_backend).to eq(Rails.cache) }
it 'caches the status in L1 and L2 caches',
diff --git a/spec/lib/gitlab/access/branch_protection_spec.rb b/spec/lib/gitlab/access/branch_protection_spec.rb
index 7f2979e8e28..e4b763357c4 100644
--- a/spec/lib/gitlab/access/branch_protection_spec.rb
+++ b/spec/lib/gitlab/access/branch_protection_spec.rb
@@ -51,4 +51,21 @@ describe Gitlab::Access::BranchProtection do
end
end
end
+
+ describe '#fully_protected?' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:level, :result) do
+ Gitlab::Access::PROTECTION_NONE | false
+ Gitlab::Access::PROTECTION_DEV_CAN_PUSH | false
+ Gitlab::Access::PROTECTION_DEV_CAN_MERGE | false
+ Gitlab::Access::PROTECTION_FULL | true
+ end
+
+ with_them do
+ it do
+ expect(described_class.new(level).fully_protected?).to eq(result)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/alerting/alert_spec.rb b/spec/lib/gitlab/alerting/alert_spec.rb
index 90e93d189e2..790edbc9c60 100644
--- a/spec/lib/gitlab/alerting/alert_spec.rb
+++ b/spec/lib/gitlab/alerting/alert_spec.rb
@@ -17,6 +17,12 @@ describe Gitlab::Alerting::Alert do
end
end
+ shared_context 'full query' do
+ before do
+ payload['generatorURL'] = 'http://localhost:9090/graph?g0.expr=vector%281%29'
+ end
+ end
+
shared_examples 'invalid alert' do
it 'is invalid' do
expect(alert).not_to be_valid
@@ -180,10 +186,7 @@ describe Gitlab::Alerting::Alert do
context 'with gitlab alert' do
include_context 'gitlab alert'
-
- before do
- payload['generatorURL'] = 'http://localhost:9090/graph?g0.expr=vector%281%29'
- end
+ include_context 'full query'
it { is_expected.to eq(gitlab_alert.full_query) }
end
diff --git a/spec/lib/gitlab/auth/current_user_mode_spec.rb b/spec/lib/gitlab/auth/current_user_mode_spec.rb
index 7c2fdac6c25..2b910fac155 100644
--- a/spec/lib/gitlab/auth/current_user_mode_spec.rb
+++ b/spec/lib/gitlab/auth/current_user_mode_spec.rb
@@ -3,294 +3,330 @@
require 'spec_helper'
describe Gitlab::Auth::CurrentUserMode, :do_not_mock_admin_mode, :request_store do
- include_context 'custom session'
-
let(:user) { build_stubbed(:user) }
subject { described_class.new(user) }
- before do
- allow(ActiveSession).to receive(:list_sessions).with(user).and_return([session])
- end
-
- shared_examples 'admin mode cannot be enabled' do
- it 'is false by default' do
- expect(subject.admin_mode?).to be(false)
- end
-
- it 'cannot be enabled with a valid password' do
- subject.enable_admin_mode!(password: user.password)
-
- expect(subject.admin_mode?).to be(false)
- end
-
- it 'cannot be enabled with an invalid password' do
- subject.enable_admin_mode!(password: nil)
-
- expect(subject.admin_mode?).to be(false)
- end
-
- it 'cannot be enabled with empty params' do
- subject.enable_admin_mode!
+ context 'when session is available' do
+ include_context 'custom session'
- expect(subject.admin_mode?).to be(false)
+ before do
+ allow(ActiveSession).to receive(:list_sessions).with(user).and_return([session])
end
- it 'disable has no effect' do
- subject.enable_admin_mode!
- subject.disable_admin_mode!
-
- expect(subject.admin_mode?).to be(false)
- end
+ shared_examples 'admin mode cannot be enabled' do
+ it 'is false by default' do
+ expect(subject.admin_mode?).to be(false)
+ end
- context 'skipping password validation' do
it 'cannot be enabled with a valid password' do
- subject.enable_admin_mode!(password: user.password, skip_password_validation: true)
+ subject.enable_admin_mode!(password: user.password)
expect(subject.admin_mode?).to be(false)
end
it 'cannot be enabled with an invalid password' do
- subject.enable_admin_mode!(skip_password_validation: true)
+ subject.enable_admin_mode!(password: nil)
expect(subject.admin_mode?).to be(false)
end
- end
- end
- describe '#admin_mode?' do
- context 'when the user is a regular user' do
- it_behaves_like 'admin mode cannot be enabled'
+ it 'cannot be enabled with empty params' do
+ subject.enable_admin_mode!
- context 'bypassing session' do
- it_behaves_like 'admin mode cannot be enabled' do
- around do |example|
- described_class.bypass_session!(user.id) { example.run }
- end
- end
+ expect(subject.admin_mode?).to be(false)
end
- end
-
- context 'when the user is an admin' do
- let(:user) { build_stubbed(:user, :admin) }
- context 'when admin mode not requested' do
- it 'is false by default' do
- expect(subject.admin_mode?).to be(false)
- end
-
- it 'raises exception if we try to enable it' do
- expect do
- subject.enable_admin_mode!(password: user.password)
- end.to raise_error(::Gitlab::Auth::CurrentUserMode::NotRequestedError)
+ it 'disable has no effect' do
+ subject.enable_admin_mode!
+ subject.disable_admin_mode!
- expect(subject.admin_mode?).to be(false)
- end
+ expect(subject.admin_mode?).to be(false)
end
- context 'when admin mode requested first' do
- before do
- subject.request_admin_mode!
- end
+ context 'skipping password validation' do
+ it 'cannot be enabled with a valid password' do
+ subject.enable_admin_mode!(password: user.password, skip_password_validation: true)
- it 'is false by default' do
expect(subject.admin_mode?).to be(false)
end
it 'cannot be enabled with an invalid password' do
- subject.enable_admin_mode!(password: nil)
+ subject.enable_admin_mode!(skip_password_validation: true)
expect(subject.admin_mode?).to be(false)
end
+ end
+ end
- it 'can be enabled with a valid password' do
- subject.enable_admin_mode!(password: user.password)
+ describe '#admin_mode?' do
+ context 'when the user is a regular user' do
+ it_behaves_like 'admin mode cannot be enabled'
- expect(subject.admin_mode?).to be(true)
+ context 'bypassing session' do
+ it_behaves_like 'admin mode cannot be enabled' do
+ around do |example|
+ described_class.bypass_session!(user.id) { example.run }
+ end
+ end
end
+ end
- it 'can be disabled' do
- subject.enable_admin_mode!(password: user.password)
- subject.disable_admin_mode!
+ context 'when the user is an admin' do
+ let(:user) { build_stubbed(:user, :admin) }
- expect(subject.admin_mode?).to be(false)
+ context 'when admin mode not requested' do
+ it 'is false by default' do
+ expect(subject.admin_mode?).to be(false)
+ end
+
+ it 'raises exception if we try to enable it' do
+ expect do
+ subject.enable_admin_mode!(password: user.password)
+ end.to raise_error(::Gitlab::Auth::CurrentUserMode::NotRequestedError)
+
+ expect(subject.admin_mode?).to be(false)
+ end
end
- it 'will expire in the future' do
- subject.enable_admin_mode!(password: user.password)
- expect(subject.admin_mode?).to be(true), 'admin mode is not active in the present'
+ context 'when admin mode requested first' do
+ before do
+ subject.request_admin_mode!
+ end
- Timecop.freeze(Gitlab::Auth::CurrentUserMode::MAX_ADMIN_MODE_TIME.from_now) do
- # in the future this will be a new request, simulate by clearing the RequestStore
- Gitlab::SafeRequestStore.clear!
+ it 'is false by default' do
+ expect(subject.admin_mode?).to be(false)
+ end
+
+ it 'cannot be enabled with an invalid password' do
+ subject.enable_admin_mode!(password: nil)
- expect(subject.admin_mode?).to be(false), 'admin mode did not expire in the future'
+ expect(subject.admin_mode?).to be(false)
end
- end
- context 'skipping password validation' do
it 'can be enabled with a valid password' do
- subject.enable_admin_mode!(password: user.password, skip_password_validation: true)
+ subject.enable_admin_mode!(password: user.password)
expect(subject.admin_mode?).to be(true)
end
- it 'can be enabled with an invalid password' do
- subject.enable_admin_mode!(skip_password_validation: true)
+ it 'can be disabled' do
+ subject.enable_admin_mode!(password: user.password)
+ subject.disable_admin_mode!
- expect(subject.admin_mode?).to be(true)
+ expect(subject.admin_mode?).to be(false)
end
- end
- context 'with two independent sessions' do
- let(:another_session) { {} }
- let(:another_subject) { described_class.new(user) }
+ it 'will expire in the future' do
+ subject.enable_admin_mode!(password: user.password)
+ expect(subject.admin_mode?).to be(true), 'admin mode is not active in the present'
- before do
- allow(ActiveSession).to receive(:list_sessions).with(user).and_return([session, another_session])
+ Timecop.freeze(Gitlab::Auth::CurrentUserMode::MAX_ADMIN_MODE_TIME.from_now) do
+ # in the future this will be a new request, simulate by clearing the RequestStore
+ Gitlab::SafeRequestStore.clear!
+
+ expect(subject.admin_mode?).to be(false), 'admin mode did not expire in the future'
+ end
end
- it 'can be enabled in one and seen in the other' do
- Gitlab::Session.with_session(another_session) do
- another_subject.request_admin_mode!
- another_subject.enable_admin_mode!(password: user.password)
+ context 'skipping password validation' do
+ it 'can be enabled with a valid password' do
+ subject.enable_admin_mode!(password: user.password, skip_password_validation: true)
+
+ expect(subject.admin_mode?).to be(true)
end
- expect(subject.admin_mode?).to be(true)
+ it 'can be enabled with an invalid password' do
+ subject.enable_admin_mode!(skip_password_validation: true)
+
+ expect(subject.admin_mode?).to be(true)
+ end
end
- end
- end
- context 'bypassing session' do
- it 'is active by default' do
- described_class.bypass_session!(user.id) do
- expect(subject.admin_mode?).to be(true)
+ context 'with two independent sessions' do
+ let(:another_session) { {} }
+ let(:another_subject) { described_class.new(user) }
+
+ before do
+ allow(ActiveSession).to receive(:list_sessions).with(user).and_return([session, another_session])
+ end
+
+ it 'can be enabled in one and seen in the other' do
+ Gitlab::Session.with_session(another_session) do
+ another_subject.request_admin_mode!
+ another_subject.enable_admin_mode!(password: user.password)
+ end
+
+ expect(subject.admin_mode?).to be(true)
+ end
end
end
- it 'enable has no effect' do
- described_class.bypass_session!(user.id) do
- subject.request_admin_mode!
- subject.enable_admin_mode!(password: user.password)
+ context 'bypassing session' do
+ it 'is active by default' do
+ described_class.bypass_session!(user.id) do
+ expect(subject.admin_mode?).to be(true)
+ end
+ end
- expect(subject.admin_mode?).to be(true)
+ it 'enable has no effect' do
+ described_class.bypass_session!(user.id) do
+ subject.request_admin_mode!
+ subject.enable_admin_mode!(password: user.password)
+
+ expect(subject.admin_mode?).to be(true)
+ end
end
- end
- it 'disable has no effect' do
- described_class.bypass_session!(user.id) do
- subject.disable_admin_mode!
+ it 'disable has no effect' do
+ described_class.bypass_session!(user.id) do
+ subject.disable_admin_mode!
- expect(subject.admin_mode?).to be(true)
+ expect(subject.admin_mode?).to be(true)
+ end
end
end
end
end
- end
- describe '#enable_admin_mode!' do
- let(:user) { build_stubbed(:user, :admin) }
+ describe '#enable_admin_mode!' do
+ let(:user) { build_stubbed(:user, :admin) }
- it 'creates a timestamp in the session' do
- subject.request_admin_mode!
- subject.enable_admin_mode!(password: user.password)
+ it 'creates a timestamp in the session' do
+ subject.request_admin_mode!
+ subject.enable_admin_mode!(password: user.password)
- expect(session).to include(expected_session_entry(be_within(1.second).of Time.now))
+ expect(session).to include(expected_session_entry(be_within(1.second).of Time.now))
+ end
end
- end
- describe '#enable_sessionless_admin_mode!' do
- let(:user) { build_stubbed(:user, :admin) }
+ describe '#disable_admin_mode!' do
+ let(:user) { build_stubbed(:user, :admin) }
- it 'enabled admin mode without password' do
- subject.enable_sessionless_admin_mode!
+ it 'sets the session timestamp to nil' do
+ subject.request_admin_mode!
+ subject.disable_admin_mode!
- expect(subject.admin_mode?).to be(true)
+ expect(session).to include(expected_session_entry(be_nil))
+ end
end
- end
- describe '#disable_admin_mode!' do
- let(:user) { build_stubbed(:user, :admin) }
+ describe '.with_current_request_admin_mode' do
+ context 'with a regular user' do
+ it 'user is not available inside nor outside the yielded block' do
+ described_class.with_current_admin(user) do
+ expect(described_class.current_admin).to be_nil
+ end
- it 'sets the session timestamp to nil' do
- subject.request_admin_mode!
- subject.disable_admin_mode!
+ expect(described_class.bypass_session_admin_id).to be_nil
+ end
+ end
- expect(session).to include(expected_session_entry(be_nil))
- end
- end
+ context 'with an admin user' do
+ let(:user) { build_stubbed(:user, :admin) }
- describe '.bypass_session!' do
- context 'with a regular user' do
- it 'admin mode is false' do
- described_class.bypass_session!(user.id) do
- expect(subject.admin_mode?).to be(false)
- expect(described_class.bypass_session_admin_id).to be(user.id)
+ context 'admin mode is disabled' do
+ it 'user is not available inside nor outside the yielded block' do
+ described_class.with_current_admin(user) do
+ expect(described_class.current_admin).to be_nil
+ end
+
+ expect(described_class.bypass_session_admin_id).to be_nil
+ end
end
- expect(described_class.bypass_session_admin_id).to be_nil
- end
- end
+ context 'admin mode is enabled' do
+ before do
+ subject.request_admin_mode!
+ subject.enable_admin_mode!(password: user.password)
+ end
- context 'with an admin user' do
- let(:user) { build_stubbed(:user, :admin) }
+ it 'user is available only inside the yielded block' do
+ described_class.with_current_admin(user) do
+ expect(described_class.current_admin).to be(user)
+ end
- it 'admin mode is true' do
- described_class.bypass_session!(user.id) do
- expect(subject.admin_mode?).to be(true)
- expect(described_class.bypass_session_admin_id).to be(user.id)
+ expect(described_class.current_admin).to be_nil
+ end
end
-
- expect(described_class.bypass_session_admin_id).to be_nil
end
end
- end
- describe '.with_current_request_admin_mode' do
- context 'with a regular user' do
- it 'user is not available inside nor outside the yielded block' do
- described_class.with_current_admin(user) do
- expect(described_class.current_admin).to be_nil
- end
+ def expected_session_entry(value_matcher)
+ {
+ Gitlab::Auth::CurrentUserMode::SESSION_STORE_KEY => a_hash_including(
+ Gitlab::Auth::CurrentUserMode::ADMIN_MODE_START_TIME_KEY => value_matcher)
+ }
+ end
+ end
- expect(described_class.bypass_session_admin_id).to be_nil
+ context 'when no session available' do
+ around do |example|
+ Gitlab::Session.with_session(nil) do
+ example.run
end
end
- context 'with an admin user' do
- let(:user) { build_stubbed(:user, :admin) }
+ describe '.bypass_session!' do
+ context 'when providing a block' do
+ context 'with a regular user' do
+ it 'admin mode is false' do
+ described_class.bypass_session!(user.id) do
+ expect(Gitlab::Session.current).to be_nil
+ expect(subject.admin_mode?).to be(false)
+ expect(described_class.bypass_session_admin_id).to be(user.id)
+ end
- context 'admin mode is disabled' do
- it 'user is not available inside nor outside the yielded block' do
- described_class.with_current_admin(user) do
- expect(described_class.current_admin).to be_nil
+ expect(described_class.bypass_session_admin_id).to be_nil
end
+ end
- expect(described_class.bypass_session_admin_id).to be_nil
+ context 'with an admin user' do
+ let(:user) { build_stubbed(:user, :admin) }
+
+ it 'admin mode is true' do
+ described_class.bypass_session!(user.id) do
+ expect(Gitlab::Session.current).to be_nil
+ expect(subject.admin_mode?).to be(true)
+ expect(described_class.bypass_session_admin_id).to be(user.id)
+ end
+
+ expect(described_class.bypass_session_admin_id).to be_nil
+ end
end
end
- context 'admin mode is enabled' do
- before do
- subject.request_admin_mode!
- subject.enable_admin_mode!(password: user.password)
- end
+ context 'when not providing a block' do
+ context 'with a regular user' do
+ it 'admin mode is false' do
+ described_class.bypass_session!(user.id)
- it 'user is available only inside the yielded block' do
- described_class.with_current_admin(user) do
- expect(described_class.current_admin).to be(user)
+ expect(Gitlab::Session.current).to be_nil
+ expect(subject.admin_mode?).to be(false)
+ expect(described_class.bypass_session_admin_id).to be(user.id)
+
+ described_class.reset_bypass_session!
+
+ expect(described_class.bypass_session_admin_id).to be_nil
end
+ end
- expect(described_class.current_admin).to be_nil
+ context 'with an admin user' do
+ let(:user) { build_stubbed(:user, :admin) }
+
+ it 'admin mode is true' do
+ described_class.bypass_session!(user.id)
+
+ expect(Gitlab::Session.current).to be_nil
+ expect(subject.admin_mode?).to be(true)
+ expect(described_class.bypass_session_admin_id).to be(user.id)
+
+ described_class.reset_bypass_session!
+
+ expect(described_class.bypass_session_admin_id).to be_nil
+ end
end
end
end
end
-
- def expected_session_entry(value_matcher)
- {
- Gitlab::Auth::CurrentUserMode::SESSION_STORE_KEY => a_hash_including(
- Gitlab::Auth::CurrentUserMode::ADMIN_MODE_START_TIME_KEY => value_matcher)
- }
- end
end
diff --git a/spec/lib/gitlab/auth/key_status_checker_spec.rb b/spec/lib/gitlab/auth/key_status_checker_spec.rb
new file mode 100644
index 00000000000..b1a540eae81
--- /dev/null
+++ b/spec/lib/gitlab/auth/key_status_checker_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Auth::KeyStatusChecker do
+ let_it_be(:never_expires_key) { build(:personal_key, expires_at: nil) }
+ let_it_be(:expired_key) { build(:personal_key, expires_at: 3.days.ago) }
+ let_it_be(:expiring_soon_key) { build(:personal_key, expires_at: 3.days.from_now) }
+ let_it_be(:expires_in_future_key) { build(:personal_key, expires_at: 14.days.from_now) }
+
+ let(:key_status_checker) { described_class.new(key) }
+
+ describe '#show_console_message?' do
+ subject { key_status_checker.show_console_message? }
+
+ context 'for an expired key' do
+ let(:key) { expired_key }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'for a key expiring in the next 7 days' do
+ let(:key) { expiring_soon_key }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'for a key expiring after the next 7 days' do
+ let(:key) { expires_in_future_key }
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'for a key that never expires' do
+ let(:key) { never_expires_key }
+
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ describe '#console_message' do
+ subject { key_status_checker.console_message }
+
+ context 'for an expired key' do
+ let(:key) { expired_key }
+
+ it { is_expected.to eq('INFO: Your SSH key has expired. Please generate a new key.') }
+ end
+
+ context 'for a key expiring in the next 7 days' do
+ let(:key) { expiring_soon_key }
+
+ it { is_expected.to eq('INFO: Your SSH key is expiring soon. Please generate a new key.') }
+ end
+
+ context 'for a key expiring after the next 7 days' do
+ let(:key) { expires_in_future_key }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'for a key that never expires' do
+ let(:key) { never_expires_key }
+
+ it { is_expected.to be_nil }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/auth/ldap/access_spec.rb b/spec/lib/gitlab/auth/ldap/access_spec.rb
index f9eb4a30190..2f691429541 100644
--- a/spec/lib/gitlab/auth/ldap/access_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/access_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::Auth::LDAP::Access do
+describe Gitlab::Auth::Ldap::Access do
include LdapHelpers
let(:user) { create(:omniauth_user) }
@@ -64,7 +64,7 @@ describe Gitlab::Auth::LDAP::Access do
context 'and the user is disabled via active directory' do
before do
- allow(Gitlab::Auth::LDAP::Person).to receive(:disabled_via_active_directory?).and_return(true)
+ allow(Gitlab::Auth::Ldap::Person).to receive(:disabled_via_active_directory?).and_return(true)
end
it 'returns false' do
@@ -90,7 +90,7 @@ describe Gitlab::Auth::LDAP::Access do
context 'and has no disabled flag in active directory' do
before do
- allow(Gitlab::Auth::LDAP::Person).to receive(:disabled_via_active_directory?).and_return(false)
+ allow(Gitlab::Auth::Ldap::Person).to receive(:disabled_via_active_directory?).and_return(false)
end
it { is_expected.to be_truthy }
@@ -135,8 +135,8 @@ describe Gitlab::Auth::LDAP::Access do
context 'without ActiveDirectory enabled' do
before do
- allow(Gitlab::Auth::LDAP::Config).to receive(:enabled?).and_return(true)
- allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
+ allow(Gitlab::Auth::Ldap::Config).to receive(:enabled?).and_return(true)
+ allow_next_instance_of(Gitlab::Auth::Ldap::Config) do |instance|
allow(instance).to receive(:active_directory).and_return(false)
end
end
diff --git a/spec/lib/gitlab/auth/ldap/adapter_spec.rb b/spec/lib/gitlab/auth/ldap/adapter_spec.rb
index 54486913b72..34853acdd0f 100644
--- a/spec/lib/gitlab/auth/ldap/adapter_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/adapter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::Auth::LDAP::Adapter do
+describe Gitlab::Auth::Ldap::Adapter do
include LdapHelpers
let(:ldap) { double(:ldap) }
@@ -138,7 +138,7 @@ describe Gitlab::Auth::LDAP::Adapter do
it 'as many times as MAX_SEARCH_RETRIES' do
expect(ldap).to receive(:search).exactly(3).times
- expect { subject }.to raise_error(Gitlab::Auth::LDAP::LDAPConnectionError)
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::LdapConnectionError)
end
context 'when no more retries' do
@@ -147,11 +147,11 @@ describe Gitlab::Auth::LDAP::Adapter do
end
it 'raises the exception' do
- expect { subject }.to raise_error(Gitlab::Auth::LDAP::LDAPConnectionError)
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::LdapConnectionError)
end
it 'logs the error' do
- expect { subject }.to raise_error(Gitlab::Auth::LDAP::LDAPConnectionError)
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::LdapConnectionError)
expect(Rails.logger).to have_received(:warn).with(
"LDAP search raised exception Net::LDAP::Error: some error")
end
@@ -161,6 +161,6 @@ describe Gitlab::Auth::LDAP::Adapter do
end
def ldap_attributes
- Gitlab::Auth::LDAP::Person.ldap_attributes(Gitlab::Auth::LDAP::Config.new('ldapmain'))
+ Gitlab::Auth::Ldap::Person.ldap_attributes(Gitlab::Auth::Ldap::Config.new('ldapmain'))
end
end
diff --git a/spec/lib/gitlab/auth/ldap/auth_hash_spec.rb b/spec/lib/gitlab/auth/ldap/auth_hash_spec.rb
index f1050b9f830..7bc92d0abea 100644
--- a/spec/lib/gitlab/auth/ldap/auth_hash_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/auth_hash_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::Auth::LDAP::AuthHash do
+describe Gitlab::Auth::Ldap::AuthHash do
include LdapHelpers
let(:auth_hash) do
@@ -58,7 +58,7 @@ describe Gitlab::Auth::LDAP::AuthHash do
end
before do
- allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
+ allow_next_instance_of(Gitlab::Auth::Ldap::Config) do |instance|
allow(instance).to receive(:attributes).and_return(attributes)
end
end
diff --git a/spec/lib/gitlab/auth/ldap/authentication_spec.rb b/spec/lib/gitlab/auth/ldap/authentication_spec.rb
index ebaf8383ce5..1f8b1474539 100644
--- a/spec/lib/gitlab/auth/ldap/authentication_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/authentication_spec.rb
@@ -2,15 +2,15 @@
require 'spec_helper'
-describe Gitlab::Auth::LDAP::Authentication do
+describe Gitlab::Auth::Ldap::Authentication do
let(:dn) { 'uid=John Smith, ou=People, dc=example, dc=com' }
- let(:user) { create(:omniauth_user, extern_uid: Gitlab::Auth::LDAP::Person.normalize_dn(dn)) }
+ let(:user) { create(:omniauth_user, extern_uid: Gitlab::Auth::Ldap::Person.normalize_dn(dn)) }
let(:login) { 'john' }
let(:password) { 'password' }
describe 'login' do
before do
- allow(Gitlab::Auth::LDAP::Config).to receive(:enabled?).and_return(true)
+ allow(Gitlab::Auth::Ldap::Config).to receive(:enabled?).and_return(true)
end
it "finds the user if authentication is successful" do
@@ -48,7 +48,7 @@ describe Gitlab::Auth::LDAP::Authentication do
end
it "fails if ldap is disabled" do
- allow(Gitlab::Auth::LDAP::Config).to receive(:enabled?).and_return(false)
+ allow(Gitlab::Auth::Ldap::Config).to receive(:enabled?).and_return(false)
expect(described_class.login(login, password)).to be_falsey
end
diff --git a/spec/lib/gitlab/auth/ldap/config_spec.rb b/spec/lib/gitlab/auth/ldap/config_spec.rb
index e4a90d4018d..0967c45d36b 100644
--- a/spec/lib/gitlab/auth/ldap/config_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/config_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::Auth::LDAP::Config do
+describe Gitlab::Auth::Ldap::Config do
include LdapHelpers
let(:config) { described_class.new('ldapmain') }
diff --git a/spec/lib/gitlab/auth/ldap/dn_spec.rb b/spec/lib/gitlab/auth/ldap/dn_spec.rb
index 63656efba29..7aaffa52ae4 100644
--- a/spec/lib/gitlab/auth/ldap/dn_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/dn_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::Auth::LDAP::DN do
+describe Gitlab::Auth::Ldap::DN do
using RSpec::Parameterized::TableSyntax
describe '#normalize_value' do
@@ -15,7 +15,7 @@ describe Gitlab::Auth::LDAP::DN do
let(:given) { 'John Smith,' }
it 'raises MalformedError' do
- expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, 'DN string ended unexpectedly')
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::DN::MalformedError, 'DN string ended unexpectedly')
end
end
@@ -23,7 +23,7 @@ describe Gitlab::Auth::LDAP::DN do
let(:given) { '#aa aa' }
it 'raises MalformedError' do
- expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, "Expected the end of an attribute value, but got \"a\"")
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::DN::MalformedError, "Expected the end of an attribute value, but got \"a\"")
end
end
@@ -31,7 +31,7 @@ describe Gitlab::Auth::LDAP::DN do
let(:given) { '#aaXaaa' }
it 'raises MalformedError' do
- expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, "Expected the first character of a hex pair, but got \"X\"")
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::DN::MalformedError, "Expected the first character of a hex pair, but got \"X\"")
end
end
@@ -39,7 +39,7 @@ describe Gitlab::Auth::LDAP::DN do
let(:given) { '#aaaYaa' }
it 'raises MalformedError' do
- expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, "Expected the second character of a hex pair, but got \"Y\"")
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::DN::MalformedError, "Expected the second character of a hex pair, but got \"Y\"")
end
end
@@ -47,7 +47,7 @@ describe Gitlab::Auth::LDAP::DN do
let(:given) { '"Sebasti\\cX\\a1n"' }
it 'raises MalformedError' do
- expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, "Expected the second character of a hex pair inside a double quoted value, but got \"X\"")
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::DN::MalformedError, "Expected the second character of a hex pair inside a double quoted value, but got \"X\"")
end
end
@@ -55,7 +55,7 @@ describe Gitlab::Auth::LDAP::DN do
let(:given) { '"James' }
it 'raises MalformedError' do
- expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, 'DN string ended unexpectedly')
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::DN::MalformedError, 'DN string ended unexpectedly')
end
end
@@ -63,7 +63,7 @@ describe Gitlab::Auth::LDAP::DN do
let(:given) { 'J\ames' }
it 'raises MalformedError' do
- expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, 'Invalid escaped hex code "\am"')
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::DN::MalformedError, 'Invalid escaped hex code "\am"')
end
end
@@ -71,7 +71,7 @@ describe Gitlab::Auth::LDAP::DN do
let(:given) { 'foo\\' }
it 'raises MalformedError' do
- expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, 'DN string ended unexpectedly')
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::DN::MalformedError, 'DN string ended unexpectedly')
end
end
end
@@ -88,7 +88,7 @@ describe Gitlab::Auth::LDAP::DN do
let(:given) { 'uid=john smith+telephonenumber=+1 555-555-5555,ou=people,dc=example,dc=com' }
it 'raises UnsupportedError' do
- expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::UnsupportedError)
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::DN::UnsupportedError)
end
end
@@ -97,7 +97,7 @@ describe Gitlab::Auth::LDAP::DN do
let(:given) { 'uid = John Smith + telephoneNumber = + 1 555-555-5555 , ou = People,dc=example,dc=com' }
it 'raises UnsupportedError' do
- expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::UnsupportedError)
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::DN::UnsupportedError)
end
end
@@ -105,7 +105,7 @@ describe Gitlab::Auth::LDAP::DN do
let(:given) { 'uid = John Smith + telephoneNumber = +1 555-555-5555 , ou = People,dc=example,dc=com' }
it 'raises UnsupportedError' do
- expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::UnsupportedError)
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::DN::UnsupportedError)
end
end
end
@@ -117,7 +117,7 @@ describe Gitlab::Auth::LDAP::DN do
let(:given) { 'uid=John Smith,' }
it 'raises MalformedError' do
- expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, 'DN string ended unexpectedly')
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::DN::MalformedError, 'DN string ended unexpectedly')
end
end
@@ -125,7 +125,7 @@ describe Gitlab::Auth::LDAP::DN do
let(:given) { '0.9.2342.19200300.100.1.25=#aa aa' }
it 'raises MalformedError' do
- expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, "Expected the end of an attribute value, but got \"a\"")
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::DN::MalformedError, "Expected the end of an attribute value, but got \"a\"")
end
end
@@ -133,7 +133,7 @@ describe Gitlab::Auth::LDAP::DN do
let(:given) { '0.9.2342.19200300.100.1.25=#aaXaaa' }
it 'raises MalformedError' do
- expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, "Expected the first character of a hex pair, but got \"X\"")
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::DN::MalformedError, "Expected the first character of a hex pair, but got \"X\"")
end
end
@@ -141,7 +141,7 @@ describe Gitlab::Auth::LDAP::DN do
let(:given) { '0.9.2342.19200300.100.1.25=#aaaYaa' }
it 'raises MalformedError' do
- expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, "Expected the second character of a hex pair, but got \"Y\"")
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::DN::MalformedError, "Expected the second character of a hex pair, but got \"Y\"")
end
end
@@ -149,7 +149,7 @@ describe Gitlab::Auth::LDAP::DN do
let(:given) { 'uid="Sebasti\\cX\\a1n"' }
it 'raises MalformedError' do
- expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, "Expected the second character of a hex pair inside a double quoted value, but got \"X\"")
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::DN::MalformedError, "Expected the second character of a hex pair inside a double quoted value, but got \"X\"")
end
end
@@ -157,7 +157,7 @@ describe Gitlab::Auth::LDAP::DN do
let(:given) { 'John' }
it 'raises MalformedError' do
- expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, 'DN string ended unexpectedly')
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::DN::MalformedError, 'DN string ended unexpectedly')
end
end
@@ -165,7 +165,7 @@ describe Gitlab::Auth::LDAP::DN do
let(:given) { 'cn="James' }
it 'raises MalformedError' do
- expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, 'DN string ended unexpectedly')
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::DN::MalformedError, 'DN string ended unexpectedly')
end
end
@@ -173,7 +173,7 @@ describe Gitlab::Auth::LDAP::DN do
let(:given) { 'cn=J\ames' }
it 'raises MalformedError' do
- expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, 'Invalid escaped hex code "\am"')
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::DN::MalformedError, 'Invalid escaped hex code "\am"')
end
end
@@ -181,7 +181,7 @@ describe Gitlab::Auth::LDAP::DN do
let(:given) { 'cn=\\' }
it 'raises MalformedError' do
- expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, 'DN string ended unexpectedly')
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::DN::MalformedError, 'DN string ended unexpectedly')
end
end
@@ -189,7 +189,7 @@ describe Gitlab::Auth::LDAP::DN do
let(:given) { '1.2.d=Value' }
it 'raises MalformedError' do
- expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, 'Unrecognized RDN OID attribute type name character "d"')
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::DN::MalformedError, 'Unrecognized RDN OID attribute type name character "d"')
end
end
@@ -197,7 +197,7 @@ describe Gitlab::Auth::LDAP::DN do
let(:given) { 'd1.2=Value' }
it 'raises MalformedError' do
- expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, 'Unrecognized RDN attribute type name character "."')
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::DN::MalformedError, 'Unrecognized RDN attribute type name character "."')
end
end
@@ -205,7 +205,7 @@ describe Gitlab::Auth::LDAP::DN do
let(:given) { ' -uid=John Smith' }
it 'raises MalformedError' do
- expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, 'Unrecognized first character of an RDN attribute type name "-"')
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::DN::MalformedError, 'Unrecognized first character of an RDN attribute type name "-"')
end
end
@@ -213,7 +213,7 @@ describe Gitlab::Auth::LDAP::DN do
let(:given) { 'uid\\=john' }
it 'raises MalformedError' do
- expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, 'Unrecognized RDN attribute type name character "\\"')
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::DN::MalformedError, 'Unrecognized RDN attribute type name character "\\"')
end
end
end
diff --git a/spec/lib/gitlab/auth/ldap/person_spec.rb b/spec/lib/gitlab/auth/ldap/person_spec.rb
index 985732e69f9..e90917cfce1 100644
--- a/spec/lib/gitlab/auth/ldap/person_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/person_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::Auth::LDAP::Person do
+describe Gitlab::Auth::Ldap::Person do
include LdapHelpers
let(:entry) { ldap_user_entry('john.doe') }
@@ -61,7 +61,7 @@ describe Gitlab::Auth::LDAP::Person do
}
}
)
- config = Gitlab::Auth::LDAP::Config.new('ldapmain')
+ config = Gitlab::Auth::Ldap::Config.new('ldapmain')
ldap_attributes = described_class.ldap_attributes(config)
expect(ldap_attributes).to match_array(%w(dn uid cn mail memberof))
diff --git a/spec/lib/gitlab/auth/ldap/user_spec.rb b/spec/lib/gitlab/auth/ldap/user_spec.rb
index 071d687b2bf..867633e54df 100644
--- a/spec/lib/gitlab/auth/ldap/user_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/user_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::Auth::LDAP::User do
+describe Gitlab::Auth::Ldap::User do
include LdapHelpers
let(:ldap_user) { described_class.new(auth_hash) }
diff --git a/spec/lib/gitlab/auth/o_auth/user_spec.rb b/spec/lib/gitlab/auth/o_auth/user_spec.rb
index 022a544395e..62b83ff8b88 100644
--- a/spec/lib/gitlab/auth/o_auth/user_spec.rb
+++ b/spec/lib/gitlab/auth/o_auth/user_spec.rb
@@ -22,7 +22,7 @@ describe Gitlab::Auth::OAuth::User do
}
}
end
- let(:ldap_user) { Gitlab::Auth::LDAP::Person.new(Net::LDAP::Entry.new, 'ldapmain') }
+ let(:ldap_user) { Gitlab::Auth::Ldap::Person.new(Net::LDAP::Entry.new, 'ldapmain') }
describe '#persisted?' do
let!(:existing_user) { create(:omniauth_user, extern_uid: 'my-uid', provider: 'my-provider') }
@@ -230,7 +230,7 @@ describe Gitlab::Auth::OAuth::User do
context "and no account for the LDAP user" do
before do
- allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_uid).and_return(ldap_user)
+ allow(Gitlab::Auth::Ldap::Person).to receive(:find_by_uid).and_return(ldap_user)
oauth_user.save
end
@@ -269,7 +269,7 @@ describe Gitlab::Auth::OAuth::User do
let!(:existing_user) { create(:omniauth_user, name: 'John Doe', email: 'john@example.com', extern_uid: dn, provider: 'ldapmain', username: 'john') }
it "adds the omniauth identity to the LDAP account" do
- allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_uid).and_return(ldap_user)
+ allow(Gitlab::Auth::Ldap::Person).to receive(:find_by_uid).and_return(ldap_user)
oauth_user.save
@@ -290,8 +290,8 @@ describe Gitlab::Auth::OAuth::User do
context 'when an LDAP person is not found by uid' do
it 'tries to find an LDAP person by email and adds the omniauth identity to the user' do
- allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_uid).and_return(nil)
- allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_email).and_return(ldap_user)
+ allow(Gitlab::Auth::Ldap::Person).to receive(:find_by_uid).and_return(nil)
+ allow(Gitlab::Auth::Ldap::Person).to receive(:find_by_email).and_return(ldap_user)
oauth_user.save
@@ -301,9 +301,9 @@ describe Gitlab::Auth::OAuth::User do
context 'when also not found by email' do
it 'tries to find an LDAP person by DN and adds the omniauth identity to the user' do
- allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_uid).and_return(nil)
- allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_email).and_return(nil)
- allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_dn).and_return(ldap_user)
+ allow(Gitlab::Auth::Ldap::Person).to receive(:find_by_uid).and_return(nil)
+ allow(Gitlab::Auth::Ldap::Person).to receive(:find_by_email).and_return(nil)
+ allow(Gitlab::Auth::Ldap::Person).to receive(:find_by_dn).and_return(ldap_user)
oauth_user.save
@@ -344,7 +344,7 @@ describe Gitlab::Auth::OAuth::User do
context 'and no account for the LDAP user' do
it 'creates a user favoring the LDAP username and strips email domain' do
- allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_uid).and_return(ldap_user)
+ allow(Gitlab::Auth::Ldap::Person).to receive(:find_by_uid).and_return(ldap_user)
oauth_user.save
@@ -356,7 +356,7 @@ describe Gitlab::Auth::OAuth::User do
context "and no corresponding LDAP person" do
before do
- allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_uid).and_return(nil)
+ allow(Gitlab::Auth::Ldap::Person).to receive(:find_by_uid).and_return(nil)
end
include_examples "to verify compliance with allow_single_sign_on"
@@ -405,13 +405,13 @@ describe Gitlab::Auth::OAuth::User do
allow(ldap_user).to receive(:username) { uid }
allow(ldap_user).to receive(:email) { ['johndoe@example.com', 'john2@example.com'] }
allow(ldap_user).to receive(:dn) { dn }
- allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_uid).and_return(ldap_user)
+ allow(Gitlab::Auth::Ldap::Person).to receive(:find_by_uid).and_return(ldap_user)
end
context "and no account for the LDAP user" do
context 'dont block on create (LDAP)' do
before do
- allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
+ allow_next_instance_of(Gitlab::Auth::Ldap::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: false)
end
end
@@ -425,7 +425,7 @@ describe Gitlab::Auth::OAuth::User do
context 'block on create (LDAP)' do
before do
- allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
+ allow_next_instance_of(Gitlab::Auth::Ldap::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: true)
end
end
@@ -443,7 +443,7 @@ describe Gitlab::Auth::OAuth::User do
context 'dont block on create (LDAP)' do
before do
- allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
+ allow_next_instance_of(Gitlab::Auth::Ldap::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: false)
end
end
@@ -457,7 +457,7 @@ describe Gitlab::Auth::OAuth::User do
context 'block on create (LDAP)' do
before do
- allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
+ allow_next_instance_of(Gitlab::Auth::Ldap::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: true)
end
end
@@ -503,7 +503,7 @@ describe Gitlab::Auth::OAuth::User do
context 'dont block on create (LDAP)' do
before do
- allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
+ allow_next_instance_of(Gitlab::Auth::Ldap::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: false)
end
end
@@ -517,7 +517,7 @@ describe Gitlab::Auth::OAuth::User do
context 'block on create (LDAP)' do
before do
- allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
+ allow_next_instance_of(Gitlab::Auth::Ldap::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: true)
end
end
@@ -806,7 +806,7 @@ describe Gitlab::Auth::OAuth::User do
end
it 'returns nil' do
- adapter = Gitlab::Auth::LDAP::Adapter.new('ldapmain')
+ adapter = Gitlab::Auth::Ldap::Adapter.new('ldapmain')
hash = OmniAuth::AuthHash.new(uid: 'whatever', provider: 'ldapmain')
expect(oauth_user.send(:find_ldap_person, hash, adapter)).to be_nil
diff --git a/spec/lib/gitlab/auth/saml/user_spec.rb b/spec/lib/gitlab/auth/saml/user_spec.rb
index 4c400636ddf..55d2f22b923 100644
--- a/spec/lib/gitlab/auth/saml/user_spec.rb
+++ b/spec/lib/gitlab/auth/saml/user_spec.rb
@@ -19,7 +19,7 @@ describe Gitlab::Auth::Saml::User do
email: 'john@mail.com'
}
end
- let(:ldap_user) { Gitlab::Auth::LDAP::Person.new(Net::LDAP::Entry.new, 'ldapmain') }
+ let(:ldap_user) { Gitlab::Auth::Ldap::Person.new(Net::LDAP::Entry.new, 'ldapmain') }
describe '#save' do
before do
@@ -161,10 +161,10 @@ describe Gitlab::Auth::Saml::User do
allow(ldap_user).to receive(:username) { uid }
allow(ldap_user).to receive(:email) { %w(john@mail.com john2@example.com) }
allow(ldap_user).to receive(:dn) { dn }
- allow(Gitlab::Auth::LDAP::Adapter).to receive(:new).and_return(adapter)
- allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_uid).with(uid, adapter).and_return(ldap_user)
- allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_dn).with(dn, adapter).and_return(ldap_user)
- allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_email).with('john@mail.com', adapter).and_return(ldap_user)
+ allow(Gitlab::Auth::Ldap::Adapter).to receive(:new).and_return(adapter)
+ allow(Gitlab::Auth::Ldap::Person).to receive(:find_by_uid).with(uid, adapter).and_return(ldap_user)
+ allow(Gitlab::Auth::Ldap::Person).to receive(:find_by_dn).with(dn, adapter).and_return(ldap_user)
+ allow(Gitlab::Auth::Ldap::Person).to receive(:find_by_email).with('john@mail.com', adapter).and_return(ldap_user)
end
context 'and no account for the LDAP user' do
@@ -212,10 +212,10 @@ describe Gitlab::Auth::Saml::User do
nil_types = uid_types - [uid_type]
nil_types.each do |type|
- allow(Gitlab::Auth::LDAP::Person).to receive(:"find_by_#{type}").and_return(nil)
+ allow(Gitlab::Auth::Ldap::Person).to receive(:"find_by_#{type}").and_return(nil)
end
- allow(Gitlab::Auth::LDAP::Person).to receive(:"find_by_#{uid_type}").and_return(ldap_user)
+ allow(Gitlab::Auth::Ldap::Person).to receive(:"find_by_#{uid_type}").and_return(ldap_user)
end
it 'adds the omniauth identity to the LDAP account' do
@@ -282,7 +282,7 @@ describe Gitlab::Auth::Saml::User do
it 'adds the LDAP identity to the existing SAML user' do
create(:omniauth_user, email: 'john@mail.com', extern_uid: dn, provider: 'saml', username: 'john')
- allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_uid).with(dn, adapter).and_return(ldap_user)
+ allow(Gitlab::Auth::Ldap::Person).to receive(:find_by_uid).with(dn, adapter).and_return(ldap_user)
local_hash = OmniAuth::AuthHash.new(uid: dn, provider: provider, info: info_hash)
local_saml_user = described_class.new(local_hash)
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index ed763f63756..e0c1f830165 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -3,10 +3,9 @@
require 'spec_helper'
describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
+ let_it_be(:project) { create(:project) }
let(:gl_auth) { described_class }
- set(:project) { create(:project) }
-
describe 'constants' do
it 'API_SCOPES contains all scopes for API access' do
expect(subject::API_SCOPES).to eq %i[api read_user]
@@ -591,23 +590,23 @@ describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
context "with ldap enabled" do
before do
- allow(Gitlab::Auth::LDAP::Config).to receive(:enabled?).and_return(true)
+ allow(Gitlab::Auth::Ldap::Config).to receive(:enabled?).and_return(true)
end
it "tries to autheticate with db before ldap" do
- expect(Gitlab::Auth::LDAP::Authentication).not_to receive(:login)
+ expect(Gitlab::Auth::Ldap::Authentication).not_to receive(:login)
expect(gl_auth.find_with_user_password(username, password)).to eq(user)
end
it "does not find user by using ldap as fallback to for authentication" do
- expect(Gitlab::Auth::LDAP::Authentication).to receive(:login).and_return(nil)
+ expect(Gitlab::Auth::Ldap::Authentication).to receive(:login).and_return(nil)
expect(gl_auth.find_with_user_password('ldap_user', 'password')).to be_nil
end
it "find new user by using ldap as fallback to for authentication" do
- expect(Gitlab::Auth::LDAP::Authentication).to receive(:login).and_return(user)
+ expect(Gitlab::Auth::Ldap::Authentication).to receive(:login).and_return(user)
expect(gl_auth.find_with_user_password('ldap_user', 'password')).to eq(user)
end
@@ -624,7 +623,7 @@ describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
context "with ldap enabled" do
before do
- allow(Gitlab::Auth::LDAP::Config).to receive(:enabled?).and_return(true)
+ allow(Gitlab::Auth::Ldap::Config).to receive(:enabled?).and_return(true)
end
it "does not find non-ldap user by valid login/password" do
diff --git a/spec/lib/gitlab/authorized_keys_spec.rb b/spec/lib/gitlab/authorized_keys_spec.rb
index adf36cf1050..d89eb9ef114 100644
--- a/spec/lib/gitlab/authorized_keys_spec.rb
+++ b/spec/lib/gitlab/authorized_keys_spec.rb
@@ -162,10 +162,10 @@ describe Gitlab::AuthorizedKeys do
end
end
- describe '#rm_key' do
+ describe '#remove_key' do
let(:key) { 'key-741' }
- subject { authorized_keys.rm_key(key) }
+ subject { authorized_keys.remove_key(key) }
context 'authorized_keys file exists' do
let(:other_line) { "command=\"#{Gitlab.config.gitlab_shell.path}/bin/gitlab-shell key-742\",options ssh-rsa AAAAB3NzaDAxx2E" }
diff --git a/spec/lib/gitlab/background_migration/add_merge_request_diff_commits_count_spec.rb b/spec/lib/gitlab/background_migration/add_merge_request_diff_commits_count_spec.rb
index e299e2a366f..b77c67b120f 100644
--- a/spec/lib/gitlab/background_migration/add_merge_request_diff_commits_count_spec.rb
+++ b/spec/lib/gitlab/background_migration/add_merge_request_diff_commits_count_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::AddMergeRequestDiffCommitsCount, :migration, schema: 20180105212544 do
+describe Gitlab::BackgroundMigration::AddMergeRequestDiffCommitsCount, schema: 20180105212544 do
let(:projects_table) { table(:projects) }
let(:merge_requests_table) { table(:merge_requests) }
let(:merge_request_diffs_table) { table(:merge_request_diffs) }
diff --git a/spec/lib/gitlab/background_migration/archive_legacy_traces_spec.rb b/spec/lib/gitlab/background_migration/archive_legacy_traces_spec.rb
index 2a7cffb2f3e..5c8dcb38511 100644
--- a/spec/lib/gitlab/background_migration/archive_legacy_traces_spec.rb
+++ b/spec/lib/gitlab/background_migration/archive_legacy_traces_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::ArchiveLegacyTraces, :migration, schema: 20180529152628 do
+describe Gitlab::BackgroundMigration::ArchiveLegacyTraces, schema: 20180529152628 do
include TraceHelpers
let(:namespaces) { table(:namespaces) }
diff --git a/spec/lib/gitlab/background_migration/backfill_hashed_project_repositories_spec.rb b/spec/lib/gitlab/background_migration/backfill_hashed_project_repositories_spec.rb
index e802613490b..f64c3ccc058 100644
--- a/spec/lib/gitlab/background_migration/backfill_hashed_project_repositories_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_hashed_project_repositories_spec.rb
@@ -2,6 +2,6 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::BackfillHashedProjectRepositories, :migration, schema: 20181130102132 do
+describe Gitlab::BackgroundMigration::BackfillHashedProjectRepositories, schema: 20181130102132 do
it_behaves_like 'backfill migration for project repositories', :hashed
end
diff --git a/spec/lib/gitlab/background_migration/backfill_legacy_project_repositories_spec.rb b/spec/lib/gitlab/background_migration/backfill_legacy_project_repositories_spec.rb
index 947c99b860f..806d044ab40 100644
--- a/spec/lib/gitlab/background_migration/backfill_legacy_project_repositories_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_legacy_project_repositories_spec.rb
@@ -2,6 +2,6 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::BackfillLegacyProjectRepositories, :migration, schema: 20181212171634 do
+describe Gitlab::BackgroundMigration::BackfillLegacyProjectRepositories, schema: 20181212171634 do
it_behaves_like 'backfill migration for project repositories', :legacy
end
diff --git a/spec/lib/gitlab/background_migration/backfill_project_fullpath_in_repo_config_spec.rb b/spec/lib/gitlab/background_migration/backfill_project_fullpath_in_repo_config_spec.rb
index 4714712f733..91ede05f395 100644
--- a/spec/lib/gitlab/background_migration/backfill_project_fullpath_in_repo_config_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_project_fullpath_in_repo_config_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::BackfillProjectFullpathInRepoConfig, :migration, schema: 20181010133639 do
+describe Gitlab::BackgroundMigration::BackfillProjectFullpathInRepoConfig, schema: 20181010133639 do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:group) { namespaces.create!(name: 'foo', path: 'foo') }
diff --git a/spec/lib/gitlab/background_migration/backfill_project_settings_spec.rb b/spec/lib/gitlab/background_migration/backfill_project_settings_spec.rb
index 718109bb720..a2b4e003d82 100644
--- a/spec/lib/gitlab/background_migration/backfill_project_settings_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_project_settings_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::BackfillProjectSettings, :migration, schema: 20200114113341 do
+describe Gitlab::BackgroundMigration::BackfillProjectSettings, schema: 20200114113341 do
let(:projects) { table(:projects) }
let(:project_settings) { table(:project_settings) }
let(:namespace) { table(:namespaces).create(name: 'user', path: 'user') }
diff --git a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
new file mode 100644
index 00000000000..08d3b7bec6a
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
@@ -0,0 +1,154 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migration, schema: 2020_02_26_162723 do
+ let(:gitlab_shell) { Gitlab::Shell.new }
+ let(:users) { table(:users) }
+ let(:snippets) { table(:snippets) }
+ let(:snippet_repositories) { table(:snippet_repositories) }
+
+ let(:user) { users.create(id: 1, email: 'user@example.com', projects_limit: 10, username: 'test', name: 'Test') }
+ let!(:snippet_with_repo) { snippets.create(id: 1, type: 'PersonalSnippet', author_id: user.id, file_name: file_name, content: content) }
+ let!(:snippet_with_empty_repo) { snippets.create(id: 2, type: 'PersonalSnippet', author_id: user.id, file_name: file_name, content: content) }
+ let!(:snippet_without_repo) { snippets.create(id: 3, type: 'PersonalSnippet', author_id: user.id, file_name: file_name, content: content) }
+
+ let(:file_name) { 'file_name.rb' }
+ let(:content) { 'content' }
+ let(:ids) { snippets.pluck('MIN(id)', 'MAX(id)').first }
+ let(:service) { described_class.new }
+
+ subject { service.perform(*ids) }
+
+ before do
+ allow(snippet_with_repo).to receive(:disk_path).and_return(disk_path(snippet_with_repo))
+
+ TestEnv.copy_repo(snippet_with_repo,
+ bare_repo: TestEnv.factory_repo_path_bare,
+ refs: TestEnv::BRANCH_SHA)
+
+ raw_repository(snippet_with_empty_repo).create_repository
+ end
+
+ after do
+ raw_repository(snippet_with_repo).remove
+ raw_repository(snippet_without_repo).remove
+ raw_repository(snippet_with_empty_repo).remove
+ end
+
+ describe '#perform' do
+    it 'logs successfully migrated snippets' do
+ expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
+ expect(instance).to receive(:info).exactly(3).times
+ end
+
+ subject
+ end
+
+ context 'when snippet has a non empty repository' do
+ it 'does not perform any action' do
+ expect(service).not_to receive(:create_repository_and_files).with(snippet_with_repo)
+
+ subject
+ end
+ end
+
+ shared_examples 'commits the file to the repository' do
+ it do
+ subject
+
+ blob = blob_at(snippet, file_name)
+
+ aggregate_failures do
+ expect(blob).to be
+ expect(blob.data).to eq content
+ end
+ end
+ end
+
+ context 'when snippet has an empty repo' do
+ before do
+ expect(repository_exists?(snippet_with_empty_repo)).to be_truthy
+ end
+
+ it_behaves_like 'commits the file to the repository' do
+ let(:snippet) { snippet_with_empty_repo }
+ end
+ end
+
+ context 'when snippet does not have a repository' do
+ it 'creates the repository' do
+ expect { subject }.to change { repository_exists?(snippet_without_repo) }.from(false).to(true)
+ end
+
+ it_behaves_like 'commits the file to the repository' do
+ let(:snippet) { snippet_without_repo }
+ end
+ end
+
+ context 'when an error is raised' do
+ before do
+ allow(service).to receive(:create_commit).and_raise(StandardError)
+ end
+
+ it 'logs errors' do
+ expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
+ expect(instance).to receive(:error).exactly(3).times
+ end
+
+ subject
+ end
+
+ it "retries #{described_class::MAX_RETRIES} times the operation if it fails" do
+ expect(service).to receive(:create_commit).exactly(snippets.count * described_class::MAX_RETRIES).times
+
+ subject
+ end
+
+ it 'destroys the snippet repository' do
+ expect(service).to receive(:destroy_snippet_repository).exactly(3).times.and_call_original
+
+ subject
+
+ expect(snippet_repositories.count).to eq 0
+ end
+
+ it 'deletes the repository on disk' do
+ subject
+
+ aggregate_failures do
+ expect(repository_exists?(snippet_with_repo)).to be_falsey
+ expect(repository_exists?(snippet_without_repo)).to be_falsey
+ expect(repository_exists?(snippet_with_empty_repo)).to be_falsey
+ end
+ end
+ end
+ end
+
+ def blob_at(snippet, path)
+ raw_repository(snippet).blob_at('master', path)
+ end
+
+ def repository_exists?(snippet)
+ gitlab_shell.repository_exists?('default', "#{disk_path(snippet)}.git")
+ end
+
+ def raw_repository(snippet)
+ Gitlab::Git::Repository.new('default',
+ "#{disk_path(snippet)}.git",
+ Gitlab::GlRepository::SNIPPET.identifier_for_container(snippet),
+ "@snippets/#{snippet.id}")
+ end
+
+ def hashed_repository(snippet)
+ Storage::Hashed.new(snippet, prefix: '@snippets')
+ end
+
+ def disk_path(snippet)
+ hashed_repository(snippet).disk_path
+ end
+
+ def ls_files(snippet)
+ raw_repository(snippet).ls_files(nil)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/digest_column_spec.rb b/spec/lib/gitlab/background_migration/digest_column_spec.rb
index a25dcb06005..0c76ebe9c66 100644
--- a/spec/lib/gitlab/background_migration/digest_column_spec.rb
+++ b/spec/lib/gitlab/background_migration/digest_column_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::DigestColumn, :migration, schema: 20180913142237 do
+describe Gitlab::BackgroundMigration::DigestColumn, schema: 20180913142237 do
let(:personal_access_tokens) { table(:personal_access_tokens) }
let(:users) { table(:users) }
diff --git a/spec/lib/gitlab/background_migration/encrypt_columns_spec.rb b/spec/lib/gitlab/background_migration/encrypt_columns_spec.rb
index 3c2ed6d3a6d..6d3ccde7df2 100644
--- a/spec/lib/gitlab/background_migration/encrypt_columns_spec.rb
+++ b/spec/lib/gitlab/background_migration/encrypt_columns_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::EncryptColumns, :migration, schema: 20180910115836 do
+describe Gitlab::BackgroundMigration::EncryptColumns, schema: 20180910115836 do
let(:model) { Gitlab::BackgroundMigration::Models::EncryptColumns::WebHook }
let(:web_hooks) { table(:web_hooks) }
diff --git a/spec/lib/gitlab/background_migration/encrypt_runners_tokens_spec.rb b/spec/lib/gitlab/background_migration/encrypt_runners_tokens_spec.rb
index 54af9807e7b..89262788d9b 100644
--- a/spec/lib/gitlab/background_migration/encrypt_runners_tokens_spec.rb
+++ b/spec/lib/gitlab/background_migration/encrypt_runners_tokens_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::EncryptRunnersTokens, :migration, schema: 20181121111200 do
+describe Gitlab::BackgroundMigration::EncryptRunnersTokens, schema: 20181121111200 do
let(:settings) { table(:application_settings) }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
diff --git a/spec/lib/gitlab/background_migration/fix_cross_project_label_links_spec.rb b/spec/lib/gitlab/background_migration/fix_cross_project_label_links_spec.rb
index f3127cbf5df..cc4ce023f04 100644
--- a/spec/lib/gitlab/background_migration/fix_cross_project_label_links_spec.rb
+++ b/spec/lib/gitlab/background_migration/fix_cross_project_label_links_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::FixCrossProjectLabelLinks, :migration, schema: 20180702120647 do
+describe Gitlab::BackgroundMigration::FixCrossProjectLabelLinks, schema: 20180702120647 do
let(:namespaces_table) { table(:namespaces) }
let(:projects_table) { table(:projects) }
let(:issues_table) { table(:issues) }
diff --git a/spec/lib/gitlab/background_migration/fix_projects_without_project_feature_spec.rb b/spec/lib/gitlab/background_migration/fix_projects_without_project_feature_spec.rb
index 0dca542cb9f..056ddd7adf9 100644
--- a/spec/lib/gitlab/background_migration/fix_projects_without_project_feature_spec.rb
+++ b/spec/lib/gitlab/background_migration/fix_projects_without_project_feature_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::FixProjectsWithoutProjectFeature, :migration, schema: 2020_01_27_111840 do
+describe Gitlab::BackgroundMigration::FixProjectsWithoutProjectFeature, schema: 2020_01_27_111840 do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:project_features) { table(:project_features) }
diff --git a/spec/lib/gitlab/background_migration/fix_promoted_epics_discussion_ids_spec.rb b/spec/lib/gitlab/background_migration/fix_promoted_epics_discussion_ids_spec.rb
index 73c855ac184..141a0af6c29 100644
--- a/spec/lib/gitlab/background_migration/fix_promoted_epics_discussion_ids_spec.rb
+++ b/spec/lib/gitlab/background_migration/fix_promoted_epics_discussion_ids_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::FixPromotedEpicsDiscussionIds, :migration, schema: 20190715193142 do
+describe Gitlab::BackgroundMigration::FixPromotedEpicsDiscussionIds, schema: 20190715193142 do
let(:namespaces) { table(:namespaces) }
let(:users) { table(:users) }
let(:epics) { table(:epics) }
diff --git a/spec/lib/gitlab/background_migration/fix_user_namespace_names_spec.rb b/spec/lib/gitlab/background_migration/fix_user_namespace_names_spec.rb
index 5938ecca459..52760cdd115 100644
--- a/spec/lib/gitlab/background_migration/fix_user_namespace_names_spec.rb
+++ b/spec/lib/gitlab/background_migration/fix_user_namespace_names_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::FixUserNamespaceNames, :migration, schema: 20190620112608 do
+describe Gitlab::BackgroundMigration::FixUserNamespaceNames, schema: 20190620112608 do
let(:namespaces) { table(:namespaces) }
let(:users) { table(:users) }
let(:user) { users.create(name: "The user's full name", projects_limit: 10, username: 'not-null', email: '1') }
diff --git a/spec/lib/gitlab/background_migration/fix_user_project_route_names_spec.rb b/spec/lib/gitlab/background_migration/fix_user_project_route_names_spec.rb
index d1d6d8411d1..0fb7eea2bd7 100644
--- a/spec/lib/gitlab/background_migration/fix_user_project_route_names_spec.rb
+++ b/spec/lib/gitlab/background_migration/fix_user_project_route_names_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::FixUserProjectRouteNames, :migration, schema: 20190620112608 do
+describe Gitlab::BackgroundMigration::FixUserProjectRouteNames, schema: 20190620112608 do
let(:namespaces) { table(:namespaces) }
let(:users) { table(:users) }
let(:routes) { table(:routes) }
diff --git a/spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb b/spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb
index f2de73d5aea..850ef48d44a 100644
--- a/spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb
+++ b/spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb
@@ -149,10 +149,12 @@ describe Gitlab::BackgroundMigration::LegacyUploadMover do
context 'when an upload belongs to a legacy_diff_note' do
let!(:merge_request) { create(:merge_request, source_project: project) }
+
let!(:note) do
create(:legacy_diff_note_on_merge_request,
note: 'some note', project: project, noteable: merge_request)
end
+
let(:legacy_upload) do
create(:upload, :with_file, :attachment_upload,
path: "uploads/-/system/note/attachment/#{note.id}/#{filename}", model: note)
@@ -193,6 +195,17 @@ describe Gitlab::BackgroundMigration::LegacyUploadMover do
it_behaves_like 'move error'
end
+ context 'when upload has mount_point nil' do
+ let(:legacy_upload) do
+ create(:upload, :with_file, :attachment_upload,
+ path: "uploads/-/system/note/attachment/#{note.id}/#{filename}", model: note, mount_point: nil)
+ end
+
+ it_behaves_like 'migrates the file correctly'
+ it_behaves_like 'legacy local file'
+ it_behaves_like 'legacy upload deletion'
+ end
+
context 'when the file can be handled correctly' do
it_behaves_like 'migrates the file correctly'
it_behaves_like 'legacy local file'
diff --git a/spec/lib/gitlab/background_migration/link_lfs_objects_projects_spec.rb b/spec/lib/gitlab/background_migration/link_lfs_objects_projects_spec.rb
new file mode 100644
index 00000000000..5700cac2e0f
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/link_lfs_objects_projects_spec.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::LinkLfsObjectsProjects, :migration, schema: 2020_03_10_075115 do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:fork_networks) { table(:fork_networks) }
+ let(:fork_network_members) { table(:fork_network_members) }
+ let(:lfs_objects) { table(:lfs_objects) }
+ let(:lfs_objects_projects) { table(:lfs_objects_projects) }
+
+ let(:namespace) { namespaces.create(name: 'GitLab', path: 'gitlab') }
+
+ let(:fork_network) { fork_networks.create(root_project_id: source_project.id) }
+ let(:another_fork_network) { fork_networks.create(root_project_id: another_source_project.id) }
+
+ let(:source_project) { projects.create(namespace_id: namespace.id) }
+ let(:another_source_project) { projects.create(namespace_id: namespace.id) }
+ let(:project) { projects.create(namespace_id: namespace.id) }
+ let(:another_project) { projects.create(namespace_id: namespace.id) }
+ let(:partially_linked_project) { projects.create(namespace_id: namespace.id) }
+ let(:fully_linked_project) { projects.create(namespace_id: namespace.id) }
+
+ let(:lfs_object) { lfs_objects.create(oid: 'abc123', size: 100) }
+ let(:another_lfs_object) { lfs_objects.create(oid: 'def456', size: 200) }
+
+ let!(:source_project_lop_1) do
+ lfs_objects_projects.create(
+ lfs_object_id: lfs_object.id,
+ project_id: source_project.id
+ )
+ end
+
+ let!(:source_project_lop_2) do
+ lfs_objects_projects.create(
+ lfs_object_id: another_lfs_object.id,
+ project_id: source_project.id
+ )
+ end
+
+ let!(:another_source_project_lop_1) do
+ lfs_objects_projects.create(
+ lfs_object_id: lfs_object.id,
+ project_id: another_source_project.id
+ )
+ end
+
+ let!(:another_source_project_lop_2) do
+ lfs_objects_projects.create(
+ lfs_object_id: another_lfs_object.id,
+ project_id: another_source_project.id
+ )
+ end
+
+ before do
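+ # Stub a small batch size so the migration exercises its batching logic across more than one batch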
+ stub_const("#{described_class}::BATCH_SIZE", 2)
+
+ # Create links between projects
+ fork_network_members.create(fork_network_id: fork_network.id, project_id: source_project.id, forked_from_project_id: nil)
+
+ [project, partially_linked_project, fully_linked_project].each do |p|
+ fork_network_members.create(
+ fork_network_id: fork_network.id,
+ project_id: p.id,
+ forked_from_project_id: fork_network.root_project_id
+ )
+ end
+
+ fork_network_members.create(fork_network_id: another_fork_network.id, project_id: another_source_project.id, forked_from_project_id: nil)
+ fork_network_members.create(fork_network_id: another_fork_network.id, project_id: another_project.id, forked_from_project_id: another_fork_network.root_project_id)
+
+ # Link LFS objects to some projects
+ lfs_objects_projects.create(lfs_object_id: lfs_object.id, project_id: fully_linked_project.id)
+ lfs_objects_projects.create(lfs_object_id: another_lfs_object.id, project_id: fully_linked_project.id)
+ lfs_objects_projects.create(lfs_object_id: lfs_object.id, project_id: partially_linked_project.id)
+ end
+
+ context 'when there are LFS objects to be linked' do
+ it 'creates LfsObjectsProject records for forks based on the specified range of LfsObjectsProject ids' do
+ expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |logger|
+ expect(logger).to receive(:info).exactly(4).times
+ end
+
+ expect { subject.perform(source_project_lop_1.id, another_source_project_lop_2.id) }.to change { lfs_objects_projects.count }.by(5)
+
+ expect(lfs_object_ids_for(project)).to match_array(lfs_object_ids_for(source_project))
+ expect(lfs_object_ids_for(another_project)).to match_array(lfs_object_ids_for(another_source_project))
+ expect(lfs_object_ids_for(partially_linked_project)).to match_array(lfs_object_ids_for(source_project))
+
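+ # running the migration again for the same range should be a no-op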
+ expect { subject.perform(source_project_lop_1.id, another_source_project_lop_2.id) }.not_to change { lfs_objects_projects.count }
+ end
+ end
+
+ context 'when there are no LFS objects to be linked' do
+ before do
+ # Link LFS objects to all projects
+ projects.all.each do |p|
+ lfs_objects_projects.create(lfs_object_id: lfs_object.id, project_id: p.id)
+ lfs_objects_projects.create(lfs_object_id: another_lfs_object.id, project_id: p.id)
+ end
+ end
+
+ it 'does not create LfsObjectsProject records' do
+ expect { subject.perform(source_project_lop_1.id, another_source_project_lop_2.id) }
+ .not_to change { lfs_objects_projects.count }
+ end
+ end
+
+ def lfs_object_ids_for(project)
+ lfs_objects_projects.where(project_id: project.id).pluck(:lfs_object_id)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/migrate_build_stage_spec.rb b/spec/lib/gitlab/background_migration/migrate_build_stage_spec.rb
index a496f8416bf..adf358f5320 100644
--- a/spec/lib/gitlab/background_migration/migrate_build_stage_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_build_stage_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::MigrateBuildStage, :migration, schema: 20180212101928 do
+describe Gitlab::BackgroundMigration::MigrateBuildStage, schema: 20180212101928 do
let(:projects) { table(:projects) }
let(:pipelines) { table(:ci_pipelines) }
let(:stages) { table(:ci_stages) }
diff --git a/spec/lib/gitlab/background_migration/migrate_fingerprint_sha256_within_keys_spec.rb b/spec/lib/gitlab/background_migration/migrate_fingerprint_sha256_within_keys_spec.rb
index 3ccb2379936..79a8cd926a7 100644
--- a/spec/lib/gitlab/background_migration/migrate_fingerprint_sha256_within_keys_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_fingerprint_sha256_within_keys_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::MigrateFingerprintSha256WithinKeys, :migration, schema: 20200106071113 do
+describe Gitlab::BackgroundMigration::MigrateFingerprintSha256WithinKeys, schema: 20200106071113 do
subject(:fingerprint_migrator) { described_class.new }
let(:key_table) { table(:keys) }
diff --git a/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb b/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb
index 4c70877befe..7dae28f72a5 100644
--- a/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-describe Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData, :migration, schema: 20200130145430 do
+describe Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData, schema: 20200130145430 do
let(:services) { table(:services) }
# we need to define the classes due to encryption
diff --git a/spec/lib/gitlab/background_migration/migrate_legacy_artifacts_spec.rb b/spec/lib/gitlab/background_migration/migrate_legacy_artifacts_spec.rb
index 268626d58fd..5f2a27acd9b 100644
--- a/spec/lib/gitlab/background_migration/migrate_legacy_artifacts_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_legacy_artifacts_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::MigrateLegacyArtifacts, :migration, schema: 20180816161409 do
+describe Gitlab::BackgroundMigration::MigrateLegacyArtifacts, schema: 20180816161409 do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:pipelines) { table(:ci_pipelines) }
diff --git a/spec/lib/gitlab/background_migration/migrate_null_private_profile_to_false_spec.rb b/spec/lib/gitlab/background_migration/migrate_null_private_profile_to_false_spec.rb
index c45c64f6a23..ff88d2a5d00 100644
--- a/spec/lib/gitlab/background_migration/migrate_null_private_profile_to_false_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_null_private_profile_to_false_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::MigrateNullPrivateProfileToFalse, :migration, schema: 20190620105427 do
+describe Gitlab::BackgroundMigration::MigrateNullPrivateProfileToFalse, schema: 20190620105427 do
let(:users) { table(:users) }
it 'correctly migrates nil private_profile to false' do
diff --git a/spec/lib/gitlab/background_migration/migrate_pages_metadata_spec.rb b/spec/lib/gitlab/background_migration/migrate_pages_metadata_spec.rb
index d94a312f605..10a1d4ee1b9 100644
--- a/spec/lib/gitlab/background_migration/migrate_pages_metadata_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_pages_metadata_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::MigratePagesMetadata, :migration, schema: 20190919040324 do
+describe Gitlab::BackgroundMigration::MigratePagesMetadata, schema: 20190919040324 do
let(:projects) { table(:projects) }
subject(:migrate_pages_metadata) { described_class.new }
diff --git a/spec/lib/gitlab/background_migration/migrate_stage_index_spec.rb b/spec/lib/gitlab/background_migration/migrate_stage_index_spec.rb
index 1a8b0355fd9..437be125cf0 100644
--- a/spec/lib/gitlab/background_migration/migrate_stage_index_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_stage_index_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::MigrateStageIndex, :migration, schema: 20180420080616 do
+describe Gitlab::BackgroundMigration::MigrateStageIndex, schema: 20180420080616 do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:pipelines) { table(:ci_pipelines) }
diff --git a/spec/lib/gitlab/background_migration/populate_cluster_kubernetes_namespace_table_spec.rb b/spec/lib/gitlab/background_migration/populate_cluster_kubernetes_namespace_table_spec.rb
index 128e118ac17..d445858b8e8 100644
--- a/spec/lib/gitlab/background_migration/populate_cluster_kubernetes_namespace_table_spec.rb
+++ b/spec/lib/gitlab/background_migration/populate_cluster_kubernetes_namespace_table_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::PopulateClusterKubernetesNamespaceTable, :migration, schema: 20181022173835 do
+describe Gitlab::BackgroundMigration::PopulateClusterKubernetesNamespaceTable, schema: 20181022173835 do
include MigrationHelpers::ClusterHelpers
let(:migration) { described_class.new }
diff --git a/spec/lib/gitlab/background_migration/populate_merge_request_assignees_table_spec.rb b/spec/lib/gitlab/background_migration/populate_merge_request_assignees_table_spec.rb
index c1f6871a568..e65b3549de0 100644
--- a/spec/lib/gitlab/background_migration/populate_merge_request_assignees_table_spec.rb
+++ b/spec/lib/gitlab/background_migration/populate_merge_request_assignees_table_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::PopulateMergeRequestAssigneesTable, :migration, schema: 20190315191339 do
+describe Gitlab::BackgroundMigration::PopulateMergeRequestAssigneesTable, schema: 20190315191339 do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:users) { table(:users) }
diff --git a/spec/lib/gitlab/background_migration/populate_untracked_uploads_dependencies/untracked_file_spec.rb b/spec/lib/gitlab/background_migration/populate_untracked_uploads_dependencies/untracked_file_spec.rb
index ea1eaa6417d..0250ebd7759 100644
--- a/spec/lib/gitlab/background_migration/populate_untracked_uploads_dependencies/untracked_file_spec.rb
+++ b/spec/lib/gitlab/background_migration/populate_untracked_uploads_dependencies/untracked_file_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
# Rollback DB to 10.5 (later than this was originally written for) because it still needs to work.
-describe Gitlab::BackgroundMigration::PopulateUntrackedUploadsDependencies::UntrackedFile, :migration, schema: 20180208183958 do
+describe Gitlab::BackgroundMigration::PopulateUntrackedUploadsDependencies::UntrackedFile, schema: 20180208183958 do
include MigrationsHelpers::TrackUntrackedUploadsHelpers
let!(:appearances) { table(:appearances) }
diff --git a/spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb b/spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb
index f25804c3ca2..44cec112bfd 100644
--- a/spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb
+++ b/spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
# Rollback DB to 10.5 (later than this was originally written for) because it still needs to work.
-describe Gitlab::BackgroundMigration::PopulateUntrackedUploads, :migration, schema: 20180208183958 do
+describe Gitlab::BackgroundMigration::PopulateUntrackedUploads, schema: 20180208183958 do
include MigrationsHelpers::TrackUntrackedUploadsHelpers
subject { described_class.new }
diff --git a/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb b/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb
index 9072431e8f6..2957d0bed15 100644
--- a/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb
+++ b/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
# Rollback DB to 10.5 (later than this was originally written for) because it still needs to work.
-describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :migration, schema: 20180208183958 do
+describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, schema: 20180208183958 do
include MigrationsHelpers::TrackUntrackedUploadsHelpers
let!(:untracked_files_for_uploads) { table(:untracked_files_for_uploads) }
diff --git a/spec/lib/gitlab/background_migration/recalculate_project_authorizations_spec.rb b/spec/lib/gitlab/background_migration/recalculate_project_authorizations_spec.rb
index 1ef2c451aa2..4699cc42b38 100644
--- a/spec/lib/gitlab/background_migration/recalculate_project_authorizations_spec.rb
+++ b/spec/lib/gitlab/background_migration/recalculate_project_authorizations_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::RecalculateProjectAuthorizations, :migration, schema: 20200204113223 do
+describe Gitlab::BackgroundMigration::RecalculateProjectAuthorizations, schema: 20200204113223 do
let(:users_table) { table(:users) }
let(:namespaces_table) { table(:namespaces) }
let(:projects_table) { table(:projects) }
diff --git a/spec/lib/gitlab/background_migration/recalculate_project_authorizations_with_min_max_user_id_spec.rb b/spec/lib/gitlab/background_migration/recalculate_project_authorizations_with_min_max_user_id_spec.rb
index 14ba57eecbf..edb46efad7c 100644
--- a/spec/lib/gitlab/background_migration/recalculate_project_authorizations_with_min_max_user_id_spec.rb
+++ b/spec/lib/gitlab/background_migration/recalculate_project_authorizations_with_min_max_user_id_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::RecalculateProjectAuthorizationsWithMinMaxUserId, :migration, schema: 20200204113224 do
+describe Gitlab::BackgroundMigration::RecalculateProjectAuthorizationsWithMinMaxUserId, schema: 20200204113224 do
let(:users_table) { table(:users) }
let(:min) { 1 }
let(:max) { 5 }
diff --git a/spec/lib/gitlab/background_migration/remove_restricted_todos_spec.rb b/spec/lib/gitlab/background_migration/remove_restricted_todos_spec.rb
index dae754112dc..3de24f577ab 100644
--- a/spec/lib/gitlab/background_migration/remove_restricted_todos_spec.rb
+++ b/spec/lib/gitlab/background_migration/remove_restricted_todos_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::RemoveRestrictedTodos, :migration, schema: 20180704204006 do
+describe Gitlab::BackgroundMigration::RemoveRestrictedTodos, schema: 20180704204006 do
let(:projects) { table(:projects) }
let(:users) { table(:users) }
let(:todos) { table(:todos) }
diff --git a/spec/lib/gitlab/background_migration/reset_merge_status_spec.rb b/spec/lib/gitlab/background_migration/reset_merge_status_spec.rb
index fd35ddc49a2..40340f89448 100644
--- a/spec/lib/gitlab/background_migration/reset_merge_status_spec.rb
+++ b/spec/lib/gitlab/background_migration/reset_merge_status_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::ResetMergeStatus, :migration, schema: 20190528180441 do
+describe Gitlab::BackgroundMigration::ResetMergeStatus, schema: 20190528180441 do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:namespace) { namespaces.create(name: 'gitlab', path: 'gitlab-org') }
diff --git a/spec/lib/gitlab/background_migration/schedule_calculate_wiki_sizes_spec.rb b/spec/lib/gitlab/background_migration/schedule_calculate_wiki_sizes_spec.rb
index cee9a3e8822..387e3343ede 100644
--- a/spec/lib/gitlab/background_migration/schedule_calculate_wiki_sizes_spec.rb
+++ b/spec/lib/gitlab/background_migration/schedule_calculate_wiki_sizes_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190527194900_schedule_calculate_wiki_sizes.rb')
-describe ScheduleCalculateWikiSizes, :migration do
+describe ScheduleCalculateWikiSizes do
let(:migration_class) { Gitlab::BackgroundMigration::CalculateWikiSizes }
let(:migration_name) { migration_class.to_s.demodulize }
diff --git a/spec/lib/gitlab/background_migration/set_confidential_note_events_on_services_spec.rb b/spec/lib/gitlab/background_migration/set_confidential_note_events_on_services_spec.rb
index 3600755ada7..5ce4a322e51 100644
--- a/spec/lib/gitlab/background_migration/set_confidential_note_events_on_services_spec.rb
+++ b/spec/lib/gitlab/background_migration/set_confidential_note_events_on_services_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::SetConfidentialNoteEventsOnServices, :migration, schema: 20180122154930 do
+describe Gitlab::BackgroundMigration::SetConfidentialNoteEventsOnServices, schema: 20180122154930 do
let(:services) { table(:services) }
describe '#perform' do
diff --git a/spec/lib/gitlab/background_migration/set_confidential_note_events_on_webhooks_spec.rb b/spec/lib/gitlab/background_migration/set_confidential_note_events_on_webhooks_spec.rb
index 5cd9c02fd3f..08f1f543f5d 100644
--- a/spec/lib/gitlab/background_migration/set_confidential_note_events_on_webhooks_spec.rb
+++ b/spec/lib/gitlab/background_migration/set_confidential_note_events_on_webhooks_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::SetConfidentialNoteEventsOnWebhooks, :migration, schema: 20180104131052 do
+describe Gitlab::BackgroundMigration::SetConfidentialNoteEventsOnWebhooks, schema: 20180104131052 do
let(:web_hooks) { table(:web_hooks) }
describe '#perform' do
diff --git a/spec/lib/gitlab/background_migration/update_existing_subgroup_to_match_visibility_level_of_parent_spec.rb b/spec/lib/gitlab/background_migration/update_existing_subgroup_to_match_visibility_level_of_parent_spec.rb
index 37280110b91..70397ae1e30 100644
--- a/spec/lib/gitlab/background_migration/update_existing_subgroup_to_match_visibility_level_of_parent_spec.rb
+++ b/spec/lib/gitlab/background_migration/update_existing_subgroup_to_match_visibility_level_of_parent_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::UpdateExistingSubgroupToMatchVisibilityLevelOfParent, :migration, schema: 2020_01_10_121314 do
+describe Gitlab::BackgroundMigration::UpdateExistingSubgroupToMatchVisibilityLevelOfParent, schema: 2020_01_10_121314 do
include MigrationHelpers::NamespacesHelpers
context 'private visibility level' do
diff --git a/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb b/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb
new file mode 100644
index 00000000000..a273dcf9e5c
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb
@@ -0,0 +1,109 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require './db/post_migrate/20200128134110_migrate_commit_notes_mentions_to_db'
+require './db/post_migrate/20200211155539_migrate_merge_request_mentions_to_db'
+
+describe Gitlab::BackgroundMigration::UserMentions::CreateResourceUserMention, schema: 20200211155539 do
+ include MigrationsHelpers
+
+ context 'when migrating data' do
+ let(:users) { table(:users) }
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:notes) { table(:notes) }
+
+ let(:author) { users.create!(email: 'author@example.com', notification_email: 'author@example.com', name: 'author', username: 'author', projects_limit: 10, state: 'active') }
+ let(:member) { users.create!(email: 'member@example.com', notification_email: 'member@example.com', name: 'member', username: 'member', projects_limit: 10, state: 'active') }
+ let(:admin) { users.create!(email: 'administrator@example.com', notification_email: 'administrator@example.com', name: 'administrator', username: 'administrator', admin: 1, projects_limit: 10, state: 'active') }
+ let(:john_doe) { users.create!(email: 'john_doe@example.com', notification_email: 'john_doe@example.com', name: 'john_doe', username: 'john_doe', projects_limit: 10, state: 'active') }
+ let(:skipped) { users.create!(email: 'skipped@example.com', notification_email: 'skipped@example.com', name: 'skipped', username: 'skipped', projects_limit: 10, state: 'active') }
+
+ let(:mentioned_users) { [author, member, admin, john_doe, skipped] }
+ let(:mentioned_users_refs) { mentioned_users.map { |u| "@#{u.username}" }.join(' ') }
+
+ let(:group) { namespaces.create!(name: 'test1', path: 'test1', runners_token: 'my-token1', project_creation_level: 1, visibility_level: 20, type: 'Group') }
+ let(:inaccessible_group) { namespaces.create!(name: 'test2', path: 'test2', runners_token: 'my-token2', project_creation_level: 1, visibility_level: 0, type: 'Group') }
+ let(:project) { projects.create!(name: 'gitlab1', path: 'gitlab1', namespace_id: group.id, visibility_level: 0) }
+
+ let(:mentioned_groups) { [group, inaccessible_group] }
+ let(:group_mentions) { [group, inaccessible_group].map { |gr| "@#{gr.path}" }.join(' ') }
+ let(:description_mentions) { "description with mentions #{mentioned_users_refs} and #{group_mentions}" }
+
+ before do
+ # build personal namespaces and routes for users
+ mentioned_users.each { |u| u.becomes(User).save! }
+
+ # build namespaces and routes for groups
+ mentioned_groups.each do |gr|
+ gr.name += '-org'
+ gr.path += '-org'
+ gr.becomes(Namespace).save!
+ end
+ end
+
+ context 'migrate merge request mentions' do
+ let(:merge_requests) { table(:merge_requests) }
+ let(:merge_request_user_mentions) { table(:merge_request_user_mentions) }
+
+ let!(:mr1) do
+ merge_requests.create!(
+ title: "title 1", state_id: 1, target_branch: 'feature1', source_branch: 'master',
+ source_project_id: project.id, target_project_id: project.id, author_id: author.id,
+ description: description_mentions
+ )
+ end
+
+ let!(:mr2) do
+ merge_requests.create!(
+ title: "title 2", state_id: 1, target_branch: 'feature2', source_branch: 'master',
+ source_project_id: project.id, target_project_id: project.id, author_id: author.id,
+ description: 'some description'
+ )
+ end
+
+ let!(:mr3) do
+ merge_requests.create!(
+ title: "title 3", state_id: 1, target_branch: 'feature3', source_branch: 'master',
+ source_project_id: project.id, target_project_id: project.id, author_id: author.id,
+ description: 'description with an email@example.com and some other @ char here.')
+ end
+
+ let(:user_mentions) { merge_request_user_mentions }
+ let(:resource) { merge_request }
+
+ it_behaves_like 'resource mentions migration', MigrateMergeRequestMentionsToDb, MergeRequest
+ end
+
+ context 'migrate commit mentions' do
+ let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
+ let(:commit) { Commit.new(RepoHelpers.sample_commit, project.becomes(Project)) }
+ let(:commit_user_mentions) { table(:commit_user_mentions) }
+
+ let!(:note1) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: author.id, note: description_mentions) }
+ let!(:note2) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: author.id, note: 'sample note') }
+ let!(:note3) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: author.id, note: description_mentions, system: true) }
+
+ # this note does not have actual mentions
+ let!(:note4) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: author.id, note: 'note for an email@somesite.com and some other random @ ref' ) }
+ # this should have pointed to a nonexistent commit record in the commits table,
+ # but because commit is not an AR model we'll just make it so that it does not have mentions
+ let!(:note5) { notes.create!(commit_id: 'abc', noteable_type: 'Commit', project_id: project.id, author_id: author.id, note: 'note for an email@somesite.com and some other random @ ref') }
+
+ let(:user_mentions) { commit_user_mentions }
+ let(:resource) { commit }
+
+ it_behaves_like 'resource notes mentions migration', MigrateCommitNotesMentionsToDb, Commit
+ end
+ end
+
+ context 'checks no_quote_columns' do
+ it 'has correct no_quote_columns' do
+ expect(Gitlab::BackgroundMigration::UserMentions::Models::MergeRequest.no_quote_columns).to match([:note_id, :merge_request_id])
+ end
+
+ it 'commit has correct no_quote_columns' do
+ expect(Gitlab::BackgroundMigration::UserMentions::Models::Commit.no_quote_columns).to match([:note_id])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/bare_repository_import/repository_spec.rb b/spec/lib/gitlab/bare_repository_import/repository_spec.rb
index 0607e2232a1..d2ecb1869fc 100644
--- a/spec/lib/gitlab/bare_repository_import/repository_spec.rb
+++ b/spec/lib/gitlab/bare_repository_import/repository_spec.rb
@@ -54,16 +54,15 @@ describe ::Gitlab::BareRepositoryImport::Repository do
end
context 'hashed storage' do
- let(:gitlab_shell) { Gitlab::Shell.new }
- let(:repository_storage) { 'default' }
- let(:root_path) { Gitlab.config.repositories.storages[repository_storage].legacy_disk_path }
let(:hash) { '6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b' }
let(:hashed_path) { "@hashed/6b/86/6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b" }
+ let(:root_path) { TestEnv.repos_path }
let(:repo_path) { File.join(root_path, "#{hashed_path}.git") }
let(:wiki_path) { File.join(root_path, "#{hashed_path}.wiki.git") }
before do
- gitlab_shell.create_repository(repository_storage, hashed_path, 'group/project')
+ TestEnv.create_bare_repository(repo_path)
+
Gitlab::GitalyClient::StorageSettings.allow_disk_access do
repository = Rugged::Repository.new(repo_path)
repository.config['gitlab.fullpath'] = 'to/repo'
@@ -71,7 +70,7 @@ describe ::Gitlab::BareRepositoryImport::Repository do
end
after do
- gitlab_shell.remove_repository(repository_storage, hashed_path)
+ FileUtils.rm_rf(repo_path)
end
subject { described_class.new(root_path, repo_path) }
diff --git a/spec/lib/gitlab/bitbucket_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
index b0d07c6e0b0..b95175efc0c 100644
--- a/spec/lib/gitlab/bitbucket_import/importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
@@ -80,8 +80,7 @@ describe Gitlab::BitbucketImport::Importer do
end
let(:importer) { described_class.new(project) }
- let(:gitlab_shell) { double }
-
+ let(:sample) { RepoHelpers.sample_compare }
let(:issues_statuses_sample_data) do
{
count: sample_issues_statuses.count,
@@ -89,12 +88,6 @@ describe Gitlab::BitbucketImport::Importer do
}
end
- let(:sample) { RepoHelpers.sample_compare }
-
- before do
- allow(importer).to receive(:gitlab_shell) { gitlab_shell }
- end
-
subject { described_class.new(project) }
describe '#import_pull_requests' do
@@ -316,7 +309,7 @@ describe Gitlab::BitbucketImport::Importer do
describe 'wiki import' do
it 'is skipped when the wiki exists' do
expect(project.wiki).to receive(:repository_exists?) { true }
- expect(importer.gitlab_shell).not_to receive(:import_wiki_repository)
+ expect(project.wiki.repository).not_to receive(:import_repository)
importer.execute
@@ -325,7 +318,7 @@ describe Gitlab::BitbucketImport::Importer do
it 'imports to the project disk_path' do
expect(project.wiki).to receive(:repository_exists?) { false }
- expect(importer.gitlab_shell).to receive(:import_wiki_repository)
+ expect(project.wiki.repository).to receive(:import_repository)
importer.execute
diff --git a/spec/lib/gitlab/cache/import/caching_spec.rb b/spec/lib/gitlab/cache/import/caching_spec.rb
new file mode 100644
index 00000000000..e4aec0f4dec
--- /dev/null
+++ b/spec/lib/gitlab/cache/import/caching_spec.rb
@@ -0,0 +1,119 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Cache::Import::Caching, :clean_gitlab_redis_cache do
+ describe '.read' do
+ it 'reads a value from the cache' do
+ described_class.write('foo', 'bar')
+
+ expect(described_class.read('foo')).to eq('bar')
+ end
+
+ it 'returns nil if the cache key does not exist' do
+ expect(described_class.read('foo')).to be_nil
+ end
+
+ it 'refreshes the cache key if a value is present' do
+ described_class.write('foo', 'bar')
+
+ redis = double(:redis)
+
+ expect(redis).to receive(:get).with(/foo/).and_return('bar')
+ expect(redis).to receive(:expire).with(/foo/, described_class::TIMEOUT)
+ expect(Gitlab::Redis::Cache).to receive(:with).twice.and_yield(redis)
+
+ described_class.read('foo')
+ end
+
+ it 'does not refresh the cache key if a value is empty' do
+ described_class.write('foo', nil)
+
+ redis = double(:redis)
+
+ expect(redis).to receive(:get).with(/foo/).and_return('')
+ expect(redis).not_to receive(:expire)
+ expect(Gitlab::Redis::Cache).to receive(:with).and_yield(redis)
+
+ described_class.read('foo')
+ end
+ end
+
+ describe '.read_integer' do
+ it 'returns an Integer' do
+ described_class.write('foo', '10')
+
+ expect(described_class.read_integer('foo')).to eq(10)
+ end
+
+ it 'returns nil if no value was found' do
+ expect(described_class.read_integer('foo')).to be_nil
+ end
+ end
+
+ describe '.write' do
+ it 'writes a value to the cache and returns the written value' do
+ expect(described_class.write('foo', 10)).to eq(10)
+ expect(described_class.read('foo')).to eq('10')
+ end
+ end
+
+ describe '.set_add' do
+ it 'adds a value to a set' do
+ described_class.set_add('foo', 10)
+ described_class.set_add('foo', 10)
+
+ key = described_class.cache_key_for('foo')
+ values = Gitlab::Redis::Cache.with { |r| r.smembers(key) }
+
+ expect(values).to eq(['10'])
+ end
+ end
+
+ describe '.set_includes?' do
+ it 'returns false when the key does not exist' do
+ expect(described_class.set_includes?('foo', 10)).to eq(false)
+ end
+
+ it 'returns false when the value is not present in the set' do
+ described_class.set_add('foo', 10)
+
+ expect(described_class.set_includes?('foo', 20)).to eq(false)
+ end
+
+ it 'returns true when the set includes the given value' do
+ described_class.set_add('foo', 10)
+
+ expect(described_class.set_includes?('foo', 10)).to eq(true)
+ end
+ end
+
+ describe '.write_multiple' do
+ it 'sets multiple keys' do
+ mapping = { 'foo' => 10, 'bar' => 20 }
+
+ described_class.write_multiple(mapping)
+
+ mapping.each do |key, value|
+ full_key = described_class.cache_key_for(key)
+ found = Gitlab::Redis::Cache.with { |r| r.get(full_key) }
+
+ expect(found).to eq(value.to_s)
+ end
+ end
+ end
+
+ describe '.expire' do
+ it 'sets the expiration time of a key' do
+ timeout = 1.hour.to_i
+
+ described_class.write('foo', 'bar', timeout: 2.hours.to_i)
+ described_class.expire('foo', timeout)
+
+ key = described_class.cache_key_for('foo')
+ found_ttl = Gitlab::Redis::Cache.with { |r| r.ttl(key) }
+
+ expect(found_ttl).to be <= timeout
+ end
+ end
+end
diff --git a/spec/lib/gitlab/checks/branch_check_spec.rb b/spec/lib/gitlab/checks/branch_check_spec.rb
index 7cc1722dfd4..fd7eaa1603f 100644
--- a/spec/lib/gitlab/checks/branch_check_spec.rb
+++ b/spec/lib/gitlab/checks/branch_check_spec.rb
@@ -15,7 +15,7 @@ describe Gitlab::Checks::BranchCheck do
let(:ref) { 'refs/heads/master' }
it 'raises an error' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'The default branch of a project cannot be deleted.')
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, 'The default branch of a project cannot be deleted.')
end
end
@@ -28,7 +28,7 @@ describe Gitlab::Checks::BranchCheck do
it 'raises an error if the user is not allowed to do forced pushes to protected branches' do
expect(Gitlab::Checks::ForcePush).to receive(:force_push?).and_return(true)
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to force push code to a protected branch on this project.')
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, 'You are not allowed to force push code to a protected branch on this project.')
end
it 'raises an error if the user is not allowed to merge to protected branches' do
@@ -38,13 +38,13 @@ describe Gitlab::Checks::BranchCheck do
expect(user_access).to receive(:can_merge_to_branch?).and_return(false)
expect(user_access).to receive(:can_push_to_branch?).and_return(false)
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to merge code into protected branches on this project.')
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, 'You are not allowed to merge code into protected branches on this project.')
end
it 'raises an error if the user is not allowed to push to protected branches' do
expect(user_access).to receive(:can_push_to_branch?).and_return(false)
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to push code to protected branches on this project.')
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, 'You are not allowed to push code to protected branches on this project.')
end
context 'when project repository is empty' do
@@ -58,7 +58,7 @@ describe Gitlab::Checks::BranchCheck do
end
it 'raises an error' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /Ask a project Owner or Maintainer to create a default branch/)
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, /Ask a project Owner or Maintainer to create a default branch/)
end
end
@@ -109,7 +109,7 @@ describe Gitlab::Checks::BranchCheck do
end
it 'raises an error' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to create protected branches on this project.')
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, 'You are not allowed to create protected branches on this project.')
end
end
@@ -135,7 +135,7 @@ describe Gitlab::Checks::BranchCheck do
end
it 'raises an error' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You can only use an existing protected branch ref as the basis of a new protected branch.')
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, 'You can only use an existing protected branch ref as the basis of a new protected branch.')
end
end
@@ -157,7 +157,7 @@ describe Gitlab::Checks::BranchCheck do
context 'via SSH' do
it 'raises an error' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You can only create protected branches using the web interface and API.')
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, 'You can only create protected branches using the web interface and API.')
end
end
end
@@ -171,7 +171,7 @@ describe Gitlab::Checks::BranchCheck do
context 'if the user is not allowed to delete protected branches' do
it 'raises an error' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to delete protected branches from this project. Only a project maintainer or owner can delete a protected branch.')
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, 'You are not allowed to delete protected branches from this project. Only a project maintainer or owner can delete a protected branch.')
end
end
@@ -190,7 +190,7 @@ describe Gitlab::Checks::BranchCheck do
context 'over SSH or HTTP' do
it 'raises an error' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You can only delete protected branches using the web interface.')
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, 'You can only delete protected branches using the web interface.')
end
end
end
diff --git a/spec/lib/gitlab/checks/diff_check_spec.rb b/spec/lib/gitlab/checks/diff_check_spec.rb
index b9134b8d6ab..467b4ed3a21 100644
--- a/spec/lib/gitlab/checks/diff_check_spec.rb
+++ b/spec/lib/gitlab/checks/diff_check_spec.rb
@@ -34,7 +34,7 @@ describe Gitlab::Checks::DiffCheck do
context 'when change is sent by a different user' do
it 'raises an error if the user is not allowed to update the file' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "The path 'README' is locked in Git LFS by #{lock.user.name}")
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, "The path 'README' is locked in Git LFS by #{lock.user.name}")
end
end
diff --git a/spec/lib/gitlab/checks/force_push_spec.rb b/spec/lib/gitlab/checks/force_push_spec.rb
index 9432be083d3..334dd8635a3 100644
--- a/spec/lib/gitlab/checks/force_push_spec.rb
+++ b/spec/lib/gitlab/checks/force_push_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::Checks::ForcePush do
- set(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
describe '.force_push?' do
it 'returns false if the repo is empty' do
diff --git a/spec/lib/gitlab/checks/lfs_check_spec.rb b/spec/lib/gitlab/checks/lfs_check_spec.rb
index dad14e100a7..c86481d1abe 100644
--- a/spec/lib/gitlab/checks/lfs_check_spec.rb
+++ b/spec/lib/gitlab/checks/lfs_check_spec.rb
@@ -50,7 +50,7 @@ describe Gitlab::Checks::LfsCheck do
end
it 'fails if any LFS blobs are missing' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /LFS objects are missing/)
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, /LFS objects are missing/)
end
it 'succeeds if LFS objects have already been uploaded' do
diff --git a/spec/lib/gitlab/checks/project_created_spec.rb b/spec/lib/gitlab/checks/project_created_spec.rb
index 373fef2a240..bbc97155374 100644
--- a/spec/lib/gitlab/checks/project_created_spec.rb
+++ b/spec/lib/gitlab/checks/project_created_spec.rb
@@ -3,24 +3,29 @@
require 'spec_helper'
describe Gitlab::Checks::ProjectCreated, :clean_gitlab_redis_shared_state do
- let(:user) { create(:user) }
- let(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository, namespace: user.namespace) }
+ let(:protocol) { 'http' }
+ let(:git_user) { user }
+ let(:repository) { project.repository }
+
+ subject { described_class.new(repository, git_user, 'http') }
describe '.fetch_message' do
context 'with a project created message queue' do
- let(:project_created) { described_class.new(project, user, 'http') }
-
before do
- project_created.add_message
+ subject.add_message
end
it 'returns project created message' do
- expect(described_class.fetch_message(user.id, project.id)).to eq(project_created.message)
+ expect(described_class.fetch_message(user.id, project.id)).to eq(subject.message)
end
it 'deletes the project created message from redis' do
expect(Gitlab::Redis::SharedState.with { |redis| redis.get("project_created:#{user.id}:#{project.id}") }).not_to be_nil
+
described_class.fetch_message(user.id, project.id)
+
expect(Gitlab::Redis::SharedState.with { |redis| redis.get("project_created:#{user.id}:#{project.id}") }).to be_nil
end
end
@@ -34,15 +39,15 @@ describe Gitlab::Checks::ProjectCreated, :clean_gitlab_redis_shared_state do
describe '#add_message' do
it 'queues a project created message' do
- project_created = described_class.new(project, user, 'http')
-
- expect(project_created.add_message).to eq('OK')
+ expect(subject.add_message).to eq('OK')
end
- it 'handles anonymous push' do
- project_created = described_class.new(nil, user, 'http')
+ context 'when user is nil' do
+ let(:git_user) { nil }
- expect(project_created.add_message).to be_nil
+ it 'handles anonymous push' do
+ expect(subject.add_message).to be_nil
+ end
end
end
end
diff --git a/spec/lib/gitlab/checks/project_moved_spec.rb b/spec/lib/gitlab/checks/project_moved_spec.rb
index 3de397760b4..1d1d6211088 100644
--- a/spec/lib/gitlab/checks/project_moved_spec.rb
+++ b/spec/lib/gitlab/checks/project_moved_spec.rb
@@ -3,24 +3,30 @@
require 'spec_helper'
describe Gitlab::Checks::ProjectMoved, :clean_gitlab_redis_shared_state do
- let(:user) { create(:user) }
- let(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository, :wiki_repo, namespace: user.namespace) }
+ let(:repository) { project.repository }
+ let(:protocol) { 'http' }
+ let(:git_user) { user }
+ let(:redirect_path) { 'foo/bar' }
+
+ subject { described_class.new(repository, git_user, protocol, redirect_path) }
describe '.fetch_message' do
context 'with a redirect message queue' do
- it 'returns the redirect message' do
- project_moved = described_class.new(project, user, 'http', 'foo/bar')
- project_moved.add_message
+ before do
+ subject.add_message
+ end
- expect(described_class.fetch_message(user.id, project.id)).to eq(project_moved.message)
+ it 'returns the redirect message' do
+ expect(described_class.fetch_message(user.id, project.id)).to eq(subject.message)
end
it 'deletes the redirect message from redis' do
- project_moved = described_class.new(project, user, 'http', 'foo/bar')
- project_moved.add_message
-
expect(Gitlab::Redis::SharedState.with { |redis| redis.get("redirect_namespace:#{user.id}:#{project.id}") }).not_to be_nil
+
described_class.fetch_message(user.id, project.id)
+
expect(Gitlab::Redis::SharedState.with { |redis| redis.get("redirect_namespace:#{user.id}:#{project.id}") }).to be_nil
end
end
@@ -34,29 +40,82 @@ describe Gitlab::Checks::ProjectMoved, :clean_gitlab_redis_shared_state do
describe '#add_message' do
it 'queues a redirect message' do
- project_moved = described_class.new(project, user, 'http', 'foo/bar')
- expect(project_moved.add_message).to eq("OK")
+ expect(subject.add_message).to eq("OK")
end
- it 'handles anonymous clones' do
- project_moved = described_class.new(project, nil, 'http', 'foo/bar')
+ context 'when user is nil' do
+ let(:git_user) { nil }
- expect(project_moved.add_message).to eq(nil)
+ it 'handles anonymous clones' do
+ expect(subject.add_message).to be_nil
+ end
end
end
describe '#message' do
- it 'returns a redirect message' do
- project_moved = described_class.new(project, user, 'http', 'foo/bar')
- message = <<~MSG
- Project 'foo/bar' was moved to '#{project.full_path}'.
+ shared_examples 'errors per protocol' do
+ shared_examples 'returns redirect message' do
+ it do
+ message = <<~MSG
+ Project '#{redirect_path}' was moved to '#{project.full_path}'.
+
+ Please update your Git remote:
+
+ git remote set-url origin #{url_to_repo}
+ MSG
+
+ expect(subject.message).to eq(message)
+ end
+ end
+
+ context 'when protocol is http' do
+ it_behaves_like 'returns redirect message' do
+ let(:url_to_repo) { http_url_to_repo }
+ end
+ end
+
+ context 'when protocol is ssh' do
+ let(:protocol) { 'ssh' }
- Please update your Git remote:
+ it_behaves_like 'returns redirect message' do
+ let(:url_to_repo) { ssh_url_to_repo }
+ end
+ end
+ end
+
+ context 'with project' do
+ it_behaves_like 'errors per protocol' do
+ let(:http_url_to_repo) { project.http_url_to_repo }
+ let(:ssh_url_to_repo) { project.ssh_url_to_repo }
+ end
+ end
+
+ context 'with wiki' do
+ let(:repository) { project.wiki.repository }
- git remote set-url origin #{project.http_url_to_repo}
- MSG
+ it_behaves_like 'errors per protocol' do
+ let(:http_url_to_repo) { project.wiki.http_url_to_repo }
+ let(:ssh_url_to_repo) { project.wiki.ssh_url_to_repo }
+ end
+ end
- expect(project_moved.message).to eq(message)
+ context 'with project snippet' do
+ let_it_be(:snippet) { create(:project_snippet, :repository, project: project, author: user) }
+ let(:repository) { snippet.repository }
+
+ it_behaves_like 'errors per protocol' do
+ let(:http_url_to_repo) { snippet.http_url_to_repo }
+ let(:ssh_url_to_repo) { snippet.ssh_url_to_repo }
+ end
+ end
+
+ context 'with personal snippet' do
+ let_it_be(:snippet) { create(:personal_snippet, :repository, author: user) }
+ let(:repository) { snippet.repository }
+
+ it 'returns nil' do
+ expect(subject.add_message).to be_nil
+ end
end
end
end
diff --git a/spec/lib/gitlab/checks/push_check_spec.rb b/spec/lib/gitlab/checks/push_check_spec.rb
index e1bd52d6c0b..857d71732fe 100644
--- a/spec/lib/gitlab/checks/push_check_spec.rb
+++ b/spec/lib/gitlab/checks/push_check_spec.rb
@@ -15,7 +15,7 @@ describe Gitlab::Checks::PushCheck do
expect(user_access).to receive(:can_do_action?).with(:push_code).and_return(false)
expect(project).to receive(:branch_allows_collaboration?).with(user_access.user, 'master').and_return(false)
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to push code to this project.')
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, 'You are not allowed to push code to this project.')
end
end
end
diff --git a/spec/lib/gitlab/checks/push_file_count_check_spec.rb b/spec/lib/gitlab/checks/push_file_count_check_spec.rb
new file mode 100644
index 00000000000..58ba7d579a3
--- /dev/null
+++ b/spec/lib/gitlab/checks/push_file_count_check_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Checks::PushFileCountCheck do
+ let(:snippet) { create(:personal_snippet, :repository) }
+ let(:changes) { { oldrev: oldrev, newrev: newrev, ref: ref } }
+ let(:timeout) { Gitlab::GitAccess::INTERNAL_TIMEOUT }
+ let(:logger) { Gitlab::Checks::TimedLogger.new(timeout: timeout) }
+
+ subject { described_class.new(changes, repository: snippet.repository, limit: 1, logger: logger) }
+
+ describe '#validate!' do
+ using RSpec::Parameterized::TableSyntax
+
+ before do
+ allow(snippet.repository).to receive(:new_commits).and_return(
+ snippet.repository.commits_between(oldrev, newrev)
+ )
+ end
+
+ context 'initial creation' do
+ let(:oldrev) { Gitlab::Git::EMPTY_TREE_ID }
+ let(:newrev) { TestEnv::BRANCH_SHA["snippet/single-file"] }
+ let(:ref) { "refs/heads/snippet/single-file" }
+
+ it 'allows creation' do
+ expect { subject.validate! }.not_to raise_error
+ end
+ end
+
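+ # each row moves the snippet repository between branch fixtures; with a limit of 1 the snippet must contain exactly one file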
+ where(:old, :new, :valid, :message) do
+ 'single-file' | 'edit-file' | true | nil
+ 'single-file' | 'multiple-files' | false | 'The repository can contain at most 1 file(s).'
+ 'single-file' | 'no-files' | false | 'The repository must contain at least 1 file.'
+ 'edit-file' | 'rename-and-edit-file' | true | nil
+ end
+
+ with_them do
+ let(:oldrev) { TestEnv::BRANCH_SHA["snippet/#{old}"] }
+ let(:newrev) { TestEnv::BRANCH_SHA["snippet/#{new}"] }
+ let(:ref) { "refs/heads/snippet/#{new}" }
+
+ it "verifies" do
+ if valid
+ expect { subject.validate! }.not_to raise_error
+ else
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, message)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/checks/snippet_check_spec.rb b/spec/lib/gitlab/checks/snippet_check_spec.rb
new file mode 100644
index 00000000000..3eee5ccfc0a
--- /dev/null
+++ b/spec/lib/gitlab/checks/snippet_check_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Checks::SnippetCheck do
+ include_context 'change access checks context'
+
+ let(:snippet) { create(:personal_snippet, :repository) }
+ let(:user_access) { Gitlab::UserAccessSnippet.new(user, snippet: snippet) }
+
+ subject { Gitlab::Checks::SnippetCheck.new(changes, logger: logger) }
+
+ describe '#validate!' do
+ it 'does not raise any error' do
+ expect { subject.validate! }.not_to raise_error
+ end
+
+ context 'trying to delete the branch' do
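+ # a newrev of all zeros signals that the ref is being deleted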
+ let(:newrev) { '0000000000000000000000000000000000000000' }
+
+ it 'raises an error' do
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, 'You can not create or delete branches.')
+ end
+ end
+
+ context 'trying to create the branch' do
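+ # an oldrev of all zeros signals that a new ref is being created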
+ let(:oldrev) { '0000000000000000000000000000000000000000' }
+ let(:ref) { 'refs/heads/feature' }
+
+ it 'raises an error' do
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, 'You can not create or delete branches.')
+ end
+
+ context "when branch is 'master'" do
+ let(:ref) { 'refs/heads/master' }
+
+ it "allows the operation" do
+ expect { subject.validate! }.not_to raise_error
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/checks/tag_check_spec.rb b/spec/lib/gitlab/checks/tag_check_spec.rb
index 80e9eb504ad..0c94171646e 100644
--- a/spec/lib/gitlab/checks/tag_check_spec.rb
+++ b/spec/lib/gitlab/checks/tag_check_spec.rb
@@ -11,7 +11,7 @@ describe Gitlab::Checks::TagCheck do
it 'raises an error when user does not have access' do
allow(user_access).to receive(:can_do_action?).with(:admin_tag).and_return(false)
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to change existing tags on this project.')
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, 'You are not allowed to change existing tags on this project.')
end
context 'with protected tag' do
@@ -27,7 +27,7 @@ describe Gitlab::Checks::TagCheck do
let(:newrev) { '0000000000000000000000000000000000000000' }
it 'is prevented' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /cannot be deleted/)
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, /cannot be deleted/)
end
end
@@ -36,7 +36,7 @@ describe Gitlab::Checks::TagCheck do
let(:newrev) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' }
it 'is prevented' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /cannot be updated/)
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, /cannot be updated/)
end
end
end
@@ -47,7 +47,7 @@ describe Gitlab::Checks::TagCheck do
let(:ref) { 'refs/tags/v9.1.0' }
it 'prevents creation below access level' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /allowed to create this tag as it is protected/)
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, /allowed to create this tag as it is protected/)
end
context 'when user has access' do
diff --git a/spec/lib/gitlab/ci/artifact_file_reader_spec.rb b/spec/lib/gitlab/ci/artifact_file_reader_spec.rb
new file mode 100644
index 00000000000..04017b9ae3e
--- /dev/null
+++ b/spec/lib/gitlab/ci/artifact_file_reader_spec.rb
@@ -0,0 +1,100 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::ArtifactFileReader do
+ let(:job) { create(:ci_build) }
+ let(:path) { 'generated.yml' } # included in the ci_build_artifacts.zip
+
+ describe '#read' do
+ subject { described_class.new(job).read(path) }
+
+ context 'when job has artifacts and metadata' do
+ let!(:artifacts) { create(:ci_job_artifact, :archive, job: job) }
+ let!(:metadata) { create(:ci_job_artifact, :metadata, job: job) }
+
+ it 'returns the content at the path' do
+ is_expected.to be_present
+ expect(YAML.safe_load(subject).keys).to contain_exactly('rspec', 'time', 'custom')
+ end
+
+ context 'when path does not exist' do
+ let(:path) { 'file/does/not/exist.txt' }
+ let(:expected_error) do
+ "Path `#{path}` does not exist inside the `#{job.name}` artifacts archive!"
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(described_class::Error, expected_error)
+ end
+ end
+
+ context 'when path points to a directory' do
+ let(:path) { 'other_artifacts_0.1.2' }
+ let(:expected_error) do
+ "Path `#{path}` was expected to be a file but it was a directory!"
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(described_class::Error, expected_error)
+ end
+ end
+
+ context 'when path is nested' do
+ # path exists in ci_build_artifacts.zip
+ let(:path) { 'other_artifacts_0.1.2/doc_sample.txt' }
+
+ it 'returns the content at the nested path' do
+ is_expected.to be_present
+ end
+ end
+
+ context 'when artifact archive size is greater than the limit' do
+ let(:expected_error) do
+ "Artifacts archive for job `#{job.name}` is too large: max 1 KB"
+ end
+
+ before do
+ stub_const("#{described_class}::MAX_ARCHIVE_SIZE", 1.kilobyte)
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(described_class::Error, expected_error)
+ end
+ end
+
+ context 'when metadata entry shows size greater than the limit' do
+ let(:expected_error) do
+ "Artifacts archive for job `#{job.name}` is too large: max 5 MB"
+ end
+
+ before do
+ expect_next_instance_of(Gitlab::Ci::Build::Artifacts::Metadata::Entry) do |entry|
+ expect(entry).to receive(:total_size).and_return(10.megabytes)
+ end
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(described_class::Error, expected_error)
+ end
+ end
+ end
+
+ context 'when job does not have metadata artifacts' do
+ let!(:artifacts) { create(:ci_job_artifact, :archive, job: job) }
+ let(:expected_error) do
+ "Job `#{job.name}` has missing artifacts metadata and cannot be extracted!"
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(described_class::Error, expected_error)
+ end
+ end
+
+ context 'when job does not have artifacts' do
+ it 'raises ArgumentError' do
+ expect { subject }.to raise_error(ArgumentError, 'Job does not have artifacts')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/build/policy/changes_spec.rb b/spec/lib/gitlab/ci/build/policy/changes_spec.rb
index 48ac2e4e657..07f52605929 100644
--- a/spec/lib/gitlab/ci/build/policy/changes_spec.rb
+++ b/spec/lib/gitlab/ci/build/policy/changes_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::Ci::Build::Policy::Changes do
- set(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
describe '#satisfied_by?' do
describe 'paths matching' do
@@ -89,7 +89,7 @@ describe Gitlab::Ci::Build::Policy::Changes do
end
describe 'gitaly integration' do
- set(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
let(:pipeline) do
create(:ci_empty_pipeline, project: project,
@@ -119,6 +119,7 @@ describe Gitlab::Ci::Build::Policy::Changes do
end
context 'when branch is created' do
+ let_it_be(:project) { create(:project, :repository) }
let(:pipeline) do
create(:ci_empty_pipeline, project: project,
ref: 'feature',
diff --git a/spec/lib/gitlab/ci/build/policy/kubernetes_spec.rb b/spec/lib/gitlab/ci/build/policy/kubernetes_spec.rb
index bc2e6fe6b8d..6d96bdc30c7 100644
--- a/spec/lib/gitlab/ci/build/policy/kubernetes_spec.rb
+++ b/spec/lib/gitlab/ci/build/policy/kubernetes_spec.rb
@@ -18,7 +18,7 @@ describe Gitlab::Ci::Build::Policy::Kubernetes do
end
context 'when kubernetes service is inactive' do
- set(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
it 'is not satisfied by a pipeline without kubernetes available' do
expect(described_class.new('active'))
diff --git a/spec/lib/gitlab/ci/build/policy/variables_spec.rb b/spec/lib/gitlab/ci/build/policy/variables_spec.rb
index 66f2cb640b9..0e75726b81c 100644
--- a/spec/lib/gitlab/ci/build/policy/variables_spec.rb
+++ b/spec/lib/gitlab/ci/build/policy/variables_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::Ci::Build::Policy::Variables do
- set(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
let(:pipeline) do
build(:ci_empty_pipeline, project: project, ref: 'master', source: :push)
diff --git a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
index ad388886681..d08ce30618d 100644
--- a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
@@ -106,6 +106,7 @@ describe Gitlab::Ci::Config::Entry::Bridge do
ignore: false,
stage: 'test',
only: { refs: %w[branches tags] },
+ variables: {},
scheduling_type: :stage)
end
end
@@ -128,6 +129,7 @@ describe Gitlab::Ci::Config::Entry::Bridge do
ignore: false,
stage: 'test',
only: { refs: %w[branches tags] },
+ variables: {},
scheduling_type: :stage)
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/inherit/default_spec.rb b/spec/lib/gitlab/ci/config/entry/inherit/default_spec.rb
new file mode 100644
index 00000000000..073f93ce542
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/inherit/default_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ::Gitlab::Ci::Config::Entry::Inherit::Default do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { described_class.new(config) }
+
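+ # inherit:default accepts a boolean or an allow-list of default keys; unknown keys and non-string entries are invalid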
+ context 'validations' do
+ where(:config, :valid) do
+ true | true
+ false | true
+ %w[image] | true
+ %w[unknown] | false
+ %i[image] | false
+ [true] | false
+ "string" | false
+ end
+
+ with_them do
+ it do
+ expect(subject.valid?).to eq(valid)
+ end
+ end
+ end
+
+ describe '#inherit?' do
+ where(:config, :inherit) do
+ true | true
+ false | false
+ %w[image] | true
+ %w[before_script] | false
+ end
+
+ with_them do
+ it do
+ expect(subject.inherit?('image')).to eq(inherit)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/inherit/variables_spec.rb b/spec/lib/gitlab/ci/config/entry/inherit/variables_spec.rb
new file mode 100644
index 00000000000..06deed11c15
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/inherit/variables_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ::Gitlab::Ci::Config::Entry::Inherit::Variables do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { described_class.new(config) }
+
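+ # inherit:variables accepts a boolean or a list of root variable names to keep for the job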
+ context 'validations' do
+ where(:config, :valid) do
+ true | true
+ false | true
+ %w[A] | true
+ %w[A B] | true
+ %i[image] | true
+ [true] | false
+ "string" | false
+ end
+
+ with_them do
+ it do
+ expect(subject.valid?).to eq(valid)
+ end
+ end
+ end
+
+ describe '#inherit?' do
+ where(:config, :inherit) do
+ true | true
+ false | false
+ %w[A] | true
+ %w[B] | false
+ end
+
+ with_them do
+ it do
+ expect(subject.inherit?('A')).to eq(inherit)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/job_spec.rb b/spec/lib/gitlab/ci/config/entry/job_spec.rb
index 313b504ab59..b6279485426 100644
--- a/spec/lib/gitlab/ci/config/entry/job_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/job_spec.rb
@@ -6,6 +6,7 @@ describe Gitlab::Ci::Config::Entry::Job do
let(:entry) { described_class.new(config, name: :rspec) }
it_behaves_like 'with inheritable CI config' do
+ let(:config) { { script: 'echo' } }
let(:inheritable_key) { 'default' }
let(:inheritable_class) { Gitlab::Ci::Config::Entry::Default }
@@ -15,6 +16,10 @@ describe Gitlab::Ci::Config::Entry::Job do
let(:ignored_inheritable_columns) do
%i[]
end
+
+ before do
+ allow(entry).to receive_message_chain(:inherit_entry, :default_entry, :inherit?).and_return(true)
+ end
end
describe '.nodes' do
@@ -24,7 +29,8 @@ describe Gitlab::Ci::Config::Entry::Job do
let(:result) do
%i[before_script script stage type after_script cache
image services only except rules needs variables artifacts
- environment coverage retry interruptible timeout release tags]
+ environment coverage retry interruptible timeout release tags
+ inherit]
end
it { is_expected.to match_array result }
@@ -500,7 +506,13 @@ describe Gitlab::Ci::Config::Entry::Job do
let(:unspecified) { double('unspecified', 'specified?' => false) }
let(:default) { double('default', '[]' => unspecified) }
let(:workflow) { double('workflow', 'has_rules?' => false) }
- let(:deps) { double('deps', 'default' => default, '[]' => unspecified, 'workflow' => workflow) }
+
+ let(:deps) do
+ double('deps',
+ 'default_entry' => default,
+ 'workflow_entry' => workflow,
+ 'variables_value' => nil)
+ end
context 'when job config overrides default config' do
before do
diff --git a/spec/lib/gitlab/ci/config/entry/jobs_spec.rb b/spec/lib/gitlab/ci/config/entry/jobs_spec.rb
index c8c188d71bf..203342ab620 100644
--- a/spec/lib/gitlab/ci/config/entry/jobs_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/jobs_spec.rb
@@ -99,6 +99,7 @@ describe Gitlab::Ci::Config::Entry::Jobs do
only: { refs: %w[branches tags] },
stage: 'test',
trigger: { project: 'my/project' },
+ variables: {},
scheduling_type: :stage
},
regular_job: {
diff --git a/spec/lib/gitlab/ci/config/entry/processable_spec.rb b/spec/lib/gitlab/ci/config/entry/processable_spec.rb
new file mode 100644
index 00000000000..8447a29c772
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/processable_spec.rb
@@ -0,0 +1,381 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Config::Entry::Processable do
+ let(:node_class) do
+ Class.new(::Gitlab::Config::Entry::Node) do
+ include Gitlab::Ci::Config::Entry::Processable
+
+ entry :tags, ::Gitlab::Config::Entry::ArrayOfStrings,
+ description: 'Set the default tags.',
+ inherit: true
+
+ def self.name
+ 'job'
+ end
+ end
+ end
+
+ let(:entry) { node_class.new(config, name: :rspec) }
+
+ describe 'validations' do
+ before do
+ entry.compose!
+ end
+
+ context 'when entry config value is correct' do
+ let(:config) { { stage: 'test' } }
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
+ context 'when job name is empty' do
+ let(:entry) { node_class.new(config, name: ''.to_sym) }
+
+ it 'reports error' do
+ expect(entry.errors).to include "job name can't be blank"
+ end
+ end
+ end
+
+ context 'when entry value is not correct' do
+ context 'incorrect config value type' do
+ let(:config) { ['incorrect'] }
+
+ describe '#errors' do
+ it 'reports error about a config type' do
+ expect(entry.errors)
+ .to include 'job config should be a hash'
+ end
+ end
+ end
+
+ context 'when config is empty' do
+ let(:config) { {} }
+
+ describe '#valid?' do
+ it 'is invalid' do
+ expect(entry).not_to be_valid
+ end
+ end
+ end
+
+ context 'when extends key is not a string' do
+ let(:config) { { extends: 123 } }
+
+ it 'returns error about wrong value type' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include "job extends should be an array of strings or a string"
+ end
+ end
+
+ context 'when it uses both "when:" and "rules:"' do
+ let(:config) do
+ {
+ script: 'echo',
+ when: 'on_failure',
+ rules: [{ if: '$VARIABLE', when: 'on_success' }]
+ }
+ end
+
+ it 'returns an error about when: being combined with rules' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include 'job config key may not be used with `rules`: when'
+ end
+ end
+
+ context 'when only: is used with rules:' do
+ let(:config) { { only: ['merge_requests'], rules: [{ if: '$THIS' }] } }
+
+ it 'returns error about mixing only: with rules:' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include /may not be used with `rules`/
+ end
+
+ context 'and only: is blank' do
+ let(:config) { { only: nil, rules: [{ if: '$THIS' }] } }
+
+ it 'returns error about mixing only: with rules:' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include /may not be used with `rules`/
+ end
+ end
+
+ context 'and rules: is blank' do
+ let(:config) { { only: ['merge_requests'], rules: nil } }
+
+ it 'returns error about mixing only: with rules:' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include /may not be used with `rules`/
+ end
+ end
+ end
+
+ context 'when except: is used with rules:' do
+ let(:config) { { except: { refs: %w[master] }, rules: [{ if: '$THIS' }] } }
+
+ it 'returns error about mixing except: with rules:' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include /may not be used with `rules`/
+ end
+
+ context 'and except: is blank' do
+ let(:config) { { except: nil, rules: [{ if: '$THIS' }] } }
+
+ it 'returns error about mixing except: with rules:' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include /may not be used with `rules`/
+ end
+ end
+
+ context 'and rules: is blank' do
+ let(:config) { { except: { refs: %w[master] }, rules: nil } }
+
+ it 'returns error about mixing except: with rules:' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include /may not be used with `rules`/
+ end
+ end
+ end
+
+ context 'when only: and except: are both used with rules:' do
+ let(:config) do
+ {
+ only: %w[merge_requests],
+ except: { refs: %w[master] },
+ rules: [{ if: '$THIS' }]
+ }
+ end
+
+ it 'returns errors about mixing both only: and except: with rules:' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include /may not be used with `rules`/
+ expect(entry.errors).to include /may not be used with `rules`/
+ end
+
+ context 'when only: and except: are both blank' do
+ let(:config) do
+ { only: nil, except: nil, rules: [{ if: '$THIS' }] }
+ end
+
+ it 'returns errors about mixing both only: and except: with rules:' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include /may not be used with `rules`/
+ expect(entry.errors).to include /may not be used with `rules`/
+ end
+ end
+
+ context 'when rules: is blank' do
+ let(:config) do
+ { only: %w[merge_requests], except: { refs: %w[master] }, rules: nil }
+ end
+
+ it 'returns errors about mixing both only: and except: with rules:' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include /may not be used with `rules`/
+ expect(entry.errors).to include /may not be used with `rules`/
+ end
+ end
+ end
+ end
+ end
+
+ describe '#relevant?' do
+ it 'is a relevant entry' do
+ entry = node_class.new({ stage: 'test' }, name: :rspec)
+
+ expect(entry).to be_relevant
+ end
+ end
+
+ describe '#compose!' do
+ let(:unspecified) { double('unspecified', 'specified?' => false) }
+ let(:default) { double('default', '[]' => unspecified) }
+ let(:workflow) { double('workflow', 'has_rules?' => false) }
+ let(:variables) { }
+
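+ # Doubles for the composed root entries that compose! receives as dependencies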
+ let(:deps) do
+ double('deps',
+ default_entry: default,
+ workflow_entry: workflow,
+ variables_value: variables)
+ end
+
+ context 'with workflow rules' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:name, :has_workflow_rules?, :only, :rules, :result) do
+ "uses default only" | false | nil | nil | { refs: %w[branches tags] }
+ "uses user only" | false | %w[branches] | nil | { refs: %w[branches] }
+ "does not define only" | false | nil | [] | nil
+ "does not define only" | true | nil | nil | nil
+ "uses user only" | true | %w[branches] | nil | { refs: %w[branches] }
+ "does not define only" | true | nil | [] | nil
+ end
+
+ with_them do
+ let(:config) { { script: 'ls', rules: rules, only: only }.compact }
+
+ it "#{name}" do
+ expect(workflow).to receive(:has_rules?) { has_workflow_rules? }
+
+ entry.compose!(deps)
+
+ expect(entry.only_value).to eq(result)
+ end
+ end
+ end
+
+ context 'when workflow rules is used' do
+ context 'when rules are used' do
+ let(:config) { { script: 'ls', cache: { key: 'test' }, rules: [] } }
+
+ it 'does not define only' do
+ expect(entry).not_to be_only_defined
+ end
+ end
+
+ context 'when rules are not used' do
+ let(:config) { { script: 'ls', cache: { key: 'test' }, only: [] } }
+
+ it 'does not define only' do
+ expect(entry).not_to be_only_defined
+ end
+ end
+ end
+
+ context 'with inheritance' do
+ context 'of variables' do
+ let(:config) do
+ { variables: { A: 'job', B: 'job' } }
+ end
+
+ before do
+ entry.compose!(deps)
+ end
+
+ context 'with only job variables' do
+ it 'returns the job-defined variables' do
+ expect(entry.value).to include(
+ variables: { 'A' => 'job', 'B' => 'job' }
+ )
+ end
+ end
+
+ context 'when root yaml variables are used' do
+ let(:variables) do
+ Gitlab::Ci::Config::Entry::Variables.new(
+ A: 'root', C: 'root', D: 'root'
+ ).value
+ end
+
+ it 'returns all variables, with job values overriding root ones' do
+ expect(entry.value).to include(
+ variables: { 'A' => 'job', 'B' => 'job', 'C' => 'root', 'D' => 'root' }
+ )
+ end
+
+ context 'when inherit of defaults is disabled' do
+ let(:config) do
+ {
+ variables: { A: 'job', B: 'job' },
+ inherit: { variables: false }
+ }
+ end
+
+ it 'returns only job variables' do
+ expect(entry.value).to include(
+ variables: { 'A' => 'job', 'B' => 'job' }
+ )
+ end
+ end
+
+ context 'when inherit of only specific variable is enabled' do
+ let(:config) do
+ {
+ variables: { A: 'job', B: 'job' },
+ inherit: { variables: ['D'] }
+ }
+ end
+
+ it 'returns job variables plus the inherited root variable' do
+ expect(entry.value).to include(
+ variables: { 'A' => 'job', 'B' => 'job', 'D' => 'root' }
+ )
+ end
+ end
+ end
+ end
+
+ context 'of default:tags' do
+ using RSpec::Parameterized::TableSyntax
+
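+ # Job-level tags always win; default tags apply only when the job defines none and inherit:default allows them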
+ where(:name, :default_tags, :tags, :inherit_default, :result) do
+ "only local tags" | nil | %w[a b] | nil | %w[a b]
+ "only local tags" | nil | %w[a b] | true | %w[a b]
+ "only local tags" | nil | %w[a b] | false | %w[a b]
+ "global and local tags" | %w[b c] | %w[a b] | nil | %w[a b]
+ "global and local tags" | %w[b c] | %w[a b] | true | %w[a b]
+ "global and local tags" | %w[b c] | %w[a b] | false | %w[a b]
+ "only global tags" | %w[b c] | nil | nil | %w[b c]
+ "only global tags" | %w[b c] | nil | true | %w[b c]
+ "only global tags" | %w[b c] | nil | false | nil
+ "only global tags" | %w[b c] | nil | %w[image] | nil
+ "only global tags" | %w[b c] | nil | %w[tags] | %w[b c]
+ end
+
+ with_them do
+ let(:config) do
+ { tags: tags,
+ inherit: { default: inherit_default } }
+ end
+
+ let(:default_specified_tags) do
+ double('tags',
+ 'specified?' => true,
+ 'valid?' => true,
+ 'value' => default_tags,
+ 'errors' => [])
+ end
+
+ before do
+ allow(default).to receive('[]').with(:tags).and_return(default_specified_tags)
+
+ entry.compose!(deps)
+
+ expect(entry).to be_valid
+ end
+
+ it { expect(entry.tags_value).to eq(result) }
+ end
+ end
+ end
+ end
+
+ context 'when composed' do
+ before do
+ entry.compose!
+ end
+
+ describe '#value' do
+ context 'when entry is correct' do
+ let(:config) do
+ { stage: 'test' }
+ end
+
+ it 'returns correct value' do
+ expect(entry.value).to eq(
+ name: :rspec,
+ stage: 'test',
+ only: { refs: %w[branches tags] },
+ variables: {}
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/reports_spec.rb b/spec/lib/gitlab/ci/config/entry/reports_spec.rb
index c64bb0a4cc3..2c8f76c8f34 100644
--- a/spec/lib/gitlab/ci/config/entry/reports_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/reports_spec.rb
@@ -44,6 +44,8 @@ describe Gitlab::Ci::Config::Entry::Reports do
:license_scanning | 'gl-license-scanning-report.json'
:performance | 'performance.json'
:lsif | 'lsif.json'
+ :dotenv | 'build.dotenv'
+ :cobertura | 'cobertura-coverage.xml'
end
with_them do
diff --git a/spec/lib/gitlab/ci/config/entry/root_spec.rb b/spec/lib/gitlab/ci/config/entry/root_spec.rb
index cf0a3cfa963..c3871b6b3cf 100644
--- a/spec/lib/gitlab/ci/config/entry/root_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/root_spec.rb
@@ -32,7 +32,7 @@ describe Gitlab::Ci::Config::Entry::Root do
image: 'ruby:2.2',
default: {},
services: ['postgres:9.1', 'mysql:5.5'],
- variables: { VAR: 'value' },
+ variables: { VAR: 'root' },
after_script: ['make clean'],
stages: %w(build pages release),
cache: { key: 'k', untracked: true, paths: ['public/'] },
@@ -42,6 +42,7 @@ describe Gitlab::Ci::Config::Entry::Root do
stage: 'release',
before_script: [],
after_script: [],
+ variables: { 'VAR' => 'job' },
script: ["make changelog | tee release_changelog.txt"],
release: {
tag_name: 'v0.06',
@@ -127,7 +128,7 @@ describe Gitlab::Ci::Config::Entry::Root do
services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
stage: 'test',
cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push' },
- variables: {},
+ variables: { 'VAR' => 'root' },
ignore: false,
after_script: ['make clean'],
only: { refs: %w[branches tags] },
@@ -141,7 +142,7 @@ describe Gitlab::Ci::Config::Entry::Root do
services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
stage: 'test',
cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push' },
- variables: {},
+ variables: { 'VAR' => 'root' },
ignore: false,
after_script: ['make clean'],
only: { refs: %w[branches tags] },
@@ -157,7 +158,7 @@ describe Gitlab::Ci::Config::Entry::Root do
services: [{ name: "postgres:9.1" }, { name: "mysql:5.5" }],
cache: { key: "k", untracked: true, paths: ["public/"], policy: "pull-push" },
only: { refs: %w(branches tags) },
- variables: {},
+ variables: { 'VAR' => 'job' },
after_script: [],
ignore: false,
scheduling_type: :stage }
@@ -175,11 +176,11 @@ describe Gitlab::Ci::Config::Entry::Root do
image: 'ruby:2.1',
services: ['postgres:9.1', 'mysql:5.5']
},
- variables: { VAR: 'value' },
+ variables: { VAR: 'root' },
stages: %w(build pages),
cache: { key: 'k', untracked: true, paths: ['public/'] },
rspec: { script: %w[rspec ls] },
- spinach: { before_script: [], variables: { VAR: 'AA' }, script: 'spinach' } }
+ spinach: { before_script: [], variables: { VAR: 'job' }, script: 'spinach' } }
end
context 'when composed' do
@@ -203,7 +204,7 @@ describe Gitlab::Ci::Config::Entry::Root do
services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
stage: 'test',
cache: { key: 'k', untracked: true, paths: ['public/'], policy: "pull-push" },
- variables: {},
+ variables: { 'VAR' => 'root' },
ignore: false,
after_script: ['make clean'],
only: { refs: %w[branches tags] },
@@ -215,7 +216,7 @@ describe Gitlab::Ci::Config::Entry::Root do
services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
stage: 'test',
cache: { key: 'k', untracked: true, paths: ['public/'], policy: "pull-push" },
- variables: { 'VAR' => 'AA' },
+ variables: { 'VAR' => 'job' },
ignore: false,
after_script: ['make clean'],
only: { refs: %w[branches tags] },
diff --git a/spec/lib/gitlab/ci/config/external/file/artifact_spec.rb b/spec/lib/gitlab/ci/config/external/file/artifact_spec.rb
new file mode 100644
index 00000000000..a8eb13c47bc
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/external/file/artifact_spec.rb
@@ -0,0 +1,167 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Config::External::File::Artifact do
+ let(:parent_pipeline) { create(:ci_pipeline) }
+ let(:context) do
+ Gitlab::Ci::Config::External::Context.new(parent_pipeline: parent_pipeline)
+ end
+
+ let(:external_file) { described_class.new(params, context) }
+
+ describe '#matching?' do
+ context 'when params contain artifact location' do
+ let(:params) { { artifact: 'generated.yml' } }
+
+ it 'returns true' do
+ expect(external_file).to be_matching
+ end
+ end
+
+ context 'when params does not contain artifact location' do
+ let(:params) { {} }
+
+ it 'returns false' do
+ expect(external_file).not_to be_matching
+ end
+ end
+ end
+
+ describe '#valid?' do
+ shared_examples 'is invalid' do
+ it 'is not valid' do
+ expect(external_file).not_to be_valid
+ end
+
+ it 'sets the expected error' do
+ expect(external_file.errors)
+ .to contain_exactly(expected_error)
+ end
+ end
+
+ describe 'when used in non child pipeline context' do
+ let(:parent_pipeline) { nil }
+ let(:params) { { artifact: 'generated.yml' } }
+
+ let(:expected_error) do
+ 'Including configs from artifacts is only allowed when triggering child pipelines'
+ end
+
+ it_behaves_like 'is invalid'
+ end
+
+ context 'when used in child pipeline context' do
+ let(:parent_pipeline) { create(:ci_pipeline) }
+
+ context 'when job is not provided' do
+ let(:params) { { artifact: 'generated.yml' } }
+
+ let(:expected_error) do
+ 'Job must be provided when including configs from artifacts'
+ end
+
+ it_behaves_like 'is invalid'
+ end
+
+ context 'when job is provided' do
+ let(:params) { { artifact: 'generated.yml', job: 'generator' } }
+
+ context 'when job does not exist in the parent pipeline' do
+ let(:expected_error) do
+ 'Job `generator` not found in parent pipeline or does not have artifacts!'
+ end
+
+ it_behaves_like 'is invalid'
+ end
+
+ context 'when job exists in the parent pipeline' do
+ let!(:generator_job) { create(:ci_build, name: 'generator', pipeline: parent_pipeline) }
+
+ context 'when job does not have artifacts' do
+ let(:expected_error) do
+ 'Job `generator` not found in parent pipeline or does not have artifacts!'
+ end
+
+ it_behaves_like 'is invalid'
+ end
+
+ context 'when job has archive artifacts' do
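+ # The pages.zip fixture provides an archive without the metadata artifact needed for extraction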
+ let!(:artifacts) do
+ create(:ci_job_artifact, :archive,
+ job: generator_job,
+ file: fixture_file_upload(Rails.root.join('spec/fixtures/pages.zip'), 'application/zip'))
+ end
+
+ let(:expected_error) do
+ 'Job `generator` has missing artifacts metadata and cannot be extracted!'
+ end
+
+ it_behaves_like 'is invalid'
+
+ context 'when job has artifacts exceeding the max allowed size' do
+ let(:expected_error) do
+ "Artifacts archive for job `generator` is too large: max 1 KB"
+ end
+
+ before do
+ stub_const("#{Gitlab::Ci::ArtifactFileReader}::MAX_ARCHIVE_SIZE", 1.kilobyte)
+ end
+
+ it_behaves_like 'is invalid'
+ end
+
+ context 'when job has artifacts metadata' do
+ let!(:metadata) do
+ create(:ci_job_artifact, :metadata, job: generator_job)
+ end
+
+ let(:expected_error) do
+ 'Path `generated.yml` does not exist inside the `generator` artifacts archive!'
+ end
+
+ it_behaves_like 'is invalid'
+
+ context 'when file is found in metadata' do
+ let!(:artifacts) { create(:ci_job_artifact, :archive, job: generator_job) }
+ let!(:metadata) { create(:ci_job_artifact, :metadata, job: generator_job) }
+
+ context 'when file is empty' do
+ before do
+ allow_next_instance_of(Gitlab::Ci::ArtifactFileReader) do |reader|
+ allow(reader).to receive(:read).and_return('')
+ end
+ end
+
+ let(:expected_error) do
+ 'File `generated.yml` is empty!'
+ end
+
+ it_behaves_like 'is invalid'
+ end
+
+ context 'when file is not empty' do
+ it 'is valid' do
+ expect(external_file).to be_valid
+ expect(external_file.content).to be_present
+ end
+
+ it 'propagates parent_pipeline to nested includes' do
+ expected_attrs = {
+ parent_pipeline: parent_pipeline,
+ project: anything,
+ sha: anything,
+ user: anything
+ }
+ expect(context).to receive(:mutate).with(expected_attrs).and_call_original
+ external_file.content
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/external/file/local_spec.rb b/spec/lib/gitlab/ci/config/external/file/local_spec.rb
index 95f0c93e758..c9851239859 100644
--- a/spec/lib/gitlab/ci/config/external/file/local_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/local_spec.rb
@@ -3,15 +3,22 @@
require 'spec_helper'
describe Gitlab::Ci::Config::External::File::Local do
- set(:project) { create(:project, :repository) }
- set(:user) { create(:user) }
-
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
let(:sha) { '12345' }
- let(:context_params) { { project: project, sha: sha, user: user } }
let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) }
-
let(:params) { { local: location } }
let(:local_file) { described_class.new(params, context) }
+ let(:parent_pipeline) { double(:parent_pipeline) }
+
+ let(:context_params) do
+ {
+ project: project,
+ sha: sha,
+ user: user,
+ parent_pipeline: parent_pipeline
+ }
+ end
before do
allow_any_instance_of(Gitlab::Ci::Config::External::Context)
@@ -119,7 +126,11 @@ describe Gitlab::Ci::Config::External::File::Local do
subject { local_file.send(:expand_context_attrs) }
it 'inherits project, user and sha' do
- is_expected.to include(user: user, project: project, sha: sha)
+ is_expected.to include(
+ user: user,
+ project: project,
+ sha: sha,
+ parent_pipeline: parent_pipeline)
end
end
diff --git a/spec/lib/gitlab/ci/config/external/file/project_spec.rb b/spec/lib/gitlab/ci/config/external/file/project_spec.rb
index 0aea3a59b33..b2924ae9d91 100644
--- a/spec/lib/gitlab/ci/config/external/file/project_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/project_spec.rb
@@ -3,15 +3,23 @@
require 'spec_helper'
describe Gitlab::Ci::Config::External::File::Project do
- set(:context_project) { create(:project) }
- set(:project) { create(:project, :repository) }
- set(:user) { create(:user) }
-
+ let_it_be(:context_project) { create(:project) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
let(:context_user) { user }
- let(:context_params) { { project: context_project, sha: '12345', user: context_user } }
+ let(:parent_pipeline) { double(:parent_pipeline) }
let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) }
let(:project_file) { described_class.new(params, context) }
+ let(:context_params) do
+ {
+ project: context_project,
+ sha: '12345',
+ user: context_user,
+ parent_pipeline: parent_pipeline
+ }
+ end
+
before do
project.add_developer(user)
@@ -153,7 +161,11 @@ describe Gitlab::Ci::Config::External::File::Project do
subject { project_file.send(:expand_context_attrs) }
it 'inherits user, and target project and sha' do
- is_expected.to include(user: user, project: project, sha: project.commit('master').id)
+ is_expected.to include(
+ user: user,
+ project: project,
+ sha: project.commit('master').id,
+ parent_pipeline: parent_pipeline)
end
end
diff --git a/spec/lib/gitlab/ci/config/external/file/template_spec.rb b/spec/lib/gitlab/ci/config/external/file/template_spec.rb
index ee1660e4dfd..7ea5aadac52 100644
--- a/spec/lib/gitlab/ci/config/external/file/template_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/template_spec.rb
@@ -3,14 +3,12 @@
require 'spec_helper'
describe Gitlab::Ci::Config::External::File::Template do
- set(:project) { create(:project) }
- set(:user) { create(:user) }
-
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
let(:context_params) { { project: project, sha: '12345', user: user } }
let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) }
let(:template) { 'Auto-DevOps.gitlab-ci.yml' }
let(:params) { { template: template } }
-
let(:template_file) { described_class.new(params, context) }
before do
diff --git a/spec/lib/gitlab/ci/config/external/mapper_spec.rb b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
index 2a5f62f7e74..6839002c3ab 100644
--- a/spec/lib/gitlab/ci/config/external/mapper_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
@@ -5,9 +5,8 @@ require 'spec_helper'
describe Gitlab::Ci::Config::External::Mapper do
include StubRequests
- set(:project) { create(:project, :repository) }
- set(:user) { create(:user) }
-
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
let(:local_file) { '/lib/gitlab/ci/templates/non-existent-file.yml' }
let(:remote_url) { 'https://gitlab.com/gitlab-org/gitlab-foss/blob/1234/.gitlab-ci-1.yml' }
let(:template_file) { 'Auto-DevOps.gitlab-ci.yml' }
diff --git a/spec/lib/gitlab/ci/config/external/processor_spec.rb b/spec/lib/gitlab/ci/config/external/processor_spec.rb
index bb2d3f66972..ff08d4703fe 100644
--- a/spec/lib/gitlab/ci/config/external/processor_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/processor_spec.rb
@@ -5,10 +5,9 @@ require 'spec_helper'
describe Gitlab::Ci::Config::External::Processor do
include StubRequests
- set(:project) { create(:project, :repository) }
- set(:another_project) { create(:project, :repository) }
- set(:user) { create(:user) }
-
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:another_project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
let(:sha) { '12345' }
let(:context_params) { { project: project, sha: sha, user: user } }
let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) }
diff --git a/spec/lib/gitlab/ci/config_spec.rb b/spec/lib/gitlab/ci/config_spec.rb
index 63a36995284..d8101e8a621 100644
--- a/spec/lib/gitlab/ci/config_spec.rb
+++ b/spec/lib/gitlab/ci/config_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe Gitlab::Ci::Config do
include StubRequests
- set(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
before do
allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
@@ -79,26 +79,6 @@ describe Gitlab::Ci::Config do
it { is_expected.to eq %w[.pre stage1 stage2 .post] }
end
-
- context 'with feature disabled' do
- before do
- stub_feature_flags(ci_pre_post_pipeline_stages: false)
- end
-
- let(:yml) do
- <<-EOS
- stages:
- - stage1
- - stage2
- job1:
- stage: stage1
- script:
- - ls
- EOS
- end
-
- it { is_expected.to eq %w[stage1 stage2] }
- end
end
end
@@ -376,23 +356,6 @@ describe Gitlab::Ci::Config do
end
end
- context 'when context expansion timeout is disabled' do
- before do
- allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
- allow(instance).to receive(:check_execution_time!).and_call_original
- end
-
- allow(Feature)
- .to receive(:enabled?)
- .with(:ci_limit_yaml_expansion, project, default_enabled: true)
- .and_return(false)
- end
-
- it 'does not raises errors' do
- expect { config }.not_to raise_error
- end
- end
-
describe 'external file version' do
context 'when external local file SHA is defined' do
it 'is using a defined value' do
@@ -541,5 +504,76 @@ describe Gitlab::Ci::Config do
end
end
end
+
+ context 'when including file from artifact' do
+ let(:config) do
+ described_class.new(
+ gitlab_ci_yml,
+ project: nil,
+ sha: nil,
+ user: nil,
+ parent_pipeline: parent_pipeline)
+ end
+
+ let(:gitlab_ci_yml) do
+ <<~HEREDOC
+ include:
+ - artifact: generated.yml
+ job: rspec
+ HEREDOC
+ end
+
+ let(:parent_pipeline) { nil }
+
+ context 'when used in the context of a child pipeline' do
+ # This job has ci_build_artifacts.zip artifact archive which
+ # contains generated.yml
+ let!(:job) { create(:ci_build, :artifacts, name: 'rspec', pipeline: parent_pipeline) }
+ let(:parent_pipeline) { create(:ci_pipeline) }
+
+ it 'returns valid config' do
+ expect(config).to be_valid
+ end
+
+ context 'when job key is missing' do
+ let(:gitlab_ci_yml) do
+ <<~HEREDOC
+ include:
+ - artifact: generated.yml
+ HEREDOC
+ end
+
+ it 'raises an error' do
+ expect { config }.to raise_error(
+ described_class::ConfigError,
+ 'Job must be provided when including configs from artifacts'
+ )
+ end
+ end
+
+ context 'when artifact key is missing' do
+ let(:gitlab_ci_yml) do
+ <<~HEREDOC
+ include:
+ - job: rspec
+ HEREDOC
+ end
+
+ it 'raises an error' do
+ expect { config }.to raise_error(
+ described_class::ConfigError,
+ /needs to match exactly one accessor!/
+ )
+ end
+ end
+ end
+
+ it 'disallows the use in parent pipelines' do
+ expect { config }.to raise_error(
+ described_class::ConfigError,
+ 'Including configs from artifacts is only allowed when triggering child pipelines'
+ )
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/parsers/coverage/cobertura_spec.rb b/spec/lib/gitlab/ci/parsers/coverage/cobertura_spec.rb
new file mode 100644
index 00000000000..e97544683db
--- /dev/null
+++ b/spec/lib/gitlab/ci/parsers/coverage/cobertura_spec.rb
@@ -0,0 +1,176 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+describe Gitlab::Ci::Parsers::Coverage::Cobertura do
+ describe '#parse!' do
+ subject { described_class.new.parse!(cobertura, coverage_report) }
+
+ let(:coverage_report) { Gitlab::Ci::Reports::CoverageReports.new }
+
+ context 'when data is Cobertura style XML' do
+ context 'when there is no <class>' do
+ let(:cobertura) { '' }
+
+ it 'parses XML and returns empty coverage' do
+ expect { subject }.not_to raise_error
+
+ expect(coverage_report.files).to eq({})
+ end
+ end
+
+ context 'when there is a single <class>' do
+ context 'with no lines' do
+ let(:cobertura) do
+ <<-EOF.strip_heredoc
+ <classes><class filename="app.rb"></class></classes>
+ EOF
+ end
+
+ it 'parses XML and returns empty coverage' do
+ expect { subject }.not_to raise_error
+
+ expect(coverage_report.files).to eq({})
+ end
+ end
+
+ context 'with a single line' do
+ let(:cobertura) do
+ <<-EOF.strip_heredoc
+ <classes>
+ <class filename="app.rb"><lines>
+ <line number="1" hits="2"/>
+ </lines></class>
+ </classes>
+ EOF
+ end
+
+ it 'parses XML and returns a single file with coverage' do
+ expect { subject }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2 } })
+ end
+ end
+
+ context 'with multiple lines and methods info' do
+ let(:cobertura) do
+ <<-EOF.strip_heredoc
+ <classes>
+ <class filename="app.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ </classes>
+ EOF
+ end
+
+ it 'parses XML and returns a single file with coverage' do
+ expect { subject }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2, 2 => 0 } })
+ end
+ end
+ end
+
+ context 'when there are multiple <class>' do
+ context 'with the same filename and different lines' do
+ let(:cobertura) do
+ <<-EOF.strip_heredoc
+ <classes>
+ <class filename="app.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ <class filename="app.rb"><methods/><lines>
+ <line number="6" hits="1"/>
+ <line number="7" hits="1"/>
+ </lines></class>
+ </classes>
+ EOF
+ end
+
+ it 'parses XML and returns a single file with merged coverage' do
+ expect { subject }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2, 2 => 0, 6 => 1, 7 => 1 } })
+ end
+ end
+
+ context 'with the same filename and lines' do
+ let(:cobertura) do
+ <<-EOF.strip_heredoc
+ <packages><package><classes>
+ <class filename="app.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ <class filename="app.rb"><methods/><lines>
+ <line number="1" hits="1"/>
+ <line number="2" hits="1"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns a single file with summed-up coverage' do
+ expect { subject }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 3, 2 => 1 } })
+ end
+ end
+
+ context 'with missing filename' do
+ let(:cobertura) do
+ <<-EOF.strip_heredoc
+ <classes>
+ <class filename="app.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ <class><methods/><lines>
+ <line number="6" hits="1"/>
+ <line number="7" hits="1"/>
+ </lines></class>
+ </classes>
+ EOF
+ end
+
+ it 'parses XML and ignores the class with a missing filename' do
+ expect { subject }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2, 2 => 0 } })
+ end
+ end
+
+ context 'with invalid line information' do
+ let(:cobertura) do
+ <<-EOF.strip_heredoc
+ <classes>
+ <class filename="app.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ <class filename="app.rb"><methods/><lines>
+ <line null="test" hits="1"/>
+ <line number="7" hits="1"/>
+ </lines></class>
+ </classes>
+ EOF
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(described_class::CoberturaParserError)
+ end
+ end
+ end
+ end
+
+ context 'when data is not Cobertura style XML' do
+ let(:cobertura) { { coverage: '12%' }.to_json }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(described_class::CoberturaParserError)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/parsers/test/junit_spec.rb b/spec/lib/gitlab/ci/parsers/test/junit_spec.rb
index b91cf1dd3ed..9a486c312d4 100644
--- a/spec/lib/gitlab/ci/parsers/test/junit_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/test/junit_spec.rb
@@ -205,6 +205,75 @@ describe Gitlab::Ci::Parsers::Test::Junit do
end
end
+ context 'when data contains an attachment tag' do
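+ # [[ATTACHMENT|path]] inside <system-out> marks a file to attach to the parsed test case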
+ let(:junit) do
+ <<~EOF
+ <testsuites>
+ <testsuite>
+ <testcase classname='Calculator' name='sumTest1' time='0.01'>
+ <failure>Some failure</failure>
+ <system-out>[[ATTACHMENT|some/path.png]]</system-out>
+ </testcase>
+ </testsuite>
+ </testsuites>
+ EOF
+ end
+
+ it 'adds attachment to a test case' do
+ expect { subject }.not_to raise_error
+
+ expect(test_cases[0].has_attachment?).to be_truthy
+ expect(test_cases[0].attachment).to eq("some/path.png")
+ end
+ end
+
+ context 'when data contains multiple attachments tag' do
+ let(:junit) do
+ <<~EOF
+ <testsuites>
+ <testsuite>
+ <testcase classname='Calculator' name='sumTest1' time='0.01'>
+ <failure>Some failure</failure>
+ <system-out>
+ [[ATTACHMENT|some/path.png]]
+ [[ATTACHMENT|some/path.html]]
+ </system-out>
+ </testcase>
+ </testsuite>
+ </testsuites>
+ EOF
+ end
+
+ it 'adds the first matching attachment to a test case' do
+ expect { subject }.not_to raise_error
+
+ expect(test_cases[0].has_attachment?).to be_truthy
+ expect(test_cases[0].attachment).to eq("some/path.png")
+ end
+ end
+
+ context 'when data does not match attachment tag regex' do
+ let(:junit) do
+ <<~EOF
+ <testsuites>
+ <testsuite>
+ <testcase classname='Calculator' name='sumTest1' time='0.01'>
+ <failure>Some failure</failure>
+ <system-out>[[attachment]some/path.png]]</system-out>
+ </testcase>
+ </testsuite>
+ </testsuites>
+ EOF
+ end
+
+ it 'does not add attachment to a test case' do
+ expect { subject }.not_to raise_error
+
+ expect(test_cases[0].has_attachment?).to be_falsy
+ expect(test_cases[0].attachment).to be_nil
+ end
+ end
+
private
def flattened_test_cases(test_suite)
diff --git a/spec/lib/gitlab/ci/parsers_spec.rb b/spec/lib/gitlab/ci/parsers_spec.rb
index 4b647bffe59..9d6896b3cb4 100644
--- a/spec/lib/gitlab/ci/parsers_spec.rb
+++ b/spec/lib/gitlab/ci/parsers_spec.rb
@@ -6,7 +6,7 @@ describe Gitlab::Ci::Parsers do
describe '.fabricate!' do
subject { described_class.fabricate!(file_type) }
- context 'when file_type exists' do
+ context 'when file_type is junit' do
let(:file_type) { 'junit' }
it 'fabricates the class' do
@@ -14,6 +14,14 @@ describe Gitlab::Ci::Parsers do
end
end
+ context 'when file_type is cobertura' do
+ let(:file_type) { 'cobertura' }
+
+ it 'fabricates the class' do
+ is_expected.to be_a(described_class::Coverage::Cobertura)
+ end
+ end
+
context 'when file_type does not exist' do
let(:file_type) { 'undefined' }
diff --git a/spec/lib/gitlab/ci/pipeline/chain/build/associations_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/build/associations_spec.rb
new file mode 100644
index 00000000000..542a2462b59
--- /dev/null
+++ b/spec/lib/gitlab/ci/pipeline/chain/build/associations_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Pipeline::Chain::Build::Associations do
+ let(:project) { create(:project, :repository) }
+ let(:user) { create(:user, developer_projects: [project]) }
+ let(:pipeline) { Ci::Pipeline.new }
+ let(:step) { described_class.new(pipeline, command) }
+
+ let(:command) do
+ Gitlab::Ci::Pipeline::Chain::Command.new(
+ source: :push,
+ origin_ref: 'master',
+ checkout_sha: project.commit.id,
+ after_sha: nil,
+ before_sha: nil,
+ trigger_request: nil,
+ schedule: nil,
+ merge_request: nil,
+ project: project,
+ current_user: user,
+ bridge: bridge)
+ end
+
+ context 'when a bridge is passed in to the pipeline creation' do
+ let(:bridge) { create(:ci_bridge) }
+
+ it 'links the pipeline to the upstream bridge job' do
+ step.perform!
+
+ expect(pipeline.source_pipeline).to be_present
+ expect(pipeline.source_pipeline).to be_valid
+ expect(pipeline.source_pipeline).to have_attributes(
+ source_pipeline: bridge.pipeline, source_project: bridge.project,
+ source_bridge: bridge, project: project
+ )
+ end
+
+ it 'never breaks the chain' do
+ step.perform!
+
+ expect(step.break?).to eq(false)
+ end
+ end
+
+ context 'when a bridge is not passed in to the pipeline creation' do
+ let(:bridge) { nil }
+
+ it 'leaves the source pipeline empty' do
+ step.perform!
+
+ expect(pipeline.source_pipeline).to be_nil
+ end
+
+ it 'never breaks the chain' do
+ step.perform!
+
+ expect(step.break?).to eq(false)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb
index b81094f8b4a..094563bd979 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe Gitlab::Ci::Pipeline::Chain::Build do
- set(:project) { create(:project, :repository) }
- set(:user) { create(:user, developer_projects: [project]) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user, developer_projects: [project]) }
let(:pipeline) { Ci::Pipeline.new }
let(:variables_attributes) do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb
index 5775e934cfd..56707f4e6e4 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::Ci::Pipeline::Chain::Command do
- set(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
describe '#initialize' do
subject do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb
index 650ab193997..f18ad05a704 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe Gitlab::Ci::Pipeline::Chain::Create do
- set(:project) { create(:project) }
- set(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
let(:pipeline) do
build(:ci_empty_pipeline, project: project, ref: 'master')
diff --git a/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
index 52e9432dc92..6dab5679e60 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe Gitlab::Ci::Pipeline::Chain::Populate do
- set(:project) { create(:project, :repository) }
- set(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
let(:pipeline) do
build(:ci_pipeline, project: project, ref: 'master', user: user)
diff --git a/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb
index 9cb59442dfd..9033b71b19f 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb
@@ -3,9 +3,8 @@
require 'spec_helper'
describe Gitlab::Ci::Pipeline::Chain::Sequence do
- set(:project) { create(:project) }
- set(:user) { create(:user) }
-
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
let(:pipeline) { build_stubbed(:ci_pipeline) }
let(:command) { Gitlab::Ci::Pipeline::Chain::Command.new }
let(:first_step) { spy('first step') }
diff --git a/spec/lib/gitlab/ci/pipeline/chain/skip_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/skip_spec.rb
index fe46633ed1b..55d6d35340e 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/skip_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/skip_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe Gitlab::Ci::Pipeline::Chain::Skip do
- set(:project) { create(:project) }
- set(:user) { create(:user) }
- set(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:project, reload: true) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:pipeline, reload: true) { create(:ci_pipeline, project: project) }
let(:command) do
Gitlab::Ci::Pipeline::Chain::Command.new(
diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/abilities_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/abilities_spec.rb
index 24d3beb35b9..8bf8bdf08ff 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/validate/abilities_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/validate/abilities_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe Gitlab::Ci::Pipeline::Chain::Validate::Abilities do
- set(:project) { create(:project, :repository) }
- set(:user) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
let(:pipeline) do
build_stubbed(:ci_pipeline, project: project)
diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/repository_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/repository_spec.rb
index b866355906e..83271007ab0 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/validate/repository_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/validate/repository_spec.rb
@@ -3,10 +3,9 @@
require 'spec_helper'
describe Gitlab::Ci::Pipeline::Chain::Validate::Repository do
- set(:project) { create(:project, :repository) }
- set(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
let(:pipeline) { build_stubbed(:ci_pipeline) }
-
let!(:step) { described_class.new(pipeline, command) }
before do
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
index 1f5fc000832..01f65939da7 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
@@ -214,24 +214,98 @@ describe Gitlab::Ci::Pipeline::Seed::Build do
it { is_expected.to be_a(::Ci::Build) }
it { is_expected.to be_valid }
- context 'when job has environment name' do
- let(:attributes) { { name: 'rspec', ref: 'master', environment: 'production' } }
-
+ shared_examples_for 'deployment job' do
it 'returns a job with deployment' do
expect(subject.deployment).not_to be_nil
expect(subject.deployment.deployable).to eq(subject)
- expect(subject.deployment.environment.name).to eq('production')
+ expect(subject.deployment.environment.name).to eq(expected_environment_name)
end
+ end
+
+ shared_examples_for 'non-deployment job' do
+ it 'returns a job without deployment' do
+ expect(subject.deployment).to be_nil
+ end
+ end
+
+ shared_examples_for 'ensures environment existence' do
+ it 'has environment' do
+ expect(subject).to be_has_environment
+ expect(subject.environment).to eq(environment_name)
+ expect(subject.metadata.expanded_environment_name).to eq(expected_environment_name)
+ expect(Environment.exists?(name: expected_environment_name)).to eq(true)
+ end
+ end
+
+ shared_examples_for 'ensures environment inexistence' do
+ it 'does not have environment' do
+ expect(subject).not_to be_has_environment
+ expect(subject.environment).to be_nil
+ expect(subject.metadata.expanded_environment_name).to be_nil
+ expect(Environment.exists?(name: expected_environment_name)).to eq(false)
+ end
+ end
+
+ context 'when job deploys to production' do
+ let(:environment_name) { 'production' }
+ let(:expected_environment_name) { 'production' }
+ let(:attributes) { { name: 'deploy', ref: 'master', environment: 'production' } }
+
+ it_behaves_like 'deployment job'
+ it_behaves_like 'ensures environment existence'
context 'when the environment name is invalid' do
- let(:attributes) { { name: 'rspec', ref: 'master', environment: '!!!' } }
+ let(:attributes) { { name: 'deploy', ref: 'master', environment: '!!!' } }
- it 'returns a job without deployment' do
- expect(subject.deployment).to be_nil
+ it_behaves_like 'non-deployment job'
+ it_behaves_like 'ensures environment inexistence'
+
+ it 'tracks an exception' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+ .with(an_instance_of(described_class::EnvironmentCreationFailure),
+ project_id: project.id,
+ reason: %q{Name can contain only letters, digits, '-', '_', '/', '$', '{', '}', '.', and spaces, but it cannot start or end with '/'})
+ .once
+
+ subject
end
end
end
+ context 'when job starts a review app' do
+ let(:environment_name) { 'review/$CI_COMMIT_REF_NAME' }
+ let(:expected_environment_name) { "review/#{pipeline.ref}" }
+
+ let(:attributes) do
+ {
+ name: 'deploy', ref: 'master', environment: environment_name,
+ options: { environment: { name: environment_name } }
+ }
+ end
+
+ it_behaves_like 'deployment job'
+ it_behaves_like 'ensures environment existence'
+ end
+
+ context 'when job stops a review app' do
+ let(:environment_name) { 'review/$CI_COMMIT_REF_NAME' }
+ let(:expected_environment_name) { "review/#{pipeline.ref}" }
+
+ let(:attributes) do
+ {
+ name: 'deploy', ref: 'master', environment: environment_name,
+ options: { environment: { name: environment_name, action: 'stop' } }
+ }
+ end
+
+ it 'returns a job without deployment' do
+ expect(subject.deployment).to be_nil
+ end
+
+ it_behaves_like 'non-deployment job'
+ it_behaves_like 'ensures environment existence'
+ end
+
context 'when job belongs to a resource group' do
let(:attributes) { { name: 'rspec', ref: 'master', resource_group_key: 'iOS' } }
diff --git a/spec/lib/gitlab/ci/pipeline/seed/deployment_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/deployment_spec.rb
index c5c91135f60..ceb3cb28bc9 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/deployment_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/deployment_spec.rb
@@ -3,14 +3,15 @@
require 'spec_helper'
describe Gitlab::Ci::Pipeline::Seed::Deployment do
- let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:project, refind: true) { create(:project, :repository) }
let(:pipeline) do
create(:ci_pipeline, project: project,
sha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0')
end
let(:job) { build(:ci_build, project: project, pipeline: pipeline) }
- let(:seed) { described_class.new(job) }
+ let(:environment) { Gitlab::Ci::Pipeline::Seed::Environment.new(job).to_resource }
+ let(:seed) { described_class.new(job, environment) }
let(:attributes) { {} }
before do
@@ -24,10 +25,12 @@ describe Gitlab::Ci::Pipeline::Seed::Deployment do
let(:attributes) do
{
environment: 'production',
- options: { environment: { name: 'production' } }
+ options: { environment: { name: 'production', **kubernetes_options } }
}
end
+ let(:kubernetes_options) { {} }
+
it 'returns a deployment object with environment' do
expect(subject).to be_a(Deployment)
expect(subject.iid).to be_present
@@ -37,14 +40,30 @@ describe Gitlab::Ci::Pipeline::Seed::Deployment do
end
context 'when environment has deployment platform' do
- let!(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
+ let!(:cluster) { create(:cluster, :provided_by_gcp, projects: [project], managed: managed_cluster) }
+ let(:managed_cluster) { true }
it 'sets the cluster and deployment_cluster' do
expect(subject.cluster).to eq(cluster) # until we stop double writing in 12.9: https://gitlab.com/gitlab-org/gitlab/issues/202628
- expect(subject.deployment_cluster).to have_attributes(
- cluster_id: cluster.id,
- kubernetes_namespace: subject.environment.deployment_namespace
- )
+ expect(subject.deployment_cluster.cluster).to eq(cluster)
+ end
+
+ context 'when a custom namespace is given' do
+ let(:kubernetes_options) { { kubernetes: { namespace: 'the-custom-namespace' } } }
+
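+ # A user-supplied namespace is honoured only for clusters that are not GitLab-managed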
+ context 'when cluster is managed' do
+ it 'does not set the custom namespace' do
+ expect(subject.deployment_cluster.kubernetes_namespace).not_to eq('the-custom-namespace')
+ end
+ end
+
+ context 'when cluster is not managed' do
+ let(:managed_cluster) { false }
+
+ it 'sets the custom namespace' do
+ expect(subject.deployment_cluster.kubernetes_namespace).to eq('the-custom-namespace')
+ end
+ end
end
end
@@ -82,5 +101,13 @@ describe Gitlab::Ci::Pipeline::Seed::Deployment do
is_expected.to be_nil
end
end
+
+ context 'when job does not have environment attribute' do
+ let(:attributes) { { name: 'test' } }
+
+ it 'returns nothing' do
+ is_expected.to be_nil
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb
index 71389999c6e..4c0464e5e7c 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb
@@ -15,29 +15,68 @@ describe Gitlab::Ci::Pipeline::Seed::Environment do
describe '#to_resource' do
subject { seed.to_resource }
- context 'when job has environment attribute' do
- let(:attributes) do
- {
- environment: 'production',
- options: { environment: { name: 'production' } }
- }
- end
-
+ shared_examples_for 'returning a correct environment' do
it 'returns a persisted environment object' do
+ expect { subject }.to change { Environment.count }.by(1)
+
expect(subject).to be_a(Environment)
expect(subject).to be_persisted
expect(subject.project).to eq(project)
- expect(subject.name).to eq('production')
+ expect(subject.name).to eq(expected_environment_name)
end
context 'when environment has already existed' do
- let!(:environment) { create(:environment, project: project, name: 'production') }
+ let!(:environment) { create(:environment, project: project, name: expected_environment_name) }
it 'returns the existing environment object' do
+ expect { subject }.not_to change { Environment.count }
+
expect(subject).to be_persisted
expect(subject).to eq(environment)
end
end
end
+
+ context 'when job has environment attribute' do
+ let(:environment_name) { 'production' }
+ let(:expected_environment_name) { 'production' }
+
+ let(:attributes) do
+ {
+ environment: environment_name,
+ options: { environment: { name: environment_name } }
+ }
+ end
+
+ it_behaves_like 'returning a correct environment'
+ end
+
+ context 'when job starts a review app' do
+ let(:environment_name) { 'review/$CI_COMMIT_REF_NAME' }
+ let(:expected_environment_name) { "review/#{job.ref}" }
+
+ let(:attributes) do
+ {
+ environment: environment_name,
+ options: { environment: { name: environment_name } }
+ }
+ end
+
+ it_behaves_like 'returning a correct environment'
+ end
+
+ context 'when job stops a review app' do
+ let(:environment_name) { 'review/$CI_COMMIT_REF_NAME' }
+ let(:expected_environment_name) { "review/#{job.ref}" }
+
+ let(:attributes) do
+ {
+ environment: environment_name,
+ options: { environment: { name: environment_name, action: 'stop' } }
+ }
+ end
+
+ it_behaves_like 'returning a correct environment'
+ end
end
end
diff --git a/spec/lib/gitlab/ci/reports/coverage_reports_spec.rb b/spec/lib/gitlab/ci/reports/coverage_reports_spec.rb
new file mode 100644
index 00000000000..7cf43ceab32
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/coverage_reports_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Reports::CoverageReports do
+ let(:coverage_report) { described_class.new }
+
+ it { expect(coverage_report.files).to eq({}) }
+
+ describe '#pick' do
+ before do
+ coverage_report.add_file('app.rb', { 1 => 0, 2 => 1 })
+ coverage_report.add_file('routes.rb', { 3 => 1, 4 => 0 })
+ end
+
+ it 'returns only picked files while ignoring nonexistent ones' do
+ expect(coverage_report.pick(['routes.rb', 'nonexistent.txt'])).to eq({
+ files: { 'routes.rb' => { 3 => 1, 4 => 0 } }
+ })
+ end
+ end
+
+ describe '#add_file' do
+ context 'when providing two individual files' do
+ before do
+ coverage_report.add_file('app.rb', { 1 => 0, 2 => 1 })
+ coverage_report.add_file('routes.rb', { 3 => 1, 4 => 0 })
+ end
+
+ it 'stores the line coverage for each file' do
+ expect(coverage_report.files).to eq({
+ 'app.rb' => { 1 => 0, 2 => 1 },
+ 'routes.rb' => { 3 => 1, 4 => 0 }
+ })
+ end
+ end
+
+ context 'when providing the same files twice' do
+ context 'with different line coverage' do
+ before do
+ coverage_report.add_file('admin.rb', { 1 => 0, 2 => 1 })
+ coverage_report.add_file('admin.rb', { 3 => 1, 4 => 0 })
+ end
+
+ it 'merges the line coverage for the file' do
+ expect(coverage_report.files).to eq({
+ 'admin.rb' => { 1 => 0, 2 => 1, 3 => 1, 4 => 0 }
+ })
+ end
+ end
+
+ context 'with identical line coverage' do
+ before do
+ coverage_report.add_file('projects.rb', { 1 => 0, 2 => 1 })
+ coverage_report.add_file('projects.rb', { 1 => 0, 2 => 1 })
+ end
+
+ it 'sums the hits for the repeated lines' do
+ expect(coverage_report.files).to eq({
+ 'projects.rb' => { 1 => 0, 2 => 2 }
+ })
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/reports/test_case_spec.rb b/spec/lib/gitlab/ci/reports/test_case_spec.rb
index 20c489ee94c..c13161f3e7c 100644
--- a/spec/lib/gitlab/ci/reports/test_case_spec.rb
+++ b/spec/lib/gitlab/ci/reports/test_case_spec.rb
@@ -88,5 +88,17 @@ describe Gitlab::Ci::Reports::TestCase do
expect { test_case }.to raise_error(ArgumentError)
end
end
+
+ context 'when attachment is present' do
+ let(:attachment_test_case) { build(:test_case, :with_attachment) }
+
+ it "initializes the attachment if present" do
+ expect(attachment_test_case.attachment).to eq("some/path.png")
+ end
+
+ it 'returns true for #has_attachment?' do
+ expect(attachment_test_case.has_attachment?).to be_truthy
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/status/composite_spec.rb b/spec/lib/gitlab/ci/status/composite_spec.rb
index 857483a9e0a..b9d4c39e0c2 100644
--- a/spec/lib/gitlab/ci/status/composite_spec.rb
+++ b/spec/lib/gitlab/ci/status/composite_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::Ci::Status::Composite do
- set(:pipeline) { create(:ci_pipeline) }
+ let_it_be(:pipeline) { create(:ci_pipeline) }
before(:all) do
@statuses = HasStatus::STATUSES_ENUM.map do |status, idx|
diff --git a/spec/lib/gitlab/ci/templates/templates_spec.rb b/spec/lib/gitlab/ci/templates/templates_spec.rb
index b52064b3036..bc3d5b89220 100644
--- a/spec/lib/gitlab/ci/templates/templates_spec.rb
+++ b/spec/lib/gitlab/ci/templates/templates_spec.rb
@@ -2,33 +2,43 @@
require 'spec_helper'
-describe "CI YML Templates" do
- using RSpec::Parameterized::TableSyntax
-
+describe 'CI YML Templates' do
subject { Gitlab::Ci::YamlProcessor.new(content) }
- where(:template_name) do
- Gitlab::Template::GitlabCiYmlTemplate.all.map(&:full_name)
- end
-
- with_them do
- let(:content) do
- <<~EOS
- include:
- - template: #{template_name}
+ let(:all_templates) { Gitlab::Template::GitlabCiYmlTemplate.all.map(&:full_name) }
- concrete_build_implemented_by_a_user:
- stage: test
- script: do something
- EOS
+ let(:disabled_templates) do
+ Gitlab::Template::GitlabCiYmlTemplate.disabled_templates.map do |template|
+ template + Gitlab::Template::GitlabCiYmlTemplate.extension
end
+ end
+
+ context 'included in a CI YAML configuration' do
+ using RSpec::Parameterized::TableSyntax
- it 'is valid' do
- expect { subject }.not_to raise_error
+ where(:template_name) do
+ all_templates - disabled_templates
end
- it 'require default stages to be included' do
- expect(subject.stages).to include(*Gitlab::Ci::Config::Entry::Stages.default)
+ with_them do
+ let(:content) do
+ <<~EOS
+ include:
+ - template: #{template_name}
+
+ concrete_build_implemented_by_a_user:
+ stage: test
+ script: do something
+ EOS
+ end
+
+ it 'is valid' do
+ expect { subject }.not_to raise_error
+ end
+
+ it 'requires default stages to be included' do
+ expect(subject.stages).to include(*Gitlab::Ci::Config::Entry::Stages.default)
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/trace/chunked_io_spec.rb b/spec/lib/gitlab/ci/trace/chunked_io_spec.rb
index 1b034656e7d..92b3e5562a9 100644
--- a/spec/lib/gitlab/ci/trace/chunked_io_spec.rb
+++ b/spec/lib/gitlab/ci/trace/chunked_io_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
include ChunkedIOHelpers
- set(:build) { create(:ci_build, :running) }
+ let_it_be(:build) { create(:ci_build, :running) }
let(:chunked_io) { described_class.new(build) }
before do
diff --git a/spec/lib/gitlab/ci/trace/stream_spec.rb b/spec/lib/gitlab/ci/trace/stream_spec.rb
index 45b59541ce6..ea79073840d 100644
--- a/spec/lib/gitlab/ci/trace/stream_spec.rb
+++ b/spec/lib/gitlab/ci/trace/stream_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
- set(:build) { create(:ci_build, :running) }
+ let_it_be(:build) { create(:ci_build, :running) }
before do
stub_feature_flags(ci_enable_live_trace: true)
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index cfc3d852de0..af0a85f6c4e 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -509,28 +509,48 @@ module Gitlab
describe "before_script" do
context "in global context" do
- let(:config) do
- {
- before_script: ["global script"],
- test: { script: ["script"] }
- }
- end
+ using RSpec::Parameterized::TableSyntax
- it "return commands with scripts concatenated" do
- expect(subject[:options][:before_script]).to eq(["global script"])
+ where(:inherit, :result) do
+ nil | ["global script"]
+ { default: false } | nil
+ { default: true } | ["global script"]
+ { default: %w[before_script] } | ["global script"]
+ { default: %w[image] } | nil
end
- end
- context "in default context" do
- let(:config) do
- {
- default: { before_script: ["global script"] },
- test: { script: ["script"] }
- }
+ with_them do
+ let(:config) do
+ {
+ before_script: ["global script"],
+ test: { script: ["script"], inherit: inherit }
+ }
+ end
+
+ it { expect(subject[:options][:before_script]).to eq(result) }
end
- it "return commands with scripts concatenated" do
- expect(subject[:options][:before_script]).to eq(["global script"])
+ context "in default context" do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:inherit, :result) do
+ nil | ["global script"]
+ { default: false } | nil
+ { default: true } | ["global script"]
+ { default: %w[before_script] } | ["global script"]
+ { default: %w[image] } | nil
+ end
+
+ with_them do
+ let(:config) do
+ {
+ default: { before_script: ["global script"] },
+ test: { script: ["script"], inherit: inherit }
+ }
+ end
+
+ it { expect(subject[:options][:before_script]).to eq(result) }
+ end
end
end
@@ -793,7 +813,7 @@ module Gitlab
context 'when job and global variables are defined' do
let(:global_variables) do
- { 'VAR1' => 'global1', 'VAR3' => 'global3' }
+ { 'VAR1' => 'global1', 'VAR3' => 'global3', 'VAR4' => 'global4' }
end
let(:job_variables) do
{ 'VAR1' => 'value1', 'VAR2' => 'value2' }
@@ -802,16 +822,44 @@ module Gitlab
{
before_script: ['pwd'],
variables: global_variables,
- rspec: { script: 'rspec', variables: job_variables }
+ rspec: { script: 'rspec', variables: job_variables, inherit: inherit }
}
end
- it 'returns all unique variables' do
- expect(subject).to contain_exactly(
- { key: 'VAR3', value: 'global3', public: true },
- { key: 'VAR1', value: 'value1', public: true },
- { key: 'VAR2', value: 'value2', public: true }
- )
+ context 'when no inheritance is specified' do
+ let(:inherit) { }
+
+ it 'returns all unique variables' do
+ expect(subject).to contain_exactly(
+ { key: 'VAR4', value: 'global4', public: true },
+ { key: 'VAR3', value: 'global3', public: true },
+ { key: 'VAR1', value: 'value1', public: true },
+ { key: 'VAR2', value: 'value2', public: true }
+ )
+ end
+ end
+
+ context 'when inheritance is disabled' do
+ let(:inherit) { { variables: false } }
+
+ it 'does not inherit variables' do
+ expect(subject).to contain_exactly(
+ { key: 'VAR1', value: 'value1', public: true },
+ { key: 'VAR2', value: 'value2', public: true }
+ )
+ end
+ end
+
+ context 'when specific variables are to be inherited' do
+ let(:inherit) { { variables: %w[VAR1 VAR4] } }
+
+ it 'returns all unique variables and inherits only specified variables' do
+ expect(subject).to contain_exactly(
+ { key: 'VAR4', value: 'global4', public: true },
+ { key: 'VAR1', value: 'value1', public: true },
+ { key: 'VAR2', value: 'value2', public: true }
+ )
+ end
end
end
@@ -2419,7 +2467,9 @@ module Gitlab
it 'returns errors and empty configuration' do
expect(subject.valid?).to eq(false)
- expect(subject.errors).to eq(['jobs:rspec config contains unknown keys: bad_tags', 'jobs:rspec rules should be an array of hashes'])
+ expect(subject.errors).to contain_exactly(
+ 'jobs:rspec config contains unknown keys: bad_tags',
+ 'jobs:rspec rules should be an array of hashes')
expect(subject.content).to be_blank
end
end
diff --git a/spec/lib/gitlab/config/entry/attributable_spec.rb b/spec/lib/gitlab/config/entry/attributable_spec.rb
index bc29a194181..64a4670f483 100644
--- a/spec/lib/gitlab/config/entry/attributable_spec.rb
+++ b/spec/lib/gitlab/config/entry/attributable_spec.rb
@@ -59,7 +59,7 @@ describe Gitlab::Config::Entry::Attributable do
end
end
- expectation.to raise_error(ArgumentError, 'Method already defined: length')
+ expectation.to raise_error(ArgumentError, /Method 'length' already defined in/)
end
end
end
diff --git a/spec/lib/gitlab/config_checker/puma_rugged_checker_spec.rb b/spec/lib/gitlab/config_checker/puma_rugged_checker_spec.rb
new file mode 100644
index 00000000000..badfd56d571
--- /dev/null
+++ b/spec/lib/gitlab/config_checker/puma_rugged_checker_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ConfigChecker::PumaRuggedChecker do
+ describe '.check' do
+ subject { described_class.check }
+
+ context 'when the application is not Puma' do
+ before do
+ allow(Gitlab::Runtime).to receive(:puma?).and_return(false)
+ end
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'when the application is Puma' do
+ let(:notice_multi_threaded_puma_with_rugged) do
+ {
+ type: 'warning',
+ message: 'Puma is running with a thread count above 1 and the Rugged '\
+ 'service is enabled. This may decrease performance in some environments. '\
+ 'See our <a href="https://docs.gitlab.com/ee/administration/operations/puma.html#performance-caveat-when-using-puma-with-rugged">documentation</a> '\
+ 'for details of this issue.'
+ }
+ end
+
+ before do
+ allow(Gitlab::Runtime).to receive(:puma?).and_return(true)
+ allow(described_class).to receive(:running_puma_with_multiple_threads?).and_return(multithreaded_puma)
+ allow(described_class).to receive(:rugged_enabled_through_feature_flag?).and_return(rugged_enabled)
+ end
+
+ context 'when Puma is not multithreaded and the Rugged API is enabled' do
+ let(:multithreaded_puma) { false }
+ let(:rugged_enabled) { true }
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'when Puma is not multithreaded and the Rugged API is not enabled' do
+ let(:multithreaded_puma) { false }
+ let(:rugged_enabled) { false }
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'when Puma is multithreaded and the Rugged API is not enabled' do
+ let(:multithreaded_puma) { true }
+ let(:rugged_enabled) { false }
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'when Puma is multithreaded and the Rugged API is enabled' do
+ let(:multithreaded_puma) { true }
+ let(:rugged_enabled) { true }
+
+ it 'reports the multi_threaded_puma_with_rugged notice' do
+ is_expected.to contain_exactly(notice_multi_threaded_puma_with_rugged)
+ end
+ end
+ end
+ end
+end
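
The truth table above reduces to a single condition; a minimal sketch of that check (assumed names, not the real checker) would be:

# Warn only when Puma runs with more than one thread while Rugged is enabled.
def puma_rugged_warnings(multithreaded_puma:, rugged_enabled:)
  return [] unless multithreaded_puma && rugged_enabled

  [{ type: 'warning',
     message: 'Puma is running with a thread count above 1 and the Rugged service is enabled.' }]
end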
diff --git a/spec/lib/gitlab/danger/commit_linter_spec.rb b/spec/lib/gitlab/danger/commit_linter_spec.rb
index 0cf7ac64e43..a4760c942dc 100644
--- a/spec/lib/gitlab/danger/commit_linter_spec.rb
+++ b/spec/lib/gitlab/danger/commit_linter_spec.rb
@@ -152,6 +152,18 @@ describe Gitlab::Danger::CommitLinter do
end
end
+ context 'when subject is a WIP' do
+ let(:final_message) { 'A B C' }
+ # The subject with the WIP prefix exceeds WARN_SUBJECT_LENGTH; without the prefix it is exactly at the limit
+ let(:commit_message) { described_class::WIP_PREFIX + final_message + 'D' * (described_class::WARN_SUBJECT_LENGTH - final_message.size) }
+
+ it 'does not have any problems' do
+ commit_linter.lint
+
+ expect(commit_linter.problems).to be_empty
+ end
+ end
+
context 'when subject is too short and too long' do
let(:commit_message) { 'A ' + 'B' * described_class::MAX_LINE_LENGTH }
@@ -183,7 +195,40 @@ describe Gitlab::Danger::CommitLinter do
end
end
- context 'when subject ands with a period' do
+ [
+ '[ci skip] A commit message',
+ '[Ci skip] A commit message',
+ '[API] A commit message'
+ ].each do |message|
+ context "when subject is '#{message}'" do
+ let(:commit_message) { message }
+
+ it 'does not add a problem' do
+ expect(commit_linter).not_to receive(:add_problem)
+
+ commit_linter.lint
+ end
+ end
+ end
+
+ [
+ '[ci skip]A commit message',
+ '[Ci skip] A commit message',
+ '[ci skip] a commit message',
+ '! A commit message'
+ ].each do |message|
+ context "when subject is '#{message}'" do
+ let(:commit_message) { message }
+
+ it 'adds a problem' do
+ expect(commit_linter).to receive(:add_problem).with(:subject_starts_with_lowercase, described_class::DEFAULT_SUBJECT_DESCRIPTION)
+
+ commit_linter.lint
+ end
+ end
+ end
+
+ context 'when subject ends with a period' do
let(:commit_message) { 'A B C.' }
it 'adds a problem' do
diff --git a/spec/lib/gitlab/danger/helper_spec.rb b/spec/lib/gitlab/danger/helper_spec.rb
index 2561e763429..dd183281977 100644
--- a/spec/lib/gitlab/danger/helper_spec.rb
+++ b/spec/lib/gitlab/danger/helper_spec.rb
@@ -175,9 +175,12 @@ describe Gitlab::Danger::Helper do
'spec/javascripts/foo' | :frontend
'spec/frontend/bar' | :frontend
'vendor/assets/foo' | :frontend
+ 'babel.config.js' | :frontend
'jest.config.js' | :frontend
'package.json' | :frontend
'yarn.lock' | :frontend
+ 'config/foo.js' | :frontend
+ 'config/deep/foo.js' | :frontend
'ee/app/assets/foo' | :frontend
'ee/app/views/foo' | :frontend
@@ -218,6 +221,9 @@ describe Gitlab::Danger::Helper do
'scripts/foo' | :engineering_productivity
'lib/gitlab/danger/foo' | :engineering_productivity
'ee/lib/gitlab/danger/foo' | :engineering_productivity
+ '.overcommit.yml.example' | :engineering_productivity
+ '.editorconfig' | :engineering_productivity
+ 'tooling/overcommit/foo' | :engineering_productivity
'lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml' | :backend
diff --git a/spec/lib/gitlab/data_builder/wiki_page_spec.rb b/spec/lib/gitlab/data_builder/wiki_page_spec.rb
index 404d54bf2da..ae338e30a69 100644
--- a/spec/lib/gitlab/data_builder/wiki_page_spec.rb
+++ b/spec/lib/gitlab/data_builder/wiki_page_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::DataBuilder::WikiPage do
- set(:project) { create(:project, :repository, :wiki_repo) }
+ let_it_be(:project) { create(:project, :repository, :wiki_repo) }
let(:wiki_page) { create(:wiki_page, wiki: project.wiki) }
let(:user) { create(:user) }
diff --git a/spec/lib/gitlab/database/connection_timer_spec.rb b/spec/lib/gitlab/database/connection_timer_spec.rb
new file mode 100644
index 00000000000..c9e9d770343
--- /dev/null
+++ b/spec/lib/gitlab/database/connection_timer_spec.rb
@@ -0,0 +1,100 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Database::ConnectionTimer do
+ let(:current_clock_value) { 1234.56 }
+
+ before do
+ allow(described_class).to receive(:current_clock_value).and_return(current_clock_value)
+ end
+
+ describe '.starting_now' do
+ let(:default_interval) { described_class::DEFAULT_INTERVAL }
+ let(:random_value) { 120 }
+
+ before do
+ allow(described_class).to receive(:rand).and_return(random_value)
+ end
+
+ context 'when the configured interval is positive' do
+ before do
+ allow(described_class).to receive(:interval).and_return(default_interval)
+ end
+
+ it 'randomizes the interval of the created timer' do
+ timer = described_class.starting_now
+
+ expect(timer.interval).to eq(default_interval + random_value)
+ end
+ end
+
+ context 'when the configured interval is not positive' do
+ before do
+ allow(described_class).to receive(:interval).and_return(0)
+ end
+
+ it 'sets the interval of the created timer to nil' do
+ timer = described_class.starting_now
+
+ expect(timer.interval).to be_nil
+ end
+ end
+ end
+
+ describe '#expired?' do
+ context 'when the interval is positive' do
+ context 'when the interval has elapsed' do
+ it 'returns true' do
+ timer = described_class.new(20, current_clock_value - 30)
+
+ expect(timer).to be_expired
+ end
+ end
+
+ context 'when the interval has not elapsed' do
+ it 'returns false' do
+ timer = described_class.new(20, current_clock_value - 10)
+
+ expect(timer).not_to be_expired
+ end
+ end
+ end
+
+ context 'when the interval is not positive' do
+ context 'when the interval has elapsed' do
+ it 'returns false' do
+ timer = described_class.new(0, current_clock_value - 30)
+
+ expect(timer).not_to be_expired
+ end
+ end
+
+ context 'when the interval has not elapsed' do
+ it 'returns false' do
+ timer = described_class.new(0, current_clock_value + 10)
+
+ expect(timer).not_to be_expired
+ end
+ end
+ end
+
+ context 'when the interval is nil' do
+ it 'returns false' do
+ timer = described_class.new(nil, current_clock_value - 30)
+
+ expect(timer).not_to be_expired
+ end
+ end
+ end
+
+ describe '#reset!' do
+ it 'updates the timer clock value' do
+ timer = described_class.new(20, current_clock_value - 20)
+ expect(timer.starting_clock_value).not_to eql(current_clock_value)
+
+ timer.reset!
+ expect(timer.starting_clock_value).to eql(current_clock_value)
+ end
+ end
+end
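
The timer contract these examples pin down can be summarised in a short sketch (assumed names, not the actual GitLab::Database::ConnectionTimer source): a timer only expires once a positive interval has fully elapsed, and resetting simply records the current clock value.

class SketchConnectionTimer
  attr_reader :interval, :starting_clock_value

  def initialize(interval, starting_clock_value)
    @interval = interval
    @starting_clock_value = starting_clock_value
  end

  # Nil or non-positive intervals never expire.
  def expired?
    return false unless interval&.positive?

    current_clock_value - starting_clock_value >= interval
  end

  def reset!
    @starting_clock_value = current_clock_value
  end

  private

  def current_clock_value
    Process.clock_gettime(Process::CLOCK_MONOTONIC)
  end
end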
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 4cf2553b90d..1fd6157ce43 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -383,7 +383,8 @@ describe Gitlab::Database::MigrationHelpers do
it 'raises an error' do
expect(model).to receive(:foreign_key_exists?).and_return(false)
- expect { model.validate_foreign_key(:projects, :user_id) }.to raise_error(/cannot find/)
+ error_message = /Could not find foreign key "fk_name" on table "projects"/
+ expect { model.validate_foreign_key(:projects, :user_id, name: :fk_name) }.to raise_error(error_message)
end
end
end
@@ -587,6 +588,8 @@ describe Gitlab::Database::MigrationHelpers do
end
describe '#add_column_with_default' do
+ let(:column) { Project.columns.find { |c| c.name == "id" } }
+
context 'outside of a transaction' do
context 'when a column limit is not set' do
before do
@@ -601,6 +604,9 @@ describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:change_column_default)
.with(:projects, :foo, 10)
+
+ expect(model).to receive(:column_for)
+ .with(:projects, :foo).and_return(column)
end
it 'adds the column while allowing NULL values' do
@@ -655,6 +661,7 @@ describe Gitlab::Database::MigrationHelpers do
it 'adds the column with a limit' do
allow(model).to receive(:transaction_open?).and_return(false)
allow(model).to receive(:transaction).and_yield
+ allow(model).to receive(:column_for).with(:projects, :foo).and_return(column)
allow(model).to receive(:update_column_in_batches).with(:projects, :foo, 10)
allow(model).to receive(:change_column_null).with(:projects, :foo, false)
allow(model).to receive(:change_column_default).with(:projects, :foo, 10)
@@ -721,50 +728,68 @@ describe Gitlab::Database::MigrationHelpers do
before do
allow(model).to receive(:transaction_open?).and_return(false)
- allow(model).to receive(:column_for).and_return(old_column)
end
- it 'renames a column concurrently' do
- expect(model).to receive(:check_trigger_permissions!).with(:users)
+ context 'when the column to rename exists' do
+ before do
+ allow(model).to receive(:column_for).and_return(old_column)
+ end
- expect(model).to receive(:install_rename_triggers_for_postgresql)
- .with(trigger_name, '"users"', '"old"', '"new"')
+ it 'renames a column concurrently' do
+ expect(model).to receive(:check_trigger_permissions!).with(:users)
- expect(model).to receive(:add_column)
- .with(:users, :new, :integer,
- limit: old_column.limit,
- precision: old_column.precision,
- scale: old_column.scale)
+ expect(model).to receive(:install_rename_triggers_for_postgresql)
+ .with(trigger_name, '"users"', '"old"', '"new"')
- expect(model).to receive(:change_column_default)
- .with(:users, :new, old_column.default)
+ expect(model).to receive(:add_column)
+ .with(:users, :new, :integer,
+ limit: old_column.limit,
+ precision: old_column.precision,
+ scale: old_column.scale)
- expect(model).to receive(:update_column_in_batches)
+ expect(model).to receive(:change_column_default)
+ .with(:users, :new, old_column.default)
+
+ expect(model).to receive(:update_column_in_batches)
+
+ expect(model).to receive(:change_column_null).with(:users, :new, false)
+
+ expect(model).to receive(:copy_indexes).with(:users, :old, :new)
+ expect(model).to receive(:copy_foreign_keys).with(:users, :old, :new)
- expect(model).to receive(:change_column_null).with(:users, :new, false)
+ model.rename_column_concurrently(:users, :old, :new)
+ end
- expect(model).to receive(:copy_indexes).with(:users, :old, :new)
- expect(model).to receive(:copy_foreign_keys).with(:users, :old, :new)
+ context 'when default is false' do
+ let(:old_column) do
+ double(:column,
+ type: :boolean,
+ limit: nil,
+ default: false,
+ null: false,
+ precision: nil,
+ scale: nil)
+ end
- model.rename_column_concurrently(:users, :old, :new)
+ it 'copies the default to the new column' do
+ expect(model).to receive(:change_column_default)
+ .with(:users, :new, old_column.default)
+
+ model.rename_column_concurrently(:users, :old, :new)
+ end
+ end
end
- context 'when default is false' do
- let(:old_column) do
- double(:column,
- type: :boolean,
- limit: nil,
- default: false,
- null: false,
- precision: nil,
- scale: nil)
+ context 'when the column to be renamed does not exist' do
+ before do
+ allow(model).to receive(:columns).and_return([])
end
- it 'copies the default to the new column' do
- expect(model).to receive(:change_column_default)
- .with(:users, :new, old_column.default)
+ it 'raises an error with appropriate message' do
+ expect(model).to receive(:check_trigger_permissions!).with(:users)
- model.rename_column_concurrently(:users, :old, :new)
+ error_message = /Could not find column "missing_column" on table "users"/
+ expect { model.rename_column_concurrently(:users, :missing_column, :new) }.to raise_error(error_message)
end
end
end
@@ -1133,8 +1158,9 @@ describe Gitlab::Database::MigrationHelpers do
expect(column.name).to eq('id')
end
- it 'returns nil when a column does not exist' do
- expect(model.column_for(:users, :kittens)).to be_nil
+ it 'raises an error when a column does not exist' do
+ error_message = /Could not find column "kittens" on table "users"/
+ expect { model.column_for(:users, :kittens) }.to raise_error(error_message)
end
end
@@ -1332,6 +1358,15 @@ describe Gitlab::Database::MigrationHelpers do
end
end
end
+
+ context 'with other_arguments option' do
+ it 'queues jobs correctly' do
+ model.queue_background_migration_jobs_by_range_at_intervals(User, 'FooJob', 10.minutes, other_arguments: [1, 2])
+
+ expect(BackgroundMigrationWorker.jobs[0]['args']).to eq(['FooJob', [id1, id3, 1, 2]])
+ expect(BackgroundMigrationWorker.jobs[0]['at']).to eq(10.minutes.from_now.to_f)
+ end
+ end
end
context "when the model doesn't have an ID column" do
@@ -1893,4 +1928,60 @@ describe Gitlab::Database::MigrationHelpers do
end
end
end
+
+ describe '#migrate_async' do
+ it 'calls BackgroundMigrationWorker.perform_async' do
+ expect(BackgroundMigrationWorker).to receive(:perform_async).with("Class", "hello", "world")
+
+ model.migrate_async("Class", "hello", "world")
+ end
+
+ it 'pushes a context with the current class name as caller_id' do
+ expect(Gitlab::ApplicationContext).to receive(:with_context).with(caller_id: model.class.to_s)
+
+ model.migrate_async('Class', 'hello', 'world')
+ end
+ end
+
+ describe '#migrate_in' do
+ it 'calls BackgroundMigrationWorker.perform_in' do
+ expect(BackgroundMigrationWorker).to receive(:perform_in).with(10.minutes, 'Class', 'Hello', 'World')
+
+ model.migrate_in(10.minutes, 'Class', 'Hello', 'World')
+ end
+
+ it 'pushes a context with the current class name as caller_id' do
+ expect(Gitlab::ApplicationContext).to receive(:with_context).with(caller_id: model.class.to_s)
+
+ model.migrate_in(10.minutes, 'Class', 'Hello', 'World')
+ end
+ end
+
+ describe '#bulk_migrate_async' do
+ it 'calls BackgroundMigrationWorker.bulk_perform_async' do
+ expect(BackgroundMigrationWorker).to receive(:bulk_perform_async).with([%w(Class hello world)])
+
+ model.bulk_migrate_async([%w(Class hello world)])
+ end
+
+ it 'pushes a context with the current class name as caller_id' do
+ expect(Gitlab::ApplicationContext).to receive(:with_context).with(caller_id: model.class.to_s)
+
+ model.bulk_migrate_async([%w(Class hello world)])
+ end
+ end
+
+ describe '#bulk_migrate_in' do
+ it 'calls BackgroundMigrationWorker.bulk_perform_in' do
+ expect(BackgroundMigrationWorker).to receive(:bulk_perform_in).with(10.minutes, [%w(Class hello world)])
+
+ model.bulk_migrate_in(10.minutes, [%w(Class hello world)])
+ end
+
+ it 'pushes a context with the current class name as caller_id' do
+ expect(Gitlab::ApplicationContext).to receive(:with_context).with(caller_id: model.class.to_s)
+
+ model.bulk_migrate_in(10.minutes, [%w(Class hello world)])
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb b/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb
new file mode 100644
index 00000000000..0523066b593
--- /dev/null
+++ b/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Database::PostgresqlAdapter::ForceDisconnectableMixin do
+ describe 'checking in a connection to the pool' do
+ let(:model) do
+ Class.new(ActiveRecord::Base) do
+ self.abstract_class = true
+
+ def self.name
+ 'ForceDisconnectTestModel'
+ end
+ end
+ end
+ let(:config) { Rails.application.config_for(:database).merge(pool: 1) }
+ let(:pool) { model.establish_connection(config) }
+
+ it 'calls the force disconnect callback on checkin' do
+ connection = pool.connection
+
+ expect(pool.active_connection?).to be_truthy
+ expect(connection).to receive(:force_disconnect_if_old!).and_call_original
+
+ model.clear_active_connections!
+ end
+ end
+
+ describe 'disconnecting from the database' do
+ let(:connection) { ActiveRecord::Base.connection_pool.connection }
+ let(:timer) { connection.force_disconnect_timer }
+
+ context 'when the timer is expired' do
+ it 'disconnects from the database' do
+ allow(timer).to receive(:expired?).and_return(true)
+
+ expect(connection).to receive(:disconnect!).and_call_original
+ expect(timer).to receive(:reset!).and_call_original
+
+ connection.force_disconnect_if_old!
+ end
+ end
+
+ context 'when the timer is not expired' do
+ it 'does not disconnect from the database' do
+ allow(timer).to receive(:expired?).and_return(false)
+
+ expect(connection).not_to receive(:disconnect!)
+ expect(timer).not_to receive(:reset!)
+
+ connection.force_disconnect_if_old!
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb b/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb
index 0b8f64e97a1..923f620a81d 100644
--- a/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb
+++ b/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb
@@ -50,7 +50,7 @@ describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService do
end
end
- context 'with application settings and admin users' do
+ context 'with application settings and admin users', :request_store do
let(:project) { result[:project] }
let(:group) { result[:group] }
let(:application_setting) { Gitlab::CurrentSettings.current_application_settings }
@@ -58,8 +58,9 @@ describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService do
let!(:user) { create(:user, :admin) }
before do
- allow(ApplicationSetting).to receive(:current_without_cache) { application_setting }
- application_setting.allow_local_requests_from_web_hooks_and_services = true
+ stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
+
+ application_setting.update(allow_local_requests_from_web_hooks_and_services: true)
end
shared_examples 'has prometheus service' do |listen_address|
@@ -130,12 +131,17 @@ describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService do
it 'saves the project id' do
expect(result[:status]).to eq(:success)
- expect(application_setting.self_monitoring_project_id).to eq(project.id)
+ expect(application_setting.reload.self_monitoring_project_id).to eq(project.id)
end
- it 'expires application_setting cache' do
- expect(Gitlab::CurrentSettings).to receive(:expire_current_application_settings)
+ it 'creates a Prometheus service' do
expect(result[:status]).to eq(:success)
+
+ services = result[:project].reload.services
+
+ expect(services.count).to eq(1)
+ # Ensures PrometheusService#self_monitoring_project? is true
+ expect(services.first.allow_local_api_url?).to be_truthy
end
it 'creates an environment for the project' do
@@ -158,8 +164,8 @@ describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService do
end
it 'returns error when saving project ID fails' do
- allow(application_setting).to receive(:update).and_call_original
- allow(application_setting).to receive(:update)
+ allow(subject.application_settings).to receive(:update).and_call_original
+ allow(subject.application_settings).to receive(:update)
.with(self_monitoring_project_id: anything)
.and_return(false)
@@ -175,8 +181,8 @@ describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService do
let(:existing_project) { create(:project, namespace: existing_group) }
before do
- application_setting.instance_administrators_group_id = existing_group.id
- application_setting.self_monitoring_project_id = existing_project.id
+ application_setting.update(instance_administrators_group_id: existing_group.id,
+ self_monitoring_project_id: existing_project.id)
end
it 'returns success' do
@@ -189,7 +195,7 @@ describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService do
context 'when local requests from hooks and services are not allowed' do
before do
- application_setting.allow_local_requests_from_web_hooks_and_services = false
+ application_setting.update(allow_local_requests_from_web_hooks_and_services: false)
end
it_behaves_like 'has prometheus service', 'http://localhost:9090'
diff --git a/spec/lib/gitlab/diff/highlight_cache_spec.rb b/spec/lib/gitlab/diff/highlight_cache_spec.rb
index 218c393c409..a16e5e185bb 100644
--- a/spec/lib/gitlab/diff/highlight_cache_spec.rb
+++ b/spec/lib/gitlab/diff/highlight_cache_spec.rb
@@ -53,26 +53,22 @@ describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do
fallback_diff_refs: diffs.fallback_diff_refs)
end
- it 'does not calculate highlighting when reading from cache' do
+ before do
cache.write_if_empty
cache.decorate(diff_file)
+ end
+ it 'does not calculate highlighting when reading from cache' do
expect_any_instance_of(Gitlab::Diff::Highlight).not_to receive(:highlight)
diff_file.highlighted_diff_lines
end
it 'assigns highlighted diff lines to the DiffFile' do
- cache.write_if_empty
- cache.decorate(diff_file)
-
expect(diff_file.highlighted_diff_lines.size).to be > 5
end
it 'assigns highlighted diff lines which rich_text are HTML-safe' do
- cache.write_if_empty
- cache.decorate(diff_file)
-
rich_texts = diff_file.highlighted_diff_lines.map(&:rich_text)
expect(rich_texts).to all(be_html_safe)
@@ -101,6 +97,28 @@ describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do
let(:paths) { merge_request.diffs.raw_diff_files.select(&:text?).map(&:file_path) }
end
+ it 'updates memory usage metrics if Redis version >= 4' do
+ allow_next_instance_of(Redis) do |redis|
+ allow(redis).to receive(:info).and_return({ "redis_version" => "4.0.0" })
+
+ expect(described_class.gitlab_redis_diff_caching_memory_usage_bytes)
+ .to receive(:observe).and_call_original
+
+ cache.send(:write_to_redis_hash, diff_hash)
+ end
+ end
+
+ it 'does not update memory usage metrics if Redis version < 4' do
+ allow_next_instance_of(Redis) do |redis|
+ allow(redis).to receive(:info).and_return({ "redis_version" => "3.0.0" })
+
+ expect(described_class.gitlab_redis_diff_caching_memory_usage_bytes)
+ .not_to receive(:observe).and_call_original
+
+ cache.send(:write_to_redis_hash, diff_hash)
+ end
+ end
+
context 'different diff_collections for the same diffable' do
before do
cache.write_if_empty
@@ -149,5 +167,13 @@ describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do
it 'defines :gitlab_redis_diff_caching_memory_usage_bytes histogram' do
expect(described_class).to respond_to(:gitlab_redis_diff_caching_memory_usage_bytes)
end
+
+ it 'defines :gitlab_redis_diff_caching_hit' do
+ expect(described_class).to respond_to(:gitlab_redis_diff_caching_hit)
+ end
+
+ it 'defines :gitlab_redis_diff_caching_miss' do
+ expect(described_class).to respond_to(:gitlab_redis_diff_caching_miss)
+ end
end
end
diff --git a/spec/lib/gitlab/elasticsearch/logs_spec.rb b/spec/lib/gitlab/elasticsearch/logs_spec.rb
new file mode 100644
index 00000000000..f82c4acb82b
--- /dev/null
+++ b/spec/lib/gitlab/elasticsearch/logs_spec.rb
@@ -0,0 +1,89 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Elasticsearch::Logs do
+ let(:client) { Elasticsearch::Transport::Client }
+
+ let(:es_message_1) { { timestamp: "2019-12-13T14:35:34.034Z", message: "10.8.2.1 - - [25/Oct/2019:08:03:22 UTC] \"GET / HTTP/1.1\" 200 13" } }
+ let(:es_message_2) { { timestamp: "2019-12-13T14:35:35.034Z", message: "10.8.2.1 - - [27/Oct/2019:23:49:54 UTC] \"GET / HTTP/1.1\" 200 13" } }
+ let(:es_message_3) { { timestamp: "2019-12-13T14:35:36.034Z", message: "10.8.2.1 - - [04/Nov/2019:23:09:24 UTC] \"GET / HTTP/1.1\" 200 13" } }
+ let(:es_message_4) { { timestamp: "2019-12-13T14:35:37.034Z", message: "- -\u003e /" } }
+
+ let(:es_response) { JSON.parse(fixture_file('lib/elasticsearch/logs_response.json')) }
+
+ subject { described_class.new(client) }
+
+ let(:namespace) { "autodevops-deploy-9-production" }
+ let(:pod_name) { "production-6866bc8974-m4sk4" }
+ let(:container_name) { "auto-deploy-app" }
+ let(:search) { "foo +bar " }
+ let(:start_time) { "2019-12-13T14:35:34.034Z" }
+ let(:end_time) { "2019-12-13T14:35:34.034Z" }
+ let(:cursor) { "9999934,1572449784442" }
+
+ let(:body) { JSON.parse(fixture_file('lib/elasticsearch/query.json')) }
+ let(:body_with_container) { JSON.parse(fixture_file('lib/elasticsearch/query_with_container.json')) }
+ let(:body_with_search) { JSON.parse(fixture_file('lib/elasticsearch/query_with_search.json')) }
+ let(:body_with_times) { JSON.parse(fixture_file('lib/elasticsearch/query_with_times.json')) }
+ let(:body_with_start_time) { JSON.parse(fixture_file('lib/elasticsearch/query_with_start_time.json')) }
+ let(:body_with_end_time) { JSON.parse(fixture_file('lib/elasticsearch/query_with_end_time.json')) }
+ let(:body_with_cursor) { JSON.parse(fixture_file('lib/elasticsearch/query_with_cursor.json')) }
+
+ RSpec::Matchers.define :a_hash_equal_to_json do |expected|
+ match do |actual|
+ actual.as_json == expected
+ end
+ end
+
+ describe '#pod_logs' do
+ it 'returns the logs as an array' do
+ expect(client).to receive(:search).with(body: a_hash_equal_to_json(body)).and_return(es_response)
+
+ result = subject.pod_logs(namespace, pod_name)
+ expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
+ end
+
+ it 'can further filter the logs by container name' do
+ expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_container)).and_return(es_response)
+
+ result = subject.pod_logs(namespace, pod_name, container_name: container_name)
+ expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
+ end
+
+ it 'can further filter the logs by search' do
+ expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_search)).and_return(es_response)
+
+ result = subject.pod_logs(namespace, pod_name, search: search)
+ expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
+ end
+
+ it 'can further filter the logs by start_time and end_time' do
+ expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_times)).and_return(es_response)
+
+ result = subject.pod_logs(namespace, pod_name, start_time: start_time, end_time: end_time)
+ expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
+ end
+
+ it 'can further filter the logs by only start_time' do
+ expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_start_time)).and_return(es_response)
+
+ result = subject.pod_logs(namespace, pod_name, start_time: start_time)
+ expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
+ end
+
+ it 'can further filter the logs by only end_time' do
+ expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_end_time)).and_return(es_response)
+
+ result = subject.pod_logs(namespace, pod_name, end_time: end_time)
+ expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
+ end
+
+ it 'can search after a cursor' do
+ expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_cursor)).and_return(es_response)
+
+ result = subject.pod_logs(namespace, pod_name, cursor: cursor)
+ expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/encoding_helper_spec.rb b/spec/lib/gitlab/encoding_helper_spec.rb
index d091b6c1601..e6dfd8728aa 100644
--- a/spec/lib/gitlab/encoding_helper_spec.rb
+++ b/spec/lib/gitlab/encoding_helper_spec.rb
@@ -128,6 +128,12 @@ describe Gitlab::EncodingHelper do
expect { ext_class.encode_utf8('') }.not_to raise_error
end
+ it 'replaces invalid and undefined chars with the replace argument' do
+ str = 'hællo'.encode(Encoding::UTF_16LE).force_encoding(Encoding::ASCII_8BIT)
+
+ expect(ext_class.encode_utf8(str, replace: "\u{FFFD}")).to eq("h�llo")
+ end
+
context 'with strings that can be forcefully encoded into utf8' do
let(:test_string) do
"refs/heads/FixSymbolsTitleDropdown".encode("ASCII-8BIT")
diff --git a/spec/lib/gitlab/experimentation_spec.rb b/spec/lib/gitlab/experimentation_spec.rb
index 1506794cbb5..a39c50ab038 100644
--- a/spec/lib/gitlab/experimentation_spec.rb
+++ b/spec/lib/gitlab/experimentation_spec.rb
@@ -30,7 +30,12 @@ describe Gitlab::Experimentation do
end
describe '#set_experimentation_subject_id_cookie' do
+ let(:do_not_track) { nil }
+ let(:cookie) { cookies.permanent.signed[:experimentation_subject_id] }
+
before do
+ request.headers['DNT'] = do_not_track if do_not_track.present?
+
get :index
end
@@ -46,12 +51,30 @@ describe Gitlab::Experimentation do
context 'cookie is not present' do
it 'sets a permanent signed cookie' do
- expect(cookies.permanent.signed[:experimentation_subject_id]).to be_present
+ expect(cookie).to be_present
+ end
+
+ context 'DNT: 0' do
+ let(:do_not_track) { '0' }
+
+ it 'sets a permanent signed cookie' do
+ expect(cookie).to be_present
+ end
+ end
+
+ context 'DNT: 1' do
+ let(:do_not_track) { '1' }
+
+ it 'does nothing' do
+ expect(cookie).not_to be_present
+ end
end
end
end
describe '#experiment_enabled?' do
+ subject { controller.experiment_enabled?(:test_experiment) }
+
context 'cookie is not present' do
it 'calls Gitlab::Experimentation.enabled_for_user? with the name of the experiment and an experimentation_subject_index of nil' do
expect(Gitlab::Experimentation).to receive(:enabled_for_user?).with(:test_experiment, nil)
@@ -72,11 +95,25 @@ describe Gitlab::Experimentation do
end
end
+ it 'returns true when DNT: 0 is set in the request' do
+ allow(Gitlab::Experimentation).to receive(:enabled_for_user?) { true }
+ controller.request.headers['DNT'] = '0'
+
+ is_expected.to be_truthy
+ end
+
+ it 'returns false when DNT: 1 is set in the request' do
+ allow(Gitlab::Experimentation).to receive(:enabled_for_user?) { true }
+ controller.request.headers['DNT'] = '1'
+
+ is_expected.to be_falsy
+ end
+
describe 'URL parameter to force enable experiment' do
- it 'returns true' do
+ it 'returns true unconditionally' do
get :index, params: { force_experiment: :test_experiment }
- expect(controller.experiment_enabled?(:test_experiment)).to be_truthy
+ is_expected.to be_truthy
end
end
end
diff --git a/spec/lib/gitlab/file_type_detection_spec.rb b/spec/lib/gitlab/file_type_detection_spec.rb
index 05008bf895c..2f1fc57c559 100644
--- a/spec/lib/gitlab/file_type_detection_spec.rb
+++ b/spec/lib/gitlab/file_type_detection_spec.rb
@@ -2,6 +2,35 @@
require 'spec_helper'
describe Gitlab::FileTypeDetection do
+ describe '.extension_match?' do
+ let(:extensions) { %w[foo bar] }
+
+ it 'returns false when filename is blank' do
+ expect(described_class.extension_match?(nil, extensions)).to eq(false)
+ expect(described_class.extension_match?('', extensions)).to eq(false)
+ end
+
+ it 'returns true when filename matches extensions' do
+ expect(described_class.extension_match?('file.foo', extensions)).to eq(true)
+ expect(described_class.extension_match?('file.bar', extensions)).to eq(true)
+ end
+
+ it 'returns false when filename does not match extensions' do
+ expect(described_class.extension_match?('file.baz', extensions)).to eq(false)
+ end
+
+ it 'can match case insensitive filenames' do
+ expect(described_class.extension_match?('file.FOO', extensions)).to eq(true)
+ end
+
+ it 'can match filenames with periods' do
+ expect(described_class.extension_match?('my.file.foo', extensions)).to eq(true)
+ end
+
+ it 'can match filenames with directories' do
+ expect(described_class.extension_match?('my/file.foo', extensions)).to eq(true)
+ end
+ end
context 'when class is an uploader' do
let(:uploader) do
example_uploader = Class.new(CarrierWave::Uploader::Base) do
diff --git a/spec/lib/gitlab/git/blob_spec.rb b/spec/lib/gitlab/git/blob_spec.rb
index 294e67a19d4..9c2f0e910b1 100644
--- a/spec/lib/gitlab/git/blob_spec.rb
+++ b/spec/lib/gitlab/git/blob_spec.rb
@@ -12,10 +12,34 @@ describe Gitlab::Git::Blob, :seed_helper do
let(:blob) { Gitlab::Git::Blob.new(name: 'test') }
it 'handles nil data' do
+ expect(described_class).not_to receive(:gitlab_blob_size)
+
expect(blob.name).to eq('test')
expect(blob.size).to eq(nil)
expect(blob.loaded_size).to eq(nil)
end
+
+ it 'records blob size' do
+ expect(described_class).to receive(:gitlab_blob_size).and_call_original
+
+ Gitlab::Git::Blob.new(name: 'test', size: 4, data: 'abcd')
+ end
+
+ context 'when untruncated' do
+ it 'attempts to record gitlab_blob_truncated_false' do
+ expect(described_class).to receive(:gitlab_blob_truncated_false).and_call_original
+
+ Gitlab::Git::Blob.new(name: 'test', size: 4, data: 'abcd')
+ end
+ end
+
+ context 'when truncated' do
+ it 'attempts to record gitlab_blob_truncated_true' do
+ expect(described_class).to receive(:gitlab_blob_truncated_true).and_call_original
+
+ Gitlab::Git::Blob.new(name: 'test', size: 40, data: 'abcd')
+ end
+ end
end
shared_examples '.find' do
@@ -589,6 +613,40 @@ describe Gitlab::Git::Blob, :seed_helper do
end
end
+ describe '#truncated?' do
+ context 'when blob.size is nil' do
+ let(:nil_size_blob) { Gitlab::Git::Blob.new(name: 'test', data: 'abcd') }
+
+ it 'returns false' do
+ expect(nil_size_blob.truncated?).to be_falsey
+ end
+ end
+
+ context 'when blob.data is missing' do
+ let(:nil_data_blob) { Gitlab::Git::Blob.new(name: 'test', size: 4) }
+
+ it 'returns false' do
+ expect(nil_data_blob.truncated?).to be_falsey
+ end
+ end
+
+ context 'when the blob is truncated' do
+ let(:truncated_blob) { Gitlab::Git::Blob.new(name: 'test', size: 40, data: 'abcd') }
+
+ it 'returns true' do
+ expect(truncated_blob.truncated?).to be_truthy
+ end
+ end
+
+ context 'when the blob is untruncated' do
+ let(:untruncated_blob) { Gitlab::Git::Blob.new(name: 'test', size: 4, data: 'abcd') }
+
+ it 'returns false' do
+ expect(untruncated_blob.truncated?).to be_falsey
+ end
+ end
+ end
+
describe 'metrics' do
it 'defines :gitlab_blob_truncated_true counter' do
expect(described_class).to respond_to(:gitlab_blob_truncated_true)
@@ -597,5 +655,9 @@ describe Gitlab::Git::Blob, :seed_helper do
it 'defines :gitlab_blob_truncated_false counter' do
expect(described_class).to respond_to(:gitlab_blob_truncated_false)
end
+
+ it 'defines :gitlab_blob_size histogram' do
+ expect(described_class).to respond_to(:gitlab_blob_size)
+ end
end
end
diff --git a/spec/lib/gitlab/git/lfs_changes_spec.rb b/spec/lib/gitlab/git/lfs_changes_spec.rb
index a99e8c4f60c..adc63401b89 100644
--- a/spec/lib/gitlab/git/lfs_changes_spec.rb
+++ b/spec/lib/gitlab/git/lfs_changes_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::Git::LfsChanges do
- set(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
let(:newrev) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' }
let(:blob_object_id) { '0c304a93cb8430108629bbbcaa27db3343299bc0' }
diff --git a/spec/lib/gitlab/git/merge_base_spec.rb b/spec/lib/gitlab/git/merge_base_spec.rb
index fa95a1664ea..d92b13c5023 100644
--- a/spec/lib/gitlab/git/merge_base_spec.rb
+++ b/spec/lib/gitlab/git/merge_base_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::Git::MergeBase do
- set(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
let(:repository) { project.repository }
subject(:merge_base) { described_class.new(repository, refs) }
diff --git a/spec/lib/gitlab/git/push_spec.rb b/spec/lib/gitlab/git/push_spec.rb
index 32c4c1c82d4..c09e8a085df 100644
--- a/spec/lib/gitlab/git/push_spec.rb
+++ b/spec/lib/gitlab/git/push_spec.rb
@@ -3,8 +3,7 @@
require 'spec_helper'
describe Gitlab::Git::Push do
- set(:project) { create(:project, :repository) }
-
+ let_it_be(:project) { create(:project, :repository) }
let(:oldrev) { project.commit('HEAD~2').id }
let(:newrev) { project.commit.id }
let(:ref) { 'refs/heads/some-branch' }
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index a9d7beb0fea..b706cad612a 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -492,50 +492,6 @@ describe Gitlab::Git::Repository, :seed_helper do
end
end
- describe '#fetch_repository_as_mirror' do
- let(:new_repository) do
- Gitlab::Git::Repository.new('default', 'my_project.git', '', 'group/project')
- end
-
- subject { new_repository.fetch_repository_as_mirror(repository) }
-
- before do
- Gitlab::Shell.new.create_repository('default', 'my_project', 'group/project')
- end
-
- after do
- Gitlab::Shell.new.remove_repository('default', 'my_project')
- end
-
- it 'fetches a repository as a mirror remote' do
- subject
-
- expect(refs(new_repository_path)).to eq(refs(repository_path))
- end
-
- context 'with keep-around refs' do
- let(:sha) { SeedRepo::Commit::ID }
- let(:keep_around_ref) { "refs/keep-around/#{sha}" }
- let(:tmp_ref) { "refs/tmp/#{SecureRandom.hex}" }
-
- before do
- repository_rugged.references.create(keep_around_ref, sha, force: true)
- repository_rugged.references.create(tmp_ref, sha, force: true)
- end
-
- it 'includes the temporary and keep-around refs' do
- subject
-
- expect(refs(new_repository_path)).to include(keep_around_ref)
- expect(refs(new_repository_path)).to include(tmp_ref)
- end
- end
-
- def new_repository_path
- File.join(TestEnv.repos_path, new_repository.relative_path)
- end
- end
-
describe '#fetch_remote' do
it 'delegates to the gitaly RepositoryService' do
ssh_auth = double(:ssh_auth)
@@ -2181,4 +2137,76 @@ describe Gitlab::Git::Repository, :seed_helper do
end
end
end
+
+ describe '#import_repository' do
+ let_it_be(:project) { create(:project) }
+
+ let(:repository) { project.repository }
+ let(:url) { 'http://invalid.invalid' }
+
+ it 'raises an error if an absolute path is provided' do
+ expect { repository.import_repository('/foo') }.to raise_error(ArgumentError, /disk path/)
+ end
+
+ it 'raises an error if a relative path is provided' do
+ expect { repository.import_repository('./foo') }.to raise_error(ArgumentError, /disk path/)
+ end
+
+ it 'delegates to Gitaly' do
+ expect_next_instance_of(Gitlab::GitalyClient::RepositoryService) do |svc|
+ expect(svc).to receive(:import_repository).with(url).and_return(nil)
+ end
+
+ repository.import_repository(url)
+ end
+
+ it_behaves_like 'wrapping gRPC errors', Gitlab::GitalyClient::RepositoryService, :import_repository do
+ subject { repository.import_repository('http://invalid.invalid') }
+ end
+ end
+
+ describe '#replicate' do
+ let(:new_repository) do
+ Gitlab::Git::Repository.new('test_second_storage', TEST_REPO_PATH, '', 'group/project')
+ end
+ let(:new_repository_path) { File.join(TestEnv::SECOND_STORAGE_PATH, new_repository.relative_path) }
+
+ subject { new_repository.replicate(repository) }
+
+ before do
+ stub_storage_settings('test_second_storage' => {
+ 'gitaly_address' => Gitlab.config.repositories.storages.default.gitaly_address,
+ 'path' => TestEnv::SECOND_STORAGE_PATH
+ })
+ new_repository.create_repository
+ end
+
+ after do
+ new_repository.remove
+ end
+
+ it 'mirrors the source repository' do
+ subject
+
+ expect(refs(new_repository_path)).to eq(refs(repository_path))
+ end
+
+ context 'with keep-around refs' do
+ let(:sha) { SeedRepo::Commit::ID }
+ let(:keep_around_ref) { "refs/keep-around/#{sha}" }
+ let(:tmp_ref) { "refs/tmp/#{SecureRandom.hex}" }
+
+ before do
+ repository.write_ref(keep_around_ref, sha)
+ repository.write_ref(tmp_ref, sha)
+ end
+
+ it 'includes the temporary and keep-around refs' do
+ subject
+
+ expect(refs(new_repository_path)).to include(keep_around_ref)
+ expect(refs(new_repository_path)).to include(tmp_ref)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb b/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
index 300d7bb14b6..b396e5d22c3 100644
--- a/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
+++ b/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
@@ -152,6 +152,43 @@ describe Gitlab::Git::RuggedImpl::UseRugged, :seed_helper do
end
end
+ describe '#rugged_enabled_through_feature_flag?' do
+ subject { wrapper.send(:rugged_enabled_through_feature_flag?) }
+
+ before do
+ allow(Feature).to receive(:enabled?).with(:feature_key_1).and_return(true)
+ allow(Feature).to receive(:enabled?).with(:feature_key_2).and_return(true)
+ allow(Feature).to receive(:enabled?).with(:feature_key_3).and_return(false)
+ allow(Feature).to receive(:enabled?).with(:feature_key_4).and_return(false)
+
+ stub_const('Gitlab::Git::RuggedImpl::Repository::FEATURE_FLAGS', feature_keys)
+ end
+
+ context 'no feature keys given' do
+ let(:feature_keys) { [] }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'all features are enabled' do
+ let(:feature_keys) { [:feature_key_1, :feature_key_2] }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'no features are enabled' do
+ let(:feature_keys) { [:feature_key_3, :feature_key_4] }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'some feature is enabled' do
+ let(:feature_keys) { [:feature_key_4, :feature_key_2] }
+
+ it { is_expected.to be_truthy }
+ end
+ end
+
def create_temporary_gitaly_metadata_file
tmp = Tempfile.new('.gitaly-metadata')
gitaly_metadata = {
diff --git a/spec/lib/gitlab/git_access_snippet_spec.rb b/spec/lib/gitlab/git_access_snippet_spec.rb
index ffb3d86408a..f52fe8ef612 100644
--- a/spec/lib/gitlab/git_access_snippet_spec.rb
+++ b/spec/lib/gitlab/git_access_snippet_spec.rb
@@ -3,83 +3,225 @@
require 'spec_helper'
describe Gitlab::GitAccessSnippet do
- include GitHelpers
+ include ProjectHelpers
+ include TermsHelper
+ include_context 'ProjectPolicyTable context'
+ using RSpec::Parameterized::TableSyntax
let_it_be(:user) { create(:user) }
- let_it_be(:personal_snippet) { create(:personal_snippet, :private, :repository) }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:snippet) { create(:project_snippet, :public, :repository, project: project) }
+ let(:actor) { user }
let(:protocol) { 'ssh' }
let(:changes) { Gitlab::GitAccess::ANY }
+ let(:authentication_abilities) { [:download_code, :push_code] }
+
let(:push_access_check) { access.check('git-receive-pack', changes) }
let(:pull_access_check) { access.check('git-upload-pack', changes) }
- let(:snippet) { personal_snippet }
- let(:actor) { personal_snippet.author }
- describe 'when feature flag :version_snippets is enabled' do
- it 'allows push and pull access' do
- aggregate_failures do
- expect { pull_access_check }.not_to raise_error
- expect { push_access_check }.not_to raise_error
- end
+ subject(:access) { Gitlab::GitAccessSnippet.new(actor, snippet, protocol, authentication_abilities: authentication_abilities) }
+
+ describe 'when actor is a DeployKey' do
+ let(:actor) { build(:deploy_key) }
+
+ it 'does not allow push and pull access' do
+ expect { pull_access_check }.to raise_forbidden(described_class::ERROR_MESSAGES[:authentication_mechanism])
end
end
describe 'when feature flag :version_snippets is disabled' do
+ let(:user) { snippet.author }
+
before do
stub_feature_flags(version_snippets: false)
end
- it 'does not allow push and pull access' do
- aggregate_failures do
- expect { push_access_check }.to raise_snippet_not_found
- expect { pull_access_check }.to raise_snippet_not_found
- end
+ it 'allows push and pull access' do
+ expect { pull_access_check }.not_to raise_error
+ expect { push_access_check }.not_to raise_error
end
end
describe '#check_snippet_accessibility!' do
context 'when the snippet exists' do
- it 'allows push and pull access' do
- aggregate_failures do
- expect { pull_access_check }.not_to raise_error
- expect { push_access_check }.not_to raise_error
- end
+ it 'allows access' do
+ project.add_developer(actor)
+
+ expect { pull_access_check }.not_to raise_error
end
end
context 'when the snippet is nil' do
let(:snippet) { nil }
- it 'blocks push and pull with "not found"' do
- aggregate_failures do
- expect { pull_access_check }.to raise_snippet_not_found
- expect { push_access_check }.to raise_snippet_not_found
- end
+ it 'blocks access with "not found"' do
+ expect { pull_access_check }.to raise_snippet_not_found
end
end
context 'when the snippet does not have a repository' do
let(:snippet) { build_stubbed(:personal_snippet) }
- it 'blocks push and pull with "not found"' do
- aggregate_failures do
- expect { pull_access_check }.to raise_snippet_not_found
- expect { push_access_check }.to raise_snippet_not_found
+ it 'blocks access with "not found"' do
+ expect { pull_access_check }.to raise_snippet_not_found
+ end
+ end
+ end
+
+ context 'terms are enforced', :aggregate_failures do
+ before do
+ enforce_terms
+ end
+
+ let(:user) { snippet.author }
+
+ it 'blocks access when the user did not accept terms' do
+ message = /must accept the Terms of Service in order to perform this action/
+
+ expect { push_access_check }.to raise_forbidden(message)
+ expect { pull_access_check }.to raise_forbidden(message)
+ end
+
+ it 'allows access when the user accepted the terms' do
+ accept_terms(user)
+
+ expect { push_access_check }.not_to raise_error
+ expect { pull_access_check }.not_to raise_error
+ end
+ end
+
+ context 'project snippet accessibility', :aggregate_failures do
+ let(:snippet) { create(:project_snippet, :private, :repository, project: project) }
+ let(:user) { membership == :author ? snippet.author : create_user_from_membership(project, membership) }
+
+ shared_examples_for 'checks accessibility' do
+ [:anonymous, :non_member, :guest, :reporter, :maintainer, :admin, :author].each do |membership|
+ context membership.to_s do
+ let(:membership) { membership }
+
+ it 'respects accessibility' do
+ if Ability.allowed?(user, :update_snippet, snippet)
+ expect { push_access_check }.not_to raise_error
+ else
+ expect { push_access_check }.to raise_error(described_class::ForbiddenError)
+ end
+
+ if Ability.allowed?(user, :read_snippet, snippet)
+ expect { pull_access_check }.not_to raise_error
+ else
+ expect { pull_access_check }.to raise_error(described_class::ForbiddenError)
+ end
+ end
+ end
+ end
+ end
+
+ context 'when project is public' do
+ it_behaves_like 'checks accessibility'
+ end
+
+ context 'when project is public but snippet feature is private' do
+ let(:project) { create(:project, :public) }
+
+ before do
+ update_feature_access_level(project, :private)
+ end
+
+ it_behaves_like 'checks accessibility'
+ end
+
+ context 'when project is not accessible' do
+ let(:project) { create(:project, :private) }
+
+ [:anonymous, :non_member].each do |membership|
+ context membership.to_s do
+ let(:membership) { membership }
+
+ it 'respects accessibility' do
+ expect { push_access_check }.to raise_error(described_class::NotFoundError)
+ expect { pull_access_check }.to raise_error(described_class::NotFoundError)
+ end
end
end
end
end
- private
+ context 'personal snippet accessibility', :aggregate_failures do
+ let(:snippet) { create(:personal_snippet, snippet_level, :repository) }
+ let(:user) { membership == :author ? snippet.author : create_user_from_membership(nil, membership) }
+
+ where(:snippet_level, :membership, :_expected_count) do
+ permission_table_for_personal_snippet_access
+ end
+
+ with_them do
+ it "respects accessibility" do
+ error_class = described_class::ForbiddenError
+
+ if Ability.allowed?(user, :update_snippet, snippet)
+ expect { push_access_check }.not_to raise_error
+ else
+ expect { push_access_check }.to raise_error(error_class)
+ end
+
+ if Ability.allowed?(user, :read_snippet, snippet)
+ expect { pull_access_check }.not_to raise_error
+ else
+ expect { pull_access_check }.to raise_error(error_class)
+ end
+ end
+ end
+ end
+
+ context 'when geo is enabled', if: Gitlab.ee? do
+ let(:user) { snippet.author }
+ let!(:primary_node) { FactoryBot.create(:geo_node, :primary) }
+
+ # Without override, push access would return Gitlab::GitAccessResult::CustomAction
+ it 'skips geo for snippet' do
+ allow(::Gitlab::Database).to receive(:read_only?).and_return(true)
+ allow(::Gitlab::Geo).to receive(:secondary_with_primary?).and_return(true)
- def access
- described_class.new(actor, snippet, protocol,
- authentication_abilities: [],
- namespace_path: nil, project_path: nil,
- redirected_path: nil, auth_result_type: nil)
+ expect { push_access_check }.to raise_forbidden(/You can't push code to a read-only GitLab instance/)
+ end
end
+ context 'when changes are specific' do
+ let(:changes) { "2d1db523e11e777e49377cfb22d368deec3f0793 ddd0f15ae83993f5cb66a927a28673882e99100b master" }
+ let(:user) { snippet.author }
+
+ it 'does not raise error if SnippetCheck does not raise error' do
+ expect_next_instance_of(Gitlab::Checks::SnippetCheck) do |check|
+ expect(check).to receive(:validate!).and_call_original
+ end
+ expect_next_instance_of(Gitlab::Checks::PushFileCountCheck) do |check|
+ expect(check).to receive(:validate!)
+ end
+
+ expect { push_access_check }.not_to raise_error
+ end
+
+ it 'raises error if SnippetCheck raises error' do
+ expect_next_instance_of(Gitlab::Checks::SnippetCheck) do |check|
+ allow(check).to receive(:validate!).and_raise(Gitlab::GitAccess::ForbiddenError, 'foo')
+ end
+
+ expect { push_access_check }.to raise_forbidden('foo')
+ end
+ end
+
+ private
+
def raise_snippet_not_found
raise_error(Gitlab::GitAccess::NotFoundError, Gitlab::GitAccess::ERROR_MESSAGES[:snippet_not_found])
end
+
+ def raise_project_not_found
+ raise_error(Gitlab::GitAccess::NotFoundError, Gitlab::GitAccess::ERROR_MESSAGES[:project_not_found])
+ end
+
+ def raise_forbidden(message)
+ raise_error(Gitlab::GitAccess::ForbiddenError, message)
+ end
end
diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb
index 0831021b22b..a29c56c598f 100644
--- a/spec/lib/gitlab/git_access_spec.rb
+++ b/spec/lib/gitlab/git_access_spec.rb
@@ -32,8 +32,8 @@ describe Gitlab::GitAccess do
it 'blocks ssh git push and pull' do
aggregate_failures do
- expect { push_access_check }.to raise_unauthorized('Git access over SSH is not allowed')
- expect { pull_access_check }.to raise_unauthorized('Git access over SSH is not allowed')
+ expect { push_access_check }.to raise_forbidden('Git access over SSH is not allowed')
+ expect { pull_access_check }.to raise_forbidden('Git access over SSH is not allowed')
end
end
end
@@ -48,8 +48,8 @@ describe Gitlab::GitAccess do
it 'blocks http push and pull' do
aggregate_failures do
- expect { push_access_check }.to raise_unauthorized('Git access over HTTP is not allowed')
- expect { pull_access_check }.to raise_unauthorized('Git access over HTTP is not allowed')
+ expect { push_access_check }.to raise_forbidden('Git access over HTTP is not allowed')
+ expect { pull_access_check }.to raise_forbidden('Git access over HTTP is not allowed')
end
end
@@ -58,7 +58,7 @@ describe Gitlab::GitAccess do
it "doesn't block http pull" do
aggregate_failures do
- expect { pull_access_check }.not_to raise_unauthorized('Git access over HTTP is not allowed')
+ expect { pull_access_check }.not_to raise_forbidden('Git access over HTTP is not allowed')
end
end
@@ -67,7 +67,7 @@ describe Gitlab::GitAccess do
it "doesn't block http pull" do
aggregate_failures do
- expect { pull_access_check }.not_to raise_unauthorized('Git access over HTTP is not allowed')
+ expect { pull_access_check }.not_to raise_forbidden('Git access over HTTP is not allowed')
end
end
end
@@ -165,7 +165,7 @@ describe Gitlab::GitAccess do
end
it 'does not block pushes with "not found"' do
- expect { push_access_check }.to raise_unauthorized(described_class::ERROR_MESSAGES[:auth_upload])
+ expect { push_access_check }.to raise_forbidden(described_class::ERROR_MESSAGES[:auth_upload])
end
end
@@ -178,7 +178,7 @@ describe Gitlab::GitAccess do
end
it 'blocks the push' do
- expect { push_access_check }.to raise_unauthorized(described_class::ERROR_MESSAGES[:upload])
+ expect { push_access_check }.to raise_forbidden(described_class::ERROR_MESSAGES[:upload])
end
end
@@ -208,7 +208,7 @@ describe Gitlab::GitAccess do
end
it 'does not block pushes with "not found"' do
- expect { push_access_check }.to raise_unauthorized(described_class::ERROR_MESSAGES[:upload])
+ expect { push_access_check }.to raise_forbidden(described_class::ERROR_MESSAGES[:upload])
end
end
@@ -240,7 +240,7 @@ describe Gitlab::GitAccess do
let(:access) do
described_class.new(actor, nil,
protocol, authentication_abilities: authentication_abilities,
- project_path: project_path, namespace_path: namespace_path,
+ repository_path: project_path, namespace_path: namespace_path,
redirected_path: redirected_path)
end
@@ -259,7 +259,7 @@ describe Gitlab::GitAccess do
let(:access) do
described_class.new(actor, nil,
protocol, authentication_abilities: authentication_abilities,
- project_path: project_path, namespace_path: namespace_path,
+ repository_path: project_path, namespace_path: namespace_path,
redirected_path: redirected_path)
end
@@ -285,8 +285,8 @@ describe Gitlab::GitAccess do
it 'does not allow keys which are too small', :aggregate_failures do
expect(actor).not_to be_valid
- expect { pull_access_check }.to raise_unauthorized('Your SSH key must be at least 4096 bits.')
- expect { push_access_check }.to raise_unauthorized('Your SSH key must be at least 4096 bits.')
+ expect { pull_access_check }.to raise_forbidden('Your SSH key must be at least 4096 bits.')
+ expect { push_access_check }.to raise_forbidden('Your SSH key must be at least 4096 bits.')
end
end
@@ -297,8 +297,8 @@ describe Gitlab::GitAccess do
it 'does not allow keys which are too small', :aggregate_failures do
expect(actor).not_to be_valid
- expect { pull_access_check }.to raise_unauthorized(/Your SSH key type is forbidden/)
- expect { push_access_check }.to raise_unauthorized(/Your SSH key type is forbidden/)
+ expect { pull_access_check }.to raise_forbidden(/Your SSH key type is forbidden/)
+ expect { push_access_check }.to raise_forbidden(/Your SSH key type is forbidden/)
end
end
end
@@ -363,7 +363,7 @@ describe Gitlab::GitAccess do
let(:authentication_abilities) { [] }
it 'raises unauthorized with download error' do
- expect { pull_access_check }.to raise_unauthorized(described_class::ERROR_MESSAGES[:auth_download])
+ expect { pull_access_check }.to raise_forbidden(described_class::ERROR_MESSAGES[:auth_download])
end
context 'when authentication abilities include download code' do
@@ -387,7 +387,7 @@ describe Gitlab::GitAccess do
let(:authentication_abilities) { [] }
it 'raises unauthorized with push error' do
- expect { push_access_check }.to raise_unauthorized(described_class::ERROR_MESSAGES[:auth_upload])
+ expect { push_access_check }.to raise_forbidden(described_class::ERROR_MESSAGES[:auth_upload])
end
context 'when authentication abilities include push code' do
@@ -414,7 +414,7 @@ describe Gitlab::GitAccess do
end
context 'when calling git-upload-pack' do
- it { expect { pull_access_check }.to raise_unauthorized('Pulling over HTTP is not allowed.') }
+ it { expect { pull_access_check }.to raise_forbidden('Pulling over HTTP is not allowed.') }
end
context 'when calling git-receive-pack' do
@@ -428,7 +428,7 @@ describe Gitlab::GitAccess do
end
context 'when calling git-receive-pack' do
- it { expect { push_access_check }.to raise_unauthorized('Pushing over HTTP is not allowed.') }
+ it { expect { push_access_check }.to raise_forbidden('Pushing over HTTP is not allowed.') }
end
context 'when calling git-upload-pack' do
@@ -445,7 +445,7 @@ describe Gitlab::GitAccess do
allow(Gitlab::Database).to receive(:read_only?) { true }
end
- it { expect { push_access_check }.to raise_unauthorized(described_class::ERROR_MESSAGES[:cannot_push_to_read_only]) }
+ it { expect { push_access_check }.to raise_forbidden(described_class::ERROR_MESSAGES[:cannot_push_to_read_only]) }
end
end
@@ -453,7 +453,7 @@ describe Gitlab::GitAccess do
let(:access) do
described_class.new(actor, project,
protocol, authentication_abilities: authentication_abilities,
- project_path: project_path, namespace_path: namespace_path,
+ repository_path: project_path, namespace_path: namespace_path,
redirected_path: redirected_path)
end
@@ -559,21 +559,21 @@ describe Gitlab::GitAccess do
it 'disallows guests to pull' do
project.add_guest(user)
- expect { pull_access_check }.to raise_unauthorized(described_class::ERROR_MESSAGES[:download])
+ expect { pull_access_check }.to raise_forbidden(described_class::ERROR_MESSAGES[:download])
end
it 'disallows blocked users to pull' do
project.add_maintainer(user)
user.block
- expect { pull_access_check }.to raise_unauthorized('Your account has been blocked.')
+ expect { pull_access_check }.to raise_forbidden('Your account has been blocked.')
end
it 'disallows deactivated users to pull' do
project.add_maintainer(user)
user.deactivate!
- expect { pull_access_check }.to raise_unauthorized("Your account has been deactivated by your administrator. Please log back in from a web browser to reactivate your account at #{Gitlab.config.gitlab.url}")
+ expect { pull_access_check }.to raise_forbidden("Your account has been deactivated by your administrator. Please log back in from a web browser to reactivate your account at #{Gitlab.config.gitlab.url}")
end
context 'when the project repository does not exist' do
@@ -598,7 +598,7 @@ describe Gitlab::GitAccess do
let(:public_project) { create(:project, :public, :repository) }
let(:project_path) { public_project.path }
let(:namespace_path) { public_project.namespace.path }
- let(:access) { described_class.new(nil, public_project, 'web', authentication_abilities: [:download_code], project_path: project_path, namespace_path: namespace_path) }
+ let(:access) { described_class.new(nil, public_project, 'web', authentication_abilities: [:download_code], repository_path: project_path, namespace_path: namespace_path) }
context 'when repository is enabled' do
it 'gives access to download code' do
@@ -610,7 +610,7 @@ describe Gitlab::GitAccess do
it 'does not give access to download code' do
public_project.project_feature.update_attribute(:repository_access_level, ProjectFeature::DISABLED)
- expect { pull_access_check }.to raise_unauthorized(described_class::ERROR_MESSAGES[:download])
+ expect { pull_access_check }.to raise_forbidden(described_class::ERROR_MESSAGES[:download])
end
end
end
@@ -722,7 +722,7 @@ describe Gitlab::GitAccess do
context 'when is not member of the project' do
context 'pull code' do
- it { expect { pull_access_check }.to raise_unauthorized(described_class::ERROR_MESSAGES[:download]) }
+ it { expect { pull_access_check }.to raise_forbidden(described_class::ERROR_MESSAGES[:download]) }
end
end
end
@@ -828,7 +828,7 @@ describe Gitlab::GitAccess do
expect(&check).not_to raise_error,
-> { "expected #{action} to be allowed" }
else
- expect(&check).to raise_error(Gitlab::GitAccess::UnauthorizedError),
+ expect(&check).to raise_error(Gitlab::GitAccess::ForbiddenError),
-> { "expected #{action} to be disallowed" }
end
end
@@ -965,7 +965,7 @@ describe Gitlab::GitAccess do
it 'does not allow deactivated users to push' do
user.deactivate!
- expect { push_access_check }.to raise_unauthorized("Your account has been deactivated by your administrator. Please log back in from a web browser to reactivate your account at #{Gitlab.config.gitlab.url}")
+ expect { push_access_check }.to raise_forbidden("Your account has been deactivated by your administrator. Please log back in from a web browser to reactivate your account at #{Gitlab.config.gitlab.url}")
end
it 'cleans up the files' do
@@ -1009,26 +1009,26 @@ describe Gitlab::GitAccess do
project.add_reporter(user)
end
- it { expect { push_access_check }.to raise_unauthorized(described_class::ERROR_MESSAGES[:auth_upload]) }
+ it { expect { push_access_check }.to raise_forbidden(described_class::ERROR_MESSAGES[:auth_upload]) }
end
context 'when unauthorized' do
context 'to public project' do
let(:project) { create(:project, :public, :repository) }
- it { expect { push_access_check }.to raise_unauthorized(described_class::ERROR_MESSAGES[:auth_upload]) }
+ it { expect { push_access_check }.to raise_forbidden(described_class::ERROR_MESSAGES[:auth_upload]) }
end
context 'to internal project' do
let(:project) { create(:project, :internal, :repository) }
- it { expect { push_access_check }.to raise_unauthorized(described_class::ERROR_MESSAGES[:auth_upload]) }
+ it { expect { push_access_check }.to raise_forbidden(described_class::ERROR_MESSAGES[:auth_upload]) }
end
context 'to private project' do
let(:project) { create(:project, :private, :repository) }
- it { expect { push_access_check }.to raise_unauthorized(described_class::ERROR_MESSAGES[:auth_upload]) }
+ it { expect { push_access_check }.to raise_forbidden(described_class::ERROR_MESSAGES[:auth_upload]) }
end
end
end
@@ -1039,7 +1039,7 @@ describe Gitlab::GitAccess do
it 'denies push access' do
project.add_maintainer(user)
- expect { push_access_check }.to raise_unauthorized('The repository is temporarily read-only. Please try again later.')
+ expect { push_access_check }.to raise_forbidden('The repository is temporarily read-only. Please try again later.')
end
end
@@ -1060,7 +1060,7 @@ describe Gitlab::GitAccess do
context 'to public project' do
let(:project) { create(:project, :public, :repository) }
- it { expect { push_access_check }.to raise_unauthorized(described_class::ERROR_MESSAGES[:deploy_key_upload]) }
+ it { expect { push_access_check }.to raise_forbidden(described_class::ERROR_MESSAGES[:deploy_key_upload]) }
end
context 'to internal project' do
@@ -1083,14 +1083,14 @@ describe Gitlab::GitAccess do
key.deploy_keys_projects.create(project: project, can_push: false)
end
- it { expect { push_access_check }.to raise_unauthorized(described_class::ERROR_MESSAGES[:deploy_key_upload]) }
+ it { expect { push_access_check }.to raise_forbidden(described_class::ERROR_MESSAGES[:deploy_key_upload]) }
end
context 'when unauthorized' do
context 'to public project' do
let(:project) { create(:project, :public, :repository) }
- it { expect { push_access_check }.to raise_unauthorized(described_class::ERROR_MESSAGES[:deploy_key_upload]) }
+ it { expect { push_access_check }.to raise_forbidden(described_class::ERROR_MESSAGES[:deploy_key_upload]) }
end
context 'to internal project' do
@@ -1121,7 +1121,7 @@ describe Gitlab::GitAccess do
it 'blocks access when the user did not accept terms', :aggregate_failures do
actions.each do |action|
- expect { action.call }.to raise_unauthorized(/must accept the Terms of Service in order to perform this action/)
+ expect { action.call }.to raise_forbidden(/must accept the Terms of Service in order to perform this action/)
end
end
@@ -1203,7 +1203,7 @@ describe Gitlab::GitAccess do
def access
described_class.new(actor, project, protocol,
authentication_abilities: authentication_abilities,
- namespace_path: namespace_path, project_path: project_path,
+ namespace_path: namespace_path, repository_path: project_path,
redirected_path: redirected_path, auth_result_type: auth_result_type)
end
@@ -1211,8 +1211,8 @@ describe Gitlab::GitAccess do
access.check('git-receive-pack', changes)
end
- def raise_unauthorized(message)
- raise_error(Gitlab::GitAccess::UnauthorizedError, message)
+ def raise_forbidden(message)
+ raise_error(Gitlab::GitAccess::ForbiddenError, message)
end
def raise_not_found
diff --git a/spec/lib/gitlab/git_access_wiki_spec.rb b/spec/lib/gitlab/git_access_wiki_spec.rb
index 99c9369a2b9..b5e673c9e79 100644
--- a/spec/lib/gitlab/git_access_wiki_spec.rb
+++ b/spec/lib/gitlab/git_access_wiki_spec.rb
@@ -33,7 +33,7 @@ describe Gitlab::GitAccessWiki do
end
it 'does not give access to upload wiki code' do
- expect { subject }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "You can't push code to a read-only GitLab instance.")
+ expect { subject }.to raise_error(Gitlab::GitAccess::ForbiddenError, "You can't push code to a read-only GitLab instance.")
end
end
end
@@ -70,7 +70,7 @@ describe Gitlab::GitAccessWiki do
it 'does not give access to download wiki code' do
project.project_feature.update_attribute(:wiki_access_level, ProjectFeature::DISABLED)
- expect { subject }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to download code from this project.')
+ expect { subject }.to raise_error(Gitlab::GitAccess::ForbiddenError, 'You are not allowed to download code from this project.')
end
end
end
diff --git a/spec/lib/gitlab/git_post_receive_spec.rb b/spec/lib/gitlab/git_post_receive_spec.rb
index f0df3794e29..0e25a616810 100644
--- a/spec/lib/gitlab/git_post_receive_spec.rb
+++ b/spec/lib/gitlab/git_post_receive_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe ::Gitlab::GitPostReceive do
- set(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
subject { described_class.new(project, "project-#{project.id}", changes.dup, {}) }
diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
index 5c36d6d35af..00182983418 100644
--- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
@@ -306,5 +306,19 @@ describe Gitlab::GitalyClient::CommitService do
client.find_commits(order: 'topo')
end
+
+ it 'sends an RPC request with an author' do
+ request = Gitaly::FindCommitsRequest.new(
+ repository: repository_message,
+ disable_walk: true,
+ order: 'NONE',
+ author: "Billy Baggins <bilbo@shire.com>"
+ )
+
+ expect_any_instance_of(Gitaly::CommitService::Stub).to receive(:find_commits)
+ .with(request, kind_of(Hash)).and_return([])
+
+ client.find_commits(order: 'default', author: "Billy Baggins <bilbo@shire.com>")
+ end
end
end
diff --git a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
index d4337c51279..45701b501bb 100644
--- a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
@@ -3,10 +3,10 @@
require 'spec_helper'
describe Gitlab::GitalyClient::OperationService do
- set(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
let(:repository) { project.repository.raw }
let(:client) { described_class.new(repository) }
- set(:user) { create(:user) }
let(:gitaly_user) { Gitlab::Git::User.from_gitlab(user).to_gitaly }
describe '#user_create_branch' do
@@ -274,7 +274,6 @@ describe Gitlab::GitalyClient::OperationService do
end
describe '#user_squash' do
- let(:branch_name) { 'my-branch' }
let(:squash_id) { '1' }
let(:start_sha) { 'b83d6e391c22777fca1ed3012fce84f633d7fed0' }
let(:end_sha) { '54cec5282aa9f21856362fe321c800c236a61615' }
@@ -284,7 +283,6 @@ describe Gitlab::GitalyClient::OperationService do
repository: repository.gitaly_repository,
user: gitaly_user,
squash_id: squash_id.to_s,
- branch: branch_name,
start_sha: start_sha,
end_sha: end_sha,
author: gitaly_user,
@@ -295,7 +293,7 @@ describe Gitlab::GitalyClient::OperationService do
let(:response) { Gitaly::UserSquashResponse.new(squash_sha: squash_sha) }
subject do
- client.user_squash(user, squash_id, branch_name, start_sha, end_sha, user, commit_message)
+ client.user_squash(user, squash_id, start_sha, end_sha, user, commit_message)
end
it 'sends a user_squash message and returns the squash sha' do
diff --git a/spec/lib/gitlab/gitaly_client/remote_service_spec.rb b/spec/lib/gitlab/gitaly_client/remote_service_spec.rb
index 73ae4cd95ce..2658414d9b0 100644
--- a/spec/lib/gitlab/gitaly_client/remote_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/remote_service_spec.rb
@@ -34,19 +34,6 @@ describe Gitlab::GitalyClient::RemoteService do
end
end
- describe '#fetch_internal_remote' do
- let(:remote_repository) { Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '', 'group/project') }
-
- it 'sends an fetch_internal_remote message and returns the result value' do
- expect_any_instance_of(Gitaly::RemoteService::Stub)
- .to receive(:fetch_internal_remote)
- .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
- .and_return(double(result: true))
-
- expect(client.fetch_internal_remote(remote_repository)).to be(true)
- end
- end
-
describe '#find_remote_root_ref' do
it 'sends a find_remote_root_ref message and returns the root ref' do
expect_any_instance_of(Gitaly::RemoteService::Stub)
diff --git a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
index 503ac57ade6..5f4147f6ff6 100644
--- a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
@@ -275,7 +275,18 @@ describe Gitlab::GitalyClient::RepositoryService do
end
end
- describe 'remove' do
+ describe '#rename' do
+ it 'sends a rename_repository message' do
+ expect_any_instance_of(Gitaly::RepositoryService::Stub)
+ .to receive(:rename_repository)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+ .and_return(double(value: true))
+
+ client.rename('some/new/path')
+ end
+ end
+
+ describe '#remove' do
it 'sends a remove_repository message' do
expect_any_instance_of(Gitaly::RepositoryService::Stub)
.to receive(:remove_repository)
@@ -286,14 +297,15 @@ describe Gitlab::GitalyClient::RepositoryService do
end
end
- describe 'rename' do
- it 'sends a rename_repository message' do
+ describe '#replicate' do
+ let(:source_repository) { Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '', 'group/project') }
+
+ it 'sends a replicate_repository message' do
expect_any_instance_of(Gitaly::RepositoryService::Stub)
- .to receive(:rename_repository)
+ .to receive(:replicate_repository)
.with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
- .and_return(double(value: true))
- client.rename('some/new/path')
+ client.replicate(source_repository)
end
end
end
diff --git a/spec/lib/gitlab/github_import/caching_spec.rb b/spec/lib/gitlab/github_import/caching_spec.rb
deleted file mode 100644
index 18c3e382532..00000000000
--- a/spec/lib/gitlab/github_import/caching_spec.rb
+++ /dev/null
@@ -1,119 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Gitlab::GithubImport::Caching, :clean_gitlab_redis_cache do
- describe '.read' do
- it 'reads a value from the cache' do
- described_class.write('foo', 'bar')
-
- expect(described_class.read('foo')).to eq('bar')
- end
-
- it 'returns nil if the cache key does not exist' do
- expect(described_class.read('foo')).to be_nil
- end
-
- it 'refreshes the cache key if a value is present' do
- described_class.write('foo', 'bar')
-
- redis = double(:redis)
-
- expect(redis).to receive(:get).with(/foo/).and_return('bar')
- expect(redis).to receive(:expire).with(/foo/, described_class::TIMEOUT)
- expect(Gitlab::Redis::Cache).to receive(:with).twice.and_yield(redis)
-
- described_class.read('foo')
- end
-
- it 'does not refresh the cache key if a value is empty' do
- described_class.write('foo', nil)
-
- redis = double(:redis)
-
- expect(redis).to receive(:get).with(/foo/).and_return('')
- expect(redis).not_to receive(:expire)
- expect(Gitlab::Redis::Cache).to receive(:with).and_yield(redis)
-
- described_class.read('foo')
- end
- end
-
- describe '.read_integer' do
- it 'returns an Integer' do
- described_class.write('foo', '10')
-
- expect(described_class.read_integer('foo')).to eq(10)
- end
-
- it 'returns nil if no value was found' do
- expect(described_class.read_integer('foo')).to be_nil
- end
- end
-
- describe '.write' do
- it 'writes a value to the cache and returns the written value' do
- expect(described_class.write('foo', 10)).to eq(10)
- expect(described_class.read('foo')).to eq('10')
- end
- end
-
- describe '.set_add' do
- it 'adds a value to a set' do
- described_class.set_add('foo', 10)
- described_class.set_add('foo', 10)
-
- key = described_class.cache_key_for('foo')
- values = Gitlab::Redis::Cache.with { |r| r.smembers(key) }
-
- expect(values).to eq(['10'])
- end
- end
-
- describe '.set_includes?' do
- it 'returns false when the key does not exist' do
- expect(described_class.set_includes?('foo', 10)).to eq(false)
- end
-
- it 'returns false when the value is not present in the set' do
- described_class.set_add('foo', 10)
-
- expect(described_class.set_includes?('foo', 20)).to eq(false)
- end
-
- it 'returns true when the set includes the given value' do
- described_class.set_add('foo', 10)
-
- expect(described_class.set_includes?('foo', 10)).to eq(true)
- end
- end
-
- describe '.write_multiple' do
- it 'sets multiple keys' do
- mapping = { 'foo' => 10, 'bar' => 20 }
-
- described_class.write_multiple(mapping)
-
- mapping.each do |key, value|
- full_key = described_class.cache_key_for(key)
- found = Gitlab::Redis::Cache.with { |r| r.get(full_key) }
-
- expect(found).to eq(value.to_s)
- end
- end
- end
-
- describe '.expire' do
- it 'sets the expiration time of a key' do
- timeout = 1.hour.to_i
-
- described_class.write('foo', 'bar', timeout: 2.hours.to_i)
- described_class.expire('foo', timeout)
-
- key = described_class.cache_key_for('foo')
- found_ttl = Gitlab::Redis::Cache.with { |r| r.ttl(key) }
-
- expect(found_ttl).to be <= timeout
- end
- end
-end
diff --git a/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb b/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
index c65b28fafbf..e26ac7bf81e 100644
--- a/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
@@ -11,10 +11,15 @@ describe Gitlab::GithubImport::Importer::RepositoryImporter do
double(
:wiki,
disk_path: 'foo.wiki',
- full_path: 'group/foo.wiki'
+ full_path: 'group/foo.wiki',
+ repository: wiki_repository
)
end
+ let(:wiki_repository) do
+ double(:wiki_repository)
+ end
+
let(:project) do
double(
:project,
@@ -221,17 +226,19 @@ describe Gitlab::GithubImport::Importer::RepositoryImporter do
describe '#import_wiki_repository' do
it 'imports the wiki repository' do
- expect(importer.gitlab_shell)
+ expect(wiki_repository)
.to receive(:import_repository)
- .with('foo', 'foo.wiki', 'foo.wiki.git', 'group/foo.wiki')
+ .with(importer.wiki_url)
+ .and_return(true)
expect(importer.import_wiki_repository).to eq(true)
end
it 'marks the import as failed and creates an empty repo if an error was raised' do
- expect(importer.gitlab_shell)
+ expect(wiki_repository)
.to receive(:import_repository)
- .and_raise(Gitlab::Shell::Error)
+ .with(importer.wiki_url)
+ .and_raise(Gitlab::Git::CommandError)
expect(importer)
.to receive(:fail_import)
diff --git a/spec/lib/gitlab/github_import/issuable_finder_spec.rb b/spec/lib/gitlab/github_import/issuable_finder_spec.rb
index b8a6feb6c73..55add863d43 100644
--- a/spec/lib/gitlab/github_import/issuable_finder_spec.rb
+++ b/spec/lib/gitlab/github_import/issuable_finder_spec.rb
@@ -30,7 +30,7 @@ describe Gitlab::GithubImport::IssuableFinder, :clean_gitlab_redis_cache do
describe '#cache_database_id' do
it 'caches the ID of a database row' do
- expect(Gitlab::GithubImport::Caching)
+ expect(Gitlab::Cache::Import::Caching)
.to receive(:write)
.with('github-import/issuable-finder/4/MergeRequest/1', 10)
diff --git a/spec/lib/gitlab/github_import/label_finder_spec.rb b/spec/lib/gitlab/github_import/label_finder_spec.rb
index 039ae27ad57..bb946a15a2d 100644
--- a/spec/lib/gitlab/github_import/label_finder_spec.rb
+++ b/spec/lib/gitlab/github_import/label_finder_spec.rb
@@ -21,7 +21,7 @@ describe Gitlab::GithubImport::LabelFinder, :clean_gitlab_redis_cache do
it 'returns nil for an empty cache key' do
key = finder.cache_key_for(bug.name)
- Gitlab::GithubImport::Caching.write(key, '')
+ Gitlab::Cache::Import::Caching.write(key, '')
expect(finder.id_for(bug.name)).to be_nil
end
@@ -40,7 +40,7 @@ describe Gitlab::GithubImport::LabelFinder, :clean_gitlab_redis_cache do
describe '#build_cache' do
it 'builds the cache of all project labels' do
- expect(Gitlab::GithubImport::Caching)
+ expect(Gitlab::Cache::Import::Caching)
.to receive(:write_multiple)
.with(
{
diff --git a/spec/lib/gitlab/github_import/milestone_finder_spec.rb b/spec/lib/gitlab/github_import/milestone_finder_spec.rb
index 407e2e67ec9..ecb533b7e39 100644
--- a/spec/lib/gitlab/github_import/milestone_finder_spec.rb
+++ b/spec/lib/gitlab/github_import/milestone_finder_spec.rb
@@ -22,7 +22,7 @@ describe Gitlab::GithubImport::MilestoneFinder, :clean_gitlab_redis_cache do
it 'returns nil for an empty cache key' do
key = finder.cache_key_for(milestone.iid)
- Gitlab::GithubImport::Caching.write(key, '')
+ Gitlab::Cache::Import::Caching.write(key, '')
expect(finder.id_for(issuable)).to be_nil
end
@@ -41,7 +41,7 @@ describe Gitlab::GithubImport::MilestoneFinder, :clean_gitlab_redis_cache do
describe '#build_cache' do
it 'builds the cache of all project milestones' do
- expect(Gitlab::GithubImport::Caching)
+ expect(Gitlab::Cache::Import::Caching)
.to receive(:write_multiple)
.with("github-import/milestone-finder/#{project.id}/1" => milestone.id)
.and_call_original
diff --git a/spec/lib/gitlab/github_import/page_counter_spec.rb b/spec/lib/gitlab/github_import/page_counter_spec.rb
index 87f3ce45fd3..95125c9c22f 100644
--- a/spec/lib/gitlab/github_import/page_counter_spec.rb
+++ b/spec/lib/gitlab/github_import/page_counter_spec.rb
@@ -12,7 +12,7 @@ describe Gitlab::GithubImport::PageCounter, :clean_gitlab_redis_cache do
end
it 'sets the initial page number to the cached value when one is present' do
- Gitlab::GithubImport::Caching.write(counter.cache_key, 2)
+ Gitlab::Cache::Import::Caching.write(counter.cache_key, 2)
expect(described_class.new(project, :issues).current).to eq(2)
end
diff --git a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
index f4d107e3dce..a6ae99b395c 100644
--- a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
+++ b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
@@ -57,7 +57,7 @@ describe Gitlab::GithubImport::ParallelScheduling do
expect(importer).to receive(:parallel_import)
- expect(Gitlab::GithubImport::Caching)
+ expect(Gitlab::Cache::Import::Caching)
.to receive(:expire)
.with(importer.already_imported_cache_key, a_kind_of(Numeric))
@@ -287,7 +287,7 @@ describe Gitlab::GithubImport::ParallelScheduling do
.with(object)
.and_return(object.id)
- expect(Gitlab::GithubImport::Caching)
+ expect(Gitlab::Cache::Import::Caching)
.to receive(:set_add)
.with(importer.already_imported_cache_key, object.id)
.and_call_original
diff --git a/spec/lib/gitlab/github_import/user_finder_spec.rb b/spec/lib/gitlab/github_import/user_finder_spec.rb
index 74b5c1c52cd..8764ebef32b 100644
--- a/spec/lib/gitlab/github_import/user_finder_spec.rb
+++ b/spec/lib/gitlab/github_import/user_finder_spec.rb
@@ -162,7 +162,7 @@ describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache do
context 'when an Email address is cached' do
it 'reads the Email address from the cache' do
- expect(Gitlab::GithubImport::Caching)
+ expect(Gitlab::Cache::Import::Caching)
.to receive(:read)
.and_return(email)
@@ -182,7 +182,7 @@ describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache do
it 'caches the Email address when an Email address is available' do
expect(client).to receive(:user).with('kittens').and_return(user)
- expect(Gitlab::GithubImport::Caching)
+ expect(Gitlab::Cache::Import::Caching)
.to receive(:write)
.with(an_instance_of(String), email)
@@ -195,7 +195,7 @@ describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache do
.with('kittens')
.and_return(nil)
- expect(Gitlab::GithubImport::Caching)
+ expect(Gitlab::Cache::Import::Caching)
.not_to receive(:write)
expect(finder.email_for_github_username('kittens')).to be_nil
@@ -207,7 +207,7 @@ describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache do
let(:id) { 4 }
it 'reads a user ID from the cache' do
- Gitlab::GithubImport::Caching
+ Gitlab::Cache::Import::Caching
.write(described_class::ID_CACHE_KEY % id, 4)
expect(finder.cached_id_for_github_id(id)).to eq([true, 4])
@@ -222,7 +222,7 @@ describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache do
let(:email) { 'kittens@example.com' }
it 'reads a user ID from the cache' do
- Gitlab::GithubImport::Caching
+ Gitlab::Cache::Import::Caching
.write(described_class::ID_FOR_EMAIL_CACHE_KEY % email, 4)
expect(finder.cached_id_for_github_email(email)).to eq([true, 4])
@@ -241,7 +241,7 @@ describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache do
.with(id)
.and_return(42)
- expect(Gitlab::GithubImport::Caching)
+ expect(Gitlab::Cache::Import::Caching)
.to receive(:write)
.with(described_class::ID_CACHE_KEY % id, 42)
@@ -253,7 +253,7 @@ describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache do
.with(id)
.and_return(nil)
- expect(Gitlab::GithubImport::Caching)
+ expect(Gitlab::Cache::Import::Caching)
.to receive(:write)
.with(described_class::ID_CACHE_KEY % id, nil)
@@ -269,7 +269,7 @@ describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache do
.with(email)
.and_return(42)
- expect(Gitlab::GithubImport::Caching)
+ expect(Gitlab::Cache::Import::Caching)
.to receive(:write)
.with(described_class::ID_FOR_EMAIL_CACHE_KEY % email, 42)
@@ -281,7 +281,7 @@ describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache do
.with(email)
.and_return(nil)
- expect(Gitlab::GithubImport::Caching)
+ expect(Gitlab::Cache::Import::Caching)
.to receive(:write)
.with(described_class::ID_FOR_EMAIL_CACHE_KEY % email, nil)
@@ -317,13 +317,13 @@ describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache do
describe '#read_id_from_cache' do
it 'reads an ID from the cache' do
- Gitlab::GithubImport::Caching.write('foo', 10)
+ Gitlab::Cache::Import::Caching.write('foo', 10)
expect(finder.read_id_from_cache('foo')).to eq([true, 10])
end
it 'reads a cache key with an empty value' do
- Gitlab::GithubImport::Caching.write('foo', nil)
+ Gitlab::Cache::Import::Caching.write('foo', nil)
expect(finder.read_id_from_cache('foo')).to eq([true, nil])
end
diff --git a/spec/lib/gitlab/github_import_spec.rb b/spec/lib/gitlab/github_import_spec.rb
index c3ddac01c87..290d66243aa 100644
--- a/spec/lib/gitlab/github_import_spec.rb
+++ b/spec/lib/gitlab/github_import_spec.rb
@@ -35,7 +35,7 @@ describe Gitlab::GithubImport do
end
it 'caches the ghost user ID' do
- expect(Gitlab::GithubImport::Caching)
+ expect(Gitlab::Cache::Import::Caching)
.to receive(:write)
.once
.and_call_original
diff --git a/spec/lib/gitlab/gl_repository/repo_type_spec.rb b/spec/lib/gitlab/gl_repository/repo_type_spec.rb
index 7cf0442fbe1..6185b068d4c 100644
--- a/spec/lib/gitlab/gl_repository/repo_type_spec.rb
+++ b/spec/lib/gitlab/gl_repository/repo_type_spec.rb
@@ -5,46 +5,62 @@ describe Gitlab::GlRepository::RepoType do
let_it_be(:project) { create(:project) }
let_it_be(:personal_snippet) { create(:personal_snippet, author: project.owner) }
let_it_be(:project_snippet) { create(:project_snippet, project: project, author: project.owner) }
+ let(:project_path) { project.repository.full_path }
+ let(:wiki_path) { project.wiki.repository.full_path }
+ let(:personal_snippet_path) { "snippets/#{personal_snippet.id}" }
+ let(:project_snippet_path) { "#{project.full_path}/snippets/#{project_snippet.id}" }
describe Gitlab::GlRepository::PROJECT do
it_behaves_like 'a repo type' do
- let(:expected_identifier) { "project-#{project.id}" }
let(:expected_id) { project.id.to_s }
+ let(:expected_identifier) { "project-#{expected_id}" }
let(:expected_suffix) { '' }
- let(:expected_repository) { project.repository }
let(:expected_container) { project }
+ let(:expected_repository) { expected_container.repository }
end
it 'knows its type' do
- expect(described_class).not_to be_wiki
- expect(described_class).to be_project
- expect(described_class).not_to be_snippet
+ aggregate_failures do
+ expect(described_class).not_to be_wiki
+ expect(described_class).to be_project
+ expect(described_class).not_to be_snippet
+ end
end
it 'checks if repository path is valid' do
- expect(described_class.valid?(project.repository.full_path)).to be_truthy
- expect(described_class.valid?(project.wiki.repository.full_path)).to be_truthy
+ aggregate_failures do
+ expect(described_class.valid?(project_path)).to be_truthy
+ expect(described_class.valid?(wiki_path)).to be_truthy
+ expect(described_class.valid?(personal_snippet_path)).to be_truthy
+ expect(described_class.valid?(project_snippet_path)).to be_truthy
+ end
end
end
describe Gitlab::GlRepository::WIKI do
it_behaves_like 'a repo type' do
- let(:expected_identifier) { "wiki-#{project.id}" }
let(:expected_id) { project.id.to_s }
+ let(:expected_identifier) { "wiki-#{expected_id}" }
let(:expected_suffix) { '.wiki' }
- let(:expected_repository) { project.wiki.repository }
let(:expected_container) { project }
+ let(:expected_repository) { expected_container.wiki.repository }
end
it 'knows its type' do
- expect(described_class).to be_wiki
- expect(described_class).not_to be_project
- expect(described_class).not_to be_snippet
+ aggregate_failures do
+ expect(described_class).to be_wiki
+ expect(described_class).not_to be_project
+ expect(described_class).not_to be_snippet
+ end
end
it 'checks if repository path is valid' do
- expect(described_class.valid?(project.repository.full_path)).to be_falsey
- expect(described_class.valid?(project.wiki.repository.full_path)).to be_truthy
+ aggregate_failures do
+ expect(described_class.valid?(project_path)).to be_falsey
+ expect(described_class.valid?(wiki_path)).to be_truthy
+ expect(described_class.valid?(personal_snippet_path)).to be_falsey
+ expect(described_class.valid?(project_snippet_path)).to be_falsey
+ end
end
end
@@ -59,9 +75,20 @@ describe Gitlab::GlRepository::RepoType do
end
it 'knows its type' do
- expect(described_class).to be_snippet
- expect(described_class).not_to be_wiki
- expect(described_class).not_to be_project
+ aggregate_failures do
+ expect(described_class).to be_snippet
+ expect(described_class).not_to be_wiki
+ expect(described_class).not_to be_project
+ end
+ end
+
+ it 'checks if repository path is valid' do
+ aggregate_failures do
+ expect(described_class.valid?(project_path)).to be_falsey
+ expect(described_class.valid?(wiki_path)).to be_falsey
+ expect(described_class.valid?(personal_snippet_path)).to be_truthy
+ expect(described_class.valid?(project_snippet_path)).to be_truthy
+ end
end
end
@@ -75,9 +102,20 @@ describe Gitlab::GlRepository::RepoType do
end
it 'knows its type' do
- expect(described_class).to be_snippet
- expect(described_class).not_to be_wiki
- expect(described_class).not_to be_project
+ aggregate_failures do
+ expect(described_class).to be_snippet
+ expect(described_class).not_to be_wiki
+ expect(described_class).not_to be_project
+ end
+ end
+
+ it 'checks if repository path is valid' do
+ aggregate_failures do
+ expect(described_class.valid?(project_path)).to be_falsey
+ expect(described_class.valid?(wiki_path)).to be_falsey
+ expect(described_class.valid?(personal_snippet_path)).to be_truthy
+ expect(described_class.valid?(project_snippet_path)).to be_truthy
+ end
end
end
end
diff --git a/spec/lib/gitlab/gl_repository_spec.rb b/spec/lib/gitlab/gl_repository_spec.rb
index 3cfc4c2a132..858f436047e 100644
--- a/spec/lib/gitlab/gl_repository_spec.rb
+++ b/spec/lib/gitlab/gl_repository_spec.rb
@@ -5,13 +5,18 @@ require 'spec_helper'
describe ::Gitlab::GlRepository do
describe '.parse' do
let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:snippet) { create(:personal_snippet) }
it 'parses a project gl_repository' do
- expect(described_class.parse("project-#{project.id}")).to eq([project, Gitlab::GlRepository::PROJECT])
+ expect(described_class.parse("project-#{project.id}")).to eq([project, project, Gitlab::GlRepository::PROJECT])
end
it 'parses a wiki gl_repository' do
- expect(described_class.parse("wiki-#{project.id}")).to eq([project, Gitlab::GlRepository::WIKI])
+ expect(described_class.parse("wiki-#{project.id}")).to eq([project, project, Gitlab::GlRepository::WIKI])
+ end
+
+ it 'parses a snippet gl_repository' do
+ expect(described_class.parse("snippet-#{snippet.id}")).to eq([snippet, nil, Gitlab::GlRepository::SNIPPET])
end
it 'throws an argument error on an invalid gl_repository type' do
diff --git a/spec/lib/gitlab/global_id_spec.rb b/spec/lib/gitlab/global_id_spec.rb
index d35b5da0b75..719743ed5dc 100644
--- a/spec/lib/gitlab/global_id_spec.rb
+++ b/spec/lib/gitlab/global_id_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
describe Gitlab::GlobalId do
describe '.build' do
- set(:object) { create(:issue) }
+ let_it_be(:object) { create(:issue) }
it 'returns a standard GlobalId if only object is passed' do
expect(described_class.build(object).to_s).to eq(object.to_global_id.to_s)
diff --git a/spec/lib/gitlab/grape_logging/formatters/lograge_with_timestamp_spec.rb b/spec/lib/gitlab/grape_logging/formatters/lograge_with_timestamp_spec.rb
new file mode 100644
index 00000000000..d3b108f60ff
--- /dev/null
+++ b/spec/lib/gitlab/grape_logging/formatters/lograge_with_timestamp_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::GrapeLogging::Formatters::LogrageWithTimestamp do
+ let(:log_entry) do
+ {
+ status: 200,
+ time: {
+ total: 758.58,
+ db: 77.06,
+ view: 681.52
+ },
+ method: 'PUT',
+ path: '/api/v4/projects/1',
+ params: {
+ 'description': '[FILTERED]',
+ 'name': 'gitlab test'
+ },
+ host: 'localhost',
+ remote_ip: '127.0.0.1',
+ ua: 'curl/7.66.0',
+ route: '/api/:version/projects/:id',
+ user_id: 1,
+ username: 'root',
+ queue_duration: 1764.06,
+ gitaly_calls: 6,
+ gitaly_duration: 20.0,
+ correlation_id: 'WMefXn60429'
+ }
+ end
+ let(:time) { Time.now }
+ let(:result) { JSON.parse(subject) }
+
+ subject { described_class.new.call(:info, time, nil, log_entry) }
+
+  it 'turns the log entry into valid JSON' do
+ expect(result['status']).to eq(200)
+ end
+
+ it 're-formats the params hash' do
+ params = result['params']
+
+ expect(params).to eq([
+ { 'key' => 'description', 'value' => '[FILTERED]' },
+ { 'key' => 'name', 'value' => 'gitlab test' }
+ ])
+ end
+end
diff --git a/spec/lib/gitlab/graphql/docs/renderer_spec.rb b/spec/lib/gitlab/graphql/docs/renderer_spec.rb
new file mode 100644
index 00000000000..5ba70bb8f0a
--- /dev/null
+++ b/spec/lib/gitlab/graphql/docs/renderer_spec.rb
@@ -0,0 +1,96 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Graphql::Docs::Renderer do
+ describe '#contents' do
+ # Returns a Schema that uses the given `type`
+ def mock_schema(type)
+ query_type = Class.new(GraphQL::Schema::Object) do
+ graphql_name 'QueryType'
+
+ field :foo, type, null: true
+ end
+
+ GraphQL::Schema.define(query: query_type)
+ end
+
+ let_it_be(:template) { Rails.root.join('lib/gitlab/graphql/docs/templates/', 'default.md.haml') }
+
+ subject(:contents) do
+ described_class.new(
+ mock_schema(type).graphql_definition,
+ output_dir: nil,
+ template: template
+ ).contents
+ end
+
+ context 'A type with a field with a [Array] return type' do
+ let(:type) do
+ Class.new(GraphQL::Schema::Object) do
+ graphql_name 'ArrayTest'
+
+ field :foo, [GraphQL::STRING_TYPE], null: false, description: 'A description'
+ end
+ end
+
+ specify do
+ expectation = <<~DOC
+ ## ArrayTest
+
+ | Name | Type | Description |
+ | --- | ---- | ---------- |
+ | `foo` | String! => Array | A description |
+ DOC
+
+ is_expected.to include(expectation)
+ end
+ end
+
+ context 'A type with fields defined in reverse alphabetical order' do
+ let(:type) do
+ Class.new(GraphQL::Schema::Object) do
+ graphql_name 'OrderingTest'
+
+ field :foo, GraphQL::STRING_TYPE, null: false, description: 'A description of foo field'
+ field :bar, GraphQL::STRING_TYPE, null: false, description: 'A description of bar field'
+ end
+ end
+
+ specify do
+ expectation = <<~DOC
+ ## OrderingTest
+
+ | Name | Type | Description |
+ | --- | ---- | ---------- |
+ | `bar` | String! | A description of bar field |
+ | `foo` | String! | A description of foo field |
+ DOC
+
+ is_expected.to include(expectation)
+ end
+ end
+
+ context 'A type with a deprecated field' do
+ let(:type) do
+ Class.new(GraphQL::Schema::Object) do
+ graphql_name 'DeprecatedTest'
+
+ field :foo, GraphQL::STRING_TYPE, null: false, deprecation_reason: 'This is deprecated', description: 'A description'
+ end
+ end
+
+ specify do
+ expectation = <<~DOC
+ ## DeprecatedTest
+
+ | Name | Type | Description |
+ | --- | ---- | ---------- |
+ | `foo` **{warning-solid}** | String! | **Deprecated:** This is deprecated |
+ DOC
+
+ is_expected.to include(expectation)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/graphql/loaders/batch_lfs_oid_loader_spec.rb b/spec/lib/gitlab/graphql/loaders/batch_lfs_oid_loader_spec.rb
index 1e8de144b8d..b3d57c899d5 100644
--- a/spec/lib/gitlab/graphql/loaders/batch_lfs_oid_loader_spec.rb
+++ b/spec/lib/gitlab/graphql/loaders/batch_lfs_oid_loader_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe Gitlab::Graphql::Loaders::BatchLfsOidLoader do
include GraphqlHelpers
- set(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
let(:repository) { project.repository }
let(:blob) { Gitlab::Graphql::Representation::TreeEntry.new(repository.blob_at('master', 'files/lfs/lfs_object.iso'), repository) }
let(:otherblob) { Gitlab::Graphql::Representation::TreeEntry.new(repository.blob_at('master', 'README'), repository) }
diff --git a/spec/lib/gitlab/graphql/pagination/offset_active_record_relation_connection_spec.rb b/spec/lib/gitlab/graphql/pagination/offset_active_record_relation_connection_spec.rb
new file mode 100644
index 00000000000..2269b4def82
--- /dev/null
+++ b/spec/lib/gitlab/graphql/pagination/offset_active_record_relation_connection_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Graphql::Pagination::OffsetActiveRecordRelationConnection do
+ it 'subclasses from GraphQL::Relay::RelationConnection' do
+ expect(described_class.superclass).to eq GraphQL::Relay::RelationConnection
+ end
+end
diff --git a/spec/lib/gitlab/graphql/timeout_spec.rb b/spec/lib/gitlab/graphql/timeout_spec.rb
new file mode 100644
index 00000000000..8e04586d0ec
--- /dev/null
+++ b/spec/lib/gitlab/graphql/timeout_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Graphql::Timeout do
+  it 'inherits from GraphQL::Schema::Timeout' do
+ expect(described_class.superclass).to eq GraphQL::Schema::Timeout
+ end
+
+ it 'sends the error to our GraphQL logger' do
+ parent_type = double(graphql_name: 'parent_type')
+ field = double(graphql_name: 'field')
+ query = double(query_string: 'query_string', provided_variables: 'provided_variables')
+ error = GraphQL::Schema::Timeout::TimeoutError.new(parent_type, field)
+
+ expect(Gitlab::GraphqlLogger)
+ .to receive(:error)
+ .with(message: 'Timeout on parent_type.field', query: 'query_string', query_variables: 'provided_variables')
+
+ timeout = described_class.new(max_seconds: 30)
+ timeout.handle_timeout(error, query)
+ end
+end
diff --git a/spec/lib/gitlab/hashed_storage/migrator_spec.rb b/spec/lib/gitlab/hashed_storage/migrator_spec.rb
index f3cbb811679..c59b152a982 100644
--- a/spec/lib/gitlab/hashed_storage/migrator_spec.rb
+++ b/spec/lib/gitlab/hashed_storage/migrator_spec.rb
@@ -186,7 +186,7 @@ describe Gitlab::HashedStorage::Migrator, :redis do
end
describe 'migration_pending?' do
- set(:project) { create(:project, :empty_repo) }
+ let_it_be(:project) { create(:project, :empty_repo) }
it 'returns true when there are MigratorWorker jobs scheduled' do
Sidekiq::Testing.disable! do
@@ -210,7 +210,7 @@ describe Gitlab::HashedStorage::Migrator, :redis do
end
describe 'rollback_pending?' do
- set(:project) { create(:project, :empty_repo) }
+ let_it_be(:project) { create(:project, :empty_repo) }
it 'returns true when there are RollbackerWorker jobs scheduled' do
Sidekiq::Testing.disable! do
diff --git a/spec/lib/gitlab/hook_data/issuable_builder_spec.rb b/spec/lib/gitlab/hook_data/issuable_builder_spec.rb
index 5135c84df19..cff489e0f3b 100644
--- a/spec/lib/gitlab/hook_data/issuable_builder_spec.rb
+++ b/spec/lib/gitlab/hook_data/issuable_builder_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::HookData::IssuableBuilder do
- set(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
# This shared example requires a `builder` and `user` variable
shared_examples 'issuable hook data' do |kind|
diff --git a/spec/lib/gitlab/hook_data/issue_builder_spec.rb b/spec/lib/gitlab/hook_data/issue_builder_spec.rb
index 8008f3d72b2..168603d6fcc 100644
--- a/spec/lib/gitlab/hook_data/issue_builder_spec.rb
+++ b/spec/lib/gitlab/hook_data/issue_builder_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe Gitlab::HookData::IssueBuilder do
- set(:label) { create(:label) }
- set(:issue) { create(:labeled_issue, labels: [label], project: label.project) }
+ let_it_be(:label) { create(:label) }
+ let_it_be(:issue) { create(:labeled_issue, labels: [label], project: label.project) }
let(:builder) { described_class.new(issue) }
describe '#build' do
diff --git a/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb b/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb
index 506354e370c..67fa0a7426a 100644
--- a/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb
+++ b/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::HookData::MergeRequestBuilder do
- set(:merge_request) { create(:merge_request) }
+ let_it_be(:merge_request) { create(:merge_request) }
let(:builder) { described_class.new(merge_request) }
describe '#build' do
diff --git a/spec/lib/gitlab/import/merge_request_helpers_spec.rb b/spec/lib/gitlab/import/merge_request_helpers_spec.rb
index 2b165994152..d81251c4a43 100644
--- a/spec/lib/gitlab/import/merge_request_helpers_spec.rb
+++ b/spec/lib/gitlab/import/merge_request_helpers_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe Gitlab::Import::MergeRequestHelpers, type: :helper do
- set(:project) { create(:project, :repository) }
- set(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
describe '.create_merge_request_without_hooks' do
let(:iid) { 42 }
diff --git a/spec/lib/gitlab/import_export/after_export_strategies/base_after_export_strategy_spec.rb b/spec/lib/gitlab/import_export/after_export_strategies/base_after_export_strategy_spec.rb
index 86ceb97b250..1631de393b5 100644
--- a/spec/lib/gitlab/import_export/after_export_strategies/base_after_export_strategy_spec.rb
+++ b/spec/lib/gitlab/import_export/after_export_strategies/base_after_export_strategy_spec.rb
@@ -3,6 +3,12 @@
require 'spec_helper'
describe Gitlab::ImportExport::AfterExportStrategies::BaseAfterExportStrategy do
+ before do
+ allow_next_instance_of(ProjectExportWorker) do |job|
+ allow(job).to receive(:jid).and_return(SecureRandom.hex(8))
+ end
+ end
+
let!(:service) { described_class.new }
let!(:project) { create(:project, :with_export) }
let(:shared) { project.import_export_shared }
diff --git a/spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb b/spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb
index 95c47d15f8f..7792daed99c 100644
--- a/spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb
+++ b/spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb
@@ -5,6 +5,12 @@ require 'spec_helper'
describe Gitlab::ImportExport::AfterExportStrategies::WebUploadStrategy do
include StubRequests
+ before do
+ allow_next_instance_of(ProjectExportWorker) do |job|
+ allow(job).to receive(:jid).and_return(SecureRandom.hex(8))
+ end
+ end
+
let(:example_url) { 'http://www.example.com' }
let(:strategy) { subject.new(url: example_url, http_method: 'post') }
let!(:project) { create(:project, :with_export) }
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 4dadb310029..37b3e4a4a22 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -9,6 +9,8 @@ issues:
- notes
- resource_label_events
- resource_weight_events
+- resource_milestone_events
+- sent_notifications
- sentry_issue
- label_links
- labels
@@ -109,6 +111,7 @@ merge_requests:
- milestone
- notes
- resource_label_events
+- resource_milestone_events
- label_links
- labels
- last_edited_by
@@ -190,9 +193,11 @@ ci_pipelines:
- environments
- chat_data
- source_pipeline
+- ref_status
- source_bridge
- source_job
- sourced_pipelines
+- source_project
- triggered_by_pipeline
- triggered_pipelines
- child_pipelines
@@ -357,6 +362,7 @@ project:
- ci_pipelines
- all_pipelines
- stages
+- ci_refs
- builds
- runner_projects
- runners
@@ -461,6 +467,9 @@ project:
- container_expiration_policy
- resource_groups
- autoclose_referenced_issues
+- status_page_setting
+- requirements
+- export_jobs
award_emoji:
- awardable
- user
@@ -565,6 +574,8 @@ designs: *design
actions:
- design
- version
+- uploads
+- file_uploads
versions: &version
- author
- issue
@@ -602,3 +613,6 @@ epic:
- events
- resource_label_events
- user_mentions
+epic_issue:
+- epic
+- issue
diff --git a/spec/lib/gitlab/import_export/base/object_builder_spec.rb b/spec/lib/gitlab/import_export/base/object_builder_spec.rb
new file mode 100644
index 00000000000..e5242ae0bfc
--- /dev/null
+++ b/spec/lib/gitlab/import_export/base/object_builder_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::Base::ObjectBuilder do
+ let(:project) do
+ create(:project, :repository,
+ :builds_disabled,
+ :issues_disabled,
+ name: 'project',
+ path: 'project')
+ end
+ let(:klass) { Milestone }
+ let(:attributes) { { 'title' => 'Test Base::ObjectBuilder Milestone', 'project' => project } }
+
+ subject { described_class.build(klass, attributes) }
+
+ describe '#build' do
+ context 'when object exists' do
+ context 'when where_clauses are implemented' do
+ before do
+ allow_next_instance_of(described_class) do |object_builder|
+ allow(object_builder).to receive(:where_clauses).and_return([klass.arel_table['title'].eq(attributes['title'])])
+ end
+ end
+
+ let!(:milestone) { create(:milestone, title: attributes['title'], project: project) }
+
+ it 'finds existing object instead of creating one' do
+ expect(subject).to eq(milestone)
+ end
+ end
+
+ context 'when where_clauses are not implemented' do
+ it 'raises NotImplementedError' do
+ expect { subject }.to raise_error(NotImplementedError)
+ end
+ end
+ end
+
+ context 'when object does not exist' do
+ before do
+ allow_next_instance_of(described_class) do |object_builder|
+ allow(object_builder).to receive(:find_object).and_return(nil)
+ end
+ end
+
+ it 'creates new object' do
+ expect { subject }.to change { Milestone.count }.from(0).to(1)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/base/relation_factory_spec.rb b/spec/lib/gitlab/import_export/base/relation_factory_spec.rb
new file mode 100644
index 00000000000..50d93763ad6
--- /dev/null
+++ b/spec/lib/gitlab/import_export/base/relation_factory_spec.rb
@@ -0,0 +1,152 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::Base::RelationFactory do
+ let(:user) { create(:admin) }
+ let(:project) { create(:project) }
+ let(:members_mapper) { double('members_mapper').as_null_object }
+ let(:relation_sym) { :project_snippets }
+ let(:relation_hash) { {} }
+ let(:excluded_keys) { [] }
+
+ subject do
+ described_class.create(relation_sym: relation_sym,
+ relation_hash: relation_hash,
+ object_builder: Gitlab::ImportExport::Project::ObjectBuilder,
+ members_mapper: members_mapper,
+ user: user,
+ importable: project,
+ excluded_keys: excluded_keys)
+ end
+
+ describe '#create' do
+ context 'when relation is invalid' do
+ before do
+ expect_next_instance_of(described_class) do |relation_factory|
+ expect(relation_factory).to receive(:invalid_relation?).and_return(true)
+ end
+ end
+
+ it 'returns without creating new relations' do
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'when the relation is predefined' do
+ let(:relation_sym) { :milestone }
+ let(:relation_hash) { { 'name' => '#upcoming', 'title' => 'Upcoming', 'id' => -2 } }
+
+ it 'returns without creating a new relation' do
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'when #setup_models is not implemented' do
+ it 'raises NotImplementedError' do
+ expect { subject }.to raise_error(NotImplementedError)
+ end
+ end
+
+ context 'when #setup_models is implemented' do
+ let(:relation_sym) { :notes }
+ let(:relation_hash) do
+ {
+ "id" => 4947,
+ "note" => "merged",
+ "noteable_type" => "MergeRequest",
+ "author_id" => 999,
+ "created_at" => "2016-11-18T09:29:42.634Z",
+ "updated_at" => "2016-11-18T09:29:42.634Z",
+ "project_id" => 1,
+ "attachment" => {
+ "url" => nil
+ },
+ "noteable_id" => 377,
+ "system" => true,
+ "events" => []
+ }
+ end
+
+ before do
+ expect_next_instance_of(described_class) do |relation_factory|
+ expect(relation_factory).to receive(:setup_models).and_return(true)
+ end
+ end
+
+ it 'creates imported object' do
+ expect(subject).to be_instance_of(Note)
+ end
+
+ context 'when relation contains user references' do
+ let(:new_user) { create(:user) }
+ let(:exported_member) do
+ {
+ "id" => 111,
+ "access_level" => 30,
+ "source_id" => 1,
+ "source_type" => "Project",
+ "user_id" => 3,
+ "notification_level" => 3,
+ "created_at" => "2016-11-18T09:29:42.634Z",
+ "updated_at" => "2016-11-18T09:29:42.634Z",
+ "user" => {
+ "id" => 999,
+ "email" => new_user.email,
+ "username" => new_user.username
+ }
+ }
+ end
+
+ let(:members_mapper) do
+ Gitlab::ImportExport::MembersMapper.new(
+ exported_members: [exported_member],
+ user: user,
+ importable: project)
+ end
+
+ it 'maps the right author to the imported note' do
+ expect(subject.author).to eq(new_user)
+ end
+ end
+
+ context 'when relation contains token attributes' do
+ let(:relation_sym) { 'ProjectHook' }
+ let(:relation_hash) { { token: 'secret' } }
+
+ it 'removes token attributes' do
+ expect(subject.token).to be_nil
+ end
+ end
+
+ context 'when relation contains encrypted attributes' do
+ let(:relation_sym) { 'Ci::Variable' }
+ let(:relation_hash) do
+ create(:ci_variable).as_json
+ end
+
+ it 'removes encrypted attributes' do
+ expect(subject.value).to be_nil
+ end
+ end
+ end
+ end
+
+ describe '.relation_class' do
+ context 'when relation name is pluralized' do
+ let(:relation_name) { 'MergeRequest::Metrics' }
+
+ it 'returns constantized class' do
+ expect(described_class.relation_class(relation_name)).to eq(MergeRequest::Metrics)
+ end
+ end
+
+ context 'when relation name is singularized' do
+ let(:relation_name) { 'Badge' }
+
+ it 'returns constantized class' do
+ expect(described_class.relation_class(relation_name)).to eq(Badge)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/base_object_builder_spec.rb b/spec/lib/gitlab/import_export/base_object_builder_spec.rb
deleted file mode 100644
index fbb3b08cf56..00000000000
--- a/spec/lib/gitlab/import_export/base_object_builder_spec.rb
+++ /dev/null
@@ -1,53 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Gitlab::ImportExport::BaseObjectBuilder do
- let(:project) do
- create(:project, :repository,
- :builds_disabled,
- :issues_disabled,
- name: 'project',
- path: 'project')
- end
- let(:klass) { Milestone }
- let(:attributes) { { 'title' => 'Test BaseObjectBuilder Milestone', 'project' => project } }
-
- subject { described_class.build(klass, attributes) }
-
- describe '#build' do
- context 'when object exists' do
- context 'when where_clauses are implemented' do
- before do
- allow_next_instance_of(described_class) do |object_builder|
- allow(object_builder).to receive(:where_clauses).and_return([klass.arel_table['title'].eq(attributes['title'])])
- end
- end
-
- let!(:milestone) { create(:milestone, title: attributes['title'], project: project) }
-
- it 'finds existing object instead of creating one' do
- expect(subject).to eq(milestone)
- end
- end
-
- context 'when where_clauses are not implemented' do
- it 'raises NotImplementedError' do
- expect { subject }.to raise_error(NotImplementedError)
- end
- end
- end
-
- context 'when object does not exist' do
- before do
- allow_next_instance_of(described_class) do |object_builder|
- allow(object_builder).to receive(:find_object).and_return(nil)
- end
- end
-
- it 'creates new object' do
- expect { subject }.to change { Milestone.count }.from(0).to(1)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/import_export/base_relation_factory_spec.rb b/spec/lib/gitlab/import_export/base_relation_factory_spec.rb
deleted file mode 100644
index def3e43de9b..00000000000
--- a/spec/lib/gitlab/import_export/base_relation_factory_spec.rb
+++ /dev/null
@@ -1,145 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Gitlab::ImportExport::BaseRelationFactory do
- let(:user) { create(:admin) }
- let(:project) { create(:project) }
- let(:members_mapper) { double('members_mapper').as_null_object }
- let(:relation_sym) { :project_snippets }
- let(:merge_requests_mapping) { {} }
- let(:relation_hash) { {} }
- let(:excluded_keys) { [] }
-
- subject do
- described_class.create(relation_sym: relation_sym,
- relation_hash: relation_hash,
- object_builder: Gitlab::ImportExport::GroupProjectObjectBuilder,
- members_mapper: members_mapper,
- merge_requests_mapping: merge_requests_mapping,
- user: user,
- importable: project,
- excluded_keys: excluded_keys)
- end
-
- describe '#create' do
- context 'when relation is invalid' do
- before do
- expect_next_instance_of(described_class) do |relation_factory|
- expect(relation_factory).to receive(:invalid_relation?).and_return(true)
- end
- end
-
- it 'returns without creating new relations' do
- expect(subject).to be_nil
- end
- end
-
- context 'when #setup_models is not implemented' do
- it 'raises NotImplementedError' do
- expect { subject }.to raise_error(NotImplementedError)
- end
- end
-
- context 'when #setup_models is implemented' do
- let(:relation_sym) { :notes }
- let(:relation_hash) do
- {
- "id" => 4947,
- "note" => "merged",
- "noteable_type" => "MergeRequest",
- "author_id" => 999,
- "created_at" => "2016-11-18T09:29:42.634Z",
- "updated_at" => "2016-11-18T09:29:42.634Z",
- "project_id" => 1,
- "attachment" => {
- "url" => nil
- },
- "noteable_id" => 377,
- "system" => true,
- "events" => []
- }
- end
-
- before do
- expect_next_instance_of(described_class) do |relation_factory|
- expect(relation_factory).to receive(:setup_models).and_return(true)
- end
- end
-
- it 'creates imported object' do
- expect(subject).to be_instance_of(Note)
- end
-
- context 'when relation contains user references' do
- let(:new_user) { create(:user) }
- let(:exported_member) do
- {
- "id" => 111,
- "access_level" => 30,
- "source_id" => 1,
- "source_type" => "Project",
- "user_id" => 3,
- "notification_level" => 3,
- "created_at" => "2016-11-18T09:29:42.634Z",
- "updated_at" => "2016-11-18T09:29:42.634Z",
- "user" => {
- "id" => 999,
- "email" => new_user.email,
- "username" => new_user.username
- }
- }
- end
-
- let(:members_mapper) do
- Gitlab::ImportExport::MembersMapper.new(
- exported_members: [exported_member],
- user: user,
- importable: project)
- end
-
- it 'maps the right author to the imported note' do
- expect(subject.author).to eq(new_user)
- end
- end
-
- context 'when relation contains token attributes' do
- let(:relation_sym) { 'ProjectHook' }
- let(:relation_hash) { { token: 'secret' } }
-
- it 'removes token attributes' do
- expect(subject.token).to be_nil
- end
- end
-
- context 'when relation contains encrypted attributes' do
- let(:relation_sym) { 'Ci::Variable' }
- let(:relation_hash) do
- create(:ci_variable).as_json
- end
-
- it 'removes encrypted attributes' do
- expect(subject.value).to be_nil
- end
- end
- end
- end
-
- describe '.relation_class' do
- context 'when relation name is pluralized' do
- let(:relation_name) { 'MergeRequest::Metrics' }
-
- it 'returns constantized class' do
- expect(described_class.relation_class(relation_name)).to eq(MergeRequest::Metrics)
- end
- end
-
- context 'when relation name is singularized' do
- let(:relation_name) { 'Badge' }
-
- it 'returns constantized class' do
- expect(described_class.relation_class(relation_name)).to eq(Badge)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/import_export/error_spec.rb b/spec/lib/gitlab/import_export/error_spec.rb
new file mode 100644
index 00000000000..067f7049097
--- /dev/null
+++ b/spec/lib/gitlab/import_export/error_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::Error do
+ describe '.permission_error' do
+ subject(:error) do
+ described_class.permission_error(user, importable)
+ end
+
+ let(:user) { build(:user, id: 1) }
+
+ context 'when supplied a project' do
+ let(:importable) { build(:project, id: 1, name: 'project1') }
+
+ it 'returns an error with the correct message' do
+ expect(error.message)
+ .to eq 'User with ID: 1 does not have required permissions for Project: project1 with ID: 1'
+ end
+ end
+
+ context 'when supplied a group' do
+ let(:importable) { build(:group, id: 1, name: 'group1') }
+
+ it 'returns an error with the correct message' do
+ expect(error.message)
+ .to eq 'User with ID: 1 does not have required permissions for Group: group1 with ID: 1'
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb b/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
index 56ec6ec0f59..15058684229 100644
--- a/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
+++ b/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
@@ -215,6 +215,14 @@ describe Gitlab::ImportExport::FastHashSerializer do
expect(subject['boards'].first['lists']).not_to be_empty
end
+ context 'relation ordering' do
+ it 'orders exported pipelines by primary key' do
+ expected_order = project.ci_pipelines.reorder(:id).ids
+
+ expect(subject['ci_pipelines'].pluck('id')).to eq(expected_order)
+ end
+ end
+
def setup_project
release = create(:release)
group = create(:group)
@@ -246,6 +254,8 @@ describe Gitlab::ImportExport::FastHashSerializer do
ci_build.pipeline.update(project: project)
create(:commit_status, project: project, pipeline: ci_build.pipeline)
+ create_list(:ci_pipeline, 5, :success, project: project)
+
create(:milestone, project: project)
create(:discussion_note, noteable: issue, project: project)
create(:note, noteable: merge_request, project: project)
diff --git a/spec/lib/gitlab/import_export/fork_spec.rb b/spec/lib/gitlab/import_export/fork_spec.rb
index 09e4f62c686..8aa28353c04 100644
--- a/spec/lib/gitlab/import_export/fork_spec.rb
+++ b/spec/lib/gitlab/import_export/fork_spec.rb
@@ -24,11 +24,11 @@ describe 'forked project import' do
end
let(:saver) do
- Gitlab::ImportExport::ProjectTreeSaver.new(project: project_with_repo, current_user: user, shared: shared)
+ Gitlab::ImportExport::Project::TreeSaver.new(project: project_with_repo, current_user: user, shared: shared)
end
let(:restorer) do
- Gitlab::ImportExport::ProjectTreeRestorer.new(user: user, shared: shared, project: project)
+ Gitlab::ImportExport::Project::TreeRestorer.new(user: user, shared: shared, project: project)
end
before do
diff --git a/spec/lib/gitlab/import_export/group/object_builder_spec.rb b/spec/lib/gitlab/import_export/group/object_builder_spec.rb
new file mode 100644
index 00000000000..781670b0aa5
--- /dev/null
+++ b/spec/lib/gitlab/import_export/group/object_builder_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::Group::ObjectBuilder do
+ let(:group) { create(:group) }
+ let(:base_attributes) do
+ {
+ 'title' => 'title',
+ 'description' => 'description',
+ 'group' => group
+ }
+ end
+
+ context 'labels' do
+ let(:label_attributes) { base_attributes.merge('type' => 'GroupLabel') }
+
+ it 'finds the existing group label' do
+ group_label = create(:group_label, base_attributes)
+
+ expect(described_class.build(Label, label_attributes)).to eq(group_label)
+ end
+
+ it 'creates a new label' do
+ label = described_class.build(Label, label_attributes)
+
+ expect(label.persisted?).to be true
+ end
+
+ context 'when description is an empty string' do
+ let(:label_attributes) { base_attributes.merge('type' => 'GroupLabel', 'description' => '') }
+
+ it 'finds the existing group label' do
+ group_label = create(:group_label, label_attributes)
+
+ expect(described_class.build(Label, label_attributes)).to eq(group_label)
+ end
+ end
+ end
+
+ context 'milestones' do
+ it 'finds the existing group milestone' do
+ milestone = create(:milestone, base_attributes)
+
+ expect(described_class.build(Milestone, base_attributes)).to eq(milestone)
+ end
+
+ it 'creates a new milestone' do
+ milestone = described_class.build(Milestone, base_attributes)
+
+ expect(milestone.persisted?).to be true
+ end
+ end
+
+ describe '#initialize' do
+ context 'when attributes contain description as empty string' do
+ let(:attributes) { base_attributes.merge('description' => '') }
+
+ it 'converts empty string to nil' do
+ builder = described_class.new(Label, attributes)
+
+ expect(builder.send(:attributes)).to include({ 'description' => nil })
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/group/relation_factory_spec.rb b/spec/lib/gitlab/import_export/group/relation_factory_spec.rb
new file mode 100644
index 00000000000..332648d5c89
--- /dev/null
+++ b/spec/lib/gitlab/import_export/group/relation_factory_spec.rb
@@ -0,0 +1,120 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::Group::RelationFactory do
+ let(:group) { create(:group) }
+ let(:members_mapper) { double('members_mapper').as_null_object }
+ let(:user) { create(:admin) }
+ let(:excluded_keys) { [] }
+ let(:created_object) do
+ described_class.create(relation_sym: relation_sym,
+ relation_hash: relation_hash,
+ members_mapper: members_mapper,
+ object_builder: Gitlab::ImportExport::Group::ObjectBuilder,
+ user: user,
+ importable: group,
+ excluded_keys: excluded_keys)
+ end
+
+ context 'label object' do
+ let(:relation_sym) { :group_label }
+ let(:id) { random_id }
+ let(:original_group_id) { random_id }
+
+ let(:relation_hash) do
+ {
+ 'id' => 123456,
+ 'title' => 'Bruffefunc',
+ 'color' => '#1d2da4',
+ 'project_id' => nil,
+ 'created_at' => '2019-11-20T17:02:20.546Z',
+ 'updated_at' => '2019-11-20T17:02:20.546Z',
+ 'template' => false,
+ 'description' => 'Description',
+ 'group_id' => original_group_id,
+ 'type' => 'GroupLabel',
+ 'priorities' => [],
+ 'textColor' => '#FFFFFF'
+ }
+ end
+
+ it 'does not have the original ID' do
+ expect(created_object.id).not_to eq(id)
+ end
+
+ it 'does not have the original group_id' do
+ expect(created_object.group_id).not_to eq(original_group_id)
+ end
+
+ it 'has the new group_id' do
+ expect(created_object.group_id).to eq(group.id)
+ end
+
+ context 'excluded attributes' do
+ let(:excluded_keys) { %w[description] }
+
+ it 'are removed from the imported object' do
+ expect(created_object.description).to be_nil
+ end
+ end
+ end
+
+ context 'Notes user references' do
+ let(:relation_sym) { :notes }
+ let(:new_user) { create(:user) }
+ let(:exported_member) do
+ {
+ 'id' => 111,
+ 'access_level' => 30,
+ 'source_id' => 1,
+ 'source_type' => 'Namespace',
+ 'user_id' => 3,
+ 'notification_level' => 3,
+ 'created_at' => '2016-11-18T09:29:42.634Z',
+ 'updated_at' => '2016-11-18T09:29:42.634Z',
+ 'user' => {
+ 'id' => 999,
+ 'email' => new_user.email,
+ 'username' => new_user.username
+ }
+ }
+ end
+
+ let(:relation_hash) do
+ {
+ 'id' => 4947,
+ 'note' => 'note',
+ 'noteable_type' => 'Epic',
+ 'author_id' => 999,
+ 'created_at' => '2016-11-18T09:29:42.634Z',
+ 'updated_at' => '2016-11-18T09:29:42.634Z',
+ 'project_id' => 1,
+ 'attachment' => {
+ 'url' => nil
+ },
+ 'noteable_id' => 377,
+ 'system' => true,
+ 'author' => {
+ 'name' => 'Administrator'
+ },
+ 'events' => []
+ }
+ end
+
+ let(:members_mapper) do
+ Gitlab::ImportExport::MembersMapper.new(
+ exported_members: [exported_member],
+ user: user,
+ importable: group)
+ end
+
+ it 'maps the right author to the imported note' do
+ expect(created_object.author).to eq(new_user)
+ end
+ end
+
+ def random_id
+ rand(1000..10000)
+ end
+end
diff --git a/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb
new file mode 100644
index 00000000000..5584f1503f7
--- /dev/null
+++ b/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb
@@ -0,0 +1,153 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::Group::TreeRestorer do
+ include ImportExport::CommonUtil
+
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+
+ describe 'restore group tree' do
+ before(:context) do
+ # Using an admin for import, so we can check assignment of existing members
+ user = create(:admin, email: 'root@gitlabexample.com')
+ create(:user, email: 'adriene.mcclure@gitlabexample.com')
+ create(:user, email: 'gwendolyn_robel@gitlabexample.com')
+
+ RSpec::Mocks.with_temporary_scope do
+ @group = create(:group, name: 'group', path: 'group')
+ @shared = Gitlab::ImportExport::Shared.new(@group)
+
+ setup_import_export_config('group_exports/complex')
+
+ group_tree_restorer = described_class.new(user: user, shared: @shared, group: @group, group_hash: nil)
+
+ @restored_group_json = group_tree_restorer.restore
+ end
+ end
+
+ context 'JSON' do
+ it 'restores models based on JSON' do
+ expect(@restored_group_json).to be_truthy
+ end
+
+ it 'has the group description' do
+ expect(Group.find_by_path('group').description).to eq('Group Description')
+ end
+
+ it 'has group labels' do
+ expect(@group.labels.count).to eq(10)
+ end
+
+ context 'issue boards' do
+ it 'has issue boards' do
+ expect(@group.boards.count).to eq(1)
+ end
+
+ it 'has board label lists' do
+ lists = @group.boards.find_by(name: 'first board').lists
+
+ expect(lists.count).to eq(3)
+ expect(lists.first.label.title).to eq('TSL')
+ expect(lists.second.label.title).to eq('Sosync')
+ end
+ end
+
+ it 'has badges' do
+ expect(@group.badges.count).to eq(1)
+ end
+
+ it 'has milestones' do
+ expect(@group.milestones.count).to eq(5)
+ end
+
+ it 'has group children' do
+ expect(@group.children.count).to eq(2)
+ end
+
+ it 'has group members' do
+ expect(@group.members.map(&:user).map(&:email)).to contain_exactly('root@gitlabexample.com', 'adriene.mcclure@gitlabexample.com', 'gwendolyn_robel@gitlabexample.com')
+ end
+ end
+ end
+
+ context 'excluded attributes' do
+ let!(:source_user) { create(:user, id: 123) }
+ let!(:importer_user) { create(:user) }
+ let(:group) { create(:group) }
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+ let(:group_tree_restorer) { described_class.new(user: importer_user, shared: shared, group: group, group_hash: nil) }
+ let(:group_json) { ActiveSupport::JSON.decode(IO.read(File.join(shared.export_path, 'group.json'))) }
+
+ shared_examples 'excluded attributes' do
+ excluded_attributes = %w[
+ id
+ owner_id
+ parent_id
+ created_at
+ updated_at
+ runners_token
+ runners_token_encrypted
+ saml_discovery_token
+ ]
+
+ before do
+ group.add_owner(importer_user)
+
+ setup_import_export_config('group_exports/complex')
+ end
+
+ excluded_attributes.each do |excluded_attribute|
+ it 'does not allow override of excluded attributes' do
+ expect(group_json[excluded_attribute]).not_to eq(group.public_send(excluded_attribute))
+ end
+ end
+ end
+
+ include_examples 'excluded attributes'
+ end
+
+ context 'group.json file access check' do
+ let(:user) { create(:user) }
+ let!(:group) { create(:group, name: 'group2', path: 'group2') }
+ let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group, group_hash: nil) }
+ let(:restored_group_json) { group_tree_restorer.restore }
+
+ it 'does not read a symlink' do
+ Dir.mktmpdir do |tmpdir|
+ setup_symlink(tmpdir, 'group.json')
+ allow(shared).to receive(:export_path).and_call_original
+
+ expect(group_tree_restorer.restore).to eq(false)
+ expect(shared.errors).to include('Incorrect JSON format')
+ end
+ end
+ end
+
+ context 'group visibility levels' do
+ let(:user) { create(:user) }
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+ let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group, group_hash: nil) }
+
+ before do
+ setup_import_export_config(filepath)
+
+ group_tree_restorer.restore
+ end
+
+ shared_examples 'with visibility level' do |visibility_level, expected_visibilities|
+ context "when visibility level is #{visibility_level}" do
+ let(:group) { create(:group, visibility_level) }
+ let(:filepath) { "group_exports/visibility_levels/#{visibility_level}" }
+
+ it "imports all subgroups as #{visibility_level}" do
+ expect(group.children.map(&:visibility_level)).to eq(expected_visibilities)
+ end
+ end
+ end
+
+ include_examples 'with visibility level', :public, [20, 10, 0]
+ include_examples 'with visibility level', :private, [0, 0, 0]
+ include_examples 'with visibility level', :internal, [10, 10, 0]
+ end
+end
diff --git a/spec/lib/gitlab/import_export/group/tree_saver_spec.rb b/spec/lib/gitlab/import_export/group/tree_saver_spec.rb
new file mode 100644
index 00000000000..44fd49f0ac3
--- /dev/null
+++ b/spec/lib/gitlab/import_export/group/tree_saver_spec.rb
@@ -0,0 +1,202 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::Group::TreeSaver do
+ describe 'saves the group tree into a json object' do
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+ let(:group_tree_saver) { described_class.new(group: group, current_user: user, shared: shared) }
+ let(:export_path) { "#{Dir.tmpdir}/group_tree_saver_spec" }
+ let(:user) { create(:user) }
+ let!(:group) { setup_group }
+
+ before do
+ group.add_maintainer(user)
+ allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
+ end
+
+ after do
+ FileUtils.rm_rf(export_path)
+ end
+
+ it 'saves group successfully' do
+ expect(group_tree_saver.save).to be true
+ end
+
+ context ':export_fast_serialize feature flag checks' do
+ before do
+ expect(Gitlab::ImportExport::Reader).to receive(:new).with(shared: shared, config: group_config).and_return(reader)
+ expect(reader).to receive(:group_tree).and_return(group_tree)
+ end
+
+ let(:reader) { instance_double('Gitlab::ImportExport::Reader') }
+ let(:group_config) { Gitlab::ImportExport::Config.new(config: Gitlab::ImportExport.group_config_file).to_h }
+ let(:group_tree) do
+ {
+ include: [{ milestones: { include: [] } }],
+ preload: { milestones: nil }
+ }
+ end
+
+ context 'when :export_fast_serialize feature is enabled' do
+ let(:serializer) { instance_double(Gitlab::ImportExport::FastHashSerializer) }
+
+ before do
+ stub_feature_flags(export_fast_serialize: true)
+
+ expect(Gitlab::ImportExport::FastHashSerializer).to receive(:new).with(group, group_tree).and_return(serializer)
+ end
+
+ it 'uses FastHashSerializer' do
+ expect(serializer).to receive(:execute)
+
+ group_tree_saver.save
+ end
+ end
+
+ context 'when :export_fast_serialize feature is disabled' do
+ before do
+ stub_feature_flags(export_fast_serialize: false)
+ end
+
+ it 'is serialized via built-in `as_json`' do
+ expect(group).to receive(:as_json).with(group_tree).and_call_original
+
+ group_tree_saver.save
+ end
+ end
+ end
+
+ # It is mostly duplicated in
+ # `spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb`
+ # except:
+ # context 'with description override' do
+ # context 'group members' do
+ # ^ These are specific to the Group::TreeSaver
+ context 'JSON' do
+ let(:saved_group_json) do
+ group_tree_saver.save
+ group_json(group_tree_saver.full_path)
+ end
+
+ it 'saves the correct json' do
+ expect(saved_group_json).to include({ 'description' => 'description' })
+ end
+
+ it 'has milestones' do
+ expect(saved_group_json['milestones']).not_to be_empty
+ end
+
+ it 'has labels' do
+ expect(saved_group_json['labels']).not_to be_empty
+ end
+
+ it 'has boards' do
+ expect(saved_group_json['boards']).not_to be_empty
+ end
+
+ it 'has board label list' do
+ expect(saved_group_json['boards'].first['lists']).not_to be_empty
+ end
+
+ it 'has group members' do
+ expect(saved_group_json['members']).not_to be_empty
+ end
+
+ it 'has priorities associated to labels' do
+ expect(saved_group_json['labels'].first['priorities']).not_to be_empty
+ end
+
+ it 'has badges' do
+ expect(saved_group_json['badges']).not_to be_empty
+ end
+
+ context 'group children' do
+ let(:children) { group.children }
+
+ it 'exports group children' do
+ expect(saved_group_json['children'].length).to eq(children.count)
+ end
+
+ it 'exports group children of children' do
+ expect(saved_group_json['children'].first['children'].length).to eq(children.first.children.count)
+ end
+ end
+
+ context 'group members' do
+ let(:user2) { create(:user, email: 'group@member.com') }
+ let(:member_emails) do
+ saved_group_json['members'].map do |pm|
+ pm['user']['email']
+ end
+ end
+
+ before do
+ group.add_developer(user2)
+ end
+
+ it 'exports group members as group owner' do
+ group.add_owner(user)
+
+ expect(member_emails).to include('group@member.com')
+ end
+
+ context 'as admin' do
+ let(:user) { create(:admin) }
+
+ it 'exports group members as admin' do
+ expect(member_emails).to include('group@member.com')
+ end
+
+ it 'exports group members' do
+ member_types = saved_group_json['members'].map { |pm| pm['source_type'] }
+
+ expect(member_types).to all(eq('Namespace'))
+ end
+ end
+ end
+
+ context 'group attributes' do
+ shared_examples 'excluded attributes' do
+ excluded_attributes = %w[
+ id
+ owner_id
+ parent_id
+ created_at
+ updated_at
+ runners_token
+ runners_token_encrypted
+ saml_discovery_token
+ ]
+
+ excluded_attributes.each do |excluded_attribute|
+ it 'does not contain excluded attribute' do
+ expect(saved_group_json).not_to include(excluded_attribute => group.public_send(excluded_attribute))
+ end
+ end
+ end
+
+ include_examples 'excluded attributes'
+ end
+ end
+ end
+
+ def setup_group
+ group = create(:group, description: 'description')
+ sub_group = create(:group, description: 'description', parent: group)
+ create(:group, description: 'description', parent: sub_group)
+ create(:milestone, group: group)
+ create(:group_badge, group: group)
+ group_label = create(:group_label, group: group)
+ create(:label_priority, label: group_label, priority: 1)
+ board = create(:board, group: group, milestone_id: Milestone::Upcoming.id)
+ create(:list, board: board, label: group_label)
+ create(:group_badge, group: group)
+
+ group
+ end
+
+ def group_json(filename)
+ ::JSON.parse(IO.read(filename))
+ end
+end
diff --git a/spec/lib/gitlab/import_export/group_object_builder_spec.rb b/spec/lib/gitlab/import_export/group_object_builder_spec.rb
deleted file mode 100644
index 08b2dae1147..00000000000
--- a/spec/lib/gitlab/import_export/group_object_builder_spec.rb
+++ /dev/null
@@ -1,66 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Gitlab::ImportExport::GroupObjectBuilder do
- let(:group) { create(:group) }
- let(:base_attributes) do
- {
- 'title' => 'title',
- 'description' => 'description',
- 'group' => group
- }
- end
-
- context 'labels' do
- let(:label_attributes) { base_attributes.merge('type' => 'GroupLabel') }
-
- it 'finds the existing group label' do
- group_label = create(:group_label, base_attributes)
-
- expect(described_class.build(Label, label_attributes)).to eq(group_label)
- end
-
- it 'creates a new label' do
- label = described_class.build(Label, label_attributes)
-
- expect(label.persisted?).to be true
- end
-
- context 'when description is an empty string' do
- let(:label_attributes) { base_attributes.merge('type' => 'GroupLabel', 'description' => '') }
-
- it 'finds the existing group label' do
- group_label = create(:group_label, label_attributes)
-
- expect(described_class.build(Label, label_attributes)).to eq(group_label)
- end
- end
- end
-
- context 'milestones' do
- it 'finds the existing group milestone' do
- milestone = create(:milestone, base_attributes)
-
- expect(described_class.build(Milestone, base_attributes)).to eq(milestone)
- end
-
- it 'creates a new milestone' do
- milestone = described_class.build(Milestone, base_attributes)
-
- expect(milestone.persisted?).to be true
- end
- end
-
- describe '#initialize' do
- context 'when attributes contain description as empty string' do
- let(:attributes) { base_attributes.merge('description' => '') }
-
- it 'converts empty string to nil' do
- builder = described_class.new(Label, attributes)
-
- expect(builder.send(:attributes)).to include({ 'description' => nil })
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/import_export/group_project_object_builder_spec.rb b/spec/lib/gitlab/import_export/group_project_object_builder_spec.rb
deleted file mode 100644
index 34049cbf570..00000000000
--- a/spec/lib/gitlab/import_export/group_project_object_builder_spec.rb
+++ /dev/null
@@ -1,153 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Gitlab::ImportExport::GroupProjectObjectBuilder do
- let!(:group) { create(:group, :private) }
- let!(:subgroup) { create(:group, :private, parent: group) }
- let!(:project) do
- create(:project, :repository,
- :builds_disabled,
- :issues_disabled,
- name: 'project',
- path: 'project',
- group: subgroup)
- end
-
- let(:lru_cache) { subject.send(:lru_cache) }
- let(:cache_key) { subject.send(:cache_key) }
-
- context 'request store is not active' do
- subject do
- described_class.new(Label,
- 'title' => 'group label',
- 'project' => project,
- 'group' => project.group)
- end
-
- it 'ignore cache initialize' do
- expect(lru_cache).to be_nil
- expect(cache_key).to be_nil
- end
- end
-
- context 'request store is active', :request_store do
- subject do
- described_class.new(Label,
- 'title' => 'group label',
- 'project' => project,
- 'group' => project.group)
- end
-
- it 'initialize cache in memory' do
- expect(lru_cache).not_to be_nil
- expect(cache_key).not_to be_nil
- end
-
- it 'cache object when first time find the object' do
- group_label = create(:group_label, name: 'group label', group: project.group)
-
- expect(subject).to receive(:find_object).and_call_original
- expect { subject.find }
- .to change { lru_cache[cache_key] }
- .from(nil).to(group_label)
-
- expect(subject.find).to eq(group_label)
- end
-
- it 'read from cache when object has been cached' do
- group_label = create(:group_label, name: 'group label', group: project.group)
-
- subject.find
-
- expect(subject).not_to receive(:find_object)
- expect { subject.find }.not_to change { lru_cache[cache_key] }
-
- expect(subject.find).to eq(group_label)
- end
- end
-
- context 'labels' do
- it 'finds the existing group label' do
- group_label = create(:group_label, name: 'group label', group: project.group)
-
- expect(described_class.build(Label,
- 'title' => 'group label',
- 'project' => project,
- 'group' => project.group)).to eq(group_label)
- end
-
- it 'finds the existing group label in root ancestor' do
- group_label = create(:group_label, name: 'group label', group: group)
-
- expect(described_class.build(Label,
- 'title' => 'group label',
- 'project' => project,
- 'group' => group)).to eq(group_label)
- end
-
- it 'creates a new label' do
- label = described_class.build(Label,
- 'title' => 'group label',
- 'project' => project,
- 'group' => project.group)
-
- expect(label.persisted?).to be true
- end
- end
-
- context 'milestones' do
- it 'finds the existing group milestone' do
- milestone = create(:milestone, name: 'group milestone', group: project.group)
-
- expect(described_class.build(Milestone,
- 'title' => 'group milestone',
- 'project' => project,
- 'group' => project.group)).to eq(milestone)
- end
-
- it 'finds the existing group milestone in root ancestor' do
- milestone = create(:milestone, name: 'group milestone', group: group)
-
- expect(described_class.build(Milestone,
- 'title' => 'group milestone',
- 'project' => project,
- 'group' => group)).to eq(milestone)
- end
-
- it 'creates a new milestone' do
- milestone = described_class.build(Milestone,
- 'title' => 'group milestone',
- 'project' => project,
- 'group' => project.group)
-
- expect(milestone.persisted?).to be true
- end
- end
-
- context 'merge_request' do
- it 'finds the existing merge_request' do
- merge_request = create(:merge_request, title: 'MergeRequest', iid: 7, target_project: project, source_project: project)
- expect(described_class.build(MergeRequest,
- 'title' => 'MergeRequest',
- 'source_project_id' => project.id,
- 'target_project_id' => project.id,
- 'source_branch' => 'SourceBranch',
- 'iid' => 7,
- 'target_branch' => 'TargetBranch',
- 'author_id' => project.creator.id)).to eq(merge_request)
- end
-
- it 'creates a new merge_request' do
- merge_request = described_class.build(MergeRequest,
- 'title' => 'MergeRequest',
- 'iid' => 8,
- 'source_project_id' => project.id,
- 'target_project_id' => project.id,
- 'source_branch' => 'SourceBranch',
- 'target_branch' => 'TargetBranch',
- 'author_id' => project.creator.id)
- expect(merge_request.persisted?).to be true
- end
- end
-end
diff --git a/spec/lib/gitlab/import_export/group_relation_factory_spec.rb b/spec/lib/gitlab/import_export/group_relation_factory_spec.rb
deleted file mode 100644
index 9208b2ad203..00000000000
--- a/spec/lib/gitlab/import_export/group_relation_factory_spec.rb
+++ /dev/null
@@ -1,120 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Gitlab::ImportExport::GroupRelationFactory do
- let(:group) { create(:group) }
- let(:members_mapper) { double('members_mapper').as_null_object }
- let(:user) { create(:admin) }
- let(:excluded_keys) { [] }
- let(:created_object) do
- described_class.create(relation_sym: relation_sym,
- relation_hash: relation_hash,
- members_mapper: members_mapper,
- object_builder: Gitlab::ImportExport::GroupObjectBuilder,
- user: user,
- importable: group,
- excluded_keys: excluded_keys)
- end
-
- context 'label object' do
- let(:relation_sym) { :group_label }
- let(:id) { random_id }
- let(:original_group_id) { random_id }
-
- let(:relation_hash) do
- {
- 'id' => 123456,
- 'title' => 'Bruffefunc',
- 'color' => '#1d2da4',
- 'project_id' => nil,
- 'created_at' => '2019-11-20T17:02:20.546Z',
- 'updated_at' => '2019-11-20T17:02:20.546Z',
- 'template' => false,
- 'description' => 'Description',
- 'group_id' => original_group_id,
- 'type' => 'GroupLabel',
- 'priorities' => [],
- 'textColor' => '#FFFFFF'
- }
- end
-
- it 'does not have the original ID' do
- expect(created_object.id).not_to eq(id)
- end
-
- it 'does not have the original group_id' do
- expect(created_object.group_id).not_to eq(original_group_id)
- end
-
- it 'has the new group_id' do
- expect(created_object.group_id).to eq(group.id)
- end
-
- context 'excluded attributes' do
- let(:excluded_keys) { %w[description] }
-
- it 'are removed from the imported object' do
- expect(created_object.description).to be_nil
- end
- end
- end
-
- context 'Notes user references' do
- let(:relation_sym) { :notes }
- let(:new_user) { create(:user) }
- let(:exported_member) do
- {
- 'id' => 111,
- 'access_level' => 30,
- 'source_id' => 1,
- 'source_type' => 'Namespace',
- 'user_id' => 3,
- 'notification_level' => 3,
- 'created_at' => '2016-11-18T09:29:42.634Z',
- 'updated_at' => '2016-11-18T09:29:42.634Z',
- 'user' => {
- 'id' => 999,
- 'email' => new_user.email,
- 'username' => new_user.username
- }
- }
- end
-
- let(:relation_hash) do
- {
- 'id' => 4947,
- 'note' => 'note',
- 'noteable_type' => 'Epic',
- 'author_id' => 999,
- 'created_at' => '2016-11-18T09:29:42.634Z',
- 'updated_at' => '2016-11-18T09:29:42.634Z',
- 'project_id' => 1,
- 'attachment' => {
- 'url' => nil
- },
- 'noteable_id' => 377,
- 'system' => true,
- 'author' => {
- 'name' => 'Administrator'
- },
- 'events' => []
- }
- end
-
- let(:members_mapper) do
- Gitlab::ImportExport::MembersMapper.new(
- exported_members: [exported_member],
- user: user,
- importable: group)
- end
-
- it 'maps the right author to the imported note' do
- expect(created_object.author).to eq(new_user)
- end
- end
-
- def random_id
- rand(1000..10000)
- end
-end
diff --git a/spec/lib/gitlab/import_export/group_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/group_tree_restorer_spec.rb
deleted file mode 100644
index b2c8398d358..00000000000
--- a/spec/lib/gitlab/import_export/group_tree_restorer_spec.rb
+++ /dev/null
@@ -1,153 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Gitlab::ImportExport::GroupTreeRestorer do
- include ImportExport::CommonUtil
-
- let(:shared) { Gitlab::ImportExport::Shared.new(group) }
-
- describe 'restore group tree' do
- before(:context) do
- # Using an admin for import, so we can check assignment of existing members
- user = create(:admin, email: 'root@gitlabexample.com')
- create(:user, email: 'adriene.mcclure@gitlabexample.com')
- create(:user, email: 'gwendolyn_robel@gitlabexample.com')
-
- RSpec::Mocks.with_temporary_scope do
- @group = create(:group, name: 'group', path: 'group')
- @shared = Gitlab::ImportExport::Shared.new(@group)
-
- setup_import_export_config('group_exports/complex')
-
- group_tree_restorer = described_class.new(user: user, shared: @shared, group: @group, group_hash: nil)
-
- @restored_group_json = group_tree_restorer.restore
- end
- end
-
- context 'JSON' do
- it 'restores models based on JSON' do
- expect(@restored_group_json).to be_truthy
- end
-
- it 'has the group description' do
- expect(Group.find_by_path('group').description).to eq('Group Description')
- end
-
- it 'has group labels' do
- expect(@group.labels.count).to eq(10)
- end
-
- context 'issue boards' do
- it 'has issue boards' do
- expect(@group.boards.count).to eq(1)
- end
-
- it 'has board label lists' do
- lists = @group.boards.find_by(name: 'first board').lists
-
- expect(lists.count).to eq(3)
- expect(lists.first.label.title).to eq('TSL')
- expect(lists.second.label.title).to eq('Sosync')
- end
- end
-
- it 'has badges' do
- expect(@group.badges.count).to eq(1)
- end
-
- it 'has milestones' do
- expect(@group.milestones.count).to eq(5)
- end
-
- it 'has group children' do
- expect(@group.children.count).to eq(2)
- end
-
- it 'has group members' do
- expect(@group.members.map(&:user).map(&:email)).to contain_exactly('root@gitlabexample.com', 'adriene.mcclure@gitlabexample.com', 'gwendolyn_robel@gitlabexample.com')
- end
- end
- end
-
- context 'excluded attributes' do
- let!(:source_user) { create(:user, id: 123) }
- let!(:importer_user) { create(:user) }
- let(:group) { create(:group) }
- let(:shared) { Gitlab::ImportExport::Shared.new(group) }
- let(:group_tree_restorer) { described_class.new(user: importer_user, shared: shared, group: group, group_hash: nil) }
- let(:group_json) { ActiveSupport::JSON.decode(IO.read(File.join(shared.export_path, 'group.json'))) }
-
- shared_examples 'excluded attributes' do
- excluded_attributes = %w[
- id
- owner_id
- parent_id
- created_at
- updated_at
- runners_token
- runners_token_encrypted
- saml_discovery_token
- ]
-
- before do
- group.add_owner(importer_user)
-
- setup_import_export_config('group_exports/complex')
- end
-
- excluded_attributes.each do |excluded_attribute|
- it 'does not allow override of excluded attributes' do
- expect(group_json[excluded_attribute]).not_to eq(group.public_send(excluded_attribute))
- end
- end
- end
-
- include_examples 'excluded attributes'
- end
-
- context 'group.json file access check' do
- let(:user) { create(:user) }
- let!(:group) { create(:group, name: 'group2', path: 'group2') }
- let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group, group_hash: nil) }
- let(:restored_group_json) { group_tree_restorer.restore }
-
- it 'does not read a symlink' do
- Dir.mktmpdir do |tmpdir|
- setup_symlink(tmpdir, 'group.json')
- allow(shared).to receive(:export_path).and_call_original
-
- expect(group_tree_restorer.restore).to eq(false)
- expect(shared.errors).to include('Incorrect JSON format')
- end
- end
- end
-
- context 'group visibility levels' do
- let(:user) { create(:user) }
- let(:shared) { Gitlab::ImportExport::Shared.new(group) }
- let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group, group_hash: nil) }
-
- before do
- setup_import_export_config(filepath)
-
- group_tree_restorer.restore
- end
-
- shared_examples 'with visibility level' do |visibility_level, expected_visibilities|
- context "when visibility level is #{visibility_level}" do
- let(:group) { create(:group, visibility_level) }
- let(:filepath) { "group_exports/visibility_levels/#{visibility_level}" }
-
- it "imports all subgroups as #{visibility_level}" do
- expect(group.children.map(&:visibility_level)).to eq(expected_visibilities)
- end
- end
- end
-
- include_examples 'with visibility level', :public, [20, 10, 0]
- include_examples 'with visibility level', :private, [0, 0, 0]
- include_examples 'with visibility level', :internal, [10, 10, 0]
- end
-end
diff --git a/spec/lib/gitlab/import_export/group_tree_saver_spec.rb b/spec/lib/gitlab/import_export/group_tree_saver_spec.rb
deleted file mode 100644
index 7f49c7af8fa..00000000000
--- a/spec/lib/gitlab/import_export/group_tree_saver_spec.rb
+++ /dev/null
@@ -1,202 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Gitlab::ImportExport::GroupTreeSaver do
- describe 'saves the group tree into a json object' do
- let(:shared) { Gitlab::ImportExport::Shared.new(group) }
- let(:group_tree_saver) { described_class.new(group: group, current_user: user, shared: shared) }
- let(:export_path) { "#{Dir.tmpdir}/group_tree_saver_spec" }
- let(:user) { create(:user) }
- let!(:group) { setup_group }
-
- before do
- group.add_maintainer(user)
- allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
- end
-
- after do
- FileUtils.rm_rf(export_path)
- end
-
- it 'saves group successfully' do
- expect(group_tree_saver.save).to be true
- end
-
- context ':export_fast_serialize feature flag checks' do
- before do
- expect(Gitlab::ImportExport::Reader).to receive(:new).with(shared: shared, config: group_config).and_return(reader)
- expect(reader).to receive(:group_tree).and_return(group_tree)
- end
-
- let(:reader) { instance_double('Gitlab::ImportExport::Reader') }
- let(:group_config) { Gitlab::ImportExport::Config.new(config: Gitlab::ImportExport.group_config_file).to_h }
- let(:group_tree) do
- {
- include: [{ milestones: { include: [] } }],
- preload: { milestones: nil }
- }
- end
-
- context 'when :export_fast_serialize feature is enabled' do
- let(:serializer) { instance_double(Gitlab::ImportExport::FastHashSerializer) }
-
- before do
- stub_feature_flags(export_fast_serialize: true)
-
- expect(Gitlab::ImportExport::FastHashSerializer).to receive(:new).with(group, group_tree).and_return(serializer)
- end
-
- it 'uses FastHashSerializer' do
- expect(serializer).to receive(:execute)
-
- group_tree_saver.save
- end
- end
-
- context 'when :export_fast_serialize feature is disabled' do
- before do
- stub_feature_flags(export_fast_serialize: false)
- end
-
- it 'is serialized via built-in `as_json`' do
- expect(group).to receive(:as_json).with(group_tree).and_call_original
-
- group_tree_saver.save
- end
- end
- end
-
- # It is mostly duplicated in
- # `spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb`
- # except:
- # context 'with description override' do
- # context 'group members' do
- # ^ These are specific for the groupTreeSaver
- context 'JSON' do
- let(:saved_group_json) do
- group_tree_saver.save
- group_json(group_tree_saver.full_path)
- end
-
- it 'saves the correct json' do
- expect(saved_group_json).to include({ 'description' => 'description' })
- end
-
- it 'has milestones' do
- expect(saved_group_json['milestones']).not_to be_empty
- end
-
- it 'has labels' do
- expect(saved_group_json['labels']).not_to be_empty
- end
-
- it 'has boards' do
- expect(saved_group_json['boards']).not_to be_empty
- end
-
- it 'has board label list' do
- expect(saved_group_json['boards'].first['lists']).not_to be_empty
- end
-
- it 'has group members' do
- expect(saved_group_json['members']).not_to be_empty
- end
-
- it 'has priorities associated to labels' do
- expect(saved_group_json['labels'].first['priorities']).not_to be_empty
- end
-
- it 'has badges' do
- expect(saved_group_json['badges']).not_to be_empty
- end
-
- context 'group children' do
- let(:children) { group.children }
-
- it 'exports group children' do
- expect(saved_group_json['children'].length).to eq(children.count)
- end
-
- it 'exports group children of children' do
- expect(saved_group_json['children'].first['children'].length).to eq(children.first.children.count)
- end
- end
-
- context 'group members' do
- let(:user2) { create(:user, email: 'group@member.com') }
- let(:member_emails) do
- saved_group_json['members'].map do |pm|
- pm['user']['email']
- end
- end
-
- before do
- group.add_developer(user2)
- end
-
- it 'exports group members as group owner' do
- group.add_owner(user)
-
- expect(member_emails).to include('group@member.com')
- end
-
- context 'as admin' do
- let(:user) { create(:admin) }
-
- it 'exports group members as admin' do
- expect(member_emails).to include('group@member.com')
- end
-
- it 'exports group members' do
- member_types = saved_group_json['members'].map { |pm| pm['source_type'] }
-
- expect(member_types).to all(eq('Namespace'))
- end
- end
- end
-
- context 'group attributes' do
- shared_examples 'excluded attributes' do
- excluded_attributes = %w[
- id
- owner_id
- parent_id
- created_at
- updated_at
- runners_token
- runners_token_encrypted
- saml_discovery_token
- ]
-
- excluded_attributes.each do |excluded_attribute|
- it 'does not contain excluded attribute' do
- expect(saved_group_json).not_to include(excluded_attribute => group.public_send(excluded_attribute))
- end
- end
- end
-
- include_examples 'excluded attributes'
- end
- end
- end
-
- def setup_group
- group = create(:group, description: 'description')
- sub_group = create(:group, description: 'description', parent: group)
- create(:group, description: 'description', parent: sub_group)
- create(:milestone, group: group)
- create(:group_badge, group: group)
- group_label = create(:group_label, group: group)
- create(:label_priority, label: group_label, priority: 1)
- board = create(:board, group: group)
- create(:list, board: board, label: group_label)
- create(:group_badge, group: group)
-
- group
- end
-
- def group_json(filename)
- JSON.parse(IO.read(filename))
- end
-end
diff --git a/spec/lib/gitlab/import_export/import_export_spec.rb b/spec/lib/gitlab/import_export/import_export_spec.rb
index 2ece0dd4b56..300ba66ee5b 100644
--- a/spec/lib/gitlab/import_export/import_export_spec.rb
+++ b/spec/lib/gitlab/import_export/import_export_spec.rb
@@ -21,4 +21,12 @@ describe Gitlab::ImportExport do
expect(described_class.export_filename(exportable: project).length).to be < 70
end
end
+
+ describe '#snippet_repo_bundle_filename_for' do
+ let(:snippet) { build(:snippet, id: 1) }
+
+ it 'generates the snippet bundle name' do
+ expect(described_class.snippet_repo_bundle_filename_for(snippet)).to eq "#{snippet.hexdigest}.bundle"
+ end
+ end
end
diff --git a/spec/lib/gitlab/import_export/import_test_coverage_spec.rb b/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
index 97d5ce07d47..335b0031147 100644
--- a/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
+++ b/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
@@ -89,8 +89,6 @@ describe 'Test coverage of the Project Import' do
def relations_from_json(json_file)
json = ActiveSupport::JSON.decode(IO.read(json_file))
- Gitlab::ImportExport::RelationRenameService.rename(json)
-
[].tap {|res| gather_relations({ project: json }, res, [])}
.map {|relation_names| relation_names.join('.')}
end
diff --git a/spec/lib/gitlab/import_export/importer_spec.rb b/spec/lib/gitlab/import_export/importer_spec.rb
index 942af4084e5..e03c95525df 100644
--- a/spec/lib/gitlab/import_export/importer_spec.rb
+++ b/spec/lib/gitlab/import_export/importer_spec.rb
@@ -50,7 +50,8 @@ describe Gitlab::ImportExport::Importer do
Gitlab::ImportExport::WikiRestorer,
Gitlab::ImportExport::UploadsRestorer,
Gitlab::ImportExport::LfsRestorer,
- Gitlab::ImportExport::StatisticsRestorer
+ Gitlab::ImportExport::StatisticsRestorer,
+ Gitlab::ImportExport::SnippetsRepoRestorer
].each do |restorer|
it "calls the #{restorer}" do
fake_restorer = double(restorer.to_s)
@@ -63,7 +64,7 @@ describe Gitlab::ImportExport::Importer do
end
it 'restores the ProjectTree' do
- expect(Gitlab::ImportExport::ProjectTreeRestorer).to receive(:new).and_call_original
+ expect(Gitlab::ImportExport::Project::TreeRestorer).to receive(:new).and_call_original
importer.execute
end
diff --git a/spec/lib/gitlab/import_export/json/legacy_reader_spec.rb b/spec/lib/gitlab/import_export/json/legacy_reader_spec.rb
new file mode 100644
index 00000000000..0009a5f81de
--- /dev/null
+++ b/spec/lib/gitlab/import_export/json/legacy_reader_spec.rb
@@ -0,0 +1,149 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::JSON::LegacyReader::User do
+ let(:relation_names) { [] }
+ let(:legacy_reader) { described_class.new(tree_hash, relation_names) }
+
+ describe '#valid?' do
+ subject { legacy_reader.valid? }
+
+ context 'tree_hash not present' do
+ let(:tree_hash) { nil }
+
+ it { is_expected.to be false }
+ end
+
+ context 'tree_hash present' do
+ let(:tree_hash) { { "issues": [] } }
+
+ it { is_expected.to be true }
+ end
+ end
+end
+
+describe Gitlab::ImportExport::JSON::LegacyReader::File do
+ let(:fixture) { 'spec/fixtures/lib/gitlab/import_export/light/project.json' }
+ let(:project_tree) { JSON.parse(File.read(fixture)) }
+ let(:relation_names) { [] }
+ let(:legacy_reader) { described_class.new(path, relation_names) }
+
+ describe '#valid?' do
+ subject { legacy_reader.valid? }
+
+ context 'given valid path' do
+ let(:path) { fixture }
+
+ it { is_expected.to be true }
+ end
+
+ context 'given invalid path' do
+ let(:path) { 'spec/non-existing-folder/do-not-create-this-file.json' }
+
+ it { is_expected.to be false }
+ end
+ end
+
+ describe '#root_attributes' do
+ let(:path) { fixture }
+
+ subject { legacy_reader.root_attributes(excluded_attributes) }
+
+ context 'No excluded attributes' do
+ let(:excluded_attributes) { [] }
+ let(:relation_names) { [] }
+
+ it 'returns the whole tree from parsed JSON' do
+ expect(subject).to eq(project_tree)
+ end
+ end
+
+ context 'Some attributes are excluded' do
+ let(:excluded_attributes) { %w[milestones labels issues services snippets] }
+ let(:relation_names) { %w[import_type archived] }
+
+ it 'returns hash without excluded attributes and relations' do
+ expect(subject).not_to include('milestones', 'labels', 'issues', 'services', 'snippets', 'import_type', 'archived')
+ end
+ end
+ end
+
+ describe '#consume_relation' do
+ let(:path) { fixture }
+ let(:key) { 'description' }
+
+ context 'block not given' do
+ it 'returns value of the key' do
+ expect(legacy_reader).to receive(:relations).and_return({ key => 'test value' })
+ expect(legacy_reader.consume_relation(key)).to eq('test value')
+ end
+ end
+
+ context 'key has been consumed' do
+ before do
+ legacy_reader.consume_relation(key)
+ end
+
+ it 'does not yield' do
+ expect do |blk|
+ legacy_reader.consume_relation(key, &blk)
+ end.not_to yield_control
+ end
+ end
+
+ context 'value is nil' do
+ before do
+ expect(legacy_reader).to receive(:relations).and_return({ key => nil })
+ end
+
+ it 'does not yield' do
+ expect do |blk|
+ legacy_reader.consume_relation(key, &blk)
+ end.not_to yield_control
+ end
+ end
+
+ context 'value is not array' do
+ before do
+ expect(legacy_reader).to receive(:relations).and_return({ key => 'value' })
+ end
+
+ it 'yields the value with index 0' do
+ expect do |blk|
+ legacy_reader.consume_relation(key, &blk)
+ end.to yield_with_args('value', 0)
+ end
+ end
+
+ context 'value is an array' do
+ before do
+ expect(legacy_reader).to receive(:relations).and_return({ key => %w[item1 item2 item3] })
+ end
+
+ it 'yields each array element with its index' do
+ expect do |blk|
+ legacy_reader.consume_relation(key, &blk)
+ end.to yield_successive_args(['item1', 0], ['item2', 1], ['item3', 2])
+ end
+ end
+ end
+
+ describe '#tree_hash' do
+ let(:path) { fixture }
+
+ subject { legacy_reader.send(:tree_hash) }
+
+ it 'parses the JSON into the expected tree' do
+ expect(subject).to eq(project_tree)
+ end
+
+ context 'invalid JSON' do
+ let(:path) { 'spec/fixtures/lib/gitlab/import_export/invalid_json/project.json' }
+
+ it 'raises an exception' do
+ expect { subject }.to raise_exception(Gitlab::ImportExport::Error, 'Incorrect JSON format')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/json/legacy_writer_spec.rb b/spec/lib/gitlab/import_export/json/legacy_writer_spec.rb
new file mode 100644
index 00000000000..b4cdfee3b22
--- /dev/null
+++ b/spec/lib/gitlab/import_export/json/legacy_writer_spec.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::JSON::LegacyWriter do
+ let(:path) { "#{Dir.tmpdir}/legacy_writer_spec/test.json" }
+
+ subject { described_class.new(path) }
+
+ after do
+ FileUtils.rm_rf(path)
+ end
+
+ describe "#write" do
+ context "when key is already written" do
+ it "raises exception" do
+ key = "key"
+ value = "value"
+ subject.write(key, value)
+
+ expect { subject.write(key, "new value") }.to raise_exception("key '#{key}' already written")
+ end
+ end
+
+ context "when key is not already written" do
+ context "when multiple key value pairs are stored" do
+ it "writes correct json" do
+ expected_hash = { "key" => "value_1", "key_1" => "value_2" }
+ expected_hash.each do |key, value|
+ subject.write(key, value)
+ end
+ subject.close
+
+ expect(saved_json(path)).to eq(expected_hash)
+ end
+ end
+ end
+ end
+
+ describe "#append" do
+ context "when key is already written" do
+ it "appends values under a given key" do
+ key = "key"
+ values = %w(value_1 value_2)
+ expected_hash = { key => values }
+ values.each do |value|
+ subject.append(key, value)
+ end
+ subject.close
+
+ expect(saved_json(path)).to eq(expected_hash)
+ end
+ end
+
+ context "when key is not already written" do
+ it "writes correct json" do
+ expected_hash = { "key" => ["value"] }
+ subject.append("key", "value")
+ subject.close
+
+ expect(saved_json(path)).to eq(expected_hash)
+ end
+ end
+ end
+
+ describe "#set" do
+ it "writes correct json" do
+ expected_hash = { "key" => "value_1", "key_1" => "value_2" }
+ subject.set(expected_hash)
+ subject.close
+
+ expect(saved_json(path)).to eq(expected_hash)
+ end
+ end
+
+ def saved_json(filename)
+ ::JSON.parse(IO.read(filename))
+ end
+end
diff --git a/spec/lib/gitlab/import_export/legacy_relation_tree_saver_spec.rb b/spec/lib/gitlab/import_export/legacy_relation_tree_saver_spec.rb
new file mode 100644
index 00000000000..db77bd338e1
--- /dev/null
+++ b/spec/lib/gitlab/import_export/legacy_relation_tree_saver_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::LegacyRelationTreeSaver do
+ let(:exportable) { create(:group) }
+ let(:relation_tree_saver) { described_class.new }
+ let(:tree) { {} }
+
+ describe '#serialize' do
+ context 'when :export_fast_serialize feature is enabled' do
+ let(:serializer) { instance_double(Gitlab::ImportExport::FastHashSerializer) }
+
+ before do
+ stub_feature_flags(export_fast_serialize: true)
+ end
+
+ it 'uses FastHashSerializer' do
+ expect(Gitlab::ImportExport::FastHashSerializer)
+ .to receive(:new)
+ .with(exportable, tree)
+ .and_return(serializer)
+
+ expect(serializer).to receive(:execute)
+
+ relation_tree_saver.serialize(exportable, tree)
+ end
+ end
+
+ context 'when :export_fast_serialize feature is disabled' do
+ before do
+ stub_feature_flags(export_fast_serialize: false)
+ end
+
+ it 'is serialized via built-in `as_json`' do
+ expect(exportable).to receive(:as_json).with(tree)
+
+ relation_tree_saver.serialize(exportable, tree)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/members_mapper_spec.rb b/spec/lib/gitlab/import_export/members_mapper_spec.rb
index 01a7901062a..7e2b5ed534f 100644
--- a/spec/lib/gitlab/import_export/members_mapper_spec.rb
+++ b/spec/lib/gitlab/import_export/members_mapper_spec.rb
@@ -4,167 +4,191 @@ require 'spec_helper'
describe Gitlab::ImportExport::MembersMapper do
describe 'map members' do
- let(:user) { create(:admin) }
- let(:project) { create(:project, :public, name: 'searchable_project') }
- let(:user2) { create(:user) }
- let(:exported_user_id) { 99 }
- let(:exported_members) do
- [{
- "id" => 2,
- "access_level" => 40,
- "source_id" => 14,
- "source_type" => "Project",
- "notification_level" => 3,
- "created_at" => "2016-03-11T10:21:44.822Z",
- "updated_at" => "2016-03-11T10:21:44.822Z",
- "created_by_id" => nil,
- "invite_email" => nil,
- "invite_token" => nil,
- "invite_accepted_at" => nil,
- "user" =>
- {
- "id" => exported_user_id,
- "email" => user2.email,
- "username" => 'test'
- },
- "user_id" => 19
- },
- {
- "id" => 3,
- "access_level" => 40,
- "source_id" => 14,
- "source_type" => "Project",
- "user_id" => nil,
- "notification_level" => 3,
- "created_at" => "2016-03-11T10:21:44.822Z",
- "updated_at" => "2016-03-11T10:21:44.822Z",
- "created_by_id" => 1,
- "invite_email" => 'invite@test.com',
- "invite_token" => 'token',
- "invite_accepted_at" => nil
- }]
- end
-
- let(:members_mapper) do
- described_class.new(
- exported_members: exported_members, user: user, importable: project)
- end
-
- it 'includes the exported user ID in the map' do
- expect(members_mapper.map.keys).to include(exported_user_id)
- end
-
- it 'maps a project member' do
- expect(members_mapper.map[exported_user_id]).to eq(user2.id)
- end
-
- it 'defaults to importer project member if it does not exist' do
- expect(members_mapper.map[-1]).to eq(user.id)
- end
-
- it 'has invited members with no user' do
- members_mapper.map
-
- expect(ProjectMember.find_by_invite_email('invite@test.com')).not_to be_nil
- end
-
- it 'authorizes the users to the project' do
- members_mapper.map
-
- expect(user.authorized_project?(project)).to be true
- expect(user2.authorized_project?(project)).to be true
- end
-
- it 'maps an owner as a maintainer' do
- exported_members.first['access_level'] = ProjectMember::OWNER
-
- expect(members_mapper.map[exported_user_id]).to eq(user2.id)
- expect(ProjectMember.find_by_user_id(user2.id).access_level).to eq(ProjectMember::MAINTAINER)
- end
-
- it 'removes old user_id from member_hash to avoid conflict with user key' do
- expect(ProjectMember)
- .to receive(:create)
- .twice
- .with(hash_excluding('user_id'))
- .and_call_original
-
- members_mapper.map
- end
-
- context 'user is not an admin' do
- let(:user) { create(:user) }
-
- it 'does not map a project member' do
- expect(members_mapper.map[exported_user_id]).to eq(user.id)
+ shared_examples 'imports exported members' do
+ let(:user) { create(:admin) }
+ let(:user2) { create(:user) }
+ let(:exported_user_id) { 99 }
+ let(:exported_members) do
+ [{
+ "id" => 2,
+ "access_level" => 40,
+ "source_id" => 14,
+ "source_type" => source_type,
+ "notification_level" => 3,
+ "created_at" => "2016-03-11T10:21:44.822Z",
+ "updated_at" => "2016-03-11T10:21:44.822Z",
+ "created_by_id" => nil,
+ "invite_email" => nil,
+ "invite_token" => nil,
+ "invite_accepted_at" => nil,
+ "user" =>
+ {
+ "id" => exported_user_id,
+ "email" => user2.email,
+ "username" => 'test'
+ },
+ "user_id" => 19
+ },
+ {
+ "id" => 3,
+ "access_level" => 40,
+ "source_id" => 14,
+ "source_type" => source_type,
+ "user_id" => nil,
+ "notification_level" => 3,
+ "created_at" => "2016-03-11T10:21:44.822Z",
+ "updated_at" => "2016-03-11T10:21:44.822Z",
+ "created_by_id" => 1,
+ "invite_email" => 'invite@test.com',
+ "invite_token" => 'token',
+ "invite_accepted_at" => nil
+ }]
end
- it 'defaults to importer project member if it does not exist' do
- expect(members_mapper.map[-1]).to eq(user.id)
+ let(:members_mapper) do
+ described_class.new(
+ exported_members: exported_members, user: user, importable: importable)
end
- end
- context 'chooses the one with an email first' do
- let(:user3) { create(:user, username: 'test') }
+ it 'includes the exported user ID in the map' do
+ expect(members_mapper.map.keys).to include(exported_user_id)
+ end
- it 'maps the project member that has a matching email first' do
+ it 'maps a member' do
expect(members_mapper.map[exported_user_id]).to eq(user2.id)
end
- end
- context 'importer same as group member' do
- let(:user2) { create(:admin) }
- let(:group) { create(:group) }
- let(:project) { create(:project, :public, name: 'searchable_project', namespace: group) }
- let(:members_mapper) do
- described_class.new(
- exported_members: exported_members, user: user2, importable: project)
+ it 'defaults to importer member if it does not exist' do
+ expect(members_mapper.map[-1]).to eq(user.id)
end
- before do
- group.add_users([user, user2], GroupMember::DEVELOPER)
- end
+ it 'has invited members with no user' do
+ members_mapper.map
- it 'maps the project member' do
- expect(members_mapper.map[exported_user_id]).to eq(user2.id)
+ expect(member_class.find_by_invite_email('invite@test.com')).not_to be_nil
end
- it 'maps the project member if it already exists' do
- project.add_maintainer(user2)
+ it 'removes old user_id from member_hash to avoid conflict with user key' do
+ expect(member_class)
+ .to receive(:create)
+ .twice
+ .with(hash_excluding('user_id'))
+ .and_call_original
- expect(members_mapper.map[exported_user_id]).to eq(user2.id)
+ members_mapper.map
end
- end
- context 'importing group members' do
- let(:group) { create(:group) }
- let(:project) { create(:project, namespace: group) }
- let(:members_mapper) do
- described_class.new(
- exported_members: exported_members, user: user, importable: project)
- end
+ context 'user is not an admin' do
+ let(:user) { create(:user) }
- before do
- group.add_users([user, user2], GroupMember::DEVELOPER)
- user.update(email: 'invite@test.com')
+ it 'does not map a member' do
+ expect(members_mapper.map[exported_user_id]).to eq(user.id)
+ end
+
+ it 'defaults to importer member if it does not exist' do
+ expect(members_mapper.map[-1]).to eq(user.id)
+ end
end
- it 'maps the importer' do
- expect(members_mapper.map[-1]).to eq(user.id)
+ context 'chooses the one with an email' do
+ let(:user3) { create(:user, username: 'test') }
+
+ it 'maps the member that has a matching email' do
+ expect(members_mapper.map[exported_user_id]).to eq(user2.id)
+ end
end
+ end
- it 'maps the group member' do
- expect(members_mapper.map[exported_user_id]).to eq(user2.id)
+ context 'when importable is Project' do
+ include_examples 'imports exported members' do
+ let(:source_type) { 'Project' }
+ let(:member_class) { ProjectMember }
+ let(:importable) { create(:project, :public, name: 'searchable_project') }
+
+ it 'authorizes the users to the project' do
+ members_mapper.map
+
+ expect(user.authorized_project?(importable)).to be true
+ expect(user2.authorized_project?(importable)).to be true
+ end
+
+ it 'maps an owner as a maintainer' do
+ exported_members.first['access_level'] = ProjectMember::OWNER
+
+ expect(members_mapper.map[exported_user_id]).to eq(user2.id)
+ expect(member_class.find_by_user_id(user2.id).access_level).to eq(ProjectMember::MAINTAINER)
+ end
+
+ context 'importer same as group member' do
+ let(:user2) { create(:admin) }
+ let(:group) { create(:group) }
+ let(:importable) { create(:project, :public, name: 'searchable_project', namespace: group) }
+ let(:members_mapper) do
+ described_class.new(
+ exported_members: exported_members, user: user2, importable: importable)
+ end
+
+ before do
+ group.add_users([user, user2], GroupMember::DEVELOPER)
+ end
+
+ it 'maps the project member' do
+ expect(members_mapper.map[exported_user_id]).to eq(user2.id)
+ end
+
+ it 'maps the project member if it already exists' do
+ importable.add_maintainer(user2)
+
+ expect(members_mapper.map[exported_user_id]).to eq(user2.id)
+ end
+ end
+
+ context 'importing group members' do
+ let(:group) { create(:group) }
+ let(:importable) { create(:project, namespace: group) }
+ let(:members_mapper) do
+ described_class.new(
+ exported_members: exported_members, user: user, importable: importable)
+ end
+
+ before do
+ group.add_users([user, user2], GroupMember::DEVELOPER)
+ user.update(email: 'invite@test.com')
+ end
+
+ it 'maps the importer' do
+ expect(members_mapper.map[-1]).to eq(user.id)
+ end
+
+ it 'maps the group member' do
+ expect(members_mapper.map[exported_user_id]).to eq(user2.id)
+ end
+ end
+
+ context 'when importer mapping fails' do
+ let(:exception_message) { 'Something went wrong' }
+
+ it 'includes importer specific error message' do
+ expect(member_class).to receive(:create!).and_raise(StandardError.new(exception_message))
+
+ expect { members_mapper.map }.to raise_error(StandardError, "Error adding importer user to Project members. #{exception_message}")
+ end
+ end
end
end
- context 'when importer mapping fails' do
- let(:exception_message) { 'Something went wrong' }
+ context 'when importable is Group' do
+ include_examples 'imports exported members' do
+ let(:source_type) { 'Namespace' }
+ let(:member_class) { GroupMember }
+ let(:importable) { create(:group) }
- it 'includes importer specific error message' do
- expect(ProjectMember).to receive(:create!).and_raise(StandardError.new(exception_message))
+ it 'does not lower owner access level' do
+ exported_members.first['access_level'] = member_class::OWNER
- expect { members_mapper.map }.to raise_error(StandardError, "Error adding importer user to Project members. #{exception_message}")
+ expect(members_mapper.map[exported_user_id]).to eq(user2.id)
+ expect(member_class.find_by_user_id(user2.id).access_level).to eq(member_class::OWNER)
+ end
end
end
end
diff --git a/spec/lib/gitlab/import_export/merge_request_parser_spec.rb b/spec/lib/gitlab/import_export/merge_request_parser_spec.rb
index ab834ac3fa8..0cdb3c43992 100644
--- a/spec/lib/gitlab/import_export/merge_request_parser_spec.rb
+++ b/spec/lib/gitlab/import_export/merge_request_parser_spec.rb
@@ -52,10 +52,10 @@ describe Gitlab::ImportExport::MergeRequestParser do
context 'when the diff is invalid' do
let(:merge_request_diff) { build(:merge_request_diff, merge_request: merge_request, base_commit_sha: 'foobar') }
- it 'sets the diff to nil' do
+    it 'sets the diff to an empty diff' do
expect(merge_request_diff).to be_invalid
expect(merge_request_diff.merge_request).to eq merge_request
- expect(parsed_merge_request.merge_request_diff).to be_nil
+ expect(parsed_merge_request.merge_request_diff).to be_empty
end
end
end
diff --git a/spec/lib/gitlab/import_export/project/export_task_spec.rb b/spec/lib/gitlab/import_export/project/export_task_spec.rb
new file mode 100644
index 00000000000..cf11a1df33c
--- /dev/null
+++ b/spec/lib/gitlab/import_export/project/export_task_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'rake_helper'
+
+describe Gitlab::ImportExport::Project::ExportTask do
+ let(:username) { 'root' }
+ let(:namespace_path) { username }
+ let!(:user) { create(:user, username: username) }
+ let(:measurement_enabled) { false }
+ let(:file_path) { 'spec/fixtures/gitlab/import_export/test_project_export.tar.gz' }
+ let(:project) { create(:project, creator: user, namespace: user.namespace) }
+ let(:project_name) { project.name }
+
+ let(:task_params) do
+ {
+ username: username,
+ namespace_path: namespace_path,
+ project_path: project_name,
+ file_path: file_path,
+ measurement_enabled: measurement_enabled
+ }
+ end
+
+ subject { described_class.new(task_params).export }
+
+ context 'when project is found' do
+ let(:project) { create(:project, creator: user, namespace: user.namespace) }
+
+ around do |example|
+ example.run
+ ensure
+ File.delete(file_path)
+ end
+
+ it 'performs project export successfully' do
+ expect { subject }.to output(/Done!/).to_stdout
+
+ expect(subject).to eq(true)
+
+ expect(File).to exist(file_path)
+ end
+
+ it_behaves_like 'measurable'
+ end
+
+ context 'when project is not found' do
+ let(:project_name) { 'invalid project name' }
+
+ it 'logs an error' do
+ expect { subject }.to output(/Project with path: #{project_name} was not found. Please provide correct project path/).to_stdout
+ end
+
+ it 'returns false' do
+ expect(subject).to eq(false)
+ end
+ end
+
+ context 'when file path is invalid' do
+ let(:file_path) { '/invalid_file_path/test_project_export.tar.gz' }
+
+ it 'logs an error' do
+      expect { subject }.to output(/Invalid file path: #{file_path}. Please provide correct file path/).to_stdout
+ end
+
+ it 'returns false' do
+ expect(subject).to eq(false)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/project/import_task_spec.rb b/spec/lib/gitlab/import_export/project/import_task_spec.rb
new file mode 100644
index 00000000000..f7b9cbaa095
--- /dev/null
+++ b/spec/lib/gitlab/import_export/project/import_task_spec.rb
@@ -0,0 +1,96 @@
+# frozen_string_literal: true
+
+require 'rake_helper'
+
+describe Gitlab::ImportExport::Project::ImportTask do
+ let(:username) { 'root' }
+ let(:namespace_path) { username }
+ let!(:user) { create(:user, username: username) }
+ let(:measurement_enabled) { false }
+ let(:project) { Project.find_by_full_path("#{namespace_path}/#{project_name}") }
+ let(:import_task) { described_class.new(task_params) }
+ let(:task_params) do
+ {
+ username: username,
+ namespace_path: namespace_path,
+ project_path: project_name,
+ file_path: file_path,
+ measurement_enabled: measurement_enabled
+ }
+ end
+
+ before do
+ allow(Settings.uploads.object_store).to receive(:[]=).and_call_original
+ end
+
+ around do |example|
+ old_direct_upload_setting = Settings.uploads.object_store['direct_upload']
+ old_background_upload_setting = Settings.uploads.object_store['background_upload']
+
+ Settings.uploads.object_store['direct_upload'] = true
+ Settings.uploads.object_store['background_upload'] = true
+
+ example.run
+
+ Settings.uploads.object_store['direct_upload'] = old_direct_upload_setting
+ Settings.uploads.object_store['background_upload'] = old_background_upload_setting
+ end
+
+ subject { import_task.import }
+
+ context 'when project import is valid' do
+ let(:project_name) { 'import_rake_test_project' }
+ let(:file_path) { 'spec/fixtures/gitlab/import_export/lightweight_project_export.tar.gz' }
+
+ it 'performs project import successfully' do
+ expect { subject }.to output(/Done!/).to_stdout
+ expect { subject }.not_to raise_error
+ expect(subject).to eq(true)
+
+ expect(project.merge_requests.count).to be > 0
+ expect(project.issues.count).to be > 0
+ expect(project.milestones.count).to be > 0
+ expect(project.import_state.status).to eq('finished')
+ end
+
+ it 'disables direct & background upload only during project creation' do
+ expect_next_instance_of(Projects::GitlabProjectsImportService) do |service|
+ expect(service).to receive(:execute).and_wrap_original do |m|
+ expect(Settings.uploads.object_store['background_upload']).to eq(false)
+ expect(Settings.uploads.object_store['direct_upload']).to eq(false)
+
+ m.call
+ end
+ end
+
+ expect(import_task).to receive(:execute_sidekiq_job).and_wrap_original do |m|
+ expect(Settings.uploads.object_store['background_upload']).to eq(true)
+ expect(Settings.uploads.object_store['direct_upload']).to eq(true)
+        expect(Settings.uploads.object_store).not_to receive(:[]=).with('background_upload', false)
+ expect(Settings.uploads.object_store).not_to receive(:[]=).with('direct_upload', false)
+
+ m.call
+ end
+
+ subject
+ end
+
+ it_behaves_like 'measurable'
+ end
+
+ context 'when project import is invalid' do
+ let(:project_name) { 'import_rake_invalid_test_project' }
+ let(:file_path) { 'spec/fixtures/gitlab/import_export/corrupted_project_export.tar.gz' }
+ let(:not_imported_message) { /Total number of not imported relations: 1/ }
+
+ it 'performs project import successfully' do
+ expect { subject }.to output(not_imported_message).to_stdout
+ expect { subject }.not_to raise_error
+ expect(subject).to eq(true)
+
+ expect(project.merge_requests).to be_empty
+ expect(project.import_state.last_error).to be_nil
+ expect(project.import_state.status).to eq('finished')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/project/legacy_tree_saver_spec.rb b/spec/lib/gitlab/import_export/project/legacy_tree_saver_spec.rb
new file mode 100644
index 00000000000..d4406dbc60b
--- /dev/null
+++ b/spec/lib/gitlab/import_export/project/legacy_tree_saver_spec.rb
@@ -0,0 +1,397 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::Project::LegacyTreeSaver do
+ describe 'saves the project tree into a json object' do
+ let(:shared) { project.import_export_shared }
+ let(:project_tree_saver) { described_class.new(project: project, current_user: user, shared: shared) }
+ let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
+ let(:user) { create(:user) }
+ let!(:project) { setup_project }
+
+ before do
+ project.add_maintainer(user)
+ allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
+ allow_any_instance_of(MergeRequest).to receive(:source_branch_sha).and_return('ABCD')
+ allow_any_instance_of(MergeRequest).to receive(:target_branch_sha).and_return('DCBA')
+ end
+
+ after do
+ FileUtils.rm_rf(export_path)
+ end
+
+ it 'saves project successfully' do
+ expect(project_tree_saver.save).to be true
+ end
+
+ context ':export_fast_serialize feature flag checks' do
+ before do
+ expect(Gitlab::ImportExport::Reader).to receive(:new).with(shared: shared).and_return(reader)
+ expect(reader).to receive(:project_tree).and_return(project_tree)
+ end
+
+ let(:serializer) { instance_double('Gitlab::ImportExport::FastHashSerializer') }
+ let(:reader) { instance_double('Gitlab::ImportExport::Reader') }
+ let(:project_tree) do
+ {
+ include: [{ issues: { include: [] } }],
+ preload: { issues: nil }
+ }
+ end
+
+ context 'when :export_fast_serialize feature is enabled' do
+ before do
+ stub_feature_flags(export_fast_serialize: true)
+ end
+
+ it 'uses FastHashSerializer' do
+ expect(Gitlab::ImportExport::FastHashSerializer)
+ .to receive(:new)
+ .with(project, project_tree)
+ .and_return(serializer)
+
+ expect(serializer).to receive(:execute)
+
+ project_tree_saver.save
+ end
+ end
+
+ context 'when :export_fast_serialize feature is disabled' do
+ before do
+ stub_feature_flags(export_fast_serialize: false)
+ end
+
+ it 'is serialized via built-in `as_json`' do
+ expect(project).to receive(:as_json).with(project_tree)
+
+ project_tree_saver.save
+ end
+ end
+ end
+
+ # It is mostly duplicated in
+ # `spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb`
+ # except:
+ # context 'with description override' do
+ # context 'group members' do
+ # ^ These are specific for the Project::TreeSaver
+ context 'JSON' do
+ let(:saved_project_json) do
+ project_tree_saver.save
+ project_json(project_tree_saver.full_path)
+ end
+
+ # It is not duplicated in
+ # `spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb`
+ context 'with description override' do
+ let(:params) { { description: 'Foo Bar' } }
+ let(:project_tree_saver) { described_class.new(project: project, current_user: user, shared: shared, params: params) }
+
+ it 'overrides the project description' do
+ expect(saved_project_json).to include({ 'description' => params[:description] })
+ end
+ end
+
+ it 'saves the correct json' do
+ expect(saved_project_json).to include({ 'description' => 'description', 'visibility_level' => 20 })
+ end
+
+ it 'has approvals_before_merge set' do
+ expect(saved_project_json['approvals_before_merge']).to eq(1)
+ end
+
+ it 'has milestones' do
+ expect(saved_project_json['milestones']).not_to be_empty
+ end
+
+ it 'has merge requests' do
+ expect(saved_project_json['merge_requests']).not_to be_empty
+ end
+
+ it 'has merge request\'s milestones' do
+ expect(saved_project_json['merge_requests'].first['milestone']).not_to be_empty
+ end
+
+ it 'has merge request\'s source branch SHA' do
+ expect(saved_project_json['merge_requests'].first['source_branch_sha']).to eq('ABCD')
+ end
+
+ it 'has merge request\'s target branch SHA' do
+ expect(saved_project_json['merge_requests'].first['target_branch_sha']).to eq('DCBA')
+ end
+
+ it 'has events' do
+ expect(saved_project_json['merge_requests'].first['milestone']['events']).not_to be_empty
+ end
+
+ it 'has snippets' do
+ expect(saved_project_json['snippets']).not_to be_empty
+ end
+
+ it 'has snippet notes' do
+ expect(saved_project_json['snippets'].first['notes']).not_to be_empty
+ end
+
+ it 'has releases' do
+ expect(saved_project_json['releases']).not_to be_empty
+ end
+
+ it 'has no author on releases' do
+ expect(saved_project_json['releases'].first['author']).to be_nil
+ end
+
+ it 'has the author ID on releases' do
+ expect(saved_project_json['releases'].first['author_id']).not_to be_nil
+ end
+
+ it 'has issues' do
+ expect(saved_project_json['issues']).not_to be_empty
+ end
+
+ it 'has issue comments' do
+ notes = saved_project_json['issues'].first['notes']
+
+ expect(notes).not_to be_empty
+ expect(notes.first['type']).to eq('DiscussionNote')
+ end
+
+ it 'has issue assignees' do
+ expect(saved_project_json['issues'].first['issue_assignees']).not_to be_empty
+ end
+
+ it 'has author on issue comments' do
+ expect(saved_project_json['issues'].first['notes'].first['author']).not_to be_empty
+ end
+
+ it 'has project members' do
+ expect(saved_project_json['project_members']).not_to be_empty
+ end
+
+ it 'has merge requests diffs' do
+ expect(saved_project_json['merge_requests'].first['merge_request_diff']).not_to be_empty
+ end
+
+ it 'has merge request diff files' do
+ expect(saved_project_json['merge_requests'].first['merge_request_diff']['merge_request_diff_files']).not_to be_empty
+ end
+
+ it 'has merge request diff commits' do
+ expect(saved_project_json['merge_requests'].first['merge_request_diff']['merge_request_diff_commits']).not_to be_empty
+ end
+
+ it 'has merge requests comments' do
+ expect(saved_project_json['merge_requests'].first['notes']).not_to be_empty
+ end
+
+ it 'has author on merge requests comments' do
+ expect(saved_project_json['merge_requests'].first['notes'].first['author']).not_to be_empty
+ end
+
+ it 'has pipeline stages' do
+ expect(saved_project_json.dig('ci_pipelines', 0, 'stages')).not_to be_empty
+ end
+
+ it 'has pipeline statuses' do
+ expect(saved_project_json.dig('ci_pipelines', 0, 'stages', 0, 'statuses')).not_to be_empty
+ end
+
+ it 'has pipeline builds' do
+ builds_count = saved_project_json
+ .dig('ci_pipelines', 0, 'stages', 0, 'statuses')
+ .count { |hash| hash['type'] == 'Ci::Build' }
+
+ expect(builds_count).to eq(1)
+ end
+
+ it 'has no when YML attributes but only the DB column' do
+ expect_any_instance_of(Gitlab::Ci::YamlProcessor).not_to receive(:build_attributes)
+
+ saved_project_json
+ end
+
+ it 'has pipeline commits' do
+ expect(saved_project_json['ci_pipelines']).not_to be_empty
+ end
+
+ it 'has ci pipeline notes' do
+ expect(saved_project_json['ci_pipelines'].first['notes']).not_to be_empty
+ end
+
+ it 'has labels with no associations' do
+ expect(saved_project_json['labels']).not_to be_empty
+ end
+
+ it 'has labels associated to records' do
+ expect(saved_project_json['issues'].first['label_links'].first['label']).not_to be_empty
+ end
+
+ it 'has project and group labels' do
+ label_types = saved_project_json['issues'].first['label_links'].map { |link| link['label']['type'] }
+
+ expect(label_types).to match_array(%w(ProjectLabel GroupLabel))
+ end
+
+ it 'has priorities associated to labels' do
+ priorities = saved_project_json['issues'].first['label_links'].flat_map { |link| link['label']['priorities'] }
+
+ expect(priorities).not_to be_empty
+ end
+
+ it 'has issue resource label events' do
+ expect(saved_project_json['issues'].first['resource_label_events']).not_to be_empty
+ end
+
+ it 'has merge request resource label events' do
+ expect(saved_project_json['merge_requests'].first['resource_label_events']).not_to be_empty
+ end
+
+ it 'saves the correct service type' do
+ expect(saved_project_json['services'].first['type']).to eq('CustomIssueTrackerService')
+ end
+
+ it 'saves the properties for a service' do
+ expect(saved_project_json['services'].first['properties']).to eq('one' => 'value')
+ end
+
+ it 'has project feature' do
+ project_feature = saved_project_json['project_feature']
+ expect(project_feature).not_to be_empty
+ expect(project_feature["issues_access_level"]).to eq(ProjectFeature::DISABLED)
+ expect(project_feature["wiki_access_level"]).to eq(ProjectFeature::ENABLED)
+ expect(project_feature["builds_access_level"]).to eq(ProjectFeature::PRIVATE)
+ end
+
+ it 'has custom attributes' do
+ expect(saved_project_json['custom_attributes'].count).to eq(2)
+ end
+
+ it 'has badges' do
+ expect(saved_project_json['project_badges'].count).to eq(2)
+ end
+
+ it 'does not complain about non UTF-8 characters in MR diff files' do
+ ActiveRecord::Base.connection.execute("UPDATE merge_request_diff_files SET diff = '---\n- :diff: !binary |-\n LS0tIC9kZXYvbnVsbAorKysgYi9pbWFnZXMvbnVjb3IucGRmCkBAIC0wLDAg\n KzEsMTY3OSBAQAorJVBERi0xLjUNJeLjz9MNCisxIDAgb2JqDTw8L01ldGFk\n YXR'")
+
+ expect(project_tree_saver.save).to be true
+ end
+
+ context 'group members' do
+ let(:user2) { create(:user, email: 'group@member.com') }
+ let(:member_emails) do
+ saved_project_json['project_members'].map do |pm|
+ pm['user']['email']
+ end
+ end
+
+ before do
+ Group.first.add_developer(user2)
+ end
+
+ it 'does not export group members if it has no permission' do
+ Group.first.add_developer(user)
+
+ expect(member_emails).not_to include('group@member.com')
+ end
+
+ it 'does not export group members as maintainer' do
+ Group.first.add_maintainer(user)
+
+ expect(member_emails).not_to include('group@member.com')
+ end
+
+ it 'exports group members as group owner' do
+ Group.first.add_owner(user)
+
+ expect(member_emails).to include('group@member.com')
+ end
+
+ context 'as admin' do
+ let(:user) { create(:admin) }
+
+ it 'exports group members as admin' do
+ expect(member_emails).to include('group@member.com')
+ end
+
+ it 'exports group members as project members' do
+ member_types = saved_project_json['project_members'].map { |pm| pm['source_type'] }
+
+ expect(member_types).to all(eq('Project'))
+ end
+ end
+ end
+
+ context 'project attributes' do
+ it 'does not contain the runners token' do
+ expect(saved_project_json).not_to include("runners_token" => 'token')
+ end
+ end
+
+ it 'has a board and a list' do
+ expect(saved_project_json['boards'].first['lists']).not_to be_empty
+ end
+ end
+ end
+
+ def setup_project
+ release = create(:release)
+ group = create(:group)
+
+ project = create(:project,
+ :public,
+ :repository,
+ :issues_disabled,
+ :wiki_enabled,
+ :builds_private,
+ description: 'description',
+ releases: [release],
+ group: group,
+ approvals_before_merge: 1
+ )
+ allow(project).to receive(:commit).and_return(Commit.new(RepoHelpers.sample_commit, project))
+
+ issue = create(:issue, assignees: [user], project: project)
+ snippet = create(:project_snippet, project: project)
+ project_label = create(:label, project: project)
+ group_label = create(:group_label, group: group)
+ create(:label_link, label: project_label, target: issue)
+ create(:label_link, label: group_label, target: issue)
+ create(:label_priority, label: group_label, priority: 1)
+ milestone = create(:milestone, project: project)
+ merge_request = create(:merge_request, source_project: project, milestone: milestone)
+
+ ci_build = create(:ci_build, project: project, when: nil)
+ ci_build.pipeline.update(project: project)
+ create(:commit_status, project: project, pipeline: ci_build.pipeline)
+
+ create(:milestone, project: project)
+ create(:discussion_note, noteable: issue, project: project)
+ create(:note, noteable: merge_request, project: project)
+ create(:note, noteable: snippet, project: project)
+ create(:note_on_commit,
+ author: user,
+ project: project,
+ commit_id: ci_build.pipeline.sha)
+
+ create(:resource_label_event, label: project_label, issue: issue)
+ create(:resource_label_event, label: group_label, merge_request: merge_request)
+
+ create(:event, :created, target: milestone, project: project, author: user)
+ create(:service, project: project, type: 'CustomIssueTrackerService', category: 'issue_tracker', properties: { one: 'value' })
+
+ create(:project_custom_attribute, project: project)
+ create(:project_custom_attribute, project: project)
+
+ create(:project_badge, project: project)
+ create(:project_badge, project: project)
+
+ board = create(:board, project: project, name: 'TestBoard')
+ create(:list, board: board, position: 0, label: project_label)
+
+ project
+ end
+
+ def project_json(filename)
+ ::JSON.parse(IO.read(filename))
+ end
+end
diff --git a/spec/lib/gitlab/import_export/project/object_builder_spec.rb b/spec/lib/gitlab/import_export/project/object_builder_spec.rb
new file mode 100644
index 00000000000..c9d1410400a
--- /dev/null
+++ b/spec/lib/gitlab/import_export/project/object_builder_spec.rb
@@ -0,0 +1,153 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::Project::ObjectBuilder do
+ let!(:group) { create(:group, :private) }
+ let!(:subgroup) { create(:group, :private, parent: group) }
+ let!(:project) do
+ create(:project, :repository,
+ :builds_disabled,
+ :issues_disabled,
+ name: 'project',
+ path: 'project',
+ group: subgroup)
+ end
+
+ let(:lru_cache) { subject.send(:lru_cache) }
+ let(:cache_key) { subject.send(:cache_key) }
+
+ context 'request store is not active' do
+ subject do
+ described_class.new(Label,
+ 'title' => 'group label',
+ 'project' => project,
+ 'group' => project.group)
+ end
+
+    it 'does not initialize the cache' do
+ expect(lru_cache).to be_nil
+ expect(cache_key).to be_nil
+ end
+ end
+
+ context 'request store is active', :request_store do
+ subject do
+ described_class.new(Label,
+ 'title' => 'group label',
+ 'project' => project,
+ 'group' => project.group)
+ end
+
+    it 'initializes the cache in memory' do
+ expect(lru_cache).not_to be_nil
+ expect(cache_key).not_to be_nil
+ end
+
+    it 'caches the object the first time it is found' do
+ group_label = create(:group_label, name: 'group label', group: project.group)
+
+ expect(subject).to receive(:find_object).and_call_original
+ expect { subject.find }
+ .to change { lru_cache[cache_key] }
+ .from(nil).to(group_label)
+
+ expect(subject.find).to eq(group_label)
+ end
+
+    it 'reads from the cache when the object has been cached' do
+ group_label = create(:group_label, name: 'group label', group: project.group)
+
+ subject.find
+
+ expect(subject).not_to receive(:find_object)
+ expect { subject.find }.not_to change { lru_cache[cache_key] }
+
+ expect(subject.find).to eq(group_label)
+ end
+ end
+
+ context 'labels' do
+ it 'finds the existing group label' do
+ group_label = create(:group_label, name: 'group label', group: project.group)
+
+ expect(described_class.build(Label,
+ 'title' => 'group label',
+ 'project' => project,
+ 'group' => project.group)).to eq(group_label)
+ end
+
+ it 'finds the existing group label in root ancestor' do
+ group_label = create(:group_label, name: 'group label', group: group)
+
+ expect(described_class.build(Label,
+ 'title' => 'group label',
+ 'project' => project,
+ 'group' => group)).to eq(group_label)
+ end
+
+ it 'creates a new label' do
+ label = described_class.build(Label,
+ 'title' => 'group label',
+ 'project' => project,
+ 'group' => project.group)
+
+ expect(label.persisted?).to be true
+ end
+ end
+
+ context 'milestones' do
+ it 'finds the existing group milestone' do
+ milestone = create(:milestone, name: 'group milestone', group: project.group)
+
+ expect(described_class.build(Milestone,
+ 'title' => 'group milestone',
+ 'project' => project,
+ 'group' => project.group)).to eq(milestone)
+ end
+
+ it 'finds the existing group milestone in root ancestor' do
+ milestone = create(:milestone, name: 'group milestone', group: group)
+
+ expect(described_class.build(Milestone,
+ 'title' => 'group milestone',
+ 'project' => project,
+ 'group' => group)).to eq(milestone)
+ end
+
+ it 'creates a new milestone' do
+ milestone = described_class.build(Milestone,
+ 'title' => 'group milestone',
+ 'project' => project,
+ 'group' => project.group)
+
+ expect(milestone.persisted?).to be true
+ end
+ end
+
+ context 'merge_request' do
+ it 'finds the existing merge_request' do
+ merge_request = create(:merge_request, title: 'MergeRequest', iid: 7, target_project: project, source_project: project)
+ expect(described_class.build(MergeRequest,
+ 'title' => 'MergeRequest',
+ 'source_project_id' => project.id,
+ 'target_project_id' => project.id,
+ 'source_branch' => 'SourceBranch',
+ 'iid' => 7,
+ 'target_branch' => 'TargetBranch',
+ 'author_id' => project.creator.id)).to eq(merge_request)
+ end
+
+ it 'creates a new merge_request' do
+ merge_request = described_class.build(MergeRequest,
+ 'title' => 'MergeRequest',
+ 'iid' => 8,
+ 'source_project_id' => project.id,
+ 'target_project_id' => project.id,
+ 'source_branch' => 'SourceBranch',
+ 'target_branch' => 'TargetBranch',
+ 'author_id' => project.creator.id)
+ expect(merge_request.persisted?).to be true
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/project/relation_factory_spec.rb b/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
new file mode 100644
index 00000000000..73ae6810706
--- /dev/null
+++ b/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
@@ -0,0 +1,326 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::Project::RelationFactory do
+ let(:group) { create(:group) }
+ let(:project) { create(:project, :repository, group: group) }
+ let(:members_mapper) { double('members_mapper').as_null_object }
+ let(:user) { create(:admin) }
+ let(:excluded_keys) { [] }
+ let(:created_object) do
+ described_class.create(relation_sym: relation_sym,
+ relation_hash: relation_hash,
+ object_builder: Gitlab::ImportExport::Project::ObjectBuilder,
+ members_mapper: members_mapper,
+ user: user,
+ importable: project,
+ excluded_keys: excluded_keys)
+ end
+
+ context 'hook object' do
+ let(:relation_sym) { :hooks }
+ let(:id) { 999 }
+ let(:service_id) { 99 }
+ let(:original_project_id) { 8 }
+ let(:token) { 'secret' }
+
+ let(:relation_hash) do
+ {
+ 'id' => id,
+ 'url' => 'https://example.json',
+ 'project_id' => original_project_id,
+ 'created_at' => '2016-08-12T09:41:03.462Z',
+ 'updated_at' => '2016-08-12T09:41:03.462Z',
+ 'service_id' => service_id,
+ 'push_events' => true,
+ 'issues_events' => false,
+ 'confidential_issues_events' => false,
+ 'merge_requests_events' => true,
+ 'tag_push_events' => false,
+ 'note_events' => true,
+ 'enable_ssl_verification' => true,
+ 'job_events' => false,
+ 'wiki_page_events' => true,
+ 'token' => token
+ }
+ end
+
+ it 'does not have the original ID' do
+ expect(created_object.id).not_to eq(id)
+ end
+
+ it 'does not have the original service_id' do
+ expect(created_object.service_id).not_to eq(service_id)
+ end
+
+ it 'does not have the original project_id' do
+ expect(created_object.project_id).not_to eq(original_project_id)
+ end
+
+ it 'has the new project_id' do
+ expect(created_object.project_id).to eql(project.id)
+ end
+
+ it 'has a nil token' do
+ expect(created_object.token).to eq(nil)
+ end
+
+ context 'original service exists' do
+ let(:service_id) { create(:service, project: project).id }
+
+ it 'does not have the original service_id' do
+ expect(created_object.service_id).not_to eq(service_id)
+ end
+ end
+
+ context 'excluded attributes' do
+ let(:excluded_keys) { %w[url] }
+
+ it 'are removed from the imported object' do
+ expect(created_object.url).to be_nil
+ end
+ end
+ end
+
+ # Mocks an ActiveRecordish object with the dodgy columns
+ class FooModel
+ include ActiveModel::Model
+
+ def initialize(params = {})
+ params.each { |key, value| send("#{key}=", value) }
+ end
+
+ def values
+ instance_variables.map { |ivar| instance_variable_get(ivar) }
+ end
+ end
+
+ context 'merge_request object' do
+ let(:relation_sym) { :merge_requests }
+
+ let(:exported_member) do
+ {
+ "id" => 111,
+ "access_level" => 30,
+ "source_id" => 1,
+ "source_type" => "Project",
+ "user_id" => 3,
+ "notification_level" => 3,
+ "created_at" => "2016-11-18T09:29:42.634Z",
+ "updated_at" => "2016-11-18T09:29:42.634Z",
+ "user" => {
+ "id" => user.id,
+ "email" => user.email,
+ "username" => user.username
+ }
+ }
+ end
+
+ let(:members_mapper) do
+ Gitlab::ImportExport::MembersMapper.new(
+ exported_members: [exported_member],
+ user: user,
+ importable: project)
+ end
+
+ let(:relation_hash) do
+ {
+ 'id' => 27,
+ 'target_branch' => "feature",
+ 'source_branch' => "feature_conflict",
+ 'source_project_id' => project.id,
+ 'target_project_id' => project.id,
+ 'author_id' => user.id,
+ 'assignee_id' => user.id,
+ 'updated_by_id' => user.id,
+ 'title' => "MR1",
+ 'created_at' => "2016-06-14T15:02:36.568Z",
+ 'updated_at' => "2016-06-14T15:02:56.815Z",
+ 'state' => "opened",
+ 'merge_status' => "unchecked",
+ 'description' => "Description",
+ 'position' => 0,
+ 'source_branch_sha' => "ABCD",
+ 'target_branch_sha' => "DCBA",
+ 'merge_when_pipeline_succeeds' => true
+ }
+ end
+
+ it 'has preloaded author' do
+ expect(created_object.author).to equal(user)
+ end
+
+ it 'has preloaded updated_by' do
+ expect(created_object.updated_by).to equal(user)
+ end
+
+ it 'has preloaded source project' do
+ expect(created_object.source_project).to equal(project)
+ end
+
+ it 'has preloaded target project' do
+      expect(created_object.target_project).to equal(project)
+ end
+ end
+
+ context 'label object' do
+ let(:relation_sym) { :labels }
+ let(:relation_hash) do
+ {
+ "id": 3,
+ "title": "test3",
+ "color": "#428bca",
+ "group_id": project.group.id,
+ "created_at": "2016-07-22T08:55:44.161Z",
+ "updated_at": "2016-07-22T08:55:44.161Z",
+ "template": false,
+ "description": "",
+ "project_id": project.id,
+ "type": "GroupLabel"
+ }
+ end
+
+ it 'has preloaded project' do
+ expect(created_object.project).to equal(project)
+ end
+
+ it 'has preloaded group' do
+ expect(created_object.group).to equal(project.group)
+ end
+ end
+
+ # `project_id`, `described_class.USER_REFERENCES`, noteable_id, target_id, and some project IDs are already
+ # re-assigned by described_class.
+ context 'Potentially hazardous foreign keys' do
+ let(:relation_sym) { :hazardous_foo_model }
+ let(:relation_hash) do
+ {
+ 'service_id' => 99,
+ 'moved_to_id' => 99,
+ 'namespace_id' => 99,
+ 'ci_id' => 99,
+ 'random_project_id' => 99,
+ 'random_id' => 99,
+ 'milestone_id' => 99,
+ 'project_id' => 99,
+ 'user_id' => 99
+ }
+ end
+
+ class HazardousFooModel < FooModel
+ attr_accessor :service_id, :moved_to_id, :namespace_id, :ci_id, :random_project_id, :random_id, :milestone_id, :project_id
+ end
+
+ before do
+ allow(HazardousFooModel).to receive(:reflect_on_association).and_return(nil)
+ end
+
+ it 'does not preserve any foreign key IDs' do
+ expect(created_object.values).not_to include(99)
+ end
+ end
+
+  context 'overridden model with pluralized name' do
+ let(:relation_sym) { :metrics }
+
+ let(:relation_hash) do
+ {
+ 'id' => 99,
+ 'merge_request_id' => 99,
+ 'merged_at' => Time.now,
+ 'merged_by_id' => 99,
+ 'latest_closed_at' => nil,
+ 'latest_closed_by_id' => nil
+ }
+ end
+
+ it 'does not raise errors' do
+ expect { created_object }.not_to raise_error
+ end
+ end
+
+ context 'Project references' do
+ let(:relation_sym) { :project_foo_model }
+ let(:relation_hash) do
+ Gitlab::ImportExport::Project::RelationFactory::PROJECT_REFERENCES.map { |ref| { ref => 99 } }.inject(:merge)
+ end
+
+ class ProjectFooModel < FooModel
+ attr_accessor(*Gitlab::ImportExport::Project::RelationFactory::PROJECT_REFERENCES)
+ end
+
+ before do
+ allow(ProjectFooModel).to receive(:reflect_on_association).and_return(nil)
+ end
+
+ it 'does not preserve any project foreign key IDs' do
+ expect(created_object.values).not_to include(99)
+ end
+ end
+
+ context 'Notes user references' do
+ let(:relation_sym) { :notes }
+ let(:new_user) { create(:user) }
+ let(:exported_member) do
+ {
+ "id" => 111,
+ "access_level" => 30,
+ "source_id" => 1,
+ "source_type" => "Project",
+ "user_id" => 3,
+ "notification_level" => 3,
+ "created_at" => "2016-11-18T09:29:42.634Z",
+ "updated_at" => "2016-11-18T09:29:42.634Z",
+ "user" => {
+ "id" => 999,
+ "email" => new_user.email,
+ "username" => new_user.username
+ }
+ }
+ end
+
+ let(:relation_hash) do
+ {
+ "id" => 4947,
+ "note" => "merged",
+ "noteable_type" => "MergeRequest",
+ "author_id" => 999,
+ "created_at" => "2016-11-18T09:29:42.634Z",
+ "updated_at" => "2016-11-18T09:29:42.634Z",
+ "project_id" => 1,
+ "attachment" => {
+ "url" => nil
+ },
+ "noteable_id" => 377,
+ "system" => true,
+ "author" => {
+ "name" => "Administrator"
+ },
+ "events" => []
+ }
+ end
+
+ let(:members_mapper) do
+ Gitlab::ImportExport::MembersMapper.new(
+ exported_members: [exported_member],
+ user: user,
+ importable: project)
+ end
+
+ it 'maps the right author to the imported note' do
+ expect(created_object.author).to eq(new_user)
+ end
+ end
+
+ context 'encrypted attributes' do
+ let(:relation_sym) { 'Ci::Variable' }
+ let(:relation_hash) do
+ create(:ci_variable).as_json
+ end
+
+ it 'has no value for the encrypted attribute' do
+ expect(created_object.value).to be_nil
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
new file mode 100644
index 00000000000..e38ef75d085
--- /dev/null
+++ b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
@@ -0,0 +1,896 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::Project::TreeRestorer do
+ include ImportExport::CommonUtil
+
+ let(:shared) { project.import_export_shared }
+
+ describe 'restore project tree' do
+ before(:context) do
+ # Using an admin for import, so we can check assignment of existing members
+ @user = create(:admin)
+ @existing_members = [
+ create(:user, email: 'bernard_willms@gitlabexample.com'),
+ create(:user, email: 'saul_will@gitlabexample.com')
+ ]
+
+ RSpec::Mocks.with_temporary_scope do
+ @project = create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project')
+ @shared = @project.import_export_shared
+
+ setup_import_export_config('complex')
+
+ allow_any_instance_of(Repository).to receive(:fetch_source_branch!).and_return(true)
+ allow_any_instance_of(Gitlab::Git::Repository).to receive(:branch_exists?).and_return(false)
+
+ expect_any_instance_of(Gitlab::Git::Repository).to receive(:create_branch).with('feature', 'DCBA')
+ allow_any_instance_of(Gitlab::Git::Repository).to receive(:create_branch)
+
+ project_tree_restorer = described_class.new(user: @user, shared: @shared, project: @project)
+
+ @restored_project_json = project_tree_restorer.restore
+ end
+ end
+
+ context 'JSON' do
+ it 'restores models based on JSON' do
+ expect(@restored_project_json).to be_truthy
+ end
+
+      it 'restores correct project features' do
+ project = Project.find_by_path('project')
+
+ expect(project.project_feature.issues_access_level).to eq(ProjectFeature::PRIVATE)
+ expect(project.project_feature.builds_access_level).to eq(ProjectFeature::PRIVATE)
+ expect(project.project_feature.snippets_access_level).to eq(ProjectFeature::PRIVATE)
+ expect(project.project_feature.wiki_access_level).to eq(ProjectFeature::PRIVATE)
+ expect(project.project_feature.merge_requests_access_level).to eq(ProjectFeature::PRIVATE)
+ end
+
+ it 'has the project description' do
+ expect(Project.find_by_path('project').description).to eq('Nisi et repellendus ut enim quo accusamus vel magnam.')
+ end
+
+ it 'has the same label associated to two issues' do
+ expect(ProjectLabel.find_by_title('test2').issues.count).to eq(2)
+ end
+
+ it 'has milestones associated to two separate issues' do
+ expect(Milestone.find_by_description('test milestone').issues.count).to eq(2)
+ end
+
+ context 'when importing a project with cached_markdown_version and note_html' do
+ context 'for an Issue' do
+ it 'does not import note_html' do
+ note_content = 'Quo reprehenderit aliquam qui dicta impedit cupiditate eligendi'
+ issue_note = Issue.find_by(description: 'Aliquam enim illo et possimus.').notes.select { |n| n.note.match(/#{note_content}/)}.first
+
+ expect(issue_note.note_html).to match(/#{note_content}/)
+ end
+ end
+
+ context 'for a Merge Request' do
+ it 'does not import note_html' do
+ note_content = 'Sit voluptatibus eveniet architecto quidem'
+ merge_request_note = MergeRequest.find_by(title: 'MR1').notes.select { |n| n.note.match(/#{note_content}/)}.first
+
+ expect(merge_request_note.note_html).to match(/#{note_content}/)
+ end
+ end
+ end
+
+ it 'creates a valid pipeline note' do
+ expect(Ci::Pipeline.find_by_sha('sha-notes').notes).not_to be_empty
+ end
+
+ it 'pipeline has the correct user ID' do
+ expect(Ci::Pipeline.find_by_sha('sha-notes').user_id).to eq(@user.id)
+ end
+
+ it 'restores pipelines with missing ref' do
+ expect(Ci::Pipeline.where(ref: nil)).not_to be_empty
+ end
+
+ it 'restores pipeline for merge request' do
+ pipeline = Ci::Pipeline.find_by_sha('048721d90c449b244b7b4c53a9186b04330174ec')
+
+ expect(pipeline).to be_valid
+ expect(pipeline.tag).to be_falsey
+ expect(pipeline.source).to eq('merge_request_event')
+ expect(pipeline.merge_request.id).to be > 0
+ expect(pipeline.merge_request.target_branch).to eq('feature')
+ expect(pipeline.merge_request.source_branch).to eq('feature_conflict')
+ end
+
+ it 'restores pipelines based on ascending id order' do
+ expected_ordered_shas = %w[
+ 2ea1f3dec713d940208fb5ce4a38765ecb5d3f73
+ ce84140e8b878ce6e7c4d298c7202ff38170e3ac
+ 048721d90c449b244b7b4c53a9186b04330174ec
+ sha-notes
+ 5f923865dde3436854e9ceb9cdb7815618d4e849
+ d2d430676773caa88cdaf7c55944073b2fd5561a
+ 2ea1f3dec713d940208fb5ce4a38765ecb5d3f73
+ ]
+
+ project = Project.find_by_path('project')
+
+ project.ci_pipelines.order(:id).each_with_index do |pipeline, i|
+ expect(pipeline['sha']).to eq expected_ordered_shas[i]
+ end
+ end
+
+ it 'preserves updated_at on issues' do
+ issue = Issue.where(description: 'Aliquam enim illo et possimus.').first
+
+ expect(issue.reload.updated_at.to_s).to eq('2016-06-14 15:02:47 UTC')
+ end
+
+ it 'has multiple issue assignees' do
+ expect(Issue.find_by(title: 'Voluptatem').assignees).to contain_exactly(@user, *@existing_members)
+ expect(Issue.find_by(title: 'Issue without assignees').assignees).to be_empty
+ end
+
+ it 'restores timelogs for issues' do
+ timelog = Issue.find_by(title: 'issue_with_timelogs').timelogs.last
+
+ aggregate_failures do
+ expect(timelog.time_spent).to eq(72000)
+ expect(timelog.spent_at).to eq("2019-12-27T00:00:00.000Z")
+ end
+ end
+
+ it 'contains the merge access levels on a protected branch' do
+ expect(ProtectedBranch.first.merge_access_levels).not_to be_empty
+ end
+
+ it 'contains the push access levels on a protected branch' do
+ expect(ProtectedBranch.first.push_access_levels).not_to be_empty
+ end
+
+ it 'contains the create access levels on a protected tag' do
+ expect(ProtectedTag.first.create_access_levels).not_to be_empty
+ end
+
+ it 'restores issue resource label events' do
+ expect(Issue.find_by(title: 'Voluptatem').resource_label_events).not_to be_empty
+ end
+
+ it 'restores merge requests resource label events' do
+ expect(MergeRequest.find_by(title: 'MR1').resource_label_events).not_to be_empty
+ end
+
+ it 'restores suggestion' do
+ note = Note.find_by("note LIKE 'Saepe asperiores exercitationem non dignissimos laborum reiciendis et ipsum%'")
+
+ expect(note.suggestions.count).to eq(1)
+ expect(note.suggestions.first.from_content).to eq("Original line\n")
+ end
+
+      context 'event at fourth level of the tree' do
+ let(:event) { Event.where(action: 6).first }
+
+ it 'restores the event' do
+ expect(event).not_to be_nil
+ end
+
+ it 'has the action' do
+ expect(event.action).not_to be_nil
+ end
+
+        it 'belongs to a note, which belongs to a merge request, which belongs to a project' do
+ expect(event.note.noteable.project).not_to be_nil
+ end
+ end
+
+ it 'has the correct data for merge request diff files' do
+ expect(MergeRequestDiffFile.where.not(diff: nil).count).to eq(55)
+ end
+
+ it 'has the correct data for merge request diff commits' do
+ expect(MergeRequestDiffCommit.count).to eq(77)
+ end
+
+ it 'has the correct data for merge request latest_merge_request_diff' do
+ MergeRequest.find_each do |merge_request|
+ expect(merge_request.latest_merge_request_diff_id).to eq(merge_request.merge_request_diffs.maximum(:id))
+ end
+ end
+
+ it 'has labels associated to label links, associated to issues' do
+ expect(Label.first.label_links.first.target).not_to be_nil
+ end
+
+ it 'has project labels' do
+ expect(ProjectLabel.count).to eq(3)
+ end
+
+ it 'has no group labels' do
+ expect(GroupLabel.count).to eq(0)
+ end
+
+ it 'has issue boards' do
+ expect(Project.find_by_path('project').boards.count).to eq(1)
+ end
+
+ it 'has lists associated with the issue board' do
+ expect(Project.find_by_path('project').boards.find_by_name('TestBoardABC').lists.count).to eq(3)
+ end
+
+ it 'has a project feature' do
+ expect(@project.project_feature).not_to be_nil
+ end
+
+ it 'has custom attributes' do
+ expect(@project.custom_attributes.count).to eq(2)
+ end
+
+ it 'has badges' do
+ expect(@project.project_badges.count).to eq(2)
+ end
+
+ it 'has snippets' do
+ expect(@project.snippets.count).to eq(1)
+ end
+
+ it 'has award emoji for a snippet' do
+ award_emoji = @project.snippets.first.award_emoji
+
+ expect(award_emoji.map(&:name)).to contain_exactly('thumbsup', 'coffee')
+ end
+
+ it 'snippet has notes' do
+ expect(@project.snippets.first.notes.count).to eq(1)
+ end
+
+ it 'snippet has award emojis on notes' do
+ award_emoji = @project.snippets.first.notes.first.award_emoji.first
+
+ expect(award_emoji.name).to eq('thumbsup')
+ end
+
+      it 'restores the `ci_cd_settings`: `group_runners_enabled` setting' do
+ expect(@project.ci_cd_settings.group_runners_enabled?).to eq(false)
+ end
+
+ it 'restores `auto_devops`' do
+ expect(@project.auto_devops_enabled?).to eq(true)
+ expect(@project.auto_devops.deploy_strategy).to eq('continuous')
+ end
+
+ it 'restores the correct service' do
+ expect(CustomIssueTrackerService.first).not_to be_nil
+ end
+
+ it 'restores zoom meetings' do
+ meetings = @project.issues.first.zoom_meetings
+
+ expect(meetings.count).to eq(1)
+ expect(meetings.first.url).to eq('https://zoom.us/j/123456789')
+ end
+
+ it 'restores sentry issues' do
+ sentry_issue = @project.issues.first.sentry_issue
+
+ expect(sentry_issue.sentry_issue_identifier).to eq(1234567891)
+ end
+
+ it 'has award emoji for an issue' do
+ award_emoji = @project.issues.first.award_emoji.first
+
+ expect(award_emoji.name).to eq('musical_keyboard')
+ end
+
+ it 'has award emoji for a note in an issue' do
+ award_emoji = @project.issues.first.notes.first.award_emoji.first
+
+ expect(award_emoji.name).to eq('clapper')
+ end
+
+ it 'restores container_expiration_policy' do
+ policy = Project.find_by_path('project').container_expiration_policy
+
+ aggregate_failures do
+ expect(policy).to be_an_instance_of(ContainerExpirationPolicy)
+ expect(policy).to be_persisted
+ expect(policy.cadence).to eq('3month')
+ end
+ end
+
+ it 'restores error_tracking_setting' do
+ setting = @project.error_tracking_setting
+
+ aggregate_failures do
+ expect(setting.api_url).to eq("https://gitlab.example.com/api/0/projects/sentry-org/sentry-project")
+ expect(setting.project_name).to eq("Sentry Project")
+ expect(setting.organization_name).to eq("Sentry Org")
+ end
+ end
+
+ it 'restores external pull requests' do
+ external_pr = @project.external_pull_requests.last
+
+ aggregate_failures do
+ expect(external_pr.pull_request_iid).to eq(4)
+ expect(external_pr.source_branch).to eq("feature")
+ expect(external_pr.target_branch).to eq("master")
+ expect(external_pr.status).to eq("open")
+ end
+ end
+
+ it 'restores pipeline schedules' do
+ pipeline_schedule = @project.pipeline_schedules.last
+
+ aggregate_failures do
+ expect(pipeline_schedule.description).to eq('Schedule Description')
+ expect(pipeline_schedule.ref).to eq('master')
+ expect(pipeline_schedule.cron).to eq('0 4 * * 0')
+ expect(pipeline_schedule.cron_timezone).to eq('UTC')
+ expect(pipeline_schedule.active).to eq(true)
+ end
+ end
+
+ it 'restores releases with links' do
+ release = @project.releases.last
+ link = release.links.last
+
+ aggregate_failures do
+ expect(release.tag).to eq('release-1.1')
+ expect(release.description).to eq('Some release notes')
+ expect(release.name).to eq('release-1.1')
+ expect(release.sha).to eq('901de3a8bd5573f4a049b1457d28bc1592ba6bf9')
+ expect(release.released_at).to eq('2019-12-26T10:17:14.615Z')
+
+ expect(link.url).to eq('http://localhost/namespace6/project6/-/jobs/140463678/artifacts/download')
+ expect(link.name).to eq('release-1.1.dmg')
+ end
+ end
+
+ context 'Merge requests' do
+ it 'always has the new project as a target' do
+ expect(MergeRequest.find_by_title('MR1').target_project).to eq(@project)
+ end
+
+ it 'has the same source project as originally if source/target are the same' do
+ expect(MergeRequest.find_by_title('MR1').source_project).to eq(@project)
+ end
+
+ it 'has the new project as target if source/target differ' do
+ expect(MergeRequest.find_by_title('MR2').target_project).to eq(@project)
+ end
+
+ it 'has no source if source/target differ' do
+ expect(MergeRequest.find_by_title('MR2').source_project_id).to be_nil
+ end
+
+ it 'has award emoji' do
+ award_emoji = MergeRequest.find_by_title('MR1').award_emoji
+
+ expect(award_emoji.map(&:name)).to contain_exactly('thumbsup', 'drum')
+ end
+
+ context 'notes' do
+ it 'has award emoji' do
+ award_emoji = MergeRequest.find_by_title('MR1').notes.first.award_emoji.first
+
+ expect(award_emoji.name).to eq('tada')
+ end
+ end
+ end
+
+ context 'tokens are regenerated' do
+ it 'has new CI trigger tokens' do
+ expect(Ci::Trigger.where(token: %w[cdbfasdf44a5958c83654733449e585 33a66349b5ad01fc00174af87804e40]))
+ .to be_empty
+ end
+
+ it 'has a new CI build token' do
+ expect(Ci::Build.where(token: 'abcd')).to be_empty
+ end
+ end
+
+ context 'has restored the correct number of records' do
+ it 'has the correct number of merge requests' do
+ expect(@project.merge_requests.size).to eq(9)
+ end
+
+ it 'only restores valid triggers' do
+ expect(@project.triggers.size).to eq(1)
+ end
+
+ it 'has the correct number of pipelines and statuses' do
+ expect(@project.ci_pipelines.size).to eq(7)
+
+ @project.ci_pipelines.order(:id).zip([2, 0, 2, 2, 2, 2, 0])
+ .each do |(pipeline, expected_status_size)|
+ expect(pipeline.statuses.size).to eq(expected_status_size)
+ end
+ end
+ end
+
+ context 'when restoring hierarchy of pipeline, stages and jobs' do
+ it 'restores pipelines' do
+ expect(Ci::Pipeline.all.count).to be 7
+ end
+
+ it 'restores pipeline stages' do
+ expect(Ci::Stage.all.count).to be 6
+ end
+
+ it 'correctly restores association between stage and a pipeline' do
+ expect(Ci::Stage.all).to all(have_attributes(pipeline_id: a_value > 0))
+ end
+
+ it 'restores statuses' do
+ expect(CommitStatus.all.count).to be 10
+ end
+
+ it 'correctly restores association between a stage and a job' do
+ expect(CommitStatus.all).to all(have_attributes(stage_id: a_value > 0))
+ end
+
+ it 'correctly restores association between a pipeline and a job' do
+ expect(CommitStatus.all).to all(have_attributes(pipeline_id: a_value > 0))
+ end
+
+ it 'restores a Hash for CommitStatus options' do
+ expect(CommitStatus.all.map(&:options).compact).to all(be_a(Hash))
+ end
+
+ it 'restores external pull request for the restored pipeline' do
+ pipeline_with_external_pr = @project.ci_pipelines.where(source: 'external_pull_request_event').first
+
+ expect(pipeline_with_external_pr.external_pull_request).to be_persisted
+ end
+
+ it 'has no import failures' do
+ expect(@project.import_failures.size).to eq 0
+ end
+ end
+ end
+ end
+
+ shared_examples 'restores group correctly' do |**results|
+ it 'has group label' do
+ expect(project.group.labels.size).to eq(results.fetch(:labels, 0))
+ expect(project.group.labels.where(type: "GroupLabel").where.not(project_id: nil).count).to eq(0)
+ end
+
+ it 'has group milestone' do
+ expect(project.group.milestones.size).to eq(results.fetch(:milestones, 0))
+ end
+
+ it 'has the correct visibility level' do
+ # INTERNAL in the `project.json`, group's is PRIVATE
+ expect(project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ end
+ end
+
+ context 'project.json file access check' do
+ let(:user) { create(:user) }
+ let!(:project) { create(:project, :builds_disabled, :issues_disabled, name: 'project', path: 'project') }
+ let(:project_tree_restorer) do
+ described_class.new(user: user, shared: shared, project: project)
+ end
+ let(:restored_project_json) { project_tree_restorer.restore }
+
+ it 'does not read a symlink' do
+ Dir.mktmpdir do |tmpdir|
+ setup_symlink(tmpdir, 'project.json')
+ allow(shared).to receive(:export_path).and_call_original
+
+ expect(project_tree_restorer.restore).to eq(false)
+ expect(shared.errors).to include('Incorrect JSON format')
+ end
+ end
+ end
+
+ context 'Light JSON' do
+ let(:user) { create(:user) }
+ let!(:project) { create(:project, :builds_disabled, :issues_disabled, name: 'project', path: 'project') }
+ let(:project_tree_restorer) { described_class.new(user: user, shared: shared, project: project) }
+ let(:restored_project_json) { project_tree_restorer.restore }
+
+ context 'with a simple project' do
+ before do
+ setup_import_export_config('light')
+ expect(restored_project_json).to eq(true)
+ end
+
+ it_behaves_like 'restores project successfully',
+ issues: 1,
+ labels: 2,
+ label_with_priorities: 'A project label',
+ milestones: 1,
+ first_issue_labels: 1,
+ services: 1
+
+ context 'when there is an existing build with build token' do
+ before do
+ create(:ci_build, token: 'abcd')
+ end
+
+ it_behaves_like 'restores project successfully',
+ issues: 1,
+ labels: 2,
+ label_with_priorities: 'A project label',
+ milestones: 1,
+ first_issue_labels: 1
+ end
+ end
+
+ context 'multiple pipelines reference the same external pull request' do
+ before do
+ setup_import_export_config('multi_pipeline_ref_one_external_pr')
+ expect(restored_project_json).to eq(true)
+ end
+
+ it_behaves_like 'restores project successfully',
+ issues: 0,
+ labels: 0,
+ milestones: 0,
+ ci_pipelines: 2,
+ external_pull_requests: 1,
+ import_failures: 0
+
+ it 'restores external pull request for the restored pipelines' do
+ external_pr = project.external_pull_requests.first
+
+ project.ci_pipelines.each do |pipeline_with_external_pr|
+ expect(pipeline_with_external_pr.external_pull_request).to be_persisted
+ expect(pipeline_with_external_pr.external_pull_request).to eq(external_pr)
+ end
+ end
+ end
+
+ context 'when post import action throws a non-retriable exception' do
+ let(:exception) { StandardError.new('post_import_error') }
+
+ before do
+ setup_import_export_config('light')
+ expect(project)
+ .to receive(:merge_requests)
+ .and_raise(exception)
+ end
+
+ it 'reports the post import error' do
+ expect(restored_project_json).to eq(false)
+ expect(shared.errors).to include('post_import_error')
+ end
+ end
+
+ context 'when post import action throws a retriable exception once' do
+ let(:exception) { GRPC::DeadlineExceeded.new }
+
+ before do
+ setup_import_export_config('light')
+ expect(project)
+ .to receive(:merge_requests)
+ .and_raise(exception)
+ expect(project)
+ .to receive(:merge_requests)
+ .and_call_original
+ expect(restored_project_json).to eq(true)
+ end
+
+ it_behaves_like 'restores project successfully',
+ issues: 1,
+ labels: 2,
+ label_with_priorities: 'A project label',
+ milestones: 1,
+ first_issue_labels: 1,
+ services: 1,
+ import_failures: 1
+
+ it 'records the failures in the database' do
+ import_failure = ImportFailure.last
+
+ expect(import_failure.project_id).to eq(project.id)
+ expect(import_failure.relation_key).to be_nil
+ expect(import_failure.relation_index).to be_nil
+ expect(import_failure.exception_class).to eq('GRPC::DeadlineExceeded')
+ expect(import_failure.exception_message).to be_present
+ expect(import_failure.correlation_id_value).not_to be_empty
+ expect(import_failure.created_at).to be_present
+ end
+ end
+
+ context 'when the project has overridden params in import data' do
+ before do
+ setup_import_export_config('light')
+ end
+
+ it 'handles string versions of visibility_level' do
+ # Project needs to be in a group for visibility level comparison
+ # to happen
+ group = create(:group)
+ project.group = group
+
+ project.create_import_data(data: { override_params: { visibility_level: Gitlab::VisibilityLevel::INTERNAL.to_s } })
+
+ expect(restored_project_json).to eq(true)
+ expect(project.visibility_level).to eq(Gitlab::VisibilityLevel::INTERNAL)
+ end
+
+ it 'overwrites the params stored in the JSON' do
+ project.create_import_data(data: { override_params: { description: "Overridden" } })
+
+ expect(restored_project_json).to eq(true)
+ expect(project.description).to eq("Overridden")
+ end
+
+ it 'does not allow setting params that are excluded from import_export settings' do
+ project.create_import_data(data: { override_params: { lfs_enabled: true } })
+
+ expect(restored_project_json).to eq(true)
+ expect(project.lfs_enabled).to be_falsey
+ end
+
+ it 'overrides project feature access levels' do
+ access_level_keys = project.project_feature.attributes.keys.select { |a| a =~ /_access_level/ }
+
+ # `pages_access_level` is not included, since it is not available in the public API
+ # and has a dependency on project's visibility level
+ # see ProjectFeature model
+ access_level_keys.delete('pages_access_level')
+
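+ # Map every remaining feature access level key to 'disabled' so the override can be asserted below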
+ disabled_access_levels = Hash[access_level_keys.collect { |item| [item, 'disabled'] }]
+
+ project.create_import_data(data: { override_params: disabled_access_levels })
+
+ expect(restored_project_json).to eq(true)
+
+ aggregate_failures do
+ access_level_keys.each do |key|
+ expect(project.public_send(key)).to eq(ProjectFeature::DISABLED)
+ end
+ end
+ end
+ end
+
+ context 'with a project that has a group' do
+ let!(:project) do
+ create(:project,
+ :builds_disabled,
+ :issues_disabled,
+ name: 'project',
+ path: 'project',
+ group: create(:group, visibility_level: Gitlab::VisibilityLevel::PRIVATE))
+ end
+
+ before do
+ setup_import_export_config('group')
+ expect(restored_project_json).to eq(true)
+ end
+
+ it_behaves_like 'restores project successfully',
+ issues: 3,
+ labels: 2,
+ label_with_priorities: 'A project label',
+ milestones: 2,
+ first_issue_labels: 1
+
+ it_behaves_like 'restores group correctly',
+ labels: 0,
+ milestones: 0,
+ first_issue_labels: 1
+
+ it 'restores issue states' do
+ expect(project.issues.with_state(:closed).count).to eq(1)
+ expect(project.issues.with_state(:opened).count).to eq(2)
+ end
+ end
+
+ context 'with existing group models' do
+ let!(:project) do
+ create(:project,
+ :builds_disabled,
+ :issues_disabled,
+ name: 'project',
+ path: 'project',
+ group: create(:group))
+ end
+
+ before do
+ setup_import_export_config('light')
+ end
+
+ it 'does not import any templated services' do
+ expect(restored_project_json).to eq(true)
+
+ expect(project.services.where(template: true).count).to eq(0)
+ end
+
+ it 'does not import any instance services' do
+ expect(restored_project_json).to eq(true)
+
+ expect(project.services.where(instance: true).count).to eq(0)
+ end
+
+ it 'imports labels' do
+ create(:group_label, name: 'Another label', group: project.group)
+
+ expect_any_instance_of(Gitlab::ImportExport::Shared).not_to receive(:error)
+
+ expect(restored_project_json).to eq(true)
+ expect(project.labels.count).to eq(1)
+ end
+
+ it 'imports milestones' do
+ create(:milestone, name: 'A milestone', group: project.group)
+
+ expect_any_instance_of(Gitlab::ImportExport::Shared).not_to receive(:error)
+
+ expect(restored_project_json).to eq(true)
+ expect(project.group.milestones.count).to eq(1)
+ expect(project.milestones.count).to eq(0)
+ end
+ end
+
+ context 'with clashing milestones on IID' do
+ let!(:project) do
+ create(:project,
+ :builds_disabled,
+ :issues_disabled,
+ name: 'project',
+ path: 'project',
+ group: create(:group))
+ end
+
+ before do
+ setup_import_export_config('milestone-iid')
+ end
+
+ it 'preserves the project milestone IID' do
+ expect_any_instance_of(Gitlab::ImportExport::Shared).not_to receive(:error)
+
+ expect(restored_project_json).to eq(true)
+ expect(project.milestones.count).to eq(2)
+ expect(Milestone.find_by_title('Another milestone').iid).to eq(1)
+ expect(Milestone.find_by_title('Group-level milestone').iid).to eq(2)
+ end
+ end
+
+ context 'with external authorization classification labels' do
+ before do
+ setup_import_export_config('light')
+ end
+
+ it 'converts empty external classification authorization labels to nil' do
+ project.create_import_data(data: { override_params: { external_authorization_classification_label: "" } })
+
+ expect(restored_project_json).to eq(true)
+ expect(project.external_authorization_classification_label).to be_nil
+ end
+
+ it 'preserves valid external classification authorization labels' do
+ project.create_import_data(data: { override_params: { external_authorization_classification_label: "foobar" } })
+
+ expect(restored_project_json).to eq(true)
+ expect(project.external_authorization_classification_label).to eq("foobar")
+ end
+ end
+ end
+
+ context 'Minimal JSON' do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+ let(:tree_hash) { { 'visibility_level' => visibility } }
+ let(:restorer) do
+ described_class.new(user: user, shared: shared, project: project)
+ end
+
+ before do
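+ # Stub the legacy JSON reader so the restorer consumes only the minimal tree_hash defined above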
+ allow_any_instance_of(Gitlab::ImportExport::JSON::LegacyReader::File).to receive(:valid?).and_return(true)
+ allow_any_instance_of(Gitlab::ImportExport::JSON::LegacyReader::File).to receive(:tree_hash) { tree_hash }
+ end
+
+ context 'no group visibility' do
+ let(:visibility) { Gitlab::VisibilityLevel::PRIVATE }
+
+ it 'uses the project visibility' do
+ expect(restorer.restore).to eq(true)
+ expect(restorer.project.visibility_level).to eq(visibility)
+ end
+ end
+
+ context 'with restricted internal visibility' do
+ describe 'internal project' do
+ let(:visibility) { Gitlab::VisibilityLevel::INTERNAL }
+
+ it 'uses private visibility' do
+ stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::INTERNAL])
+
+ expect(restorer.restore).to eq(true)
+ expect(restorer.project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ end
+ end
+ end
+
+ context 'with group visibility' do
+ before do
+ group = create(:group, visibility_level: group_visibility)
+
+ project.update(group: group)
+ end
+
+ context 'private group visibility' do
+ let(:group_visibility) { Gitlab::VisibilityLevel::PRIVATE }
+ let(:visibility) { Gitlab::VisibilityLevel::PUBLIC }
+
+ it 'uses the group visibility' do
+ expect(restorer.restore).to eq(true)
+ expect(restorer.project.visibility_level).to eq(group_visibility)
+ end
+ end
+
+ context 'public group visibility' do
+ let(:group_visibility) { Gitlab::VisibilityLevel::PUBLIC }
+ let(:visibility) { Gitlab::VisibilityLevel::PRIVATE }
+
+ it 'uses the project visibility' do
+ expect(restorer.restore).to eq(true)
+ expect(restorer.project.visibility_level).to eq(visibility)
+ end
+ end
+
+ context 'internal group visibility' do
+ let(:group_visibility) { Gitlab::VisibilityLevel::INTERNAL }
+ let(:visibility) { Gitlab::VisibilityLevel::PUBLIC }
+
+ it 'uses the group visibility' do
+ expect(restorer.restore).to eq(true)
+ expect(restorer.project.visibility_level).to eq(group_visibility)
+ end
+
+ context 'with restricted internal visibility' do
+ it 'sets private visibility' do
+ stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::INTERNAL])
+
+ expect(restorer.restore).to eq(true)
+ expect(restorer.project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ end
+ end
+ end
+ end
+ end
+
+ context 'JSON with invalid records' do
+ subject(:restored_project_json) { project_tree_restorer.restore }
+
+ let(:user) { create(:user) }
+ let!(:project) { create(:project, :builds_disabled, :issues_disabled, name: 'project', path: 'project') }
+ let(:project_tree_restorer) { described_class.new(user: user, shared: shared, project: project) }
+
+ before do
+ setup_import_export_config('with_invalid_records')
+
+ subject
+ end
+
+ context 'when failures occur because a relation fails to be processed' do
+ it_behaves_like 'restores project successfully',
+ issues: 0,
+ labels: 0,
+ label_with_priorities: nil,
+ milestones: 1,
+ first_issue_labels: 0,
+ services: 0,
+ import_failures: 1
+
+ it 'records the failures in the database' do
+ import_failure = ImportFailure.last
+
+ expect(import_failure.project_id).to eq(project.id)
+ expect(import_failure.relation_key).to eq('milestones')
+ expect(import_failure.relation_index).to be_present
+ expect(import_failure.exception_class).to eq('ActiveRecord::RecordInvalid')
+ expect(import_failure.exception_message).to be_present
+ expect(import_failure.correlation_id_value).not_to be_empty
+ expect(import_failure.created_at).to be_present
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/project/tree_saver_spec.rb b/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
new file mode 100644
index 00000000000..23360b725b9
--- /dev/null
+++ b/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
@@ -0,0 +1,346 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::Project::TreeSaver do
+ describe 'saves the project tree into a json object' do
+ let(:shared) { project.import_export_shared }
+ let(:project_tree_saver) { described_class.new(project: project, current_user: user, shared: shared) }
+ let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
+ let(:user) { create(:user) }
+ let!(:project) { setup_project }
+
+ before do
+ project.add_maintainer(user)
+ allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
+ allow_any_instance_of(MergeRequest).to receive(:source_branch_sha).and_return('ABCD')
+ allow_any_instance_of(MergeRequest).to receive(:target_branch_sha).and_return('DCBA')
+ end
+
+ after do
+ FileUtils.rm_rf(export_path)
+ end
+
+ it 'saves project successfully' do
+ expect(project_tree_saver.save).to be true
+ end
+
+ context 'JSON' do
+ let(:saved_project_json) do
+ project_tree_saver.save
+ project_json(project_tree_saver.full_path)
+ end
+
+ # This context is not duplicated in
+ # `spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb`
+ context 'with description override' do
+ let(:params) { { description: 'Foo Bar' } }
+ let(:project_tree_saver) { described_class.new(project: project, current_user: user, shared: shared, params: params) }
+
+ it 'overrides the project description' do
+ expect(saved_project_json).to include({ 'description' => params[:description] })
+ end
+ end
+
+ it 'saves the correct json' do
+ expect(saved_project_json).to include({ 'description' => 'description', 'visibility_level' => 20 })
+ end
+
+ it 'has approvals_before_merge set' do
+ expect(saved_project_json['approvals_before_merge']).to eq(1)
+ end
+
+ it 'has milestones' do
+ expect(saved_project_json['milestones']).not_to be_empty
+ end
+
+ it 'has merge requests' do
+ expect(saved_project_json['merge_requests']).not_to be_empty
+ end
+
+ it 'has merge request\'s milestones' do
+ expect(saved_project_json['merge_requests'].first['milestone']).not_to be_empty
+ end
+
+ it 'has merge request\'s source branch SHA' do
+ expect(saved_project_json['merge_requests'].first['source_branch_sha']).to eq('ABCD')
+ end
+
+ it 'has merge request\'s target branch SHA' do
+ expect(saved_project_json['merge_requests'].first['target_branch_sha']).to eq('DCBA')
+ end
+
+ it 'has events' do
+ expect(saved_project_json['merge_requests'].first['milestone']['events']).not_to be_empty
+ end
+
+ it 'has snippets' do
+ expect(saved_project_json['snippets']).not_to be_empty
+ end
+
+ it 'has snippet notes' do
+ expect(saved_project_json['snippets'].first['notes']).not_to be_empty
+ end
+
+ it 'has releases' do
+ expect(saved_project_json['releases']).not_to be_empty
+ end
+
+ it 'has no author on releases' do
+ expect(saved_project_json['releases'].first['author']).to be_nil
+ end
+
+ it 'has the author ID on releases' do
+ expect(saved_project_json['releases'].first['author_id']).not_to be_nil
+ end
+
+ it 'has issues' do
+ expect(saved_project_json['issues']).not_to be_empty
+ end
+
+ it 'has issue comments' do
+ notes = saved_project_json['issues'].first['notes']
+
+ expect(notes).not_to be_empty
+ expect(notes.first['type']).to eq('DiscussionNote')
+ end
+
+ it 'has issue assignees' do
+ expect(saved_project_json['issues'].first['issue_assignees']).not_to be_empty
+ end
+
+ it 'has an author on issue comments' do
+ expect(saved_project_json['issues'].first['notes'].first['author']).not_to be_empty
+ end
+
+ it 'has project members' do
+ expect(saved_project_json['project_members']).not_to be_empty
+ end
+
+ it 'has merge request diffs' do
+ expect(saved_project_json['merge_requests'].first['merge_request_diff']).not_to be_empty
+ end
+
+ it 'has merge request diff files' do
+ expect(saved_project_json['merge_requests'].first['merge_request_diff']['merge_request_diff_files']).not_to be_empty
+ end
+
+ it 'has merge request diff commits' do
+ expect(saved_project_json['merge_requests'].first['merge_request_diff']['merge_request_diff_commits']).not_to be_empty
+ end
+
+ it 'has merge request comments' do
+ expect(saved_project_json['merge_requests'].first['notes']).not_to be_empty
+ end
+
+ it 'has an author on merge request comments' do
+ expect(saved_project_json['merge_requests'].first['notes'].first['author']).not_to be_empty
+ end
+
+ it 'has pipeline stages' do
+ expect(saved_project_json.dig('ci_pipelines', 0, 'stages')).not_to be_empty
+ end
+
+ it 'has pipeline statuses' do
+ expect(saved_project_json.dig('ci_pipelines', 0, 'stages', 0, 'statuses')).not_to be_empty
+ end
+
+ it 'has pipeline builds' do
+ builds_count = saved_project_json
+ .dig('ci_pipelines', 0, 'stages', 0, 'statuses')
+ .count { |hash| hash['type'] == 'Ci::Build' }
+
+ expect(builds_count).to eq(1)
+ end
+
+ it 'does not derive `when` from YAML attributes but uses only the DB column' do
+ expect_any_instance_of(Gitlab::Ci::YamlProcessor).not_to receive(:build_attributes)
+
+ saved_project_json
+ end
+
+ it 'has pipeline commits' do
+ expect(saved_project_json['ci_pipelines']).not_to be_empty
+ end
+
+ it 'has ci pipeline notes' do
+ expect(saved_project_json['ci_pipelines'].first['notes']).not_to be_empty
+ end
+
+ it 'has labels with no associations' do
+ expect(saved_project_json['labels']).not_to be_empty
+ end
+
+ it 'has labels associated to records' do
+ expect(saved_project_json['issues'].first['label_links'].first['label']).not_to be_empty
+ end
+
+ it 'has project and group labels' do
+ label_types = saved_project_json['issues'].first['label_links'].map { |link| link['label']['type'] }
+
+ expect(label_types).to match_array(%w(ProjectLabel GroupLabel))
+ end
+
+ it 'has priorities associated to labels' do
+ priorities = saved_project_json['issues'].first['label_links'].flat_map { |link| link['label']['priorities'] }
+
+ expect(priorities).not_to be_empty
+ end
+
+ it 'has issue resource label events' do
+ expect(saved_project_json['issues'].first['resource_label_events']).not_to be_empty
+ end
+
+ it 'has merge request resource label events' do
+ expect(saved_project_json['merge_requests'].first['resource_label_events']).not_to be_empty
+ end
+
+ it 'saves the correct service type' do
+ expect(saved_project_json['services'].first['type']).to eq('CustomIssueTrackerService')
+ end
+
+ it 'saves the properties for a service' do
+ expect(saved_project_json['services'].first['properties']).to eq('one' => 'value')
+ end
+
+ it 'has project feature' do
+ project_feature = saved_project_json['project_feature']
+ expect(project_feature).not_to be_empty
+ expect(project_feature["issues_access_level"]).to eq(ProjectFeature::DISABLED)
+ expect(project_feature["wiki_access_level"]).to eq(ProjectFeature::ENABLED)
+ expect(project_feature["builds_access_level"]).to eq(ProjectFeature::PRIVATE)
+ end
+
+ it 'has custom attributes' do
+ expect(saved_project_json['custom_attributes'].count).to eq(2)
+ end
+
+ it 'has badges' do
+ expect(saved_project_json['project_badges'].count).to eq(2)
+ end
+
+ it 'does not complain about non UTF-8 characters in MR diff files' do
+ ActiveRecord::Base.connection.execute("UPDATE merge_request_diff_files SET diff = '---\n- :diff: !binary |-\n LS0tIC9kZXYvbnVsbAorKysgYi9pbWFnZXMvbnVjb3IucGRmCkBAIC0wLDAg\n KzEsMTY3OSBAQAorJVBERi0xLjUNJeLjz9MNCisxIDAgb2JqDTw8L01ldGFk\n YXR'")
+
+ expect(project_tree_saver.save).to be true
+ end
+
+ context 'group members' do
+ let(:user2) { create(:user, email: 'group@member.com') }
+ let(:member_emails) do
+ saved_project_json['project_members'].map do |pm|
+ pm['user']['email']
+ end
+ end
+
+ before do
+ Group.first.add_developer(user2)
+ end
+
+ it 'does not export group members if the user has no permission' do
+ Group.first.add_developer(user)
+
+ expect(member_emails).not_to include('group@member.com')
+ end
+
+ it 'does not export group members as maintainer' do
+ Group.first.add_maintainer(user)
+
+ expect(member_emails).not_to include('group@member.com')
+ end
+
+ it 'exports group members as group owner' do
+ Group.first.add_owner(user)
+
+ expect(member_emails).to include('group@member.com')
+ end
+
+ context 'as admin' do
+ let(:user) { create(:admin) }
+
+ it 'exports group members as admin' do
+ expect(member_emails).to include('group@member.com')
+ end
+
+ it 'exports group members as project members' do
+ member_types = saved_project_json['project_members'].map { |pm| pm['source_type'] }
+
+ expect(member_types).to all(eq('Project'))
+ end
+ end
+ end
+
+ context 'project attributes' do
+ it 'does not contain the runners token' do
+ expect(saved_project_json).not_to include("runners_token" => 'token')
+ end
+ end
+
+ it 'has a board and a list' do
+ expect(saved_project_json['boards'].first['lists']).not_to be_empty
+ end
+ end
+ end
+
+ def setup_project
+ release = create(:release)
+ group = create(:group)
+
+ project = create(:project,
+ :public,
+ :repository,
+ :issues_disabled,
+ :wiki_enabled,
+ :builds_private,
+ description: 'description',
+ releases: [release],
+ group: group,
+ approvals_before_merge: 1
+ )
+ allow(project).to receive(:commit).and_return(Commit.new(RepoHelpers.sample_commit, project))
+
+ issue = create(:issue, assignees: [user], project: project)
+ snippet = create(:project_snippet, project: project)
+ project_label = create(:label, project: project)
+ group_label = create(:group_label, group: group)
+ create(:label_link, label: project_label, target: issue)
+ create(:label_link, label: group_label, target: issue)
+ create(:label_priority, label: group_label, priority: 1)
+ milestone = create(:milestone, project: project)
+ merge_request = create(:merge_request, source_project: project, milestone: milestone)
+
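+ # The nil `when` value presumably exercises the export path that keeps the DB column instead of recomputing it from YAML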
+ ci_build = create(:ci_build, project: project, when: nil)
+ ci_build.pipeline.update(project: project)
+ create(:commit_status, project: project, pipeline: ci_build.pipeline)
+
+ create(:milestone, project: project)
+ create(:discussion_note, noteable: issue, project: project)
+ create(:note, noteable: merge_request, project: project)
+ create(:note, noteable: snippet, project: project)
+ create(:note_on_commit,
+ author: user,
+ project: project,
+ commit_id: ci_build.pipeline.sha)
+
+ create(:resource_label_event, label: project_label, issue: issue)
+ create(:resource_label_event, label: group_label, merge_request: merge_request)
+
+ create(:event, :created, target: milestone, project: project, author: user)
+ create(:service, project: project, type: 'CustomIssueTrackerService', category: 'issue_tracker', properties: { one: 'value' })
+
+ create(:project_custom_attribute, project: project)
+ create(:project_custom_attribute, project: project)
+
+ create(:project_badge, project: project)
+ create(:project_badge, project: project)
+
+ board = create(:board, project: project, name: 'TestBoard')
+ create(:list, board: board, position: 0, label: project_label)
+
+ project
+ end
+
+ def project_json(filename)
+ ::JSON.parse(IO.read(filename))
+ end
+end
diff --git a/spec/lib/gitlab/import_export/project_relation_factory_spec.rb b/spec/lib/gitlab/import_export/project_relation_factory_spec.rb
deleted file mode 100644
index 0ade7ac4fc7..00000000000
--- a/spec/lib/gitlab/import_export/project_relation_factory_spec.rb
+++ /dev/null
@@ -1,328 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Gitlab::ImportExport::ProjectRelationFactory do
- let(:group) { create(:group) }
- let(:project) { create(:project, :repository, group: group) }
- let(:members_mapper) { double('members_mapper').as_null_object }
- let(:merge_requests_mapping) { {} }
- let(:user) { create(:admin) }
- let(:excluded_keys) { [] }
- let(:created_object) do
- described_class.create(relation_sym: relation_sym,
- relation_hash: relation_hash,
- object_builder: Gitlab::ImportExport::GroupProjectObjectBuilder,
- members_mapper: members_mapper,
- merge_requests_mapping: merge_requests_mapping,
- user: user,
- importable: project,
- excluded_keys: excluded_keys)
- end
-
- context 'hook object' do
- let(:relation_sym) { :hooks }
- let(:id) { 999 }
- let(:service_id) { 99 }
- let(:original_project_id) { 8 }
- let(:token) { 'secret' }
-
- let(:relation_hash) do
- {
- 'id' => id,
- 'url' => 'https://example.json',
- 'project_id' => original_project_id,
- 'created_at' => '2016-08-12T09:41:03.462Z',
- 'updated_at' => '2016-08-12T09:41:03.462Z',
- 'service_id' => service_id,
- 'push_events' => true,
- 'issues_events' => false,
- 'confidential_issues_events' => false,
- 'merge_requests_events' => true,
- 'tag_push_events' => false,
- 'note_events' => true,
- 'enable_ssl_verification' => true,
- 'job_events' => false,
- 'wiki_page_events' => true,
- 'token' => token
- }
- end
-
- it 'does not have the original ID' do
- expect(created_object.id).not_to eq(id)
- end
-
- it 'does not have the original service_id' do
- expect(created_object.service_id).not_to eq(service_id)
- end
-
- it 'does not have the original project_id' do
- expect(created_object.project_id).not_to eq(original_project_id)
- end
-
- it 'has the new project_id' do
- expect(created_object.project_id).to eql(project.id)
- end
-
- it 'has a nil token' do
- expect(created_object.token).to eq(nil)
- end
-
- context 'original service exists' do
- let(:service_id) { create(:service, project: project).id }
-
- it 'does not have the original service_id' do
- expect(created_object.service_id).not_to eq(service_id)
- end
- end
-
- context 'excluded attributes' do
- let(:excluded_keys) { %w[url] }
-
- it 'are removed from the imported object' do
- expect(created_object.url).to be_nil
- end
- end
- end
-
- # Mocks an ActiveRecordish object with the dodgy columns
- class FooModel
- include ActiveModel::Model
-
- def initialize(params = {})
- params.each { |key, value| send("#{key}=", value) }
- end
-
- def values
- instance_variables.map { |ivar| instance_variable_get(ivar) }
- end
- end
-
- context 'merge_request object' do
- let(:relation_sym) { :merge_requests }
-
- let(:exported_member) do
- {
- "id" => 111,
- "access_level" => 30,
- "source_id" => 1,
- "source_type" => "Project",
- "user_id" => 3,
- "notification_level" => 3,
- "created_at" => "2016-11-18T09:29:42.634Z",
- "updated_at" => "2016-11-18T09:29:42.634Z",
- "user" => {
- "id" => user.id,
- "email" => user.email,
- "username" => user.username
- }
- }
- end
-
- let(:members_mapper) do
- Gitlab::ImportExport::MembersMapper.new(
- exported_members: [exported_member],
- user: user,
- importable: project)
- end
-
- let(:relation_hash) do
- {
- 'id' => 27,
- 'target_branch' => "feature",
- 'source_branch' => "feature_conflict",
- 'source_project_id' => project.id,
- 'target_project_id' => project.id,
- 'author_id' => user.id,
- 'assignee_id' => user.id,
- 'updated_by_id' => user.id,
- 'title' => "MR1",
- 'created_at' => "2016-06-14T15:02:36.568Z",
- 'updated_at' => "2016-06-14T15:02:56.815Z",
- 'state' => "opened",
- 'merge_status' => "unchecked",
- 'description' => "Description",
- 'position' => 0,
- 'source_branch_sha' => "ABCD",
- 'target_branch_sha' => "DCBA",
- 'merge_when_pipeline_succeeds' => true
- }
- end
-
- it 'has preloaded author' do
- expect(created_object.author).to equal(user)
- end
-
- it 'has preloaded updated_by' do
- expect(created_object.updated_by).to equal(user)
- end
-
- it 'has preloaded source project' do
- expect(created_object.source_project).to equal(project)
- end
-
- it 'has preloaded target project' do
- expect(created_object.source_project).to equal(project)
- end
- end
-
- context 'label object' do
- let(:relation_sym) { :labels }
- let(:relation_hash) do
- {
- "id": 3,
- "title": "test3",
- "color": "#428bca",
- "group_id": project.group.id,
- "created_at": "2016-07-22T08:55:44.161Z",
- "updated_at": "2016-07-22T08:55:44.161Z",
- "template": false,
- "description": "",
- "project_id": project.id,
- "type": "GroupLabel"
- }
- end
-
- it 'has preloaded project' do
- expect(created_object.project).to equal(project)
- end
-
- it 'has preloaded group' do
- expect(created_object.group).to equal(project.group)
- end
- end
-
- # `project_id`, `described_class.USER_REFERENCES`, noteable_id, target_id, and some project IDs are already
- # re-assigned by described_class.
- context 'Potentially hazardous foreign keys' do
- let(:relation_sym) { :hazardous_foo_model }
- let(:relation_hash) do
- {
- 'service_id' => 99,
- 'moved_to_id' => 99,
- 'namespace_id' => 99,
- 'ci_id' => 99,
- 'random_project_id' => 99,
- 'random_id' => 99,
- 'milestone_id' => 99,
- 'project_id' => 99,
- 'user_id' => 99
- }
- end
-
- class HazardousFooModel < FooModel
- attr_accessor :service_id, :moved_to_id, :namespace_id, :ci_id, :random_project_id, :random_id, :milestone_id, :project_id
- end
-
- before do
- allow(HazardousFooModel).to receive(:reflect_on_association).and_return(nil)
- end
-
- it 'does not preserve any foreign key IDs' do
- expect(created_object.values).not_to include(99)
- end
- end
-
- context 'overrided model with pluralized name' do
- let(:relation_sym) { :metrics }
-
- let(:relation_hash) do
- {
- 'id' => 99,
- 'merge_request_id' => 99,
- 'merged_at' => Time.now,
- 'merged_by_id' => 99,
- 'latest_closed_at' => nil,
- 'latest_closed_by_id' => nil
- }
- end
-
- it 'does not raise errors' do
- expect { created_object }.not_to raise_error
- end
- end
-
- context 'Project references' do
- let(:relation_sym) { :project_foo_model }
- let(:relation_hash) do
- Gitlab::ImportExport::ProjectRelationFactory::PROJECT_REFERENCES.map { |ref| { ref => 99 } }.inject(:merge)
- end
-
- class ProjectFooModel < FooModel
- attr_accessor(*Gitlab::ImportExport::ProjectRelationFactory::PROJECT_REFERENCES)
- end
-
- before do
- allow(ProjectFooModel).to receive(:reflect_on_association).and_return(nil)
- end
-
- it 'does not preserve any project foreign key IDs' do
- expect(created_object.values).not_to include(99)
- end
- end
-
- context 'Notes user references' do
- let(:relation_sym) { :notes }
- let(:new_user) { create(:user) }
- let(:exported_member) do
- {
- "id" => 111,
- "access_level" => 30,
- "source_id" => 1,
- "source_type" => "Project",
- "user_id" => 3,
- "notification_level" => 3,
- "created_at" => "2016-11-18T09:29:42.634Z",
- "updated_at" => "2016-11-18T09:29:42.634Z",
- "user" => {
- "id" => 999,
- "email" => new_user.email,
- "username" => new_user.username
- }
- }
- end
-
- let(:relation_hash) do
- {
- "id" => 4947,
- "note" => "merged",
- "noteable_type" => "MergeRequest",
- "author_id" => 999,
- "created_at" => "2016-11-18T09:29:42.634Z",
- "updated_at" => "2016-11-18T09:29:42.634Z",
- "project_id" => 1,
- "attachment" => {
- "url" => nil
- },
- "noteable_id" => 377,
- "system" => true,
- "author" => {
- "name" => "Administrator"
- },
- "events" => []
- }
- end
-
- let(:members_mapper) do
- Gitlab::ImportExport::MembersMapper.new(
- exported_members: [exported_member],
- user: user,
- importable: project)
- end
-
- it 'maps the right author to the imported note' do
- expect(created_object.author).to eq(new_user)
- end
- end
-
- context 'encrypted attributes' do
- let(:relation_sym) { 'Ci::Variable' }
- let(:relation_hash) do
- create(:ci_variable).as_json
- end
-
- it 'has no value for the encrypted attribute' do
- expect(created_object.value).to be_nil
- end
- end
-end
diff --git a/spec/lib/gitlab/import_export/project_tree_loader_spec.rb b/spec/lib/gitlab/import_export/project_tree_loader_spec.rb
deleted file mode 100644
index b22de5a3f7b..00000000000
--- a/spec/lib/gitlab/import_export/project_tree_loader_spec.rb
+++ /dev/null
@@ -1,49 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Gitlab::ImportExport::ProjectTreeLoader do
- let(:fixture) { 'spec/fixtures/lib/gitlab/import_export/with_duplicates.json' }
- let(:project_tree) { JSON.parse(File.read(fixture)) }
-
- context 'without de-duplicating entries' do
- let(:parsed_tree) do
- subject.load(fixture)
- end
-
- it 'parses the JSON into the expected tree' do
- expect(parsed_tree).to eq(project_tree)
- end
-
- it 'does not de-duplicate entries' do
- expect(parsed_tree['duped_hash_with_id']).not_to be(parsed_tree['array'][0]['duped_hash_with_id'])
- end
- end
-
- context 'with de-duplicating entries' do
- let(:parsed_tree) do
- subject.load(fixture, dedup_entries: true)
- end
-
- it 'parses the JSON into the expected tree' do
- expect(parsed_tree).to eq(project_tree)
- end
-
- it 'de-duplicates equal values' do
- expect(parsed_tree['duped_hash_with_id']).to be(parsed_tree['array'][0]['duped_hash_with_id'])
- expect(parsed_tree['duped_hash_with_id']).to be(parsed_tree['nested']['duped_hash_with_id'])
- expect(parsed_tree['duped_array']).to be(parsed_tree['array'][1]['duped_array'])
- expect(parsed_tree['duped_array']).to be(parsed_tree['nested']['duped_array'])
- end
-
- it 'does not de-duplicate hashes without IDs' do
- expect(parsed_tree['duped_hash_no_id']).to eq(parsed_tree['array'][2]['duped_hash_no_id'])
- expect(parsed_tree['duped_hash_no_id']).not_to be(parsed_tree['array'][2]['duped_hash_no_id'])
- end
-
- it 'keeps single entries intact' do
- expect(parsed_tree['simple']).to eq(42)
- expect(parsed_tree['nested']['array']).to eq(["don't touch"])
- end
- end
-end
diff --git a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb
deleted file mode 100644
index c899217d164..00000000000
--- a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb
+++ /dev/null
@@ -1,844 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-include ImportExport::CommonUtil
-
-describe Gitlab::ImportExport::ProjectTreeRestorer do
- include ImportExport::CommonUtil
-
- let(:shared) { project.import_export_shared }
-
- describe 'restore project tree' do
- before(:context) do
- # Using an admin for import, so we can check assignment of existing members
- @user = create(:admin)
- @existing_members = [
- create(:user, email: 'bernard_willms@gitlabexample.com'),
- create(:user, email: 'saul_will@gitlabexample.com')
- ]
-
- RSpec::Mocks.with_temporary_scope do
- @project = create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project')
- @shared = @project.import_export_shared
-
- setup_import_export_config('complex')
-
- allow_any_instance_of(Repository).to receive(:fetch_source_branch!).and_return(true)
- allow_any_instance_of(Gitlab::Git::Repository).to receive(:branch_exists?).and_return(false)
-
- expect_any_instance_of(Gitlab::Git::Repository).to receive(:create_branch).with('feature', 'DCBA')
- allow_any_instance_of(Gitlab::Git::Repository).to receive(:create_branch)
-
- project_tree_restorer = described_class.new(user: @user, shared: @shared, project: @project)
-
- @restored_project_json = project_tree_restorer.restore
- end
- end
-
- context 'JSON' do
- it 'restores models based on JSON' do
- expect(@restored_project_json).to be_truthy
- end
-
- it 'restore correct project features' do
- project = Project.find_by_path('project')
-
- expect(project.project_feature.issues_access_level).to eq(ProjectFeature::PRIVATE)
- expect(project.project_feature.builds_access_level).to eq(ProjectFeature::PRIVATE)
- expect(project.project_feature.snippets_access_level).to eq(ProjectFeature::PRIVATE)
- expect(project.project_feature.wiki_access_level).to eq(ProjectFeature::PRIVATE)
- expect(project.project_feature.merge_requests_access_level).to eq(ProjectFeature::PRIVATE)
- end
-
- it 'has the project description' do
- expect(Project.find_by_path('project').description).to eq('Nisi et repellendus ut enim quo accusamus vel magnam.')
- end
-
- it 'has the same label associated to two issues' do
- expect(ProjectLabel.find_by_title('test2').issues.count).to eq(2)
- end
-
- it 'has milestones associated to two separate issues' do
- expect(Milestone.find_by_description('test milestone').issues.count).to eq(2)
- end
-
- context 'when importing a project with cached_markdown_version and note_html' do
- context 'for an Issue' do
- it 'does not import note_html' do
- note_content = 'Quo reprehenderit aliquam qui dicta impedit cupiditate eligendi'
- issue_note = Issue.find_by(description: 'Aliquam enim illo et possimus.').notes.select { |n| n.note.match(/#{note_content}/)}.first
-
- expect(issue_note.note_html).to match(/#{note_content}/)
- end
- end
-
- context 'for a Merge Request' do
- it 'does not import note_html' do
- note_content = 'Sit voluptatibus eveniet architecto quidem'
- merge_request_note = MergeRequest.find_by(title: 'MR1').notes.select { |n| n.note.match(/#{note_content}/)}.first
-
- expect(merge_request_note.note_html).to match(/#{note_content}/)
- end
- end
- end
-
- it 'creates a valid pipeline note' do
- expect(Ci::Pipeline.find_by_sha('sha-notes').notes).not_to be_empty
- end
-
- it 'pipeline has the correct user ID' do
- expect(Ci::Pipeline.find_by_sha('sha-notes').user_id).to eq(@user.id)
- end
-
- it 'restores pipelines with missing ref' do
- expect(Ci::Pipeline.where(ref: nil)).not_to be_empty
- end
-
- it 'restores pipeline for merge request' do
- pipeline = Ci::Pipeline.find_by_sha('048721d90c449b244b7b4c53a9186b04330174ec')
-
- expect(pipeline).to be_valid
- expect(pipeline.tag).to be_falsey
- expect(pipeline.source).to eq('merge_request_event')
- expect(pipeline.merge_request.id).to be > 0
- expect(pipeline.merge_request.target_branch).to eq('feature')
- expect(pipeline.merge_request.source_branch).to eq('feature_conflict')
- end
-
- it 'preserves updated_at on issues' do
- issue = Issue.where(description: 'Aliquam enim illo et possimus.').first
-
- expect(issue.reload.updated_at.to_s).to eq('2016-06-14 15:02:47 UTC')
- end
-
- it 'has multiple issue assignees' do
- expect(Issue.find_by(title: 'Voluptatem').assignees).to contain_exactly(@user, *@existing_members)
- expect(Issue.find_by(title: 'Issue without assignees').assignees).to be_empty
- end
-
- it 'restores timelogs for issues' do
- timelog = Issue.find_by(title: 'issue_with_timelogs').timelogs.last
-
- aggregate_failures do
- expect(timelog.time_spent).to eq(72000)
- expect(timelog.spent_at).to eq("2019-12-27T00:00:00.000Z")
- end
- end
-
- it 'contains the merge access levels on a protected branch' do
- expect(ProtectedBranch.first.merge_access_levels).not_to be_empty
- end
-
- it 'contains the push access levels on a protected branch' do
- expect(ProtectedBranch.first.push_access_levels).not_to be_empty
- end
-
- it 'contains the create access levels on a protected tag' do
- expect(ProtectedTag.first.create_access_levels).not_to be_empty
- end
-
- it 'restores issue resource label events' do
- expect(Issue.find_by(title: 'Voluptatem').resource_label_events).not_to be_empty
- end
-
- it 'restores merge requests resource label events' do
- expect(MergeRequest.find_by(title: 'MR1').resource_label_events).not_to be_empty
- end
-
- it 'restores suggestion' do
- note = Note.find_by("note LIKE 'Saepe asperiores exercitationem non dignissimos laborum reiciendis et ipsum%'")
-
- expect(note.suggestions.count).to eq(1)
- expect(note.suggestions.first.from_content).to eq("Original line\n")
- end
-
- context 'event at forth level of the tree' do
- let(:event) { Event.where(action: 6).first }
-
- it 'restores the event' do
- expect(event).not_to be_nil
- end
-
- it 'has the action' do
- expect(event.action).not_to be_nil
- end
-
- it 'event belongs to note, belongs to merge request, belongs to a project' do
- expect(event.note.noteable.project).not_to be_nil
- end
- end
-
- it 'has the correct data for merge request diff files' do
- expect(MergeRequestDiffFile.where.not(diff: nil).count).to eq(55)
- end
-
- it 'has the correct data for merge request diff commits' do
- expect(MergeRequestDiffCommit.count).to eq(77)
- end
-
- it 'has the correct data for merge request latest_merge_request_diff' do
- MergeRequest.find_each do |merge_request|
- expect(merge_request.latest_merge_request_diff_id).to eq(merge_request.merge_request_diffs.maximum(:id))
- end
- end
-
- it 'has labels associated to label links, associated to issues' do
- expect(Label.first.label_links.first.target).not_to be_nil
- end
-
- it 'has project labels' do
- expect(ProjectLabel.count).to eq(3)
- end
-
- it 'has no group labels' do
- expect(GroupLabel.count).to eq(0)
- end
-
- it 'has issue boards' do
- expect(Project.find_by_path('project').boards.count).to eq(1)
- end
-
- it 'has lists associated with the issue board' do
- expect(Project.find_by_path('project').boards.find_by_name('TestBoardABC').lists.count).to eq(3)
- end
-
- it 'has a project feature' do
- expect(@project.project_feature).not_to be_nil
- end
-
- it 'has custom attributes' do
- expect(@project.custom_attributes.count).to eq(2)
- end
-
- it 'has badges' do
- expect(@project.project_badges.count).to eq(2)
- end
-
- it 'has snippets' do
- expect(@project.snippets.count).to eq(1)
- end
-
- it 'has award emoji for a snippet' do
- award_emoji = @project.snippets.first.award_emoji
-
- expect(award_emoji.map(&:name)).to contain_exactly('thumbsup', 'coffee')
- end
-
- it 'snippet has notes' do
- expect(@project.snippets.first.notes.count).to eq(1)
- end
-
- it 'snippet has award emojis on notes' do
- award_emoji = @project.snippets.first.notes.first.award_emoji.first
-
- expect(award_emoji.name).to eq('thumbsup')
- end
-
- it 'restores `ci_cd_settings` : `group_runners_enabled` setting' do
- expect(@project.ci_cd_settings.group_runners_enabled?).to eq(false)
- end
-
- it 'restores `auto_devops`' do
- expect(@project.auto_devops_enabled?).to eq(true)
- expect(@project.auto_devops.deploy_strategy).to eq('continuous')
- end
-
- it 'restores the correct service' do
- expect(CustomIssueTrackerService.first).not_to be_nil
- end
-
- it 'restores zoom meetings' do
- meetings = @project.issues.first.zoom_meetings
-
- expect(meetings.count).to eq(1)
- expect(meetings.first.url).to eq('https://zoom.us/j/123456789')
- end
-
- it 'restores sentry issues' do
- sentry_issue = @project.issues.first.sentry_issue
-
- expect(sentry_issue.sentry_issue_identifier).to eq(1234567891)
- end
-
- it 'has award emoji for an issue' do
- award_emoji = @project.issues.first.award_emoji.first
-
- expect(award_emoji.name).to eq('musical_keyboard')
- end
-
- it 'has award emoji for a note in an issue' do
- award_emoji = @project.issues.first.notes.first.award_emoji.first
-
- expect(award_emoji.name).to eq('clapper')
- end
-
- it 'restores container_expiration_policy' do
- policy = Project.find_by_path('project').container_expiration_policy
-
- aggregate_failures do
- expect(policy).to be_an_instance_of(ContainerExpirationPolicy)
- expect(policy).to be_persisted
- expect(policy.cadence).to eq('3month')
- end
- end
-
- it 'restores error_tracking_setting' do
- setting = @project.error_tracking_setting
-
- aggregate_failures do
- expect(setting.api_url).to eq("https://gitlab.example.com/api/0/projects/sentry-org/sentry-project")
- expect(setting.project_name).to eq("Sentry Project")
- expect(setting.organization_name).to eq("Sentry Org")
- end
- end
-
- it 'restores external pull requests' do
- external_pr = @project.external_pull_requests.last
-
- aggregate_failures do
- expect(external_pr.pull_request_iid).to eq(4)
- expect(external_pr.source_branch).to eq("feature")
- expect(external_pr.target_branch).to eq("master")
- expect(external_pr.status).to eq("open")
- end
- end
-
- it 'restores pipeline schedules' do
- pipeline_schedule = @project.pipeline_schedules.last
-
- aggregate_failures do
- expect(pipeline_schedule.description).to eq('Schedule Description')
- expect(pipeline_schedule.ref).to eq('master')
- expect(pipeline_schedule.cron).to eq('0 4 * * 0')
- expect(pipeline_schedule.cron_timezone).to eq('UTC')
- expect(pipeline_schedule.active).to eq(true)
- end
- end
-
- it 'restores releases with links' do
- release = @project.releases.last
- link = release.links.last
-
- aggregate_failures do
- expect(release.tag).to eq('release-1.1')
- expect(release.description).to eq('Some release notes')
- expect(release.name).to eq('release-1.1')
- expect(release.sha).to eq('901de3a8bd5573f4a049b1457d28bc1592ba6bf9')
- expect(release.released_at).to eq('2019-12-26T10:17:14.615Z')
-
- expect(link.url).to eq('http://localhost/namespace6/project6/-/jobs/140463678/artifacts/download')
- expect(link.name).to eq('release-1.1.dmg')
- end
- end
-
- context 'Merge requests' do
- it 'always has the new project as a target' do
- expect(MergeRequest.find_by_title('MR1').target_project).to eq(@project)
- end
-
- it 'has the same source project as originally if source/target are the same' do
- expect(MergeRequest.find_by_title('MR1').source_project).to eq(@project)
- end
-
- it 'has the new project as target if source/target differ' do
- expect(MergeRequest.find_by_title('MR2').target_project).to eq(@project)
- end
-
- it 'has no source if source/target differ' do
- expect(MergeRequest.find_by_title('MR2').source_project_id).to be_nil
- end
-
- it 'has award emoji' do
- award_emoji = MergeRequest.find_by_title('MR1').award_emoji
-
- expect(award_emoji.map(&:name)).to contain_exactly('thumbsup', 'drum')
- end
-
- context 'notes' do
- it 'has award emoji' do
- award_emoji = MergeRequest.find_by_title('MR1').notes.first.award_emoji.first
-
- expect(award_emoji.name).to eq('tada')
- end
- end
- end
-
- context 'tokens are regenerated' do
- it 'has new CI trigger tokens' do
- expect(Ci::Trigger.where(token: %w[cdbfasdf44a5958c83654733449e585 33a66349b5ad01fc00174af87804e40]))
- .to be_empty
- end
-
- it 'has a new CI build token' do
- expect(Ci::Build.where(token: 'abcd')).to be_empty
- end
- end
-
- context 'has restored the correct number of records' do
- it 'has the correct number of merge requests' do
- expect(@project.merge_requests.size).to eq(9)
- end
-
- it 'only restores valid triggers' do
- expect(@project.triggers.size).to eq(1)
- end
-
- it 'has the correct number of pipelines and statuses' do
- expect(@project.ci_pipelines.size).to eq(7)
-
- @project.ci_pipelines.order(:id).zip([2, 2, 2, 2, 2, 0, 0])
- .each do |(pipeline, expected_status_size)|
- expect(pipeline.statuses.size).to eq(expected_status_size)
- end
- end
- end
-
- context 'when restoring hierarchy of pipeline, stages and jobs' do
- it 'restores pipelines' do
- expect(Ci::Pipeline.all.count).to be 7
- end
-
- it 'restores pipeline stages' do
- expect(Ci::Stage.all.count).to be 6
- end
-
- it 'correctly restores association between stage and a pipeline' do
- expect(Ci::Stage.all).to all(have_attributes(pipeline_id: a_value > 0))
- end
-
- it 'restores statuses' do
- expect(CommitStatus.all.count).to be 10
- end
-
- it 'correctly restores association between a stage and a job' do
- expect(CommitStatus.all).to all(have_attributes(stage_id: a_value > 0))
- end
-
- it 'correctly restores association between a pipeline and a job' do
- expect(CommitStatus.all).to all(have_attributes(pipeline_id: a_value > 0))
- end
-
- it 'restores a Hash for CommitStatus options' do
- expect(CommitStatus.all.map(&:options).compact).to all(be_a(Hash))
- end
-
- it 'restores external pull request for the restored pipeline' do
- pipeline_with_external_pr = @project.ci_pipelines.order(:id).last
-
- expect(pipeline_with_external_pr.external_pull_request).to be_persisted
- end
- end
- end
- end
-
- shared_examples 'restores group correctly' do |**results|
- it 'has group label' do
- expect(project.group.labels.size).to eq(results.fetch(:labels, 0))
- expect(project.group.labels.where(type: "GroupLabel").where.not(project_id: nil).count).to eq(0)
- end
-
- it 'has group milestone' do
- expect(project.group.milestones.size).to eq(results.fetch(:milestones, 0))
- end
-
- it 'has the correct visibility level' do
- # INTERNAL in the `project.json`, group's is PRIVATE
- expect(project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
- end
- end
-
- context 'project.json file access check' do
- let(:user) { create(:user) }
- let!(:project) { create(:project, :builds_disabled, :issues_disabled, name: 'project', path: 'project') }
- let(:project_tree_restorer) do
- described_class.new(user: user, shared: shared, project: project)
- end
- let(:restored_project_json) { project_tree_restorer.restore }
-
- it 'does not read a symlink' do
- Dir.mktmpdir do |tmpdir|
- setup_symlink(tmpdir, 'project.json')
- allow(shared).to receive(:export_path).and_call_original
-
- expect(project_tree_restorer.restore).to eq(false)
- expect(shared.errors).to include('Incorrect JSON format')
- end
- end
- end
-
- context 'Light JSON' do
- let(:user) { create(:user) }
- let!(:project) { create(:project, :builds_disabled, :issues_disabled, name: 'project', path: 'project') }
- let(:project_tree_restorer) { described_class.new(user: user, shared: shared, project: project) }
- let(:restored_project_json) { project_tree_restorer.restore }
-
- context 'with a simple project' do
- before do
- setup_import_export_config('light')
- expect(restored_project_json).to eq(true)
- end
-
- it_behaves_like 'restores project successfully',
- issues: 1,
- labels: 2,
- label_with_priorities: 'A project label',
- milestones: 1,
- first_issue_labels: 1,
- services: 1
-
- context 'when there is an existing build with build token' do
- before do
- create(:ci_build, token: 'abcd')
- end
-
- it_behaves_like 'restores project successfully',
- issues: 1,
- labels: 2,
- label_with_priorities: 'A project label',
- milestones: 1,
- first_issue_labels: 1
- end
- end
-
- context 'when post import action throw non-retriable exception' do
- let(:exception) { StandardError.new('post_import_error') }
-
- before do
- setup_import_export_config('light')
- expect(project)
- .to receive(:merge_requests)
- .and_raise(exception)
- end
-
- it 'report post import error' do
- expect(restored_project_json).to eq(false)
- expect(shared.errors).to include('post_import_error')
- end
- end
-
- context 'when post import action throw retriable exception one time' do
- let(:exception) { GRPC::DeadlineExceeded.new }
-
- before do
- setup_import_export_config('light')
- expect(project)
- .to receive(:merge_requests)
- .and_raise(exception)
- expect(project)
- .to receive(:merge_requests)
- .and_call_original
- expect(restored_project_json).to eq(true)
- end
-
- it_behaves_like 'restores project successfully',
- issues: 1,
- labels: 2,
- label_with_priorities: 'A project label',
- milestones: 1,
- first_issue_labels: 1,
- services: 1,
- import_failures: 1
-
- it 'records the failures in the database' do
- import_failure = ImportFailure.last
-
- expect(import_failure.project_id).to eq(project.id)
- expect(import_failure.relation_key).to be_nil
- expect(import_failure.relation_index).to be_nil
- expect(import_failure.exception_class).to eq('GRPC::DeadlineExceeded')
- expect(import_failure.exception_message).to be_present
- expect(import_failure.correlation_id_value).not_to be_empty
- expect(import_failure.created_at).to be_present
- end
- end
-
- context 'when the project has overridden params in import data' do
- before do
- setup_import_export_config('light')
- end
-
- it 'handles string versions of visibility_level' do
- # Project needs to be in a group for visibility level comparison
- # to happen
- group = create(:group)
- project.group = group
-
- project.create_import_data(data: { override_params: { visibility_level: Gitlab::VisibilityLevel::INTERNAL.to_s } })
-
- expect(restored_project_json).to eq(true)
- expect(project.visibility_level).to eq(Gitlab::VisibilityLevel::INTERNAL)
- end
-
- it 'overwrites the params stored in the JSON' do
- project.create_import_data(data: { override_params: { description: "Overridden" } })
-
- expect(restored_project_json).to eq(true)
- expect(project.description).to eq("Overridden")
- end
-
- it 'does not allow setting params that are excluded from import_export settings' do
- project.create_import_data(data: { override_params: { lfs_enabled: true } })
-
- expect(restored_project_json).to eq(true)
- expect(project.lfs_enabled).to be_falsey
- end
-
- it 'overrides project feature access levels' do
- access_level_keys = project.project_feature.attributes.keys.select { |a| a =~ /_access_level/ }
-
- # `pages_access_level` is not included, since it is not available in the public API
- # and has a dependency on project's visibility level
- # see ProjectFeature model
- access_level_keys.delete('pages_access_level')
-
- disabled_access_levels = Hash[access_level_keys.collect { |item| [item, 'disabled'] }]
-
- project.create_import_data(data: { override_params: disabled_access_levels })
-
- expect(restored_project_json).to eq(true)
-
- aggregate_failures do
- access_level_keys.each do |key|
- expect(project.public_send(key)).to eq(ProjectFeature::DISABLED)
- end
- end
- end
- end
-
- context 'with a project that has a group' do
- let!(:project) do
- create(:project,
- :builds_disabled,
- :issues_disabled,
- name: 'project',
- path: 'project',
- group: create(:group, visibility_level: Gitlab::VisibilityLevel::PRIVATE))
- end
-
- before do
- setup_import_export_config('group')
- expect(restored_project_json).to eq(true)
- end
-
- it_behaves_like 'restores project successfully',
- issues: 3,
- labels: 2,
- label_with_priorities: 'A project label',
- milestones: 2,
- first_issue_labels: 1
-
- it_behaves_like 'restores group correctly',
- labels: 0,
- milestones: 0,
- first_issue_labels: 1
-
- it 'restores issue states' do
- expect(project.issues.with_state(:closed).count).to eq(1)
- expect(project.issues.with_state(:opened).count).to eq(2)
- end
- end
-
- context 'with existing group models' do
- let!(:project) do
- create(:project,
- :builds_disabled,
- :issues_disabled,
- name: 'project',
- path: 'project',
- group: create(:group))
- end
-
- before do
- setup_import_export_config('light')
- end
-
- it 'does not import any templated services' do
- expect(restored_project_json).to eq(true)
-
- expect(project.services.where(template: true).count).to eq(0)
- end
-
- it 'imports labels' do
- create(:group_label, name: 'Another label', group: project.group)
-
- expect_any_instance_of(Gitlab::ImportExport::Shared).not_to receive(:error)
-
- expect(restored_project_json).to eq(true)
- expect(project.labels.count).to eq(1)
- end
-
- it 'imports milestones' do
- create(:milestone, name: 'A milestone', group: project.group)
-
- expect_any_instance_of(Gitlab::ImportExport::Shared).not_to receive(:error)
-
- expect(restored_project_json).to eq(true)
- expect(project.group.milestones.count).to eq(1)
- expect(project.milestones.count).to eq(0)
- end
- end
-
- context 'with clashing milestones on IID' do
- let!(:project) do
- create(:project,
- :builds_disabled,
- :issues_disabled,
- name: 'project',
- path: 'project',
- group: create(:group))
- end
-
- before do
- setup_import_export_config('milestone-iid')
- end
-
- it 'preserves the project milestone IID' do
- expect_any_instance_of(Gitlab::ImportExport::Shared).not_to receive(:error)
-
- expect(restored_project_json).to eq(true)
- expect(project.milestones.count).to eq(2)
- expect(Milestone.find_by_title('Another milestone').iid).to eq(1)
- expect(Milestone.find_by_title('Group-level milestone').iid).to eq(2)
- end
- end
-
- context 'with external authorization classification labels' do
- before do
- setup_import_export_config('light')
- end
-
- it 'converts empty external classification authorization labels to nil' do
- project.create_import_data(data: { override_params: { external_authorization_classification_label: "" } })
-
- expect(restored_project_json).to eq(true)
- expect(project.external_authorization_classification_label).to be_nil
- end
-
- it 'preserves valid external classification authorization labels' do
- project.create_import_data(data: { override_params: { external_authorization_classification_label: "foobar" } })
-
- expect(restored_project_json).to eq(true)
- expect(project.external_authorization_classification_label).to eq("foobar")
- end
- end
- end
-
- context 'Minimal JSON' do
- let(:project) { create(:project) }
- let(:user) { create(:user) }
- let(:tree_hash) { { 'visibility_level' => visibility } }
- let(:restorer) do
- described_class.new(user: user, shared: shared, project: project)
- end
-
- before do
- expect(restorer).to receive(:read_tree_hash) { tree_hash }
- end
-
- context 'no group visibility' do
- let(:visibility) { Gitlab::VisibilityLevel::PRIVATE }
-
- it 'uses the project visibility' do
- expect(restorer.restore).to eq(true)
- expect(restorer.project.visibility_level).to eq(visibility)
- end
- end
-
- context 'with restricted internal visibility' do
- describe 'internal project' do
- let(:visibility) { Gitlab::VisibilityLevel::INTERNAL }
-
- it 'uses private visibility' do
- stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::INTERNAL])
-
- expect(restorer.restore).to eq(true)
- expect(restorer.project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
- end
- end
- end
-
- context 'with group visibility' do
- before do
- group = create(:group, visibility_level: group_visibility)
-
- project.update(group: group)
- end
-
- context 'private group visibility' do
- let(:group_visibility) { Gitlab::VisibilityLevel::PRIVATE }
- let(:visibility) { Gitlab::VisibilityLevel::PUBLIC }
-
- it 'uses the group visibility' do
- expect(restorer.restore).to eq(true)
- expect(restorer.project.visibility_level).to eq(group_visibility)
- end
- end
-
- context 'public group visibility' do
- let(:group_visibility) { Gitlab::VisibilityLevel::PUBLIC }
- let(:visibility) { Gitlab::VisibilityLevel::PRIVATE }
-
- it 'uses the project visibility' do
- expect(restorer.restore).to eq(true)
- expect(restorer.project.visibility_level).to eq(visibility)
- end
- end
-
- context 'internal group visibility' do
- let(:group_visibility) { Gitlab::VisibilityLevel::INTERNAL }
- let(:visibility) { Gitlab::VisibilityLevel::PUBLIC }
-
- it 'uses the group visibility' do
- expect(restorer.restore).to eq(true)
- expect(restorer.project.visibility_level).to eq(group_visibility)
- end
-
- context 'with restricted internal visibility' do
- it 'sets private visibility' do
- stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::INTERNAL])
-
- expect(restorer.restore).to eq(true)
- expect(restorer.project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
- end
- end
- end
- end
- end
-
- context 'JSON with invalid records' do
- subject(:restored_project_json) { project_tree_restorer.restore }
-
- let(:user) { create(:user) }
- let!(:project) { create(:project, :builds_disabled, :issues_disabled, name: 'project', path: 'project') }
- let(:project_tree_restorer) { described_class.new(user: user, shared: shared, project: project) }
-
- before do
- setup_import_export_config('with_invalid_records')
-
- subject
- end
-
- context 'when failures occur because a relation fails to be processed' do
- it_behaves_like 'restores project successfully',
- issues: 0,
- labels: 0,
- label_with_priorities: nil,
- milestones: 1,
- first_issue_labels: 0,
- services: 0,
- import_failures: 1
-
- it 'records the failures in the database' do
- import_failure = ImportFailure.last
-
- expect(import_failure.project_id).to eq(project.id)
- expect(import_failure.relation_key).to eq('milestones')
- expect(import_failure.relation_index).to be_present
- expect(import_failure.exception_class).to eq('ActiveRecord::RecordInvalid')
- expect(import_failure.exception_message).to be_present
- expect(import_failure.correlation_id_value).not_to be_empty
- expect(import_failure.created_at).to be_present
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/import_export/project_tree_saver_spec.rb b/spec/lib/gitlab/import_export/project_tree_saver_spec.rb
deleted file mode 100644
index 126ac289a56..00000000000
--- a/spec/lib/gitlab/import_export/project_tree_saver_spec.rb
+++ /dev/null
@@ -1,397 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Gitlab::ImportExport::ProjectTreeSaver do
- describe 'saves the project tree into a json object' do
- let(:shared) { project.import_export_shared }
- let(:project_tree_saver) { described_class.new(project: project, current_user: user, shared: shared) }
- let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
- let(:user) { create(:user) }
- let!(:project) { setup_project }
-
- before do
- project.add_maintainer(user)
- allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
- allow_any_instance_of(MergeRequest).to receive(:source_branch_sha).and_return('ABCD')
- allow_any_instance_of(MergeRequest).to receive(:target_branch_sha).and_return('DCBA')
- end
-
- after do
- FileUtils.rm_rf(export_path)
- end
-
- it 'saves project successfully' do
- expect(project_tree_saver.save).to be true
- end
-
- context ':export_fast_serialize feature flag checks' do
- before do
- expect(Gitlab::ImportExport::Reader).to receive(:new).with(shared: shared).and_return(reader)
- expect(reader).to receive(:project_tree).and_return(project_tree)
- end
-
- let(:serializer) { instance_double('Gitlab::ImportExport::FastHashSerializer') }
- let(:reader) { instance_double('Gitlab::ImportExport::Reader') }
- let(:project_tree) do
- {
- include: [{ issues: { include: [] } }],
- preload: { issues: nil }
- }
- end
-
- context 'when :export_fast_serialize feature is enabled' do
- before do
- stub_feature_flags(export_fast_serialize: true)
- end
-
- it 'uses FastHashSerializer' do
- expect(Gitlab::ImportExport::FastHashSerializer)
- .to receive(:new)
- .with(project, project_tree)
- .and_return(serializer)
-
- expect(serializer).to receive(:execute)
-
- project_tree_saver.save
- end
- end
-
- context 'when :export_fast_serialize feature is disabled' do
- before do
- stub_feature_flags(export_fast_serialize: false)
- end
-
- it 'is serialized via built-in `as_json`' do
- expect(project).to receive(:as_json).with(project_tree)
-
- project_tree_saver.save
- end
- end
- end
-
- # It is mostly duplicated in
- # `spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb`
- # except:
- # context 'with description override' do
- # context 'group members' do
- # ^ These are specific for the ProjectTreeSaver
- context 'JSON' do
- let(:saved_project_json) do
- project_tree_saver.save
- project_json(project_tree_saver.full_path)
- end
-
- # It is not duplicated in
- # `spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb`
- context 'with description override' do
- let(:params) { { description: 'Foo Bar' } }
- let(:project_tree_saver) { described_class.new(project: project, current_user: user, shared: shared, params: params) }
-
- it 'overrides the project description' do
- expect(saved_project_json).to include({ 'description' => params[:description] })
- end
- end
-
- it 'saves the correct json' do
- expect(saved_project_json).to include({ 'description' => 'description', 'visibility_level' => 20 })
- end
-
- it 'has approvals_before_merge set' do
- expect(saved_project_json['approvals_before_merge']).to eq(1)
- end
-
- it 'has milestones' do
- expect(saved_project_json['milestones']).not_to be_empty
- end
-
- it 'has merge requests' do
- expect(saved_project_json['merge_requests']).not_to be_empty
- end
-
- it 'has merge request\'s milestones' do
- expect(saved_project_json['merge_requests'].first['milestone']).not_to be_empty
- end
-
- it 'has merge request\'s source branch SHA' do
- expect(saved_project_json['merge_requests'].first['source_branch_sha']).to eq('ABCD')
- end
-
- it 'has merge request\'s target branch SHA' do
- expect(saved_project_json['merge_requests'].first['target_branch_sha']).to eq('DCBA')
- end
-
- it 'has events' do
- expect(saved_project_json['merge_requests'].first['milestone']['events']).not_to be_empty
- end
-
- it 'has snippets' do
- expect(saved_project_json['snippets']).not_to be_empty
- end
-
- it 'has snippet notes' do
- expect(saved_project_json['snippets'].first['notes']).not_to be_empty
- end
-
- it 'has releases' do
- expect(saved_project_json['releases']).not_to be_empty
- end
-
- it 'has no author on releases' do
- expect(saved_project_json['releases'].first['author']).to be_nil
- end
-
- it 'has the author ID on releases' do
- expect(saved_project_json['releases'].first['author_id']).not_to be_nil
- end
-
- it 'has issues' do
- expect(saved_project_json['issues']).not_to be_empty
- end
-
- it 'has issue comments' do
- notes = saved_project_json['issues'].first['notes']
-
- expect(notes).not_to be_empty
- expect(notes.first['type']).to eq('DiscussionNote')
- end
-
- it 'has issue assignees' do
- expect(saved_project_json['issues'].first['issue_assignees']).not_to be_empty
- end
-
- it 'has author on issue comments' do
- expect(saved_project_json['issues'].first['notes'].first['author']).not_to be_empty
- end
-
- it 'has project members' do
- expect(saved_project_json['project_members']).not_to be_empty
- end
-
- it 'has merge requests diffs' do
- expect(saved_project_json['merge_requests'].first['merge_request_diff']).not_to be_empty
- end
-
- it 'has merge request diff files' do
- expect(saved_project_json['merge_requests'].first['merge_request_diff']['merge_request_diff_files']).not_to be_empty
- end
-
- it 'has merge request diff commits' do
- expect(saved_project_json['merge_requests'].first['merge_request_diff']['merge_request_diff_commits']).not_to be_empty
- end
-
- it 'has merge requests comments' do
- expect(saved_project_json['merge_requests'].first['notes']).not_to be_empty
- end
-
- it 'has author on merge requests comments' do
- expect(saved_project_json['merge_requests'].first['notes'].first['author']).not_to be_empty
- end
-
- it 'has pipeline stages' do
- expect(saved_project_json.dig('ci_pipelines', 0, 'stages')).not_to be_empty
- end
-
- it 'has pipeline statuses' do
- expect(saved_project_json.dig('ci_pipelines', 0, 'stages', 0, 'statuses')).not_to be_empty
- end
-
- it 'has pipeline builds' do
- builds_count = saved_project_json
- .dig('ci_pipelines', 0, 'stages', 0, 'statuses')
- .count { |hash| hash['type'] == 'Ci::Build' }
-
- expect(builds_count).to eq(1)
- end
-
- it 'has no when YML attributes but only the DB column' do
- expect_any_instance_of(Gitlab::Ci::YamlProcessor).not_to receive(:build_attributes)
-
- saved_project_json
- end
-
- it 'has pipeline commits' do
- expect(saved_project_json['ci_pipelines']).not_to be_empty
- end
-
- it 'has ci pipeline notes' do
- expect(saved_project_json['ci_pipelines'].first['notes']).not_to be_empty
- end
-
- it 'has labels with no associations' do
- expect(saved_project_json['labels']).not_to be_empty
- end
-
- it 'has labels associated to records' do
- expect(saved_project_json['issues'].first['label_links'].first['label']).not_to be_empty
- end
-
- it 'has project and group labels' do
- label_types = saved_project_json['issues'].first['label_links'].map { |link| link['label']['type'] }
-
- expect(label_types).to match_array(%w(ProjectLabel GroupLabel))
- end
-
- it 'has priorities associated to labels' do
- priorities = saved_project_json['issues'].first['label_links'].flat_map { |link| link['label']['priorities'] }
-
- expect(priorities).not_to be_empty
- end
-
- it 'has issue resource label events' do
- expect(saved_project_json['issues'].first['resource_label_events']).not_to be_empty
- end
-
- it 'has merge request resource label events' do
- expect(saved_project_json['merge_requests'].first['resource_label_events']).not_to be_empty
- end
-
- it 'saves the correct service type' do
- expect(saved_project_json['services'].first['type']).to eq('CustomIssueTrackerService')
- end
-
- it 'saves the properties for a service' do
- expect(saved_project_json['services'].first['properties']).to eq('one' => 'value')
- end
-
- it 'has project feature' do
- project_feature = saved_project_json['project_feature']
- expect(project_feature).not_to be_empty
- expect(project_feature["issues_access_level"]).to eq(ProjectFeature::DISABLED)
- expect(project_feature["wiki_access_level"]).to eq(ProjectFeature::ENABLED)
- expect(project_feature["builds_access_level"]).to eq(ProjectFeature::PRIVATE)
- end
-
- it 'has custom attributes' do
- expect(saved_project_json['custom_attributes'].count).to eq(2)
- end
-
- it 'has badges' do
- expect(saved_project_json['project_badges'].count).to eq(2)
- end
-
- it 'does not complain about non UTF-8 characters in MR diff files' do
- ActiveRecord::Base.connection.execute("UPDATE merge_request_diff_files SET diff = '---\n- :diff: !binary |-\n LS0tIC9kZXYvbnVsbAorKysgYi9pbWFnZXMvbnVjb3IucGRmCkBAIC0wLDAg\n KzEsMTY3OSBAQAorJVBERi0xLjUNJeLjz9MNCisxIDAgb2JqDTw8L01ldGFk\n YXR'")
-
- expect(project_tree_saver.save).to be true
- end
-
- context 'group members' do
- let(:user2) { create(:user, email: 'group@member.com') }
- let(:member_emails) do
- saved_project_json['project_members'].map do |pm|
- pm['user']['email']
- end
- end
-
- before do
- Group.first.add_developer(user2)
- end
-
- it 'does not export group members if it has no permission' do
- Group.first.add_developer(user)
-
- expect(member_emails).not_to include('group@member.com')
- end
-
- it 'does not export group members as maintainer' do
- Group.first.add_maintainer(user)
-
- expect(member_emails).not_to include('group@member.com')
- end
-
- it 'exports group members as group owner' do
- Group.first.add_owner(user)
-
- expect(member_emails).to include('group@member.com')
- end
-
- context 'as admin' do
- let(:user) { create(:admin) }
-
- it 'exports group members as admin' do
- expect(member_emails).to include('group@member.com')
- end
-
- it 'exports group members as project members' do
- member_types = saved_project_json['project_members'].map { |pm| pm['source_type'] }
-
- expect(member_types).to all(eq('Project'))
- end
- end
- end
-
- context 'project attributes' do
- it 'does not contain the runners token' do
- expect(saved_project_json).not_to include("runners_token" => 'token')
- end
- end
-
- it 'has a board and a list' do
- expect(saved_project_json['boards'].first['lists']).not_to be_empty
- end
- end
- end
-
- def setup_project
- release = create(:release)
- group = create(:group)
-
- project = create(:project,
- :public,
- :repository,
- :issues_disabled,
- :wiki_enabled,
- :builds_private,
- description: 'description',
- releases: [release],
- group: group,
- approvals_before_merge: 1
- )
- allow(project).to receive(:commit).and_return(Commit.new(RepoHelpers.sample_commit, project))
-
- issue = create(:issue, assignees: [user], project: project)
- snippet = create(:project_snippet, project: project)
- project_label = create(:label, project: project)
- group_label = create(:group_label, group: group)
- create(:label_link, label: project_label, target: issue)
- create(:label_link, label: group_label, target: issue)
- create(:label_priority, label: group_label, priority: 1)
- milestone = create(:milestone, project: project)
- merge_request = create(:merge_request, source_project: project, milestone: milestone)
-
- ci_build = create(:ci_build, project: project, when: nil)
- ci_build.pipeline.update(project: project)
- create(:commit_status, project: project, pipeline: ci_build.pipeline)
-
- create(:milestone, project: project)
- create(:discussion_note, noteable: issue, project: project)
- create(:note, noteable: merge_request, project: project)
- create(:note, noteable: snippet, project: project)
- create(:note_on_commit,
- author: user,
- project: project,
- commit_id: ci_build.pipeline.sha)
-
- create(:resource_label_event, label: project_label, issue: issue)
- create(:resource_label_event, label: group_label, merge_request: merge_request)
-
- create(:event, :created, target: milestone, project: project, author: user)
- create(:service, project: project, type: 'CustomIssueTrackerService', category: 'issue_tracker', properties: { one: 'value' })
-
- create(:project_custom_attribute, project: project)
- create(:project_custom_attribute, project: project)
-
- create(:project_badge, project: project)
- create(:project_badge, project: project)
-
- board = create(:board, project: project, name: 'TestBoard')
- create(:list, board: board, position: 0, label: project_label)
-
- project
- end
-
- def project_json(filename)
- JSON.parse(IO.read(filename))
- end
-end
diff --git a/spec/lib/gitlab/import_export/relation_rename_service_spec.rb b/spec/lib/gitlab/import_export/relation_rename_service_spec.rb
deleted file mode 100644
index d62f5725f9e..00000000000
--- a/spec/lib/gitlab/import_export/relation_rename_service_spec.rb
+++ /dev/null
@@ -1,122 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Gitlab::ImportExport::RelationRenameService do
- include ImportExport::CommonUtil
-
- let(:renames) do
- {
- 'example_relation1' => 'new_example_relation1',
- 'example_relation2' => 'new_example_relation2'
- }
- end
-
- let(:user) { create(:admin) }
- let(:group) { create(:group, :nested) }
- let!(:project) { create(:project, :builds_disabled, :issues_disabled, group: group, name: 'project', path: 'project') }
- let(:shared) { project.import_export_shared }
-
- before do
- stub_const("#{described_class}::RENAMES", renames)
- end
-
- context 'when importing' do
- let(:project_tree_restorer) { Gitlab::ImportExport::ProjectTreeRestorer.new(user: user, shared: shared, project: project) }
- let(:file_content) { IO.read(File.join(shared.export_path, 'project.json')) }
- let(:json_file) { ActiveSupport::JSON.decode(file_content) }
-
- before do
- setup_import_export_config('complex')
-
- allow(ActiveSupport::JSON).to receive(:decode).and_call_original
- allow(ActiveSupport::JSON).to receive(:decode).with(file_content).and_return(json_file)
- end
-
- context 'when the file has only old relationship names' do
- # Configuring the json as an old version exported file, with only
- # the previous association with the old name
- before do
- renames.each do |old_name, _|
- json_file[old_name.to_s] = []
- end
- end
-
- it 'renames old relationships to the new name' do
- expect(json_file.keys).to include(*renames.keys)
-
- project_tree_restorer.restore
-
- expect(json_file.keys).to include(*renames.values)
- expect(json_file.keys).not_to include(*renames.keys)
- end
- end
-
- context 'when the file has both the old and new relationships' do
- # Configuring the json as the new version exported file, with both
- # the old association name and the new one
- before do
- renames.each do |old_name, new_name|
- json_file[old_name.to_s] = [1]
- json_file[new_name.to_s] = [2]
- end
- end
-
- it 'uses the new relationships and removes the old ones from the hash' do
- expect(json_file.keys).to include(*renames.keys)
-
- project_tree_restorer.restore
-
- expect(json_file.keys).to include(*renames.values)
- expect(json_file.values_at(*renames.values).flatten.uniq.first).to eq 2
- expect(json_file.keys).not_to include(*renames.keys)
- end
- end
-
- context 'when the file has only new relationship names' do
- # Configuring the json as the future version exported file, with only
- # the new association name
- before do
- renames.each do |_, new_name|
- json_file[new_name.to_s] = []
- end
- end
-
- it 'uses the new relationships' do
- expect(json_file.keys).not_to include(*renames.keys)
-
- project_tree_restorer.restore
-
- expect(json_file.keys).to include(*renames.values)
- end
- end
- end
-
- context 'when exporting' do
- let(:export_content_path) { project_tree_saver.full_path }
- let(:export_content_hash) { ActiveSupport::JSON.decode(File.read(export_content_path)) }
- let(:injected_hash) { renames.values.product([{}]).to_h }
- let(:relation_tree_saver) { Gitlab::ImportExport::RelationTreeSaver.new }
-
- let(:project_tree_saver) do
- Gitlab::ImportExport::ProjectTreeSaver.new(
- project: project, current_user: user, shared: shared)
- end
-
- before do
- allow(project_tree_saver).to receive(:tree_saver).and_return(relation_tree_saver)
- end
-
- it 'adds old relationships to the exported file' do
- # we inject relations with new names that should be rewritten
- expect(relation_tree_saver).to receive(:serialize).and_wrap_original do |method, *args|
- method.call(*args).merge(injected_hash)
- end
-
- expect(project_tree_saver.save).to eq(true)
-
- expect(export_content_hash.keys).to include(*renames.keys)
- expect(export_content_hash.keys).to include(*renames.values)
- end
- end
-end
diff --git a/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb
index edb2c0a131a..578418998c0 100644
--- a/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
# This spec is a lightweight version of:
-# * project_tree_restorer_spec.rb
+# * project/tree_restorer_spec.rb
#
# In depth testing is being done in the above specs.
# This spec tests that restore project works
@@ -25,7 +25,7 @@ describe Gitlab::ImportExport::RelationTreeRestorer do
described_class.new(
user: user,
shared: shared,
- tree_hash: tree_hash,
+ relation_reader: relation_reader,
importable: importable,
object_builder: object_builder,
members_mapper: members_mapper,
@@ -36,14 +36,7 @@ describe Gitlab::ImportExport::RelationTreeRestorer do
subject { relation_tree_restorer.restore }
- context 'when restoring a project' do
- let(:path) { 'spec/fixtures/lib/gitlab/import_export/complex/project.json' }
- let(:importable) { create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project') }
- let(:object_builder) { Gitlab::ImportExport::GroupProjectObjectBuilder }
- let(:relation_factory) { Gitlab::ImportExport::ProjectRelationFactory }
- let(:reader) { Gitlab::ImportExport::Reader.new(shared: shared) }
- let(:tree_hash) { importable_hash }
-
+ shared_examples 'import project successfully' do
it 'restores project tree' do
expect(subject).to eq(true)
end
@@ -66,4 +59,18 @@ describe Gitlab::ImportExport::RelationTreeRestorer do
end
end
end
+
+ context 'when restoring a project' do
+ let(:path) { 'spec/fixtures/lib/gitlab/import_export/complex/project.json' }
+ let(:importable) { create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project') }
+ let(:object_builder) { Gitlab::ImportExport::Project::ObjectBuilder }
+ let(:relation_factory) { Gitlab::ImportExport::Project::RelationFactory }
+ let(:reader) { Gitlab::ImportExport::Reader.new(shared: shared) }
+
+ context 'using legacy reader' do
+ let(:relation_reader) { Gitlab::ImportExport::JSON::LegacyReader::File.new(path, reader.project_relation_names) }
+
+ it_behaves_like 'import project successfully'
+ end
+ end
end
diff --git a/spec/lib/gitlab/import_export/relation_tree_saver_spec.rb b/spec/lib/gitlab/import_export/relation_tree_saver_spec.rb
deleted file mode 100644
index 2fc26c0e3d4..00000000000
--- a/spec/lib/gitlab/import_export/relation_tree_saver_spec.rb
+++ /dev/null
@@ -1,42 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Gitlab::ImportExport::RelationTreeSaver do
- let(:exportable) { create(:group) }
- let(:relation_tree_saver) { described_class.new }
- let(:tree) { {} }
-
- describe '#serialize' do
- context 'when :export_fast_serialize feature is enabled' do
- let(:serializer) { instance_double(Gitlab::ImportExport::FastHashSerializer) }
-
- before do
- stub_feature_flags(export_fast_serialize: true)
- end
-
- it 'uses FastHashSerializer' do
- expect(Gitlab::ImportExport::FastHashSerializer)
- .to receive(:new)
- .with(exportable, tree)
- .and_return(serializer)
-
- expect(serializer).to receive(:execute)
-
- relation_tree_saver.serialize(exportable, tree)
- end
- end
-
- context 'when :export_fast_serialize feature is disabled' do
- before do
- stub_feature_flags(export_fast_serialize: false)
- end
-
- it 'is serialized via built-in `as_json`' do
- expect(exportable).to receive(:as_json).with(tree)
-
- relation_tree_saver.serialize(exportable, tree)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/import_export/repo_saver_spec.rb b/spec/lib/gitlab/import_export/repo_saver_spec.rb
index fc1f782bfdd..a95d661ec3c 100644
--- a/spec/lib/gitlab/import_export/repo_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/repo_saver_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
describe Gitlab::ImportExport::RepoSaver do
describe 'bundle a project Git repo' do
- set(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
let!(:project) { create(:project, :repository) }
let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
let(:shared) { project.import_export_shared }
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 807b017a67c..91b88349ee0 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -72,6 +72,7 @@ Note:
- resolved_by_push
- discussion_id
- original_discussion_id
+- confidential
LabelLink:
- id
- target_type
@@ -133,6 +134,7 @@ Releases::Link:
- id
- url
- name
+- filepath
- created_at
- updated_at
ProjectMember:
@@ -270,6 +272,8 @@ MergeRequest::Metrics:
- diff_size
- modified_paths_size
- commits_count
+- first_approved_at
+- first_reassigned_at
Ci::Pipeline:
- id
- project_id
@@ -455,6 +459,7 @@ Service:
- active
- properties
- template
+- instance
- push_events
- issues_events
- commit_events
@@ -769,7 +774,9 @@ DesignManagement::Design:
- project_id
- filename
DesignManagement::Action:
+- id
- event
+- image_v432x230
DesignManagement::Version:
- id
- created_at
@@ -827,3 +834,6 @@ Epic:
- start_date_sourcing_epic_id
- due_date_sourcing_epic_id
- health_status
+EpicIssue:
+ - id
+ - relative_position
diff --git a/spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb b/spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb
new file mode 100644
index 00000000000..d72d41ddf38
--- /dev/null
+++ b/spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::SnippetRepoRestorer do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, namespace: user.namespace) }
+ let(:snippet) { create(:project_snippet, project: project, author: user) }
+
+ let(:shared) { project.import_export_shared }
+ let(:exporter) { Gitlab::ImportExport::SnippetsRepoSaver.new(project: project, shared: shared, current_user: user) }
+ let(:restorer) do
+ described_class.new(user: user,
+ shared: shared,
+ snippet: snippet,
+ path_to_bundle: snippet_bundle_path)
+ end
+
+ after do
+ FileUtils.rm_rf(shared.export_path)
+ end
+
+ shared_examples 'no bundle file present' do
+ it 'creates the repository from the database content' do
+ expect(snippet.repository_exists?).to be_falsey
+
+ aggregate_failures do
+ expect(restorer.restore).to be_truthy
+
+ expect(snippet.repository_exists?).to be_truthy
+ expect(snippet.snippet_repository).not_to be_nil
+
+ blob = snippet.repository.blob_at('HEAD', snippet.file_name)
+ expect(blob).not_to be_nil
+ expect(blob.data).to eq(snippet.content)
+ end
+ end
+ end
+
+ context 'when the snippet does not have a bundle file path' do
+ let(:snippet_bundle_path) { nil }
+
+ it_behaves_like 'no bundle file present'
+ end
+
+ context 'when the snippet bundle path is not present' do
+ let(:snippet_bundle_path) { 'foo' }
+
+ it_behaves_like 'no bundle file present'
+ end
+
+ context 'when the snippet bundle exists' do
+ let!(:snippet_with_repo) { create(:project_snippet, :repository, project: project) }
+ let(:bundle_path) { ::Gitlab::ImportExport.snippets_repo_bundle_path(shared.export_path) }
+ let(:snippet_bundle_path) { File.join(bundle_path, "#{snippet_with_repo.hexdigest}.bundle") }
+ let(:result) { exporter.save }
+
+ it 'creates the repository from the bundle' do
+ expect(exporter.save).to be_truthy
+
+ expect(snippet.repository_exists?).to be_falsey
+ expect(snippet.snippet_repository).to be_nil
+ expect(snippet.repository).to receive(:create_from_bundle).and_call_original
+
+ expect(restorer.restore).to be_truthy
+ expect(snippet.repository_exists?).to be_truthy
+ expect(snippet.snippet_repository).not_to be_nil
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/snippet_repo_saver_spec.rb b/spec/lib/gitlab/import_export/snippet_repo_saver_spec.rb
new file mode 100644
index 00000000000..7ad1ff213a1
--- /dev/null
+++ b/spec/lib/gitlab/import_export/snippet_repo_saver_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::SnippetRepoSaver do
+ describe 'bundle a project Git repo' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, namespace: user.namespace) }
+ let_it_be(:snippet) { create(:project_snippet, :repository, project: project, author: user) }
+ let(:shared) { project.import_export_shared }
+ let(:bundler) { described_class.new(project: project, shared: shared, repository: snippet.repository) }
+ let(:bundle_path) { ::Gitlab::ImportExport.snippets_repo_bundle_path(shared.export_path) }
+
+ around do |example|
+ FileUtils.mkdir_p(bundle_path)
+ example.run
+ ensure
+ FileUtils.rm_rf(bundle_path)
+ end
+
+ context 'with project snippet' do
+ it 'bundles the repo successfully' do
+ aggregate_failures do
+ expect(bundler.save).to be_truthy
+ expect(Dir.empty?(bundle_path)).to be_falsey
+ end
+ end
+
+ context 'when snippet does not have a repository' do
+ let(:snippet) { build(:personal_snippet) }
+
+ it 'returns true' do
+ expect(bundler.save).to be_truthy
+ end
+
+ it 'does not create any file' do
+ aggregate_failures do
+ expect(snippet.repository).not_to receive(:bundle_to_disk)
+
+ bundler.save
+
+ expect(Dir.empty?(bundle_path)).to be_truthy
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/snippets_repo_restorer_spec.rb b/spec/lib/gitlab/import_export/snippets_repo_restorer_spec.rb
new file mode 100644
index 00000000000..242f6f6b58c
--- /dev/null
+++ b/spec/lib/gitlab/import_export/snippets_repo_restorer_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::SnippetsRepoRestorer do
+ include GitHelpers
+
+ describe 'bundle a snippet Git repo' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, namespace: user.namespace) }
+ let_it_be(:snippet_with_repo) { create(:project_snippet, :repository, project: project, author: user) }
+ let_it_be(:snippet_without_repo) { create(:project_snippet, project: project, author: user) }
+
+ let(:shared) { project.import_export_shared }
+ let(:exporter) { Gitlab::ImportExport::SnippetsRepoSaver.new(current_user: user, project: project, shared: shared) }
+ let(:bundle_dir) { ::Gitlab::ImportExport.snippets_repo_bundle_path(shared.export_path) }
+ let(:restorer) do
+ described_class.new(user: user,
+ shared: shared,
+ project: project)
+ end
+ let(:service) { instance_double(Gitlab::ImportExport::SnippetRepoRestorer) }
+
+ before do
+ exporter.save
+ end
+
+ after do
+ FileUtils.rm_rf(shared.export_path)
+ end
+
+ it 'calls SnippetRepoRestorer for each snippet with the bundle path' do
+ allow(service).to receive(:restore).and_return(true)
+
+ expect(Gitlab::ImportExport::SnippetRepoRestorer).to receive(:new).with(hash_including(snippet: snippet_with_repo, path_to_bundle: bundle_path(snippet_with_repo))).and_return(service)
+ expect(Gitlab::ImportExport::SnippetRepoRestorer).to receive(:new).with(hash_including(snippet: snippet_without_repo, path_to_bundle: bundle_path(snippet_without_repo))).and_return(service)
+
+ expect(restorer.restore).to be_truthy
+ end
+
+ context 'when one snippet cannot be saved' do
+ it 'returns false and does not process other snippets' do
+ allow(Gitlab::ImportExport::SnippetRepoRestorer).to receive(:new).with(hash_including(snippet: snippet_with_repo)).and_return(service)
+ allow(service).to receive(:restore).and_return(false)
+
+ expect(Gitlab::ImportExport::SnippetRepoRestorer).not_to receive(:new).with(hash_including(snippet: snippet_without_repo))
+ expect(restorer.restore).to be_falsey
+ end
+ end
+
+ def bundle_path(snippet)
+ File.join(bundle_dir, ::Gitlab::ImportExport.snippet_repo_bundle_filename_for(snippet))
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/snippets_repo_saver_spec.rb b/spec/lib/gitlab/import_export/snippets_repo_saver_spec.rb
new file mode 100644
index 00000000000..5332990a975
--- /dev/null
+++ b/spec/lib/gitlab/import_export/snippets_repo_saver_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::SnippetsRepoSaver do
+ describe 'bundle a project Git repo' do
+ let_it_be(:user) { create(:user) }
+ let!(:project) { create(:project) }
+ let(:shared) { project.import_export_shared }
+ let(:bundler) { described_class.new(current_user: user, project: project, shared: shared) }
+
+ after do
+ FileUtils.rm_rf(shared.export_path)
+ end
+
+ it 'creates the snippet bundles dir if it does not exist' do
+ snippets_dir = ::Gitlab::ImportExport.snippets_repo_bundle_path(shared.export_path)
+ expect(Dir.exist?(snippets_dir)).to be_falsey
+
+ bundler.save
+
+ expect(Dir.exist?(snippets_dir)).to be_truthy
+ end
+
+ context 'when project does not have any snippet' do
+ it 'does not perform any action' do
+ expect(Gitlab::ImportExport::SnippetRepoSaver).not_to receive(:new)
+
+ bundler.save
+ end
+ end
+
+ context 'when project has snippets' do
+ let!(:snippet1) { create(:project_snippet, :repository, project: project, author: user) }
+ let!(:snippet2) { create(:project_snippet, project: project, author: user) }
+ let(:service) { instance_double(Gitlab::ImportExport::SnippetRepoSaver) }
+
+ it 'calls the SnippetRepoSaver for each snippet' do
+ allow(Gitlab::ImportExport::SnippetRepoSaver).to receive(:new).and_return(service)
+ expect(service).to receive(:save).and_return(true).twice
+
+ bundler.save
+ end
+
+ context 'when one snippet cannot be saved' do
+ it 'returns false and does not process other snippets' do
+ allow(Gitlab::ImportExport::SnippetRepoSaver).to receive(:new).with(hash_including(repository: snippet1.repository)).and_return(service)
+ allow(service).to receive(:save).and_return(false)
+
+ expect(Gitlab::ImportExport::SnippetRepoSaver).not_to receive(:new).with(hash_including(repository: snippet2.repository))
+ expect(bundler.save).to be_falsey
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/wiki_repo_saver_spec.rb b/spec/lib/gitlab/import_export/wiki_repo_saver_spec.rb
index 59a59223d8d..264272d2026 100644
--- a/spec/lib/gitlab/import_export/wiki_repo_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/wiki_repo_saver_spec.rb
@@ -4,8 +4,8 @@ require 'spec_helper'
describe Gitlab::ImportExport::WikiRepoSaver do
describe 'bundle a wiki Git repo' do
- set(:user) { create(:user) }
- let!(:project) { create(:project, :wiki_repo) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :wiki_repo) }
let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
let(:shared) { project.import_export_shared }
let(:wiki_bundler) { described_class.new(project: project, shared: shared) }
diff --git a/spec/lib/gitlab/incoming_email_spec.rb b/spec/lib/gitlab/incoming_email_spec.rb
index f5a6ea4d5b0..2dd45d18ee9 100644
--- a/spec/lib/gitlab/incoming_email_spec.rb
+++ b/spec/lib/gitlab/incoming_email_spec.rb
@@ -89,6 +89,17 @@ describe Gitlab::IncomingEmail do
it 'does not match emails with extra bits' do
expect(described_class.key_from_address('somereplies+somekey@example.com.someotherdomain.com')).to be nil
end
+
+ context 'when a custom wildcard address is used' do
+ let(:wildcard_address) { 'custom.address+%{key}@example.com' }
+
+ it 'finds key if email matches address pattern' do
+ key = described_class.key_from_address(
+ 'custom.address+foo@example.com', wildcard_address: wildcard_address
+ )
+ expect(key).to eq('foo')
+ end
+ end
end
context 'self.key_from_fallback_message_id' do
diff --git a/spec/lib/gitlab/job_waiter_spec.rb b/spec/lib/gitlab/job_waiter_spec.rb
index efa7fd4b975..da6a6a9149b 100644
--- a/spec/lib/gitlab/job_waiter_spec.rb
+++ b/spec/lib/gitlab/job_waiter_spec.rb
@@ -37,5 +37,40 @@ describe Gitlab::JobWaiter do
expect(result).to contain_exactly('a')
end
+
+ context 'when a label is provided' do
+ let(:waiter) { described_class.new(2, worker_label: 'Foo') }
+ let(:started_total) { double(:started_total) }
+ let(:timeouts_total) { double(:timeouts_total) }
+
+ before do
+ allow(Gitlab::Metrics).to receive(:counter)
+ .with(described_class::STARTED_METRIC, anything)
+ .and_return(started_total)
+
+ allow(Gitlab::Metrics).to receive(:counter)
+ .with(described_class::TIMEOUTS_METRIC, anything)
+ .and_return(timeouts_total)
+ end
+
+ it 'increments just job_waiter_started_total when all jobs complete' do
+ expect(started_total).to receive(:increment).with(worker: 'Foo')
+ expect(timeouts_total).not_to receive(:increment)
+
+ described_class.notify(waiter.key, 'a')
+ described_class.notify(waiter.key, 'b')
+
+ result = nil
+ expect { Timeout.timeout(1) { result = waiter.wait(2) } }.not_to raise_error
+ end
+
+ it 'increments job_waiter_started_total and job_waiter_timeouts_total when it times out' do
+ expect(started_total).to receive(:increment).with(worker: 'Foo')
+ expect(timeouts_total).to receive(:increment).with(worker: 'Foo')
+
+ result = nil
+ expect { Timeout.timeout(2) { result = waiter.wait(1) } }.not_to raise_error
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/json_cache_spec.rb b/spec/lib/gitlab/json_cache_spec.rb
index 9d986abb8dd..9379499f602 100644
--- a/spec/lib/gitlab/json_cache_spec.rb
+++ b/spec/lib/gitlab/json_cache_spec.rb
@@ -3,13 +3,12 @@
require 'spec_helper'
describe Gitlab::JsonCache do
+ let_it_be(:broadcast_message) { create(:broadcast_message) }
let(:backend) { double('backend').as_null_object }
let(:namespace) { 'geo' }
let(:key) { 'foo' }
let(:expanded_key) { "#{namespace}:#{key}:#{Gitlab::VERSION}:#{Rails.version}" }
- set(:broadcast_message) { create(:broadcast_message) }
-
subject(:cache) { described_class.new(namespace: namespace, backend: backend) }
describe '#active?' do
diff --git a/spec/lib/gitlab/kubernetes/helm/api_spec.rb b/spec/lib/gitlab/kubernetes/helm/api_spec.rb
index e493acd7bad..8147990ecc3 100644
--- a/spec/lib/gitlab/kubernetes/helm/api_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/api_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::Kubernetes::Helm::Api do
+describe Gitlab::Kubernetes::Helm::API do
let(:client) { double('kubernetes client') }
let(:helm) { described_class.new(client) }
let(:gitlab_namespace) { Gitlab::Kubernetes::Helm::NAMESPACE }
diff --git a/spec/lib/gitlab/kubernetes/helm/pod_spec.rb b/spec/lib/gitlab/kubernetes/helm/pod_spec.rb
index 24a734a2915..3c62219a9a5 100644
--- a/spec/lib/gitlab/kubernetes/helm/pod_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/pod_spec.rb
@@ -32,7 +32,7 @@ describe Gitlab::Kubernetes::Helm::Pod do
it 'generates the appropriate specifications for the container' do
container = subject.generate.spec.containers.first
expect(container.name).to eq('helm')
- expect(container.image).to eq('registry.gitlab.com/gitlab-org/cluster-integration/helm-install-image/releases/2.16.1-kube-1.13.12')
+ expect(container.image).to eq('registry.gitlab.com/gitlab-org/cluster-integration/helm-install-image/releases/2.16.3-kube-1.13.12')
expect(container.env.count).to eq(3)
expect(container.env.map(&:name)).to match_array([:HELM_VERSION, :TILLER_NAMESPACE, :COMMAND_SCRIPT])
expect(container.command).to match_array(["/bin/sh"])
diff --git a/spec/lib/gitlab/kubernetes/namespace_spec.rb b/spec/lib/gitlab/kubernetes/namespace_spec.rb
index d44a803410f..467b10e21b1 100644
--- a/spec/lib/gitlab/kubernetes/namespace_spec.rb
+++ b/spec/lib/gitlab/kubernetes/namespace_spec.rb
@@ -92,12 +92,14 @@ describe Gitlab::Kubernetes::Namespace do
it 'logs the error' do
expect(subject.send(:logger)).to receive(:error).with(
hash_including(
- exception: 'Kubeclient::HttpError',
+ exception: {
+ class: 'Kubeclient::HttpError',
+ message: 'system failure'
+ },
status_code: 500,
namespace: 'a_namespace',
class_name: 'Gitlab::Kubernetes::Namespace',
- event: :failed_to_create_namespace,
- message: 'system failure'
+ event: :failed_to_create_namespace
)
)
diff --git a/spec/lib/gitlab/language_detection_spec.rb b/spec/lib/gitlab/language_detection_spec.rb
index f558ce0d527..770fe2f80db 100644
--- a/spec/lib/gitlab/language_detection_spec.rb
+++ b/spec/lib/gitlab/language_detection_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe Gitlab::LanguageDetection do
- set(:project) { create(:project, :repository) }
- set(:ruby) { create(:programming_language, name: 'Ruby') }
- set(:haskell) { create(:programming_language, name: 'Haskell') }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:ruby) { create(:programming_language, name: 'Ruby') }
+ let_it_be(:haskell) { create(:programming_language, name: 'Haskell') }
let(:repository) { project.repository }
let(:detection) do
[{ value: 66.63, label: "Ruby", color: "#701516", highlight: "#701516" },
diff --git a/spec/lib/gitlab/legacy_github_import/importer_spec.rb b/spec/lib/gitlab/legacy_github_import/importer_spec.rb
index 7fef763f64d..af0bffa91a5 100644
--- a/spec/lib/gitlab/legacy_github_import/importer_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/importer_spec.rb
@@ -45,7 +45,7 @@ describe Gitlab::LegacyGithubImport::Importer do
allow(Rails).to receive(:cache).and_return(ActiveSupport::Cache::MemoryStore.new)
allow_any_instance_of(Octokit::Client).to receive(:rate_limit!).and_raise(Octokit::NotFound)
- allow_any_instance_of(Gitlab::Shell).to receive(:import_repository).and_raise(Gitlab::Shell::Error)
+ allow(project.wiki.repository).to receive(:import_repository).and_raise(Gitlab::Git::CommandError)
allow_any_instance_of(Octokit::Client).to receive(:user).and_return(octocat)
allow_any_instance_of(Octokit::Client).to receive(:labels).and_return([label1, label2])
@@ -169,14 +169,10 @@ describe Gitlab::LegacyGithubImport::Importer do
errors: [
{ type: :label, url: "#{api_root}/repos/octocat/Hello-World/labels/bug", errors: "Validation failed: Title can't be blank, Title is invalid" },
{ type: :issue, url: "#{api_root}/repos/octocat/Hello-World/issues/1348", errors: "Validation failed: Title can't be blank" },
- { type: :wiki, errors: "Gitlab::Shell::Error" }
+ { type: :wiki, errors: "Gitlab::Git::CommandError" }
]
}
- unless project.gitea_import?
- error[:errors] << { type: :release, url: "#{api_root}/repos/octocat/Hello-World/releases/2", errors: "Validation failed: Description can't be blank" }
- end
-
described_class.new(project).execute
expect(project.import_state.last_error).to eq error.to_json
diff --git a/spec/lib/gitlab/lograge/custom_options_spec.rb b/spec/lib/gitlab/lograge/custom_options_spec.rb
new file mode 100644
index 00000000000..48d06283b7a
--- /dev/null
+++ b/spec/lib/gitlab/lograge/custom_options_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Lograge::CustomOptions do
+ describe '.call' do
+ let(:params) do
+ {
+ 'controller' => 'ApplicationController',
+ 'action' => 'show',
+ 'format' => 'html',
+ 'a' => 'b'
+ }
+ end
+
+ let(:event) do
+ ActiveSupport::Notifications::Event.new(
+ 'test',
+ 1,
+ 2,
+ 'transaction_id',
+ { params: params, user_id: 'test' }
+ )
+ end
+
+ subject { described_class.call(event) }
+
+ it 'ignores some parameters' do
+ param_keys = subject[:params].map { |param| param[:key] }
+
+ expect(param_keys).not_to include(*described_class::IGNORE_PARAMS)
+ end
+
+ it 'formats the parameters' do
+ expect(subject[:params]).to eq([{ key: 'a', value: 'b' }])
+ end
+
+ it 'adds the current time' do
+ travel_to(5.days.ago) do
+ expected_time = Time.now.utc.iso8601(3)
+
+ expect(subject[:time]).to eq(expected_time)
+ end
+ end
+
+ it 'adds the user id' do
+ expect(subject[:user_id]).to eq('test')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/dashboard/finder_spec.rb b/spec/lib/gitlab/metrics/dashboard/finder_spec.rb
index 2d3b61e61ce..d772b0c7a5f 100644
--- a/spec/lib/gitlab/metrics/dashboard/finder_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/finder_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
describe Gitlab::Metrics::Dashboard::Finder, :use_clean_rails_memory_store_caching do
include MetricsDashboardHelpers
- set(:project) { create(:project) }
- set(:user) { create(:user) }
- set(:environment) { create(:environment, project: project) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:environment) { create(:environment, project: project) }
before do
project.add_maintainer(user)
diff --git a/spec/lib/gitlab/metrics/dashboard/stages/grafana_formatter_spec.rb b/spec/lib/gitlab/metrics/dashboard/stages/grafana_formatter_spec.rb
index 5c2ec6dae6b..e41004bb57e 100644
--- a/spec/lib/gitlab/metrics/dashboard/stages/grafana_formatter_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/stages/grafana_formatter_spec.rb
@@ -11,8 +11,9 @@ describe Gitlab::Metrics::Dashboard::Stages::GrafanaFormatter do
describe '#transform!' do
let(:grafana_dashboard) { JSON.parse(fixture_file('grafana/simplified_dashboard_response.json'), symbolize_names: true) }
let(:datasource) { JSON.parse(fixture_file('grafana/datasource_response.json'), symbolize_names: true) }
+ let(:expected_dashboard) { JSON.parse(fixture_file('grafana/expected_grafana_embed.json'), symbolize_names: true) }
- let(:dashboard) { described_class.new(project, {}, params).transform! }
+ subject(:dashboard) { described_class.new(project, {}, params).transform! }
let(:params) do
{
@@ -23,83 +24,34 @@ describe Gitlab::Metrics::Dashboard::Stages::GrafanaFormatter do
end
context 'when the query and resources are configured correctly' do
- let(:expected_dashboard) { JSON.parse(fixture_file('grafana/expected_grafana_embed.json'), symbolize_names: true) }
-
- it 'generates a gitlab-yml formatted dashboard' do
- expect(dashboard).to eq(expected_dashboard)
- end
+ it { is_expected.to eq expected_dashboard }
end
- context 'when the inputs are invalid' do
- shared_examples_for 'processing error' do
- it 'raises a processing error' do
- expect { dashboard }
- .to raise_error(Gitlab::Metrics::Dashboard::Stages::InputFormatValidator::DashboardProcessingError)
- end
- end
-
- context 'when the datasource is not proxyable' do
- before do
- params[:datasource][:access] = 'not-proxy'
- end
-
- it_behaves_like 'processing error'
+ context 'when a panelId is not included in the grafana_url' do
+ before do
+ params[:grafana_url].gsub!('&panelId=8', '')
end
- context 'when query param "panelId" is not specified' do
- before do
- params[:grafana_url].gsub!('panelId=8', '')
- end
-
- it_behaves_like 'processing error'
- end
-
- context 'when query param "from" is not specified' do
- before do
- params[:grafana_url].gsub!('from=1570397739557', '')
- end
-
- it_behaves_like 'processing error'
- end
+ it { is_expected.to eq expected_dashboard }
- context 'when query param "to" is not specified' do
+ context 'when there is also no valid panel in the dashboard' do
before do
- params[:grafana_url].gsub!('to=1570484139557', '')
+ params[:grafana_dashboard][:dashboard][:panels] = []
end
- it_behaves_like 'processing error'
- end
-
- context 'when the panel is not a graph' do
- before do
- params[:grafana_dashboard][:dashboard][:panels][0][:type] = 'singlestat'
+ it 'raises a processing error' do
+ expect { dashboard }.to raise_error(::Gitlab::Metrics::Dashboard::Errors::DashboardProcessingError)
end
-
- it_behaves_like 'processing error'
end
+ end
- context 'when the panel is not a line graph' do
- before do
- params[:grafana_dashboard][:dashboard][:panels][0][:lines] = false
- end
-
- it_behaves_like 'processing error'
- end
-
- context 'when the query dashboard includes undefined variables' do
- before do
- params[:grafana_url].gsub!('&var-instance=localhost:9121', '')
- end
-
- it_behaves_like 'processing error'
+ context 'when an input is invalid' do
+ before do
+ params[:datasource][:access] = 'not-proxy'
end
- context 'when the expression contains unsupported global variables' do
- before do
- params[:grafana_dashboard][:dashboard][:panels][0][:targets][0][:expr] = 'sum(important_metric[$__interval_ms])'
- end
-
- it_behaves_like 'processing error'
+ it 'raises a processing error' do
+ expect { dashboard }.to raise_error(::Gitlab::Metrics::Dashboard::Errors::DashboardProcessingError)
end
end
end
diff --git a/spec/lib/gitlab/middleware/go_spec.rb b/spec/lib/gitlab/middleware/go_spec.rb
index 99c2a364dfc..43a489f6df0 100644
--- a/spec/lib/gitlab/middleware/go_spec.rb
+++ b/spec/lib/gitlab/middleware/go_spec.rb
@@ -89,6 +89,13 @@ describe Gitlab::Middleware::Go do
it 'returns the full project path' do
expect_response_with_path(go, enabled_protocol, project.full_path, project.default_branch)
end
+
+ context 'with an empty ssh_user' do
+ it 'returns the full project path' do
+ allow(Gitlab.config.gitlab_shell).to receive(:ssh_user).and_return('')
+ expect_response_with_path(go, enabled_protocol, project.full_path, project.default_branch)
+ end
+ end
end
context 'without access to the project' do
@@ -234,7 +241,9 @@ describe Gitlab::Middleware::Go do
def expect_response_with_path(response, protocol, path, branch)
repository_url = case protocol
when :ssh
- "ssh://#{Gitlab.config.gitlab.user}@#{Gitlab.config.gitlab.host}/#{path}.git"
+ shell = Gitlab.config.gitlab_shell
+ user = "#{shell.ssh_user}@" unless shell.ssh_user.empty?
+ "ssh://#{user}#{shell.ssh_host}/#{path}.git"
when :http, nil
"http://#{Gitlab.config.gitlab.host}/#{path}.git"
end
diff --git a/spec/lib/gitlab/omniauth_logging/json_formatter_spec.rb b/spec/lib/gitlab/omniauth_logging/json_formatter_spec.rb
new file mode 100644
index 00000000000..36405daed5a
--- /dev/null
+++ b/spec/lib/gitlab/omniauth_logging/json_formatter_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::OmniauthLogging::JSONFormatter do
+ it "generates log in json format" do
+ Timecop.freeze(Time.utc(2019, 12, 04, 9, 10, 11, 123456)) do
+ expect(subject.call(:info, Time.now, 'omniauth', 'log message'))
+ .to eq %Q({"severity":"info","timestamp":"2019-12-04T09:10:11.123Z","pid":#{Process.pid},"progname":"omniauth","message":"log message"}\n)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/path_regex_spec.rb b/spec/lib/gitlab/path_regex_spec.rb
index 3cbcae4cdeb..8dabe5a756b 100644
--- a/spec/lib/gitlab/path_regex_spec.rb
+++ b/spec/lib/gitlab/path_regex_spec.rb
@@ -411,4 +411,37 @@ describe Gitlab::PathRegex do
it { is_expected.not_to match('git lab') }
it { is_expected.not_to match('gitlab.git') }
end
+
+ shared_examples 'invalid snippet routes' do
+ it { is_expected.not_to match('gitlab-org/gitlab/snippets/1.git') }
+ it { is_expected.not_to match('snippets/1.git') }
+ it { is_expected.not_to match('gitlab-org/gitlab/snippets/') }
+ it { is_expected.not_to match('/gitlab-org/gitlab/snippets/1') }
+ it { is_expected.not_to match('gitlab-org/gitlab/snippets/foo') }
+ it { is_expected.not_to match('root/snippets/1') }
+ it { is_expected.not_to match('/snippets/1') }
+ it { is_expected.not_to match('snippets/') }
+ it { is_expected.not_to match('snippets/foo') }
+ end
+
+ describe '.full_snippets_repository_path_regex' do
+ subject { described_class.full_snippets_repository_path_regex }
+
+ it { is_expected.to match('gitlab-org/gitlab/snippets/1') }
+ it { is_expected.to match('snippets/1') }
+
+ it_behaves_like 'invalid snippet routes'
+ end
+
+ describe '.personal_and_project_snippets_path_regex' do
+ subject { %r{\A#{described_class.personal_and_project_snippets_path_regex}\z} }
+
+ it { is_expected.to match('gitlab-org/gitlab/snippets') }
+ it { is_expected.to match('snippets') }
+
+ it { is_expected.not_to match('gitlab-org/gitlab/snippets/1') }
+ it { is_expected.not_to match('snippets/1') }
+
+ it_behaves_like 'invalid snippet routes'
+ end
end
diff --git a/spec/lib/gitlab/phabricator_import/cache/map_spec.rb b/spec/lib/gitlab/phabricator_import/cache/map_spec.rb
index 14a5d40d445..b2a63e4f026 100644
--- a/spec/lib/gitlab/phabricator_import/cache/map_spec.rb
+++ b/spec/lib/gitlab/phabricator_import/cache/map_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::PhabricatorImport::Cache::Map, :clean_gitlab_redis_cache do
- set(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
let(:redis) { Gitlab::Redis::Cache }
subject(:map) { described_class.new(project) }
diff --git a/spec/lib/gitlab/phabricator_import/issues/task_importer_spec.rb b/spec/lib/gitlab/phabricator_import/issues/task_importer_spec.rb
index 06ed264e781..79f11d7fae6 100644
--- a/spec/lib/gitlab/phabricator_import/issues/task_importer_spec.rb
+++ b/spec/lib/gitlab/phabricator_import/issues/task_importer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
describe Gitlab::PhabricatorImport::Issues::TaskImporter do
- set(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
let(:task) do
Gitlab::PhabricatorImport::Representation::Task.new(
{
diff --git a/spec/lib/gitlab/profiler_spec.rb b/spec/lib/gitlab/profiler_spec.rb
index 0186d48fd1b..6440f74a49a 100644
--- a/spec/lib/gitlab/profiler_spec.rb
+++ b/spec/lib/gitlab/profiler_spec.rb
@@ -195,6 +195,7 @@ describe Gitlab::Profiler do
describe '.print_by_total_time' do
let(:stdout) { StringIO.new }
+ let(:regexp) { /^\s+\d+\.\d+\s+(\d+\.\d+)/ }
let(:output) do
stdout.rewind
@@ -202,6 +203,8 @@ describe Gitlab::Profiler do
end
let_it_be(:result) do
+ Thread.new { sleep 1 }
+
RubyProf.profile do
sleep 0.1
1.to_s
@@ -215,14 +218,19 @@ describe Gitlab::Profiler do
it 'prints a profile result sorted by total time' do
described_class.print_by_total_time(result)
- total_times =
- output
- .scan(/^\s+\d+\.\d+\s+(\d+\.\d+)/)
- .map { |(total)| total.to_f }
-
expect(output).to include('Kernel#sleep')
- expect(total_times).to eq(total_times.sort.reverse)
- expect(total_times).not_to eq(total_times.uniq)
+
+ thread_profiles = output.split('Sort by: total_time').select { |x| x =~ regexp }
+
+ thread_profiles.each do |profile|
+ total_times =
+ profile
+ .scan(regexp)
+ .map { |(total)| total.to_f }
+
+ expect(total_times).to eq(total_times.sort.reverse)
+ expect(total_times).not_to eq(total_times.uniq)
+ end
end
it 'accepts a max_percent option' do
diff --git a/spec/lib/gitlab/project_authorizations_spec.rb b/spec/lib/gitlab/project_authorizations_spec.rb
index 7b282433061..d1c441c8605 100644
--- a/spec/lib/gitlab/project_authorizations_spec.rb
+++ b/spec/lib/gitlab/project_authorizations_spec.rb
@@ -43,6 +43,78 @@ describe Gitlab::ProjectAuthorizations do
end
end
+ context 'unapproved access request' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:user) { create(:user) }
+
+ subject(:mapping) { map_access_levels(authorizations) }
+
+ context 'group membership' do
+ let!(:group_project) { create(:project, namespace: group) }
+
+ before do
+ create(:group_member, :developer, :access_request, user: user, group: group)
+ end
+
+ it 'does not create authorization' do
+ expect(mapping[group_project.id]).to be_nil
+ end
+ end
+
+ context 'inherited group membership' do
+ let!(:sub_group) { create(:group, parent: group) }
+ let!(:sub_group_project) { create(:project, namespace: sub_group) }
+
+ before do
+ create(:group_member, :developer, :access_request, user: user, group: group)
+ end
+
+ it 'does not create authorization' do
+ expect(mapping[sub_group_project.id]).to be_nil
+ end
+ end
+
+ context 'project membership' do
+ let!(:group_project) { create(:project, namespace: group) }
+
+ before do
+ create(:project_member, :developer, :access_request, user: user, project: group_project)
+ end
+
+ it 'does not create authorization' do
+ expect(mapping[group_project.id]).to be_nil
+ end
+ end
+
+ context 'shared group' do
+ let!(:shared_group) { create(:group) }
+ let!(:shared_group_project) { create(:project, namespace: shared_group) }
+
+ before do
+ create(:group_group_link, shared_group: shared_group, shared_with_group: group)
+ create(:group_member, :developer, :access_request, user: user, group: group)
+ end
+
+ it 'does not create authorization' do
+ expect(mapping[shared_group_project.id]).to be_nil
+ end
+ end
+
+ context 'shared project' do
+ let!(:another_group) { create(:group) }
+ let!(:shared_project) { create(:project, namespace: another_group) }
+
+ before do
+ create(:project_group_link, group: group, project: shared_project)
+ create(:group_member, :developer, :access_request, user: user, group: group)
+ end
+
+ it 'does not create authorization' do
+ expect(mapping[shared_project.id]).to be_nil
+ end
+ end
+ end
+
context 'with nested groups' do
let(:group) { create(:group) }
let!(:nested_group) { create(:group, parent: group) }
diff --git a/spec/lib/gitlab/project_template_spec.rb b/spec/lib/gitlab/project_template_spec.rb
index a2e3e2146f3..3948e53bc17 100644
--- a/spec/lib/gitlab/project_template_spec.rb
+++ b/spec/lib/gitlab/project_template_spec.rb
@@ -13,6 +13,7 @@ describe Gitlab::ProjectTemplate do
described_class.new('dotnetcore', '.NET Core', 'A .NET Core console application template, customizable for any .NET Core project', 'https://gitlab.com/gitlab-org/project-templates/dotnetcore'),
described_class.new('android', 'Android', 'A ready-to-go template for use with Android apps.', 'https://gitlab.com/gitlab-org/project-templates/android'),
described_class.new('gomicro', 'Go Micro', 'Go Micro is a framework for micro service development.', 'https://gitlab.com/gitlab-org/project-templates/go-micro'),
+ described_class.new('gatsby', 'Pages/Gatsby', 'Everything you need to get started using a Gatsby site.', 'https://gitlab.com/pages/gatsby'),
described_class.new('hugo', 'Pages/Hugo', 'Everything you need to get started using a Hugo Pages site.', 'https://gitlab.com/pages/hugo'),
described_class.new('jekyll', 'Pages/Jekyll', 'Everything you need to get started using a Jekyll Pages site.', 'https://gitlab.com/pages/jekyll'),
described_class.new('plainhtml', 'Pages/Plain HTML', 'Everything you need to get started using a plain HTML Pages site.', 'https://gitlab.com/pages/plain-html'),
@@ -73,7 +74,7 @@ describe Gitlab::ProjectTemplate do
end
describe 'validate all templates' do
- set(:admin) { create(:admin) }
+ let_it_be(:admin) { create(:admin) }
described_class.all.each do |template|
it "#{template.name} has a valid archive" do
diff --git a/spec/lib/gitlab/prometheus/query_variables_spec.rb b/spec/lib/gitlab/prometheus/query_variables_spec.rb
index 849265de513..d8f8a2b7e7c 100644
--- a/spec/lib/gitlab/prometheus/query_variables_spec.rb
+++ b/spec/lib/gitlab/prometheus/query_variables_spec.rb
@@ -11,6 +11,10 @@ describe Gitlab::Prometheus::QueryVariables do
subject { described_class.call(environment) }
it { is_expected.to include(ci_environment_slug: slug) }
+ it { is_expected.to include(ci_project_name: project.name) }
+ it { is_expected.to include(ci_project_namespace: project.namespace.name) }
+ it { is_expected.to include(ci_project_path: project.full_path) }
+ it { is_expected.to include(ci_environment_name: environment.name) }
it do
is_expected.to include(environment_filter:
diff --git a/spec/lib/gitlab/quick_actions/extractor_spec.rb b/spec/lib/gitlab/quick_actions/extractor_spec.rb
index 2536e4a372b..6ea597bf01e 100644
--- a/spec/lib/gitlab/quick_actions/extractor_spec.rb
+++ b/spec/lib/gitlab/quick_actions/extractor_spec.rb
@@ -216,6 +216,22 @@ describe Gitlab::QuickActions::Extractor do
expect(msg).to eq "hello\nworld\nthis is great? SHRUG"
end
+ it 'extracts and performs multiple substitution commands' do
+ msg = %(hello\nworld\n/reopen\n/shrug this is great?\n/shrug meh)
+ msg, commands = extractor.extract_commands(msg)
+
+ expect(commands).to eq [['reopen'], ['shrug', 'this is great?'], %w(shrug meh)]
+ expect(msg).to eq "hello\nworld\nthis is great? SHRUG\nmeh SHRUG"
+ end
+
+ it 'does not extract substitution command in inline code' do
+ msg = %(hello\nworld\n/reopen\n`/tableflip this is great`?)
+ msg, commands = extractor.extract_commands(msg)
+
+ expect(commands).to eq [['reopen']]
+ expect(msg).to eq "hello\nworld\n`/tableflip this is great`?"
+ end
+
it 'extracts and performs substitution commands case insensitive' do
msg = %(hello\nworld\n/reOpen\n/sHRuG this is great?)
msg, commands = extractor.extract_commands(msg)
@@ -275,6 +291,33 @@ describe Gitlab::QuickActions::Extractor do
expect(msg).to eq expected
end
+ it 'does not extract commands in multiline inline code on separated rows' do
+ msg = "Hello\r\n`\r\nThis is some text\r\n/close\r\n/assign @user\r\n`\r\n\r\nWorld"
+ expected = msg.delete("\r")
+ msg, commands = extractor.extract_commands(msg)
+
+ expect(commands).to be_empty
+ expect(msg).to eq expected
+ end
+
+ it 'does not extract commands in multiline inline code starting from text' do
+ msg = "Hello `This is some text\r\n/close\r\n/assign @user\r\n`\r\n\r\nWorld"
+ expected = msg.delete("\r")
+ msg, commands = extractor.extract_commands(msg)
+
+ expect(commands).to be_empty
+ expect(msg).to eq expected
+ end
+
+ it 'does not extract commands in inline code' do
+ msg = "`This is some text\r\n/close\r\n/assign @user\r\n`\r\n\r\nWorld"
+ expected = msg.delete("\r")
+ msg, commands = extractor.extract_commands(msg)
+
+ expect(commands).to be_empty
+ expect(msg).to eq expected
+ end
+
it 'limits to passed commands when they are passed' do
msg = <<~MSG.strip
Hello, we should only extract the commands passed
diff --git a/spec/lib/gitlab/quick_actions/substitution_definition_spec.rb b/spec/lib/gitlab/quick_actions/substitution_definition_spec.rb
index a09aca31cdc..d1a44e2feeb 100644
--- a/spec/lib/gitlab/quick_actions/substitution_definition_spec.rb
+++ b/spec/lib/gitlab/quick_actions/substitution_definition_spec.rb
@@ -7,6 +7,7 @@ describe Gitlab::QuickActions::SubstitutionDefinition do
<<EOF
Hello! Let's do this!
/sub_name I like this stuff
+/sub_name second substitution
EOF
end
@@ -24,6 +25,7 @@ EOF
expect(subject.perform_substitution(self, content)).to eq <<EOF
Hello! Let's do this!
I like this stuff foo
+/sub_name second substitution
EOF
end
end
diff --git a/spec/lib/gitlab/rate_limit_helpers_spec.rb b/spec/lib/gitlab/rate_limit_helpers_spec.rb
new file mode 100644
index 00000000000..5ab79a2bbfe
--- /dev/null
+++ b/spec/lib/gitlab/rate_limit_helpers_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::RateLimitHelpers, :clean_gitlab_redis_shared_state do
+ let(:limiter_class) do
+ Class.new do
+ include ::Gitlab::RateLimitHelpers
+
+ attr_reader :request
+
+ def initialize(request)
+ @request = request
+ end
+ end
+ end
+
+ let(:request) { instance_double(ActionDispatch::Request, request_method: 'GET', ip: '127.0.0.1', fullpath: '/') }
+ let(:class_instance) { limiter_class.new(request) }
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+
+ describe '#archive_rate_limit_reached?' do
+ context 'with a user' do
+ it 'rate limits the user properly' do
+ 5.times do
+ expect(class_instance.archive_rate_limit_reached?(user, project)).to be_falsey
+ end
+
+ expect(class_instance.archive_rate_limit_reached?(user, project)).to be_truthy
+ end
+ end
+
+ context 'with an anonymous user' do
+ before do
+ stub_const('Gitlab::RateLimitHelpers::ARCHIVE_RATE_ANONYMOUS_THRESHOLD', 2)
+ end
+
+ it 'rate limits with higher limits' do
+ 2.times do
+ expect(class_instance.archive_rate_limit_reached?(nil, project)).to be_falsey
+ end
+
+ expect(class_instance.archive_rate_limit_reached?(nil, project)).to be_truthy
+ expect(class_instance.archive_rate_limit_reached?(user, project)).to be_falsey
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/reactive_cache_set_cache_spec.rb b/spec/lib/gitlab/reactive_cache_set_cache_spec.rb
new file mode 100644
index 00000000000..a0d8f15ba1b
--- /dev/null
+++ b/spec/lib/gitlab/reactive_cache_set_cache_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ReactiveCacheSetCache, :clean_gitlab_redis_cache do
+ let_it_be(:project) { create(:project) }
+ let(:cache_prefix) { 'cache_prefix' }
+ let(:expires_in) { 10.minutes }
+ let(:cache) { described_class.new(expires_in: expires_in) }
+
+ describe '#cache_key' do
+ subject { cache.cache_key(cache_prefix) }
+
+ it 'includes the suffix' do
+ expect(subject).to eq "#{Gitlab::Redis::Cache::CACHE_NAMESPACE}:#{cache_prefix}:set"
+ end
+ end
+
+ describe '#read' do
+ subject { cache.read(cache_prefix) }
+
+ it { is_expected.to be_empty }
+
+ context 'after item added' do
+ before do
+ cache.write(cache_prefix, 'test_item')
+ end
+
+ it { is_expected.to contain_exactly('test_item') }
+ end
+ end
+
+ describe '#write' do
+ it 'writes the value to the cache' do
+ cache.write(cache_prefix, 'test_item')
+
+ expect(cache.read(cache_prefix)).to contain_exactly('test_item')
+ end
+
+ it 'sets the expiry of the set' do
+ cache.write(cache_prefix, 'test_item')
+
+ expect(cache.ttl(cache_prefix)).to be_within(1).of(expires_in.seconds)
+ end
+ end
+
+ describe '#clear_cache!', :use_clean_rails_redis_caching do
+ it 'deletes the cached items' do
+ # Cached key and value
+ Rails.cache.write('test_item', 'test_value')
+ # Add key to set
+ cache.write(cache_prefix, 'test_item')
+
+ expect(cache.read(cache_prefix)).to contain_exactly('test_item')
+ cache.clear_cache!(cache_prefix)
+
+ expect(cache.read(cache_prefix)).to be_empty
+ end
+ end
+
+ describe '#include?' do
+ subject { cache.include?(cache_prefix, 'test_item') }
+
+ it { is_expected.to be(false) }
+
+ context 'item added' do
+ before do
+ cache.write(cache_prefix, 'test_item')
+ end
+
+ it { is_expected.to be(true) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/reference_counter_spec.rb b/spec/lib/gitlab/reference_counter_spec.rb
index f9361d08faf..ae7b18ca007 100644
--- a/spec/lib/gitlab/reference_counter_spec.rb
+++ b/spec/lib/gitlab/reference_counter_spec.rb
@@ -2,38 +2,54 @@
require 'spec_helper'
-describe Gitlab::ReferenceCounter do
- let(:redis) { double('redis') }
- let(:reference_counter_key) { "git-receive-pack-reference-counter:project-1" }
+describe Gitlab::ReferenceCounter, :clean_gitlab_redis_shared_state do
let(:reference_counter) { described_class.new('project-1') }
- before do
- allow(Gitlab::Redis::SharedState).to receive(:with).and_yield(redis)
+ describe '#increase' do
+ it 'increases and sets the expire time of a reference count for a path' do
+ expect { reference_counter.increase }.to change { reference_counter.value }.by(1)
+ expect(reference_counter.expires_in).to be_positive
+ expect(reference_counter.increase).to be(true)
+ end
end
- it 'increases and set the expire time of a reference count for a path' do
- expect(redis).to receive(:incr).with(reference_counter_key)
- expect(redis).to receive(:expire).with(reference_counter_key,
- described_class::REFERENCE_EXPIRE_TIME)
- expect(reference_counter.increase).to be(true)
+ describe '#decrease' do
+ it 'decreases the reference count for a path' do
+ reference_counter.increase
+
+ expect { reference_counter.decrease }.to change { reference_counter.value }.by(-1)
+ end
+
+ it 'warns if attempting to decrease a counter with a value of zero or less, and resets the counter' do
+ expect(Rails.logger).to receive(:warn).with("Reference counter for project-1" \
+ " decreased when its value was less than 1. Resetting the counter.")
+ expect { reference_counter.decrease }.not_to change { reference_counter.value }
+ end
end
- it 'decreases the reference count for a path' do
- allow(redis).to receive(:decr).and_return(0)
- expect(redis).to receive(:decr).with(reference_counter_key)
- expect(reference_counter.decrease).to be(true)
+ describe '#value' do
+ it 'gets the reference count for a path' do
+ expect(reference_counter.value).to eq(0)
+
+ reference_counter.increase
+
+ expect(reference_counter.value).to eq(1)
+ end
end
- it 'warns if attempting to decrease a counter with a value of one or less, and resets the counter' do
- expect(redis).to receive(:decr).and_return(-1)
- expect(redis).to receive(:del)
- expect(Rails.logger).to receive(:warn).with("Reference counter for project-1" \
- " decreased when its value was less than 1. Reseting the counter.")
- expect(reference_counter.decrease).to be(true)
+ describe '#reset!' do
+ it 'resets reference count down to zero' do
+ 3.times { reference_counter.increase }
+
+ expect { reference_counter.reset! }.to change { reference_counter.value }.from(3).to(0)
+ end
end
- it 'get the reference count for a path' do
- allow(redis).to receive(:get).and_return(1)
- expect(reference_counter.value).to be(1)
+ describe '#expires_in' do
+ it 'displays the expiration time in seconds' do
+ reference_counter.increase
+
+ expect(reference_counter.expires_in).to be_between(500, 600)
+ end
end
end
diff --git a/spec/lib/gitlab/reference_extractor_spec.rb b/spec/lib/gitlab/reference_extractor_spec.rb
index 0faaaa50621..a7961190ff1 100644
--- a/spec/lib/gitlab/reference_extractor_spec.rb
+++ b/spec/lib/gitlab/reference_extractor_spec.rb
@@ -225,6 +225,24 @@ describe Gitlab::ReferenceExtractor do
end
end
+ context 'with an inactive external issue tracker' do
+ let(:project) { create(:project) }
+ let!(:jira_service) { create(:jira_service, project: project, active: false) }
+ let(:issue) { create(:issue, project: project) }
+
+ context 'when GitLab issues are enabled' do
+ it 'returns only internal issue' do
+ subject.analyze("JIRA-123 and FOOBAR-4567 and #{issue.to_reference}")
+ expect(subject.issues).to eq([issue])
+ end
+
+ it 'does not return any issue if the internal one does not exist' do
+ subject.analyze("JIRA-123 and FOOBAR-4567 and #999")
+ expect(subject.issues).to be_empty
+ end
+ end
+ end
+
context 'with a project with an underscore' do
let(:other_project) { create(:project, path: 'test_project') }
let(:issue) { create(:issue, project: other_project) }
diff --git a/spec/lib/gitlab/repo_path_spec.rb b/spec/lib/gitlab/repo_path_spec.rb
index cffd7cc89e7..68571b9de20 100644
--- a/spec/lib/gitlab/repo_path_spec.rb
+++ b/spec/lib/gitlab/repo_path_spec.rb
@@ -3,64 +3,75 @@
require 'spec_helper'
describe ::Gitlab::RepoPath do
- describe '.parse' do
- set(:project) { create(:project, :repository) }
+ include Gitlab::Routing
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:personal_snippet) { create(:personal_snippet) }
+ let_it_be(:project_snippet) { create(:project_snippet, project: project) }
+ let_it_be(:redirect_route) { 'foo/bar/baz' }
+ let_it_be(:redirect) { project.route.create_redirect(redirect_route) }
+ describe '.parse' do
context 'a repository storage path' do
- it 'parses a full repository path' do
- expect(described_class.parse(project.repository.full_path)).to eq([project, Gitlab::GlRepository::PROJECT, nil])
+ it 'parses a full repository project path' do
+ expect(described_class.parse(project.repository.full_path)).to eq([project, project, Gitlab::GlRepository::PROJECT, nil])
+ end
+
+ it 'parses a full wiki project path' do
+ expect(described_class.parse(project.wiki.repository.full_path)).to eq([project, project, Gitlab::GlRepository::WIKI, nil])
+ end
+
+ it 'parses a personal snippet repository path' do
+ expect(described_class.parse("snippets/#{personal_snippet.id}")).to eq([personal_snippet, nil, Gitlab::GlRepository::SNIPPET, nil])
end
- it 'parses a full wiki path' do
- expect(described_class.parse(project.wiki.repository.full_path)).to eq([project, Gitlab::GlRepository::WIKI, nil])
+ it 'parses a project snippet repository path' do
+ expect(described_class.parse("#{project.full_path}/snippets/#{project_snippet.id}")).to eq([project_snippet, project, Gitlab::GlRepository::SNIPPET, nil])
end
end
context 'a relative path' do
it 'parses a relative repository path' do
- expect(described_class.parse(project.full_path + '.git')).to eq([project, Gitlab::GlRepository::PROJECT, nil])
+ expect(described_class.parse(project.full_path + '.git')).to eq([project, project, Gitlab::GlRepository::PROJECT, nil])
end
it 'parses a relative wiki path' do
- expect(described_class.parse(project.full_path + '.wiki.git')).to eq([project, Gitlab::GlRepository::WIKI, nil])
+ expect(described_class.parse(project.full_path + '.wiki.git')).to eq([project, project, Gitlab::GlRepository::WIKI, nil])
end
it 'parses a relative path starting with /' do
- expect(described_class.parse('/' + project.full_path + '.git')).to eq([project, Gitlab::GlRepository::PROJECT, nil])
+ expect(described_class.parse('/' + project.full_path + '.git')).to eq([project, project, Gitlab::GlRepository::PROJECT, nil])
end
context 'of a redirected project' do
- let(:redirect) { project.route.create_redirect('foo/bar') }
-
it 'parses a relative repository path' do
- expect(described_class.parse(redirect.path + '.git')).to eq([project, Gitlab::GlRepository::PROJECT, 'foo/bar'])
+ expect(described_class.parse(redirect.path + '.git')).to eq([project, project, Gitlab::GlRepository::PROJECT, redirect_route])
end
it 'parses a relative wiki path' do
- expect(described_class.parse(redirect.path + '.wiki.git')).to eq([project, Gitlab::GlRepository::WIKI, 'foo/bar.wiki'])
+ expect(described_class.parse(redirect.path + '.wiki.git')).to eq([project, project, Gitlab::GlRepository::WIKI, redirect_route])
end
it 'parses a relative path starting with /' do
- expect(described_class.parse('/' + redirect.path + '.git')).to eq([project, Gitlab::GlRepository::PROJECT, 'foo/bar'])
+ expect(described_class.parse('/' + redirect.path + '.git')).to eq([project, project, Gitlab::GlRepository::PROJECT, redirect_route])
+ end
+
+ it 'parses a redirected project snippet repository path' do
+ expect(described_class.parse(redirect.path + "/snippets/#{project_snippet.id}.git")).to eq([project_snippet, project, Gitlab::GlRepository::SNIPPET, redirect_route])
end
end
end
- it "returns the default type for non existent paths" do
- _project, type, _redirected = described_class.parse("path/non-existent.git")
-
- expect(type).to eq(Gitlab::GlRepository.default_type)
+ it 'returns the default type for non-existent paths' do
+ expect(described_class.parse('path/non-existent.git')).to eq([nil, nil, Gitlab::GlRepository.default_type, nil])
end
end
describe '.find_project' do
- let(:project) { create(:project) }
- let(:redirect) { project.route.create_redirect('foo/bar/baz') }
-
context 'when finding a project by its canonical path' do
context 'when the cases match' do
- it 'returns the project and false' do
- expect(described_class.find_project(project.full_path)).to eq([project, false])
+ it 'returns the project and nil' do
+ expect(described_class.find_project(project.full_path)).to eq([project, nil])
end
end
@@ -69,15 +80,45 @@ describe ::Gitlab::RepoPath do
# easy and safe to redirect someone to the correctly-cased URL. For git
# requests, we should accept wrongly-cased URLs because it is a pain to
# block people's git operations and force them to update remote URLs.
- it 'returns the project and false' do
- expect(described_class.find_project(project.full_path.upcase)).to eq([project, false])
+ it 'returns the project and nil' do
+ expect(described_class.find_project(project.full_path.upcase)).to eq([project, nil])
end
end
end
context 'when finding a project via a redirect' do
+ it 'returns the project and nil' do
+ expect(described_class.find_project(redirect.path)).to eq([project, redirect.path])
+ end
+ end
+ end
+
+ describe '.find_snippet' do
+ it 'extracts path and id from personal snippet route' do
+ expect(described_class.find_snippet("snippets/#{personal_snippet.id}")).to eq([personal_snippet, nil])
+ end
+
+ it 'extracts path and id from project snippet route' do
+ expect(described_class.find_snippet("#{project.full_path}/snippets/#{project_snippet.id}")).to eq([project_snippet, nil])
+ end
+
+ it 'returns nil for invalid snippet paths' do
+ aggregate_failures do
+ expect(described_class.find_snippet("snippets/#{project_snippet.id}")).to eq([nil, nil])
+ expect(described_class.find_snippet("#{project.full_path}/snippets/#{personal_snippet.id}")).to eq([nil, nil])
+ expect(described_class.find_snippet('')).to eq([nil, nil])
+ end
+ end
+
+ it 'returns nil for snippets not associated with the project' do
+ snippet = create(:project_snippet)
+
+ expect(described_class.find_snippet("#{project.full_path}/snippets/#{snippet.id}")).to eq([nil, nil])
+ end
+
+ context 'when finding a project snippet via a redirect' do
it 'returns the project and true' do
- expect(described_class.find_project(redirect.path)).to eq([project, true])
+ expect(described_class.find_snippet("#{redirect.path}/snippets/#{project_snippet.id}")).to eq([project_snippet, redirect.path])
end
end
end
diff --git a/spec/lib/gitlab/repository_cache_adapter_spec.rb b/spec/lib/gitlab/repository_cache_adapter_spec.rb
index b4fc504ea60..dba5ffc84c5 100644
--- a/spec/lib/gitlab/repository_cache_adapter_spec.rb
+++ b/spec/lib/gitlab/repository_cache_adapter_spec.rb
@@ -211,8 +211,7 @@ describe Gitlab::RepositoryCacheAdapter do
it 'expires the caches of the given methods' do
expect(cache).to receive(:expire).with(:rendered_readme)
expect(cache).to receive(:expire).with(:branch_names)
- expect(redis_set_cache).to receive(:expire).with(:rendered_readme)
- expect(redis_set_cache).to receive(:expire).with(:branch_names)
+ expect(redis_set_cache).to receive(:expire).with(:rendered_readme, :branch_names)
expect(redis_hash_cache).to receive(:delete).with(:rendered_readme)
expect(redis_hash_cache).to receive(:delete).with(:branch_names)
diff --git a/spec/lib/gitlab/repository_cache_spec.rb b/spec/lib/gitlab/repository_cache_spec.rb
index e787288fc51..be31be761ad 100644
--- a/spec/lib/gitlab/repository_cache_spec.rb
+++ b/spec/lib/gitlab/repository_cache_spec.rb
@@ -12,19 +12,44 @@ describe Gitlab::RepositoryCache do
describe '#cache_key' do
subject { cache.cache_key(:foo) }
- it 'includes the namespace' do
- expect(subject).to eq "foo:#{namespace}"
+ shared_examples 'cache_key examples' do
+ it 'includes the namespace' do
+ expect(subject).to eq "foo:#{namespace}"
+ end
+
+ context 'with a given namespace' do
+ let(:extra_namespace) { 'my:data' }
+ let(:cache) do
+ described_class.new(repository, extra_namespace: extra_namespace,
+ backend: backend)
+ end
+
+ it 'includes the full namespace' do
+ expect(subject).to eq "foo:#{namespace}:#{extra_namespace}"
+ end
+ end
end
- context 'with a given namespace' do
- let(:extra_namespace) { 'my:data' }
- let(:cache) do
- described_class.new(repository, extra_namespace: extra_namespace,
- backend: backend)
+ describe 'project repository' do
+ it_behaves_like 'cache_key examples' do
+ let(:repository) { project.repository }
end
+ end
+
+ describe 'personal snippet repository' do
+ let_it_be(:personal_snippet) { create(:personal_snippet) }
+ let(:namespace) { repository.full_path }
+
+ it_behaves_like 'cache_key examples' do
+ let(:repository) { personal_snippet.repository }
+ end
+ end
+
+ describe 'project snippet repository' do
+ let_it_be(:project_snippet) { create(:project_snippet, project: project) }
- it 'includes the full namespace' do
- expect(subject).to eq "foo:#{namespace}:#{extra_namespace}"
+ it_behaves_like 'cache_key examples' do
+ let(:repository) { project_snippet.repository }
end
end
end
diff --git a/spec/lib/gitlab/repository_set_cache_spec.rb b/spec/lib/gitlab/repository_set_cache_spec.rb
index de0f3602346..6221d6fb45f 100644
--- a/spec/lib/gitlab/repository_set_cache_spec.rb
+++ b/spec/lib/gitlab/repository_set_cache_spec.rb
@@ -11,27 +11,98 @@ describe Gitlab::RepositorySetCache, :clean_gitlab_redis_cache do
describe '#cache_key' do
subject { cache.cache_key(:foo) }
- it 'includes the namespace' do
- is_expected.to eq("foo:#{namespace}:set")
+ shared_examples 'cache_key examples' do
+ it 'includes the namespace' do
+ is_expected.to eq("foo:#{namespace}:set")
+ end
+
+ context 'with a given namespace' do
+ let(:extra_namespace) { 'my:data' }
+ let(:cache) { described_class.new(repository, extra_namespace: extra_namespace) }
+
+ it 'includes the full namespace' do
+ is_expected.to eq("foo:#{namespace}:#{extra_namespace}:set")
+ end
+ end
end
- context 'with a given namespace' do
- let(:extra_namespace) { 'my:data' }
- let(:cache) { described_class.new(repository, extra_namespace: extra_namespace) }
+ describe 'project repository' do
+ it_behaves_like 'cache_key examples' do
+ let(:repository) { project.repository }
+ end
+ end
+
+ describe 'personal snippet repository' do
+ let_it_be(:personal_snippet) { create(:personal_snippet) }
+ let(:namespace) { repository.full_path }
- it 'includes the full namespace' do
- is_expected.to eq("foo:#{namespace}:#{extra_namespace}:set")
+ it_behaves_like 'cache_key examples' do
+ let(:repository) { personal_snippet.repository }
+ end
+ end
+
+ describe 'project snippet repository' do
+ let_it_be(:project_snippet) { create(:project_snippet, project: project) }
+
+ it_behaves_like 'cache_key examples' do
+ let(:repository) { project_snippet.repository }
end
end
end
describe '#expire' do
- it 'expires the given key from the cache' do
+ subject { cache.expire(*keys) }
+
+ before do
cache.write(:foo, ['value'])
+ cache.write(:bar, ['value2'])
+ end
+ it 'actually wrote the values' do
expect(cache.read(:foo)).to contain_exactly('value')
- expect(cache.expire(:foo)).to eq(1)
- expect(cache.read(:foo)).to be_empty
+ expect(cache.read(:bar)).to contain_exactly('value2')
+ end
+
+ context 'single key' do
+ let(:keys) { %w(foo) }
+
+ it { is_expected.to eq(1) }
+
+ it 'deletes the given key from the cache' do
+ subject
+
+ expect(cache.read(:foo)).to be_empty
+ end
+ end
+
+ context 'multiple keys' do
+ let(:keys) { %w(foo bar) }
+
+ it { is_expected.to eq(2) }
+
+ it 'deletes the given keys from the cache' do
+ subject
+
+ expect(cache.read(:foo)).to be_empty
+ expect(cache.read(:bar)).to be_empty
+ end
+ end
+
+ context 'no keys' do
+ let(:keys) { [] }
+
+ it { is_expected.to eq(0) }
+ end
+
+ context "unlink isn't supported" do
+ before do
+ allow_any_instance_of(Redis).to receive(:unlink) { raise ::Redis::CommandError }
+ end
+
+ it 'still deletes the given key' do
+ expect(cache.expire(:foo)).to eq(1)
+ expect(cache.read(:foo)).to be_empty
+ end
end
end
diff --git a/spec/lib/gitlab/sanitizers/exif_spec.rb b/spec/lib/gitlab/sanitizers/exif_spec.rb
index f0b733817b3..58fba673f8e 100644
--- a/spec/lib/gitlab/sanitizers/exif_spec.rb
+++ b/spec/lib/gitlab/sanitizers/exif_spec.rb
@@ -7,9 +7,9 @@ describe Gitlab::Sanitizers::Exif do
describe '#batch_clean' do
context 'with image uploads' do
- set(:upload1) { create(:upload, :with_file, :issuable_upload) }
- set(:upload2) { create(:upload, :with_file, :personal_snippet_upload) }
- set(:upload3) { create(:upload, :with_file, created_at: 3.days.ago) }
+ let_it_be(:upload1) { create(:upload, :with_file, :issuable_upload) }
+ let_it_be(:upload2) { create(:upload, :with_file, :personal_snippet_upload) }
+ let_it_be(:upload3) { create(:upload, :with_file, created_at: 3.days.ago) }
it 'processes all uploads if range ID is not set' do
expect(sanitizer).to receive(:clean).exactly(3).times
diff --git a/spec/lib/gitlab/serverless/domain_spec.rb b/spec/lib/gitlab/serverless/domain_spec.rb
deleted file mode 100644
index ae5551977d4..00000000000
--- a/spec/lib/gitlab/serverless/domain_spec.rb
+++ /dev/null
@@ -1,22 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Gitlab::Serverless::Domain do
- describe '.generate_uuid' do
- it 'has 14 characters' do
- expect(described_class.generate_uuid.length).to eq(described_class::UUID_LENGTH)
- end
-
- it 'consists of only hexadecimal characters' do
- expect(described_class.generate_uuid).to match(/\A\h+\z/)
- end
-
- it 'uses random characters' do
- uuid = 'abcd1234567890'
-
- expect(SecureRandom).to receive(:hex).with(described_class::UUID_LENGTH / 2).and_return(uuid)
- expect(described_class.generate_uuid).to eq(uuid)
- end
- end
-end
diff --git a/spec/lib/gitlab/serverless/function_uri_spec.rb b/spec/lib/gitlab/serverless/function_uri_spec.rb
deleted file mode 100644
index cd4abeb89f5..00000000000
--- a/spec/lib/gitlab/serverless/function_uri_spec.rb
+++ /dev/null
@@ -1,81 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Gitlab::Serverless::FunctionURI do
- let(:function) { 'test-function' }
- let(:domain) { 'serverless.gitlab.io' }
- let(:pages_domain) { create(:pages_domain, :instance_serverless, domain: domain) }
- let!(:cluster) { create(:serverless_domain_cluster, uuid: 'abcdef12345678', pages_domain: pages_domain) }
- let(:valid_cluster) { 'aba1cdef123456f278' }
- let(:invalid_cluster) { 'aba1cdef123456f178' }
- let!(:environment) { create(:environment, name: 'test') }
-
- let(:valid_uri) { "https://#{function}-#{valid_cluster}#{"%x" % environment.id}-#{environment.slug}.#{domain}" }
- let(:valid_fqdn) { "#{function}-#{valid_cluster}#{"%x" % environment.id}-#{environment.slug}.#{domain}" }
- let(:invalid_uri) { "https://#{function}-#{invalid_cluster}#{"%x" % environment.id}-#{environment.slug}.#{domain}" }
-
- shared_examples 'a valid FunctionURI class' do
- describe '#to_s' do
- it 'matches valid URI' do
- expect(subject.to_s).to eq valid_uri
- end
- end
-
- describe '#function' do
- it 'returns function' do
- expect(subject.function).to eq function
- end
- end
-
- describe '#cluster' do
- it 'returns cluster' do
- expect(subject.cluster).to eq cluster
- end
- end
-
- describe '#environment' do
- it 'returns environment' do
- expect(subject.environment).to eq environment
- end
- end
- end
-
- describe '.new' do
- context 'with valid arguments' do
- subject { described_class.new(function: function, cluster: cluster, environment: environment) }
-
- it_behaves_like 'a valid FunctionURI class'
- end
-
- context 'with invalid arguments' do
- subject { described_class.new(function: function, environment: environment) }
-
- it 'raises an exception' do
- expect { subject }.to raise_error(ArgumentError)
- end
- end
- end
-
- describe '.parse' do
- context 'with valid URI' do
- subject { described_class.parse(valid_uri) }
-
- it_behaves_like 'a valid FunctionURI class'
- end
-
- context 'with valid FQDN' do
- subject { described_class.parse(valid_fqdn) }
-
- it_behaves_like 'a valid FunctionURI class'
- end
-
- context 'with invalid URI' do
- subject { described_class.parse(invalid_uri) }
-
- it 'returns nil' do
- expect(subject).to be_nil
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/serverless/service_spec.rb b/spec/lib/gitlab/serverless/service_spec.rb
index f618dd02cdb..6db8b9cd0ba 100644
--- a/spec/lib/gitlab/serverless/service_spec.rb
+++ b/spec/lib/gitlab/serverless/service_spec.rb
@@ -94,17 +94,19 @@ describe Gitlab::Serverless::Service do
end
describe '#url' do
+ let(:serverless_domain) { instance_double(::Serverless::Domain, uri: URI('https://proxy.example.com')) }
+
it 'returns proxy URL if cluster has serverless domain' do
# cluster = create(:cluster)
knative = create(:clusters_applications_knative, :installed, cluster: cluster)
create(:serverless_domain_cluster, clusters_applications_knative_id: knative.id)
service = Gitlab::Serverless::Service.new(attributes.merge('cluster' => cluster))
- expect(Gitlab::Serverless::FunctionURI).to receive(:new).with(
- function: service.name,
- cluster: service.cluster.serverless_domain,
+ expect(::Serverless::Domain).to receive(:new).with(
+ function_name: service.name,
+ serverless_domain_cluster: service.cluster.serverless_domain,
environment: service.environment
- ).and_return('https://proxy.example.com')
+ ).and_return(serverless_domain)
expect(service.url).to eq('https://proxy.example.com')
end
diff --git a/spec/lib/gitlab/shell_spec.rb b/spec/lib/gitlab/shell_spec.rb
index 7b8d1b6cd9b..e4c33863ac2 100644
--- a/spec/lib/gitlab/shell_spec.rb
+++ b/spec/lib/gitlab/shell_spec.rb
@@ -4,25 +4,19 @@ require 'spec_helper'
require 'stringio'
describe Gitlab::Shell do
- set(:project) { create(:project, :repository) }
-
+ let_it_be(:project) { create(:project, :repository) }
let(:repository) { project.repository }
let(:gitlab_shell) { described_class.new }
- let(:popen_vars) { { 'GIT_TERMINAL_PROMPT' => ENV['GIT_TERMINAL_PROMPT'] } }
- let(:timeout) { Gitlab.config.gitlab_shell.git_timeout }
- let(:gitlab_authorized_keys) { double }
-
- before do
- allow(Project).to receive(:find).and_return(project)
- end
- it { is_expected.to respond_to :add_key }
- it { is_expected.to respond_to :remove_key }
- it { is_expected.to respond_to :create_repository }
it { is_expected.to respond_to :remove_repository }
- it { is_expected.to respond_to :fork_repository }
- it { expect(gitlab_shell.url_to_repo('diaspora')).to eq(Gitlab.config.gitlab_shell.ssh_path_prefix + "diaspora.git") }
+ describe '.url_to_repo' do
+ let(:full_path) { 'diaspora/diaspora-rails' }
+
+ subject { described_class.url_to_repo(full_path) }
+
+ it { is_expected.to eq(Gitlab.config.gitlab_shell.ssh_path_prefix + full_path + '.git') }
+ end
describe 'memoized secret_token' do
let(:secret_file) { 'tmp/tests/.secret_shell_test' }
@@ -50,261 +44,15 @@ describe Gitlab::Shell do
end
end
- describe '#add_key' do
- context 'when authorized_keys_enabled is true' do
- it 'calls Gitlab::AuthorizedKeys#add_key with id and key' do
- expect(Gitlab::AuthorizedKeys).to receive(:new).and_return(gitlab_authorized_keys)
-
- expect(gitlab_authorized_keys)
- .to receive(:add_key)
- .with('key-123', 'ssh-rsa foobar')
-
- gitlab_shell.add_key('key-123', 'ssh-rsa foobar')
- end
- end
-
- context 'when authorized_keys_enabled is false' do
- before do
- stub_application_setting(authorized_keys_enabled: false)
- end
-
- it 'does nothing' do
- expect(Gitlab::AuthorizedKeys).not_to receive(:new)
-
- gitlab_shell.add_key('key-123', 'ssh-rsa foobar trailing garbage')
- end
- end
-
- context 'when authorized_keys_enabled is nil' do
- before do
- stub_application_setting(authorized_keys_enabled: nil)
- end
-
- it 'calls Gitlab::AuthorizedKeys#add_key with id and key' do
- expect(Gitlab::AuthorizedKeys).to receive(:new).and_return(gitlab_authorized_keys)
-
- expect(gitlab_authorized_keys)
- .to receive(:add_key)
- .with('key-123', 'ssh-rsa foobar')
-
- gitlab_shell.add_key('key-123', 'ssh-rsa foobar')
- end
- end
- end
-
- describe '#batch_add_keys' do
- let(:keys) { [double(shell_id: 'key-123', key: 'ssh-rsa foobar')] }
-
- context 'when authorized_keys_enabled is true' do
- it 'calls Gitlab::AuthorizedKeys#batch_add_keys with keys to be added' do
- expect(Gitlab::AuthorizedKeys).to receive(:new).and_return(gitlab_authorized_keys)
-
- expect(gitlab_authorized_keys)
- .to receive(:batch_add_keys)
- .with(keys)
-
- gitlab_shell.batch_add_keys(keys)
- end
- end
-
- context 'when authorized_keys_enabled is false' do
- before do
- stub_application_setting(authorized_keys_enabled: false)
- end
-
- it 'does nothing' do
- expect(Gitlab::AuthorizedKeys).not_to receive(:new)
-
- gitlab_shell.batch_add_keys(keys)
- end
- end
-
- context 'when authorized_keys_enabled is nil' do
- before do
- stub_application_setting(authorized_keys_enabled: nil)
- end
-
- it 'calls Gitlab::AuthorizedKeys#batch_add_keys with keys to be added' do
- expect(Gitlab::AuthorizedKeys).to receive(:new).and_return(gitlab_authorized_keys)
-
- expect(gitlab_authorized_keys)
- .to receive(:batch_add_keys)
- .with(keys)
-
- gitlab_shell.batch_add_keys(keys)
- end
- end
- end
-
- describe '#remove_key' do
- context 'when authorized_keys_enabled is true' do
- it 'calls Gitlab::AuthorizedKeys#rm_key with the key to be removed' do
- expect(Gitlab::AuthorizedKeys).to receive(:new).and_return(gitlab_authorized_keys)
- expect(gitlab_authorized_keys).to receive(:rm_key).with('key-123')
-
- gitlab_shell.remove_key('key-123')
- end
- end
-
- context 'when authorized_keys_enabled is false' do
- before do
- stub_application_setting(authorized_keys_enabled: false)
- end
-
- it 'does nothing' do
- expect(Gitlab::AuthorizedKeys).not_to receive(:new)
-
- gitlab_shell.remove_key('key-123')
- end
- end
-
- context 'when authorized_keys_enabled is nil' do
- before do
- stub_application_setting(authorized_keys_enabled: nil)
- end
-
- it 'calls Gitlab::AuthorizedKeys#rm_key with the key to be removed' do
- expect(Gitlab::AuthorizedKeys).to receive(:new).and_return(gitlab_authorized_keys)
- expect(gitlab_authorized_keys).to receive(:rm_key).with('key-123')
-
- gitlab_shell.remove_key('key-123')
- end
- end
- end
-
- describe '#remove_all_keys' do
- context 'when authorized_keys_enabled is true' do
- it 'calls Gitlab::AuthorizedKeys#clear' do
- expect(Gitlab::AuthorizedKeys).to receive(:new).and_return(gitlab_authorized_keys)
- expect(gitlab_authorized_keys).to receive(:clear)
-
- gitlab_shell.remove_all_keys
- end
- end
-
- context 'when authorized_keys_enabled is false' do
- before do
- stub_application_setting(authorized_keys_enabled: false)
- end
-
- it 'does nothing' do
- expect(Gitlab::AuthorizedKeys).not_to receive(:new)
-
- gitlab_shell.remove_all_keys
- end
- end
-
- context 'when authorized_keys_enabled is nil' do
- before do
- stub_application_setting(authorized_keys_enabled: nil)
- end
-
- it 'calls Gitlab::AuthorizedKeys#clear' do
- expect(Gitlab::AuthorizedKeys).to receive(:new).and_return(gitlab_authorized_keys)
- expect(gitlab_authorized_keys).to receive(:clear)
-
- gitlab_shell.remove_all_keys
- end
- end
- end
-
- describe '#remove_keys_not_found_in_db' do
- context 'when keys are in the file that are not in the DB' do
- before do
- gitlab_shell.remove_all_keys
- gitlab_shell.add_key('key-1234', 'ssh-rsa ASDFASDF')
- gitlab_shell.add_key('key-9876', 'ssh-rsa ASDFASDF')
- @another_key = create(:key) # this one IS in the DB
- end
-
- it 'removes the keys' do
- expect(gitlab_shell).to receive(:remove_key).with('key-1234')
- expect(gitlab_shell).to receive(:remove_key).with('key-9876')
- expect(gitlab_shell).not_to receive(:remove_key).with("key-#{@another_key.id}")
-
- gitlab_shell.remove_keys_not_found_in_db
- end
- end
-
- context 'when keys there are duplicate keys in the file that are not in the DB' do
- before do
- gitlab_shell.remove_all_keys
- gitlab_shell.add_key('key-1234', 'ssh-rsa ASDFASDF')
- gitlab_shell.add_key('key-1234', 'ssh-rsa ASDFASDF')
- end
-
- it 'removes the keys' do
- expect(gitlab_shell).to receive(:remove_key).with('key-1234')
-
- gitlab_shell.remove_keys_not_found_in_db
- end
- end
-
- context 'when keys there are duplicate keys in the file that ARE in the DB' do
- before do
- gitlab_shell.remove_all_keys
- @key = create(:key)
- gitlab_shell.add_key(@key.shell_id, @key.key)
- end
-
- it 'does not remove the key' do
- expect(gitlab_shell).not_to receive(:remove_key).with("key-#{@key.id}")
-
- gitlab_shell.remove_keys_not_found_in_db
- end
- end
-
- unless ENV['CI'] # Skip in CI, it takes 1 minute
- context 'when the first batch can be skipped, but the next batch has keys that are not in the DB' do
- before do
- gitlab_shell.remove_all_keys
- 100.times { |i| create(:key) } # first batch is all in the DB
- gitlab_shell.add_key('key-1234', 'ssh-rsa ASDFASDF')
- end
-
- it 'removes the keys not in the DB' do
- expect(gitlab_shell).to receive(:remove_key).with('key-1234')
-
- gitlab_shell.remove_keys_not_found_in_db
- end
- end
- end
- end
-
describe 'projects commands' do
let(:gitlab_shell_path) { File.expand_path('tmp/tests/gitlab-shell') }
let(:projects_path) { File.join(gitlab_shell_path, 'bin/gitlab-projects') }
- let(:gitlab_shell_hooks_path) { File.join(gitlab_shell_path, 'hooks') }
before do
allow(Gitlab.config.gitlab_shell).to receive(:path).and_return(gitlab_shell_path)
allow(Gitlab.config.gitlab_shell).to receive(:git_timeout).and_return(800)
end
- describe '#create_repository' do
- let(:repository_storage) { 'default' }
- let(:repository_storage_path) do
- Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- Gitlab.config.repositories.storages[repository_storage].legacy_disk_path
- end
- end
- let(:repo_name) { 'project/path' }
- let(:created_path) { File.join(repository_storage_path, repo_name + '.git') }
-
- after do
- FileUtils.rm_rf(created_path)
- end
-
- it 'returns false when the command fails' do
- FileUtils.mkdir_p(File.dirname(created_path))
- # This file will block the creation of the repo's .git directory. That
- # should cause #create_repository to fail.
- FileUtils.touch(created_path)
-
- expect(gitlab_shell.create_repository(repository_storage, repo_name, repo_name)).to be_falsy
- end
- end
-
describe '#remove_repository' do
let!(:project) { create(:project, :repository, :legacy_storage) }
let(:disk_path) { "#{project.disk_path}.git" }
@@ -346,52 +94,6 @@ describe Gitlab::Shell do
expect(TestEnv.storage_dir_exists?(project2.repository_storage, "#{project2.disk_path}.git")).to be(true)
end
end
-
- describe '#fork_repository' do
- let(:target_project) { create(:project) }
-
- subject do
- gitlab_shell.fork_repository(project, target_project)
- end
-
- it 'returns true when the command succeeds' do
- expect_any_instance_of(Gitlab::GitalyClient::RepositoryService).to receive(:fork_repository)
- .with(repository.raw_repository) { :gitaly_response_object }
-
- is_expected.to be_truthy
- end
-
- it 'return false when the command fails' do
- expect_any_instance_of(Gitlab::GitalyClient::RepositoryService).to receive(:fork_repository)
- .with(repository.raw_repository) { raise GRPC::BadStatus, 'bla' }
-
- is_expected.to be_falsy
- end
- end
-
- describe '#import_repository' do
- let(:import_url) { 'https://gitlab.com/gitlab-org/gitlab-foss.git' }
-
- context 'with gitaly' do
- it 'returns true when the command succeeds' do
- expect_any_instance_of(Gitlab::GitalyClient::RepositoryService).to receive(:import_repository).with(import_url)
-
- result = gitlab_shell.import_repository(project.repository_storage, project.disk_path, import_url, project.full_path)
-
- expect(result).to be_truthy
- end
-
- it 'raises an exception when the command fails' do
- expect_any_instance_of(Gitlab::GitalyClient::RepositoryService).to receive(:import_repository)
- .with(import_url) { raise GRPC::BadStatus, 'bla' }
- expect_any_instance_of(Gitlab::Shell::GitalyGitlabProjects).to receive(:output) { 'error'}
-
- expect do
- gitlab_shell.import_repository(project.repository_storage, project.disk_path, import_url, project.full_path)
- end.to raise_error(Gitlab::Shell::Error, "error")
- end
- end
- end
end
describe 'namespace actions' do
diff --git a/spec/lib/gitlab/sidekiq_cluster/cli_spec.rb b/spec/lib/gitlab/sidekiq_cluster/cli_spec.rb
new file mode 100644
index 00000000000..5bda8ff8c72
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_cluster/cli_spec.rb
@@ -0,0 +1,282 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rspec-parameterized'
+
+describe Gitlab::SidekiqCluster::CLI do
+ let(:cli) { described_class.new('/dev/null') }
+ let(:default_options) do
+ { env: 'test', directory: Dir.pwd, max_concurrency: 50, min_concurrency: 0, dryrun: false }
+ end
+
+ before do
+ stub_env('RAILS_ENV', 'test')
+ end
+
+ describe '#run' do
+ context 'without any arguments' do
+ it 'raises CommandError' do
+ expect { cli.run([]) }.to raise_error(described_class::CommandError)
+ end
+ end
+
+ context 'with arguments' do
+ before do
+ allow(cli).to receive(:write_pid)
+ allow(cli).to receive(:trap_signals)
+ allow(cli).to receive(:start_loop)
+ end
+
+ it 'starts the Sidekiq workers' do
+ expect(Gitlab::SidekiqCluster).to receive(:start)
+ .with([['foo']], default_options)
+ .and_return([])
+
+ cli.run(%w(foo))
+ end
+
+ it 'allows the special * selector' do
+ worker_queues = %w(foo bar baz)
+
+ expect(Gitlab::SidekiqConfig::CliMethods)
+ .to receive(:worker_queues).and_return(worker_queues)
+
+ expect(Gitlab::SidekiqCluster)
+ .to receive(:start).with([worker_queues], default_options)
+
+ cli.run(%w(*))
+ end
+
+ context 'with --negate flag' do
+ it 'starts Sidekiq workers for all queues in all_queues.yml except the ones in argv' do
+ expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(['baz'])
+ expect(Gitlab::SidekiqCluster).to receive(:start)
+ .with([['baz']], default_options)
+ .and_return([])
+
+ cli.run(%w(foo -n))
+ end
+ end
+
+ context 'with --max-concurrency flag' do
+ it 'starts Sidekiq workers for specified queues with a max concurrency' do
+ expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(%w(foo bar baz))
+ expect(Gitlab::SidekiqCluster).to receive(:start)
+ .with([%w(foo bar baz), %w(solo)], default_options.merge(max_concurrency: 2))
+ .and_return([])
+
+ cli.run(%w(foo,bar,baz solo -m 2))
+ end
+ end
+
+ context 'with --min-concurrency flag' do
+ it 'starts Sidekiq workers for specified queues with a min concurrency' do
+ expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(%w(foo bar baz))
+ expect(Gitlab::SidekiqCluster).to receive(:start)
+ .with([%w(foo bar baz), %w(solo)], default_options.merge(min_concurrency: 2))
+ .and_return([])
+
+ cli.run(%w(foo,bar,baz solo --min-concurrency 2))
+ end
+ end
+
+ context 'queue namespace expansion' do
+ it 'starts Sidekiq workers for all queues in all_queues.yml with a namespace in argv' do
+ expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(['cronjob:foo', 'cronjob:bar'])
+ expect(Gitlab::SidekiqCluster).to receive(:start)
+ .with([['cronjob', 'cronjob:foo', 'cronjob:bar']], default_options)
+ .and_return([])
+
+ cli.run(%w(cronjob))
+ end
+ end
+
+ context 'with --experimental-queue-selector' do
+ where do
+ {
+ 'memory-bound queues' => {
+ query: 'resource_boundary=memory',
+ included_queues: %w(project_export),
+ excluded_queues: %w(merge)
+ },
+ 'memory- or CPU-bound queues' => {
+ query: 'resource_boundary=memory,cpu',
+ included_queues: %w(auto_merge:auto_merge_process project_export),
+ excluded_queues: %w(merge)
+ },
+ 'high urgency CI queues' => {
+ query: 'feature_category=continuous_integration&urgency=high',
+ included_queues: %w(pipeline_cache:expire_job_cache pipeline_cache:expire_pipeline_cache),
+ excluded_queues: %w(merge)
+ },
+ 'CPU-bound high urgency CI queues' => {
+ query: 'feature_category=continuous_integration&urgency=high&resource_boundary=cpu',
+ included_queues: %w(pipeline_cache:expire_pipeline_cache),
+ excluded_queues: %w(pipeline_cache:expire_job_cache merge)
+ },
+ 'CPU-bound high urgency non-CI queues' => {
+ query: 'feature_category!=continuous_integration&urgency=high&resource_boundary=cpu',
+ included_queues: %w(new_issue),
+ excluded_queues: %w(pipeline_cache:expire_pipeline_cache)
+ },
+ 'CI and SCM queues' => {
+ query: 'feature_category=continuous_integration|feature_category=source_code_management',
+ included_queues: %w(pipeline_cache:expire_job_cache merge),
+ excluded_queues: %w(mailers)
+ }
+ }
+ end
+
+ with_them do
+ it 'expands queues by attributes' do
+ expect(Gitlab::SidekiqCluster).to receive(:start) do |queues, opts|
+ expect(opts).to eq(default_options)
+ expect(queues.first).to include(*included_queues)
+ expect(queues.first).not_to include(*excluded_queues)
+
+ []
+ end
+
+ cli.run(%W(--experimental-queue-selector #{query}))
+ end
+
+ it 'works when negated' do
+ expect(Gitlab::SidekiqCluster).to receive(:start) do |queues, opts|
+ expect(opts).to eq(default_options)
+ expect(queues.first).not_to include(*included_queues)
+ expect(queues.first).to include(*excluded_queues)
+
+ []
+ end
+
+ cli.run(%W(--negate --experimental-queue-selector #{query}))
+ end
+ end
+
+ it 'expands multiple queue groups correctly' do
+ expect(Gitlab::SidekiqCluster)
+ .to receive(:start)
+ .with([['chat_notification'], ['project_export']], default_options)
+ .and_return([])
+
+ cli.run(%w(--experimental-queue-selector feature_category=chatops&has_external_dependencies=true resource_boundary=memory&feature_category=importers))
+ end
+
+ it 'allows the special * selector' do
+ worker_queues = %w(foo bar baz)
+
+ expect(Gitlab::SidekiqConfig::CliMethods)
+ .to receive(:worker_queues).and_return(worker_queues)
+
+ expect(Gitlab::SidekiqCluster)
+ .to receive(:start).with([worker_queues], default_options)
+
+ cli.run(%w(--experimental-queue-selector *))
+ end
+
+ it 'errors when the selector matches no queues' do
+ expect(Gitlab::SidekiqCluster).not_to receive(:start)
+
+ expect { cli.run(%w(--experimental-queue-selector has_external_dependencies=true&has_external_dependencies=false)) }
+ .to raise_error(described_class::CommandError)
+ end
+
+ it 'errors on an invalid query' do
+ expect(Gitlab::SidekiqCluster).not_to receive(:start)
+
+ expect { cli.run(%w(--experimental-queue-selector unknown_field=chatops)) }
+ .to raise_error(Gitlab::SidekiqConfig::CliMethods::QueryError)
+ end
+ end
+ end
+ end
+
+ describe '#write_pid' do
+ context 'when a PID is specified' do
+ it 'writes the PID to a file' do
+ expect(Gitlab::SidekiqCluster).to receive(:write_pid).with('/dev/null')
+
+ cli.option_parser.parse!(%w(-P /dev/null))
+ cli.write_pid
+ end
+ end
+
+ context 'when no PID is specified' do
+ it 'does not write a PID' do
+ expect(Gitlab::SidekiqCluster).not_to receive(:write_pid)
+
+ cli.write_pid
+ end
+ end
+ end
+
+ describe '#wait_for_termination' do
+ it 'waits for termination of all sub-processes and succeeds after 3 checks' do
+ expect(Gitlab::SidekiqCluster).to receive(:any_alive?)
+ .with(an_instance_of(Array)).and_return(true, true, true, false)
+
+ expect(Gitlab::SidekiqCluster).to receive(:pids_alive)
+ .with([]).and_return([])
+
+ expect(Gitlab::SidekiqCluster).to receive(:signal_processes)
+ .with([], :KILL)
+
+ stub_const("Gitlab::SidekiqCluster::CLI::CHECK_TERMINATE_INTERVAL_SECONDS", 0.1)
+ stub_const("Gitlab::SidekiqCluster::CLI::TERMINATE_TIMEOUT_SECONDS", 1)
+ cli.wait_for_termination
+ end
+
+ context 'with hanging workers' do
+ before do
+ expect(cli).to receive(:write_pid)
+ expect(cli).to receive(:trap_signals)
+ expect(cli).to receive(:start_loop)
+ end
+
+ it 'hard kills workers after timeout expires' do
+ worker_pids = [101, 102, 103]
+ expect(Gitlab::SidekiqCluster).to receive(:start)
+ .with([['foo']], default_options)
+ .and_return(worker_pids)
+
+ expect(Gitlab::SidekiqCluster).to receive(:any_alive?)
+ .with(worker_pids).and_return(true).at_least(10).times
+
+ expect(Gitlab::SidekiqCluster).to receive(:pids_alive)
+ .with(worker_pids).and_return([102])
+
+ expect(Gitlab::SidekiqCluster).to receive(:signal_processes)
+ .with([102], :KILL)
+
+ cli.run(%w(foo))
+
+ stub_const("Gitlab::SidekiqCluster::CLI::CHECK_TERMINATE_INTERVAL_SECONDS", 0.1)
+ stub_const("Gitlab::SidekiqCluster::CLI::TERMINATE_TIMEOUT_SECONDS", 1)
+ cli.wait_for_termination
+ end
+ end
+ end
+
+ describe '#trap_signals' do
+ it 'traps the termination and forwarding signals' do
+ expect(Gitlab::SidekiqCluster).to receive(:trap_terminate)
+ expect(Gitlab::SidekiqCluster).to receive(:trap_forward)
+
+ cli.trap_signals
+ end
+ end
+
+ describe '#start_loop' do
+ it 'runs until one of the processes has been terminated' do
+ allow(cli).to receive(:sleep).with(a_kind_of(Numeric))
+
+ expect(Gitlab::SidekiqCluster).to receive(:all_alive?)
+ .with(an_instance_of(Array)).and_return(false)
+
+ expect(Gitlab::SidekiqCluster).to receive(:signal_processes)
+ .with(an_instance_of(Array), :TERM)
+
+ cli.start_loop
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_cluster_spec.rb b/spec/lib/gitlab/sidekiq_cluster_spec.rb
new file mode 100644
index 00000000000..fa5de04f2f3
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_cluster_spec.rb
@@ -0,0 +1,196 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rspec-parameterized'
+
+describe Gitlab::SidekiqCluster do
+ describe '.trap_signals' do
+ it 'traps the given signals' do
+ expect(described_class).to receive(:trap).ordered.with(:INT)
+ expect(described_class).to receive(:trap).ordered.with(:HUP)
+
+ described_class.trap_signals(%i(INT HUP))
+ end
+ end
+
+ describe '.trap_terminate' do
+ it 'traps the termination signals' do
+ expect(described_class).to receive(:trap_signals)
+ .with(described_class::TERMINATE_SIGNALS)
+
+ described_class.trap_terminate { }
+ end
+ end
+
+ describe '.trap_forward' do
+ it 'traps the signals to forward' do
+ expect(described_class).to receive(:trap_signals)
+ .with(described_class::FORWARD_SIGNALS)
+
+ described_class.trap_forward { }
+ end
+ end
+
+ describe '.signal' do
+ it 'sends a signal to the given process' do
+ allow(Process).to receive(:kill).with(:INT, 4)
+ expect(described_class.signal(4, :INT)).to eq(true)
+ end
+
+ it 'returns false when the process does not exist' do
+ allow(Process).to receive(:kill).with(:INT, 4).and_raise(Errno::ESRCH)
+ expect(described_class.signal(4, :INT)).to eq(false)
+ end
+ end
+
+ describe '.signal_processes' do
+ it 'sends a signal to every thread' do
+ expect(described_class).to receive(:signal).with(1, :INT)
+
+ described_class.signal_processes([1], :INT)
+ end
+ end
+
+ describe '.start' do
+ it 'starts Sidekiq with the given queues, environment and options' do
+ expected_options = {
+ env: :production,
+ directory: 'foo/bar',
+ max_concurrency: 20,
+ min_concurrency: 10,
+ dryrun: true
+ }
+
+ expect(described_class).to receive(:start_sidekiq).ordered.with(%w(foo), expected_options.merge(worker_id: 0))
+ expect(described_class).to receive(:start_sidekiq).ordered.with(%w(bar baz), expected_options.merge(worker_id: 1))
+
+ described_class.start([%w(foo), %w(bar baz)], env: :production, directory: 'foo/bar', max_concurrency: 20, min_concurrency: 10, dryrun: true)
+ end
+
+ it 'starts Sidekiq with the given queues and sensible default options' do
+ expected_options = {
+ env: :development,
+ directory: an_instance_of(String),
+ max_concurrency: 50,
+ min_concurrency: 0,
+ worker_id: an_instance_of(Integer),
+ dryrun: false
+ }
+
+ expect(described_class).to receive(:start_sidekiq).ordered.with(%w(foo bar baz), expected_options)
+ expect(described_class).to receive(:start_sidekiq).ordered.with(%w(solo), expected_options)
+
+ described_class.start([%w(foo bar baz), %w(solo)])
+ end
+ end
+
+ describe '.start_sidekiq' do
+ let(:first_worker_id) { 0 }
+ let(:options) do
+ { env: :production, directory: 'foo/bar', max_concurrency: 20, min_concurrency: 0, worker_id: first_worker_id, dryrun: false }
+ end
+ let(:env) { { "ENABLE_SIDEKIQ_CLUSTER" => "1", "SIDEKIQ_WORKER_ID" => first_worker_id.to_s } }
+ let(:args) { ['bundle', 'exec', 'sidekiq', anything, '-eproduction', *([anything] * 5)] }
+
+ it 'starts a Sidekiq process' do
+ allow(Process).to receive(:spawn).and_return(1)
+
+ expect(described_class).to receive(:wait_async).with(1)
+ expect(described_class.start_sidekiq(%w(foo), options)).to eq(1)
+ end
+
+ it 'handles duplicate queue names' do
+ allow(Process)
+ .to receive(:spawn)
+ .with(env, *args, anything)
+ .and_return(1)
+
+ expect(described_class).to receive(:wait_async).with(1)
+ expect(described_class.start_sidekiq(%w(foo foo bar baz), options)).to eq(1)
+ end
+
+ it 'runs the sidekiq process in a new process group' do
+ expect(Process)
+ .to receive(:spawn)
+ .with(anything, *args, a_hash_including(pgroup: true))
+ .and_return(1)
+
+ allow(described_class).to receive(:wait_async)
+ expect(described_class.start_sidekiq(%w(foo bar baz), options)).to eq(1)
+ end
+ end
+
+ describe '.concurrency' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:queue_count, :min, :max, :expected) do
+ 2 | 0 | 0 | 3 # No min or max specified
+ 2 | 0 | 9 | 3 # No min specified, value < max
+ 2 | 1 | 4 | 3 # Value between min and max
+ 2 | 4 | 5 | 4 # Value below range
+ 5 | 2 | 3 | 3 # Value above range
+ 2 | 1 | 1 | 1 # Value above explicit setting (min == max)
+ 0 | 3 | 3 | 3 # Value below explicit setting (min == max)
+ 1 | 4 | 3 | 3 # Min greater than max
+ end
+
+ with_them do
+ let(:queues) { Array.new(queue_count) }
+
+ it { expect(described_class.concurrency(queues, min, max)).to eq(expected) }
+ end
+ end
+
+ describe '.wait_async' do
+ it 'waits for a process in a separate thread' do
+ thread = described_class.wait_async(Process.spawn('true'))
+
+ # Upon success Process.wait just returns the PID.
+ expect(thread.value).to be_a_kind_of(Numeric)
+ end
+ end
+
+ # In the X_alive? checks, we check negative PIDs sometimes as a simple way
+ # to be sure the pids are definitely for non-existent processes.
+ # Note that -1 is special, and sends the signal to every process we have permission
+ # for, so we use -2, -3, etc.
+ describe '.all_alive?' do
+ it 'returns true if all processes are alive' do
+ processes = [Process.pid]
+
+ expect(described_class.all_alive?(processes)).to eq(true)
+ end
+
+ it 'returns false when a thread was not alive' do
+ processes = [-2]
+
+ expect(described_class.all_alive?(processes)).to eq(false)
+ end
+ end
+
+ describe '.any_alive?' do
+ it 'returns true if at least one process is alive' do
+ processes = [Process.pid, -2]
+
+ expect(described_class.any_alive?(processes)).to eq(true)
+ end
+
+ it 'returns false when all threads are dead' do
+ processes = [-2, -3]
+
+ expect(described_class.any_alive?(processes)).to eq(false)
+ end
+ end
+
+ describe '.write_pid' do
+ it 'writes the PID of the current process to the given file' do
+ handle = double(:handle)
+
+ allow(File).to receive(:open).with('/dev/null', 'w').and_yield(handle)
+
+ expect(handle).to receive(:write).with(Process.pid.to_s)
+
+ described_class.write_pid('/dev/null')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_config/cli_methods_spec.rb b/spec/lib/gitlab/sidekiq_config/cli_methods_spec.rb
index e6d0055df64..0aaff12f278 100644
--- a/spec/lib/gitlab/sidekiq_config/cli_methods_spec.rb
+++ b/spec/lib/gitlab/sidekiq_config/cli_methods_spec.rb
@@ -124,28 +124,28 @@ describe Gitlab::SidekiqConfig::CliMethods do
name: 'a',
feature_category: :category_a,
has_external_dependencies: false,
- latency_sensitive: false,
+ urgency: :low,
resource_boundary: :cpu
},
{
- name: 'a_2',
+ name: 'a:2',
feature_category: :category_a,
has_external_dependencies: false,
- latency_sensitive: true,
+ urgency: :high,
resource_boundary: :none
},
{
name: 'b',
feature_category: :category_b,
has_external_dependencies: true,
- latency_sensitive: true,
+ urgency: :high,
resource_boundary: :memory
},
{
name: 'c',
feature_category: :category_c,
has_external_dependencies: false,
- latency_sensitive: false,
+ urgency: :throttled,
resource_boundary: :memory
}
]
@@ -154,40 +154,40 @@ describe Gitlab::SidekiqConfig::CliMethods do
context 'with valid input' do
where(:query, :selected_queues) do
# feature_category
- 'feature_category=category_a' | %w(a a_2)
- 'feature_category=category_a,category_c' | %w(a a_2 c)
- 'feature_category=category_a|feature_category=category_c' | %w(a a_2 c)
+ 'feature_category=category_a' | %w(a a:2)
+ 'feature_category=category_a,category_c' | %w(a a:2 c)
+ 'feature_category=category_a|feature_category=category_c' | %w(a a:2 c)
'feature_category!=category_a' | %w(b c)
# has_external_dependencies
'has_external_dependencies=true' | %w(b)
- 'has_external_dependencies=false' | %w(a a_2 c)
- 'has_external_dependencies=true,false' | %w(a a_2 b c)
- 'has_external_dependencies=true|has_external_dependencies=false' | %w(a a_2 b c)
- 'has_external_dependencies!=true' | %w(a a_2 c)
-
- # latency_sensitive
- 'latency_sensitive=true' | %w(a_2 b)
- 'latency_sensitive=false' | %w(a c)
- 'latency_sensitive=true,false' | %w(a a_2 b c)
- 'latency_sensitive=true|latency_sensitive=false' | %w(a a_2 b c)
- 'latency_sensitive!=true' | %w(a c)
+ 'has_external_dependencies=false' | %w(a a:2 c)
+ 'has_external_dependencies=true,false' | %w(a a:2 b c)
+ 'has_external_dependencies=true|has_external_dependencies=false' | %w(a a:2 b c)
+ 'has_external_dependencies!=true' | %w(a a:2 c)
+
+ # urgency
+ 'urgency=high' | %w(a:2 b)
+ 'urgency=low' | %w(a)
+ 'urgency=high,low,throttled' | %w(a a:2 b c)
+ 'urgency=low|urgency=throttled' | %w(a c)
+ 'urgency!=high' | %w(a c)
# name
'name=a' | %w(a)
'name=a,b' | %w(a b)
- 'name=a,a_2|name=b' | %w(a a_2 b)
- 'name!=a,a_2' | %w(b c)
+ 'name=a,a:2|name=b' | %w(a a:2 b)
+ 'name!=a,a:2' | %w(b c)
# resource_boundary
'resource_boundary=memory' | %w(b c)
'resource_boundary=memory,cpu' | %w(a b c)
'resource_boundary=memory|resource_boundary=cpu' | %w(a b c)
- 'resource_boundary!=memory,cpu' | %w(a_2)
+ 'resource_boundary!=memory,cpu' | %w(a:2)
# combinations
- 'feature_category=category_a&latency_sensitive=true' | %w(a_2)
- 'feature_category=category_a&latency_sensitive=true|feature_category=category_c' | %w(a_2 c)
+ 'feature_category=category_a&urgency=high' | %w(a:2)
+ 'feature_category=category_a&urgency=high|feature_category=category_c' | %w(a:2 c)
end
with_them do
diff --git a/spec/lib/gitlab/sidekiq_config/worker_spec.rb b/spec/lib/gitlab/sidekiq_config/worker_spec.rb
index 38edd0f5eeb..817755e3507 100644
--- a/spec/lib/gitlab/sidekiq_config/worker_spec.rb
+++ b/spec/lib/gitlab/sidekiq_config/worker_spec.rb
@@ -11,8 +11,9 @@ describe Gitlab::SidekiqConfig::Worker do
get_feature_category: attributes[:feature_category],
get_weight: attributes[:weight],
get_worker_resource_boundary: attributes[:resource_boundary],
- latency_sensitive_worker?: attributes[:latency_sensitive],
- worker_has_external_dependencies?: attributes[:has_external_dependencies]
+ get_urgency: attributes[:urgency],
+ worker_has_external_dependencies?: attributes[:has_external_dependencies],
+ idempotent?: attributes[:idempotent]
)
described_class.new(inner_worker, ee: false)
@@ -46,7 +47,7 @@ describe Gitlab::SidekiqConfig::Worker do
describe 'delegations' do
[
:feature_category_not_owned?, :get_feature_category, :get_weight,
- :get_worker_resource_boundary, :latency_sensitive_worker?, :queue,
+ :get_worker_resource_boundary, :get_urgency, :queue,
:queue_namespace, :worker_has_external_dependencies?
].each do |meth|
it "delegates #{meth} to the worker class" do
@@ -87,17 +88,19 @@ describe Gitlab::SidekiqConfig::Worker do
attributes_a = {
feature_category: :source_code_management,
has_external_dependencies: false,
- latency_sensitive: false,
+ urgency: :low,
resource_boundary: :memory,
- weight: 2
+ weight: 2,
+ idempotent: true
}
attributes_b = {
feature_category: :not_owned,
has_external_dependencies: true,
- latency_sensitive: true,
+ urgency: :high,
resource_boundary: :unknown,
- weight: 1
+ weight: 3,
+ idempotent: false
}
worker_a = create_worker(queue: 'a', **attributes_a)
diff --git a/spec/lib/gitlab/sidekiq_logging/deduplication_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/deduplication_logger_spec.rb
new file mode 100644
index 00000000000..3cc5c0bed1b
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_logging/deduplication_logger_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::SidekiqLogging::DeduplicationLogger do
+ describe '#log_deduplication' do
+ let(:job) do
+ {
+ 'class' => 'TestWorker',
+ 'args' => [1234, 'hello', { 'key' => 'value' }],
+ 'jid' => 'da883554ee4fe414012f5f42',
+ 'correlation_id' => 'cid',
+ 'duplicate-of' => 'other_jid'
+ }
+ end
+
+ it 'logs a deduplication message to the sidekiq logger' do
+ expected_payload = {
+ 'job_status' => 'deduplicated',
+ 'message' => "#{job['class']} JID-#{job['jid']}: deduplicated: a fancy strategy",
+ 'deduplication_type' => 'a fancy strategy'
+ }
+ expect(Sidekiq.logger).to receive(:info).with(a_hash_including(expected_payload)).and_call_original
+
+ described_class.instance.log(job, "a fancy strategy")
+ end
+
+ it "does not modify the job" do
+ expect { described_class.instance.log(job, "a fancy strategy") }
+ .not_to change { job }
+ end
+ end
+end
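A minimal sketch of what this spec exercises: the logger takes the job hash and a strategy name, emits one structured info line, and leaves the job untouched. The payload shape below is only what the assertions pin down; the real class may add more fields.

# Hedged sketch of the deduplication logging behaviour asserted above.
def log(job, deduplication_type)
  payload = job.merge(               # merge returns a new hash, so the job is not mutated
    'job_status' => 'deduplicated',
    'message' => "#{job['class']} JID-#{job['jid']}: deduplicated: #{deduplication_type}",
    'deduplication_type' => deduplication_type
  )

  Sidekiq.logger.info(payload)
end
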
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index f294d7f7fcd..bd04d30f85f 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -11,7 +11,7 @@ describe Gitlab::SidekiqLogging::StructuredLogger do
let(:job) do
{
"class" => "TestWorker",
- "args" => [1234, 'hello'],
+ "args" => [1234, 'hello', { 'key' => 'value' }],
"retry" => false,
"queue" => "cronjob:test_queue",
"queue_namespace" => "cronjob",
@@ -30,6 +30,7 @@ describe Gitlab::SidekiqLogging::StructuredLogger do
let(:clock_thread_cputime_end) { 1.333333799 }
let(:start_payload) do
job.except('error_backtrace', 'error_class', 'error_message').merge(
+ 'args' => %w(1234 hello {"key"=>"value"}),
'message' => 'TestWorker JID-da883554ee4fe414012f5f42: start',
'job_status' => 'start',
'pid' => Process.pid,
@@ -99,13 +100,27 @@ describe Gitlab::SidekiqLogging::StructuredLogger do
end
end
+ it 'does not modify the job' do
+ Timecop.freeze(timestamp) do
+ job_copy = job.deep_dup
+
+ allow(logger).to receive(:info)
+ allow(subject).to receive(:log_job_start).and_call_original
+ allow(subject).to receive(:log_job_done).and_call_original
+
+ subject.call(job, 'test_queue') do
+ expect(job).to eq(job_copy)
+ end
+ end
+ end
+
context 'when the job args are bigger than the maximum allowed' do
it 'keeps args from the front until they exceed the limit' do
Timecop.freeze(timestamp) do
half_limit = Gitlab::Utils::LogLimitedArray::MAXIMUM_ARRAY_LENGTH / 2
job['args'] = [1, 2, 'a' * half_limit, 'b' * half_limit, 3]
- expected_args = job['args'].take(3) + ['...']
+ expected_args = job['args'].take(3).map(&:to_s) + ['...']
expect(logger).to receive(:info).with(start_payload.merge('args' => expected_args)).ordered
expect(logger).to receive(:info).with(end_payload.merge('args' => expected_args)).ordered
diff --git a/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb
index daee2c0bbd0..59639409183 100644
--- a/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb
@@ -9,7 +9,7 @@ describe Gitlab::SidekiqMiddleware::ClientMetrics do
let(:queue) { :test }
let(:worker_class) { worker.class }
let(:job) { {} }
- let(:default_labels) { { queue: queue.to_s, boundary: "", external_dependencies: "no", feature_category: "", latency_sensitive: "no" } }
+ let(:default_labels) { { queue: queue.to_s, boundary: "", external_dependencies: "no", feature_category: "", urgency: "low" } }
shared_examples "a metrics client middleware" do
context "with mocked prometheus" do
@@ -46,17 +46,17 @@ describe Gitlab::SidekiqMiddleware::ClientMetrics do
it_behaves_like "a metrics client middleware" do
let(:worker) { TestNonAttributedWorker.new }
- let(:labels) { default_labels }
+ let(:labels) { default_labels.merge(urgency: "") }
end
end
context "when workers are attributed" do
- def create_attributed_worker_class(latency_sensitive, external_dependencies, resource_boundary, category)
+ def create_attributed_worker_class(urgency, external_dependencies, resource_boundary, category)
klass = Class.new do
include Sidekiq::Worker
include WorkerAttributes
- latency_sensitive_worker! if latency_sensitive
+ urgency urgency if urgency
worker_has_external_dependencies! if external_dependencies
worker_resource_boundary resource_boundary unless resource_boundary == :unknown
feature_category category unless category.nil?
@@ -64,17 +64,24 @@ describe Gitlab::SidekiqMiddleware::ClientMetrics do
stub_const("TestAttributedWorker", klass)
end
- let(:latency_sensitive) { false }
+ let(:urgency) { nil }
let(:external_dependencies) { false }
let(:resource_boundary) { :unknown }
let(:feature_category) { nil }
- let(:worker_class) { create_attributed_worker_class(latency_sensitive, external_dependencies, resource_boundary, feature_category) }
+ let(:worker_class) { create_attributed_worker_class(urgency, external_dependencies, resource_boundary, feature_category) }
let(:worker) { worker_class.new }
- context "latency sensitive" do
+ context "high urgency" do
it_behaves_like "a metrics client middleware" do
- let(:latency_sensitive) { true }
- let(:labels) { default_labels.merge(latency_sensitive: "yes") }
+ let(:urgency) { :high }
+ let(:labels) { default_labels.merge(urgency: "high") }
+ end
+ end
+
+ context "no urgency" do
+ it_behaves_like "a metrics client middleware" do
+ let(:urgency) { :throttled }
+ let(:labels) { default_labels.merge(urgency: "throttled") }
end
end
@@ -108,11 +115,11 @@ describe Gitlab::SidekiqMiddleware::ClientMetrics do
context "combined" do
it_behaves_like "a metrics client middleware" do
- let(:latency_sensitive) { true }
+ let(:urgency) { :high }
let(:external_dependencies) { true }
let(:resource_boundary) { :cpu }
let(:feature_category) { :authentication }
- let(:labels) { default_labels.merge(latency_sensitive: "yes", external_dependencies: "yes", boundary: "cpu", feature_category: "authentication") }
+ let(:labels) { default_labels.merge(urgency: "high", external_dependencies: "yes", boundary: "cpu", feature_category: "authentication") }
end
end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/client_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/client_spec.rb
new file mode 100644
index 00000000000..b6e47afc7e8
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/client_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::SidekiqMiddleware::DuplicateJobs::Client, :clean_gitlab_redis_queues do
+ let(:worker_class) do
+ Class.new do
+ def self.name
+ 'TestDeduplicationWorker'
+ end
+
+ include ApplicationWorker
+
+ def perform(*args)
+ end
+ end
+ end
+
+ before do
+ stub_const('TestDeduplicationWorker', worker_class)
+ end
+
+ describe '#call' do
+ it 'adds a correct duplicate tag to the jobs', :aggregate_failures do
+ TestDeduplicationWorker.bulk_perform_async([['args1'], ['args2'], ['args1']])
+
+ job1, job2, job3 = TestDeduplicationWorker.jobs
+
+ expect(job1['duplicate-of']).to be_nil
+ expect(job2['duplicate-of']).to be_nil
+ expect(job3['duplicate-of']).to eq(job1['jid'])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
new file mode 100644
index 00000000000..058e0737a25
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
@@ -0,0 +1,157 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gitlab_redis_queues do
+ using RSpec::Parameterized::TableSyntax
+
+ subject(:duplicate_job) do
+ described_class.new(job, queue)
+ end
+
+ let(:job) { { 'class' => 'AuthorizedProjectsWorker', 'args' => [1], 'jid' => '123' } }
+ let(:queue) { 'authorized_projects' }
+
+ let(:idempotency_key) do
+ hash = Digest::SHA256.hexdigest("#{job['class']}:#{job['args'].join('-')}")
+ "#{Gitlab::Redis::Queues::SIDEKIQ_NAMESPACE}:duplicate:#{queue}:#{hash}"
+ end
+
+ describe '#schedule' do
+ it 'calls schedule on the strategy' do
+ expect do |block|
+ expect_next_instance_of(Gitlab::SidekiqMiddleware::DuplicateJobs::Strategies::UntilExecuting) do |strategy|
+ expect(strategy).to receive(:schedule).with(job, &block)
+ end
+
+ duplicate_job.schedule(&block)
+ end.to yield_control
+ end
+ end
+
+ describe '#perform' do
+ it 'calls perform on the strategy' do
+ expect do |block|
+ expect_next_instance_of(Gitlab::SidekiqMiddleware::DuplicateJobs::Strategies::UntilExecuting) do |strategy|
+ expect(strategy).to receive(:perform).with(job, &block)
+ end
+
+ duplicate_job.perform(&block)
+ end.to yield_control
+ end
+ end
+
+ describe '#check!' do
+ context 'when there was no job in the queue yet' do
+ it { expect(duplicate_job.check!).to eq('123') }
+
+ it "adds a key with ttl set to #{described_class::DUPLICATE_KEY_TTL}" do
+ expect { duplicate_job.check! }
+ .to change { read_idempotency_key_with_ttl(idempotency_key) }
+ .from([nil, -2])
+ .to(['123', be_within(1).of(described_class::DUPLICATE_KEY_TTL)])
+ end
+ end
+
+ context 'when there was already a job with same arguments in the same queue' do
+ before do
+ set_idempotency_key(idempotency_key, 'existing-key')
+ end
+
+ it { expect(duplicate_job.check!).to eq('existing-key') }
+
+ it "does not change the existing key's TTL" do
+ expect { duplicate_job.check! }
+ .not_to change { read_idempotency_key_with_ttl(idempotency_key) }
+ .from(['existing-key', -1])
+ end
+
+ it 'sets the existing jid' do
+ duplicate_job.check!
+
+ expect(duplicate_job.existing_jid).to eq('existing-key')
+ end
+ end
+ end
+
+ describe '#delete!' do
+ context "when we didn't track the definition" do
+ it { expect { duplicate_job.delete! }.not_to raise_error }
+ end
+
+ context 'when the key exists in redis' do
+ before do
+ set_idempotency_key(idempotency_key, 'existing-key')
+ end
+
+ it 'removes the key from redis' do
+ expect { duplicate_job.delete! }
+ .to change { read_idempotency_key_with_ttl(idempotency_key) }
+ .from(['existing-key', -1])
+ .to([nil, -2])
+ end
+ end
+ end
+
+ describe '#duplicate?' do
+ it "raises an error if the check wasn't performed" do
+ expect { duplicate_job.duplicate? }.to raise_error /Call `#check!` first/
+ end
+
+ it 'returns false if the existing jid equals the job jid' do
+ duplicate_job.check!
+
+ expect(duplicate_job.duplicate?).to be(false)
+ end
+
+ it 'returns true if the existing jid is different from the job jid' do
+ set_idempotency_key(idempotency_key, 'a different jid')
+ duplicate_job.check!
+
+ expect(duplicate_job.duplicate?).to be(true)
+ end
+ end
+
+ describe '#droppable?' do
+ where(:idempotent, :duplicate, :feature_enabled) do
+ # [true, false].repeated_permutation(3)
+ [[true, true, true],
+ [true, true, false],
+ [true, false, true],
+ [true, false, false],
+ [false, true, true],
+ [false, true, false],
+ [false, false, true],
+ [false, false, false]]
+ end
+
+ with_them do
+ before do
+ allow(AuthorizedProjectsWorker).to receive(:idempotent?).and_return(idempotent)
+ allow(duplicate_job).to receive(:duplicate?).and_return(duplicate)
+ stub_feature_flags(drop_duplicate_sidekiq_jobs: feature_enabled)
+ end
+
+ it 'is droppable when all conditions are met' do
+ if idempotent && duplicate && feature_enabled
+ expect(duplicate_job).to be_droppable
+ else
+ expect(duplicate_job).not_to be_droppable
+ end
+ end
+ end
+ end
+
+ def set_idempotency_key(key, value = '1')
+ Sidekiq.redis { |r| r.set(key, value) }
+ end
+
+ def read_idempotency_key_with_ttl(key)
+ Sidekiq.redis do |redis|
+ redis.pipelined do |p|
+ p.get(key)
+ p.ttl(key)
+ end
+ end
+ end
+end
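The `idempotency_key` helper above encodes the contract under test: duplicate detection keys on the worker class and its arguments, scoped to a queue, under the Sidekiq Redis namespace. A hedged sketch of deriving such a key (the helper name is illustrative, not the library API):

require 'digest'

# Sketch: build the Redis key used to detect duplicate jobs for a queue,
# mirroring the construction in the spec's let(:idempotency_key) block.
def duplicate_key_for(job, queue)
  digest = Digest::SHA256.hexdigest("#{job['class']}:#{job['args'].join('-')}")
  "#{Gitlab::Redis::Queues::SIDEKIQ_NAMESPACE}:duplicate:#{queue}:#{digest}"
end
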
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/server_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/server_spec.rb
new file mode 100644
index 00000000000..0ea248fbcf1
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/server_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::SidekiqMiddleware::DuplicateJobs::Server, :clean_gitlab_redis_queues do
+ let(:worker_class) do
+ Class.new do
+ def self.name
+ 'TestDeduplicationWorker'
+ end
+
+ include ApplicationWorker
+
+ def perform(*args)
+ end
+ end
+ end
+
+ before do
+ stub_const('TestDeduplicationWorker', worker_class)
+ end
+
+ around do |example|
+ Sidekiq::Testing.inline! { example.run }
+ end
+
+ before(:context) do
+ Sidekiq::Testing.server_middleware do |chain|
+ chain.add described_class
+ end
+ end
+
+ after(:context) do
+ Sidekiq::Testing.server_middleware do |chain|
+ chain.remove described_class
+ end
+ end
+
+ describe '#call' do
+ it 'removes the stored job from redis' do
+ bare_job = { 'class' => 'TestDeduplicationWorker', 'args' => ['hello'] }
+ job_definition = Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob.new(bare_job.dup, 'test_deduplication')
+
+ expect(Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob)
+ .to receive(:new).with(a_hash_including(bare_job), 'test_deduplication')
+ .and_return(job_definition).twice # once in client middleware
+ expect(job_definition).to receive(:delete!).and_call_original
+
+ TestDeduplicationWorker.perform_async('hello')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing_spec.rb
new file mode 100644
index 00000000000..31b51260ebd
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+describe Gitlab::SidekiqMiddleware::DuplicateJobs::Strategies::UntilExecuting do
+ let(:fake_duplicate_job) do
+ instance_double(Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob)
+ end
+
+ subject(:strategy) { described_class.new(fake_duplicate_job) }
+
+ describe '#schedule' do
+ before do
+ allow(Gitlab::SidekiqLogging::DeduplicationLogger.instance).to receive(:log)
+ end
+
+ it 'checks for duplicates before yielding' do
+ expect(fake_duplicate_job).to receive(:check!).ordered.and_return('a jid')
+ expect(fake_duplicate_job).to receive(:duplicate?).ordered.and_return(false)
+ expect(fake_duplicate_job).to receive(:droppable?).ordered.and_return(false)
+
+ expect { |b| strategy.schedule({}, &b) }.to yield_control
+ end
+
+ it 'adds the jid of the existing job to the job hash' do
+ allow(fake_duplicate_job).to receive(:check!).and_return('the jid')
+ allow(fake_duplicate_job).to receive(:droppable?).and_return(true)
+ job_hash = {}
+
+ expect(fake_duplicate_job).to receive(:duplicate?).and_return(true)
+ expect(fake_duplicate_job).to receive(:existing_jid).and_return('the jid')
+
+ strategy.schedule(job_hash) {}
+
+ expect(job_hash).to include('duplicate-of' => 'the jid')
+ end
+
+ context "when the job is droppable" do
+ before do
+ allow(fake_duplicate_job).to receive(:check!).and_return('the jid')
+ allow(fake_duplicate_job).to receive(:duplicate?).and_return(true)
+ allow(fake_duplicate_job).to receive(:existing_jid).and_return('the jid')
+ allow(fake_duplicate_job).to receive(:droppable?).and_return(true)
+ end
+
+ it 'drops the job' do
+ schedule_result = nil
+
+ expect(fake_duplicate_job).to receive(:droppable?).and_return(true)
+
+ expect { |b| schedule_result = strategy.schedule({}, &b) }.not_to yield_control
+ expect(schedule_result).to be(false)
+ end
+
+ it 'logs that the job was dropped' do
+ fake_logger = instance_double(Gitlab::SidekiqLogging::DeduplicationLogger)
+
+ expect(Gitlab::SidekiqLogging::DeduplicationLogger).to receive(:instance).and_return(fake_logger)
+ expect(fake_logger).to receive(:log).with(a_hash_including({ 'jid' => 'new jid' }), 'dropped until executing')
+
+ strategy.schedule({ 'jid' => 'new jid' }) {}
+ end
+ end
+ end
+
+ describe '#perform' do
+ it 'deletes the lock before executing' do
+ expect(fake_duplicate_job).to receive(:delete!).ordered
+ expect { |b| strategy.perform({}, &b) }.to yield_control
+ end
+ end
+end
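Taken together, the expectations above pin down the until_executing flow: register the job, annotate it with the JID that already holds the key when it is a duplicate, and drop it (returning false) when it is droppable; on the server side, release the lock before running. A rough sketch under those assumptions, where duplicate_job and the logger constant stand in for the surrounding class's collaborators:

# Sketch of the schedule/perform flow asserted by this spec.
def schedule(job)
  duplicate_job.check!                                  # claim or read the idempotency key
  job['duplicate-of'] = duplicate_job.existing_jid if duplicate_job.duplicate?

  if duplicate_job.droppable?
    Gitlab::SidekiqLogging::DeduplicationLogger.instance.log(job, 'dropped until executing')
    return false                                        # duplicate: do not enqueue
  end

  yield
end

def perform(job)
  duplicate_job.delete!                                 # release the lock before running
  yield
end
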
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies_spec.rb
new file mode 100644
index 00000000000..6ecc2a3a5f8
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+describe Gitlab::SidekiqMiddleware::DuplicateJobs::Strategies do
+ describe '.for' do
+ it 'returns the right class for `until_executing`' do
+ expect(described_class.for(:until_executing)).to eq(described_class::UntilExecuting)
+ end
+
+ it 'raises an UnknownStrategyError when passing an unknown key' do
+ expect { described_class.for(:unknown) }.to raise_error(described_class::UnknownStrategyError)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
index 65a961b34f8..3343587beff 100644
--- a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
@@ -11,7 +11,7 @@ describe Gitlab::SidekiqMiddleware::ServerMetrics do
let(:job) { {} }
let(:job_status) { :done }
let(:labels_with_job_status) { labels.merge(job_status: job_status.to_s) }
- let(:default_labels) { { queue: queue.to_s, boundary: "", external_dependencies: "no", feature_category: "", latency_sensitive: "no" } }
+ let(:default_labels) { { queue: queue.to_s, boundary: "", external_dependencies: "no", feature_category: "", urgency: "low" } }
shared_examples "a metrics middleware" do
context "with mocked prometheus" do
@@ -20,6 +20,8 @@ describe Gitlab::SidekiqMiddleware::ServerMetrics do
let(:queue_duration_seconds) { double('queue duration seconds metric') }
let(:completion_seconds_metric) { double('completion seconds metric') }
let(:user_execution_seconds_metric) { double('user execution seconds metric') }
+ let(:db_seconds_metric) { double('db seconds metric') }
+ let(:gitaly_seconds_metric) { double('gitaly seconds metric') }
let(:failed_total_metric) { double('failed total metric') }
let(:retried_total_metric) { double('retried total metric') }
let(:running_jobs_metric) { double('running jobs metric') }
@@ -28,6 +30,8 @@ describe Gitlab::SidekiqMiddleware::ServerMetrics do
allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_queue_duration_seconds, anything, anything, anything).and_return(queue_duration_seconds)
allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_completion_seconds, anything, anything, anything).and_return(completion_seconds_metric)
allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_cpu_seconds, anything, anything, anything).and_return(user_execution_seconds_metric)
+ allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_db_seconds, anything, anything, anything).and_return(db_seconds_metric)
+ allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_gitaly_seconds, anything, anything, anything).and_return(gitaly_seconds_metric)
allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_jobs_failed_total, anything).and_return(failed_total_metric)
allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_jobs_retried_total, anything).and_return(retried_total_metric)
allow(Gitlab::Metrics).to receive(:gauge).with(:sidekiq_running_jobs, anything, {}, :all).and_return(running_jobs_metric)
@@ -55,16 +59,23 @@ describe Gitlab::SidekiqMiddleware::ServerMetrics do
let(:queue_duration_for_job) { 0.01 }
+ let(:db_duration) { 3 }
+ let(:gitaly_duration) { 4 }
+
before do
allow(subject).to receive(:get_thread_cputime).and_return(thread_cputime_before, thread_cputime_after)
allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(monotonic_time_before, monotonic_time_after)
allow(Gitlab::InstrumentationHelper).to receive(:queue_duration_for_job).with(job).and_return(queue_duration_for_job)
+ allow(ActiveRecord::LogSubscriber).to receive(:runtime).and_return(db_duration * 1000)
+ allow(subject).to receive(:get_gitaly_time).and_return(gitaly_duration)
expect(running_jobs_metric).to receive(:increment).with(labels, 1)
expect(running_jobs_metric).to receive(:increment).with(labels, -1)
expect(queue_duration_seconds).to receive(:observe).with(labels, queue_duration_for_job) if queue_duration_for_job
expect(user_execution_seconds_metric).to receive(:observe).with(labels_with_job_status, thread_cputime_duration)
+ expect(db_seconds_metric).to receive(:observe).with(labels_with_job_status, db_duration)
+ expect(gitaly_seconds_metric).to receive(:observe).with(labels_with_job_status, gitaly_duration)
expect(completion_seconds_metric).to receive(:observe).with(labels_with_job_status, monotonic_time_duration)
end
@@ -130,34 +141,34 @@ describe Gitlab::SidekiqMiddleware::ServerMetrics do
include Sidekiq::Worker
end
let(:worker) { TestNonAttributedWorker.new }
- let(:labels) { default_labels }
+ let(:labels) { default_labels.merge(urgency: "") }
it_behaves_like "a metrics middleware"
end
context "when workers are attributed" do
- def create_attributed_worker_class(latency_sensitive, external_dependencies, resource_boundary, category)
+ def create_attributed_worker_class(urgency, external_dependencies, resource_boundary, category)
Class.new do
include Sidekiq::Worker
include WorkerAttributes
- latency_sensitive_worker! if latency_sensitive
+ urgency urgency if urgency
worker_has_external_dependencies! if external_dependencies
worker_resource_boundary resource_boundary unless resource_boundary == :unknown
feature_category category unless category.nil?
end
end
- let(:latency_sensitive) { false }
+ let(:urgency) { nil }
let(:external_dependencies) { false }
let(:resource_boundary) { :unknown }
let(:feature_category) { nil }
- let(:worker_class) { create_attributed_worker_class(latency_sensitive, external_dependencies, resource_boundary, feature_category) }
+ let(:worker_class) { create_attributed_worker_class(urgency, external_dependencies, resource_boundary, feature_category) }
let(:worker) { worker_class.new }
- context "latency sensitive" do
- let(:latency_sensitive) { true }
- let(:labels) { default_labels.merge(latency_sensitive: "yes") }
+ context "high urgency" do
+ let(:urgency) { :high }
+ let(:labels) { default_labels.merge(urgency: "high") }
it_behaves_like "a metrics middleware"
end
@@ -191,11 +202,11 @@ describe Gitlab::SidekiqMiddleware::ServerMetrics do
end
context "combined" do
- let(:latency_sensitive) { true }
+ let(:urgency) { :throttled }
let(:external_dependencies) { true }
let(:resource_boundary) { :cpu }
let(:feature_category) { :authentication }
- let(:labels) { default_labels.merge(latency_sensitive: "yes", external_dependencies: "yes", boundary: "cpu", feature_category: "authentication") }
+ let(:labels) { default_labels.merge(urgency: "throttled", external_dependencies: "yes", boundary: "cpu", feature_category: "authentication") }
it_behaves_like "a metrics middleware"
end
diff --git a/spec/lib/gitlab/sidekiq_middleware_spec.rb b/spec/lib/gitlab/sidekiq_middleware_spec.rb
index 19242d25e27..88f83ebc2ac 100644
--- a/spec/lib/gitlab/sidekiq_middleware_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware_spec.rb
@@ -8,6 +8,8 @@ describe Gitlab::SidekiqMiddleware do
include Sidekiq::Worker
def perform(_arg)
+ Gitlab::SafeRequestStore['gitaly_call_actual'] = 1
+ Gitlab::GitalyClient.query_time = 5
end
end
@@ -46,7 +48,8 @@ describe Gitlab::SidekiqMiddleware do
Gitlab::SidekiqMiddleware::MemoryKiller,
Gitlab::SidekiqMiddleware::RequestStoreMiddleware,
Gitlab::SidekiqMiddleware::WorkerContext::Server,
- Gitlab::SidekiqMiddleware::AdminMode::Server
+ Gitlab::SidekiqMiddleware::AdminMode::Server,
+ Gitlab::SidekiqMiddleware::DuplicateJobs::Server
]
end
let(:enabled_sidekiq_middlewares) { all_sidekiq_middlewares - disabled_sidekiq_middlewares }
@@ -98,6 +101,24 @@ describe Gitlab::SidekiqMiddleware do
it "passes through server middlewares" do
worker_class.perform_async(*job_args)
end
+
+ context "server metrics" do
+ let(:gitaly_histogram) { double(:gitaly_histogram) }
+
+ before do
+ allow(Gitlab::Metrics).to receive(:histogram).and_call_original
+
+ allow(Gitlab::Metrics).to receive(:histogram)
+ .with(:sidekiq_jobs_gitaly_seconds, anything, anything, anything)
+ .and_return(gitaly_histogram)
+ end
+
+ it "records correct Gitaly duration" do
+ expect(gitaly_histogram).to receive(:observe).with(anything, 5.0)
+
+ worker_class.perform_async(*job_args)
+ end
+ end
end
end
@@ -117,7 +138,8 @@ describe Gitlab::SidekiqMiddleware do
Gitlab::SidekiqMiddleware::ClientMetrics,
Gitlab::SidekiqMiddleware::WorkerContext::Client,
Labkit::Middleware::Sidekiq::Client,
- Gitlab::SidekiqMiddleware::AdminMode::Client
+ Gitlab::SidekiqMiddleware::AdminMode::Client,
+ Gitlab::SidekiqMiddleware::DuplicateJobs::Client
]
end
diff --git a/spec/lib/gitlab/sidekiq_queue_spec.rb b/spec/lib/gitlab/sidekiq_queue_spec.rb
new file mode 100644
index 00000000000..f5be8d9bfed
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_queue_spec.rb
@@ -0,0 +1,87 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::SidekiqQueue, :clean_gitlab_redis_queues do
+ around do |example|
+ Sidekiq::Queue.new('authorized_projects').clear
+ Sidekiq::Testing.disable!(&example)
+ Sidekiq::Queue.new('authorized_projects').clear
+ end
+
+ def add_job(user, args)
+ Sidekiq::Client.push(
+ 'class' => 'AuthorizedProjectsWorker',
+ 'queue' => 'authorized_projects',
+ 'args' => args,
+ 'meta.user' => user.username
+ )
+ end
+
+ describe '#drop_jobs!' do
+ shared_examples 'queue processing' do
+ let(:sidekiq_queue) { described_class.new('authorized_projects') }
+ let_it_be(:sidekiq_queue_user) { create(:user) }
+
+ before do
+ add_job(create(:user), [1])
+ add_job(sidekiq_queue_user, [2])
+ add_job(sidekiq_queue_user, [3])
+ end
+
+ context 'when the queue is not processed in time' do
+ before do
+ allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(1, 2, 12)
+ end
+
+ it 'returns a non-completion flag, the number of jobs deleted, and the remaining queue size' do
+ expect(sidekiq_queue.drop_jobs!(search_metadata, timeout: 10))
+ .to eq(completed: false,
+ deleted_jobs: timeout_deleted,
+ queue_size: 3 - timeout_deleted)
+ end
+ end
+
+ context 'when the queue is processed in time' do
+ it 'returns a completion flag, the number of jobs deleted, and the remaining queue size' do
+ expect(sidekiq_queue.drop_jobs!(search_metadata, timeout: 10))
+ .to eq(completed: true,
+ deleted_jobs: no_timeout_deleted,
+ queue_size: 3 - no_timeout_deleted)
+ end
+ end
+ end
+
+ context 'when there are no matching jobs' do
+ include_examples 'queue processing' do
+ let(:search_metadata) { { project: 1 } }
+ let(:timeout_deleted) { 0 }
+ let(:no_timeout_deleted) { 0 }
+ end
+ end
+
+ context 'when there are matching jobs' do
+ include_examples 'queue processing' do
+ let(:search_metadata) { { user: sidekiq_queue_user.username } }
+ let(:timeout_deleted) { 1 }
+ let(:no_timeout_deleted) { 2 }
+ end
+ end
+
+ context 'when there are no valid metadata keys passed' do
+ it 'raises NoMetadataError' do
+ add_job(create(:user), [1])
+
+ expect { described_class.new('authorized_projects').drop_jobs!({ username: 'sidekiq_queue_user' }, timeout: 1) }
+ .to raise_error(described_class::NoMetadataError)
+ end
+ end
+
+ context 'when the queue does not exist' do
+ it 'raises InvalidQueueError' do
+ expect { described_class.new('foo').drop_jobs!({ user: 'sidekiq_queue_user' }, timeout: 1) }
+ .to raise_error(described_class::InvalidQueueError)
+ end
+ end
+ end
+end
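As a usage sketch grounded in the examples above: drop_jobs! takes a metadata filter (keys are matched against the job's 'meta.'-prefixed fields) plus a timeout, and reports whether it finished, how many jobs it deleted, and what is left in the queue. The username and counts below are illustrative only.

# Delete queued authorized_projects jobs enqueued on behalf of one user,
# giving up after 10 seconds.
result = Gitlab::SidekiqQueue.new('authorized_projects')
                             .drop_jobs!({ user: 'some-username' }, timeout: 10)
result # => { completed: true, deleted_jobs: 2, queue_size: 1 }, for example
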
diff --git a/spec/lib/gitlab/slash_commands/issue_move_spec.rb b/spec/lib/gitlab/slash_commands/issue_move_spec.rb
index 962ac3668bc..1a45b0e06ba 100644
--- a/spec/lib/gitlab/slash_commands/issue_move_spec.rb
+++ b/spec/lib/gitlab/slash_commands/issue_move_spec.rb
@@ -22,11 +22,11 @@ describe Gitlab::SlashCommands::IssueMove, service: true do
end
describe '#execute' do
- set(:user) { create(:user) }
- set(:issue) { create(:issue) }
- set(:chat_name) { create(:chat_name, user: user) }
- set(:project) { issue.project }
- set(:other_project) { create(:project, namespace: project.namespace) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:issue) { create(:issue) }
+ let_it_be(:chat_name) { create(:chat_name, user: user) }
+ let_it_be(:project) { issue.project }
+ let_it_be(:other_project) { create(:project, namespace: project.namespace) }
before do
[project, other_project].each { |prj| prj.add_maintainer(user) }
diff --git a/spec/lib/gitlab/slash_commands/presenters/issue_move_spec.rb b/spec/lib/gitlab/slash_commands/presenters/issue_move_spec.rb
index 56b64d32192..7726c3b6a87 100644
--- a/spec/lib/gitlab/slash_commands/presenters/issue_move_spec.rb
+++ b/spec/lib/gitlab/slash_commands/presenters/issue_move_spec.rb
@@ -3,11 +3,11 @@
require 'spec_helper'
describe Gitlab::SlashCommands::Presenters::IssueMove do
- set(:admin) { create(:admin) }
- set(:project) { create(:project) }
- set(:other_project) { create(:project) }
- set(:old_issue) { create(:issue, project: project) }
- set(:new_issue) { Issues::MoveService.new(project, admin).execute(old_issue, other_project) }
+ let_it_be(:admin) { create(:admin) }
+ let_it_be(:project, reload: true) { create(:project) }
+ let_it_be(:other_project) { create(:project) }
+ let_it_be(:old_issue, reload: true) { create(:issue, project: project) }
+ let(:new_issue) { Issues::MoveService.new(project, admin).execute(old_issue, other_project) }
let(:attachment) { subject[:attachments].first }
subject { described_class.new(new_issue).present(old_issue) }
diff --git a/spec/lib/gitlab/template/finders/global_template_finder_spec.rb b/spec/lib/gitlab/template/finders/global_template_finder_spec.rb
index 082ffa855b7..580da497944 100644
--- a/spec/lib/gitlab/template/finders/global_template_finder_spec.rb
+++ b/spec/lib/gitlab/template/finders/global_template_finder_spec.rb
@@ -15,23 +15,87 @@ describe Gitlab::Template::Finders::GlobalTemplateFinder do
FileUtils.rm_rf(base_dir)
end
- subject(:finder) { described_class.new(base_dir, '', 'Foo' => '', 'Bar' => 'bar') }
+ subject(:finder) { described_class.new(base_dir, '', { 'General' => '', 'Bar' => 'Bar' }, exclusions: exclusions) }
+
+ let(:exclusions) { [] }
describe '.find' do
- it 'finds a template in the Foo category' do
- create_template!('test-template')
+ context 'with a non-prefixed General template' do
+ before do
+ create_template!('test-template')
+ end
- expect(finder.find('test-template')).to be_present
- end
+ it 'finds the template with no prefix' do
+ expect(finder.find('test-template')).to be_present
+ end
+
+ it 'does not find a prefixed template' do
+ expect(finder.find('Bar/test-template')).to be_nil
+ end
+
+ it 'does not permit path traversal requests' do
+ expect { finder.find('../foo') }.to raise_error(/Invalid path/)
+ end
- it 'finds a template in the Bar category' do
- create_template!('bar/test-template')
+ context 'while listed as an exclusion' do
+ let(:exclusions) { %w[test-template] }
- expect(finder.find('test-template')).to be_present
+ it 'does not find the template without a prefix' do
+ expect(finder.find('test-template')).to be_nil
+ end
+
+ it 'does not find the template with a prefix' do
+ expect(finder.find('Bar/test-template')).to be_nil
+ end
+
+ it 'finds another prefixed template with the same name' do
+ create_template!('Bar/test-template')
+
+ expect(finder.find('test-template')).to be_nil
+ expect(finder.find('Bar/test-template')).to be_present
+ end
+ end
end
- it 'does not permit path traversal requests' do
- expect { finder.find('../foo') }.to raise_error(/Invalid path/)
+ context 'with a prefixed template' do
+ before do
+ create_template!('Bar/test-template')
+ end
+
+ it 'finds the template with a prefix' do
+ expect(finder.find('Bar/test-template')).to be_present
+ end
+
+ # NOTE: This spec fails, the template Bar/test-template is found
+ # See Gitlab issue: https://gitlab.com/gitlab-org/gitlab/issues/205719
+ xit 'does not find the template without a prefix' do
+ expect(finder.find('test-template')).to be_nil
+ end
+
+ it 'does not permit path traversal requests' do
+ expect { finder.find('../foo') }.to raise_error(/Invalid path/)
+ end
+
+ context 'while listed as an exclusion' do
+ let(:exclusions) { %w[Bar/test-template] }
+
+ it 'does not find the template with a prefix' do
+ expect(finder.find('Bar/test-template')).to be_nil
+ end
+
+ # NOTE: This spec fails, the template Bar/test-template is found
+ # See Gitlab issue: https://gitlab.com/gitlab-org/gitlab/issues/205719
+ xit 'does not find the template without a prefix' do
+ expect(finder.find('test-template')).to be_nil
+ end
+
+ it 'finds another non-prefixed template with the same name' do
+ create_template!('test-template')
+
+ expect(finder.find('test-template')).to be_present
+ expect(finder.find('Bar/test-template')).to be_nil
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/template/finders/repo_template_finders_spec.rb b/spec/lib/gitlab/template/finders/repo_template_finders_spec.rb
index c8f2a37c5d6..cd5cde76b73 100644
--- a/spec/lib/gitlab/template/finders/repo_template_finders_spec.rb
+++ b/spec/lib/gitlab/template/finders/repo_template_finders_spec.rb
@@ -3,8 +3,7 @@
require 'spec_helper'
describe Gitlab::Template::Finders::RepoTemplateFinder do
- set(:project) { create(:project, :repository) }
-
+ let_it_be(:project) { create(:project, :repository) }
let(:categories) { { 'HTML' => 'html' } }
subject(:finder) { described_class.new(project, 'files/', '.html', categories) }
diff --git a/spec/lib/gitlab/tracing_spec.rb b/spec/lib/gitlab/tracing_spec.rb
deleted file mode 100644
index e913bb600ec..00000000000
--- a/spec/lib/gitlab/tracing_spec.rb
+++ /dev/null
@@ -1,69 +0,0 @@
-# frozen_string_literal: true
-
-require 'fast_spec_helper'
-require 'rspec-parameterized'
-
-describe Gitlab::Tracing do
- using RSpec::Parameterized::TableSyntax
-
- describe '.enabled?' do
- where(:connection_string, :enabled_state) do
- nil | false
- "" | false
- "opentracing://jaeger" | true
- end
-
- with_them do
- it 'returns the correct state for .enabled?' do
- expect(described_class).to receive(:connection_string).and_return(connection_string)
-
- expect(described_class.enabled?).to eq(enabled_state)
- end
- end
- end
-
- describe '.tracing_url_enabled?' do
- where(:enabled?, :tracing_url_template, :tracing_url_enabled_state) do
- false | nil | false
- false | "" | false
- false | "http://localhost" | false
- true | nil | false
- true | "" | false
- true | "http://localhost" | true
- end
-
- with_them do
- it 'returns the correct state for .tracing_url_enabled?' do
- expect(described_class).to receive(:enabled?).and_return(enabled?)
- allow(described_class).to receive(:tracing_url_template).and_return(tracing_url_template)
-
- expect(described_class.tracing_url_enabled?).to eq(tracing_url_enabled_state)
- end
- end
- end
-
- describe '.tracing_url' do
- where(:tracing_url_enabled?, :tracing_url_template, :correlation_id, :process_name, :tracing_url) do
- false | "https://localhost" | "123" | "web" | nil
- true | "https://localhost" | "123" | "web" | "https://localhost"
- true | "https://localhost?service={{ service }}" | "123" | "web" | "https://localhost?service=web"
- true | "https://localhost?c={{ correlation_id }}" | "123" | "web" | "https://localhost?c=123"
- true | "https://localhost?c={{ correlation_id }}&s={{ service }}" | "123" | "web" | "https://localhost?c=123&s=web"
- true | "https://localhost?c={{ correlation_id }}" | nil | "web" | "https://localhost?c="
- true | "https://localhost?c={{ correlation_id }}&s=%22{{ service }}%22" | "123" | "web" | "https://localhost?c=123&s=%22web%22"
- true | "https://localhost?c={{correlation_id}}&s={{service}}" | "123" | "web" | "https://localhost?c=123&s=web"
- true | "https://localhost?c={{correlation_id }}&s={{ service}}" | "123" | "web" | "https://localhost?c=123&s=web"
- end
-
- with_them do
- it 'returns the correct state for .tracing_url' do
- expect(described_class).to receive(:tracing_url_enabled?).and_return(tracing_url_enabled?)
- allow(described_class).to receive(:tracing_url_template).and_return(tracing_url_template)
- allow(Labkit::Correlation::CorrelationId).to receive(:current_id).and_return(correlation_id)
- allow(Gitlab).to receive(:process_name).and_return(process_name)
-
- expect(described_class.tracing_url).to eq(tracing_url)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/url_blocker_spec.rb b/spec/lib/gitlab/url_blocker_spec.rb
index 97859c82e9e..08678de87c9 100644
--- a/spec/lib/gitlab/url_blocker_spec.rb
+++ b/spec/lib/gitlab/url_blocker_spec.rb
@@ -502,63 +502,31 @@ describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
end
end
- context 'with ip ranges in whitelist' do
- let(:ipv4_range) { '127.0.0.0/28' }
- let(:ipv6_range) { 'fd84:6d02:f6d8:c89e::/124' }
-
+ context 'with ports' do
let(:whitelist) do
- [
- ipv4_range,
- ipv6_range
- ]
+ ["127.0.0.1:2000"]
end
- it 'blocks ipv4 range when not in whitelist' do
- stub_application_setting(outbound_local_requests_whitelist: [])
-
- IPAddr.new(ipv4_range).to_range.to_a.each do |ip|
- expect(described_class).to be_blocked_url("http://#{ip}",
- url_blocker_attributes)
+ it 'allows domain with port when resolved ip has port whitelisted' do
+ stub_domain_resolv("www.resolve-domain.com", '127.0.0.1') do
+ expect(described_class).not_to be_blocked_url("http://www.resolve-domain.com:2000", url_blocker_attributes)
end
end
-
- it 'allows all ipv4s in the range when in whitelist' do
- IPAddr.new(ipv4_range).to_range.to_a.each do |ip|
- expect(described_class).not_to be_blocked_url("http://#{ip}",
- url_blocker_attributes)
- end
- end
-
- it 'blocks ipv6 range when not in whitelist' do
- stub_application_setting(outbound_local_requests_whitelist: [])
-
- IPAddr.new(ipv6_range).to_range.to_a.each do |ip|
- expect(described_class).to be_blocked_url("http://[#{ip}]",
- url_blocker_attributes)
- end
- end
-
- it 'allows all ipv6s in the range when in whitelist' do
- IPAddr.new(ipv6_range).to_range.to_a.each do |ip|
- expect(described_class).not_to be_blocked_url("http://[#{ip}]",
- url_blocker_attributes)
- end
- end
-
- it 'blocks IPs outside the range' do
- expect(described_class).to be_blocked_url("http://[fd84:6d02:f6d8:c89e:0:0:1:f]",
- url_blocker_attributes)
-
- expect(described_class).to be_blocked_url("http://127.0.1.15",
- url_blocker_attributes)
- end
end
end
end
- def stub_domain_resolv(domain, ip, &block)
- address = double(ip_address: ip, ipv4_private?: true, ipv6_link_local?: false, ipv4_loopback?: false, ipv6_loopback?: false, ipv4?: false)
- allow(Addrinfo).to receive(:getaddrinfo).with(domain, any_args).and_return([address])
+ def stub_domain_resolv(domain, ip, port = 80, &block)
+ address = instance_double(Addrinfo,
+ ip_address: ip,
+ ipv4_private?: true,
+ ipv6_linklocal?: false,
+ ipv4_loopback?: false,
+ ipv6_loopback?: false,
+ ipv4?: false,
+ ip_port: port
+ )
+ allow(Addrinfo).to receive(:getaddrinfo).with(domain, port, any_args).and_return([address])
allow(address).to receive(:ipv6_v4mapped?).and_return(false)
yield
diff --git a/spec/lib/gitlab/url_blockers/domain_whitelist_entry_spec.rb b/spec/lib/gitlab/url_blockers/domain_whitelist_entry_spec.rb
new file mode 100644
index 00000000000..34ea6c328e6
--- /dev/null
+++ b/spec/lib/gitlab/url_blockers/domain_whitelist_entry_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::UrlBlockers::DomainWhitelistEntry do
+ let(:domain) { 'www.example.com' }
+
+ describe '#initialize' do
+ it 'initializes without port' do
+ domain_whitelist_entry = described_class.new(domain)
+
+ expect(domain_whitelist_entry.domain).to eq(domain)
+ expect(domain_whitelist_entry.port).to be(nil)
+ end
+
+ it 'initializes with port' do
+ port = 8080
+ domain_whitelist_entry = described_class.new(domain, port: port)
+
+ expect(domain_whitelist_entry.domain).to eq(domain)
+ expect(domain_whitelist_entry.port).to eq(port)
+ end
+ end
+
+ describe '#match?' do
+ it 'matches when domain and port are equal' do
+ port = 8080
+ domain_whitelist_entry = described_class.new(domain, port: port)
+
+ expect(domain_whitelist_entry).to be_match(domain, port)
+ end
+
+ it 'matches any port when port is nil' do
+ domain_whitelist_entry = described_class.new(domain)
+
+ expect(domain_whitelist_entry).to be_match(domain, 8080)
+ expect(domain_whitelist_entry).to be_match(domain, 9090)
+ end
+
+ it 'does not match when port is present but requested_port is nil' do
+ domain_whitelist_entry = described_class.new(domain, port: 8080)
+
+ expect(domain_whitelist_entry).not_to be_match(domain, nil)
+ end
+
+ it 'matches when port and requested_port are nil' do
+ domain_whitelist_entry = described_class.new(domain)
+
+ expect(domain_whitelist_entry).to be_match(domain)
+ end
+
+ it 'does not match if domain is not equal' do
+ domain_whitelist_entry = described_class.new(domain)
+
+ expect(domain_whitelist_entry).not_to be_match('www.gitlab.com', 8080)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/url_blockers/ip_whitelist_entry_spec.rb b/spec/lib/gitlab/url_blockers/ip_whitelist_entry_spec.rb
new file mode 100644
index 00000000000..042d135d265
--- /dev/null
+++ b/spec/lib/gitlab/url_blockers/ip_whitelist_entry_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::UrlBlockers::IpWhitelistEntry do
+ let(:ipv4) { IPAddr.new('192.168.1.1') }
+
+ describe '#initialize' do
+ it 'initializes without port' do
+ ip_whitelist_entry = described_class.new(ipv4)
+
+ expect(ip_whitelist_entry.ip).to eq(ipv4)
+ expect(ip_whitelist_entry.port).to be(nil)
+ end
+
+ it 'initializes with port' do
+ port = 8080
+ ip_whitelist_entry = described_class.new(ipv4, port: port)
+
+ expect(ip_whitelist_entry.ip).to eq(ipv4)
+ expect(ip_whitelist_entry.port).to eq(port)
+ end
+ end
+
+ describe '#match?' do
+ it 'matches with equivalent IP and port' do
+ port = 8080
+ ip_whitelist_entry = described_class.new(ipv4, port: port)
+
+ expect(ip_whitelist_entry).to be_match(ipv4.to_s, port)
+ end
+
+ it 'matches any port when port is nil' do
+ ip_whitelist_entry = described_class.new(ipv4)
+
+ expect(ip_whitelist_entry).to be_match(ipv4.to_s, 8080)
+ expect(ip_whitelist_entry).to be_match(ipv4.to_s, 9090)
+ end
+
+ it 'does not match when port is present but requested_port is nil' do
+ ip_whitelist_entry = described_class.new(ipv4, port: 8080)
+
+ expect(ip_whitelist_entry).not_to be_match(ipv4.to_s, nil)
+ end
+
+ it 'matches when port and requested_port are nil' do
+ ip_whitelist_entry = described_class.new(ipv4)
+
+ expect(ip_whitelist_entry).to be_match(ipv4.to_s)
+ end
+
+ it 'works with ipv6' do
+ ipv6 = IPAddr.new('fe80::c800:eff:fe74:8')
+ ip_whitelist_entry = described_class.new(ipv6)
+
+ expect(ip_whitelist_entry).to be_match(ipv6.to_s, 8080)
+ end
+
+ it 'matches ipv4 within IPv4 range' do
+ ipv4_range = IPAddr.new('127.0.0.0/28')
+ ip_whitelist_entry = described_class.new(ipv4_range)
+
+ expect(ip_whitelist_entry).to be_match(ipv4_range.to_range.last.to_s, 8080)
+ expect(ip_whitelist_entry).not_to be_match('127.0.1.1', 8080)
+ end
+
+ it 'matches IPv6 within IPv6 range' do
+ ipv6_range = IPAddr.new('fd84:6d02:f6d8:c89e::/124')
+ ip_whitelist_entry = described_class.new(ipv6_range)
+
+ expect(ip_whitelist_entry).to be_match(ipv6_range.to_range.last.to_s, 8080)
+ expect(ip_whitelist_entry).not_to be_match('fd84:6d02:f6d8:f::f', 8080)
+ end
+ end
+end
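The behaviour pinned down above — a nil configured port matches any requested port, a configured port never matches a nil request, and CIDR entries match every address they contain — can be sketched as follows. Attribute names follow the spec; this is an illustration, not the exact implementation.

# Sketch of IpWhitelistEntry#match? as exercised by the examples above.
def match?(requested_ip, requested_port = nil)
  return false unless ip.include?(IPAddr.new(requested_ip)) # covers single IPs and CIDR ranges
  return true if port.nil?                                   # no port configured: any port matches

  port == requested_port                                     # otherwise ports must be equal
end
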
diff --git a/spec/lib/gitlab/url_blockers/url_whitelist_spec.rb b/spec/lib/gitlab/url_blockers/url_whitelist_spec.rb
index 906e0f0ba3d..e43cd819838 100644
--- a/spec/lib/gitlab/url_blockers/url_whitelist_spec.rb
+++ b/spec/lib/gitlab/url_blockers/url_whitelist_spec.rb
@@ -13,20 +13,17 @@ describe Gitlab::UrlBlockers::UrlWhitelist do
end
describe '#domain_whitelisted?' do
- let(:whitelist) do
- [
- 'www.example.com',
- 'example.com'
- ]
- end
+ let(:whitelist) { ['www.example.com', 'example.com'] }
it 'returns true if domains present in whitelist' do
+ not_whitelisted = ['subdomain.example.com', 'example.org']
+
aggregate_failures do
whitelist.each do |domain|
expect(described_class).to be_domain_whitelisted(domain)
end
- ['subdomain.example.com', 'example.org'].each do |domain|
+ not_whitelisted.each do |domain|
expect(described_class).not_to be_domain_whitelisted(domain)
end
end
@@ -35,6 +32,28 @@ describe Gitlab::UrlBlockers::UrlWhitelist do
it 'returns false when domain is blank' do
expect(described_class).not_to be_domain_whitelisted(nil)
end
+
+ context 'with ports' do
+ let(:whitelist) { ['example.io:3000'] }
+
+ it 'returns true if domain and ports present in whitelist' do
+ parsed_whitelist = [['example.io', { port: 3000 }]]
+ not_whitelisted = [
+ 'example.io',
+ ['example.io', { port: 3001 }]
+ ]
+
+ aggregate_failures do
+ parsed_whitelist.each do |domain_and_port|
+ expect(described_class).to be_domain_whitelisted(*domain_and_port)
+ end
+
+ not_whitelisted.each do |domain_and_port|
+ expect(described_class).not_to be_domain_whitelisted(*domain_and_port)
+ end
+ end
+ end
+ end
end
describe '#ip_whitelisted?' do
@@ -68,5 +87,78 @@ describe Gitlab::UrlBlockers::UrlWhitelist do
it 'returns false when ip is blank' do
expect(described_class).not_to be_ip_whitelisted(nil)
end
+
+ context 'with ip ranges in whitelist' do
+ let(:ipv4_range) { '127.0.0.0/28' }
+ let(:ipv6_range) { 'fd84:6d02:f6d8:c89e::/124' }
+
+ let(:whitelist) do
+ [
+ ipv4_range,
+ ipv6_range
+ ]
+ end
+
+ it 'does not whitelist ipv4 range when not in whitelist' do
+ stub_application_setting(outbound_local_requests_whitelist: [])
+
+ IPAddr.new(ipv4_range).to_range.to_a.each do |ip|
+ expect(described_class).not_to be_ip_whitelisted(ip.to_s)
+ end
+ end
+
+ it 'whitelists all ipv4s in the range when in whitelist' do
+ IPAddr.new(ipv4_range).to_range.to_a.each do |ip|
+ expect(described_class).to be_ip_whitelisted(ip.to_s)
+ end
+ end
+
+ it 'does not whitelist ipv6 range when not in whitelist' do
+ stub_application_setting(outbound_local_requests_whitelist: [])
+
+ IPAddr.new(ipv6_range).to_range.to_a.each do |ip|
+ expect(described_class).not_to be_ip_whitelisted(ip.to_s)
+ end
+ end
+
+ it 'whitelists all ipv6s in the range when in whitelist' do
+ IPAddr.new(ipv6_range).to_range.to_a.each do |ip|
+ expect(described_class).to be_ip_whitelisted(ip.to_s)
+ end
+ end
+
+ it 'does not whitelist IPs outside the range' do
+ expect(described_class).not_to be_ip_whitelisted("fd84:6d02:f6d8:c89e:0:0:1:f")
+
+ expect(described_class).not_to be_ip_whitelisted("127.0.1.15")
+ end
+ end
+
+ context 'with ports' do
+ let(:whitelist) { ['127.0.0.9:3000', '[2001:db8:85a3:8d3:1319:8a2e:370:7348]:443'] }
+
+ it 'returns true if ip and ports present in whitelist' do
+ parsed_whitelist = [
+ ['127.0.0.9', { port: 3000 }],
+ ['[2001:db8:85a3:8d3:1319:8a2e:370:7348]', { port: 443 }]
+ ]
+ not_whitelisted = [
+ '127.0.0.9',
+ ['127.0.0.9', { port: 3001 }],
+ '[2001:db8:85a3:8d3:1319:8a2e:370:7348]',
+ ['[2001:db8:85a3:8d3:1319:8a2e:370:7348]', { port: 3001 }]
+ ]
+
+ aggregate_failures do
+ parsed_whitelist.each do |ip_and_port|
+ expect(described_class).to be_ip_whitelisted(*ip_and_port)
+ end
+
+ not_whitelisted.each do |ip_and_port|
+ expect(described_class).not_to be_ip_whitelisted(*ip_and_port)
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/url_builder_spec.rb b/spec/lib/gitlab/url_builder_spec.rb
index 49011b100ab..c2eb1b4c25d 100644
--- a/spec/lib/gitlab/url_builder_spec.rb
+++ b/spec/lib/gitlab/url_builder_spec.rb
@@ -14,13 +14,25 @@ describe Gitlab::UrlBuilder do
end
end
+ context 'when passing a batch loaded Commit' do
+ it 'returns a proper URL' do
+ commit = BatchLoader.for(:commit).batch do |batch, loader|
+ batch.each { |commit| loader.call(:commit, build_stubbed(:commit)) }
+ end
+
+ url = described_class.build(commit)
+
+ expect(url).to eq "#{Settings.gitlab['url']}/#{commit.project.full_path}/-/commit/#{commit.id}"
+ end
+ end
+
context 'when passing an Issue' do
it 'returns a proper URL' do
issue = build_stubbed(:issue, iid: 42)
url = described_class.build(issue)
- expect(url).to eq "#{Settings.gitlab['url']}/#{issue.project.full_path}/issues/#{issue.iid}"
+ expect(url).to eq "#{Settings.gitlab['url']}/#{issue.project.full_path}/-/issues/#{issue.iid}"
end
end
@@ -107,7 +119,7 @@ describe Gitlab::UrlBuilder do
url = described_class.build(note)
- expect(url).to eq "#{Settings.gitlab['url']}/#{issue.project.full_path}/issues/#{issue.iid}#note_#{note.id}"
+ expect(url).to eq "#{Settings.gitlab['url']}/#{issue.project.full_path}/-/issues/#{issue.iid}#note_#{note.id}"
end
end
@@ -160,7 +172,7 @@ describe Gitlab::UrlBuilder do
project = build_stubbed(:project)
expect { described_class.build(project) }
- .to raise_error(NotImplementedError, 'No URL builder defined for Project')
+ .to raise_error(NotImplementedError, "No URL builder defined for #{project.inspect}")
end
end
end
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index b50481a85cd..21117f11f63 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -27,7 +27,7 @@ describe Gitlab::UsageData do
create(:service, project: projects[1], type: 'SlackService', active: true)
create(:service, project: projects[2], type: 'SlackService', active: true)
create(:service, project: projects[2], type: 'MattermostService', active: false)
- create(:service, project: projects[2], type: 'MattermostService', active: true, template: true)
+ create(:service, :template, type: 'MattermostService', active: true)
create(:service, project: projects[2], type: 'CustomIssueTrackerService', active: true)
create(:project_error_tracking_setting, project: projects[0])
create(:project_error_tracking_setting, project: projects[1], enabled: false)
@@ -324,6 +324,24 @@ describe Gitlab::UsageData do
end
end
+ describe '#cycle_analytics_usage_data' do
+ subject { described_class.cycle_analytics_usage_data }
+
+ it 'works when queries time out in new' do
+ allow(Gitlab::CycleAnalytics::UsageData)
+ .to receive(:new).and_raise(ActiveRecord::StatementInvalid.new(''))
+
+ expect { subject }.not_to raise_error
+ end
+
+ it 'works when queries time out in to_json' do
+ allow_any_instance_of(Gitlab::CycleAnalytics::UsageData)
+ .to receive(:to_json).and_raise(ActiveRecord::StatementInvalid.new(''))
+
+ expect { subject }.not_to raise_error
+ end
+ end
+
describe '#ingress_modsecurity_usage' do
subject { described_class.ingress_modsecurity_usage }
@@ -370,26 +388,19 @@ describe Gitlab::UsageData do
end
end
- describe '#approximate_counts' do
- it 'gets approximate counts for selected models', :aggregate_failures do
- create(:label)
-
- expect(Gitlab::Database::Count).to receive(:approximate_counts)
- .with(described_class::APPROXIMATE_COUNT_MODELS).once.and_call_original
+ describe '#distinct_count' do
+ let(:relation) { double(:relation) }
- counts = described_class.approximate_counts.values
+ it 'returns the count when counting succeeds' do
+ allow(relation).to receive(:distinct_count_by).and_return(1)
- expect(counts.count).to eq(described_class::APPROXIMATE_COUNT_MODELS.count)
- expect(counts.any? { |count| count < 0 }).to be_falsey
+ expect(described_class.distinct_count(relation, batch: false)).to eq(1)
end
- it 'returns default values if counts can not be retrieved', :aggregate_failures do
- described_class::APPROXIMATE_COUNT_MODELS.map do |model|
- model.name.underscore.pluralize.to_sym
- end
+ it 'returns the fallback value when counting fails' do
+ allow(relation).to receive(:distinct_count_by).and_raise(ActiveRecord::StatementInvalid.new(''))
- expect(Gitlab::Database::Count).to receive(:approximate_counts).and_return({})
- expect(described_class.approximate_counts.values.uniq).to eq([-1])
+ expect(described_class.distinct_count(relation, fallback: 15, batch: false)).to eq(15)
end
end
end
diff --git a/spec/lib/gitlab/user_access_snippet_spec.rb b/spec/lib/gitlab/user_access_snippet_spec.rb
new file mode 100644
index 00000000000..57e52e2e93d
--- /dev/null
+++ b/spec/lib/gitlab/user_access_snippet_spec.rb
@@ -0,0 +1,95 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::UserAccessSnippet do
+ subject(:access) { described_class.new(user, snippet: snippet) }
+
+ let_it_be(:project) { create(:project, :private) }
+ let_it_be(:snippet) { create(:project_snippet, :private, project: project) }
+ let(:user) { create(:user) }
+
+ describe '#can_do_action?' do
+ before do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?).with(user, :ability, snippet).and_return(:foo)
+ end
+
+ context 'when the user can access_git' do
+ it 'calls Ability#allowed? and returns its result' do
+ expect(access.can_do_action?(:ability)).to eq(:foo)
+ end
+ end
+
+ context 'when the user cannot access_git' do
+ it 'disallows access' do
+ expect(Ability).to receive(:allowed?).with(user, :access_git, :global).and_return(false)
+
+ expect(access.can_do_action?(:ability)).to eq(false)
+ end
+ end
+
+ context 'when user is nil' do
+ let(:user) { nil }
+
+ it 'disallows access' do
+ expect(access.can_do_action?(:ability)).to eq(false)
+ end
+ end
+ end
+
+ describe '#can_push_to_branch?' do
+ include ProjectHelpers
+
+ [:anonymous, :non_member, :guest, :reporter, :maintainer, :admin, :author].each do |membership|
+ context membership.to_s do
+ let(:user) do
+ membership == :author ? snippet.author : create_user_from_membership(project, membership)
+ end
+
+ context 'when the user can access_git' do
+ it 'respects accessibility' do
+ expected_result = Ability.allowed?(user, :update_snippet, snippet)
+
+ expect(access.can_push_to_branch?('random_branch')).to eq(expected_result)
+ end
+ end
+
+ context 'when the user cannot access_git' do
+ it 'disallows access' do
+ expect(Ability).to receive(:allowed?).with(user, :access_git, :global).and_return(false) if user
+
+ expect(access.can_push_to_branch?('random_branch')).to eq(false)
+ end
+ end
+ end
+ end
+
+ context 'when snippet is nil' do
+ let(:user) { create_user_from_membership(project, :admin) }
+ let(:snippet) { nil }
+
+ it 'disallows access' do
+ expect(access.can_push_to_branch?('random_branch')).to eq(false)
+ end
+ end
+ end
+
+ describe '#can_create_tag?' do
+ it 'returns false' do
+ expect(access.can_create_tag?('random_tag')).to be_falsey
+ end
+ end
+
+ describe '#can_delete_branch?' do
+ it 'returns false' do
+ expect(access.can_delete_branch?('random_branch')).to be_falsey
+ end
+ end
+
+ describe '#can_merge_to_branch?' do
+ it 'returns false' do
+ expect(access.can_merge_to_branch?('random_branch')).to be_falsey
+ end
+ end
+end
diff --git a/spec/lib/gitlab/user_access_spec.rb b/spec/lib/gitlab/user_access_spec.rb
index 181ea1e7fd3..78370f0136c 100644
--- a/spec/lib/gitlab/user_access_spec.rb
+++ b/spec/lib/gitlab/user_access_spec.rb
@@ -57,32 +57,27 @@ describe Gitlab::UserAccess do
expect(project_access.can_push_to_branch?('master')).to be_truthy
end
- it 'returns false if user is developer and project is fully protected' do
- empty_project.add_developer(user)
- stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_FULL)
-
- expect(project_access.can_push_to_branch?('master')).to be_falsey
- end
-
- it 'returns false if user is developer and it is not allowed to push new commits but can merge into branch' do
- empty_project.add_developer(user)
- stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_DEV_CAN_MERGE)
-
- expect(project_access.can_push_to_branch?('master')).to be_falsey
- end
-
- it 'returns true if user is developer and project is unprotected' do
- empty_project.add_developer(user)
- stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_NONE)
-
- expect(project_access.can_push_to_branch?('master')).to be_truthy
- end
-
- it 'returns true if user is developer and project grants developers permission' do
- empty_project.add_developer(user)
- stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_DEV_CAN_PUSH)
-
- expect(project_access.can_push_to_branch?('master')).to be_truthy
+ context 'when the user is a developer' do
+ using RSpec::Parameterized::TableSyntax
+
+ before do
+ empty_project.add_developer(user)
+ end
+
+ where(:default_branch_protection_level, :result) do
+ Gitlab::Access::PROTECTION_NONE | true
+ Gitlab::Access::PROTECTION_DEV_CAN_PUSH | true
+ Gitlab::Access::PROTECTION_DEV_CAN_MERGE | false
+ Gitlab::Access::PROTECTION_FULL | false
+ end
+
+ with_them do
+ it do
+ expect(empty_project.namespace).to receive(:default_branch_protection).and_return(default_branch_protection_level).at_least(:once)
+
+ expect(project_access.can_push_to_branch?('master')).to eq(result)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/utils/json_size_estimator_spec.rb b/spec/lib/gitlab/utils/json_size_estimator_spec.rb
new file mode 100644
index 00000000000..ae24e25558a
--- /dev/null
+++ b/spec/lib/gitlab/utils/json_size_estimator_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Utils::JsonSizeEstimator do
+ RSpec::Matchers.define :match_json_bytesize_of do |expected|
+ match do |actual|
+ actual == expected.to_json.bytesize
+ end
+ end
+
+ def estimate(object)
+ described_class.estimate(object)
+ end
+
+ [
+ [],
+ [[[[]]]],
+ [1, "str", 3.14, ["str", { a: -1 }]],
+ {},
+ { a: {} },
+ { a: { b: { c: [1, 2, 3], e: Time.now, f: nil } } },
+ { 100 => 500 },
+ { '狸' => '狸' },
+ nil
+ ].each do |example|
+ it { expect(estimate(example)).to match_json_bytesize_of(example) }
+ end
+
+ it 'calls #to_s on an unknown object' do
+ klass = Class.new do
+ def to_s
+ 'hello'
+ end
+ end
+
+ expect(estimate(klass.new)).to match_json_bytesize_of(klass.new.to_s) # "hello"
+ end
+end
diff --git a/spec/lib/gitlab/utils/log_limited_array_spec.rb b/spec/lib/gitlab/utils/log_limited_array_spec.rb
index 2729b2c7b6f..a236ab37614 100644
--- a/spec/lib/gitlab/utils/log_limited_array_spec.rb
+++ b/spec/lib/gitlab/utils/log_limited_array_spec.rb
@@ -18,12 +18,26 @@ describe Gitlab::Utils::LogLimitedArray do
end
context 'when the array exceeds the limit' do
- it 'replaces arguments after the limit with an ellipsis string' do
+ let(:long_array) do
half_limit = described_class::MAXIMUM_ARRAY_LENGTH / 2
- long_array = ['a' * half_limit, 'b' * half_limit, 'c']
- expect(described_class.log_limited_array(long_array))
- .to eq(long_array.take(1) + ['...'])
+ ['a' * half_limit, 'b' * half_limit, 'c']
+ end
+
+ context 'when no sentinel value is passed' do
+ it 'replaces arguments after the limit with an ellipsis string' do
+ expect(described_class.log_limited_array(long_array))
+ .to eq(long_array.take(1) + ['...'])
+ end
+ end
+
+ context 'when a sentinel value is passed' do
+ it 'replaces arguments after the limit with the sentinel' do
+ sentinel = { truncated: true }
+
+ expect(described_class.log_limited_array(long_array, sentinel: sentinel))
+ .to eq(long_array.take(1) + [sentinel])
+ end
end
end
diff --git a/spec/lib/gitlab/utils_spec.rb b/spec/lib/gitlab/utils_spec.rb
index 6841e7719dc..d3780d22241 100644
--- a/spec/lib/gitlab/utils_spec.rb
+++ b/spec/lib/gitlab/utils_spec.rb
@@ -31,6 +31,14 @@ describe Gitlab::Utils do
it 'does nothing for a safe string' do
expect(check_path_traversal!('./foo')).to eq('./foo')
end
+
+ it 'does nothing if an absolute path is allowed' do
+ expect(check_path_traversal!('/etc/folder/path', allowed_absolute: true)).to eq('/etc/folder/path')
+ end
+
+ it 'raises exception if an absolute path is not allowed' do
+ expect { check_path_traversal!('/etc/folder/path') }.to raise_error(/Invalid path/)
+ end
end
describe '.slugify' do
diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb
index 89381057f6b..921ed568b71 100644
--- a/spec/lib/gitlab/workhorse_spec.rb
+++ b/spec/lib/gitlab/workhorse_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::Workhorse do
- set(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
let(:repository) { project.repository }
def decode_workhorse_header(array)
diff --git a/spec/lib/gitlab/x509/commit_spec.rb b/spec/lib/gitlab/x509/commit_spec.rb
index c31e9e4b8e6..07d7eba6b9a 100644
--- a/spec/lib/gitlab/x509/commit_spec.rb
+++ b/spec/lib/gitlab/x509/commit_spec.rb
@@ -111,6 +111,22 @@ describe Gitlab::X509::Commit do
expect(signature.x509_certificate.x509_issuer).to have_attributes(user1_issuer_attributes)
expect(signature.persisted?).to be_truthy
end
+
+ context 'with a revoked certificate' do
+ let(:x509_issuer) { create(:x509_issuer, user1_issuer_attributes) }
+ let!(:x509_certificate) { create(:x509_certificate, user1_certificate_attributes.merge(x509_issuer_id: x509_issuer.id, certificate_status: :revoked)) }
+
+ it 'returns an unverified signature' do
+ expect(signature).to have_attributes(
+ commit_sha: commit_sha,
+ project: project,
+ verification_status: 'unverified'
+ )
+ expect(signature.x509_certificate).to have_attributes(user1_certificate_attributes)
+ expect(signature.x509_certificate.x509_issuer).to have_attributes(user1_issuer_attributes)
+ expect(signature.persisted?).to be_truthy
+ end
+ end
end
context 'without trusted certificate within store' do
diff --git a/spec/lib/gitlab_danger_spec.rb b/spec/lib/gitlab_danger_spec.rb
index 26bf5d76756..f4620e54979 100644
--- a/spec/lib/gitlab_danger_spec.rb
+++ b/spec/lib/gitlab_danger_spec.rb
@@ -9,7 +9,7 @@ describe GitlabDanger do
describe '.local_warning_message' do
it 'returns an informational message with rules that can run' do
- expect(described_class.local_warning_message).to eq('==> Only the following Danger rules can be run locally: changes_size, gemfile, documentation, frozen_string, duplicate_yarn_dependencies, prettier, eslint, database, commit_messages')
+ expect(described_class.local_warning_message).to eq('==> Only the following Danger rules can be run locally: changes_size, documentation, frozen_string, duplicate_yarn_dependencies, prettier, eslint, karma, database, commit_messages, telemetry')
end
end
diff --git a/spec/lib/grafana/time_window_spec.rb b/spec/lib/grafana/time_window_spec.rb
new file mode 100644
index 00000000000..e70861658ca
--- /dev/null
+++ b/spec/lib/grafana/time_window_spec.rb
@@ -0,0 +1,115 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Grafana::TimeWindow do
+ let(:from) { '1552799400000' }
+ let(:to) { '1552828200000' }
+
+ around do |example|
+ Timecop.freeze(Time.utc(2019, 3, 17, 13, 10)) { example.run }
+ end
+
+ describe '#formatted' do
+ subject { described_class.new(from, to).formatted }
+
+ it { is_expected.to eq(start: "2019-03-17T05:10:00Z", end: "2019-03-17T13:10:00Z") }
+ end
+
+ describe '#in_milliseconds' do
+ subject { described_class.new(from, to).in_milliseconds }
+
+ it { is_expected.to eq(from: 1552799400000, to: 1552828200000) }
+
+ context 'when non-unix parameters are provided' do
+ let(:to) { Time.now.to_s }
+
+ let(:default_from) { 8.hours.ago.to_i * 1000 }
+ let(:default_to) { Time.now.to_i * 1000 }
+
+ it { is_expected.to eq(from: default_from, to: default_to) }
+ end
+ end
+end
+
+describe Grafana::RangeWithDefaults do
+ let(:from) { Grafana::Timestamp.from_ms_since_epoch('1552799400000') }
+ let(:to) { Grafana::Timestamp.from_ms_since_epoch('1552828200000') }
+
+ around do |example|
+ Timecop.freeze(Time.utc(2019, 3, 17, 13, 10)) { example.run }
+ end
+
+ describe '#to_hash' do
+ subject { described_class.new(from: from, to: to).to_hash }
+
+ it { is_expected.to eq(from: from, to: to) }
+
+ context 'when only "to" is provided' do
+ let(:from) { nil }
+
+ it 'has the expected properties' do
+ expect(subject[:to]).to eq(to)
+ expect(subject[:from].time).to eq(to.time - 8.hours)
+ end
+ end
+
+ context 'when only "from" is provided' do
+ let(:to) { nil }
+
+ it 'has the expected properties' do
+ expect(subject[:to].time).to eq(from.time + 8.hours)
+ expect(subject[:from]).to eq(from)
+ end
+ end
+
+ context 'when no parameters are provided' do
+ let(:to) { nil }
+ let(:from) { nil }
+
+ let(:default_from) { 8.hours.ago }
+ let(:default_to) { Time.now }
+
+ it 'has the expected properties' do
+ expect(subject[:to].time).to eq(default_to)
+ expect(subject[:from].time).to eq(default_from)
+ end
+ end
+ end
+end
+
+describe Grafana::Timestamp do
+ let(:timestamp) { Time.at(1552799400) }
+
+ around do |example|
+ Timecop.freeze(Time.utc(2019, 3, 17, 13, 10)) { example.run }
+ end
+
+ describe '#formatted' do
+ subject { described_class.new(timestamp).formatted }
+
+ it { is_expected.to eq "2019-03-17T05:10:00Z" }
+ end
+
+ describe '#to_ms' do
+ subject { described_class.new(timestamp).to_ms }
+
+ it { is_expected.to eq 1552799400000 }
+ end
+
+ describe '.from_ms_since_epoch' do
+ let(:timestamp) { '1552799400000' }
+
+ subject { described_class.from_ms_since_epoch(timestamp) }
+
+ it { is_expected.to be_a described_class }
+
+ context 'when the input is not a unix-ish timestamp' do
+ let(:timestamp) { Time.now.to_s }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(Grafana::Timestamp::Error)
+ end
+ end
+ end
+end
diff --git a/spec/lib/grafana/validator_spec.rb b/spec/lib/grafana/validator_spec.rb
new file mode 100644
index 00000000000..603e27fd0c0
--- /dev/null
+++ b/spec/lib/grafana/validator_spec.rb
@@ -0,0 +1,119 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Grafana::Validator do
+ let(:grafana_dashboard) { JSON.parse(fixture_file('grafana/simplified_dashboard_response.json'), symbolize_names: true) }
+ let(:datasource) { JSON.parse(fixture_file('grafana/datasource_response.json'), symbolize_names: true) }
+ let(:panel) { grafana_dashboard[:dashboard][:panels].first }
+
+ let(:query_params) do
+ {
+ from: '1570397739557',
+ to: '1570484139557',
+ panelId: '8',
+ 'var-instance': 'localhost:9121'
+ }
+ end
+
+ describe 'validate!' do
+ shared_examples_for 'processing error' do |message|
+ it 'raises a processing error' do
+ expect { subject }
+ .to raise_error(::Grafana::Validator::Error, message)
+ end
+ end
+
+ subject { described_class.new(grafana_dashboard, datasource, panel, query_params).validate! }
+
+ it 'does not raise an error' do
+ expect { subject }.not_to raise_error
+ end
+
+ context 'when query param "from" is not specified' do
+ before do
+ query_params.delete(:from)
+ end
+
+ it_behaves_like 'processing error', 'Grafana query parameters must include from and to.'
+ end
+
+ context 'when query param "to" is not specified' do
+ before do
+ query_params.delete(:to)
+ end
+
+ it_behaves_like 'processing error', 'Grafana query parameters must include from and to.'
+ end
+
+ context 'when the panel is not provided' do
+ let(:panel) { nil }
+
+ it_behaves_like 'processing error', 'Panel type must be a line graph.'
+ end
+
+ context 'when the panel is not a graph' do
+ before do
+ panel[:type] = 'singlestat'
+ end
+
+ it_behaves_like 'processing error', 'Panel type must be a line graph.'
+ end
+
+ context 'when the panel is not a line graph' do
+ before do
+ panel[:lines] = false
+ end
+
+ it_behaves_like 'processing error', 'Panel type must be a line graph.'
+ end
+
+ context 'when the query dashboard includes undefined variables' do
+ before do
+ query_params.delete(:'var-instance')
+ end
+
+ it_behaves_like 'processing error', 'All Grafana variables must be defined in the query parameters.'
+ end
+
+ context 'when the expression contains unsupported global variables' do
+ before do
+ grafana_dashboard[:dashboard][:panels][0][:targets][0][:expr] = 'sum(important_metric[$__interval_ms])'
+ end
+
+ it_behaves_like 'processing error', "Prometheus must not include #{described_class::UNSUPPORTED_GRAFANA_GLOBAL_VARS}"
+ end
+
+ context 'when the datasource is not proxyable' do
+ before do
+ datasource[:access] = 'not-proxy'
+ end
+
+ it_behaves_like 'processing error', 'Only Prometheus datasources with proxy access in Grafana are supported.'
+ end
+
+ # Skipping datasource validation allows checks to be
+ # run without a secondary call to the Grafana API
+ context 'when the datasource is not provided' do
+ let(:datasource) { nil }
+
+ it 'does not raise an error' do
+ expect { subject }.not_to raise_error
+ end
+ end
+ end
+
+ describe 'valid?' do
+ subject { described_class.new(grafana_dashboard, datasource, panel, query_params).valid? }
+
+ context 'with valid arguments' do
+ it { is_expected.to be true }
+ end
+
+ context 'with invalid arguments' do
+ let(:query_params) { {} }
+
+ it { is_expected.to be false }
+ end
+ end
+end
diff --git a/spec/lib/omni_auth/strategies/saml_spec.rb b/spec/lib/omni_auth/strategies/saml_spec.rb
deleted file mode 100644
index 447800bd93c..00000000000
--- a/spec/lib/omni_auth/strategies/saml_spec.rb
+++ /dev/null
@@ -1,26 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe OmniAuth::Strategies::SAML, type: :strategy do
- let(:idp_sso_target_url) { 'https://login.example.com/idp' }
- let(:strategy) { [OmniAuth::Strategies::SAML, { idp_sso_target_url: idp_sso_target_url }] }
-
- describe 'POST /users/auth/saml' do
- it 'redirects to the provider login page' do
- post '/users/auth/saml'
-
- expect(last_response).to redirect_to(/\A#{Regexp.quote(idp_sso_target_url)}/)
- end
-
- it 'stores request ID during request phase' do
- request_id = double
- allow_next_instance_of(OneLogin::RubySaml::Authrequest) do |instance|
- allow(instance).to receive(:uuid).and_return(request_id)
- end
-
- post '/users/auth/saml'
- expect(session['last_authn_request_id']).to eq(request_id)
- end
- end
-end
diff --git a/spec/lib/quality/kubernetes_client_spec.rb b/spec/lib/quality/kubernetes_client_spec.rb
index 3a362dfccbf..1cfee5200f3 100644
--- a/spec/lib/quality/kubernetes_client_spec.rb
+++ b/spec/lib/quality/kubernetes_client_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe Quality::KubernetesClient do
.and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true)))
expect(Gitlab::Popen).to receive(:popen_with_detail)
- .with([%(kubectl delete --namespace "#{namespace}" #{pod_for_release})])
+ .with([%(kubectl delete --namespace "#{namespace}" --ignore-not-found #{pod_for_release})])
.and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true)))
# We're not verifying the output here, just silencing it
@@ -64,7 +64,7 @@ RSpec.describe Quality::KubernetesClient do
.and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true)))
expect(Gitlab::Popen).to receive(:popen_with_detail)
- .with([%(kubectl delete --namespace "#{namespace}" #{pod_for_release})])
+ .with([%(kubectl delete --namespace "#{namespace}" --ignore-not-found #{pod_for_release})])
.and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true)))
# We're not verifying the output here, just silencing it
@@ -89,7 +89,7 @@ RSpec.describe Quality::KubernetesClient do
.and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true)))
expect(Gitlab::Popen).to receive(:popen_with_detail)
- .with([%(kubectl delete --namespace "#{namespace}" #{pod_for_release})])
+ .with([%(kubectl delete --namespace "#{namespace}" --ignore-not-found #{pod_for_release})])
.and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true)))
# We're not verifying the output here, just silencing it
diff --git a/spec/lib/quality/test_level_spec.rb b/spec/lib/quality/test_level_spec.rb
index 757a003946b..6042ab24787 100644
--- a/spec/lib/quality/test_level_spec.rb
+++ b/spec/lib/quality/test_level_spec.rb
@@ -28,7 +28,14 @@ RSpec.describe Quality::TestLevel do
context 'when level is migration' do
it 'returns a pattern' do
expect(subject.pattern(:migration))
- .to eq("spec/{migrations,lib/gitlab/background_migration}{,/**/}*_spec.rb")
+ .to eq("spec/{migrations}{,/**/}*_spec.rb")
+ end
+ end
+
+ context 'when level is background_migration' do
+ it 'returns a pattern' do
+ expect(subject.pattern(:background_migration))
+ .to eq("spec/{lib/gitlab/background_migration,lib/ee/gitlab/background_migration}{,/**/}*_spec.rb")
end
end
@@ -89,7 +96,14 @@ RSpec.describe Quality::TestLevel do
context 'when level is migration' do
it 'returns a regexp' do
expect(subject.regexp(:migration))
- .to eq(%r{spec/(migrations|lib/gitlab/background_migration)})
+ .to eq(%r{spec/(migrations)})
+ end
+ end
+
+ context 'when level is background_migration' do
+ it 'returns a regexp' do
+ expect(subject.regexp(:background_migration))
+ .to eq(%r{spec/(lib/gitlab/background_migration|lib/ee/gitlab/background_migration)})
end
end
@@ -134,7 +148,7 @@ RSpec.describe Quality::TestLevel do
expect(subject.level_for('spec/migrations/add_default_and_free_plans_spec.rb')).to eq(:migration)
end
- it 'returns the correct level for a background_migration test' do
+ it 'returns the correct level for a background migration test' do
expect(subject.level_for('spec/lib/gitlab/background_migration/archive_legacy_traces_spec.rb')).to eq(:migration)
end
@@ -142,6 +156,10 @@ RSpec.describe Quality::TestLevel do
expect(described_class.new('ee/').level_for('ee/spec/migrations/geo/migrate_ci_job_artifacts_to_separate_registry_spec.rb')).to eq(:migration)
end
+ it 'returns the correct level for an EE-namespaced background migration test' do
+ expect(described_class.new('ee/').level_for('ee/spec/lib/ee/gitlab/background_migration/prune_orphaned_geo_events_spec.rb')).to eq(:migration)
+ end
+
it 'returns the correct level for an integration test' do
expect(subject.level_for('spec/mailers/abuse_report_mailer_spec.rb')).to eq(:integration)
end
@@ -156,4 +174,26 @@ RSpec.describe Quality::TestLevel do
%r{Test level for spec/unknown/foo_spec.rb couldn't be set. Please rename the file properly or change the test level detection regexes in .+/lib/quality/test_level.rb.})
end
end
+
+ describe '#background_migration?' do
+ it 'returns false for a unit test' do
+ expect(subject.background_migration?('spec/models/abuse_report_spec.rb')).to be(false)
+ end
+
+ it 'returns false for a migration test' do
+ expect(subject.background_migration?('spec/migrations/add_default_and_free_plans_spec.rb')).to be(false)
+ end
+
+ it 'returns true for a background migration test' do
+ expect(subject.background_migration?('spec/lib/gitlab/background_migration/archive_legacy_traces_spec.rb')).to be(true)
+ end
+
+ it 'returns false for a geo migration test' do
+ expect(described_class.new('ee/').background_migration?('ee/spec/migrations/geo/migrate_ci_job_artifacts_to_separate_registry_spec.rb')).to be(false)
+ end
+
+ it 'returns true for an EE-namespaced background migration test' do
+ expect(described_class.new('ee/').background_migration?('ee/spec/lib/ee/gitlab/background_migration/prune_orphaned_geo_events_spec.rb')).to be(true)
+ end
+ end
end
diff --git a/spec/lib/sentry/client/issue_spec.rb b/spec/lib/sentry/client/issue_spec.rb
index d35e4b83d7f..0f57d38d290 100644
--- a/spec/lib/sentry/client/issue_spec.rb
+++ b/spec/lib/sentry/client/issue_spec.rb
@@ -49,7 +49,7 @@ describe Sentry::Client::Issue do
it_behaves_like 'calls sentry api'
it_behaves_like 'issues have correct return type', Gitlab::ErrorTracking::Error
- it_behaves_like 'issues have correct length', 2
+ it_behaves_like 'issues have correct length', 3
shared_examples 'has correct external_url' do
context 'external_url' do
@@ -184,7 +184,7 @@ describe Sentry::Client::Issue do
it_behaves_like 'calls sentry api'
it_behaves_like 'issues have correct return type', Gitlab::ErrorTracking::Error
- it_behaves_like 'issues have correct length', 2
+ it_behaves_like 'issues have correct length', 3
end
context 'when cursor is present' do
@@ -194,7 +194,7 @@ describe Sentry::Client::Issue do
it_behaves_like 'calls sentry api'
it_behaves_like 'issues have correct return type', Gitlab::ErrorTracking::Error
- it_behaves_like 'issues have correct length', 2
+ it_behaves_like 'issues have correct length', 3
end
end
@@ -254,6 +254,34 @@ describe Sentry::Client::Issue do
expect(subject.gitlab_issue).to eq('https://gitlab.com/gitlab-org/gitlab/issues/1')
end
+ context 'when issue annotations exist' do
+ before do
+ issue_sample_response['annotations'] = [
+ nil,
+ '',
+ "<a href=\"http://github.com/issues/6\">github-issue-6</a>",
+ "<div>annotation</a>",
+ "<a href=\"http://localhost/gitlab-org/gitlab/issues/2\">gitlab-org/gitlab#2</a>"
+ ]
+ stub_sentry_request(sentry_request_url, body: issue_sample_response)
+ end
+
+ it 'has a correct GitLab issue url' do
+ expect(subject.gitlab_issue).to eq('http://localhost/gitlab-org/gitlab/issues/2')
+ end
+ end
+
+ context 'when no GitLab issue is linked' do
+ before do
+ issue_sample_response['pluginIssues'] = []
+ stub_sentry_request(sentry_request_url, body: issue_sample_response)
+ end
+
+ it 'does not find a GitLab issue' do
+ expect(subject.gitlab_issue).to be_nil
+ end
+ end
+
it 'has the correct tags' do
expect(subject.tags).to eq({ level: issue_sample_response['level'], logger: issue_sample_response['logger'] })
end
diff --git a/spec/mailers/emails/pages_domains_spec.rb b/spec/mailers/emails/pages_domains_spec.rb
index e360e38256e..78887cef7ab 100644
--- a/spec/mailers/emails/pages_domains_spec.rb
+++ b/spec/mailers/emails/pages_domains_spec.rb
@@ -7,8 +7,8 @@ describe Emails::PagesDomains do
include EmailSpec::Matchers
include_context 'gitlab email notification'
- set(:domain) { create(:pages_domain, project: project) }
- set(:user) { project.creator }
+ let_it_be(:domain, reload: true) { create(:pages_domain, project: project) }
+ let_it_be(:user) { project.creator }
shared_examples 'a pages domain email' do
let(:recipient) { user }
diff --git a/spec/mailers/emails/pipelines_spec.rb b/spec/mailers/emails/pipelines_spec.rb
index ad1aa915fbb..cc901da98dc 100644
--- a/spec/mailers/emails/pipelines_spec.rb
+++ b/spec/mailers/emails/pipelines_spec.rb
@@ -6,7 +6,7 @@ require 'email_spec'
describe Emails::Pipelines do
include EmailSpec::Matchers
- set(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
shared_examples_for 'correct pipeline information' do
it 'has correct information' do
@@ -106,4 +106,17 @@ describe Emails::Pipelines do
let(:status_text) { 'Your pipeline has failed.' }
end
end
+
+ describe '#pipeline_fixed_email' do
+ subject { Notify.pipeline_fixed_email(pipeline, pipeline.user.try(:email)) }
+
+ let(:pipeline) { create(:ci_pipeline, project: project, ref: ref, sha: sha) }
+ let(:ref) { 'master' }
+ let(:sha) { project.commit(ref).sha }
+
+ it_behaves_like 'correct pipeline information' do
+ let(:status) { 'been fixed' }
+ let(:status_text) { 'Your pipeline has been fixed!' }
+ end
+ end
end
diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb
index 19b15a6c6e2..f49abb24c44 100644
--- a/spec/mailers/notify_spec.rb
+++ b/spec/mailers/notify_spec.rb
@@ -13,11 +13,11 @@ describe Notify do
let(:current_user_sanitized) { 'www_example_com' }
- set(:user) { create(:user) }
- set(:current_user) { create(:user, email: "current@email.com", name: 'www.example.com') }
- set(:assignee) { create(:user, email: 'assignee@example.com', name: 'John Doe') }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:current_user) { create(:user, email: "current@email.com", name: 'www.example.com') }
+ let_it_be(:assignee) { create(:user, email: 'assignee@example.com', name: 'John Doe') }
- set(:merge_request) do
+ let_it_be(:merge_request) do
create(:merge_request, source_project: project,
target_project: project,
author: current_user,
@@ -25,7 +25,7 @@ describe Notify do
description: 'Awesome description')
end
- set(:issue) do
+ let_it_be(:issue, reload: true) do
create(:issue, author: current_user,
assignees: [assignee],
project: project,
@@ -487,7 +487,7 @@ describe Notify do
end
describe 'that are unmergeable' do
- set(:merge_request) do
+ let_it_be(:merge_request) do
create(:merge_request, :conflict,
source_project: project,
target_project: project,
@@ -568,7 +568,7 @@ describe Notify do
end
describe '#mail_thread' do
- set(:mail_thread_note) { create(:note) }
+ let_it_be(:mail_thread_note) { create(:note) }
let(:headers) do
{
@@ -638,9 +638,9 @@ describe Notify do
let(:host) { Gitlab.config.gitlab.host }
context 'in discussion' do
- set(:first_note) { create(:discussion_note_on_issue, project: project) }
- set(:second_note) { create(:discussion_note_on_issue, in_reply_to: first_note, project: project) }
- set(:third_note) { create(:discussion_note_on_issue, in_reply_to: second_note, project: project) }
+ let_it_be(:first_note) { create(:discussion_note_on_issue, project: project) }
+ let_it_be(:second_note) { create(:discussion_note_on_issue, in_reply_to: first_note, project: project) }
+ let_it_be(:third_note) { create(:discussion_note_on_issue, in_reply_to: second_note, project: project) }
subject { described_class.note_issue_email(recipient.id, third_note.id) }
@@ -664,7 +664,7 @@ describe Notify do
end
context 'individual issue comments' do
- set(:note) { create(:note_on_issue, project: project) }
+ let_it_be(:note) { create(:note_on_issue, project: project) }
subject { described_class.note_issue_email(recipient.id, note.id) }
diff --git a/spec/migrations/20190924152703_migrate_issue_trackers_data_spec.rb b/spec/migrations/20190924152703_migrate_issue_trackers_data_spec.rb
index a84cac0623b..dfa4cc21d63 100644
--- a/spec/migrations/20190924152703_migrate_issue_trackers_data_spec.rb
+++ b/spec/migrations/20190924152703_migrate_issue_trackers_data_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190924152703_migrate_issue_trackers_data.rb')
-describe MigrateIssueTrackersData, :migration do
+describe MigrateIssueTrackersData do
let(:services) { table(:services) }
let(:migration_class) { Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData }
let(:migration_name) { migration_class.to_s.demodulize }
diff --git a/spec/migrations/20191015154408_drop_merge_requests_require_code_owner_approval_from_projects_spec.rb b/spec/migrations/20191015154408_drop_merge_requests_require_code_owner_approval_from_projects_spec.rb
index e90d56cfeef..ac9ff5632eb 100644
--- a/spec/migrations/20191015154408_drop_merge_requests_require_code_owner_approval_from_projects_spec.rb
+++ b/spec/migrations/20191015154408_drop_merge_requests_require_code_owner_approval_from_projects_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20191015154408_drop_merge_requests_require_code_owner_approval_from_projects.rb')
-describe DropMergeRequestsRequireCodeOwnerApprovalFromProjects, :migration do
+describe DropMergeRequestsRequireCodeOwnerApprovalFromProjects do
let(:projects_table) { table(:projects) }
subject(:migration) { described_class.new }
diff --git a/spec/migrations/20191125114345_add_admin_mode_protected_path_spec.rb b/spec/migrations/20191125114345_add_admin_mode_protected_path_spec.rb
index 110da221393..669e31618a3 100644
--- a/spec/migrations/20191125114345_add_admin_mode_protected_path_spec.rb
+++ b/spec/migrations/20191125114345_add_admin_mode_protected_path_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20191125114345_add_admin_mode_protected_path.rb')
-describe AddAdminModeProtectedPath, :migration do
+describe AddAdminModeProtectedPath do
ADMIN_MODE_ENDPOINT = '/admin/session'
subject(:migration) { described_class.new }
diff --git a/spec/migrations/20191204114127_delete_legacy_triggers_spec.rb b/spec/migrations/20191204114127_delete_legacy_triggers_spec.rb
index c2660d699ca..58061d80f21 100644
--- a/spec/migrations/20191204114127_delete_legacy_triggers_spec.rb
+++ b/spec/migrations/20191204114127_delete_legacy_triggers_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20191204114127_delete_legacy_triggers.rb')
-describe DeleteLegacyTriggers, :migration, schema: 2019_11_25_140458 do
+describe DeleteLegacyTriggers, schema: 2019_11_25_140458 do
let(:ci_trigger_table) { table(:ci_triggers) }
let(:user) { table(:users).create!(name: 'test', email: 'test@example.com', projects_limit: 1) }
diff --git a/spec/migrations/20200107172020_add_timestamp_softwarelicensespolicy_spec.rb b/spec/migrations/20200107172020_add_timestamp_softwarelicensespolicy_spec.rb
index b0d2aea7015..7a6b21d485b 100644
--- a/spec/migrations/20200107172020_add_timestamp_softwarelicensespolicy_spec.rb
+++ b/spec/migrations/20200107172020_add_timestamp_softwarelicensespolicy_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20200107172020_add_timestamp_softwarelicensespolicy.rb')
-describe AddTimestampSoftwarelicensespolicy, :migration do
+describe AddTimestampSoftwarelicensespolicy do
let(:software_licenses_policy) { table(:software_license_policies) }
let(:projects) { table(:projects) }
let(:licenses) { table(:software_licenses) }
diff --git a/spec/migrations/20200122123016_backfill_project_settings_spec.rb b/spec/migrations/20200122123016_backfill_project_settings_spec.rb
index fec18d6d52b..ce86e94b6d5 100644
--- a/spec/migrations/20200122123016_backfill_project_settings_spec.rb
+++ b/spec/migrations/20200122123016_backfill_project_settings_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200122123016_backfill_project_settings.rb')
-describe BackfillProjectSettings, :migration, :sidekiq, schema: 20200114113341 do
+describe BackfillProjectSettings, :sidekiq, schema: 20200114113341 do
let(:projects) { table(:projects) }
let(:namespace) { table(:namespaces).create(name: 'user', path: 'user') }
let(:project) { projects.create(namespace_id: namespace.id) }
diff --git a/spec/migrations/20200123155929_remove_invalid_jira_data_spec.rb b/spec/migrations/20200123155929_remove_invalid_jira_data_spec.rb
index 0e640623ea9..253e39c1647 100644
--- a/spec/migrations/20200123155929_remove_invalid_jira_data_spec.rb
+++ b/spec/migrations/20200123155929_remove_invalid_jira_data_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200123155929_remove_invalid_jira_data.rb')
-describe RemoveInvalidJiraData, :migration do
+describe RemoveInvalidJiraData do
let(:jira_tracker_data) { table(:jira_tracker_data) }
let(:services) { table(:services) }
diff --git a/spec/migrations/20200127090233_remove_invalid_issue_tracker_data_spec.rb b/spec/migrations/20200127090233_remove_invalid_issue_tracker_data_spec.rb
index d1b1dfb1904..ca7cde08071 100644
--- a/spec/migrations/20200127090233_remove_invalid_issue_tracker_data_spec.rb
+++ b/spec/migrations/20200127090233_remove_invalid_issue_tracker_data_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200127090233_remove_invalid_issue_tracker_data.rb')
-describe RemoveInvalidIssueTrackerData, :migration do
+describe RemoveInvalidIssueTrackerData do
let(:issue_tracker_data) { table(:issue_tracker_data) }
let(:services) { table(:services) }
diff --git a/spec/migrations/20200130145430_reschedule_migrate_issue_trackers_data_spec.rb b/spec/migrations/20200130145430_reschedule_migrate_issue_trackers_data_spec.rb
index b51708dd5cd..8e3e55f3e19 100644
--- a/spec/migrations/20200130145430_reschedule_migrate_issue_trackers_data_spec.rb
+++ b/spec/migrations/20200130145430_reschedule_migrate_issue_trackers_data_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200130145430_reschedule_migrate_issue_trackers_data.rb')
-describe RescheduleMigrateIssueTrackersData, :migration do
+describe RescheduleMigrateIssueTrackersData do
let(:services) { table(:services) }
let(:migration_class) { Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData }
let(:migration_name) { migration_class.to_s.demodulize }
diff --git a/spec/migrations/active_record/schedule_set_confidential_note_events_on_services_spec.rb b/spec/migrations/active_record/schedule_set_confidential_note_events_on_services_spec.rb
index 4de43e21ed3..e973454ecc8 100644
--- a/spec/migrations/active_record/schedule_set_confidential_note_events_on_services_spec.rb
+++ b/spec/migrations/active_record/schedule_set_confidential_note_events_on_services_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180122154930_schedule_set_confidential_note_events_on_services.rb')
-describe ScheduleSetConfidentialNoteEventsOnServices, :migration do
+describe ScheduleSetConfidentialNoteEventsOnServices do
let(:services_table) { table(:services) }
let(:migration_class) { Gitlab::BackgroundMigration::SetConfidentialNoteEventsOnServices }
let(:migration_name) { migration_class.to_s.demodulize }
diff --git a/spec/migrations/active_record/schema_spec.rb b/spec/migrations/active_record/schema_spec.rb
index 617e31f359b..086d6317c32 100644
--- a/spec/migrations/active_record/schema_spec.rb
+++ b/spec/migrations/active_record/schema_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
# Check consistency of db/schema.rb version, migrations' timestamps, and the latest migration timestamp
# stored in the database's schema_migrations table.
-describe ActiveRecord::Schema do
+describe ActiveRecord::Schema, schema: :latest do
let(:latest_migration_timestamp) do
migrations_paths = %w[db/migrate db/post_migrate]
.map { |path| Rails.root.join(*path, '*') }
diff --git a/spec/migrations/add_default_and_free_plans_spec.rb b/spec/migrations/add_default_and_free_plans_spec.rb
index ae40b5b10c2..dffdeb8e71a 100644
--- a/spec/migrations/add_default_and_free_plans_spec.rb
+++ b/spec/migrations/add_default_and_free_plans_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20191023152913_add_default_and_free_plans.rb')
-describe AddDefaultAndFreePlans, :migration do
+describe AddDefaultAndFreePlans do
describe 'migrate' do
let(:plans) { table(:plans) }
diff --git a/spec/migrations/add_deploy_token_type_to_deploy_tokens_spec.rb b/spec/migrations/add_deploy_token_type_to_deploy_tokens_spec.rb
index fb8213a6bd6..3bc3d3f8ee2 100644
--- a/spec/migrations/add_deploy_token_type_to_deploy_tokens_spec.rb
+++ b/spec/migrations/add_deploy_token_type_to_deploy_tokens_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20200122161638_add_deploy_token_type_to_deploy_tokens.rb')
-describe AddDeployTokenTypeToDeployTokens, :migration do
+describe AddDeployTokenTypeToDeployTokens do
let(:deploy_tokens) { table(:deploy_tokens) }
let(:deploy_token) do
deploy_tokens.create(name: 'token_test',
diff --git a/spec/migrations/add_foreign_key_from_notification_settings_to_users_spec.rb b/spec/migrations/add_foreign_key_from_notification_settings_to_users_spec.rb
index 656d4f75e3b..ceca38b148e 100644
--- a/spec/migrations/add_foreign_key_from_notification_settings_to_users_spec.rb
+++ b/spec/migrations/add_foreign_key_from_notification_settings_to_users_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180710162338_add_foreign_key_from_notification_settings_to_users.rb')
-describe AddForeignKeyFromNotificationSettingsToUsers, :migration do
+describe AddForeignKeyFromNotificationSettingsToUsers do
let(:notification_settings) { table(:notification_settings) }
let(:users) { table(:users) }
let(:projects) { table(:projects) }
diff --git a/spec/migrations/add_foreign_keys_to_todos_spec.rb b/spec/migrations/add_foreign_keys_to_todos_spec.rb
index 9932113a003..49fb3c1a911 100644
--- a/spec/migrations/add_foreign_keys_to_todos_spec.rb
+++ b/spec/migrations/add_foreign_keys_to_todos_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180201110056_add_foreign_keys_to_todos.rb')
-describe AddForeignKeysToTodos, :migration do
+describe AddForeignKeysToTodos do
let(:todos) { table(:todos) }
let(:users) { table(:users) }
let(:projects) { table(:projects) }
diff --git a/spec/migrations/add_not_null_constraint_to_project_mirror_data_foreign_key_spec.rb b/spec/migrations/add_not_null_constraint_to_project_mirror_data_foreign_key_spec.rb
index 24ae939afa7..03f65aba7c0 100644
--- a/spec/migrations/add_not_null_constraint_to_project_mirror_data_foreign_key_spec.rb
+++ b/spec/migrations/add_not_null_constraint_to_project_mirror_data_foreign_key_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180508100222_add_not_null_constraint_to_project_mirror_data_foreign_key.rb')
-describe AddNotNullConstraintToProjectMirrorDataForeignKey, :migration do
+describe AddNotNullConstraintToProjectMirrorDataForeignKey do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:import_state) { table(:project_mirror_data) }
diff --git a/spec/migrations/add_pages_access_level_to_project_feature_spec.rb b/spec/migrations/add_pages_access_level_to_project_feature_spec.rb
index a5e2bf2de71..69f1e3ba3d0 100644
--- a/spec/migrations/add_pages_access_level_to_project_feature_spec.rb
+++ b/spec/migrations/add_pages_access_level_to_project_feature_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180423204600_add_pages_access_level_to_project_feature.rb')
-describe AddPagesAccessLevelToProjectFeature, :migration do
+describe AddPagesAccessLevelToProjectFeature do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:features) { table(:project_features) }
diff --git a/spec/migrations/add_pipeline_build_foreign_key_spec.rb b/spec/migrations/add_pipeline_build_foreign_key_spec.rb
index bb40ead9b93..dd0189b6bfc 100644
--- a/spec/migrations/add_pipeline_build_foreign_key_spec.rb
+++ b/spec/migrations/add_pipeline_build_foreign_key_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180420010016_add_pipeline_build_foreign_key.rb')
-describe AddPipelineBuildForeignKey, :migration do
+describe AddPipelineBuildForeignKey do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:pipelines) { table(:ci_pipelines) }
diff --git a/spec/migrations/add_temporary_partial_index_on_project_id_to_services_spec.rb b/spec/migrations/add_temporary_partial_index_on_project_id_to_services_spec.rb
index 2d12fec5cb3..ce790b0266c 100644
--- a/spec/migrations/add_temporary_partial_index_on_project_id_to_services_spec.rb
+++ b/spec/migrations/add_temporary_partial_index_on_project_id_to_services_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200114112932_add_temporary_partial_index_on_project_id_to_services.rb')
-describe AddTemporaryPartialIndexOnProjectIdToServices, :migration do
+describe AddTemporaryPartialIndexOnProjectIdToServices do
let(:migration) { described_class.new }
describe '#up' do
diff --git a/spec/migrations/add_unique_constraint_to_approvals_user_id_and_merge_request_id_spec.rb b/spec/migrations/add_unique_constraint_to_approvals_user_id_and_merge_request_id_spec.rb
index cad10ba30ef..74830ab4ce2 100644
--- a/spec/migrations/add_unique_constraint_to_approvals_user_id_and_merge_request_id_spec.rb
+++ b/spec/migrations/add_unique_constraint_to_approvals_user_id_and_merge_request_id_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190404143330_add_unique_constraint_to_approvals_user_id_and_merge_request_id.rb')
-describe AddUniqueConstraintToApprovalsUserIdAndMergeRequestId, :migration do
+describe AddUniqueConstraintToApprovalsUserIdAndMergeRequestId do
let(:migration) { described_class.new }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
diff --git a/spec/migrations/add_unique_constraint_to_project_features_project_id_spec.rb b/spec/migrations/add_unique_constraint_to_project_features_project_id_spec.rb
index 8b128ff5ab8..91abf0f7d1c 100644
--- a/spec/migrations/add_unique_constraint_to_project_features_project_id_spec.rb
+++ b/spec/migrations/add_unique_constraint_to_project_features_project_id_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180511174224_add_unique_constraint_to_project_features_project_id.rb')
-describe AddUniqueConstraintToProjectFeaturesProjectId, :migration do
+describe AddUniqueConstraintToProjectFeaturesProjectId do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:features) { table(:project_features) }
diff --git a/spec/migrations/assure_commits_count_for_merge_request_diff_spec.rb b/spec/migrations/assure_commits_count_for_merge_request_diff_spec.rb
index 17342dcaab3..e9ef6bf3e2d 100644
--- a/spec/migrations/assure_commits_count_for_merge_request_diff_spec.rb
+++ b/spec/migrations/assure_commits_count_for_merge_request_diff_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180425131009_assure_commits_count_for_merge_request_diff.rb')
-describe AssureCommitsCountForMergeRequestDiff, :migration, :redis do
+describe AssureCommitsCountForMergeRequestDiff, :redis do
let(:migration) { spy('migration') }
before do
diff --git a/spec/migrations/backfill_and_add_not_null_constraint_to_released_at_column_on_releases_table_spec.rb b/spec/migrations/backfill_and_add_not_null_constraint_to_released_at_column_on_releases_table_spec.rb
index 9cae1daacea..8fceba276ce 100644
--- a/spec/migrations/backfill_and_add_not_null_constraint_to_released_at_column_on_releases_table_spec.rb
+++ b/spec/migrations/backfill_and_add_not_null_constraint_to_released_at_column_on_releases_table_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20190628185004_backfill_and_add_not_null_constraint_to_released_at_column_on_releases_table.rb')
-describe BackfillAndAddNotNullConstraintToReleasedAtColumnOnReleasesTable, :migration do
+describe BackfillAndAddNotNullConstraintToReleasedAtColumnOnReleasesTable do
let(:releases) { table(:releases) }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
diff --git a/spec/migrations/backfill_operations_feature_flags_active_spec.rb b/spec/migrations/backfill_operations_feature_flags_active_spec.rb
index ad69b776052..c51ed9fea8c 100644
--- a/spec/migrations/backfill_operations_feature_flags_active_spec.rb
+++ b/spec/migrations/backfill_operations_feature_flags_active_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20191213184609_backfill_operations_feature_flags_active.rb')
-describe BackfillOperationsFeatureFlagsActive, :migration do
+describe BackfillOperationsFeatureFlagsActive do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:flags) { table(:operations_feature_flags) }
diff --git a/spec/migrations/backfill_operations_feature_flags_iid_spec.rb b/spec/migrations/backfill_operations_feature_flags_iid_spec.rb
index f7a223e794a..4628780787c 100644
--- a/spec/migrations/backfill_operations_feature_flags_iid_spec.rb
+++ b/spec/migrations/backfill_operations_feature_flags_iid_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200117194850_backfill_operations_feature_flags_iid.rb')
-describe BackfillOperationsFeatureFlagsIid, :migration do
+describe BackfillOperationsFeatureFlagsIid do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:flags) { table(:operations_feature_flags) }
diff --git a/spec/migrations/backfill_releases_name_with_tag_name_spec.rb b/spec/migrations/backfill_releases_name_with_tag_name_spec.rb
index 6f436de84b7..b38b8dff3fa 100644
--- a/spec/migrations/backfill_releases_name_with_tag_name_spec.rb
+++ b/spec/migrations/backfill_releases_name_with_tag_name_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20181212104941_backfill_releases_name_with_tag_name.rb')
-describe BackfillReleasesNameWithTagName, :migration do
+describe BackfillReleasesNameWithTagName do
let(:releases) { table(:releases) }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
diff --git a/spec/migrations/backfill_releases_table_updated_at_and_add_not_null_constraints_to_timestamps_spec.rb b/spec/migrations/backfill_releases_table_updated_at_and_add_not_null_constraints_to_timestamps_spec.rb
index 3ca7af8ea37..bf9a8154e1e 100644
--- a/spec/migrations/backfill_releases_table_updated_at_and_add_not_null_constraints_to_timestamps_spec.rb
+++ b/spec/migrations/backfill_releases_table_updated_at_and_add_not_null_constraints_to_timestamps_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20190920194925_backfill_releases_table_updated_at_and_add_not_null_constraints_to_timestamps.rb')
-describe BackfillReleasesTableUpdatedAtAndAddNotNullConstraintsToTimestamps, :migration do
+describe BackfillReleasesTableUpdatedAtAndAddNotNullConstraintsToTimestamps do
let(:releases) { table(:releases) }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
diff --git a/spec/migrations/backfill_store_project_full_path_in_repo_spec.rb b/spec/migrations/backfill_store_project_full_path_in_repo_spec.rb
index 913b4d3f114..a2adde37f11 100644
--- a/spec/migrations/backfill_store_project_full_path_in_repo_spec.rb
+++ b/spec/migrations/backfill_store_project_full_path_in_repo_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20181010133639_backfill_store_project_full_path_in_repo.rb')
-describe BackfillStoreProjectFullPathInRepo, :migration do
+describe BackfillStoreProjectFullPathInRepo do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:group) { namespaces.create!(name: 'foo', path: 'foo') }
diff --git a/spec/migrations/backport_enterprise_schema_spec.rb b/spec/migrations/backport_enterprise_schema_spec.rb
index 8d2d9d4953a..c167301e1e3 100644
--- a/spec/migrations/backport_enterprise_schema_spec.rb
+++ b/spec/migrations/backport_enterprise_schema_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
require Rails.root.join('db', 'migrate', '20190402150158_backport_enterprise_schema.rb')
-describe BackportEnterpriseSchema, :migration, schema: 20190329085614 do
+describe BackportEnterpriseSchema, schema: 20190329085614 do
include MigrationsHelpers
def drop_if_exists(table)
diff --git a/spec/migrations/change_default_value_for_dsa_key_restriction_spec.rb b/spec/migrations/change_default_value_for_dsa_key_restriction_spec.rb
index 699708ad1d4..448f1e2106e 100644
--- a/spec/migrations/change_default_value_for_dsa_key_restriction_spec.rb
+++ b/spec/migrations/change_default_value_for_dsa_key_restriction_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180531220618_change_default_value_for_dsa_key_restriction.rb')
-describe ChangeDefaultValueForDsaKeyRestriction, :migration do
+describe ChangeDefaultValueForDsaKeyRestriction do
let(:application_settings) { table(:application_settings) }
before do
diff --git a/spec/migrations/change_outbound_local_requests_whitelist_default_spec.rb b/spec/migrations/change_outbound_local_requests_whitelist_default_spec.rb
index 232f6f090c3..dd45cac4a70 100644
--- a/spec/migrations/change_outbound_local_requests_whitelist_default_spec.rb
+++ b/spec/migrations/change_outbound_local_requests_whitelist_default_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20190725012225_change_outbound_local_requests_whitelist_default.rb')
-describe ChangeOutboundLocalRequestsWhitelistDefault, :migration do
+describe ChangeOutboundLocalRequestsWhitelistDefault do
let(:application_settings) { table(:application_settings) }
it 'defaults to empty array' do
diff --git a/spec/migrations/change_packages_size_defaults_in_project_statistics_spec.rb b/spec/migrations/change_packages_size_defaults_in_project_statistics_spec.rb
index 93e7e9304b1..c36506643de 100644
--- a/spec/migrations/change_packages_size_defaults_in_project_statistics_spec.rb
+++ b/spec/migrations/change_packages_size_defaults_in_project_statistics_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20190516155724_change_packages_size_defaults_in_project_statistics.rb')
-describe ChangePackagesSizeDefaultsInProjectStatistics, :migration do
+describe ChangePackagesSizeDefaultsInProjectStatistics do
let(:project_statistics) { table(:project_statistics) }
let(:projects) { table(:projects) }
diff --git a/spec/migrations/clean_grafana_url_spec.rb b/spec/migrations/clean_grafana_url_spec.rb
index 9f060fbaf7d..f6ea88a6f8d 100644
--- a/spec/migrations/clean_grafana_url_spec.rb
+++ b/spec/migrations/clean_grafana_url_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20200214085940_clean_grafana_url.rb')
-describe CleanGrafanaUrl, :migration do
+describe CleanGrafanaUrl do
let(:application_settings_table) { table(:application_settings) }
[
diff --git a/spec/migrations/clean_up_noteable_id_for_notes_on_commits_spec.rb b/spec/migrations/clean_up_noteable_id_for_notes_on_commits_spec.rb
index 572b7dfd0c8..602e1c1fe93 100644
--- a/spec/migrations/clean_up_noteable_id_for_notes_on_commits_spec.rb
+++ b/spec/migrations/clean_up_noteable_id_for_notes_on_commits_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190313092516_clean_up_noteable_id_for_notes_on_commits.rb')
-describe CleanUpNoteableIdForNotesOnCommits, :migration do
+describe CleanUpNoteableIdForNotesOnCommits do
let(:notes) { table(:notes) }
before do
diff --git a/spec/migrations/cleanup_build_stage_migration_spec.rb b/spec/migrations/cleanup_build_stage_migration_spec.rb
index 2142b7b5275..961e719e2fc 100644
--- a/spec/migrations/cleanup_build_stage_migration_spec.rb
+++ b/spec/migrations/cleanup_build_stage_migration_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180420010616_cleanup_build_stage_migration.rb')
-describe CleanupBuildStageMigration, :migration, :redis do
+describe CleanupBuildStageMigration, :redis do
let(:migration) { spy('migration') }
before do
diff --git a/spec/migrations/cleanup_empty_commit_user_mentions_spec.rb b/spec/migrations/cleanup_empty_commit_user_mentions_spec.rb
new file mode 100644
index 00000000000..7e6afbec520
--- /dev/null
+++ b/spec/migrations/cleanup_empty_commit_user_mentions_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200128133510_cleanup_empty_commit_user_mentions')
+
+describe CleanupEmptyCommitUserMentions, :migration, :sidekiq do
+ let(:users) { table(:users) }
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:notes) { table(:notes) }
+
+ let(:user) { users.create!(name: 'root', email: 'root@example.com', username: 'root', projects_limit: 0) }
+ let(:group) { namespaces.create!(name: 'group1', path: 'group1', owner_id: user.id) }
+ let(:project) { projects.create!(name: 'gitlab1', path: 'gitlab1', namespace_id: group.id, visibility_level: 0) }
+
+ let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
+ let(:commit) { Commit.new(RepoHelpers.sample_commit, project.becomes(Project)) }
+ let(:commit_user_mentions) { table(:commit_user_mentions) }
+
+ let!(:resource1) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: user.id, note: 'note1 for @root to check') }
+ let!(:resource2) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: user.id, note: 'note1 for @root to check') }
+ let!(:resource3) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: user.id, note: 'note1 for @root to check', system: true) }
+
+ # this note is already migrated, as it has a record in the commit_user_mentions table
+ let!(:resource4) { notes.create!(note: 'note3 for @root to check', commit_id: commit.id, noteable_type: 'Commit') }
+ let!(:user_mention) { commit_user_mentions.create!(commit_id: commit.id, note_id: resource4.id, mentioned_users_ids: [1]) }
+
+ # these should get cleaned up by the migration
+ let!(:blank_commit_user_mention1) { commit_user_mentions.create!(commit_id: commit.id, note_id: resource1.id) }
+ let!(:blank_commit_user_mention2) { commit_user_mentions.create!(commit_id: commit.id, note_id: resource2.id) }
+ let!(:blank_commit_user_mention3) { commit_user_mentions.create!(commit_id: commit.id, note_id: resource3.id) }
+
+ it 'cleans up blank user mentions' do
+ expect { migrate! }.to change { commit_user_mentions.count }.by(-3)
+ end
+end
diff --git a/spec/migrations/cleanup_environments_external_url_spec.rb b/spec/migrations/cleanup_environments_external_url_spec.rb
index bc20f936593..54fcb8c62cd 100644
--- a/spec/migrations/cleanup_environments_external_url_spec.rb
+++ b/spec/migrations/cleanup_environments_external_url_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20181108091549_cleanup_environments_external_url.rb')
-describe CleanupEnvironmentsExternalUrl, :migration do
+describe CleanupEnvironmentsExternalUrl do
let(:environments) { table(:environments) }
let(:invalid_entries) { environments.where(environments.arel_table[:external_url].matches('javascript://%')) }
let(:namespaces) { table(:namespaces) }
diff --git a/spec/migrations/cleanup_legacy_artifact_migration_spec.rb b/spec/migrations/cleanup_legacy_artifact_migration_spec.rb
index 0ab7d7ec05f..29a5c34373c 100644
--- a/spec/migrations/cleanup_legacy_artifact_migration_spec.rb
+++ b/spec/migrations/cleanup_legacy_artifact_migration_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20190104182041_cleanup_legacy_artifact_migration.rb')
-describe CleanupLegacyArtifactMigration, :migration, :redis do
+describe CleanupLegacyArtifactMigration, :redis do
let(:migration) { spy('migration') }
context 'when still legacy artifacts exist' do
diff --git a/spec/migrations/cleanup_optimistic_locking_nulls_spec.rb b/spec/migrations/cleanup_optimistic_locking_nulls_spec.rb
new file mode 100644
index 00000000000..d32a374b914
--- /dev/null
+++ b/spec/migrations/cleanup_optimistic_locking_nulls_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200128210353_cleanup_optimistic_locking_nulls')
+
+describe CleanupOptimisticLockingNulls do
+ TABLES = %w(epics merge_requests issues).freeze
+ TABLES.each do |table|
+ let(table.to_sym) { table(table.to_sym) }
+ end
+ let(:tables) { TABLES.map { |t| method(t.to_sym).call } }
+
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:users) { table(:users) }
+
+ before do
+ namespaces.create!(id: 123, name: 'gitlab1', path: 'gitlab1')
+ projects.create!(id: 123, name: 'gitlab1', path: 'gitlab1', namespace_id: 123)
+ users.create!(id: 123, username: 'author', projects_limit: 1000)
+
+ # Create necessary rows
+ epics.create!(iid: 123, group_id: 123, author_id: 123, title: 'a', title_html: 'a')
+ merge_requests.create!(iid: 123, target_project_id: 123, source_project_id: 123, target_branch: 'master', source_branch: 'hmm', title: 'a', title_html: 'a')
+ issues.create!(iid: 123, project_id: 123, title: 'a', title_html: 'a')
+
+ # Nullify `lock_version` column for all rows
+ # Needs to be done with a SQL fragment, otherwise Rails will coerce it to 0
+ tables.each do |table|
+ table.update_all('lock_version = NULL')
+ end
+ end
+
+ it 'correctly migrates nullified lock_version column', :sidekiq_inline do
+ tables.each do |table|
+ expect(table.where(lock_version: nil).count).to eq(1)
+ end
+
+ tables.each do |table|
+ expect(table.where(lock_version: 0).count).to eq(0)
+ end
+
+ migrate!
+
+ tables.each do |table|
+ expect(table.where(lock_version: nil).count).to eq(0)
+ end
+
+ tables.each do |table|
+ expect(table.where(lock_version: 0).count).to eq(1)
+ end
+ end
+end
diff --git a/spec/migrations/cleanup_stages_position_migration_spec.rb b/spec/migrations/cleanup_stages_position_migration_spec.rb
index c2077629919..62b9c4e84e3 100644
--- a/spec/migrations/cleanup_stages_position_migration_spec.rb
+++ b/spec/migrations/cleanup_stages_position_migration_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180604123514_cleanup_stages_position_migration.rb')
-describe CleanupStagesPositionMigration, :migration, :redis do
+describe CleanupStagesPositionMigration, :redis do
let(:migration) { spy('migration') }
before do
diff --git a/spec/migrations/create_environment_for_self_monitoring_project_spec.rb b/spec/migrations/create_environment_for_self_monitoring_project_spec.rb
new file mode 100644
index 00000000000..aee0651dee0
--- /dev/null
+++ b/spec/migrations/create_environment_for_self_monitoring_project_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200214214934_create_environment_for_self_monitoring_project')
+
+describe CreateEnvironmentForSelfMonitoringProject do
+ let(:application_settings_table) { table(:application_settings) }
+
+ let(:environments) { table(:environments) }
+
+ let(:instance_administrators_group) do
+ table(:namespaces).create!(
+ id: 1,
+ name: 'GitLab Instance Administrators',
+ path: 'gitlab-instance-administrators-random',
+ type: 'Group'
+ )
+ end
+
+ let(:self_monitoring_project) do
+ table(:projects).create!(
+ id: 2,
+ name: 'Self Monitoring',
+ path: 'self_monitoring',
+ namespace_id: instance_administrators_group.id
+ )
+ end
+
+ context 'when the self monitoring project ID is not set' do
+ it 'does not make changes' do
+ expect(environments.find_by(project_id: self_monitoring_project.id)).to be_nil
+
+ migrate!
+
+ expect(environments.find_by(project_id: self_monitoring_project.id)).to be_nil
+ end
+ end
+
+ context 'when the self monitoring project ID is set' do
+ before do
+ application_settings_table.create!(instance_administration_project_id: self_monitoring_project.id)
+ end
+
+ context 'when the environment already exists' do
+ let!(:environment) do
+ environments.create!(project_id: self_monitoring_project.id, name: 'production', slug: 'production')
+ end
+
+ it 'does not make changes' do
+ expect(environments.find_by(project_id: self_monitoring_project.id)).to eq(environment)
+
+ migrate!
+
+ expect(environments.find_by(project_id: self_monitoring_project.id)).to eq(environment)
+ end
+ end
+
+ context 'when the environment does not exist' do
+ it 'creates the environment' do
+ expect(environments.find_by(project_id: self_monitoring_project.id)).to be_nil
+
+ migrate!
+
+ expect(environments.find_by(project_id: self_monitoring_project.id)).to be
+ end
+ end
+ end
+end
diff --git a/spec/migrations/create_missing_namespace_for_internal_users_spec.rb b/spec/migrations/create_missing_namespace_for_internal_users_spec.rb
index 5df08a74e56..0872f23c02e 100644
--- a/spec/migrations/create_missing_namespace_for_internal_users_spec.rb
+++ b/spec/migrations/create_missing_namespace_for_internal_users_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180413022611_create_missing_namespace_for_internal_users.rb')
-describe CreateMissingNamespaceForInternalUsers, :migration do
+describe CreateMissingNamespaceForInternalUsers do
let(:users) { table(:users) }
let(:namespaces) { table(:namespaces) }
let(:routes) { table(:routes) }
diff --git a/spec/migrations/delete_internal_ids_where_feature_flags_usage_spec.rb b/spec/migrations/delete_internal_ids_where_feature_flags_usage_spec.rb
index b9c6b489aca..6eecd0870ed 100644
--- a/spec/migrations/delete_internal_ids_where_feature_flags_usage_spec.rb
+++ b/spec/migrations/delete_internal_ids_where_feature_flags_usage_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200117194900_delete_internal_ids_where_feature_flags_usage')
-describe DeleteInternalIdsWhereFeatureFlagsUsage, :migration do
+describe DeleteInternalIdsWhereFeatureFlagsUsage do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:internal_ids) { table(:internal_ids) }
diff --git a/spec/migrations/delete_template_project_services_spec.rb b/spec/migrations/delete_template_project_services_spec.rb
new file mode 100644
index 00000000000..3c6709ec310
--- /dev/null
+++ b/spec/migrations/delete_template_project_services_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'migrate', '20200305151736_delete_template_project_services.rb')
+
+describe DeleteTemplateProjectServices, :migration do
+ let(:services) { table(:services) }
+ let(:project) { table(:projects).create!(namespace_id: 1) }
+
+ before do
+ services.create!(template: true, project_id: project.id)
+ services.create!(template: true)
+ services.create!(template: false, project_id: project.id)
+ end
+
+ it 'deletes services that are templates and attached to a project' do
+ expect { migrate! }.to change { services.where(template: true, project_id: project.id).count }.from(1).to(0)
+ .and not_change { services.where(template: true, project_id: nil).count }
+ .and not_change { services.where(template: false).where.not(project_id: nil).count }
+ end
+end
diff --git a/spec/migrations/delete_template_services_duplicated_by_type_spec.rb b/spec/migrations/delete_template_services_duplicated_by_type_spec.rb
new file mode 100644
index 00000000000..64da0664e2c
--- /dev/null
+++ b/spec/migrations/delete_template_services_duplicated_by_type_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'migrate', '20200304160801_delete_template_services_duplicated_by_type.rb')
+
+describe DeleteTemplateServicesDuplicatedByType do
+ let(:services) { table(:services) }
+
+ before do
+ services.create!(template: true, type: 'JenkinsService')
+ services.create!(template: true, type: 'JenkinsService')
+ services.create!(template: true, type: 'JiraService')
+ services.create!(template: true, type: 'JenkinsService')
+ end
+
+ it 'deletes service templates duplicated by type except the one with the lowest ID' do
+ jenkins_service_id = services.where(type: 'JenkinsService').order(:id).pluck(:id).first
+ jira_service_id = services.where(type: 'JiraService').pluck(:id).first
+
+ migrate!
+
+ expect(services.pluck(:id)).to contain_exactly(jenkins_service_id, jira_service_id)
+ end
+end
diff --git a/spec/migrations/drop_activate_prometheus_services_background_jobs_spec.rb b/spec/migrations/drop_activate_prometheus_services_background_jobs_spec.rb
new file mode 100644
index 00000000000..a02a0819a7b
--- /dev/null
+++ b/spec/migrations/drop_activate_prometheus_services_background_jobs_spec.rb
@@ -0,0 +1,89 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'migrate', '20200221144534_drop_activate_prometheus_services_background_jobs.rb')
+
+describe DropActivatePrometheusServicesBackgroundJobs, :sidekiq, :redis, schema: 2020_02_21_144534 do
+ subject(:migration) { described_class.new }
+
+ describe '#up' do
+ let(:retry_set) { Sidekiq::RetrySet.new }
+ let(:scheduled_set) { Sidekiq::ScheduledSet.new }
+
+ context 'there are only affected jobs on the queue' do
+ let(:payload) { { 'class' => ::BackgroundMigrationWorker, 'args' => [described_class::DROPPED_JOB_CLASS, 1] } }
+ let(:queue_payload) { payload.merge('queue' => described_class::QUEUE) }
+
+ it 'removes enqueued ActivatePrometheusServicesForSharedClusterApplications background jobs' do
+ Sidekiq::Testing.disable! do # Sidekiq's API does not have a testing mode: https://github.com/mperham/sidekiq/wiki/testing#api
+ retry_set.schedule(1.hour.from_now, payload)
+ scheduled_set.schedule(1.hour.from_now, payload)
+ Sidekiq::Client.push(queue_payload)
+
+ expect { migration.up }.to change { Sidekiq::Queue.new(described_class::QUEUE).size }.from(1).to(0)
+ expect(retry_set.size).to eq(0)
+ expect(scheduled_set.size).to eq(0)
+ end
+ end
+ end
+
+ context "there aren't any affected jobs on the queue" do
+ let(:payload) { { 'class' => ::BackgroundMigrationWorker, 'args' => ['SomeOtherClass', 1] } }
+ let(:queue_payload) { payload.merge('queue' => described_class::QUEUE) }
+
+ it 'skips other enqueued jobs' do
+ Sidekiq::Testing.disable! do
+ retry_set.schedule(1.hour.from_now, payload)
+ scheduled_set.schedule(1.hour.from_now, payload)
+ Sidekiq::Client.push(queue_payload)
+
+ expect { migration.up }.not_to change { Sidekiq::Queue.new(described_class::QUEUE).size }
+ expect(retry_set.size).to eq(1)
+ expect(scheduled_set.size).to eq(1)
+ end
+ end
+ end
+
+ context "there are multiple types of jobs on the queue" do
+ let(:payload) { { 'class' => ::BackgroundMigrationWorker, 'args' => [described_class::DROPPED_JOB_CLASS, 1] } }
+ let(:queue_payload) { payload.merge('queue' => described_class::QUEUE) }
+
+ it 'skips other enqueued jobs' do
+ Sidekiq::Testing.disable! do
+ queue = Sidekiq::Queue.new(described_class::QUEUE)
+ # these jobs will be deleted
+ retry_set.schedule(1.hour.from_now, payload)
+ scheduled_set.schedule(1.hour.from_now, payload)
+ Sidekiq::Client.push(queue_payload)
+ # these jobs will be skipped
+ skipped_jobs_args = [['SomeOtherClass', 1], [described_class::DROPPED_JOB_CLASS, 'wrong id type'], [described_class::DROPPED_JOB_CLASS, 1, 'some weird argument']]
+ skipped_jobs_args.each do |args|
+ retry_set.schedule(1.hour.from_now, { 'class' => ::BackgroundMigrationWorker, 'args' => args })
+ scheduled_set.schedule(1.hour.from_now, { 'class' => ::BackgroundMigrationWorker, 'args' => args })
+ Sidekiq::Client.push('queue' => described_class::QUEUE, 'class' => ::BackgroundMigrationWorker, 'args' => args)
+ end
+
+ migration.up
+
+ expect(retry_set.size).to be 3
+ expect(scheduled_set.size).to be 3
+ expect(queue.size).to be 3
+ expect(queue.map(&:args)).to match_array skipped_jobs_args
+ expect(retry_set.map(&:args)).to match_array skipped_jobs_args
+ expect(scheduled_set.map(&:args)).to match_array skipped_jobs_args
+ end
+ end
+ end
+
+ context "other queues" do
+ it 'does not modify them' do
+ Sidekiq::Testing.disable! do
+ Sidekiq::Client.push('queue' => 'other', 'class' => ::BackgroundMigrationWorker, 'args' => ['SomeOtherClass', 1])
+ Sidekiq::Client.push('queue' => 'other', 'class' => ::BackgroundMigrationWorker, 'args' => [described_class::DROPPED_JOB_CLASS, 1])
+
+ expect { migration.up }.not_to change { Sidekiq::Queue.new('other').size }
+ end
+ end
+ end
+ end
+end
diff --git a/spec/migrations/drop_background_migration_jobs_spec.rb b/spec/migrations/drop_background_migration_jobs_spec.rb
index ac76e897f6c..d9e0561f326 100644
--- a/spec/migrations/drop_background_migration_jobs_spec.rb
+++ b/spec/migrations/drop_background_migration_jobs_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20200116051619_drop_background_migration_jobs.rb')
-describe DropBackgroundMigrationJobs, :sidekiq, :redis, :migration, schema: 2020_01_16_051619 do
+describe DropBackgroundMigrationJobs, :sidekiq, :redis, schema: 2020_01_16_051619 do
subject(:migration) { described_class.new }
describe '#up' do
diff --git a/spec/migrations/drop_duplicate_protected_tags_spec.rb b/spec/migrations/drop_duplicate_protected_tags_spec.rb
index 7f0c7efbf66..7135a15484c 100644
--- a/spec/migrations/drop_duplicate_protected_tags_spec.rb
+++ b/spec/migrations/drop_duplicate_protected_tags_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180711103851_drop_duplicate_protected_tags.rb')
-describe DropDuplicateProtectedTags, :migration do
+describe DropDuplicateProtectedTags do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:protected_tags) { table(:protected_tags) }
diff --git a/spec/migrations/drop_project_ci_cd_settings_merge_trains_enabled_spec.rb b/spec/migrations/drop_project_ci_cd_settings_merge_trains_enabled_spec.rb
index 1b0e6e140ca..9166f626922 100644
--- a/spec/migrations/drop_project_ci_cd_settings_merge_trains_enabled_spec.rb
+++ b/spec/migrations/drop_project_ci_cd_settings_merge_trains_enabled_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20191128162854_drop_project_ci_cd_settings_merge_trains_enabled.rb')
-describe DropProjectCiCdSettingsMergeTrainsEnabled, :migration do
+describe DropProjectCiCdSettingsMergeTrainsEnabled do
let!(:project_ci_cd_setting) { table(:project_ci_cd_settings) }
it 'correctly migrates up and down' do
diff --git a/spec/migrations/encrypt_deploy_tokens_tokens_spec.rb b/spec/migrations/encrypt_deploy_tokens_tokens_spec.rb
index a398e079731..4d0a0b31571 100644
--- a/spec/migrations/encrypt_deploy_tokens_tokens_spec.rb
+++ b/spec/migrations/encrypt_deploy_tokens_tokens_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190711201818_encrypt_deploy_tokens_tokens.rb')
-describe EncryptDeployTokensTokens, :migration do
+describe EncryptDeployTokensTokens do
let(:migration) { described_class.new }
let(:deployment_tokens) { table(:deploy_tokens) }
let(:plaintext) { "secret-token" }
diff --git a/spec/migrations/encrypt_feature_flags_clients_tokens_spec.rb b/spec/migrations/encrypt_feature_flags_clients_tokens_spec.rb
index 95b02d20594..9b139c4b57b 100644
--- a/spec/migrations/encrypt_feature_flags_clients_tokens_spec.rb
+++ b/spec/migrations/encrypt_feature_flags_clients_tokens_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190606175050_encrypt_feature_flags_clients_tokens.rb')
-describe EncryptFeatureFlagsClientsTokens, :migration do
+describe EncryptFeatureFlagsClientsTokens do
let(:migration) { described_class.new }
let(:feature_flags_clients) { table(:operations_feature_flags_clients) }
let(:projects) { table(:projects) }
diff --git a/spec/migrations/encrypt_plaintext_attributes_on_application_settings_spec.rb b/spec/migrations/encrypt_plaintext_attributes_on_application_settings_spec.rb
index 122da7b3d72..87a72ed0cf5 100644
--- a/spec/migrations/encrypt_plaintext_attributes_on_application_settings_spec.rb
+++ b/spec/migrations/encrypt_plaintext_attributes_on_application_settings_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20191120115530_encrypt_plaintext_attributes_on_application_settings.rb')
-describe EncryptPlaintextAttributesOnApplicationSettings, :migration do
+describe EncryptPlaintextAttributesOnApplicationSettings do
let(:migration) { described_class.new }
let(:application_settings) { table(:application_settings) }
let(:plaintext) { 'secret-token' }
diff --git a/spec/migrations/enqueue_reset_merge_status_second_run_spec.rb b/spec/migrations/enqueue_reset_merge_status_second_run_spec.rb
index bdc248f2cf2..d4cf3d15758 100644
--- a/spec/migrations/enqueue_reset_merge_status_second_run_spec.rb
+++ b/spec/migrations/enqueue_reset_merge_status_second_run_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190620112608_enqueue_reset_merge_status_second_run.rb')
-describe EnqueueResetMergeStatusSecondRun, :migration do
+describe EnqueueResetMergeStatusSecondRun do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:namespace) { namespaces.create(name: 'gitlab', path: 'gitlab-org') }
diff --git a/spec/migrations/enqueue_reset_merge_status_spec.rb b/spec/migrations/enqueue_reset_merge_status_spec.rb
index 4b312a3bc62..9728ada14ba 100644
--- a/spec/migrations/enqueue_reset_merge_status_spec.rb
+++ b/spec/migrations/enqueue_reset_merge_status_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190528180441_enqueue_reset_merge_status.rb')
-describe EnqueueResetMergeStatus, :migration do
+describe EnqueueResetMergeStatus do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:namespace) { namespaces.create(name: 'gitlab', path: 'gitlab-org') }
diff --git a/spec/migrations/enqueue_verify_pages_domain_workers_spec.rb b/spec/migrations/enqueue_verify_pages_domain_workers_spec.rb
index 8efaab871a1..ffb1c04a6c5 100644
--- a/spec/migrations/enqueue_verify_pages_domain_workers_spec.rb
+++ b/spec/migrations/enqueue_verify_pages_domain_workers_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180216121030_enqueue_verify_pages_domain_workers')
-describe EnqueueVerifyPagesDomainWorkers, :migration do
+describe EnqueueVerifyPagesDomainWorkers do
around do |example|
Sidekiq::Testing.fake! do
example.run
diff --git a/spec/migrations/fill_empty_finished_at_in_deployments_spec.rb b/spec/migrations/fill_empty_finished_at_in_deployments_spec.rb
index 50ecf083f27..546a805dec8 100644
--- a/spec/migrations/fill_empty_finished_at_in_deployments_spec.rb
+++ b/spec/migrations/fill_empty_finished_at_in_deployments_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20181030135124_fill_empty_finished_at_in_deployments')
-describe FillEmptyFinishedAtInDeployments, :migration do
+describe FillEmptyFinishedAtInDeployments do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:environments) { table(:environments) }
diff --git a/spec/migrations/fill_file_store_spec.rb b/spec/migrations/fill_file_store_spec.rb
index 806c9283634..732fdc2a0bb 100644
--- a/spec/migrations/fill_file_store_spec.rb
+++ b/spec/migrations/fill_file_store_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180424151928_fill_file_store')
-describe FillFileStore, :migration do
+describe FillFileStore do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:builds) { table(:ci_builds) }
diff --git a/spec/migrations/fill_productivity_analytics_start_date_spec.rb b/spec/migrations/fill_productivity_analytics_start_date_spec.rb
index 7cbba9ef20e..4ae7b0eed24 100644
--- a/spec/migrations/fill_productivity_analytics_start_date_spec.rb
+++ b/spec/migrations/fill_productivity_analytics_start_date_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20191004081520_fill_productivity_analytics_start_date.rb')
-describe FillProductivityAnalyticsStartDate, :migration do
+describe FillProductivityAnalyticsStartDate do
let(:settings_table) { table('application_settings') }
let(:metrics_table) { table('merge_request_metrics') }
diff --git a/spec/migrations/fix_max_pages_size_spec.rb b/spec/migrations/fix_max_pages_size_spec.rb
index 36b5445603e..9882cda7fba 100644
--- a/spec/migrations/fix_max_pages_size_spec.rb
+++ b/spec/migrations/fix_max_pages_size_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20191213120427_fix_max_pages_size.rb')
-describe FixMaxPagesSize, :migration do
+describe FixMaxPagesSize do
let(:application_settings) { table(:application_settings) }
let!(:default_setting) { application_settings.create! }
let!(:max_possible_setting) { application_settings.create!(max_pages_size: described_class::MAX_SIZE) }
diff --git a/spec/migrations/fix_null_type_labels_spec.rb b/spec/migrations/fix_null_type_labels_spec.rb
index 462ae9b913f..b098141c5e9 100644
--- a/spec/migrations/fix_null_type_labels_spec.rb
+++ b/spec/migrations/fix_null_type_labels_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190131122559_fix_null_type_labels')
-describe FixNullTypeLabels, :migration do
+describe FixNullTypeLabels do
let(:migration) { described_class.new }
let(:projects) { table(:projects) }
let(:namespaces) { table(:namespaces) }
diff --git a/spec/migrations/fix_pool_repository_source_project_id_spec.rb b/spec/migrations/fix_pool_repository_source_project_id_spec.rb
index 8ddee9bb575..5a878dba6e7 100644
--- a/spec/migrations/fix_pool_repository_source_project_id_spec.rb
+++ b/spec/migrations/fix_pool_repository_source_project_id_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20190604184643_fix_pool_repository_source_project_id.rb')
-describe FixPoolRepositorySourceProjectId, :migration do
+describe FixPoolRepositorySourceProjectId do
let(:projects) { table(:projects) }
let(:pool_repositories) { table(:pool_repositories) }
let(:shards) { table(:shards) }
diff --git a/spec/migrations/fix_projects_without_project_feature_spec.rb b/spec/migrations/fix_projects_without_project_feature_spec.rb
index 6e0345da078..01413261008 100644
--- a/spec/migrations/fix_projects_without_project_feature_spec.rb
+++ b/spec/migrations/fix_projects_without_project_feature_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200127111840_fix_projects_without_project_feature.rb')
-describe FixProjectsWithoutProjectFeature, :migration do
+describe FixProjectsWithoutProjectFeature do
let(:namespace) { table(:namespaces).create(name: 'gitlab', path: 'gitlab-org') }
let!(:projects) do
diff --git a/spec/migrations/fix_wrong_pages_access_level_spec.rb b/spec/migrations/fix_wrong_pages_access_level_spec.rb
index 73d8218b95c..e0d09add740 100644
--- a/spec/migrations/fix_wrong_pages_access_level_spec.rb
+++ b/spec/migrations/fix_wrong_pages_access_level_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190703185326_fix_wrong_pages_access_level.rb')
-describe FixWrongPagesAccessLevel, :migration, :sidekiq_might_not_need_inline, schema: 20190628185004 do
+describe FixWrongPagesAccessLevel, :sidekiq_might_not_need_inline, schema: 20190628185004 do
using RSpec::Parameterized::TableSyntax
let(:migration_class) { described_class::MIGRATION }
diff --git a/spec/migrations/generate_lets_encrypt_private_key_spec.rb b/spec/migrations/generate_lets_encrypt_private_key_spec.rb
index 7746ba46446..c0cb39fd519 100644
--- a/spec/migrations/generate_lets_encrypt_private_key_spec.rb
+++ b/spec/migrations/generate_lets_encrypt_private_key_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20190524062810_generate_lets_encrypt_private_key.rb')
-describe GenerateLetsEncryptPrivateKey, :migration do
+describe GenerateLetsEncryptPrivateKey do
describe '#up' do
it 'does not fail' do
expect do
diff --git a/spec/migrations/generate_missing_routes_spec.rb b/spec/migrations/generate_missing_routes_spec.rb
index a4a25951ff0..3ff220aa8d3 100644
--- a/spec/migrations/generate_missing_routes_spec.rb
+++ b/spec/migrations/generate_missing_routes_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180702134423_generate_missing_routes.rb')
-describe GenerateMissingRoutes, :migration do
+describe GenerateMissingRoutes do
describe '#up' do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
diff --git a/spec/migrations/import_common_metrics_spec.rb b/spec/migrations/import_common_metrics_spec.rb
index 1001629007c..8c28b46cb38 100644
--- a/spec/migrations/import_common_metrics_spec.rb
+++ b/spec/migrations/import_common_metrics_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180831164910_import_common_metrics.rb')
-describe ImportCommonMetrics, :migration do
+describe ImportCommonMetrics do
describe '#up' do
it "imports all prometheus metrics" do
expect(PrometheusMetric.common).to be_empty
diff --git a/spec/migrations/insert_project_hooks_plan_limits_spec.rb b/spec/migrations/insert_project_hooks_plan_limits_spec.rb
index abc2ccd0507..e4bdda4cf5e 100644
--- a/spec/migrations/insert_project_hooks_plan_limits_spec.rb
+++ b/spec/migrations/insert_project_hooks_plan_limits_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20191216183532_insert_project_hooks_plan_limits.rb')
-describe InsertProjectHooksPlanLimits, :migration do
+describe InsertProjectHooksPlanLimits do
let(:migration) { described_class.new }
let(:plans) { table(:plans) }
let(:plan_limits) { table(:plan_limits) }
diff --git a/spec/migrations/migrate_auto_dev_ops_domain_to_cluster_domain_spec.rb b/spec/migrations/migrate_auto_dev_ops_domain_to_cluster_domain_spec.rb
index 349cffea70e..9188c19f76a 100644
--- a/spec/migrations/migrate_auto_dev_ops_domain_to_cluster_domain_spec.rb
+++ b/spec/migrations/migrate_auto_dev_ops_domain_to_cluster_domain_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190204115450_migrate_auto_dev_ops_domain_to_cluster_domain.rb')
-describe MigrateAutoDevOpsDomainToClusterDomain, :migration do
+describe MigrateAutoDevOpsDomainToClusterDomain do
include MigrationHelpers::ClusterHelpers
let(:migration) { described_class.new }
diff --git a/spec/migrations/migrate_code_owner_approval_status_to_protected_branches_in_batches_spec.rb b/spec/migrations/migrate_code_owner_approval_status_to_protected_branches_in_batches_spec.rb
index 67ac40d4d39..cda965135b0 100644
--- a/spec/migrations/migrate_code_owner_approval_status_to_protected_branches_in_batches_spec.rb
+++ b/spec/migrations/migrate_code_owner_approval_status_to_protected_branches_in_batches_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190827102026_migrate_code_owner_approval_status_to_protected_branches_in_batches.rb')
-describe MigrateCodeOwnerApprovalStatusToProtectedBranchesInBatches, :migration do
+describe MigrateCodeOwnerApprovalStatusToProtectedBranchesInBatches do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:protected_branches) { table(:protected_branches) }
diff --git a/spec/migrations/migrate_commit_notes_mentions_to_db_spec.rb b/spec/migrations/migrate_commit_notes_mentions_to_db_spec.rb
new file mode 100644
index 00000000000..aa78381ba3a
--- /dev/null
+++ b/spec/migrations/migrate_commit_notes_mentions_to_db_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200128134110_migrate_commit_notes_mentions_to_db')
+
+describe MigrateCommitNotesMentionsToDb, :migration, :sidekiq do
+ let(:users) { table(:users) }
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:notes) { table(:notes) }
+
+ let(:user) { users.create!(name: 'root', email: 'root@example.com', username: 'root', projects_limit: 0) }
+ let(:group) { namespaces.create!(name: 'group1', path: 'group1', owner_id: user.id) }
+ let(:project) { projects.create!(name: 'gitlab1', path: 'gitlab1', namespace_id: group.id, visibility_level: 0) }
+
+ let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
+ let(:commit) { Commit.new(RepoHelpers.sample_commit, project.becomes(Project)) }
+ let(:commit_user_mentions) { table(:commit_user_mentions) }
+
+ let!(:resource1) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: user.id, note: 'note1 for @root to check') }
+ let!(:resource2) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: user.id, note: 'note1 for @root to check') }
+ let!(:resource3) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: user.id, note: 'note1 for @root to check', system: true) }
+
+ # non-migrateable resources
+ # this note is already migrated, as it has a record in the commit_user_mentions table
+ let!(:resource4) { notes.create!(note: 'note3 for @root to check', commit_id: commit.id, noteable_type: 'Commit') }
+ let!(:user_mention) { commit_user_mentions.create!(commit_id: commit.id, note_id: resource4.id, mentioned_users_ids: [1]) }
+ # this note should point to a nonexistent commit record in a commits table,
+ # but because Commit is not an ActiveRecord model, we simply ensure the note has no mentions, i.e. no `@` character.
+ let!(:resource5) { notes.create!(note: 'note3 to check', commit_id: 'abc', noteable_type: 'Commit') }
+
+ before do
+ stub_const("#{described_class.name}::BATCH_SIZE", 1)
+ end
+
+ it_behaves_like 'schedules resource mentions migration', Commit, true
+end
diff --git a/spec/migrations/migrate_discussion_id_on_promoted_epics_spec.rb b/spec/migrations/migrate_discussion_id_on_promoted_epics_spec.rb
index deeea74bd3b..0a8975402da 100644
--- a/spec/migrations/migrate_discussion_id_on_promoted_epics_spec.rb
+++ b/spec/migrations/migrate_discussion_id_on_promoted_epics_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190715193142_migrate_discussion_id_on_promoted_epics.rb')
-describe MigrateDiscussionIdOnPromotedEpics, :migration do
+describe MigrateDiscussionIdOnPromotedEpics do
let(:migration_class) { described_class::MIGRATION }
let(:migration_name) { migration_class.to_s.demodulize }
diff --git a/spec/migrations/migrate_forbidden_redirect_uris_spec.rb b/spec/migrations/migrate_forbidden_redirect_uris_spec.rb
index 0bc13a3974a..7c3cc9f07c8 100644
--- a/spec/migrations/migrate_forbidden_redirect_uris_spec.rb
+++ b/spec/migrations/migrate_forbidden_redirect_uris_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20181026091631_migrate_forbidden_redirect_uris.rb')
-describe MigrateForbiddenRedirectUris, :migration do
+describe MigrateForbiddenRedirectUris do
let(:oauth_application) { table(:oauth_applications) }
let(:oauth_access_grant) { table(:oauth_access_grants) }
diff --git a/spec/migrations/migrate_k8s_service_integration_spec.rb b/spec/migrations/migrate_k8s_service_integration_spec.rb
index 4dd0c09632a..660e958eb42 100644
--- a/spec/migrations/migrate_k8s_service_integration_spec.rb
+++ b/spec/migrations/migrate_k8s_service_integration_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190517153211_migrate_k8s_service_integration.rb')
-describe MigrateK8sServiceIntegration, :migration do
+describe MigrateK8sServiceIntegration do
context 'template service' do
context 'with namespace' do
let!(:service) do
diff --git a/spec/migrations/migrate_legacy_artifacts_to_job_artifacts_spec.rb b/spec/migrations/migrate_legacy_artifacts_to_job_artifacts_spec.rb
index d6259023c01..5133afdf5b0 100644
--- a/spec/migrations/migrate_legacy_artifacts_to_job_artifacts_spec.rb
+++ b/spec/migrations/migrate_legacy_artifacts_to_job_artifacts_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180816161409_migrate_legacy_artifacts_to_job_artifacts.rb')
-describe MigrateLegacyArtifactsToJobArtifacts, :migration do
+describe MigrateLegacyArtifactsToJobArtifacts do
let(:migration_class) { Gitlab::BackgroundMigration::MigrateLegacyArtifacts }
let(:migration_name) { migration_class.to_s.demodulize }
diff --git a/spec/migrations/migrate_legacy_managed_clusters_to_unmanaged_spec.rb b/spec/migrations/migrate_legacy_managed_clusters_to_unmanaged_spec.rb
index 93426f1f273..e3462e1d6bd 100644
--- a/spec/migrations/migrate_legacy_managed_clusters_to_unmanaged_spec.rb
+++ b/spec/migrations/migrate_legacy_managed_clusters_to_unmanaged_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190606163724_migrate_legacy_managed_clusters_to_unmanaged.rb')
-describe MigrateLegacyManagedClustersToUnmanaged, :migration do
+describe MigrateLegacyManagedClustersToUnmanaged do
let(:cluster_type) { 'project_type' }
let(:created_at) { 1.hour.ago }
diff --git a/spec/migrations/migrate_managed_clusters_with_no_token_to_unmanaged_spec.rb b/spec/migrations/migrate_managed_clusters_with_no_token_to_unmanaged_spec.rb
index b73bd16cb60..2931fba3eb2 100644
--- a/spec/migrations/migrate_managed_clusters_with_no_token_to_unmanaged_spec.rb
+++ b/spec/migrations/migrate_managed_clusters_with_no_token_to_unmanaged_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190613231640_migrate_managed_clusters_with_no_token_to_unmanaged.rb')
-describe MigrateManagedClustersWithNoTokenToUnmanaged, :migration do
+describe MigrateManagedClustersWithNoTokenToUnmanaged do
let(:cluster_type) { 'project_type' }
let(:created_at) { Date.new(2018, 11, 1).midnight }
diff --git a/spec/migrations/migrate_merge_request_mentions_to_db_spec.rb b/spec/migrations/migrate_merge_request_mentions_to_db_spec.rb
new file mode 100644
index 00000000000..aef8fd6490b
--- /dev/null
+++ b/spec/migrations/migrate_merge_request_mentions_to_db_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200211155539_migrate_merge_request_mentions_to_db')
+
+describe MigrateMergeRequestMentionsToDb, :migration do
+ let(:users) { table(:users) }
+ let(:projects) { table(:projects) }
+ let(:namespaces) { table(:namespaces) }
+ let(:merge_requests) { table(:merge_requests) }
+ let(:merge_request_user_mentions) { table(:merge_request_user_mentions) }
+
+ let(:user) { users.create!(name: 'root', email: 'root@example.com', username: 'root', projects_limit: 0) }
+ let(:group) { namespaces.create!(name: 'group1', path: 'group1', owner_id: user.id, type: 'Group') }
+ let(:project) { projects.create!(name: 'gitlab1', path: 'gitlab1', namespace_id: group.id, visibility_level: 0) }
+
+ # migrateable resources
+ let(:common_args) { { source_branch: 'master', source_project_id: project.id, target_project_id: project.id, author_id: user.id, description: 'mr description with @root mention' } }
+ let!(:resource1) { merge_requests.create!(common_args.merge(title: "title 1", state_id: 1, target_branch: 'feature1')) }
+ let!(:resource2) { merge_requests.create!(common_args.merge(title: "title 2", state_id: 1, target_branch: 'feature2')) }
+ let!(:resource3) { merge_requests.create!(common_args.merge(title: "title 3", state_id: 1, target_branch: 'feature3')) }
+
+ # non-migrateable resources
+ # this merge request is already migrated, as it has a record in the merge_request_user_mentions table
+ let!(:resource4) { merge_requests.create!(common_args.merge(title: "title 3", state_id: 1, target_branch: 'feature3')) }
+ let!(:user_mention) { merge_request_user_mentions.create!(merge_request_id: resource4.id, mentioned_users_ids: [1]) }
+
+ let!(:resource5) { merge_requests.create!(common_args.merge(title: "title 3", description: 'description with no mention', state_id: 1, target_branch: 'feature3')) }
+
+ it_behaves_like 'schedules resource mentions migration', MergeRequest, false
+end
diff --git a/spec/migrations/migrate_null_wiki_access_levels_spec.rb b/spec/migrations/migrate_null_wiki_access_levels_spec.rb
index f99273072a2..f4753f67e17 100644
--- a/spec/migrations/migrate_null_wiki_access_levels_spec.rb
+++ b/spec/migrations/migrate_null_wiki_access_levels_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180809195358_migrate_null_wiki_access_levels.rb')
-describe MigrateNullWikiAccessLevels, :migration do
+describe MigrateNullWikiAccessLevels do
let(:namespaces) { table('namespaces') }
let(:projects) { table(:projects) }
let(:project_features) { table(:project_features) }
diff --git a/spec/migrations/migrate_ops_feature_flags_scopes_target_user_ids_spec.rb b/spec/migrations/migrate_ops_feature_flags_scopes_target_user_ids_spec.rb
index 5f865579c96..fc5d814a2de 100644
--- a/spec/migrations/migrate_ops_feature_flags_scopes_target_user_ids_spec.rb
+++ b/spec/migrations/migrate_ops_feature_flags_scopes_target_user_ids_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20191118211629_migrate_ops_feature_flags_scopes_target_user_ids.rb')
-describe MigrateOpsFeatureFlagsScopesTargetUserIds, :migration do
+describe MigrateOpsFeatureFlagsScopesTargetUserIds do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:flags) { table(:operations_feature_flags) }
diff --git a/spec/migrations/move_limits_from_plans_spec.rb b/spec/migrations/move_limits_from_plans_spec.rb
index 693d6ecb2c1..aeb36100205 100644
--- a/spec/migrations/move_limits_from_plans_spec.rb
+++ b/spec/migrations/move_limits_from_plans_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20191030152934_move_limits_from_plans.rb')
-describe MoveLimitsFromPlans, :migration do
+describe MoveLimitsFromPlans do
let(:plans) { table(:plans) }
let(:plan_limits) { table(:plan_limits) }
diff --git a/spec/migrations/nullify_users_role_spec.rb b/spec/migrations/nullify_users_role_spec.rb
index ad25e4885ef..487d84e2a35 100644
--- a/spec/migrations/nullify_users_role_spec.rb
+++ b/spec/migrations/nullify_users_role_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20191104142124_nullify_users_role.rb')
-describe NullifyUsersRole, :migration do
+describe NullifyUsersRole do
let(:users) { table(:users) }
before do
diff --git a/spec/migrations/populate_project_statistics_packages_size_spec.rb b/spec/migrations/populate_project_statistics_packages_size_spec.rb
index 4ad91342f25..c316a4bc8b7 100644
--- a/spec/migrations/populate_project_statistics_packages_size_spec.rb
+++ b/spec/migrations/populate_project_statistics_packages_size_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190418132125_populate_project_statistics_packages_size.rb')
-describe PopulateProjectStatisticsPackagesSize, :migration do
+describe PopulateProjectStatisticsPackagesSize do
let(:project_statistics) { table(:project_statistics) }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
diff --git a/spec/migrations/populate_rule_type_on_approval_merge_request_rules_spec.rb b/spec/migrations/populate_rule_type_on_approval_merge_request_rules_spec.rb
index 99dfb165173..d6362528068 100644
--- a/spec/migrations/populate_rule_type_on_approval_merge_request_rules_spec.rb
+++ b/spec/migrations/populate_rule_type_on_approval_merge_request_rules_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190520201748_populate_rule_type_on_approval_merge_request_rules.rb')
-describe PopulateRuleTypeOnApprovalMergeRequestRules, :migration do
+describe PopulateRuleTypeOnApprovalMergeRequestRules do
let(:migration) { described_class.new }
describe '#up' do
diff --git a/spec/migrations/remove_empty_extern_uid_auth0_identities_spec.rb b/spec/migrations/remove_empty_extern_uid_auth0_identities_spec.rb
index ad1bcf37732..5be8706cacf 100644
--- a/spec/migrations/remove_empty_extern_uid_auth0_identities_spec.rb
+++ b/spec/migrations/remove_empty_extern_uid_auth0_identities_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180220150310_remove_empty_extern_uid_auth0_identities.rb')
-describe RemoveEmptyExternUidAuth0Identities, :migration do
+describe RemoveEmptyExternUidAuth0Identities do
let(:identities) { table(:identities) }
before do
diff --git a/spec/migrations/remove_empty_github_service_templates_spec.rb b/spec/migrations/remove_empty_github_service_templates_spec.rb
index c128c8538db..51b29ec6efc 100644
--- a/spec/migrations/remove_empty_github_service_templates_spec.rb
+++ b/spec/migrations/remove_empty_github_service_templates_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20191021101942_remove_empty_github_service_templates.rb')
-describe RemoveEmptyGithubServiceTemplates, :migration do
+describe RemoveEmptyGithubServiceTemplates do
subject(:migration) { described_class.new }
let(:services) do
diff --git a/spec/migrations/remove_packages_deprecated_dependencies_spec.rb b/spec/migrations/remove_packages_deprecated_dependencies_spec.rb
index 0b7efe371a6..2ba7a3b268b 100644
--- a/spec/migrations/remove_packages_deprecated_dependencies_spec.rb
+++ b/spec/migrations/remove_packages_deprecated_dependencies_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20200210135504_remove_packages_deprecated_dependencies.rb')
-describe RemovePackagesDeprecatedDependencies, :migration do
+describe RemovePackagesDeprecatedDependencies do
let(:projects) { table(:projects) }
let(:packages) { table(:packages_packages) }
let(:dependency_links) { table(:packages_dependency_links) }
diff --git a/spec/migrations/remove_redundant_pipeline_stages_spec.rb b/spec/migrations/remove_redundant_pipeline_stages_spec.rb
index ad905d7eb8a..9bcbb6022a7 100644
--- a/spec/migrations/remove_redundant_pipeline_stages_spec.rb
+++ b/spec/migrations/remove_redundant_pipeline_stages_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180119121225_remove_redundant_pipeline_stages.rb')
-describe RemoveRedundantPipelineStages, :migration do
+describe RemoveRedundantPipelineStages do
let(:projects) { table(:projects) }
let(:pipelines) { table(:ci_pipelines) }
let(:stages) { table(:ci_stages) }
diff --git a/spec/migrations/remove_security_dashboard_feature_flag_spec.rb b/spec/migrations/remove_security_dashboard_feature_flag_spec.rb
new file mode 100644
index 00000000000..fa0489526e2
--- /dev/null
+++ b/spec/migrations/remove_security_dashboard_feature_flag_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require Rails.root.join('db', 'post_migrate', '20200214034836_remove_security_dashboard_feature_flag.rb')
+
+describe RemoveSecurityDashboardFeatureFlag do
+ let(:feature_gates) { table(:feature_gates) }
+
+ subject(:migration) { described_class.new }
+
+ describe '#up' do
+ it 'deletes the security_dashboard feature gate' do
+ security_dashboard_feature = feature_gates.create!(feature_key: :security_dashboard, key: :boolean, value: 'false')
+ actors_security_dashboard_feature = feature_gates.create!(feature_key: :security_dashboard, key: :actors, value: 'Project:1')
+
+ migration.up
+
+ expect { security_dashboard_feature.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ expect(actors_security_dashboard_feature.reload).to be_present
+ end
+ end
+
+ describe '#down' do
+ it 'copies the instance_security_dashboard feature gate to a security_dashboard gate' do
+ feature_gates.create!(feature_key: :instance_security_dashboard, key: :actors, value: 'Project:1')
+ feature_gates.create!(feature_key: :instance_security_dashboard, key: 'boolean', value: 'false')
+
+ migration.down
+
+ security_dashboard_feature = feature_gates.find_by(feature_key: :security_dashboard, key: :boolean)
+ expect(security_dashboard_feature.value).to eq('false')
+ end
+
+ context 'when there is no instance_security_dashboard gate' do
+ it 'does nothing' do
+ migration.down
+
+ security_dashboard_feature = feature_gates.find_by(feature_key: :security_dashboard, key: :boolean)
+ expect(security_dashboard_feature).to be_nil
+ end
+ end
+
+ context 'when there already is a security_dashboard gate' do
+ it 'does nothing' do
+ feature_gates.create!(feature_key: :security_dashboard, key: 'boolean', value: 'false')
+ feature_gates.create!(feature_key: :instance_security_dashboard, key: 'boolean', value: 'false')
+
+ expect { migration.down }.not_to raise_error
+ end
+ end
+ end
+end
diff --git a/spec/migrations/rename_security_dashboard_feature_flag_to_instance_security_dashboard_spec.rb b/spec/migrations/rename_security_dashboard_feature_flag_to_instance_security_dashboard_spec.rb
new file mode 100644
index 00000000000..07be7a4ad51
--- /dev/null
+++ b/spec/migrations/rename_security_dashboard_feature_flag_to_instance_security_dashboard_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require Rails.root.join('db', 'migrate', '20200212014653_rename_security_dashboard_feature_flag_to_instance_security_dashboard.rb')
+
+describe RenameSecurityDashboardFeatureFlagToInstanceSecurityDashboard do
+ let(:feature_gates) { table(:feature_gates) }
+
+ subject(:migration) { described_class.new }
+
+ describe '#up' do
+ it 'copies the security_dashboard feature gate to a new instance_security_dashboard gate' do
+ feature_gates.create!(feature_key: :security_dashboard, key: :actors, value: 'Project:1')
+ feature_gates.create!(feature_key: :security_dashboard, key: :boolean, value: 'false')
+
+ migration.up
+
+ instance_security_dashboard_feature = feature_gates.find_by(feature_key: :instance_security_dashboard, key: :boolean)
+ expect(instance_security_dashboard_feature.value).to eq('false')
+ end
+
+ context 'when there is no security_dashboard gate' do
+ it 'does nothing' do
+ migration.up
+
+ instance_security_dashboard_feature = feature_gates.find_by(feature_key: :instance_security_dashboard, key: :boolean)
+ expect(instance_security_dashboard_feature).to be_nil
+ end
+ end
+
+ context 'when there is already an instance_security_dashboard gate' do
+ it 'does nothing' do
+ feature_gates.create!(feature_key: :security_dashboard, key: 'boolean', value: 'false')
+ feature_gates.create!(feature_key: :instance_security_dashboard, key: 'boolean', value: 'false')
+
+ expect { migration.up }.not_to raise_error
+ end
+ end
+ end
+
+ describe '#down' do
+ it 'removes the instance_security_dashboard gate' do
+ actors_instance_security_dashboard_feature = feature_gates.create!(feature_key: :instance_security_dashboard, key: :actors, value: 'Project:1')
+ instance_security_dashboard_feature = feature_gates.create!(feature_key: :instance_security_dashboard, key: :boolean, value: 'false')
+
+ migration.down
+
+ expect { instance_security_dashboard_feature.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ expect(actors_instance_security_dashboard_feature.reload).to be_present
+ end
+ end
+end
diff --git a/spec/migrations/reschedule_builds_stages_migration_spec.rb b/spec/migrations/reschedule_builds_stages_migration_spec.rb
index 8127934afab..18ea16f97bc 100644
--- a/spec/migrations/reschedule_builds_stages_migration_spec.rb
+++ b/spec/migrations/reschedule_builds_stages_migration_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180405101928_reschedule_builds_stages_migration')
-describe RescheduleBuildsStagesMigration, :migration do
+describe RescheduleBuildsStagesMigration do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:pipelines) { table(:ci_pipelines) }
diff --git a/spec/migrations/reschedule_commits_count_for_merge_request_diff_spec.rb b/spec/migrations/reschedule_commits_count_for_merge_request_diff_spec.rb
index 0e34e63fcc1..dcb31dff9b7 100644
--- a/spec/migrations/reschedule_commits_count_for_merge_request_diff_spec.rb
+++ b/spec/migrations/reschedule_commits_count_for_merge_request_diff_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180309121820_reschedule_commits_count_for_merge_request_diff')
-describe RescheduleCommitsCountForMergeRequestDiff, :migration do
+describe RescheduleCommitsCountForMergeRequestDiff do
let(:merge_request_diffs) { table(:merge_request_diffs) }
let(:merge_requests) { table(:merge_requests) }
let(:projects) { table(:projects) }
diff --git a/spec/migrations/save_instance_administrators_group_id_spec.rb b/spec/migrations/save_instance_administrators_group_id_spec.rb
index eab41017480..74ced009fa5 100644
--- a/spec/migrations/save_instance_administrators_group_id_spec.rb
+++ b/spec/migrations/save_instance_administrators_group_id_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200210092405_save_instance_administrators_group_id')
-describe SaveInstanceAdministratorsGroupId, :migration do
+describe SaveInstanceAdministratorsGroupId do
let(:application_settings_table) { table(:application_settings) }
let(:instance_administrators_group) do
diff --git a/spec/migrations/schedule_digest_personal_access_tokens_spec.rb b/spec/migrations/schedule_digest_personal_access_tokens_spec.rb
index 915397f0f4f..d8e1b089d31 100644
--- a/spec/migrations/schedule_digest_personal_access_tokens_spec.rb
+++ b/spec/migrations/schedule_digest_personal_access_tokens_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180913142237_schedule_digest_personal_access_tokens.rb')
-describe ScheduleDigestPersonalAccessTokens, :migration do
+describe ScheduleDigestPersonalAccessTokens do
let(:personal_access_tokens) { table(:personal_access_tokens) }
let(:users) { table(:users) }
diff --git a/spec/migrations/schedule_fill_valid_time_for_pages_domain_certificates_spec.rb b/spec/migrations/schedule_fill_valid_time_for_pages_domain_certificates_spec.rb
index 43333ab9837..30cb68c742c 100644
--- a/spec/migrations/schedule_fill_valid_time_for_pages_domain_certificates_spec.rb
+++ b/spec/migrations/schedule_fill_valid_time_for_pages_domain_certificates_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190524073827_schedule_fill_valid_time_for_pages_domain_certificates.rb')
-describe ScheduleFillValidTimeForPagesDomainCertificates, :migration do
+describe ScheduleFillValidTimeForPagesDomainCertificates do
let(:migration_class) { described_class::MIGRATION }
let(:migration_name) { migration_class.to_s.demodulize }
diff --git a/spec/migrations/schedule_link_lfs_objects_projects_spec.rb b/spec/migrations/schedule_link_lfs_objects_projects_spec.rb
new file mode 100644
index 00000000000..055ab3cdd83
--- /dev/null
+++ b/spec/migrations/schedule_link_lfs_objects_projects_spec.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200310075115_schedule_link_lfs_objects_projects.rb')
+
+describe ScheduleLinkLfsObjectsProjects, :migration, :sidekiq do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:fork_networks) { table(:fork_networks) }
+ let(:fork_network_members) { table(:fork_network_members) }
+ let(:lfs_objects) { table(:lfs_objects) }
+ let(:lfs_objects_projects) { table(:lfs_objects_projects) }
+
+ let(:namespace) { namespaces.create(name: 'GitLab', path: 'gitlab') }
+
+ let(:fork_network) { fork_networks.create(root_project_id: source_project.id) }
+ let(:another_fork_network) { fork_networks.create(root_project_id: another_source_project.id) }
+
+ let(:source_project) { projects.create(namespace_id: namespace.id) }
+ let(:another_source_project) { projects.create(namespace_id: namespace.id) }
+ let(:project) { projects.create(namespace_id: namespace.id) }
+ let(:another_project) { projects.create(namespace_id: namespace.id) }
+
+ let(:lfs_object) { lfs_objects.create(oid: 'abc123', size: 100) }
+ let(:another_lfs_object) { lfs_objects.create(oid: 'def456', size: 200) }
+
+ let!(:source_project_lop_1) do
+ lfs_objects_projects.create(
+ lfs_object_id: lfs_object.id,
+ project_id: source_project.id
+ )
+ end
+
+ let!(:source_project_lop_2) do
+ lfs_objects_projects.create(
+ lfs_object_id: another_lfs_object.id,
+ project_id: source_project.id
+ )
+ end
+
+ let!(:another_source_project_lop_1) do
+ lfs_objects_projects.create(
+ lfs_object_id: lfs_object.id,
+ project_id: another_source_project.id
+ )
+ end
+
+ let!(:another_source_project_lop_2) do
+ lfs_objects_projects.create(
+ lfs_object_id: another_lfs_object.id,
+ project_id: another_source_project.id
+ )
+ end
+
+ before do
+ stub_const("#{described_class.name}::BATCH_SIZE", 2)
+
+ # Create links between projects
+ fork_network_members.create(fork_network_id: fork_network.id, project_id: source_project.id, forked_from_project_id: nil)
+ fork_network_members.create(fork_network_id: fork_network.id, project_id: project.id, forked_from_project_id: source_project.id)
+ fork_network_members.create(fork_network_id: another_fork_network.id, project_id: another_source_project.id, forked_from_project_id: nil)
+ fork_network_members.create(fork_network_id: another_fork_network.id, project_id: another_project.id, forked_from_project_id: another_fork_network.root_project_id)
+ end
+
+ it 'schedules background migration to link LFS objects' do
+ Sidekiq::Testing.fake! do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ expect(described_class::MIGRATION)
+ .to be_scheduled_delayed_migration(2.minutes, source_project_lop_1.id, source_project_lop_2.id)
+ expect(described_class::MIGRATION)
+ .to be_scheduled_delayed_migration(4.minutes, another_source_project_lop_1.id, another_source_project_lop_2.id)
+ end
+ end
+end
diff --git a/spec/migrations/schedule_migrate_security_scans_spec.rb b/spec/migrations/schedule_migrate_security_scans_spec.rb
new file mode 100644
index 00000000000..29e4e2b5cac
--- /dev/null
+++ b/spec/migrations/schedule_migrate_security_scans_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200217225719_schedule_migrate_security_scans.rb')
+
+# rubocop: disable RSpec/FactoriesInMigrationSpecs
+describe ScheduleMigrateSecurityScans, :sidekiq do
+ let(:migration) { described_class.new }
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:builds) { table(:ci_builds) }
+ let(:job_artifacts) { table(:ci_job_artifacts) }
+
+ let(:namespace) { namespaces.create!(name: "foo", path: "bar") }
+ let(:project) { projects.create!(namespace_id: namespace.id) }
+ let(:build) { builds.create! }
+
+ before do
+ stub_const("#{described_class.name}::BATCH_SIZE", 1)
+ stub_const("#{described_class.name}::INTERVAL", 5.minutes.to_i)
+ end
+
+ context 'no security job artifacts' do
+ before do
+ table(:ci_job_artifacts)
+ end
+
+ it 'does not schedule migration' do
+ Sidekiq::Testing.fake! do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs).to be_empty
+ end
+ end
+ end
+
+ context 'has security job artifacts' do
+ let!(:job_artifact_1) { job_artifacts.create!(project_id: project.id, job_id: build.id, file_type: 5) }
+ let!(:job_artifact_2) { job_artifacts.create!(project_id: project.id, job_id: build.id, file_type: 8) }
+
+ it 'schedules migration of security scans' do
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ migration.up
+
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(5.minutes, job_artifact_1.id, job_artifact_1.id)
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(10.minutes, job_artifact_2.id, job_artifact_2.id)
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ end
+ end
+ end
+ end
+
+ context 'has non-security job artifacts' do
+ let!(:job_artifact_1) { job_artifacts.create!(project_id: project.id, job_id: build.id, file_type: 4) }
+ let!(:job_artifact_2) { job_artifacts.create!(project_id: project.id, job_id: build.id, file_type: 9) }
+
+ it 'does not schedule migration of security scans' do
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ migration.up
+
+ expect(BackgroundMigrationWorker.jobs).to be_empty
+ end
+ end
+ end
+ end
+end
diff --git a/spec/migrations/schedule_pages_metadata_migration_spec.rb b/spec/migrations/schedule_pages_metadata_migration_spec.rb
index bf9442808bc..748b9fe1cd1 100644
--- a/spec/migrations/schedule_pages_metadata_migration_spec.rb
+++ b/spec/migrations/schedule_pages_metadata_migration_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20191002031332_schedule_pages_metadata_migration')
-describe SchedulePagesMetadataMigration, :migration do
+describe SchedulePagesMetadataMigration do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
diff --git a/spec/migrations/schedule_populate_merge_request_assignees_table_spec.rb b/spec/migrations/schedule_populate_merge_request_assignees_table_spec.rb
index 44ef72baa86..d778b47179f 100644
--- a/spec/migrations/schedule_populate_merge_request_assignees_table_spec.rb
+++ b/spec/migrations/schedule_populate_merge_request_assignees_table_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190322132835_schedule_populate_merge_request_assignees_table.rb')
-describe SchedulePopulateMergeRequestAssigneesTable, :migration do
+describe SchedulePopulateMergeRequestAssigneesTable do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:namespace) { namespaces.create(name: 'gitlab', path: 'gitlab-org') }
diff --git a/spec/migrations/schedule_recalculate_project_authorizations_second_run_spec.rb b/spec/migrations/schedule_recalculate_project_authorizations_second_run_spec.rb
index 04726f98c89..4c05f7d57a1 100644
--- a/spec/migrations/schedule_recalculate_project_authorizations_second_run_spec.rb
+++ b/spec/migrations/schedule_recalculate_project_authorizations_second_run_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200204113224_schedule_recalculate_project_authorizations_second_run.rb')
-describe ScheduleRecalculateProjectAuthorizationsSecondRun, :migration do
+describe ScheduleRecalculateProjectAuthorizationsSecondRun do
let(:users_table) { table(:users) }
before do
diff --git a/spec/migrations/schedule_recalculate_project_authorizations_spec.rb b/spec/migrations/schedule_recalculate_project_authorizations_spec.rb
index 77ad2b2dc8e..d30ebf825ef 100644
--- a/spec/migrations/schedule_recalculate_project_authorizations_spec.rb
+++ b/spec/migrations/schedule_recalculate_project_authorizations_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200204113223_schedule_recalculate_project_authorizations.rb')
-describe ScheduleRecalculateProjectAuthorizations, :migration do
+describe ScheduleRecalculateProjectAuthorizations do
let(:users_table) { table(:users) }
let(:namespaces_table) { table(:namespaces) }
let(:projects_table) { table(:projects) }
diff --git a/spec/migrations/schedule_runners_token_encryption_spec.rb b/spec/migrations/schedule_runners_token_encryption_spec.rb
index 60abb98f629..4121a8409b4 100644
--- a/spec/migrations/schedule_runners_token_encryption_spec.rb
+++ b/spec/migrations/schedule_runners_token_encryption_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20181121111200_schedule_runners_token_encryption')
-describe ScheduleRunnersTokenEncryption, :migration do
+describe ScheduleRunnersTokenEncryption do
let(:settings) { table(:application_settings) }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
diff --git a/spec/migrations/schedule_set_confidential_note_events_on_webhooks_spec.rb b/spec/migrations/schedule_set_confidential_note_events_on_webhooks_spec.rb
index c022610be08..ea1e16a0a35 100644
--- a/spec/migrations/schedule_set_confidential_note_events_on_webhooks_spec.rb
+++ b/spec/migrations/schedule_set_confidential_note_events_on_webhooks_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180104131052_schedule_set_confidential_note_events_on_webhooks.rb')
-describe ScheduleSetConfidentialNoteEventsOnWebhooks, :migration do
+describe ScheduleSetConfidentialNoteEventsOnWebhooks do
let(:web_hooks_table) { table(:web_hooks) }
let(:migration_class) { Gitlab::BackgroundMigration::SetConfidentialNoteEventsOnWebhooks }
let(:migration_name) { migration_class.to_s.demodulize }
diff --git a/spec/migrations/schedule_stages_index_migration_spec.rb b/spec/migrations/schedule_stages_index_migration_spec.rb
index f2e9abe1eb8..5ca857087e7 100644
--- a/spec/migrations/schedule_stages_index_migration_spec.rb
+++ b/spec/migrations/schedule_stages_index_migration_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180420080616_schedule_stages_index_migration')
-describe ScheduleStagesIndexMigration, :migration do
+describe ScheduleStagesIndexMigration do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:pipelines) { table(:ci_pipelines) }
diff --git a/spec/migrations/schedule_sync_issuables_state_id_spec.rb b/spec/migrations/schedule_sync_issuables_state_id_spec.rb
index 21844edeb40..408e7e6d19d 100644
--- a/spec/migrations/schedule_sync_issuables_state_id_spec.rb
+++ b/spec/migrations/schedule_sync_issuables_state_id_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190214112022_schedule_sync_issuables_state_id.rb')
-describe ScheduleSyncIssuablesStateId, :migration do
+describe ScheduleSyncIssuablesStateId do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:merge_requests) { table(:merge_requests) }
diff --git a/spec/migrations/schedule_sync_issuables_state_id_where_nil_spec.rb b/spec/migrations/schedule_sync_issuables_state_id_where_nil_spec.rb
index 5dbe0d973ae..e26a864b8ba 100644
--- a/spec/migrations/schedule_sync_issuables_state_id_where_nil_spec.rb
+++ b/spec/migrations/schedule_sync_issuables_state_id_where_nil_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190506135400_schedule_sync_issuables_state_id_where_nil')
-describe ScheduleSyncIssuablesStateIdWhereNil, :migration do
+describe ScheduleSyncIssuablesStateIdWhereNil do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:merge_requests) { table(:merge_requests) }
diff --git a/spec/migrations/schedule_to_archive_legacy_traces_spec.rb b/spec/migrations/schedule_to_archive_legacy_traces_spec.rb
index a81fb1494c7..e9158df01c4 100644
--- a/spec/migrations/schedule_to_archive_legacy_traces_spec.rb
+++ b/spec/migrations/schedule_to_archive_legacy_traces_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180529152628_schedule_to_archive_legacy_traces')
-describe ScheduleToArchiveLegacyTraces, :migration do
+describe ScheduleToArchiveLegacyTraces do
include TraceHelpers
let(:namespaces) { table(:namespaces) }
diff --git a/spec/migrations/schedule_update_existing_subgroup_to_match_visibility_level_of_parent_spec.rb b/spec/migrations/schedule_update_existing_subgroup_to_match_visibility_level_of_parent_spec.rb
index 221f266cb70..098fe68927c 100644
--- a/spec/migrations/schedule_update_existing_subgroup_to_match_visibility_level_of_parent_spec.rb
+++ b/spec/migrations/schedule_update_existing_subgroup_to_match_visibility_level_of_parent_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200110121314_schedule_update_existing_subgroup_to_match_visibility_level_of_parent.rb')
-describe ScheduleUpdateExistingSubgroupToMatchVisibilityLevelOfParent, :migration do
+describe ScheduleUpdateExistingSubgroupToMatchVisibilityLevelOfParent do
include MigrationHelpers::NamespacesHelpers
let(:migration_class) { described_class::MIGRATION }
let(:migration_name) { migration_class.to_s.demodulize }
diff --git a/spec/migrations/services_remove_temporary_index_on_project_id_spec.rb b/spec/migrations/services_remove_temporary_index_on_project_id_spec.rb
index f730d7aecfd..d4f9969b71b 100644
--- a/spec/migrations/services_remove_temporary_index_on_project_id_spec.rb
+++ b/spec/migrations/services_remove_temporary_index_on_project_id_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200203104214_services_remove_temporary_index_on_project_id.rb')
-describe ServicesRemoveTemporaryIndexOnProjectId, :migration do
+describe ServicesRemoveTemporaryIndexOnProjectId do
let(:migration_instance) { described_class.new }
it 'adds and removes temporary partial index in up and down methods' do
diff --git a/spec/migrations/set_issue_id_for_all_versions_spec.rb b/spec/migrations/set_issue_id_for_all_versions_spec.rb
index bfc2731181b..ff281947db8 100644
--- a/spec/migrations/set_issue_id_for_all_versions_spec.rb
+++ b/spec/migrations/set_issue_id_for_all_versions_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20190715043954_set_issue_id_for_all_versions.rb')
-describe SetIssueIdForAllVersions, :migration do
+describe SetIssueIdForAllVersions do
let(:projects) { table(:projects) }
let(:issues) { table(:issues) }
let(:designs) { table(:design_management_designs) }
diff --git a/spec/migrations/steal_fill_store_upload_spec.rb b/spec/migrations/steal_fill_store_upload_spec.rb
index ed809baf2b5..b5e3de1864c 100644
--- a/spec/migrations/steal_fill_store_upload_spec.rb
+++ b/spec/migrations/steal_fill_store_upload_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20181105201455_steal_fill_store_upload.rb')
-describe StealFillStoreUpload, :migration do
+describe StealFillStoreUpload do
let(:uploads) { table(:uploads) }
describe '#up' do
diff --git a/spec/migrations/sync_issuables_state_id_spec.rb b/spec/migrations/sync_issuables_state_id_spec.rb
index 3138d2bec33..4bd30172cbe 100644
--- a/spec/migrations/sync_issuables_state_id_spec.rb
+++ b/spec/migrations/sync_issuables_state_id_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190911251732_sync_issuables_state_id')
-describe SyncIssuablesStateId, :migration do
+describe SyncIssuablesStateId do
let(:migration) { described_class.new }
describe '#up' do
diff --git a/spec/migrations/truncate_user_fullname_spec.rb b/spec/migrations/truncate_user_fullname_spec.rb
index 65b870de7b8..a5e6a0a4fce 100644
--- a/spec/migrations/truncate_user_fullname_spec.rb
+++ b/spec/migrations/truncate_user_fullname_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20190325080727_truncate_user_fullname.rb')
-describe TruncateUserFullname, :migration do
+describe TruncateUserFullname do
let(:users) { table(:users) }
let(:user_short) { create_user(name: 'abc', email: 'test_short@example.com') }
diff --git a/spec/migrations/update_application_setting_npm_package_requests_forwarding_default_spec.rb b/spec/migrations/update_application_setting_npm_package_requests_forwarding_default_spec.rb
new file mode 100644
index 00000000000..f9523e0e582
--- /dev/null
+++ b/spec/migrations/update_application_setting_npm_package_requests_forwarding_default_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'migrate', '20200221105436_update_application_setting_npm_package_requests_forwarding_default.rb')
+
+describe UpdateApplicationSettingNpmPackageRequestsForwardingDefault do
+ # Create test data - application settings.
+ let(:application_settings) { table(:application_settings) }
+
+ before do
+ application_settings.create!(npm_package_requests_forwarding: false)
+ end
+
+ # Test just the up migration.
+ it 'correctly migrates the application setting' do
+ expect { migrate! }.to change { current_application_setting }.from(false).to(true)
+ end
+
+ # Test a reversible migration.
+ it 'correctly migrates up and down the application setting' do
+ reversible_migration do |migration|
+ # Expectations will run before the up migration,
+ # and then again after the down migration
+ migration.before -> {
+ expect(current_application_setting).to eq false
+ }
+
+ # Expectations will run after the up migration.
+ migration.after -> {
+ expect(current_application_setting).to eq true
+ }
+ end
+ end
+
+ def current_application_setting
+ ApplicationSetting.current_without_cache.npm_package_requests_forwarding
+ end
+end
diff --git a/spec/migrations/update_fingerprint_sha256_within_keys_spec.rb b/spec/migrations/update_fingerprint_sha256_within_keys_spec.rb
index 20a506ea976..d149ec230a7 100644
--- a/spec/migrations/update_fingerprint_sha256_within_keys_spec.rb
+++ b/spec/migrations/update_fingerprint_sha256_within_keys_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200106071113_update_fingerprint_sha256_within_keys.rb')
-describe UpdateFingerprintSha256WithinKeys, :migration do
+describe UpdateFingerprintSha256WithinKeys do
let(:key_table) { table(:keys) }
describe '#up' do
diff --git a/spec/migrations/update_minimum_password_length_spec.rb b/spec/migrations/update_minimum_password_length_spec.rb
index 0a763e5ce0f..ed9c85362f5 100644
--- a/spec/migrations/update_minimum_password_length_spec.rb
+++ b/spec/migrations/update_minimum_password_length_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20191205084057_update_minimum_password_length')
-describe UpdateMinimumPasswordLength, :migration do
+describe UpdateMinimumPasswordLength do
let(:application_settings) { table(:application_settings) }
let(:application_setting) do
application_settings.create!(
diff --git a/spec/migrations/update_project_import_visibility_level_spec.rb b/spec/migrations/update_project_import_visibility_level_spec.rb
index 9ea9b956f67..f8439fc4204 100644
--- a/spec/migrations/update_project_import_visibility_level_spec.rb
+++ b/spec/migrations/update_project_import_visibility_level_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20181219130552_update_project_import_visibility_level.rb')
-describe UpdateProjectImportVisibilityLevel, :migration do
+describe UpdateProjectImportVisibilityLevel do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:project) { projects.find_by_name(name) }
diff --git a/spec/migrations/update_timestamp_softwarelicensespolicy_spec.rb b/spec/migrations/update_timestamp_softwarelicensespolicy_spec.rb
index 539da8ac92a..918c5fb567f 100644
--- a/spec/migrations/update_timestamp_softwarelicensespolicy_spec.rb
+++ b/spec/migrations/update_timestamp_softwarelicensespolicy_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
require Rails.root.join('db', 'migrate', '20200116175538_update_timestamp_softwarelicensespolicy.rb')
-describe UpdateTimestampSoftwarelicensespolicy, :migration do
+describe UpdateTimestampSoftwarelicensespolicy do
let(:software_licenses_policy) { table(:software_license_policies) }
let(:projects) { table(:projects) }
let(:licenses) { table(:software_licenses) }
diff --git a/spec/models/analytics/cycle_analytics/project_stage_spec.rb b/spec/models/analytics/cycle_analytics/project_stage_spec.rb
index 9d18618f638..9850bfde30e 100644
--- a/spec/models/analytics/cycle_analytics/project_stage_spec.rb
+++ b/spec/models/analytics/cycle_analytics/project_stage_spec.rb
@@ -8,7 +8,7 @@ describe Analytics::CycleAnalytics::ProjectStage do
end
it 'default stages must be valid' do
- project = create(:project)
+ project = build(:project)
Gitlab::Analytics::CycleAnalytics::DefaultStages.all.each do |params|
stage = described_class.new(params.merge(project: project))
@@ -17,13 +17,13 @@ describe Analytics::CycleAnalytics::ProjectStage do
end
it_behaves_like 'cycle analytics stage' do
- let(:parent) { create(:project) }
+ let(:parent) { build(:project) }
let(:parent_name) { :project }
end
context 'relative positioning' do
it_behaves_like 'a class that supports relative positioning' do
- let(:project) { create(:project) }
+ let(:project) { build(:project) }
let(:factory) { :cycle_analytics_project_stage }
let(:default_params) { { project: project } }
end
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index abbaa22ff3e..9dad6b1e766 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -428,6 +428,12 @@ describe ApplicationSetting do
expect(subject).to be_invalid
end
+
+ it 'does not prevent from saving when gitaly timeouts were previously invalid' do
+ subject.update_column(:gitaly_timeout_default, Settings.gitlab.max_request_duration_seconds + 1)
+
+ expect(subject.reload).to be_valid
+ end
end
describe 'enforcing terms' do
@@ -681,5 +687,56 @@ describe ApplicationSetting do
end
end
+ describe 'email_restrictions' do
+ context 'when email restrictions are enabled' do
+ before do
+ subject.email_restrictions_enabled = true
+ end
+
+ it 'allows empty email restrictions' do
+ subject.email_restrictions = ''
+
+ expect(subject).to be_valid
+ end
+
+ it 'accepts valid email restrictions regex' do
+ subject.email_restrictions = '\+'
+
+ expect(subject).to be_valid
+ end
+
+ it 'does not accept invalid email restrictions regex' do
+ subject.email_restrictions = '+'
+
+ expect(subject).not_to be_valid
+ end
+
+ it 'sets an error when regex is not valid' do
+ subject.email_restrictions = '+'
+
+ expect(subject).not_to be_valid
+ expect(subject.errors.messages[:email_restrictions].first).to eq(_('is not a valid regular expression'))
+ end
+ end
+
+ context 'when email restrictions are disabled' do
+ before do
+ subject.email_restrictions_enabled = false
+ end
+
+ it 'allows empty email restrictions' do
+ subject.email_restrictions = ''
+
+ expect(subject).to be_valid
+ end
+
+ it 'invalid regex is not valid' do
+ subject.email_restrictions = '+'
+
+ expect(subject).not_to be_valid
+ end
+ end
+ end
+
it_behaves_like 'application settings examples'
end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 4bfb5771bb8..a661aa6e3a9 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -1293,7 +1293,35 @@ describe Ci::Build do
environment: 'review/$APP_HOST')
end
- it { is_expected.to eq('review/host') }
+ it 'returns an expanded environment name with a list of variables' do
+ expect(build).to receive(:simple_variables).once.and_call_original
+
+ is_expected.to eq('review/host')
+ end
+
+ context 'when build metadata has already persisted the expanded environment name' do
+ before do
+ build.metadata.expanded_environment_name = 'review/host'
+ end
+
+ it 'returns a persisted expanded environment name without a list of variables' do
+ expect(build).not_to receive(:simple_variables)
+
+ is_expected.to eq('review/host')
+ end
+
+ context 'when ci_persisted_expanded_environment_name feature flag is disabled' do
+ before do
+ stub_feature_flags(ci_persisted_expanded_environment_name: false)
+ end
+
+ it 'returns an expanded environment name with a list of variables' do
+ expect(build).to receive(:simple_variables).once.and_call_original
+
+ is_expected.to eq('review/host')
+ end
+ end
+ end
end
context 'when using persisted variables' do
@@ -2509,6 +2537,83 @@ describe Ci::Build do
end
end
+ describe 'CHANGED_PAGES variables' do
+ let(:route_map_yaml) do
+ <<~ROUTEMAP
+ - source: 'bar/branch-test.txt'
+ public: '/bar/branches'
+ - source: 'with space/README.md'
+ public: '/README'
+ ROUTEMAP
+ end
+
+ before do
+ allow_any_instance_of(Project)
+ .to receive(:route_map_for).with(/.+/)
+ .and_return(Gitlab::RouteMap.new(route_map_yaml))
+ end
+
+ context 'with a deployment environment and a merge request' do
+ let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
+ let(:environment) { create(:environment, project: merge_request.project, name: "foo-#{project.default_branch}") }
+ let(:build) { create(:ci_build, pipeline: pipeline, environment: environment.name) }
+
+ let(:full_urls) do
+ [
+ File.join(environment.external_url, '/bar/branches'),
+ File.join(environment.external_url, '/README')
+ ]
+ end
+
+ it 'populates CI_MERGE_REQUEST_CHANGED_PAGE_* variables' do
+ expect(subject).to include(
+ {
+ key: 'CI_MERGE_REQUEST_CHANGED_PAGE_PATHS',
+ value: '/bar/branches,/README',
+ public: true,
+ masked: false
+ },
+ {
+ key: 'CI_MERGE_REQUEST_CHANGED_PAGE_URLS',
+ value: full_urls.join(','),
+ public: true,
+ masked: false
+ }
+ )
+ end
+
+ context 'with a deployment environment and no merge request' do
+ let(:environment) { create(:environment, project: project, name: "foo-#{project.default_branch}") }
+ let(:build) { create(:ci_build, pipeline: pipeline, environment: environment.name) }
+
+ it 'does not append CHANGED_PAGES variables' do
+ ci_variables = subject.select { |var| var[:key] =~ /MERGE_REQUEST_CHANGED_PAGES/ }
+
+ expect(ci_variables).to be_empty
+ end
+ end
+
+ context 'with no deployment environment and a present merge request' do
+ let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline, source_project: project, target_project: project) }
+ let(:build) { create(:ci_build, pipeline: merge_request.all_pipelines.take) }
+
+ it 'does not append CHANGED_PAGES variables' do
+ ci_variables = subject.select { |var| var[:key] =~ /MERGE_REQUEST_CHANGED_PAGES/ }
+
+ expect(ci_variables).to be_empty
+ end
+ end
+
+ context 'with no deployment environment and no merge request' do
+ it 'does not append CHANGED_PAGES variables' do
+ ci_variables = subject.select { |var| var[:key] =~ /MERGE_REQUEST_CHANGED_PAGES/ }
+
+ expect(ci_variables).to be_empty
+ end
+ end
+ end
+ end
+
context 'when build has user' do
let(:user_variables) do
[
@@ -3841,6 +3946,53 @@ describe Ci::Build do
end
end
+ describe '#collect_coverage_reports!' do
+ subject { build.collect_coverage_reports!(coverage_report) }
+
+ let(:coverage_report) { Gitlab::Ci::Reports::CoverageReports.new }
+
+ it { expect(coverage_report.files).to eq({}) }
+
+ context 'when build has a coverage report' do
+ context 'when there is a Cobertura coverage report from simplecov-cobertura' do
+ before do
+ create(:ci_job_artifact, :cobertura, job: build, project: build.project)
+ end
+
+ it 'parses blobs and adds the results to the coverage report' do
+ expect { subject }.not_to raise_error
+
+ expect(coverage_report.files.keys).to match_array(['app/controllers/abuse_reports_controller.rb'])
+ expect(coverage_report.files['app/controllers/abuse_reports_controller.rb'].count).to eq(23)
+ end
+ end
+
+ context 'when there is a Cobertura coverage report from gocov-xml' do
+ before do
+ create(:ci_job_artifact, :coverage_gocov_xml, job: build, project: build.project)
+ end
+
+ it 'parses blobs and adds the results to the coverage report' do
+ expect { subject }.not_to raise_error
+
+ expect(coverage_report.files.keys).to match_array(['auth/token.go', 'auth/rpccredentials.go'])
+ expect(coverage_report.files['auth/token.go'].count).to eq(49)
+ expect(coverage_report.files['auth/rpccredentials.go'].count).to eq(10)
+ end
+ end
+
+ context 'when there is a corrupted Cobertura coverage report' do
+ before do
+ create(:ci_job_artifact, :coverage_with_corrupted_data, job: build, project: build.project)
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(Gitlab::Ci::Parsers::Coverage::Cobertura::CoberturaParserError)
+ end
+ end
+ end
+ end
+
describe '#report_artifacts' do
subject { build.report_artifacts }
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index d2fe0d7eeca..de93c3c1675 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -25,7 +25,7 @@ describe Ci::JobArtifact do
end
it_behaves_like 'UpdateProjectStatistics' do
- subject { build(:ci_job_artifact, :archive, size: 106365) }
+ subject { build(:ci_job_artifact, :archive, size: 107464) }
end
end
@@ -35,7 +35,7 @@ describe Ci::JobArtifact do
end
it_behaves_like 'UpdateProjectStatistics' do
- subject { build(:ci_job_artifact, :archive, size: 106365) }
+ subject { build(:ci_job_artifact, :archive, size: 107464) }
end
end
@@ -70,6 +70,22 @@ describe Ci::JobArtifact do
end
end
+ describe '.coverage_reports' do
+ subject { described_class.coverage_reports }
+
+ context 'when there is a coverage report' do
+ let!(:artifact) { create(:ci_job_artifact, :cobertura) }
+
+ it { is_expected.to eq([artifact]) }
+ end
+
+ context 'when there are no coverage reports' do
+ let!(:artifact) { create(:ci_job_artifact, :archive) }
+
+ it { is_expected.to be_empty }
+ end
+ end
+
describe '.erasable' do
subject { described_class.erasable }
@@ -113,13 +129,14 @@ describe Ci::JobArtifact do
describe '.for_sha' do
it 'returns job artifacts for a given pipeline sha' do
- first_pipeline = create(:ci_pipeline)
- second_pipeline = create(:ci_pipeline, sha: Digest::SHA1.hexdigest(SecureRandom.hex))
+ project = create(:project)
+ first_pipeline = create(:ci_pipeline, project: project)
+ second_pipeline = create(:ci_pipeline, project: project, sha: Digest::SHA1.hexdigest(SecureRandom.hex))
first_artifact = create(:ci_job_artifact, job: create(:ci_build, pipeline: first_pipeline))
second_artifact = create(:ci_job_artifact, job: create(:ci_build, pipeline: second_pipeline))
- expect(described_class.for_sha(first_pipeline.sha)).to eq([first_artifact])
- expect(described_class.for_sha(second_pipeline.sha)).to eq([second_artifact])
+ expect(described_class.for_sha(first_pipeline.sha, project.id)).to eq([first_artifact])
+ expect(described_class.for_sha(second_pipeline.sha, project.id)).to eq([second_artifact])
end
end
@@ -172,7 +189,7 @@ describe Ci::JobArtifact do
let(:artifact) { create(:ci_job_artifact, :archive, project: project) }
it 'sets the size from the file size' do
- expect(artifact.size).to eq(106365)
+ expect(artifact.size).to eq(107464)
end
end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index cf1690df9ba..f18c77988c8 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -344,9 +344,9 @@ describe Ci::Pipeline, :mailer do
end
describe '.with_reports' do
- subject { described_class.with_reports(Ci::JobArtifact.test_reports) }
-
context 'when pipeline has a test report' do
+ subject { described_class.with_reports(Ci::JobArtifact.test_reports) }
+
let!(:pipeline_with_report) { create(:ci_pipeline, :with_test_reports) }
it 'selects the pipeline' do
@@ -354,7 +354,19 @@ describe Ci::Pipeline, :mailer do
end
end
+ context 'when pipeline has a coverage report' do
+ subject { described_class.with_reports(Ci::JobArtifact.coverage_reports) }
+
+ let!(:pipeline_with_report) { create(:ci_pipeline, :with_coverage_reports) }
+
+ it 'selects the pipeline' do
+ is_expected.to eq([pipeline_with_report])
+ end
+ end
+
context 'when pipeline does not have metrics reports' do
+ subject { described_class.with_reports(Ci::JobArtifact.test_reports) }
+
let!(:pipeline_without_report) { create(:ci_empty_pipeline) }
it 'does not select the pipeline' do
@@ -2509,27 +2521,53 @@ describe Ci::Pipeline, :mailer do
end
end
- context 'with success pipeline' do
- before do
- perform_enqueued_jobs do
+ shared_examples 'enqueues the notification worker' do
+ it 'enqueues PipelineUpdateCiRefStatusWorker' do
+ expect(PipelineUpdateCiRefStatusWorker).to receive(:perform_async).with(pipeline.id)
+ expect(PipelineNotificationWorker).not_to receive(:perform_async).with(pipeline.id)
+
+ pipeline.succeed
+ end
+
+ context 'when ci_pipeline_fixed_notifications is disabled' do
+ before do
+ stub_feature_flags(ci_pipeline_fixed_notifications: false)
+ end
+
+ it 'enqueues PipelineNotificationWorker' do
+ expect(PipelineUpdateCiRefStatusWorker).not_to receive(:perform_async).with(pipeline.id)
+ expect(PipelineNotificationWorker).to receive(:perform_async).with(pipeline.id)
+
pipeline.succeed
end
end
+ end
+
+ context 'with success pipeline' do
+ it_behaves_like 'sending a notification' do
+ before do
+ perform_enqueued_jobs do
+ pipeline.succeed
+ end
+ end
+ end
- it_behaves_like 'sending a notification'
+ it_behaves_like 'enqueues the notification worker'
end
context 'with failed pipeline' do
- before do
- perform_enqueued_jobs do
- create(:ci_build, :failed, pipeline: pipeline)
- create(:generic_commit_status, :failed, pipeline: pipeline)
+ it_behaves_like 'sending a notification' do
+ before do
+ perform_enqueued_jobs do
+ create(:ci_build, :failed, pipeline: pipeline)
+ create(:generic_commit_status, :failed, pipeline: pipeline)
- pipeline.drop
+ pipeline.drop
+ end
end
end
- it_behaves_like 'sending a notification'
+ it_behaves_like 'enqueues the notification worker'
end
context 'with skipped pipeline' do
@@ -2553,6 +2591,19 @@ describe Ci::Pipeline, :mailer do
end
end
+ describe '#find_job_with_archive_artifacts' do
+ let!(:old_job) { create(:ci_build, name: 'rspec', retried: true, pipeline: pipeline) }
+ let!(:job_without_artifacts) { create(:ci_build, name: 'rspec', pipeline: pipeline) }
+ let!(:expected_job) { create(:ci_build, :artifacts, name: 'rspec', pipeline: pipeline) }
+ let!(:different_job) { create(:ci_build, name: 'deploy', pipeline: pipeline) }
+
+ subject { pipeline.find_job_with_archive_artifacts('rspec') }
+
+ it 'finds the expected job' do
+ expect(subject).to eq(expected_job)
+ end
+ end
+
describe '#latest_builds_with_artifacts' do
let!(:fresh_build) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
let!(:stale_build) { create(:ci_build, :success, :expired, :artifacts, pipeline: pipeline) }
@@ -2691,6 +2742,43 @@ describe Ci::Pipeline, :mailer do
end
end
+ describe '#coverage_reports' do
+ subject { pipeline.coverage_reports }
+
+ context 'when pipeline has multiple builds with coverage reports' do
+ let!(:build_rspec) { create(:ci_build, :success, name: 'rspec', pipeline: pipeline, project: project) }
+ let!(:build_golang) { create(:ci_build, :success, name: 'golang', pipeline: pipeline, project: project) }
+
+ before do
+ create(:ci_job_artifact, :cobertura, job: build_rspec, project: project)
+ create(:ci_job_artifact, :coverage_gocov_xml, job: build_golang, project: project)
+ end
+
+ it 'returns coverage reports with collected data' do
+ expect(subject.files.keys).to match_array([
+ "auth/token.go",
+ "auth/rpccredentials.go",
+ "app/controllers/abuse_reports_controller.rb"
+ ])
+ end
+
+ context 'when builds are retried' do
+ let!(:build_rspec) { create(:ci_build, :retried, :success, name: 'rspec', pipeline: pipeline, project: project) }
+ let!(:build_golang) { create(:ci_build, :retried, :success, name: 'golang', pipeline: pipeline, project: project) }
+
+ it 'does not take retried builds into account' do
+ expect(subject.files).to eql({})
+ end
+ end
+ end
+
+ context 'when pipeline does not have any builds with coverage reports' do
+ it 'returns empty coverage reports' do
+ expect(subject.files).to eql({})
+ end
+ end
+ end
+
describe '#total_size' do
let!(:build_job1) { create(:ci_build, pipeline: pipeline, stage_idx: 0) }
let!(:build_job2) { create(:ci_build, pipeline: pipeline, stage_idx: 0) }
@@ -2813,6 +2901,30 @@ describe Ci::Pipeline, :mailer do
end
end
+ describe '#created_successfully?' do
+ subject { pipeline.created_successfully? }
+
+ context 'when pipeline is not persisted' do
+ let(:pipeline) { build(:ci_pipeline) }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when pipeline is persisted' do
+ context 'when pipeline has failure reasons' do
+ let(:pipeline) { create(:ci_pipeline, failure_reason: :config_error) }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when pipeline has no failure reasons' do
+ let(:pipeline) { create(:ci_pipeline, failure_reason: nil) }
+
+ it { is_expected.to be_truthy }
+ end
+ end
+ end
+
describe '#parent_pipeline' do
let(:project) { create(:project) }
let(:pipeline) { create(:ci_pipeline, project: project) }
@@ -2960,8 +3072,7 @@ describe Ci::Pipeline, :mailer do
it 'can not update bridge status if is not active' do
bridge.success!
- expect { pipeline.update_bridge_status! }
- .to raise_error Ci::Pipeline::BridgeStatusError
+ expect { pipeline.update_bridge_status! }.not_to change { bridge.status }
end
end
end
@@ -2992,9 +3103,12 @@ describe Ci::Pipeline, :mailer do
end
describe '#update_bridge_status!' do
- it 'can not update upstream job status' do
- expect { pipeline.update_bridge_status! }
- .to raise_error ArgumentError
+ it 'tracks an ArgumentError and does not update upstream job status' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(instance_of(ArgumentError), pipeline_id: pipeline.id)
+
+ pipeline.update_bridge_status!
end
end
end
diff --git a/spec/models/ci/processable_spec.rb b/spec/models/ci/processable_spec.rb
index 1e0544c14c5..e03f54aa728 100644
--- a/spec/models/ci/processable_spec.rb
+++ b/spec/models/ci/processable_spec.rb
@@ -145,4 +145,28 @@ describe Ci::Processable do
expect(another_build.reload.scheduling_type).to be_nil
end
end
+
+ describe '#needs_attributes' do
+ let(:build) { create(:ci_build, :created, project: project, pipeline: pipeline) }
+
+ context 'with needs' do
+ before do
+ create(:ci_build_need, build: build, name: 'test1')
+ create(:ci_build_need, build: build, name: 'test2')
+ end
+
+ it 'returns all needs attributes' do
+ expect(build.needs_attributes).to contain_exactly(
+ { 'artifacts' => true, 'name' => 'test1' },
+ { 'artifacts' => true, 'name' => 'test2' }
+ )
+ end
+ end
+
+ context 'without needs' do
+ it 'returns no needs attributes' do
+ expect(build.needs_attributes).to be_empty
+ end
+ end
+ end
end
diff --git a/spec/models/ci/ref_spec.rb b/spec/models/ci/ref_spec.rb
new file mode 100644
index 00000000000..aa3b8cdbc3e
--- /dev/null
+++ b/spec/models/ci/ref_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::Ref do
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to belong_to(:last_updated_by_pipeline) }
+
+ it { is_expected.to validate_inclusion_of(:status).in_array(%w[success failed fixed]) }
+ it { is_expected.to validate_presence_of(:last_updated_by_pipeline) }
+end
diff --git a/spec/models/clusters/applications/cert_manager_spec.rb b/spec/models/clusters/applications/cert_manager_spec.rb
index 31209a70018..d7fd0d06b05 100644
--- a/spec/models/clusters/applications/cert_manager_spec.rb
+++ b/spec/models/clusters/applications/cert_manager_spec.rb
@@ -46,11 +46,11 @@ describe Clusters::Applications::CertManager do
expect(subject.name).to eq('certmanager')
expect(subject.chart).to eq('certmanager/cert-manager')
expect(subject.repository).to eq('https://charts.jetstack.io')
- expect(subject.version).to eq('v0.9.1')
+ expect(subject.version).to eq('v0.10.1')
expect(subject).to be_rbac
expect(subject.files).to eq(cert_manager.files.merge(cluster_issuer_file))
expect(subject.preinstall).to eq([
- 'kubectl apply -f https://raw.githubusercontent.com/jetstack/cert-manager/release-0.9/deploy/manifests/00-crds.yaml',
+ 'kubectl apply -f https://raw.githubusercontent.com/jetstack/cert-manager/release-0.10/deploy/manifests/00-crds.yaml',
'kubectl label --overwrite namespace gitlab-managed-apps certmanager.k8s.io/disable-validation=true'
])
expect(subject.postinstall).to eq([
@@ -82,7 +82,7 @@ describe Clusters::Applications::CertManager do
let(:cert_manager) { create(:clusters_applications_cert_manager, :errored, version: '0.0.1') }
it 'is initialized with the locked version' do
- expect(subject.version).to eq('v0.9.1')
+ expect(subject.version).to eq('v0.10.1')
end
end
end
diff --git a/spec/models/clusters/applications/elastic_stack_spec.rb b/spec/models/clusters/applications/elastic_stack_spec.rb
index d336dc752c8..b0992c43d11 100644
--- a/spec/models/clusters/applications/elastic_stack_spec.rb
+++ b/spec/models/clusters/applications/elastic_stack_spec.rb
@@ -20,7 +20,7 @@ describe Clusters::Applications::ElasticStack do
it 'is initialized with elastic stack arguments' do
expect(subject.name).to eq('elastic-stack')
expect(subject.chart).to eq('stable/elastic-stack')
- expect(subject.version).to eq('1.8.0')
+ expect(subject.version).to eq('1.9.0')
expect(subject).to be_rbac
expect(subject.files).to eq(elastic_stack.files)
end
@@ -37,7 +37,7 @@ describe Clusters::Applications::ElasticStack do
let(:elastic_stack) { create(:clusters_applications_elastic_stack, :errored, version: '0.0.1') }
it 'is initialized with the locked version' do
- expect(subject.version).to eq('1.8.0')
+ expect(subject.version).to eq('1.9.0')
end
end
end
diff --git a/spec/models/clusters/applications/ingress_spec.rb b/spec/models/clusters/applications/ingress_spec.rb
index c086ab23058..b1dd8ede3eb 100644
--- a/spec/models/clusters/applications/ingress_spec.rb
+++ b/spec/models/clusters/applications/ingress_spec.rb
@@ -102,7 +102,7 @@ describe Clusters::Applications::Ingress do
it 'is initialized with ingress arguments' do
expect(subject.name).to eq('ingress')
expect(subject.chart).to eq('stable/nginx-ingress')
- expect(subject.version).to eq('1.29.3')
+ expect(subject.version).to eq('1.29.7')
expect(subject).to be_rbac
expect(subject.files).to eq(ingress.files)
end
@@ -119,7 +119,7 @@ describe Clusters::Applications::Ingress do
let(:ingress) { create(:clusters_applications_ingress, :errored, version: 'nginx') }
it 'is initialized with the locked version' do
- expect(subject.version).to eq('1.29.3')
+ expect(subject.version).to eq('1.29.7')
end
end
end
@@ -135,6 +135,7 @@ describe Clusters::Applications::Ingress do
expect(values).to include('repository')
expect(values).to include('stats')
expect(values).to include('podAnnotations')
+ expect(values).to include('clusterIP')
end
end
@@ -176,6 +177,7 @@ describe Clusters::Applications::Ingress do
context 'when modsecurity_enabled is disabled' do
before do
allow(subject).to receive(:cluster).and_return(cluster)
+ allow(subject).to receive(:modsecurity_enabled).and_return(false)
end
it 'excludes modsecurity module enablement' do
diff --git a/spec/models/clusters/applications/knative_spec.rb b/spec/models/clusters/applications/knative_spec.rb
index 993cc7d0203..7ff7644e703 100644
--- a/spec/models/clusters/applications/knative_spec.rb
+++ b/spec/models/clusters/applications/knative_spec.rb
@@ -14,6 +14,7 @@ describe Clusters::Applications::Knative do
before do
allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_in)
allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_async)
+ allow(ClusterConfigureIstioWorker).to receive(:perform_async)
end
describe 'associations' do
@@ -47,6 +48,32 @@ describe Clusters::Applications::Knative do
end
end
+ describe 'configuring istio ingress gateway' do
+ context 'after installed' do
+ let(:application) { create(:clusters_applications_knative, :installing) }
+
+ before do
+ application.make_installed!
+ end
+
+ it 'schedules a ClusterConfigureIstioWorker' do
+ expect(ClusterConfigureIstioWorker).to have_received(:perform_async).with(application.cluster_id)
+ end
+ end
+
+ context 'after updated' do
+ let(:application) { create(:clusters_applications_knative, :updating) }
+
+ before do
+ application.make_installed!
+ end
+
+ it 'schedules a ClusterConfigureIstioWorker' do
+ expect(ClusterConfigureIstioWorker).to have_received(:perform_async).with(application.cluster_id)
+ end
+ end
+ end
+
describe '#can_uninstall?' do
subject { knative.can_uninstall? }
@@ -196,4 +223,34 @@ describe Clusters::Applications::Knative do
describe 'validations' do
it { is_expected.to validate_presence_of(:hostname) }
end
+
+ describe '#available_domains' do
+ let!(:domain) { create(:pages_domain, :instance_serverless) }
+
+ it 'returns all instance serverless domains' do
+ expect(PagesDomain).to receive(:instance_serverless).and_call_original
+
+ domains = subject.available_domains
+
+ expect(domains.length).to eq(1)
+ expect(domains).to include(domain)
+ end
+ end
+
+ describe '#find_available_domain' do
+ let!(:domain) { create(:pages_domain, :instance_serverless) }
+
+ it 'returns the domain scoped to available domains' do
+ expect(subject).to receive(:available_domains).and_call_original
+ expect(subject.find_available_domain(domain.id)).to eq(domain)
+ end
+ end
+
+ describe '#pages_domain' do
+ let!(:sdc) { create(:serverless_domain_cluster, knative: knative) }
+
+ it 'returns the associated pages domain' do
+ expect(knative.reload.pages_domain).to eq(sdc.pages_domain)
+ end
+ end
end
diff --git a/spec/models/clusters/applications/prometheus_spec.rb b/spec/models/clusters/applications/prometheus_spec.rb
index ba344a234b8..ecb87910d2d 100644
--- a/spec/models/clusters/applications/prometheus_spec.rb
+++ b/spec/models/clusters/applications/prometheus_spec.rb
@@ -274,7 +274,8 @@ describe Clusters::Applications::Prometheus do
subject { application.files_with_replaced_values({ hello: :world }) }
it 'does not modify #files' do
- expect(subject[:'values.yaml']).not_to eq(files)
+ expect(subject[:'values.yaml']).not_to eq(files[:'values.yaml'])
+
expect(files[:'values.yaml']).to eq(application.values)
end
@@ -282,27 +283,17 @@ describe Clusters::Applications::Prometheus do
expect(subject[:'values.yaml']).to eq({ hello: :world })
end
- it 'includes cert files' do
- expect(subject[:'ca.pem']).to be_present
- expect(subject[:'ca.pem']).to eq(application.cluster.application_helm.ca_cert)
-
- expect(subject[:'cert.pem']).to be_present
- expect(subject[:'key.pem']).to be_present
-
- cert = OpenSSL::X509::Certificate.new(subject[:'cert.pem'])
- expect(cert.not_after).to be < 60.minutes.from_now
- end
-
- context 'when the helm application does not have a ca_cert' do
- before do
- application.cluster.application_helm.ca_cert = nil
- end
-
- it 'does not include cert files' do
- expect(subject[:'ca.pem']).not_to be_present
- expect(subject[:'cert.pem']).not_to be_present
- expect(subject[:'key.pem']).not_to be_present
- end
+ it 'uses values from #files, except for values.yaml' do
+ allow(application).to receive(:files).and_return({
+ 'values.yaml': 'some value specific to files',
+ 'file_a.txt': 'file_a',
+ 'file_b.txt': 'file_b'
+ })
+
+ expect(subject.except(:'values.yaml')).to eq({
+ 'file_a.txt': 'file_a',
+ 'file_b.txt': 'file_b'
+ })
end
end
@@ -339,4 +330,46 @@ describe Clusters::Applications::Prometheus do
it { is_expected.to be_falsy }
end
end
+
+ describe 'alert manager token' do
+ subject { create(:clusters_applications_prometheus) }
+
+ context 'when not set' do
+ it 'is empty by default' do
+ expect(subject.alert_manager_token).to be_nil
+ expect(subject.encrypted_alert_manager_token).to be_nil
+ expect(subject.encrypted_alert_manager_token_iv).to be_nil
+ end
+
+ describe '#generate_alert_manager_token!' do
+ it 'generates a token' do
+ subject.generate_alert_manager_token!
+
+ expect(subject.alert_manager_token).to match(/\A\h{32}\z/)
+ end
+ end
+ end
+
+ context 'when set' do
+ let(:token) { SecureRandom.hex }
+
+ before do
+ subject.update!(alert_manager_token: token)
+ end
+
+ it 'reads the token' do
+ expect(subject.alert_manager_token).to eq(token)
+ expect(subject.encrypted_alert_manager_token).not_to be_nil
+ expect(subject.encrypted_alert_manager_token_iv).not_to be_nil
+ end
+
+ describe '#generate_alert_manager_token!' do
+ it 'does not re-generate the token' do
+ subject.generate_alert_manager_token!
+
+ expect(subject.alert_manager_token).to eq(token)
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb
index 23592cb0c70..f6c19ccc9d3 100644
--- a/spec/models/clusters/cluster_spec.rb
+++ b/spec/models/clusters/cluster_spec.rb
@@ -573,17 +573,17 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
end
describe '#allow_user_defined_namespace?' do
- let(:cluster) { create(:cluster, :provided_by_gcp) }
-
subject { cluster.allow_user_defined_namespace? }
context 'project type cluster' do
context 'gitlab managed' do
+ let(:cluster) { build(:cluster, :provided_by_gcp) }
+
it { is_expected.to be_truthy }
end
context 'not managed' do
- let(:cluster) { create(:cluster, :provided_by_gcp, managed: false) }
+ let(:cluster) { build(:cluster, :provided_by_gcp, managed: false) }
it { is_expected.to be_truthy }
end
@@ -591,13 +591,13 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
context 'group type cluster' do
context 'gitlab managed' do
- let(:cluster) { create(:cluster, :provided_by_gcp, :group) }
+ let(:cluster) { build(:cluster, :provided_by_gcp, :group) }
it { is_expected.to be_falsey }
end
context 'not managed' do
- let(:cluster) { create(:cluster, :provided_by_gcp, :group, managed: false) }
+ let(:cluster) { build(:cluster, :provided_by_gcp, :group, managed: false) }
it { is_expected.to be_truthy }
end
@@ -605,13 +605,13 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
context 'instance type cluster' do
context 'gitlab managed' do
- let(:cluster) { create(:cluster, :provided_by_gcp, :instance) }
+ let(:cluster) { build(:cluster, :provided_by_gcp, :instance) }
it { is_expected.to be_falsey }
end
context 'not managed' do
- let(:cluster) { create(:cluster, :provided_by_gcp, :instance, managed: false) }
+ let(:cluster) { build(:cluster, :provided_by_gcp, :instance, managed: false) }
it { is_expected.to be_truthy }
end
@@ -649,18 +649,18 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
end
describe '#kube_ingress_domain' do
- let(:cluster) { create(:cluster, :provided_by_gcp) }
+ let(:cluster) { build(:cluster, :provided_by_gcp) }
subject { cluster.kube_ingress_domain }
context 'with domain set in cluster' do
- let(:cluster) { create(:cluster, :provided_by_gcp, :with_domain) }
+ let(:cluster) { build(:cluster, :provided_by_gcp, :with_domain) }
it { is_expected.to eq(cluster.domain) }
end
context 'with no domain on cluster' do
- let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+ let(:cluster) { build(:cluster, :project, :provided_by_gcp) }
let(:project) { cluster.project }
context 'with domain set at instance level' do
@@ -674,59 +674,59 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
end
describe '#kubernetes_namespace_for' do
- let(:cluster) { create(:cluster, :group) }
- let(:environment) { create(:environment, last_deployable: build) }
- let(:build) { create(:ci_build) }
+ subject { cluster.kubernetes_namespace_for(environment, deployable: build) }
- subject { cluster.kubernetes_namespace_for(environment) }
+ let(:environment_name) { 'the-environment-name' }
+ let(:environment) { create(:environment, name: environment_name, project: cluster.project, last_deployable: build) }
+ let(:build) { create(:ci_build, environment: environment_name, project: cluster.project) }
+ let(:cluster) { create(:cluster, :project, managed: managed_cluster) }
+ let(:managed_cluster) { true }
+ let(:default_namespace) { Gitlab::Kubernetes::DefaultNamespace.new(cluster, project: cluster.project).from_environment_slug(environment.slug) }
+ let(:build_options) { {} }
- before do
- expect(Clusters::KubernetesNamespaceFinder).to receive(:new)
- .with(cluster, project: environment.project, environment_name: environment.name)
- .and_return(double(execute: persisted_namespace))
-
- allow(build).to receive(:expanded_kubernetes_namespace)
- .and_return(ci_configured_namespace)
+ it 'validates the project id' do
+ environment.project_id = build.project_id + 1
+ expect { subject }.to raise_error ArgumentError, 'environment.project_id must match deployable.project_id'
end
- context 'no persisted namespace exists and namespace is not specified in CI template' do
- let(:persisted_namespace) { nil }
- let(:ci_configured_namespace) { nil }
+ context 'when environment has no last_deployable' do
+ let(:build) { nil }
- let(:namespace_generator) { double }
- let(:default_namespace) { 'a-default-namespace' }
+ it { is_expected.to eq default_namespace }
+ end
+ context 'when cluster is managed' do
before do
- expect(Gitlab::Kubernetes::DefaultNamespace).to receive(:new)
- .with(cluster, project: environment.project)
- .and_return(namespace_generator)
- expect(namespace_generator).to receive(:from_environment_slug)
- .with(environment.slug)
- .and_return(default_namespace)
+ build.options = { environment: { kubernetes: { namespace: 'ci yaml namespace' } } }
end
- it { is_expected.to eq default_namespace }
- end
-
- context 'persisted namespace exists' do
- let(:persisted_namespace) { create(:cluster_kubernetes_namespace) }
- let(:ci_configured_namespace) { nil }
+ it 'returns the cached namespace if present, ignoring CI config' do
+ cached_namespace = create(:cluster_kubernetes_namespace, cluster: cluster, environment: environment, namespace: 'the name', service_account_token: 'some token')
+ expect(subject).to eq cached_namespace.namespace
+ end
- it { is_expected.to eq persisted_namespace.namespace }
+ it 'returns the default namespace when no cached namespace, ignoring CI config' do
+ expect(subject).to eq default_namespace
+ end
end
- context 'namespace is specified in CI template' do
- let(:persisted_namespace) { nil }
- let(:ci_configured_namespace) { 'ci-configured-namespace' }
+ context 'when cluster is not managed' do
+ let(:managed_cluster) { false }
- it { is_expected.to eq ci_configured_namespace }
- end
+ it 'returns the cached namespace if present, regardless of CI config' do
+ cached_namespace = create(:cluster_kubernetes_namespace, cluster: cluster, environment: environment, namespace: 'the name', service_account_token: 'some token')
+ build.options = { environment: { kubernetes: { namespace: 'ci yaml namespace' } } }
+ expect(subject).to eq cached_namespace.namespace
+ end
- context 'persisted namespace exists and namespace is also specifed in CI template' do
- let(:persisted_namespace) { create(:cluster_kubernetes_namespace) }
- let(:ci_configured_namespace) { 'ci-configured-namespace' }
+ it 'returns the CI YAML namespace when configured' do
+ build.options = { environment: { kubernetes: { namespace: 'ci yaml namespace' } } }
+ expect(subject).to eq 'ci yaml namespace'
+ end
- it { is_expected.to eq persisted_namespace.namespace }
+ it 'returns the default namespace when no namespace is configured' do
+ expect(subject).to eq default_namespace
+ end
end
end
@@ -754,7 +754,7 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
end
context 'with no domain' do
- let(:cluster) { create(:cluster, :provided_by_gcp, :project) }
+ let(:cluster) { build(:cluster, :provided_by_gcp, :project) }
it 'returns an empty array' do
expect(subject.to_hash).to be_empty
@@ -782,7 +782,7 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
subject { cluster.status_name }
context 'the cluster has a provider' do
- let(:cluster) { create(:cluster, :provided_by_gcp) }
+ let(:cluster) { build(:cluster, :provided_by_gcp) }
let(:provider_status) { :errored }
before do
@@ -816,7 +816,7 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
end
context 'there is a cached connection status' do
- let(:cluster) { create(:cluster, :provided_by_user) }
+ let(:cluster) { build(:cluster, :provided_by_user) }
before do
allow(cluster).to receive(:connection_status).and_return(:connected)
@@ -826,7 +826,7 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
end
context 'there is no connection status in the cache' do
- let(:cluster) { create(:cluster, :provided_by_user) }
+ let(:cluster) { build(:cluster, :provided_by_user) }
before do
allow(cluster).to receive(:connection_status).and_return(nil)
diff --git a/spec/models/commit_range_spec.rb b/spec/models/commit_range_spec.rb
index 4a524b585e1..245e47fa17b 100644
--- a/spec/models/commit_range_spec.rb
+++ b/spec/models/commit_range_spec.rb
@@ -137,29 +137,4 @@ describe CommitRange do
end
end
end
-
- describe '#has_been_reverted?' do
- let(:user) { create(:user) }
- let(:issue) { create(:issue, author: user, project: project) }
-
- it 'returns true if the commit has been reverted' do
- create(:note_on_issue,
- noteable: issue,
- system: true,
- note: commit1.revert_description(user),
- project: issue.project)
-
- expect_next_instance_of(Commit) do |commit|
- expect(commit).to receive(:reverts_commit?)
- .with(commit1, user)
- .and_return(true)
- end
-
- expect(commit1.has_been_reverted?(user, issue.notes_with_associations)).to eq(true)
- end
-
- it 'returns false if the commit has not been reverted' do
- expect(commit1.has_been_reverted?(user, issue.notes_with_associations)).to eq(false)
- end
- end
end
diff --git a/spec/models/commit_spec.rb b/spec/models/commit_spec.rb
index 26cc68eb58c..ddda04faaf1 100644
--- a/spec/models/commit_spec.rb
+++ b/spec/models/commit_spec.rb
@@ -444,61 +444,6 @@ eos
it { is_expected.to respond_to(:id) }
end
- describe '#closes_issues' do
- let(:issue) { create :issue, project: project }
- let(:other_project) { create(:project, :public) }
- let(:other_issue) { create :issue, project: other_project }
- let(:committer) { create :user }
-
- before do
- project.add_developer(committer)
- other_project.add_developer(committer)
- end
-
- it 'detects issues that this commit is marked as closing' do
- ext_ref = "#{other_project.full_path}##{other_issue.iid}"
-
- allow(commit).to receive_messages(
- safe_message: "Fixes ##{issue.iid} and #{ext_ref}",
- committer_email: committer.email
- )
-
- expect(commit.closes_issues).to include(issue)
- expect(commit.closes_issues).to include(other_issue)
- end
-
- it 'ignores referenced issues when auto-close is disabled' do
- project.update!(autoclose_referenced_issues: false)
-
- allow(commit).to receive_messages(
- safe_message: "Fixes ##{issue.iid}",
- committer_email: committer.email
- )
-
- expect(commit.closes_issues).to be_empty
- end
-
- context 'with personal snippet' do
- let(:commit) { personal_snippet.commit }
-
- it 'does not call Gitlab::ClosingIssueExtractor' do
- expect(Gitlab::ClosingIssueExtractor).not_to receive(:new)
-
- commit.closes_issues
- end
- end
-
- context 'with project snippet' do
- let(:commit) { project_snippet.commit }
-
- it 'does not call Gitlab::ClosingIssueExtractor' do
- expect(Gitlab::ClosingIssueExtractor).not_to receive(:new)
-
- commit.closes_issues
- end
- end
- end
-
it_behaves_like 'a mentionable' do
subject { create(:project, :repository).commit }
@@ -775,32 +720,6 @@ eos
end
end
- describe '#merge_requests' do
- let!(:project) { create(:project, :repository) }
- let!(:merge_request1) { create(:merge_request, source_project: project, source_branch: 'master', target_branch: 'feature') }
- let!(:merge_request2) { create(:merge_request, source_project: project, source_branch: 'merged-target', target_branch: 'feature') }
- let(:commit1) { merge_request1.merge_request_diff.commits.last }
- let(:commit2) { merge_request1.merge_request_diff.commits.first }
-
- it 'returns merge_requests that introduced that commit' do
- expect(commit1.merge_requests).to contain_exactly(merge_request1, merge_request2)
- expect(commit2.merge_requests).to contain_exactly(merge_request1)
- end
-
- context 'with personal snippet' do
- it 'returns empty relation' do
- expect(personal_snippet.repository.commit.merge_requests).to eq MergeRequest.none
- end
- end
-
- context 'with project snippet' do
- it 'returns empty relation' do
- expect(project_snippet.project).not_to receive(:merge_requests)
- expect(project_snippet.repository.commit.merge_requests).to eq MergeRequest.none
- end
- end
- end
-
describe 'signed commits' do
let(:gpg_signed_commit) { project.commit_by(oid: '0b4bc9a49b562e85de7cc9e834518ea6828729b9') }
let(:x509_signed_commit) { project.commit_by(oid: '189a6c924013fc3fe40d6f1ec1dc20214183bc97') }
@@ -821,4 +740,29 @@ eos
expect(commit.has_signature?).to be_falsey
end
end
+
+ describe '#has_been_reverted?' do
+ let(:user) { create(:user) }
+ let(:issue) { create(:issue, author: user, project: project) }
+
+ it 'returns true if the commit has been reverted' do
+ create(:note_on_issue,
+ noteable: issue,
+ system: true,
+ note: commit.revert_description(user),
+ project: issue.project)
+
+ expect_next_instance_of(Commit) do |revert_commit|
+ expect(revert_commit).to receive(:reverts_commit?)
+ .with(commit, user)
+ .and_return(true)
+ end
+
+ expect(commit.has_been_reverted?(user, issue.notes_with_associations)).to eq(true)
+ end
+
+ it 'returns false if the commit has not been reverted' do
+ expect(commit.has_been_reverted?(user, issue.notes_with_associations)).to eq(false)
+ end
+ end
end
diff --git a/spec/models/concerns/avatarable_spec.rb b/spec/models/concerns/avatarable_spec.rb
index 100953549ea..96e867dbc97 100644
--- a/spec/models/concerns/avatarable_spec.rb
+++ b/spec/models/concerns/avatarable_spec.rb
@@ -15,7 +15,7 @@ describe Avatarable do
end
describe '#update' do
- let(:validator) { project._validators[:avatar].detect { |v| v.is_a?(FileSizeValidator) } }
+ let(:validator) { project.class.validators_on(:avatar).find { |v| v.is_a?(FileSizeValidator) } }
context 'when avatar changed' do
it 'validates the file size' do
diff --git a/spec/models/concerns/blob_language_from_git_attributes_spec.rb b/spec/models/concerns/blob_language_from_git_attributes_spec.rb
index 7f05073b08e..4cb8f042b1d 100644
--- a/spec/models/concerns/blob_language_from_git_attributes_spec.rb
+++ b/spec/models/concerns/blob_language_from_git_attributes_spec.rb
@@ -11,13 +11,20 @@ describe BlobLanguageFromGitAttributes do
subject(:blob) { fake_blob(path: 'file.md') }
it 'returns return value from gitattribute' do
- expect(blob.project.repository).to receive(:gitattribute).with(blob.path, 'gitlab-language').and_return('erb?parent=json')
+ allow(blob.repository).to receive(:exists?).and_return(true)
+ expect(blob.repository).to receive(:gitattribute).with(blob.path, 'gitlab-language').and_return('erb?parent=json')
expect(blob.language_from_gitattributes).to eq('erb?parent=json')
end
- it 'returns nil if project is absent' do
- allow(blob).to receive(:project).and_return(nil)
+ it 'returns nil if repository is absent' do
+ allow(blob).to receive(:repository).and_return(nil)
+
+ expect(blob.language_from_gitattributes).to eq(nil)
+ end
+
+ it 'returns nil if repository does not exist' do
+ allow(blob.repository).to receive(:exists?).and_return(false)
expect(blob.language_from_gitattributes).to eq(nil)
end
diff --git a/spec/models/concerns/bulk_insert_safe_spec.rb b/spec/models/concerns/bulk_insert_safe_spec.rb
index 91884680738..a8e56cb8bdd 100644
--- a/spec/models/concerns/bulk_insert_safe_spec.rb
+++ b/spec/models/concerns/bulk_insert_safe_spec.rb
@@ -5,6 +5,35 @@ require 'spec_helper'
describe BulkInsertSafe do
class BulkInsertItem < ApplicationRecord
include BulkInsertSafe
+ include ShaAttribute
+
+ validates :name, :enum_value, :secret_value, :sha_value, presence: true
+
+ ENUM_VALUES = {
+ case_1: 1
+ }.freeze
+
+ sha_attribute :sha_value
+
+ enum enum_value: ENUM_VALUES
+
+ attr_encrypted :secret_value,
+ mode: :per_attribute_iv,
+ algorithm: 'aes-256-gcm',
+ key: Settings.attr_encrypted_db_key_base_32,
+ insecure_mode: false
+
+ default_value_for :enum_value, 'case_1'
+ default_value_for :secret_value, 'my-secret'
+ default_value_for :sha_value, '2fd4e1c67a2d28fced849ee1bb76e7391b93eb12'
+
+ def self.valid_list(count)
+ Array.new(count) { |n| new(name: "item-#{n}") }
+ end
+
+ def self.invalid_list(count)
+ Array.new(count) { new }
+ end
end
module InheritedUnsafeMethods
@@ -23,16 +52,117 @@ describe BulkInsertSafe do
end
end
- it_behaves_like 'a BulkInsertSafe model', BulkInsertItem
+ before(:all) do
+ ActiveRecord::Schema.define do
+ create_table :bulk_insert_items, force: true do |t|
+ t.string :name, null: true
+ t.integer :enum_value, null: false
+ t.text :encrypted_secret_value, null: false
+ t.string :encrypted_secret_value_iv, null: false
+ t.binary :sha_value, null: false, limit: 20
+
+ t.index :name, unique: true
+ end
+ end
+
+ BulkInsertItem.reset_column_information
+ end
+
+ after(:all) do
+ ActiveRecord::Schema.define do
+ drop_table :bulk_insert_items, force: true
+ end
+ end
+
+ describe BulkInsertItem do
+ it_behaves_like 'a BulkInsertSafe model', described_class do
+ let(:valid_items_for_bulk_insertion) { described_class.valid_list(10) }
+ let(:invalid_items_for_bulk_insertion) { described_class.invalid_list(10) }
+ end
+
+ context 'when inheriting class methods' do
+ it 'raises an error when method is not bulk-insert safe' do
+ expect { described_class.include(InheritedUnsafeMethods) }
+ .to raise_error(described_class::MethodNotAllowedError)
+ end
- context 'when inheriting class methods' do
- it 'raises an error when method is not bulk-insert safe' do
- expect { BulkInsertItem.include(InheritedUnsafeMethods) }.to(
- raise_error(subject::MethodNotAllowedError))
+ it 'does not raise an error when method is bulk-insert safe' do
+ expect { described_class.include(InheritedSafeMethods) }.not_to raise_error
+ end
end
- it 'does not raise an error when method is bulk-insert safe' do
- expect { BulkInsertItem.include(InheritedSafeMethods) }.not_to raise_error
+ context 'primary keys' do
+ it 'raises error if primary keys are set prior to insertion' do
+ item = described_class.new(name: 'valid', id: 10)
+
+ expect { described_class.bulk_insert!([item]) }
+ .to raise_error(described_class::PrimaryKeySetError)
+ end
+ end
+
+ describe '.bulk_insert!' do
+ it 'inserts items in the given number of batches' do
+ items = described_class.valid_list(10)
+
+ expect(ActiveRecord::InsertAll).to receive(:new).twice.and_call_original
+
+ described_class.bulk_insert!(items, batch_size: 5)
+ end
+
+ it 'inserts items that can be properly fetched from the database' do
+ items = described_class.valid_list(10)
+
+ described_class.bulk_insert!(items)
+
+ attribute_names = described_class.attribute_names - %w[id]
+ expect(described_class.last(items.size).pluck(*attribute_names)).to eq(
+ items.pluck(*attribute_names))
+ end
+
+ it 'rolls back the transaction when any item is invalid' do
+ # the second half of the list is invalid, so the whole insert should roll back
+ all_items = described_class.valid_list(10) +
+ described_class.invalid_list(10)
+
+ expect do
+ described_class.bulk_insert!(all_items, batch_size: 2) rescue nil
+ end.not_to change { described_class.count }
+ end
+
+ it 'does nothing and returns true when items are empty' do
+ expect(described_class.bulk_insert!([])).to be(true)
+ expect(described_class.count).to eq(0)
+ end
+ end
+
+ context 'when duplicate items are to be inserted' do
+ let!(:existing_object) { described_class.create!(name: 'duplicate', secret_value: 'old value') }
+ let(:new_object) { described_class.new(name: 'duplicate', secret_value: 'new value') }
+
+ describe '.bulk_insert!' do
+ context 'when skip_duplicates is set to false' do
+ it 'raises an exception' do
+ expect { described_class.bulk_insert!([new_object], skip_duplicates: false) }
+ .to raise_error(ActiveRecord::RecordNotUnique)
+ end
+ end
+
+ context 'when skip_duplicates is set to true' do
+ it 'does not update existing object' do
+ described_class.bulk_insert!([new_object], skip_duplicates: true)
+
+ expect(existing_object.reload.secret_value).to eq('old value')
+ end
+ end
+ end
+
+ describe '.bulk_upsert!' do
+ it 'updates existing object' do
+ described_class.bulk_upsert!([new_object], unique_by: %w[name])
+
+ expect(existing_object.reload.secret_value).to eq('new value')
+ end
+ end
end
end
end
diff --git a/spec/models/concerns/bulk_insertable_associations_spec.rb b/spec/models/concerns/bulk_insertable_associations_spec.rb
new file mode 100644
index 00000000000..6359b2c57ef
--- /dev/null
+++ b/spec/models/concerns/bulk_insertable_associations_spec.rb
@@ -0,0 +1,233 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe BulkInsertableAssociations do
+ class BulkFoo < ApplicationRecord
+ include BulkInsertSafe
+
+ validates :name, presence: true
+ end
+
+ class BulkBar < ApplicationRecord
+ include BulkInsertSafe
+ end
+
+ SimpleBar = Class.new(ApplicationRecord)
+
+ class BulkParent < ApplicationRecord
+ include BulkInsertableAssociations
+
+ has_many :bulk_foos
+ has_many :bulk_hunks, class_name: 'BulkFoo'
+ has_many :bulk_bars
+ has_many :simple_bars # not `BulkInsertSafe`
+ has_one :bulk_foo # not supported
+ end
+
+ before(:all) do
+ ActiveRecord::Schema.define do
+ create_table :bulk_parents, force: true do |t|
+ t.string :name, null: true
+ end
+
+ create_table :bulk_foos, force: true do |t|
+ t.string :name, null: true
+ t.belongs_to :bulk_parent, null: false
+ end
+
+ create_table :bulk_bars, force: true do |t|
+ t.string :name, null: true
+ t.belongs_to :bulk_parent, null: false
+ end
+
+ create_table :simple_bars, force: true do |t|
+ t.string :name, null: true
+ t.belongs_to :bulk_parent, null: false
+ end
+ end
+ end
+
+ after(:all) do
+ ActiveRecord::Schema.define do
+ drop_table :bulk_foos, force: true
+ drop_table :bulk_bars, force: true
+ drop_table :simple_bars, force: true
+ drop_table :bulk_parents, force: true
+ end
+ end
+
+ context 'saving bulk insertable associations' do
+ let(:parent) { BulkParent.new(name: 'parent') }
+
+ context 'when items already have IDs' do
+ it 'stores nothing and raises an error' do
+ build_items(parent: parent) { |n, item| item.id = n }
+
+ expect { save_with_bulk_inserts(parent) }.to raise_error(BulkInsertSafe::PrimaryKeySetError)
+ expect(BulkFoo.count).to eq(0)
+ end
+ end
+
+ context 'when items have no IDs set' do
+ it 'stores them all and updates items with IDs' do
+ items = build_items(parent: parent)
+
+ expect(BulkFoo).to receive(:bulk_insert!).once.and_call_original
+ expect { save_with_bulk_inserts(parent) }.to change { BulkFoo.count }.from(0).to(items.size)
+ expect(parent.bulk_foos.pluck(:id)).to all(be_a Integer)
+ end
+ end
+
+ context 'when items are empty' do
+ it 'does nothing' do
+ expect(parent.bulk_foos).to be_empty
+
+ expect { save_with_bulk_inserts(parent) }.not_to change { BulkFoo.count }
+ end
+ end
+
+ context 'when relation name does not match class name' do
+ it 'stores them all' do
+ items = build_items(parent: parent, relation: :bulk_hunks)
+
+ expect(BulkFoo).to receive(:bulk_insert!).once.and_call_original
+
+ expect { save_with_bulk_inserts(parent) }.to(
+ change { BulkFoo.count }.from(0).to(items.size)
+ )
+ end
+ end
+
+ context 'with multiple threads' do
+ it 'isolates bulk insert behavior between threads' do
+ total_item_count = 10
+ parent1 = BulkParent.new(name: 'parent1')
+ parent2 = BulkParent.new(name: 'parent2')
+ build_items(parent: parent1, count: total_item_count / 2)
+ build_items(parent: parent2, count: total_item_count / 2)
+
+ expect(BulkFoo).to receive(:bulk_insert!).once.and_call_original
+ [
+ Thread.new do
+ save_with_bulk_inserts(parent1)
+ end,
+ Thread.new do
+ parent2.save!
+ end
+ ].map(&:join)
+
+ expect(BulkFoo.count).to eq(total_item_count)
+ end
+ end
+
+ context 'with multiple associations' do
+ it 'isolates writes between associations' do
+ items1 = build_items(parent: parent, relation: :bulk_foos)
+ items2 = build_items(parent: parent, relation: :bulk_bars)
+
+ expect(BulkFoo).to receive(:bulk_insert!).once.and_call_original
+ expect(BulkBar).to receive(:bulk_insert!).once.and_call_original
+
+ expect { save_with_bulk_inserts(parent) }.to(
+ change { BulkFoo.count }.from(0).to(items1.size)
+ .and(
+ change { BulkBar.count }.from(0).to(items2.size)
+ ))
+ end
+ end
+
+ context 'passing bulk insert arguments' do
+ it 'disables validations on target association' do
+ items = build_items(parent: parent)
+
+ expect(BulkFoo).to receive(:bulk_insert!).with(items, validate: false).and_return true
+
+ save_with_bulk_inserts(parent)
+ end
+ end
+
+ it 'can disable bulk-inserts within a bulk-insert block' do
+ parent1 = BulkParent.new(name: 'parent1')
+ parent2 = BulkParent.new(name: 'parent2')
+ _items1 = build_items(parent: parent1)
+ items2 = build_items(parent: parent2)
+
+ expect(BulkFoo).to receive(:bulk_insert!).once.with(items2, validate: false)
+
+ BulkInsertableAssociations.with_bulk_insert(enabled: true) do
+ BulkInsertableAssociations.with_bulk_insert(enabled: false) do
+ parent1.save!
+ end
+
+ parent2.save!
+ end
+ end
+
+ context 'when association is not bulk-insert safe' do
+ it 'saves it normally' do
+ parent.simple_bars.build
+
+ expect(SimpleBar).not_to receive(:bulk_insert!)
+ expect { save_with_bulk_inserts(parent) }.to change { SimpleBar.count }.from(0).to(1)
+ end
+ end
+
+ context 'when association is not has_many' do
+ it 'saves it normally' do
+ parent.bulk_foo = BulkFoo.new(name: 'item')
+
+ expect(BulkFoo).not_to receive(:bulk_insert!)
+ expect { save_with_bulk_inserts(parent) }.to change { BulkFoo.count }.from(0).to(1)
+ end
+ end
+
+ context 'when an item is not valid' do
+ describe '.save' do
+ it 'invalidates the parent and returns false' do
+ build_invalid_items(parent: parent)
+
+ expect(save_with_bulk_inserts(parent, bangify: false)).to be false
+ expect(parent.errors[:bulk_foos].size).to eq(1)
+
+ expect(BulkFoo.count).to eq(0)
+ expect(BulkParent.count).to eq(0)
+ end
+ end
+
+ describe '.save!' do
+ it 'invalidates the parent and raises error' do
+ build_invalid_items(parent: parent)
+
+ expect { save_with_bulk_inserts(parent) }.to raise_error(ActiveRecord::RecordInvalid)
+ expect(parent.errors[:bulk_foos].size).to eq(1)
+
+ expect(BulkFoo.count).to eq(0)
+ expect(BulkParent.count).to eq(0)
+ end
+ end
+ end
+ end
+
+ private
+
+ def save_with_bulk_inserts(entity, bangify: true)
+ BulkInsertableAssociations.with_bulk_insert { bangify ? entity.save! : entity.save }
+ end
+
+ def build_items(parent:, relation: :bulk_foos, count: 10)
+ count.times do |n|
+ item = parent.send(relation).build(name: "item_#{n}", bulk_parent_id: parent.id)
+ yield(n, item) if block_given?
+ end
+ parent.send(relation)
+ end
+
+ def build_invalid_items(parent:)
+ build_items(parent: parent).tap do |items|
+ invalid_item = items.first
+ invalid_item.name = nil
+ expect(invalid_item).not_to be_valid
+ end
+ end
+end
diff --git a/spec/models/concerns/cache_markdown_field_spec.rb b/spec/models/concerns/cache_markdown_field_spec.rb
index 06d12c14793..697a9e98505 100644
--- a/spec/models/concerns/cache_markdown_field_spec.rb
+++ b/spec/models/concerns/cache_markdown_field_spec.rb
@@ -230,6 +230,26 @@ describe CacheMarkdownField, :clean_gitlab_redis_cache do
end
end
end
+
+ describe '#rendered_field_content' do
+ let(:thing) { klass.new(description: markdown, description_html: nil, cached_markdown_version: cache_version) }
+
+ context 'when a field can be cached' do
+ it 'returns the html' do
+ thing.description = updated_markdown
+
+ expect(thing.rendered_field_content(:description)).to eq updated_html
+ end
+ end
+
+ context 'when a field cannot be cached' do
+ it 'returns nil' do
+ allow(thing).to receive(:can_cache_field?).with(:description).and_return false
+
+ expect(thing.rendered_field_content(:description)).to eq nil
+ end
+ end
+ end
end
context 'for Active record classes' do
diff --git a/spec/models/concerns/ci/has_ref_spec.rb b/spec/models/concerns/ci/has_ref_spec.rb
new file mode 100644
index 00000000000..b98f915018b
--- /dev/null
+++ b/spec/models/concerns/ci/has_ref_spec.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::HasRef do
+ describe '#branch?' do
+ let(:build) { create(:ci_build) }
+
+ subject { build.branch? }
+
+ context 'is not a tag' do
+ before do
+ build.tag = false
+ end
+
+ it 'returns true when tag is set to false' do
+ is_expected.to be_truthy
+ end
+
+ context 'when it was triggered by merge request' do
+ let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
+ let(:pipeline) { merge_request.pipelines_for_merge_request.first }
+ let(:build) { create(:ci_build, pipeline: pipeline) }
+
+ it 'returns false' do
+ is_expected.to be_falsy
+ end
+ end
+ end
+
+ context 'is a tag' do
+ before do
+ build.tag = true
+ end
+
+ it 'returns false when tag is set to true' do
+ is_expected.to be_falsey
+ end
+ end
+ end
+
+ describe '#git_ref' do
+ subject { build.git_ref }
+
+ context 'when tag is true' do
+ let(:build) { create(:ci_build, tag: true) }
+
+ it 'returns a tag ref' do
+ is_expected.to start_with(Gitlab::Git::TAG_REF_PREFIX)
+ end
+ end
+
+ context 'when tag is false' do
+ let(:build) { create(:ci_build, tag: false) }
+
+ it 'returns a branch ref' do
+ is_expected.to start_with(Gitlab::Git::BRANCH_REF_PREFIX)
+ end
+ end
+
+ context 'when tag is nil' do
+ let(:build) { create(:ci_build, tag: nil) }
+
+ it 'returns a branch ref' do
+ is_expected.to start_with(Gitlab::Git::BRANCH_REF_PREFIX)
+ end
+ end
+
+ context 'when it is triggered by a merge request' do
+ let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
+ let(:pipeline) { merge_request.pipelines_for_merge_request.first }
+ let(:build) { create(:ci_build, tag: false, pipeline: pipeline) }
+
+ it 'returns nil' do
+ is_expected.to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/models/concerns/has_ref_spec.rb b/spec/models/concerns/has_ref_spec.rb
deleted file mode 100644
index 66b25c77430..00000000000
--- a/spec/models/concerns/has_ref_spec.rb
+++ /dev/null
@@ -1,79 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe HasRef do
- describe '#branch?' do
- let(:build) { create(:ci_build) }
-
- subject { build.branch? }
-
- context 'is not a tag' do
- before do
- build.tag = false
- end
-
- it 'return true when tag is set to false' do
- is_expected.to be_truthy
- end
-
- context 'when it was triggered by merge request' do
- let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
- let(:pipeline) { merge_request.pipelines_for_merge_request.first }
- let(:build) { create(:ci_build, pipeline: pipeline) }
-
- it 'returns false' do
- is_expected.to be_falsy
- end
- end
- end
-
- context 'is not a tag' do
- before do
- build.tag = true
- end
-
- it 'return false when tag is set to true' do
- is_expected.to be_falsey
- end
- end
- end
-
- describe '#git_ref' do
- subject { build.git_ref }
-
- context 'when tag is true' do
- let(:build) { create(:ci_build, tag: true) }
-
- it 'returns a tag ref' do
- is_expected.to start_with(Gitlab::Git::TAG_REF_PREFIX)
- end
- end
-
- context 'when tag is false' do
- let(:build) { create(:ci_build, tag: false) }
-
- it 'returns a branch ref' do
- is_expected.to start_with(Gitlab::Git::BRANCH_REF_PREFIX)
- end
- end
-
- context 'when tag is nil' do
- let(:build) { create(:ci_build, tag: nil) }
-
- it 'returns a branch ref' do
- is_expected.to start_with(Gitlab::Git::BRANCH_REF_PREFIX)
- end
- end
-
- context 'when it is triggered by a merge request' do
- let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
- let(:pipeline) { merge_request.pipelines_for_merge_request.first }
- let(:build) { create(:ci_build, tag: false, pipeline: pipeline) }
-
- it 'returns nil' do
- is_expected.to be_nil
- end
- end
- end
-end
diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb
index 3e5c16c2491..4ecbc671c72 100644
--- a/spec/models/concerns/issuable_spec.rb
+++ b/spec/models/concerns/issuable_spec.rb
@@ -56,8 +56,6 @@ describe Issuable do
end
describe "Scope" do
- subject { build(:issue) }
-
it { expect(issuable_class).to respond_to(:opened) }
it { expect(issuable_class).to respond_to(:closed) }
it { expect(issuable_class).to respond_to(:assigned) }
diff --git a/spec/models/concerns/milestoneish_spec.rb b/spec/models/concerns/milestoneish_spec.rb
index d46c9747845..cff607a4731 100644
--- a/spec/models/concerns/milestoneish_spec.rb
+++ b/spec/models/concerns/milestoneish_spec.rb
@@ -211,56 +211,37 @@ describe Milestone, 'Milestoneish' do
end
end
- describe '#complete?' do
+ describe '#complete?', :use_clean_rails_memory_store_caching do
it 'returns false when has items opened' do
- expect(milestone.complete?(non_member)).to eq false
+ expect(milestone.complete?).to eq false
end
it 'returns true when all items are closed' do
issue.close
- merge_request.close
+ security_issue_1.close
+ security_issue_2.close
- expect(milestone.complete?(non_member)).to eq true
+ expect(milestone.complete?).to eq true
end
end
- describe '#percent_complete' do
+ describe '#percent_complete', :use_clean_rails_memory_store_caching do
context 'division by zero' do
let(:new_milestone) { build_stubbed(:milestone) }
- it { expect(new_milestone.percent_complete(admin)).to eq(0) }
+ it { expect(new_milestone.percent_complete).to eq(0) }
end
end
- describe '#count_issues_by_state' do
- it 'does not count confidential issues for non project members' do
- expect(milestone.closed_issues_count(non_member)).to eq 2
- expect(milestone.total_issues_count(non_member)).to eq 3
- end
-
- it 'does not count confidential issues for project members with guest role' do
- expect(milestone.closed_issues_count(guest)).to eq 2
- expect(milestone.total_issues_count(guest)).to eq 3
- end
-
- it 'counts confidential issues for author' do
- expect(milestone.closed_issues_count(author)).to eq 4
- expect(milestone.total_issues_count(author)).to eq 6
- end
-
- it 'counts confidential issues for assignee' do
- expect(milestone.closed_issues_count(assignee)).to eq 4
- expect(milestone.total_issues_count(assignee)).to eq 6
- end
-
- it 'counts confidential issues for project members' do
- expect(milestone.closed_issues_count(member)).to eq 6
- expect(milestone.total_issues_count(member)).to eq 9
+ describe '#closed_issues_count' do
+ it 'counts all closed issues including confidential' do
+ expect(milestone.closed_issues_count).to eq 6
end
+ end
- it 'counts confidential issues for admin' do
- expect(milestone.closed_issues_count(admin)).to eq 6
- expect(milestone.total_issues_count(admin)).to eq 9
+ describe '#total_issues_count' do
+ it 'counts all issues including confidential' do
+ expect(milestone.total_issues_count).to eq 9
end
end
diff --git a/spec/models/concerns/reactive_caching_spec.rb b/spec/models/concerns/reactive_caching_spec.rb
index 6d4eeae641f..96a9c317fb8 100644
--- a/spec/models/concerns/reactive_caching_spec.rb
+++ b/spec/models/concerns/reactive_caching_spec.rb
@@ -112,6 +112,43 @@ describe ReactiveCaching, :use_clean_rails_memory_store_caching do
end
end
+ describe '#with_reactive_cache_set', :use_clean_rails_redis_caching do
+ subject(:go!) do
+ instance.with_reactive_cache_set('resource', {}) do |data|
+ data
+ end
+ end
+
+ it 'calls with_reactive_cache' do
+ expect(instance)
+ .to receive(:with_reactive_cache)
+
+ go!
+ end
+
+ context 'when data is returned' do
+ let(:resource) { 'resource' }
+ let(:set_key) { "#{cache_key}:#{resource}" }
+ let(:set_cache) { Gitlab::ReactiveCacheSetCache.new }
+
+ before do
+ stub_reactive_cache(instance, true, resource, {})
+ end
+
+ it 'saves keys in set' do
+ expect(set_cache.read(set_key)).to be_empty
+
+ go!
+
+ expect(set_cache.read(set_key)).not_to be_empty
+ end
+
+ it 'returns the data' do
+ expect(go!).to eq(true)
+ end
+ end
+ end
+
describe '.reactive_cache_worker_finder' do
context 'with default reactive_cache_worker_finder' do
let(:args) { %w(other args) }
diff --git a/spec/models/concerns/spammable_spec.rb b/spec/models/concerns/spammable_spec.rb
index 67353475251..b8537dd39f6 100644
--- a/spec/models/concerns/spammable_spec.rb
+++ b/spec/models/concerns/spammable_spec.rb
@@ -36,6 +36,46 @@ describe Spammable do
end
end
+ describe '#invalidate_if_spam' do
+ using RSpec::Parameterized::TableSyntax
+
+ context 'when the model is spam' do
+ where(:recaptcha_enabled, :error) do
+ true | /solve the reCAPTCHA to proceed/
+ false | /has been discarded/
+ end
+
+ with_them do
+ subject { invalidate_if_spam(true, recaptcha_enabled) }
+
+ it 'has an error related to spam on the model' do
+ expect(subject.errors.messages[:base]).to match_array error
+ end
+ end
+ end
+
+ context 'when the model is not spam' do
+ [true, false].each do |enabled|
+ let(:recaptcha_enabled) { enabled }
+
+ subject { invalidate_if_spam(false, recaptcha_enabled) }
+
+ it 'returns no error' do
+ expect(subject.errors.messages[:base]).to be_empty
+ end
+ end
+ end
+
+ def invalidate_if_spam(is_spam, recaptcha_enabled)
+ stub_application_setting(recaptcha_enabled: recaptcha_enabled)
+
+ issue.tap do |i|
+ i.spam = is_spam
+ i.invalidate_if_spam
+ end
+ end
+ end
+
describe '#submittable_as_spam_by?' do
let(:admin) { build(:admin) }
let(:user) { build(:user) }
diff --git a/spec/models/concerns/usage_statistics_spec.rb b/spec/models/concerns/usage_statistics_spec.rb
new file mode 100644
index 00000000000..f99f0a13317
--- /dev/null
+++ b/spec/models/concerns/usage_statistics_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe UsageStatistics do
+ describe '.distinct_count_by' do
+ let_it_be(:issue_1) { create(:issue) }
+ let_it_be(:issue_2) { create(:issue) }
+
+ context 'two records created by the same issue' do
+ let!(:models_created_by_issue) do
+ create(:zoom_meeting, :added_to_issue, issue: issue_1)
+ create(:zoom_meeting, :removed_from_issue, issue: issue_1)
+ end
+
+ it 'returns a count of 1' do
+ expect(::ZoomMeeting.distinct_count_by(:issue_id)).to eq(1)
+ end
+
+ context 'when given no column to count' do
+ it 'counts by :id and returns a count of 2' do
+ expect(::ZoomMeeting.distinct_count_by).to eq(2)
+ end
+ end
+ end
+
+ context 'one record created by each issue' do
+ let!(:model_created_by_issue_1) { create(:zoom_meeting, issue: issue_1) }
+ let!(:model_created_by_issue_2) { create(:zoom_meeting, issue: issue_2) }
+
+ it 'returns a count of 2' do
+ expect(::ZoomMeeting.distinct_count_by(:issue_id)).to eq(2)
+ end
+ end
+
+ context 'the count query times out' do
+ before do
+ allow_next_instance_of(ActiveRecord::Relation) do |instance|
+ allow(instance).to receive(:count).and_raise(ActiveRecord::StatementInvalid.new(''))
+ end
+ end
+
+ it 'does not raise an error' do
+ expect { ::ZoomMeeting.distinct_count_by(:issue_id) }.not_to raise_error
+ end
+
+ it 'returns -1' do
+ expect(::ZoomMeeting.distinct_count_by(:issue_id)).to eq(-1)
+ end
+ end
+ end
+end
diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb
index ab7e12cd43c..86dfa586862 100644
--- a/spec/models/deployment_spec.rb
+++ b/spec/models/deployment_spec.rb
@@ -520,6 +520,21 @@ describe Deployment do
end
end
+ describe '#create_ref' do
+ let(:deployment) { build(:deployment) }
+
+ subject { deployment.create_ref }
+
+ it 'creates a ref using the sha' do
+ expect(deployment.project.repository).to receive(:create_ref).with(
+ deployment.sha,
+ "refs/environments/#{deployment.environment.name}/deployments/#{deployment.iid}"
+ )
+
+ subject
+ end
+ end
+
describe '#playable_build' do
subject { deployment.playable_build }
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index 72143d69fc8..6020db09ccf 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -325,26 +325,6 @@ describe Environment, :use_clean_rails_memory_store_caching do
end
end
- describe '#first_deployment_for' do
- let(:project) { create(:project, :repository) }
- let!(:deployment) { create(:deployment, :succeed, environment: environment, ref: commit.parent.id) }
- let!(:deployment1) { create(:deployment, :succeed, environment: environment, ref: commit.id) }
- let(:head_commit) { project.commit }
- let(:commit) { project.commit.parent }
-
- it 'returns deployment id for the environment', :sidekiq_might_not_need_inline do
- expect(environment.first_deployment_for(commit.id)).to eq deployment1
- end
-
- it 'return nil when no deployment is found' do
- expect(environment.first_deployment_for(head_commit.id)).to eq nil
- end
-
- it 'returns a UTF-8 ref', :sidekiq_might_not_need_inline do
- expect(environment.first_deployment_for(commit.id).ref).to be_utf8
- end
- end
-
describe '#environment_type' do
subject { environment.environment_type }
@@ -1264,6 +1244,14 @@ describe Environment, :use_clean_rails_memory_store_caching do
end
end
+ describe '.for_id_and_slug' do
+ subject { described_class.for_id_and_slug(environment.id, environment.slug) }
+
+ let(:environment) { create(:environment) }
+
+ it { is_expected.not_to be_nil }
+ end
+
describe '.find_or_create_by_name' do
it 'finds an existing environment if it exists' do
env = create(:environment)
@@ -1278,4 +1266,39 @@ describe Environment, :use_clean_rails_memory_store_caching do
expect(env).to be_persisted
end
end
+
+ describe '#elastic_stack_available?' do
+ let!(:cluster) { create(:cluster, :project, :provided_by_user, projects: [project]) }
+ let!(:deployment) { create(:deployment, :success, environment: environment, project: project, cluster: cluster) }
+
+ context 'when app does not exist' do
+ it 'returns false' do
+ expect(environment.elastic_stack_available?).to be(false)
+ end
+ end
+
+ context 'when app exists but is not installed' do
+ let!(:application) { create(:clusters_applications_elastic_stack, cluster: cluster) }
+
+ it 'returns false' do
+ expect(environment.elastic_stack_available?).to be(false)
+ end
+ end
+
+ context 'when app is installed' do
+ let!(:application) { create(:clusters_applications_elastic_stack, :installed, cluster: cluster) }
+
+ it 'returns true' do
+ expect(environment.elastic_stack_available?).to be(true)
+ end
+ end
+
+ context 'when app is updated' do
+ let!(:application) { create(:clusters_applications_elastic_stack, :updated, cluster: cluster) }
+
+ it 'returns true' do
+ expect(environment.elastic_stack_available?).to be(true)
+ end
+ end
+ end
end
diff --git a/spec/models/environment_status_spec.rb b/spec/models/environment_status_spec.rb
index 0f2c6928820..10283b54796 100644
--- a/spec/models/environment_status_spec.rb
+++ b/spec/models/environment_status_spec.rb
@@ -51,8 +51,10 @@ describe EnvironmentStatus do
# - source: /files\/(.+)/
# public: '\1'
describe '#changes' do
+ subject { environment_status.changes }
+
it 'contains only added and modified public pages' do
- expect(environment_status.changes).to contain_exactly(
+ expect(subject).to contain_exactly(
{
path: 'ruby-style-guide.html',
external_url: "#{environment.external_url}/ruby-style-guide.html"
@@ -64,6 +66,18 @@ describe EnvironmentStatus do
end
end
+ describe '#changed_urls' do
+ subject { environment_status.changed_urls }
+
+ it { is_expected.to contain_exactly("#{environment.external_url}/ruby-style-guide.html", "#{environment.external_url}/html/page.html") }
+ end
+
+ describe '#changed_paths' do
+ subject { environment_status.changed_paths }
+
+ it { is_expected.to contain_exactly('ruby-style-guide.html', 'html/page.html') }
+ end
+
describe '.for_merge_request' do
let(:admin) { create(:admin) }
let!(:pipeline) { create(:ci_pipeline, sha: sha, merge_requests_as_head_pipeline: [merge_request]) }
diff --git a/spec/models/error_tracking/project_error_tracking_setting_spec.rb b/spec/models/error_tracking/project_error_tracking_setting_spec.rb
index e81480ab88f..b564c48a9c1 100644
--- a/spec/models/error_tracking/project_error_tracking_setting_spec.rb
+++ b/spec/models/error_tracking/project_error_tracking_setting_spec.rb
@@ -8,7 +8,7 @@ describe ErrorTracking::ProjectErrorTrackingSetting do
let_it_be(:project) { create(:project) }
- subject { create(:project_error_tracking_setting, project: project) }
+ subject(:setting) { build(:project_error_tracking_setting, project: project) }
describe 'Associations' do
it { is_expected.to belong_to(:project) }
@@ -453,4 +453,23 @@ describe ErrorTracking::ProjectErrorTrackingSetting do
end
end
end
+
+ describe '#expire_issues_cache', :use_clean_rails_redis_caching do
+ let(:issues) { [:some, :issues] }
+ let(:opt) { 'list_issues' }
+ let(:params) { { issue_status: 'unresolved', limit: 20, sort: 'last_seen' } }
+
+ before do
+ start_reactive_cache_lifetime(subject, opt, params.stringify_keys)
+ stub_reactive_cache(subject, issues, opt, params.stringify_keys)
+ end
+
+ it 'clears the cache' do
+ expect(subject.list_sentry_issues(params)).to eq(issues)
+
+ subject.expire_issues_cache
+
+ expect(subject.list_sentry_issues(params)).to eq(nil)
+ end
+ end
end
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index d42888e1d54..576ac4393ed 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -950,6 +950,16 @@ describe Group do
subject { group.ci_variables_for('ref', project) }
+ it 'memoizes the result by ref', :request_store do
+ expect(project).to receive(:protected_for?).with('ref').once.and_return(true)
+ expect(project).to receive(:protected_for?).with('other').once.and_return(false)
+
+ 2.times do
+ expect(group.ci_variables_for('ref', project)).to contain_exactly(ci_variable, protected_variable)
+ expect(group.ci_variables_for('other', project)).to contain_exactly(ci_variable)
+ end
+ end
+
shared_examples 'ref is protected' do
it 'contains all the variables' do
is_expected.to contain_exactly(ci_variable, protected_variable)
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index c0501fb16c6..66b298bb36f 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -178,67 +178,63 @@ describe Issue do
let(:namespace) { build(:namespace, path: 'sample-namespace') }
let(:project) { build(:project, name: 'sample-project', namespace: namespace) }
let(:issue) { build(:issue, iid: 1, project: project) }
- let(:group) { create(:group, name: 'Group', path: 'sample-group') }
context 'when nil argument' do
it 'returns issue id' do
expect(issue.to_reference).to eq "#1"
end
- end
-
- context 'when full is true' do
- it 'returns complete path to the issue' do
- expect(issue.to_reference(full: true)).to eq 'sample-namespace/sample-project#1'
- expect(issue.to_reference(project, full: true)).to eq 'sample-namespace/sample-project#1'
- expect(issue.to_reference(group, full: true)).to eq 'sample-namespace/sample-project#1'
- end
- end
- context 'when same project argument' do
- it 'returns issue id' do
- expect(issue.to_reference(project)).to eq("#1")
+ it 'returns complete path to the issue with full: true' do
+ expect(issue.to_reference(full: true)).to eq 'sample-namespace/sample-project#1'
end
end
- context 'when cross namespace project argument' do
- let(:another_namespace_project) { create(:project, name: 'another-project') }
+ context 'when argument is a project' do
+ context 'when same project' do
+ it 'returns issue id' do
+ expect(issue.to_reference(project)).to eq("#1")
+ end
- it 'returns complete path to the issue' do
- expect(issue.to_reference(another_namespace_project)).to eq 'sample-namespace/sample-project#1'
+ it 'returns full reference with full: true' do
+ expect(issue.to_reference(project, full: true)).to eq 'sample-namespace/sample-project#1'
+ end
end
- end
- it 'supports a cross-project reference' do
- another_project = build(:project, name: 'another-project', namespace: project.namespace)
- expect(issue.to_reference(another_project)).to eq "sample-project#1"
- end
-
- context 'when same namespace / cross-project argument' do
- let(:another_project) { create(:project, namespace: namespace) }
+ context 'when cross-project in same namespace' do
+ let(:another_project) do
+ build(:project, name: 'another-project', namespace: project.namespace)
+ end
- it 'returns path to the issue with the project name' do
- expect(issue.to_reference(another_project)).to eq 'sample-project#1'
+ it 'returns a cross-project reference' do
+ expect(issue.to_reference(another_project)).to eq "sample-project#1"
+ end
end
- end
- context 'when different namespace / cross-project argument' do
- let(:another_namespace) { create(:namespace, path: 'another-namespace') }
- let(:another_project) { create(:project, path: 'another-project', namespace: another_namespace) }
+ context 'when cross-project in different namespace' do
+ let(:another_namespace) { build(:namespace, path: 'another-namespace') }
+ let(:another_namespace_project) { build(:project, path: 'another-project', namespace: another_namespace) }
- it 'returns full path to the issue' do
- expect(issue.to_reference(another_project)).to eq 'sample-namespace/sample-project#1'
+ it 'returns complete path to the issue' do
+ expect(issue.to_reference(another_namespace_project)).to eq 'sample-namespace/sample-project#1'
+ end
end
end
context 'when argument is a namespace' do
- context 'with same project path' do
+ context 'when same as issue' do
it 'returns path to the issue with the project name' do
expect(issue.to_reference(namespace)).to eq 'sample-project#1'
end
+
+ it 'returns full reference with full: true' do
+ expect(issue.to_reference(namespace, full: true)).to eq 'sample-namespace/sample-project#1'
+ end
end
- context 'with different project path' do
- it 'returns full path to the issue' do
+ context 'when different to issue namespace' do
+ let(:group) { build(:group, name: 'Group', path: 'sample-group') }
+
+ it 'returns full path to the issue with full: true' do
expect(issue.to_reference(group)).to eq 'sample-namespace/sample-project#1'
end
end
@@ -429,16 +425,16 @@ describe Issue do
let(:issue) { create(:issue, title: 'testing-issue') }
it 'starts with the issue iid' do
- expect(issue.to_branch_name).to match /\A#{issue.iid}-[A-Za-z\-]+\z/
+ expect(issue.to_branch_name).to match(/\A#{issue.iid}-[A-Za-z\-]+\z/)
end
it "contains the issue title if not confidential" do
- expect(issue.to_branch_name).to match /testing-issue\z/
+ expect(issue.to_branch_name).to match(/testing-issue\z/)
end
it "does not contain the issue title if confidential" do
issue = create(:issue, title: 'testing-issue', confidential: true)
- expect(issue.to_branch_name).to match /confidential-issue\z/
+ expect(issue.to_branch_name).to match(/confidential-issue\z/)
end
context 'issue title longer than 100 characters' do
@@ -936,4 +932,33 @@ describe Issue do
end
it_behaves_like 'versioned description'
+
+ describe "#previous_updated_at" do
+ let_it_be(:updated_at) { Time.new(2012, 01, 06) }
+ let_it_be(:issue) { create(:issue, updated_at: updated_at) }
+
+ it 'returns updated_at value if updated_at did not change at all' do
+ allow(issue).to receive(:previous_changes).and_return({})
+
+ expect(issue.previous_updated_at).to eq(updated_at)
+ end
+
+ it 'returns updated_at value if `previous_changes` has nil value for `updated_at`' do
+ allow(issue).to receive(:previous_changes).and_return({ 'updated_at' => nil })
+
+ expect(issue.previous_updated_at).to eq(updated_at)
+ end
+
+ it 'returns updated_at value if previous updated_at value is not present' do
+ allow(issue).to receive(:previous_changes).and_return({ 'updated_at' => [nil, Time.new(2013, 02, 06)] })
+
+ expect(issue.previous_updated_at).to eq(updated_at)
+ end
+
+ it 'returns previous updated_at when present' do
+ allow(issue).to receive(:previous_changes).and_return({ 'updated_at' => [Time.new(2013, 02, 06), Time.new(2013, 03, 06)] })
+
+ expect(issue.previous_updated_at).to eq(Time.new(2013, 02, 06))
+ end
+ end
end
diff --git a/spec/models/key_spec.rb b/spec/models/key_spec.rb
index c9b41c9d82e..8cdedbcdedf 100644
--- a/spec/models/key_spec.rb
+++ b/spec/models/key_spec.rb
@@ -181,16 +181,49 @@ describe Key, :mailer do
end
context 'callbacks' do
- it 'adds new key to authorized_file' do
- key = build(:personal_key, id: 7)
- expect(GitlabShellWorker).to receive(:perform_async).with(:add_key, key.shell_id, key.key)
- key.save!
+ let(:key) { build(:personal_key) }
+
+ context 'authorized keys file is enabled' do
+ before do
+ stub_application_setting(authorized_keys_enabled: true)
+ end
+
+ it 'adds new key to authorized_file' do
+ allow(AuthorizedKeysWorker).to receive(:perform_async)
+
+ key.save!
+
+ # Check after the fact so we have access to Key#id
+ expect(AuthorizedKeysWorker).to have_received(:perform_async).with(:add_key, key.shell_id, key.key)
+ end
+
+ it 'removes key from authorized_file' do
+ key.save!
+
+ expect(AuthorizedKeysWorker).to receive(:perform_async).with(:remove_key, key.shell_id)
+
+ key.destroy
+ end
end
- it 'removes key from authorized_file' do
- key = create(:personal_key)
- expect(GitlabShellWorker).to receive(:perform_async).with(:remove_key, key.shell_id)
- key.destroy
+ context 'authorized_keys file is disabled' do
+ before do
+ stub_application_setting(authorized_keys_enabled: false)
+ end
+
+ it 'does not add the key on creation' do
+ expect(AuthorizedKeysWorker).not_to receive(:perform_async)
+
+ key.save!
+ end
+
+ it 'does not remove the key on destruction' do
+ key.save!
+
+ expect(AuthorizedKeysWorker).not_to receive(:perform_async)
+
+ key.destroy
+ end
end
end
diff --git a/spec/models/label_link_spec.rb b/spec/models/label_link_spec.rb
index 0a5cb5374b0..7a179dcb419 100644
--- a/spec/models/label_link_spec.rb
+++ b/spec/models/label_link_spec.rb
@@ -8,5 +8,8 @@ describe LabelLink do
it { is_expected.to belong_to(:label) }
it { is_expected.to belong_to(:target) }
- it_behaves_like 'a BulkInsertSafe model', LabelLink
+ it_behaves_like 'a BulkInsertSafe model', LabelLink do
+ let(:valid_items_for_bulk_insertion) { build_list(:label_link, 10) }
+ let(:invalid_items_for_bulk_insertion) { [] } # class does not have any validations defined
+ end
end
diff --git a/spec/models/lfs_object_spec.rb b/spec/models/lfs_object_spec.rb
index 51713906d06..09a64dabb08 100644
--- a/spec/models/lfs_object_spec.rb
+++ b/spec/models/lfs_object_spec.rb
@@ -13,6 +13,15 @@ describe LfsObject do
expect(described_class.not_linked_to_project(project)).to contain_exactly(other_lfs_object)
end
end
+
+ describe '.for_oids' do
+ it 'returns the correct LfsObjects' do
+ lfs_object_1, lfs_object_2 = create_list(:lfs_object, 2)
+
+ expect(described_class.for_oids(lfs_object_1.oid)).to contain_exactly(lfs_object_1)
+ expect(described_class.for_oids([lfs_object_1.oid, lfs_object_2.oid])).to contain_exactly(lfs_object_1, lfs_object_2)
+ end
+ end
end
it 'has a distinct has_many :projects relation through lfs_objects_projects' do
diff --git a/spec/models/merge_request/pipelines_spec.rb b/spec/models/merge_request/pipelines_spec.rb
deleted file mode 100644
index 0afbcc60ed6..00000000000
--- a/spec/models/merge_request/pipelines_spec.rb
+++ /dev/null
@@ -1,160 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe MergeRequest::Pipelines do
- describe '#all' do
- let(:merge_request) { create(:merge_request) }
- let(:project) { merge_request.source_project }
-
- subject { described_class.new(merge_request) }
-
- shared_examples 'returning pipelines with proper ordering' do
- let!(:all_pipelines) do
- merge_request.all_commit_shas.map do |sha|
- create(:ci_empty_pipeline,
- project: project, sha: sha, ref: merge_request.source_branch)
- end
- end
-
- it 'returns all pipelines' do
- expect(subject.all).not_to be_empty
- expect(subject.all).to eq(all_pipelines.reverse)
- end
- end
-
- context 'with single merge_request_diffs' do
- it_behaves_like 'returning pipelines with proper ordering'
- end
-
- context 'with multiple irrelevant merge_request_diffs' do
- before do
- merge_request.update(target_branch: 'v1.0.0')
- end
-
- it_behaves_like 'returning pipelines with proper ordering'
- end
-
- context 'with unsaved merge request' do
- let(:merge_request) { build(:merge_request) }
-
- let!(:pipeline) do
- create(:ci_empty_pipeline, project: project,
- sha: merge_request.diff_head_sha, ref: merge_request.source_branch)
- end
-
- it 'returns pipelines from diff_head_sha' do
- expect(subject.all).to contain_exactly(pipeline)
- end
- end
-
- context 'when pipelines exist for the branch and merge request' do
- let(:source_ref) { 'feature' }
- let(:target_ref) { 'master' }
-
- let!(:branch_pipeline) do
- create(:ci_pipeline, source: :push, project: project,
- ref: source_ref, sha: shas.second)
- end
-
- let!(:tag_pipeline) do
- create(:ci_pipeline, project: project, ref: source_ref, tag: true)
- end
-
- let!(:detached_merge_request_pipeline) do
- create(:ci_pipeline, source: :merge_request_event, project: project,
- ref: source_ref, sha: shas.second, merge_request: merge_request)
- end
-
- let(:merge_request) do
- create(:merge_request, source_project: project, source_branch: source_ref,
- target_project: project, target_branch: target_ref)
- end
-
- let(:project) { create(:project, :repository) }
- let(:shas) { project.repository.commits(source_ref, limit: 2).map(&:id) }
-
- before do
- create(:merge_request_diff_commit,
- merge_request_diff: merge_request.merge_request_diff,
- sha: shas.second, relative_order: 1)
- end
-
- it 'returns merge request pipeline first' do
- expect(subject.all).to eq([detached_merge_request_pipeline, branch_pipeline])
- end
-
- context 'when there are a branch pipeline and a merge request pipeline' do
- let!(:branch_pipeline_2) do
- create(:ci_pipeline, source: :push, project: project,
- ref: source_ref, sha: shas.first)
- end
-
- let!(:detached_merge_request_pipeline_2) do
- create(:ci_pipeline, source: :merge_request_event, project: project,
- ref: source_ref, sha: shas.first, merge_request: merge_request)
- end
-
- it 'returns merge request pipelines first' do
- expect(subject.all)
- .to eq([detached_merge_request_pipeline_2,
- detached_merge_request_pipeline,
- branch_pipeline_2,
- branch_pipeline])
- end
- end
-
- context 'when there are multiple merge request pipelines from the same branch' do
- let!(:branch_pipeline_2) do
- create(:ci_pipeline, source: :push, project: project,
- ref: source_ref, sha: shas.first)
- end
-
- let!(:detached_merge_request_pipeline_2) do
- create(:ci_pipeline, source: :merge_request_event, project: project,
- ref: source_ref, sha: shas.first, merge_request: merge_request_2)
- end
-
- let(:merge_request_2) do
- create(:merge_request, source_project: project, source_branch: source_ref,
- target_project: project, target_branch: 'stable')
- end
-
- before do
- shas.each.with_index do |sha, index|
- create(:merge_request_diff_commit,
- merge_request_diff: merge_request_2.merge_request_diff,
- sha: sha, relative_order: index)
- end
- end
-
- it 'returns only related merge request pipelines' do
- expect(subject.all)
- .to eq([detached_merge_request_pipeline,
- branch_pipeline_2,
- branch_pipeline])
-
- expect(described_class.new(merge_request_2).all)
- .to eq([detached_merge_request_pipeline_2,
- branch_pipeline_2,
- branch_pipeline])
- end
- end
-
- context 'when detached merge request pipeline is run on head ref of the merge request' do
- let!(:detached_merge_request_pipeline) do
- create(:ci_pipeline, source: :merge_request_event, project: project,
- ref: merge_request.ref_path, sha: shas.second, merge_request: merge_request)
- end
-
- it 'sets the head ref of the merge request to the pipeline ref' do
- expect(detached_merge_request_pipeline.ref).to match(%r{refs/merge-requests/\d+/head})
- end
-
- it 'includes the detached merge request pipeline even though the ref is custom path' do
- expect(merge_request.all_pipelines).to include(detached_merge_request_pipeline)
- end
- end
- end
- end
-end
diff --git a/spec/models/merge_request_diff_commit_spec.rb b/spec/models/merge_request_diff_commit_spec.rb
index a296122ae09..8b51c6fae08 100644
--- a/spec/models/merge_request_diff_commit_spec.rb
+++ b/spec/models/merge_request_diff_commit_spec.rb
@@ -6,7 +6,10 @@ describe MergeRequestDiffCommit do
let(:merge_request) { create(:merge_request) }
let(:project) { merge_request.project }
- it_behaves_like 'a BulkInsertSafe model', MergeRequestDiffCommit
+ it_behaves_like 'a BulkInsertSafe model', MergeRequestDiffCommit do
+ let(:valid_items_for_bulk_insertion) { build_list(:merge_request_diff_commit, 10) }
+ let(:invalid_items_for_bulk_insertion) { [] } # class does not have any validations defined
+ end
describe '#to_hash' do
subject { merge_request.commits.first }
diff --git a/spec/models/merge_request_diff_file_spec.rb b/spec/models/merge_request_diff_file_spec.rb
index 6ecbc5bf832..40f7be5dc8f 100644
--- a/spec/models/merge_request_diff_file_spec.rb
+++ b/spec/models/merge_request_diff_file_spec.rb
@@ -3,7 +3,10 @@
require 'spec_helper'
describe MergeRequestDiffFile do
- it_behaves_like 'a BulkInsertSafe model', MergeRequestDiffFile
+ it_behaves_like 'a BulkInsertSafe model', MergeRequestDiffFile do
+ let(:valid_items_for_bulk_insertion) { build_list(:merge_request_diff_file, 10) }
+ let(:invalid_items_for_bulk_insertion) { [] } # class does not have any validations defined
+ end
describe '#diff' do
context 'when diff is not stored' do
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 36fd5d21e73..137795dcbc3 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -276,6 +276,8 @@ describe MergeRequest do
end
describe 'respond to' do
+ subject { build(:merge_request) }
+
it { is_expected.to respond_to(:unchecked?) }
it { is_expected.to respond_to(:checking?) }
it { is_expected.to respond_to(:can_be_merged?) }
@@ -662,13 +664,12 @@ describe MergeRequest do
end
describe '#raw_diffs' do
- let(:merge_request) { build(:merge_request) }
let(:options) { { paths: ['a/b', 'b/a', 'c/*'] } }
context 'when there are MR diffs' do
- it 'delegates to the MR diffs' do
- merge_request.merge_request_diff = MergeRequestDiff.new
+ let(:merge_request) { create(:merge_request, :with_diffs) }
+ it 'delegates to the MR diffs' do
expect(merge_request.merge_request_diff).to receive(:raw_diffs).with(options)
merge_request.raw_diffs(options)
@@ -676,6 +677,8 @@ describe MergeRequest do
end
context 'when there are no MR diffs' do
+ let(:merge_request) { build(:merge_request) }
+
it 'delegates to the compare object' do
merge_request.compare = double(:compare)
@@ -905,6 +908,16 @@ describe MergeRequest do
end
end
+ describe '#new_paths' do
+ let(:merge_request) do
+ create(:merge_request, source_branch: 'expand-collapse-files', target_branch: 'master')
+ end
+
+ it 'returns new path of changed files' do
+ expect(merge_request.new_paths.count).to eq(105)
+ end
+ end
+
describe "#related_notes" do
let!(:merge_request) { create(:merge_request) }
@@ -1578,6 +1591,24 @@ describe MergeRequest do
end
end
+ describe '#has_coverage_reports?' do
+ subject { merge_request.has_coverage_reports? }
+
+ let(:project) { create(:project, :repository) }
+
+ context 'when head pipeline has coverage reports' do
+ let(:merge_request) { create(:merge_request, :with_coverage_reports, source_project: project) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when head pipeline does not have coverage reports' do
+ let(:merge_request) { create(:merge_request, source_project: project) }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
describe '#calculate_reactive_cache' do
let(:project) { create(:project, :repository) }
let(:merge_request) { create(:merge_request, source_project: project) }
@@ -1660,6 +1691,60 @@ describe MergeRequest do
end
end
+ describe '#find_coverage_reports' do
+ let(:project) { create(:project, :repository) }
+ let(:merge_request) { create(:merge_request, :with_coverage_reports, source_project: project) }
+ let(:pipeline) { merge_request.head_pipeline }
+
+ subject { merge_request.find_coverage_reports }
+
+ context 'when head pipeline has coverage reports' do
+ let!(:job) do
+ create(:ci_build, options: { artifacts: { reports: { cobertura: ['cobertura-coverage.xml'] } } }, pipeline: pipeline)
+ end
+
+ let!(:artifacts_metadata) { create(:ci_job_artifact, :metadata, job: job) }
+
+ context 'when reactive cache worker is parsing results asynchronously' do
+ it 'returns status' do
+ expect(subject[:status]).to eq(:parsing)
+ end
+ end
+
+ context 'when reactive cache worker is inline' do
+ before do
+ synchronous_reactive_cache(merge_request)
+ end
+
+ it 'returns status and data' do
+ expect(subject[:status]).to eq(:parsed)
+ end
+
+ context 'when an error occurs' do
+ before do
+ merge_request.update!(head_pipeline: nil)
+ end
+
+ it 'returns an error message' do
+ expect(subject[:status]).to eq(:error)
+ end
+ end
+
+ context 'when cached results is not latest' do
+ before do
+ allow_next_instance_of(Ci::GenerateCoverageReportsService) do |service|
+ allow(service).to receive(:latest?).and_return(false)
+ end
+ end
+
+ it 'raises an InvalidateReactiveCache error' do
+ expect { subject }.to raise_error(ReactiveCaching::InvalidateReactiveCache)
+ end
+ end
+ end
+ end
+ end
+
describe '#compare_test_reports' do
subject { merge_request.compare_test_reports }
diff --git a/spec/models/milestone_note_spec.rb b/spec/models/milestone_note_spec.rb
new file mode 100644
index 00000000000..9e77ef91bb2
--- /dev/null
+++ b/spec/models/milestone_note_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe MilestoneNote do
+ describe '.from_event' do
+ let(:author) { create(:user) }
+ let(:project) { create(:project, :repository) }
+ let(:noteable) { create(:issue, author: author, project: project) }
+ let(:event) { create(:resource_milestone_event, issue: noteable) }
+
+ subject { described_class.from_event(event, resource: noteable, resource_parent: project) }
+
+ it_behaves_like 'a system note', exclude_project: true do
+ let(:action) { 'milestone' }
+ end
+ end
+end
diff --git a/spec/models/milestone_spec.rb b/spec/models/milestone_spec.rb
index 04587ef4240..ee4c35ebddd 100644
--- a/spec/models/milestone_spec.rb
+++ b/spec/models/milestone_spec.rb
@@ -3,6 +3,18 @@
require 'spec_helper'
describe Milestone do
+ describe 'MilestoneStruct#serializable_hash' do
+ let(:predefined_milestone) { described_class::MilestoneStruct.new('Test Milestone', '#test', 1) }
+
+ it 'presents the predefined milestone as a hash' do
+ expect(predefined_milestone.serializable_hash).to eq(
+ title: predefined_milestone.title,
+ name: predefined_milestone.name,
+ id: predefined_milestone.id
+ )
+ end
+ end
+
describe 'modules' do
context 'with a project' do
it_behaves_like 'AtomicInternalId' do
@@ -179,6 +191,16 @@ describe Milestone do
end
end
+ describe '.predefined_id?' do
+ it 'returns true for a predefined Milestone ID' do
+ expect(Milestone.predefined_id?(described_class::Upcoming.id)).to be true
+ end
+
+ it 'returns false for a Milestone ID that is not predefined' do
+ expect(Milestone.predefined_id?(milestone.id)).to be false
+ end
+ end
+
describe '.order_by_name_asc' do
it 'sorts by name ascending' do
milestone1 = create(:milestone, title: 'Foo')
@@ -209,17 +231,17 @@ describe Milestone do
describe "#percent_complete" do
it "does not count open issues" do
milestone.issues << issue
- expect(milestone.percent_complete(user)).to eq(0)
+ expect(milestone.percent_complete).to eq(0)
end
it "counts closed issues" do
issue.close
milestone.issues << issue
- expect(milestone.percent_complete(user)).to eq(100)
+ expect(milestone.percent_complete).to eq(100)
end
it "recovers from dividing by zero" do
- expect(milestone.percent_complete(user)).to eq(0)
+ expect(milestone.percent_complete).to eq(0)
end
end
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 740385bbd54..78b93b303f7 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -201,6 +201,26 @@ describe Namespace do
expect(described_class.find_by_pages_host(host)).to eq(namespace)
end
+ context 'when there is non-top-level group with searched name' do
+ before do
+ create(:group, :nested, path: 'pages')
+ end
+
+ it 'ignores this group' do
+ host = "pages.#{Settings.pages.host.upcase}"
+
+ expect(described_class.find_by_pages_host(host)).to be_nil
+ end
+
+ it 'finds the right top-level group' do
+ group = create(:group, path: 'pages')
+
+ host = "pages.#{Settings.pages.host.upcase}"
+
+ expect(described_class.find_by_pages_host(host)).to eq(group)
+ end
+ end
+
it "returns no result if the provided host is not subdomain of the Pages host" do
create(:namespace, name: 'namespace.io')
host = "namespace.io"
@@ -531,6 +551,41 @@ describe Namespace do
end
end
+ describe "#default_branch_protection" do
+ let(:namespace) { create(:namespace) }
+ let(:default_branch_protection) { nil }
+ let(:group) { create(:group, default_branch_protection: default_branch_protection) }
+
+ before do
+ stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_DEV_CAN_MERGE)
+ end
+
+ context 'for a namespace' do
+ # Unlike a group, the settings of a namespace cannot be altered
+ # via the UI or the API.
+
+ it 'returns the instance level setting' do
+ expect(namespace.default_branch_protection).to eq(Gitlab::Access::PROTECTION_DEV_CAN_MERGE)
+ end
+ end
+
+ context 'for a group' do
+ context 'that has not altered the default value' do
+ it 'returns the instance level setting' do
+ expect(group.default_branch_protection).to eq(Gitlab::Access::PROTECTION_DEV_CAN_MERGE)
+ end
+ end
+
+ context 'that has altered the default value' do
+ let(:default_branch_protection) { Gitlab::Access::PROTECTION_FULL }
+
+ it 'returns the group level setting' do
+ expect(group.default_branch_protection).to eq(default_branch_protection)
+ end
+ end
+ end
+ end
+
describe '#self_and_hierarchy' do
let!(:group) { create(:group, path: 'git_lab') }
let!(:nested_group) { create(:group, parent: group) }
@@ -983,6 +1038,24 @@ describe Namespace do
expect(virtual_domain.lookup_paths).not_to be_empty
end
end
+
+ it 'preloads project_feature and route' do
+ project2 = create(:project, namespace: namespace)
+ project3 = create(:project, namespace: namespace)
+
+ project.mark_pages_as_deployed
+ project2.mark_pages_as_deployed
+ project3.mark_pages_as_deployed
+
+ virtual_domain = namespace.pages_virtual_domain
+
+ queries = ActiveRecord::QueryRecorder.new { virtual_domain.lookup_paths }
+
+ # 1 to load projects
+ # 1 to preload project features
+ # 1 to load routes
+ expect(queries.count).to eq(3)
+ end
end
end
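The three-query expectation above follows the usual preloading pattern; a sketch of what it implies (illustration only, using the association names named in the example's comments):

    # One query for the projects, one for their project_features, one for their routes.
    projects = namespace.projects.preload(:project_feature, :route)
    projects.map { |project| [project.full_path, project.project_feature, project.route] }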
diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb
index a50608a17b6..74ec74e0def 100644
--- a/spec/models/note_spec.rb
+++ b/spec/models/note_spec.rb
@@ -270,43 +270,69 @@ describe Note do
end
end
- describe "confidential?" do
- it "delegates to noteable" do
- issue_note = build(:note, :on_issue)
- confidential_note = build(:note, noteable: create(:issue, confidential: true))
+ describe '#confidential?' do
+ context 'when note is not confidential' do
+ it 'is true when a noteable is confidential' do
+ issue = create(:issue, :confidential)
+ note = build(:note, noteable: issue, project: issue.project)
- expect(issue_note.confidential?).to be_falsy
- expect(confidential_note.confidential?).to be_truthy
+ expect(note.confidential?).to be_truthy
+ end
+
+ it 'is false when a noteable is not confidential' do
+ issue = create(:issue, confidential: false)
+ note = build(:note, noteable: issue, project: issue.project)
+
+ expect(note.confidential?).to be_falsy
+ end
+
+ it "is falsey when noteable can't be confidential" do
+ commit_note = build(:note_on_commit)
+
+ expect(commit_note.confidential?).to be_falsy
+ end
end
+ context 'when note is confidential' do
+ it 'is true even when a noteable is not confidential' do
+ issue = create(:issue, confidential: false)
+ note = build(:note, :confidential, noteable: issue, project: issue.project)
- it "is falsey when noteable can't be confidential" do
- commit_note = build(:note_on_commit)
- expect(commit_note.confidential?).to be_falsy
+ expect(note.confidential?).to be_truthy
+ end
end
end
- describe "#visible_for?" do
- using RSpec::Parameterized::TableSyntax
+ describe "#system_note_with_references_visible_for?" do
+ let(:project) { create(:project, :public) }
+ let(:user) { create(:user) }
+ let(:guest) { create(:project_member, :guest, project: project, user: create(:user)).user }
+ let(:reporter) { create(:project_member, :reporter, project: project, user: create(:user)).user }
+ let(:maintainer) { create(:project_member, :maintainer, project: project, user: create(:user)).user }
+ let(:non_member) { create(:user) }
- let_it_be(:note) { create(:note) }
- let_it_be(:user) { create(:user) }
+ let(:note) { create(:note, project: project) }
- where(:cross_reference_visible, :system_note_viewable, :result) do
- true | true | false
- false | true | true
- false | false | false
+ context 'when project is public' do
+ it_behaves_like 'users with note access' do
+ let(:users) { [reporter, maintainer, guest, non_member, nil] }
+ end
end
- with_them do
- it "returns expected result" do
- expect(note).to receive(:cross_reference_not_visible_for?).and_return(cross_reference_visible)
+ context 'when group is private' do
+ let(:project) { create(:project, :private) }
- unless cross_reference_visible
- expect(note).to receive(:system_note_viewable_by?)
- .with(user).and_return(system_note_viewable)
- end
+ it_behaves_like 'users with note access' do
+ let(:users) { [reporter, maintainer, guest] }
+ end
- expect(note.visible_for?(user)).to eq result
+ it 'returns visible but not readable for non-member user' do
+ expect(note.system_note_with_references_visible_for?(non_member)).to be_truthy
+ expect(note.readable_by?(non_member)).to be_falsy
+ end
+
+ it 'returns visible but not readable for a nil user' do
+ expect(note.system_note_with_references_visible_for?(nil)).to be_truthy
+ expect(note.readable_by?(nil)).to be_falsy
end
end
end
@@ -349,7 +375,7 @@ describe Note do
end
end
- describe "cross_reference_not_visible_for?" do
+ describe "system_note_with_references_visible_for?" do
let_it_be(:private_user) { create(:user) }
let_it_be(:private_project) { create(:project, namespace: private_user.namespace) { |p| p.add_maintainer(private_user) } }
let_it_be(:private_issue) { create(:issue, project: private_project) }
@@ -359,11 +385,11 @@ describe Note do
shared_examples "checks references" do
it "returns true" do
- expect(note.cross_reference_not_visible_for?(ext_issue.author)).to be_truthy
+ expect(note.system_note_with_references_visible_for?(ext_issue.author)).to be_falsy
end
it "returns false" do
- expect(note.cross_reference_not_visible_for?(private_user)).to be_falsy
+ expect(note.system_note_with_references_visible_for?(private_user)).to be_truthy
end
it "returns false if user visible reference count set" do
@@ -371,14 +397,14 @@ describe Note do
note.total_reference_count = 1
expect(note).not_to receive(:reference_mentionables)
- expect(note.cross_reference_not_visible_for?(ext_issue.author)).to be_falsy
+ expect(note.system_note_with_references_visible_for?(ext_issue.author)).to be_truthy
end
it "returns true if ref count is 0" do
note.user_visible_reference_count = 0
expect(note).not_to receive(:reference_mentionables)
- expect(note.cross_reference_not_visible_for?(ext_issue.author)).to be_truthy
+ expect(note.system_note_with_references_visible_for?(ext_issue.author)).to be_falsy
end
end
@@ -423,16 +449,16 @@ describe Note do
note.total_reference_count = 2
expect(note).not_to receive(:reference_mentionables)
- expect(note.cross_reference_not_visible_for?(ext_issue.author)).to be_truthy
+ expect(note.system_note_with_references_visible_for?(ext_issue.author)).to be_falsy
end
end
end
- describe '#cross_reference?' do
+ describe '#system_note_with_references?' do
it 'is falsey for user-generated notes' do
note = create(:note, system: false)
- expect(note.cross_reference?).to be_falsy
+ expect(note.system_note_with_references?).to be_falsy
end
context 'when the note might contain cross references' do
@@ -443,7 +469,7 @@ describe Note do
it 'delegates to the cross-reference regex' do
expect(note).to receive(:matches_cross_reference_regex?).and_return(false)
- note.cross_reference?
+ note.system_note_with_references?
end
end
end
@@ -453,8 +479,8 @@ describe Note do
let(:label_note) { build(:note, note: 'added ~2323232323', system: true) }
it 'scans for a `mentioned in` prefix' do
- expect(commit_note.cross_reference?).to be_truthy
- expect(label_note.cross_reference?).to be_falsy
+ expect(commit_note.system_note_with_references?).to be_truthy
+ expect(label_note.system_note_with_references?).to be_falsy
end
end
@@ -468,7 +494,7 @@ describe Note do
it 'delegates to the system note service' do
expect(SystemNotes::IssuablesService).to receive(:cross_reference?).with(note.note)
- note.cross_reference?
+ note.system_note_with_references?
end
end
@@ -480,7 +506,7 @@ describe Note do
it 'delegates to the cross-reference regex' do
expect(note).to receive(:matches_cross_reference_regex?)
- note.cross_reference?
+ note.system_note_with_references?
end
end
@@ -489,13 +515,13 @@ describe Note do
it_behaves_like 'system_note_metadata includes note action'
- it { expect(note.cross_reference?).to be_falsy }
+ it { expect(note.system_note_with_references?).to be_falsy }
context 'with cross reference label note' do
let(:label) { create(:label, project: issue.project)}
let(:note) { create(:system_note, note: "added #{label.to_reference} label", noteable: issue, project: issue.project) }
- it { expect(note.cross_reference?).to be_truthy }
+ it { expect(note.system_note_with_references?).to be_truthy }
end
end
@@ -504,13 +530,13 @@ describe Note do
it_behaves_like 'system_note_metadata includes note action'
- it { expect(note.cross_reference?).to be_falsy }
+ it { expect(note.system_note_with_references?).to be_falsy }
context 'with cross reference milestone note' do
let(:milestone) { create(:milestone, project: issue.project)}
let(:note) { create(:system_note, note: "added #{milestone.to_reference} milestone", noteable: issue, project: issue.project) }
- it { expect(note.cross_reference?).to be_truthy }
+ it { expect(note.system_note_with_references?).to be_truthy }
end
end
end
@@ -1221,5 +1247,69 @@ describe Note do
expect(notes.second.id).to eq(note2.id)
end
end
+
+ describe '#noteable_assignee_or_author' do
+ let(:user) { create(:user) }
+ let(:noteable) { create(:issue) }
+ let(:note) { create(:note, project: noteable.project, noteable: noteable) }
+
+ subject { note.noteable_assignee_or_author?(user) }
+
+ shared_examples 'assignee check' do
+ context 'when the provided user is one of the assignees' do
+ before do
+ note.noteable.update(assignees: [user, create(:user)])
+ end
+
+ it 'returns true' do
+ expect(subject).to be_truthy
+ end
+ end
+ end
+
+ shared_examples 'author check' do
+ context 'when the provided user is the author' do
+ before do
+ note.noteable.update(author: user)
+ end
+
+ it 'returns true' do
+ expect(subject).to be_truthy
+ end
+ end
+
+ context 'when the provided user is neither author nor assignee' do
+ it 'returns false' do
+ expect(subject).to be_falsey
+ end
+ end
+ end
+
+ context 'when user is nil' do
+ let(:user) { nil }
+
+ it 'returns false' do
+ expect(subject).to be_falsey
+ end
+ end
+
+ context 'when noteable is an issue' do
+ it_behaves_like 'author check'
+ it_behaves_like 'assignee check'
+ end
+
+ context 'when noteable is a merge request' do
+ let(:noteable) { create(:merge_request) }
+
+ it_behaves_like 'author check'
+ it_behaves_like 'assignee check'
+ end
+
+ context 'when noteable is a snippet' do
+ let(:noteable) { create(:personal_snippet) }
+
+ it_behaves_like 'author check'
+ end
+ end
end
end
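A sketch consistent with the shared examples for #noteable_assignee_or_author? above (an assumption about the implementation, shown only to make the expected behaviour concrete):

    def noteable_assignee_or_author?(user)
      return false unless user && noteable
      return true if noteable.author_id == user.id

      # Snippets have no assignees, so only the author check applies to them.
      noteable.respond_to?(:assignees) && noteable.assignees.include?(user)
    end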
diff --git a/spec/models/notification_recipient_spec.rb b/spec/models/notification_recipient_spec.rb
index f6a36dbb3fc..05aeafaa4d4 100644
--- a/spec/models/notification_recipient_spec.rb
+++ b/spec/models/notification_recipient_spec.rb
@@ -176,8 +176,20 @@ describe NotificationRecipient do
)
end
- before do
- notification_setting.update!(failed_pipeline: true)
+ it 'returns true' do
+ expect(recipient.suitable_notification_level?).to eq true
+ end
+ end
+
+ context "when action is fixed_pipeline" do
+ let(:recipient) do
+ described_class.new(
+ user,
+ :watch,
+ custom_action: :fixed_pipeline,
+ target: target,
+ project: project
+ )
end
it 'returns true' do
@@ -185,7 +197,7 @@ describe NotificationRecipient do
end
end
- context "when action is not failed_pipeline" do
+ context "when action is not fixed_pipeline or failed_pipeline" do
let(:recipient) do
described_class.new(
user,
@@ -196,10 +208,6 @@ describe NotificationRecipient do
)
end
- before do
- notification_setting.update!(success_pipeline: true)
- end
-
it 'returns false' do
expect(recipient.suitable_notification_level?).to eq false
end
@@ -309,6 +317,26 @@ describe NotificationRecipient do
expect(recipient.suitable_notification_level?).to eq false
end
end
+
+ context 'when custom_action is fixed_pipeline and success_pipeline event is enabled' do
+ let(:recipient) do
+ described_class.new(
+ user,
+ :watch,
+ custom_action: :fixed_pipeline,
+ target: target,
+ project: project
+ )
+ end
+
+ before do
+ notification_setting.update!(success_pipeline: true)
+ end
+
+ it 'returns true' do
+ expect(recipient.suitable_notification_level?).to eq true
+ end
+ end
end
end
diff --git a/spec/models/notification_setting_spec.rb b/spec/models/notification_setting_spec.rb
index 094c60e3e09..9ab9ae494ec 100644
--- a/spec/models/notification_setting_spec.rb
+++ b/spec/models/notification_setting_spec.rb
@@ -110,7 +110,8 @@ RSpec.describe NotificationSetting do
:reassign_merge_request,
:merge_merge_request,
:failed_pipeline,
- :success_pipeline
+ :success_pipeline,
+ :fixed_pipeline
)
end
diff --git a/spec/models/pages_domain_spec.rb b/spec/models/pages_domain_spec.rb
index 3b7caeae9f2..4bf56e7b28b 100644
--- a/spec/models/pages_domain_spec.rb
+++ b/spec/models/pages_domain_spec.rb
@@ -9,6 +9,7 @@ describe PagesDomain do
describe 'associations' do
it { is_expected.to belong_to(:project) }
+ it { is_expected.to have_many(:serverless_domain_clusters) }
end
describe 'validate domain' do
@@ -642,4 +643,12 @@ describe PagesDomain do
end
end
end
+
+ describe '.find_by_domain_case_insensitive' do
+ it 'lookup is case-insensitive' do
+ pages_domain = create(:pages_domain, domain: "Pages.IO")
+
+ expect(PagesDomain.find_by_domain_case_insensitive('pages.io')).to eq(pages_domain)
+ end
+ end
end
diff --git a/spec/models/project_export_job_spec.rb b/spec/models/project_export_job_spec.rb
new file mode 100644
index 00000000000..dc39d0e401d
--- /dev/null
+++ b/spec/models/project_export_job_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ProjectExportJob, type: :model do
+ let(:project) { create(:project) }
+ let!(:job1) { create(:project_export_job, project: project, status: 0) }
+ let!(:job2) { create(:project_export_job, project: project, status: 2) }
+
+ describe 'associations' do
+ it { expect(job1).to belong_to(:project) }
+ end
+
+ describe 'validations' do
+ it { expect(job1).to validate_presence_of(:project) }
+ it { expect(job1).to validate_presence_of(:jid) }
+ it { expect(job1).to validate_presence_of(:status) }
+ end
+end
diff --git a/spec/models/project_import_state_spec.rb b/spec/models/project_import_state_spec.rb
index babde7b0670..157477767af 100644
--- a/spec/models/project_import_state_spec.rb
+++ b/spec/models/project_import_state_spec.rb
@@ -23,8 +23,7 @@ describe ProjectImportState, type: :model do
# Works around https://github.com/rspec/rspec-mocks/issues/910
allow(Project).to receive(:find).with(project.id).and_return(project)
- expect(project.repository).to receive(:after_import).and_call_original
- expect(project.wiki.repository).to receive(:after_import).and_call_original
+ expect(project).to receive(:after_import).and_call_original
end
it 'imports a project', :sidekiq_might_not_need_inline do
diff --git a/spec/models/project_services/chat_message/push_message_spec.rb b/spec/models/project_services/chat_message/push_message_spec.rb
index fe0b2fe3440..9d990508ab2 100644
--- a/spec/models/project_services/chat_message/push_message_spec.rb
+++ b/spec/models/project_services/chat_message/push_message_spec.rb
@@ -22,8 +22,14 @@ describe ChatMessage::PushMessage do
context 'push' do
before do
args[:commits] = [
- { message: 'message1', url: 'http://url1.com', id: 'abcdefghijkl', author: { name: 'author1' } },
- { message: "message2\nsecondline", url: 'http://url2.com', id: '123456789012', author: { name: 'author2' } }
+ { message: 'message1', title: 'message1', url: 'http://url1.com', id: 'abcdefghijkl', author: { name: 'author1' } },
+ {
+ message: 'message2' + ' w' * 100 + "\nsecondline",
+ title: 'message2 w w w w w w w w w w w w w w w w w w w w w w w w w w w w w w w w w w ...',
+ url: 'http://url2.com',
+ id: '123456789012',
+ author: { name: 'author2' }
+ }
]
end
@@ -34,7 +40,7 @@ describe ChatMessage::PushMessage do
'<http://url.com|project_name> (<http://url.com/compare/before...after|Compare changes>)')
expect(subject.attachments).to eq([{
text: "<http://url1.com|abcdefgh>: message1 - author1\n\n"\
- "<http://url2.com|12345678>: message2\nsecondline - author2",
+ "<http://url2.com|12345678>: message2 w w w w w w w w w w w w w w w w w w w w w w w w w w w w w w w w w w ... - author2",
color: color
}])
end
@@ -49,27 +55,7 @@ describe ChatMessage::PushMessage do
expect(subject.pretext).to eq(
'test.user pushed to branch [master](http://url.com/commits/master) of [project_name](http://url.com) ([Compare changes](http://url.com/compare/before...after))')
expect(subject.attachments).to eq(
- "[abcdefgh](http://url1.com): message1 - author1\n\n[12345678](http://url2.com): message2\nsecondline - author2")
- expect(subject.activity).to eq(
- title: 'test.user pushed to branch [master](http://url.com/commits/master)',
- subtitle: 'in [project_name](http://url.com)',
- text: '[Compare changes](http://url.com/compare/before...after)',
- image: 'http://someavatar.com'
- )
- end
- end
-
- context 'with markdown and commit message html' do
- before do
- args[:commit_message_html] = true
- args[:markdown] = true
- end
-
- it 'returns a message regarding pushes' do
- expect(subject.pretext).to eq(
- 'test.user pushed to branch [master](http://url.com/commits/master) of [project_name](http://url.com) ([Compare changes](http://url.com/compare/before...after))')
- expect(subject.attachments).to eq(
- "[abcdefgh](http://url1.com): message1 - author1<br/>\n<br/>\n[12345678](http://url2.com): message2<br/>\nsecondline - author2")
+ "[abcdefgh](http://url1.com): message1 - author1\n\n[12345678](http://url2.com): message2 w w w w w w w w w w w w w w w w w w w w w w w w w w w w w w w w w w ... - author2")
expect(subject.activity).to eq(
title: 'test.user pushed to branch [master](http://url.com/commits/master)',
subtitle: 'in [project_name](http://url.com)',
diff --git a/spec/models/project_services/chat_notification_service_spec.rb b/spec/models/project_services/chat_notification_service_spec.rb
index 45ea4cd74ed..64c7a9b230d 100644
--- a/spec/models/project_services/chat_notification_service_spec.rb
+++ b/spec/models/project_services/chat_notification_service_spec.rb
@@ -74,5 +74,28 @@ describe ChatNotificationService do
chat_service.execute(data)
end
end
+
+ shared_examples 'with channel specified' do |channel, expected_channels|
+ before do
+ allow(chat_service).to receive(:push_channel).and_return(channel)
+ end
+
+ it 'notifies all channels' do
+ expect(chat_service).to receive(:notify).with(any_args, hash_including(channel: expected_channels)).and_return(true)
+ expect(chat_service.execute(data)).to be(true)
+ end
+ end
+
+ context 'with single channel specified' do
+ it_behaves_like 'with channel specified', 'slack-integration', ['slack-integration']
+ end
+
+ context 'with multiple channel names specified' do
+ it_behaves_like 'with channel specified', 'slack-integration,#slack-test', ['slack-integration', '#slack-test']
+ end
+
+ context 'with multiple channel names with spaces specified' do
+ it_behaves_like 'with channel specified', 'slack-integration, #slack-test, @UDLP91W0A', ['slack-integration', '#slack-test', '@UDLP91W0A']
+ end
end
end
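The 'with channel specified' shared examples above expect a comma-separated channel string to fan out to every listed channel; a hedged sketch of that parsing (the helper name is hypothetical):

    def channels_from(push_channel)
      push_channel.to_s.split(',').map(&:strip).reject(&:empty?)
    end

    channels_from('slack-integration, #slack-test, @UDLP91W0A')
    # => ["slack-integration", "#slack-test", "@UDLP91W0A"]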
diff --git a/spec/models/project_services/gitlab_issue_tracker_service_spec.rb b/spec/models/project_services/gitlab_issue_tracker_service_spec.rb
index defebcee9c6..7f1c6224b7d 100644
--- a/spec/models/project_services/gitlab_issue_tracker_service_spec.rb
+++ b/spec/models/project_services/gitlab_issue_tracker_service_spec.rb
@@ -33,9 +33,9 @@ describe GitlabIssueTrackerService do
end
it 'gives the correct path' do
- expect(service.project_url).to eq("http://#{Gitlab.config.gitlab.host}/gitlab/root/#{project.full_path}/issues")
- expect(service.new_issue_url).to eq("http://#{Gitlab.config.gitlab.host}/gitlab/root/#{project.full_path}/issues/new")
- expect(service.issue_url(432)).to eq("http://#{Gitlab.config.gitlab.host}/gitlab/root/#{project.full_path}/issues/432")
+ expect(service.project_url).to eq("http://#{Gitlab.config.gitlab.host}/gitlab/root/#{project.full_path}/-/issues")
+ expect(service.new_issue_url).to eq("http://#{Gitlab.config.gitlab.host}/gitlab/root/#{project.full_path}/-/issues/new")
+ expect(service.issue_url(432)).to eq("http://#{Gitlab.config.gitlab.host}/gitlab/root/#{project.full_path}/-/issues/432")
end
end
@@ -45,9 +45,9 @@ describe GitlabIssueTrackerService do
end
it 'gives the correct path' do
- expect(service.issue_tracker_path).to eq("/gitlab/root/#{project.full_path}/issues")
- expect(service.new_issue_path).to eq("/gitlab/root/#{project.full_path}/issues/new")
- expect(service.issue_path(432)).to eq("/gitlab/root/#{project.full_path}/issues/432")
+ expect(service.issue_tracker_path).to eq("/gitlab/root/#{project.full_path}/-/issues")
+ expect(service.new_issue_path).to eq("/gitlab/root/#{project.full_path}/-/issues/new")
+ expect(service.issue_path(432)).to eq("/gitlab/root/#{project.full_path}/-/issues/432")
end
end
end
diff --git a/spec/models/project_services/jira_service_spec.rb b/spec/models/project_services/jira_service_spec.rb
index 832c19adf1d..32e6b5afce5 100644
--- a/spec/models/project_services/jira_service_spec.rb
+++ b/spec/models/project_services/jira_service_spec.rb
@@ -440,6 +440,27 @@ describe JiraService do
end
end
+ context 'when Remote Link already exists' do
+ let(:remote_link) do
+ double(
+ 'remote link',
+ object: {
+ url: "#{Gitlab.config.gitlab.url}/#{project.full_path}/-/commit/#{commit_id}"
+ }.with_indifferent_access
+ )
+ end
+
+ it 'does not create comment' do
+ allow(JIRA::Resource::Remotelink).to receive(:all).and_return([remote_link])
+
+ expect(remote_link).to receive(:save!)
+
+ @jira_service.close_issue(resource, ExternalIssue.new('JIRA-123', project))
+
+ expect(WebMock).not_to have_requested(:post, @comment_url)
+ end
+ end
+
it 'does not send comment or remote links to issues already closed' do
allow_any_instance_of(JIRA::Resource::Issue).to receive(:resolution).and_return(true)
@@ -489,7 +510,14 @@ describe JiraService do
@jira_service.close_issue(resource, ExternalIssue.new('JIRA-123', project))
- expect(@jira_service).to have_received(:log_error).with("Issue transition failed", error: "Bad Request", client_url: "http://jira.example.com")
+ expect(@jira_service).to have_received(:log_error).with(
+ "Issue transition failed",
+ error: hash_including(
+ exception_class: 'StandardError',
+ exception_message: "Bad Request"
+ ),
+ client_url: "http://jira.example.com"
+ )
end
it 'calls the api with jira_issue_transition_id' do
diff --git a/spec/models/project_services/prometheus_service_spec.rb b/spec/models/project_services/prometheus_service_spec.rb
index 1922bb065cf..fd4783a60f2 100644
--- a/spec/models/project_services/prometheus_service_spec.rb
+++ b/spec/models/project_services/prometheus_service_spec.rb
@@ -176,6 +176,15 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
it 'returns true' do
expect(service.prometheus_available?).to be(true)
end
+
+ it 'avoids N+1 queries' do
+ service
+ 5.times do |i|
+ other_cluster = create(:cluster_for_group, :with_installed_helm, groups: [group], environment_scope: i)
+ create(:clusters_applications_prometheus, :installing, cluster: other_cluster)
+ end
+ expect { service.prometheus_available? }.not_to exceed_query_limit(1)
+ end
end
context 'cluster belongs to gitlab instance' do
diff --git a/spec/models/project_services/youtrack_service_spec.rb b/spec/models/project_services/youtrack_service_spec.rb
index 0067793f8d8..b8fff635e99 100644
--- a/spec/models/project_services/youtrack_service_spec.rb
+++ b/spec/models/project_services/youtrack_service_spec.rb
@@ -38,8 +38,8 @@ describe YoutrackService do
expect(described_class.reference_pattern.match('YT-123')[:issue]).to eq('YT-123')
end
- it 'does not allow issue number to be followed by a letter' do
- expect(described_class.reference_pattern.match('YT-123A')).to eq(nil)
+ it 'allows lowercase project key on the reference' do
+ expect(described_class.reference_pattern.match('yt-123')[:issue]).to eq('yt-123')
end
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 50409615cd0..ae97e5340e2 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -72,6 +72,7 @@ describe Project do
it { is_expected.to have_one(:project_setting) }
it { is_expected.to have_many(:commit_statuses) }
it { is_expected.to have_many(:ci_pipelines) }
+ it { is_expected.to have_many(:ci_refs) }
it { is_expected.to have_many(:builds) }
it { is_expected.to have_many(:build_trace_section_names)}
it { is_expected.to have_many(:runner_projects) }
@@ -1400,6 +1401,22 @@ describe Project do
expect(project.repository_storage).to eq('picked')
end
+
+ it 'picks from the latest available storage', :request_store do
+ stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
+ Gitlab::CurrentSettings.current_application_settings
+
+ settings = ApplicationSetting.last
+ settings.repository_storages = %w(picked)
+ settings.save!
+
+ expect(Gitlab::CurrentSettings.repository_storages).to eq(%w(default))
+
+ project
+
+ expect(project.repository.storage).to eq('picked')
+ expect(Gitlab::CurrentSettings.repository_storages).to eq(%w(picked))
+ end
end
context 'shared runners by default' do
@@ -1622,6 +1639,29 @@ describe Project do
end
end
+ describe '#default_branch_protected?' do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:project) { create(:project) }
+
+ subject { project.default_branch_protected? }
+
+ where(:default_branch_protection_level, :result) do
+ Gitlab::Access::PROTECTION_NONE | false
+ Gitlab::Access::PROTECTION_DEV_CAN_PUSH | false
+ Gitlab::Access::PROTECTION_DEV_CAN_MERGE | true
+ Gitlab::Access::PROTECTION_FULL | true
+ end
+
+ with_them do
+ before do
+ expect(project.namespace).to receive(:default_branch_protection).and_return(default_branch_protection_level)
+ end
+
+ it { is_expected.to eq(result) }
+ end
+ end
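A sketch implied by the table above (illustration only; the real method may differ):

    def default_branch_protected?
      level = namespace.default_branch_protection

      level == Gitlab::Access::PROTECTION_DEV_CAN_MERGE ||
        level == Gitlab::Access::PROTECTION_FULL
    end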
+
describe '#pages_url' do
let(:group) { create(:group, name: group_name) }
let(:project) { create(:project, namespace: group, name: project_name) }
@@ -1791,21 +1831,19 @@ describe Project do
let(:project) { create(:project, :repository) }
let(:repo) { double(:repo, exists?: true) }
let(:wiki) { double(:wiki, exists?: true) }
- let(:design) { double(:wiki, exists?: false) }
it 'expires the caches of the repository and wiki' do
+ # In EE, there are design repositories as well
+ allow(Repository).to receive(:new).and_call_original
+
allow(Repository).to receive(:new)
- .with('foo', project)
+ .with('foo', project, shard: project.repository_storage)
.and_return(repo)
allow(Repository).to receive(:new)
- .with('foo.wiki', project)
+ .with('foo.wiki', project, shard: project.repository_storage, repo_type: Gitlab::GlRepository::WIKI)
.and_return(wiki)
- allow(Repository).to receive(:new)
- .with('foo.design', project)
- .and_return(design)
-
expect(repo).to receive(:before_delete)
expect(wiki).to receive(:before_delete)
@@ -1899,30 +1937,15 @@ describe Project do
describe '#create_repository' do
let(:project) { create(:project, :repository) }
- let(:shell) { Gitlab::Shell.new }
-
- before do
- allow(project).to receive(:gitlab_shell).and_return(shell)
- end
context 'using a regular repository' do
it 'creates the repository' do
- expect(shell).to receive(:create_repository)
- .with(project.repository_storage, project.disk_path, project.full_path)
- .and_return(true)
-
- expect(project.repository).to receive(:after_create)
-
+ expect(project.repository).to receive(:create_repository)
expect(project.create_repository).to eq(true)
end
it 'adds an error if the repository could not be created' do
- expect(shell).to receive(:create_repository)
- .with(project.repository_storage, project.disk_path, project.full_path)
- .and_return(false)
-
- expect(project.repository).not_to receive(:after_create)
-
+ expect(project.repository).to receive(:create_repository) { raise 'Fail in test' }
expect(project.create_repository).to eq(false)
expect(project.errors).not_to be_empty
end
@@ -1931,7 +1954,7 @@ describe Project do
context 'using a forked repository' do
it 'does nothing' do
expect(project).to receive(:forked?).and_return(true)
- expect(shell).not_to receive(:create_repository)
+ expect(project.repository).not_to receive(:create_repository)
project.create_repository
end
@@ -1940,28 +1963,16 @@ describe Project do
describe '#ensure_repository' do
let(:project) { create(:project, :repository) }
- let(:shell) { Gitlab::Shell.new }
-
- before do
- allow(project).to receive(:gitlab_shell).and_return(shell)
- end
it 'creates the repository if it does not exist' do
- allow(project).to receive(:repository_exists?)
- .and_return(false)
-
- allow(shell).to receive(:create_repository)
- .with(project.repository_storage, project.disk_path, project.full_path)
- .and_return(true)
-
+ allow(project).to receive(:repository_exists?).and_return(false)
expect(project).to receive(:create_repository).with(force: true)
project.ensure_repository
end
it 'does not create the repository if it exists' do
- allow(project).to receive(:repository_exists?)
- .and_return(true)
+ allow(project).to receive(:repository_exists?).and_return(true)
expect(project).not_to receive(:create_repository)
@@ -1970,13 +1981,8 @@ describe Project do
it 'creates the repository if it is a fork' do
expect(project).to receive(:forked?).and_return(true)
-
- allow(project).to receive(:repository_exists?)
- .and_return(false)
-
- expect(shell).to receive(:create_repository)
- .with(project.repository_storage, project.disk_path, project.full_path)
- .and_return(true)
+ expect(project).to receive(:repository_exists?).and_return(false)
+ expect(project.repository).to receive(:create_repository) { true }
project.ensure_repository
end
@@ -2800,6 +2806,44 @@ describe Project do
end
end
+ describe '#change_repository_storage' do
+ let(:project) { create(:project, :repository) }
+ let(:read_only_project) { create(:project, :repository, repository_read_only: true) }
+
+ before do
+ stub_storage_settings('test_second_storage' => { 'path' => 'tmp/tests/extra_storage' })
+ end
+
+ it 'schedules the transfer of the repository to the new storage and locks the project' do
+ expect(ProjectUpdateRepositoryStorageWorker).to receive(:perform_async).with(project.id, 'test_second_storage')
+
+ project.change_repository_storage('test_second_storage')
+ project.save!
+
+ expect(project).to be_repository_read_only
+ end
+
+ it "doesn't schedule the transfer if the repository is already read-only" do
+ expect(ProjectUpdateRepositoryStorageWorker).not_to receive(:perform_async)
+
+ read_only_project.change_repository_storage('test_second_storage')
+ read_only_project.save!
+ end
+
+ it "doesn't lock or schedule the transfer if the storage hasn't changed" do
+ expect(ProjectUpdateRepositoryStorageWorker).not_to receive(:perform_async)
+
+ project.change_repository_storage(project.repository_storage)
+ project.save!
+
+ expect(project).not_to be_repository_read_only
+ end
+
+ it 'raises an error if an invalid repository storage is provided' do
+ expect { project.change_repository_storage('unknown') }.to raise_error(ArgumentError)
+ end
+ end
+
describe '#pushes_since_gc' do
let(:project) { create(:project) }
@@ -2931,6 +2975,19 @@ describe Project do
end
end
+ it 'memoizes the result by ref and environment', :request_store do
+ scoped_variable = create(:ci_variable, value: 'secret', project: project, environment_scope: 'scoped')
+
+ expect(project).to receive(:protected_for?).with('ref').once.and_return(true)
+ expect(project).to receive(:protected_for?).with('other').twice.and_return(false)
+
+ 2.times do
+ expect(project.reload.ci_variables_for(ref: 'ref', environment: 'production')).to contain_exactly(ci_variable, protected_variable)
+ expect(project.reload.ci_variables_for(ref: 'other')).to contain_exactly(ci_variable)
+ expect(project.reload.ci_variables_for(ref: 'other', environment: 'scoped')).to contain_exactly(ci_variable, scoped_variable)
+ end
+ end
+
context 'when the ref is not protected' do
before do
allow(project).to receive(:protected_for?).with('ref').and_return(false)
@@ -3884,6 +3941,12 @@ describe Project do
describe '#remove_export' do
let(:project) { create(:project, :with_export) }
+ before do
+ allow_next_instance_of(ProjectExportWorker) do |job|
+ allow(job).to receive(:jid).and_return(SecureRandom.hex(8))
+ end
+ end
+
it 'removes the export' do
project.remove_exports
@@ -4545,13 +4608,14 @@ describe Project do
let(:import_state) { create(:import_state, project: project) }
it 'runs the correct hooks' do
- expect(project.repository).to receive(:after_import)
- expect(project.wiki.repository).to receive(:after_import)
+ expect(project.repository).to receive(:expire_content_cache)
+ expect(project.wiki.repository).to receive(:expire_content_cache)
expect(import_state).to receive(:finish)
expect(project).to receive(:update_project_counter_caches)
expect(project).to receive(:after_create_default_branch)
expect(project).to receive(:refresh_markdown_cache!)
expect(InternalId).to receive(:flush_records!).with(project: project)
+ expect(DetectRepositoryLanguagesWorker).to receive(:perform_async).with(project.id)
project.after_import
end
@@ -4564,7 +4628,7 @@ describe Project do
end
it 'does not protect when branch protection is disabled' do
- stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_NONE)
+ expect(project.namespace).to receive(:default_branch_protection).and_return(Gitlab::Access::PROTECTION_NONE)
project.after_import
@@ -4572,7 +4636,7 @@ describe Project do
end
it "gives developer access to push when branch protection is set to 'developers can push'" do
- stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_DEV_CAN_PUSH)
+ expect(project.namespace).to receive(:default_branch_protection).and_return(Gitlab::Access::PROTECTION_DEV_CAN_PUSH)
project.after_import
@@ -4582,7 +4646,7 @@ describe Project do
end
it "gives developer access to merge when branch protection is set to 'developers can merge'" do
- stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_DEV_CAN_MERGE)
+ expect(project.namespace).to receive(:default_branch_protection).and_return(Gitlab::Access::PROTECTION_DEV_CAN_MERGE)
project.after_import
@@ -5619,6 +5683,53 @@ describe Project do
end
end
+ describe '#all_lfs_objects_oids' do
+ let(:project) { create(:project) }
+ let(:lfs_object) { create(:lfs_object) }
+ let(:another_lfs_object) { create(:lfs_object) }
+
+ subject { project.all_lfs_objects_oids }
+
+ context 'when project has associated LFS objects' do
+ before do
+ create(:lfs_objects_project, lfs_object: lfs_object, project: project)
+ create(:lfs_objects_project, lfs_object: another_lfs_object, project: project)
+ end
+
+ it 'returns OIDs of LFS objects' do
+ expect(subject).to match_array([lfs_object.oid, another_lfs_object.oid])
+ end
+
+ context 'and there are specified oids' do
+ subject { project.all_lfs_objects_oids(oids: [lfs_object.oid]) }
+
+ it 'returns OIDs of LFS objects that match specified oids' do
+ expect(subject).to eq([lfs_object.oid])
+ end
+ end
+ end
+
+ context 'when fork has associated LFS objects to itself and source' do
+ let(:source) { create(:project) }
+ let(:project) { fork_project(source) }
+
+ before do
+ create(:lfs_objects_project, lfs_object: lfs_object, project: source)
+ create(:lfs_objects_project, lfs_object: another_lfs_object, project: project)
+ end
+
+ it 'returns OIDs of LFS objects' do
+ expect(subject).to match_array([lfs_object.oid, another_lfs_object.oid])
+ end
+ end
+
+ context 'when project has no associated LFS objects' do
+ it 'returns empty array' do
+ expect(subject).to be_empty
+ end
+ end
+ end
+
describe '#lfs_objects_oids' do
let(:project) { create(:project) }
let(:lfs_object) { create(:lfs_object) }
@@ -5635,6 +5746,14 @@ describe Project do
it 'returns OIDs of LFS objects' do
expect(subject).to match_array([lfs_object.oid, another_lfs_object.oid])
end
+
+ context 'and there are specified oids' do
+ subject { project.lfs_objects_oids(oids: [lfs_object.oid]) }
+
+ it 'returns OIDs of LFS objects that match specified oids' do
+ expect(subject).to eq([lfs_object.oid])
+ end
+ end
end
context 'when project has no associated LFS objects' do
@@ -5684,6 +5803,86 @@ describe Project do
end
end
+ describe '#add_export_job' do
+ context 'if not already present' do
+ it 'starts project export job' do
+ user = create(:user)
+ project = build(:project)
+
+ expect(ProjectExportWorker).to receive(:perform_async).with(user.id, project.id, nil, {})
+
+ project.add_export_job(current_user: user)
+ end
+ end
+ end
+
+ describe '#export_in_progress?' do
+ let(:project) { build(:project) }
+ let!(:project_export_job) { create(:project_export_job, project: project) }
+
+ context 'when project export is enqueued' do
+ it { expect(project.export_in_progress?).to be false }
+ end
+
+ context 'when project export is in progress' do
+ before do
+ project_export_job.start!
+ end
+
+ it { expect(project.export_in_progress?).to be true }
+ end
+
+ context 'when project export is completed' do
+ before do
+ finish_job(project_export_job)
+ end
+
+ it { expect(project.export_in_progress?).to be false }
+ end
+ end
+
+ describe '#export_status' do
+ let(:project) { build(:project) }
+ let!(:project_export_job) { create(:project_export_job, project: project) }
+
+ context 'when project export is enqueued' do
+ it { expect(project.export_status).to eq :queued }
+ end
+
+ context 'when project export is in progress' do
+ before do
+ project_export_job.start!
+ end
+
+ it { expect(project.export_status).to eq :started }
+ end
+
+ context 'when project export is completed' do
+ before do
+ finish_job(project_export_job)
+ allow(project).to receive(:export_file).and_return(double(ImportExportUploader, file: 'exists.zip'))
+ end
+
+ it { expect(project.export_status).to eq :finished }
+ end
+
+ context 'when project export is being regenerated' do
+ let!(:new_project_export_job) { create(:project_export_job, project: project) }
+
+ before do
+ finish_job(project_export_job)
+ allow(project).to receive(:export_file).and_return(double(ImportExportUploader, file: 'exists.zip'))
+ end
+
+ it { expect(project.export_status).to eq :regeneration_in_progress }
+ end
+ end
+
+ def finish_job(export_job)
+ export_job.start
+ export_job.finish
+ end
+
def rugged_config
rugged_repo(project.repository).config
end
diff --git a/spec/models/project_wiki_spec.rb b/spec/models/project_wiki_spec.rb
index af23f121bdc..2d660d1deab 100644
--- a/spec/models/project_wiki_spec.rb
+++ b/spec/models/project_wiki_spec.rb
@@ -34,7 +34,7 @@ describe ProjectWiki do
describe "#url_to_repo" do
it "returns the correct ssh url to the repo" do
- expect(subject.url_to_repo).to eq(gitlab_shell.url_to_repo(subject.full_path))
+ expect(subject.url_to_repo).to eq(Gitlab::Shell.url_to_repo(subject.full_path))
end
end
@@ -97,9 +97,7 @@ describe ProjectWiki do
it "raises CouldNotCreateWikiError if it can't create the wiki repository" do
# Create a fresh project which will not have a wiki
project_wiki = described_class.new(create(:project), user)
- gitlab_shell = double(:gitlab_shell)
- allow(gitlab_shell).to receive(:create_wiki_repository)
- allow(project_wiki).to receive(:gitlab_shell).and_return(gitlab_shell)
+ expect(project_wiki.repository).to receive(:create_if_not_exists) { false }
expect { project_wiki.send(:wiki) }.to raise_exception(ProjectWiki::CouldNotCreateWikiError)
end
@@ -416,26 +414,12 @@ describe ProjectWiki do
end
end
- describe '#create_repo!' do
- let(:project) { create(:project) }
-
- it 'creates a repository' do
- expect(raw_repository.exists?).to eq(false)
- expect(subject.repository).to receive(:after_create)
-
- subject.send(:create_repo!, raw_repository)
-
- expect(raw_repository.exists?).to eq(true)
- end
- end
-
describe '#ensure_repository' do
let(:project) { create(:project) }
it 'creates the repository if it does not exist' do
expect(raw_repository.exists?).to eq(false)
- expect(subject).to receive(:create_repo!).and_call_original
subject.ensure_repository
expect(raw_repository.exists?).to eq(true)
diff --git a/spec/models/protected_branch_spec.rb b/spec/models/protected_branch_spec.rb
index 7f8a60dafa8..30fce1cd5c4 100644
--- a/spec/models/protected_branch_spec.rb
+++ b/spec/models/protected_branch_spec.rb
@@ -164,31 +164,45 @@ describe ProtectedBranch do
end
end
- context "new project" do
- let(:project) { create(:project) }
+ context 'new project' do
+ using RSpec::Parameterized::TableSyntax
- it 'returns false when default_protected_branch is unprotected' do
- stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_NONE)
+ let(:project) { create(:project) }
- expect(described_class.protected?(project, 'master')).to be false
- end
+ context 'when the group has set their own default_branch_protection level' do
+ where(:default_branch_protection_level, :result) do
+ Gitlab::Access::PROTECTION_NONE | false
+ Gitlab::Access::PROTECTION_DEV_CAN_PUSH | false
+ Gitlab::Access::PROTECTION_DEV_CAN_MERGE | true
+ Gitlab::Access::PROTECTION_FULL | true
+ end
- it 'returns false when default_protected_branch lets developers push' do
- stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_DEV_CAN_PUSH)
+ with_them do
+ it 'protects the default branch based on the default branch protection setting of the group' do
+ expect(project.namespace).to receive(:default_branch_protection).and_return(default_branch_protection_level)
- expect(described_class.protected?(project, 'master')).to be false
+ expect(described_class.protected?(project, 'master')).to eq(result)
+ end
+ end
end
- it 'returns true when default_branch_protection does not let developers push but let developer merge branches' do
- stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_DEV_CAN_MERGE)
-
- expect(described_class.protected?(project, 'master')).to be true
- end
+ context 'when the group has not set their own default_branch_protection level' do
+ where(:default_branch_protection_level, :result) do
+ Gitlab::Access::PROTECTION_NONE | false
+ Gitlab::Access::PROTECTION_DEV_CAN_PUSH | false
+ Gitlab::Access::PROTECTION_DEV_CAN_MERGE | true
+ Gitlab::Access::PROTECTION_FULL | true
+ end
- it 'returns true when default_branch_protection is in full protection' do
- stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_FULL)
+ with_them do
+ before do
+ stub_application_setting(default_branch_protection: default_branch_protection_level)
+ end
- expect(described_class.protected?(project, 'master')).to be true
+ it 'protects the default branch based on the instance level default branch protection setting' do
+ expect(described_class.protected?(project, 'master')).to eq(result)
+ end
+ end
end
end
end
diff --git a/spec/models/release_spec.rb b/spec/models/release_spec.rb
index 85e398c7d5f..3884b8138be 100644
--- a/spec/models/release_spec.rb
+++ b/spec/models/release_spec.rb
@@ -20,7 +20,6 @@ RSpec.describe Release do
describe 'validation' do
it { is_expected.to validate_presence_of(:project) }
- it { is_expected.to validate_presence_of(:description) }
it { is_expected.to validate_presence_of(:tag) }
context 'when a release exists in the database without a name' do
diff --git a/spec/models/releases/link_spec.rb b/spec/models/releases/link_spec.rb
index 4dd26c976cc..7533d1e6e5c 100644
--- a/spec/models/releases/link_spec.rb
+++ b/spec/models/releases/link_spec.rb
@@ -13,6 +13,7 @@ describe Releases::Link do
describe 'validation' do
it { is_expected.to validate_presence_of(:url) }
it { is_expected.to validate_presence_of(:name) }
+ it { is_expected.to validate_length_of(:filepath).is_at_most(128) }
context 'when url is invalid' do
let(:link) { build(:release_link, url: 'hoge') }
@@ -43,6 +44,16 @@ describe Releases::Link do
end
end
+ context 'when duplicate filepath is added to a release' do
+ let!(:link) { create(:release_link, filepath: '/binaries/gitlab-runner-linux-amd64', release: release) }
+
+ it 'raises an error' do
+ expect do
+ create(:release_link, filepath: '/binaries/gitlab-runner-linux-amd64', release: release)
+ end.to raise_error(ActiveRecord::RecordInvalid)
+ end
+ end
+
describe '.sorted' do
subject { described_class.sorted }
@@ -101,4 +112,38 @@ describe Releases::Link do
end
end
end
+
+ describe 'FILEPATH_REGEX with table' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:link) { build(:release_link)}
+
+ where(:reason, :filepath, :result) do
+ 'cannot contain `//`' | '/https//www.example.com' | be_invalid
+ 'cannot start with `//`' | '//www.example.com' | be_invalid
+ 'cannot contain a `?`' | '/example.com/?stuff=true' | be_invalid
+ 'cannot contain a `:`' | '/example:5000' | be_invalid
+ 'cannot end in a `-`' | '/binaries/awesome-app.dmg-' | be_invalid
+ 'cannot end in a `.`' | '/binaries/awesome-app.dmg.' | be_invalid
+ 'cannot end in a `_`' | '/binaries/awesome-app.dmg_' | be_invalid
+ 'cannot start with a `.`' | '.binaries/awesome-app.dmg' | be_invalid
+ 'cannot start with a `-`' | '-binaries/awesome-app.dmg' | be_invalid
+ 'cannot start with a `_`' | '_binaries/awesome-app.dmg' | be_invalid
+ 'cannot start with a number' | '3binaries/awesome-app.dmg' | be_invalid
+ 'cannot start with a letter' | 'binaries/awesome-app.dmg' | be_invalid
+ 'cannot contain accents' | '/binarïes/âwésome-app.dmg' | be_invalid
+ 'can end in a character' | '/binaries/awesome-app.dmg' | be_valid
+ 'can end in a number' | '/binaries/awesome-app-1' | be_valid
+ 'can contain one or more dots, dashes or underscores' | '/sub_tr__ee.ex..ample-2--1/v99.com' | be_valid
+ 'can contain multiple non-sequential slashes' | '/example.com/path/to/file.exe' | be_valid
+ 'can be nil' | nil | be_valid
+ end
+
+ with_them do
+ specify do
+ link.filepath = filepath
+ expect(link).to result
+ end
+ end
+ end
end
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index a8cdebd2b9c..ca04bd7a28a 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -320,6 +320,21 @@ describe Repository do
end
end
+ context "when 'author' is set" do
+ it "returns commits from that author" do
+ commit = repository.commits(nil, limit: 1).first
+ known_author = "#{commit.author_name} <#{commit.author_email}>"
+
+ expect(repository.commits(nil, author: known_author, limit: 1)).not_to be_empty
+ end
+
+ it "doesn't returns commits from an unknown author" do
+ unknown_author = "The Man With No Name <zapp@brannigan.com>"
+
+ expect(repository.commits(nil, author: unknown_author, limit: 1)).to be_empty
+ end
+ end
+
context "when 'all' flag is set" do
it 'returns every commit from the repository' do
expect(repository.commits(nil, all: true, limit: 60).size).to eq(60)
@@ -1914,32 +1929,6 @@ describe Repository do
end
end
- describe '#after_import' do
- subject { repository.after_import }
-
- it 'flushes and builds the cache' do
- expect(repository).to receive(:expire_content_cache)
-
- subject
- end
-
- it 'calls DetectRepositoryLanguagesWorker' do
- expect(DetectRepositoryLanguagesWorker).to receive(:perform_async)
-
- subject
- end
-
- context 'with a wiki repository' do
- let(:repository) { project.wiki.repository }
-
- it 'does not call DetectRepositoryLanguagesWorker' do
- expect(DetectRepositoryLanguagesWorker).not_to receive(:perform_async)
-
- subject
- end
- end
- end
-
describe '#after_push_commit' do
it 'expires statistics caches' do
expect(repository).to receive(:expire_statistics_caches)
diff --git a/spec/models/resource_label_event_spec.rb b/spec/models/resource_label_event_spec.rb
index a92f5ee93e1..ca887b485a2 100644
--- a/spec/models/resource_label_event_spec.rb
+++ b/spec/models/resource_label_event_spec.rb
@@ -10,6 +10,10 @@ RSpec.describe ResourceLabelEvent, type: :model do
it_behaves_like 'having unique enum values'
+ it_behaves_like 'a resource event'
+ it_behaves_like 'a resource event for issues'
+ it_behaves_like 'a resource event for merge requests'
+
describe 'associations' do
it { is_expected.to belong_to(:user) }
it { is_expected.to belong_to(:issue) }
diff --git a/spec/models/resource_milestone_event_spec.rb b/spec/models/resource_milestone_event_spec.rb
new file mode 100644
index 00000000000..1b0181e3fd2
--- /dev/null
+++ b/spec/models/resource_milestone_event_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ResourceMilestoneEvent, type: :model do
+ it_behaves_like 'a resource event'
+ it_behaves_like 'a resource event for issues'
+ it_behaves_like 'a resource event for merge requests'
+
+ it_behaves_like 'having unique enum values'
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:milestone) }
+ end
+
+ describe 'validations' do
+ context 'when issue and merge_request are both nil' do
+ subject { build(described_class.name.underscore.to_sym, issue: nil, merge_request: nil) }
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'when issue and merge_request are both set' do
+ subject { build(described_class.name.underscore.to_sym, issue: build(:issue), merge_request: build(:merge_request)) }
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'when issue is set' do
+ subject { create(described_class.name.underscore.to_sym, issue: create(:issue), merge_request: nil) }
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'when merge_request is set' do
+ subject { create(described_class.name.underscore.to_sym, issue: nil, merge_request: create(:merge_request)) }
+
+ it { is_expected.to be_valid }
+ end
+ end
+
+ describe 'states' do
+ [Issue, MergeRequest].each do |klass|
+ klass.available_states.each do |state|
+ it "supports state #{state.first} for #{klass.name.underscore}" do
+ model = create(klass.name.underscore, state: state[0])
+ key = model.class.name.underscore
+ event = build(described_class.name.underscore.to_sym, key => model, state: model.state)
+
+ expect(event.state).to eq(state[0])
+ end
+ end
+ end
+ end
+end
diff --git a/spec/models/resource_weight_event_spec.rb b/spec/models/resource_weight_event_spec.rb
index 2f00204512e..8a37883d933 100644
--- a/spec/models/resource_weight_event_spec.rb
+++ b/spec/models/resource_weight_event_spec.rb
@@ -3,6 +3,9 @@
require 'spec_helper'
RSpec.describe ResourceWeightEvent, type: :model do
+ it_behaves_like 'a resource event'
+ it_behaves_like 'a resource event for issues'
+
let_it_be(:user1) { create(:user) }
let_it_be(:user2) { create(:user) }
@@ -11,13 +14,11 @@ RSpec.describe ResourceWeightEvent, type: :model do
let_it_be(:issue3) { create(:issue, author: user2) }
describe 'validations' do
- it { is_expected.not_to allow_value(nil).for(:user) }
it { is_expected.not_to allow_value(nil).for(:issue) }
it { is_expected.to allow_value(nil).for(:weight) }
end
describe 'associations' do
- it { is_expected.to belong_to(:user) }
it { is_expected.to belong_to(:issue) }
end
@@ -66,7 +67,7 @@ RSpec.describe ResourceWeightEvent, type: :model do
it 'returns the expected id' do
allow(Digest::SHA1).to receive(:hexdigest)
- .with("ResourceWeightEvent-2019-12-30 00:00:00 UTC-#{user1.id}")
+ .with("ResourceWeightEvent-#{event.id}-#{user1.id}")
.and_return('73d167c478')
expect(event.discussion_id).to eq('73d167c478')
diff --git a/spec/models/serverless/domain_cluster_spec.rb b/spec/models/serverless/domain_cluster_spec.rb
index bd645b7d0aa..f5e1eb304a1 100644
--- a/spec/models/serverless/domain_cluster_spec.rb
+++ b/spec/models/serverless/domain_cluster_spec.rb
@@ -10,7 +10,7 @@ describe ::Serverless::DomainCluster do
it { is_expected.to validate_presence_of(:knative) }
it { is_expected.to validate_presence_of(:uuid) }
- it { is_expected.to validate_length_of(:uuid).is_equal_to(Gitlab::Serverless::Domain::UUID_LENGTH) }
+ it { is_expected.to validate_length_of(:uuid).is_equal_to(::Serverless::Domain::UUID_LENGTH) }
it { is_expected.to validate_uniqueness_of(:uuid) }
it 'validates that uuid has only hex characters' do
@@ -31,7 +31,7 @@ describe ::Serverless::DomainCluster do
context 'when nil' do
it 'generates a value by default' do
attributes = build(:serverless_domain_cluster).attributes.merge(uuid: nil)
- expect(Gitlab::Serverless::Domain).to receive(:generate_uuid).and_call_original
+ expect(::Serverless::Domain).to receive(:generate_uuid).and_call_original
subject = Serverless::DomainCluster.new(attributes)
@@ -47,6 +47,10 @@ describe ::Serverless::DomainCluster do
end
end
+ describe 'cluster' do
+ it { is_expected.to respond_to(:cluster) }
+ end
+
describe 'domain' do
it { is_expected.to respond_to(:domain) }
end
diff --git a/spec/models/serverless/domain_spec.rb b/spec/models/serverless/domain_spec.rb
new file mode 100644
index 00000000000..ba54e05b4e3
--- /dev/null
+++ b/spec/models/serverless/domain_spec.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ::Serverless::Domain do
+ let(:function_name) { 'test-function' }
+ let(:pages_domain_name) { 'serverless.gitlab.io' }
+ let(:pages_domain) { create(:pages_domain, :instance_serverless, domain: pages_domain_name) }
+ let!(:serverless_domain_cluster) { create(:serverless_domain_cluster, uuid: 'abcdef12345678', pages_domain: pages_domain) }
+ let(:valid_cluster_uuid) { 'aba1cdef123456f278' }
+ let(:invalid_cluster_uuid) { 'aba1cdef123456f178' }
+ let!(:environment) { create(:environment, name: 'test') }
+
+ let(:valid_uri) { "https://#{function_name}-#{valid_cluster_uuid}#{"%x" % environment.id}-#{environment.slug}.#{pages_domain_name}" }
+ let(:valid_fqdn) { "#{function_name}-#{valid_cluster_uuid}#{"%x" % environment.id}-#{environment.slug}.#{pages_domain_name}" }
+ let(:invalid_uri) { "https://#{function_name}-#{invalid_cluster_uuid}#{"%x" % environment.id}-#{environment.slug}.#{pages_domain_name}" }
+
+ shared_examples 'a valid Domain' do
+ describe '#uri' do
+ it 'matches valid URI' do
+ expect(subject.uri.to_s).to eq valid_uri
+ end
+ end
+
+ describe '#function_name' do
+ it 'returns function_name' do
+ expect(subject.function_name).to eq function_name
+ end
+ end
+
+ describe '#serverless_domain_cluster' do
+ it 'returns serverless_domain_cluster' do
+ expect(subject.serverless_domain_cluster).to eq serverless_domain_cluster
+ end
+ end
+
+ describe '#environment' do
+ it 'returns environment' do
+ expect(subject.environment).to eq environment
+ end
+ end
+ end
+
+ describe '.new' do
+ context 'with valid arguments' do
+ subject do
+ described_class.new(
+ function_name: function_name,
+ serverless_domain_cluster: serverless_domain_cluster,
+ environment: environment
+ )
+ end
+
+ it_behaves_like 'a valid Domain'
+ end
+
+ context 'with invalid arguments' do
+ subject do
+ described_class.new(
+ function_name: function_name,
+ environment: environment
+ )
+ end
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'with nil cluster argument' do
+ subject do
+ described_class.new(
+ function_name: function_name,
+ serverless_domain_cluster: nil,
+ environment: environment
+ )
+ end
+
+ it { is_expected.not_to be_valid }
+ end
+ end
+
+ describe '.generate_uuid' do
+ it 'has 14 characters' do
+ expect(described_class.generate_uuid.length).to eq(described_class::UUID_LENGTH)
+ end
+
+ it 'consists of only hexadecimal characters' do
+ expect(described_class.generate_uuid).to match(/\A\h+\z/)
+ end
+
+ it 'uses random characters' do
+ uuid = 'abcd1234567890'
+
+ expect(SecureRandom).to receive(:hex).with(described_class::UUID_LENGTH / 2).and_return(uuid)
+ expect(described_class.generate_uuid).to eq(uuid)
+ end
+ end
+end
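The .generate_uuid examples above pin down the generator's shape; a self-contained sketch under the assumption that UUID_LENGTH is 14:

    require 'securerandom'

    UUID_LENGTH = 14

    # 7 random bytes rendered as hex gives 14 lowercase hexadecimal characters.
    def generate_uuid
      SecureRandom.hex(UUID_LENGTH / 2)
    end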
diff --git a/spec/models/service_spec.rb b/spec/models/service_spec.rb
index f58bcbebd67..d0673b21329 100644
--- a/spec/models/service_spec.rb
+++ b/spec/models/service_spec.rb
@@ -10,8 +10,52 @@ describe Service do
it { is_expected.to have_one :issue_tracker_data }
end
- describe 'Validations' do
+ describe 'validations' do
it { is_expected.to validate_presence_of(:type) }
+
+ it 'validates presence of project_id if not template', :aggregate_failures do
+ expect(build(:service, project_id: nil, template: true)).to be_valid
+ expect(build(:service, project_id: nil, template: false)).to be_invalid
+ end
+
+ it 'validates presence of project_id if not instance', :aggregate_failures do
+ expect(build(:service, project_id: nil, instance: true)).to be_valid
+ expect(build(:service, project_id: nil, instance: false)).to be_invalid
+ end
+
+ it 'validates absence of project_id if instance', :aggregate_failures do
+ expect(build(:service, project_id: nil, instance: true)).to be_valid
+ expect(build(:service, instance: true)).to be_invalid
+ end
+
+ it 'validates absence of project_id if template', :aggregate_failures do
+ expect(build(:service, template: true)).to validate_absence_of(:project_id)
+ expect(build(:service, template: false)).not_to validate_absence_of(:project_id)
+ end
+
+ it 'validates that a service cannot be both a template and an instance' do
+ expect(build(:service, project_id: nil, template: true, instance: true)).to be_invalid
+ end
+
+ context 'with an existing service template' do
+ before do
+ create(:service, :template)
+ end
+
+ it 'validates only one service template per type' do
+ expect(build(:service, :template)).to be_invalid
+ end
+ end
+
+ context 'with an existing instance service' do
+ before do
+ create(:service, :instance)
+ end
+
+ it 'validates only one service instance per type' do
+ expect(build(:service, :instance)).to be_invalid
+ end
+ end
end
describe 'Scopes' do
@@ -153,7 +197,7 @@ describe Service do
context 'when data are stored in separated fields' do
let(:template) do
- create(:jira_service, data_params.merge(properties: {}, title: title, description: description, template: true))
+ create(:jira_service, :template, data_params.merge(properties: {}, title: title, description: description))
end
it_behaves_like 'service creation from a template'
@@ -391,14 +435,6 @@ describe Service do
end
end
- describe '.find_by_template' do
- let!(:service) { create(:service, template: true) }
-
- it 'returns service template' do
- expect(described_class.find_by_template).to eq(service)
- end
- end
-
describe '#api_field_names' do
let(:fake_service) do
Class.new(Service) do
diff --git a/spec/models/snippet_repository_spec.rb b/spec/models/snippet_repository_spec.rb
index 9befbb02b17..6861e03282a 100644
--- a/spec/models/snippet_repository_spec.rb
+++ b/spec/models/snippet_repository_spec.rb
@@ -3,6 +3,11 @@
require 'spec_helper'
describe SnippetRepository do
+ let_it_be(:user) { create(:user) }
+ let(:snippet) { create(:personal_snippet, :repository, author: user) }
+ let(:snippet_repository) { snippet.snippet_repository }
+ let(:commit_opts) { { branch_name: 'master', message: 'whatever' } }
+
describe 'associations' do
it { is_expected.to belong_to(:shard) }
it { is_expected.to belong_to(:snippet) }
@@ -10,7 +15,7 @@ describe SnippetRepository do
describe '.find_snippet' do
it 'finds snippet by disk path' do
- snippet = create(:snippet)
+ snippet = create(:snippet, author: user)
snippet.track_snippet_repository
expect(described_class.find_snippet(snippet.disk_path)).to eq(snippet)
@@ -20,4 +25,155 @@ describe SnippetRepository do
expect(described_class.find_snippet('@@unexisting/path/to/snippet')).to be_nil
end
end
+
+ describe '#multi_files_action' do
+ let(:new_file) { { file_path: 'new_file_test', content: 'bar' } }
+ let(:move_file) { { previous_path: 'CHANGELOG', file_path: 'CHANGELOG_new', content: 'bar' } }
+ let(:update_file) { { previous_path: 'README', file_path: 'README', content: 'bar' } }
+ let(:data) { [new_file, move_file, update_file] }
+
+ it 'returns nil when files argument is empty' do
+ expect(snippet.repository).not_to receive(:multi_action)
+
+ operation = snippet_repository.multi_files_action(user, [], commit_opts)
+
+ expect(operation).to be_nil
+ end
+
+ it 'returns nil when files argument is nil' do
+ expect(snippet.repository).not_to receive(:multi_action)
+
+ operation = snippet_repository.multi_files_action(user, nil, commit_opts)
+
+ expect(operation).to be_nil
+ end
+
+ it 'performs the operation according to the files data' do
+ new_file_blob = blob_at(snippet, new_file[:file_path])
+ move_file_blob = blob_at(snippet, move_file[:previous_path])
+ update_file_blob = blob_at(snippet, update_file[:previous_path])
+
+ aggregate_failures do
+ expect(new_file_blob).to be_nil
+ expect(move_file_blob).not_to be_nil
+ expect(update_file_blob).not_to be_nil
+ end
+
+ expect do
+ snippet_repository.multi_files_action(user, data, commit_opts)
+ end.not_to raise_error
+
+ aggregate_failures do
+ data.each do |entry|
+ blob = blob_at(snippet, entry[:file_path])
+
+ expect(blob).not_to be_nil
+ expect(blob.path).to eq entry[:file_path]
+ expect(blob.data).to eq entry[:content]
+ end
+ end
+ end
+
+ it 'tries to obtain an exclusive lease' do
+ expect(Gitlab::ExclusiveLease).to receive(:new).with("multi_files_action:#{snippet.id}", anything).and_call_original
+
+ snippet_repository.multi_files_action(user, data, commit_opts)
+ end
+
+ it 'cancels the lease when the method has finished' do
+ expect(Gitlab::ExclusiveLease).to receive(:cancel).with("multi_files_action:#{snippet.id}", anything).and_call_original
+
+ snippet_repository.multi_files_action(user, data, commit_opts)
+ end
+
+ it 'raises an error if the lease cannot be obtained' do
+ allow_next_instance_of(Gitlab::ExclusiveLease) do |instance|
+ allow(instance).to receive(:try_obtain).and_return false
+ end
+
+ expect do
+ snippet_repository.multi_files_action(user, data, commit_opts)
+ end.to raise_error(described_class::CommitError)
+ end
+
+ context 'with commit actions' do
+ let(:result) do
+ [{ action: :create }.merge(new_file),
+ { action: :move }.merge(move_file),
+ { action: :update }.merge(update_file)]
+ end
+ let(:repo) { double }
+
+ before do
+ allow(snippet).to receive(:repository).and_return(repo)
+ allow(repo).to receive(:ls_files).and_return([])
+ end
+
+ it 'infers the commit action based on the parameters if not present' do
+ expect(repo).to receive(:multi_action).with(user, hash_including(actions: result))
+
+ snippet_repository.multi_files_action(user, data, commit_opts)
+ end
+
+ context 'when commit actions are present' do
+ let(:file_action) { { file_path: 'foo.txt', content: 'foo', action: :foobar } }
+ let(:data) { [file_action] }
+
+ it 'does not change commit action' do
+ expect(repo).to(
+ receive(:multi_action).with(
+ user,
+ hash_including(actions: array_including(hash_including(action: :foobar)))))
+
+ snippet_repository.multi_files_action(user, data, commit_opts)
+ end
+ end
+ end
+
+ shared_examples 'snippet repository with file names' do |*filenames|
+ it 'sets a name for unnamed files' do
+ ls_files = snippet.repository.ls_files(nil)
+ expect(ls_files).to include(*filenames)
+ end
+ end
+
+ let_it_be(:named_snippet) { { file_path: 'fee.txt', content: 'bar', action: :create } }
+ let_it_be(:unnamed_snippet) { { file_path: '', content: 'dummy', action: :create } }
+
+ context 'when some files are not named' do
+ let(:data) { [named_snippet] + Array.new(2) { unnamed_snippet.clone } }
+
+ before do
+ expect do
+ snippet_repository.multi_files_action(user, data, commit_opts)
+ end.not_to raise_error
+ end
+
+ it_behaves_like 'snippet repository with file names', 'snippetfile1.txt', 'snippetfile2.txt'
+ end
+
+ context 'when repository already has 10 unnamed snippets' do
+ let(:pre_populate_data) { Array.new(10) { unnamed_snippet.clone } }
+ let(:data) { [named_snippet] + Array.new(2) { unnamed_snippet.clone } }
+
+ before do
+ # Pre-populate the repository with 10 unnamed snippets.
+ snippet_repository.multi_files_action(user, pre_populate_data, commit_opts)
+
+ expect do
+ snippet_repository.multi_files_action(user, data, commit_opts)
+ end.not_to raise_error
+ end
+
+ it_behaves_like 'snippet repository with file names', 'snippetfile10.txt', 'snippetfile11.txt'
+ end
+ end
+
+ def blob_at(snippet, path)
+ snippet.repository.blob_at('master', path)
+ end
+
+ def first_blob(snippet)
+ snippet.repository.blob_at('master', snippet.repository.ls_files(nil).first)
+ end
end
diff --git a/spec/models/snippet_spec.rb b/spec/models/snippet_spec.rb
index 93bc42c144d..7ae4a81ddd7 100644
--- a/spec/models/snippet_spec.rb
+++ b/spec/models/snippet_spec.rb
@@ -149,7 +149,7 @@ describe Snippet do
end
describe '.search' do
- let(:snippet) { create(:snippet, title: 'test snippet') }
+ let(:snippet) { create(:snippet, title: 'test snippet', description: 'description') }
it 'returns snippets with a matching title' do
expect(described_class.search(snippet.title)).to eq([snippet])
@@ -174,6 +174,10 @@ describe Snippet do
it 'returns snippets with a matching file name regardless of the casing' do
expect(described_class.search(snippet.file_name.upcase)).to eq([snippet])
end
+
+ it 'returns snippets with a matching description' do
+ expect(described_class.search(snippet.description)).to eq([snippet])
+ end
end
describe '.search_code' do
@@ -511,6 +515,32 @@ describe Snippet do
end
end
+ describe '#blobs' do
+ let(:snippet) { create(:snippet) }
+
+ context 'when repository does not exist' do
+ it 'returns empty array' do
+ expect(snippet.blobs).to be_empty
+ end
+ end
+
+ context 'when repository exists' do
+ let(:snippet) { create(:snippet, :repository) }
+
+ it 'returns array of blobs' do
+ expect(snippet.blobs).to all(be_a(Blob))
+ end
+ end
+
+ it 'returns a blob representing the snippet data' do
+ blob = snippet.blob
+
+ expect(blob).to be_a(Blob)
+ expect(blob.path).to eq(snippet.file_name)
+ expect(blob.data).to eq(snippet.content)
+ end
+ end
+
describe '#to_json' do
let(:snippet) { build(:snippet) }
@@ -536,7 +566,7 @@ describe Snippet do
end
describe '#track_snippet_repository' do
- let(:snippet) { create(:snippet, :repository) }
+ let(:snippet) { create(:snippet) }
context 'when a snippet repository entry does not exist' do
it 'creates a new entry' do
@@ -554,7 +584,8 @@ describe Snippet do
end
context 'when a tracking entry exists' do
- let!(:snippet_repository) { create(:snippet_repository, snippet: snippet) }
+ let!(:snippet) { create(:snippet, :repository) }
+ let(:snippet_repository) { snippet.snippet_repository }
let!(:shard) { create(:shard, name: 'foo') }
it 'does not create a new entry in the database' do
@@ -592,7 +623,7 @@ describe Snippet do
end
context 'when repository exists' do
- let(:snippet) { create(:snippet, :repository) }
+ let!(:snippet) { create(:snippet, :repository) }
it 'does not try to create repository' do
expect(snippet.repository).not_to receive(:after_create)
@@ -600,10 +631,23 @@ describe Snippet do
expect(snippet.create_repository).to be_nil
end
- it 'does not track snippet repository' do
- expect do
- snippet.create_repository
- end.not_to change(SnippetRepository, :count)
+ context 'when snippet_repository exists' do
+ it 'does not create a new snippet repository' do
+ expect do
+ snippet.create_repository
+ end.not_to change(SnippetRepository, :count)
+ end
+ end
+
+ context 'when snippet_repository does not exist' do
+ it 'creates a snippet_repository' do
+ snippet.snippet_repository.destroy
+ snippet.reload
+
+ expect do
+ snippet.create_repository
+ end.to change(SnippetRepository, :count).by(1)
+ end
end
end
end
@@ -631,4 +675,42 @@ describe Snippet do
end
end
end
+
+ describe '#can_cache_field?' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:snippet) { create(:snippet, file_name: file_name) }
+
+ subject { snippet.can_cache_field?(field) }
+
+ where(:field, :file_name, :result) do
+ :title | nil | true
+ :title | 'foo.bar' | true
+ :description | nil | true
+ :description | 'foo.bar' | true
+ :content | nil | false
+ :content | 'bar.foo' | false
+ :content | 'markdown.md' | true
+ end
+
+ with_them do
+ it { is_expected.to eq result }
+ end
+ end
+
+ describe '#url_to_repo' do
+ subject { snippet.url_to_repo }
+
+ context 'with personal snippet' do
+ let(:snippet) { create(:personal_snippet) }
+
+ it { is_expected.to eq(Gitlab.config.gitlab_shell.ssh_path_prefix + "snippets/#{snippet.id}.git") }
+ end
+
+ context 'with project snippet' do
+ let(:snippet) { create(:project_snippet) }
+
+ it { is_expected.to eq(Gitlab.config.gitlab_shell.ssh_path_prefix + "#{snippet.project.full_path}/snippets/#{snippet.id}.git") }
+ end
+ end
end
diff --git a/spec/models/upload_spec.rb b/spec/models/upload_spec.rb
index 7138305d7b1..8a64948d570 100644
--- a/spec/models/upload_spec.rb
+++ b/spec/models/upload_spec.rb
@@ -127,6 +127,36 @@ describe Upload do
expect(uploader.mounted_as).to eq(subject.send(:mount_point))
expect(uploader.file).not_to be_nil
end
+
+ context 'when upload has a nil mount_point' do
+ context 'when an upload belongs to a note' do
+ it 'mounts it as attachment' do
+ project = create(:project, :legacy_storage)
+ merge_request = create(:merge_request, source_project: project)
+ note = create(:legacy_diff_note_on_merge_request, note: 'some note', project: project, noteable: merge_request)
+
+ subject = build(:upload, :with_file, :attachment_upload, model: note, mount_point: nil)
+ uploader = subject.retrieve_uploader
+
+ expect(uploader.upload).to eq(subject)
+ expect(uploader.path).to include('attachment')
+ expect(uploader.file).not_to be_nil
+ end
+ end
+
+ context 'when an upload does not belong to a note' do
+ it 'does not mount it as attachment' do
+ appearance = create(:appearance)
+
+ subject = build(:upload, :with_file, :attachment_upload, model: appearance, mount_point: nil)
+ uploader = subject.retrieve_uploader
+
+ expect(uploader.upload).to eq(subject)
+ expect(uploader.path).not_to include('attachment')
+ expect(uploader.file).not_to be_nil
+ end
+ end
+ end
end
describe '#needs_checksum?' do
diff --git a/spec/models/user_detail_spec.rb b/spec/models/user_detail_spec.rb
new file mode 100644
index 00000000000..2b2bfff7be2
--- /dev/null
+++ b/spec/models/user_detail_spec.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe UserDetail do
+ it { is_expected.to belong_to(:user) }
+
+ describe 'validations' do
+ describe 'job_title' do
+ it { is_expected.not_to validate_presence_of(:job_title) }
+ it { is_expected.to validate_length_of(:job_title).is_at_most(200) }
+ end
+ end
+end
diff --git a/spec/models/user_highest_role_spec.rb b/spec/models/user_highest_role_spec.rb
new file mode 100644
index 00000000000..5d31dae8339
--- /dev/null
+++ b/spec/models/user_highest_role_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe UserHighestRole do
+ describe 'associations' do
+ it { is_expected.to belong_to(:user).required }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_inclusion_of(:highest_access_level).in_array([nil, *Gitlab::Access.all_values]) }
+ end
+end
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index cd84bf54e8f..849494e7cd4 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -29,6 +29,8 @@ describe User, :do_not_mock_admin_mode do
it { is_expected.to have_one(:namespace) }
it { is_expected.to have_one(:status) }
it { is_expected.to have_one(:max_access_level_membership) }
+ it { is_expected.to have_one(:user_detail) }
+ it { is_expected.to have_one(:user_highest_role) }
it { is_expected.to have_many(:snippets).dependent(:destroy) }
it { is_expected.to have_many(:members) }
it { is_expected.to have_many(:project_members) }
@@ -430,6 +432,73 @@ describe User, :do_not_mock_admin_mode do
end
end
+ context 'email restrictions' do
+ context 'when email restriction is disabled' do
+ before do
+ stub_application_setting(email_restrictions_enabled: false)
+ stub_application_setting(email_restrictions: '\+')
+ end
+
+ it 'does accept the email address' do
+ user = build(:user, email: 'info+1@test.com')
+
+ expect(user).to be_valid
+ end
+ end
+
+ context 'when email restriction is enabled' do
+ before do
+ stub_application_setting(email_restrictions_enabled: true)
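+ # The restriction regex rejects addresses containing '+' as well as any gitlab.com address.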
+ stub_application_setting(email_restrictions: '([\+]|\b(\w*gitlab.com\w*)\b)')
+ end
+
+ it 'does not accept an email address with + characters' do
+ user = build(:user, email: 'info+1@test.com')
+
+ expect(user).not_to be_valid
+ end
+
+ it 'does not accept email with a gitlab domain' do
+ user = build(:user, email: 'info@gitlab.com')
+
+ expect(user).not_to be_valid
+ end
+
+ it 'adds an error message when email is not accepted' do
+ user = build(:user, email: 'info@gitlab.com')
+
+ expect(user).not_to be_valid
+ expect(user.errors.messages[:email].first).to eq(_('is not allowed for sign-up'))
+ end
+
+ it 'does accept a valid email address' do
+ user = build(:user, email: 'info@test.com')
+
+ expect(user).to be_valid
+ end
+
+ context 'when feature flag is turned off' do
+ before do
+ stub_feature_flags(email_restrictions: false)
+ end
+
+ it 'does accept the email address' do
+ user = build(:user, email: 'info+1@test.com')
+
+ expect(user).to be_valid
+ end
+ end
+
+ context 'when created_by_id is set' do
+ it 'does accept the email address' do
+ user = build(:user, email: 'info+1@test.com', created_by_id: 1)
+
+ expect(user).to be_valid
+ end
+ end
+ end
+ end
+
context 'owns_notification_email' do
it 'accepts temp_oauth_email emails' do
user = build(:user, email: "temp-email-for-oauth@example.com")
@@ -1943,18 +2012,28 @@ describe User, :do_not_mock_admin_mode do
describe '#all_emails' do
let(:user) { create(:user) }
+ let!(:email_confirmed) { create :email, user: user, confirmed_at: Time.now }
+ let!(:email_unconfirmed) { create :email, user: user }
+
+ context 'when `include_private_email` is true' do
+ it 'returns all emails' do
+ expect(user.reload.all_emails).to contain_exactly(
+ user.email,
+ user.private_commit_email,
+ email_unconfirmed.email,
+ email_confirmed.email
+ )
+ end
+ end
- it 'returns all emails' do
- email_confirmed = create :email, user: user, confirmed_at: Time.now
- email_unconfirmed = create :email, user: user
- user.reload
-
- expect(user.all_emails).to contain_exactly(
- user.email,
- user.private_commit_email,
- email_unconfirmed.email,
- email_confirmed.email
- )
+ context 'when `include_private_email` is false' do
+ it 'does not include the private commit email' do
+ expect(user.reload.all_emails(include_private_email: false)).to contain_exactly(
+ user.email,
+ email_unconfirmed.email,
+ email_confirmed.email
+ )
+ end
end
end
@@ -4119,7 +4198,7 @@ describe User, :do_not_mock_admin_mode do
describe '#read_only_attribute?' do
context 'when LDAP server is enabled' do
before do
- allow(Gitlab::Auth::LDAP::Config).to receive(:enabled?).and_return(true)
+ allow(Gitlab::Auth::Ldap::Config).to receive(:enabled?).and_return(true)
end
%i[name email location].each do |attribute|
@@ -4241,4 +4320,42 @@ describe User, :do_not_mock_admin_mode do
expect(user.hook_attrs).to eq(user_attributes)
end
end
+
+ describe 'user detail' do
+ context 'when user is initialized' do
+ let(:user) { build(:user) }
+
+ it { expect(user.user_detail).to be_present }
+ it { expect(user.user_detail).not_to be_persisted }
+ end
+
+ context 'when user detail exists' do
+ let(:user) { create(:user, job_title: 'Engineer') }
+
+ it { expect(user.user_detail).to be_persisted }
+ end
+ end
+
+ describe '#gitlab_employee?' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { user.gitlab_employee? }
+
+ where(:email, :is_com, :expected_result) do
+ 'test@gitlab.com' | true | true
+ 'test@example.com' | true | false
+ 'test@gitlab.com' | false | false
+ 'test@example.com' | false | false
+ end
+
+ with_them do
+ let(:user) { build(:user, email: email) }
+
+ before do
+ allow(Gitlab).to receive(:com?).and_return(is_com)
+ end
+
+ it { is_expected.to be expected_result }
+ end
+ end
end
diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb
index be5479cfc11..42a7d567613 100644
--- a/spec/models/wiki_page_spec.rb
+++ b/spec/models/wiki_page_spec.rb
@@ -192,16 +192,17 @@ describe WikiPage do
expect(subject).not_to be_valid
expect(subject.errors[:title]).to contain_exactly(
- "exceeds the limit of #{max_title} bytes for page titles"
+ "exceeds the limit of #{max_title} bytes"
)
end
it 'rejects directories exceeding the limit' do
- subject.title = invalid_directory + '/foo'
+ subject.title = "#{invalid_directory}/#{invalid_directory}2/foo"
expect(subject).not_to be_valid
expect(subject.errors[:title]).to contain_exactly(
- "exceeds the limit of #{max_directory} bytes for directory names"
+ "exceeds the limit of #{max_directory} bytes for directory name \"#{invalid_directory}\"",
+ "exceeds the limit of #{max_directory} bytes for directory name \"#{invalid_directory}2\""
)
end
@@ -210,8 +211,8 @@ describe WikiPage do
expect(subject).not_to be_valid
expect(subject.errors[:title]).to contain_exactly(
- "exceeds the limit of #{max_title} bytes for page titles",
- "exceeds the limit of #{max_directory} bytes for directory names"
+ "exceeds the limit of #{max_title} bytes",
+ "exceeds the limit of #{max_directory} bytes for directory name \"#{invalid_directory}\""
)
end
end
@@ -474,43 +475,69 @@ describe WikiPage do
end
end
- describe "#title" do
- it "replaces a hyphen to a space" do
- subject.title = "Import-existing-repositories-into-GitLab"
+ describe '#title_changed?' do
+ using RSpec::Parameterized::TableSyntax
- expect(subject.title).to eq("Import existing repositories into GitLab")
+ let(:untitled_page) { described_class.new(wiki) }
+ let(:directory_page) do
+ create_page('parent directory/child page', 'test content')
+ wiki.find_page('parent directory/child page')
end
- it 'unescapes html' do
- subject.title = 'foo &amp; bar'
+ where(:page, :title, :changed) do
+ :untitled_page | nil | false
+ :untitled_page | 'new title' | true
- expect(subject.title).to eq('foo & bar')
+ :new_page | nil | true
+ :new_page | 'test page' | true
+ :new_page | 'new title' | true
+
+ :existing_page | nil | false
+ :existing_page | 'test page' | false
+ :existing_page | 'test-page' | false
+ :existing_page | '/test page' | false
+ :existing_page | '/test-page' | false
+ :existing_page | ' test page ' | true
+ :existing_page | 'new title' | true
+ :existing_page | 'new-title' | true
+
+ :directory_page | nil | false
+ :directory_page | 'parent directory/child page' | false
+ :directory_page | 'parent-directory/child page' | false
+ :directory_page | 'parent-directory/child-page' | false
+ :directory_page | 'child page' | false
+ :directory_page | 'child-page' | false
+ :directory_page | '/child page' | true
+ :directory_page | 'parent directory/other' | true
+ :directory_page | 'parent-directory/other' | true
+ :directory_page | 'parent-directory / child-page' | true
+ :directory_page | 'other directory/child page' | true
+ :directory_page | 'other-directory/child page' | true
+ end
+
+ with_them do
+ it 'returns the expected value' do
+ subject = public_send(page)
+ subject.title = title if title
+
+ expect(subject.title_changed?).to be(changed)
+ end
end
end
describe '#path' do
- let(:path) { 'mypath.md' }
- let(:git_page) { instance_double('Gitlab::Git::WikiPage', path: path).as_null_object }
-
it 'returns the path when persisted' do
- page = described_class.new(wiki, git_page, true)
-
- expect(page.path).to eq(path)
+ expect(existing_page.path).to eq('test-page.md')
end
it 'returns nil when not persisted' do
- page = described_class.new(wiki, git_page, false)
-
- expect(page.path).to be_nil
+ expect(new_page.path).to be_nil
end
end
describe '#directory' do
context 'when the page is at the root directory' do
- subject do
- create_page('file', 'content')
- wiki.find_page('file')
- end
+ subject { existing_page }
it 'returns an empty string' do
expect(subject.directory).to eq('')
diff --git a/spec/models/x509_certificate_spec.rb b/spec/models/x509_certificate_spec.rb
index 187d37334a1..880c5014a84 100644
--- a/spec/models/x509_certificate_spec.rb
+++ b/spec/models/x509_certificate_spec.rb
@@ -43,6 +43,28 @@ RSpec.describe X509Certificate do
expect(certificate.subject).to eq(subject)
expect(certificate.email).to eq(email)
end
+
+ it 'calls mark_commit_signatures_unverified' do
+ expect_any_instance_of(described_class).to receive(:mark_commit_signatures_unverified)
+
+ described_class.safe_create!(attributes)
+ end
+
+ context 'certificate revocation handling' do
+ let(:x509_certificate) { create(:x509_certificate) }
+
+ it 'starts a revoke worker if certificate is revoked' do
+ expect(X509CertificateRevokeWorker).to receive(:perform_async).with(x509_certificate.id)
+
+ x509_certificate.revoked!
+ end
+
+ it 'does not start a revoke worker for good certificates' do
+ expect(X509CertificateRevokeWorker).not_to receive(:perform_async).with(x509_certificate.id)
+
+ x509_certificate
+ end
+ end
end
describe 'validators' do
diff --git a/spec/policies/application_setting/term_policy_spec.rb b/spec/policies/application_setting/term_policy_spec.rb
index 21690d4b457..2b5b9758ec2 100644
--- a/spec/policies/application_setting/term_policy_spec.rb
+++ b/spec/policies/application_setting/term_policy_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe ApplicationSetting::TermPolicy do
include TermsHelper
- set(:term) { create(:term) }
+ let_it_be(:term) { create(:term) }
let(:user) { create(:user) }
subject(:policy) { described_class.new(user, term) }
diff --git a/spec/policies/ci/pipeline_schedule_policy_spec.rb b/spec/policies/ci/pipeline_schedule_policy_spec.rb
index 700d7d1af0a..d503401f7cf 100644
--- a/spec/policies/ci/pipeline_schedule_policy_spec.rb
+++ b/spec/policies/ci/pipeline_schedule_policy_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe Ci::PipelineSchedulePolicy, :models do
- set(:user) { create(:user) }
- set(:project) { create(:project, :repository) }
- set(:pipeline_schedule) { create(:ci_pipeline_schedule, :nightly, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:pipeline_schedule, reload: true) { create(:ci_pipeline_schedule, :nightly, project: project) }
let(:policy) do
described_class.new(user, pipeline_schedule)
diff --git a/spec/policies/note_policy_spec.rb b/spec/policies/note_policy_spec.rb
index 2619bb2fe3c..94e6a86025c 100644
--- a/spec/policies/note_policy_spec.rb
+++ b/spec/policies/note_policy_spec.rb
@@ -35,6 +35,18 @@ describe NotePolicy do
end
end
+ context 'when the noteable is a deleted commit' do
+ let(:commit) { nil }
+ let(:note) { create(:note_on_commit, commit_id: '12345678', author: user, project: project) }
+
+ it 'allows the note to be read' do
+ expect(policy).to be_allowed(:read_note)
+ expect(policy).to be_disallowed(:admin_note)
+ expect(policy).to be_disallowed(:resolve_note)
+ expect(policy).to be_disallowed(:award_emoji)
+ end
+ end
+
context 'when the noteable is a commit' do
let(:commit) { project.repository.head_commit }
let(:note) { create(:note_on_commit, commit_id: commit.id, author: user, project: project) }
@@ -238,6 +250,101 @@ describe NotePolicy do
end
end
end
+
+ context 'with confidential notes' do
+ def permissions(user, note)
+ described_class.new(user, note)
+ end
+
+ let(:reporter) { create(:user) }
+ let(:developer) { create(:user) }
+ let(:maintainer) { create(:user) }
+ let(:guest) { create(:user) }
+ let(:non_member) { create(:user) }
+ let(:author) { create(:user) }
+ let(:assignee) { create(:user) }
+
+ before do
+ project.add_reporter(reporter)
+ project.add_developer(developer)
+ project.add_maintainer(maintainer)
+ project.add_guest(guest)
+ end
+
+ shared_examples_for 'confidential notes permissions' do
+ it 'does not allow non-members to read confidential notes and replies' do
+ expect(permissions(non_member, confidential_note)).to be_disallowed(:read_note, :admin_note, :resolve_note, :award_emoji)
+ end
+
+ it 'does not allow guests to read confidential notes and replies' do
+ expect(permissions(guest, confidential_note)).to be_disallowed(:read_note, :admin_note, :resolve_note, :award_emoji)
+ end
+
+ it 'allows reporter to read all notes but not resolve and admin them' do
+ expect(permissions(reporter, confidential_note)).to be_allowed(:read_note, :award_emoji)
+ expect(permissions(reporter, confidential_note)).to be_disallowed(:admin_note, :resolve_note)
+ end
+
+ it 'allows developer to read and resolve all notes' do
+ expect(permissions(developer, confidential_note)).to be_allowed(:read_note, :award_emoji, :resolve_note)
+ expect(permissions(developer, confidential_note)).to be_disallowed(:admin_note)
+ end
+
+ it 'allows maintainers to read all notes and admin them' do
+ expect(permissions(maintainer, confidential_note)).to be_allowed(:read_note, :admin_note, :resolve_note, :award_emoji)
+ end
+
+ it 'allows noteable author to read and resolve all notes' do
+ expect(permissions(author, confidential_note)).to be_allowed(:read_note, :resolve_note, :award_emoji)
+ expect(permissions(author, confidential_note)).to be_disallowed(:admin_note)
+ end
+ end
+
+ context 'for issues' do
+ let(:issue) { create(:issue, project: project, author: author, assignees: [assignee]) }
+ let(:confidential_note) { create(:note, :confidential, project: project, noteable: issue) }
+
+ it_behaves_like 'confidential notes permissions'
+
+ it 'allows noteable assignees to read all notes' do
+ expect(permissions(assignee, confidential_note)).to be_allowed(:read_note, :award_emoji)
+ expect(permissions(assignee, confidential_note)).to be_disallowed(:admin_note, :resolve_note)
+ end
+ end
+
+ context 'for merge requests' do
+ let(:merge_request) { create(:merge_request, source_project: project, author: author, assignees: [assignee]) }
+ let(:confidential_note) { create(:note, :confidential, project: project, noteable: merge_request) }
+
+ it_behaves_like 'confidential notes permissions'
+
+ it 'allows noteable assignees to read all notes' do
+ expect(permissions(assignee, confidential_note)).to be_allowed(:read_note, :award_emoji)
+ expect(permissions(assignee, confidential_note)).to be_disallowed(:admin_note, :resolve_note)
+ end
+ end
+
+ context 'for project snippets' do
+ let(:project_snippet) { create(:project_snippet, project: project, author: author) }
+ let(:confidential_note) { create(:note, :confidential, project: project, noteable: project_snippet) }
+
+ it_behaves_like 'confidential notes permissions'
+ end
+
+ context 'for personal snippets' do
+ let(:personal_snippet) { create(:personal_snippet, author: author) }
+ let(:confidential_note) { create(:note, :confidential, project: nil, noteable: personal_snippet) }
+
+ it 'allows snippet author to read and resolve all notes' do
+ expect(permissions(author, confidential_note)).to be_allowed(:read_note, :resolve_note, :award_emoji)
+ expect(permissions(author, confidential_note)).to be_disallowed(:admin_note)
+ end
+
+ it 'does not allow maintainers to read confidential notes and replies' do
+ expect(permissions(maintainer, confidential_note)).to be_disallowed(:read_note, :admin_note, :resolve_note, :award_emoji)
+ end
+ end
+ end
end
end
end
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index 3b08726c75a..e7d49377b78 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -5,12 +5,12 @@ require 'spec_helper'
describe ProjectPolicy do
include ExternalAuthorizationServiceHelpers
include_context 'ProjectPolicy context'
- set(:guest) { create(:user) }
- set(:reporter) { create(:user) }
- set(:developer) { create(:user) }
- set(:maintainer) { create(:user) }
- set(:owner) { create(:user) }
- set(:admin) { create(:admin) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:maintainer) { create(:user) }
+ let_it_be(:owner) { create(:user) }
+ let_it_be(:admin) { create(:admin) }
let(:project) { create(:project, :public, namespace: owner.namespace) }
let(:base_guest_permissions) do
@@ -52,7 +52,7 @@ describe ProjectPolicy do
admin_snippet admin_project_member admin_note admin_wiki admin_project
admin_commit_status admin_build admin_container_image
admin_pipeline admin_environment admin_deployment destroy_release add_cluster
- daily_statistics
+ daily_statistics read_deploy_token create_deploy_token destroy_deploy_token
]
end
diff --git a/spec/policies/project_snippet_policy_spec.rb b/spec/policies/project_snippet_policy_spec.rb
index b55d565a57c..c5077e119bc 100644
--- a/spec/policies/project_snippet_policy_spec.rb
+++ b/spec/policies/project_snippet_policy_spec.rb
@@ -20,28 +20,39 @@ describe ProjectSnippetPolicy do
subject { described_class.new(current_user, snippet) }
shared_examples 'regular user access rights' do
- context 'project team member (non guest)' do
- before do
- project.add_developer(current_user)
- end
+ context 'not snippet author' do
+ context 'project team member (non guest)' do
+ before do
+ project.add_developer(current_user)
+ end
- it do
- expect_allowed(:read_snippet, :create_note)
- expect_disallowed(*author_permissions)
+ it do
+ expect_allowed(:read_snippet, :create_note)
+ expect_disallowed(*author_permissions)
+ end
end
- end
- context 'project team member (guest)' do
- before do
- project.add_guest(current_user)
- end
+ context 'project team member (guest)' do
+ before do
+ project.add_guest(current_user)
+ end
- context 'not snippet author' do
it do
expect_allowed(:read_snippet, :create_note)
expect_disallowed(:admin_snippet)
end
end
+
+ context 'project team member (maintainer)' do
+ before do
+ project.add_maintainer(current_user)
+ end
+
+ it do
+ expect_allowed(:read_snippet, :create_note)
+ expect_allowed(*author_permissions)
+ end
+ end
end
context 'snippet author' do
@@ -69,6 +80,17 @@ describe ProjectSnippetPolicy do
end
end
+ context 'project team member (maintainer)' do
+ before do
+ project.add_maintainer(current_user)
+ end
+
+ it do
+ expect_allowed(:read_snippet, :create_note)
+ expect_allowed(*author_permissions)
+ end
+ end
+
context 'not a project member' do
it do
expect_allowed(:read_snippet, :create_note)
diff --git a/spec/policies/resource_label_event_policy_spec.rb b/spec/policies/resource_label_event_policy_spec.rb
index 799534d2b08..4db2390c818 100644
--- a/spec/policies/resource_label_event_policy_spec.rb
+++ b/spec/policies/resource_label_event_policy_spec.rb
@@ -3,10 +3,10 @@
require 'spec_helper'
describe ResourceLabelEventPolicy do
- set(:user) { create(:user) }
- set(:project) { create(:project, :private) }
- set(:issue) { create(:issue, project: project) }
- set(:private_project) { create(:project, :private) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :private) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:private_project) { create(:project, :private) }
describe '#read_resource_label_event' do
context 'with non-member user' do
diff --git a/spec/presenters/issue_presenter_spec.rb b/spec/presenters/issue_presenter_spec.rb
index 1eb674d1f8f..4a4caef9d28 100644
--- a/spec/presenters/issue_presenter_spec.rb
+++ b/spec/presenters/issue_presenter_spec.rb
@@ -17,7 +17,7 @@ describe IssuePresenter do
describe '#web_url' do
it 'returns correct path' do
- expect(presenter.web_url).to eq("http://localhost/#{group.name}/#{project.name}/issues/#{issue.iid}")
+ expect(presenter.web_url).to eq("http://localhost/#{group.name}/#{project.name}/-/issues/#{issue.iid}")
end
end
@@ -37,7 +37,7 @@ describe IssuePresenter do
describe '#issue_path' do
it 'returns correct path' do
- expect(presenter.issue_path).to eq("/#{group.name}/#{project.name}/issues/#{issue.iid}")
+ expect(presenter.issue_path).to eq("/#{group.name}/#{project.name}/-/issues/#{issue.iid}")
end
end
end
diff --git a/spec/presenters/merge_request_presenter_spec.rb b/spec/presenters/merge_request_presenter_spec.rb
index 025f083ab27..f184e767f8c 100644
--- a/spec/presenters/merge_request_presenter_spec.rb
+++ b/spec/presenters/merge_request_presenter_spec.rb
@@ -128,11 +128,11 @@ describe MergeRequestPresenter do
subject { described_class.new(resource, current_user: user).closing_issues_links }
it 'presents closing issues links' do
- is_expected.to match("#{project.full_path}/issues/#{issue_a.iid}")
+ is_expected.to match("#{project.full_path}/-/issues/#{issue_a.iid}")
end
it 'does not present related issues links' do
- is_expected.not_to match("#{project.full_path}/issues/#{issue_b.iid}")
+ is_expected.not_to match("#{project.full_path}/-/issues/#{issue_b.iid}")
end
it 'appends status when closing issue is already closed' do
@@ -148,11 +148,11 @@ describe MergeRequestPresenter do
end
it 'presents related issues links' do
- is_expected.to match("#{project.full_path}/issues/#{issue_b.iid}")
+ is_expected.to match("#{project.full_path}/-/issues/#{issue_b.iid}")
end
it 'does not present closing issues links' do
- is_expected.not_to match("#{project.full_path}/issues/#{issue_a.iid}")
+ is_expected.not_to match("#{project.full_path}/-/issues/#{issue_a.iid}")
end
it 'appends status when mentioned issue is already closed' do
@@ -275,7 +275,7 @@ describe MergeRequestPresenter do
project.add_maintainer(user)
is_expected
- .to eq("/#{resource.project.full_path}/issues/new?merge_request_to_resolve_discussions_of=#{resource.iid}")
+ .to eq("/#{resource.project.full_path}/-/issues/new?merge_request_to_resolve_discussions_of=#{resource.iid}")
end
end
diff --git a/spec/presenters/hooks/project_hook_presenter_spec.rb b/spec/presenters/project_hook_presenter_spec.rb
index 773e8ccf51e..773e8ccf51e 100644
--- a/spec/presenters/hooks/project_hook_presenter_spec.rb
+++ b/spec/presenters/project_hook_presenter_spec.rb
diff --git a/spec/presenters/projects/import_export/project_export_presenter_spec.rb b/spec/presenters/projects/import_export/project_export_presenter_spec.rb
new file mode 100644
index 00000000000..052ca36974a
--- /dev/null
+++ b/spec/presenters/projects/import_export/project_export_presenter_spec.rb
@@ -0,0 +1,92 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::ImportExport::ProjectExportPresenter do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:user) { create(:user) }
+
+ subject { described_class.new(project, current_user: user) }
+
+ describe '#description' do
+ context "override_description not provided" do
+ it "keeps original description" do
+ expect(subject.description).to eq(project.description)
+ end
+ end
+
+ context "override_description provided" do
+ let(:description) { "overridden description" }
+
+ subject { described_class.new(project, current_user: user, override_description: description) }
+
+ it "overrides description" do
+ expect(subject.description).to eq(description)
+ end
+ end
+ end
+
+ describe '#as_json' do
+ context "override_description not provided" do
+ it "keeps original description" do
+ expect(subject.as_json["description"]).to eq(project.description)
+ end
+ end
+
+ context "override_description provided" do
+ let(:description) { "overridden description" }
+
+ subject { described_class.new(project, current_user: user, override_description: description) }
+
+ it "overrides description" do
+ expect(subject.as_json["description"]).to eq(description)
+ end
+ end
+ end
+
+ describe '#project_members' do
+ let(:user2) { create(:user, email: 'group@member.com') }
+ let(:member_emails) do
+ subject.project_members.map do |pm|
+ pm.user.email
+ end
+ end
+
+ before do
+ group.add_developer(user2)
+ end
+
+ it 'does not export group members if the user has no permission' do
+ group.add_developer(user)
+
+ expect(member_emails).not_to include('group@member.com')
+ end
+
+ it 'does not export group members as maintainer' do
+ group.add_maintainer(user)
+
+ expect(member_emails).not_to include('group@member.com')
+ end
+
+ it 'exports group members as group owner' do
+ group.add_owner(user)
+
+ expect(member_emails).to include('group@member.com')
+ end
+
+ context 'as admin' do
+ let(:user) { create(:admin) }
+
+ it 'exports group members as admin' do
+ expect(member_emails).to include('group@member.com')
+ end
+
+ it 'exports group members as project members' do
+ member_types = subject.project_members.map { |pm| pm.source_type }
+
+ expect(member_types).to all(eq('Project'))
+ end
+ end
+ end
+end
diff --git a/spec/presenters/projects/prometheus/alert_presenter_spec.rb b/spec/presenters/projects/prometheus/alert_presenter_spec.rb
index fc6ddcbfe02..85c73aa3533 100644
--- a/spec/presenters/projects/prometheus/alert_presenter_spec.rb
+++ b/spec/presenters/projects/prometheus/alert_presenter_spec.rb
@@ -3,12 +3,21 @@
require 'spec_helper'
describe Projects::Prometheus::AlertPresenter do
- let_it_be(:project) { create(:project) }
+ let_it_be(:project, reload: true) { create(:project) }
let(:presenter) { described_class.new(alert) }
let(:payload) { {} }
let(:alert) { create(:alerting_alert, project: project, payload: payload) }
+ shared_context 'gitlab alert' do
+ let(:gitlab_alert) { create(:prometheus_alert, project: project) }
+ let(:metric_id) { gitlab_alert.prometheus_metric_id }
+
+ let(:alert) do
+ create(:alerting_alert, project: project, metric_id: metric_id)
+ end
+ end
+
describe '#project_full_path' do
subject { presenter.project_full_path }
@@ -145,13 +154,34 @@ describe Projects::Prometheus::AlertPresenter do
end
end
- context 'with gitlab alert' do
- let(:gitlab_alert) { create(:prometheus_alert, project: project) }
- let(:metric_id) { gitlab_alert.prometheus_metric_id }
+ describe '#show_performance_dashboard_link?' do
+ subject { presenter.show_performance_dashboard_link? }
- let(:alert) do
- create(:alerting_alert, project: project, metric_id: metric_id)
+ it { is_expected.to be_falsey }
+
+ context 'with gitlab alert' do
+ include_context 'gitlab alert'
+
+ it { is_expected.to eq(true) }
+ end
+ end
+
+ describe '#show_incident_issues_link?' do
+ subject { presenter.show_incident_issues_link? }
+
+ it { is_expected.to be_falsey }
+
+ context 'create issue setting enabled' do
+ before do
+ create(:project_incident_management_setting, project: project, create_issue: true)
+ end
+
+ it { is_expected.to eq(true) }
end
+ end
+
+ context 'with gitlab alert' do
+ include_context 'gitlab alert'
describe '#full_title' do
let(:query_title) do
@@ -189,6 +219,17 @@ describe Projects::Prometheus::AlertPresenter do
it { is_expected.to eq(expected_link) }
end
+
+ describe '#incident_issues_link' do
+ let(:expected_link) do
+ Gitlab::Routing.url_helpers
+ .project_issues_url(project, label_name: described_class::INCIDENT_LABEL_NAME)
+ end
+
+ subject { presenter.incident_issues_link }
+
+ it { is_expected.to eq(expected_link) }
+ end
end
context 'without gitlab alert' do
diff --git a/spec/presenters/hooks/service_hook_presenter_spec.rb b/spec/presenters/service_hook_presenter_spec.rb
index bea57768e3e..bea57768e3e 100644
--- a/spec/presenters/hooks/service_hook_presenter_spec.rb
+++ b/spec/presenters/service_hook_presenter_spec.rb
diff --git a/spec/presenters/snippet_blob_presenter_spec.rb b/spec/presenters/snippet_blob_presenter_spec.rb
index fa10d1a7f30..eb7621cc591 100644
--- a/spec/presenters/snippet_blob_presenter_spec.rb
+++ b/spec/presenters/snippet_blob_presenter_spec.rb
@@ -4,36 +4,73 @@ require 'spec_helper'
describe SnippetBlobPresenter do
describe '#rich_data' do
- let(:snippet) { build(:personal_snippet) }
+ before do
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:current_user).and_return(nil)
+ end
+ end
subject { described_class.new(snippet.blob).rich_data }
- it 'returns nil when the snippet blob is binary' do
- allow(snippet.blob).to receive(:binary?).and_return(true)
+ context 'with PersonalSnippet' do
+ let(:raw_url) { "http://127.0.0.1:3000/snippets/#{snippet.id}/raw" }
+ let(:snippet) { build(:personal_snippet) }
- expect(subject).to be_nil
- end
+ it 'returns nil when the snippet blob is binary' do
+ allow(snippet.blob).to receive(:binary?).and_return(true)
- it 'returns markdown content when snippet file is markup' do
- snippet.file_name = 'test.md'
- snippet.content = '*foo*'
+ expect(subject).to be_nil
+ end
- expect(subject).to eq '<p data-sourcepos="1:1-1:5" dir="auto"><em>foo</em></p>'
- end
+ context 'with markdown format' do
+ let(:snippet) { create(:personal_snippet, file_name: 'test.md', content: '*foo*') }
- it 'returns syntax highlighted content' do
- snippet.file_name = 'test.rb'
- snippet.content = 'class Foo;end'
+ it 'returns rich markdown content' do
+ expected = <<~HTML
+ <div class="file-content md">
+ <p data-sourcepos="1:1-1:5" dir="auto"><em>foo</em></p>
+ </div>
+ HTML
- expect(subject)
- .to eq '<span id="LC1" class="line" lang="ruby"><span class="k">class</span> <span class="nc">Foo</span><span class="p">;</span><span class="k">end</span></span>'
- end
+ expect(subject).to eq(expected)
+ end
+ end
- it 'returns plain text highlighted content' do
- snippet.file_name = 'test'
- snippet.content = 'foo'
+ context 'with notebook format' do
+ let(:snippet) { create(:personal_snippet, file_name: 'test.ipynb') }
- expect(subject).to eq '<span id="LC1" class="line" lang="plaintext">foo</span>'
+ it 'returns rich notebook content' do
+ expect(subject.strip).to eq %Q(<div class="file-content" data-endpoint="/snippets/#{snippet.id}/raw" id="js-notebook-viewer"></div>)
+ end
+ end
+
+ context 'with openapi format' do
+ let(:snippet) { create(:personal_snippet, file_name: 'openapi.yml') }
+
+ it 'returns rich openapi content' do
+ expect(subject).to eq %Q(<div class="file-content" data-endpoint="/snippets/#{snippet.id}/raw" id="js-openapi-viewer"></div>\n)
+ end
+ end
+
+ context 'with svg format' do
+ let(:snippet) { create(:personal_snippet, file_name: 'test.svg') }
+
+ it 'returns rich svg content' do
+ result = Nokogiri::HTML::DocumentFragment.parse(subject)
+ image_tag = result.search('img').first
+
+ expect(image_tag.attr('src')).to include("data:#{snippet.blob.mime_type};base64")
+ expect(image_tag.attr('alt')).to eq('test.svg')
+ end
+ end
+
+ context 'with other format' do
+ let(:snippet) { create(:personal_snippet, file_name: 'test') }
+
+ it 'does not return rich content' do
+ expect(subject).to be_nil
+ end
+ end
end
end
@@ -55,19 +92,19 @@ describe SnippetBlobPresenter do
expect(subject).to eq '<span id="LC1" class="line" lang="markdown"><span class="ge">*foo*</span></span>'
end
- it 'returns plain syntax content' do
+ it 'returns highlighted syntax content' do
snippet.file_name = 'test.rb'
snippet.content = 'class Foo;end'
expect(subject)
- .to eq '<span id="LC1" class="line" lang="">class Foo;end</span>'
+ .to eq '<span id="LC1" class="line" lang="ruby"><span class="k">class</span> <span class="nc">Foo</span><span class="p">;</span><span class="k">end</span></span>'
end
it 'returns plain text highlighted content' do
snippet.file_name = 'test'
snippet.content = 'foo'
- expect(subject).to eq '<span id="LC1" class="line" lang="">foo</span>'
+ expect(subject).to eq '<span id="LC1" class="line" lang="plaintext">foo</span>'
end
end
@@ -76,18 +113,18 @@ describe SnippetBlobPresenter do
context 'with ProjectSnippet' do
let!(:project) { create(:project) }
- let(:snippet) { build(:project_snippet, project: project, id: 1) }
+ let(:snippet) { create(:project_snippet, project: project) }
it 'returns the raw path' do
- expect(subject).to eq "/#{snippet.project.full_path}/snippets/1/raw"
+ expect(subject).to eq "/#{snippet.project.full_path}/snippets/#{snippet.id}/raw"
end
end
context 'with PersonalSnippet' do
- let(:snippet) { build(:personal_snippet, id: 1) }
+ let(:snippet) { create(:personal_snippet) }
it 'returns the raw path' do
- expect(subject).to eq "/snippets/1/raw"
+ expect(subject).to eq "/snippets/#{snippet.id}/raw"
end
end
end
diff --git a/spec/presenters/snippet_presenter_spec.rb b/spec/presenters/snippet_presenter_spec.rb
index e2117905559..591d86652b6 100644
--- a/spec/presenters/snippet_presenter_spec.rb
+++ b/spec/presenters/snippet_presenter_spec.rb
@@ -143,4 +143,24 @@ describe SnippetPresenter do
expect(subject).to be_truthy
end
end
+
+ describe '#blob' do
+ let(:snippet) { personal_snippet }
+
+ subject { presenter.blob }
+
+ context 'when snippet does not have a repository' do
+ it 'returns SnippetBlob' do
+ expect(subject).to eq snippet.blob
+ end
+ end
+
+ context 'when snippet has a repository' do
+ let(:snippet) { create(:snippet, :repository, author: user) }
+
+ it 'returns repository first blob' do
+ expect(subject).to eq snippet.blobs.first
+ end
+ end
+ end
end
diff --git a/spec/requests/api/access_requests_spec.rb b/spec/requests/api/access_requests_spec.rb
index 17f4cde5b8c..52bc81cff18 100644
--- a/spec/requests/api/access_requests_spec.rb
+++ b/spec/requests/api/access_requests_spec.rb
@@ -37,7 +37,7 @@ describe API::AccessRequests do
user = public_send(type)
get api("/#{source_type.pluralize}/#{source.id}/access_requests", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -47,7 +47,7 @@ describe API::AccessRequests do
it 'returns access requesters' do
get api("/#{source_type.pluralize}/#{source.id}/access_requests", maintainer)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(1)
@@ -70,7 +70,7 @@ describe API::AccessRequests do
user = public_send(type)
post api("/#{source_type.pluralize}/#{source.id}/access_requests", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end.not_to change { source.requesters.count }
end
end
@@ -82,7 +82,7 @@ describe API::AccessRequests do
expect do
post api("/#{source_type.pluralize}/#{source.id}/access_requests", access_requester)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end.not_to change { source.requesters.count }
end
end
@@ -97,7 +97,7 @@ describe API::AccessRequests do
expect do
post api("/#{source_type.pluralize}/#{source.id}/access_requests", stranger)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end.not_to change { source.requesters.count }
end
end
@@ -106,7 +106,7 @@ describe API::AccessRequests do
expect do
post api("/#{source_type.pluralize}/#{source.id}/access_requests", stranger)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end.to change { source.requesters.count }.by(1)
# User attributes
@@ -137,7 +137,7 @@ describe API::AccessRequests do
user = public_send(type)
put api("/#{source_type.pluralize}/#{source.id}/access_requests/#{access_requester.id}/approve", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -149,7 +149,7 @@ describe API::AccessRequests do
put api("/#{source_type.pluralize}/#{source.id}/access_requests/#{access_requester.id}/approve", maintainer),
params: { access_level: Member::MAINTAINER }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end.to change { source.members.count }.by(1)
# User attributes
expect(json_response['id']).to eq(access_requester.id)
@@ -168,7 +168,7 @@ describe API::AccessRequests do
expect do
put api("/#{source_type.pluralize}/#{source.id}/access_requests/#{stranger.id}/approve", maintainer)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end.not_to change { source.members.count }
end
end
@@ -189,7 +189,7 @@ describe API::AccessRequests do
user = public_send(type)
delete api("/#{source_type.pluralize}/#{source.id}/access_requests/#{access_requester.id}", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -200,7 +200,7 @@ describe API::AccessRequests do
expect do
delete api("/#{source_type.pluralize}/#{source.id}/access_requests/#{access_requester.id}", access_requester)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.to change { source.requesters.count }.by(-1)
end
end
@@ -210,7 +210,7 @@ describe API::AccessRequests do
expect do
delete api("/#{source_type.pluralize}/#{source.id}/access_requests/#{access_requester.id}", maintainer)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.to change { source.requesters.count }.by(-1)
end
@@ -219,7 +219,7 @@ describe API::AccessRequests do
expect do
delete api("/#{source_type.pluralize}/#{source.id}/access_requests/#{developer.id}", maintainer)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end.not_to change { source.requesters.count }
end
end
@@ -229,7 +229,7 @@ describe API::AccessRequests do
expect do
delete api("/#{source_type.pluralize}/#{source.id}/access_requests/#{stranger.id}", maintainer)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end.not_to change { source.requesters.count }
end
end
diff --git a/spec/requests/api/admin/sidekiq_spec.rb b/spec/requests/api/admin/sidekiq_spec.rb
new file mode 100644
index 00000000000..303b62f4436
--- /dev/null
+++ b/spec/requests/api/admin/sidekiq_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe API::Admin::Sidekiq, :clean_gitlab_redis_queues do
+ let_it_be(:admin) { create(:admin) }
+
+ describe 'DELETE /admin/sidekiq/queues/:queue_name' do
+ context 'when the user is not an admin' do
+ it 'returns a 403' do
+ delete api("/admin/sidekiq/queues/authorized_projects?user=#{admin.username}", create(:user))
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when the user is an admin' do
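+ # Run against real Sidekiq queues (Sidekiq::Testing.disable!) so jobs land in Redis, clearing the queue before and after each example.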
+ around do |example|
+ Sidekiq::Queue.new('authorized_projects').clear
+ Sidekiq::Testing.disable!(&example)
+ Sidekiq::Queue.new('authorized_projects').clear
+ end
+
+ def add_job(user, args)
+ Sidekiq::Client.push(
+ 'class' => 'AuthorizedProjectsWorker',
+ 'queue' => 'authorized_projects',
+ 'args' => args,
+ 'meta.user' => user.username
+ )
+ end
+
+ context 'valid request' do
+ it 'returns info about the deleted jobs' do
+ add_job(admin, [1])
+ add_job(admin, [2])
+ add_job(create(:user), [3])
+
+ delete api("/admin/sidekiq/queues/authorized_projects?user=#{admin.username}", admin)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq('completed' => true,
+ 'deleted_jobs' => 2,
+ 'queue_size' => 1)
+ end
+ end
+
+ context 'when no required params are provided' do
+ it 'returns a 400' do
+ delete api("/admin/sidekiq/queues/authorized_projects?user_2=#{admin.username}", admin)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context 'when the queue does not exist' do
+ it 'returns a 404' do
+ delete api("/admin/sidekiq/queues/authorized_projects_2?user=#{admin.username}", admin)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/api_guard/admin_mode_middleware_spec.rb b/spec/requests/api/api_guard/admin_mode_middleware_spec.rb
new file mode 100644
index 00000000000..8973afe6570
--- /dev/null
+++ b/spec/requests/api/api_guard/admin_mode_middleware_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe API::APIGuard::AdminModeMiddleware, :do_not_mock_admin_mode, :request_store do
+ let(:user) { create(:admin) }
+
+ it 'is loaded' do
+ expect(API::API.middleware).to include([:use, described_class])
+ end
+
+ context 'when there is an exception in the api call' do
+ let(:app) do
+ Class.new(API::API) do
+ get 'willfail' do
+ raise StandardError.new('oh noes!')
+ end
+ end
+ end
+
+ it 'resets admin mode' do
+ Gitlab::Auth::CurrentUserMode.bypass_session!(user.id)
+
+ expect(Gitlab::Auth::CurrentUserMode.bypass_session_admin_id).to be(user.id)
+ expect(Gitlab::Auth::CurrentUserMode).to receive(:reset_bypass_session!).and_call_original
+
+ get api('/willfail')
+
+ expect(response.status).to eq(500)
+ expect(response.body).to include('oh noes!')
+
+ expect(Gitlab::Auth::CurrentUserMode.bypass_session_admin_id).to be_nil
+ end
+ end
+end
diff --git a/spec/requests/api/appearance_spec.rb b/spec/requests/api/appearance_spec.rb
index 40fd216f32d..70be3adf723 100644
--- a/spec/requests/api/appearance_spec.rb
+++ b/spec/requests/api/appearance_spec.rb
@@ -11,7 +11,7 @@ describe API::Appearance, 'Appearance' do
it "returns 403" do
get api("/application/appearance", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -19,7 +19,7 @@ describe API::Appearance, 'Appearance' do
it "returns appearance" do
get api("/application/appearance", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Hash
expect(json_response['description']).to eq('')
expect(json_response['email_header_and_footer_enabled']).to be(false)
@@ -41,7 +41,7 @@ describe API::Appearance, 'Appearance' do
it "returns 403" do
put api("/application/appearance", user), params: { title: "Test" }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -54,7 +54,7 @@ describe API::Appearance, 'Appearance' do
new_project_guidelines: "Please read the FAQs for help."
}
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Hash
expect(json_response['description']).to eq('gitlab-test.example.com')
expect(json_response['email_header_and_footer_enabled']).to be(false)
@@ -82,7 +82,7 @@ describe API::Appearance, 'Appearance' do
put api("/application/appearance", admin), params: settings
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
settings.each do |attribute, value|
expect(Appearance.current.public_send(attribute)).to eq(value)
end
@@ -92,14 +92,14 @@ describe API::Appearance, 'Appearance' do
it "with message_font_color" do
put api("/application/appearance", admin), params: { message_font_color: "No Color" }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['message_font_color']).to contain_exactly('must be a valid color code')
end
it "with message_background_color" do
put api("/application/appearance", admin), params: { message_background_color: "#1" }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['message_background_color']).to contain_exactly('must be a valid color code')
end
end
@@ -115,7 +115,7 @@ describe API::Appearance, 'Appearance' do
favicon: fixture_file_upload("spec/fixtures/dk.png", "image/png")
}
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['logo']).to eq("/uploads/-/system/appearance/logo/#{appearance.id}/dk.png")
expect(json_response['header_logo']).to eq("/uploads/-/system/appearance/header_logo/#{appearance.id}/dk.png")
expect(json_response['favicon']).to eq("/uploads/-/system/appearance/favicon/#{appearance.id}/dk.png")
@@ -125,14 +125,14 @@ describe API::Appearance, 'Appearance' do
it "with string instead of file" do
put api("/application/appearance", admin), params: { logo: 'not-a-file.png' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq("logo is invalid")
end
it "with .svg file instead of .png" do
put api("/application/appearance", admin), params: { favicon: fixture_file_upload("spec/fixtures/logo_sample.svg", "image/svg") }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['favicon']).to contain_exactly("You are not allowed to upload \"svg\" files, allowed types: png, ico")
end
end
diff --git a/spec/requests/api/applications_spec.rb b/spec/requests/api/applications_spec.rb
index d110751e661..cd341ad134e 100644
--- a/spec/requests/api/applications_spec.rb
+++ b/spec/requests/api/applications_spec.rb
@@ -16,7 +16,7 @@ describe API::Applications, :api do
application = Doorkeeper::Application.find_by(name: 'application_name', redirect_uri: 'http://application.url')
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response).to be_a Hash
expect(json_response['application_id']).to eq application.uid
expect(json_response['secret']).to eq application.secret
@@ -29,7 +29,7 @@ describe API::Applications, :api do
post api('/applications', admin_user), params: { name: 'application_name', redirect_uri: 'http://', scopes: '' }
end.not_to change { Doorkeeper::Application.count }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response).to be_a Hash
expect(json_response['message']['redirect_uri'][0]).to eq('must be an absolute URI.')
end
@@ -39,7 +39,7 @@ describe API::Applications, :api do
post api('/applications', admin_user), params: { name: 'application_name', redirect_uri: 'javascript://alert()', scopes: '' }
end.not_to change { Doorkeeper::Application.count }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response).to be_a Hash
expect(json_response['message']['redirect_uri'][0]).to eq('is forbidden by the server.')
end
@@ -49,7 +49,7 @@ describe API::Applications, :api do
post api('/applications', admin_user), params: { redirect_uri: 'http://application.url', scopes: '' }
end.not_to change { Doorkeeper::Application.count }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response).to be_a Hash
expect(json_response['error']).to eq('name is missing')
end
@@ -59,7 +59,7 @@ describe API::Applications, :api do
post api('/applications', admin_user), params: { name: 'application_name', scopes: '' }
end.not_to change { Doorkeeper::Application.count }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response).to be_a Hash
expect(json_response['error']).to eq('redirect_uri is missing')
end
@@ -69,7 +69,7 @@ describe API::Applications, :api do
post api('/applications', admin_user), params: { name: 'application_name', redirect_uri: 'http://application.url' }
end.not_to change { Doorkeeper::Application.count }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response).to be_a Hash
expect(json_response['error']).to eq('scopes is missing')
end
@@ -79,7 +79,7 @@ describe API::Applications, :api do
post api('/applications', admin_user), params: { name: 'application_name', redirect_uri: 'http://application.url', scopes: '', confidential: nil }
end.not_to change { Doorkeeper::Application.count }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response).to be_a Hash
expect(json_response['message']['confidential'].first).to eq('is not included in the list')
end
@@ -91,7 +91,7 @@ describe API::Applications, :api do
post api('/applications', user), params: { name: 'application_name', redirect_uri: 'http://application.url', scopes: '' }
end.not_to change { Doorkeeper::Application.count }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -101,7 +101,7 @@ describe API::Applications, :api do
post api('/applications'), params: { name: 'application_name', redirect_uri: 'http://application.url' }
end.not_to change { Doorkeeper::Application.count }
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -111,7 +111,7 @@ describe API::Applications, :api do
it 'can list application' do
get api('/applications', admin_user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_a(Array)
end
end
@@ -120,7 +120,7 @@ describe API::Applications, :api do
it 'cannot list application' do
get api('/applications', user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -128,7 +128,7 @@ describe API::Applications, :api do
it 'cannot list application' do
get api('/applications')
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -140,7 +140,7 @@ describe API::Applications, :api do
delete api("/applications/#{application.id}", admin_user)
end.to change { Doorkeeper::Application.count }.by(-1)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
end
@@ -148,7 +148,7 @@ describe API::Applications, :api do
it 'cannot delete an application' do
delete api("/applications/#{application.id}", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -156,7 +156,7 @@ describe API::Applications, :api do
it 'cannot delete an application' do
delete api("/applications/#{application.id}")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
diff --git a/spec/requests/api/award_emoji_spec.rb b/spec/requests/api/award_emoji_spec.rb
index 4a830f2b449..fea1e3ac836 100644
--- a/spec/requests/api/award_emoji_spec.rb
+++ b/spec/requests/api/award_emoji_spec.rb
@@ -20,7 +20,7 @@ describe API::AwardEmoji do
it "returns an array of award_emoji" do
get api("/projects/#{project.id}/issues/#{issue.iid}/award_emoji", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.first['name']).to eq(award_emoji.name)
end
@@ -28,7 +28,7 @@ describe API::AwardEmoji do
it "returns a 404 error when issue id not found" do
get api("/projects/#{project.id}/issues/12345/award_emoji", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -36,7 +36,7 @@ describe API::AwardEmoji do
it "returns an array of award_emoji" do
get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/award_emoji", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first['name']).to eq(downvote.name)
@@ -50,7 +50,7 @@ describe API::AwardEmoji do
it 'returns the awarded emoji' do
get api("/projects/#{project.id}/snippets/#{snippet.id}/award_emoji", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.first['name']).to eq(award.name)
end
@@ -62,7 +62,7 @@ describe API::AwardEmoji do
get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/award_emoji", user1)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -73,7 +73,7 @@ describe API::AwardEmoji do
it 'returns an array of award emoji' do
get api("/projects/#{project.id}/issues/#{issue.iid}/notes/#{note.id}/award_emoji", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.first['name']).to eq(rocket.name)
end
@@ -84,7 +84,7 @@ describe API::AwardEmoji do
it "returns the award emoji" do
get api("/projects/#{project.id}/issues/#{issue.iid}/award_emoji/#{award_emoji.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq(award_emoji.name)
expect(json_response['awardable_id']).to eq(issue.id)
expect(json_response['awardable_type']).to eq("Issue")
@@ -93,7 +93,7 @@ describe API::AwardEmoji do
it "returns a 404 error if the award is not found" do
get api("/projects/#{project.id}/issues/#{issue.iid}/award_emoji/12345", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -101,7 +101,7 @@ describe API::AwardEmoji do
it 'returns the award emoji' do
get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/award_emoji/#{downvote.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq(downvote.name)
expect(json_response['awardable_id']).to eq(merge_request.id)
expect(json_response['awardable_type']).to eq("MergeRequest")
@@ -115,7 +115,7 @@ describe API::AwardEmoji do
it 'returns the awarded emoji' do
get api("/projects/#{project.id}/snippets/#{snippet.id}/award_emoji/#{award.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq(award.name)
expect(json_response['awardable_id']).to eq(snippet.id)
expect(json_response['awardable_type']).to eq("Snippet")
@@ -128,7 +128,7 @@ describe API::AwardEmoji do
get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/award_emoji/#{downvote.id}", user1)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -139,7 +139,7 @@ describe API::AwardEmoji do
it 'returns an award emoji' do
get api("/projects/#{project.id}/issues/#{issue.iid}/notes/#{note.id}/award_emoji/#{rocket.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).not_to be_an Array
expect(json_response['name']).to eq(rocket.name)
end
@@ -152,7 +152,7 @@ describe API::AwardEmoji do
it "creates a new award emoji" do
post api("/projects/#{project.id}/issues/#{issue.iid}/award_emoji", user), params: { name: 'blowfish' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq('blowfish')
expect(json_response['user']['username']).to eq(user.username)
end
@@ -168,13 +168,13 @@ describe API::AwardEmoji do
it "returns a 400 bad request error if the name is not given" do
post api("/projects/#{project.id}/issues/#{issue.iid}/award_emoji", user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it "returns a 401 unauthorized error if the user is not authenticated" do
post api("/projects/#{project.id}/issues/#{issue.iid}/award_emoji"), params: { name: 'thumbsup' }
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it "normalizes +1 as thumbsup award" do
@@ -188,7 +188,7 @@ describe API::AwardEmoji do
post api("/projects/#{project.id}/issues/#{issue.iid}/award_emoji", user), params: { name: 'thumbsup' }
post api("/projects/#{project.id}/issues/#{issue.iid}/award_emoji", user), params: { name: 'thumbsup' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response["message"]).to match("has already been taken")
end
end
@@ -200,7 +200,7 @@ describe API::AwardEmoji do
post api("/projects/#{project.id}/snippets/#{snippet.id}/award_emoji", user), params: { name: 'blowfish' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq('blowfish')
expect(json_response['user']['username']).to eq(user.username)
end
@@ -215,7 +215,7 @@ describe API::AwardEmoji do
post api("/projects/#{project.id}/issues/#{issue.iid}/notes/#{note.id}/award_emoji", user), params: { name: 'rocket' }
end.to change { note.award_emoji.count }.from(0).to(1)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['user']['username']).to eq(user.username)
end
@@ -238,7 +238,7 @@ describe API::AwardEmoji do
post api("/projects/#{project.id}/issues/#{issue.iid}/notes/#{note.id}/award_emoji", user), params: { name: 'rocket' }
post api("/projects/#{project.id}/issues/#{issue.iid}/notes/#{note.id}/award_emoji", user), params: { name: 'rocket' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response["message"]).to match("has already been taken")
end
end
@@ -250,14 +250,14 @@ describe API::AwardEmoji do
expect do
delete api("/projects/#{project.id}/issues/#{issue.iid}/award_emoji/#{award_emoji.id}", user)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.to change { issue.award_emoji.count }.from(1).to(0)
end
it 'returns a 404 error when the award emoji can not be found' do
delete api("/projects/#{project.id}/issues/#{issue.iid}/award_emoji/12345", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it_behaves_like '412 response' do
@@ -270,14 +270,14 @@ describe API::AwardEmoji do
expect do
delete api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/award_emoji/#{downvote.id}", user)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.to change { merge_request.award_emoji.count }.from(1).to(0)
end
it 'returns a 404 error when note id not found' do
delete api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/notes/12345", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it_behaves_like '412 response' do
@@ -293,7 +293,7 @@ describe API::AwardEmoji do
expect do
delete api("/projects/#{project.id}/snippets/#{snippet.id}/award_emoji/#{award.id}", user)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.to change { snippet.award_emoji.count }.from(1).to(0)
end
@@ -310,7 +310,7 @@ describe API::AwardEmoji do
expect do
delete api("/projects/#{project.id}/issues/#{issue.iid}/notes/#{note.id}/award_emoji/#{rocket.id}", user)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.to change { note.award_emoji.count }.from(1).to(0)
end
diff --git a/spec/requests/api/badges_spec.rb b/spec/requests/api/badges_spec.rb
index d931dea01e7..d7f9b7d010b 100644
--- a/spec/requests/api/badges_spec.rb
+++ b/spec/requests/api/badges_spec.rb
@@ -35,7 +35,7 @@ describe API::Badges do
get api("/#{source_type.pluralize}/#{source.id}/badges", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(badges_count)
@@ -80,7 +80,7 @@ describe API::Badges do
get api("/#{source_type.pluralize}/#{source.id}/badges/#{badge.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq(badge.name)
expect(json_response['id']).to eq(badge.id)
expect(json_response['link_url']).to eq(badge.link_url)
@@ -120,7 +120,7 @@ describe API::Badges do
post api("/#{source_type.pluralize}/#{source.id}/badges", user),
params: { link_url: example_url, image_url: example_url2 }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -132,7 +132,7 @@ describe API::Badges do
post api("/#{source_type.pluralize}/#{source.id}/badges", maintainer),
params: { name: example_name, link_url: example_url, image_url: example_url2 }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end.to change { source.badges.count }.by(1)
expect(json_response['name']).to eq(example_name)
@@ -146,21 +146,21 @@ describe API::Badges do
post api("/#{source_type.pluralize}/#{source.id}/badges", maintainer),
params: { link_url: example_url }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns 400 when image_url is not given' do
post api("/#{source_type.pluralize}/#{source.id}/badges", maintainer),
params: { image_url: example_url2 }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns 400 when link_url or image_url is not valid' do
post api("/#{source_type.pluralize}/#{source.id}/badges", maintainer),
params: { link_url: 'whatever', image_url: 'whatever' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
@@ -192,7 +192,7 @@ describe API::Badges do
put api("/#{source_type.pluralize}/#{source.id}/badges/#{badge.id}", user),
params: { link_url: example_url }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -203,7 +203,7 @@ describe API::Badges do
put api("/#{source_type.pluralize}/#{source.id}/badges/#{badge.id}", maintainer),
params: { name: example_name, link_url: example_url, image_url: example_url2 }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq(example_name)
expect(json_response['link_url']).to eq(example_url)
expect(json_response['image_url']).to eq(example_url2)
@@ -215,7 +215,7 @@ describe API::Badges do
put api("/#{source_type.pluralize}/#{source.id}/badges/#{badge.id}", maintainer),
params: { link_url: 'whatever', image_url: 'whatever' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
@@ -240,7 +240,7 @@ describe API::Badges do
delete api("/#{source_type.pluralize}/#{source.id}/badges/#{badge.id}", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -251,7 +251,7 @@ describe API::Badges do
expect do
delete api("/#{source_type.pluralize}/#{source.id}/badges/#{badge.id}", maintainer)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.to change { source.badges.count }.by(-1)
end
@@ -263,7 +263,7 @@ describe API::Badges do
it 'returns 404 if badge does not exist' do
delete api("/#{source_type.pluralize}/#{source.id}/badges/123", maintainer)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -290,7 +290,7 @@ describe API::Badges do
get api("/#{source_type.pluralize}/#{source.id}/badges/render?link_url=#{example_url}&image_url=#{example_url2}", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -300,7 +300,7 @@ describe API::Badges do
it 'gets the rendered badge values' do
get api("/#{source_type.pluralize}/#{source.id}/badges/render?link_url=#{example_url}&image_url=#{example_url2}", maintainer)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.keys).to contain_exactly('name', 'link_url', 'rendered_link_url', 'image_url', 'rendered_image_url')
expect(json_response['link_url']).to eq(example_url)
@@ -313,19 +313,19 @@ describe API::Badges do
it 'returns 400 when link_url is not given' do
get api("/#{source_type.pluralize}/#{source.id}/badges/render?link_url=#{example_url}", maintainer)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns 400 when image_url is not given' do
get api("/#{source_type.pluralize}/#{source.id}/badges/render?image_url=#{example_url}", maintainer)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns 400 when link_url or image_url is not valid' do
get api("/#{source_type.pluralize}/#{source.id}/badges/render?link_url=whatever&image_url=whatever", maintainer)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
@@ -335,7 +335,7 @@ describe API::Badges do
it 'cannot delete badges owned by the project group' do
delete api("/projects/#{project.id}/badges/#{project_group.badges.first.id}", maintainer)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
diff --git a/spec/requests/api/boards_spec.rb b/spec/requests/api/boards_spec.rb
index f53bfedb49a..d761b371821 100644
--- a/spec/requests/api/boards_spec.rb
+++ b/spec/requests/api/boards_spec.rb
@@ -45,7 +45,7 @@ describe API::Boards do
post api(url, user), params: { label_id: group_label.id }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['label']['name']).to eq(group_label.title)
expect(json_response['position']).to eq(3)
end
@@ -60,7 +60,7 @@ describe API::Boards do
post api(url, user), params: { label_id: group_label.id }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['label']['name']).to eq(group_label.title)
end
end
@@ -78,7 +78,7 @@ describe API::Boards do
post api(url, user), params: { label_id: group_label.id }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['label']['name']).to eq(group_label.title)
end
end
diff --git a/spec/requests/api/branches_spec.rb b/spec/requests/api/branches_spec.rb
index 046ec40f218..97f880dd3cd 100644
--- a/spec/requests/api/branches_spec.rb
+++ b/spec/requests/api/branches_spec.rb
@@ -31,7 +31,7 @@ describe API::Branches do
it 'returns the repository branches' do
get api(route, current_user), params: { per_page: 100 }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/branches')
expect(response).to include_pagination_headers
branch_names = json_response.map { |x| x['name'] }
@@ -51,7 +51,7 @@ describe API::Branches do
get api(route, current_user), params: { per_page: 2 }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
check_merge_status(json_response)
end
@@ -59,7 +59,7 @@ describe API::Branches do
it 'merge status matches reality on paginated input' do
get api(route, current_user), params: { per_page: 20, page: 2 }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
check_merge_status(json_response)
end
@@ -155,14 +155,14 @@ describe API::Branches do
it 'returns 204 No Content' do
head api(route, user)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
expect(response.body).to be_empty
end
it 'returns 404 Not Found' do
head api("/projects/#{project_id}/repository/branches/unknown", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(response.body).to be_empty
end
end
@@ -170,7 +170,7 @@ describe API::Branches do
it 'returns the repository branch' do
get api(route, current_user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/branch')
expect(json_response['name']).to eq(CGI.unescape(branch_name))
end
@@ -298,7 +298,7 @@ describe API::Branches do
it 'protects a single branch' do
put api(route, current_user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/branch')
expect(json_response['name']).to eq(CGI.unescape(branch_name))
expect(json_response['protected']).to eq(true)
@@ -307,7 +307,7 @@ describe API::Branches do
it 'protects a single branch and developers can push' do
put api(route, current_user), params: { developers_can_push: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/branch')
expect(json_response['name']).to eq(CGI.unescape(branch_name))
expect(json_response['protected']).to eq(true)
@@ -318,7 +318,7 @@ describe API::Branches do
it 'protects a single branch and developers can merge' do
put api(route, current_user), params: { developers_can_merge: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/branch')
expect(json_response['name']).to eq(CGI.unescape(branch_name))
expect(json_response['protected']).to eq(true)
@@ -329,7 +329,7 @@ describe API::Branches do
it 'protects a single branch and developers can push and merge' do
put api(route, current_user), params: { developers_can_push: true, developers_can_merge: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/branch')
expect(json_response['name']).to eq(CGI.unescape(branch_name))
expect(json_response['protected']).to eq(true)
@@ -428,7 +428,7 @@ describe API::Branches do
put api("/projects/#{project.id}/repository/branches/#{protected_branch.name}/protect", user),
params: { developers_can_push: false, developers_can_merge: false }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/branch')
expect(json_response['name']).to eq(protected_branch.name)
expect(json_response['protected']).to eq(true)
@@ -446,7 +446,7 @@ describe API::Branches do
put api("/projects/#{project.id}/repository/branches/#{protected_branch.name}/protect", user),
params: { developers_can_push: true, developers_can_merge: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/branch')
expect(json_response['name']).to eq(protected_branch.name)
expect(json_response['protected']).to eq(true)
@@ -465,7 +465,7 @@ describe API::Branches do
it 'unprotects a single branch' do
put api(route, current_user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/branch')
expect(json_response['name']).to eq(CGI.unescape(branch_name))
expect(json_response['protected']).to eq(false)
@@ -559,7 +559,7 @@ describe API::Branches do
it 'creates a new branch' do
post api(route, current_user), params: { branch: 'feature1', ref: branch_sha }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(response).to match_response_schema('public_api/v4/branch')
expect(json_response['name']).to eq('feature1')
expect(json_response['commit']['id']).to eq(branch_sha)
@@ -604,25 +604,25 @@ describe API::Branches do
it 'returns 400 if branch name is invalid' do
post api(route, user), params: { branch: 'new design', ref: branch_sha }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq('Branch name is invalid')
end
it 'returns 400 if branch already exists', :clean_gitlab_redis_cache do
post api(route, user), params: { branch: 'new_design1', ref: branch_sha }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
post api(route, user), params: { branch: 'new_design1', ref: branch_sha }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq('Branch already exists')
end
it 'returns 400 if ref name is invalid' do
post api(route, user), params: { branch: 'new_design3', ref: 'foo' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq('Invalid reference name: new_design3')
end
end
@@ -637,19 +637,19 @@ describe API::Branches do
it 'removes branch' do
delete api("/projects/#{project.id}/repository/branches/#{branch_name}", user)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
it 'removes a branch with dots in the branch name' do
delete api("/projects/#{project.id}/repository/branches/#{branch_with_dot.name}", user)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
it 'returns 404 if branch not exists' do
delete api("/projects/#{project.id}/repository/branches/foobar", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
context 'when the branch refname is invalid' do
@@ -676,14 +676,14 @@ describe API::Branches do
it 'returns 202 with json body' do
delete api("/projects/#{project.id}/repository/merged_branches", user)
- expect(response).to have_gitlab_http_status(202)
+ expect(response).to have_gitlab_http_status(:accepted)
expect(json_response['message']).to eql('202 Accepted')
end
it 'returns a 403 error if guest' do
delete api("/projects/#{project.id}/repository/merged_branches", guest)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
diff --git a/spec/requests/api/broadcast_messages_spec.rb b/spec/requests/api/broadcast_messages_spec.rb
index 7d71b83e147..9bfbbe0daab 100644
--- a/spec/requests/api/broadcast_messages_spec.rb
+++ b/spec/requests/api/broadcast_messages_spec.rb
@@ -13,11 +13,11 @@ describe API::BroadcastMessages do
get api('/broadcast_messages')
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_kind_of(Array)
expect(json_response.first.keys)
- .to match_array(%w(id message starts_at ends_at color font active target_path broadcast_type))
+ .to match_array(%w(id message starts_at ends_at color font active target_path broadcast_type dismissable))
end
end
@@ -25,10 +25,10 @@ describe API::BroadcastMessages do
it 'returns the specified message' do
get api("/broadcast_messages/#{message.id}")
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to eq message.id
expect(json_response.keys)
- .to match_array(%w(id message starts_at ends_at color font active target_path broadcast_type))
+ .to match_array(%w(id message starts_at ends_at color font active target_path broadcast_type dismissable))
end
end
@@ -36,13 +36,13 @@ describe API::BroadcastMessages do
it 'returns a 401 for anonymous users' do
post api('/broadcast_messages'), params: attributes_for(:broadcast_message)
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it 'returns a 403 for users' do
post api('/broadcast_messages', user), params: attributes_for(:broadcast_message)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
context 'as an admin' do
@@ -52,7 +52,7 @@ describe API::BroadcastMessages do
post api('/broadcast_messages', admin), params: attrs
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq 'message is missing'
end
@@ -61,7 +61,7 @@ describe API::BroadcastMessages do
travel_to(time) do
post api('/broadcast_messages', admin), params: { message: 'Test message' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['starts_at']).to eq '2016-07-02T10:11:12.000Z'
expect(json_response['ends_at']).to eq '2016-07-02T11:11:12.000Z'
end
@@ -72,7 +72,7 @@ describe API::BroadcastMessages do
post api('/broadcast_messages', admin), params: attrs
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['color']).to eq attrs[:color]
expect(json_response['font']).to eq attrs[:font]
end
@@ -82,7 +82,7 @@ describe API::BroadcastMessages do
post api('/broadcast_messages', admin), params: attrs
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['target_path']).to eq attrs[:target_path]
end
@@ -91,7 +91,7 @@ describe API::BroadcastMessages do
post api('/broadcast_messages', admin), params: attrs
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['broadcast_type']).to eq attrs[:broadcast_type]
end
@@ -100,7 +100,7 @@ describe API::BroadcastMessages do
post api('/broadcast_messages', admin), params: attrs
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['broadcast_type']).to eq 'banner'
end
@@ -109,7 +109,16 @@ describe API::BroadcastMessages do
post api('/broadcast_messages', admin), params: attrs
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'accepts an active dismissable value' do
+ attrs = { message: 'new message', dismissable: true }
+
+ post api('/broadcast_messages', admin), params: attrs
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['dismissable']).to eq true
end
end
end
@@ -119,14 +128,14 @@ describe API::BroadcastMessages do
put api("/broadcast_messages/#{message.id}"),
params: attributes_for(:broadcast_message)
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it 'returns a 403 for users' do
put api("/broadcast_messages/#{message.id}", user),
params: attributes_for(:broadcast_message)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
context 'as an admin' do
@@ -135,7 +144,7 @@ describe API::BroadcastMessages do
put api("/broadcast_messages/#{message.id}", admin), params: attrs
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['color']).to eq attrs[:color]
expect(json_response['font']).to eq attrs[:font]
end
@@ -147,7 +156,7 @@ describe API::BroadcastMessages do
put api("/broadcast_messages/#{message.id}", admin), params: attrs
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['starts_at']).to eq '2016-07-02T10:11:12.000Z'
expect(json_response['ends_at']).to eq '2016-07-02T13:11:12.000Z'
end
@@ -158,7 +167,7 @@ describe API::BroadcastMessages do
put api("/broadcast_messages/#{message.id}", admin), params: attrs
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect { message.reload }.to change { message.message }.to('new message')
end
@@ -167,7 +176,7 @@ describe API::BroadcastMessages do
put api("/broadcast_messages/#{message.id}", admin), params: attrs
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['target_path']).to eq attrs[:target_path]
end
@@ -176,7 +185,7 @@ describe API::BroadcastMessages do
put api("/broadcast_messages/#{message.id}", admin), params: attrs
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['broadcast_type']).to eq attrs[:broadcast_type]
end
@@ -185,7 +194,16 @@ describe API::BroadcastMessages do
put api("/broadcast_messages/#{message.id}", admin), params: attrs
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'accepts a new dismissable value' do
+ attrs = { message: 'new message', dismissable: true }
+
+ put api("/broadcast_messages/#{message.id}", admin), params: attrs
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['dismissable']).to eq true
end
end
end
@@ -195,14 +213,14 @@ describe API::BroadcastMessages do
delete api("/broadcast_messages/#{message.id}"),
params: attributes_for(:broadcast_message)
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it 'returns a 403 for users' do
delete api("/broadcast_messages/#{message.id}", user),
params: attributes_for(:broadcast_message)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it_behaves_like '412 response' do
@@ -213,7 +231,7 @@ describe API::BroadcastMessages do
expect do
delete api("/broadcast_messages/#{message.id}", admin)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.to change { BroadcastMessage.count }.by(-1)
end
end
diff --git a/spec/requests/api/commit_statuses_spec.rb b/spec/requests/api/commit_statuses_spec.rb
index 24ed836996e..6b810cf2d89 100644
--- a/spec/requests/api/commit_statuses_spec.rb
+++ b/spec/requests/api/commit_statuses_spec.rb
@@ -37,7 +37,7 @@ describe API::CommitStatuses do
end
it 'returns latest commit statuses' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
@@ -53,7 +53,7 @@ describe API::CommitStatuses do
end
it 'returns all commit statuses' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(statuses_id).to contain_exactly(status1.id, status2.id,
@@ -68,7 +68,7 @@ describe API::CommitStatuses do
end
it 'returns latest commit statuses for specific ref' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(statuses_id).to contain_exactly(status3.id, status5.id)
@@ -81,7 +81,7 @@ describe API::CommitStatuses do
end
it 'returns latest commit statuses for specific name' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(statuses_id).to contain_exactly(status4.id, status5.id)
@@ -108,7 +108,7 @@ describe API::CommitStatuses do
end
it "does not return project commits" do
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -118,7 +118,7 @@ describe API::CommitStatuses do
end
it "does not return project commits" do
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -134,7 +134,7 @@ describe API::CommitStatuses do
it 'creates commit status' do
post api(post_url, developer), params: { state: status }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['sha']).to eq(commit.id)
expect(json_response['status']).to eq(status)
expect(json_response['name']).to eq('default')
@@ -162,7 +162,7 @@ describe API::CommitStatuses do
job = pipeline.statuses.find_by_name(json_response['name'])
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(job.status).to eq('pending')
expect(job.stage_idx).to eq(GenericCommitStatus::EXTERNAL_STAGE_IDX)
end
@@ -189,7 +189,7 @@ describe API::CommitStatuses do
it "to #{status}" do
expect { post api(post_url, developer), params: { state: status } }.not_to change { CommitStatus.count }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['status']).to eq(status)
end
end
@@ -211,7 +211,7 @@ describe API::CommitStatuses do
it 'creates commit status' do
subject
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['sha']).to eq(commit.id)
expect(json_response['status']).to eq('success')
expect(json_response['name']).to eq('coverage')
@@ -227,7 +227,7 @@ describe API::CommitStatuses do
it 'sets head pipeline' do
subject
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(merge_request.reload.head_pipeline).not_to be_nil
end
end
@@ -254,7 +254,7 @@ describe API::CommitStatuses do
end
it 'updates a commit status' do
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['sha']).to eq(commit.id)
expect(json_response['status']).to eq('success')
expect(json_response['name']).to eq('coverage')
@@ -300,7 +300,7 @@ describe API::CommitStatuses do
end
it 'correctly posts a new commit status' do
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['sha']).to eq(commit.id)
expect(json_response['status']).to eq('success')
end
@@ -318,7 +318,7 @@ describe API::CommitStatuses do
end
it 'does not create commit status' do
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -328,7 +328,7 @@ describe API::CommitStatuses do
end
it 'does not create commit status' do
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -342,7 +342,7 @@ describe API::CommitStatuses do
let(:user) { developer }
it 'does not create commit status' do
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -350,7 +350,7 @@ describe API::CommitStatuses do
let(:user) { create_user(:maintainer) }
it 'creates commit status' do
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
end
end
@@ -363,7 +363,7 @@ describe API::CommitStatuses do
end
it 'returns not found error' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -376,7 +376,7 @@ describe API::CommitStatuses do
end
it 'responds with bad request status and validation errors' do
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['target_url'])
.to include 'is blocked: Only allowed schemes are http, https'
end
@@ -391,7 +391,7 @@ describe API::CommitStatuses do
end
it 'responds with bad request status and validation errors' do
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['target_url'])
.to include 'is blocked: Only allowed schemes are http, https'
end
@@ -407,7 +407,7 @@ describe API::CommitStatuses do
end
it 'responds with bad request status and validation errors' do
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['name'])
.to include 'has already been taken'
end
@@ -420,7 +420,7 @@ describe API::CommitStatuses do
end
it 'does not create commit status' do
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -430,7 +430,7 @@ describe API::CommitStatuses do
end
it 'does not create commit status' do
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -440,7 +440,7 @@ describe API::CommitStatuses do
end
it 'does not create commit status' do
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb
index c179de249d5..4b110874df0 100644
--- a/spec/requests/api/commits_spec.rb
+++ b/spec/requests/api/commits_spec.rb
@@ -28,7 +28,7 @@ describe API::Commits do
get api(route, current_user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema(schema)
expect(json_response.first['id']).to eq(commit.id)
expect(json_response.first['committer_name']).to eq(commit.committer_name)
@@ -123,7 +123,7 @@ describe API::Commits do
it "returns an invalid parameter error message" do
get api("/projects/#{project_id}/repository/commits?since=invalid-date", user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('since is invalid')
end
end
@@ -305,13 +305,13 @@ describe API::Commits do
it 'returns a 403 forbidden for user without permissions' do
post api(url, guest)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'returns a 400 bad request if no params are given' do
post api(url, user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
describe 'create' do
@@ -365,7 +365,7 @@ describe API::Commits do
it 'a new file in project repo' do
post api(url, user), params: valid_c_params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['title']).to eq(message)
expect(json_response['committer_name']).to eq(user.name)
expect(json_response['committer_email']).to eq(user.email)
@@ -374,7 +374,7 @@ describe API::Commits do
it 'a new file with utf8 chars in project repo' do
post api(url, user), params: valid_utf8_c_params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['title']).to eq(message)
expect(json_response['committer_name']).to eq(user.name)
expect(json_response['committer_email']).to eq(user.email)
@@ -383,7 +383,7 @@ describe API::Commits do
it 'returns a 400 bad request if file exists' do
post api(url, user), params: invalid_c_params
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
context 'with project path containing a dot in URL' do
@@ -392,7 +392,7 @@ describe API::Commits do
it 'a new file in project repo' do
post api(url, user), params: valid_c_params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
end
@@ -414,7 +414,7 @@ describe API::Commits do
it 'returns a 403' do
post api(url, guest), params: valid_c_params
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
context 'when start_project is provided' do
@@ -456,7 +456,7 @@ describe API::Commits do
it 'returns a 400' do
post api(url, guest), params: valid_c_params
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq("A branch called 'master' already exists. Switch to that branch in order to make changes")
end
@@ -510,7 +510,7 @@ describe API::Commits do
it 'returns a 403' do
post api(url, guest), params: valid_c_params
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -528,7 +528,7 @@ describe API::Commits do
it 'returns a 403' do
post api(url, guest), params: valid_c_params
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -545,7 +545,7 @@ describe API::Commits do
valid_c_params[:start_branch] = 'master'
post api(url, user), params: valid_c_params
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('start_branch, start_sha are mutually exclusive')
end
@@ -553,7 +553,7 @@ describe API::Commits do
valid_c_params[:branch] = 'master'
post api(url, user), params: valid_c_params
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq("A branch called 'master' already exists. Switch to that branch in order to make changes")
end
@@ -561,7 +561,7 @@ describe API::Commits do
valid_c_params[:start_sha] = '1' * 40
post api(url, user), params: valid_c_params
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq("Cannot find start_sha '#{valid_c_params[:start_sha]}'")
end
@@ -569,7 +569,7 @@ describe API::Commits do
valid_c_params[:start_sha] = start_sha.slice(0, 7)
post api(url, user), params: valid_c_params
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq("Invalid start_sha '#{valid_c_params[:start_sha]}'")
end
@@ -630,14 +630,14 @@ describe API::Commits do
it 'an existing file in project repo' do
post api(url, user), params: valid_d_params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['title']).to eq(message)
end
it 'returns a 400 bad request if file does not exist' do
post api(url, user), params: invalid_d_params
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -675,14 +675,14 @@ describe API::Commits do
it 'an existing file in project repo' do
post api(url, user), params: valid_m_params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['title']).to eq(message)
end
it 'returns a 400 bad request if file does not exist' do
post api(url, user), params: invalid_m_params
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -718,14 +718,14 @@ describe API::Commits do
it 'an existing file in project repo' do
post api(url, user), params: valid_u_params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['title']).to eq(message)
end
it 'returns a 400 bad request if file does not exist' do
post api(url, user), params: invalid_u_params
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -750,7 +750,7 @@ describe API::Commits do
it 'responds with success' do
post api(url, user), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['title']).to eq(message)
end
@@ -760,7 +760,7 @@ describe API::Commits do
it 'responds with success' do
post api(url, user), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['title']).to eq(message)
end
end
@@ -771,7 +771,7 @@ describe API::Commits do
it "responds with 400" do
post api(url, user), params: params
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq("A file with this name doesn't exist")
end
end
@@ -849,28 +849,28 @@ describe API::Commits do
it 'are committed as one in project repo' do
post api(url, user), params: valid_mo_params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['title']).to eq(message)
end
it 'includes the commit stats' do
post api(url, user), params: valid_mo_params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response).to include 'stats'
end
it "doesn't include the commit stats when stats is false" do
post api(url, user), params: valid_mo_params.merge(stats: false)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response).not_to include 'stats'
end
it 'returns a 400 bad request if there are any issues' do
post api(url, user), params: invalid_mo_params
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -937,7 +937,7 @@ describe API::Commits do
refs = project.repository.branch_names_contains(commit_id).map {|name| ['branch', name]}
refs.concat(project.repository.tag_names_contains(commit_id).map {|name| ['tag', name]})
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |r| [r['type'], r['name']] }.compact).to eq(refs)
@@ -949,7 +949,7 @@ describe API::Commits do
refs = project.repository.branch_names_contains(commit_id).map {|name| ['branch', name]}
refs.concat(project.repository.tag_names_contains(commit_id).map {|name| ['tag', name]})
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.map { |r| [r['type'], r['name']] }.compact).to eq(refs)
end
@@ -958,7 +958,7 @@ describe API::Commits do
refs = project.repository.branch_names_contains(commit_id).map {|name| ['branch', name]}
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.map { |r| [r['type'], r['name']] }.compact).to eq(refs)
end
@@ -967,7 +967,7 @@ describe API::Commits do
refs = project.repository.tag_names_contains(commit_id).map {|name| ['tag', name]}
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.map { |r| [r['type'], r['name']] }.compact).to eq(refs)
end
end
@@ -982,7 +982,7 @@ describe API::Commits do
it 'returns the ref last commit' do
get api(route, current_user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/commit/detail')
expect(json_response['id']).to eq(commit.id)
expect(json_response['short_id']).to eq(commit.short_id)
@@ -1030,7 +1030,7 @@ describe API::Commits do
it 'includes status as "created" and a last_pipeline object' do
get api(route, current_user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/commit/detail')
expect(json_response['status']).to eq('created')
expect(json_response['last_pipeline']['id']).to eq(pipeline.id)
@@ -1047,7 +1047,7 @@ describe API::Commits do
it 'includes a "success" status' do
get api(route, current_user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/commit/detail')
expect(json_response['status']).to eq('success')
end
@@ -1065,7 +1065,7 @@ describe API::Commits do
get api(route, current_user)
expect(response).to match_response_schema('public_api/v4/commit/detail')
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['last_pipeline']).to be_nil
end
end
@@ -1076,21 +1076,21 @@ describe API::Commits do
it 'is not present return stats by default' do
get api(route, user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to include 'stats'
end
it "is false it does not include stats" do
get api(route, user), params: { stats: false }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).not_to include 'stats'
end
it "is true it includes stats" do
get api(route, user), params: { stats: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to include 'stats'
end
end
@@ -1227,7 +1227,7 @@ describe API::Commits do
it 'returns the diff of the selected commit' do
get api(route, current_user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response.size).to be >= 1
expect(json_response.first.keys).to include 'diff'
@@ -1241,7 +1241,7 @@ describe API::Commits do
it 'respects the limit' do
get api(route, current_user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response.size).to be <= 1
end
@@ -1338,7 +1338,7 @@ describe API::Commits do
it 'returns the diff of the selected commit' do
get api(route, current_user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/commit_notes')
expect(json_response.size).to eq(2)
expect(json_response.first['note']).to eq('a comment on a commit')
@@ -1428,7 +1428,7 @@ describe API::Commits do
it 'returns the comments for the target project' do
get api(route, guest)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/commit_notes')
expect(json_response.size).to eq(1)
expect(json_response.first['note']).to eq('a comment on a commit for fork')
@@ -1448,7 +1448,7 @@ describe API::Commits do
it 'cherry-picks the ref commit' do
post api(route, current_user), params: { branch: branch }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(response).to match_response_schema('public_api/v4/commit/basic')
expect(json_response['title']).to eq(commit.title)
expect(json_response['message']).to eq(commit.cherry_pick_message(user))
@@ -1608,7 +1608,7 @@ describe API::Commits do
it 'reverts the ref commit' do
post api(route, current_user), params: { branch: branch }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(response).to match_response_schema('public_api/v4/commit/basic')
expect(json_response['message']).to eq(commit.revert_message(user))
@@ -1694,7 +1694,7 @@ describe API::Commits do
# Second one is redundant and should be empty
post api(route, current_user), params: { branch: 'markdown' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error_code']).to eq 'empty'
end
end
@@ -1733,7 +1733,7 @@ describe API::Commits do
it 'creates the comment' do
post api(route, current_user), params: { note: note }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(response).to match_response_schema('public_api/v4/commit_note')
expect(json_response['note']).to eq('My comment')
expect(json_response['path']).to be_nil
@@ -1774,7 +1774,7 @@ describe API::Commits do
it 'returns the inline comment' do
post api(route, current_user), params: { note: 'My comment', path: project.repository.commit.raw_diffs.first.new_path, line: 1, line_type: 'new' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(response).to match_response_schema('public_api/v4/commit_note')
expect(json_response['note']).to eq('My comment')
expect(json_response['path']).to eq(project.repository.commit.raw_diffs.first.new_path)
@@ -1794,7 +1794,7 @@ describe API::Commits do
it 'returns 400 if note is missing' do
post api(route, current_user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
context 'when ref contains a dot' do
@@ -1839,7 +1839,7 @@ describe API::Commits do
it 'returns the correct merge request' do
get api("/projects/#{project.id}/repository/commits/#{commit.id}/merge_requests", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response.length).to eq(1)
expect(json_response[0]['id']).to eq(merged_mr.id)
@@ -1850,13 +1850,13 @@ describe API::Commits do
get api("/projects/#{project.id}/repository/commits/#{commit.id}/merge_requests", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'responds 404 when the commit does not exist' do
get api("/projects/#{project.id}/repository/commits/a7d26f00c35b/merge_requests", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
context 'public project' do
@@ -1866,7 +1866,7 @@ describe API::Commits do
it 'responds 403 when only members are allowed to read merge requests' do
get api("/projects/#{project.id}/repository/commits/#{commit.id}/merge_requests", non_member)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -1900,7 +1900,7 @@ describe API::Commits do
it 'returns correct JSON' do
get api(route, current_user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['gpg_key_id']).to eq(commit.signature.gpg_key_id)
expect(json_response['gpg_key_subkey_id']).to eq(commit.signature.gpg_key_subkey_id)
expect(json_response['gpg_key_primary_keyid']).to eq(commit.signature.gpg_key_primary_keyid)
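The hunks above uniformly swap numeric HTTP codes for Rack status symbols in the have_gitlab_http_status matcher. As a minimal sketch, assuming the matcher resolves symbols the same way rspec-rails' have_http_status does (via Rack::Utils), the symbols map back to the original numbers:

    # Illustration only: symbol-to-code mapping behind the matcher arguments used above.
    require 'rack/utils'

    Rack::Utils::SYMBOL_TO_STATUS_CODE[:ok]          # => 200
    Rack::Utils::SYMBOL_TO_STATUS_CODE[:created]     # => 201
    Rack::Utils::SYMBOL_TO_STATUS_CODE[:no_content]  # => 204
    Rack::Utils::SYMBOL_TO_STATUS_CODE[:bad_request] # => 400
    Rack::Utils::SYMBOL_TO_STATUS_CODE[:forbidden]   # => 403
    Rack::Utils::SYMBOL_TO_STATUS_CODE[:not_found]   # => 404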
diff --git a/spec/requests/api/deploy_keys_spec.rb b/spec/requests/api/deploy_keys_spec.rb
index 4579ccfad80..9092d132b53 100644
--- a/spec/requests/api/deploy_keys_spec.rb
+++ b/spec/requests/api/deploy_keys_spec.rb
@@ -51,7 +51,7 @@ describe API::DeployKeys do
it 'returns array of ssh keys' do
get api("/projects/#{project.id}/deploy_keys", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first['title']).to eq(deploy_key.title)
@@ -62,14 +62,14 @@ describe API::DeployKeys do
it 'returns a single key' do
get api("/projects/#{project.id}/deploy_keys/#{deploy_key.id}", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['title']).to eq(deploy_key.title)
end
it 'returns 404 Not Found with invalid ID' do
get api("/projects/#{project.id}/deploy_keys/404", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -77,14 +77,14 @@ describe API::DeployKeys do
it 'does not create an invalid ssh key' do
post api("/projects/#{project.id}/deploy_keys", admin), params: { title: 'invalid key' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('key is missing')
end
it 'does not create a key without title' do
post api("/projects/#{project.id}/deploy_keys", admin), params: { key: 'some key' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('title is missing')
end
@@ -105,7 +105,7 @@ describe API::DeployKeys do
post api("/projects/#{project.id}/deploy_keys", admin), params: { key: deploy_key.key, title: deploy_key.title }
end.not_to change { project.deploy_keys.count }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
it 'joins an existing ssh key to a new project' do
@@ -113,7 +113,7 @@ describe API::DeployKeys do
post api("/projects/#{project2.id}/deploy_keys", admin), params: { key: deploy_key.key, title: deploy_key.title }
end.to change { project2.deploy_keys.count }.by(1)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
it 'accepts can_push parameter' do
@@ -121,7 +121,7 @@ describe API::DeployKeys do
post api("/projects/#{project.id}/deploy_keys", admin), params: key_attrs
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['can_push']).to eq(true)
end
end
@@ -139,7 +139,7 @@ describe API::DeployKeys do
it 'does not update a public deploy key' do
expect { subject }.not_to change(deploy_key, :title)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -151,12 +151,12 @@ describe API::DeployKeys do
it 'updates the title of the deploy key' do
expect { subject }.to change { deploy_key.reload.title }.to 'new title'
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'updates can_push of deploy_keys_project' do
expect { subject }.to change { deploy_keys_project.reload.can_push }.from(false).to(true)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -169,12 +169,12 @@ describe API::DeployKeys do
it 'updates the title of the deploy key' do
expect { subject }.to change { deploy_key.reload.title }.to 'new title'
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'updates can_push of deploy_keys_project' do
expect { subject }.to change { deploy_keys_project.reload.can_push }.from(false).to(true)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
context 'invalid title' do
@@ -182,7 +182,7 @@ describe API::DeployKeys do
it 'does not update the title of the deploy key' do
expect { subject }.not_to change { deploy_key.reload.title }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
@@ -200,12 +200,12 @@ describe API::DeployKeys do
it 'updates the title of the deploy key' do
expect { subject }.to change { deploy_key.reload.title }.to 'new title'
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'updates can_push of deploy_keys_project' do
expect { subject }.to change { deploy_keys_project.reload.can_push }.from(false).to(true)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -222,12 +222,12 @@ describe API::DeployKeys do
it 'does not update the title of the deploy key' do
expect { subject }.not_to change { deploy_key.reload.title }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'updates can_push of deploy_keys_project' do
expect { subject }.to change { deploy_keys_project.reload.can_push }.from(false).to(true)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -242,7 +242,7 @@ describe API::DeployKeys do
expect do
delete api("/projects/#{project.id}/deploy_keys/#{deploy_key.id}", admin)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.to change { project.deploy_keys.count }.by(-1)
end
@@ -251,7 +251,7 @@ describe API::DeployKeys do
expect do
delete api("/projects/#{project.id}/deploy_keys/#{deploy_key.id}", admin)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.not_to change { DeployKey.count }
end
end
@@ -264,7 +264,7 @@ describe API::DeployKeys do
expect do
delete api("/projects/#{project.id}/deploy_keys/#{deploy_key.id}", admin)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.to change { DeployKey.count }.by(-1)
end
end
@@ -278,7 +278,7 @@ describe API::DeployKeys do
expect do
delete api("/projects/#{project.id}/deploy_keys/#{deploy_key.id}", admin)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.not_to change { DeployKey.count }
end
end
@@ -287,7 +287,7 @@ describe API::DeployKeys do
it 'returns 404 Not Found with invalid ID' do
delete api("/projects/#{project.id}/deploy_keys/404", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it_behaves_like '412 response' do
@@ -304,7 +304,7 @@ describe API::DeployKeys do
post api("/projects/#{project2.id}/deploy_keys/#{deploy_key.id}/enable", admin)
end.to change { project2.deploy_keys.count }.from(0).to(1)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['id']).to eq(deploy_key.id)
end
end
@@ -313,7 +313,7 @@ describe API::DeployKeys do
it 'returns a 404 error' do
post api("/projects/#{project2.id}/deploy_keys/#{deploy_key.id}/enable", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/requests/api/deploy_tokens_spec.rb b/spec/requests/api/deploy_tokens_spec.rb
new file mode 100644
index 00000000000..fa20635056f
--- /dev/null
+++ b/spec/requests/api/deploy_tokens_spec.rb
@@ -0,0 +1,312 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe API::DeployTokens do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:creator) { create(:user) }
+ let_it_be(:project) { create(:project, creator_id: creator.id) }
+ let_it_be(:group) { create(:group) }
+ let!(:deploy_token) { create(:deploy_token, projects: [project]) }
+ let!(:group_deploy_token) { create(:deploy_token, :group, groups: [group]) }
+
+ shared_examples 'with feature flag disabled' do
+ context 'disabled feature flag' do
+ before do
+ stub_feature_flags(deploy_tokens_api: false)
+ end
+
+ it { is_expected.to have_gitlab_http_status(:service_unavailable) }
+ end
+ end
+
+ describe 'GET /deploy_tokens' do
+ subject do
+ get api('/deploy_tokens', user)
+ response
+ end
+
+ it_behaves_like 'with feature flag disabled'
+
+ context 'when unauthenticated' do
+ let(:user) { nil }
+
+ it { is_expected.to have_gitlab_http_status(:unauthorized) }
+ end
+
+ context 'when authenticated as non-admin user' do
+ let(:user) { creator }
+
+ it { is_expected.to have_gitlab_http_status(:forbidden) }
+ end
+
+ context 'when authenticated as admin' do
+ let(:user) { create(:admin) }
+
+ it { is_expected.to have_gitlab_http_status(:ok) }
+
+ it 'returns all deploy tokens' do
+ subject
+
+ expect(response).to include_pagination_headers
+ expect(response).to match_response_schema('public_api/v4/deploy_tokens')
+ end
+ end
+ end
+
+ describe 'GET /projects/:id/deploy_tokens' do
+ subject do
+ get api("/projects/#{project.id}/deploy_tokens", user)
+ response
+ end
+
+ context 'when unauthenticated' do
+ let(:user) { nil }
+
+ it { is_expected.to have_gitlab_http_status(:not_found) }
+ end
+
+ context 'when authenticated as non-admin user' do
+ before do
+ project.add_developer(user)
+ end
+
+ it { is_expected.to have_gitlab_http_status(:forbidden) }
+ end
+
+ context 'when authenticated as maintainer' do
+ let!(:other_deploy_token) { create(:deploy_token) }
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ it_behaves_like 'with feature flag disabled'
+
+ it { is_expected.to have_gitlab_http_status(:ok) }
+
+ it 'returns all deploy tokens for the project' do
+ subject
+
+ expect(response).to include_pagination_headers
+ expect(response).to match_response_schema('public_api/v4/deploy_tokens')
+ end
+
+ it 'does not return deploy tokens for other projects' do
+ subject
+
+ token_ids = json_response.map { |token| token['id'] }
+ expect(token_ids).not_to include(other_deploy_token.id)
+ end
+ end
+ end
+
+ describe 'GET /groups/:id/deploy_tokens' do
+ subject do
+ get api("/groups/#{group.id}/deploy_tokens", user)
+ response
+ end
+
+ context 'when unauthenticated' do
+ let(:user) { nil }
+
+ it { is_expected.to have_gitlab_http_status(:forbidden) }
+ end
+
+ context 'when authenticated as non-admin user' do
+ before do
+ group.add_developer(user)
+ end
+
+ it { is_expected.to have_gitlab_http_status(:forbidden) }
+ end
+
+ context 'when authenticated as maintainer' do
+ let!(:other_deploy_token) { create(:deploy_token, :group) }
+
+ before do
+ group.add_maintainer(user)
+ end
+
+ it_behaves_like 'with feature flag disabled'
+
+ it { is_expected.to have_gitlab_http_status(:ok) }
+
+ it 'returns all deploy tokens for the group' do
+ subject
+
+ expect(response).to include_pagination_headers
+ expect(response).to match_response_schema('public_api/v4/deploy_tokens')
+ end
+
+ it 'does not return deploy tokens for other groups' do
+ subject
+
+ token_ids = json_response.map { |token| token['id'] }
+ expect(token_ids).not_to include(other_deploy_token.id)
+ end
+ end
+ end
+
+ describe 'DELETE /projects/:id/deploy_tokens/:token_id' do
+ subject do
+ delete api("/projects/#{project.id}/deploy_tokens/#{deploy_token.id}", user)
+ response
+ end
+
+ context 'when unauthenticated' do
+ let(:user) { nil }
+
+ it { is_expected.to have_gitlab_http_status(:not_found) }
+ end
+
+ context 'when authenticated as non-admin user' do
+ before do
+ project.add_developer(user)
+ end
+
+ it { is_expected.to have_gitlab_http_status(:forbidden) }
+ end
+
+ context 'when authenticated as maintainer' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it { is_expected.to have_gitlab_http_status(:no_content) }
+
+ it 'deletes the deploy token' do
+ expect { subject }.to change { project.deploy_tokens.count }.by(-1)
+ end
+
+ context 'invalid request' do
+ it 'returns not found with invalid project id' do
+ delete api("/projects/bad_id/deploy_tokens/#{group_deploy_token.id}", user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'returns bad_request with invalid token id' do
+ delete api("/projects/#{project.id}/deploy_tokens/123abc", user)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+ end
+ end
+
+ context 'deploy token creation' do
+ shared_examples 'creating a deploy token' do |entity, unauthenticated_response|
+ let(:params) do
+ {
+ name: 'Foo',
+ expires_at: 1.year.from_now,
+ scopes: [
+ 'read_repository'
+ ],
+ username: 'Bar'
+ }
+ end
+
+ context 'when unauthenticated' do
+ let(:user) { nil }
+
+ it { is_expected.to have_gitlab_http_status(unauthenticated_response) }
+ end
+
+ context 'when authenticated as non-admin user' do
+ before do
+ send(entity).add_developer(user)
+ end
+
+ it { is_expected.to have_gitlab_http_status(:forbidden) }
+ end
+
+ context 'when authenticated as maintainer' do
+ before do
+ send(entity).add_maintainer(user)
+ end
+
+ it 'creates the deploy token' do
+ expect { subject }.to change { DeployToken.count }.by(1)
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(response).to match_response_schema('public_api/v4/deploy_token')
+ end
+
+ context 'with an invalid scope' do
+ before do
+ params[:scopes] = %w[read_repository all_access]
+ end
+
+ it { is_expected.to have_gitlab_http_status(:bad_request) }
+ end
+ end
+ end
+
+ describe 'POST /projects/:id/deploy_tokens' do
+ subject do
+ post api("/projects/#{project.id}/deploy_tokens", user), params: params
+ response
+ end
+
+ it_behaves_like 'creating a deploy token', :project, :not_found
+ end
+
+ describe 'POST /groups/:id/deploy_tokens' do
+ subject do
+ post api("/groups/#{group.id}/deploy_tokens", user), params: params
+ response
+ end
+
+ it_behaves_like 'creating a deploy token', :group, :forbidden
+ end
+ end
+
+ describe 'DELETE /groups/:id/deploy_tokens/:token_id' do
+ subject do
+ delete api("/groups/#{group.id}/deploy_tokens/#{group_deploy_token.id}", user)
+ response
+ end
+
+ context 'when unauthenticated' do
+ let(:user) { nil }
+
+ it { is_expected.to have_gitlab_http_status(:forbidden) }
+ end
+
+ context 'when authenticated as non-admin user' do
+ before do
+ group.add_developer(user)
+ end
+
+ it { is_expected.to have_gitlab_http_status(:forbidden) }
+ end
+
+ context 'when authenticated as maintainer' do
+ before do
+ group.add_maintainer(user)
+ end
+
+ it 'deletes the deploy token' do
+ expect { subject }.to change { group.deploy_tokens.count }.by(-1)
+
+ expect(group.deploy_tokens).to be_empty
+ end
+
+ context 'invalid request' do
+ it 'returns not found with invalid group id' do
+ delete api("/groups/bad_id/deploy_tokens/#{group_deploy_token.id}", user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'returns not found with invalid deploy token id' do
+ delete api("/groups/#{group.id}/deploy_tokens/bad_id", user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+ end
+end
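The new deploy_tokens spec above defines subject to issue the request and return response, so one-line examples can assert directly with is_expected and the status matcher. A minimal sketch of the same pattern against a hypothetical /ping endpoint (not part of this change):

    # Sketch only: subject performs the request and returns the response,
    # so each example can assert on it with is_expected.
    describe 'GET /ping' do
      subject do
        get api('/ping', user)
        response
      end

      it { is_expected.to have_gitlab_http_status(:ok) }
    end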
diff --git a/spec/requests/api/deployments_spec.rb b/spec/requests/api/deployments_spec.rb
index 151f67061eb..b820b227fff 100644
--- a/spec/requests/api/deployments_spec.rb
+++ b/spec/requests/api/deployments_spec.rb
@@ -20,7 +20,7 @@ describe API::Deployments do
it 'returns projects deployments sorted by id asc' do
get api("/projects/#{project.id}/deployments", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(3)
@@ -74,7 +74,7 @@ describe API::Deployments do
let(:order_by) { 'wrong_sorting_value' }
it 'returns error' do
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -82,7 +82,7 @@ describe API::Deployments do
let(:sort) { 'wrong_sorting_direction' }
it 'returns error' do
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
@@ -92,7 +92,7 @@ describe API::Deployments do
it 'returns a 404 status code' do
get api("/projects/#{project.id}/deployments", non_member)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -105,7 +105,7 @@ describe API::Deployments do
it 'returns the projects deployment' do
get api("/projects/#{project.id}/deployments/#{deployment.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['sha']).to match /\A\h{40}\z/
expect(json_response['id']).to eq(deployment.id)
end
@@ -115,7 +115,7 @@ describe API::Deployments do
it 'returns a 404 status code' do
get api("/projects/#{project.id}/deployments/#{deployment.id}", non_member)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -159,7 +159,7 @@ describe API::Deployments do
}
)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['sha']).to eq(sha)
expect(json_response['ref']).to eq('master')
@@ -178,7 +178,7 @@ describe API::Deployments do
}
)
- expect(response).to have_gitlab_http_status(500)
+ expect(response).to have_gitlab_http_status(:internal_server_error)
end
it 'links any merged merge requests to the deployment', :sidekiq_inline do
@@ -228,7 +228,7 @@ describe API::Deployments do
}
)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['sha']).to eq(sha)
expect(json_response['ref']).to eq('master')
@@ -312,7 +312,7 @@ describe API::Deployments do
}
)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -341,7 +341,7 @@ describe API::Deployments do
params: { status: 'success' }
)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'updates a deployment without an associated build' do
@@ -350,7 +350,7 @@ describe API::Deployments do
params: { status: 'success' }
)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['status']).to eq('success')
end
@@ -390,7 +390,7 @@ describe API::Deployments do
params: { status: 'success' }
)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'updates a deployment without an associated build' do
@@ -399,7 +399,7 @@ describe API::Deployments do
params: { status: 'success' }
)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['status']).to eq('success')
end
end
@@ -411,7 +411,7 @@ describe API::Deployments do
params: { status: 'success' }
)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -428,7 +428,7 @@ describe API::Deployments do
it 'returns a 404 status code' do
subject
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -443,7 +443,8 @@ describe API::Deployments do
subject
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
expect(json_response.map { |d| d['id'] }).to contain_exactly(merge_request1.id, merge_request2.id)
end
@@ -451,7 +452,7 @@ describe API::Deployments do
it 'returns an empty array' do
subject
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to eq([])
end
end
@@ -468,7 +469,7 @@ describe API::Deployments do
it 'succeeds', :aggregate_failures do
subject
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.size).to eq(1)
end
diff --git a/spec/requests/api/discussions_spec.rb b/spec/requests/api/discussions_spec.rb
index f37a02e7135..e4dd6dfbeff 100644
--- a/spec/requests/api/discussions_spec.rb
+++ b/spec/requests/api/discussions_spec.rb
@@ -58,7 +58,7 @@ describe API::Discussions do
post api("/projects/#{project.id}/merge_requests/#{noteable['iid']}/discussions", user),
params: { body: 'hi!', position: position }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
diff --git a/spec/requests/api/doorkeeper_access_spec.rb b/spec/requests/api/doorkeeper_access_spec.rb
index 2a34e623a7e..a25a6485f47 100644
--- a/spec/requests/api/doorkeeper_access_spec.rb
+++ b/spec/requests/api/doorkeeper_access_spec.rb
@@ -10,7 +10,7 @@ describe 'doorkeeper access' do
describe "unauthenticated" do
it "returns authentication success" do
get api("/user"), params: { access_token: token.token }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
include_examples 'user login request with unique ip limit' do
@@ -23,14 +23,14 @@ describe 'doorkeeper access' do
describe "when token invalid" do
it "returns authentication error" do
get api("/user"), params: { access_token: "123a" }
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
describe "authorization by OAuth token" do
it "returns authentication success" do
get api("/user", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
include_examples 'user login request with unique ip limit' do
@@ -44,7 +44,7 @@ describe 'doorkeeper access' do
it 'returns 403 response' do
get api("/user"), params: { access_token: token.token }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
diff --git a/spec/requests/api/environments_spec.rb b/spec/requests/api/environments_spec.rb
index bdb0ef44038..56af64342c0 100644
--- a/spec/requests/api/environments_spec.rb
+++ b/spec/requests/api/environments_spec.rb
@@ -27,7 +27,7 @@ describe API::Environments do
get api("/projects/#{project.id}/environments", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(1)
@@ -43,7 +43,7 @@ describe API::Environments do
it 'returns environment by name' do
get api("/projects/#{project.id}/environments?name=#{environment.name}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(1)
@@ -53,7 +53,7 @@ describe API::Environments do
it 'returns no environment by non-existent name' do
get api("/projects/#{project.id}/environments?name=test", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(0)
@@ -62,7 +62,7 @@ describe API::Environments do
it 'returns environments by name_like' do
get api("/projects/#{project.id}/environments?search=envir", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(2)
@@ -71,7 +71,7 @@ describe API::Environments do
it 'returns no environment by non-existent name_like' do
get api("/projects/#{project.id}/environments?search=test", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(0)
@@ -83,7 +83,7 @@ describe API::Environments do
it 'returns a 404 status code' do
get api("/projects/#{project.id}/environments", non_member)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -93,7 +93,7 @@ describe API::Environments do
it 'creates a environment with valid params' do
post api("/projects/#{project.id}/environments", user), params: { name: "mepmep" }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq('mepmep')
expect(json_response['slug']).to eq('mepmep')
expect(json_response['external']).to be nil
@@ -102,19 +102,19 @@ describe API::Environments do
it 'requires name to be passed' do
post api("/projects/#{project.id}/environments", user), params: { external_url: 'test.gitlab.com' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns a 400 if environment already exists' do
post api("/projects/#{project.id}/environments", user), params: { name: environment.name }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns a 400 if slug is specified' do
post api("/projects/#{project.id}/environments", user), params: { name: "foo", slug: "foo" }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response["error"]).to eq("slug is automatically generated and cannot be changed")
end
end
@@ -123,7 +123,7 @@ describe API::Environments do
it 'rejects the request' do
post api("/projects/#{project.id}/environments", non_member), params: { name: 'gitlab.com' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns a 400 when the required params are missing' do
@@ -138,7 +138,7 @@ describe API::Environments do
put api("/projects/#{project.id}/environments/#{environment.id}", user),
params: { name: 'Mepmep', external_url: url }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq('Mepmep')
expect(json_response['external_url']).to eq(url)
end
@@ -148,7 +148,7 @@ describe API::Environments do
api_url = api("/projects/#{project.id}/environments/#{environment.id}", user)
put api_url, params: { slug: slug + "-foo" }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response["error"]).to eq("slug is automatically generated and cannot be changed")
end
@@ -157,7 +157,7 @@ describe API::Environments do
put api("/projects/#{project.id}/environments/#{environment.id}", user),
params: { name: 'Mepmep' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq('Mepmep')
expect(json_response['external_url']).to eq(url)
end
@@ -165,7 +165,7 @@ describe API::Environments do
it 'returns a 404 if the environment does not exist' do
put api("/projects/#{project.id}/environments/12345", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -174,13 +174,13 @@ describe API::Environments do
it 'returns a 200 for an existing environment' do
delete api("/projects/#{project.id}/environments/#{environment.id}", user)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
it 'returns a 404 for non existing id' do
delete api("/projects/#{project.id}/environments/12345", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Not found')
end
@@ -193,7 +193,7 @@ describe API::Environments do
it 'rejects the request' do
delete api("/projects/#{project.id}/environments/#{environment.id}", non_member)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -208,7 +208,7 @@ describe API::Environments do
end
it 'returns a 200' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'actually stops the environment' do
@@ -219,7 +219,7 @@ describe API::Environments do
it 'returns a 404 for non existing id' do
post api("/projects/#{project.id}/environments/12345/stop", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Not found')
end
end
@@ -228,7 +228,7 @@ describe API::Environments do
it 'rejects the request' do
post api("/projects/#{project.id}/environments/#{environment.id}/stop", non_member)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -240,7 +240,7 @@ describe API::Environments do
get api("/projects/#{project.id}/environments/#{environment.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/environment')
end
end
@@ -249,7 +249,7 @@ describe API::Environments do
it 'returns a 404 status code' do
get api("/projects/#{project.id}/environments/#{environment.id}", non_member)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/requests/api/error_tracking_spec.rb b/spec/requests/api/error_tracking_spec.rb
index 120248bdbc6..deed9777025 100644
--- a/spec/requests/api/error_tracking_spec.rb
+++ b/spec/requests/api/error_tracking_spec.rb
@@ -22,7 +22,7 @@ describe API::ErrorTracking do
end
shared_examples 'returns 404' do
- it 'returns correct project settings' do
+ it 'returns no project settings' do
make_request
expect(response).to have_gitlab_http_status(:not_found)
diff --git a/spec/requests/api/events_spec.rb b/spec/requests/api/events_spec.rb
index 30e6a1340a8..acf3bb3482a 100644
--- a/spec/requests/api/events_spec.rb
+++ b/spec/requests/api/events_spec.rb
@@ -16,7 +16,7 @@ describe API::Events do
it 'returns authentication error' do
get api('/events')
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -24,7 +24,7 @@ describe API::Events do
it 'returns users events' do
get api('/events?action=closed&target_type=issue&after=2016-12-1&before=2016-12-31', user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(1)
@@ -36,7 +36,7 @@ describe API::Events do
get api('/events?action=closed&target_type=issue&after=2016-12-1&before=2016-12-31&scope=all', user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(2)
@@ -50,7 +50,7 @@ describe API::Events do
it 'returns users events' do
get api('/events?action=closed&target_type=issue&after=2016-12-1&before=2016-12-31', personal_access_token: token)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(1)
@@ -63,7 +63,7 @@ describe API::Events do
it 'returns a "403" response' do
get api('/events', personal_access_token: token_without_scopes)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -76,7 +76,7 @@ describe API::Events do
get api("/users/#{user.id}/events", non_member)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_empty
end
end
@@ -90,7 +90,7 @@ describe API::Events do
get api("/users/#{user.id}/events", personal_access_token: non_member_token)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_empty
end
end
@@ -99,7 +99,7 @@ describe API::Events do
it 'accepts a username' do
get api("/users/#{user.username}/events", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(1)
@@ -108,7 +108,7 @@ describe API::Events do
it 'returns the events' do
get api("/users/#{user.id}/events", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(1)
@@ -127,7 +127,7 @@ describe API::Events do
end
it 'responds with HTTP 200 OK' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'includes the push payload as a Hash' do
@@ -177,7 +177,7 @@ describe API::Events do
it 'returns no user events' do
get api("/users/#{user.username}/events?scope=all")
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.size).to eq(0)
end
@@ -188,7 +188,7 @@ describe API::Events do
it 'returns a 404 error if not found' do
get api('/users/42/events', user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 User Not Found')
end
end
diff --git a/spec/requests/api/features_spec.rb b/spec/requests/api/features_spec.rb
index d7b0bf881a6..ce72a416c33 100644
--- a/spec/requests/api/features_spec.rb
+++ b/spec/requests/api/features_spec.rb
@@ -46,19 +46,19 @@ describe API::Features do
it 'returns a 401 for anonymous users' do
get api('/features')
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it 'returns a 403 for users' do
get api('/features', user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'returns the feature list for admins' do
get api('/features', admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to match_array(expected_features)
end
end
@@ -70,20 +70,20 @@ describe API::Features do
it 'returns a 401 for anonymous users' do
post api("/features/#{feature_name}")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it 'returns a 403 for users' do
post api("/features/#{feature_name}", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
context 'when passed value=true' do
it 'creates an enabled feature' do
post api("/features/#{feature_name}", admin), params: { value: 'true' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response).to eq(
'name' => 'my_feature',
'state' => 'on',
@@ -93,7 +93,7 @@ describe API::Features do
it 'creates an enabled feature for the given Flipper group when passed feature_group=perf_team' do
post api("/features/#{feature_name}", admin), params: { value: 'true', feature_group: 'perf_team' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response).to eq(
'name' => 'my_feature',
'state' => 'conditional',
@@ -106,7 +106,7 @@ describe API::Features do
it 'creates an enabled feature for the given user when passed user=username' do
post api("/features/#{feature_name}", admin), params: { value: 'true', user: user.username }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response).to eq(
'name' => 'my_feature',
'state' => 'conditional',
@@ -119,7 +119,7 @@ describe API::Features do
it 'creates an enabled feature for the given user and feature group when passed user=username and feature_group=perf_team' do
post api("/features/#{feature_name}", admin), params: { value: 'true', user: user.username, feature_group: 'perf_team' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq('my_feature')
expect(json_response['state']).to eq('conditional')
expect(json_response['gates']).to contain_exactly(
@@ -137,7 +137,7 @@ describe API::Features do
it 'sets the feature gate' do
post api("/features/#{feature_name}", admin), params: { value: 'true', project: project.full_path }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response).to eq(
'name' => 'my_feature',
'state' => 'conditional',
@@ -152,7 +152,7 @@ describe API::Features do
it 'sets no new values' do
post api("/features/#{feature_name}", admin), params: { value: 'true', project: 'mep/to/the/mep/mep' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response).to eq(
"name" => "my_feature",
"state" => "off",
@@ -171,7 +171,7 @@ describe API::Features do
post api("/features/#{feature_name}", admin), params: { value: 'true', group: group.full_path }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response).to eq(
'name' => 'my_feature',
'state' => 'conditional',
@@ -186,7 +186,7 @@ describe API::Features do
it 'sets no new values and keeps the feature disabled' do
post api("/features/#{feature_name}", admin), params: { value: 'true', group: 'not/a/group' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response).to eq(
"name" => "my_feature",
"state" => "off",
@@ -201,7 +201,7 @@ describe API::Features do
it 'creates a feature with the given percentage if passed an integer' do
post api("/features/#{feature_name}", admin), params: { value: '50' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response).to eq(
'name' => 'my_feature',
'state' => 'conditional',
@@ -223,7 +223,7 @@ describe API::Features do
it 'enables the feature' do
post api("/features/#{feature_name}", admin), params: { value: 'true' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response).to eq(
'name' => 'my_feature',
'state' => 'on',
@@ -233,7 +233,7 @@ describe API::Features do
it 'enables the feature for the given Flipper group when passed feature_group=perf_team' do
post api("/features/#{feature_name}", admin), params: { value: 'true', feature_group: 'perf_team' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response).to eq(
'name' => 'my_feature',
'state' => 'conditional',
@@ -246,7 +246,7 @@ describe API::Features do
it 'enables the feature for the given user when passed user=username' do
post api("/features/#{feature_name}", admin), params: { value: 'true', user: user.username }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response).to eq(
'name' => 'my_feature',
'state' => 'conditional',
@@ -264,7 +264,7 @@ describe API::Features do
post api("/features/#{feature_name}", admin), params: { value: 'false' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response).to eq(
'name' => 'my_feature',
'state' => 'off',
@@ -277,7 +277,7 @@ describe API::Features do
post api("/features/#{feature_name}", admin), params: { value: 'false', feature_group: 'perf_team' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response).to eq(
'name' => 'my_feature',
'state' => 'off',
@@ -290,7 +290,7 @@ describe API::Features do
post api("/features/#{feature_name}", admin), params: { value: 'false', user: user.username }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response).to eq(
'name' => 'my_feature',
'state' => 'off',
@@ -306,7 +306,7 @@ describe API::Features do
it 'updates the percentage of time if passed an integer' do
post api("/features/#{feature_name}", admin), params: { value: '30' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response).to eq(
'name' => 'my_feature',
'state' => 'conditional',
@@ -326,13 +326,13 @@ describe API::Features do
it 'returns a 401 for anonymous users' do
delete api("/features/#{feature_name}")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it 'returns a 403 for users' do
delete api("/features/#{feature_name}", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -340,7 +340,7 @@ describe API::Features do
it 'returns 204 when the value is not set' do
delete api("/features/#{feature_name}", admin)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
context 'when the gate value was set' do
@@ -351,7 +351,7 @@ describe API::Features do
it 'deletes an enabled feature' do
delete api("/features/#{feature_name}", admin)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
expect(Feature.get(feature_name)).not_to be_enabled
end
end
diff --git a/spec/requests/api/files_spec.rb b/spec/requests/api/files_spec.rb
index efad443de3f..e6406174391 100644
--- a/spec/requests/api/files_spec.rb
+++ b/spec/requests/api/files_spec.rb
@@ -7,6 +7,8 @@ describe API::Files do
let!(:project) { create(:project, :repository, namespace: user.namespace ) }
let(:guest) { create(:user) { |u| project.add_guest(u) } }
let(:file_path) { "files%2Fruby%2Fpopen%2Erb" }
+ let(:rouge_file_path) { "%2e%2e%2f" }
+ let(:invalid_file_message) { 'file_path should be a valid file path' }
let(:params) do
{
ref: 'master'
@@ -55,10 +57,16 @@ describe API::Files do
describe "HEAD /projects/:id/repository/files/:file_path" do
shared_examples_for 'repository files' do
+ it 'returns 400 when file path is invalid' do
+ head api(route(rouge_file_path), current_user), params: params
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
it 'returns file attributes in headers' do
head api(route(file_path), current_user), params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.headers['X-Gitlab-File-Path']).to eq(CGI.unescape(file_path))
expect(response.headers['X-Gitlab-File-Name']).to eq('popen.rb')
expect(response.headers['X-Gitlab-Last-Commit-Id']).to eq('570e7b2abdd848b95f2f578043fc23bd6f6fd24d')
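The rouge_file_path value introduced above, "%2e%2e%2f", URL-decodes to "../", i.e. a path-traversal attempt, which is why the new examples expect the API to reject it with 400. A quick check of the decoding, as an illustration only:

    require 'cgi'

    CGI.unescape('%2e%2e%2f')  # => "../"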
@@ -72,7 +80,7 @@ describe API::Files do
head api(route(file_path), current_user), params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.headers['X-Gitlab-File-Name']).to eq('commit.js.coffee')
expect(response.headers['X-Gitlab-Content-Sha256']).to eq('08785f04375b47f81f46e68cc125d5ef368aa20576ddb53f91f4d83f1d04b929')
end
@@ -81,7 +89,7 @@ describe API::Files do
it "responds with a 400 status" do
head api(route("any%2Ffile"), current_user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -91,7 +99,7 @@ describe API::Files do
head api(route('app%2Fmodels%2Fapplication%2Erb'), current_user), params: params
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -101,7 +109,7 @@ describe API::Files do
it "responds with a 403 status" do
head api(route(file_path), current_user), params: params
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -119,7 +127,7 @@ describe API::Files do
head api(route(file_path), current_user), params: params
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -145,10 +153,17 @@ describe API::Files do
describe "GET /projects/:id/repository/files/:file_path" do
shared_examples_for 'repository files' do
+ it 'returns 400 for invalid file path' do
+ get api(route(rouge_file_path), current_user), params: params
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq(invalid_file_message)
+ end
+
it 'returns file attributes as json' do
get api(route(file_path), current_user), params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['file_path']).to eq(CGI.unescape(file_path))
expect(json_response['file_name']).to eq('popen.rb')
expect(json_response['last_commit_id']).to eq('570e7b2abdd848b95f2f578043fc23bd6f6fd24d')
@@ -161,7 +176,7 @@ describe API::Files do
get api(route(file_path), current_user), params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type).to eq('application/json')
end
@@ -172,7 +187,7 @@ describe API::Files do
get api(route(file_path), current_user), params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['file_name']).to eq('commit.js.coffee')
expect(json_response['content_sha256']).to eq('08785f04375b47f81f46e68cc125d5ef368aa20576ddb53f91f4d83f1d04b929')
expect(Base64.decode64(json_response['content']).lines.first).to eq("class Commit\n")
@@ -184,7 +199,7 @@ describe API::Files do
get api(url, current_user), params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(headers[Gitlab::Workhorse::DETECT_HEADER]).to eq "true"
end
@@ -193,7 +208,7 @@ describe API::Files do
get api(url, current_user), params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'sets inline content disposition by default' do
@@ -294,7 +309,7 @@ describe API::Files do
it 'returns file attributes in headers' do
head api(route(file_path) + '/blame', current_user), params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.headers['X-Gitlab-File-Path']).to eq(CGI.unescape(file_path))
expect(response.headers['X-Gitlab-File-Name']).to eq('popen.rb')
expect(response.headers['X-Gitlab-Last-Commit-Id']).to eq('570e7b2abdd848b95f2f578043fc23bd6f6fd24d')
@@ -302,10 +317,17 @@ describe API::Files do
.to eq('c440cd09bae50c4632cc58638ad33c6aa375b6109d811e76a9cc3a613c1e8887')
end
+ it 'returns 400 when file path is invalid' do
+ get api(route(rouge_file_path) + '/blame', current_user), params: params
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq(invalid_file_message)
+ end
+
it 'returns blame file attributes as json' do
get api(route(file_path) + '/blame', current_user), params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.map { |x| x['lines'].size }).to eq(expected_blame_range_sizes)
expect(json_response.map { |x| x['commit']['id'] }).to eq(expected_blame_range_commit_ids)
range = json_response[0]
@@ -329,7 +351,7 @@ describe API::Files do
get api(url, current_user), params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'returns file by commit sha' do
@@ -339,7 +361,7 @@ describe API::Files do
get api(route(file_path) + '/blame', current_user), params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
context 'when mandatory params are not given' do
@@ -411,20 +433,27 @@ describe API::Files do
get api(route(file_path) + '/blame', personal_access_token: token), params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
describe "GET /projects/:id/repository/files/:file_path/raw" do
shared_examples_for 'repository raw files' do
+ it 'returns 400 when file path is invalid' do
+ get api(route(rouge_file_path) + "/raw", current_user), params: params
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq(invalid_file_message)
+ end
+
it 'returns raw file info' do
url = route(file_path) + "/raw"
expect(Gitlab::Workhorse).to receive(:send_git_blob)
get api(url, current_user), params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'returns raw file info for files with dots' do
@@ -433,7 +462,7 @@ describe API::Files do
get api(url, current_user), params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'returns file by commit sha' do
@@ -444,7 +473,7 @@ describe API::Files do
get api(route(file_path) + "/raw", current_user), params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'sets no-cache headers' do
@@ -520,7 +549,7 @@ describe API::Files do
get api(route(file_path) + "/raw", personal_access_token: token), params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -535,10 +564,17 @@ describe API::Files do
}
end
+ it 'returns 400 when file path is invalid' do
+ post api(route(rouge_file_path), user), params: params
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq(invalid_file_message)
+ end
+
it "creates a new file in project repo" do
post api(route(file_path), user), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response["file_path"]).to eq(CGI.unescape(file_path))
last_commit = project.repository.commit.raw
expect(last_commit.author_email).to eq(user.email)
@@ -548,7 +584,7 @@ describe API::Files do
it "returns a 400 bad request if no mandatory params given" do
post api(route("any%2Etxt"), user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns a 400 bad request if the commit message is empty' do
@@ -556,7 +592,7 @@ describe API::Files do
post api(route(file_path), user), params: params
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it "returns a 400 if editor fails to create file" do
@@ -566,7 +602,7 @@ describe API::Files do
post api(route("any%2Etxt"), user), params: params
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
context 'with PATs' do
@@ -575,7 +611,7 @@ describe API::Files do
post api(route(file_path), personal_access_token: token), params: params
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'returns 201 with `api` scope' do
@@ -583,7 +619,7 @@ describe API::Files do
post api(route(file_path), personal_access_token: token), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
end
@@ -593,7 +629,7 @@ describe API::Files do
post api(route("new_file_with_author%2Etxt"), user), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(response.content_type).to eq('application/json')
last_commit = project.repository.commit.raw
expect(last_commit.author_email).to eq(author_email)
@@ -607,7 +643,7 @@ describe API::Files do
it "creates a new file in project repo" do
post api(route("newfile%2Erb"), user), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['file_path']).to eq('newfile.rb')
last_commit = project.repository.commit.raw
expect(last_commit.author_email).to eq(user.email)
@@ -628,7 +664,7 @@ describe API::Files do
it "updates existing file in project repo" do
put api(route(file_path), user), params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['file_path']).to eq(CGI.unescape(file_path))
last_commit = project.repository.commit.raw
expect(last_commit.author_email).to eq(user.email)
@@ -640,7 +676,7 @@ describe API::Files do
put api(route(file_path), user), params: params
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it "returns a 400 bad request if update existing file with stale last commit id" do
@@ -648,7 +684,7 @@ describe API::Files do
put api(route(file_path), user), params: params_with_stale_id
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq(_('You are attempting to update a file that has changed since you started editing it.'))
end
@@ -659,13 +695,24 @@ describe API::Files do
put api(route(file_path), user), params: params_with_correct_id
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it "returns 400 when file path is invalid" do
+ last_commit = Gitlab::Git::Commit
+ .last_for_path(project.repository, 'master', URI.unescape(file_path))
+ params_with_correct_id = params.merge(last_commit_id: last_commit.id)
+
+ put api(route(rouge_file_path), user), params: params_with_correct_id
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq(invalid_file_message)
end
it "returns a 400 bad request if no params given" do
put api(route(file_path), user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
context "when specifying an author" do
@@ -674,7 +721,7 @@ describe API::Files do
put api(route(file_path), user), params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
last_commit = project.repository.commit.raw
expect(last_commit.author_email).to eq(author_email)
expect(last_commit.author_name).to eq(author_name)
@@ -690,16 +737,23 @@ describe API::Files do
}
end
+ it 'returns 400 when file path is invalid' do
+ delete api(route(rouge_file_path), user), params: params
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq(invalid_file_message)
+ end
+
it "deletes existing file in project repo" do
delete api(route(file_path), user), params: params
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
it "returns a 400 bad request if no params given" do
delete api(route(file_path), user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns a 400 bad request if the commit message is empty' do
@@ -707,7 +761,7 @@ describe API::Files do
delete api(route(file_path), user), params: params
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it "returns a 400 if fails to delete file" do
@@ -717,7 +771,7 @@ describe API::Files do
delete api(route(file_path), user), params: params
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
context "when specifying an author" do
@@ -726,7 +780,7 @@ describe API::Files do
delete api(route(file_path), user), params: params
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
end
end
@@ -754,7 +808,7 @@ describe API::Files do
it "remains unchanged" do
get api(route(file_path), user), params: get_params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['file_path']).to eq(CGI.unescape(file_path))
expect(json_response['file_name']).to eq(CGI.unescape(file_path))
expect(json_response['content']).to eq(put_params[:content])
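The change running through this file swaps numeric HTTP statuses for their symbolic names. As a minimal sketch, the symbol-to-code mapping these matchers presumably lean on is the table Rack ships; the matcher's own internals are not part of this diff:

    # Illustration only: Rack's status table covers the symbols used above.
    require 'rack/utils'

    Rack::Utils.status_code(:ok)          # => 200
    Rack::Utils.status_code(:created)     # => 201
    Rack::Utils.status_code(:no_content)  # => 204
    Rack::Utils.status_code(:bad_request) # => 400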
diff --git a/spec/requests/api/graphql/boards/boards_query_spec.rb b/spec/requests/api/graphql/boards/boards_query_spec.rb
new file mode 100644
index 00000000000..a17554aba21
--- /dev/null
+++ b/spec/requests/api/graphql/boards/boards_query_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'get list of boards' do
+ include GraphqlHelpers
+
+ include_context 'group and project boards query context'
+
+ describe 'for a project' do
+ let(:board_parent) { create(:project, :repository, :private) }
+
+ it_behaves_like 'group and project boards query'
+ end
+
+ describe 'for a group' do
+ let(:board_parent) { create(:group, :private) }
+
+ before do
+ allow(board_parent).to receive(:multiple_issue_boards_available?).and_return(false)
+ end
+
+ it_behaves_like 'group and project boards query'
+ end
+end
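The new spec above is driven entirely by a shared context plus a shared example group. A stand-alone sketch of that RSpec pattern, using generic stand-in names rather than the actual 'group and project boards query' definitions:

    # Generic illustration of include_context / it_behaves_like; not GitLab's shared examples.
    RSpec.shared_context 'boards query context' do
      let(:query) { '{ boards { nodes { name } } }' } # stand-in query string
    end

    RSpec.shared_examples 'boards query' do
      include_context 'boards query context'

      it 'builds a boards query' do
        expect(query).to include('boards')
      end
    end

    RSpec.describe 'a consumer of the shared examples' do
      it_behaves_like 'boards query'
    end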
diff --git a/spec/requests/api/graphql/gitlab_schema_spec.rb b/spec/requests/api/graphql/gitlab_schema_spec.rb
index 8d020cd3a4e..cf409ea6c2d 100644
--- a/spec/requests/api/graphql/gitlab_schema_spec.rb
+++ b/spec/requests/api/graphql/gitlab_schema_spec.rb
@@ -11,11 +11,11 @@ describe 'GitlabSchema configurations' do
describe 'timeouts' do
context 'when timeout is reached' do
it 'shows an error' do
- Timecop.scale(50000000) do # ludicrously large number because the timeout has to happen before the query even begins
- subject
+ allow_any_instance_of(Gitlab::Graphql::Timeout).to receive(:max_seconds).and_return(0)
- expect_graphql_errors_to_include /Timeout/
- end
+ subject
+
+ expect_graphql_errors_to_include /Timeout/
end
end
end
@@ -140,7 +140,7 @@ describe 'GitlabSchema configurations' do
end
it_behaves_like 'imposing query limits' do
- it "fails all queries when only one of the queries is too complex" do
+ it 'fails all queries when only one of the queries is too complex' do
# The `project` query above has a complexity of 5
allow(GitlabSchema).to receive(:max_query_complexity).and_return 4
diff --git a/spec/requests/api/graphql/group_query_spec.rb b/spec/requests/api/graphql/group_query_spec.rb
index 6e2663fb090..a38d1857076 100644
--- a/spec/requests/api/graphql/group_query_spec.rb
+++ b/spec/requests/api/graphql/group_query_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
# Based on spec/requests/api/groups_spec.rb
# Should follow closely in order to ensure all situations are covered
-describe 'getting group information' do
+describe 'getting group information', :do_not_mock_admin_mode do
include GraphqlHelpers
include UploadHelpers
diff --git a/spec/requests/api/graphql/mutations/admin/sidekiq_queues/delete_jobs_spec.rb b/spec/requests/api/graphql/mutations/admin/sidekiq_queues/delete_jobs_spec.rb
new file mode 100644
index 00000000000..a5159da84f3
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/admin/sidekiq_queues/delete_jobs_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Deleting Sidekiq jobs', :clean_gitlab_redis_queues do
+ include GraphqlHelpers
+
+ let_it_be(:admin) { create(:admin) }
+
+ let(:variables) { { user: admin.username, queue_name: 'authorized_projects' } }
+ let(:mutation) { graphql_mutation(:admin_sidekiq_queues_delete_jobs, variables) }
+
+ def mutation_response
+ graphql_mutation_response(:admin_sidekiq_queues_delete_jobs)
+ end
+
+ context 'when the user is not an admin' do
+ let(:current_user) { create(:user) }
+
+ it_behaves_like 'a mutation that returns top-level errors',
+ errors: ['You must be an admin to use this mutation']
+ end
+
+ context 'when the user is an admin' do
+ let(:current_user) { admin }
+
+ context 'valid request' do
+ around do |example|
+ Sidekiq::Queue.new('authorized_projects').clear
+ Sidekiq::Testing.disable!(&example)
+ Sidekiq::Queue.new('authorized_projects').clear
+ end
+
+ def add_job(user, args)
+ Sidekiq::Client.push(
+ 'class' => 'AuthorizedProjectsWorker',
+ 'queue' => 'authorized_projects',
+ 'args' => args,
+ 'meta.user' => user.username
+ )
+ end
+
+ it 'returns info about the deleted jobs' do
+ add_job(admin, [1])
+ add_job(admin, [2])
+ add_job(create(:user), [3])
+
+ post_graphql_mutation(mutation, current_user: admin)
+
+ expect(mutation_response['errors']).to be_empty
+ expect(mutation_response['result']).to eq('completed' => true,
+ 'deletedJobs' => 2,
+ 'queueSize' => 1)
+ end
+ end
+
+ context 'when no required params are provided' do
+ let(:variables) { { queue_name: 'authorized_projects' } }
+
+ it_behaves_like 'a mutation that returns errors in the response',
+ errors: ['No metadata provided']
+ end
+
+ context 'when the queue does not exist' do
+ let(:variables) { { user: admin.username, queue_name: 'authorized_projects_2' } }
+
+ it_behaves_like 'a mutation that returns top-level errors',
+ errors: ['Queue authorized_projects_2 not found']
+ end
+ end
+end
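For orientation, the document built by graphql_mutation(:admin_sidekiq_queues_delete_jobs, variables) presumably expands to something like the sketch below; the response fields are taken from the assertions above, but the exact shape and the camel-cased mutation name are assumptions, not part of this diff:

    # Hypothetical expansion of the generated mutation; GraphqlHelpers may emit a different document.
    mutation = <<~GQL
      mutation {
        adminSidekiqQueuesDeleteJobs(input: { user: "root", queueName: "authorized_projects" }) {
          errors
          result { completed deletedJobs queueSize }
        }
      }
    GQL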
diff --git a/spec/requests/api/graphql/mutations/snippets/create_spec.rb b/spec/requests/api/graphql/mutations/snippets/create_spec.rb
index cb19f50b5b5..cef7fc5cbe3 100644
--- a/spec/requests/api/graphql/mutations/snippets/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/snippets/create_spec.rb
@@ -67,7 +67,7 @@ describe 'Creating a Snippet' do
it 'returns the created Snippet' do
post_graphql_mutation(mutation, current_user: current_user)
- expect(mutation_response['snippet']['blob']['richData']).to match(content)
+ expect(mutation_response['snippet']['blob']['richData']).to be_nil
expect(mutation_response['snippet']['blob']['plainData']).to match(content)
expect(mutation_response['snippet']['title']).to eq(title)
expect(mutation_response['snippet']['description']).to eq(description)
@@ -93,7 +93,7 @@ describe 'Creating a Snippet' do
it 'returns the created Snippet' do
post_graphql_mutation(mutation, current_user: current_user)
- expect(mutation_response['snippet']['blob']['richData']).to match(content)
+ expect(mutation_response['snippet']['blob']['richData']).to be_nil
expect(mutation_response['snippet']['blob']['plainData']).to match(content)
expect(mutation_response['snippet']['title']).to eq(title)
expect(mutation_response['snippet']['description']).to eq(description)
diff --git a/spec/requests/api/graphql/mutations/snippets/mark_as_spam_spec.rb b/spec/requests/api/graphql/mutations/snippets/mark_as_spam_spec.rb
index f80a3401134..05e3f7e6806 100644
--- a/spec/requests/api/graphql/mutations/snippets/mark_as_spam_spec.rb
+++ b/spec/requests/api/graphql/mutations/snippets/mark_as_spam_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe 'Mark snippet as spam' do
+describe 'Mark snippet as spam', :do_not_mock_admin_mode do
include GraphqlHelpers
let_it_be(:admin) { create(:admin) }
diff --git a/spec/requests/api/graphql/mutations/snippets/update_spec.rb b/spec/requests/api/graphql/mutations/snippets/update_spec.rb
index e9481a36287..1035e3346e1 100644
--- a/spec/requests/api/graphql/mutations/snippets/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/snippets/update_spec.rb
@@ -56,7 +56,7 @@ describe 'Updating a Snippet' do
it 'returns the updated Snippet' do
post_graphql_mutation(mutation, current_user: current_user)
- expect(mutation_response['snippet']['blob']['richData']).to match(updated_content)
+ expect(mutation_response['snippet']['blob']['richData']).to be_nil
expect(mutation_response['snippet']['blob']['plainData']).to match(updated_content)
expect(mutation_response['snippet']['title']).to eq(updated_title)
expect(mutation_response['snippet']['description']).to eq(updated_description)
@@ -78,7 +78,7 @@ describe 'Updating a Snippet' do
it 'returns the Snippet with its original values' do
post_graphql_mutation(mutation, current_user: current_user)
- expect(mutation_response['snippet']['blob']['richData']).to match(original_content)
+ expect(mutation_response['snippet']['blob']['richData']).to be_nil
expect(mutation_response['snippet']['blob']['plainData']).to match(original_content)
expect(mutation_response['snippet']['title']).to eq(original_title)
expect(mutation_response['snippet']['description']).to eq(original_description)
@@ -91,7 +91,7 @@ describe 'Updating a Snippet' do
describe 'PersonalSnippet' do
it_behaves_like 'graphql update actions' do
- let_it_be(:snippet) do
+ let(:snippet) do
create(:personal_snippet,
:private,
file_name: original_file_name,
@@ -104,7 +104,7 @@ describe 'Updating a Snippet' do
describe 'ProjectSnippet' do
let_it_be(:project) { create(:project, :private) }
- let_it_be(:snippet) do
+ let(:snippet) do
create(:project_snippet,
:private,
project: project,
diff --git a/spec/requests/api/graphql_spec.rb b/spec/requests/api/graphql_spec.rb
index cad9329fcb8..92c5c52be7d 100644
--- a/spec/requests/api/graphql_spec.rb
+++ b/spec/requests/api/graphql_spec.rb
@@ -58,7 +58,7 @@ describe 'GraphQL' do
it 'returns an error' do
post_graphql(query, variables: "This is not JSON")
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(json_response['errors'].first['message']).not_to be_nil
end
end
@@ -114,7 +114,7 @@ describe 'GraphQL' do
post_graphql(query, headers: { 'PRIVATE-TOKEN' => token.token })
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(graphql_data['echo']).to eq('nil says: Hello world')
end
@@ -152,4 +152,52 @@ describe 'GraphQL' do
end
end
end
+
+ describe 'resolver complexity' do
+ let_it_be(:project) { create(:project, :public) }
+ let(:query) do
+ graphql_query_for(
+ 'project',
+ { 'fullPath' => project.full_path },
+ query_graphql_field(resource, {}, 'edges { node { iid } }')
+ )
+ end
+
+ before do
+ stub_const('GitlabSchema::DEFAULT_MAX_COMPLEXITY', 6)
+ stub_feature_flags(graphql_resolver_complexity: true)
+ end
+
+ context 'when fetching single resource' do
+ let(:resource) { 'issues(first: 1)' }
+
+ it 'processes the query' do
+ post_graphql(query)
+
+ expect(graphql_errors).to be_nil
+ end
+ end
+
+ context 'when fetching too many resources' do
+ let(:resource) { 'issues(first: 100)' }
+
+ it 'returns an error' do
+ post_graphql(query)
+
+ expect_graphql_errors_to_include(/which exceeds max complexity/)
+ end
+
+ context 'when graphql_resolver_complexity is disabled' do
+ before do
+ stub_feature_flags(graphql_resolver_complexity: false)
+ end
+
+ it 'processes the query' do
+ post_graphql(query)
+
+ expect(graphql_errors).to be_nil
+ end
+ end
+ end
+ end
end
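The new resolver-complexity block pins GitlabSchema::DEFAULT_MAX_COMPLEXITY with stub_const and toggles a flag with stub_feature_flags. The flag helper is GitLab-specific, but the constant stubbing is plain rspec-mocks; a self-contained sketch with a stand-in constant:

    # Stand-alone rspec-mocks example; the class and constant here are stand-ins, not GitlabSchema.
    require 'rspec/autorun'

    class Limits
      MAX_COMPLEXITY = 100
    end

    RSpec.describe 'stub_const' do
      it 'overrides the constant only for the duration of the example' do
        stub_const('Limits::MAX_COMPLEXITY', 6)
        expect(Limits::MAX_COMPLEXITY).to eq(6)
      end
    end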
diff --git a/spec/requests/api/group_boards_spec.rb b/spec/requests/api/group_boards_spec.rb
index d2d10f357fe..a9083f82f25 100644
--- a/spec/requests/api/group_boards_spec.rb
+++ b/spec/requests/api/group_boards_spec.rb
@@ -50,7 +50,7 @@ describe API::GroupBoards do
post api(url, user), params: { label_id: project_label.id }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
diff --git a/spec/requests/api/group_clusters_spec.rb b/spec/requests/api/group_clusters_spec.rb
index 14027db01c4..d3bd84f1604 100644
--- a/spec/requests/api/group_clusters_spec.rb
+++ b/spec/requests/api/group_clusters_spec.rb
@@ -26,7 +26,7 @@ describe API::GroupClusters do
it 'responds with 403' do
get api("/groups/#{group.id}/clusters", developer_user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -36,7 +36,7 @@ describe API::GroupClusters do
end
it 'responds with 200' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'includes pagination headers' do
@@ -70,7 +70,7 @@ describe API::GroupClusters do
it 'responds with 403' do
get api("/groups/#{group.id}/clusters/#{cluster_id}", developer_user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -140,7 +140,7 @@ describe API::GroupClusters do
let(:cluster_id) { 123 }
it 'returns 404' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -179,7 +179,7 @@ describe API::GroupClusters do
it 'responds with 403' do
post api("/groups/#{group.id}/clusters/user", developer_user), params: cluster_params
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -190,7 +190,7 @@ describe API::GroupClusters do
context 'with valid params' do
it 'responds with 201' do
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
it 'creates a new Cluster::Cluster' do
@@ -238,7 +238,7 @@ describe API::GroupClusters do
let(:api_url) { 'invalid_api_url' }
it 'responds with 400' do
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'does not create a new Clusters::Cluster' do
@@ -260,7 +260,7 @@ describe API::GroupClusters do
end
it 'responds with 400' do
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['base'].first).to eq(_('Instance does not support multiple Kubernetes clusters'))
end
end
@@ -271,7 +271,7 @@ describe API::GroupClusters do
end
it 'responds with 403' do
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response['message']).to eq('403 Forbidden')
end
@@ -305,7 +305,7 @@ describe API::GroupClusters do
it 'responds with 403' do
put api("/groups/#{group.id}/clusters/#{cluster.id}", developer_user), params: update_params
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -320,7 +320,7 @@ describe API::GroupClusters do
context 'with valid params' do
it 'responds with 200' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'updates cluster attributes' do
@@ -333,7 +333,7 @@ describe API::GroupClusters do
let(:domain) { 'invalid domain' }
it 'responds with 400' do
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'does not update cluster attributes' do
@@ -350,7 +350,7 @@ describe API::GroupClusters do
let(:management_project_id) { create(:project).id }
it 'responds with 400' do
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns validation errors' do
@@ -368,7 +368,7 @@ describe API::GroupClusters do
end
it 'responds with 400' do
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns validation error' do
@@ -380,7 +380,7 @@ describe API::GroupClusters do
let(:domain) { 'new-domain.com' }
it 'responds with 200' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -408,7 +408,7 @@ describe API::GroupClusters do
end
it 'responds with 200' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'updates platform kubernetes attributes' do
@@ -424,7 +424,7 @@ describe API::GroupClusters do
let(:cluster) { create(:cluster, :group, :provided_by_user) }
it 'responds with 404' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -442,7 +442,7 @@ describe API::GroupClusters do
it 'responds with 403' do
delete api("/groups/#{group.id}/clusters/#{cluster.id}", developer_user), params: cluster_params
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -452,7 +452,7 @@ describe API::GroupClusters do
end
it 'responds with 204' do
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
it 'deletes the cluster' do
@@ -463,7 +463,7 @@ describe API::GroupClusters do
let(:cluster) { create(:cluster, :group, :provided_by_user) }
it 'responds with 404' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/requests/api/group_export_spec.rb b/spec/requests/api/group_export_spec.rb
index 6128f2e4a87..47193591cf1 100644
--- a/spec/requests/api/group_export_spec.rb
+++ b/spec/requests/api/group_export_spec.rb
@@ -44,7 +44,7 @@ describe API::GroupExport do
it 'downloads exported group archive' do
get api(download_path, user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
context 'when export_file.file does not exist' do
@@ -57,7 +57,7 @@ describe API::GroupExport do
it 'returns 404' do
get api(download_path, user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -66,7 +66,7 @@ describe API::GroupExport do
it 'returns 404' do
get api(download_path, user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -79,7 +79,7 @@ describe API::GroupExport do
it 'responds with 404 Not Found' do
get api(download_path, user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -98,7 +98,7 @@ describe API::GroupExport do
it 'accepts download' do
post api(path, user)
- expect(response).to have_gitlab_http_status(202)
+ expect(response).to have_gitlab_http_status(:accepted)
end
end
@@ -110,7 +110,7 @@ describe API::GroupExport do
it 'forbids the request' do
post api(path, user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -123,7 +123,7 @@ describe API::GroupExport do
it 'responds with 404 Not Found' do
post api(path, user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/requests/api/group_import_spec.rb b/spec/requests/api/group_import_spec.rb
index 1594881677f..3f85428aac2 100644
--- a/spec/requests/api/group_import_spec.rb
+++ b/spec/requests/api/group_import_spec.rb
@@ -42,7 +42,7 @@ describe API::GroupImport do
it 'creates new group and accepts request' do
subject
- expect(response).to have_gitlab_http_status(202)
+ expect(response).to have_gitlab_http_status(:accepted)
end
it 'creates private group' do
@@ -63,7 +63,7 @@ describe API::GroupImport do
subject
- expect(response).to have_gitlab_http_status(202)
+ expect(response).to have_gitlab_http_status(:accepted)
expect(group.children.count).to eq(1)
end
@@ -81,7 +81,7 @@ describe API::GroupImport do
subject
- expect(response).to have_gitlab_http_status(202)
+ expect(response).to have_gitlab_http_status(:accepted)
expect(public_parent_group.children.first.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC)
end
@@ -90,7 +90,7 @@ describe API::GroupImport do
subject
- expect(response).to have_gitlab_http_status(202)
+ expect(response).to have_gitlab_http_status(:accepted)
expect(internal_parent_group.children.first.visibility_level).to eq(Gitlab::VisibilityLevel::INTERNAL)
end
end
@@ -101,7 +101,7 @@ describe API::GroupImport do
expect { subject }.not_to change { Group.count }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Group Not Found')
end
@@ -111,7 +111,7 @@ describe API::GroupImport do
subject
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response['message']).to eq('403 Forbidden')
end
end
@@ -128,7 +128,7 @@ describe API::GroupImport do
it 'returns 400 HTTP status' do
subject
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
@@ -139,7 +139,7 @@ describe API::GroupImport do
it 'forbids the request' do
subject
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -154,7 +154,7 @@ describe API::GroupImport do
post api('/groups/import', user), params: params, headers: workhorse_header
end.not_to change { Group.count }.from(1)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq(error_message)
end
end
diff --git a/spec/requests/api/group_labels_spec.rb b/spec/requests/api/group_labels_spec.rb
index f7994b55efa..dea26ec7274 100644
--- a/spec/requests/api/group_labels_spec.rb
+++ b/spec/requests/api/group_labels_spec.rb
@@ -15,7 +15,7 @@ describe API::GroupLabels do
it 'returns all available labels for the group' do
get api("/groups/#{group.id}/labels", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response).to all(match_schema('public_api/v4/labels/label'))
@@ -27,7 +27,7 @@ describe API::GroupLabels do
it 'includes counts in the response' do
get api("/groups/#{group.id}/labels", user), params: { with_counts: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response).to all(match_schema('public_api/v4/labels/label_with_counts'))
@@ -42,7 +42,7 @@ describe API::GroupLabels do
it 'returns all available labels for the group and ancestor groups' do
get api("/groups/#{subgroup.id}/labels", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response).to all(match_schema('public_api/v4/labels/label'))
@@ -55,7 +55,7 @@ describe API::GroupLabels do
it 'returns all available labels for the group but not for ancestor groups' do
get api("/groups/#{subgroup.id}/labels", user), params: { include_ancestor_groups: false }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response).to all(match_schema('public_api/v4/labels/label'))
@@ -69,7 +69,7 @@ describe API::GroupLabels do
it 'returns a single label for the group' do
get api("/groups/#{group.id}/labels/#{group_label1.name}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq(group_label1.name)
expect(json_response['color']).to eq(group_label1.color)
expect(json_response['description']).to eq(group_label1.description)
@@ -85,7 +85,7 @@ describe API::GroupLabels do
description: 'test'
}
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq('Foo')
expect(json_response['color']).to eq('#FFAABB')
expect(json_response['description']).to eq('test')
@@ -107,13 +107,13 @@ describe API::GroupLabels do
it 'returns a 400 bad request if name not given' do
post api("/groups/#{group.id}/labels", user), params: { color: '#FFAABB' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns a 400 bad request if color is not given' do
post api("/groups/#{group.id}/labels", user), params: { name: 'Foobar' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns 409 if label already exists' do
@@ -123,7 +123,7 @@ describe API::GroupLabels do
color: '#FFAABB'
}
- expect(response).to have_gitlab_http_status(409)
+ expect(response).to have_gitlab_http_status(:conflict)
expect(json_response['message']).to eq('Label already exists')
end
end
@@ -132,20 +132,20 @@ describe API::GroupLabels do
it 'returns 204 for existing label' do
delete api("/groups/#{group.id}/labels", user), params: { name: group_label1.name }
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
it 'returns 404 for non existing label' do
delete api("/groups/#{group.id}/labels", user), params: { name: 'not_exists' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Label Not Found')
end
it 'returns 400 for wrong parameters' do
delete api("/groups/#{group.id}/labels", user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it "does not delete parent's group labels" do
@@ -154,7 +154,7 @@ describe API::GroupLabels do
delete api("/groups/#{subgroup.id}/labels", user), params: { name: subgroup_label.name }
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
expect(subgroup.labels.size).to eq(0)
expect(group.labels).to include(group_label1)
end
@@ -169,13 +169,13 @@ describe API::GroupLabels do
it 'returns 204 for existing label' do
delete api("/groups/#{group.id}/labels/#{group_label1.name}", user)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
it 'returns 404 for non existing label' do
delete api("/groups/#{group.id}/labels/not_exists", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Label Not Found')
end
@@ -185,7 +185,7 @@ describe API::GroupLabels do
delete api("/groups/#{subgroup.id}/labels/#{subgroup_label.name}", user)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
expect(subgroup.labels.size).to eq(0)
expect(group.labels).to include(group_label1)
end
@@ -205,7 +205,7 @@ describe API::GroupLabels do
description: 'test'
}
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq('New Label')
expect(json_response['color']).to eq('#FFFFFF')
expect(json_response['description']).to eq('test')
@@ -221,7 +221,7 @@ describe API::GroupLabels do
new_name: 'New Label'
}
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(subgroup.labels[0].name).to eq('New Label')
expect(group_label1.name).to eq('feature')
end
@@ -233,20 +233,20 @@ describe API::GroupLabels do
new_name: 'label3'
}
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns 400 if no label name given' do
put api("/groups/#{group.id}/labels", user), params: { new_name: group_label1.name }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('label_id, name are missing, exactly one parameter must be provided')
end
it 'returns 400 if no new parameters given' do
put api("/groups/#{group.id}/labels", user), params: { name: group_label1.name }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('new_name, color, description are missing, '\
'at least one parameter must be provided')
end
@@ -261,7 +261,7 @@ describe API::GroupLabels do
description: 'test'
}
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq('New Label')
expect(json_response['color']).to eq('#FFFFFF')
expect(json_response['description']).to eq('test')
@@ -276,7 +276,7 @@ describe API::GroupLabels do
new_name: 'New Label'
}
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(subgroup.labels[0].name).to eq('New Label')
expect(group_label1.name).to eq('feature')
end
@@ -287,13 +287,13 @@ describe API::GroupLabels do
new_name: 'label3'
}
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns 400 if no new parameters given' do
put api("/groups/#{group.id}/labels/#{group_label1.name}", user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('new_name, color, description are missing, '\
'at least one parameter must be provided')
end
@@ -304,7 +304,7 @@ describe API::GroupLabels do
it 'subscribes to the label' do
post api("/groups/#{group.id}/labels/#{group_label1.title}/subscribe", user)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq(group_label1.title)
expect(json_response['subscribed']).to be_truthy
end
@@ -314,7 +314,7 @@ describe API::GroupLabels do
it 'subscribes to the label' do
post api("/groups/#{group.id}/labels/#{group_label1.id}/subscribe", user)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq(group_label1.title)
expect(json_response['subscribed']).to be_truthy
end
@@ -328,7 +328,7 @@ describe API::GroupLabels do
it 'returns 304' do
post api("/groups/#{group.id}/labels/#{group_label1.id}/subscribe", user)
- expect(response).to have_gitlab_http_status(304)
+ expect(response).to have_gitlab_http_status(:not_modified)
end
end
@@ -336,7 +336,7 @@ describe API::GroupLabels do
it 'returns 404 error' do
post api("/groups/#{group.id}/labels/1234/subscribe", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -350,7 +350,7 @@ describe API::GroupLabels do
it 'unsubscribes from the label' do
post api("/groups/#{group.id}/labels/#{group_label1.title}/unsubscribe", user)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq(group_label1.title)
expect(json_response['subscribed']).to be_falsey
end
@@ -360,7 +360,7 @@ describe API::GroupLabels do
it 'unsubscribes from the label' do
post api("/groups/#{group.id}/labels/#{group_label1.id}/unsubscribe", user)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq(group_label1.title)
expect(json_response['subscribed']).to be_falsey
end
@@ -374,7 +374,7 @@ describe API::GroupLabels do
it 'returns 304' do
post api("/groups/#{group.id}/labels/#{group_label1.id}/unsubscribe", user)
- expect(response).to have_gitlab_http_status(304)
+ expect(response).to have_gitlab_http_status(:not_modified)
end
end
@@ -382,7 +382,7 @@ describe API::GroupLabels do
it 'returns 404 error' do
post api("/groups/#{group.id}/labels/1234/unsubscribe", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/requests/api/group_variables_spec.rb b/spec/requests/api/group_variables_spec.rb
index abdc3a40360..a5b48985df5 100644
--- a/spec/requests/api/group_variables_spec.rb
+++ b/spec/requests/api/group_variables_spec.rb
@@ -17,7 +17,7 @@ describe API::GroupVariables do
it 'returns group variables' do
get api("/groups/#{group.id}/variables", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_a(Array)
end
end
@@ -26,7 +26,7 @@ describe API::GroupVariables do
it 'does not return group variables' do
get api("/groups/#{group.id}/variables", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -34,7 +34,7 @@ describe API::GroupVariables do
it 'does not return group variables' do
get api("/groups/#{group.id}/variables")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -50,7 +50,7 @@ describe API::GroupVariables do
it 'returns group variable details' do
get api("/groups/#{group.id}/variables/#{variable.key}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['value']).to eq(variable.value)
expect(json_response['protected']).to eq(variable.protected?)
expect(json_response['variable_type']).to eq(variable.variable_type)
@@ -59,7 +59,7 @@ describe API::GroupVariables do
it 'responds with 404 Not Found if requesting non-existing variable' do
get api("/groups/#{group.id}/variables/non_existing_variable", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -67,7 +67,7 @@ describe API::GroupVariables do
it 'does not return group variable details' do
get api("/groups/#{group.id}/variables/#{variable.key}", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -75,7 +75,7 @@ describe API::GroupVariables do
it 'does not return group variable details' do
get api("/groups/#{group.id}/variables/#{variable.key}")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -90,13 +90,14 @@ describe API::GroupVariables do
it 'creates variable' do
expect do
- post api("/groups/#{group.id}/variables", user), params: { key: 'TEST_VARIABLE_2', value: 'PROTECTED_VALUE_2', protected: true }
+ post api("/groups/#{group.id}/variables", user), params: { key: 'TEST_VARIABLE_2', value: 'PROTECTED_VALUE_2', protected: true, masked: true }
end.to change {group.variables.count}.by(1)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['key']).to eq('TEST_VARIABLE_2')
expect(json_response['value']).to eq('PROTECTED_VALUE_2')
expect(json_response['protected']).to be_truthy
+ expect(json_response['masked']).to be_truthy
expect(json_response['variable_type']).to eq('env_var')
end
@@ -105,10 +106,11 @@ describe API::GroupVariables do
post api("/groups/#{group.id}/variables", user), params: { variable_type: 'file', key: 'TEST_VARIABLE_2', value: 'VALUE_2' }
end.to change {group.variables.count}.by(1)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['key']).to eq('TEST_VARIABLE_2')
expect(json_response['value']).to eq('VALUE_2')
expect(json_response['protected']).to be_falsey
+ expect(json_response['masked']).to be_falsey
expect(json_response['variable_type']).to eq('file')
end
@@ -117,7 +119,7 @@ describe API::GroupVariables do
post api("/groups/#{group.id}/variables", user), params: { key: variable.key, value: 'VALUE_2' }
end.to change {group.variables.count}.by(0)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -125,7 +127,7 @@ describe API::GroupVariables do
it 'does not create variable' do
post api("/groups/#{group.id}/variables", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -133,7 +135,7 @@ describe API::GroupVariables do
it 'does not create variable' do
post api("/groups/#{group.id}/variables")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -150,21 +152,22 @@ describe API::GroupVariables do
initial_variable = group.variables.reload.first
value_before = initial_variable.value
- put api("/groups/#{group.id}/variables/#{variable.key}", user), params: { variable_type: 'file', value: 'VALUE_1_UP', protected: true }
+ put api("/groups/#{group.id}/variables/#{variable.key}", user), params: { variable_type: 'file', value: 'VALUE_1_UP', protected: true, masked: true }
updated_variable = group.variables.reload.first
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(value_before).to eq(variable.value)
expect(updated_variable.value).to eq('VALUE_1_UP')
expect(updated_variable).to be_protected
expect(json_response['variable_type']).to eq('file')
+ expect(json_response['masked']).to be_truthy
end
it 'responds with 404 Not Found if requesting non-existing variable' do
put api("/groups/#{group.id}/variables/non_existing_variable", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -172,7 +175,7 @@ describe API::GroupVariables do
it 'does not update variable' do
put api("/groups/#{group.id}/variables/#{variable.key}", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -180,7 +183,7 @@ describe API::GroupVariables do
it 'does not update variable' do
put api("/groups/#{group.id}/variables/#{variable.key}")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -197,14 +200,14 @@ describe API::GroupVariables do
expect do
delete api("/groups/#{group.id}/variables/#{variable.key}", user)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.to change {group.variables.count}.by(-1)
end
it 'responds with 404 Not Found if requesting non-existing variable' do
delete api("/groups/#{group.id}/variables/non_existing_variable", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it_behaves_like '412 response' do
@@ -216,7 +219,7 @@ describe API::GroupVariables do
it 'does not delete variable' do
delete api("/groups/#{group.id}/variables/#{variable.key}", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -224,7 +227,7 @@ describe API::GroupVariables do
it 'does not delete variable' do
delete api("/groups/#{group.id}/variables/#{variable.key}")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb
index 35b77832c73..54bb2e670da 100644
--- a/spec/requests/api/groups_spec.rb
+++ b/spec/requests/api/groups_spec.rb
@@ -21,12 +21,53 @@ describe API::Groups do
group2.add_owner(user2)
end
+ shared_examples 'group avatar upload' do
+ context 'when valid' do
+ let(:file_path) { 'spec/fixtures/banana_sample.gif' }
+
+ it 'returns avatar url in response' do
+ make_upload_request
+
+ group_id = json_response['id']
+ expect(json_response['avatar_url']).to eq('http://localhost/uploads/'\
+ '-/system/group/avatar/'\
+ "#{group_id}/banana_sample.gif")
+ end
+ end
+
+ context 'when invalid' do
+ shared_examples 'invalid file upload request' do
+ it 'returns 400' do
+ make_upload_request
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(response.message).to eq('Bad Request')
+ expect(json_response['message'].to_s).to match(/#{message}/)
+ end
+ end
+
+ context 'when file format is not supported' do
+ let(:file_path) { 'spec/fixtures/doc_sample.txt' }
+ let(:message) { 'file format is not supported. Please try one of the following supported formats: image/png, image/jpeg, image/gif, image/bmp, image/tiff, image/vnd.microsoft.icon' }
+
+ it_behaves_like 'invalid file upload request'
+ end
+
+ context 'when file format is not supported' do
+ let(:file_path) { 'spec/fixtures/big-image.png' }
+ let(:message) { 'is too big' }
+
+ it_behaves_like 'invalid file upload request'
+ end
+ end
+ end
+
describe "GET /groups" do
context "when unauthenticated" do
it "returns public groups" do
get api("/groups")
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(1)
@@ -54,7 +95,7 @@ describe API::Groups do
it "normal user: returns an array of groups of user1" do
get api("/groups", user1)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(1)
@@ -65,7 +106,7 @@ describe API::Groups do
it "does not include runners_token information" do
get api("/groups", user1)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(1)
@@ -75,7 +116,7 @@ describe API::Groups do
it "does not include statistics" do
get api("/groups", user1), params: { statistics: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first).not_to include 'statistics'
@@ -86,7 +127,7 @@ describe API::Groups do
it "admin: returns an array of all groups" do
get api("/groups", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(2)
@@ -95,7 +136,7 @@ describe API::Groups do
it "does not include runners_token information" do
get api("/groups", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(2)
@@ -105,7 +146,7 @@ describe API::Groups do
it "does not include statistics by default" do
get api("/groups", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first).not_to include('statistics')
@@ -126,7 +167,7 @@ describe API::Groups do
get api("/groups", admin), params: { statistics: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response)
@@ -138,7 +179,7 @@ describe API::Groups do
it "returns all groups excluding skipped groups" do
get api("/groups", admin), params: { skip_groups: [group2.id] }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(1)
@@ -153,7 +194,7 @@ describe API::Groups do
get api("/groups", user1), params: { all_available: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(response_groups).to contain_exactly(public_group.name, group1.name)
@@ -176,7 +217,7 @@ describe API::Groups do
it "sorts by name ascending by default" do
get api("/groups", user1)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(response_groups).to eq(groups_visible_to_user(user1).order(:name).pluck(:name))
@@ -185,7 +226,7 @@ describe API::Groups do
it "sorts in descending order when passed" do
get api("/groups", user1), params: { sort: "desc" }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(response_groups).to eq(groups_visible_to_user(user1).order(name: :desc).pluck(:name))
@@ -194,7 +235,7 @@ describe API::Groups do
it "sorts by path in order_by param" do
get api("/groups", user1), params: { order_by: "path" }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(response_groups).to eq(groups_visible_to_user(user1).order(:path).pluck(:name))
@@ -203,7 +244,7 @@ describe API::Groups do
it "sorts by id in the order_by param" do
get api("/groups", user1), params: { order_by: "id" }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(response_groups).to eq(groups_visible_to_user(user1).order(:id).pluck(:name))
@@ -212,7 +253,7 @@ describe API::Groups do
it "sorts also by descending id with pagination fix" do
get api("/groups", user1), params: { order_by: "id", sort: "desc" }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(response_groups).to eq(groups_visible_to_user(user1).order(id: :desc).pluck(:name))
@@ -221,7 +262,7 @@ describe API::Groups do
it "sorts identical keys by id for good pagination" do
get api("/groups", user1), params: { search: "same-name", order_by: "name" }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(response_groups_ids).to eq(Group.select { |group| group['name'] == 'same-name' }.map { |group| group['id'] }.sort)
@@ -230,7 +271,7 @@ describe API::Groups do
it "sorts descending identical keys by id for good pagination" do
get api("/groups", user1), params: { search: "same-name", order_by: "name", sort: "desc" }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(response_groups_ids).to eq(Group.select { |group| group['name'] == 'same-name' }.map { |group| group['id'] }.sort)
@@ -247,7 +288,7 @@ describe API::Groups do
get api('/groups', user2), params: { owned: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(1)
@@ -267,7 +308,7 @@ describe API::Groups do
it 'returns an array of groups the user has at least master access' do
get api('/groups', user2), params: { min_access_level: 40 }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(response_groups).to eq([group2.id, group3.id])
@@ -308,13 +349,13 @@ describe API::Groups do
it 'returns 404 for a private group' do
get api("/groups/#{group2.id}")
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns 200 for a public group' do
get api("/groups/#{group1.id}")
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).not_to include('runners_token')
end
@@ -346,7 +387,7 @@ describe API::Groups do
get api("/groups/#{group1.id}", user1)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to eq(group1.id)
expect(json_response['name']).to eq(group1.name)
expect(json_response['path']).to eq(group1.path)
@@ -379,7 +420,7 @@ describe API::Groups do
get api("/groups/#{group1.id}", user1), params: { with_projects: false }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['projects']).to be_nil
expect(json_response['shared_projects']).to be_nil
expect(json_response).not_to include('runners_token')
@@ -388,7 +429,7 @@ describe API::Groups do
it "doesn't return runners_token if the user is not the owner of the group" do
get api("/groups/#{group1.id}", user3)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).not_to include('runners_token')
end
@@ -396,20 +437,20 @@ describe API::Groups do
group1.add_owner(user3)
get api("/groups/#{group1.id}", user3)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to include('runners_token')
end
it "does not return a non existing group" do
get api("/groups/1328", user1)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it "does not return a group not attached to user1" do
get api("/groups/#{group2.id}", user1)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns only public and internal projects in the group' do
@@ -451,21 +492,21 @@ describe API::Groups do
it "returns any existing group" do
get api("/groups/#{group2.id}", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq(group2.name)
end
it "returns information of the runners_token for the group" do
get api("/groups/#{group2.id}", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to include('runners_token')
end
it "does not return a non existing group" do
get api("/groups/1328", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -473,20 +514,20 @@ describe API::Groups do
it 'returns any existing group' do
get api("/groups/#{group1.path}", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq(group1.name)
end
it 'does not return a non existing group' do
get api('/groups/unknown', admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'does not return a group not attached to user1' do
get api("/groups/#{group2.path}", user1)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -539,16 +580,26 @@ describe API::Groups do
describe 'PUT /groups/:id' do
let(:new_group_name) { 'New Group'}
+ it_behaves_like 'group avatar upload' do
+ def make_upload_request
+ group_param = {
+ avatar: fixture_file_upload(file_path)
+ }
+ put api("/groups/#{group1.id}", user1), params: group_param
+ end
+ end
+
context 'when authenticated as the group owner' do
it 'updates the group' do
put api("/groups/#{group1.id}", user1), params: {
name: new_group_name,
request_access_enabled: true,
project_creation_level: "noone",
- subgroup_creation_level: "maintainer"
+ subgroup_creation_level: "maintainer",
+ default_branch_protection: ::Gitlab::Access::MAINTAINER_PROJECT_ACCESS
}
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq(new_group_name)
expect(json_response['description']).to eq('')
expect(json_response['visibility']).to eq('public')
@@ -566,12 +617,13 @@ describe API::Groups do
expect(json_response['projects'].length).to eq(2)
expect(json_response['shared_projects']).to be_an Array
expect(json_response['shared_projects'].length).to eq(0)
+ expect(json_response['default_branch_protection']).to eq(::Gitlab::Access::MAINTAINER_PROJECT_ACCESS)
end
it 'returns 404 for a non existing group' do
put api('/groups/1328', user1), params: { name: new_group_name }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
context 'within a subgroup' do
@@ -585,14 +637,14 @@ describe API::Groups do
it 'does not change visibility when not requested' do
put api("/groups/#{group3.id}", user3), params: { description: 'Bug #23083' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['visibility']).to eq('public')
end
it 'prevents making private a group containing public subgroups' do
put api("/groups/#{group3.id}", user3), params: { visibility: 'private' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['visibility_level']).to contain_exactly('private is not allowed since there are sub-groups with higher visibility.')
end
end
@@ -602,7 +654,7 @@ describe API::Groups do
it 'updates the group' do
put api("/groups/#{group1.id}", admin), params: { name: new_group_name }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq(new_group_name)
end
end
@@ -611,7 +663,7 @@ describe API::Groups do
it 'does not update the group' do
put api("/groups/#{group1.id}", user2), params: { name: new_group_name }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -619,7 +671,7 @@ describe API::Groups do
it 'returns 404 when trying to update the group' do
put api("/groups/#{group2.id}", user1), params: { name: new_group_name }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -645,7 +697,7 @@ describe API::Groups do
it "returns the group's projects" do
get api("/groups/#{group1.id}/projects", user1)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response.length).to eq(2)
project_names = json_response.map { |proj| proj['name'] }
@@ -656,7 +708,7 @@ describe API::Groups do
it "returns the group's projects with simple representation" do
get api("/groups/#{group1.id}/projects", user1), params: { simple: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response.length).to eq(2)
project_names = json_response.map { |proj| proj['name'] }
@@ -669,7 +721,7 @@ describe API::Groups do
get api("/groups/#{group1.id}/projects", user1), params: { visibility: 'public' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an(Array)
expect(json_response.length).to eq(1)
@@ -683,7 +735,7 @@ describe API::Groups do
get api("/groups/#{group1.id}/projects", user1), params: { with_shared: false }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an(Array)
expect(json_response.length).to eq(2)
@@ -696,7 +748,7 @@ describe API::Groups do
get api("/groups/#{group1.id}/projects", user1), params: { include_subgroups: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an(Array)
expect(json_response.length).to eq(4)
@@ -705,13 +757,13 @@ describe API::Groups do
it "does not return a non existing group" do
get api("/groups/1328/projects", user1)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it "does not return a group not attached to user1" do
get api("/groups/#{group2.id}/projects", user1)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it "only returns projects to which user has access" do
@@ -719,7 +771,7 @@ describe API::Groups do
get api("/groups/#{group1.id}/projects", user3)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response.length).to eq(1)
expect(json_response.first['name']).to eq(project3.name)
@@ -730,7 +782,7 @@ describe API::Groups do
get api("/groups/#{project2.group.id}/projects", user3), params: { owned: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.length).to eq(1)
expect(json_response.first['name']).to eq(project2.name)
end
@@ -740,7 +792,7 @@ describe API::Groups do
get api("/groups/#{group1.id}/projects", user1), params: { starred: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.length).to eq(1)
expect(json_response.first['name']).to eq(project1.name)
end
@@ -750,7 +802,7 @@ describe API::Groups do
it "returns any existing group" do
get api("/groups/#{group2.id}/projects", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response.length).to eq(1)
expect(json_response.first['name']).to eq(project2.name)
@@ -759,7 +811,7 @@ describe API::Groups do
it "does not return a non existing group" do
get api("/groups/1328/projects", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'avoids N+1 queries' do
@@ -781,7 +833,7 @@ describe API::Groups do
it 'returns any existing group' do
get api("/groups/#{group1.path}/projects", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
project_names = json_response.map { |proj| proj['name'] }
expect(project_names).to match_array([project1.name, project3.name])
@@ -790,13 +842,13 @@ describe API::Groups do
it 'does not return a non existing group' do
get api('/groups/unknown/projects', admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'does not return a group not attached to user1' do
get api("/groups/#{group2.path}/projects", user1)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -810,7 +862,7 @@ describe API::Groups do
it 'returns only public subgroups' do
get api("/groups/#{group1.id}/subgroups")
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(1)
@@ -821,7 +873,7 @@ describe API::Groups do
it 'returns 404 for a private group' do
get api("/groups/#{group2.id}/subgroups")
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -830,7 +882,7 @@ describe API::Groups do
it 'returns no subgroups for the public group' do
get api("/groups/#{group1.id}/subgroups", user2)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.length).to eq(0)
end
@@ -839,7 +891,7 @@ describe API::Groups do
it 'returns public subgroups' do
get api("/groups/#{group1.id}/subgroups", user2), params: { all_available: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.length).to eq(1)
expect(json_response[0]['id']).to eq(subgroup1.id)
@@ -852,7 +904,7 @@ describe API::Groups do
it 'returns 404 for the private group' do
get api("/groups/#{group2.id}/subgroups", user1)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -864,7 +916,7 @@ describe API::Groups do
it 'returns private subgroups' do
get api("/groups/#{group1.id}/subgroups", user2)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(2)
@@ -878,7 +930,7 @@ describe API::Groups do
it 'does not include statistics' do
get api("/groups/#{group1.id}/subgroups", user2), params: { statistics: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.first).not_to include 'statistics'
end
@@ -893,7 +945,7 @@ describe API::Groups do
it 'returns subgroups' do
get api("/groups/#{group2.id}/subgroups", user1)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.length).to eq(1)
expect(json_response.first['id']).to eq(subgroup3.id)
@@ -906,7 +958,7 @@ describe API::Groups do
it 'returns private subgroups of a public group' do
get api("/groups/#{group1.id}/subgroups", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.length).to eq(2)
end
@@ -914,7 +966,7 @@ describe API::Groups do
it 'returns subgroups of a private group' do
get api("/groups/#{group2.id}/subgroups", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.length).to eq(1)
end
@@ -922,7 +974,7 @@ describe API::Groups do
it 'does not include statistics by default' do
get api("/groups/#{group1.id}/subgroups", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.first).not_to include('statistics')
end
@@ -930,7 +982,7 @@ describe API::Groups do
it 'includes statistics if requested' do
get api("/groups/#{group1.id}/subgroups", admin), params: { statistics: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.first).to include('statistics')
end
@@ -938,13 +990,23 @@ describe API::Groups do
end
describe "POST /groups" do
+ it_behaves_like 'group avatar upload' do
+ def make_upload_request
+ params = attributes_for_group_api(request_access_enabled: false).tap do |attrs|
+ attrs[:avatar] = fixture_file_upload(file_path)
+ end
+
+ post api("/groups", user3), params: params
+ end
+ end
+
context "when authenticated as user without group permissions" do
it "does not create group" do
group = attributes_for_group_api
post api("/groups", user1), params: group
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
context 'as owner' do
@@ -955,7 +1017,7 @@ describe API::Groups do
it 'can create subgroups' do
post api("/groups", user1), params: { parent_id: group2.id, name: 'foo', path: 'foo' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
end
@@ -967,7 +1029,7 @@ describe API::Groups do
it 'can create subgroups' do
post api("/groups", user1), params: { parent_id: group2.id, name: 'foo', path: 'foo' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
end
end
@@ -978,7 +1040,7 @@ describe API::Groups do
post api("/groups", user3), params: group
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response["name"]).to eq(group[:name])
expect(json_response["path"]).to eq(group[:path])
@@ -993,7 +1055,7 @@ describe API::Groups do
post api("/groups", user3), params: group
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response["full_path"]).to eq("#{parent.path}/#{group[:path]}")
expect(json_response["parent_id"]).to eq(parent.id)
@@ -1002,20 +1064,20 @@ describe API::Groups do
it "does not create group, duplicate" do
post api("/groups", user3), params: { name: 'Duplicate Test', path: group2.path }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(response.message).to eq("Bad Request")
end
it "returns 400 bad request error if name not given" do
post api("/groups", user3), params: { path: group2.path }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it "returns 400 bad request error if path not given" do
post api("/groups", user3), params: { name: 'test' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
@@ -1027,7 +1089,7 @@ describe API::Groups do
expect { delete api("/groups/#{group1.id}", user1) }.to change(GroupDestroyWorker.jobs, :size).by(1)
end
- expect(response).to have_gitlab_http_status(202)
+ expect(response).to have_gitlab_http_status(:accepted)
end
it_behaves_like '412 response' do
@@ -1041,19 +1103,19 @@ describe API::Groups do
delete api("/groups/#{group1.id}", user3)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it "does not remove a non existing group" do
delete api("/groups/1328", user1)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it "does not remove a group not attached to user1" do
delete api("/groups/#{group2.id}", user1)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -1061,13 +1123,13 @@ describe API::Groups do
it "removes any existing group" do
delete api("/groups/#{group2.id}", admin)
- expect(response).to have_gitlab_http_status(202)
+ expect(response).to have_gitlab_http_status(:accepted)
end
it "does not remove a non existing group" do
delete api("/groups/1328", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -1086,7 +1148,7 @@ describe API::Groups do
it "does not transfer project to group" do
post api("/groups/#{group1.id}/projects/#{project.id}", user2)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -1094,7 +1156,7 @@ describe API::Groups do
it "transfers project to group" do
post api("/groups/#{group1.id}/projects/#{project.id}", admin)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
context 'when using project path in URL' do
@@ -1102,7 +1164,7 @@ describe API::Groups do
it "transfers project to group" do
post api("/groups/#{group1.id}/projects/#{project_path}", admin)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
end
@@ -1110,7 +1172,7 @@ describe API::Groups do
it "does not transfer project to group" do
post api("/groups/#{group1.id}/projects/nogroup%2Fnoproject", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -1120,7 +1182,7 @@ describe API::Groups do
it "transfers project to group" do
post api("/groups/#{group1.path}/projects/#{project_path}", admin)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
end
@@ -1128,7 +1190,7 @@ describe API::Groups do
it "does not transfer project to group" do
post api("/groups/noexist/projects/#{project_path}", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
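
Most of the hunks above make one mechanical change: numeric status assertions such as have_gitlab_http_status(200) become symbolic ones such as have_gitlab_http_status(:ok). As a rough illustration of how a matcher can accept either form — a minimal sketch with a hypothetical name (have_http_status_like), not GitLab's actual helper, assuming symbols resolve through Rack::Utils::SYMBOL_TO_STATUS_CODE:

require 'rack/utils'
require 'rspec/expectations'

# Hypothetical matcher, for illustration only: accepts either an Integer
# (200) or a Symbol (:ok) and compares it against response.status.
RSpec::Matchers.define :have_http_status_like do |expected|
  expected_code =
    if expected.is_a?(Symbol)
      Rack::Utils::SYMBOL_TO_STATUS_CODE.fetch(expected) # :ok => 200, :not_found => 404, ...
    else
      Integer(expected)
    end

  match { |response| response.status == expected_code }

  failure_message do |response|
    "expected HTTP status #{expected_code}, got #{response.status}"
  end
end

With that definition, expect(response).to have_http_status_like(:not_found) passes for a 404 response exactly as have_http_status_like(404) would, which is why the symbolic form can be swapped in without changing test behaviour.
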
diff --git a/spec/requests/api/helpers_spec.rb b/spec/requests/api/helpers_spec.rb
index 26174611c58..98904a4d79f 100644
--- a/spec/requests/api/helpers_spec.rb
+++ b/spec/requests/api/helpers_spec.rb
@@ -269,7 +269,7 @@ describe API::Helpers do
# The 500 status is expected as we're testing a case where an exception
# is raised, but Grape shouldn't raise an additional exception
- expect(response).to have_gitlab_http_status(500)
+ expect(response).to have_gitlab_http_status(:internal_server_error)
expect(json_response['message']).not_to include("undefined local variable or method `request'")
expect(json_response['message']).to start_with("\nRuntimeError (Runtime Error!):")
end
diff --git a/spec/requests/api/import_github_spec.rb b/spec/requests/api/import_github_spec.rb
index 3ff7102479c..1a7d3b18d11 100644
--- a/spec/requests/api/import_github_spec.rb
+++ b/spec/requests/api/import_github_spec.rb
@@ -36,7 +36,7 @@ describe API::ImportGithub do
personal_access_token: token,
repo_id: 1234
}
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response).to be_a Hash
expect(json_response['name']).to eq(project.name)
end
@@ -50,7 +50,7 @@ describe API::ImportGithub do
repo_id: 1234
}
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
end
diff --git a/spec/requests/api/internal/base_spec.rb b/spec/requests/api/internal/base_spec.rb
index c3b5f9ded21..426e15faaa6 100644
--- a/spec/requests/api/internal/base_spec.rb
+++ b/spec/requests/api/internal/base_spec.rb
@@ -10,6 +10,10 @@ describe API::Internal::Base do
let(:gl_repository) { "project-#{project.id}" }
let(:reference_counter) { double('ReferenceCounter') }
+ let_it_be(:personal_snippet) { create(:personal_snippet, :repository, author: user) }
+ let_it_be(:project_snippet) { create(:project_snippet, :repository, author: user, project: project) }
+ let(:snippet_changes) { "#{TestEnv::BRANCH_SHA['snippet/single-file']} #{TestEnv::BRANCH_SHA['snippet/edit-file']} refs/heads/snippet/edit-file" }
+
describe "GET /internal/check" do
it do
expect_any_instance_of(Redis).to receive(:ping).and_return('PONG')
@@ -259,6 +263,8 @@ describe API::Internal::Base do
describe "POST /internal/allowed", :clean_gitlab_redis_shared_state do
context "access granted" do
+ let(:env) { {} }
+
around do |example|
Timecop.freeze { example.run }
end
@@ -267,30 +273,32 @@ describe API::Internal::Base do
project.add_developer(user)
end
- context 'with env passed as a JSON' do
- let(:gl_repository) { Gitlab::GlRepository::WIKI.identifier_for_container(project) }
-
- it 'sets env in RequestStore' do
- obj_dir_relative = './objects'
- alt_obj_dirs_relative = ['./alt-objects-1', './alt-objects-2']
+ shared_examples 'sets hook env' do
+ context 'with env passed as a JSON' do
+ let(:obj_dir_relative) { './objects' }
+ let(:alt_obj_dirs_relative) { ['./alt-objects-1', './alt-objects-2'] }
+ let(:env) do
+ {
+ GIT_OBJECT_DIRECTORY_RELATIVE: obj_dir_relative,
+ GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE: alt_obj_dirs_relative
+ }
+ end
- expect(Gitlab::Git::HookEnv).to receive(:set).with(gl_repository, {
- 'GIT_OBJECT_DIRECTORY_RELATIVE' => obj_dir_relative,
- 'GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE' => alt_obj_dirs_relative
- })
+ it 'sets env in RequestStore' do
+ expect(Gitlab::Git::HookEnv).to receive(:set).with(gl_repository, env.stringify_keys)
- push(key, project.wiki, env: {
- GIT_OBJECT_DIRECTORY_RELATIVE: obj_dir_relative,
- GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE: alt_obj_dirs_relative
- }.to_json)
+ subject
- expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to have_gitlab_http_status(:ok)
+ end
end
end
context "git push with project.wiki" do
+ subject { push(key, project.wiki, env: env.to_json) }
+
it 'responds with success' do
- push(key, project.wiki)
+ subject
expect(response).to have_gitlab_http_status(:ok)
expect(json_response["status"]).to be_truthy
@@ -298,6 +306,10 @@ describe API::Internal::Base do
expect(json_response["gl_repository"]).to eq("wiki-#{project.id}")
expect(user.reload.last_activity_on).to be_nil
end
+
+ it_behaves_like 'sets hook env' do
+ let(:gl_repository) { Gitlab::GlRepository::WIKI.identifier_for_container(project) }
+ end
end
context "git pull with project.wiki" do
@@ -312,6 +324,88 @@ describe API::Internal::Base do
end
end
+ shared_examples 'snippets with disabled feature flag' do
+ context 'when feature flag :version_snippets is disabled' do
+ it 'returns 404' do
+ stub_feature_flags(version_snippets: false)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ context 'git push with personal snippet' do
+ subject { push(key, personal_snippet, env: env.to_json, changes: snippet_changes) }
+
+ it 'responds with success' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response["status"]).to be_truthy
+ expect(json_response["gl_project_path"]).to eq(personal_snippet.repository.full_path)
+ expect(json_response["gl_repository"]).to eq("snippet-#{personal_snippet.id}")
+ expect(user.reload.last_activity_on).to be_nil
+ end
+
+ it_behaves_like 'snippets with disabled feature flag'
+ it_behaves_like 'sets hook env' do
+ let(:gl_repository) { Gitlab::GlRepository::SNIPPET.identifier_for_container(personal_snippet) }
+ end
+ end
+
+ context 'git pull with personal snippet' do
+ it 'responds with success' do
+ pull(key, personal_snippet)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response["status"]).to be_truthy
+ expect(json_response["gl_project_path"]).to eq(personal_snippet.repository.full_path)
+ expect(json_response["gl_repository"]).to eq("snippet-#{personal_snippet.id}")
+ expect(user.reload.last_activity_on).to eql(Date.today)
+ end
+
+ it_behaves_like 'snippets with disabled feature flag' do
+ subject { pull(key, personal_snippet) }
+ end
+ end
+
+ context 'git push with project snippet' do
+ subject { push(key, project_snippet, env: env.to_json, changes: snippet_changes) }
+
+ it 'responds with success' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response["status"]).to be_truthy
+ expect(json_response["gl_project_path"]).to eq(project_snippet.repository.full_path)
+ expect(json_response["gl_repository"]).to eq("snippet-#{project_snippet.id}")
+ expect(user.reload.last_activity_on).to be_nil
+ end
+
+ it_behaves_like 'snippets with disabled feature flag'
+ it_behaves_like 'sets hook env' do
+ let(:gl_repository) { Gitlab::GlRepository::SNIPPET.identifier_for_container(project_snippet) }
+ end
+ end
+
+ context 'git pull with project snippet' do
+ it 'responds with success' do
+ pull(key, project_snippet)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response["status"]).to be_truthy
+ expect(json_response["gl_project_path"]).to eq(project_snippet.repository.full_path)
+ expect(json_response["gl_repository"]).to eq("snippet-#{project_snippet.id}")
+ expect(user.reload.last_activity_on).to eql(Date.today)
+ end
+
+ it_behaves_like 'snippets with disabled feature flag' do
+ subject { pull(key, project_snippet) }
+ end
+ end
+
context "git pull" do
before do
allow(Feature).to receive(:persisted_names).and_return(%w[gitaly_mep_mep])
@@ -393,10 +487,28 @@ describe API::Internal::Base do
end
end
- it_behaves_like 'storing arguments in the application context' do
- let(:expected_params) { { user: key.user.username, project: project.full_path } }
+ context 'with Project' do
+ it_behaves_like 'storing arguments in the application context' do
+ let(:expected_params) { { user: key.user.username, project: project.full_path } }
+
+ subject { push(key, project) }
+ end
+ end
+
+ context 'with PersonalSnippet' do
+ it_behaves_like 'storing arguments in the application context' do
+ let(:expected_params) { { user: key.user.username } }
+
+ subject { push(key, personal_snippet) }
+ end
+ end
+
+ context 'with ProjectSnippet' do
+ it_behaves_like 'storing arguments in the application context' do
+ let(:expected_params) { { user: key.user.username, project: project_snippet.project.full_path } }
- subject { push(key, project) }
+ subject { push(key, project_snippet) }
+ end
end
end
@@ -450,7 +562,7 @@ describe API::Internal::Base do
{
authentication_abilities: [:read_project, :download_code, :push_code],
namespace_path: project.namespace.path,
- project_path: project.path,
+ repository_path: project.path,
redirected_path: nil
}
).and_return(access_checker)
@@ -478,30 +590,35 @@ describe API::Internal::Base do
project.add_developer(user)
end
- context "git pull" do
- context "with no console message" do
- it "has the correct payload" do
+ context 'git pull' do
+ context 'with a key that has expired' do
+ let(:key) { create(:key, user: user, expires_at: 2.days.ago) }
+
+ it 'includes the `key expired` message in the response' do
pull(key, project)
expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['gl_console_messages']).to eq([])
+ expect(json_response['gl_console_messages']).to eq(['INFO: Your SSH key has expired. Please generate a new key.'])
end
end
- context "with a console message" do
- let(:console_messages) { ['message for the console'] }
+ context 'with a key that will expire in the next 7 days' do
+ let(:key) { create(:key, user: user, expires_at: 2.days.from_now) }
- it "has the correct payload" do
- expect_next_instance_of(Gitlab::GitAccess) do |access|
- expect(access).to receive(:check_for_console_messages)
- .with('git-upload-pack')
- .and_return(console_messages)
- end
+ it 'includes the `key expiring soon` message in the response' do
+ pull(key, project)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['gl_console_messages']).to eq(['INFO: Your SSH key is expiring soon. Please generate a new key.'])
+ end
+ end
+ context 'with a key that has no expiry' do
+ it 'does not include any message in the response' do
pull(key, project)
expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['gl_console_messages']).to eq(console_messages)
+ expect(json_response['gl_console_messages']).to eq([])
end
end
end
@@ -654,7 +771,7 @@ describe API::Internal::Base do
it 'rejects the SSH push' do
push(key, project)
- expect(response.status).to eq(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
expect(json_response['status']).to be_falsey
expect(json_response['message']).to eq 'Git access over SSH is not allowed'
end
@@ -662,7 +779,7 @@ describe API::Internal::Base do
it 'rejects the SSH pull' do
pull(key, project)
- expect(response.status).to eq(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
expect(json_response['status']).to be_falsey
expect(json_response['message']).to eq 'Git access over SSH is not allowed'
end
@@ -676,7 +793,7 @@ describe API::Internal::Base do
it 'rejects the HTTP push' do
push(key, project, 'http')
- expect(response.status).to eq(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
expect(json_response['status']).to be_falsey
expect(json_response['message']).to eq 'Git access over HTTP is not allowed'
end
@@ -684,7 +801,7 @@ describe API::Internal::Base do
it 'rejects the HTTP pull' do
pull(key, project, 'http')
- expect(response.status).to eq(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
expect(json_response['status']).to be_falsey
expect(json_response['message']).to eq 'Git access over HTTP is not allowed'
end
@@ -835,22 +952,60 @@ describe API::Internal::Base do
allow_any_instance_of(Gitlab::Identifier).to receive(:identify).and_return(user)
end
- it 'executes PostReceiveService' do
- message = <<~MESSAGE.strip
- To create a merge request for #{branch_name}, visit:
- http://#{Gitlab.config.gitlab.host}/#{project.full_path}/-/merge_requests/new?merge_request%5Bsource_branch%5D=#{branch_name}
- MESSAGE
+ context 'with Project' do
+ it 'executes PostReceiveService' do
+ message = <<~MESSAGE.strip
+ To create a merge request for #{branch_name}, visit:
+ http://#{Gitlab.config.gitlab.host}/#{project.full_path}/-/merge_requests/new?merge_request%5Bsource_branch%5D=#{branch_name}
+ MESSAGE
+
+ subject
- subject
+ expect(json_response).to eq({
+ 'messages' => [{ 'message' => message, 'type' => 'basic' }],
+ 'reference_counter_decreased' => true
+ })
+ end
- expect(json_response).to eq({
- 'messages' => [{ 'message' => message, 'type' => 'basic' }],
- 'reference_counter_decreased' => true
- })
+ it_behaves_like 'storing arguments in the application context' do
+ let(:expected_params) { { user: user.username, project: project.full_path } }
+ end
end
- it_behaves_like 'storing arguments in the application context' do
- let(:expected_params) { { user: user.username, project: project.full_path } }
+ context 'with PersonalSnippet' do
+ let(:gl_repository) { "snippet-#{personal_snippet.id}" }
+
+ it 'executes PostReceiveService' do
+ subject
+
+ expect(json_response).to eq({
+ 'messages' => [],
+ 'reference_counter_decreased' => true
+ })
+ end
+
+ it_behaves_like 'storing arguments in the application context' do
+ let(:expected_params) { { user: key.user.username } }
+ let(:gl_repository) { "snippet-#{personal_snippet.id}" }
+ end
+ end
+
+ context 'with ProjectSnippet' do
+ let(:gl_repository) { "snippet-#{project_snippet.id}" }
+
+ it 'executes PostReceiveService' do
+ subject
+
+ expect(json_response).to eq({
+ 'messages' => [],
+ 'reference_counter_decreased' => true
+ })
+ end
+
+ it_behaves_like 'storing arguments in the application context' do
+ let(:expected_params) { { user: key.user.username, project: project_snippet.project.full_path } }
+ let(:gl_repository) { "snippet-#{project_snippet.id}" }
+ end
end
context 'with an orphaned write deploy key' do
@@ -866,16 +1021,32 @@ describe API::Internal::Base do
end
context 'when project is nil' do
- let(:gl_repository) { 'project-foo' }
+ context 'with Project' do
+ let(:gl_repository) { 'project-foo' }
- it 'does not try to notify that project moved' do
- allow(Gitlab::GlRepository).to receive(:parse).and_return([nil, Gitlab::GlRepository::PROJECT])
+ it 'does not try to notify that project moved' do
+ allow(Gitlab::GlRepository).to receive(:parse).and_return([nil, nil, Gitlab::GlRepository::PROJECT])
- expect(Gitlab::Checks::ProjectMoved).not_to receive(:fetch_message)
+ expect(Gitlab::Checks::ProjectMoved).not_to receive(:fetch_message)
- subject
+ subject
- expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'with PersonalSnippet' do
+ let(:gl_repository) { "snippet-#{personal_snippet.id}" }
+
+ it 'does not try to notify that project moved' do
+ allow(Gitlab::GlRepository).to receive(:parse).and_return([personal_snippet, nil, Gitlab::GlRepository::PROJECT])
+
+ expect(Gitlab::Checks::ProjectMoved).not_to receive(:fetch_message)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
end
end
end
@@ -896,24 +1067,37 @@ describe API::Internal::Base do
end
end
- def gl_repository_for(project_or_wiki)
- case project_or_wiki
+ def gl_repository_for(container)
+ case container
when ProjectWiki
- Gitlab::GlRepository::WIKI.identifier_for_container(project_or_wiki.project)
+ Gitlab::GlRepository::WIKI.identifier_for_container(container.project)
when Project
- Gitlab::GlRepository::PROJECT.identifier_for_container(project_or_wiki)
+ Gitlab::GlRepository::PROJECT.identifier_for_container(container)
+ when Snippet
+ Gitlab::GlRepository::SNIPPET.identifier_for_container(container)
else
nil
end
end
- def pull(key, project, protocol = 'ssh')
+ def full_path_for(container)
+ case container
+ when PersonalSnippet
+ "snippets/#{container.id}"
+ when ProjectSnippet
+ "#{container.project.full_path}/snippets/#{container.id}"
+ else
+ container.full_path
+ end
+ end
+
+ def pull(key, container, protocol = 'ssh')
post(
api("/internal/allowed"),
params: {
key_id: key.id,
- project: project.full_path,
- gl_repository: gl_repository_for(project),
+ project: full_path_for(container),
+ gl_repository: gl_repository_for(container),
action: 'git-upload-pack',
secret_token: secret_token,
protocol: protocol
@@ -921,12 +1105,14 @@ describe API::Internal::Base do
)
end
- def push(key, project, protocol = 'ssh', env: nil)
+ def push(key, container, protocol = 'ssh', env: nil, changes: nil)
+ changes ||= 'd14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/master'
+
params = {
- changes: 'd14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/master',
+ changes: changes,
key_id: key.id,
- project: project.full_path,
- gl_repository: gl_repository_for(project),
+ project: full_path_for(container),
+ gl_repository: gl_repository_for(container),
action: 'git-receive-pack',
secret_token: secret_token,
protocol: protocol,
@@ -939,14 +1125,14 @@ describe API::Internal::Base do
)
end
- def archive(key, project)
+ def archive(key, container)
post(
api("/internal/allowed"),
params: {
ref: 'master',
key_id: key.id,
- project: project.full_path,
- gl_repository: gl_repository_for(project),
+ project: full_path_for(container),
+ gl_repository: gl_repository_for(container),
action: 'git-upload-archive',
secret_token: secret_token,
protocol: 'ssh'
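
The helper changes above route projects, wikis and snippets through the same /internal/allowed parameters: a gl_repository identifier ("project-<id>", "wiki-<id>" or "snippet-<id>", the strings these specs assert on) plus a path built by full_path_for. As a purely illustrative sketch — a hypothetical parse_gl_repository helper, not Gitlab::GlRepository.parse — the reverse mapping from identifier back to container type and id could look like this:

# Hypothetical helper, for illustration only: splits an identifier such as
# "snippet-13" back into a container type and a numeric id.
GlIdentifier = Struct.new(:type, :id)

VALID_TYPES = %w[project wiki snippet].freeze

def parse_gl_repository(identifier)
  type, id = identifier.to_s.split('-', 2)
  unless VALID_TYPES.include?(type) && id.to_s.match?(/\A\d+\z/)
    raise ArgumentError, "unrecognized gl_repository: #{identifier.inspect}"
  end

  GlIdentifier.new(type.to_sym, Integer(id))
end

parse_gl_repository('snippet-13') # => #<struct GlIdentifier type=:snippet, id=13>
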
diff --git a/spec/requests/api/internal/pages_spec.rb b/spec/requests/api/internal/pages_spec.rb
index 9a8c1a0e03b..44f7115f6a8 100644
--- a/spec/requests/api/internal/pages_spec.rb
+++ b/spec/requests/api/internal/pages_spec.rb
@@ -56,24 +56,114 @@ describe API::Internal::Pages do
end
end
+ context 'serverless domain' do
+ let(:namespace) { create(:namespace, name: 'gitlab-org') }
+ let(:project) { create(:project, namespace: namespace, name: 'gitlab-ce') }
+ let(:environment) { create(:environment, project: project) }
+ let(:pages_domain) { create(:pages_domain, domain: 'serverless.gitlab.io') }
+ let(:knative_without_ingress) { create(:clusters_applications_knative) }
+ let(:knative_with_ingress) { create(:clusters_applications_knative, external_ip: '10.0.0.1') }
+
+ context 'without a knative ingress gateway IP' do
+ let!(:serverless_domain_cluster) do
+ create(
+ :serverless_domain_cluster,
+ uuid: 'abcdef12345678',
+ pages_domain: pages_domain,
+ knative: knative_without_ingress
+ )
+ end
+
+ let(:serverless_domain) do
+ create(
+ :serverless_domain,
+ serverless_domain_cluster: serverless_domain_cluster,
+ environment: environment
+ )
+ end
+
+ it 'responds with 204 no content' do
+ query_host(serverless_domain.uri.host)
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ expect(response.body).to be_empty
+ end
+ end
+
+ context 'with a knative ingress gateway IP' do
+ let!(:serverless_domain_cluster) do
+ create(
+ :serverless_domain_cluster,
+ uuid: 'abcdef12345678',
+ pages_domain: pages_domain,
+ knative: knative_with_ingress
+ )
+ end
+
+ let(:serverless_domain) do
+ create(
+ :serverless_domain,
+ serverless_domain_cluster: serverless_domain_cluster,
+ environment: environment
+ )
+ end
+
+ it 'responds with proxy configuration' do
+ query_host(serverless_domain.uri.host)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('internal/serverless/virtual_domain')
+
+ expect(json_response['certificate']).to eq(pages_domain.certificate)
+ expect(json_response['key']).to eq(pages_domain.key)
+
+ expect(json_response['lookup_paths']).to eq(
+ [
+ {
+ 'source' => {
+ 'type' => 'serverless',
+ 'service' => "test-function.#{project.name}-#{project.id}-#{environment.slug}.#{serverless_domain_cluster.knative.hostname}",
+ 'cluster' => {
+ 'hostname' => serverless_domain_cluster.knative.hostname,
+ 'address' => serverless_domain_cluster.knative.external_ip,
+ 'port' => 443,
+ 'cert' => serverless_domain_cluster.certificate,
+ 'key' => serverless_domain_cluster.key
+ }
+ }
+ }
+ ]
+ )
+ end
+ end
+ end
+
context 'custom domain' do
let(:namespace) { create(:namespace, name: 'gitlab-org') }
let(:project) { create(:project, namespace: namespace, name: 'gitlab-ce') }
- let!(:pages_domain) { create(:pages_domain, domain: 'pages.gitlab.io', project: project) }
+ let!(:pages_domain) { create(:pages_domain, domain: 'pages.io', project: project) }
context 'when there are no pages deployed for the related project' do
it 'responds with 204 No Content' do
- query_host('pages.gitlab.io')
+ query_host('pages.io')
expect(response).to have_gitlab_http_status(:no_content)
end
end
context 'when there are pages deployed for the related project' do
+ it 'domain lookup is case insensitive' do
+ deploy_pages(project)
+
+ query_host('Pages.IO')
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
it 'responds with the correct domain configuration' do
deploy_pages(project)
- query_host('pages.gitlab.io')
+ query_host('pages.io')
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('internal/pages/virtual_domain')
diff --git a/spec/requests/api/issues/put_projects_issues_spec.rb b/spec/requests/api/issues/put_projects_issues_spec.rb
index b4332c555e1..51037f701e1 100644
--- a/spec/requests/api/issues/put_projects_issues_spec.rb
+++ b/spec/requests/api/issues/put_projects_issues_spec.rb
@@ -4,8 +4,9 @@ require 'spec_helper'
describe API::Issues do
let_it_be(:user) { create(:user) }
+ let_it_be(:owner) { create(:owner) }
let_it_be(:project, reload: true) do
- create(:project, :public, creator_id: user.id, namespace: user.namespace)
+ create(:project, :public, creator_id: owner.id, namespace: owner.namespace)
end
let(:user2) { create(:user) }
@@ -97,7 +98,7 @@ describe API::Issues do
labels: 'label, label?, label&foo, ?, &'
}
- expect(response.status).to eq(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to include 'label'
expect(json_response['labels']).to include 'label?'
expect(json_response['labels']).to include 'label&foo'
@@ -112,7 +113,7 @@ describe API::Issues do
labels: ['label', 'label?', 'label&foo, ?, &']
}
- expect(response.status).to eq(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to include 'label'
expect(json_response['labels']).to include 'label?'
expect(json_response['labels']).to include 'label&foo'
@@ -349,7 +350,7 @@ describe API::Issues do
it 'allows special label names' do
put api("/projects/#{project.id}/issues/#{issue.iid}", user),
params: { labels: 'label:foo, label-bar,label_bar,label/bar,label?bar,label&bar,?,&' }
- expect(response.status).to eq(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to include 'label:foo'
expect(json_response['labels']).to include 'label-bar'
expect(json_response['labels']).to include 'label_bar'
@@ -363,7 +364,7 @@ describe API::Issues do
it 'allows special label names with labels param as array' do
put api("/projects/#{project.id}/issues/#{issue.iid}", user),
params: { labels: ['label:foo', 'label-bar', 'label_bar', 'label/bar,label?bar,label&bar,?,&'] }
- expect(response.status).to eq(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to include 'label:foo'
expect(json_response['labels']).to include 'label-bar'
expect(json_response['labels']).to include 'label_bar'
@@ -400,15 +401,49 @@ describe API::Issues do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['state']).to eq 'opened'
end
+ end
- context 'when an admin or owner makes the request' do
+ describe 'PUT /projects/:id/issues/:issue_iid to update updated_at param' do
+ context 'when reporter makes request' do
it 'accepts the update date to be set' do
update_time = 2.weeks.ago
+
put api("/projects/#{project.id}/issues/#{issue.iid}", user),
- params: { labels: 'label3', state_event: 'close', updated_at: update_time }
+ params: { title: 'some new title', updated_at: update_time }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['title']).to include 'some new title'
+ expect(Time.parse(json_response['updated_at'])).not_to be_like_time(update_time)
+ end
+ end
+
+ context 'when admin or owner makes the request' do
+ it 'does not allow setting updated_at to null' do
+ put api("/projects/#{project.id}/issues/#{issue.iid}", owner), params: { updated_at: nil }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'does not allow setting updated_at to blank' do
+ put api("/projects/#{project.id}/issues/#{issue.iid}", owner), params: { updated_at: '' }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'does not allow setting updated_at to an invalid format' do
+ put api("/projects/#{project.id}/issues/#{issue.iid}", owner), params: { updated_at: 'invalid-format' }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'accepts the update date to be set' do
+ update_time = 2.weeks.ago
+ put api("/projects/#{project.id}/issues/#{issue.iid}", owner),
+ params: { title: 'some new title', updated_at: update_time }
expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['labels']).to include 'label3'
+ expect(json_response['title']).to include 'some new title'
+
expect(Time.parse(json_response['updated_at'])).to be_like_time(update_time)
end
end
diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/jobs_spec.rb
index 652be20f1e4..03dfd13c25b 100644
--- a/spec/requests/api/jobs_spec.rb
+++ b/spec/requests/api/jobs_spec.rb
@@ -66,7 +66,7 @@ describe API::Jobs do
context 'authorized user' do
it 'returns project jobs' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
end
@@ -122,7 +122,7 @@ describe API::Jobs do
let(:query) { { 'scope' => 'pending' } }
it do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
end
end
@@ -131,7 +131,7 @@ describe API::Jobs do
let(:query) { { scope: %w(pending running) } }
it do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
end
end
@@ -139,7 +139,7 @@ describe API::Jobs do
context 'respond 400 when scope contains invalid state' do
let(:query) { { scope: %w(unknown running) } }
- it { expect(response).to have_gitlab_http_status(400) }
+ it { expect(response).to have_gitlab_http_status(:bad_request) }
end
end
@@ -148,7 +148,7 @@ describe API::Jobs do
let(:api_user) { nil }
it 'does not return project jobs' do
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -156,7 +156,7 @@ describe API::Jobs do
let(:api_user) { guest }
it 'does not return project jobs' do
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -178,7 +178,7 @@ describe API::Jobs do
context 'authorized user' do
it 'returns pipeline jobs' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
end
@@ -210,7 +210,7 @@ describe API::Jobs do
let(:query) { { 'scope' => 'pending' } }
it do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
end
end
@@ -219,7 +219,7 @@ describe API::Jobs do
let(:query) { { scope: %w(pending running) } }
it do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
end
end
@@ -227,7 +227,7 @@ describe API::Jobs do
context 'respond 400 when scope contains invalid state' do
let(:query) { { scope: %w(unknown running) } }
- it { expect(response).to have_gitlab_http_status(400) }
+ it { expect(response).to have_gitlab_http_status(:bad_request) }
end
context 'jobs in different pipelines' do
@@ -257,7 +257,7 @@ describe API::Jobs do
let(:api_user) { nil }
it 'does not return jobs' do
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -265,7 +265,7 @@ describe API::Jobs do
let(:api_user) { guest }
it 'does not return jobs' do
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -280,7 +280,7 @@ describe API::Jobs do
context 'authorized user' do
it 'returns specific job data' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to eq(job.id)
expect(json_response['status']).to eq(job.status)
expect(json_response['stage']).to eq(job.stage)
@@ -319,7 +319,7 @@ describe API::Jobs do
let(:api_user) { nil }
it 'does not return specific job data' do
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -339,7 +339,7 @@ describe API::Jobs do
end
it 'returns status 401 (unauthorized)' do
- expect(response).to have_http_status :unauthorized
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -349,7 +349,7 @@ describe API::Jobs do
end
it 'returns status 403 (forbidden)' do
- expect(response).to have_http_status :forbidden
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -362,7 +362,7 @@ describe API::Jobs do
end
it 'returns status 204 (no content)' do
- expect(response).to have_http_status :no_content
+ expect(response).to have_gitlab_http_status(:no_content)
end
end
end
@@ -386,7 +386,7 @@ describe API::Jobs do
get_artifact_file(artifact)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -398,7 +398,7 @@ describe API::Jobs do
get_artifact_file(artifact)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -410,7 +410,7 @@ describe API::Jobs do
get_artifact_file(artifact)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -423,7 +423,7 @@ describe API::Jobs do
get_artifact_file(artifact)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.headers.to_h)
.to include('Content-Type' => 'application/json',
'Gitlab-Workhorse-Send-Data' => /artifacts-entry/)
@@ -435,7 +435,7 @@ describe API::Jobs do
it 'does not return job artifact file' do
get_artifact_file('some/artifact')
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -453,7 +453,7 @@ describe API::Jobs do
end
it 'returns specific job artifacts' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.headers.to_h).to include(download_headers)
expect(response.body).to match_file(job.artifacts_file.file.file)
end
@@ -476,7 +476,7 @@ describe API::Jobs do
let(:api_user) { nil }
it 'does not return specific job artifacts' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -507,7 +507,7 @@ describe API::Jobs do
context 'when proxy download is disabled' do
it 'returns location redirect' do
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
@@ -521,7 +521,7 @@ describe API::Jobs do
let(:api_user) { nil }
it 'does not return specific job artifacts' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -557,7 +557,7 @@ describe API::Jobs do
it 'does not find a resource in a private project' do
expect(project).to be_private
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -569,7 +569,7 @@ describe API::Jobs do
end
it 'gives 403' do
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -604,7 +604,7 @@ describe API::Jobs do
%Q(attachment; filename="#{job.artifacts_file.filename}"; filename*=UTF-8''#{job.artifacts_file.filename}) }
end
- it { expect(response).to have_http_status(:ok) }
+ it { expect(response).to have_gitlab_http_status(:ok) }
it { expect(response.headers.to_h).to include(download_headers) }
end
@@ -619,7 +619,7 @@ describe API::Jobs do
end
it 'returns location redirect' do
- expect(response).to have_http_status(:found)
+ expect(response).to have_gitlab_http_status(:found)
end
end
end
@@ -677,7 +677,7 @@ describe API::Jobs do
let(:public_builds) { true }
it 'allows access to artifacts', :sidekiq_might_not_need_inline do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.headers.to_h)
.to include('Content-Type' => 'application/json',
'Gitlab-Workhorse-Send-Data' => /artifacts-entry/)
@@ -689,7 +689,7 @@ describe API::Jobs do
let(:public_builds) { false }
it 'rejects access to artifacts' do
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response).to have_key('message')
expect(response.headers.to_h)
.not_to include('Gitlab-Workhorse-Send-Data' => /artifacts-entry/)
@@ -701,7 +701,7 @@ describe API::Jobs do
let(:public_builds) { true }
it 'rejects access and hides existence of artifacts' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response).to have_key('message')
expect(response.headers.to_h)
.not_to include('Gitlab-Workhorse-Send-Data' => /artifacts-entry/)
@@ -720,7 +720,7 @@ describe API::Jobs do
get_artifact_file(artifact)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.headers.to_h)
.to include('Content-Type' => 'application/json',
'Gitlab-Workhorse-Send-Data' => /artifacts-entry/)
@@ -737,7 +737,7 @@ describe API::Jobs do
it 'returns a specific artifact file for a valid path', :sidekiq_might_not_need_inline do
get_artifact_file(artifact, 'improve/awesome')
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.headers.to_h)
.to include('Content-Type' => 'application/json',
'Gitlab-Workhorse-Send-Data' => /artifacts-entry/)
@@ -773,7 +773,7 @@ describe API::Jobs do
it 'does not return job artifact file' do
get_artifact_file('some/artifact')
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -803,7 +803,7 @@ describe API::Jobs do
end
it 'returns specific job trace' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.body).to eq(job.trace.raw)
end
end
@@ -812,7 +812,7 @@ describe API::Jobs do
let(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
it 'returns specific job trace' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.body).to eq(job.trace.raw)
end
end
@@ -821,7 +821,7 @@ describe API::Jobs do
let(:job) { create(:ci_build, :trace_live, pipeline: pipeline) }
it 'returns specific job trace' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.body).to eq(job.trace.raw)
end
end
@@ -831,7 +831,7 @@ describe API::Jobs do
let(:api_user) { nil }
it 'does not return specific job trace' do
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -844,7 +844,7 @@ describe API::Jobs do
context 'authorized user' do
context 'user with :update_build permission' do
it 'cancels running or pending job' do
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(project.builds.first.status).to eq('success')
end
end
@@ -853,7 +853,7 @@ describe API::Jobs do
let(:api_user) { reporter }
it 'does not cancel job' do
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -862,7 +862,7 @@ describe API::Jobs do
let(:api_user) { nil }
it 'does not cancel job' do
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -877,7 +877,7 @@ describe API::Jobs do
context 'authorized user' do
context 'user with :update_build permission' do
it 'retries non-running job' do
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(project.builds.first.status).to eq('canceled')
expect(json_response['status']).to eq('pending')
end
@@ -887,7 +887,7 @@ describe API::Jobs do
let(:api_user) { reporter }
it 'does not retry job' do
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -896,7 +896,7 @@ describe API::Jobs do
let(:api_user) { nil }
it 'does not retry job' do
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -914,7 +914,7 @@ describe API::Jobs do
let(:job) { create(:ci_build, :trace_artifact, :artifacts, :test_reports, :success, project: project, pipeline: pipeline) }
it 'erases job content' do
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(job.job_artifacts.count).to eq(0)
expect(job.trace.exist?).to be_falsy
expect(job.artifacts_file.present?).to be_falsy
@@ -934,7 +934,7 @@ describe API::Jobs do
let(:job) { create(:ci_build, :trace_live, project: project, pipeline: pipeline) }
it 'responds with forbidden' do
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -945,13 +945,13 @@ describe API::Jobs do
context 'when the build was created by the developer' do
let(:owner) { user }
- it { expect(response).to have_gitlab_http_status(201) }
+ it { expect(response).to have_gitlab_http_status(:created) }
end
context 'when the build was created by the other' do
let(:owner) { create(:user) }
- it { expect(response).to have_gitlab_http_status(403) }
+ it { expect(response).to have_gitlab_http_status(:forbidden) }
end
end
end
@@ -968,7 +968,7 @@ describe API::Jobs do
end
it 'keeps artifacts' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(job.reload.artifacts_expire_at).to be_nil
end
end
@@ -977,7 +977,7 @@ describe API::Jobs do
let(:job) { create(:ci_build, project: project, pipeline: pipeline) }
it 'responds with not found' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -992,7 +992,7 @@ describe API::Jobs do
context 'when user is authorized to trigger a manual action' do
it 'plays the job' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['user']['id']).to eq(user.id)
expect(json_response['id']).to eq(job.id)
expect(job.reload).to be_pending
@@ -1005,7 +1005,7 @@ describe API::Jobs do
it 'does not trigger a manual action' do
expect(job.reload).to be_manual
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -1014,7 +1014,7 @@ describe API::Jobs do
it 'does not trigger a manual action' do
expect(job.reload).to be_manual
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -1022,7 +1022,7 @@ describe API::Jobs do
context 'on a non-playable job' do
it 'returns a status code 400, Bad Request' do
- expect(response).to have_gitlab_http_status 400
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(response.body).to match("Unplayable Job")
end
end
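The hunks above replace literal status codes (200, 404, ...) with symbols in have_gitlab_http_status. A minimal sketch of how such a matcher can accept either form, assuming it resolves symbols through Rack's status table (the matcher name below is hypothetical, not GitLab's actual spec-support implementation):

require 'rack/utils'

# Sketch only: resolve :ok / :not_found / 404 to an integer through Rack and
# compare it against the response status, so both spellings assert the same thing.
RSpec::Matchers.define :have_example_http_status do |expected|
  match do |response|
    response.status == Rack::Utils.status_code(expected) # :ok => 200, 404 => 404
  end

  failure_message do |response|
    "expected status #{Rack::Utils.status_code(expected)}, got #{response.status}"
  end
end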
diff --git a/spec/requests/api/keys_spec.rb b/spec/requests/api/keys_spec.rb
index c743cb3f633..089ee22982c 100644
--- a/spec/requests/api/keys_spec.rb
+++ b/spec/requests/api/keys_spec.rb
@@ -5,29 +5,30 @@ require 'spec_helper'
describe API::Keys do
let(:user) { create(:user) }
let(:admin) { create(:admin) }
- let(:key) { create(:key, user: user) }
+ let(:key) { create(:key, user: user, expires_at: 1.day.from_now) }
let(:email) { create(:email, user: user) }
describe 'GET /keys/:uid' do
context 'when unauthenticated' do
it 'returns authentication error' do
get api("/keys/#{key.id}")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
context 'when authenticated' do
it 'returns 404 for non-existing key' do
get api('/keys/0', admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Not found')
end
it 'returns single ssh key with user information' do
user.keys << key
get api("/keys/#{key.id}", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['title']).to eq(key.title)
+ expect(Time.parse(json_response['expires_at'])).to be_like_time(key.expires_at)
expect(json_response['user']['id']).to eq(user.id)
expect(json_response['user']['username']).to eq(user.username)
end
@@ -44,27 +45,27 @@ describe API::Keys do
it 'returns authentication error' do
get api("/keys?fingerprint=#{key.fingerprint}")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it 'returns authentication error when authenticated as user' do
get api("/keys?fingerprint=#{key.fingerprint}", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
context 'when authenticated as admin' do
it 'returns 404 for non-existing SSH md5 fingerprint' do
get api("/keys?fingerprint=11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Key Not Found')
end
it 'returns 404 for non-existing SSH sha256 fingerprint' do
get api("/keys?fingerprint=#{URI.encode_www_form_component("SHA256:nUhzNyftwADy8AH3wFY31tAKs7HufskYTte2aXo1lCg")}", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Key Not Found')
end
@@ -73,7 +74,7 @@ describe API::Keys do
get api("/keys?fingerprint=#{key.fingerprint}", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['title']).to eq(key.title)
expect(json_response['user']['id']).to eq(user.id)
expect(json_response['user']['username']).to eq(user.username)
@@ -84,7 +85,7 @@ describe API::Keys do
get api("/keys?fingerprint=#{URI.encode_www_form_component("SHA256:" + key.fingerprint_sha256)}", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['title']).to eq(key.title)
expect(json_response['user']['id']).to eq(user.id)
expect(json_response['user']['username']).to eq(user.username)
@@ -95,7 +96,7 @@ describe API::Keys do
get api("/keys?fingerprint=#{URI.encode_www_form_component("sha256:" + key.fingerprint_sha256)}", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['title']).to eq(key.title)
expect(json_response['user']['id']).to eq(user.id)
expect(json_response['user']['username']).to eq(user.username)
@@ -125,7 +126,7 @@ describe API::Keys do
get api("/keys?fingerprint=#{URI.encode_www_form_component("SHA256:" + deploy_key.fingerprint_sha256)}", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['title']).to eq(deploy_key.title)
expect(json_response['user']['id']).to eq(user.id)
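The keys_spec change above also gives the key factory an expires_at and checks the serialized timestamp with be_like_time. Assuming that helper only needs roughly one second of tolerance (enough to absorb rounding when the time round-trips through JSON), an illustrative stand-in could be:

# Illustrative stand-in for be_like_time; the name and tolerance are assumptions.
RSpec::Matchers.define :be_like_time_example do |expected|
  match do |actual|
    (actual - expected).abs < 1.0 # Time - Time yields the difference in seconds as a Float
  end
end

# Usage mirroring the spec above:
#   expect(Time.parse(json_response['expires_at'])).to be_like_time_example(key.expires_at)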
diff --git a/spec/requests/api/labels_spec.rb b/spec/requests/api/labels_spec.rb
index d027738c8db..f8216da3419 100644
--- a/spec/requests/api/labels_spec.rb
+++ b/spec/requests/api/labels_spec.rb
@@ -27,7 +27,7 @@ describe API::Labels do
it "returns 200 if name is changed (#{route_type} route)" do
put_labels_api(route_type, user, spec_params, new_name: 'New Label')
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq('New Label')
expect(json_response['color']).to eq(label1.color)
end
@@ -35,7 +35,7 @@ describe API::Labels do
it "returns 200 if colors is changed (#{route_type} route)" do
put_labels_api(route_type, user, spec_params, color: '#FFFFFF')
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq(label1.name)
expect(json_response['color']).to eq('#FFFFFF')
end
@@ -51,7 +51,7 @@ describe API::Labels do
it "returns 400 if no new parameters given (#{route_type} route)" do
put_labels_api(route_type, user, spec_params)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('new_name, color, description, priority are missing, '\
'at least one parameter must be provided')
end
@@ -59,27 +59,27 @@ describe API::Labels do
it "returns 400 when color code is too short (#{route_type} route)" do
put_labels_api(route_type, user, spec_params, color: '#FF')
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['color']).to eq(['must be a valid color code'])
end
it "returns 400 for too long color code (#{route_type} route)" do
put_labels_api(route_type, user, spec_params, color: '#FFAAFFFF')
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['color']).to eq(['must be a valid color code'])
end
it "returns 400 for invalid priority (#{route_type} route)" do
put_labels_api(route_type, user, spec_params, priority: 'foo')
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it "returns 200 if name and colors and description are changed (#{route_type} route)" do
put_labels_api(route_type, user, spec_params, new_name: 'New Label', color: '#FFFFFF', description: 'test')
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq('New Label')
expect(json_response['color']).to eq('#FFFFFF')
expect(json_response['description']).to eq('test')
@@ -88,14 +88,14 @@ describe API::Labels do
it "returns 400 for invalid name (#{route_type} route)" do
put_labels_api(route_type, user, spec_params, new_name: ',', color: '#FFFFFF')
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['title']).to eq(['is invalid'])
end
it "returns 200 if description is changed (#{route_type} route)" do
put_labels_api(route_type, user, spec_params, description: 'test')
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to eq(expected_response_label_id)
expect(json_response['description']).to eq('test')
end
@@ -162,14 +162,14 @@ describe API::Labels do
it 'returns 204 for existing label (deprecated route)' do
delete api("/projects/#{project.id}/labels", user), params: spec_params
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
it 'returns 204 for existing label (rest route)' do
label_id = spec_params[:name] || spec_params[:label_id]
delete api("/projects/#{project.id}/labels/#{label_id}", user), params: spec_params.except(:name, :label_id)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
end
@@ -188,7 +188,7 @@ describe API::Labels do
it 'returns all available labels to the project' do
get api("/projects/#{project.id}/labels", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to all(match_schema('public_api/v4/labels/project_label'))
expect(json_response.size).to eq(3)
@@ -205,7 +205,7 @@ describe API::Labels do
it 'includes counts in the response' do
get api("/projects/#{project.id}/labels", user), params: { with_counts: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to all(match_schema('public_api/v4/labels/project_label_with_counts'))
expect(json_response.size).to eq(3)
@@ -264,7 +264,7 @@ describe API::Labels do
it 'returns all available labels for the project, parent group and ancestor groups' do
get api("/projects/#{project.id}/labels", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response).to all(match_schema('public_api/v4/labels/label'))
@@ -287,7 +287,7 @@ describe API::Labels do
it 'returns all available labels for the project and the parent group only' do
get api("/projects/#{project.id}/labels", user), params: { include_ancestor_groups: false }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response).to all(match_schema('public_api/v4/labels/label'))
@@ -307,7 +307,7 @@ describe API::Labels do
priority: 2
}
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq('Foo')
expect(json_response['color']).to eq('#FFAABB')
expect(json_response['description']).to eq('test')
@@ -345,12 +345,12 @@ describe API::Labels do
it 'returns a 400 bad request if name not given' do
post api("/projects/#{project.id}/labels", user), params: { color: '#FFAABB' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns a 400 bad request if color not given' do
post api("/projects/#{project.id}/labels", user), params: { name: 'Foobar' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns 400 for invalid color' do
@@ -359,7 +359,7 @@ describe API::Labels do
name: 'Foo',
color: '#FFAA'
}
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['color']).to eq(['must be a valid color code'])
end
@@ -369,7 +369,7 @@ describe API::Labels do
name: 'Foo',
color: '#FFAAFFFF'
}
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['color']).to eq(['must be a valid color code'])
end
@@ -379,7 +379,7 @@ describe API::Labels do
name: ',',
color: '#FFAABB'
}
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['title']).to eq(['is invalid'])
end
@@ -394,7 +394,7 @@ describe API::Labels do
color: '#FFAABB'
}
- expect(response).to have_gitlab_http_status(409)
+ expect(response).to have_gitlab_http_status(:conflict)
expect(json_response['message']).to eq('Label already exists')
end
@@ -406,7 +406,7 @@ describe API::Labels do
priority: 'foo'
}
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns 409 if label already exists in project' do
@@ -415,7 +415,7 @@ describe API::Labels do
name: 'label1',
color: '#FFAABB'
}
- expect(response).to have_gitlab_http_status(409)
+ expect(response).to have_gitlab_http_status(:conflict)
expect(json_response['message']).to eq('Label already exists')
end
end
@@ -432,14 +432,14 @@ describe API::Labels do
it 'returns 404 for non existing label' do
delete api("/projects/#{project.id}/labels", user), params: { name: 'label2' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Label Not Found')
end
it 'returns 400 for wrong parameters' do
delete api("/projects/#{project.id}/labels", user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'fails if label_id and name are given in params' do
@@ -449,7 +449,7 @@ describe API::Labels do
name: priority_label.name
}
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it_behaves_like '412 response' do
@@ -480,7 +480,7 @@ describe API::Labels do
new_name: 'label3'
}
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns 404 if label by id does not exist' do
@@ -490,13 +490,13 @@ describe API::Labels do
new_name: 'label3'
}
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns 400 if no label name and id is given' do
put api("/projects/#{project.id}/labels", user), params: { new_name: 'label2' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('label_id, name are missing, exactly one parameter must be provided')
end
@@ -508,7 +508,7 @@ describe API::Labels do
new_name: 'New Label'
}
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -523,7 +523,7 @@ describe API::Labels do
it 'returns 200 if label is promoted' do
put api("/projects/#{project.id}/labels/promote", user), params: { name: label1.name }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq(label1.name)
expect(json_response['color']).to eq(label1.color)
end
@@ -535,7 +535,7 @@ describe API::Labels do
.to change(project.labels, :count).by(-1)
.and change(group.labels, :count).by(0)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'returns 403 if guest promotes label' do
@@ -544,19 +544,19 @@ describe API::Labels do
put api("/projects/#{project.id}/labels/promote", guest), params: { name: label1.name }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'returns 404 if label does not exist' do
put api("/projects/#{project.id}/labels/promote", user), params: { name: 'unknown' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns 400 if no label name given' do
put api("/projects/#{project.id}/labels/promote", user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('name is missing')
end
end
@@ -566,7 +566,7 @@ describe API::Labels do
it "subscribes to the label" do
post api("/projects/#{project.id}/labels/#{label1.title}/subscribe", user)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response["name"]).to eq(label1.title)
expect(json_response["subscribed"]).to be_truthy
end
@@ -576,7 +576,7 @@ describe API::Labels do
it "subscribes to the label" do
post api("/projects/#{project.id}/labels/#{label1.id}/subscribe", user)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response["name"]).to eq(label1.title)
expect(json_response["subscribed"]).to be_truthy
end
@@ -590,7 +590,7 @@ describe API::Labels do
it "returns 304" do
post api("/projects/#{project.id}/labels/#{label1.id}/subscribe", user)
- expect(response).to have_gitlab_http_status(304)
+ expect(response).to have_gitlab_http_status(:not_modified)
end
end
@@ -598,7 +598,7 @@ describe API::Labels do
it "returns 404 error" do
post api("/projects/#{project.id}/labels/1234/subscribe", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -612,7 +612,7 @@ describe API::Labels do
it "unsubscribes from the label" do
post api("/projects/#{project.id}/labels/#{label1.title}/unsubscribe", user)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response["name"]).to eq(label1.title)
expect(json_response["subscribed"]).to be_falsey
end
@@ -622,7 +622,7 @@ describe API::Labels do
it "unsubscribes from the label" do
post api("/projects/#{project.id}/labels/#{label1.id}/unsubscribe", user)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response["name"]).to eq(label1.title)
expect(json_response["subscribed"]).to be_falsey
end
@@ -636,7 +636,7 @@ describe API::Labels do
it "returns 304" do
post api("/projects/#{project.id}/labels/#{label1.id}/unsubscribe", user)
- expect(response).to have_gitlab_http_status(304)
+ expect(response).to have_gitlab_http_status(:not_modified)
end
end
@@ -644,7 +644,7 @@ describe API::Labels do
it "returns 404 error" do
post api("/projects/#{project.id}/labels/1234/unsubscribe", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/requests/api/lint_spec.rb b/spec/requests/api/lint_spec.rb
index 46d23bd16b9..5815ce07125 100644
--- a/spec/requests/api/lint_spec.rb
+++ b/spec/requests/api/lint_spec.rb
@@ -12,7 +12,7 @@ describe API::Lint do
it 'passes validation' do
post api('/ci/lint'), params: { content: yaml_content }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Hash
expect(json_response['status']).to eq('valid')
expect(json_response['errors']).to eq([])
@@ -23,7 +23,7 @@ describe API::Lint do
it 'responds with errors about invalid syntax' do
post api('/ci/lint'), params: { content: 'invalid content' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['status']).to eq('invalid')
expect(json_response['errors']).to eq(['Invalid configuration format'])
end
@@ -31,7 +31,7 @@ describe API::Lint do
it "responds with errors about invalid configuration" do
post api('/ci/lint'), params: { content: '{ image: "ruby:2.1", services: ["postgres"] }' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['status']).to eq('invalid')
expect(json_response['errors']).to eq(['jobs config should contain at least one visible job'])
end
@@ -41,7 +41,7 @@ describe API::Lint do
it 'responds with validation error about missing content' do
post api('/ci/lint')
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('content is missing')
end
end
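For context, the endpoint exercised above is the CI lint API: it takes a .gitlab-ci.yml payload and answers 200 with a validation status plus an errors array. A hypothetical direct call (the host is a placeholder):

require 'net/http'
require 'json'

# Hypothetical call against POST /api/v4/ci/lint.
uri = URI('https://gitlab.example.com/api/v4/ci/lint')
response = Net::HTTP.post(uri,
                          { content: File.read('.gitlab-ci.yml') }.to_json,
                          'Content-Type' => 'application/json')

body = JSON.parse(response.body)
puts body['status'] # "valid" or "invalid"
puts body['errors'] # e.g. ["jobs config should contain at least one visible job"]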
diff --git a/spec/requests/api/lsif_data_spec.rb b/spec/requests/api/lsif_data_spec.rb
index 214bc832cda..a1516046e3e 100644
--- a/spec/requests/api/lsif_data_spec.rb
+++ b/spec/requests/api/lsif_data_spec.rb
@@ -9,18 +9,20 @@ describe API::LsifData do
let(:commit) { project.commit }
describe 'GET lsif/info' do
- let(:endpoint_path) { "/projects/#{project.id}/commits/#{commit.id}/lsif/info" }
+ subject do
+ endpoint_path = "/projects/#{project.id}/commits/#{commit.id}/lsif/info"
+
+ get api(endpoint_path, user), params: { paths: ['main.go', 'morestrings/reverse.go'] }
+
+ response
+ end
context 'user does not have access to the project' do
before do
project.add_guest(user)
end
- it 'returns 403' do
- get api(endpoint_path, user), params: { path: 'main.go' }
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
+ it { is_expected.to have_gitlab_http_status(:forbidden) }
end
context 'user has access to the project' do
@@ -28,35 +30,27 @@ describe API::LsifData do
project.add_reporter(user)
end
- context 'code_navigation feature is disabled' do
- before do
- stub_feature_flags(code_navigation: false)
- end
-
- it 'returns 404' do
- get api(endpoint_path, user)
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
context 'there is no job artifact for the passed commit' do
- it 'returns 404' do
- get api(endpoint_path, user), params: { path: 'main.go' }
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
+ it { is_expected.to have_gitlab_http_status(:not_found) }
end
context 'lsif data is stored as a job artifact' do
let!(:pipeline) { create(:ci_pipeline, project: project, sha: commit.id) }
let!(:artifact) { create(:ci_job_artifact, :lsif, job: create(:ci_build, pipeline: pipeline)) }
- it 'returns code navigation info for a given path' do
- get api(endpoint_path, user), params: { path: 'main.go' }
+ context 'code_navigation feature is disabled' do
+ before do
+ stub_feature_flags(code_navigation: false)
+ end
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.parsed_body.last).to eq({
+ it { is_expected.to have_gitlab_http_status(:not_found) }
+ end
+
+ it 'returns code navigation info for a given path', :aggregate_failures do
+ expect(subject).to have_gitlab_http_status(:ok)
+
+ data_for_main = response.parsed_body['main.go']
+ expect(data_for_main.last).to eq({
'end_char' => 18,
'end_line' => 8,
'start_char' => 13,
@@ -67,26 +61,33 @@ describe API::LsifData do
'value' => Gitlab::Highlight.highlight(nil, 'func Func2(i int) string', language: 'go')
}]
})
+
+ data_for_reverse = response.parsed_body['morestrings/reverse.go']
+ expect(data_for_reverse.last).to eq({
+ 'end_char' => 9,
+ 'end_line' => 7,
+ 'start_char' => 8,
+ 'start_line' => 7,
+ 'definition_url' => project_blob_path(project, "#{commit.id}/morestrings/reverse.go", anchor: 'L6'),
+ 'hover' => [{
+ 'language' => 'go',
+ 'value' => Gitlab::Highlight.highlight(nil, 'var b string', language: 'go')
+ }]
+ })
end
context 'the stored file is too large' do
- it 'returns 413' do
+ before do
allow_any_instance_of(JobArtifactUploader).to receive(:cached_size).and_return(20.megabytes)
-
- get api(endpoint_path, user), params: { path: 'main.go' }
-
- expect(response).to have_gitlab_http_status(:payload_too_large)
end
+
+ it { is_expected.to have_gitlab_http_status(:payload_too_large) }
end
context 'the user does not have access to the pipeline' do
let(:project) { create(:project, :repository, builds_access_level: ProjectFeature::DISABLED) }
- it 'returns 403' do
- get api(endpoint_path, user), params: { path: 'main.go' }
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
+ it { is_expected.to have_gitlab_http_status(:forbidden) }
end
end
end
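The lsif_data_spec rewrite above is mostly a restructuring around two stock RSpec idioms: the request moves into subject so the simple cases collapse into is_expected one-liners, and :aggregate_failures lets a single example report every failed expectation instead of stopping at the first. A generic sketch (the path is a placeholder; the real spec issues get api(endpoint_path, user)):

describe 'an endpoint' do
  subject do
    get '/some/path' # placeholder request
    response
  end

  # One-liner: the implicit subject (the response) is checked directly.
  it { is_expected.to have_gitlab_http_status(:forbidden) }

  # :aggregate_failures reports all failing expectations in this example.
  it 'checks several things at once', :aggregate_failures do
    expect(subject).to have_gitlab_http_status(:ok)
    expect(response.parsed_body).to be_a(Hash)
  end
end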
diff --git a/spec/requests/api/markdown_spec.rb b/spec/requests/api/markdown_spec.rb
index 8a1e1b05c9a..9b787e76740 100644
--- a/spec/requests/api/markdown_spec.rb
+++ b/spec/requests/api/markdown_spec.rb
@@ -16,7 +16,7 @@ describe API::Markdown do
shared_examples "rendered markdown text without GFM" do
it "renders markdown text" do
- expect(response).to have_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(response.headers["Content-Type"]).to eq("application/json")
expect(json_response).to be_a(Hash)
expect(json_response["html"]).to eq("<p>#{text}</p>")
@@ -25,7 +25,7 @@ describe API::Markdown do
shared_examples "404 Project Not Found" do
it "responds with 404 Not Found" do
- expect(response).to have_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(response.headers["Content-Type"]).to eq("application/json")
expect(json_response).to be_a(Hash)
expect(json_response["message"]).to eq("404 Project Not Found")
@@ -37,7 +37,7 @@ describe API::Markdown do
let(:params) { {} }
it "responds with 400 Bad Request" do
- expect(response).to have_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(response.headers["Content-Type"]).to eq("application/json")
expect(json_response).to be_a(Hash)
expect(json_response["error"]).to eq("text is missing")
@@ -83,7 +83,7 @@ describe API::Markdown do
let(:params) { { text: text, gfm: true } }
it "renders markdown text" do
- expect(response).to have_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(response.headers["Content-Type"]).to eq("application/json")
expect(json_response).to be_a(Hash)
expect(json_response["html"]).to include("Hello world!")
@@ -100,7 +100,7 @@ describe API::Markdown do
let(:user) { project.owner }
it "renders markdown text" do
- expect(response).to have_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(response.headers["Content-Type"]).to eq("application/json")
expect(json_response).to be_a(Hash)
expect(json_response["html"]).to include("Hello world!")
@@ -120,7 +120,7 @@ describe API::Markdown do
shared_examples 'user without proper access' do
it 'does not render the title or link' do
- expect(response).to have_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response["html"]).not_to include('Confidential title')
expect(json_response["html"]).not_to include('<a href=')
expect(json_response["html"]).to include('Hello world!')
@@ -146,7 +146,7 @@ describe API::Markdown do
let(:user) { confidential_issue.author }
it 'renders the title or link' do
- expect(response).to have_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response["html"]).to include('Confidential title')
expect(json_response["html"]).to include('Hello world!')
.and include('data-name="tada"')
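The markdown_spec hunks swap rspec-rails' have_http_status for the project's have_gitlab_http_status. Assuming the main benefit of the project-specific matcher is a richer failure message (for example, including the response body), a rough sketch of such a wrapper:

# Sketch only; the real matcher lives in GitLab's spec support and the
# failure-message details here are assumptions.
RSpec::Matchers.define :have_wrapped_http_status do |expected|
  match do |response|
    expect(response).to have_http_status(expected) # delegate to rspec-rails
  end

  failure_message do |response|
    "expected #{expected.inspect}, got #{response.status}\nbody: #{response.body}"
  end
end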
diff --git a/spec/requests/api/members_spec.rb b/spec/requests/api/members_spec.rb
index f2942020e16..0b504df1f51 100644
--- a/spec/requests/api/members_spec.rb
+++ b/spec/requests/api/members_spec.rb
@@ -43,7 +43,7 @@ describe API::Members do
get api(members_url, user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(2)
@@ -72,7 +72,7 @@ describe API::Members do
get api(members_url, developer)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(2)
@@ -82,7 +82,7 @@ describe API::Members do
it 'finds members with query string' do
get api(members_url, developer), params: { query: maintainer.username }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.count).to eq(1)
@@ -92,7 +92,7 @@ describe API::Members do
it 'finds members with the given user_ids' do
get api(members_url, developer), params: { user_ids: [maintainer.id, developer.id, stranger.id] }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |u| u['id'] }).to contain_exactly(maintainer.id, developer.id)
@@ -101,7 +101,7 @@ describe API::Members do
it 'finds all members with no query specified' do
get api(members_url, developer), params: { query: '' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.count).to eq(2)
@@ -137,7 +137,7 @@ describe API::Members do
it 'finds all project members including inherited members' do
get api("/projects/#{project.id}/members/all", developer)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |u| u['id'] }).to match_array [maintainer.id, developer.id, nested_user.id, project_user.id, linked_group_user.id]
@@ -148,7 +148,7 @@ describe API::Members do
get api("/projects/#{project.id}/members/all", developer)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
@@ -165,7 +165,7 @@ describe API::Members do
it 'finds all group members including inherited members' do
get api("/groups/#{nested_group.id}/members/all", developer)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |u| u['id'] }).to match_array [maintainer.id, developer.id, nested_user.id]
@@ -185,7 +185,7 @@ describe API::Members do
user = public_send(type)
get api("/#{source_type.pluralize}/#{source.id}/members/#{all ? 'all/' : ''}#{developer.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
# User attributes
expect(json_response['id']).to eq(developer.id)
expect(json_response['name']).to eq(developer.name)
@@ -220,7 +220,7 @@ describe API::Members do
post api("/#{source_type.pluralize}/#{source.id}/members", user),
params: { user_id: access_requester.id, access_level: Member::MAINTAINER }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -233,7 +233,7 @@ describe API::Members do
post api("/#{source_type.pluralize}/#{source.id}/members", maintainer),
params: { user_id: access_requester.id, access_level: Member::MAINTAINER }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end.to change { source.members.count }.by(1)
expect(source.requesters.count).to eq(0)
expect(json_response['id']).to eq(access_requester.id)
@@ -246,7 +246,7 @@ describe API::Members do
post api("/#{source_type.pluralize}/#{source.id}/members", maintainer),
params: { user_id: stranger.id, access_level: Member::DEVELOPER, expires_at: '2016-08-05' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end.to change { source.members.count }.by(1)
expect(json_response['id']).to eq(stranger.id)
expect(json_response['access_level']).to eq(Member::DEVELOPER)
@@ -265,7 +265,7 @@ describe API::Members do
post api("/#{source_type.pluralize}/#{source.id}/members", maintainer),
params: { user_id: stranger.id, access_level: Member::REPORTER }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['access_level']).to eq(["should be greater than or equal to Developer inherited membership from group #{parent.name}"])
end
@@ -279,7 +279,7 @@ describe API::Members do
post api("/#{source_type.pluralize}/#{source.id}/members", maintainer),
params: { user_id: stranger.id, access_level: Member::MAINTAINER }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['id']).to eq(stranger.id)
expect(json_response['access_level']).to eq(Member::MAINTAINER)
end
@@ -289,14 +289,14 @@ describe API::Members do
post api("/#{source_type.pluralize}/#{source.id}/members", maintainer),
params: { user_id: maintainer.id, access_level: Member::MAINTAINER }
- expect(response).to have_gitlab_http_status(409)
+ expect(response).to have_gitlab_http_status(:conflict)
end
it 'returns 404 when the user_id is not valid' do
post api("/#{source_type.pluralize}/#{source.id}/members", maintainer),
params: { user_id: 0, access_level: Member::MAINTAINER }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 User Not Found')
end
@@ -304,21 +304,21 @@ describe API::Members do
post api("/#{source_type.pluralize}/#{source.id}/members", maintainer),
params: { access_level: Member::MAINTAINER }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns 400 when access_level is not given' do
post api("/#{source_type.pluralize}/#{source.id}/members", maintainer),
params: { user_id: stranger.id }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns 400 when access_level is not valid' do
post api("/#{source_type.pluralize}/#{source.id}/members", maintainer),
params: { user_id: stranger.id, access_level: 1234 }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
@@ -340,7 +340,7 @@ describe API::Members do
put api("/#{source_type.pluralize}/#{source.id}/members/#{developer.id}", user),
params: { access_level: Member::MAINTAINER }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -351,7 +351,7 @@ describe API::Members do
put api("/#{source_type.pluralize}/#{source.id}/members/#{developer.id}", maintainer),
params: { access_level: Member::MAINTAINER, expires_at: '2016-08-05' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to eq(developer.id)
expect(json_response['access_level']).to eq(Member::MAINTAINER)
expect(json_response['expires_at']).to eq('2016-08-05')
@@ -362,20 +362,20 @@ describe API::Members do
put api("/#{source_type.pluralize}/#{source.id}/members/123", maintainer),
params: { access_level: Member::MAINTAINER }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns 400 when access_level is not given' do
put api("/#{source_type.pluralize}/#{source.id}/members/#{developer.id}", maintainer)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns 400 when access level is not valid' do
put api("/#{source_type.pluralize}/#{source.id}/members/#{developer.id}", maintainer),
params: { access_level: 1234 }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
@@ -393,7 +393,7 @@ describe API::Members do
user = public_send(type)
delete api("/#{source_type.pluralize}/#{source.id}/members/#{developer.id}", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -404,7 +404,7 @@ describe API::Members do
expect do
delete api("/#{source_type.pluralize}/#{source.id}/members/#{developer.id}", developer)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.to change { source.members.count }.by(-1)
end
end
@@ -415,7 +415,7 @@ describe API::Members do
expect do
delete api("/#{source_type.pluralize}/#{source.id}/members/#{access_requester.id}", maintainer)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end.not_to change { source.requesters.count }
end
end
@@ -424,7 +424,7 @@ describe API::Members do
expect do
delete api("/#{source_type.pluralize}/#{source.id}/members/#{developer.id}", maintainer)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.to change { source.members.count }.by(-1)
end
@@ -436,7 +436,7 @@ describe API::Members do
it 'returns 404 if member does not exist' do
delete api("/#{source_type.pluralize}/#{source.id}/members/123", maintainer)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -491,7 +491,7 @@ describe API::Members do
post api("/projects/#{project.id}/members", maintainer),
params: { user_id: stranger.id, access_level: Member::OWNER }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end.to change { project.members.count }.by(0)
end
end
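The members_spec requests above pass access_level as integer constants (Member::DEVELOPER, Member::MAINTAINER, ...). For reference, the access-level integers documented for the Members API, plus the shape of the expiring-membership request used above:

# Access-level integers as documented for the GitLab Members API; the specs
# reach them through the Member:: constants.
ACCESS_LEVELS = {
  guest: 10,
  reporter: 20,
  developer: 30,
  maintainer: 40,
  owner: 50
}.freeze

# The "adds a member that expires" case above boils down to:
#   POST /projects/:id/members  user_id=<id> access_level=30 expires_at=2016-08-05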
diff --git a/spec/requests/api/merge_request_diffs_spec.rb b/spec/requests/api/merge_request_diffs_spec.rb
index 9de76c2fe50..2e74022ae07 100644
--- a/spec/requests/api/merge_request_diffs_spec.rb
+++ b/spec/requests/api/merge_request_diffs_spec.rb
@@ -28,12 +28,12 @@ describe API::MergeRequestDiffs, 'MergeRequestDiffs' do
it 'returns a 404 when merge_request id is used instead of the iid' do
get api("/projects/#{project.id}/merge_requests/#{merge_request.id}/versions", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns a 404 when merge_request_iid not found' do
get api("/projects/#{project.id}/merge_requests/0/versions", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -51,17 +51,17 @@ describe API::MergeRequestDiffs, 'MergeRequestDiffs' do
it 'returns a 404 when merge_request id is used instead of the iid' do
get api("/projects/#{project.id}/merge_requests/#{merge_request.id}/versions/#{merge_request_diff.id}", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns a 404 when merge_request version_id is not found' do
get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/versions/0", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns a 404 when merge_request_iid is not found' do
get api("/projects/#{project.id}/merge_requests/12345/versions/#{merge_request_diff.id}", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index 862afd11b86..d8fac47d6f6 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -194,7 +194,7 @@ describe API::MergeRequests do
it 'matches V4 response schema' do
get api(endpoint_path, user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/merge_requests')
end
@@ -454,25 +454,25 @@ describe API::MergeRequests do
it "returns authentication error without any scope" do
get api("/merge_requests")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it "returns authentication error when scope is assigned-to-me" do
get api("/merge_requests"), params: { scope: 'assigned-to-me' }
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it "returns authentication error when scope is assigned_to_me" do
get api("/merge_requests"), params: { scope: 'assigned_to_me' }
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it "returns authentication error when scope is created-by-me" do
get api("/merge_requests"), params: { scope: 'created-by-me' }
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -705,7 +705,7 @@ describe API::MergeRequests do
get api("/projects/#{project.id}/merge_requests")
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it "returns an array of no merge_requests when wip=yes" do
@@ -818,7 +818,7 @@ describe API::MergeRequests do
merge_request = create(:merge_request, :with_test_reports, milestone: milestone1, author: user, assignees: [user], source_project: project, target_project: project, title: "Test", created_at: base_time)
get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/merge_request')
end
@@ -828,7 +828,7 @@ describe API::MergeRequests do
get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to eq(merge_request.id)
expect(json_response['iid']).to eq(merge_request.iid)
expect(json_response['project_id']).to eq(merge_request.project.id)
@@ -866,7 +866,7 @@ describe API::MergeRequests do
it 'exposes description and title html when render_html is true' do
get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user), params: { render_html: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to include('title_html', 'description_html')
end
@@ -874,7 +874,7 @@ describe API::MergeRequests do
it 'exposes rebase_in_progress when include_rebase_in_progress is true' do
get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user), params: { include_rebase_in_progress: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to include('rebase_in_progress')
end
@@ -950,19 +950,19 @@ describe API::MergeRequests do
get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user), params: { include_diverged_commits_count: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['diverged_commits_count']).to eq(1)
end
it "returns a 404 error if merge_request_iid not found" do
get api("/projects/#{project.id}/merge_requests/0", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it "returns a 404 error if merge_request `id` is used instead of iid" do
get api("/projects/#{project.id}/merge_requests/#{merge_request.id}", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
context 'Work in Progress' do
@@ -971,7 +971,7 @@ describe API::MergeRequests do
it "returns merge request" do
get api("/projects/#{project.id}/merge_requests/#{merge_request_wip.iid}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['work_in_progress']).to eq(true)
end
end
@@ -990,7 +990,7 @@ describe API::MergeRequests do
get api("/projects/#{project.id}/merge_requests/#{merge_request_overflow.iid}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['changes_count']).to eq('5+')
end
end
@@ -1059,13 +1059,13 @@ describe API::MergeRequests do
it 'returns a 404 when merge_request_iid not found' do
get api("/projects/#{project.id}/merge_requests/0/commits", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns a 404 when merge_request id is used instead of iid' do
get api("/projects/#{project.id}/merge_requests/#{merge_request.id}/commits", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -1083,7 +1083,7 @@ describe API::MergeRequests do
it 'returns a 404 when merge_request_iid not found' do
get api("/projects/#{project.id}/merge_requests/0/context_commits", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -1091,19 +1091,19 @@ describe API::MergeRequests do
it 'returns the change information of the merge_request' do
get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/changes", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['changes'].size).to eq(merge_request.diffs.size)
end
it 'returns a 404 when merge_request_iid not found' do
get api("/projects/#{project.id}/merge_requests/0/changes", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns a 404 when merge_request id is used instead of iid' do
get api("/projects/#{project.id}/merge_requests/#{merge_request.id}/changes", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -1123,14 +1123,14 @@ describe API::MergeRequests do
it 'exposes basic attributes' do
get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/pipelines")
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/pipelines')
end
it 'returns 404 if MR does not exist' do
get api("/projects/#{project.id}/merge_requests/777/pipelines")
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -1143,19 +1143,23 @@ describe API::MergeRequests do
get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/pipelines", guest)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
describe 'POST /projects/:id/merge_requests/:merge_request_iid/pipelines' do
before do
- stub_ci_pipeline_yaml_file(YAML.dump({
+ stub_ci_pipeline_yaml_file(ci_yaml)
+ end
+
+ let(:ci_yaml) do
+ YAML.dump({
rspec: {
script: 'ls',
only: ['merge_requests']
}
- }))
+ })
end
let(:project) do
@@ -1186,7 +1190,7 @@ describe API::MergeRequests do
context 'when authorized' do
it 'creates and returns the new Pipeline' do
expect { request }.to change(Ci::Pipeline, :count).by(1)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_a Hash
end
end
@@ -1196,7 +1200,7 @@ describe API::MergeRequests do
it 'responds with a blank 404' do
expect { request }.not_to change(Ci::Pipeline, :count)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -1205,7 +1209,19 @@ describe API::MergeRequests do
it 'responds with a blank 404' do
expect { request }.not_to change(Ci::Pipeline, :count)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when the .gitlab-ci.yml file is invalid' do
+ let(:ci_yaml) { 'invalid yaml file' }
+
+ it 'creates a failed pipeline' do
+ expect { request }.to change(Ci::Pipeline, :count).by(1)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_a Hash
+ expect(merge_request.pipelines_for_merge_request.last).to be_failed
+ expect(merge_request.pipelines_for_merge_request.last).to be_config_error
end
end
end
@@ -1225,7 +1241,7 @@ describe API::MergeRequests do
it 'creates a new merge request' do
post api("/projects/#{project.id}/merge_requests", user), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['title']).to eq('Test merge request')
expect(json_response['assignee']['name']).to eq(user2.name)
expect(json_response['assignees'].first['name']).to eq(user2.name)
@@ -1236,7 +1252,7 @@ describe API::MergeRequests do
post api("/projects/#{project.id}/merge_requests", user), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['title']).to eq('Test merge request')
expect(json_response['assignee']).to be_nil
end
@@ -1249,7 +1265,7 @@ describe API::MergeRequests do
post api("/projects/#{private_project.id}/merge_requests", user), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['assignee']).to be_nil
end
end
@@ -1268,7 +1284,7 @@ describe API::MergeRequests do
it 'creates a new project merge request with no more than one assignee' do
post api("/projects/#{project.id}/merge_requests", user), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['title']).to eq('Test merge request')
expect(json_response['assignees'].count).to eq(1)
expect(json_response['assignees'].first['name']).to eq(user.name)
@@ -1294,7 +1310,7 @@ describe API::MergeRequests do
params[:labels] = labels
post api("/projects/#{project.id}/merge_requests", user), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['title']).to eq('Test merge_request')
expect(json_response['labels']).to eq(%w(label label2))
expect(json_response['milestone']['id']).to eq(milestone.id)
@@ -1319,7 +1335,7 @@ describe API::MergeRequests do
params[:labels] = 'label, label?, label&foo, ?, &'
post api("/projects/#{project.id}/merge_requests", user), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['labels']).to include 'label'
expect(json_response['labels']).to include 'label?'
expect(json_response['labels']).to include 'label&foo'
@@ -1331,7 +1347,7 @@ describe API::MergeRequests do
params[:labels] = ['label', 'label?', 'label&foo, ?, &', '1, 2', 3, 4]
post api("/projects/#{project.id}/merge_requests", user), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['labels']).to include 'label'
expect(json_response['labels']).to include 'label?'
expect(json_response['labels']).to include 'label&foo'
@@ -1347,7 +1363,7 @@ describe API::MergeRequests do
params[:labels] = ''
post api("/projects/#{project.id}/merge_requests", user), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['labels']).to eq([])
end
@@ -1357,7 +1373,7 @@ describe API::MergeRequests do
params: params.to_json,
headers: { 'Content-Type': 'application/json' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['labels']).to eq([])
end
@@ -1365,7 +1381,7 @@ describe API::MergeRequests do
params[:labels] = []
post api("/projects/#{project.id}/merge_requests", user), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['labels']).to eq([])
end
@@ -1373,7 +1389,7 @@ describe API::MergeRequests do
params[:labels] = ['']
post api("/projects/#{project.id}/merge_requests", user), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['labels']).to eq([])
end
@@ -1381,7 +1397,7 @@ describe API::MergeRequests do
params[:labels] = ['', '', '']
post api("/projects/#{project.id}/merge_requests", user), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['labels']).to eq([])
end
end
@@ -1389,25 +1405,25 @@ describe API::MergeRequests do
it "returns 422 when source_branch equals target_branch" do
post api("/projects/#{project.id}/merge_requests", user),
params: { title: "Test merge_request", source_branch: "master", target_branch: "master", author: user }
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
it "returns 400 when source_branch is missing" do
post api("/projects/#{project.id}/merge_requests", user),
params: { title: "Test merge_request", target_branch: "master", author: user }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it "returns 400 when target_branch is missing" do
post api("/projects/#{project.id}/merge_requests", user),
params: { title: "Test merge_request", source_branch: "markdown", author: user }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it "returns 400 when title is missing" do
post api("/projects/#{project.id}/merge_requests", user),
params: { target_branch: 'master', source_branch: 'markdown' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
context 'with existing MR' do
@@ -1432,7 +1448,7 @@ describe API::MergeRequests do
author: user
}
end.to change { MergeRequest.count }.by(0)
- expect(response).to have_gitlab_http_status(409)
+ expect(response).to have_gitlab_http_status(:conflict)
end
end
@@ -1471,7 +1487,7 @@ describe API::MergeRequests do
it "returns merge_request" do
post api("/projects/#{forked_project.id}/merge_requests", user2),
params: { title: 'Test merge_request', source_branch: "feature_conflict", target_branch: "master", author: user2, target_project_id: project.id, description: 'Test description for Test merge_request' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['title']).to eq('Test merge_request')
expect(json_response['description']).to eq('Test description for Test merge_request')
end
@@ -1482,7 +1498,7 @@ describe API::MergeRequests do
expect(forked_project.forked_from_project).to eq(project)
post api("/projects/#{forked_project.id}/merge_requests", user2),
params: { title: 'Test merge_request', source_branch: "master", target_branch: "master", author: user2, target_project_id: project.id }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['title']).to eq('Test merge_request')
end
@@ -1498,31 +1514,31 @@ describe API::MergeRequests do
target_project_id: project.id
}
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it "returns 400 when source_branch is missing" do
post api("/projects/#{forked_project.id}/merge_requests", user2),
params: { title: 'Test merge_request', target_branch: "master", author: user2, target_project_id: project.id }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it "returns 400 when target_branch is missing" do
post api("/projects/#{forked_project.id}/merge_requests", user2),
params: { title: 'Test merge_request', target_branch: "master", author: user2, target_project_id: project.id }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it "returns 400 when title is missing" do
post api("/projects/#{forked_project.id}/merge_requests", user2),
params: { target_branch: 'master', source_branch: 'markdown', author: user2, target_project_id: project.id }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'allows setting `allow_collaboration`', :sidekiq_might_not_need_inline do
post api("/projects/#{forked_project.id}/merge_requests", user2),
params: { title: 'Test merge_request', source_branch: "feature_conflict", target_branch: "master", author: user2, target_project_id: project.id, allow_collaboration: true }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['allow_collaboration']).to be_truthy
expect(json_response['allow_maintainer_to_push']).to be_truthy
end
@@ -1541,20 +1557,20 @@ describe API::MergeRequests do
post api("/projects/#{forked_project.id}/merge_requests", user2), params: params
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
it 'returns 403 if targeting a different fork which user can not access' do
post api("/projects/#{forked_project.id}/merge_requests", user2), params: params
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
it "returns 201 when target_branch is specified and for the same project", :sidekiq_might_not_need_inline do
post api("/projects/#{forked_project.id}/merge_requests", user2),
params: { title: 'Test merge_request', target_branch: 'master', source_branch: 'markdown', author: user2, target_project_id: forked_project.id }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
end
end
@@ -1585,7 +1601,7 @@ describe API::MergeRequests do
describe 'when authenticated' do
it 'creates and returns the new context commit' do
post api("/projects/#{project.id}/merge_requests/#{merge_request_iid}/context_commits", authenticated_user), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response).to be_an Array
expect(json_response.first['short_id']).to eq(commit.short_id)
expect(json_response.first['title']).to eq(commit.title)
@@ -1602,46 +1618,46 @@ describe API::MergeRequests do
end
it 'returns 400 when the context commit is already created' do
post api("/projects/#{project.id}/merge_requests/#{merge_request_iid}/context_commits", authenticated_user), params: params
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq("Context commits: [\"#{commit.id}\"] are already created")
end
end
it 'returns 400 when one or more shas are invalid' do
post api("/projects/#{project.id}/merge_requests/#{merge_request_iid}/context_commits", authenticated_user), params: params_invalid_shas
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq('One or more context commits\' sha is not valid.')
end
it 'returns 400 when the commits are empty' do
post api("/projects/#{project.id}/merge_requests/#{merge_request_iid}/context_commits", authenticated_user), params: params_empty_commits
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns 400 when params is empty' do
post api("/projects/#{project.id}/merge_requests/#{merge_request_iid}/context_commits", authenticated_user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns 403 when creating new context commit for guest role' do
guest = create(:user)
project.add_guest(guest)
post api("/projects/#{project.id}/merge_requests/#{merge_request_iid}/context_commits", guest), params: params
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'returns 403 when creating new context commit for reporter role' do
reporter = create(:user)
project.add_reporter(reporter)
post api("/projects/#{project.id}/merge_requests/#{merge_request_iid}/context_commits", reporter), params: params
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'when unauthenticated' do
it 'returns 401 if user tries to create context commits' do
post api("/projects/#{project.id}/merge_requests/#{merge_request_iid}/context_commits"), params: params
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -1656,7 +1672,7 @@ describe API::MergeRequests do
it "denies the deletion of the merge request" do
delete api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", developer)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -1664,19 +1680,19 @@ describe API::MergeRequests do
it "destroys the merge request owners can destroy" do
delete api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
it "returns 404 for an invalid merge request IID" do
delete api("/projects/#{project.id}/merge_requests/12345", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it "returns 404 if the merge request id is used instead of iid" do
delete api("/projects/#{project.id}/merge_requests/#{merge_request.id}", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it_behaves_like '412 response' do
@@ -1712,40 +1728,40 @@ describe API::MergeRequests do
it "deletes context commit" do
delete api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/context_commits", authenticated_user), params: params
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
it "returns 400 when invalid commit sha is passed" do
delete api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/context_commits", authenticated_user), params: params_invalid_shas
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response["message"]).to eq('One or more context commits\' sha is not valid.')
end
it "returns 400 when commits is empty" do
delete api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/context_commits", authenticated_user), params: params_empty_commits
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it "returns 400 when no params is passed" do
delete api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/context_commits", authenticated_user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns 403 when deleting existing context commit for guest role' do
guest = create(:user)
project.add_guest(guest)
delete api("/projects/#{project.id}/merge_requests/#{merge_request_iid}/context_commits", guest), params: params
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'returns 403 when deleting existing context commit for reporter role' do
reporter = create(:user)
project.add_reporter(reporter)
delete api("/projects/#{project.id}/merge_requests/#{merge_request_iid}/context_commits", reporter), params: params
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -1753,7 +1769,7 @@ describe API::MergeRequests do
it "returns 401, unauthorised error" do
delete api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/context_commits")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -1764,7 +1780,7 @@ describe API::MergeRequests do
it "returns merge_request in case of success" do
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it "returns 406 if branch can't be merged" do
@@ -1773,21 +1789,21 @@ describe API::MergeRequests do
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user)
- expect(response).to have_gitlab_http_status(406)
+ expect(response).to have_gitlab_http_status(:not_acceptable)
expect(json_response['message']).to eq('Branch cannot be merged')
end
it "returns 405 if merge_request is not open" do
merge_request.close
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user)
- expect(response).to have_gitlab_http_status(405)
+ expect(response).to have_gitlab_http_status(:method_not_allowed)
expect(json_response['message']).to eq('405 Method Not Allowed')
end
it "returns 405 if merge_request is a work in progress" do
merge_request.update_attribute(:title, "WIP: #{merge_request.title}")
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user)
- expect(response).to have_gitlab_http_status(405)
+ expect(response).to have_gitlab_http_status(:method_not_allowed)
expect(json_response['message']).to eq('405 Method Not Allowed')
end
@@ -1801,7 +1817,7 @@ describe API::MergeRequests do
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user)
- expect(response).to have_gitlab_http_status(405)
+ expect(response).to have_gitlab_http_status(:method_not_allowed)
expect(json_response['message']).to eq('405 Method Not Allowed')
end
@@ -1809,21 +1825,21 @@ describe API::MergeRequests do
user2 = create(:user)
project.add_reporter(user2)
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user2)
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
expect(json_response['message']).to eq('401 Unauthorized')
end
it "returns 409 if the SHA parameter doesn't match" do
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user), params: { sha: merge_request.diff_head_sha.reverse }
- expect(response).to have_gitlab_http_status(409)
+ expect(response).to have_gitlab_http_status(:conflict)
expect(json_response['message']).to start_with('SHA does not match HEAD of source branch')
end
it "succeeds if the SHA parameter matches" do
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user), params: { sha: merge_request.diff_head_sha }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it "updates the MR's squash attribute" do
@@ -1831,7 +1847,7 @@ describe API::MergeRequests do
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user), params: { squash: true }
end.to change { merge_request.reload.squash }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'does not merge if merge_when_pipeline_succeeds is passed and the pipeline has failed' do
@@ -1842,7 +1858,7 @@ describe API::MergeRequests do
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user), params: { merge_when_pipeline_succeeds: true }
- expect(response).to have_gitlab_http_status(405)
+ expect(response).to have_gitlab_http_status(:method_not_allowed)
expect(merge_request.reload.state).to eq('opened')
end
@@ -1851,7 +1867,7 @@ describe API::MergeRequests do
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user), params: { merge_when_pipeline_succeeds: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['state']).to eq('merged')
end
@@ -1861,7 +1877,7 @@ describe API::MergeRequests do
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user), params: { merge_when_pipeline_succeeds: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['title']).to eq('Test')
expect(json_response['merge_when_pipeline_succeeds']).to eq(true)
end
@@ -1873,7 +1889,7 @@ describe API::MergeRequests do
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user), params: { merge_when_pipeline_succeeds: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['title']).to eq('Test')
expect(json_response['merge_when_pipeline_succeeds']).to eq(true)
end
@@ -1881,13 +1897,13 @@ describe API::MergeRequests do
it "returns 404 for an invalid merge request IID" do
put api("/projects/#{project.id}/merge_requests/12345/merge", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it "returns 404 if the merge request id is used instead of iid" do
put api("/projects/#{project.id}/merge_requests/#{merge_request.id}/merge", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
describe "the squash_commit_message param" do
@@ -1923,7 +1939,7 @@ describe API::MergeRequests do
params: { should_remove_source_branch: true }
)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(source_repository.branch_exists?(source_branch)).to be_falsy
end
end
@@ -1938,7 +1954,7 @@ describe API::MergeRequests do
it "records the squash commit SHA and returns it in the response" do
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['squash_commit_sha'].length).to eq(40)
end
end
@@ -1958,7 +1974,7 @@ describe API::MergeRequests do
it 'returns the generated ID from the merge service in case of success' do
get api(url, user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['commit_id']).to eq(merge_request.merge_ref_head.sha)
end
@@ -1970,7 +1986,7 @@ describe API::MergeRequests do
it 'returns 200 if MR can be merged' do
get api(url, user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['commit_id']).to eq(merge_request.merge_ref_head.sha)
end
@@ -1981,7 +1997,7 @@ describe API::MergeRequests do
get api(url, user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq('Merge request is not mergeable')
end
end
@@ -1995,7 +2011,7 @@ describe API::MergeRequests do
get api(url, user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Not found')
end
end
@@ -2006,7 +2022,7 @@ describe API::MergeRequests do
it 'returns 404' do
get api(url, user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -2016,7 +2032,7 @@ describe API::MergeRequests do
it 'returns 404' do
get api(url, user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -2030,7 +2046,7 @@ describe API::MergeRequests do
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user), params: { state_event: "close", remove_source_branch: false }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['state']).to eq('closed')
expect(json_response['force_remove_source_branch']).to be_falsey
end
@@ -2042,7 +2058,7 @@ describe API::MergeRequests do
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user), params: { state_event: "close", remove_source_branch: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['state']).to eq('closed')
expect(json_response['force_remove_source_branch']).to be_truthy
end
@@ -2063,7 +2079,7 @@ describe API::MergeRequests do
it 'is true for an authorized user' do
put api("/projects/#{target_project.id}/merge_requests/#{merge_request.iid}", fork_owner), params: { state_event: 'close', remove_source_branch: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['state']).to eq('closed')
expect(json_response['force_remove_source_branch']).to be true
end
@@ -2073,7 +2089,7 @@ describe API::MergeRequests do
put api("/projects/#{target_project.id}/merge_requests/#{merge_request.iid}", target_project.owner), params: { state_event: 'close', remove_source_branch: true }
end.not_to change { merge_request.reload.merge_params }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['state']).to eq('closed')
expect(json_response['force_remove_source_branch']).to be false
end
@@ -2084,46 +2100,46 @@ describe API::MergeRequests do
it "returns merge_request" do
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user), params: { state_event: "close" }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['state']).to eq('closed')
end
end
it "updates title and returns merge_request" do
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user), params: { title: "New title" }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['title']).to eq('New title')
end
it "updates description and returns merge_request" do
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user), params: { description: "New description" }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['description']).to eq('New description')
end
it "updates milestone_id and returns merge_request" do
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user), params: { milestone_id: milestone.id }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['milestone']['id']).to eq(milestone.id)
end
it "updates squash and returns merge_request" do
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user), params: { squash: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['squash']).to be_truthy
end
it "returns merge_request with renamed target_branch" do
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user), params: { target_branch: "wiki" }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['target_branch']).to eq('wiki')
end
it "returns merge_request that removes the source branch" do
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user), params: { remove_source_branch: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['force_remove_source_branch']).to be_truthy
end
@@ -2136,7 +2152,7 @@ describe API::MergeRequests do
put api("/projects/#{private_project.id}/merge_requests/#{mr.iid}", user), params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['assignee']).to be_nil
end
@@ -2236,7 +2252,7 @@ describe API::MergeRequests do
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user), params: { state_event: 'close', title: nil }
merge_request.reload
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(merge_request.state).to eq('opened')
end
@@ -2244,20 +2260,20 @@ describe API::MergeRequests do
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user), params: { state_event: 'close', target_branch: nil }
merge_request.reload
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(merge_request.state).to eq('opened')
end
it "returns 404 for an invalid merge request IID" do
put api("/projects/#{project.id}/merge_requests/12345", user), params: { state_event: "close" }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it "returns 404 if the merge request id is used instead of iid" do
put api("/projects/#{project.id}/merge_requests/#{merge_request.id}", user), params: { state_event: "close" }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -2310,19 +2326,19 @@ describe API::MergeRequests do
get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/closes_issues", guest)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it "returns 404 for an invalid merge request IID" do
get api("/projects/#{project.id}/merge_requests/12345/closes_issues", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it "returns 404 if the merge request id is used instead of iid" do
get api("/projects/#{project.id}/merge_requests/#{merge_request.id}/closes_issues", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -2330,26 +2346,26 @@ describe API::MergeRequests do
it 'subscribes to a merge request' do
post api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/subscribe", admin)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['subscribed']).to eq(true)
end
it 'returns 304 if already subscribed' do
post api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/subscribe", user)
- expect(response).to have_gitlab_http_status(304)
+ expect(response).to have_gitlab_http_status(:not_modified)
end
it 'returns 404 if the merge request is not found' do
post api("/projects/#{project.id}/merge_requests/123/subscribe", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns 404 if the merge request id is used instead of iid' do
post api("/projects/#{project.id}/merge_requests/#{merge_request.id}/subscribe", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns 403 if user has no access to read code' do
@@ -2358,7 +2374,7 @@ describe API::MergeRequests do
post api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/subscribe", guest)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -2366,26 +2382,26 @@ describe API::MergeRequests do
it 'unsubscribes from a merge request' do
post api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/unsubscribe", user)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['subscribed']).to eq(false)
end
it 'returns 304 if not subscribed' do
post api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/unsubscribe", admin)
- expect(response).to have_gitlab_http_status(304)
+ expect(response).to have_gitlab_http_status(:not_modified)
end
it 'returns 404 if the merge request is not found' do
post api("/projects/#{project.id}/merge_requests/123/unsubscribe", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns 404 if the merge request id is used instead of iid' do
post api("/projects/#{project.id}/merge_requests/#{merge_request.id}/unsubscribe", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns 403 if user has no access to read code' do
@@ -2394,7 +2410,7 @@ describe API::MergeRequests do
post api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/unsubscribe", guest)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -2406,19 +2422,19 @@ describe API::MergeRequests do
it 'removes the merge_when_pipeline_succeeds status' do
post api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/cancel_merge_when_pipeline_succeeds", user)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
it 'returns 404 if the merge request is not found' do
post api("/projects/#{project.id}/merge_requests/123/cancel_merge_when_pipeline_succeeds", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns 404 if the merge request id is used instead of iid' do
post api("/projects/#{project.id}/merge_requests/#{merge_request.id}/cancel_merge_when_pipeline_succeeds", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -2431,7 +2447,7 @@ describe API::MergeRequests do
end.to change { RebaseWorker.jobs.size }.by(1)
end
- expect(response).to have_gitlab_http_status(202)
+ expect(response).to have_gitlab_http_status(:accepted)
expect(merge_request.reload).to be_rebase_in_progress
expect(json_response['rebase_in_progress']).to be(true)
end
@@ -2446,7 +2462,7 @@ describe API::MergeRequests do
end.to change { RebaseWorker.jobs.size }.by(1)
end
- expect(response).to have_gitlab_http_status(202)
+ expect(response).to have_gitlab_http_status(:accepted)
expect(merge_request.reload).to be_rebase_in_progress
expect(json_response['rebase_in_progress']).to be(true)
end
@@ -2459,7 +2475,7 @@ describe API::MergeRequests do
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/rebase", guest)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'returns 409 if a rebase is already in progress' do
@@ -2469,7 +2485,7 @@ describe API::MergeRequests do
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/rebase", user)
end
- expect(response).to have_gitlab_http_status(409)
+ expect(response).to have_gitlab_http_status(:conflict)
end
it "returns 409 if rebase can't lock the row" do
@@ -2478,7 +2494,7 @@ describe API::MergeRequests do
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/rebase", user)
- expect(response).to have_gitlab_http_status(409)
+ expect(response).to have_gitlab_http_status(:conflict)
expect(json_response['message']).to eq('Failed to enqueue the rebase operation, possibly due to a long-lived transaction. Try again later.')
end
end
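Every hunk in this spec-only patch makes the same substitution: have_gitlab_http_status is now given a Rack status symbol instead of a bare integer, so a failing expectation reports a named status. A minimal sketch of how the two forms correspond (illustrative only, not part of the patch; it assumes Rack is loaded, as it is in any Rails app):

require 'rack/utils'

# The symbolic names used throughout these hunks resolve to the former numeric literals.
Rack::Utils::SYMBOL_TO_STATUS_CODE[:created]              # => 201
Rack::Utils::SYMBOL_TO_STATUS_CODE[:not_modified]         # => 304
Rack::Utils::SYMBOL_TO_STATUS_CODE[:method_not_allowed]   # => 405
Rack::Utils::SYMBOL_TO_STATUS_CODE[:unprocessable_entity] # => 422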
diff --git a/spec/requests/api/namespaces_spec.rb b/spec/requests/api/namespaces_spec.rb
index e0bf1509be3..68fffc638df 100644
--- a/spec/requests/api/namespaces_spec.rb
+++ b/spec/requests/api/namespaces_spec.rb
@@ -12,7 +12,7 @@ describe API::Namespaces do
context "when unauthenticated" do
it "returns authentication error" do
get api("/namespaces")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -23,7 +23,7 @@ describe API::Namespaces do
group_kind_json_response = json_response.find { |resource| resource['kind'] == 'group' }
user_kind_json_response = json_response.find { |resource| resource['kind'] == 'user' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(group_kind_json_response.keys).to include('id', 'kind', 'name', 'path', 'full_path',
'parent_id', 'members_count_with_descendants')
@@ -34,7 +34,7 @@ describe API::Namespaces do
it "admin: returns an array of all namespaces" do
get api("/namespaces", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(Namespace.count)
@@ -43,7 +43,7 @@ describe API::Namespaces do
it "admin: returns an array of matched namespaces" do
get api("/namespaces?search=#{group2.name}", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(1)
@@ -77,7 +77,7 @@ describe API::Namespaces do
it "user: returns an array of namespaces" do
get api("/namespaces", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(1)
@@ -86,7 +86,7 @@ describe API::Namespaces do
it "admin: returns an array of matched namespaces" do
get api("/namespaces?search=#{user.username}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(1)
@@ -102,7 +102,7 @@ describe API::Namespaces do
it 'returns namespace details' do
get api("/namespaces/#{namespace_id}", request_actor)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to eq(requested_namespace.id)
expect(json_response['path']).to eq(requested_namespace.path)
@@ -153,7 +153,7 @@ describe API::Namespaces do
it 'returns not-found' do
get api('/namespaces/0', request_actor)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -162,7 +162,7 @@ describe API::Namespaces do
it 'returns authentication error' do
get api("/namespaces/#{group1.id}")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -174,7 +174,7 @@ describe API::Namespaces do
it 'returns not-found' do
get api("/namespaces/#{group2.id}", request_actor)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -182,7 +182,7 @@ describe API::Namespaces do
it 'returns not-found' do
get api("/namespaces/#{user2.namespace.id}", request_actor)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
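The have_gitlab_http_status matcher itself is defined in GitLab's spec support code and does not appear in this diff. As a hypothetical sketch only, a matcher that accepts either an Integer or a Rack status Symbol could be written along these lines (the matcher name below is illustrative, not GitLab's actual implementation):

require 'rack/utils'
require 'rspec/expectations'

# Hypothetical matcher: normalises a Symbol such as :created to its numeric code
# before comparing it against the response status.
RSpec::Matchers.define :have_status_like do |expected|
  match do |response|
    expected_code =
      expected.is_a?(Symbol) ? Rack::Utils::SYMBOL_TO_STATUS_CODE.fetch(expected) : expected
    response.status == expected_code
  end

  failure_message do |response|
    "expected status #{expected.inspect}, got #{response.status}"
  end
end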
diff --git a/spec/requests/api/notes_spec.rb b/spec/requests/api/notes_spec.rb
index b4416344ecf..6cf978e717e 100644
--- a/spec/requests/api/notes_spec.rb
+++ b/spec/requests/api/notes_spec.rb
@@ -72,7 +72,7 @@ describe API::Notes do
it "returns an empty array" do
get api("/projects/#{ext_proj.id}/issues/#{ext_issue.iid}/notes", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response).to be_empty
@@ -86,7 +86,7 @@ describe API::Notes do
it "returns 404" do
get api("/projects/#{ext_proj.id}/issues/#{ext_issue.iid}/notes", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -95,7 +95,7 @@ describe API::Notes do
it "returns a non-empty array" do
get api("/projects/#{ext_proj.id}/issues/#{ext_issue.iid}/notes", private_user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first['body']).to eq(cross_reference_note.note)
@@ -114,7 +114,7 @@ describe API::Notes do
shared_examples 'a notes request' do
it 'is a note array response' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
end
@@ -177,7 +177,7 @@ describe API::Notes do
it "returns a 404 error" do
get api("/projects/#{ext_proj.id}/issues/#{ext_issue.iid}/notes/#{cross_reference_note.id}", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
context "when issue is confidential" do
@@ -188,7 +188,7 @@ describe API::Notes do
it "returns 404" do
get api("/projects/#{project.id}/issues/#{issue.iid}/notes/#{issue_note.id}", private_user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -197,7 +197,7 @@ describe API::Notes do
it "returns an issue note by id" do
get api("/projects/#{ext_proj.id}/issues/#{ext_issue.iid}/notes/#{cross_reference_note.id}", private_user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['body']).to eq(cross_reference_note.note)
end
end
@@ -237,7 +237,7 @@ describe API::Notes do
it 'returns 200 status' do
subject
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
it 'creates a new note' do
@@ -251,7 +251,7 @@ describe API::Notes do
it 'returns 403 status' do
subject
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'does not create a new note' do
diff --git a/spec/requests/api/notification_settings_spec.rb b/spec/requests/api/notification_settings_spec.rb
index 09fc0197c58..cbdab2f53a6 100644
--- a/spec/requests/api/notification_settings_spec.rb
+++ b/spec/requests/api/notification_settings_spec.rb
@@ -11,7 +11,7 @@ describe API::NotificationSettings do
it "returns global notification settings for the current user" do
get api("/notification_settings", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_a Hash
expect(json_response['notification_email']).to eq(user.notification_email)
expect(json_response['level']).to eq(user.global_notification_setting.level)
@@ -24,7 +24,7 @@ describe API::NotificationSettings do
it "updates global notification settings for the current user" do
put api("/notification_settings", user), params: { level: 'watch', notification_email: email.email }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['notification_email']).to eq(email.email)
expect(user.reload.notification_email).to eq(email.email)
expect(json_response['level']).to eq(user.reload.global_notification_setting.level)
@@ -35,7 +35,7 @@ describe API::NotificationSettings do
it "fails on non-user email address" do
put api("/notification_settings", user), params: { notification_email: 'invalid@example.com' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -43,7 +43,7 @@ describe API::NotificationSettings do
it "returns group level notification settings for the current user" do
get api("/groups/#{group.id}/notification_settings", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_a Hash
expect(json_response['level']).to eq(user.notification_settings_for(group).level)
end
@@ -53,7 +53,7 @@ describe API::NotificationSettings do
it "updates group level notification settings for the current user" do
put api("/groups/#{group.id}/notification_settings", user), params: { level: 'watch' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['level']).to eq(user.reload.notification_settings_for(group).level)
end
end
@@ -62,7 +62,7 @@ describe API::NotificationSettings do
it "returns project level notification settings for the current user" do
get api("/projects/#{project.id}/notification_settings", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_a Hash
expect(json_response['level']).to eq(user.notification_settings_for(project).level)
end
@@ -72,7 +72,7 @@ describe API::NotificationSettings do
it "updates project level notification settings for the current user" do
put api("/projects/#{project.id}/notification_settings", user), params: { level: 'custom', new_note: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['level']).to eq(user.reload.notification_settings_for(project).level)
expect(json_response['events']['new_note']).to be_truthy
expect(json_response['events']['new_issue']).to be_falsey
@@ -83,7 +83,7 @@ describe API::NotificationSettings do
it "fails on invalid level" do
put api("/projects/#{project.id}/notification_settings", user), params: { level: 'invalid' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
diff --git a/spec/requests/api/oauth_tokens_spec.rb b/spec/requests/api/oauth_tokens_spec.rb
index ce03756a19a..80eae97f41a 100644
--- a/spec/requests/api/oauth_tokens_spec.rb
+++ b/spec/requests/api/oauth_tokens_spec.rb
@@ -14,7 +14,7 @@ describe 'OAuth tokens' do
request_oauth_token(user)
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
expect(json_response['error']).to eq('invalid_grant')
end
end
@@ -25,7 +25,7 @@ describe 'OAuth tokens' do
request_oauth_token(user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['access_token']).not_to be_nil
end
end
@@ -33,7 +33,7 @@ describe 'OAuth tokens' do
shared_examples 'does not create an access token' do
let(:user) { create(:user) }
- it { expect(response).to have_gitlab_http_status(401) }
+ it { expect(response).to have_gitlab_http_status(:unauthorized) }
end
context 'when user is blocked' do
diff --git a/spec/requests/api/pages_domains_spec.rb b/spec/requests/api/pages_domains_spec.rb
index 6b774e9335e..8c411233b27 100644
--- a/spec/requests/api/pages_domains_spec.rb
+++ b/spec/requests/api/pages_domains_spec.rb
@@ -49,7 +49,7 @@ describe API::PagesDomains do
it 'returns paginated all pages domains' do
get api('/pages/domains', admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/pages_domain_basics')
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
@@ -76,7 +76,7 @@ describe API::PagesDomains do
it 'returns paginated pages domains' do
get api(route, user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/pages_domains')
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
@@ -147,7 +147,7 @@ describe API::PagesDomains do
it 'returns pages domain' do
get api(route_domain, user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
expect(json_response['domain']).to eq(pages_domain.domain)
expect(json_response['url']).to eq(pages_domain.url)
@@ -157,7 +157,7 @@ describe API::PagesDomains do
it 'returns pages domain with project path' do
get api(route_domain_path, user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
expect(json_response['domain']).to eq(pages_domain.domain)
expect(json_response['url']).to eq(pages_domain.url)
@@ -167,7 +167,7 @@ describe API::PagesDomains do
it 'returns pages domain with a certificate' do
get api(route_secure_domain, user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
expect(json_response['domain']).to eq(pages_domain_secure.domain)
expect(json_response['url']).to eq(pages_domain_secure.url)
@@ -179,7 +179,7 @@ describe API::PagesDomains do
it 'returns pages domain with an expired certificate' do
get api(route_expired_domain, user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
expect(json_response['certificate']['expired']).to be true
end
@@ -187,7 +187,7 @@ describe API::PagesDomains do
it 'returns pages domain with letsencrypt' do
get api(route_letsencrypt_domain, user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
expect(json_response['domain']).to eq(pages_domain_with_letsencrypt.domain)
expect(json_response['url']).to eq(pages_domain_with_letsencrypt.url)
@@ -261,7 +261,7 @@ describe API::PagesDomains do
post api(route, user), params: params
pages_domain = PagesDomain.find_by(domain: json_response['domain'])
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
expect(pages_domain.domain).to eq(params[:domain])
expect(pages_domain.certificate).to be_nil
@@ -273,7 +273,7 @@ describe API::PagesDomains do
post api(route, user), params: params_secure
pages_domain = PagesDomain.find_by(domain: json_response['domain'])
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
expect(pages_domain.domain).to eq(params_secure[:domain])
expect(pages_domain.certificate).to eq(params_secure[:certificate])
@@ -285,7 +285,7 @@ describe API::PagesDomains do
post api(route, user), params: pages_domain_with_letsencrypt_params
pages_domain = PagesDomain.find_by(domain: json_response['domain'])
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
expect(pages_domain.domain).to eq(pages_domain_with_letsencrypt_params[:domain])
expect(pages_domain.auto_ssl_enabled).to be true
@@ -295,7 +295,7 @@ describe API::PagesDomains do
post api(route, user), params: params_secure.merge(auto_ssl_enabled: true)
pages_domain = PagesDomain.find_by(domain: json_response['domain'])
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
expect(pages_domain.domain).to eq(params_secure[:domain])
expect(pages_domain.certificate).to eq(params_secure[:certificate])
@@ -306,13 +306,13 @@ describe API::PagesDomains do
it 'fails to create pages domain without key' do
post api(route, user), params: pages_domain_secure_params.slice(:domain, :certificate)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'fails to create pages domain with key missmatch' do
post api(route, user), params: pages_domain_secure_key_missmatch_params.slice(:domain, :certificate, :key)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -370,7 +370,7 @@ describe API::PagesDomains do
put api(route_secure_domain, user), params: { certificate: nil, key: nil }
pages_domain_secure.reload
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
expect(pages_domain_secure.certificate).to be_nil
expect(pages_domain_secure.key).to be_nil
@@ -381,7 +381,7 @@ describe API::PagesDomains do
put api(route_domain, user), params: params_secure
pages_domain.reload
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
expect(pages_domain.certificate).to eq(params_secure[:certificate])
expect(pages_domain.key).to eq(params_secure[:key])
@@ -391,7 +391,7 @@ describe API::PagesDomains do
put api(route_domain, user), params: params_secure.merge(auto_ssl_enabled: true)
pages_domain.reload
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
expect(pages_domain.certificate).to eq(params_secure[:certificate])
expect(pages_domain.key).to eq(params_secure[:key])
@@ -402,7 +402,7 @@ describe API::PagesDomains do
put api(route_domain, user), params: { auto_ssl_enabled: true }
pages_domain.reload
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
expect(pages_domain.auto_ssl_enabled).to be true
end
@@ -411,7 +411,7 @@ describe API::PagesDomains do
put api(route_letsencrypt_domain, user), params: { auto_ssl_enabled: false }
pages_domain_with_letsencrypt.reload
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
expect(pages_domain_with_letsencrypt.auto_ssl_enabled).to be false
expect(pages_domain_with_letsencrypt.key).to be
@@ -422,7 +422,7 @@ describe API::PagesDomains do
put api(route_expired_domain, user), params: params_secure
pages_domain_expired.reload
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
expect(pages_domain_expired.certificate).to eq(params_secure[:certificate])
expect(pages_domain_expired.key).to eq(params_secure[:key])
@@ -432,7 +432,7 @@ describe API::PagesDomains do
put api(route_secure_domain, user), params: params_secure_nokey
pages_domain_secure.reload
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
expect(pages_domain_secure.certificate).to eq(params_secure_nokey[:certificate])
end
@@ -448,19 +448,19 @@ describe API::PagesDomains do
it 'fails to update pages domain adding certificate without key' do
put api(route_domain, user), params: params_secure_nokey
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'fails to update pages domain adding certificate with missing chain' do
put api(route_domain, user), params: pages_domain_secure_missing_chain_params.slice(:certificate)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'fails to update pages domain with key missmatch' do
put api(route_secure_domain, user), params: pages_domain_secure_key_missmatch_params.slice(:certificate, :key)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -524,7 +524,7 @@ describe API::PagesDomains do
it 'deletes a pages domain' do
delete api(route_domain, user)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
end
diff --git a/spec/requests/api/pipeline_schedules_spec.rb b/spec/requests/api/pipeline_schedules_spec.rb
index fdb9508ed08..05abdf76be9 100644
--- a/spec/requests/api/pipeline_schedules_spec.rb
+++ b/spec/requests/api/pipeline_schedules_spec.rb
@@ -289,7 +289,7 @@ describe API::PipelineSchedules do
delete api("/projects/#{project.id}/pipeline_schedules/#{pipeline_schedule.id}", maintainer)
end.to change { project.pipeline_schedules.count }.by(-1)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
it 'responds with 404 Not Found if requesting non-existing pipeline_schedule' do
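The pipelines spec diff that follows also reworks one fixture (in the hunk at @@ -254): sorting by user_id only distinguishes pipelines that belong to different users, so the setup now creates one pipeline per distinct user rather than three pipelines owned by the same user. A rough sketch of the reworked setup, assuming GitLab's FactoryBot factories (:user, :ci_pipeline) and the surrounding let(:project) are available as they are in that spec:

# Three distinct users, each owning one pipeline, so order_by=user_id changes the result order.
create_list(:user, 3).each do |some_user|
  create(:ci_pipeline, project: project, user: some_user)
end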
diff --git a/spec/requests/api/pipelines_spec.rb b/spec/requests/api/pipelines_spec.rb
index 75e3013d362..6b824690e6c 100644
--- a/spec/requests/api/pipelines_spec.rb
+++ b/spec/requests/api/pipelines_spec.rb
@@ -23,7 +23,7 @@ describe API::Pipelines do
it 'returns project pipelines' do
get api("/projects/#{project.id}/pipelines", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first['sha']).to match /\A\h{40}\z/
@@ -254,7 +254,9 @@ describe API::Pipelines do
context 'when order_by and sort are specified' do
context 'when order_by user_id' do
before do
- create_list(:ci_pipeline, 3, project: project, user: create(:user))
+ create_list(:user, 3).each do |some_user|
+ create(:ci_pipeline, project: project, user: some_user)
+ end
end
context 'when sort parameter is valid' do
@@ -294,7 +296,7 @@ describe API::Pipelines do
it 'does not return project pipelines' do
get api("/projects/#{project.id}/pipelines", non_member)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq '404 Project Not Found'
expect(json_response).not_to be_an Array
end
@@ -323,7 +325,7 @@ describe API::Pipelines do
post api("/projects/#{project.id}/pipeline", user), params: { ref: project.default_branch }
end.to change { project.ci_pipelines.count }.by(1)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response).to be_a Hash
expect(json_response['sha']).to eq project.commit.id
end
@@ -337,7 +339,7 @@ describe API::Pipelines do
end.to change { project.ci_pipelines.count }.by(1)
expect_variables(project.ci_pipelines.last.variables, variables)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response).to be_a Hash
expect(json_response['sha']).to eq project.commit.id
expect(json_response).not_to have_key('variables')
@@ -358,7 +360,7 @@ describe API::Pipelines do
end.to change { project.ci_pipelines.count }.by(1)
expect_variables(project.ci_pipelines.last.variables, variables)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response).to be_a Hash
expect(json_response['sha']).to eq project.commit.id
expect(json_response).not_to have_key('variables')
@@ -372,7 +374,7 @@ describe API::Pipelines do
post api("/projects/#{project.id}/pipeline", user), params: { ref: project.default_branch }
end.not_to change { project.ci_pipelines.count }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
@@ -380,7 +382,7 @@ describe API::Pipelines do
it 'fails when using an invalid ref' do
post api("/projects/#{project.id}/pipeline", user), params: { ref: 'invalid_ref' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['base'].first).to eq 'Reference not found'
expect(json_response).not_to be_an Array
end
@@ -395,7 +397,7 @@ describe API::Pipelines do
it 'fails to create pipeline' do
post api("/projects/#{project.id}/pipeline", user), params: { ref: project.default_branch }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['base'].first).to eq 'Missing CI config file'
expect(json_response).not_to be_an Array
end
@@ -407,7 +409,7 @@ describe API::Pipelines do
it 'does not create pipeline' do
post api("/projects/#{project.id}/pipeline", non_member), params: { ref: project.default_branch }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq '404 Project Not Found'
expect(json_response).not_to be_an Array
end
@@ -428,21 +430,21 @@ describe API::Pipelines do
it 'exposes known attributes' do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/pipeline/detail')
end
it 'returns project pipelines' do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['sha']).to match /\A\h{40}\z/
end
it 'returns 404 when it does not exist' do
get api("/projects/#{project.id}/pipelines/123456", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq '404 Not found'
expect(json_response['id']).to be nil
end
@@ -464,7 +466,7 @@ describe API::Pipelines do
it 'does not return a project pipeline' do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}", non_member)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq '404 Project Not Found'
expect(json_response['id']).to be nil
end
@@ -489,7 +491,7 @@ describe API::Pipelines do
it 'gets the latest pipeline' do
get api("/projects/#{project.id}/pipelines/latest", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/pipeline/detail')
expect(json_response['ref']).to eq(project.default_branch)
expect(json_response['sha']).to eq(project.commit.id)
@@ -500,7 +502,7 @@ describe API::Pipelines do
it 'gets the latest pipeline' do
get api("/projects/#{project.id}/pipelines/latest", user), params: { ref: second_branch.name }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/pipeline/detail')
expect(json_response['ref']).to eq(second_branch.name)
expect(json_response['sha']).to eq(second_branch.target)
@@ -512,7 +514,7 @@ describe API::Pipelines do
it 'does not return a project pipeline' do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}", non_member)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq '404 Project Not Found'
expect(json_response['id']).to be nil
end
@@ -528,7 +530,7 @@ describe API::Pipelines do
it 'returns pipeline variables empty' do
subject
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_empty
end
@@ -538,7 +540,7 @@ describe API::Pipelines do
it 'returns pipeline variables' do
subject
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to contain_exactly({ "variable_type" => "env_var", "key" => "foo", "value" => "bar" })
end
end
@@ -559,7 +561,7 @@ describe API::Pipelines do
it 'returns pipeline variables' do
subject
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to contain_exactly({ "variable_type" => "env_var", "key" => "foo", "value" => "bar" })
end
end
@@ -570,7 +572,7 @@ describe API::Pipelines do
it 'does not return pipeline variables' do
subject
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -579,7 +581,7 @@ describe API::Pipelines do
it 'does not return pipeline variables' do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/variables", non_member)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq '404 Project Not Found'
end
end
@@ -592,14 +594,14 @@ describe API::Pipelines do
it 'destroys the pipeline' do
delete api("/projects/#{project.id}/pipelines/#{pipeline.id}", owner)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
expect { pipeline.reload }.to raise_error(ActiveRecord::RecordNotFound)
end
it 'returns 404 when it does not exist' do
delete api("/projects/#{project.id}/pipelines/123456", owner)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq '404 Not found'
end
@@ -613,7 +615,7 @@ describe API::Pipelines do
it 'destroys associated jobs' do
delete api("/projects/#{project.id}/pipelines/#{pipeline.id}", owner)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
expect { build.reload }.to raise_error(ActiveRecord::RecordNotFound)
end
end
@@ -624,7 +626,7 @@ describe API::Pipelines do
it 'returns a 404' do
delete api("/projects/#{project.id}/pipelines/#{pipeline.id}", non_member)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq '404 Project Not Found'
end
end
@@ -639,7 +641,7 @@ describe API::Pipelines do
it 'returns a 403' do
delete api("/projects/#{project.id}/pipelines/#{pipeline.id}", developer)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response['message']).to eq '403 Forbidden'
end
end
@@ -660,7 +662,7 @@ describe API::Pipelines do
post api("/projects/#{project.id}/pipelines/#{pipeline.id}/retry", user)
end.to change { pipeline.builds.count }.from(1).to(2)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(build.reload.retried?).to be true
end
end
@@ -669,7 +671,7 @@ describe API::Pipelines do
it 'does not return a project pipeline' do
post api("/projects/#{project.id}/pipelines/#{pipeline.id}/retry", non_member)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq '404 Project Not Found'
expect(json_response['id']).to be nil
end
@@ -688,7 +690,7 @@ describe API::Pipelines do
it 'cancels builds', :sidekiq_might_not_need_inline do
post api("/projects/#{project.id}/pipelines/#{pipeline.id}/cancel", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['status']).to eq('canceled')
end
end
@@ -703,7 +705,7 @@ describe API::Pipelines do
it 'rejects the action' do
post api("/projects/#{project.id}/pipelines/#{pipeline.id}/cancel", reporter)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(pipeline.reload.status).to eq('pending')
end
end
diff --git a/spec/requests/api/project_clusters_spec.rb b/spec/requests/api/project_clusters_spec.rb
index f3d005322f2..7cef40ff3b5 100644
--- a/spec/requests/api/project_clusters_spec.rb
+++ b/spec/requests/api/project_clusters_spec.rb
@@ -26,7 +26,7 @@ describe API::ProjectClusters do
it 'responds with 403' do
get api("/projects/#{project.id}/clusters", developer_user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -36,7 +36,7 @@ describe API::ProjectClusters do
end
it 'responds with 200' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'includes pagination headers' do
@@ -71,7 +71,7 @@ describe API::ProjectClusters do
it 'responds with 403' do
get api("/projects/#{project.id}/clusters/#{cluster_id}", developer_user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -142,7 +142,7 @@ describe API::ProjectClusters do
let(:cluster_id) { 123 }
it 'returns 404' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -175,7 +175,7 @@ describe API::ProjectClusters do
it 'responds with 403' do
post api("/projects/#{project.id}/clusters/user", developer_user), params: cluster_params
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -186,7 +186,7 @@ describe API::ProjectClusters do
context 'with valid params' do
it 'responds with 201' do
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
it 'creates a new Cluster::Cluster' do
@@ -236,7 +236,7 @@ describe API::ProjectClusters do
let(:namespace) { 'invalid_namespace' }
it 'responds with 400' do
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'does not create a new Clusters::Cluster' do
@@ -258,7 +258,7 @@ describe API::ProjectClusters do
end
it 'responds with 400' do
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['base'].first).to eq(_('Instance does not support multiple Kubernetes clusters'))
end
@@ -270,7 +270,7 @@ describe API::ProjectClusters do
end
it 'responds with 403' do
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response['message']).to eq('403 Forbidden')
end
@@ -307,7 +307,7 @@ describe API::ProjectClusters do
it 'responds with 403' do
put api("/projects/#{project.id}/clusters/#{cluster.id}", developer_user), params: update_params
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -322,7 +322,7 @@ describe API::ProjectClusters do
context 'with valid params' do
it 'responds with 200' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'updates cluster attributes' do
@@ -336,7 +336,7 @@ describe API::ProjectClusters do
let(:namespace) { 'invalid_namespace' }
it 'responds with 400' do
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'does not update cluster attributes' do
@@ -354,7 +354,7 @@ describe API::ProjectClusters do
let(:management_project_id) { create(:project).id }
it 'responds with 400' do
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns validation errors' do
@@ -372,7 +372,7 @@ describe API::ProjectClusters do
end
it 'responds with 400' do
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns validation error' do
@@ -384,7 +384,7 @@ describe API::ProjectClusters do
let(:namespace) { 'new-namespace' }
it 'responds with 200' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -413,7 +413,7 @@ describe API::ProjectClusters do
end
it 'responds with 200' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'updates platform kubernetes attributes' do
@@ -430,7 +430,7 @@ describe API::ProjectClusters do
let(:cluster) { create(:cluster, :project, :provided_by_user) }
it 'responds with 404' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -448,7 +448,7 @@ describe API::ProjectClusters do
it 'responds with 403' do
delete api("/projects/#{project.id}/clusters/#{cluster.id}", developer_user), params: cluster_params
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -458,7 +458,7 @@ describe API::ProjectClusters do
end
it 'responds with 204' do
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
it 'deletes the cluster' do
@@ -469,7 +469,7 @@ describe API::ProjectClusters do
let(:cluster) { create(:cluster, :project, :provided_by_user) }
it 'responds with 404' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/requests/api/project_container_repositories_spec.rb b/spec/requests/api/project_container_repositories_spec.rb
index 98b3416a2bc..91905635c3f 100644
--- a/spec/requests/api/project_container_repositories_spec.rb
+++ b/spec/requests/api/project_container_repositories_spec.rb
@@ -109,7 +109,7 @@ describe API::ProjectContainerRepositories do
context 'disallowed' do
let(:params) do
- { name_regex: 'v10.*' }
+ { name_regex_delete: 'v10.*' }
end
it_behaves_like 'rejected container repository access', :developer, :forbidden
@@ -130,16 +130,33 @@ describe API::ProjectContainerRepositories do
end
end
+ context 'without name_regex' do
+ let(:params) do
+ { keep_n: 100,
+ older_than: '1 day',
+ other: 'some value' }
+ end
+
+ it 'returns bad request' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
context 'passes all declared parameters' do
let(:params) do
- { name_regex: 'v10.*',
+ { name_regex_delete: 'v10.*',
+ name_regex_keep: 'v10.1.*',
keep_n: 100,
older_than: '1 day',
other: 'some value' }
end
let(:worker_params) do
- { name_regex: 'v10.*',
+ { name_regex: nil,
+ name_regex_delete: 'v10.*',
+ name_regex_keep: 'v10.1.*',
keep_n: 100,
older_than: '1 day',
container_expiration_policy: false }
@@ -163,7 +180,7 @@ describe API::ProjectContainerRepositories do
stub_exclusive_lease_taken(lease_key, timeout: 1.hour)
subject
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(response.body).to include('This request has already been made.')
end
@@ -174,6 +191,38 @@ describe API::ProjectContainerRepositories do
end
end
end
+
+ context 'with deprecated name_regex param' do
+ let(:params) do
+ { name_regex: 'v10.*',
+ name_regex_keep: 'v10.1.*',
+ keep_n: 100,
+ older_than: '1 day',
+ other: 'some value' }
+ end
+
+ let(:worker_params) do
+ { name_regex: 'v10.*',
+ name_regex_delete: nil,
+ name_regex_keep: 'v10.1.*',
+ keep_n: 100,
+ older_than: '1 day',
+ container_expiration_policy: false }
+ end
+
+ let(:lease_key) { "container_repository:cleanup_tags:#{root_repository.id}" }
+
+ it 'schedules cleanup of tags repository' do
+ stub_last_activity_update
+ stub_exclusive_lease(lease_key, timeout: 1.hour)
+ expect(CleanupContainerRepositoryWorker).to receive(:perform_async)
+ .with(maintainer.id, root_repository.id, worker_params)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:accepted)
+ end
+ end
end
end
diff --git a/spec/requests/api/project_events_spec.rb b/spec/requests/api/project_events_spec.rb
index d466dca9884..3fa3d4fa899 100644
--- a/spec/requests/api/project_events_spec.rb
+++ b/spec/requests/api/project_events_spec.rb
@@ -14,7 +14,7 @@ describe API::ProjectEvents do
it 'returns 404 for private project' do
get api("/projects/#{private_project.id}/events")
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns 200 status for a public project' do
@@ -22,7 +22,7 @@ describe API::ProjectEvents do
get api("/projects/#{public_project.id}/events")
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -36,14 +36,14 @@ describe API::ProjectEvents do
it 'returns only accessible events' do
get api("/projects/#{public_project.id}/events", non_member)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.size).to eq(1)
end
it 'returns all events when the user has access' do
get api("/projects/#{public_project.id}/events", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.size).to eq(2)
end
end
@@ -92,7 +92,7 @@ describe API::ProjectEvents do
it 'returns 404' do
get api("/projects/#{private_project.id}/events", non_member)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -100,7 +100,7 @@ describe API::ProjectEvents do
it 'returns project events' do
get api("/projects/#{private_project.id}/events?action=closed&target_type=issue&after=2016-12-1&before=2016-12-31", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(1)
@@ -109,7 +109,7 @@ describe API::ProjectEvents do
it 'returns 404 if project does not exist' do
get api("/projects/1234/events", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
context 'when the requesting token does not have "api" scope' do
@@ -118,7 +118,7 @@ describe API::ProjectEvents do
it 'returns a "403" response' do
get api("/projects/#{private_project.id}/events", personal_access_token: token)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -142,7 +142,7 @@ describe API::ProjectEvents do
get api("/projects/#{private_project.id}/events", user), params: { target_type: :merge_request }
end.not_to exceed_all_query_limit(control_count)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response.size).to eq(2)
expect(json_response.map { |r| r['target_id'] }).to match_array([merge_request1.id, merge_request2.id])
diff --git a/spec/requests/api/project_export_spec.rb b/spec/requests/api/project_export_spec.rb
index 98214a8c471..859a3cca44f 100644
--- a/spec/requests/api/project_export_spec.rb
+++ b/spec/requests/api/project_export_spec.rb
@@ -27,12 +27,9 @@ describe API::ProjectExport, :clean_gitlab_redis_cache do
before do
allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
-
- # simulate exporting work directory
- FileUtils.mkdir_p File.join(project_started.export_path, 'securerandom-hex')
-
- # simulate in after export action
- FileUtils.touch File.join(project_after_export.import_export_shared.lock_files_path, SecureRandom.hex)
+ allow_next_instance_of(ProjectExportWorker) do |job|
+ allow(job).to receive(:jid).and_return(SecureRandom.hex(8))
+ end
end
after do
@@ -55,7 +52,7 @@ describe API::ProjectExport, :clean_gitlab_redis_cache do
it 'prevents requesting project export' do
request
- expect(response).to have_gitlab_http_status(429)
+ expect(response).to have_gitlab_http_status(:too_many_requests)
expect(json_response['message']['error']).to eq('This endpoint has been requested too many times. Try again later.')
end
end
@@ -77,33 +74,47 @@ describe API::ProjectExport, :clean_gitlab_redis_cache do
it 'is none' do
get api(path_none, user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/project/export_status')
expect(json_response['export_status']).to eq('none')
end
- it 'is started' do
- get api(path_started, user)
+ context 'when project export has started' do
+ before do
+ create(:project_export_job, project: project_started, status: 1)
+ end
+
+ it 'returns status started' do
+ get api(path_started, user)
- expect(response).to have_gitlab_http_status(200)
- expect(response).to match_response_schema('public_api/v4/project/export_status')
- expect(json_response['export_status']).to eq('started')
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/project/export_status')
+ expect(json_response['export_status']).to eq('started')
+ end
end
- it 'is after_export' do
- get api(path_after_export, user)
+ context 'when project export has finished' do
+ it 'returns status finished' do
+ get api(path_finished, user)
- expect(response).to have_gitlab_http_status(200)
- expect(response).to match_response_schema('public_api/v4/project/export_status')
- expect(json_response['export_status']).to eq('after_export_action')
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/project/export_status')
+ expect(json_response['export_status']).to eq('finished')
+ end
end
- it 'is finished' do
- get api(path_finished, user)
+ context 'when project export is being regenerated' do
+ before do
+ create(:project_export_job, project: project_finished, status: 1)
+ end
- expect(response).to have_gitlab_http_status(200)
- expect(response).to match_response_schema('public_api/v4/project/export_status')
- expect(json_response['export_status']).to eq('finished')
+ it 'returns status regeneration_in_progress' do
+ get api(path_finished, user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/project/export_status')
+ expect(json_response['export_status']).to eq('regeneration_in_progress')
+ end
end
end
@@ -185,7 +196,7 @@ describe API::ProjectExport, :clean_gitlab_redis_cache do
it 'downloads' do
get api(download_path_finished, user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -194,7 +205,7 @@ describe API::ProjectExport, :clean_gitlab_redis_cache do
it 'downloads' do
get api(download_path_export_action, user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -324,7 +335,7 @@ describe API::ProjectExport, :clean_gitlab_redis_cache do
post(api(path, user), params: { 'upload[url]' => 'http://gitlab.com' })
- expect(response).to have_gitlab_http_status(202)
+ expect(response).to have_gitlab_http_status(:accepted)
end
end
@@ -334,7 +345,7 @@ describe API::ProjectExport, :clean_gitlab_redis_cache do
post api(path, user)
- expect(response).to have_gitlab_http_status(202)
+ expect(response).to have_gitlab_http_status(:accepted)
end
end
end
@@ -403,7 +414,7 @@ describe API::ProjectExport, :clean_gitlab_redis_cache do
expect_any_instance_of(Projects::ImportExport::ExportService).to receive(:execute)
post api(path, project.owner), params: params
- expect(response).to have_gitlab_http_status(202)
+ expect(response).to have_gitlab_http_status(:accepted)
end
end
end
diff --git a/spec/requests/api/project_hooks_spec.rb b/spec/requests/api/project_hooks_spec.rb
index b466bcb1a12..540b30e2969 100644
--- a/spec/requests/api/project_hooks_spec.rb
+++ b/spec/requests/api/project_hooks_spec.rb
@@ -25,7 +25,7 @@ describe API::ProjectHooks, 'ProjectHooks' do
it "returns project hooks" do
get api("/projects/#{project.id}/hooks", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(response).to include_pagination_headers
expect(json_response.count).to eq(1)
@@ -49,7 +49,7 @@ describe API::ProjectHooks, 'ProjectHooks' do
it "does not access project hooks" do
get api("/projects/#{project.id}/hooks", user3)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -59,7 +59,7 @@ describe API::ProjectHooks, 'ProjectHooks' do
it "returns a project hook" do
get api("/projects/#{project.id}/hooks/#{hook.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['url']).to eq(hook.url)
expect(json_response['issues_events']).to eq(hook.issues_events)
expect(json_response['confidential_issues_events']).to eq(hook.confidential_issues_events)
@@ -77,14 +77,14 @@ describe API::ProjectHooks, 'ProjectHooks' do
it "returns a 404 error if hook id is not available" do
get api("/projects/#{project.id}/hooks/1234", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
context "unauthorized user" do
it "does not access an existing hook" do
get api("/projects/#{project.id}/hooks/#{hook.id}", user3)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -96,7 +96,7 @@ describe API::ProjectHooks, 'ProjectHooks' do
params: { url: "http://example.com", issues_events: true, confidential_issues_events: true, wiki_page_events: true, job_events: true, push_events_branch_filter: 'some-feature-branch' }
end.to change {project.hooks.count}.by(1)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['url']).to eq('http://example.com')
expect(json_response['issues_events']).to eq(true)
expect(json_response['confidential_issues_events']).to eq(true)
@@ -120,7 +120,7 @@ describe API::ProjectHooks, 'ProjectHooks' do
post api("/projects/#{project.id}/hooks", user), params: { url: "http://example.com", token: token }
end.to change {project.hooks.count}.by(1)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response["url"]).to eq("http://example.com")
expect(json_response).not_to include("token")
@@ -132,17 +132,17 @@ describe API::ProjectHooks, 'ProjectHooks' do
it "returns a 400 error if url not given" do
post api("/projects/#{project.id}/hooks", user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it "returns a 422 error if url not valid" do
post api("/projects/#{project.id}/hooks", user), params: { url: "ftp://example.com" }
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
it "returns a 422 error if branch filter is not valid" do
post api("/projects/#{project.id}/hooks", user), params: { url: "http://example.com", push_events_branch_filter: '~badbranchname/' }
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
@@ -151,7 +151,7 @@ describe API::ProjectHooks, 'ProjectHooks' do
put api("/projects/#{project.id}/hooks/#{hook.id}", user),
params: { url: 'http://example.org', push_events: false, job_events: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['url']).to eq('http://example.org')
expect(json_response['issues_events']).to eq(hook.issues_events)
expect(json_response['confidential_issues_events']).to eq(hook.confidential_issues_events)
@@ -171,7 +171,7 @@ describe API::ProjectHooks, 'ProjectHooks' do
put api("/projects/#{project.id}/hooks/#{hook.id}", user), params: { url: "http://example.org", token: token }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response["url"]).to eq("http://example.org")
expect(json_response).not_to include("token")
@@ -181,17 +181,17 @@ describe API::ProjectHooks, 'ProjectHooks' do
it "returns 404 error if hook id not found" do
put api("/projects/#{project.id}/hooks/1234", user), params: { url: 'http://example.org' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it "returns 400 error if url is not given" do
put api("/projects/#{project.id}/hooks/#{hook.id}", user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it "returns a 422 error if url is not valid" do
put api("/projects/#{project.id}/hooks/#{hook.id}", user), params: { url: 'ftp://example.com' }
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
@@ -200,19 +200,19 @@ describe API::ProjectHooks, 'ProjectHooks' do
expect do
delete api("/projects/#{project.id}/hooks/#{hook.id}", user)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.to change {project.hooks.count}.by(-1)
end
it "returns a 404 error when deleting non existent hook" do
delete api("/projects/#{project.id}/hooks/42", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it "returns a 404 error if hook id not given" do
delete api("/projects/#{project.id}/hooks", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it "returns a 404 if a user attempts to delete project hooks they do not own" do
@@ -221,7 +221,7 @@ describe API::ProjectHooks, 'ProjectHooks' do
other_project.add_maintainer(test_user)
delete api("/projects/#{other_project.id}/hooks/#{hook.id}", test_user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(WebHook.exists?(hook.id)).to be_truthy
end
diff --git a/spec/requests/api/project_import_spec.rb b/spec/requests/api/project_import_spec.rb
index 71dd8fee0ae..563acd0ece4 100644
--- a/spec/requests/api/project_import_spec.rb
+++ b/spec/requests/api/project_import_spec.rb
@@ -3,37 +3,51 @@
require 'spec_helper'
describe API::ProjectImport do
- let(:export_path) { "#{Dir.tmpdir}/project_export_spec" }
+ include WorkhorseHelpers
+
let(:user) { create(:user) }
let(:file) { File.join('spec', 'features', 'projects', 'import_export', 'test_project_export.tar.gz') }
let(:namespace) { create(:group) }
- before do
- allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
- stub_uploads_object_storage(FileUploader)
+ let(:workhorse_token) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
+ let(:workhorse_headers) { { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => workhorse_token } }
+ before do
namespace.add_owner(user)
end
- after do
- FileUtils.rm_rf(export_path, secure: true)
- end
-
describe 'POST /projects/import' do
+ subject { upload_archive(file_upload, workhorse_headers, params) }
+
+ let(:file_upload) { fixture_file_upload(file) }
+
+ let(:params) do
+ {
+ path: 'test-import',
+ 'file.size' => file_upload.size
+ }
+ end
+
+ before do
+ allow(ImportExportUploader).to receive(:workhorse_upload_path).and_return('/')
+ end
+
it 'schedules an import using a namespace' do
stub_import(namespace)
+ params[:namespace] = namespace.id
- post api('/projects/import', user), params: { path: 'test-import', file: fixture_file_upload(file), namespace: namespace.id }
+ subject
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
it 'schedules an import using the namespace path' do
stub_import(namespace)
+ params[:namespace] = namespace.full_path
- post api('/projects/import', user), params: { path: 'test-import', file: fixture_file_upload(file), namespace: namespace.full_path }
+ subject
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
context 'when a name is explicitly set' do
@@ -41,24 +55,30 @@ describe API::ProjectImport do
it 'schedules an import using a namespace and a different name' do
stub_import(namespace)
+ params[:name] = expected_name
+ params[:namespace] = namespace.id
- post api('/projects/import', user), params: { path: 'test-import', file: fixture_file_upload(file), namespace: namespace.id, name: expected_name }
+ subject
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
it 'schedules an import using the namespace path and a different name' do
stub_import(namespace)
+ params[:name] = expected_name
+ params[:namespace] = namespace.full_path
- post api('/projects/import', user), params: { path: 'test-import', file: fixture_file_upload(file), namespace: namespace.full_path, name: expected_name }
+ subject
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
it 'sets name correctly' do
stub_import(namespace)
+ params[:name] = expected_name
+ params[:namespace] = namespace.full_path
- post api('/projects/import', user), params: { path: 'test-import', file: fixture_file_upload(file), namespace: namespace.full_path, name: expected_name }
+ subject
project = Project.find(json_response['id'])
expect(project.name).to eq(expected_name)
@@ -66,8 +86,11 @@ describe API::ProjectImport do
it 'sets name correctly with an overwrite' do
stub_import(namespace)
+ params[:name] = 'new project name'
+ params[:namespace] = namespace.full_path
+ params[:overwrite] = true
- post api('/projects/import', user), params: { path: 'test-import', file: fixture_file_upload(file), namespace: namespace.full_path, name: 'new project name', overwrite: true }
+ subject
project = Project.find(json_response['id'])
expect(project.name).to eq('new project name')
@@ -75,8 +98,10 @@ describe API::ProjectImport do
it 'schedules an import using the path and name explicitly set to nil' do
stub_import(namespace)
+ params[:name] = nil
+ params[:namespace] = namespace.full_path
- post api('/projects/import', user), params: { path: 'test-import', file: fixture_file_upload(file), namespace: namespace.full_path, name: nil }
+ subject
project = Project.find(json_response['id'])
expect(project.name).to eq('test-import')
@@ -85,56 +110,63 @@ describe API::ProjectImport do
it 'schedules an import at the user namespace level' do
stub_import(user.namespace)
+ params[:path] = 'test-import2'
- post api('/projects/import', user), params: { path: 'test-import2', file: fixture_file_upload(file) }
+ subject
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
it 'does not schedule an import for a namespace that does not exist' do
expect_any_instance_of(ProjectImportState).not_to receive(:schedule)
expect(::Projects::CreateService).not_to receive(:new)
- post api('/projects/import', user), params: { namespace: 'nonexistent', path: 'test-import2', file: fixture_file_upload(file) }
+ params[:namespace] = 'nonexistent'
+ params[:path] = 'test-import2'
- expect(response).to have_gitlab_http_status(404)
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Namespace Not Found')
end
it 'does not schedule an import if the user has no permission to the namespace' do
expect_any_instance_of(ProjectImportState).not_to receive(:schedule)
- post(api('/projects/import', create(:user)),
- params: {
- path: 'test-import3',
- file: fixture_file_upload(file),
- namespace: namespace.full_path
- })
+ new_namespace = create(:group)
+ params[:path] = 'test-import3'
+ params[:namespace] = new_namespace.full_path
+
+ subject
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Namespace Not Found')
end
- it 'does not schedule an import if the user uploads no valid file' do
- expect_any_instance_of(ProjectImportState).not_to receive(:schedule)
+ context 'if user uploads no valid file' do
+ let(:file) { 'README.md' }
+
+ it 'does not schedule an import if the user uploads no valid file' do
+ expect_any_instance_of(ProjectImportState).not_to receive(:schedule)
+
+ params[:path] = 'test-import3'
- post api('/projects/import', user), params: { path: 'test-import3', file: './random/test' }
+ subject
- expect(response).to have_gitlab_http_status(400)
- expect(json_response['error']).to eq('file is invalid')
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect(json_response['message']['error']).to eq('You need to upload a GitLab project export archive (ending in .gz).')
+ end
end
it 'stores params that can be overridden' do
stub_import(namespace)
override_params = { 'description' => 'Hello world' }
- post api('/projects/import', user),
- params: {
- path: 'test-import',
- file: fixture_file_upload(file),
- namespace: namespace.id,
- override_params: override_params
- }
+ params[:namespace] = namespace.id
+ params[:override_params] = override_params
+
+ subject
+
import_project = Project.find(json_response['id'])
expect(import_project.import_data.data['override_params']).to eq(override_params)
@@ -144,33 +176,14 @@ describe API::ProjectImport do
stub_import(namespace)
override_params = { 'not_allowed' => 'Hello world' }
- post api('/projects/import', user),
- params: {
- path: 'test-import',
- file: fixture_file_upload(file),
- namespace: namespace.id,
- override_params: override_params
- }
- import_project = Project.find(json_response['id'])
-
- expect(import_project.import_data.data['override_params']).to be_empty
- end
+ params[:namespace] = namespace.id
+ params[:override_params] = override_params
- it 'correctly overrides params during the import', :sidekiq_might_not_need_inline do
- override_params = { 'description' => 'Hello world' }
+ subject
- perform_enqueued_jobs do
- post api('/projects/import', user),
- params: {
- path: 'test-import',
- file: fixture_file_upload(file),
- namespace: namespace.id,
- override_params: override_params
- }
- end
import_project = Project.find(json_response['id'])
- expect(import_project.description).to eq('Hello world')
+ expect(import_project.import_data.data['override_params']).to be_empty
end
context 'when target path already exists in namespace' do
@@ -179,9 +192,11 @@ describe API::ProjectImport do
it 'does not schedule an import' do
expect_any_instance_of(ProjectImportState).not_to receive(:schedule)
- post api('/projects/import', user), params: { path: existing_project.path, file: fixture_file_upload(file) }
+ params[:path] = existing_project.path
- expect(response).to have_gitlab_http_status(400)
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq('Name has already been taken')
end
@@ -189,9 +204,12 @@ describe API::ProjectImport do
it 'schedules an import' do
stub_import(user.namespace)
- post api('/projects/import', user), params: { path: existing_project.path, file: fixture_file_upload(file), overwrite: true }
+ params[:path] = existing_project.path
+ params[:overwrite] = true
+
+ subject
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
end
end
@@ -202,13 +220,52 @@ describe API::ProjectImport do
end
it 'prevents users from importing projects' do
- post api('/projects/import', user), params: { path: 'test-import', file: fixture_file_upload(file), namespace: namespace.id }
+ params[:namespace] = namespace.id
- expect(response).to have_gitlab_http_status(429)
+ subject
+
+ expect(response).to have_gitlab_http_status(:too_many_requests)
expect(json_response['message']['error']).to eq('This endpoint has been requested too many times. Try again later.')
end
end
+ context 'when using remote storage' do
+ let(:file_name) { 'project_export.tar.gz' }
+
+ let!(:fog_connection) do
+ stub_uploads_object_storage(ImportExportUploader, direct_upload: true)
+ end
+
+ let(:tmp_object) do
+ fog_connection.directories.new(key: 'uploads').files.create(
+ key: "tmp/uploads/#{file_name}",
+ body: fixture_file_upload(file)
+ )
+ end
+
+ let(:file_upload) { fog_to_uploaded_file(tmp_object) }
+
+ it 'schedules an import' do
+ stub_import(namespace)
+ params[:namespace] = namespace.id
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:created)
+ end
+ end
+
+ def upload_archive(file, headers = {}, params = {})
+ workhorse_finalize(
+ api("/projects/import", user),
+ method: :post,
+ file_key: :file,
+ params: params.merge(file: file),
+ headers: headers,
+ send_rewritten_field: true
+ )
+ end
+
def stub_import(namespace)
expect_any_instance_of(ProjectImportState).to receive(:schedule)
expect(::Projects::CreateService).to receive(:new).with(user, hash_including(namespace_id: namespace.id)).and_call_original
@@ -222,7 +279,7 @@ describe API::ProjectImport do
get api("/projects/#{project.id}/import", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to include('import_status' => 'started')
end
@@ -233,9 +290,65 @@ describe API::ProjectImport do
get api("/projects/#{project.id}/import", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to include('import_status' => 'failed',
'import_error' => 'error')
end
end
+
+ describe 'POST /projects/import/authorize' do
+ subject { post api('/projects/import/authorize', user), headers: workhorse_headers }
+
+ it 'authorizes importing project with workhorse header' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ expect(json_response['TempPath']).to eq(ImportExportUploader.workhorse_local_upload_path)
+ end
+
+ it 'rejects requests that bypassed gitlab-workhorse' do
+ workhorse_headers.delete(Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ context 'when using remote storage' do
+ context 'when direct upload is enabled' do
+ before do
+ stub_uploads_object_storage(ImportExportUploader, enabled: true, direct_upload: true)
+ end
+
+ it 'responds with status 200, location of file remote store and object details' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ expect(json_response).not_to have_key('TempPath')
+ expect(json_response['RemoteObject']).to have_key('ID')
+ expect(json_response['RemoteObject']).to have_key('GetURL')
+ expect(json_response['RemoteObject']).to have_key('StoreURL')
+ expect(json_response['RemoteObject']).to have_key('DeleteURL')
+ expect(json_response['RemoteObject']).to have_key('MultipartUpload')
+ end
+ end
+
+ context 'when direct upload is disabled' do
+ before do
+ stub_uploads_object_storage(ImportExportUploader, enabled: true, direct_upload: false)
+ end
+
+ it 'handles as a local file' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ expect(json_response['TempPath']).to eq(ImportExportUploader.workhorse_local_upload_path)
+ expect(json_response['RemoteObject']).to be_nil
+ end
+ end
+ end
+ end
end
diff --git a/spec/requests/api/project_milestones_spec.rb b/spec/requests/api/project_milestones_spec.rb
index df6d83c1e65..a40878fc807 100644
--- a/spec/requests/api/project_milestones_spec.rb
+++ b/spec/requests/api/project_milestones_spec.rb
@@ -27,19 +27,19 @@ describe API::ProjectMilestones do
it 'returns 404 response when the project does not exist' do
delete api("/projects/0/milestones/#{milestone.id}", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns 404 response when the milestone does not exist' do
delete api("/projects/#{project.id}/milestones/0", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it "returns 404 from guest user deleting a milestone" do
delete api("/projects/#{project.id}/milestones/#{milestone.id}", guest)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -67,7 +67,7 @@ describe API::ProjectMilestones do
it 'returns 403' do
post api("/projects/#{project.id}/milestones/#{milestone.id}/promote", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -79,14 +79,14 @@ describe API::ProjectMilestones do
it 'returns 200' do
post api("/projects/#{project.id}/milestones/#{milestone.id}/promote", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(group.milestones.first.title).to eq(milestone.title)
end
it 'returns 200 for closed milestone' do
post api("/projects/#{project.id}/milestones/#{closed_milestone.id}/promote", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(group.milestones.first.title).to eq(closed_milestone.title)
end
end
@@ -99,13 +99,13 @@ describe API::ProjectMilestones do
it 'returns 404 response when the project does not exist' do
post api("/projects/0/milestones/#{milestone.id}/promote", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns 404 response when the milestone does not exist' do
post api("/projects/#{project.id}/milestones/0/promote", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -117,7 +117,7 @@ describe API::ProjectMilestones do
it 'returns 403' do
post api("/projects/#{project.id}/milestones/#{milestone.id}/promote", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
diff --git a/spec/requests/api/project_snapshots_spec.rb b/spec/requests/api/project_snapshots_spec.rb
index cdd44f71649..a54f317782b 100644
--- a/spec/requests/api/project_snapshots_spec.rb
+++ b/spec/requests/api/project_snapshots_spec.rb
@@ -34,26 +34,26 @@ describe API::ProjectSnapshots do
it 'returns authentication error as project owner' do
get api("/projects/#{project.id}/snapshot", project.owner)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'returns authentication error as unauthenticated user' do
get api("/projects/#{project.id}/snapshot", nil)
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it 'requests project repository raw archive as administrator' do
get api("/projects/#{project.id}/snapshot", admin), params: { wiki: '0' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect_snapshot_response_for(project.repository)
end
it 'requests wiki repository raw archive as administrator' do
get api("/projects/#{project.id}/snapshot", admin), params: { wiki: '1' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect_snapshot_response_for(project.wiki.repository)
end
end
diff --git a/spec/requests/api/project_snippets_spec.rb b/spec/requests/api/project_snippets_spec.rb
index 2c6a13efc12..e018a4643db 100644
--- a/spec/requests/api/project_snippets_spec.rb
+++ b/spec/requests/api/project_snippets_spec.rb
@@ -6,6 +6,12 @@ describe API::ProjectSnippets do
let_it_be(:project) { create(:project, :public) }
let_it_be(:user) { create(:user) }
let_it_be(:admin) { create(:admin) }
+ let_it_be(:project_no_snippets) { create(:project, :snippets_disabled) }
+
+ before do
+ project_no_snippets.add_developer(admin)
+ project_no_snippets.add_developer(user)
+ end
describe "GET /projects/:project_id/snippets/:id/user_agent_detail" do
let(:snippet) { create(:project_snippet, :public, project: project) }
@@ -14,7 +20,7 @@ describe API::ProjectSnippets do
it 'exposes known attributes' do
get api("/projects/#{project.id}/snippets/#{snippet.id}/user_agent_detail", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['user_agent']).to eq(user_agent_detail.user_agent)
expect(json_response['ip_address']).to eq(user_agent_detail.ip_address)
expect(json_response['akismet_submitted']).to eq(user_agent_detail.submitted)
@@ -24,13 +30,19 @@ describe API::ProjectSnippets do
other_project = create(:project)
get api("/projects/#{other_project.id}/snippets/#{snippet.id}/user_agent_detail", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it "returns unauthorized for non-admin users" do
get api("/projects/#{snippet.project.id}/snippets/#{snippet.id}/user_agent_detail", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ context 'with snippets disabled' do
+ it_behaves_like '403 response' do
+ let(:request) { get api("/projects/#{project_no_snippets.id}/snippets/123/user_agent_detail", admin) }
+ end
end
end
@@ -45,7 +57,7 @@ describe API::ProjectSnippets do
get api("/projects/#{project.id}/snippets", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(3)
@@ -58,11 +70,17 @@ describe API::ProjectSnippets do
get api("/projects/#{project.id}/snippets/", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(0)
end
+
+ context 'with snippets disabled' do
+ it_behaves_like '403 response' do
+ let(:request) { get api("/projects/#{project_no_snippets.id}/snippets", user) }
+ end
+ end
end
describe 'GET /projects/:project_id/snippets/:id' do
@@ -72,7 +90,7 @@ describe API::ProjectSnippets do
it 'returns snippet json' do
get api("/projects/#{project.id}/snippets/#{snippet.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['title']).to eq(snippet.title)
expect(json_response['description']).to eq(snippet.description)
@@ -82,9 +100,15 @@ describe API::ProjectSnippets do
it 'returns 404 for invalid snippet id' do
get api("/projects/#{project.id}/snippets/1234", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Not found')
end
+
+ context 'with snippets disabled' do
+ it_behaves_like '403 response' do
+ let(:request) { get api("/projects/#{project_no_snippets.id}/snippets/123", user) }
+ end
+ end
end
describe 'POST /projects/:project_id/snippets/' do
@@ -98,6 +122,36 @@ describe API::ProjectSnippets do
}
end
+ shared_examples 'project snippet repository actions' do
+ let(:snippet) { ProjectSnippet.find(json_response['id']) }
+
+ it 'creates repository' do
+ subject
+
+ expect(snippet.repository.exists?).to be_truthy
+ end
+
+ it 'commits the files to the repository' do
+ subject
+
+ blob = snippet.repository.blob_at('master', params[:file_name])
+
+ expect(blob.data).to eq params[:code]
+ end
+
+ context 'when feature flag :version_snippets is disabled' do
+ it 'does not create snippet repository' do
+ stub_feature_flags(version_snippets: false)
+
+ expect do
+ subject
+ end.to change { ProjectSnippet.count }.by(1)
+
+ expect(snippet.repository_exists?).to be_falsey
+ end
+ end
+ end
+
context 'with a regular user' do
let(:user) { create(:user) }
@@ -110,7 +164,7 @@ describe API::ProjectSnippets do
it 'creates a new snippet' do
post api("/projects/#{project.id}/snippets/", user), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
snippet = ProjectSnippet.find(json_response['id'])
expect(snippet.content).to eq(params[:code])
expect(snippet.description).to eq(params[:description])
@@ -118,12 +172,16 @@ describe API::ProjectSnippets do
expect(snippet.file_name).to eq(params[:file_name])
expect(snippet.visibility_level).to eq(Snippet::INTERNAL)
end
+
+ it_behaves_like 'project snippet repository actions' do
+ subject { post api("/projects/#{project.id}/snippets/", user), params: params }
+ end
end
it 'creates a new snippet' do
post api("/projects/#{project.id}/snippets/", admin), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
snippet = ProjectSnippet.find(json_response['id'])
expect(snippet.content).to eq(params[:code])
expect(snippet.description).to eq(params[:description])
@@ -132,12 +190,16 @@ describe API::ProjectSnippets do
expect(snippet.visibility_level).to eq(Snippet::PUBLIC)
end
+ it_behaves_like 'project snippet repository actions' do
+ subject { post api("/projects/#{project.id}/snippets/", admin), params: params }
+ end
+
it 'creates a new snippet with content parameter' do
params[:content] = params.delete(:code)
post api("/projects/#{project.id}/snippets/", admin), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
snippet = ProjectSnippet.find(json_response['id'])
expect(snippet.content).to eq(params[:content])
expect(snippet.description).to eq(params[:description])
@@ -151,7 +213,7 @@ describe API::ProjectSnippets do
post api("/projects/#{project.id}/snippets/", admin), params: params
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('code, content are mutually exclusive')
end
@@ -160,7 +222,7 @@ describe API::ProjectSnippets do
post api("/projects/#{project.id}/snippets/", admin), params: params
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns 400 for empty code field' do
@@ -168,7 +230,7 @@ describe API::ProjectSnippets do
post api("/projects/#{project.id}/snippets/", admin), params: params
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
context 'when the snippet is spam' do
@@ -196,7 +258,7 @@ describe API::ProjectSnippets do
expect { create_snippet(project, visibility: 'public') }
.not_to change { Snippet.count }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq({ "error" => "Spam detected" })
end
@@ -206,19 +268,25 @@ describe API::ProjectSnippets do
end
end
end
+
+ context 'with snippets disabled' do
+ it_behaves_like '403 response' do
+ let(:request) { post api("/projects/#{project_no_snippets.id}/snippets", user), params: params }
+ end
+ end
end
describe 'PUT /projects/:project_id/snippets/:id/' do
let(:visibility_level) { Snippet::PUBLIC }
- let(:snippet) { create(:project_snippet, author: admin, visibility_level: visibility_level) }
+ let(:snippet) { create(:project_snippet, :repository, author: admin, visibility_level: visibility_level, project: project) }
it 'updates snippet' do
new_content = 'New content'
new_description = 'New description'
- put api("/projects/#{snippet.project.id}/snippets/#{snippet.id}/", admin), params: { code: new_content, description: new_description, visibility: 'private' }
+ update_snippet(params: { code: new_content, description: new_description, visibility: 'private' })
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
snippet.reload
expect(snippet.content).to eq(new_content)
expect(snippet.description).to eq(new_description)
@@ -229,47 +297,47 @@ describe API::ProjectSnippets do
new_content = 'New content'
new_description = 'New description'
- put api("/projects/#{snippet.project.id}/snippets/#{snippet.id}/", admin), params: { content: new_content, description: new_description }
+ update_snippet(params: { content: new_content, description: new_description })
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
snippet.reload
expect(snippet.content).to eq(new_content)
expect(snippet.description).to eq(new_description)
end
it 'returns 400 when both code and content parameters specified' do
- put api("/projects/#{snippet.project.id}/snippets/1234", admin), params: { code: 'some content', content: 'other content' }
+ update_snippet(params: { code: 'some content', content: 'other content' })
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('code, content are mutually exclusive')
end
it 'returns 404 for invalid snippet id' do
- put api("/projects/#{snippet.project.id}/snippets/1234", admin), params: { title: 'foo' }
+ update_snippet(snippet_id: '1234', params: { title: 'foo' })
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Snippet Not Found')
end
it 'returns 400 for missing parameters' do
- put api("/projects/#{project.id}/snippets/1234", admin)
+ update_snippet
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns 400 for empty code field' do
new_content = ''
- put api("/projects/#{snippet.project.id}/snippets/#{snippet.id}/", admin), params: { code: new_content }
+ update_snippet(params: { code: new_content })
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
- context 'when the snippet is spam' do
- def update_snippet(snippet_params = {})
- put api("/projects/#{snippet.project.id}/snippets/#{snippet.id}", admin), params: snippet_params
- end
+ it_behaves_like 'update with repository actions' do
+ let(:snippet_without_repo) { create(:project_snippet, author: admin, project: project, visibility_level: visibility_level) }
+ end
+ context 'when the snippet is spam' do
before do
allow_next_instance_of(Spam::AkismetService) do |instance|
allow(instance).to receive(:spam?).and_return(true)
@@ -280,7 +348,7 @@ describe API::ProjectSnippets do
let(:visibility_level) { Snippet::PRIVATE }
it 'creates the snippet' do
- expect { update_snippet(title: 'Foo') }
+ expect { update_snippet(params: { title: 'Foo' }) }
.to change { snippet.reload.title }.to('Foo')
end
end
@@ -289,12 +357,12 @@ describe API::ProjectSnippets do
let(:visibility_level) { Snippet::PUBLIC }
it 'rejects the snippet' do
- expect { update_snippet(title: 'Foo') }
+ expect { update_snippet(params: { title: 'Foo' }) }
.not_to change { snippet.reload.title }
end
it 'creates a spam log' do
- expect { update_snippet(title: 'Foo') }
+ expect { update_snippet(params: { title: 'Foo' }) }
.to log_spam(title: 'Foo', user_id: admin.id, noteable_type: 'ProjectSnippet')
end
end
@@ -303,49 +371,65 @@ describe API::ProjectSnippets do
let(:visibility_level) { Snippet::PRIVATE }
it 'rejects the snippet' do
- expect { update_snippet(title: 'Foo', visibility: 'public') }
+ expect { update_snippet(params: { title: 'Foo', visibility: 'public' }) }
.not_to change { snippet.reload.title }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq({ "error" => "Spam detected" })
end
it 'creates a spam log' do
- expect { update_snippet(title: 'Foo', visibility: 'public') }
+ expect { update_snippet(params: { title: 'Foo', visibility: 'public' }) }
.to log_spam(title: 'Foo', user_id: admin.id, noteable_type: 'ProjectSnippet')
end
end
end
+
+ context 'with snippets disabled' do
+ it_behaves_like '403 response' do
+ let(:request) { put api("/projects/#{project_no_snippets.id}/snippets/123", admin), params: { description: 'foo' } }
+ end
+ end
+
+ def update_snippet(snippet_id: snippet.id, params: {})
+ put api("/projects/#{snippet.project.id}/snippets/#{snippet_id}", admin), params: params
+ end
end
describe 'DELETE /projects/:project_id/snippets/:id/' do
- let(:snippet) { create(:project_snippet, author: admin) }
+ let(:snippet) { create(:project_snippet, author: admin, project: project) }
it 'deletes snippet' do
delete api("/projects/#{snippet.project.id}/snippets/#{snippet.id}/", admin)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
it 'returns 404 for invalid snippet id' do
delete api("/projects/#{snippet.project.id}/snippets/1234", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Snippet Not Found')
end
it_behaves_like '412 response' do
let(:request) { api("/projects/#{snippet.project.id}/snippets/#{snippet.id}/", admin) }
end
+
+ context 'with snippets disabled' do
+ it_behaves_like '403 response' do
+ let(:request) { delete api("/projects/#{project_no_snippets.id}/snippets/123", admin) }
+ end
+ end
end
describe 'GET /projects/:project_id/snippets/:id/raw' do
- let(:snippet) { create(:project_snippet, author: admin) }
+ let(:snippet) { create(:project_snippet, author: admin, project: project) }
it 'returns raw text' do
get api("/projects/#{snippet.project.id}/snippets/#{snippet.id}/raw", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type).to eq 'text/plain'
expect(response.body).to eq(snippet.content)
end
@@ -353,8 +437,14 @@ describe API::ProjectSnippets do
it 'returns 404 for invalid snippet id' do
get api("/projects/#{snippet.project.id}/snippets/1234/raw", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Snippet Not Found')
end
+
+ context 'with snippets disabled' do
+ it_behaves_like '403 response' do
+ let(:request) { get api("/projects/#{project_no_snippets.id}/snippets/123/raw", admin) }
+ end
+ end
end
end
diff --git a/spec/requests/api/project_statistics_spec.rb b/spec/requests/api/project_statistics_spec.rb
index 184d0a72c37..5d0b506cc92 100644
--- a/spec/requests/api/project_statistics_spec.rb
+++ b/spec/requests/api/project_statistics_spec.rb
@@ -21,7 +21,7 @@ describe API::ProjectStatistics do
it 'returns the fetch statistics of the last 30 days' do
get api("/projects/#{public_project.id}/statistics", maintainer)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
fetches = json_response['fetches']
expect(fetches['total']).to eq(40)
expect(fetches['days'].length).to eq(5)
@@ -34,7 +34,7 @@ describe API::ProjectStatistics do
get api("/projects/#{public_project.id}/statistics", maintainer)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
fetches = json_response['fetches']
expect(fetches['total']).to eq(40)
expect(fetches['days'].length).to eq(5)
@@ -47,7 +47,7 @@ describe API::ProjectStatistics do
get api("/projects/#{public_project.id}/statistics", developer)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response['message']).to eq('403 Forbidden')
end
@@ -56,7 +56,7 @@ describe API::ProjectStatistics do
get api("/projects/#{public_project.id}/statistics", maintainer)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/requests/api/project_templates_spec.rb b/spec/requests/api/project_templates_spec.rb
index 2bf864afe87..50b950fb5c1 100644
--- a/spec/requests/api/project_templates_spec.rb
+++ b/spec/requests/api/project_templates_spec.rb
@@ -15,7 +15,7 @@ describe API::ProjectTemplates do
it 'returns dockerfiles' do
get api("/projects/#{public_project.id}/templates/dockerfiles")
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(response).to match_response_schema('public_api/v4/template_list')
expect(json_response).to satisfy_one { |template| template['key'] == 'Binary' }
@@ -24,7 +24,7 @@ describe API::ProjectTemplates do
it 'returns gitignores' do
get api("/projects/#{public_project.id}/templates/gitignores")
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(response).to match_response_schema('public_api/v4/template_list')
expect(json_response).to satisfy_one { |template| template['key'] == 'Actionscript' }
@@ -33,7 +33,7 @@ describe API::ProjectTemplates do
it 'returns gitlab_ci_ymls' do
get api("/projects/#{public_project.id}/templates/gitlab_ci_ymls")
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(response).to match_response_schema('public_api/v4/template_list')
expect(json_response).to satisfy_one { |template| template['key'] == 'Android' }
@@ -42,7 +42,7 @@ describe API::ProjectTemplates do
it 'returns licenses' do
get api("/projects/#{public_project.id}/templates/licenses")
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(response).to match_response_schema('public_api/v4/template_list')
expect(json_response).to satisfy_one { |template| template['key'] == 'mit' }
@@ -51,19 +51,19 @@ describe API::ProjectTemplates do
it 'returns 400 for an unknown template type' do
get api("/projects/#{public_project.id}/templates/unknown")
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'denies access to an anonymous user on a private project' do
get api("/projects/#{private_project.id}/templates/licenses")
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'permits access to a developer on a private project' do
get api("/projects/#{private_project.id}/templates/licenses", developer)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/template_list')
end
end
@@ -72,7 +72,7 @@ describe API::ProjectTemplates do
it 'returns key and name for the listed licenses' do
get api("/projects/#{public_project.id}/templates/licenses")
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/template_list')
end
end
@@ -81,7 +81,7 @@ describe API::ProjectTemplates do
it 'returns a specific dockerfile' do
get api("/projects/#{public_project.id}/templates/dockerfiles/Binary")
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/template')
expect(json_response['name']).to eq('Binary')
end
@@ -89,7 +89,7 @@ describe API::ProjectTemplates do
it 'returns a specific gitignore' do
get api("/projects/#{public_project.id}/templates/gitignores/Actionscript")
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/template')
expect(json_response['name']).to eq('Actionscript')
end
@@ -97,7 +97,7 @@ describe API::ProjectTemplates do
it 'returns C++ gitignore' do
get api("/projects/#{public_project.id}/templates/gitignores/C++")
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/template')
expect(json_response['name']).to eq('C++')
end
@@ -105,7 +105,7 @@ describe API::ProjectTemplates do
it 'returns C++ gitignore for URL-encoded names' do
get api("/projects/#{public_project.id}/templates/gitignores/C%2B%2B")
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/template')
expect(json_response['name']).to eq('C++')
end
@@ -113,7 +113,7 @@ describe API::ProjectTemplates do
it 'returns a specific gitlab_ci_yml' do
get api("/projects/#{public_project.id}/templates/gitlab_ci_ymls/Android")
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/template')
expect(json_response['name']).to eq('Android')
end
@@ -121,26 +121,26 @@ describe API::ProjectTemplates do
it 'returns a specific license' do
get api("/projects/#{public_project.id}/templates/licenses/mit")
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/license')
end
it 'returns 404 for an unknown specific template' do
get api("/projects/#{public_project.id}/templates/licenses/unknown")
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'denies access to an anonymous user on a private project' do
get api("/projects/#{private_project.id}/templates/licenses/mit")
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'permits access to a developer on a private project' do
get api("/projects/#{private_project.id}/templates/licenses/mit", developer)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/license')
end
@@ -148,7 +148,7 @@ describe API::ProjectTemplates do
it 'rejects invalid filenames' do
get api("/projects/#{public_project.id}/templates/#{template_type}/%2e%2e%2fPython%2ea")
- expect(response).to have_gitlab_http_status(500)
+ expect(response).to have_gitlab_http_status(:internal_server_error)
end
end
@@ -165,7 +165,7 @@ describe API::ProjectTemplates do
fullname: 'Fullname Placeholder'
}
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/license')
content = json_response['content']
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index 97b61b44856..83f678ad2cb 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -110,7 +110,7 @@ describe API::Projects do
it 'returns an array of projects' do
get api('/projects', current_user), params: filter
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |p| p['id'] }).to contain_exactly(*projects.map(&:id))
@@ -210,7 +210,7 @@ describe API::Projects do
get api('/projects', user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.map { |p| p['id'] }).not_to include(project.id)
end
@@ -242,7 +242,7 @@ describe API::Projects do
it "does not include statistics by default" do
get api('/projects', user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first).not_to include('statistics')
@@ -251,7 +251,7 @@ describe API::Projects do
it "includes statistics if requested" do
get api('/projects', user), params: { statistics: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first).to include 'statistics'
@@ -260,7 +260,7 @@ describe API::Projects do
it "does not include license by default" do
get api('/projects', user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first).not_to include('license', 'license_url')
@@ -269,7 +269,7 @@ describe API::Projects do
it "does not include license if requested" do
get api('/projects', user), params: { license: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first).not_to include('license', 'license_url')
@@ -314,7 +314,7 @@ describe API::Projects do
get api('/projects?simple=true', user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first.keys).to match_array expected_keys
@@ -327,7 +327,7 @@ describe API::Projects do
it 'returns archived projects' do
get api('/projects?archived=true', user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(Project.public_or_visible_to_user(user).where(archived: true).size)
@@ -337,7 +337,7 @@ describe API::Projects do
it 'returns non-archived projects' do
get api('/projects?archived=false', user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(Project.public_or_visible_to_user(user).where(archived: false).size)
@@ -347,7 +347,7 @@ describe API::Projects do
it 'returns every project' do
get api('/projects', user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |project| project['id'] }).to contain_exactly(*Project.public_or_visible_to_user(user).pluck(:id))
@@ -398,7 +398,7 @@ describe API::Projects do
it 'filters based on private visibility param' do
get api('/projects', user), params: { visibility: 'private' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |p| p['id'] }).to contain_exactly(project.id, project2.id, project3.id)
@@ -409,7 +409,7 @@ describe API::Projects do
get api('/projects', user), params: { visibility: 'internal' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |p| p['id'] }).to contain_exactly(project2.id)
@@ -418,7 +418,7 @@ describe API::Projects do
it 'filters based on public visibility param' do
get api('/projects', user), params: { visibility: 'public' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |p| p['id'] }).to contain_exactly(public_project.id)
@@ -431,7 +431,7 @@ describe API::Projects do
it 'filters case-insensitively by programming language' do
get api('/projects', user), params: { with_programming_language: 'javascript' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |p| p['id'] }).to contain_exactly(project3.id)
@@ -442,7 +442,7 @@ describe API::Projects do
it 'returns the correct order when sorted by id' do
get api('/projects', user), params: { order_by: 'id', sort: 'desc' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first['id']).to eq(project3.id)
@@ -453,7 +453,7 @@ describe API::Projects do
it 'returns an array of projects the user owns' do
get api('/projects', user4), params: { owned: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first['name']).to eq(project4.name)
@@ -472,7 +472,7 @@ describe API::Projects do
it 'returns the starred projects viewable by the user' do
get api('/projects', user3), params: { starred: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |project| project['id'] }).to contain_exactly(project.id, public_project.id)
@@ -494,7 +494,7 @@ describe API::Projects do
it 'returns only projects that satisfy all query parameters' do
get api('/projects', user), params: { visibility: 'public', owned: true, starred: true, search: 'gitlab' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(1)
@@ -513,7 +513,7 @@ describe API::Projects do
it 'returns only projects that satisfy all query parameters' do
get api('/projects', user), params: { visibility: 'public', membership: true, starred: true, search: 'gitlab' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(2)
@@ -532,7 +532,7 @@ describe API::Projects do
it 'returns an array of projects the user has at least developer access to' do
get api('/projects', user2), params: { min_access_level: 30 }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |project| project['id'] }).to contain_exactly(project2.id, project3.id)
@@ -587,7 +587,7 @@ describe API::Projects do
it 'contains only the first project with per_page = 1' do
get api('/projects', current_user), params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.map { |p| p['id'] }).to contain_exactly(public_project.id)
end
@@ -609,14 +609,14 @@ describe API::Projects do
it 'returns an empty array when the page does not have any records' do
get api('/projects', current_user), params: params.merge(id_after: Project.maximum(:id))
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to eq([])
end
it 'responds with 405 if order_by is different from id' do
get api('/projects', current_user), params: params.merge(order_by: :created_at)
- expect(response).to have_gitlab_http_status(405)
+ expect(response).to have_gitlab_http_status(:method_not_allowed)
end
end
@@ -634,7 +634,7 @@ describe API::Projects do
it 'contains only the last project with per_page = 1' do
get api('/projects', current_user), params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.map { |p| p['id'] }).to contain_exactly(project3.id)
end
@@ -672,14 +672,14 @@ describe API::Projects do
allow_any_instance_of(User).to receive(:projects_limit_left).and_return(0)
expect { post api('/projects', user2), params: { name: 'foo' } }
.to change { Project.count }.by(0)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
it 'creates new project without path but with name and returns 201' do
expect { post api('/projects', user), params: { name: 'Foo Project' } }
.to change { Project.count }.by(1)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
project = Project.first
@@ -690,7 +690,7 @@ describe API::Projects do
it 'creates new project without name but with path and returns 201' do
expect { post api('/projects', user), params: { path: 'foo_project' } }
.to change { Project.count }.by(1)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
project = Project.first
@@ -701,7 +701,7 @@ describe API::Projects do
it 'creates new project with name and path and returns 201' do
expect { post api('/projects', user), params: { path: 'path-project-Foo', name: 'Foo Project' } }
.to change { Project.count }.by(1)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
project = Project.first
@@ -712,12 +712,12 @@ describe API::Projects do
it 'creates last project before reaching project limit' do
allow_any_instance_of(User).to receive(:projects_limit_left).and_return(1)
post api('/projects', user2), params: { name: 'foo' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
it 'does not create new project without name or path and returns 400' do
expect { post api('/projects', user) }.not_to change { Project.count }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it "assigns attributes to project" do
@@ -739,7 +739,7 @@ describe API::Projects do
post api('/projects', user), params: project
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
project.each_pair do |k, v|
next if %i[has_external_issue_tracker issues_enabled merge_requests_enabled wiki_enabled storage_version].include?(k)
@@ -758,7 +758,7 @@ describe API::Projects do
expect { post api('/projects', user), params: { template_name: 'rails', name: 'rails-test' } }
.to change { Project.count }.by(1)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
project = Project.find(json_response['id'])
expect(project).to be_saved
@@ -769,7 +769,7 @@ describe API::Projects do
expect { post api('/projects', user), params: { template_name: 'unknown', name: 'rails-test' } }
.not_to change { Project.count }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['template_name']).to eq(["'unknown' is unknown or invalid"])
end
@@ -778,7 +778,7 @@ describe API::Projects do
expect { post api('/projects', user), params: project_params }
.not_to change { Project.count }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'sets a project as public' do
@@ -931,7 +931,7 @@ describe API::Projects do
post api('/projects', user), params: project
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'ignores import_url when it is nil' do
@@ -939,7 +939,7 @@ describe API::Projects do
post api('/projects', user), params: project
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
context 'when a visibility level is restricted' do
@@ -952,7 +952,7 @@ describe API::Projects do
it 'does not allow a non-admin to use a restricted visibility level' do
post api('/projects', user), params: project_param
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['visibility_level'].first).to(
match('restricted by your GitLab administrator')
)
@@ -972,14 +972,14 @@ describe API::Projects do
it 'returns error when user not found' do
get api('/users/0/projects/')
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 User Not Found')
end
it 'returns projects filtered by user id' do
get api("/users/#{user4.id}/projects/", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |project| project['id'] }).to contain_exactly(public_project.id)
@@ -991,7 +991,7 @@ describe API::Projects do
it 'only returns projects with id_after filter given' do
get api("/users/#{user4.id}/projects?id_after=#{public_project.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |project| project['id'] }).to contain_exactly(another_public_project.id)
@@ -1000,7 +1000,7 @@ describe API::Projects do
it 'returns both projects without an id_after filter' do
get api("/users/#{user4.id}/projects", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |project| project['id'] }).to contain_exactly(public_project.id, another_public_project.id)
@@ -1013,7 +1013,7 @@ describe API::Projects do
it 'only returns projects with id_before filter given' do
get api("/users/#{user4.id}/projects?id_before=#{another_public_project.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |project| project['id'] }).to contain_exactly(public_project.id)
@@ -1022,7 +1022,7 @@ describe API::Projects do
it 'returns both projects without an id_before filter' do
get api("/users/#{user4.id}/projects", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |project| project['id'] }).to contain_exactly(public_project.id, another_public_project.id)
@@ -1035,7 +1035,7 @@ describe API::Projects do
it 'only returns projects with id matching the range' do
get api("/users/#{user4.id}/projects?id_after=#{more_projects.first.id}&id_before=#{more_projects.last.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |project| project['id'] }).to contain_exactly(*more_projects[1..-2].map(&:id))
@@ -1045,7 +1045,7 @@ describe API::Projects do
it 'returns projects filtered by username' do
get api("/users/#{user4.username}/projects/", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |project| project['id'] }).to contain_exactly(public_project.id)
@@ -1059,7 +1059,7 @@ describe API::Projects do
get api("/users/#{user4.id}/projects/", user2), params: { min_access_level: 30 }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |project| project['id'] }).to contain_exactly(private_project1.id)
@@ -1071,7 +1071,7 @@ describe API::Projects do
it 'filters case-insensitively by programming language' do
get api('/projects', user), params: { with_programming_language: 'ruby' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |p| p['id'] }).to contain_exactly(project.id)
@@ -1087,14 +1087,14 @@ describe API::Projects do
it 'returns error when user not found' do
get api('/users/9999/starred_projects/')
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 User Not Found')
end
it 'returns projects filtered by user' do
get api("/users/#{user3.id}/starred_projects/", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |project| project['id'] }).to contain_exactly(project.id, project2.id, project3.id)
@@ -1104,7 +1104,7 @@ describe API::Projects do
describe 'POST /projects/user/:id' do
it 'creates new project without path but with name and returns 201' do
expect { post api("/projects/user/#{user.id}", admin), params: { name: 'Foo Project' } }.to change { Project.count }.by(1)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
project = Project.find(json_response['id'])
@@ -1115,7 +1115,7 @@ describe API::Projects do
it 'creates new project with name and path and returns 201' do
expect { post api("/projects/user/#{user.id}", admin), params: { path: 'path-project-Foo', name: 'Foo Project' } }
.to change { Project.count }.by(1)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
project = Project.find(json_response['id'])
@@ -1127,7 +1127,7 @@ describe API::Projects do
expect { post api("/projects/user/#{user.id}", admin) }
.not_to change { Project.count }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('name is missing')
end
@@ -1142,7 +1142,7 @@ describe API::Projects do
post api("/projects/user/#{user.id}", admin), params: project
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
project.each_pair do |k, v|
next if %i[has_external_issue_tracker path storage_version].include?(k)
@@ -1156,7 +1156,7 @@ describe API::Projects do
post api("/projects/user/#{user.id}", admin), params: project
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['visibility']).to eq('public')
end
@@ -1165,7 +1165,7 @@ describe API::Projects do
post api("/projects/user/#{user.id}", admin), params: project
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['visibility']).to eq('internal')
end
@@ -1246,10 +1246,12 @@ describe API::Projects do
it "uploads the file and returns its info" do
post api("/projects/#{project.id}/uploads", user), params: { file: fixture_file_upload("spec/fixtures/dk.png", "image/png") }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['alt']).to eq("dk")
expect(json_response['url']).to start_with("/uploads/")
expect(json_response['url']).to end_with("/dk.png")
+
+ expect(json_response['full_path']).to start_with("/#{project.namespace.path}/#{project.path}/uploads")
end
end
@@ -1260,7 +1262,7 @@ describe API::Projects do
get api("/projects/#{private_project.id}")
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns public projects' do
@@ -1268,7 +1270,7 @@ describe API::Projects do
get api("/projects/#{public_project.id}")
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to eq(public_project.id)
expect(json_response['description']).to eq(public_project.description)
expect(json_response['default_branch']).to eq(public_project.default_branch)
@@ -1283,7 +1285,7 @@ describe API::Projects do
get api("/projects/#{fork.id}")
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['forked_from_project']).to be_nil
end
end
@@ -1295,7 +1297,7 @@ describe API::Projects do
it 'hides protected attributes of private repositories if user is not a member' do
get api("/projects/#{project.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
protected_attributes.each do |attribute|
expect(json_response.keys).not_to include(attribute)
end
@@ -1306,7 +1308,7 @@ describe API::Projects do
get api("/projects/#{project.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
protected_attributes.each do |attribute|
expect(json_response.keys).to include(attribute)
end
@@ -1323,7 +1325,7 @@ describe API::Projects do
get api("/projects/#{project.id}", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to eq(project.id)
expect(json_response['description']).to eq(project.description)
expect(json_response['default_branch']).to eq(project.default_branch)
@@ -1374,7 +1376,7 @@ describe API::Projects do
get api("/projects/#{project.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to eq(project.id)
expect(json_response['description']).to eq(project.description)
expect(json_response['default_branch']).to eq(project.default_branch)
@@ -1448,20 +1450,20 @@ describe API::Projects do
it 'returns a project by path name' do
get api("/projects/#{project.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq(project.name)
end
it 'returns a 404 error if not found' do
get api('/projects/42', user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Project Not Found')
end
it 'returns a 404 error if user is not a member' do
other_user = create(:user)
get api("/projects/#{project.id}", other_user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'handles users with dots' do
@@ -1469,14 +1471,14 @@ describe API::Projects do
project = create(:project, creator_id: dot_user.id, namespace: dot_user.namespace)
get api("/projects/#{CGI.escape(project.full_path)}", dot_user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq(project.name)
end
it 'exposes namespace fields' do
get api("/projects/#{project.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['namespace']).to eq({
'id' => user.namespace.id,
'name' => user.namespace.name,
@@ -1492,14 +1494,14 @@ describe API::Projects do
it "does not include license fields by default" do
get api("/projects/#{project.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).not_to include('license', 'license_url')
end
it 'includes license fields when requested' do
get api("/projects/#{project.id}", user), params: { license: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['license']).to eq({
'key' => project.repository.license.key,
'name' => project.repository.license.name,
@@ -1512,14 +1514,14 @@ describe API::Projects do
it "does not include statistics by default" do
get api("/projects/#{project.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).not_to include 'statistics'
end
it "includes statistics if requested" do
get api("/projects/#{project.id}", user), params: { statistics: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to include 'statistics'
end
@@ -1529,7 +1531,7 @@ describe API::Projects do
it "does not include statistics if user is not a member" do
get api("/projects/#{project.id}", user), params: { statistics: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).not_to include 'statistics'
end
@@ -1538,7 +1540,7 @@ describe API::Projects do
get api("/projects/#{project.id}", user), params: { statistics: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to include 'statistics'
end
@@ -1548,7 +1550,7 @@ describe API::Projects do
get api("/projects/#{project.id}", user), params: { statistics: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to include 'statistics'
end
end
@@ -1556,14 +1558,14 @@ describe API::Projects do
it "includes import_error if user can admin project" do
get api("/projects/#{project.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to include("import_error")
end
it "does not include import_error if user cannot admin project" do
get api("/projects/#{project.id}", user3)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).not_to include("import_error")
end
@@ -1572,7 +1574,7 @@ describe API::Projects do
get api("/projects/#{project.id}", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Project Not Found')
end
@@ -1614,7 +1616,7 @@ describe API::Projects do
get api("/projects/#{fork.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['forked_from_project']).to include('id' => project.id)
end
@@ -1625,7 +1627,7 @@ describe API::Projects do
get api("/projects/#{fork.id}", fork_user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['forked_from_project']).to be_nil
end
end
@@ -1639,7 +1641,7 @@ describe API::Projects do
it 'contains permission information' do
get api("/projects", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.first['permissions']['project_access']['access_level'])
.to eq(Gitlab::Access::MAINTAINER)
expect(json_response.first['permissions']['group_access']).to be_nil
@@ -1651,7 +1653,7 @@ describe API::Projects do
project.add_maintainer(user)
get api("/projects/#{project.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['permissions']['project_access']['access_level'])
.to eq(Gitlab::Access::MAINTAINER)
expect(json_response['permissions']['group_access']).to be_nil
@@ -1668,7 +1670,7 @@ describe API::Projects do
it 'sets the owner and returns 200' do
get api("/projects/#{project2.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['permissions']['project_access']).to be_nil
expect(json_response['permissions']['group_access']['access_level'])
.to eq(Gitlab::Access::OWNER)
@@ -1687,7 +1689,7 @@ describe API::Projects do
it 'sets group access and returns 200' do
get api("/projects/#{project2.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['permissions']['project_access']).to be_nil
expect(json_response['permissions']['group_access']['access_level'])
.to eq(Gitlab::Access::OWNER)
@@ -1701,7 +1703,7 @@ describe API::Projects do
it 'sets the maximum group access and returns 200' do
get api("/projects/#{project2.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['permissions']['project_access']).to be_nil
expect(json_response['permissions']['group_access']['access_level'])
.to eq(Gitlab::Access::OWNER)
@@ -1718,7 +1720,7 @@ describe API::Projects do
it 'returns group web_url and avatar_url' do
get api("/projects/#{project.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
group_data = json_response['namespace']
expect(group_data['web_url']).to eq(group.web_url)
@@ -1733,7 +1735,7 @@ describe API::Projects do
it 'returns user web_url and avatar_url' do
get api("/projects/#{project.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
user_data = json_response['namespace']
expect(user_data['web_url']).to eq("http://localhost/#{user.username}")
@@ -1749,6 +1751,27 @@ describe API::Projects do
subject { get api("/projects/#{project.id}", user) }
end
+
+ describe 'repository_storage attribute' do
+ before do
+ get api("/projects/#{project.id}", user)
+ end
+
+ context 'when authenticated as an admin' do
+ let(:user) { create(:admin) }
+
+ it 'returns repository_storage attribute' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['repository_storage']).to eq(project.repository_storage)
+ end
+ end
+
+ context 'when authenticated as a regular user' do
+ it 'does not return repository_storage attribute' do
+ expect(json_response).not_to have_key('repository_storage')
+ end
+ end
+ end
end
describe 'GET /projects/:id/users' do
@@ -1758,7 +1781,7 @@ describe API::Projects do
user = project.namespace.owner
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(1)
@@ -1787,7 +1810,7 @@ describe API::Projects do
it 'returns a 404 error if not found' do
get api('/projects/42/users', user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Project Not Found')
end
@@ -1796,7 +1819,7 @@ describe API::Projects do
get api("/projects/#{project.id}/users", other_user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'filters out users listed in skip_users' do
@@ -1805,7 +1828,7 @@ describe API::Projects do
get api("/projects/#{project.id}/users?skip_users=#{user.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.size).to eq(1)
expect(json_response[0]['id']).to eq(other_user.id)
end
@@ -1826,7 +1849,7 @@ describe API::Projects do
it 'denies project to be forked from an existing project' do
post api("/projects/#{project_fork_target.id}/fork/#{project_fork_source.id}", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -1845,7 +1868,7 @@ describe API::Projects do
post api("/projects/#{project_fork_target.id}/fork/#{project_fork_source.id}", user)
project_fork_target.reload
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(project_fork_target.forked_from_project.id).to eq(project_fork_source.id)
expect(project_fork_target.fork_network_member).to be_present
expect(project_fork_target).to be_forked
@@ -1854,7 +1877,7 @@ describe API::Projects do
it 'denies project to be forked from a private project' do
post api("/projects/#{project_fork_target.id}/fork/#{private_project_fork_source.id}", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -1864,13 +1887,13 @@ describe API::Projects do
post api("/projects/#{project_fork_target.id}/fork/#{project_fork_source.id}", admin)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
it 'allows project to be forked from a private project' do
post api("/projects/#{project_fork_target.id}/fork/#{private_project_fork_source.id}", admin)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
it 'refreshes the forks count cache' do
@@ -1881,7 +1904,7 @@ describe API::Projects do
it 'fails if forked_from project which does not exist' do
post api("/projects/#{project_fork_target.id}/fork/0", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'fails with 409 if already forked' do
@@ -1892,7 +1915,7 @@ describe API::Projects do
post api("/projects/#{project_fork_target.id}/fork/#{other_project_fork_source.id}", admin)
project_fork_target.reload
- expect(response).to have_gitlab_http_status(409)
+ expect(response).to have_gitlab_http_status(:conflict)
expect(project_fork_target.forked_from_project.id).to eq(project_fork_source.id)
expect(project_fork_target).to be_forked
end
@@ -1902,7 +1925,7 @@ describe API::Projects do
describe 'DELETE /projects/:id/fork' do
it "is not visible to users outside group" do
delete api("/projects/#{project_fork_target.id}/fork", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
context 'when users belong to project group' do
@@ -1924,7 +1947,7 @@ describe API::Projects do
it 'makes forked project unforked' do
delete api("/projects/#{project_fork_target.id}/fork", admin)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
project_fork_target.reload
expect(project_fork_target.forked_from_project).to be_nil
expect(project_fork_target).not_to be_forked
@@ -1937,13 +1960,13 @@ describe API::Projects do
it 'is forbidden to non-owner users' do
delete api("/projects/#{project_fork_target.id}/fork", user2)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'is idempotent if not forked' do
expect(project_fork_target.forked_from_project).to be_nil
delete api("/projects/#{project_fork_target.id}/fork", admin)
- expect(response).to have_gitlab_http_status(304)
+ expect(response).to have_gitlab_http_status(:not_modified)
expect(project_fork_target.reload.forked_from_project).to be_nil
end
end
@@ -1973,7 +1996,7 @@ describe API::Projects do
it 'returns the forks' do
get api("/projects/#{project_fork_source.id}/forks", member)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response.length).to eq(1)
expect(json_response[0]['name']).to eq(private_fork.name)
@@ -1984,7 +2007,7 @@ describe API::Projects do
it 'returns an empty array' do
get api("/projects/#{project_fork_source.id}/forks", non_member)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response.length).to eq(0)
end
@@ -1995,7 +2018,7 @@ describe API::Projects do
it 'returns an empty array' do
get api("/projects/#{project_fork_source.id}/forks")
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response.length).to eq(0)
end
@@ -2017,7 +2040,7 @@ describe API::Projects do
post api("/projects/#{project.id}/share", user), params: { group_id: group.id, group_access: Gitlab::Access::DEVELOPER, expires_at: expires_at }
end.to change { ProjectGroupLink.count }.by(1)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['group_id']).to eq(group.id)
expect(json_response['group_access']).to eq(Gitlab::Access::DEVELOPER)
expect(json_response['expires_at']).to eq(expires_at.to_s)
@@ -2025,18 +2048,18 @@ describe API::Projects do
it "returns a 400 error when group id is not given" do
post api("/projects/#{project.id}/share", user), params: { group_access: Gitlab::Access::DEVELOPER }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it "returns a 400 error when access level is not given" do
post api("/projects/#{project.id}/share", user), params: { group_id: group.id }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it "returns a 400 error when sharing is disabled" do
project.namespace.update(share_with_group_lock: true)
post api("/projects/#{project.id}/share", user), params: { group_id: group.id, group_access: Gitlab::Access::DEVELOPER }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns a 404 error when user cannot read group' do
@@ -2044,19 +2067,19 @@ describe API::Projects do
post api("/projects/#{project.id}/share", user), params: { group_id: private_group.id, group_access: Gitlab::Access::DEVELOPER }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns a 404 error when group does not exist' do
post api("/projects/#{project.id}/share", user), params: { group_id: 1234, group_access: Gitlab::Access::DEVELOPER }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it "returns a 400 error when wrong params passed" do
post api("/projects/#{project.id}/share", user), params: { group_id: group.id, group_access: 1234 }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq 'group_access does not have a valid value'
end
@@ -2066,7 +2089,7 @@ describe API::Projects do
post api("/projects/#{project.id}/share", user), params: { group_id: group.id, group_access: Gitlab::Access::DEVELOPER }
- expect(response).to have_gitlab_http_status(409)
+ expect(response).to have_gitlab_http_status(:conflict)
end
end
@@ -2081,7 +2104,7 @@ describe API::Projects do
it 'returns 204 when deleting a group share' do
delete api("/projects/#{project.id}/share/#{group.id}", user)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
expect(project.project_group_links).to be_empty
end
@@ -2093,19 +2116,19 @@ describe API::Projects do
it 'returns a 400 when group id is not an integer' do
delete api("/projects/#{project.id}/share/foo", user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns a 404 error when group link does not exist' do
delete api("/projects/#{project.id}/share/1234", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns a 404 error when project does not exist' do
delete api("/projects/123/share/1234", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -2126,7 +2149,7 @@ describe API::Projects do
put api("/projects/#{project.id}", user), params: project_param
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to match('at least one parameter must be provided')
end
@@ -2136,7 +2159,7 @@ describe API::Projects do
put api("/projects/#{project.id}"), params: project_param
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -2146,7 +2169,7 @@ describe API::Projects do
put api("/projects/#{project.id}", user), params: project_param
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
project_param.each_pair do |k, v|
expect(json_response[k.to_s]).to eq(v)
@@ -2158,7 +2181,7 @@ describe API::Projects do
put api("/projects/#{project3.id}", user), params: project_param
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
project_param.each_pair do |k, v|
expect(json_response[k.to_s]).to eq(v)
@@ -2171,7 +2194,7 @@ describe API::Projects do
put api("/projects/#{project3.id}", user), params: project_param
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
project_param.each_pair do |k, v|
expect(json_response[k.to_s]).to eq(v)
@@ -2185,7 +2208,7 @@ describe API::Projects do
put api("/projects/#{project.id}", user), params: project_param
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['name']).to eq(['has already been taken'])
end
@@ -2194,7 +2217,7 @@ describe API::Projects do
put api("/projects/#{project.id}", user), params: project_param
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['request_access_enabled']).to eq(false)
end
@@ -2203,7 +2226,7 @@ describe API::Projects do
put api("/projects/#{project3.id}", user), params: project_param
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
project_param.each_pair do |k, v|
expect(json_response[k.to_s]).to eq(v)
@@ -2215,7 +2238,7 @@ describe API::Projects do
put api("/projects/#{project3.id}", user), params: project_param
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
project_param.each_pair do |k, v|
expect(json_response[k.to_s]).to eq(v)
@@ -2227,7 +2250,7 @@ describe API::Projects do
put api("/projects/#{project3.id}", user), params: project_param
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['builds_access_level']).to eq('private')
end
@@ -2247,7 +2270,7 @@ describe API::Projects do
put api("/projects/#{project3.id}", user), params: project_param
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['emails_disabled']).to eq(true)
end
@@ -2257,7 +2280,7 @@ describe API::Projects do
put api("/projects/#{project3.id}", user), params: project_param
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['build_git_strategy']).to eq('clone')
end
@@ -2267,7 +2290,7 @@ describe API::Projects do
put api("/projects/#{project3.id}", user), params: project_param
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'updates merge_method' do
@@ -2275,7 +2298,7 @@ describe API::Projects do
put api("/projects/#{project3.id}", user), params: project_param
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
project_param.each_pair do |k, v|
expect(json_response[k.to_s]).to eq(v)
@@ -2287,7 +2310,7 @@ describe API::Projects do
put api("/projects/#{project3.id}", user), params: project_param
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'updates avatar' do
@@ -2298,7 +2321,7 @@ describe API::Projects do
put api("/projects/#{project3.id}", user), params: project_param
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['avatar_url']).to eq('http://localhost/uploads/'\
'-/system/project/avatar/'\
"#{project3.id}/banana_sample.gif")
@@ -2309,7 +2332,7 @@ describe API::Projects do
put api("/projects/#{project3.id}", user), params: project_param
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['auto_devops_deploy_strategy']).to eq('timed_incremental')
end
@@ -2319,7 +2342,7 @@ describe API::Projects do
put api("/projects/#{project3.id}", user), params: project_param
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['auto_devops_enabled']).to eq(false)
end
@@ -2329,7 +2352,7 @@ describe API::Projects do
it 'updates path' do
project_param = { path: 'bar' }
put api("/projects/#{project3.id}", user4), params: project_param
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
project_param.each_pair do |k, v|
expect(json_response[k.to_s]).to eq(v)
end
@@ -2345,7 +2368,7 @@ describe API::Projects do
description: 'new description' }
put api("/projects/#{project3.id}", user4), params: project_param
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
project_param.each_pair do |k, v|
expect(json_response[k.to_s]).to eq(v)
end
@@ -2354,20 +2377,20 @@ describe API::Projects do
it 'does not update path to existing path' do
project_param = { path: project.path }
put api("/projects/#{project3.id}", user4), params: project_param
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['path']).to eq(['has already been taken'])
end
it 'does not update name' do
project_param = { name: 'bar' }
put api("/projects/#{project3.id}", user4), params: project_param
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'does not update visibility_level' do
project_param = { visibility: 'public' }
put api("/projects/#{project3.id}", user4), params: project_param
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'updates container_expiration_policy' do
@@ -2380,7 +2403,7 @@ describe API::Projects do
put api("/projects/#{project3.id}", user4), params: project_param
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['container_expiration_policy']['cadence']).to eq('1month')
expect(json_response['container_expiration_policy']['keep_n']).to eq(1)
@@ -2397,7 +2420,51 @@ describe API::Projects do
description: 'new description',
request_access_enabled: true }
put api("/projects/#{project.id}", user3), params: project_param
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when updating repository storage' do
+ let(:unknown_storage) { 'new-storage' }
+ let(:new_project) { create(:project, :repository, namespace: user.namespace) }
+
+ context 'as a user' do
+ it 'returns 200 but does not change repository_storage' do
+ expect do
+ Sidekiq::Testing.fake! do
+ put(api("/projects/#{new_project.id}", user), params: { repository_storage: unknown_storage, issues_enabled: false })
+ end
+ end.not_to change(ProjectUpdateRepositoryStorageWorker.jobs, :size)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['issues_enabled']).to eq(false)
+ expect(new_project.reload.repository.storage).to eq('default')
+ end
+ end
+
+ context 'as an admin' do
+ include_context 'custom session'
+
+ let(:admin) { create(:admin) }
+
+ it 'returns 500 when repository storage is unknown' do
+ put(api("/projects/#{new_project.id}", admin), params: { repository_storage: unknown_storage })
+
+ expect(response).to have_gitlab_http_status(:internal_server_error)
+ expect(json_response['message']).to match('ArgumentError')
+ end
+
+ it 'returns 200 when repository storage has changed' do
+ stub_storage_settings('test_second_storage' => { 'path' => TestEnv::SECOND_STORAGE_PATH })
+
+ expect do
+ Sidekiq::Testing.fake! do
+ put(api("/projects/#{new_project.id}", admin), params: { repository_storage: 'test_second_storage' })
+ end
+ end.to change(ProjectUpdateRepositoryStorageWorker.jobs, :size).by(1)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
end
end
end
@@ -2407,7 +2474,7 @@ describe API::Projects do
it 'archives the project' do
post api("/projects/#{project.id}/archive", user)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['archived']).to be_truthy
end
end
@@ -2420,7 +2487,7 @@ describe API::Projects do
it 'remains archived' do
post api("/projects/#{project.id}/archive", user)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['archived']).to be_truthy
end
end
@@ -2433,7 +2500,7 @@ describe API::Projects do
it 'rejects the action' do
post api("/projects/#{project.id}/archive", user3)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -2443,7 +2510,7 @@ describe API::Projects do
it 'remains unarchived' do
post api("/projects/#{project.id}/unarchive", user)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['archived']).to be_falsey
end
end
@@ -2456,7 +2523,7 @@ describe API::Projects do
it 'unarchives the project' do
post api("/projects/#{project.id}/unarchive", user)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['archived']).to be_falsey
end
end
@@ -2469,7 +2536,7 @@ describe API::Projects do
it 'rejects the action' do
post api("/projects/#{project.id}/unarchive", user3)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -2479,7 +2546,7 @@ describe API::Projects do
it 'stars the project' do
expect { post api("/projects/#{project.id}/star", user) }.to change { project.reload.star_count }.by(1)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['star_count']).to eq(1)
end
end
@@ -2493,7 +2560,7 @@ describe API::Projects do
it 'does not modify the star count' do
expect { post api("/projects/#{project.id}/star", user) }.not_to change { project.reload.star_count }
- expect(response).to have_gitlab_http_status(304)
+ expect(response).to have_gitlab_http_status(:not_modified)
end
end
end
@@ -2508,7 +2575,7 @@ describe API::Projects do
it 'unstars the project' do
expect { post api("/projects/#{project.id}/unstar", user) }.to change { project.reload.star_count }.by(-1)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['star_count']).to eq(0)
end
end
@@ -2517,7 +2584,7 @@ describe API::Projects do
it 'does not modify the star count' do
expect { post api("/projects/#{project.id}/unstar", user) }.not_to change { project.reload.star_count }
- expect(response).to have_gitlab_http_status(304)
+ expect(response).to have_gitlab_http_status(:not_modified)
end
end
end
@@ -2527,7 +2594,7 @@ describe API::Projects do
it 'returns an array of starrers' do
get api("/projects/#{public_project.id}/starrers", current_user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response[0]['starred_since']).to be_present
@@ -2642,7 +2709,7 @@ describe API::Projects do
it 'removes project' do
delete api("/projects/#{project.id}", user)
- expect(response).to have_gitlab_http_status(202)
+ expect(response).to have_gitlab_http_status(:accepted)
expect(json_response['message']).to eql('202 Accepted')
end
@@ -2655,17 +2722,17 @@ describe API::Projects do
user3 = create(:user)
project.add_developer(user3)
delete api("/projects/#{project.id}", user3)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'does not remove a non existing project' do
delete api('/projects/1328', user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'does not remove a project not attached to user' do
delete api("/projects/#{project.id}", user2)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -2673,13 +2740,13 @@ describe API::Projects do
it 'removes any existing project' do
delete api("/projects/#{project.id}", admin)
- expect(response).to have_gitlab_http_status(202)
+ expect(response).to have_gitlab_http_status(:accepted)
expect(json_response['message']).to eql('202 Accepted')
end
it 'does not remove a non existing project' do
delete api('/projects/1328', admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it_behaves_like '412 response' do
@@ -2698,20 +2765,14 @@ describe API::Projects do
create(:project, :repository, creator: user, namespace: user.namespace)
end
- let(:group) { create(:group) }
- let(:group2) do
- group = create(:group, name: 'group2_name')
- group.add_maintainer(user2)
- group
- end
-
- let(:group3) do
- group = create(:group, name: 'group3_name', parent: group2)
- group.add_owner(user2)
- group
- end
+ let(:group) { create(:group, :public) }
+ let(:group2) { create(:group, name: 'group2_name') }
+ let(:group3) { create(:group, name: 'group3_name', parent: group2) }
before do
+ group.add_guest(user2)
+ group2.add_maintainer(user2)
+ group3.add_owner(user2)
project.add_reporter(user2)
project2.add_reporter(user2)
end
@@ -2720,7 +2781,7 @@ describe API::Projects do
it 'forks if user has sufficient access to project' do
post api("/projects/#{project.id}/fork", user2)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq(project.name)
expect(json_response['path']).to eq(project.path)
expect(json_response['owner']['id']).to eq(user2.id)
@@ -2733,7 +2794,7 @@ describe API::Projects do
it 'forks if user is admin' do
post api("/projects/#{project.id}/fork", admin)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq(project.name)
expect(json_response['path']).to eq(project.path)
expect(json_response['owner']['id']).to eq(admin.id)
@@ -2747,14 +2808,17 @@ describe API::Projects do
new_user = create(:user)
post api("/projects/#{project.id}/fork", new_user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Project Not Found')
end
it 'fails if forked project exists in the user namespace' do
- post api("/projects/#{project.id}/fork", user)
+ new_project = create(:project, name: project.name, path: project.path)
+ new_project.add_reporter(user)
- expect(response).to have_gitlab_http_status(409)
+ post api("/projects/#{new_project.id}/fork", user)
+
+ expect(response).to have_gitlab_http_status(:conflict)
expect(json_response['message']['name']).to eq(['has already been taken'])
expect(json_response['message']['path']).to eq(['has already been taken'])
end
@@ -2762,56 +2826,116 @@ describe API::Projects do
it 'fails if project to fork from does not exist' do
post api('/projects/424242/fork', user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Project Not Found')
end
it 'forks with explicit own user namespace id' do
post api("/projects/#{project.id}/fork", user2), params: { namespace: user2.namespace.id }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['owner']['id']).to eq(user2.id)
end
it 'forks with explicit own user name as namespace' do
post api("/projects/#{project.id}/fork", user2), params: { namespace: user2.username }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['owner']['id']).to eq(user2.id)
end
it 'forks to another user when admin' do
post api("/projects/#{project.id}/fork", admin), params: { namespace: user2.username }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['owner']['id']).to eq(user2.id)
end
it 'fails if trying to fork to another user when not admin' do
post api("/projects/#{project.id}/fork", user2), params: { namespace: admin.namespace.id }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'fails if trying to fork to non-existent namespace' do
post api("/projects/#{project.id}/fork", user2), params: { namespace: 42424242 }
- expect(response).to have_gitlab_http_status(404)
- expect(json_response['message']).to eq('404 Target Namespace Not Found')
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message']).to eq('404 Namespace Not Found')
end
it 'forks to owned group' do
post api("/projects/#{project.id}/fork", user2), params: { namespace: group2.name }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['namespace']['name']).to eq(group2.name)
end
+ context 'when namespace_id is specified' do
+ shared_examples_for 'forking to specified namespace_id' do
+ it 'forks to specified namespace_id' do
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['owner']['id']).to eq(user2.id)
+ expect(json_response['namespace']['id']).to eq(user2.namespace.id)
+ end
+ end
+
+ context 'and namespace_id is specified alone' do
+ before do
+ post api("/projects/#{project.id}/fork", user2), params: { namespace_id: user2.namespace.id }
+ end
+
+ it_behaves_like 'forking to specified namespace_id'
+ end
+
+ context 'and namespace_id and namespace are both specified' do
+ before do
+ post api("/projects/#{project.id}/fork", user2), params: { namespace_id: user2.namespace.id, namespace: admin.namespace.id }
+ end
+
+ it_behaves_like 'forking to specified namespace_id'
+ end
+
+ context 'and namespace_id and namespace_path are both specified' do
+ before do
+ post api("/projects/#{project.id}/fork", user2), params: { namespace_id: user2.namespace.id, namespace_path: admin.namespace.path }
+ end
+
+ it_behaves_like 'forking to specified namespace_id'
+ end
+ end
+
+ context 'when namespace_path is specified' do
+ shared_examples_for 'forking to specified namespace_path' do
+ it 'forks to specified namespace_path' do
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['owner']['id']).to eq(user2.id)
+ expect(json_response['namespace']['path']).to eq(user2.namespace.path)
+ end
+ end
+
+ context 'and namespace_path is specified alone' do
+ before do
+ post api("/projects/#{project.id}/fork", user2), params: { namespace_path: user2.namespace.path }
+ end
+
+ it_behaves_like 'forking to specified namespace_path'
+ end
+
+ context 'and namespace_path and namespace are both specified' do
+ before do
+ post api("/projects/#{project.id}/fork", user2), params: { namespace_path: user2.namespace.path, namespace: admin.namespace.path }
+ end
+
+ it_behaves_like 'forking to specified namespace_path'
+ end
+ end
+
it 'forks to owned subgroup' do
full_path = "#{group2.path}/#{group3.path}"
post api("/projects/#{project.id}/fork", user2), params: { namespace: full_path }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['namespace']['name']).to eq(group3.name)
expect(json_response['namespace']['full_path']).to eq(full_path)
end
@@ -2819,20 +2943,21 @@ describe API::Projects do
it 'fails to fork to not owned group' do
post api("/projects/#{project.id}/fork", user2), params: { namespace: group.name }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message']).to eq("404 Target Namespace Not Found")
end
it 'forks to not owned group when admin' do
post api("/projects/#{project.id}/fork", admin), params: { namespace: group.name }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['namespace']['name']).to eq(group.name)
end
it 'accepts a path for the target project' do
post api("/projects/#{project.id}/fork", user2), params: { path: 'foobar' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq(project.name)
expect(json_response['path']).to eq('foobar')
expect(json_response['owner']['id']).to eq(user2.id)
@@ -2846,14 +2971,14 @@ describe API::Projects do
post api("/projects/#{project.id}/fork", user2), params: { path: 'foobar' }
post api("/projects/#{project2.id}/fork", user2), params: { path: 'foobar' }
- expect(response).to have_gitlab_http_status(409)
+ expect(response).to have_gitlab_http_status(:conflict)
expect(json_response['message']['path']).to eq(['has already been taken'])
end
it 'accepts a name for the target project' do
post api("/projects/#{project.id}/fork", user2), params: { name: 'My Random Project' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq('My Random Project')
expect(json_response['path']).to eq(project.path)
expect(json_response['owner']['id']).to eq(user2.id)
@@ -2867,7 +2992,27 @@ describe API::Projects do
post api("/projects/#{project.id}/fork", user2), params: { name: 'My Random Project' }
post api("/projects/#{project2.id}/fork", user2), params: { name: 'My Random Project' }
- expect(response).to have_gitlab_http_status(409)
+ expect(response).to have_gitlab_http_status(:conflict)
+ expect(json_response['message']['name']).to eq(['has already been taken'])
+ end
+
+ it 'forks to the same namespace with alternative path and name' do
+ post api("/projects/#{project.id}/fork", user), params: { path: 'path_2', name: 'name_2' }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['name']).to eq('name_2')
+ expect(json_response['path']).to eq('path_2')
+ expect(json_response['owner']['id']).to eq(user.id)
+ expect(json_response['namespace']['id']).to eq(user.namespace.id)
+ expect(json_response['forked_from_project']['id']).to eq(project.id)
+ expect(json_response['import_status']).to eq('scheduled')
+ end
+
+ it 'fails to fork to the same namespace without alternative path and name' do
+ post api("/projects/#{project.id}/fork", user)
+
+ expect(response).to have_gitlab_http_status(:conflict)
+ expect(json_response['message']['path']).to eq(['has already been taken'])
expect(json_response['message']['name']).to eq(['has already been taken'])
end
end
@@ -2876,7 +3021,7 @@ describe API::Projects do
it 'returns authentication error' do
post api("/projects/#{project.id}/fork")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
expect(json_response['message']).to eq('401 Unauthorized')
end
end
@@ -2890,8 +3035,7 @@ describe API::Projects do
it 'denies project to be forked' do
post api("/projects/#{project.id}/fork", admin)
- expect(response).to have_gitlab_http_status(409)
- expect(json_response['message']['forked_from_project_id']).to eq(['is forbidden'])
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -2909,7 +3053,7 @@ describe API::Projects do
post api("/projects/#{project.id}/housekeeping", user)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
context 'when housekeeping lease is taken' do
@@ -2918,7 +3062,7 @@ describe API::Projects do
post api("/projects/#{project.id}/housekeeping", user)
- expect(response).to have_gitlab_http_status(409)
+ expect(response).to have_gitlab_http_status(:conflict)
expect(json_response['message']).to match(/Somebody already triggered housekeeping for this project/)
end
end
@@ -2932,7 +3076,7 @@ describe API::Projects do
it 'returns forbidden error' do
post api("/projects/#{project.id}/housekeeping", user3)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -2940,7 +3084,7 @@ describe API::Projects do
it 'returns authentication error' do
post api("/projects/#{project.id}/housekeeping")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -2954,25 +3098,25 @@ describe API::Projects do
put api("/projects/#{project.id}/transfer", user), params: { namespace: group.id }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'fails when transferring to a non owned namespace' do
put api("/projects/#{project.id}/transfer", user), params: { namespace: group.id }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'fails when transferring to an unknown namespace' do
put api("/projects/#{project.id}/transfer", user), params: { namespace: 'unknown' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'fails on missing namespace' do
put api("/projects/#{project.id}/transfer", user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -2987,7 +3131,7 @@ describe API::Projects do
it 'fails transferring the project to the target namespace' do
put api("/projects/#{project.id}/transfer", user), params: { namespace: group.id }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
diff --git a/spec/requests/api/protected_branches_spec.rb b/spec/requests/api/protected_branches_spec.rb
index 8499a165d8b..9203e0ec819 100644
--- a/spec/requests/api/protected_branches_spec.rb
+++ b/spec/requests/api/protected_branches_spec.rb
@@ -19,7 +19,7 @@ describe API::ProtectedBranches do
it 'returns the protected branches' do
get api(route, user), params: params.merge(per_page: 100)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
@@ -66,7 +66,7 @@ describe API::ProtectedBranches do
it 'returns the protected branch' do
get api(route, user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq(branch_name)
expect(json_response['push_access_levels'][0]['access_level']).to eq(::Gitlab::Access::MAINTAINER)
expect(json_response['merge_access_levels'][0]['access_level']).to eq(::Gitlab::Access::MAINTAINER)
@@ -118,7 +118,7 @@ describe API::ProtectedBranches do
let(:post_endpoint) { api("/projects/#{project.id}/protected_branches", user) }
def expect_protection_to_be_successful
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq(branch_name)
end
@@ -130,7 +130,7 @@ describe API::ProtectedBranches do
it 'protects a single branch' do
post post_endpoint, params: { name: branch_name }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq(branch_name)
expect(json_response['push_access_levels'][0]['access_level']).to eq(Gitlab::Access::MAINTAINER)
expect(json_response['merge_access_levels'][0]['access_level']).to eq(Gitlab::Access::MAINTAINER)
@@ -139,7 +139,7 @@ describe API::ProtectedBranches do
it 'protects a single branch and developers can push' do
post post_endpoint, params: { name: branch_name, push_access_level: 30 }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq(branch_name)
expect(json_response['push_access_levels'][0]['access_level']).to eq(Gitlab::Access::DEVELOPER)
expect(json_response['merge_access_levels'][0]['access_level']).to eq(Gitlab::Access::MAINTAINER)
@@ -148,7 +148,7 @@ describe API::ProtectedBranches do
it 'protects a single branch and developers can merge' do
post post_endpoint, params: { name: branch_name, merge_access_level: 30 }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq(branch_name)
expect(json_response['push_access_levels'][0]['access_level']).to eq(Gitlab::Access::MAINTAINER)
expect(json_response['merge_access_levels'][0]['access_level']).to eq(Gitlab::Access::DEVELOPER)
@@ -157,7 +157,7 @@ describe API::ProtectedBranches do
it 'protects a single branch and developers can push and merge' do
post post_endpoint, params: { name: branch_name, push_access_level: 30, merge_access_level: 30 }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq(branch_name)
expect(json_response['push_access_levels'][0]['access_level']).to eq(Gitlab::Access::DEVELOPER)
expect(json_response['merge_access_levels'][0]['access_level']).to eq(Gitlab::Access::DEVELOPER)
@@ -166,7 +166,7 @@ describe API::ProtectedBranches do
it 'protects a single branch and no one can push' do
post post_endpoint, params: { name: branch_name, push_access_level: 0 }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq(branch_name)
expect(json_response['push_access_levels'][0]['access_level']).to eq(Gitlab::Access::NO_ACCESS)
expect(json_response['merge_access_levels'][0]['access_level']).to eq(Gitlab::Access::MAINTAINER)
@@ -175,7 +175,7 @@ describe API::ProtectedBranches do
it 'protects a single branch and no one can merge' do
post post_endpoint, params: { name: branch_name, merge_access_level: 0 }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq(branch_name)
expect(json_response['push_access_levels'][0]['access_level']).to eq(Gitlab::Access::MAINTAINER)
expect(json_response['merge_access_levels'][0]['access_level']).to eq(Gitlab::Access::NO_ACCESS)
@@ -184,7 +184,7 @@ describe API::ProtectedBranches do
it 'protects a single branch and no one can push or merge' do
post post_endpoint, params: { name: branch_name, push_access_level: 0, merge_access_level: 0 }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq(branch_name)
expect(json_response['push_access_levels'][0]['access_level']).to eq(Gitlab::Access::NO_ACCESS)
expect(json_response['merge_access_levels'][0]['access_level']).to eq(Gitlab::Access::NO_ACCESS)
@@ -193,7 +193,7 @@ describe API::ProtectedBranches do
it 'returns a 409 error if the same branch is protected twice' do
post post_endpoint, params: { name: protected_name }
- expect(response).to have_gitlab_http_status(409)
+ expect(response).to have_gitlab_http_status(:conflict)
end
context 'when branch has a wildcard in its name' do
@@ -217,7 +217,7 @@ describe API::ProtectedBranches do
it "prevents deletion of the protected branch rule" do
post post_endpoint, params: { name: branch_name }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -230,7 +230,7 @@ describe API::ProtectedBranches do
it "returns a 403 error if guest" do
post post_endpoint, params: { name: branch_name }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -245,7 +245,7 @@ describe API::ProtectedBranches do
it "unprotects a single branch" do
delete delete_endpoint
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
it_behaves_like '412 response' do
@@ -255,7 +255,7 @@ describe API::ProtectedBranches do
it "returns 404 if branch does not exist" do
delete api("/projects/#{project.id}/protected_branches/barfoo", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
context 'when a policy restricts rule deletion' do
@@ -267,7 +267,7 @@ describe API::ProtectedBranches do
it "prevents deletion of the protected branch rule" do
delete delete_endpoint
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -277,7 +277,7 @@ describe API::ProtectedBranches do
it "unprotects a wildcard branch" do
delete delete_endpoint
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
end
end
diff --git a/spec/requests/api/protected_tags_spec.rb b/spec/requests/api/protected_tags_spec.rb
index 5a962cd5667..3bc8ecbee73 100644
--- a/spec/requests/api/protected_tags_spec.rb
+++ b/spec/requests/api/protected_tags_spec.rb
@@ -19,7 +19,7 @@ describe API::ProtectedTags do
it 'returns the protected tags' do
get api(route, user), params: { per_page: 100 }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
@@ -55,7 +55,7 @@ describe API::ProtectedTags do
it 'returns the protected tag' do
get api(route, user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq(tag_name)
expect(json_response['create_access_levels'][0]['access_level']).to eq(::Gitlab::Access::MAINTAINER)
end
@@ -106,7 +106,7 @@ describe API::ProtectedTags do
it 'protects a single tag and maintainers can create tags' do
post api("/projects/#{project.id}/protected_tags", user), params: { name: tag_name }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq(tag_name)
expect(json_response['create_access_levels'][0]['access_level']).to eq(Gitlab::Access::MAINTAINER)
end
@@ -115,7 +115,7 @@ describe API::ProtectedTags do
post api("/projects/#{project.id}/protected_tags", user),
params: { name: tag_name, create_access_level: 30 }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq(tag_name)
expect(json_response['create_access_levels'][0]['access_level']).to eq(Gitlab::Access::DEVELOPER)
end
@@ -124,7 +124,7 @@ describe API::ProtectedTags do
post api("/projects/#{project.id}/protected_tags", user),
params: { name: tag_name, create_access_level: 0 }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq(tag_name)
expect(json_response['create_access_levels'][0]['access_level']).to eq(Gitlab::Access::NO_ACCESS)
end
@@ -132,7 +132,7 @@ describe API::ProtectedTags do
it 'returns a 422 error if the same tag is protected twice' do
post api("/projects/#{project.id}/protected_tags", user), params: { name: protected_name }
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(json_response['message'][0]).to eq('Name has already been taken')
end
@@ -140,7 +140,7 @@ describe API::ProtectedTags do
post api("/projects/#{project.id}/protected_tags", user), params: { name: protected_name }
post api("/projects/#{project2.id}/protected_tags", user), params: { name: protected_name }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq(protected_name)
end
@@ -150,7 +150,7 @@ describe API::ProtectedTags do
it 'protects multiple tags with a wildcard in the name' do
post api("/projects/#{project.id}/protected_tags", user), params: { name: tag_name }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq(tag_name)
expect(json_response['create_access_levels'][0]['access_level']).to eq(Gitlab::Access::MAINTAINER)
end
@@ -165,7 +165,7 @@ describe API::ProtectedTags do
it 'returns a 403 error if guest' do
post api("/projects/#{project.id}/protected_tags/", user), params: { name: tag_name }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -178,7 +178,7 @@ describe API::ProtectedTags do
it 'unprotects a single tag' do
delete api("/projects/#{project.id}/protected_tags/#{tag_name}", user)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
it_behaves_like '412 response' do
@@ -188,7 +188,7 @@ describe API::ProtectedTags do
it "returns 404 if tag does not exist" do
delete api("/projects/#{project.id}/protected_tags/barfoo", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
context 'when tag has a wildcard in its name' do
@@ -197,7 +197,7 @@ describe API::ProtectedTags do
it 'unprotects a wildcard tag' do
delete api("/projects/#{project.id}/protected_tags/#{tag_name}", user)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
end
end
diff --git a/spec/requests/api/release/links_spec.rb b/spec/requests/api/release/links_spec.rb
index 3a59052bb29..cf2043ecc74 100644
--- a/spec/requests/api/release/links_spec.rb
+++ b/spec/requests/api/release/links_spec.rb
@@ -135,16 +135,44 @@ describe API::Release::Links do
end
end
end
+
+ describe '#direct_asset_url' do
+ let!(:link) { create(:release_link, release: release, url: url, filepath: filepath) }
+ let(:url) { 'https://google.com/-/jobs/140463678/artifacts/download' }
+
+ context 'when filepath is provided' do
+ let(:filepath) { '/bin/bigfile.exe' }
+
+ specify do
+ get api("/projects/#{project.id}/releases/v0.1/assets/links/#{link.id}", maintainer)
+
+ expect(json_response['direct_asset_url']).to eq("http://localhost/#{project.namespace.path}/#{project.name}/-/releases/#{release.tag}/bin/bigfile.exe")
+ end
+ end
+
+ context 'when filepath is not provided' do
+ let(:filepath) { nil }
+
+ specify do
+ get api("/projects/#{project.id}/releases/v0.1/assets/links/#{link.id}", maintainer)
+
+ expect(json_response['direct_asset_url']).to eq(url)
+ end
+ end
+ end
end
describe 'POST /projects/:id/releases/:tag_name/assets/links' do
let(:params) do
{
name: 'awesome-app.dmg',
+ filepath: '/binaries/awesome-app.dmg',
url: 'https://example.com/download/awesome-app.dmg'
}
end
+ let(:last_release_link) { release.links.last }
+
it 'accepts the request' do
post api("/projects/#{project.id}/releases/v0.1/assets/links", maintainer), params: params
@@ -157,8 +185,9 @@ describe API::Release::Links do
end.to change { Releases::Link.count }.by(1)
release.reload
- expect(release.links.last.name).to eq('awesome-app.dmg')
- expect(release.links.last.url).to eq('https://example.com/download/awesome-app.dmg')
+ expect(last_release_link.name).to eq('awesome-app.dmg')
+ expect(last_release_link.filepath).to eq('/binaries/awesome-app.dmg')
+ expect(last_release_link.url).to eq('https://example.com/download/awesome-app.dmg')
end
it 'matches response schema' do
diff --git a/spec/requests/api/releases_spec.rb b/spec/requests/api/releases_spec.rb
index 12fd9f431e5..41999ca6e60 100644
--- a/spec/requests/api/releases_spec.rb
+++ b/spec/requests/api/releases_spec.rb
@@ -78,7 +78,7 @@ describe API::Releases do
issue_uri = URI.parse(links['issues_url'])
expect(mr_uri.path).to eq("#{path_base}/-/merge_requests")
- expect(issue_uri.path).to eq("#{path_base}/issues")
+ expect(issue_uri.path).to eq("#{path_base}/-/issues")
expect(mr_uri.query).to eq(expected_query)
expect(issue_uri.query).to eq(expected_query)
end
@@ -233,31 +233,6 @@ describe API::Releases do
.to match_array(release.sources.map(&:url))
end
- context "when release description contains confidential issue's link" do
- let(:confidential_issue) do
- create(:issue,
- :confidential,
- project: project,
- title: 'A vulnerability')
- end
-
- let!(:release) do
- create(:release,
- project: project,
- tag: 'v0.1',
- sha: commit.id,
- author: maintainer,
- description: "This is confidential #{confidential_issue.to_reference}")
- end
-
- it "does not expose confidential issue's title" do
- get api("/projects/#{project.id}/releases/v0.1", maintainer)
-
- expect(json_response['description_html']).to include(confidential_issue.to_reference)
- expect(json_response['description_html']).not_to include('A vulnerability')
- end
- end
-
context 'when release has link asset' do
let!(:link) do
create(:release_link,
@@ -359,12 +334,29 @@ describe API::Releases do
let(:milestone) { create(:milestone, project: project) }
+ it 'matches schema' do
+ get api("/projects/#{project.id}/releases/v0.1", non_project_member)
+
+ expect(response).to match_response_schema('public_api/v4/release')
+ end
+
it 'exposes milestones' do
get api("/projects/#{project.id}/releases/v0.1", non_project_member)
expect(json_response['milestones'].first['title']).to eq(milestone.title)
end
+ it 'returns issue stats for milestone' do
+ create_list(:issue, 2, milestone: milestone, project: project)
+ create_list(:issue, 3, :closed, milestone: milestone, project: project)
+
+ get api("/projects/#{project.id}/releases/v0.1", non_project_member)
+
+ issue_stats = json_response['milestones'].first["issue_stats"]
+ expect(issue_stats["total"]).to eq(5)
+ expect(issue_stats["closed"]).to eq(3)
+ end
+
context 'when project restricts visibility of issues and merge requests' do
let!(:project) { create(:project, :repository, :public, :issues_private, :merge_requests_private) }
@@ -414,6 +406,22 @@ describe API::Releases do
expect(project.releases.last.description).to eq('Super nice release')
end
+ it 'creates a new release without description' do
+ params = {
+ name: 'New release without description',
+ tag_name: 'v0.1',
+ released_at: '2019-03-25 10:00:00'
+ }
+
+ expect do
+ post api("/projects/#{project.id}/releases", maintainer), params: params
+ end.to change { Release.count }.by(1)
+
+ expect(project.releases.last.name).to eq('New release without description')
+ expect(project.releases.last.tag).to eq('v0.1')
+ expect(project.releases.last.description).to eq(nil)
+ end
+
it 'sets the released_at to the current time if the released_at parameter is not provided' do
now = Time.zone.parse('2015-08-25 06:00:00Z')
Timecop.freeze(now) do
@@ -459,26 +467,6 @@ describe API::Releases do
expect(project.releases.last.released_at).to eq('2019-03-25T01:00:00Z')
end
- context 'when description is empty' do
- let(:params) do
- {
- name: 'New release',
- tag_name: 'v0.1',
- description: ''
- }
- end
-
- it 'returns an error as validation failure' do
- expect do
- post api("/projects/#{project.id}/releases", maintainer), params: params
- end.not_to change { Release.count }
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['message'])
- .to eq("Validation failed: Description can't be blank")
- end
- end
-
it 'matches response schema' do
post api("/projects/#{project.id}/releases", maintainer), params: params
diff --git a/spec/requests/api/remote_mirrors_spec.rb b/spec/requests/api/remote_mirrors_spec.rb
index 065d9c7ca5b..3eaec6e2520 100644
--- a/spec/requests/api/remote_mirrors_spec.rb
+++ b/spec/requests/api/remote_mirrors_spec.rb
@@ -24,18 +24,53 @@ describe API::RemoteMirrors do
expect(response).to have_gitlab_http_status(:success)
expect(response).to match_response_schema('remote_mirrors')
end
+ end
- # TODO: Remove flag: https://gitlab.com/gitlab-org/gitlab/issues/38121
- context 'with the `remote_mirrors_api` feature disabled' do
- before do
- stub_feature_flags(remote_mirrors_api: false)
+ describe 'POST /projects/:id/remote_mirrors' do
+ let(:route) { "/projects/#{project.id}/remote_mirrors" }
+
+ shared_examples 'creates a remote mirror' do
+ it 'creates a remote mirror and returns response' do
+ project.add_maintainer(user)
+
+ post api(route, user), params: params
+
+ enabled = params.fetch(:enabled, false)
+ expect(response).to have_gitlab_http_status(:success)
+ expect(response).to match_response_schema('remote_mirror')
+ expect(json_response['enabled']).to eq(enabled)
end
+ end
- it 'responds with `not_found`' do
- get api(route, user)
+ it 'requires `admin_remote_mirror` permission' do
+ post api(route, developer)
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+
+ context 'creates a remote mirror' do
+ context 'disabled by default' do
+ let(:params) { { url: 'https://foo:bar@test.com' } }
- expect(response).to have_gitlab_http_status(:not_found)
+ it_behaves_like 'creates a remote mirror'
end
+
+ context 'enabled' do
+ let(:params) { { url: 'https://foo:bar@test.com', enabled: true } }
+
+ it_behaves_like 'creates a remote mirror'
+ end
+ end
+
+ it 'returns error if url is invalid' do
+ project.add_maintainer(user)
+
+ post api(route, user), params: {
+ url: 'ftp://foo:bar@test.com'
+ }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']['url']).to eq(["is blocked: Only allowed schemes are ssh, git, http, https"])
end
end
@@ -43,6 +78,10 @@ describe API::RemoteMirrors do
let(:route) { ->(id) { "/projects/#{project.id}/remote_mirrors/#{id}" } }
let(:mirror) { project.remote_mirrors.first }
+ before do
+ stub_feature_flags(keep_divergent_refs: false)
+ end
+
it 'requires `admin_remote_mirror` permission' do
put api(route[mirror.id], developer)
@@ -54,24 +93,30 @@ describe API::RemoteMirrors do
put api(route[mirror.id], user), params: {
enabled: '0',
- only_protected_branches: 'true'
+ only_protected_branches: 'true',
+ keep_divergent_refs: 'true'
}
expect(response).to have_gitlab_http_status(:success)
expect(json_response['enabled']).to eq(false)
expect(json_response['only_protected_branches']).to eq(true)
+
+ # Deleted due to lack of feature availability
+ expect(json_response['keep_divergent_refs']).to be_nil
end
- # TODO: Remove flag: https://gitlab.com/gitlab-org/gitlab/issues/38121
- context 'with the `remote_mirrors_api` feature disabled' do
+ context 'with the `keep_divergent_refs` feature enabled' do
before do
- stub_feature_flags(remote_mirrors_api: false)
+ stub_feature_flags(keep_divergent_refs: { enabled: true, project: project })
end
- it 'responds with `not_found`' do
- put api(route[mirror.id], user)
+ it 'updates the `keep_divergent_refs` attribute' do
+ project.add_maintainer(user)
+
+ put api(route[mirror.id], user), params: { keep_divergent_refs: 'true' }
- expect(response).to have_gitlab_http_status(:not_found)
+ expect(response).to have_gitlab_http_status(:success)
+ expect(json_response['keep_divergent_refs']).to eq(true)
end
end
end
diff --git a/spec/requests/api/repositories_spec.rb b/spec/requests/api/repositories_spec.rb
index 8bca458bece..97dc3899d3f 100644
--- a/spec/requests/api/repositories_spec.rb
+++ b/spec/requests/api/repositories_spec.rb
@@ -19,7 +19,7 @@ describe API::Repositories do
it 'returns the repository tree' do
get api(route, current_user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
@@ -108,7 +108,7 @@ describe API::Repositories do
it 'returns blob attributes as json' do
get api(route, current_user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['size']).to eq(111)
expect(json_response['encoding']).to eq("base64")
expect(Base64.decode64(json_response['content']).lines.first).to eq("class Commit\n")
@@ -167,7 +167,7 @@ describe API::Repositories do
get api(route, current_user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(headers[Gitlab::Workhorse::DETECT_HEADER]).to eq "true"
end
@@ -223,11 +223,15 @@ describe API::Repositories do
describe "GET /projects/:id/repository/archive(.:format)?:sha" do
let(:route) { "/projects/#{project.id}/repository/archive" }
+ before do
+ allow(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).and_return(false)
+ end
+
shared_examples_for 'repository archive' do
it 'returns the repository archive' do
get api(route, current_user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
type, params = workhorse_send_data
@@ -238,7 +242,7 @@ describe API::Repositories do
it 'returns the repository archive archive.zip' do
get api("/projects/#{project.id}/repository/archive.zip", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
type, params = workhorse_send_data
@@ -249,7 +253,7 @@ describe API::Repositories do
it 'returns the repository archive archive.tar.bz2' do
get api("/projects/#{project.id}/repository/archive.tar.bz2", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
type, params = workhorse_send_data
@@ -263,6 +267,14 @@ describe API::Repositories do
let(:message) { '404 File Not Found' }
end
end
+
+ it 'rate limits user when thresholds hit' do
+ allow(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).and_return(true)
+
+ get api("/projects/#{project.id}/repository/archive.tar.bz2", user)
+
+ expect(response).to have_gitlab_http_status(:too_many_requests)
+ end
end
context 'when unauthenticated', 'and project is public' do
@@ -302,7 +314,7 @@ describe API::Repositories do
}).and_call_original
get api(route, current_user), params: { from: 'master', to: 'feature' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['commits']).to be_present
expect(json_response['diffs']).to be_present
end
@@ -313,7 +325,7 @@ describe API::Repositories do
}).and_call_original
get api(route, current_user), params: { from: 'master', to: 'feature', straight: false }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['commits']).to be_present
expect(json_response['diffs']).to be_present
end
@@ -324,7 +336,7 @@ describe API::Repositories do
}).and_call_original
get api(route, current_user), params: { from: 'master', to: 'feature', straight: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['commits']).to be_present
expect(json_response['diffs']).to be_present
end
@@ -332,7 +344,7 @@ describe API::Repositories do
it "compares tags" do
get api(route, current_user), params: { from: 'v1.0.0', to: 'v1.1.0' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['commits']).to be_present
expect(json_response['diffs']).to be_present
end
@@ -340,7 +352,7 @@ describe API::Repositories do
it "compares commits" do
get api(route, current_user), params: { from: sample_commit.id, to: sample_commit.parent_id }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['commits']).to be_empty
expect(json_response['diffs']).to be_empty
expect(json_response['compare_same_ref']).to be_falsey
@@ -349,7 +361,7 @@ describe API::Repositories do
it "compares commits in reverse order" do
get api(route, current_user), params: { from: sample_commit.parent_id, to: sample_commit.id }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['commits']).to be_present
expect(json_response['diffs']).to be_present
end
@@ -357,7 +369,7 @@ describe API::Repositories do
it "compares same refs" do
get api(route, current_user), params: { from: 'master', to: 'master' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['commits']).to be_empty
expect(json_response['diffs']).to be_empty
expect(json_response['compare_same_ref']).to be_truthy
@@ -368,7 +380,7 @@ describe API::Repositories do
get api(route, current_user), params: { from: 'master', to: 'feature' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['commits']).to be_present
expect(json_response['diffs']).to be_present
expect(json_response['diffs'].first['diff']).to be_empty
@@ -377,13 +389,13 @@ describe API::Repositories do
it "returns a 404 when from ref is unknown" do
get api(route, current_user), params: { from: 'unknown_ref', to: 'master' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it "returns a 404 when to ref is unknown" do
get api(route, current_user), params: { from: 'master', to: 'unknown_ref' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -421,7 +433,7 @@ describe API::Repositories do
it 'returns valid data' do
get api(route, current_user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
@@ -438,7 +450,7 @@ describe API::Repositories do
it 'returns the repository contributors sorted by commits desc' do
get api(route, current_user), params: { order_by: 'commits', sort: 'desc' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('contributors')
expect(json_response.first['commits']).to be > json_response.last['commits']
end
@@ -448,7 +460,7 @@ describe API::Repositories do
it 'returns the repository contributors sorted by name asc case insensitive' do
get api(route, current_user), params: { order_by: 'name', sort: 'asc' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('contributors')
expect(json_response.first['name'].downcase).to be < json_response.last['name'].downcase
end
diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb
index 9d01a44916c..5a8add1e9db 100644
--- a/spec/requests/api/runner_spec.rb
+++ b/spec/requests/api/runner_spec.rb
@@ -22,7 +22,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'returns 400 error' do
post api('/runners')
- expect(response).to have_gitlab_http_status 400
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -30,7 +30,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'returns 403 error' do
post api('/runners'), params: { token: 'invalid' }
- expect(response).to have_gitlab_http_status 403
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -40,7 +40,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
runner = Ci::Runner.first
- expect(response).to have_gitlab_http_status 201
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['id']).to eq(runner.id)
expect(json_response['token']).to eq(runner.token)
expect(runner.run_untagged).to be true
@@ -55,7 +55,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'creates project runner' do
post api('/runners'), params: { token: project.runners_token }
- expect(response).to have_gitlab_http_status 201
+ expect(response).to have_gitlab_http_status(:created)
expect(project.runners.size).to eq(1)
runner = Ci::Runner.first
expect(runner.token).not_to eq(registration_token)
@@ -70,7 +70,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'creates a group runner' do
post api('/runners'), params: { token: group.runners_token }
- expect(response).to have_http_status 201
+ expect(response).to have_gitlab_http_status(:created)
expect(group.runners.reload.size).to eq(1)
runner = Ci::Runner.first
expect(runner.token).not_to eq(registration_token)
@@ -87,7 +87,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
description: 'server.hostname'
}
- expect(response).to have_gitlab_http_status 201
+ expect(response).to have_gitlab_http_status(:created)
expect(Ci::Runner.first.description).to eq('server.hostname')
end
end
@@ -99,7 +99,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
tag_list: 'tag1, tag2'
}
- expect(response).to have_gitlab_http_status 201
+ expect(response).to have_gitlab_http_status(:created)
expect(Ci::Runner.first.tag_list.sort).to eq(%w(tag1 tag2))
end
end
@@ -113,7 +113,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
tag_list: ['tag']
}
- expect(response).to have_gitlab_http_status 201
+ expect(response).to have_gitlab_http_status(:created)
expect(Ci::Runner.first.run_untagged).to be false
expect(Ci::Runner.first.tag_list.sort).to eq(['tag'])
end
@@ -126,7 +126,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
run_untagged: false
}
- expect(response).to have_gitlab_http_status 400
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to include(
'tags_list' => ['can not be empty when runner is not allowed to pick untagged jobs'])
end
@@ -140,7 +140,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
locked: true
}
- expect(response).to have_gitlab_http_status 201
+ expect(response).to have_gitlab_http_status(:created)
expect(Ci::Runner.first.locked).to be true
end
end
@@ -153,7 +153,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
active: true
}
- expect(response).to have_gitlab_http_status 201
+ expect(response).to have_gitlab_http_status(:created)
expect(Ci::Runner.first.active).to be true
end
end
@@ -165,7 +165,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
active: false
}
- expect(response).to have_gitlab_http_status 201
+ expect(response).to have_gitlab_http_status(:created)
expect(Ci::Runner.first.active).to be false
end
end
@@ -179,7 +179,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
access_level: 'ref_protected'
}
- expect(response).to have_gitlab_http_status 201
+ expect(response).to have_gitlab_http_status(:created)
expect(Ci::Runner.first.ref_protected?).to be true
end
end
@@ -191,7 +191,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
access_level: 'not_protected'
}
- expect(response).to have_gitlab_http_status 201
+ expect(response).to have_gitlab_http_status(:created)
expect(Ci::Runner.first.ref_protected?).to be false
end
end
@@ -204,7 +204,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
maximum_timeout: 9000
}
- expect(response).to have_gitlab_http_status 201
+ expect(response).to have_gitlab_http_status(:created)
expect(Ci::Runner.first.maximum_timeout).to eq(9000)
end
@@ -215,7 +215,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
maximum_timeout: ''
}
- expect(response).to have_gitlab_http_status 201
+ expect(response).to have_gitlab_http_status(:created)
expect(Ci::Runner.first.maximum_timeout).to be_nil
end
end
@@ -231,7 +231,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
info: { param => value }
}
- expect(response).to have_gitlab_http_status 201
+ expect(response).to have_gitlab_http_status(:created)
expect(Ci::Runner.first.read_attribute(param.to_sym)).to eq(value)
end
end
@@ -242,7 +242,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
params: { token: registration_token },
headers: { 'X-Forwarded-For' => '123.111.123.111' }
- expect(response).to have_gitlab_http_status 201
+ expect(response).to have_gitlab_http_status(:created)
expect(Ci::Runner.first.ip_address).to eq('123.111.123.111')
end
end
@@ -252,7 +252,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'returns 400 error' do
delete api('/runners')
- expect(response).to have_gitlab_http_status 400
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -260,7 +260,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'returns 403 error' do
delete api('/runners'), params: { token: 'invalid' }
- expect(response).to have_gitlab_http_status 403
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -270,7 +270,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'deletes Runner' do
delete api('/runners'), params: { token: runner.token }
- expect(response).to have_gitlab_http_status 204
+ expect(response).to have_gitlab_http_status(:no_content)
expect(Ci::Runner.count).to eq(0)
end
@@ -296,7 +296,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'returns 403 error' do
post api('/runners/verify'), params: { token: 'invalid-token' }
- expect(response).to have_gitlab_http_status 403
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -304,7 +304,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'verifies Runner credentials' do
post api('/runners/verify'), params: { token: runner.token }
- expect(response).to have_gitlab_http_status 200
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -361,7 +361,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
context 'when runner sends version in User-Agent' do
context 'for stable version' do
it 'gives 204 and set X-GitLab-Last-Update' do
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
expect(response.header).to have_key('X-GitLab-Last-Update')
end
end
@@ -370,7 +370,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
let(:last_update) { runner.ensure_runner_queue_value }
it 'gives 204 and set the same X-GitLab-Last-Update' do
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
expect(response.header['X-GitLab-Last-Update']).to eq(last_update)
end
end
@@ -380,7 +380,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
let(:new_update) { runner.tick_runner_queue }
it 'gives 204 and set a new X-GitLab-Last-Update' do
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
expect(response.header['X-GitLab-Last-Update']).to eq(new_update)
end
end
@@ -388,19 +388,19 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
context 'when beta version is sent' do
let(:user_agent) { 'gitlab-runner 9.0.0~beta.167.g2b2bacc (master; go1.7.4; linux/amd64)' }
- it { expect(response).to have_gitlab_http_status(204) }
+ it { expect(response).to have_gitlab_http_status(:no_content) }
end
context 'when pre-9-0 version is sent' do
let(:user_agent) { 'gitlab-ci-multi-runner 1.6.0 (1-6-stable; go1.6.3; linux/amd64)' }
- it { expect(response).to have_gitlab_http_status(204) }
+ it { expect(response).to have_gitlab_http_status(:no_content) }
end
context 'when pre-9-0 beta version is sent' do
let(:user_agent) { 'gitlab-ci-multi-runner 1.6.0~beta.167.g2b2bacc (master; go1.6.3; linux/amd64)' }
- it { expect(response).to have_gitlab_http_status(204) }
+ it { expect(response).to have_gitlab_http_status(:no_content) }
end
end
end
@@ -409,7 +409,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'returns 400 error' do
post api('/jobs/request')
- expect(response).to have_gitlab_http_status 400
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -417,7 +417,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'returns 403 error' do
post api('/jobs/request'), params: { token: 'invalid' }
- expect(response).to have_gitlab_http_status 403
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -429,7 +429,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'returns 204 error' do
request_job
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
expect(response.header['X-GitLab-Last-Update']).to eq(update_value)
end
end
@@ -516,7 +516,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'picks a job' do
request_job info: { platform: :darwin }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(response.headers).not_to have_key('X-GitLab-Last-Update')
expect(runner.reload.platform).to eq('darwin')
expect(json_response['id']).to eq(job.id)
@@ -541,7 +541,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
request_job info: { platform: :darwin }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['id']).to eq(job.id)
end
@@ -551,7 +551,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'sets branch as ref_type' do
request_job
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['git_info']['ref_type']).to eq('tag')
end
@@ -563,7 +563,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'specifies refspecs' do
request_job
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['git_info']['refspecs']).to include("+refs/tags/#{job.ref}:refs/tags/#{job.ref}")
end
end
@@ -576,7 +576,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'specifies refspecs' do
request_job
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['git_info']['refspecs'])
.to contain_exactly('+refs/tags/*:refs/tags/*', '+refs/heads/*:refs/remotes/origin/*')
end
@@ -592,7 +592,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'gives 204' do
request_job(job_age: job_age)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
end
@@ -602,7 +602,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'picks a job' do
request_job(job_age: job_age)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
end
end
@@ -611,7 +611,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'sets tag as ref_type' do
request_job
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['git_info']['ref_type']).to eq('branch')
end
@@ -623,7 +623,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'specifies refspecs' do
request_job
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['git_info']['refspecs']).to include("+refs/heads/#{job.ref}:refs/remotes/origin/#{job.ref}")
end
end
@@ -636,7 +636,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'specifies refspecs' do
request_job
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['git_info']['refspecs'])
.to contain_exactly('+refs/tags/*:refs/tags/*', '+refs/heads/*:refs/remotes/origin/*')
end
@@ -651,7 +651,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'sets branch as ref_type' do
request_job
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['git_info']['ref_type']).to eq('branch')
end
@@ -663,7 +663,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'returns the overwritten git depth for merge request refspecs' do
request_job
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['git_info']['depth']).to eq(1)
end
end
@@ -680,7 +680,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it "updates provided Runner's parameter" do
request_job info: { param => value }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(runner.reload.read_attribute(param.to_sym)).to eq(value)
end
end
@@ -691,7 +691,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
params: { token: runner.token },
headers: { 'User-Agent' => user_agent, 'X-Forwarded-For' => '123.222.123.222' }
- expect(response).to have_gitlab_http_status 201
+ expect(response).to have_gitlab_http_status(:created)
expect(runner.reload.ip_address).to eq('123.222.123.222')
end
@@ -700,7 +700,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
params: { token: runner.token },
headers: { 'User-Agent' => user_agent, 'X-Forwarded-For' => '123.222.123.222, 127.0.0.1' }
- expect(response).to have_gitlab_http_status 201
+ expect(response).to have_gitlab_http_status(:created)
expect(runner.reload.ip_address).to eq('123.222.123.222')
end
@@ -713,7 +713,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'returns a conflict' do
request_job
- expect(response).to have_gitlab_http_status(409)
+ expect(response).to have_gitlab_http_status(:conflict)
expect(response.headers).not_to have_key('X-GitLab-Last-Update')
end
end
@@ -731,7 +731,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'returns dependent jobs' do
request_job
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['id']).to eq(test_job.id)
expect(json_response['dependencies'].count).to eq(2)
expect(json_response['dependencies']).to include(
@@ -751,12 +751,12 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'returns dependent jobs' do
request_job
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['id']).to eq(test_job.id)
expect(json_response['dependencies'].count).to eq(1)
expect(json_response['dependencies']).to include(
{ 'id' => job.id, 'name' => job.name, 'token' => job.token,
- 'artifacts_file' => { 'filename' => 'ci_build_artifacts.zip', 'size' => 106365 } })
+ 'artifacts_file' => { 'filename' => 'ci_build_artifacts.zip', 'size' => 107464 } })
end
end
@@ -777,7 +777,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'returns dependent jobs' do
request_job
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['id']).to eq(test_job.id)
expect(json_response['dependencies'].count).to eq(1)
expect(json_response['dependencies'][0]).to include('id' => job2.id, 'name' => job2.name, 'token' => job2.token)
@@ -801,7 +801,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'returns an empty array' do
request_job
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['id']).to eq(empty_dependencies_job.id)
expect(json_response['dependencies'].count).to eq(0)
end
@@ -820,7 +820,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'picks job' do
request_job
- expect(response).to have_gitlab_http_status 201
+ expect(response).to have_gitlab_http_status(:created)
end
end
@@ -854,7 +854,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'returns variables for triggers' do
request_job
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['variables']).to include(*expected_variables)
end
end
@@ -919,7 +919,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'contains info about timeout taken from project' do
request_job
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['runner_info']).to include({ 'timeout' => 1234 })
end
@@ -929,7 +929,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'contains info about timeout overridden by runner' do
request_job
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['runner_info']).to include({ 'timeout' => 1000 })
end
end
@@ -940,7 +940,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'contains info about timeout not overridden by runner' do
request_job
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['runner_info']).to include({ 'timeout' => 1234 })
end
end
@@ -965,7 +965,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'returns the image ports' do
request_job
- expect(response).to have_http_status(:created)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response).to include(
'id' => job.id,
'image' => a_hash_including('name' => 'ruby', 'ports' => [{ 'number' => 80, 'protocol' => 'http', 'name' => 'default_port' }]),
@@ -989,7 +989,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'returns the service ports' do
request_job
- expect(response).to have_http_status(:created)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response).to include(
'id' => job.id,
'image' => a_hash_including('name' => 'ruby'),
@@ -1089,7 +1089,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
update_job(state: 'success', trace: 'BUILD TRACE UPDATED')
job.reload
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(job.trace.raw).to eq 'BUILD TRACE UPDATED'
expect(job.job_artifacts_trace.open.read).to eq 'BUILD TRACE UPDATED'
end
@@ -1133,7 +1133,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'responds with forbidden' do
update_job
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -1147,7 +1147,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
update_job(state: 'success', trace: 'BUILD TRACE UPDATED')
job.reload
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(response.header['Job-Status']).to eq 'failed'
expect(job.trace.raw).to eq 'Job failed'
expect(job).to be_failed
@@ -1461,7 +1461,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'succeeds' do
subject
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
expect(json_response['TempPath']).to eq(JobArtifactUploader.workhorse_local_upload_path)
expect(json_response['RemoteObject']).to be_nil
@@ -1481,7 +1481,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'succeeds' do
subject
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
expect(json_response).not_to have_key('TempPath')
expect(json_response['RemoteObject']).to have_key('ID')
@@ -1509,7 +1509,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'fails to post' do
authorize_artifacts_with_token_in_params(filesize: sample_max_size.megabytes.to_i)
- expect(response).to have_gitlab_http_status(413)
+ expect(response).to have_gitlab_http_status(:payload_too_large)
end
end
@@ -1557,7 +1557,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'authorizes posting artifacts to running job' do
authorize_artifacts_with_token_in_headers
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
expect(json_response['TempPath']).not_to be_nil
end
@@ -1567,7 +1567,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
authorize_artifacts_with_token_in_headers(filesize: 100)
- expect(response).to have_gitlab_http_status(413)
+ expect(response).to have_gitlab_http_status(:payload_too_large)
end
end
@@ -1575,7 +1575,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'fails to authorize artifacts posting' do
authorize_artifacts(token: job.project.runners_token)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -1591,7 +1591,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'responds with forbidden' do
authorize_artifacts(token: 'invalid', filesize: 100 )
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -1632,14 +1632,14 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'responds with forbidden' do
upload_artifacts(file_upload, headers_with_token)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'when job is running' do
shared_examples 'successful artifacts upload' do
it 'updates successfully' do
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
end
@@ -1678,7 +1678,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
let(:remote_id) { 'invalid id' }
it 'responds with bad request' do
- expect(response).to have_gitlab_http_status(500)
+ expect(response).to have_gitlab_http_status(:internal_server_error)
expect(json_response['message']).to eq("Missing file")
end
end
@@ -1689,7 +1689,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'responds with forbidden' do
upload_artifacts(file_upload, headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.project.runners_token))
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -1700,7 +1700,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
upload_artifacts(file_upload, headers_with_token)
- expect(response).to have_gitlab_http_status(413)
+ expect(response).to have_gitlab_http_status(:payload_too_large)
end
end
@@ -1708,7 +1708,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'fails to post artifacts without file' do
post api("/jobs/#{job.id}/artifacts"), params: {}, headers: headers_with_token
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -1716,7 +1716,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'fails to post artifacts without GitLab-Workhorse' do
post api("/jobs/#{job.id}/artifacts"), params: { token: job.token }, headers: {}
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -1750,7 +1750,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
let(:expire_in) { '7 days' }
it 'updates when specified' do
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(job.reload.artifacts_expire_at).to be_within(5.minutes).of(7.days.from_now)
end
end
@@ -1759,7 +1759,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
let(:expire_in) { nil }
it 'ignores if not specified' do
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(job.reload.artifacts_expire_at).to be_nil
end
@@ -1768,7 +1768,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
let(:default_artifacts_expire_in) { '5 days' }
it 'sets to application default' do
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(job.reload.artifacts_expire_at).to be_within(5.minutes).of(5.days.from_now)
end
end
@@ -1777,7 +1777,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
let(:default_artifacts_expire_in) { '0' }
it 'does not set expire_in' do
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(job.reload.artifacts_expire_at).to be_nil
end
end
@@ -1812,7 +1812,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
end
it 'stores artifacts and artifacts metadata' do
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(stored_artifacts_file.filename).to eq(artifacts.original_filename)
expect(stored_metadata_file.filename).to eq(metadata.original_filename)
expect(stored_artifacts_size).to eq(artifacts.size)
@@ -1827,7 +1827,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
end
it 'is expected to respond with bad request' do
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'does not store metadata' do
@@ -1843,7 +1843,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'stores junit test report' do
upload_artifacts(file_upload, headers_with_token, params)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(job.reload.job_artifacts_archive).not_to be_nil
end
end
@@ -1854,7 +1854,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'returns an error' do
upload_artifacts(file_upload, headers_with_token, params)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(job.reload.job_artifacts_archive).to be_nil
end
end
@@ -1868,7 +1868,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'stores junit test report' do
upload_artifacts(file_upload, headers_with_token, params)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(job.reload.job_artifacts_junit).not_to be_nil
end
end
@@ -1880,7 +1880,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'returns an error' do
upload_artifacts(file_upload, headers_with_token, params)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(job.reload.job_artifacts_junit).to be_nil
end
end
@@ -1894,7 +1894,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'stores metrics_referee data' do
upload_artifacts(file_upload, headers_with_token, params)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(job.reload.job_artifacts_metrics_referee).not_to be_nil
end
end
@@ -1906,7 +1906,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'returns an error' do
upload_artifacts(file_upload, headers_with_token, params)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(job.reload.job_artifacts_metrics_referee).to be_nil
end
end
@@ -1920,7 +1920,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'stores network_referee data' do
upload_artifacts(file_upload, headers_with_token, params)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(job.reload.job_artifacts_network_referee).not_to be_nil
end
end
@@ -1932,11 +1932,54 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'returns an error' do
upload_artifacts(file_upload, headers_with_token, params)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(job.reload.job_artifacts_network_referee).to be_nil
end
end
end
+
+ context 'when artifact_type is dotenv' do
+ context 'when artifact_format is gzip' do
+ let(:file_upload) { fixture_file_upload('spec/fixtures/build.env.gz') }
+ let(:params) { { artifact_type: :dotenv, artifact_format: :gzip } }
+
+ it 'stores dotenv file' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(job.reload.job_artifacts_dotenv).not_to be_nil
+ end
+
+ it 'parses dotenv file' do
+ expect do
+ upload_artifacts(file_upload, headers_with_token, params)
+ end.to change { job.job_variables.count }.from(0).to(2)
+ end
+
+ context 'when parse error happens' do
+ let(:file_upload) { fixture_file_upload('spec/fixtures/ci_build_artifacts_metadata.gz') }
+
+ it 'returns an error' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to eq('Invalid Format')
+ end
+ end
+ end
+
+ context 'when artifact_format is raw' do
+ let(:file_upload) { fixture_file_upload('spec/fixtures/build.env.gz') }
+ let(:params) { { artifact_type: :dotenv, artifact_format: :raw } }
+
+ it 'returns an error' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(job.reload.job_artifacts_dotenv).to be_nil
+ end
+ end
+ end
end
context 'when artifacts already exist for the job' do
@@ -1979,6 +2022,21 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
end
end
+ context 'when object storage throws errors' do
+ let(:params) { { artifact_type: :archive, artifact_format: :zip } }
+
+ it 'does not store artifacts' do
+ allow_next_instance_of(JobArtifactUploader) do |uploader|
+ allow(uploader).to receive(:store!).and_raise(Errno::EIO)
+ end
+
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:service_unavailable)
+ expect(job.reload.job_artifacts_archive).to be_nil
+ end
+ end
+
context 'when artifacts are being stored outside of tmp path' do
let(:new_tmpdir) { Dir.mktmpdir }
@@ -1998,7 +2056,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it' "fails to post artifacts for outside of tmp path"' do
upload_artifacts(file_upload, headers_with_token)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -2040,7 +2098,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
end
it 'download artifacts' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.headers.to_h).to include download_headers
end
end
@@ -2055,7 +2113,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
end
it 'uses workhorse send-url' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.headers.to_h).to include(
'Gitlab-Workhorse-Send-Data' => /send-url:/)
end
@@ -2067,7 +2125,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
end
it 'receive redirect for downloading artifacts' do
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(response.headers).to include('Location')
end
end
@@ -2082,7 +2140,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
end
it 'responds with forbidden' do
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -2091,7 +2149,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'responds with not found' do
download_artifact
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
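Note (illustrative only, not part of this commit): the hunks above replace bare integers passed to have_gitlab_http_status with Rack's symbolic status names. A minimal sketch to confirm the symbols resolve to the codes they replace, assuming the Rack version bundled with this release exposes these names (the 413 symbol in particular varies across Rack versions):

# Illustrative sketch, not part of this commit: check that the symbolic
# names used in the updated specs map to the numeric codes they replace.
require 'rack/utils'

{ ok: 200, created: 201, no_content: 204, found: 302,
  bad_request: 400, unauthorized: 401, forbidden: 403, not_found: 404,
  conflict: 409, payload_too_large: 413, internal_server_error: 500,
  service_unavailable: 503 }.each do |symbol, code|
  actual = Rack::Utils::SYMBOL_TO_STATUS_CODE[symbol]
  puts format('%-25s %s (expected %s)', symbol, actual, code)
end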
diff --git a/spec/requests/api/runners_spec.rb b/spec/requests/api/runners_spec.rb
index c54487a68fe..70094ef4388 100644
--- a/spec/requests/api/runners_spec.rb
+++ b/spec/requests/api/runners_spec.rb
@@ -32,7 +32,7 @@ describe API::Runners do
it 'returns response status and headers' do
get api('/runners', user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
end
@@ -51,7 +51,7 @@ describe API::Runners do
get api('/runners?scope=paused', user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to match_array [
@@ -61,7 +61,7 @@ describe API::Runners do
it 'avoids filtering if scope is invalid' do
get api('/runners?scope=unknown', user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'filters runners by type' do
@@ -76,7 +76,7 @@ describe API::Runners do
it 'does not filter by invalid type' do
get api('/runners?type=bogus', user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'filters runners by status' do
@@ -92,7 +92,7 @@ describe API::Runners do
it 'does not filter by invalid status' do
get api('/runners?status=bogus', user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'filters runners by tag_list' do
@@ -111,7 +111,7 @@ describe API::Runners do
it 'does not return runners' do
get api('/runners')
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -122,7 +122,7 @@ describe API::Runners do
it 'returns response status and headers' do
get api('/runners/all', admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
end
@@ -141,7 +141,7 @@ describe API::Runners do
get api('/runners/all?scope=shared', admin)
shared = json_response.all? { |r| r['is_shared'] }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response[0]).to have_key('ip_address')
@@ -151,7 +151,7 @@ describe API::Runners do
it 'filters runners by scope' do
get api('/runners/all?scope=specific', admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to match_array [
@@ -163,7 +163,7 @@ describe API::Runners do
it 'avoids filtering if scope is invalid' do
get api('/runners/all?scope=unknown', admin)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'filters runners by type' do
@@ -178,7 +178,7 @@ describe API::Runners do
it 'does not filter by invalid type' do
get api('/runners/all?type=bogus', admin)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'filters runners by status' do
@@ -194,7 +194,7 @@ describe API::Runners do
it 'does not filter by invalid status' do
get api('/runners/all?status=bogus', admin)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'filters runners by tag_list' do
@@ -213,7 +213,7 @@ describe API::Runners do
it 'does not return runners list' do
get api('/runners/all', user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -222,7 +222,7 @@ describe API::Runners do
it 'does not return runners' do
get api('/runners')
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -233,7 +233,7 @@ describe API::Runners do
it "returns runner's details" do
get api("/runners/#{shared_runner.id}", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['description']).to eq(shared_runner.description)
expect(json_response['maximum_timeout']).to be_nil
end
@@ -247,7 +247,7 @@ describe API::Runners do
expect do
delete api("/runners/#{unused_project_runner.id}", admin)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.to change { Ci::Runner.project_type.count }.by(-1)
end
end
@@ -255,7 +255,7 @@ describe API::Runners do
it "returns runner's details" do
get api("/runners/#{project_runner.id}", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['description']).to eq(project_runner.description)
end
@@ -269,7 +269,7 @@ describe API::Runners do
it 'returns 404 if runner does not exists' do
get api('/runners/0', admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -278,7 +278,7 @@ describe API::Runners do
it "returns runner's details" do
get api("/runners/#{project_runner.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['description']).to eq(project_runner.description)
end
end
@@ -287,7 +287,7 @@ describe API::Runners do
it "returns runner's details" do
get api("/runners/#{shared_runner.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['description']).to eq(shared_runner.description)
end
end
@@ -297,7 +297,7 @@ describe API::Runners do
it "does not return project runner's details" do
get api("/runners/#{project_runner.id}", user2)
- expect(response).to have_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -305,7 +305,7 @@ describe API::Runners do
it "does not return project runner's details" do
get api("/runners/#{project_runner.id}")
- expect(response).to have_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -318,7 +318,7 @@ describe API::Runners do
description = shared_runner.description
update_runner(shared_runner.id, admin, description: "#{description}_updated")
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(shared_runner.reload.description).to eq("#{description}_updated")
end
@@ -326,14 +326,14 @@ describe API::Runners do
active = shared_runner.active
update_runner(shared_runner.id, admin, active: !active)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(shared_runner.reload.active).to eq(!active)
end
it 'runner tag list' do
update_runner(shared_runner.id, admin, tag_list: ['ruby2.1', 'pgsql', 'mysql'])
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(shared_runner.reload.tag_list).to include('ruby2.1', 'pgsql', 'mysql')
end
@@ -342,28 +342,28 @@ describe API::Runners do
update_runner(shared_runner.id, admin, tag_list: ['ruby2.1', 'pgsql', 'mysql'])
update_runner(shared_runner.id, admin, run_untagged: 'false')
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(shared_runner.reload.run_untagged?).to be(false)
end
it 'runner unlocked flag' do
update_runner(shared_runner.id, admin, locked: 'true')
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(shared_runner.reload.locked?).to be(true)
end
it 'runner access level' do
update_runner(shared_runner.id, admin, access_level: 'ref_protected')
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(shared_runner.reload.ref_protected?).to be_truthy
end
it 'runner maximum timeout' do
update_runner(shared_runner.id, admin, maximum_timeout: 1234)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(shared_runner.reload.maximum_timeout).to eq(1234)
end
@@ -371,7 +371,7 @@ describe API::Runners do
put api("/runners/#{shared_runner.id}", admin)
shared_runner.reload
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -390,7 +390,7 @@ describe API::Runners do
maximum_timeout: 1234)
shared_runner.reload
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(shared_runner.description).to eq("#{description}_updated")
expect(shared_runner.active).to eq(!active)
expect(shared_runner.tag_list).to include('ruby2.1', 'pgsql', 'mysql')
@@ -411,7 +411,7 @@ describe API::Runners do
update_runner(project_runner.id, admin, description: 'test')
project_runner.reload
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(project_runner.description).to eq('test')
expect(project_runner.description).not_to eq(description)
expect(project_runner.ensure_runner_queue_value)
@@ -422,7 +422,7 @@ describe API::Runners do
it 'returns 404 if runner does not exists' do
update_runner(0, admin, description: 'test')
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
def update_runner(id, user, args)
@@ -435,7 +435,7 @@ describe API::Runners do
it 'does not update runner' do
put api("/runners/#{shared_runner.id}", user), params: { description: 'test' }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -443,7 +443,7 @@ describe API::Runners do
it 'does not update project runner without access to it' do
put api("/runners/#{project_runner.id}", user2), params: { description: 'test' }
- expect(response).to have_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'updates project runner with access to it' do
@@ -451,7 +451,7 @@ describe API::Runners do
put api("/runners/#{project_runner.id}", admin), params: { description: 'test' }
project_runner.reload
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(project_runner.description).to eq('test')
expect(project_runner.description).not_to eq(description)
end
@@ -462,7 +462,7 @@ describe API::Runners do
it 'does not delete project runner' do
put api("/runners/#{project_runner.id}")
- expect(response).to have_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -474,7 +474,7 @@ describe API::Runners do
expect do
delete api("/runners/#{shared_runner.id}", admin)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.to change { Ci::Runner.instance_type.count }.by(-1)
end
@@ -488,7 +488,7 @@ describe API::Runners do
expect do
delete api("/runners/#{project_runner.id}", admin)
- expect(response).to have_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.to change { Ci::Runner.project_type.count }.by(-1)
end
end
@@ -496,7 +496,7 @@ describe API::Runners do
it 'returns 404 if runner does not exists' do
delete api('/runners/0', admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -504,40 +504,40 @@ describe API::Runners do
context 'when runner is shared' do
it 'does not delete runner' do
delete api("/runners/#{shared_runner.id}", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'when runner is not shared' do
it 'does not delete runner without access to it' do
delete api("/runners/#{project_runner.id}", user2)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'does not delete project runner with more than one associated project' do
delete api("/runners/#{two_projects_runner.id}", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'deletes project runner for one owned project' do
expect do
delete api("/runners/#{project_runner.id}", user)
- expect(response).to have_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.to change { Ci::Runner.project_type.count }.by(-1)
end
it 'does not delete group runner with maintainer access' do
delete api("/runners/#{group_runner.id}", group_maintainer)
- expect(response).to have_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'deletes group runner with owner access' do
expect do
delete api("/runners/#{group_runner.id}", user)
- expect(response).to have_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.to change { Ci::Runner.group_type.count }.by(-1)
end
@@ -551,7 +551,7 @@ describe API::Runners do
it 'does not delete project runner' do
delete api("/runners/#{project_runner.id}")
- expect(response).to have_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -569,7 +569,7 @@ describe API::Runners do
it 'return jobs' do
get api("/runners/#{shared_runner.id}/jobs", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an(Array)
@@ -581,7 +581,7 @@ describe API::Runners do
it 'return jobs' do
get api("/runners/#{project_runner.id}/jobs", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an(Array)
@@ -593,7 +593,7 @@ describe API::Runners do
it 'return filtered jobs' do
get api("/runners/#{project_runner.id}/jobs?status=failed", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an(Array)
@@ -607,7 +607,7 @@ describe API::Runners do
it 'return jobs in descending order' do
get api("/runners/#{project_runner.id}/jobs?order_by=id", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an(Array)
@@ -620,7 +620,7 @@ describe API::Runners do
it 'return jobs sorted in ascending order' do
get api("/runners/#{project_runner.id}/jobs?order_by=id&sort=asc", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an(Array)
@@ -634,7 +634,7 @@ describe API::Runners do
it 'return 400' do
get api("/runners/#{project_runner.id}/jobs?status=non-existing", admin)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -642,7 +642,7 @@ describe API::Runners do
it 'return 400' do
get api("/runners/#{project_runner.id}/jobs?order_by=non-existing", admin)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -650,7 +650,7 @@ describe API::Runners do
it 'return 400' do
get api("/runners/#{project_runner.id}/jobs?sort=non-existing", admin)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
@@ -659,7 +659,7 @@ describe API::Runners do
it 'returns 404' do
get api('/runners/0/jobs', admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -670,7 +670,7 @@ describe API::Runners do
it 'returns 403' do
get api("/runners/#{shared_runner.id}/jobs", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -678,7 +678,7 @@ describe API::Runners do
it 'return jobs' do
get api("/runners/#{project_runner.id}/jobs", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an(Array)
@@ -690,7 +690,7 @@ describe API::Runners do
it 'return filtered jobs' do
get api("/runners/#{project_runner.id}/jobs?status=failed", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an(Array)
@@ -703,7 +703,7 @@ describe API::Runners do
it 'return 400' do
get api("/runners/#{project_runner.id}/jobs?status=non-existing", user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
@@ -712,7 +712,7 @@ describe API::Runners do
it 'returns 404' do
get api('/runners/0/jobs', user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -721,7 +721,7 @@ describe API::Runners do
it 'does not return jobs' do
get api("/runners/#{project_runner.id}/jobs", user2)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -729,7 +729,7 @@ describe API::Runners do
it 'does not return jobs' do
get api("/runners/#{project_runner.id}/jobs")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -739,7 +739,7 @@ describe API::Runners do
it 'returns response status and headers' do
get api('/runners/all', admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
end
@@ -756,7 +756,7 @@ describe API::Runners do
it 'filters runners by scope' do
get api("/projects/#{project.id}/runners?scope=specific", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to match_array [
@@ -767,7 +767,7 @@ describe API::Runners do
it 'avoids filtering if scope is invalid' do
get api("/projects/#{project.id}/runners?scope=unknown", user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'filters runners by type' do
@@ -782,7 +782,7 @@ describe API::Runners do
it 'does not filter by invalid type' do
get api("/projects/#{project.id}/runners?type=bogus", user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'filters runners by status' do
@@ -798,7 +798,7 @@ describe API::Runners do
it 'does not filter by invalid status' do
get api("/projects/#{project.id}/runners?status=bogus", user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'filters runners by tag_list' do
@@ -817,7 +817,7 @@ describe API::Runners do
it "does not return project's runners" do
get api("/projects/#{project.id}/runners", user2)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -825,7 +825,7 @@ describe API::Runners do
it "does not return project's runners" do
get api("/projects/#{project.id}/runners")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -838,14 +838,14 @@ describe API::Runners do
expect do
post api("/projects/#{project.id}/runners", user), params: { runner_id: project_runner2.id }
end.to change { project.runners.count }.by(+1)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
it 'avoids changes when enabling already enabled runner' do
expect do
post api("/projects/#{project.id}/runners", user), params: { runner_id: project_runner.id }
end.to change { project.runners.count }.by(0)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'does not enable locked runner' do
@@ -855,19 +855,19 @@ describe API::Runners do
post api("/projects/#{project.id}/runners", user), params: { runner_id: project_runner2.id }
end.to change { project.runners.count }.by(0)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'does not enable shared runner' do
post api("/projects/#{project.id}/runners", user), params: { runner_id: shared_runner.id }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'does not enable group runner' do
post api("/projects/#{project.id}/runners", user), params: { runner_id: group_runner.id }
- expect(response).to have_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
context 'user is admin' do
@@ -878,7 +878,7 @@ describe API::Runners do
expect do
post api("/projects/#{project.id}/runners", admin), params: { runner_id: new_project_runner.id }
end.to change { project.runners.count }.by(+1)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
end
@@ -888,14 +888,14 @@ describe API::Runners do
end.to change { project.runners.count }.by(1)
expect(shared_runner.reload).not_to be_instance_type
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
end
it 'raises an error when no runner_id param is provided' do
post api("/projects/#{project.id}/runners", admin)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -905,7 +905,7 @@ describe API::Runners do
it 'does not enable runner without access to' do
post api("/projects/#{project.id}/runners", user), params: { runner_id: new_project_runner.id }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -913,7 +913,7 @@ describe API::Runners do
it 'does not enable runner' do
post api("/projects/#{project.id}/runners", user2)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -921,7 +921,7 @@ describe API::Runners do
it 'does not enable runner' do
post api("/projects/#{project.id}/runners")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -933,7 +933,7 @@ describe API::Runners do
expect do
delete api("/projects/#{project.id}/runners/#{two_projects_runner.id}", user)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.to change { project.runners.count }.by(-1)
end
@@ -947,14 +947,14 @@ describe API::Runners do
expect do
delete api("/projects/#{project.id}/runners/#{project_runner.id}", user)
end.to change { project.runners.count }.by(0)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
it 'returns 404 is runner is not found' do
delete api("/projects/#{project.id}/runners/0", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -962,7 +962,7 @@ describe API::Runners do
it "does not disable project's runner" do
delete api("/projects/#{project.id}/runners/#{project_runner.id}", user2)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -970,7 +970,7 @@ describe API::Runners do
it "does not disable project's runner" do
delete api("/projects/#{project.id}/runners/#{project_runner.id}")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
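Note (illustrative only, not part of this commit): several hunks above also swap have_http_status for have_gitlab_http_status. As a rough sketch only, under the assumption that the real matcher lives in spec/support and differs in detail, a wrapper matcher of this kind can accept either symbols or integers and surface the response body in its failure message; the name have_example_http_status below is made up for illustration:

# Hedged sketch of a wrapper matcher in the spirit of have_gitlab_http_status.
require 'rack/utils'

RSpec::Matchers.define :have_example_http_status do |expected|
  match do |response|
    code = expected.is_a?(Symbol) ? Rack::Utils::SYMBOL_TO_STATUS_CODE.fetch(expected) : expected
    response.status == code
  end

  failure_message do |response|
    "expected HTTP #{expected.inspect}, got #{response.status}: #{response.body.to_s[0, 200]}"
  end
end

# Usage in a spec (illustrative):
#   expect(response).to have_example_http_status(:forbidden)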
diff --git a/spec/requests/api/search_spec.rb b/spec/requests/api/search_spec.rb
index 04794b2ba58..6ff5fbd7925 100644
--- a/spec/requests/api/search_spec.rb
+++ b/spec/requests/api/search_spec.rb
@@ -9,7 +9,7 @@ describe API::Search do
let_it_be(:repo_project) { create(:project, :public, :repository, group: group) }
shared_examples 'response is correct' do |schema:, size: 1|
- it { expect(response).to have_gitlab_http_status(200) }
+ it { expect(response).to have_gitlab_http_status(:ok) }
it { expect(response).to match_response_schema(schema) }
it { expect(response).to include_limited_pagination_headers }
it { expect(json_response.size).to eq(size) }
@@ -20,7 +20,7 @@ describe API::Search do
it 'returns 401 error' do
get api('/search'), params: { scope: 'projects', search: 'awesome' }
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -28,7 +28,7 @@ describe API::Search do
it 'returns 400 error' do
get api('/search', user), params: { scope: 'unsupported', search: 'awesome' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -36,7 +36,7 @@ describe API::Search do
it 'returns 400 error' do
get api('/search', user), params: { search: 'awesome' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -115,7 +115,7 @@ describe API::Search do
end
it 'returns 400 error' do
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
@@ -147,7 +147,7 @@ describe API::Search do
it 'returns 401 error' do
get api("/groups/#{group.id}/search"), params: { scope: 'projects', search: 'awesome' }
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -155,7 +155,7 @@ describe API::Search do
it 'returns 400 error' do
get api("/groups/#{group.id}/search", user), params: { scope: 'unsupported', search: 'awesome' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -163,7 +163,7 @@ describe API::Search do
it 'returns 400 error' do
get api("/groups/#{group.id}/search", user), params: { search: 'awesome' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -171,7 +171,7 @@ describe API::Search do
it 'returns 404 error' do
get api('/groups/0/search', user), params: { scope: 'issues', search: 'awesome' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -181,7 +181,7 @@ describe API::Search do
get api("/groups/#{private_group.id}/search", user), params: { scope: 'issues', search: 'awesome' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -254,7 +254,7 @@ describe API::Search do
end
it 'returns 400 error' do
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
@@ -277,7 +277,7 @@ describe API::Search do
it 'returns 401 error' do
get api("/projects/#{project.id}/search"), params: { scope: 'issues', search: 'awesome' }
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -285,7 +285,7 @@ describe API::Search do
it 'returns 400 error' do
get api("/projects/#{project.id}/search", user), params: { scope: 'unsupported', search: 'awesome' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -293,7 +293,7 @@ describe API::Search do
it 'returns 400 error' do
get api("/projects/#{project.id}/search", user), params: { search: 'awesome' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -301,7 +301,7 @@ describe API::Search do
it 'returns 404 error' do
get api('/projects/0/search', user), params: { scope: 'issues', search: 'awesome' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -311,7 +311,7 @@ describe API::Search do
get api("/projects/#{project.id}/search", user), params: { scope: 'issues', search: 'awesome' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -383,7 +383,7 @@ describe API::Search do
end
it 'returns 400 error' do
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
@@ -436,7 +436,7 @@ describe API::Search do
it 'by filename' do
get api("/projects/#{repo_project.id}/search", user), params: { scope: 'blobs', search: 'mon filename:PROCESS.md' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.size).to eq(2)
expect(json_response.first['path']).to eq('PROCESS.md')
expect(json_response.first['filename']).to eq('PROCESS.md')
@@ -445,21 +445,21 @@ describe API::Search do
it 'by path' do
get api("/projects/#{repo_project.id}/search", user), params: { scope: 'blobs', search: 'mon path:markdown' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.size).to eq(8)
end
it 'by extension' do
get api("/projects/#{repo_project.id}/search", user), params: { scope: 'blobs', search: 'mon extension:md' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.size).to eq(11)
end
it 'by ref' do
get api("/projects/#{repo_project.id}/search", user), params: { scope: 'blobs', search: 'This file is used in tests for ci_environments_status', ref: 'pages-deploy' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.size).to eq(1)
end
end
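Every hunk above (and in the files that follow) applies the same mechanical substitution: the numeric argument passed to have_gitlab_http_status is swapped for the matching Rack status symbol (the same names Rack::Utils uses, e.g. :ok for 200, :not_found for 404). A minimal before/after sketch of that shape, assuming a generic request spec — the endpoint here is illustrative and not taken from the diff:

    # before: bare integer status codes
    expect(response).to have_gitlab_http_status(200)
    expect(response).to have_gitlab_http_status(404)

    # after: symbolic statuses, matching the Rack::Utils names for the same codes
    expect(response).to have_gitlab_http_status(:ok)
    expect(response).to have_gitlab_http_status(:not_found)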
diff --git a/spec/requests/api/services_spec.rb b/spec/requests/api/services_spec.rb
index 323164f26f0..906ffce25bf 100644
--- a/spec/requests/api/services_spec.rb
+++ b/spec/requests/api/services_spec.rb
@@ -14,14 +14,14 @@ describe API::Services do
it 'returns authentication error when unauthenticated' do
get api("/projects/#{project.id}/services")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it "returns error when authenticated but user is not a project owner" do
project.add_developer(user2)
get api("/projects/#{project.id}/services", user2)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
context 'project with services' do
@@ -32,7 +32,7 @@ describe API::Services do
get api("/projects/#{project.id}/services", user)
aggregate_failures 'expect successful response with all active services' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.count).to eq(1)
expect(json_response.first['slug']).to eq('emails-on-push')
@@ -49,7 +49,7 @@ describe API::Services do
it "updates #{service} settings" do
put api("/projects/#{project.id}/services/#{dashed_service}", user), params: service_attrs
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
current_service = project.services.first
events = current_service.event_names.empty? ? ["foo"].freeze : current_service.event_names
@@ -61,7 +61,7 @@ describe API::Services do
put api("/projects/#{project.id}/services/#{dashed_service}?#{query_strings}", user), params: service_attrs
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['slug']).to eq(dashed_service)
events.each do |event|
next if event == "foo"
@@ -103,7 +103,7 @@ describe API::Services do
it "deletes #{service}" do
delete api("/projects/#{project.id}/services/#{dashed_service}", user)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
project.send(service_method).reload
expect(project.send(service_method).activated?).to be_falsey
end
@@ -117,13 +117,13 @@ describe API::Services do
it 'returns authentication error when unauthenticated' do
get api("/projects/#{project.id}/services/#{dashed_service}")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it "returns all properties of service #{service}" do
get api("/projects/#{project.id}/services/#{dashed_service}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['properties'].keys).to match_array(service_instance.api_field_names)
end
@@ -131,7 +131,7 @@ describe API::Services do
project.add_developer(user2)
get api("/projects/#{project.id}/services/#{dashed_service}", user2)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -144,7 +144,7 @@ describe API::Services do
it 'returns a not found message' do
post api("/projects/#{project.id}/services/idonotexist/trigger")
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response["error"]).to eq("404 Not Found")
end
end
@@ -163,7 +163,7 @@ describe API::Services do
it 'when the service is inactive' do
post api("/projects/#{project.id}/services/#{service_name}/trigger"), params: params
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -178,7 +178,7 @@ describe API::Services do
it 'returns status 200' do
post api("/projects/#{project.id}/services/#{service_name}/trigger"), params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -186,7 +186,7 @@ describe API::Services do
it 'returns a generic 404' do
post api("/projects/404/services/#{service_name}/trigger"), params: params
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response["message"]).to eq("404 Service Not Found")
end
end
@@ -206,7 +206,7 @@ describe API::Services do
it 'returns status 200' do
post api("/projects/#{project.id}/services/#{service_name}/trigger"), params: { token: 'token', text: 'help' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['response_type']).to eq("ephemeral")
end
end
@@ -228,7 +228,7 @@ describe API::Services do
it 'accepts a username for update' do
put api("/projects/#{project.id}/services/#{service_name}", user), params: params.merge(username: 'new_username')
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['properties']['username']).to eq('new_username')
end
end
@@ -253,14 +253,14 @@ describe API::Services do
it 'accepts branches_to_be_notified for update' do
put api("/projects/#{project.id}/services/#{service_name}", user), params: params.merge(branches_to_be_notified: 'all')
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['properties']['branches_to_be_notified']).to eq('all')
end
it 'accepts notify_only_broken_pipelines for update' do
put api("/projects/#{project.id}/services/#{service_name}", user), params: params.merge(notify_only_broken_pipelines: true)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['properties']['notify_only_broken_pipelines']).to eq(true)
end
end
diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb
index 1a6bd4e6c0d..4a8b8f70dff 100644
--- a/spec/requests/api/settings_spec.rb
+++ b/spec/requests/api/settings_spec.rb
@@ -11,7 +11,7 @@ describe API::Settings, 'Settings' do
it "returns application settings" do
get api("/application/settings", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Hash
expect(json_response['default_projects_limit']).to eq(42)
expect(json_response['password_authentication_enabled_for_web']).to be_truthy
@@ -91,7 +91,7 @@ describe API::Settings, 'Settings' do
snippet_size_limit: 5
}
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['default_ci_config_path']).to eq('debian/salsa-ci.yml')
expect(json_response['default_projects_limit']).to eq(3)
expect(json_response['default_project_creation']).to eq(::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS)
@@ -132,7 +132,7 @@ describe API::Settings, 'Settings' do
put api("/application/settings", admin),
params: { performance_bar_allowed_group_id: group.full_path }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['performance_bar_allowed_group_id']).to eq(group.id)
end
@@ -143,7 +143,7 @@ describe API::Settings, 'Settings' do
performance_bar_allowed_group_id: group.full_path
}
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['performance_bar_allowed_group_id']).to be_nil
end
@@ -151,7 +151,7 @@ describe API::Settings, 'Settings' do
put api("/application/settings", admin),
params: { allow_local_requests_from_hooks_and_services: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['allow_local_requests_from_hooks_and_services']).to eq(true)
end
@@ -173,7 +173,7 @@ describe API::Settings, 'Settings' do
it 'includes the attributes in the API' do
get api("/application/settings", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
attribute_names.each do |attribute|
expect(json_response.keys).to include(attribute)
end
@@ -182,7 +182,7 @@ describe API::Settings, 'Settings' do
it 'allows updating the settings' do
put api("/application/settings", admin), params: settings
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
settings.each do |attribute, value|
expect(ApplicationSetting.current.public_send(attribute)).to eq(value)
end
@@ -205,7 +205,7 @@ describe API::Settings, 'Settings' do
it "includes the attributes in the API" do
get api("/application/settings", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
attribute_names.each do |attribute|
expect(json_response.keys).to include(attribute)
end
@@ -214,7 +214,7 @@ describe API::Settings, 'Settings' do
it "allows updating the settings" do
put api("/application/settings", admin), params: settings
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
settings.each do |attribute, value|
expect(ApplicationSetting.current.public_send(attribute)).to eq(value)
end
@@ -224,7 +224,7 @@ describe API::Settings, 'Settings' do
it "returns a blank parameter error message" do
put api("/application/settings", admin), params: { snowplow_enabled: true }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response["error"]).to eq("snowplow_collector_hostname is missing")
end
@@ -233,7 +233,7 @@ describe API::Settings, 'Settings' do
snowplow_collector_hostname: nil
})
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
message = json_response["message"]
expect(message["snowplow_collector_hostname"]).to include("can't be blank")
end
@@ -257,7 +257,7 @@ describe API::Settings, 'Settings' do
it 'includes attributes in the API' do
get api("/application/settings", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
exposed_attributes.each do |attribute|
expect(json_response.keys).to include(attribute)
end
@@ -266,7 +266,7 @@ describe API::Settings, 'Settings' do
it 'does not include sensitive attributes in the API' do
get api("/application/settings", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
sensitive_attributes.each do |attribute|
expect(json_response.keys).not_to include(attribute)
end
@@ -275,7 +275,7 @@ describe API::Settings, 'Settings' do
it 'allows updating the settings' do
put api("/application/settings", admin), params: settings
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
settings.each do |attribute, value|
expect(ApplicationSetting.current.public_send(attribute)).to eq(value)
end
@@ -287,7 +287,7 @@ describe API::Settings, 'Settings' do
it 'does not update the settings' do
put api("/application/settings", admin), params: settings
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to include('eks_account_id is missing')
expect(json_response['error']).to include('eks_access_key_id is missing')
expect(json_response['error']).to include('eks_secret_access_key is missing')
@@ -299,7 +299,7 @@ describe API::Settings, 'Settings' do
it "returns a blank parameter error message" do
put api("/application/settings", admin), params: { plantuml_enabled: true }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('plantuml_url is missing')
end
end
@@ -314,7 +314,7 @@ describe API::Settings, 'Settings' do
asset_proxy_whitelist: ['example.com', '*.example.com']
}
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['asset_proxy_enabled']).to be(true)
expect(json_response['asset_proxy_url']).to eq('http://assets.example.com')
expect(json_response['asset_proxy_secret_key']).to be_nil
@@ -327,7 +327,7 @@ describe API::Settings, 'Settings' do
asset_proxy_whitelist: 'example.com, *.example.com'
}
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['asset_proxy_whitelist']).to eq(['example.com', '*.example.com', 'localhost'])
end
end
@@ -340,7 +340,7 @@ describe API::Settings, 'Settings' do
domain_blacklist: []
}
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
message = json_response["message"]
expect(message["domain_blacklist"]).to eq(["Domain blacklist cannot be empty if Blacklist is enabled."])
end
@@ -352,7 +352,7 @@ describe API::Settings, 'Settings' do
domain_blacklist: ['domain1.com', 'domain2.com']
}
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['domain_blacklist_enabled']).to be(true)
expect(json_response['domain_blacklist']).to eq(['domain1.com', 'domain2.com'])
end
@@ -364,7 +364,7 @@ describe API::Settings, 'Settings' do
domain_blacklist: 'domain3.com, *.domain4.com'
}
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['domain_blacklist_enabled']).to be(true)
expect(json_response['domain_blacklist']).to eq(['domain3.com', '*.domain4.com'])
end
@@ -374,7 +374,7 @@ describe API::Settings, 'Settings' do
it "returns a blank parameter error message" do
put api("/application/settings", admin), params: { sourcegraph_enabled: true }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('sourcegraph_url is missing')
end
end
diff --git a/spec/requests/api/sidekiq_metrics_spec.rb b/spec/requests/api/sidekiq_metrics_spec.rb
index 438b1475c54..705ae29d5d8 100644
--- a/spec/requests/api/sidekiq_metrics_spec.rb
+++ b/spec/requests/api/sidekiq_metrics_spec.rb
@@ -9,21 +9,21 @@ describe API::SidekiqMetrics do
it 'defines the `queue_metrics` endpoint' do
get api('/sidekiq/queue_metrics', admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_a Hash
end
it 'defines the `process_metrics` endpoint' do
get api('/sidekiq/process_metrics', admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['processes']).to be_an Array
end
it 'defines the `job_stats` endpoint' do
get api('/sidekiq/job_stats', admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_a Hash
expect(json_response['jobs']).to be_a Hash
expect(json_response['jobs'].keys)
@@ -34,7 +34,7 @@ describe API::SidekiqMetrics do
it 'defines the `compound_metrics` endpoint' do
get api('/sidekiq/compound_metrics', admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_a Hash
expect(json_response['queues']).to be_a Hash
expect(json_response['processes']).to be_an Array
diff --git a/spec/requests/api/snippets_spec.rb b/spec/requests/api/snippets_spec.rb
index 21565265b99..627611c10ce 100644
--- a/spec/requests/api/snippets_spec.rb
+++ b/spec/requests/api/snippets_spec.rb
@@ -13,7 +13,7 @@ describe API::Snippets do
get api("/snippets/", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |snippet| snippet['id']} ).to contain_exactly(
@@ -30,7 +30,7 @@ describe API::Snippets do
get api("/snippets/", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(0)
@@ -41,7 +41,7 @@ describe API::Snippets do
get api("/snippets/")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it 'does not return snippets related to a project with disable feature visibility' do
@@ -73,7 +73,7 @@ describe API::Snippets do
it 'returns all snippets with public visibility from all users' do
get api("/snippets/public", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |snippet| snippet['id']} ).to contain_exactly(
@@ -95,13 +95,13 @@ describe API::Snippets do
it 'requires authentication' do
get api("/snippets/#{snippet.id}", nil)
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it 'returns raw text' do
get api("/snippets/#{snippet.id}/raw", author)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type).to eq 'text/plain'
expect(response.body).to eq(snippet.content)
end
@@ -117,14 +117,14 @@ describe API::Snippets do
get api("/snippets/#{snippet.id}/raw", author)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Snippet Not Found')
end
it 'hides private snippets from ordinary users' do
get api("/snippets/#{snippet.id}/raw", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'shows internal snippets to ordinary users' do
@@ -132,7 +132,7 @@ describe API::Snippets do
get api("/snippets/#{internal_snippet.id}/raw", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -145,13 +145,13 @@ describe API::Snippets do
it 'requires authentication' do
get api("/snippets/#{private_snippet.id}", nil)
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it 'returns snippet json' do
get api("/snippets/#{private_snippet.id}", author)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['title']).to eq(private_snippet.title)
expect(json_response['description']).to eq(private_snippet.description)
@@ -162,19 +162,19 @@ describe API::Snippets do
it 'shows private snippets to an admin' do
get api("/snippets/#{private_snippet.id}", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'hides private snippets from an ordinary user' do
get api("/snippets/#{private_snippet.id}", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'shows internal snippets to an ordinary user' do
get api("/snippets/#{internal_snippet.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'returns 404 for invalid snippet id' do
@@ -182,7 +182,7 @@ describe API::Snippets do
get api("/snippets/#{private_snippet.id}", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Snippet Not Found')
end
end
@@ -199,17 +199,47 @@ describe API::Snippets do
end
shared_examples 'snippet creation' do
+ let(:snippet) { Snippet.find(json_response["id"]) }
+
+ subject { post api("/snippets/", user), params: params }
+
it 'creates a new snippet' do
expect do
- post api("/snippets/", user), params: params
+ subject
end.to change { PersonalSnippet.count }.by(1)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['title']).to eq(params[:title])
expect(json_response['description']).to eq(params[:description])
expect(json_response['file_name']).to eq(params[:file_name])
expect(json_response['visibility']).to eq(params[:visibility])
end
+
+ it 'creates repository' do
+ subject
+
+ expect(snippet.repository.exists?).to be_truthy
+ end
+
+ it 'commits the files to the repository' do
+ subject
+
+ blob = snippet.repository.blob_at('master', params[:file_name])
+
+ expect(blob.data).to eq params[:content]
+ end
+
+ context 'when feature flag :version_snippets is disabled' do
+ it 'does not create snippet repository' do
+ stub_feature_flags(version_snippets: false)
+
+ expect do
+ subject
+ end.to change { PersonalSnippet.count }.by(1)
+
+ expect(snippet.repository_exists?).to be_falsey
+ end
+ end
end
context 'with restricted visibility settings' do
@@ -229,7 +259,7 @@ describe API::Snippets do
post api("/snippets/", user), params: params
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
context 'when the snippet is spam' do
@@ -255,7 +285,7 @@ describe API::Snippets do
expect { create_snippet(visibility: 'public') }
.not_to change { Snippet.count }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq({ "error" => "Spam detected" })
end
@@ -271,7 +301,7 @@ describe API::Snippets do
let(:visibility_level) { Snippet::PUBLIC }
let(:other_user) { create(:user) }
let(:snippet) do
- create(:personal_snippet, author: user, visibility_level: visibility_level)
+ create(:personal_snippet, :repository, author: user, visibility_level: visibility_level)
end
shared_examples 'snippet updates' do
@@ -279,9 +309,9 @@ describe API::Snippets do
new_content = 'New content'
new_description = 'New description'
- put api("/snippets/#{snippet.id}", user), params: { content: new_content, description: new_description, visibility: 'internal' }
+ update_snippet(params: { content: new_content, description: new_description, visibility: 'internal' })
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
snippet.reload
expect(snippet.content).to eq(new_content)
expect(snippet.description).to eq(new_description)
@@ -302,30 +332,30 @@ describe API::Snippets do
it_behaves_like 'snippet updates'
it 'returns 404 for invalid snippet id' do
- put api("/snippets/1234", user), params: { title: 'foo' }
+ update_snippet(snippet_id: '1234', params: { title: 'Foo' })
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Snippet Not Found')
end
it "returns 404 for another user's snippet" do
- put api("/snippets/#{snippet.id}", other_user), params: { title: 'fubar' }
+ update_snippet(requester: other_user, params: { title: 'foobar' })
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Snippet Not Found')
end
it 'returns 400 for missing parameters' do
- put api("/snippets/1234", user)
+ update_snippet
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
- context 'when the snippet is spam' do
- def update_snippet(snippet_params = {})
- put api("/snippets/#{snippet.id}", user), params: snippet_params
- end
+ it_behaves_like 'update with repository actions' do
+ let(:snippet_without_repo) { create(:personal_snippet, author: user, visibility_level: visibility_level) }
+ end
+ context 'when the snippet is spam' do
before do
allow_next_instance_of(Spam::AkismetService) do |instance|
allow(instance).to receive(:spam?).and_return(true)
@@ -336,7 +366,7 @@ describe API::Snippets do
let(:visibility_level) { Snippet::PRIVATE }
it 'updates the snippet' do
- expect { update_snippet(title: 'Foo') }
+ expect { update_snippet(params: { title: 'Foo' }) }
.to change { snippet.reload.title }.to('Foo')
end
end
@@ -345,15 +375,15 @@ describe API::Snippets do
let(:visibility_level) { Snippet::PUBLIC }
it 'rejects the snippet' do
- expect { update_snippet(title: 'Foo') }
+ expect { update_snippet(params: { title: 'Foo' }) }
.not_to change { snippet.reload.title }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq({ "error" => "Spam detected" })
end
it 'creates a spam log' do
- expect { update_snippet(title: 'Foo') }.to log_spam(title: 'Foo', user_id: user.id, noteable_type: 'PersonalSnippet')
+ expect { update_snippet(params: { title: 'Foo' }) }.to log_spam(title: 'Foo', user_id: user.id, noteable_type: 'PersonalSnippet')
end
end
@@ -361,16 +391,20 @@ describe API::Snippets do
let(:visibility_level) { Snippet::PRIVATE }
it 'rejects the snippet' do
- expect { update_snippet(title: 'Foo', visibility: 'public') }
+ expect { update_snippet(params: { title: 'Foo', visibility: 'public' }) }
.not_to change { snippet.reload.title }
end
it 'creates a spam log' do
- expect { update_snippet(title: 'Foo', visibility: 'public') }
+ expect { update_snippet(params: { title: 'Foo', visibility: 'public' }) }
.to log_spam(title: 'Foo', user_id: user.id, noteable_type: 'PersonalSnippet')
end
end
end
+
+ def update_snippet(snippet_id: snippet.id, params: {}, requester: user)
+ put api("/snippets/#{snippet_id}", requester), params: params
+ end
end
describe 'DELETE /snippets/:id' do
@@ -380,14 +414,14 @@ describe API::Snippets do
expect do
delete api("/snippets/#{public_snippet.id}", user)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.to change { PersonalSnippet.count }.by(-1)
end
it 'returns 404 for invalid snippet id' do
delete api("/snippets/1234", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Snippet Not Found')
end
@@ -404,7 +438,7 @@ describe API::Snippets do
it 'exposes known attributes' do
get api("/snippets/#{snippet.id}/user_agent_detail", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['user_agent']).to eq(user_agent_detail.user_agent)
expect(json_response['ip_address']).to eq(user_agent_detail.ip_address)
expect(json_response['akismet_submitted']).to eq(user_agent_detail.submitted)
@@ -413,7 +447,7 @@ describe API::Snippets do
it "returns unauthorized for non-admin users" do
get api("/snippets/#{snippet.id}/user_agent_detail", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
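Beyond the status-symbol substitution, the snippets spec above also folds the repeated PUT calls into a single update_snippet helper with keyword arguments, so each example overrides only the part it is exercising. A short sketch of that shape, reusing the names from the diff (snippet, user and other_user are let definitions assumed from the surrounding spec):

    def update_snippet(snippet_id: snippet.id, params: {}, requester: user)
      put api("/snippets/#{snippet_id}", requester), params: params
    end

    # callers override only what differs from the defaults
    update_snippet(params: { title: 'Foo' })                            # happy path
    update_snippet(snippet_id: '1234', params: { title: 'Foo' })        # unknown id -> 404
    update_snippet(requester: other_user, params: { title: 'foobar' })  # another user's snippet -> 404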
diff --git a/spec/requests/api/statistics_spec.rb b/spec/requests/api/statistics_spec.rb
index 91fc4d4c123..f03c1e9ca64 100644
--- a/spec/requests/api/statistics_spec.rb
+++ b/spec/requests/api/statistics_spec.rb
@@ -25,7 +25,7 @@ describe API::Statistics, 'Statistics' do
it "returns authentication error" do
get api(path, nil)
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -35,7 +35,7 @@ describe API::Statistics, 'Statistics' do
it "returns forbidden error" do
get api(path, user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -45,7 +45,7 @@ describe API::Statistics, 'Statistics' do
it 'matches the response schema' do
get api(path, admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('statistics')
end
diff --git a/spec/requests/api/submodules_spec.rb b/spec/requests/api/submodules_spec.rb
index 064392fb185..2604dc18005 100644
--- a/spec/requests/api/submodules_spec.rb
+++ b/spec/requests/api/submodules_spec.rb
@@ -33,7 +33,7 @@ describe API::Submodules do
it 'returns 401' do
put api(route(submodule)), params: params
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -41,7 +41,7 @@ describe API::Submodules do
it 'returns 403' do
put api(route(submodule), guest), params: params
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -49,19 +49,19 @@ describe API::Submodules do
it 'returns 400 if params is missing' do
put api(route(submodule), user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns 400 if branch is missing' do
put api(route(submodule), user), params: params.except(:branch)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns 400 if commit_sha is missing' do
put api(route(submodule), user), params: params.except(:commit_sha)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns the commit' do
@@ -69,7 +69,7 @@ describe API::Submodules do
put api(route(submodule), user), params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['message']).to eq commit_message
expect(json_response['author_name']).to eq user.name
expect(json_response['committer_name']).to eq user.name
@@ -89,7 +89,7 @@ describe API::Submodules do
put api(route(encoded_submodule), user), params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to eq project.repository.commit(branch).id
expect(project.repository.blob_at(branch, submodule).id).to eq commit_sha
end
diff --git a/spec/requests/api/suggestions_spec.rb b/spec/requests/api/suggestions_spec.rb
index 5b07e598b8d..df3f72e3447 100644
--- a/spec/requests/api/suggestions_spec.rb
+++ b/spec/requests/api/suggestions_spec.rb
@@ -40,7 +40,7 @@ describe API::Suggestions do
put api(url, user), params: { id: suggestion.id }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response)
.to include('id', 'from_line', 'to_line', 'appliable', 'applied',
'from_content', 'to_content')
@@ -57,7 +57,7 @@ describe API::Suggestions do
put api(url, user), params: { id: suggestion.id }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response).to eq({ 'message' => 'Suggestion is not appliable' })
end
end
@@ -74,7 +74,7 @@ describe API::Suggestions do
put api(url, user), params: { id: suggestion.id }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response).to eq({ 'message' => '403 Forbidden' })
end
end
diff --git a/spec/requests/api/system_hooks_spec.rb b/spec/requests/api/system_hooks_spec.rb
index 79790b1e999..50015d2e2c3 100644
--- a/spec/requests/api/system_hooks_spec.rb
+++ b/spec/requests/api/system_hooks_spec.rb
@@ -18,7 +18,7 @@ describe API::SystemHooks do
it "returns authentication error" do
get api("/hooks")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -26,7 +26,7 @@ describe API::SystemHooks do
it "returns forbidden error" do
get api("/hooks", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -34,7 +34,7 @@ describe API::SystemHooks do
it "returns an array of hooks" do
get api("/hooks", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first['url']).to eq(hook.url)
@@ -56,13 +56,13 @@ describe API::SystemHooks do
it "responds with 400 if url not given" do
post api("/hooks", admin)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it "responds with 400 if url is invalid" do
post api("/hooks", admin), params: { url: 'hp://mep.mep' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it "does not create new hook without url" do
@@ -76,7 +76,7 @@ describe API::SystemHooks do
post api('/hooks', admin), params: { url: 'http://mep.mep' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['enable_ssl_verification']).to be true
expect(json_response['push_events']).to be false
expect(json_response['tag_push_events']).to be false
@@ -95,7 +95,7 @@ describe API::SystemHooks do
merge_requests_events: true
}
- expect(response).to have_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['enable_ssl_verification']).to be false
expect(json_response['push_events']).to be true
expect(json_response['tag_push_events']).to be true
@@ -106,13 +106,13 @@ describe API::SystemHooks do
describe "GET /hooks/:id" do
it "returns hook by id" do
get api("/hooks/#{hook.id}", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['event_name']).to eq('project_create')
end
it "returns 404 on failure" do
get api("/hooks/404", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -121,14 +121,14 @@ describe API::SystemHooks do
expect do
delete api("/hooks/#{hook.id}", admin)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.to change { SystemHook.count }.by(-1)
end
it 'returns 404 if the system hook does not exist' do
delete api('/hooks/12345', admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it_behaves_like '412 response' do
diff --git a/spec/requests/api/tags_spec.rb b/spec/requests/api/tags_spec.rb
index 09e63b86cfc..694802ce1b8 100644
--- a/spec/requests/api/tags_spec.rb
+++ b/spec/requests/api/tags_spec.rb
@@ -61,7 +61,7 @@ describe API::Tags do
it 'only returns searched tags' do
get api("#{route}", user), params: { search: 'v1.1.0' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(1)
@@ -73,7 +73,7 @@ describe API::Tags do
it 'returns the repository tags' do
get api(route, current_user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/tags')
expect(response).to include_pagination_headers
expect(json_response.map { |r| r['name'] }).to include(tag_name)
@@ -133,7 +133,7 @@ describe API::Tags do
it 'returns an array of project tags with release info' do
get api(route, user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/tags')
expect(response).to include_pagination_headers
@@ -151,7 +151,7 @@ describe API::Tags do
it 'returns the repository branch' do
get api(route, current_user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/tag')
expect(json_response['name']).to eq(tag_name)
end
@@ -214,7 +214,7 @@ describe API::Tags do
it 'creates a new tag' do
post api(route, current_user), params: { tag_name: tag_name, ref: 'master' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(response).to match_response_schema('public_api/v4/tag')
expect(json_response['name']).to eq(tag_name)
end
@@ -269,26 +269,26 @@ describe API::Tags do
it 'returns 400 if tag name is invalid' do
post api(route, current_user), params: { tag_name: 'new design', ref: 'master' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq('Tag name invalid')
end
it 'returns 400 if tag already exists' do
post api(route, current_user), params: { tag_name: 'new_design1', ref: 'master' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(response).to match_response_schema('public_api/v4/tag')
post api(route, current_user), params: { tag_name: 'new_design1', ref: 'master' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq('Tag new_design1 already exists')
end
it 'returns 400 if ref name is invalid' do
post api(route, current_user), params: { tag_name: 'new_design3', ref: 'foo' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq('Target foo is invalid')
end
@@ -296,7 +296,7 @@ describe API::Tags do
it 'creates a new tag' do
post api(route, current_user), params: { tag_name: tag_name, ref: 'master', release_description: 'Wow' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(response).to match_response_schema('public_api/v4/tag')
expect(json_response['name']).to eq(tag_name)
expect(json_response['release']['description']).to eq('Wow')
@@ -315,7 +315,7 @@ describe API::Tags do
post api(route, current_user), params: { tag_name: 'v7.1.0', ref: 'master', message: 'Release 7.1.0' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(response).to match_response_schema('public_api/v4/tag')
expect(json_response['name']).to eq('v7.1.0')
expect(json_response['message']).to eq('Release 7.1.0')
@@ -337,7 +337,7 @@ describe API::Tags do
it 'deletes a tag' do
delete api(route, current_user)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
it_behaves_like '412 response' do
@@ -383,7 +383,7 @@ describe API::Tags do
it 'creates description for existing git tag' do
post api(route, user), params: { description: description }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(response).to match_response_schema('public_api/v4/release/tag_release')
expect(json_response['tag_name']).to eq(tag_name)
expect(json_response['description']).to eq(description)
@@ -424,7 +424,7 @@ describe API::Tags do
it 'returns 409 if there is already a release' do
post api(route, user), params: { description: description }
- expect(response).to have_gitlab_http_status(409)
+ expect(response).to have_gitlab_http_status(:conflict)
expect(json_response['message']).to eq('Release already exists')
end
end
@@ -449,7 +449,7 @@ describe API::Tags do
it 'updates the release description' do
put api(route, current_user), params: { description: new_description }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['tag_name']).to eq(tag_name)
expect(json_response['description']).to eq(new_description)
end
diff --git a/spec/requests/api/task_completion_status_spec.rb b/spec/requests/api/task_completion_status_spec.rb
index a2891e1d983..4dd1e27bd4b 100644
--- a/spec/requests/api/task_completion_status_spec.rb
+++ b/spec/requests/api/task_completion_status_spec.rb
@@ -57,7 +57,7 @@ describe 'task completion status response' do
get api("#{path}?iids[]=#{taskable.iid}", user)
end
- it { expect(response).to have_gitlab_http_status(200) }
+ it { expect(response).to have_gitlab_http_status(:ok) }
it 'returns the expected results' do
expect(json_response).to be_an Array
diff --git a/spec/requests/api/templates_spec.rb b/spec/requests/api/templates_spec.rb
index b6ba417d892..fae338b4ca3 100644
--- a/spec/requests/api/templates_spec.rb
+++ b/spec/requests/api/templates_spec.rb
@@ -25,7 +25,7 @@ describe API::Templates do
it 'returns a list of available gitignore templates' do
get api('/templates/gitignores')
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to be > 15
@@ -36,7 +36,7 @@ describe API::Templates do
it 'returns a list of available gitlab_ci_ymls' do
get api('/templates/gitlab_ci_ymls')
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first['name']).not_to be_nil
@@ -47,7 +47,7 @@ describe API::Templates do
it 'adds a disclaimer on the top' do
get api('/templates/gitlab_ci_ymls/Ruby')
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['content']).to start_with("# This file is a template,")
end
end
@@ -58,7 +58,7 @@ describe API::Templates do
end
it 'returns a license template' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['key']).to eq('mit')
expect(json_response['name']).to eq('MIT License')
@@ -78,7 +78,7 @@ describe API::Templates do
it 'returns a list of available license templates' do
get api('/templates/licenses')
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(12)
@@ -90,7 +90,7 @@ describe API::Templates do
it 'returns a list of available popular license templates' do
get api('/templates/licenses?popular=1')
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(3)
@@ -173,7 +173,7 @@ describe API::Templates do
let(:license_type) { 'muth-over9000' }
it 'returns a 404' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -185,7 +185,7 @@ describe API::Templates do
it 'replaces the copyright owner placeholder with the name of the current user' do
get api('/templates/licenses/mit', user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['content']).to include("Copyright (c) #{Time.now.year} #{user.name}")
end
end
diff --git a/spec/requests/api/todos_spec.rb b/spec/requests/api/todos_spec.rb
index 4121a0f3f3a..7a7712c2f5d 100644
--- a/spec/requests/api/todos_spec.rb
+++ b/spec/requests/api/todos_spec.rb
@@ -166,7 +166,7 @@ describe API::Todos do
it 'returns authentication error' do
post api("/todos/#{pending_1.id}/mark_as_done")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -174,7 +174,7 @@ describe API::Todos do
it 'marks a todo as done' do
post api("/todos/#{pending_1.id}/mark_as_done", john_doe)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['id']).to eq(pending_1.id)
expect(json_response['state']).to eq('done')
expect(pending_1.reload).to be_done
@@ -199,7 +199,7 @@ describe API::Todos do
it 'returns authentication error' do
post api('/todos/mark_as_done')
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -207,7 +207,7 @@ describe API::Todos do
it 'marks all todos as done' do
post api('/todos/mark_as_done', john_doe)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
expect(pending_1.reload).to be_done
expect(pending_2.reload).to be_done
expect(pending_3.reload).to be_done
@@ -258,9 +258,9 @@ describe API::Todos do
post api("/projects/#{project_1.id}/#{issuable_type}/#{issuable.iid}/todo", guest)
if issuable_type == 'merge_requests'
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
else
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/requests/api/triggers_spec.rb b/spec/requests/api/triggers_spec.rb
index 1042e4e970d..bcc1c6bc4d4 100644
--- a/spec/requests/api/triggers_spec.rb
+++ b/spec/requests/api/triggers_spec.rb
@@ -31,13 +31,13 @@ describe API::Triggers do
it 'returns bad request if token is missing' do
post api("/projects/#{project.id}/trigger/pipeline"), params: { ref: 'master' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns not found if project is not found' do
post api('/projects/0/trigger/pipeline'), params: options.merge(ref: 'master')
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -47,7 +47,7 @@ describe API::Triggers do
it 'creates pipeline' do
post api("/projects/#{project.id}/trigger/pipeline"), params: options.merge(ref: 'master')
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response).to include('id' => pipeline.id)
pipeline.builds.reload
expect(pipeline.builds.pending.size).to eq(2)
@@ -57,7 +57,7 @@ describe API::Triggers do
it 'returns bad request with no pipeline created if there\'s no commit for that ref' do
post api("/projects/#{project.id}/trigger/pipeline"), params: options.merge(ref: 'other-branch')
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq('base' => ["Reference not found"])
end
@@ -69,21 +69,21 @@ describe API::Triggers do
it 'validates variables to be a hash' do
post api("/projects/#{project.id}/trigger/pipeline"), params: options.merge(variables: 'value', ref: 'master')
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('variables is invalid')
end
it 'validates variables needs to be a map of key-valued strings' do
post api("/projects/#{project.id}/trigger/pipeline"), params: options.merge(variables: { key: %w(1 2) }, ref: 'master')
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq('variables needs to be a map of key-valued strings')
end
it 'creates trigger request with variables' do
post api("/projects/#{project.id}/trigger/pipeline"), params: options.merge(variables: variables, ref: 'master')
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(pipeline.variables.map { |v| { v.key => v.value } }.last).to eq(variables)
end
end
@@ -93,7 +93,7 @@ describe API::Triggers do
it 'does not leak the presence of project when token is for different project' do
post api("/projects/#{project2.id}/ref/master/trigger/pipeline?token=#{trigger_token}"), params: { ref: 'refs/heads/other-branch' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'creates builds from the ref given in the URL, not in the body' do
@@ -101,7 +101,7 @@ describe API::Triggers do
post api("/projects/#{project.id}/ref/master/trigger/pipeline?token=#{trigger_token}"), params: { ref: 'refs/heads/other-branch' }
end.to change(project.builds, :count).by(5)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
context 'when ref contains a dot' do
@@ -112,7 +112,7 @@ describe API::Triggers do
post api("/projects/#{project.id}/ref/v.1-branch/trigger/pipeline?token=#{trigger_token}"), params: { ref: 'refs/heads/other-branch' }
end.to change(project.builds, :count).by(4)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
end
end
@@ -135,7 +135,7 @@ describe API::Triggers do
it 'returns a list of triggers with tokens exposed correctly' do
get api("/projects/#{project.id}/triggers", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_a(Array)
@@ -149,7 +149,7 @@ describe API::Triggers do
it 'does not return triggers list' do
get api("/projects/#{project.id}/triggers", user2)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -157,7 +157,7 @@ describe API::Triggers do
it 'does not return triggers list' do
get api("/projects/#{project.id}/triggers")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -167,14 +167,14 @@ describe API::Triggers do
it 'returns trigger details' do
get api("/projects/#{project.id}/triggers/#{trigger.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_a(Hash)
end
it 'responds with 404 Not Found if requesting non-existing trigger' do
get api("/projects/#{project.id}/triggers/-5", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -182,7 +182,7 @@ describe API::Triggers do
it 'does not return triggers list' do
get api("/projects/#{project.id}/triggers/#{trigger.id}", user2)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -190,7 +190,7 @@ describe API::Triggers do
it 'does not return triggers list' do
get api("/projects/#{project.id}/triggers/#{trigger.id}")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -204,7 +204,7 @@ describe API::Triggers do
params: { description: 'trigger' }
end.to change {project.triggers.count}.by(1)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response).to include('description' => 'trigger')
end
end
@@ -223,7 +223,7 @@ describe API::Triggers do
post api("/projects/#{project.id}/triggers", user2),
params: { description: 'trigger' }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -232,7 +232,7 @@ describe API::Triggers do
post api("/projects/#{project.id}/triggers"),
params: { description: 'trigger' }
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -245,7 +245,7 @@ describe API::Triggers do
put api("/projects/#{project.id}/triggers/#{trigger.id}", user),
params: { description: new_description }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to include('description' => new_description)
expect(trigger.reload.description).to eq(new_description)
end
@@ -255,7 +255,7 @@ describe API::Triggers do
it 'does not update trigger' do
put api("/projects/#{project.id}/triggers/#{trigger.id}", user2)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -263,7 +263,7 @@ describe API::Triggers do
it 'does not update trigger' do
put api("/projects/#{project.id}/triggers/#{trigger.id}")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -274,14 +274,14 @@ describe API::Triggers do
expect do
delete api("/projects/#{project.id}/triggers/#{trigger.id}", user)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.to change {project.triggers.count}.by(-1)
end
it 'responds with 404 Not Found if requesting non-existing trigger' do
delete api("/projects/#{project.id}/triggers/-5", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it_behaves_like '412 response' do
@@ -293,7 +293,7 @@ describe API::Triggers do
it 'does not delete trigger' do
delete api("/projects/#{project.id}/triggers/#{trigger.id}", user2)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -301,7 +301,7 @@ describe API::Triggers do
it 'does not delete trigger' do
delete api("/projects/#{project.id}/triggers/#{trigger.id}")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index 12ac601c013..6d1b76a9aea 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe API::Users do
+describe API::Users, :do_not_mock_admin_mode do
let(:user) { create(:user, username: 'user.with.dot') }
let(:admin) { create(:admin) }
let(:key) { create(:key, user: user) }
@@ -41,7 +41,7 @@ describe API::Users do
it "returns authorization error when the `username` parameter is not passed" do
get api("/users")
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it "returns the user when a valid `username` parameter is passed" do
@@ -65,7 +65,7 @@ describe API::Users do
it "returns an empty response when an invalid `username` parameter is passed" do
get api("/users"), params: { username: 'invalid' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.size).to eq(0)
end
@@ -93,13 +93,13 @@ describe API::Users do
it "returns authorization error when the `username` parameter refers to an inaccessible user" do
get api("/users"), params: { username: user.username }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it "returns authorization error when the `username` parameter is not passed" do
get api("/users")
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -169,7 +169,7 @@ describe API::Users do
it "returns a 403 when non-admin user searches by external UID" do
get api("/users?extern_uid=#{omniauth_user.identities.first.extern_uid}&provider=#{omniauth_user.identities.first.provider}", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'does not reveal the `is_admin` flag of the user' do
@@ -218,13 +218,13 @@ describe API::Users do
it "returns 400 error if provider with no extern_uid" do
get api("/users?extern_uid=#{omniauth_user.identities.first.extern_uid}", admin)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it "returns 400 error if provider with no extern_uid" do
get api("/users?provider=#{omniauth_user.identities.first.provider}", admin)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it "returns a user created before a specific date" do
@@ -283,7 +283,7 @@ describe API::Users do
it 'returns 400 when provided incorrect sort params' do
get api('/users', admin), params: { order_by: 'magic', sort: 'asc' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -293,7 +293,7 @@ describe API::Users do
get api("/users", user), params: { skip_ldap: "true" }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.first["username"]).to eq user.username
end
@@ -330,6 +330,29 @@ describe API::Users do
expect(json_response.keys).not_to include 'last_sign_in_ip'
end
+ it "does not contain plan or trial data" do
+ get api("/users/#{user.id}", user)
+
+ expect(response).to match_response_schema('public_api/v4/user/basic')
+ expect(json_response.keys).not_to include 'plan'
+ expect(json_response.keys).not_to include 'trial'
+ end
+
+ context 'when job title is present' do
+ let(:job_title) { 'Fullstack Engineer' }
+
+ before do
+ create(:user_detail, user: user, job_title: job_title)
+ end
+
+ it 'returns job title of a user' do
+ get api("/users/#{user.id}", user)
+
+ expect(response).to match_response_schema('public_api/v4/user/basic')
+ expect(json_response['job_title']).to eq(job_title)
+ end
+ end
+
context 'when authenticated as admin' do
it 'includes the `is_admin` field' do
get api("/users/#{user.id}", admin)
@@ -352,6 +375,22 @@ describe API::Users do
expect(json_response['highest_role']).to be(0)
end
+ if Gitlab.ee?
+ it 'does not include values for plan or trial' do
+ get api("/users/#{user.id}", admin)
+
+ expect(response).to match_response_schema('public_api/v4/user/basic')
+ end
+ else
+ it 'does not include plan or trial data' do
+ get api("/users/#{user.id}", admin)
+
+ expect(response).to match_response_schema('public_api/v4/user/basic')
+ expect(json_response.keys).not_to include 'plan'
+ expect(json_response.keys).not_to include 'trial'
+ end
+ end
+
context 'when user has not logged in' do
it 'does not include the sign in IPs' do
get api("/users/#{user.id}", admin)
@@ -388,7 +427,7 @@ describe API::Users do
get api("/users/#{user.id}")
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it "returns the `created_at` field for public users" do
@@ -409,14 +448,14 @@ describe API::Users do
it "returns a 404 error if user id not found" do
get api("/users/0", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 User Not Found')
end
it "returns a 404 for invalid ID" do
get api("/users/1ASDF", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -453,7 +492,7 @@ describe API::Users do
it "creates user with correct attributes" do
post api('/users', admin), params: attributes_for(:user, admin: true, can_create_group: true)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
user_id = json_response['id']
new_user = User.find(user_id)
expect(new_user.admin).to eq(true)
@@ -466,12 +505,12 @@ describe API::Users do
post api('/users', admin), params: attributes
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
it "creates non-admin user" do
post api('/users', admin), params: attributes_for(:user, admin: false, can_create_group: false)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
user_id = json_response['id']
new_user = User.find(user_id)
expect(new_user.admin).to eq(false)
@@ -480,7 +519,7 @@ describe API::Users do
it "creates non-admin users by default" do
post api('/users', admin), params: attributes_for(:user)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
user_id = json_response['id']
new_user = User.find(user_id)
expect(new_user.admin).to eq(false)
@@ -489,12 +528,12 @@ describe API::Users do
it "returns 201 Created on success" do
post api("/users", admin), params: attributes_for(:user, projects_limit: 3)
expect(response).to match_response_schema('public_api/v4/user/admin')
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
it 'creates non-external users by default' do
post api("/users", admin), params: attributes_for(:user)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
user_id = json_response['id']
new_user = User.find(user_id)
@@ -503,7 +542,7 @@ describe API::Users do
it 'allows an external user to be created' do
post api("/users", admin), params: attributes_for(:user, external: true)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
user_id = json_response['id']
new_user = User.find(user_id)
@@ -513,7 +552,7 @@ describe API::Users do
it "creates user with reset password" do
post api('/users', admin), params: attributes_for(:user, reset_password: true).except(:password)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
user_id = json_response['id']
new_user = User.find(user_id)
@@ -525,7 +564,7 @@ describe API::Users do
params = attributes_for(:user, force_random_password: true, reset_password: true)
post api('/users', admin), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
user_id = json_response['id']
new_user = User.find(user_id)
@@ -537,7 +576,7 @@ describe API::Users do
it "creates user with private profile" do
post api('/users', admin), params: attributes_for(:user, private_profile: true)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
user_id = json_response['id']
new_user = User.find(user_id)
@@ -553,27 +592,27 @@ describe API::Users do
password: 'password',
name: 'test'
}
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns 400 error if name not given' do
post api('/users', admin), params: attributes_for(:user).except(:name)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns 400 error if password not given' do
post api('/users', admin), params: attributes_for(:user).except(:password)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns 400 error if email not given' do
post api('/users', admin), params: attributes_for(:user).except(:email)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns 400 error if username not given' do
post api('/users', admin), params: attributes_for(:user).except(:username)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it "doesn't create user with invalid optional attributes" do
@@ -582,7 +621,7 @@ describe API::Users do
post api('/users', admin), params: attributes
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns 400 error if user does not validate' do
@@ -595,7 +634,7 @@ describe API::Users do
bio: 'g' * 256,
projects_limit: -1
}
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['password'])
.to eq(['is too short (minimum is 8 characters)'])
expect(json_response['message']['bio'])
@@ -608,7 +647,7 @@ describe API::Users do
it "is not available for non admin users" do
post api("/users", user), params: attributes_for(:user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
context 'with existing user' do
@@ -632,7 +671,7 @@ describe API::Users do
username: 'foo'
}
end.to change { User.count }.by(0)
- expect(response).to have_gitlab_http_status(409)
+ expect(response).to have_gitlab_http_status(:conflict)
expect(json_response['message']).to eq('Email has already been taken')
end
@@ -646,7 +685,7 @@ describe API::Users do
username: 'test'
}
end.to change { User.count }.by(0)
- expect(response).to have_gitlab_http_status(409)
+ expect(response).to have_gitlab_http_status(:conflict)
expect(json_response['message']).to eq('Username has already been taken')
end
@@ -660,14 +699,14 @@ describe API::Users do
username: 'TEST'
}
end.to change { User.count }.by(0)
- expect(response).to have_gitlab_http_status(409)
+ expect(response).to have_gitlab_http_status(:conflict)
expect(json_response['message']).to eq('Username has already been taken')
end
it 'creates user with new identity' do
post api("/users", admin), params: attributes_for(:user, provider: 'github', extern_uid: '67890')
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['identities'].first['extern_uid']).to eq('67890')
expect(json_response['identities'].first['provider']).to eq('github')
end
@@ -689,13 +728,13 @@ describe API::Users do
put api("/users/#{user.id}", admin), params: { bio: 'new test bio' }
expect(response).to match_response_schema('public_api/v4/user/admin')
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it "updates user with new bio" do
put api("/users/#{user.id}", admin), params: { bio: 'new test bio' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['bio']).to eq('new test bio')
expect(user.reload.bio).to eq('new test bio')
end
@@ -703,14 +742,14 @@ describe API::Users do
it "updates user with new password and forces reset on next login" do
put api("/users/#{user.id}", admin), params: { password: '12345678' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(user.reload.password_expires_at).to be <= Time.now
end
it "updates user with organization" do
put api("/users/#{user.id}", admin), params: { organization: 'GitLab' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['organization']).to eq('GitLab')
expect(user.reload.organization).to eq('GitLab')
end
@@ -721,7 +760,7 @@ describe API::Users do
user.reload
expect(user.avatar).to be_present
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['avatar_url']).to include(user.avatar_path)
end
@@ -732,7 +771,7 @@ describe API::Users do
user.reload
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(user).to be_confirmed
expect(user.email).to eq(old_email)
expect(user.notification_email).to eq(old_notification_email)
@@ -744,7 +783,7 @@ describe API::Users do
user.reload
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(user).to be_confirmed
expect(user.email).to eq('new@email.com')
end
@@ -752,7 +791,7 @@ describe API::Users do
it 'updates user with their own username' do
put api("/users/#{user.id}", admin), params: { username: user.username }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['username']).to eq(user.username)
expect(user.reload.username).to eq(user.username)
end
@@ -760,14 +799,14 @@ describe API::Users do
it "updates user's existing identity" do
put api("/users/#{omniauth_user.id}", admin), params: { provider: 'ldapmain', extern_uid: '654321' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(omniauth_user.reload.identities.first.extern_uid).to eq('654321')
end
it 'updates user with new identity' do
put api("/users/#{user.id}", admin), params: { provider: 'github', extern_uid: 'john' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(user.reload.identities.first.extern_uid).to eq('john')
expect(user.reload.identities.first.provider).to eq('github')
end
@@ -775,7 +814,7 @@ describe API::Users do
it "updates admin status" do
put api("/users/#{user.id}", admin), params: { admin: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(user.reload.admin).to eq(true)
end
@@ -793,10 +832,17 @@ describe API::Users do
expect(user.reload.private_profile).to eq(false)
end
+ it "does have default values for theme and color-scheme ID" do
+ put api("/users/#{user.id}", admin), params: {}
+
+ expect(user.reload.theme_id).to eq(Gitlab::Themes.default.id)
+ expect(user.reload.color_scheme_id).to eq(Gitlab::ColorSchemes.default.id)
+ end
+
it "updates private profile" do
put api("/users/#{user.id}", admin), params: { private_profile: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(user.reload.private_profile).to eq(true)
end
@@ -805,7 +851,7 @@ describe API::Users do
put api("/users/#{user.id}", admin), params: { private_profile: nil }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(user.reload.private_profile).to eq(false)
end
@@ -814,14 +860,27 @@ describe API::Users do
put api("/users/#{user.id}", admin), params: {}
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(user.reload.private_profile).to eq(true)
end
+ it "does not modify theme or color-scheme ID when field is not provided" do
+ theme = Gitlab::Themes.each.find { |t| t.id != Gitlab::Themes.default.id }
+ scheme = Gitlab::ColorSchemes.each.find { |t| t.id != Gitlab::ColorSchemes.default.id }
+
+ user.update(theme_id: theme.id, color_scheme_id: scheme.id)
+
+ put api("/users/#{user.id}", admin), params: {}
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(user.reload.theme_id).to eq(theme.id)
+ expect(user.reload.color_scheme_id).to eq(scheme.id)
+ end
+
it "does not update admin status" do
put api("/users/#{admin_user.id}", admin), params: { can_create_group: false }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(admin_user.reload.admin).to eq(true)
expect(admin_user.can_create_group).to eq(false)
end
@@ -829,35 +888,35 @@ describe API::Users do
it "does not allow invalid update" do
put api("/users/#{user.id}", admin), params: { email: 'invalid email' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(user.reload.email).not_to eq('invalid email')
end
it "updates theme id" do
put api("/users/#{user.id}", admin), params: { theme_id: 5 }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(user.reload.theme_id).to eq(5)
end
it "does not update invalid theme id" do
put api("/users/#{user.id}", admin), params: { theme_id: 50 }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(user.reload.theme_id).not_to eq(50)
end
it "updates color scheme id" do
put api("/users/#{user.id}", admin), params: { color_scheme_id: 5 }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(user.reload.color_scheme_id).to eq(5)
end
it "does not update invalid color scheme id" do
put api("/users/#{user.id}", admin), params: { color_scheme_id: 50 }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(user.reload.color_scheme_id).not_to eq(50)
end
@@ -867,21 +926,21 @@ describe API::Users do
put api("/users/#{user.id}", user), params: attributes_for(:user)
end.not_to change { user.reload.attributes }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
it "returns 404 for non-existing user" do
put api("/users/0", admin), params: { bio: 'update should fail' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 User Not Found')
end
it "returns a 404 if invalid ID" do
put api("/users/ASDF", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns 400 error if user does not validate' do
@@ -894,7 +953,7 @@ describe API::Users do
bio: 'g' * 256,
projects_limit: -1
}
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['password'])
.to eq(['is too short (minimum is 8 characters)'])
expect(json_response['message']['bio'])
@@ -908,13 +967,13 @@ describe API::Users do
it 'returns 400 if provider is missing for identity update' do
put api("/users/#{omniauth_user.id}", admin), params: { extern_uid: '654321' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns 400 if external UID is missing for identity update' do
put api("/users/#{omniauth_user.id}", admin), params: { provider: 'ldap' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
context "with existing user" do
@@ -927,7 +986,7 @@ describe API::Users do
it 'returns 409 conflict error if email address exists' do
put api("/users/#{@user.id}", admin), params: { email: 'test@example.com' }
- expect(response).to have_gitlab_http_status(409)
+ expect(response).to have_gitlab_http_status(:conflict)
expect(@user.reload.email).to eq(@user.email)
end
@@ -935,7 +994,7 @@ describe API::Users do
@user_id = User.all.last.id
put api("/users/#{@user.id}", admin), params: { username: 'test' }
- expect(response).to have_gitlab_http_status(409)
+ expect(response).to have_gitlab_http_status(:conflict)
expect(@user.reload.username).to eq(@user.username)
end
@@ -943,7 +1002,7 @@ describe API::Users do
@user_id = User.all.last.id
put api("/users/#{@user.id}", admin), params: { username: 'TEST' }
- expect(response).to have_gitlab_http_status(409)
+ expect(response).to have_gitlab_http_status(:conflict)
expect(@user.reload.username).to eq(@user.username)
end
end
@@ -996,14 +1055,14 @@ describe API::Users do
it "does not create invalid ssh key" do
post api("/users/#{user.id}/keys", admin), params: { title: "invalid key" }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('key is missing')
end
it 'does not create key without title' do
post api("/users/#{user.id}/keys", admin), params: { key: 'some key' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('title is missing')
end
@@ -1016,7 +1075,7 @@ describe API::Users do
it "returns 400 for invalid ID" do
post api("/users/0/keys", admin)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -1026,7 +1085,7 @@ describe API::Users do
get api("/users/#{user_id}/keys")
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 User Not Found')
end
@@ -1036,7 +1095,7 @@ describe API::Users do
get api("/users/#{user.id}/keys")
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first['title']).to eq(key.title)
@@ -1047,7 +1106,7 @@ describe API::Users do
it 'returns 404 for non-existing user' do
get api("/users/#{not_existing_user_id}/keys")
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 User Not Found')
end
@@ -1057,7 +1116,7 @@ describe API::Users do
get api("/users/#{user.username}/keys")
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first['title']).to eq(key.title)
@@ -1072,7 +1131,7 @@ describe API::Users do
context 'when unauthenticated' do
it 'returns authentication error' do
delete api("/users/#{user.id}/keys/42")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -1084,7 +1143,7 @@ describe API::Users do
expect do
delete api("/users/#{user.id}/keys/#{key.id}", admin)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.to change { user.keys.count }.by(-1)
end
@@ -1096,13 +1155,13 @@ describe API::Users do
user.keys << key
user.save
delete api("/users/0/keys/#{key.id}", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 User Not Found')
end
it 'returns 404 error if key not found' do
delete api("/users/#{user.id}/keys/42", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Key Not Found')
end
end
@@ -1116,7 +1175,7 @@ describe API::Users do
it 'does not create invalid GPG key' do
post api("/users/#{user.id}/gpg_keys", admin)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('key is missing')
end
@@ -1125,14 +1184,14 @@ describe API::Users do
expect do
post api("/users/#{user.id}/gpg_keys", admin), params: key_attrs
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end.to change { user.gpg_keys.count }.by(1)
end
it 'returns 400 for invalid ID' do
post api('/users/0/gpg_keys', admin)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -1145,7 +1204,7 @@ describe API::Users do
it 'returns authentication error' do
get api("/users/#{user.id}/gpg_keys")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -1153,14 +1212,14 @@ describe API::Users do
it 'returns 404 for non-existing user' do
get api('/users/0/gpg_keys', admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 User Not Found')
end
it 'returns 404 error if key not found' do
delete api("/users/#{user.id}/gpg_keys/42", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 GPG Key Not Found')
end
@@ -1170,7 +1229,7 @@ describe API::Users do
get api("/users/#{user.id}/gpg_keys", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first['key']).to eq(gpg_key.key)
@@ -1187,7 +1246,7 @@ describe API::Users do
it 'returns authentication error' do
delete api("/users/#{user.id}/keys/42")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -1199,7 +1258,7 @@ describe API::Users do
expect do
delete api("/users/#{user.id}/gpg_keys/#{gpg_key.id}", admin)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.to change { user.gpg_keys.count }.by(-1)
end
@@ -1209,14 +1268,14 @@ describe API::Users do
delete api("/users/0/gpg_keys/#{gpg_key.id}", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 User Not Found')
end
it 'returns 404 error if key not found' do
delete api("/users/#{user.id}/gpg_keys/42", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 GPG Key Not Found')
end
end
@@ -1231,7 +1290,7 @@ describe API::Users do
it 'returns authentication error' do
post api("/users/#{user.id}/gpg_keys/42/revoke")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -1253,14 +1312,14 @@ describe API::Users do
post api("/users/0/gpg_keys/#{gpg_key.id}/revoke", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 User Not Found')
end
it 'returns 404 error if key not found' do
post api("/users/#{user.id}/gpg_keys/42/revoke", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 GPG Key Not Found')
end
end
@@ -1274,7 +1333,7 @@ describe API::Users do
it "does not create invalid email" do
post api("/users/#{user.id}/emails", admin), params: {}
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('email is missing')
end
@@ -1291,7 +1350,7 @@ describe API::Users do
it "returns a 400 for invalid ID" do
post api("/users/0/emails", admin)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it "creates verified email" do
@@ -1300,7 +1359,7 @@ describe API::Users do
post api("/users/#{user.id}/emails", admin), params: email_attrs
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
email = Email.find_by(user_id: user.id, email: email_attrs[:email])
expect(email).to be_confirmed
@@ -1315,14 +1374,14 @@ describe API::Users do
context 'when unauthenticated' do
it 'returns authentication error' do
get api("/users/#{user.id}/emails")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
context 'when authenticated' do
it 'returns 404 for non-existing user' do
get api('/users/0/emails', admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 User Not Found')
end
@@ -1332,7 +1391,7 @@ describe API::Users do
get api("/users/#{user.id}/emails", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first['email']).to eq(email.email)
@@ -1341,7 +1400,7 @@ describe API::Users do
it "returns a 404 for invalid ID" do
get api("/users/ASDF/emails", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -1354,7 +1413,7 @@ describe API::Users do
context 'when unauthenticated' do
it 'returns authentication error' do
delete api("/users/#{user.id}/emails/42")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -1366,7 +1425,7 @@ describe API::Users do
expect do
delete api("/users/#{user.id}/emails/#{email.id}", admin)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.to change { user.emails.count }.by(-1)
end
@@ -1378,20 +1437,20 @@ describe API::Users do
user.emails << email
user.save
delete api("/users/0/emails/#{email.id}", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 User Not Found')
end
it 'returns 404 error if email not found' do
delete api("/users/#{user.id}/emails/42", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Email Not Found')
end
it "returns a 404 for invalid ID" do
delete api("/users/ASDF/emails/bar", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -1407,7 +1466,7 @@ describe API::Users do
it "deletes user", :sidekiq_might_not_need_inline do
perform_enqueued_jobs { delete api("/users/#{user.id}", admin) }
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
expect { User.find(user.id) }.to raise_error ActiveRecord::RecordNotFound
expect { Namespace.find(namespace.id) }.to raise_error ActiveRecord::RecordNotFound
end
@@ -1418,14 +1477,14 @@ describe API::Users do
context "hard delete disabled" do
it "does not delete user" do
perform_enqueued_jobs { delete api("/users/#{user.id}", admin)}
- expect(response).to have_gitlab_http_status(409)
+ expect(response).to have_gitlab_http_status(:conflict)
end
end
context "hard delete enabled" do
it "delete user and group", :sidekiq_might_not_need_inline do
perform_enqueued_jobs { delete api("/users/#{user.id}?hard_delete=true", admin)}
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
expect(Group.exists?(group.id)).to be_falsy
end
end
@@ -1437,31 +1496,31 @@ describe API::Users do
it "does not delete for unauthenticated user" do
perform_enqueued_jobs { delete api("/users/#{user.id}") }
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it "is not available for non admin users" do
perform_enqueued_jobs { delete api("/users/#{user.id}", user) }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it "returns 404 for non-existing user" do
perform_enqueued_jobs { delete api("/users/0", admin) }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 User Not Found')
end
it "returns a 404 for invalid ID" do
perform_enqueued_jobs { delete api("/users/ASDF", admin) }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
context "hard delete disabled" do
it "moves contributions to the ghost user", :sidekiq_might_not_need_inline do
perform_enqueued_jobs { delete api("/users/#{user.id}", admin) }
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
expect(issue.reload).to be_persisted
expect(issue.author.ghost?).to be_truthy
end
@@ -1471,7 +1530,7 @@ describe API::Users do
it "removes contributions", :sidekiq_might_not_need_inline do
perform_enqueued_jobs { delete api("/users/#{user.id}?hard_delete=true", admin) }
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
expect(Issue.exists?(issue.id)).to be_falsy
end
end
@@ -1486,14 +1545,14 @@ describe API::Users do
it 'returns 403 without private token when sudo is defined' do
get api("/user?private_token=#{personal_access_token}&sudo=123", version: version)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
it 'returns current user without private token when sudo not defined' do
get api("/user", user, version: version)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/user/public')
expect(json_response['id']).to eq(user.id)
end
@@ -1513,13 +1572,13 @@ describe API::Users do
it 'returns 403 without private token when sudo defined' do
get api("/user?private_token=#{admin_personal_access_token}&sudo=#{user.id}", version: version)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'returns initial current user without private token but with is_admin when sudo not defined' do
get api("/user?private_token=#{admin_personal_access_token}", version: version)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/user/admin')
expect(json_response['id']).to eq(admin.id)
end
@@ -1530,7 +1589,7 @@ describe API::Users do
it "returns 401 error if user is unauthenticated" do
get api("/user", version: version)
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -1543,7 +1602,7 @@ describe API::Users do
context "when unauthenticated" do
it "returns authentication error" do
get api("/user/keys")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -1554,7 +1613,7 @@ describe API::Users do
get api("/user/keys", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first["title"]).to eq(key.title)
@@ -1574,14 +1633,14 @@ describe API::Users do
user.keys << key
user.save
get api("/user/keys/#{key.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response["title"]).to eq(key.title)
end
it "returns 404 Not Found within invalid ID" do
get api("/user/keys/42", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Key Not Found')
end
@@ -1590,14 +1649,14 @@ describe API::Users do
user.save
admin
get api("/user/keys/#{key.id}", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Key Not Found')
end
it "returns 404 for invalid ID" do
get api("/users/keys/ASDF", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
context "scopes" do
@@ -1614,31 +1673,31 @@ describe API::Users do
expect do
post api("/user/keys", user), params: key_attrs
end.to change { user.keys.count }.by(1)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
it "returns a 401 error if unauthorized" do
post api("/user/keys"), params: { title: 'some title', key: 'some key' }
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it "does not create ssh key without key" do
post api("/user/keys", user), params: { title: 'title' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('key is missing')
end
it 'does not create ssh key without title' do
post api('/user/keys', user), params: { key: 'some key' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('title is missing')
end
it "does not create ssh key without title" do
post api("/user/keys", user), params: { key: "somekey" }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -1650,7 +1709,7 @@ describe API::Users do
expect do
delete api("/user/keys/#{key.id}", user)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.to change { user.keys.count}.by(-1)
end
@@ -1661,7 +1720,7 @@ describe API::Users do
it "returns 404 if key ID not found" do
delete api("/user/keys/42", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Key Not Found')
end
@@ -1669,13 +1728,13 @@ describe API::Users do
user.keys << key
user.save
delete api("/user/keys/#{key.id}")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it "returns a 404 for invalid ID" do
delete api("/users/keys/ASDF", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -1684,7 +1743,7 @@ describe API::Users do
it 'returns authentication error' do
get api('/user/gpg_keys')
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -1695,7 +1754,7 @@ describe API::Users do
get api('/user/gpg_keys', user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first['key']).to eq(gpg_key.key)
@@ -1717,14 +1776,14 @@ describe API::Users do
get api("/user/gpg_keys/#{gpg_key.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['key']).to eq(gpg_key.key)
end
it 'returns 404 Not Found with invalid ID' do
get api('/user/gpg_keys/42', user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 GPG Key Not Found')
end
@@ -1734,14 +1793,14 @@ describe API::Users do
get api("/user/gpg_keys/#{gpg_key.id}", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 GPG Key Not Found')
end
it 'returns 404 for invalid ID' do
get api('/users/gpg_keys/ASDF', admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
context 'scopes' do
@@ -1758,20 +1817,20 @@ describe API::Users do
expect do
post api('/user/gpg_keys', user), params: key_attrs
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end.to change { user.gpg_keys.count }.by(1)
end
it 'returns a 401 error if unauthorized' do
post api('/user/gpg_keys'), params: { key: 'some key' }
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it 'does not create GPG key without key' do
post api('/user/gpg_keys', user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('key is missing')
end
end
@@ -1791,7 +1850,7 @@ describe API::Users do
it 'returns 404 if key ID not found' do
post api('/user/gpg_keys/42/revoke', user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 GPG Key Not Found')
end
@@ -1801,13 +1860,13 @@ describe API::Users do
post api("/user/gpg_keys/#{gpg_key.id}/revoke")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it 'returns a 404 for invalid ID' do
post api('/users/gpg_keys/ASDF/revoke', admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -1819,14 +1878,14 @@ describe API::Users do
expect do
delete api("/user/gpg_keys/#{gpg_key.id}", user)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.to change { user.gpg_keys.count}.by(-1)
end
it 'returns 404 if key ID not found' do
delete api('/user/gpg_keys/42', user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 GPG Key Not Found')
end
@@ -1836,13 +1895,13 @@ describe API::Users do
delete api("/user/gpg_keys/#{gpg_key.id}")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it 'returns a 404 for invalid ID' do
delete api('/users/gpg_keys/ASDF', admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -1850,7 +1909,7 @@ describe API::Users do
context "when unauthenticated" do
it "returns authentication error" do
get api("/user/emails")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -1861,7 +1920,7 @@ describe API::Users do
get api("/user/emails", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first["email"]).to eq(email.email)
@@ -1881,13 +1940,13 @@ describe API::Users do
user.emails << email
user.save
get api("/user/emails/#{email.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response["email"]).to eq(email.email)
end
it "returns 404 Not Found within invalid ID" do
get api("/user/emails/42", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Email Not Found')
end
@@ -1896,14 +1955,14 @@ describe API::Users do
user.save
admin
get api("/user/emails/#{email.id}", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Email Not Found')
end
it "returns 404 for invalid ID" do
get api("/users/emails/ASDF", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
context "scopes" do
@@ -1920,18 +1979,18 @@ describe API::Users do
expect do
post api("/user/emails", user), params: email_attrs
end.to change { user.emails.count }.by(1)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
it "returns a 401 error if unauthorized" do
post api("/user/emails"), params: { email: 'some email' }
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it "does not create email with invalid email" do
post api("/user/emails", user), params: {}
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('email is missing')
end
end
@@ -1944,7 +2003,7 @@ describe API::Users do
expect do
delete api("/user/emails/#{email.id}", user)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.to change { user.emails.count}.by(-1)
end
@@ -1955,7 +2014,7 @@ describe API::Users do
it "returns 404 if email ID not found" do
delete api("/user/emails/42", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Email Not Found')
end
@@ -1963,20 +2022,20 @@ describe API::Users do
user.emails << email
user.save
delete api("/user/emails/#{email.id}")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it "returns 400 for invalid ID" do
delete api("/user/emails/ASDF", admin)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
context 'activate and deactivate' do
shared_examples '404' do
it 'returns 404' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 User Not Found')
end
end
@@ -1986,7 +2045,7 @@ describe API::Users do
it 'is not authorized to perform the action' do
post api("/users/#{user.id}/activate", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -1999,7 +2058,7 @@ describe API::Users do
end
it 'activates a deactivated user' do
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(user.reload.state).to eq('active')
end
end
@@ -2012,7 +2071,7 @@ describe API::Users do
end
it 'returns 201' do
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(user.reload.state).to eq('active')
end
end
@@ -2025,7 +2084,7 @@ describe API::Users do
end
it 'returns 403' do
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response['message']).to eq('403 Forbidden - A blocked user must be unblocked to be activated')
expect(user.reload.state).to eq('blocked')
end
@@ -2039,7 +2098,7 @@ describe API::Users do
end
it 'returns 403' do
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response['message']).to eq('403 Forbidden - A blocked user must be unblocked to be activated')
expect(user.reload.state).to eq('ldap_blocked')
end
@@ -2060,7 +2119,7 @@ describe API::Users do
it 'is not authorized to perform the action' do
post api("/users/#{user.id}/deactivate", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -2077,7 +2136,7 @@ describe API::Users do
end
it 'deactivates an active user' do
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(user.reload.state).to eq('deactivated')
end
end
@@ -2090,7 +2149,7 @@ describe API::Users do
end
it 'does not deactivate an active user' do
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response['message']).to eq("403 Forbidden - The user you are trying to deactivate has been active in the past #{::User::MINIMUM_INACTIVE_DAYS} days and cannot be deactivated")
expect(user.reload.state).to eq('active')
end
@@ -2105,7 +2164,7 @@ describe API::Users do
end
it 'returns 201' do
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(user.reload.state).to eq('deactivated')
end
end
@@ -2118,7 +2177,7 @@ describe API::Users do
end
it 'returns 403' do
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response['message']).to eq('403 Forbidden - A blocked user cannot be deactivated by the API')
expect(user.reload.state).to eq('blocked')
end
@@ -2132,7 +2191,7 @@ describe API::Users do
end
it 'returns 403' do
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response['message']).to eq('403 Forbidden - A blocked user cannot be deactivated by the API')
expect(user.reload.state).to eq('ldap_blocked')
end
@@ -2150,33 +2209,48 @@ describe API::Users do
end
describe 'POST /users/:id/block' do
+ let(:blocked_user) { create(:user, state: 'blocked') }
+
before do
admin
end
it 'blocks existing user' do
post api("/users/#{user.id}/block", admin)
- expect(response).to have_gitlab_http_status(201)
- expect(user.reload.state).to eq('blocked')
+
+ aggregate_failures do
+ expect(response).to have_gitlab_http_status(:created)
+ expect(response.body).to eq('true')
+ expect(user.reload.state).to eq('blocked')
+ end
end
it 'does not re-block ldap blocked users' do
post api("/users/#{ldap_blocked_user.id}/block", admin)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(ldap_blocked_user.reload.state).to eq('ldap_blocked')
end
it 'is not available for non admin users' do
post api("/users/#{user.id}/block", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(user.reload.state).to eq('active')
end
it 'returns a 404 error if user id not found' do
post api('/users/0/block', admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 User Not Found')
end
+
+ it 'returns a 201 if user is already blocked' do
+ post api("/users/#{blocked_user.id}/block", admin)
+
+ aggregate_failures do
+ expect(response).to have_gitlab_http_status(:created)
+ expect(response.body).to eq('null')
+ end
+ end
end
describe 'POST /users/:id/unblock' do
@@ -2189,44 +2263,44 @@ describe API::Users do
it 'unblocks existing user' do
post api("/users/#{user.id}/unblock", admin)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(user.reload.state).to eq('active')
end
it 'unblocks a blocked user' do
post api("/users/#{blocked_user.id}/unblock", admin)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(blocked_user.reload.state).to eq('active')
end
it 'does not unblock ldap blocked users' do
post api("/users/#{ldap_blocked_user.id}/unblock", admin)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(ldap_blocked_user.reload.state).to eq('ldap_blocked')
end
it 'does not unblock deactivated users' do
post api("/users/#{deactivated_user.id}/unblock", admin)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(deactivated_user.reload.state).to eq('deactivated')
end
it 'is not available for non admin users' do
post api("/users/#{user.id}/unblock", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(user.reload.state).to eq('active')
end
it 'returns a 404 error if user id not found' do
post api('/users/0/block', admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 User Not Found')
end
it "returns a 404 for invalid ID" do
post api("/users/ASDF/block", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -2315,7 +2389,7 @@ describe API::Users do
it 'has no permission' do
get api("/user/activities", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -2367,7 +2441,7 @@ describe API::Users do
it 'renders errors when the status was invalid' do
put api('/user/status', user), params: { emoji: 'does not exist', message: 'hello world' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['emoji']).to be_present
end
@@ -2389,21 +2463,21 @@ describe API::Users do
it 'returns a 404 error if user not found' do
get api("/users/#{not_existing_user_id}/impersonation_tokens", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 User Not Found')
end
it 'returns a 403 error when authenticated as normal user' do
get api("/users/#{not_existing_user_id}/impersonation_tokens", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response['message']).to eq('403 Forbidden')
end
it 'returns an array of all impersonated tokens' do
get api("/users/#{user.id}/impersonation_tokens", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(2)
@@ -2412,7 +2486,7 @@ describe API::Users do
it 'returns an array of active impersonation tokens if state active' do
get api("/users/#{user.id}/impersonation_tokens?state=active", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(1)
@@ -2422,7 +2496,7 @@ describe API::Users do
it 'returns an array of inactive personal access tokens if active is set to false' do
get api("/users/#{user.id}/impersonation_tokens?state=inactive", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.size).to eq(1)
expect(json_response).to all(include('active' => false))
@@ -2438,7 +2512,7 @@ describe API::Users do
it 'returns validation error if impersonation token misses some attributes' do
post api("/users/#{user.id}/impersonation_tokens", admin)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('name is missing')
end
@@ -2449,7 +2523,7 @@ describe API::Users do
expires_at: expires_at
}
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 User Not Found')
end
@@ -2460,7 +2534,7 @@ describe API::Users do
expires_at: expires_at
}
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response['message']).to eq('403 Forbidden')
end
@@ -2473,7 +2547,7 @@ describe API::Users do
impersonation: impersonation
}
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq(name)
expect(json_response['scopes']).to eq(scopes)
expect(json_response['expires_at']).to eq(expires_at)
@@ -2493,35 +2567,35 @@ describe API::Users do
it 'returns 404 error if user not found' do
get api("/users/#{not_existing_user_id}/impersonation_tokens/1", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 User Not Found')
end
it 'returns a 404 error if impersonation token not found' do
get api("/users/#{user.id}/impersonation_tokens/#{not_existing_pat_id}", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Impersonation Token Not Found')
end
it 'returns a 404 error if token is not impersonation token' do
get api("/users/#{user.id}/impersonation_tokens/#{personal_access_token.id}", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Impersonation Token Not Found')
end
it 'returns a 403 error when authenticated as normal user' do
get api("/users/#{user.id}/impersonation_tokens/#{impersonation_token.id}", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response['message']).to eq('403 Forbidden')
end
it 'returns an impersonation token' do
get api("/users/#{user.id}/impersonation_tokens/#{impersonation_token.id}", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['token']).not_to be_present
expect(json_response['impersonation']).to be_truthy
end
@@ -2534,28 +2608,28 @@ describe API::Users do
it 'returns a 404 error if user not found' do
delete api("/users/#{not_existing_user_id}/impersonation_tokens/1", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 User Not Found')
end
it 'returns a 404 error if impersonation token not found' do
delete api("/users/#{user.id}/impersonation_tokens/#{not_existing_pat_id}", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Impersonation Token Not Found')
end
it 'returns a 404 error if token is not impersonation token' do
delete api("/users/#{user.id}/impersonation_tokens/#{personal_access_token.id}", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Impersonation Token Not Found')
end
it 'returns a 403 error when authenticated as normal user' do
delete api("/users/#{user.id}/impersonation_tokens/#{impersonation_token.id}", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response['message']).to eq('403 Forbidden')
end
@@ -2566,7 +2640,7 @@ describe API::Users do
it 'revokes an impersonation token' do
delete api("/users/#{user.id}/impersonation_tokens/#{impersonation_token.id}", admin)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
expect(impersonation_token.revoked).to be_falsey
expect(impersonation_token.reload.revoked).to be_truthy
end
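
The hunks in this spec (and those that follow) consistently replace numeric status codes with symbols in have_gitlab_http_status. A minimal sketch of the resulting style, assuming the matcher resolves symbols the way Rack does (:not_found -> 404); the endpoint and helper names are illustrative only:

    it 'returns 404 for a missing user' do
      get api("/users/#{non_existing_record_id}", admin)

      # The symbolic form reads like the example description and reports a
      # named status on failure instead of a bare integer.
      expect(response).to have_gitlab_http_status(:not_found)
    end
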
diff --git a/spec/requests/api/variables_spec.rb b/spec/requests/api/variables_spec.rb
index dfecd43cbfa..f209a1d2e6e 100644
--- a/spec/requests/api/variables_spec.rb
+++ b/spec/requests/api/variables_spec.rb
@@ -15,7 +15,7 @@ describe API::Variables do
it 'returns project variables' do
get api("/projects/#{project.id}/variables", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_a(Array)
end
end
@@ -24,7 +24,7 @@ describe API::Variables do
it 'does not return project variables' do
get api("/projects/#{project.id}/variables", user2)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -32,7 +32,7 @@ describe API::Variables do
it 'does not return project variables' do
get api("/projects/#{project.id}/variables")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -42,7 +42,7 @@ describe API::Variables do
it 'returns project variable details' do
get api("/projects/#{project.id}/variables/#{variable.key}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['value']).to eq(variable.value)
expect(json_response['protected']).to eq(variable.protected?)
expect(json_response['masked']).to eq(variable.masked?)
@@ -52,7 +52,7 @@ describe API::Variables do
it 'responds with 404 Not Found if requesting non-existing variable' do
get api("/projects/#{project.id}/variables/non_existing_variable", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -60,7 +60,7 @@ describe API::Variables do
it 'does not return project variable details' do
get api("/projects/#{project.id}/variables/#{variable.key}", user2)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -68,7 +68,7 @@ describe API::Variables do
it 'does not return project variable details' do
get api("/projects/#{project.id}/variables/#{variable.key}")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -80,7 +80,7 @@ describe API::Variables do
post api("/projects/#{project.id}/variables", user), params: { key: 'TEST_VARIABLE_2', value: 'PROTECTED_VALUE_2', protected: true, masked: true }
end.to change {project.variables.count}.by(1)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['key']).to eq('TEST_VARIABLE_2')
expect(json_response['value']).to eq('PROTECTED_VALUE_2')
expect(json_response['protected']).to be_truthy
@@ -93,7 +93,7 @@ describe API::Variables do
post api("/projects/#{project.id}/variables", user), params: { variable_type: 'file', key: 'TEST_VARIABLE_2', value: 'VALUE_2' }
end.to change {project.variables.count}.by(1)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['key']).to eq('TEST_VARIABLE_2')
expect(json_response['value']).to eq('VALUE_2')
expect(json_response['protected']).to be_falsey
@@ -106,7 +106,7 @@ describe API::Variables do
post api("/projects/#{project.id}/variables", user), params: { key: variable.key, value: 'VALUE_2' }
end.to change {project.variables.count}.by(0)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'creates variable with a specific environment scope' do
@@ -114,7 +114,7 @@ describe API::Variables do
post api("/projects/#{project.id}/variables", user), params: { key: 'TEST_VARIABLE_2', value: 'VALUE_2', environment_scope: 'review/*' }
end.to change { project.variables.reload.count }.by(1)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['key']).to eq('TEST_VARIABLE_2')
expect(json_response['value']).to eq('VALUE_2')
expect(json_response['environment_scope']).to eq('review/*')
@@ -127,7 +127,7 @@ describe API::Variables do
post api("/projects/#{project.id}/variables", user), params: { key: variable.key, value: 'VALUE_2', environment_scope: 'review/*' }
end.to change { project.variables.reload.count }.by(1)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['key']).to eq(variable.key)
expect(json_response['value']).to eq('VALUE_2')
expect(json_response['environment_scope']).to eq('review/*')
@@ -138,7 +138,7 @@ describe API::Variables do
it 'does not create variable' do
post api("/projects/#{project.id}/variables", user2)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -146,7 +146,7 @@ describe API::Variables do
it 'does not create variable' do
post api("/projects/#{project.id}/variables")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -161,7 +161,7 @@ describe API::Variables do
updated_variable = project.variables.reload.first
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(value_before).to eq(variable.value)
expect(updated_variable.value).to eq('VALUE_1_UP')
expect(updated_variable).to be_protected
@@ -171,7 +171,7 @@ describe API::Variables do
it 'responds with 404 Not Found if requesting non-existing variable' do
put api("/projects/#{project.id}/variables/non_existing_variable", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -179,7 +179,7 @@ describe API::Variables do
it 'does not update variable' do
put api("/projects/#{project.id}/variables/#{variable.key}", user2)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -187,7 +187,7 @@ describe API::Variables do
it 'does not update variable' do
put api("/projects/#{project.id}/variables/#{variable.key}")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -198,14 +198,14 @@ describe API::Variables do
expect do
delete api("/projects/#{project.id}/variables/#{variable.key}", user)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end.to change {project.variables.count}.by(-1)
end
it 'responds with 404 Not Found if requesting non-existing variable' do
delete api("/projects/#{project.id}/variables/non_existing_variable", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -213,7 +213,7 @@ describe API::Variables do
it 'does not delete variable' do
delete api("/projects/#{project.id}/variables/#{variable.key}", user2)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -221,7 +221,7 @@ describe API::Variables do
it 'does not delete variable' do
delete api("/projects/#{project.id}/variables/#{variable.key}")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
diff --git a/spec/requests/api/version_spec.rb b/spec/requests/api/version_spec.rb
index e2117ca45ee..9eb8c03e273 100644
--- a/spec/requests/api/version_spec.rb
+++ b/spec/requests/api/version_spec.rb
@@ -8,21 +8,59 @@ describe API::Version do
it 'returns authentication error' do
get api('/version')
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
- context 'when authenticated' do
+ context 'when authenticated as user' do
let(:user) { create(:user) }
it 'returns the version information' do
get api('/version', user)
- expect(response).to have_gitlab_http_status(200)
- expect(json_response['version']).to eq(Gitlab::VERSION)
- expect(json_response['revision']).to eq(Gitlab.revision)
+ expect_version
end
end
+
+ context 'when authenticated with token' do
+ let(:personal_access_token) { create(:personal_access_token, scopes: scopes) }
+
+ context 'with api scope' do
+ let(:scopes) { %i(api) }
+
+ it 'returns the version information' do
+ get api('/version', personal_access_token: personal_access_token)
+
+ expect_version
+ end
+ end
+
+ context 'with read_user scope' do
+ let(:scopes) { %i(read_user) }
+
+ it 'returns the version information' do
+ get api('/version', personal_access_token: personal_access_token)
+
+ expect_version
+ end
+ end
+
+ context 'with neither api nor read_user scope' do
+ let(:scopes) { %i(read_repository) }
+
+ it 'returns authorization error' do
+ get api('/version', personal_access_token: personal_access_token)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+
+ def expect_version
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['version']).to eq(Gitlab::VERSION)
+ expect(json_response['revision']).to eq(Gitlab.revision)
+ end
end
context 'with graphql enabled' do
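
The version spec now also exercises token authentication per scope. A compact sketch of that pattern, assuming the api spec helper accepts the personal_access_token: keyword exactly as used in the hunk; the chosen scope is illustrative:

    let(:personal_access_token) { create(:personal_access_token, scopes: %i(read_user)) }

    it 'returns the version information for read_user scope' do
      get api('/version', personal_access_token: personal_access_token)

      expect(response).to have_gitlab_http_status(:ok)
      expect(json_response['version']).to eq(Gitlab::VERSION)
    end
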
diff --git a/spec/requests/api/wikis_spec.rb b/spec/requests/api/wikis_spec.rb
index 2e0b7a30480..7bd9a178a8d 100644
--- a/spec/requests/api/wikis_spec.rb
+++ b/spec/requests/api/wikis_spec.rb
@@ -32,7 +32,7 @@ describe API::Wikis do
it 'returns the list of wiki pages without content' do
get api(url, user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.size).to eq(2)
json_response.each_with_index do |page, index|
@@ -45,7 +45,7 @@ describe API::Wikis do
it 'returns the list of wiki pages with content' do
get api(url, user), params: { with_content: 1 }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.size).to eq(2)
json_response.each_with_index do |page, index|
@@ -60,14 +60,14 @@ describe API::Wikis do
it 'returns the empty list of wiki pages' do
get api(url, user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.size).to eq(0)
end
end
shared_examples_for 'returns wiki page' do
it 'returns the wiki page' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.size).to eq(4)
expect(json_response.keys).to match_array(expected_keys_with_content)
expect(json_response['content']).to eq(page.content)
@@ -80,7 +80,7 @@ describe API::Wikis do
it 'creates the wiki page' do
post(api(url, user), params: payload)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response.size).to eq(4)
expect(json_response.keys).to match_array(expected_keys_with_content)
expect(json_response['content']).to eq(payload[:content])
@@ -95,7 +95,7 @@ describe API::Wikis do
post(api(url, user), params: payload)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response.size).to eq(1)
expect(json_response['error']).to eq("#{part} is missing")
end
@@ -106,7 +106,7 @@ describe API::Wikis do
it 'updates the wiki page' do
put(api(url, user), params: payload)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.size).to eq(4)
expect(json_response.keys).to match_array(expected_keys_with_content)
expect(json_response['content']).to eq(payload[:content])
@@ -120,14 +120,14 @@ describe API::Wikis do
put(api(url, user), params: payload)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
shared_examples_for '403 Forbidden' do
it 'returns 403 Forbidden' do
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response.size).to eq(1)
expect(json_response['message']).to eq('403 Forbidden')
end
@@ -135,7 +135,7 @@ describe API::Wikis do
shared_examples_for '404 Wiki Page Not Found' do
it 'returns 404 Wiki Page Not Found' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response.size).to eq(1)
expect(json_response['message']).to eq('404 Wiki Page Not Found')
end
@@ -143,7 +143,7 @@ describe API::Wikis do
shared_examples_for '404 Project Not Found' do
it 'returns 404 Project Not Found' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response.size).to eq(1)
expect(json_response['message']).to eq('404 Project Not Found')
end
@@ -151,7 +151,7 @@ describe API::Wikis do
shared_examples_for '204 No Content' do
it 'returns 204 No Content' do
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
end
@@ -161,7 +161,7 @@ describe API::Wikis do
workhorse_post_with_file(api(url, user), file_key: :file, params: payload)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response).to eq result_hash.deep_stringify_keys
end
@@ -170,7 +170,7 @@ describe API::Wikis do
post(api(url, user), params: payload)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response.size).to eq(1)
expect(json_response['error']).to eq('file is missing')
end
@@ -180,7 +180,7 @@ describe API::Wikis do
post(api(url, user), params: payload)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response.size).to eq(1)
expect(json_response['error']).to eq('file is invalid')
end
@@ -190,7 +190,7 @@ describe API::Wikis do
post(api(url, user), params: payload)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response).to eq result_hash.deep_stringify_keys
end
end
diff --git a/spec/requests/git_http_spec.rb b/spec/requests/git_http_spec.rb
index 381ad45d477..183a3545cf2 100644
--- a/spec/requests/git_http_spec.rb
+++ b/spec/requests/git_http_spec.rb
@@ -639,8 +639,8 @@ describe 'Git HTTP requests' do
context 'when LDAP is configured' do
before do
- allow(Gitlab::Auth::LDAP::Config).to receive(:enabled?).and_return(true)
- allow_any_instance_of(Gitlab::Auth::LDAP::Authentication)
+ allow(Gitlab::Auth::Ldap::Config).to receive(:enabled?).and_return(true)
+ allow_any_instance_of(Gitlab::Auth::Ldap::Authentication)
.to receive(:login).and_return(nil)
end
@@ -862,8 +862,8 @@ describe 'Git HTTP requests' do
before do
allow(Gitlab::Auth::OAuth::Provider).to receive(:enabled?).and_return(true)
- allow_any_instance_of(Gitlab::Auth::LDAP::Authentication).to receive(:login).and_return(nil)
- allow_any_instance_of(Gitlab::Auth::LDAP::Authentication).to receive(:login).with(user.username, user.password).and_return(user)
+ allow_any_instance_of(Gitlab::Auth::Ldap::Authentication).to receive(:login).and_return(nil)
+ allow_any_instance_of(Gitlab::Auth::Ldap::Authentication).to receive(:login).with(user.username, user.password).and_return(user)
end
it_behaves_like 'pulls require Basic HTTP Authentication'
diff --git a/spec/requests/groups/milestones_controller_spec.rb b/spec/requests/groups/milestones_controller_spec.rb
index 977cccad29f..1c6743dc678 100644
--- a/spec/requests/groups/milestones_controller_spec.rb
+++ b/spec/requests/groups/milestones_controller_spec.rb
@@ -12,23 +12,45 @@ describe Groups::MilestonesController do
end
let!(:private_milestone) { create(:milestone, project: public_project_with_private_issues_and_mrs, title: 'project milestone') }
- it 'avoids N+1 database queries' do
- public_project = create(:project, :public, :merge_requests_enabled, :issues_enabled, group: public_group)
- create(:milestone, project: public_project)
+ describe 'GET #index' do
+ it 'avoids N+1 database queries' do
+ public_project = create(:project, :public, :merge_requests_enabled, :issues_enabled, group: public_group)
+ create(:milestone, project: public_project)
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { get "/groups/#{public_group.to_param}/-/milestones.json" }.count
+ control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { get group_milestones_path(public_group, format: :json) }.count
- projects = create_list(:project, 2, :public, :merge_requests_enabled, :issues_enabled, group: public_group)
- projects.each do |project|
- create(:milestone, project: project)
+ projects = create_list(:project, 2, :public, :merge_requests_enabled, :issues_enabled, group: public_group)
+ projects.each do |project|
+ create(:milestone, project: project)
+ end
+
+ expect { get group_milestones_path(public_group, format: :json) }.not_to exceed_all_query_limit(control_count)
+ expect(response).to have_gitlab_http_status(:ok)
+ milestones = json_response
+
+ expect(milestones.count).to eq(3)
+ expect(milestones.map {|x| x['title']}).not_to include(private_milestone.title)
end
+ end
- expect { get "/groups/#{public_group.to_param}/-/milestones.json" }.not_to exceed_all_query_limit(control_count)
- expect(response).to have_http_status(200)
- milestones = json_response
+ describe 'GET #show' do
+ let(:milestone) { create(:milestone, group: public_group) }
+ let(:show_path) { group_milestone_path(public_group, milestone) }
- expect(milestones.count).to eq(3)
- expect(milestones.map {|x| x['title']}).not_to include(private_milestone.title)
+ it 'avoids N+1 database queries' do
+ projects = create_list(:project, 3, :public, :merge_requests_enabled, :issues_enabled, group: public_group)
+ projects.each do |project|
+ create_list(:issue, 2, milestone: milestone, project: project)
+ end
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) { get show_path }
+
+ projects = create_list(:project, 3, :public, :merge_requests_enabled, :issues_enabled, group: public_group)
+ projects.each do |project|
+ create_list(:issue, 2, milestone: milestone, project: project)
+ end
+
+ expect { get show_path }.not_to exceed_all_query_limit(control)
+ end
end
end
end
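
Both new examples above guard against N+1 queries with ActiveRecord::QueryRecorder: record a baseline, create more records, then assert the request does not issue more queries. The general shape, assuming the exceed_all_query_limit matcher from the spec support code; the factories and path are illustrative:

    control = ActiveRecord::QueryRecorder.new(skip_cached: false) { get show_path }

    # Adding more projects and issues must not add queries if the endpoint preloads correctly.
    create_list(:project, 3, :public, group: public_group).each do |project|
      create_list(:issue, 2, milestone: milestone, project: project)
    end

    expect { get show_path }.not_to exceed_all_query_limit(control)
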
diff --git a/spec/requests/groups/registry/repositories_controller_spec.rb b/spec/requests/groups/registry/repositories_controller_spec.rb
index 35fdeaab604..25bd7aa862e 100644
--- a/spec/requests/groups/registry/repositories_controller_spec.rb
+++ b/spec/requests/groups/registry/repositories_controller_spec.rb
@@ -28,7 +28,7 @@ describe Groups::Registry::RepositoriesController do
expect { get(endpoint) }.not_to exceed_all_query_limit(control_count)
# sanity check that response is 200
- expect(response).to have_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
repositories = json_response
expect(repositories.count).to eq(5)
end
diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb
index c6403a6ab75..c71b803a7ab 100644
--- a/spec/requests/lfs_http_spec.rb
+++ b/spec/requests/lfs_http_spec.rb
@@ -690,22 +690,34 @@ describe 'Git LFS API and storage' do
end
context 'when pushing an LFS object that already exists' do
+ shared_examples_for 'batch upload with existing LFS object' do
+ it_behaves_like 'LFS http 200 response'
+
+ it 'responds with links the object to the project' do
+ expect(json_response['objects']).to be_kind_of(Array)
+ expect(json_response['objects'].first).to include(sample_object)
+ expect(lfs_object.projects.pluck(:id)).not_to include(project.id)
+ expect(lfs_object.projects.pluck(:id)).to include(other_project.id)
+ expect(json_response['objects'].first['actions']['upload']['href']).to eq(objects_url(project, sample_oid, sample_size))
+ expect(json_response['objects'].first['actions']['upload']['header']).to include('Content-Type' => 'application/octet-stream')
+ end
+
+ it_behaves_like 'process authorization header', renew_authorization: true
+ end
+
let(:update_lfs_permissions) do
other_project.lfs_objects << lfs_object
end
- it_behaves_like 'LFS http 200 response'
-
- it 'responds with links the object to the project' do
- expect(json_response['objects']).to be_kind_of(Array)
- expect(json_response['objects'].first).to include(sample_object)
- expect(lfs_object.projects.pluck(:id)).not_to include(project.id)
- expect(lfs_object.projects.pluck(:id)).to include(other_project.id)
- expect(json_response['objects'].first['actions']['upload']['href']).to eq(objects_url(project, sample_oid, sample_size))
- expect(json_response['objects'].first['actions']['upload']['header']).to include('Content-Type' => 'application/octet-stream')
+ context 'in another project' do
+ it_behaves_like 'batch upload with existing LFS object'
end
- it_behaves_like 'process authorization header', renew_authorization: true
+ context 'in source of fork project' do
+ let(:project) { fork_project(other_project) }
+
+ it_behaves_like 'batch upload with existing LFS object'
+ end
end
context 'when pushing a LFS object that does not exist' do
diff --git a/spec/requests/sessions_spec.rb b/spec/requests/sessions_spec.rb
new file mode 100644
index 00000000000..6697700c37d
--- /dev/null
+++ b/spec/requests/sessions_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Sessions' do
+ context 'authentication', :allow_forgery_protection do
+ let(:user) { create(:user) }
+
+ it 'logout does not require a csrf token' do
+ login_as(user)
+
+ post(destroy_user_session_path, headers: { 'X-CSRF-Token' => 'invalid' })
+
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
+end
diff --git a/spec/routing/project_routing_spec.rb b/spec/routing/project_routing_spec.rb
index e503f1a4231..1a43342d278 100644
--- a/spec/routing/project_routing_spec.rb
+++ b/spec/routing/project_routing_spec.rb
@@ -488,6 +488,7 @@ describe 'project routing' do
end
it_behaves_like 'redirecting a legacy project path', "/gitlab/gitlabhq/project_members", "/gitlab/gitlabhq/-/project_members"
+ it_behaves_like 'redirecting a legacy project path', "/gitlab/gitlabhq/-/settings/members", "/gitlab/gitlabhq/-/project_members"
end
# project_milestones GET /:project_id/milestones(.:format) milestones#index
@@ -798,6 +799,11 @@ describe 'project routing' do
end
it_behaves_like 'redirecting a legacy project path', "/gitlab/gitlabhq/settings/repository", "/gitlab/gitlabhq/-/settings/repository"
+
+ # TODO: remove this test as part of https://gitlab.com/gitlab-org/gitlab/issues/207079 (12.9)
+ it 'to ci_cd#create_deploy_token' do
+ expect(post('gitlab/gitlabhq/-/settings/repository/deploy_token/create')).to route_to('projects/settings/ci_cd#create_deploy_token', namespace_id: 'gitlab', project_id: 'gitlabhq')
+ end
end
describe Projects::TemplatesController, 'routing' do
diff --git a/spec/rubocop/cop/ban_catch_throw_spec.rb b/spec/rubocop/cop/ban_catch_throw_spec.rb
new file mode 100644
index 00000000000..b4c277fc429
--- /dev/null
+++ b/spec/rubocop/cop/ban_catch_throw_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require 'rubocop'
+require 'rubocop/rspec/support'
+
+require_relative '../../../rubocop/cop/ban_catch_throw'
+
+describe RuboCop::Cop::BanCatchThrow do
+ include CopHelper
+
+ subject(:cop) { described_class.new }
+
+ it 'registers an offense when `catch` or `throw` are used' do
+ inspect_source("catch(:foo) {\n throw(:foo)\n}")
+
+ aggregate_failures do
+ expect(cop.offenses.size).to eq(2)
+ expect(cop.offenses.map(&:line)).to eq([1, 2])
+ expect(cop.highlights).to eq(['catch(:foo)', 'throw(:foo)'])
+ end
+ end
+
+ it 'does not register an offense for a method called catch or throw' do
+ inspect_source("foo.catch(:foo) {\n foo.throw(:foo)\n}")
+
+ expect(cop.offenses).to be_empty
+ end
+end
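
The new cop specs all follow the same convention: include CopHelper, feed source through inspect_source, then assert on cop.offenses. A stripped-down sketch of that shape, mirroring the requires used in the hunk (the asserted count is illustrative):

    require 'spec_helper'
    require 'rubocop'
    require_relative '../../../rubocop/cop/ban_catch_throw'

    describe RuboCop::Cop::BanCatchThrow do
      include CopHelper

      subject(:cop) { described_class.new }

      it 'flags catch and throw' do
        inspect_source("catch(:foo) {\n  throw(:foo)\n}")

        expect(cop.offenses.size).to eq(2)
      end
    end
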
diff --git a/spec/rubocop/cop/migration/add_column_with_default_spec.rb b/spec/rubocop/cop/migration/add_column_with_default_spec.rb
index f3518f2f058..a8cf965a3ef 100644
--- a/spec/rubocop/cop/migration/add_column_with_default_spec.rb
+++ b/spec/rubocop/cop/migration/add_column_with_default_spec.rb
@@ -16,7 +16,7 @@ describe RuboCop::Cop::Migration::AddColumnWithDefault do
it 'does not register any offenses' do
expect_no_offenses(<<~RUBY)
def up
- add_column_with_default(:ci_build_needs, :artifacts, :boolean, default: true, allow_null: false)
+ add_column_with_default(:merge_request_diff_files, :artifacts, :boolean, default: true, allow_null: false)
end
RUBY
end
@@ -29,38 +29,42 @@ describe RuboCop::Cop::Migration::AddColumnWithDefault do
let(:offense) { '`add_column_with_default` without `allow_null: true` may cause prolonged lock situations and downtime, see https://gitlab.com/gitlab-org/gitlab/issues/38060' }
- it 'registers an offense when specifying allow_null: false' do
- expect_offense(<<~RUBY)
- def up
- add_column_with_default(:ci_build_needs, :artifacts, :boolean, default: true, allow_null: false)
- ^^^^^^^^^^^^^^^^^^^^^^^ #{offense}
- end
- RUBY
- end
+ context 'for blacklisted table' do
+ it 'registers an offense when specifying allow_null: false' do
+ expect_offense(<<~RUBY)
+ def up
+ add_column_with_default(:merge_request_diff_files, :artifacts, :boolean, default: true, allow_null: false)
+ ^^^^^^^^^^^^^^^^^^^^^^^ #{offense}
+ end
+ RUBY
+ end
- it 'registers no offense when specifying allow_null: true' do
- expect_no_offenses(<<~RUBY)
- def up
- add_column_with_default(:ci_build_needs, :artifacts, :boolean, default: true, allow_null: true)
- end
- RUBY
- end
+ it 'registers no offense when specifying allow_null: true' do
+ expect_no_offenses(<<~RUBY)
+ def up
+ add_column_with_default(:merge_request_diff_files, :artifacts, :boolean, default: true, allow_null: true)
+ end
+ RUBY
+ end
- it 'registers an offense when allow_null is not specified' do
- expect_offense(<<~RUBY)
- def up
- add_column_with_default(:ci_build_needs, :artifacts, :boolean, default: true)
- ^^^^^^^^^^^^^^^^^^^^^^^ #{offense}
- end
- RUBY
+ it 'registers an offense when allow_null is not specified' do
+ expect_offense(<<~RUBY)
+ def up
+ add_column_with_default(:merge_request_diff_files, :artifacts, :boolean, default: true)
+ ^^^^^^^^^^^^^^^^^^^^^^^ #{offense}
+ end
+ RUBY
+ end
end
- it 'registers no offense for application_settings (whitelisted table)' do
- expect_no_offenses(<<~RUBY)
- def up
- add_column_with_default(:application_settings, :another_column, :boolean, default: true, allow_null: false)
- end
- RUBY
+ context 'for tables not on the blacklist' do
+ it 'registers no offense for application_settings (not on blacklist)' do
+ expect_no_offenses(<<~RUBY)
+ def up
+ add_column_with_default(:application_settings, :another_column, :boolean, default: true, allow_null: false)
+ end
+ RUBY
+ end
end
end
end
diff --git a/spec/rubocop/cop/migration/schedule_async_spec.rb b/spec/rubocop/cop/migration/schedule_async_spec.rb
new file mode 100644
index 00000000000..3453f1c51cc
--- /dev/null
+++ b/spec/rubocop/cop/migration/schedule_async_spec.rb
@@ -0,0 +1,152 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+require 'rubocop'
+require 'rubocop/rspec/support'
+
+require_relative '../../../../rubocop/cop/migration/schedule_async'
+
+describe RuboCop::Cop::Migration::ScheduleAsync do
+ include CopHelper
+
+ let(:cop) { described_class.new }
+ let(:source) do
+ <<~SOURCE
+ def up
+ BackgroundMigrationWorker.perform_async(ClazzName, "Bar", "Baz")
+ end
+ SOURCE
+ end
+
+ shared_examples 'a disabled cop' do
+ it 'does not register any offenses' do
+ inspect_source(source)
+
+ expect(cop.offenses).to be_empty
+ end
+ end
+
+ context 'outside of a migration' do
+ it_behaves_like 'a disabled cop'
+ end
+
+ context 'in a migration' do
+ before do
+ allow(cop).to receive(:in_migration?).and_return(true)
+ end
+
+ context 'in an old migration' do
+ before do
+ allow(cop).to receive(:version).and_return(described_class::ENFORCED_SINCE - 5)
+ end
+
+ it_behaves_like 'a disabled cop'
+ end
+
+ context 'that is recent' do
+ before do
+ allow(cop).to receive(:version).and_return(described_class::ENFORCED_SINCE + 5)
+ end
+
+ context 'BackgroundMigrationWorker.perform_async' do
+ it 'adds an offence when calling `BackgroundMigrationWorker.perform_async`' do
+ inspect_source(source)
+
+ expect(cop.offenses.size).to eq(1)
+ end
+
+ it 'autocorrects to the right version' do
+ correct_source = <<~CORRECT
+ def up
+ migrate_async(ClazzName, "Bar", "Baz")
+ end
+ CORRECT
+
+ expect(autocorrect_source(source)).to eq(correct_source)
+ end
+ end
+
+ context 'BackgroundMigrationWorker.perform_in' do
+ let(:source) do
+ <<~SOURCE
+ def up
+ BackgroundMigrationWorker
+ .perform_in(delay, ClazzName, "Bar", "Baz")
+ end
+ SOURCE
+ end
+
+ it 'adds an offence' do
+ inspect_source(source)
+
+ expect(cop.offenses.size).to eq(1)
+ end
+
+ it 'autocorrects to the right version' do
+ correct_source = <<~CORRECT
+ def up
+ migrate_in(delay, ClazzName, "Bar", "Baz")
+ end
+ CORRECT
+
+ expect(autocorrect_source(source)).to eq(correct_source)
+ end
+ end
+
+ context 'BackgroundMigrationWorker.bulk_perform_async' do
+ let(:source) do
+ <<~SOURCE
+ def up
+ BackgroundMigrationWorker
+ .bulk_perform_async(jobs)
+ end
+ SOURCE
+ end
+
+ it 'adds an offence' do
+ inspect_source(source)
+
+ expect(cop.offenses.size).to eq(1)
+ end
+
+ it 'autocorrects to the right version' do
+ correct_source = <<~CORRECT
+ def up
+ bulk_migrate_async(jobs)
+ end
+ CORRECT
+
+ expect(autocorrect_source(source)).to eq(correct_source)
+ end
+ end
+
+ context 'BackgroundMigrationWorker.bulk_perform_in' do
+ let(:source) do
+ <<~SOURCE
+ def up
+ BackgroundMigrationWorker
+ .bulk_perform_in(5.minutes, jobs)
+ end
+ SOURCE
+ end
+
+ it 'adds an offence' do
+ inspect_source(source)
+
+ expect(cop.offenses.size).to eq(1)
+ end
+
+ it 'autocorrects to the right version' do
+ correct_source = <<~CORRECT
+ def up
+ bulk_migrate_in(5.minutes, jobs)
+ end
+ CORRECT
+
+ expect(autocorrect_source(source)).to eq(correct_source)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/rubocop/cop/migration/with_lock_retries_with_change_spec.rb b/spec/rubocop/cop/migration/with_lock_retries_with_change_spec.rb
new file mode 100644
index 00000000000..75a1f939a9f
--- /dev/null
+++ b/spec/rubocop/cop/migration/with_lock_retries_with_change_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require 'rubocop'
+require 'rubocop/rspec/support'
+
+require_relative '../../../../rubocop/cop/migration/with_lock_retries_with_change'
+
+describe RuboCop::Cop::Migration::WithLockRetriesWithChange do
+ include CopHelper
+
+ subject(:cop) { described_class.new }
+
+ context 'in migration' do
+ before do
+ allow(cop).to receive(:in_migration?).and_return(true)
+ end
+
+ it 'registers an offense when `with_lock_retries` is used inside a `change` method' do
+ inspect_source('def change; with_lock_retries {}; end')
+
+ aggregate_failures do
+ expect(cop.offenses.size).to eq(1)
+ expect(cop.offenses.map(&:line)).to eq([1])
+ end
+ end
+
+ it 'registers no offense when `with_lock_retries` is used inside an `up` method' do
+ inspect_source('def up; with_lock_retries {}; end')
+
+ expect(cop.offenses.size).to eq(0)
+ end
+ end
+
+ context 'outside of migration' do
+ it 'registers no offense' do
+ inspect_source('def change; with_lock_retries {}; end')
+
+ expect(cop.offenses.size).to eq(0)
+ end
+ end
+end
diff --git a/spec/rubocop/cop/scalability/idempotent_worker_spec.rb b/spec/rubocop/cop/scalability/idempotent_worker_spec.rb
new file mode 100644
index 00000000000..7abd602f8bc
--- /dev/null
+++ b/spec/rubocop/cop/scalability/idempotent_worker_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rubocop'
+require_relative '../../../support/helpers/expect_offense'
+require_relative '../../../../rubocop/cop/scalability/idempotent_worker'
+
+describe RuboCop::Cop::Scalability::IdempotentWorker do
+ include CopHelper
+ include ExpectOffense
+
+ subject(:cop) { described_class.new }
+
+ before do
+ allow(cop)
+ .to receive(:in_worker?)
+ .and_return(true)
+ end
+
+ it 'adds an offense when not defining idempotent method' do
+ inspect_source(<<~CODE.strip_indent)
+ class SomeWorker
+ end
+ CODE
+
+ expect(cop.offenses.size).to eq(1)
+ end
+
+ it 'does not add an offense when defining idempotent method' do
+ inspect_source(<<~CODE.strip_indent)
+ class SomeWorker
+ idempotent!
+ end
+ CODE
+
+ expect(cop.offenses.size).to be_zero
+ end
+end
diff --git a/spec/rubocop/migration_helpers_spec.rb b/spec/rubocop/migration_helpers_spec.rb
new file mode 100644
index 00000000000..73ced8c58da
--- /dev/null
+++ b/spec/rubocop/migration_helpers_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rubocop'
+require 'rspec-parameterized'
+
+require_relative '../../rubocop/migration_helpers'
+
+describe RuboCop::MigrationHelpers do
+ using RSpec::Parameterized::TableSyntax
+
+ subject(:fake_cop) { Class.new { include RuboCop::MigrationHelpers }.new }
+
+ let(:node) { double(:node) }
+
+ before do
+ allow(node).to receive_message_chain('location.expression.source_buffer.name')
+ .and_return(name)
+ end
+
+ describe '#in_migration?' do
+ where(:name, :expected) do
+ '/gitlab/db/migrate/20200210184420_create_operations_scopes_table.rb' | true
+ '/gitlab/db/post_migrate/20200210184420_create_operations_scopes_table.rb' | true
+ '/gitlab/db/geo/migrate/20200210184420_create_operations_scopes_table.rb' | true
+ '/gitlab/db/geo/post_migrate/20200210184420_create_operations_scopes_table.rb' | true
+ '/gitlab/db/elsewhere/20200210184420_create_operations_scopes_table.rb' | false
+ end
+
+ with_them do
+ it { expect(fake_cop.in_migration?(node)).to eq(expected) }
+ end
+ end
+
+ describe '#in_post_deployment_migration?' do
+ where(:name, :expected) do
+ '/gitlab/db/migrate/20200210184420_create_operations_scopes_table.rb' | false
+ '/gitlab/db/post_migrate/20200210184420_create_operations_scopes_table.rb' | true
+ '/gitlab/db/geo/migrate/20200210184420_create_operations_scopes_table.rb' | false
+ '/gitlab/db/geo/post_migrate/20200210184420_create_operations_scopes_table.rb' | true
+ '/gitlab/db/elsewhere/20200210184420_create_operations_scopes_table.rb' | false
+ end
+
+ with_them do
+ it { expect(fake_cop.in_post_deployment_migration?(node)).to eq(expected) }
+ end
+ end
+
+ describe "#version" do
+ let(:name) do
+ '/path/to/gitlab/db/migrate/20200210184420_create_operations_scopes_table.rb'
+ end
+
+ it { expect(fake_cop.version(node)).to eq(20200210184420) }
+ end
+end
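
migration_helpers_spec.rb leans on rspec-parameterized's table syntax: each row of the where block feeds one generated example under with_them. A minimal sketch (paths shortened for brevity):

    where(:name, :expected) do
      '/gitlab/db/migrate/20200210184420_example.rb'   | true
      '/gitlab/db/elsewhere/20200210184420_example.rb' | false
    end

    with_them do
      it { expect(fake_cop.in_migration?(node)).to eq(expected) }
    end
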
diff --git a/spec/serializers/build_details_entity_spec.rb b/spec/serializers/build_details_entity_spec.rb
index fc05989df16..15f605b183d 100644
--- a/spec/serializers/build_details_entity_spec.rb
+++ b/spec/serializers/build_details_entity_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe BuildDetailsEntity do
include ProjectForksHelper
- set(:user) { create(:admin) }
+ let_it_be(:user) { create(:admin) }
it 'inherits from JobEntity' do
expect(described_class).to be < JobEntity
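
The serializer specs in this stretch of the diff repeatedly trade the deprecated set helper for let_it_be. A before/after sketch, assuming let_it_be is the test-prof helper bundled by the spec suite, which creates the record once per example group instead of once per example:

    # Before
    set(:user) { create(:admin) }

    # After: memoized once for the whole group, reused across examples.
    let_it_be(:user) { create(:admin) }
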
diff --git a/spec/serializers/cluster_application_entity_spec.rb b/spec/serializers/cluster_application_entity_spec.rb
index c700c150461..873fbf812cc 100644
--- a/spec/serializers/cluster_application_entity_spec.rb
+++ b/spec/serializers/cluster_application_entity_spec.rb
@@ -59,5 +59,23 @@ describe ClusterApplicationEntity do
expect(subject[:external_ip]).to eq('111.222.111.222')
end
end
+
+ context 'for knative application' do
+ let(:pages_domain) { create(:pages_domain, :instance_serverless) }
+ let(:application) { build(:clusters_applications_knative, :installed) }
+
+ before do
+ create(:serverless_domain_cluster, knative: application, pages_domain: pages_domain)
+ end
+
+ it 'includes available domains' do
+ expect(subject[:available_domains].length).to eq(1)
+ expect(subject[:available_domains].first).to eq(id: pages_domain.id, domain: pages_domain.domain)
+ end
+
+ it 'includes pages_domain' do
+ expect(subject[:pages_domain]).to eq(id: pages_domain.id, domain: pages_domain.domain)
+ end
+ end
end
end
diff --git a/spec/serializers/container_repository_entity_spec.rb b/spec/serializers/container_repository_entity_spec.rb
index 799a8d5c122..96c80331f41 100644
--- a/spec/serializers/container_repository_entity_spec.rb
+++ b/spec/serializers/container_repository_entity_spec.rb
@@ -3,14 +3,10 @@
require 'spec_helper'
describe ContainerRepositoryEntity do
- let(:entity) do
- described_class.new(repository, request: request)
- end
-
- set(:project) { create(:project) }
- set(:user) { create(:user) }
- set(:repository) { create(:container_repository, project: project) }
-
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:repository) { create(:container_repository, project: project) }
+ let(:entity) { described_class.new(repository, request: request) }
let(:request) { double('request') }
subject { entity.as_json }
diff --git a/spec/serializers/container_tag_entity_spec.rb b/spec/serializers/container_tag_entity_spec.rb
index 8a67a189761..8440e56f08f 100644
--- a/spec/serializers/container_tag_entity_spec.rb
+++ b/spec/serializers/container_tag_entity_spec.rb
@@ -3,14 +3,10 @@
require 'spec_helper'
describe ContainerTagEntity do
- let(:entity) do
- described_class.new(tag, request: request)
- end
-
- set(:project) { create(:project) }
- set(:user) { create(:user) }
- set(:repository) { create(:container_repository, name: 'image', project: project) }
-
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:repository) { create(:container_repository, name: 'image', project: project) }
+ let(:entity) { described_class.new(tag, request: request) }
let(:request) { double('request') }
let(:tag) { repository.tag('test') }
diff --git a/spec/serializers/deployment_serializer_spec.rb b/spec/serializers/deployment_serializer_spec.rb
index 4834f5ede3c..67fccaa3f25 100644
--- a/spec/serializers/deployment_serializer_spec.rb
+++ b/spec/serializers/deployment_serializer_spec.rb
@@ -3,9 +3,8 @@
require 'spec_helper'
describe DeploymentSerializer do
- set(:project) { create(:project, :repository) }
- set(:user) { create(:user, email: project.commit.author_email) }
-
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user, email: project.commit.author_email) }
let(:resource) { create(:deployment, project: project, sha: project.commit.id) }
let(:serializer) { described_class.new(request) }
diff --git a/spec/serializers/environment_entity_spec.rb b/spec/serializers/environment_entity_spec.rb
index f392ecea959..b4ea90d2141 100644
--- a/spec/serializers/environment_entity_spec.rb
+++ b/spec/serializers/environment_entity_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
describe EnvironmentEntity do
+ include Gitlab::Routing.url_helpers
+
let(:request) { double('request') }
let(:entity) do
described_class.new(environment, request: spy('request'))
@@ -71,4 +73,22 @@ describe EnvironmentEntity do
expect(subject).to include(:cancel_auto_stop_path, :auto_stop_at)
end
end
+
+ context 'pod_logs' do
+ it 'exposes logs keys' do
+ expect(subject).to include(:logs_path)
+ expect(subject).to include(:logs_api_path)
+ expect(subject).to include(:enable_advanced_logs_querying)
+ end
+
+ it 'uses k8s api when ES is not available' do
+ expect(subject[:logs_api_path]).to eq(k8s_project_logs_path(environment.project, environment_name: environment.name, format: :json))
+ end
+
+ it 'uses ES api when ES is available' do
+ allow(environment).to receive(:elastic_stack_available?).and_return(true)
+
+ expect(subject[:logs_api_path]).to eq(elasticsearch_project_logs_path(environment.project, environment_name: environment.name, format: :json))
+ end
+ end
end
diff --git a/spec/serializers/group_variable_entity_spec.rb b/spec/serializers/group_variable_entity_spec.rb
index 22c98b418dd..e6b51e0d626 100644
--- a/spec/serializers/group_variable_entity_spec.rb
+++ b/spec/serializers/group_variable_entity_spec.rb
@@ -10,7 +10,7 @@ describe GroupVariableEntity do
subject { entity.as_json }
it 'contains required fields' do
- expect(subject).to include(:id, :key, :value, :protected)
+ expect(subject).to include(:id, :key, :value, :protected, :variable_type)
end
end
end
diff --git a/spec/serializers/merge_request_serializer_spec.rb b/spec/serializers/merge_request_serializer_spec.rb
index 9297df31842..871a47b0a02 100644
--- a/spec/serializers/merge_request_serializer_spec.rb
+++ b/spec/serializers/merge_request_serializer_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe MergeRequestSerializer do
- set(:user) { create(:user) }
- set(:resource) { create(:merge_request, description: "Description") }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:resource) { create(:merge_request, description: "Description") }
let(:json_entity) do
described_class.new(current_user: user)
diff --git a/spec/serializers/merge_request_widget_entity_spec.rb b/spec/serializers/merge_request_widget_entity_spec.rb
index f621cb650f9..31f8bcbfef0 100644
--- a/spec/serializers/merge_request_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_widget_entity_spec.rb
@@ -75,8 +75,9 @@ describe MergeRequestWidgetEntity do
let(:role) { :developer }
it 'has add ci config path' do
- expect(subject[:merge_request_add_ci_config_path])
- .to eq("/#{resource.project.full_path}/-/new/#{resource.source_branch}?commit_message=Add+.gitlab-ci.yml&file_name=.gitlab-ci.yml")
+ expected_path = "/#{resource.project.full_path}/-/new/#{resource.source_branch}?commit_message=Add+.gitlab-ci.yml&file_name=.gitlab-ci.yml&suggest_gitlab_ci_yml=true"
+
+ expect(subject[:merge_request_add_ci_config_path]).to eq(expected_path)
end
context 'when source project is missing' do
@@ -122,6 +123,26 @@ describe MergeRequestWidgetEntity do
expect(subject[:merge_request_add_ci_config_path]).not_to be_nil
end
end
+
+ context 'when build feature is disabled' do
+ before do
+ project.project_feature.update(builds_access_level: ProjectFeature::DISABLED)
+ end
+
+ it 'has no path' do
+ expect(subject[:merge_request_add_ci_config_path]).to be_nil
+ end
+ end
+
+ context 'when creating the pipeline is not allowed' do
+ before do
+ user.state = 'blocked'
+ end
+
+ it 'has no path' do
+ expect(subject[:merge_request_add_ci_config_path]).to be_nil
+ end
+ end
end
context 'when user does not have permissions' do
@@ -141,6 +162,13 @@ describe MergeRequestWidgetEntity do
.to eq('Maintainer')
end
+ it 'has new pipeline path for project' do
+ project.add_maintainer(user)
+
+ expect(subject[:new_project_pipeline_path])
+ .to eq("/#{resource.project.full_path}/pipelines/new")
+ end
+
describe 'when source project is deleted' do
let(:project) { create(:project, :repository) }
let(:forked_project) { fork_project(project) }
diff --git a/spec/serializers/namespace_basic_entity_spec.rb b/spec/serializers/namespace_basic_entity_spec.rb
index f8b71ceb9f3..d3d666d57aa 100644
--- a/spec/serializers/namespace_basic_entity_spec.rb
+++ b/spec/serializers/namespace_basic_entity_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe NamespaceBasicEntity do
- set(:group) { create(:group) }
+ let_it_be(:group) { create(:group) }
let(:entity) do
described_class.represent(group)
end
diff --git a/spec/serializers/pipeline_details_entity_spec.rb b/spec/serializers/pipeline_details_entity_spec.rb
index f270f9fd4cb..a154bcac635 100644
--- a/spec/serializers/pipeline_details_entity_spec.rb
+++ b/spec/serializers/pipeline_details_entity_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe PipelineDetailsEntity do
- set(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
let(:request) { double('request') }
it 'inherits from PipelineEntity' do
diff --git a/spec/serializers/pipeline_entity_spec.rb b/spec/serializers/pipeline_entity_spec.rb
index 75f3bdfcc9e..914f42054bd 100644
--- a/spec/serializers/pipeline_entity_spec.rb
+++ b/spec/serializers/pipeline_entity_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
describe PipelineEntity do
include Gitlab::Routing
- set(:project) { create(:project) }
- set(:user) { create(:user) }
- set(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
let(:request) { double('request') }
before do
diff --git a/spec/serializers/pipeline_serializer_spec.rb b/spec/serializers/pipeline_serializer_spec.rb
index 84b0e487ee7..4e4cc9c35e6 100644
--- a/spec/serializers/pipeline_serializer_spec.rb
+++ b/spec/serializers/pipeline_serializer_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe PipelineSerializer do
- set(:project) { create(:project, :repository) }
- set(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
let(:serializer) do
described_class.new(current_user: user, project: project)
diff --git a/spec/serializers/project_import_entity_spec.rb b/spec/serializers/project_import_entity_spec.rb
index e476da82729..9ccae798091 100644
--- a/spec/serializers/project_import_entity_spec.rb
+++ b/spec/serializers/project_import_entity_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe ProjectImportEntity do
include ImportHelper
- set(:project) { create(:project, import_status: :started, import_source: 'namespace/project') }
+ let_it_be(:project) { create(:project, import_status: :started, import_source: 'namespace/project') }
let(:provider_url) { 'https://provider.com' }
let(:entity) { described_class.represent(project, provider_url: provider_url) }
diff --git a/spec/serializers/project_serializer_spec.rb b/spec/serializers/project_serializer_spec.rb
index 22f958fc17f..ef3dd1dc15e 100644
--- a/spec/serializers/project_serializer_spec.rb
+++ b/spec/serializers/project_serializer_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe ProjectSerializer do
- set(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
let(:provider_url) { 'http://provider.com' }
context 'when serializer option is :import' do
diff --git a/spec/serializers/serverless/domain_entity_spec.rb b/spec/serializers/serverless/domain_entity_spec.rb
new file mode 100644
index 00000000000..bdf0ccb176c
--- /dev/null
+++ b/spec/serializers/serverless/domain_entity_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Serverless::DomainEntity do
+ describe '#as_json' do
+ let(:domain) { create(:pages_domain, :instance_serverless) }
+
+ subject { described_class.new(domain).as_json }
+
+ it 'has an id' do
+ expect(subject[:id]).to eq(domain.id)
+ end
+
+ it 'has a domain' do
+ expect(subject[:domain]).to eq(domain.domain)
+ end
+ end
+end
diff --git a/spec/services/boards/issues/list_service_spec.rb b/spec/services/boards/issues/list_service_spec.rb
index 931b67b2950..33538703e92 100644
--- a/spec/services/boards/issues/list_service_spec.rb
+++ b/spec/services/boards/issues/list_service_spec.rb
@@ -33,11 +33,11 @@ describe Boards::Issues::ListService do
let!(:list1_issue3) { create(:labeled_issue, project: project, milestone: m1, labels: [development, p1]) }
let!(:list2_issue1) { create(:labeled_issue, project: project, milestone: m1, labels: [testing]) }
- let!(:closed_issue1) { create(:labeled_issue, :closed, project: project, labels: [bug]) }
- let!(:closed_issue2) { create(:labeled_issue, :closed, project: project, labels: [p3]) }
- let!(:closed_issue3) { create(:issue, :closed, project: project) }
- let!(:closed_issue4) { create(:labeled_issue, :closed, project: project, labels: [p1]) }
- let!(:closed_issue5) { create(:labeled_issue, :closed, project: project, labels: [development]) }
+ let!(:closed_issue1) { create(:labeled_issue, :closed, project: project, labels: [bug], closed_at: 1.day.ago) }
+ let!(:closed_issue2) { create(:labeled_issue, :closed, project: project, labels: [p3], closed_at: 2.days.ago) }
+ let!(:closed_issue3) { create(:issue, :closed, project: project, closed_at: 1.week.ago) }
+ let!(:closed_issue4) { create(:labeled_issue, :closed, project: project, labels: [p1], closed_at: 1.year.ago) }
+ let!(:closed_issue5) { create(:labeled_issue, :closed, project: project, labels: [development], closed_at: 2.years.ago) }
let(:parent) { project }
@@ -94,11 +94,11 @@ describe Boards::Issues::ListService do
let!(:list1_issue3) { create(:labeled_issue, project: project1, milestone: m1, labels: [development, p1, p1_project1]) }
let!(:list2_issue1) { create(:labeled_issue, project: project1, milestone: m1, labels: [testing]) }
- let!(:closed_issue1) { create(:labeled_issue, :closed, project: project, labels: [bug]) }
- let!(:closed_issue2) { create(:labeled_issue, :closed, project: project, labels: [p3, p3_project]) }
- let!(:closed_issue3) { create(:issue, :closed, project: project1) }
- let!(:closed_issue4) { create(:labeled_issue, :closed, project: project1, labels: [p1, p1_project1]) }
- let!(:closed_issue5) { create(:labeled_issue, :closed, project: project1, labels: [development]) }
+ let!(:closed_issue1) { create(:labeled_issue, :closed, project: project, labels: [bug], closed_at: 1.day.ago) }
+ let!(:closed_issue2) { create(:labeled_issue, :closed, project: project, labels: [p3, p3_project], closed_at: 2.days.ago) }
+ let!(:closed_issue3) { create(:issue, :closed, project: project1, closed_at: 1.week.ago) }
+ let!(:closed_issue4) { create(:labeled_issue, :closed, project: project1, labels: [p1, p1_project1], closed_at: 1.year.ago) }
+ let!(:closed_issue5) { create(:labeled_issue, :closed, project: project1, labels: [development], closed_at: 2.years.ago) }
before do
group.add_developer(user)
diff --git a/spec/services/ci/create_cross_project_pipeline_service_spec.rb b/spec/services/ci/create_cross_project_pipeline_service_spec.rb
index 51cf18f8d87..99c44c3aa17 100644
--- a/spec/services/ci/create_cross_project_pipeline_service_spec.rb
+++ b/spec/services/ci/create_cross_project_pipeline_service_spec.rb
@@ -109,13 +109,35 @@ describe Ci::CreateCrossProjectPipelineService, '#execute' do
expect(pipeline.source_bridge).to be_a ::Ci::Bridge
end
- it 'updates bridge status when downstream pipeline gets proceesed' do
+ it 'updates bridge status when downstream pipeline gets processed' do
pipeline = service.execute(bridge)
expect(pipeline.reload).to be_pending
expect(bridge.reload).to be_success
end
+ context 'when bridge job has already any downstream pipelines' do
+ before do
+ bridge.sourced_pipelines.create!(
+ source_pipeline: bridge.pipeline,
+ source_project: bridge.project,
+ project: bridge.project,
+ pipeline: create(:ci_pipeline, project: bridge.project)
+ )
+ end
+
+ it 'logs an error and exits' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(
+ instance_of(Ci::CreateCrossProjectPipelineService::DuplicateDownstreamPipelineError),
+ bridge_id: bridge.id, project_id: bridge.project.id)
+ .and_call_original
+ expect(Ci::CreatePipelineService).not_to receive(:new)
+ expect(service.execute(bridge)).to be_nil
+ end
+ end
+
context 'when target ref is not specified' do
let(:trigger) do
{ trigger: { project: downstream_project.full_path } }
@@ -128,6 +150,35 @@ describe Ci::CreateCrossProjectPipelineService, '#execute' do
end
end
+ context 'when downstream pipeline has yaml configuration error' do
+ before do
+ stub_ci_pipeline_yaml_file(YAML.dump(job: { invalid: 'yaml' }))
+ end
+
+ it 'creates only one new pipeline' do
+ expect { service.execute(bridge) }
+ .to change { Ci::Pipeline.count }.by(1)
+ end
+
+ it 'creates a new pipeline in a downstream project' do
+ pipeline = service.execute(bridge)
+
+ expect(pipeline.user).to eq bridge.user
+ expect(pipeline.project).to eq downstream_project
+ expect(bridge.sourced_pipelines.first.pipeline).to eq pipeline
+ expect(pipeline.triggered_by_pipeline).to eq upstream_pipeline
+ expect(pipeline.source_bridge).to eq bridge
+ expect(pipeline.source_bridge).to be_a ::Ci::Bridge
+ end
+
+ it 'updates the bridge status when downstream pipeline gets processed' do
+ pipeline = service.execute(bridge)
+
+ expect(pipeline.reload).to be_failed
+ expect(bridge.reload).to be_failed
+ end
+ end
+
context 'when downstream project is the same as the job project' do
let(:trigger) do
{ trigger: { project: upstream_project.full_path } }
@@ -173,7 +224,7 @@ describe Ci::CreateCrossProjectPipelineService, '#execute' do
expect(pipeline.source_bridge).to be_a ::Ci::Bridge
end
- it 'updates bridge status when downstream pipeline gets proceesed' do
+ it 'updates bridge status when downstream pipeline gets processed' do
pipeline = service.execute(bridge)
expect(pipeline.reload).to be_pending
@@ -211,6 +262,22 @@ describe Ci::CreateCrossProjectPipelineService, '#execute' do
it_behaves_like 'creates a child pipeline'
+ it 'updates the bridge job to success' do
+ expect { service.execute(bridge) }.to change { bridge.status }.to 'success'
+ end
+
+ context 'when bridge uses "depend" strategy' do
+ let(:trigger) do
+ {
+ trigger: { include: 'child-pipeline.yml', strategy: 'depend' }
+ }
+ end
+
+ it 'does not update the bridge job status' do
+ expect { service.execute(bridge) }.not_to change { bridge.status }
+ end
+ end
+
context 'when latest sha for the ref changed in the meantime' do
before do
upstream_project.repository.create_file(
@@ -267,6 +334,54 @@ describe Ci::CreateCrossProjectPipelineService, '#execute' do
end
end
+ context 'when downstream pipeline creation errors out' do
+ let(:stub_config) { false }
+
+ before do
+ stub_ci_pipeline_yaml_file(YAML.dump(invalid: { yaml: 'error' }))
+ end
+
+ it 'creates only one new pipeline' do
+ expect { service.execute(bridge) }
+ .to change { Ci::Pipeline.count }.by(1)
+ end
+
+ it 'creates a new pipeline in the downstream project' do
+ pipeline = service.execute(bridge)
+
+ expect(pipeline.user).to eq bridge.user
+ expect(pipeline.project).to eq downstream_project
+ end
+
+ it 'drops the bridge' do
+ pipeline = service.execute(bridge)
+
+ expect(pipeline.reload).to be_failed
+ expect(bridge.reload).to be_failed
+ expect(bridge.failure_reason).to eq('downstream_pipeline_creation_failed')
+ end
+ end
+
+ context 'when bridge job status update raises state machine errors' do
+ let(:stub_config) { false }
+
+ before do
+ stub_ci_pipeline_yaml_file(YAML.dump(invalid: { yaml: 'error' }))
+ bridge.drop!
+ end
+
+ it 'tracks the exception' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(
+ instance_of(Ci::Bridge::InvalidTransitionError),
+ bridge_id: bridge.id,
+ downstream_pipeline_id: kind_of(Numeric))
+
+ service.execute(bridge)
+ end
+ end
+
context 'when bridge job has YAML variables defined' do
before do
bridge.yaml_variables = [{ key: 'BRIDGE', value: 'var', public: true }]
diff --git a/spec/services/ci/create_job_artifacts_service_spec.rb b/spec/services/ci/create_job_artifacts_service_spec.rb
index e1146fc3df6..fe64a66f322 100644
--- a/spec/services/ci/create_job_artifacts_service_spec.rb
+++ b/spec/services/ci/create_job_artifacts_service_spec.rb
@@ -3,8 +3,9 @@
require 'spec_helper'
describe Ci::CreateJobArtifactsService do
- let(:service) { described_class.new }
- let(:job) { create(:ci_build) }
+ let_it_be(:project) { create(:project) }
+ let(:service) { described_class.new(project) }
+ let(:job) { create(:ci_build, project: project) }
let(:artifacts_sha256) { '0' * 64 }
let(:metadata_file) { nil }
@@ -64,7 +65,7 @@ describe Ci::CreateJobArtifactsService do
it 'sets expiration date according to application settings' do
expected_expire_at = 1.day.from_now
- expect(subject).to be_truthy
+ expect(subject).to match(a_hash_including(status: :success))
archive_artifact, metadata_artifact = job.job_artifacts.last(2)
expect(job.artifacts_expire_at).to be_within(1.minute).of(expected_expire_at)
@@ -80,7 +81,7 @@ describe Ci::CreateJobArtifactsService do
it 'sets expiration date according to the parameter' do
expected_expire_at = 2.hours.from_now
- expect(subject).to be_truthy
+ expect(subject).to match(a_hash_including(status: :success))
archive_artifact, metadata_artifact = job.job_artifacts.last(2)
expect(job.artifacts_expire_at).to be_within(1.minute).of(expected_expire_at)
@@ -101,21 +102,86 @@ describe Ci::CreateJobArtifactsService do
it 'ignores the changes' do
expect { subject }.not_to change { Ci::JobArtifact.count }
- expect(subject).to be_truthy
+ expect(subject).to match(a_hash_including(status: :success))
end
end
context 'when sha256 of uploading artifact is different than the existing one' do
let(:existing_sha256) { '1' * 64 }
- it 'returns false and logs the error' do
+ it 'returns error status' do
expect(Gitlab::ErrorTracking).to receive(:track_exception).and_call_original
expect { subject }.not_to change { Ci::JobArtifact.count }
- expect(subject).to be_falsey
- expect(job.errors[:base]).to contain_exactly('another artifact of the same type already exists')
+ expect(subject).to match(
+ a_hash_including(http_status: :bad_request,
+ message: 'another artifact of the same type already exists',
+ status: :error))
end
end
end
+
+ context 'when artifact type is dotenv' do
+ let(:artifacts_file) do
+ file_to_upload('spec/fixtures/build.env.gz', sha256: artifacts_sha256)
+ end
+
+ let(:params) do
+ {
+ 'artifact_type' => 'dotenv',
+ 'artifact_format' => 'gzip'
+ }
+ end
+
+ it 'calls parse service' do
+ expect_next_instance_of(Ci::ParseDotenvArtifactService) do |service|
+ expect(service).to receive(:execute).once.and_call_original
+ end
+
+ expect(subject[:status]).to eq(:success)
+ expect(job.job_variables.as_json).to contain_exactly(
+ hash_including('key' => 'KEY1', 'value' => 'VAR1', 'source' => 'dotenv'),
+ hash_including('key' => 'KEY2', 'value' => 'VAR2', 'source' => 'dotenv'))
+ end
+
+ context 'when ci_synchronous_artifact_parsing feature flag is disabled' do
+ before do
+ stub_feature_flags(ci_synchronous_artifact_parsing: false)
+ end
+
+ it 'does not call parse service' do
+ expect(Ci::ParseDotenvArtifactService).not_to receive(:new)
+
+ expect(subject[:status]).to eq(:success)
+ end
+ end
+ end
+
+ shared_examples 'rescues object storage error' do |klass, message, expected_message|
+ it "handles #{klass}" do
+ allow_next_instance_of(JobArtifactUploader) do |uploader|
+ allow(uploader).to receive(:store!).and_raise(klass, message)
+ end
+
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .and_call_original
+
+ expect(subject).to match(
+ a_hash_including(
+ http_status: :service_unavailable,
+ message: expected_message || message,
+ status: :error))
+ end
+ end
+
+ it_behaves_like 'rescues object storage error',
+ Errno::EIO, 'some/path', 'Input/output error - some/path'
+
+ it_behaves_like 'rescues object storage error',
+ Google::Apis::ServerError, 'Server error'
+
+ it_behaves_like 'rescues object storage error',
+ Signet::RemoteServerError, 'The service is currently unavailable'
end
end
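The object storage failure cases above all expect the same error hash. A minimal sketch of that rescue-and-report pattern, not the actual Ci::CreateJobArtifactsService code; the method name and structure here are assumptions:
# Illustrative sketch: turn known object storage exceptions into the
# error response shape asserted in the specs above.
OBJECT_STORAGE_ERRORS = [
  Errno::EIO,
  Google::Apis::ServerError,
  Signet::RemoteServerError
].freeze

def store_artifact(uploader, file)
  uploader.store!(file)
  { status: :success }
rescue *OBJECT_STORAGE_ERRORS => e
  Gitlab::ErrorTracking.track_exception(e)
  { status: :error, message: e.message, http_status: :service_unavailable }
end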
diff --git a/spec/services/ci/create_pipeline_service/custom_config_content_spec.rb b/spec/services/ci/create_pipeline_service/custom_config_content_spec.rb
index 33cd6e164b0..112b19fcbc5 100644
--- a/spec/services/ci/create_pipeline_service/custom_config_content_spec.rb
+++ b/spec/services/ci/create_pipeline_service/custom_config_content_spec.rb
@@ -7,23 +7,74 @@ describe Ci::CreatePipelineService do
let(:ref) { 'refs/heads/master' }
let(:service) { described_class.new(project, user, { ref: ref }) }
+ let(:upstream_pipeline) { create(:ci_pipeline, project: project) }
+ let(:bridge) { create(:ci_bridge, pipeline: upstream_pipeline) }
+
+ subject { service.execute(:push, bridge: bridge) }
+
context 'custom config content' do
let(:bridge) do
- double(:bridge, yaml_for_downstream: <<~YML
+ create(:ci_bridge, status: 'running', pipeline: upstream_pipeline, project: upstream_pipeline.project).tap do |bridge|
+ allow(bridge).to receive(:yaml_for_downstream).and_return(config_from_bridge)
+ end
+ end
+
+ let(:config_from_bridge) do
+ <<~YML
rspec:
script: rspec
custom:
script: custom
YML
- )
end
- subject { service.execute(:push, bridge: bridge) }
+ before do
+ allow(bridge).to receive(:yaml_for_downstream).and_return config_from_bridge
+ end
it 'creates a pipeline using the content passed in as param' do
expect(subject).to be_persisted
expect(subject.builds.map(&:name)).to eq %w[rspec custom]
expect(subject.config_source).to eq 'bridge_source'
end
+
+ context 'when bridge includes yaml from artifact' do
+ # the generated.yml is available inside the ci_build_artifacts.zip associated
+ # with the generator_job
+ let(:config_from_bridge) do
+ <<~YML
+ include:
+ - artifact: generated.yml
+ job: generator
+ YML
+ end
+
+ context 'when referenced job exists' do
+ let!(:generator_job) do
+ create(:ci_build, :artifacts,
+ project: project,
+ pipeline: upstream_pipeline,
+ name: 'generator')
+ end
+
+ it 'creates a pipeline using the content passed in as param and downloads the artifact' do
+ expect(subject).to be_persisted
+ expect(subject.builds.pluck(:name)).to eq %w[rspec time custom]
+ expect(subject.config_source).to eq 'bridge_source'
+ end
+ end
+
+ context 'when referenced job does not exist' do
+ it 'creates an empty pipeline' do
+ expect(subject).to be_persisted
+ expect(subject).to be_failed
+ expect(subject.errors.full_messages)
+ .to contain_exactly(
+ 'Job `generator` not found in parent pipeline or does not have artifacts!')
+ expect(subject.builds.pluck(:name)).to be_empty
+ expect(subject.config_source).to eq 'bridge_source'
+ end
+ end
+ end
end
end
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index 7745a78a806..3b819c795b2 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -985,6 +985,7 @@ describe Ci::CreatePipelineService do
expect(pipeline).to be_persisted
expect(build).to be_kind_of(Ci::Build)
expect(build.options).to eq(config[:release].except(:stage, :only).with_indifferent_access)
+ expect(build).to be_persisted
end
end
diff --git a/spec/services/ci/find_exposed_artifacts_service_spec.rb b/spec/services/ci/find_exposed_artifacts_service_spec.rb
index b0f190b0e7a..16e23253c34 100644
--- a/spec/services/ci/find_exposed_artifacts_service_spec.rb
+++ b/spec/services/ci/find_exposed_artifacts_service_spec.rb
@@ -172,5 +172,47 @@ describe Ci::FindExposedArtifactsService do
])
end
end
+
+ context 'cross-project MR' do
+ let!(:foreign_project) { create(:project) }
+ let!(:pipeline) { create(:ci_pipeline, project: foreign_project) }
+
+ let!(:job_show) do
+ create_job_with_artifacts({
+ artifacts: {
+ expose_as: 'file artifact',
+ paths: ['ci_artifacts.txt']
+ }
+ })
+ end
+
+ let!(:job_browse) do
+ create_job_with_artifacts({
+ artifacts: {
+ expose_as: 'directory artifact',
+ paths: ['tests_encoding/']
+ }
+ })
+ end
+
+ subject { described_class.new(project, user).for_pipeline(pipeline, limit: 2) }
+
+ it 'returns the correct path for cross-project MRs' do
+ expect(subject).to eq([
+ {
+ text: 'file artifact',
+ url: file_project_job_artifacts_path(foreign_project, job_show, 'ci_artifacts.txt'),
+ job_name: job_show.name,
+ job_path: project_job_path(foreign_project, job_show)
+ },
+ {
+ text: 'directory artifact',
+ url: browse_project_job_artifacts_path(foreign_project, job_browse),
+ job_name: job_browse.name,
+ job_path: project_job_path(foreign_project, job_browse)
+ }
+ ])
+ end
+ end
end
end
diff --git a/spec/services/ci/parse_dotenv_artifact_service_spec.rb b/spec/services/ci/parse_dotenv_artifact_service_spec.rb
new file mode 100644
index 00000000000..fc4131d262b
--- /dev/null
+++ b/spec/services/ci/parse_dotenv_artifact_service_spec.rb
@@ -0,0 +1,260 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::ParseDotenvArtifactService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+ let(:build) { create(:ci_build, pipeline: pipeline, project: project) }
+ let(:service) { described_class.new(project, nil) }
+
+ describe '#execute' do
+ subject { service.execute(artifact) }
+
+ context 'when build has a dotenv artifact' do
+ let!(:artifact) { create(:ci_job_artifact, :dotenv, job: build) }
+
+ it 'parses the artifact' do
+ expect(subject[:status]).to eq(:success)
+
+ expect(build.job_variables.as_json).to contain_exactly(
+ hash_including('key' => 'KEY1', 'value' => 'VAR1'),
+ hash_including('key' => 'KEY2', 'value' => 'VAR2'))
+ end
+
+ context 'when parse error happens' do
+ before do
+ allow(service).to receive(:scan_line!) { raise described_class::ParserError.new('Invalid Format') }
+ end
+
+ it 'returns error' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+ .with(described_class::ParserError, job_id: build.id)
+
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to eq('Invalid Format')
+ expect(subject[:http_status]).to eq(:bad_request)
+ end
+ end
+
+ context 'when artifact size is too big' do
+ before do
+ allow(artifact.file).to receive(:size) { 10.kilobytes }
+ end
+
+ it 'returns error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to eq("Dotenv Artifact Too Big. Maximum Allowable Size: #{described_class::MAX_ACCEPTABLE_DOTENV_SIZE}")
+ expect(subject[:http_status]).to eq(:bad_request)
+ end
+ end
+
+ context 'when artifact has the specified blob' do
+ before do
+ allow(artifact).to receive(:each_blob).and_yield(blob)
+ end
+
+ context 'when whitespace trails the key' do
+ let(:blob) { 'KEY1 =VAR1' }
+
+ it 'trims the trailing space' do
+ subject
+
+ expect(build.job_variables.as_json).to contain_exactly(
+ hash_including('key' => 'KEY1', 'value' => 'VAR1'))
+ end
+ end
+
+ context 'when multiple key/value pairs exist in one line' do
+ let(:blob) { 'KEY1=VAR1KEY2=VAR1' }
+
+ it 'returns error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to eq("Validation failed: Key can contain only letters, digits and '_'.")
+ expect(subject[:http_status]).to eq(:bad_request)
+ end
+ end
+
+ context 'when key contains UNICODE' do
+ let(:blob) { '🛹=skateboard' }
+
+ it 'returns error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to eq("Validation failed: Key can contain only letters, digits and '_'.")
+ expect(subject[:http_status]).to eq(:bad_request)
+ end
+ end
+
+ context 'when value contains UNICODE' do
+ let(:blob) { 'skateboard=🛹' }
+
+ it 'parses the dotenv data' do
+ subject
+
+ expect(build.job_variables.as_json).to contain_exactly(
+ hash_including('key' => 'skateboard', 'value' => '🛹'))
+ end
+ end
+
+ context 'when key contains a space' do
+ let(:blob) { 'K E Y 1=VAR1' }
+
+ it 'returns error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to eq("Validation failed: Key can contain only letters, digits and '_'.")
+ expect(subject[:http_status]).to eq(:bad_request)
+ end
+ end
+
+ context 'when value contains a space' do
+ let(:blob) { 'KEY1=V A R 1' }
+
+ it 'parses the dotenv data' do
+ subject
+
+ expect(build.job_variables.as_json).to contain_exactly(
+ hash_including('key' => 'KEY1', 'value' => 'V A R 1'))
+ end
+ end
+
+ context 'when value is double quoted' do
+ let(:blob) { 'KEY1="VAR1"' }
+
+ it 'parses the value as-is' do
+ subject
+
+ expect(build.job_variables.as_json).to contain_exactly(
+ hash_including('key' => 'KEY1', 'value' => '"VAR1"'))
+ end
+ end
+
+ context 'when value is single quoted' do
+ let(:blob) { "KEY1='VAR1'" }
+
+ it 'parses the value as-is' do
+ subject
+
+ expect(build.job_variables.as_json).to contain_exactly(
+ hash_including('key' => 'KEY1', 'value' => "'VAR1'"))
+ end
+ end
+
+ context 'when value has whitespace inside double quotes' do
+ let(:blob) { 'KEY1=" VAR1 "' }
+
+ it 'parses the value as-is' do
+ subject
+
+ expect(build.job_variables.as_json).to contain_exactly(
+ hash_including('key' => 'KEY1', 'value' => '" VAR1 "'))
+ end
+ end
+
+ context 'when key is missing' do
+ let(:blob) { '=VAR1' }
+
+ it 'returns error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to match(/Key can't be blank/)
+ expect(subject[:http_status]).to eq(:bad_request)
+ end
+ end
+
+ context 'when value is missing' do
+ let(:blob) { 'KEY1=' }
+
+ it 'parses the dotenv data' do
+ subject
+
+ expect(build.job_variables.as_json).to contain_exactly(
+ hash_including('key' => 'KEY1', 'value' => ''))
+ end
+ end
+
+ context 'when it is not dotenv format' do
+ let(:blob) { "{ 'KEY1': 'VAR1' }" }
+
+ it 'returns error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to eq('Invalid Format')
+ expect(subject[:http_status]).to eq(:bad_request)
+ end
+ end
+
+ context 'when more variables than the limit are specified in dotenv' do
+ let(:blob) do
+ StringIO.new.tap do |s|
+ (described_class::MAX_ACCEPTABLE_VARIABLES_COUNT + 1).times do |i|
+ s << "KEY#{i}=VAR#{i}\n"
+ end
+ end.string
+ end
+
+ it 'returns error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to eq("Dotenv files cannot have more than #{described_class::MAX_ACCEPTABLE_VARIABLES_COUNT} variables")
+ expect(subject[:http_status]).to eq(:bad_request)
+ end
+ end
+
+ context 'when variables are cross-referenced in dotenv' do
+ let(:blob) do
+ <<~EOS
+ KEY1=VAR1
+ KEY2=${KEY1}_Test
+ EOS
+ end
+
+ it 'does not support variable expansion in the dotenv parser' do
+ subject
+
+ expect(build.job_variables.as_json).to contain_exactly(
+ hash_including('key' => 'KEY1', 'value' => 'VAR1'),
+ hash_including('key' => 'KEY2', 'value' => '${KEY1}_Test'))
+ end
+ end
+
+ context 'when there is an empty line' do
+ let(:blob) do
+ <<~EOS
+ KEY1=VAR1
+
+ KEY2=VAR2
+ EOS
+ end
+
+ it 'does not support empty lines in the dotenv parser' do
+ subject
+
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to eq('Invalid Format')
+ expect(subject[:http_status]).to eq(:bad_request)
+ end
+ end
+
+ context 'when there is a comment' do
+ let(:blob) do
+ <<~EOS
+ KEY1=VAR1 # This is variable
+ EOS
+ end
+
+ it 'does not support comments in the dotenv parser' do
+ subject
+
+ expect(build.job_variables.as_json).to contain_exactly(
+ hash_including('key' => 'KEY1', 'value' => 'VAR1 # This is variable'))
+ end
+ end
+ end
+ end
+
+ context 'when build does not have a dotenv artifact' do
+ let!(:artifact) { }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(ArgumentError)
+ end
+ end
+ end
+end
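Taken together, the examples above pin down a strict dotenv format: exactly one KEY=VALUE pair per line, the key trimmed of surrounding whitespace, the value kept verbatim (quotes included), and no comments, blank lines, or variable expansion. A minimal line scanner consistent with those expectations, purely as an illustration and not the real Ci::ParseDotenvArtifactService; key-format and count limits surface as model validations in the specs, so they are omitted here:
class DotenvLineScannerSketch
  ParserError = Class.new(StandardError)

  # Returns [key, value] for a single line; raises for blank lines,
  # comments, or anything else that is not KEY=VALUE.
  def scan_line(line)
    match = line.chomp.match(/^(.*)=(.*)$/)
    raise ParserError, 'Invalid Format' unless match

    key, value = match.captures
    [key.strip, value] # whitespace around the key is trimmed; the value is kept as-is
  end
end

# DotenvLineScannerSketch.new.scan_line('KEY1 =VAR1')  # => ["KEY1", "VAR1"]
# DotenvLineScannerSketch.new.scan_line('KEY1="VAR1"') # => ["KEY1", "\"VAR1\""]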
diff --git a/spec/services/ci/pipeline_bridge_status_service_spec.rb b/spec/services/ci/pipeline_bridge_status_service_spec.rb
index 95f16af3af9..0b6ae976d97 100644
--- a/spec/services/ci/pipeline_bridge_status_service_spec.rb
+++ b/spec/services/ci/pipeline_bridge_status_service_spec.rb
@@ -22,6 +22,24 @@ describe Ci::PipelineBridgeStatusService do
subject
end
+
+ context 'when bridge job status raises state machine errors' do
+ before do
+ pipeline.drop!
+ bridge.drop!
+ end
+
+ it 'tracks the exception' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(
+ instance_of(Ci::Bridge::InvalidTransitionError),
+ bridge_id: bridge.id,
+ downstream_pipeline_id: pipeline.id)
+
+ subject
+ end
+ end
end
end
end
diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb
index 0f2d994efd4..2da1350e2af 100644
--- a/spec/services/ci/register_job_service_spec.rb
+++ b/spec/services/ci/register_job_service_spec.rb
@@ -612,7 +612,8 @@ module Ci
allow(attempt_counter).to receive(:increment)
expect(job_queue_duration_seconds).to receive(:observe)
.with({ shared_runner: expected_shared_runner,
- jobs_running_for_project: expected_jobs_running_for_project_first_job }, 1800)
+ jobs_running_for_project: expected_jobs_running_for_project_first_job,
+ shard: expected_shard }, 1800)
execute(runner)
end
@@ -625,7 +626,8 @@ module Ci
allow(attempt_counter).to receive(:increment)
expect(job_queue_duration_seconds).to receive(:observe)
.with({ shared_runner: expected_shared_runner,
- jobs_running_for_project: expected_jobs_running_for_project_third_job }, 1800)
+ jobs_running_for_project: expected_jobs_running_for_project_third_job,
+ shard: expected_shard }, 1800)
execute(runner)
end
@@ -638,13 +640,28 @@ module Ci
end
context 'when shared runner is used' do
- let(:runner) { shared_runner }
+ let(:runner) { create(:ci_runner, :instance, tag_list: %w(tag1 tag2)) }
let(:expected_shared_runner) { true }
+ let(:expected_shard) { Ci::RegisterJobService::DEFAULT_METRICS_SHARD }
let(:expected_jobs_running_for_project_first_job) { 0 }
let(:expected_jobs_running_for_project_third_job) { 2 }
it_behaves_like 'metrics collector'
+ context 'when metrics_shard tag is defined' do
+ let(:runner) { create(:ci_runner, :instance, tag_list: %w(tag1 metrics_shard::shard_tag tag2)) }
+ let(:expected_shard) { 'shard_tag' }
+
+ it_behaves_like 'metrics collector'
+ end
+
+ context 'when multiple metrics_shard tags are defined' do
+ let(:runner) { create(:ci_runner, :instance, tag_list: %w(tag1 metrics_shard::shard_tag metrics_shard::shard_tag_2 tag2)) }
+ let(:expected_shard) { 'shard_tag' }
+
+ it_behaves_like 'metrics collector'
+ end
+
context 'when pending job with queued_at=nil is used' do
before do
pending_job.update(queued_at: nil)
@@ -662,8 +679,9 @@ module Ci
end
context 'when specific runner is used' do
- let(:runner) { specific_runner }
+ let(:runner) { create(:ci_runner, :project, projects: [project], tag_list: %w(tag1 metrics_shard::shard_tag tag2)) }
let(:expected_shared_runner) { false }
+ let(:expected_shard) { Ci::RegisterJobService::DEFAULT_METRICS_SHARD }
let(:expected_jobs_running_for_project_first_job) { '+Inf' }
let(:expected_jobs_running_for_project_third_job) { '+Inf' }
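The metrics shard examples above rely on a naming convention in runner tags: a tag of the form metrics_shard::<name> selects the shard label, the first such tag wins when several are present, and a default label is used otherwise. A small sketch of that lookup (the constant values and the method name are assumptions):
METRICS_SHARD_TAG_PREFIX = 'metrics_shard::'
DEFAULT_METRICS_SHARD = 'default' # placeholder for Ci::RegisterJobService::DEFAULT_METRICS_SHARD

def metrics_shard_for(tag_list)
  shard_tag = tag_list.find { |tag| tag.start_with?(METRICS_SHARD_TAG_PREFIX) }
  shard_tag ? shard_tag.delete_prefix(METRICS_SHARD_TAG_PREFIX) : DEFAULT_METRICS_SHARD
end

# metrics_shard_for(%w[tag1 metrics_shard::shard_tag metrics_shard::shard_tag_2 tag2]) # => "shard_tag"
# metrics_shard_for(%w[tag1 tag2])                                                     # => "default"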
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index 8ca9ce86574..0ed4dcec93e 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -36,7 +36,8 @@ describe Ci::RetryBuildService do
job_artifacts_performance job_artifacts_lsif
job_artifacts_codequality job_artifacts_metrics scheduled_at
job_variables waiting_for_resource_at job_artifacts_metrics_referee
- job_artifacts_network_referee].freeze
+ job_artifacts_network_referee job_artifacts_dotenv
+ job_artifacts_cobertura needs].freeze
IGNORE_ACCESSORS =
%i[type lock_version target_url base_tags trace_sections
@@ -46,7 +47,8 @@ describe Ci::RetryBuildService do
sourced_pipelines artifacts_file_store artifacts_metadata_store
metadata runner_session trace_chunks upstream_pipeline_id
artifacts_file artifacts_metadata artifacts_size commands
- resource resource_group_id processed security_scans].freeze
+ resource resource_group_id processed security_scans author
+ pipeline_id].freeze
shared_examples 'build duplication' do
let(:another_pipeline) { create(:ci_empty_pipeline, project: project) }
@@ -79,8 +81,15 @@ describe Ci::RetryBuildService do
end
describe 'clone accessors' do
+ let(:forbidden_associations) do
+ Ci::Build.reflect_on_all_associations.each_with_object(Set.new) do |assoc, memo|
+ memo << assoc.name unless assoc.macro == :belongs_to
+ end
+ end
+
CLONE_ACCESSORS.each do |attribute|
it "clones #{attribute} build attribute" do
+ expect(attribute).not_to be_in(forbidden_associations), "association #{attribute} must be `belongs_to`"
expect(build.send(attribute)).not_to be_nil
expect(new_build.send(attribute)).not_to be_nil
expect(new_build.send(attribute)).to eq build.send(attribute)
@@ -97,9 +106,17 @@ describe Ci::RetryBuildService do
expect(new_build.protected).to eq build.protected
end
end
+
+ it 'clones only the needs attributes' do
+ expect(new_build.needs.exists?).to be_truthy
+ expect(build.needs.exists?).to be_truthy
+
+ expect(new_build.needs_attributes).to match(build.needs_attributes)
+ expect(new_build.needs).not_to match(build.needs)
+ end
end
- describe 'reject acessors' do
+ describe 'reject accessors' do
REJECT_ACCESSORS.each do |attribute|
it "does not clone #{attribute} build attribute" do
expect(new_build.send(attribute)).not_to eq build.send(attribute)
@@ -117,8 +134,9 @@ describe Ci::RetryBuildService do
#
current_accessors =
Ci::Build.attribute_names.map(&:to_sym) +
+ Ci::Build.attribute_aliases.keys.map(&:to_sym) +
Ci::Build.reflect_on_all_associations.map(&:name) +
- [:tag_list]
+ [:tag_list, :needs_attributes]
current_accessors.uniq!
@@ -221,6 +239,10 @@ describe Ci::RetryBuildService do
it 'creates a new deployment' do
expect { new_build }.to change { Deployment.count }.by(1)
end
+
+ it 'persists expanded environment name' do
+ expect(new_build.metadata.expanded_environment_name).to eq('production')
+ end
end
context 'when scheduling_type of build is nil' do
diff --git a/spec/services/ci/update_ci_ref_status_service_spec.rb b/spec/services/ci/update_ci_ref_status_service_spec.rb
new file mode 100644
index 00000000000..8b60586318d
--- /dev/null
+++ b/spec/services/ci/update_ci_ref_status_service_spec.rb
@@ -0,0 +1,169 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::UpdateCiRefStatusService do
+ describe '#call' do
+ subject { described_class.new(pipeline) }
+
+ shared_examples 'creates ci_ref' do
+ it 'creates a ci_ref with the pipeline attributes' do
+ expect do
+ expect(subject.call).to eq(true)
+ end.to change { Ci::Ref.count }.by(1)
+
+ created_ref = pipeline.reload.ref_status
+ %w[ref tag project status].each do |attr|
+ expect(created_ref[attr]).to eq(pipeline[attr])
+ end
+ end
+
+ it 'calls PipelineNotificationWorker passing the ref_status' do
+ expect(PipelineNotificationWorker).to receive(:perform_async).with(pipeline.id, ref_status: pipeline.status)
+
+ subject.call
+ end
+ end
+
+ shared_examples 'updates ci_ref' do
+ where(:ref_status, :pipeline_status, :next_status) do
+ [
+ %w[failed success fixed],
+ %w[failed failed failed],
+ %w[success success success],
+ %w[success failed failed]
+ ]
+ end
+
+ with_them do
+ let(:ci_ref) { create(:ci_ref, status: ref_status) }
+ let(:pipeline) { create(:ci_pipeline, status: pipeline_status, project: ci_ref.project, ref: ci_ref.ref) }
+
+ it 'sets ci_ref.status to next_status' do
+ expect do
+ expect(subject.call).to eq(true)
+ expect(ci_ref.reload.status).to eq(next_status)
+ end.not_to change { Ci::Ref.count }
+ end
+
+ it 'calls PipelineNotificationWorker passing the ref_status' do
+ expect(PipelineNotificationWorker).to receive(:perform_async).with(pipeline.id, ref_status: next_status)
+
+ subject.call
+ end
+ end
+ end
+
+ shared_examples 'does a noop' do
+ it "doesn't change ci_ref" do
+ expect do
+ expect do
+ expect(subject.call).to eq(false)
+ end.not_to change { ci_ref.reload.status }
+ end.not_to change { Ci::Ref.count }
+ end
+
+ it "doesn't call PipelineNotificationWorker" do
+ expect(PipelineNotificationWorker).not_to receive(:perform_async)
+
+ subject.call
+ end
+ end
+
+ context "ci_ref doesn't exists" do
+ let(:pipeline) { create(:ci_pipeline, :success, ref: 'new-ref') }
+
+ it_behaves_like 'creates ci_ref'
+
+ context 'when an ActiveRecord::RecordNotUnique validation is raised' do
+ let(:ci_ref) { create(:ci_ref, status: 'failed') }
+ let(:pipeline) { create(:ci_pipeline, status: :success, project: ci_ref.project, ref: ci_ref.ref) }
+
+ it 'reloads the ci_ref and retries once' do
+ subject.instance_variable_set("@ref", subject.send(:build_ref))
+
+ expect do
+ expect(subject.call).to eq(true)
+ end.not_to change { Ci::Ref.count }
+ expect(ci_ref.reload.status).to eq('fixed')
+ end
+
+ it 'raises error on multiple retries' do
+ allow_any_instance_of(Ci::Ref).to receive(:update)
+ .and_raise(ActiveRecord::RecordNotUnique)
+
+ expect { subject.call }.to raise_error(ActiveRecord::RecordNotUnique)
+ end
+ end
+ end
+
+ context 'ci_ref exists' do
+ let!(:ci_ref) { create(:ci_ref, status: 'failed') }
+ let(:pipeline) { ci_ref.pipelines.first }
+
+ it_behaves_like 'updates ci_ref'
+
+ context 'pipeline status is invalid' do
+ let!(:pipeline) { create(:ci_pipeline, :running, project: ci_ref.project, ref: ci_ref.ref, tag: ci_ref.tag) }
+
+ it_behaves_like 'does a noop'
+ end
+
+ context 'newer pipeline finished' do
+ let(:newer_pipeline) { create(:ci_pipeline, :success, project: ci_ref.project, ref: ci_ref.ref, tag: ci_ref.tag) }
+
+ before do
+ ci_ref.update!(last_updated_by_pipeline: newer_pipeline)
+ end
+
+ it_behaves_like 'does a noop'
+ end
+
+ context 'pipeline is retried' do
+ before do
+ ci_ref.update!(last_updated_by_pipeline: pipeline)
+ end
+
+ it_behaves_like 'updates ci_ref'
+ end
+
+ context 'ref is stale' do
+ let(:pipeline1) { create(:ci_pipeline, :success, project: ci_ref.project, ref: ci_ref.ref, tag: ci_ref.tag) }
+ let(:pipeline2) { create(:ci_pipeline, :success, project: ci_ref.project, ref: ci_ref.ref, tag: ci_ref.tag) }
+
+ it 'reloads the ref and retries' do
+ service1 = described_class.new(pipeline1)
+ service2 = described_class.new(pipeline2)
+
+ service2.send(:ref)
+ service1.call
+ expect(ci_ref.reload.status).to eq('fixed')
+ expect do
+ expect(service2.call).to eq(true)
+ # We expect 'success' in this case rather than 'fixed' because
+ # the ref is correctly reloaded on stale error.
+ expect(ci_ref.reload.status).to eq('success')
+ end.not_to change { Ci::Ref.count }
+ end
+
+ it 'aborts when a newer pipeline finished' do
+ service1 = described_class.new(pipeline1)
+ service2 = described_class.new(pipeline2)
+
+ service2.call
+ expect do
+ expect(service1.call).to eq(false)
+ expect(ci_ref.reload.status).to eq('fixed')
+ end.not_to change { Ci::Ref.count }
+ end
+ end
+
+ context 'ref exists as both tag/branch and tag' do
+ let(:pipeline) { create(:ci_pipeline, :failed, project: ci_ref.project, ref: ci_ref.ref, tag: true) }
+ let!(:branch_pipeline) { create(:ci_pipeline, :success, project: ci_ref.project, ref: ci_ref.ref, tag: false) }
+
+ it_behaves_like 'creates ci_ref'
+ end
+ end
+ end
+end
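The 'updates ci_ref' table above encodes a simple transition rule: a ref whose last status was failed and that now succeeds becomes 'fixed'; in every other combination the ref takes the pipeline's status. As a worked illustration (the method name is an assumption, not the service's API):
def next_ref_status(ref_status, pipeline_status)
  if ref_status == 'failed' && pipeline_status == 'success'
    'fixed'
  else
    pipeline_status
  end
end

# next_ref_status('failed', 'success') # => "fixed"
# next_ref_status('success', 'failed') # => "failed"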
diff --git a/spec/services/clusters/applications/check_installation_progress_service_spec.rb b/spec/services/clusters/applications/check_installation_progress_service_spec.rb
index 2f224d40920..4b8db405101 100644
--- a/spec/services/clusters/applications/check_installation_progress_service_spec.rb
+++ b/spec/services/clusters/applications/check_installation_progress_service_spec.rb
@@ -144,7 +144,7 @@ describe Clusters::Applications::CheckInstallationProgressService, '#execute' do
end
it 'removes the installation POD' do
- expect_next_instance_of(Gitlab::Kubernetes::Helm::Api) do |instance|
+ expect_next_instance_of(Gitlab::Kubernetes::Helm::API) do |instance|
expect(instance).to receive(:delete_pod!).with(kind_of(String)).once
end
expect(service).to receive(:remove_installation_pod).and_call_original
diff --git a/spec/services/clusters/applications/check_uninstall_progress_service_spec.rb b/spec/services/clusters/applications/check_uninstall_progress_service_spec.rb
index 93557c6b229..ffb658330d3 100644
--- a/spec/services/clusters/applications/check_uninstall_progress_service_spec.rb
+++ b/spec/services/clusters/applications/check_uninstall_progress_service_spec.rb
@@ -48,7 +48,7 @@ describe Clusters::Applications::CheckUninstallProgressService do
let(:phase) { Gitlab::Kubernetes::Pod::SUCCEEDED }
before do
- expect_next_instance_of(Gitlab::Kubernetes::Helm::Api) do |instance|
+ expect_next_instance_of(Gitlab::Kubernetes::Helm::API) do |instance|
expect(instance).to receive(:delete_pod!).with(kind_of(String)).once
end
expect(service).to receive(:pod_phase).once.and_return(phase)
diff --git a/spec/services/clusters/applications/create_service_spec.rb b/spec/services/clusters/applications/create_service_spec.rb
index f62af86f1bf..0b48af408e1 100644
--- a/spec/services/clusters/applications/create_service_spec.rb
+++ b/spec/services/clusters/applications/create_service_spec.rb
@@ -137,10 +137,14 @@ describe Clusters::Applications::CreateService do
let(:params) do
{
application: 'knative',
- hostname: 'example.com'
+ hostname: 'example.com',
+ pages_domain_id: domain.id
}
end
+ let(:domain) { create(:pages_domain, :instance_serverless) }
+ let(:associate_domain_service) { double('AssociateDomainService') }
+
before do
expect_any_instance_of(Clusters::Applications::Knative)
.to receive(:make_scheduled!)
@@ -158,6 +162,20 @@ describe Clusters::Applications::CreateService do
it 'sets the hostname' do
expect(subject.hostname).to eq('example.com')
end
+
+ it 'executes AssociateDomainService' do
+ expect(Serverless::AssociateDomainService).to receive(:new) do |knative, args|
+ expect(knative).to be_a(Clusters::Applications::Knative)
+ expect(args[:pages_domain_id]).to eq(params[:pages_domain_id])
+ expect(args[:creator]).to eq(user)
+
+ associate_domain_service
+ end
+
+ expect(associate_domain_service).to receive(:execute)
+
+ subject
+ end
end
context 'elastic stack application' do
diff --git a/spec/services/clusters/applications/install_service_spec.rb b/spec/services/clusters/applications/install_service_spec.rb
index 9e1ae5e8742..2441cc595a3 100644
--- a/spec/services/clusters/applications/install_service_spec.rb
+++ b/spec/services/clusters/applications/install_service_spec.rb
@@ -7,7 +7,7 @@ describe Clusters::Applications::InstallService do
let(:application) { create(:clusters_applications_helm, :scheduled) }
let!(:install_command) { application.install_command }
let(:service) { described_class.new(application) }
- let(:helm_client) { instance_double(Gitlab::Kubernetes::Helm::Api) }
+ let(:helm_client) { instance_double(Gitlab::Kubernetes::Helm::API) }
before do
allow(service).to receive(:install_command).and_return(install_command)
diff --git a/spec/services/clusters/applications/patch_service_spec.rb b/spec/services/clusters/applications/patch_service_spec.rb
index c6d0fee43c4..dc9843a5116 100644
--- a/spec/services/clusters/applications/patch_service_spec.rb
+++ b/spec/services/clusters/applications/patch_service_spec.rb
@@ -7,7 +7,7 @@ describe Clusters::Applications::PatchService do
let(:application) { create(:clusters_applications_knative, :scheduled) }
let!(:update_command) { application.update_command }
let(:service) { described_class.new(application) }
- let(:helm_client) { instance_double(Gitlab::Kubernetes::Helm::Api) }
+ let(:helm_client) { instance_double(Gitlab::Kubernetes::Helm::API) }
before do
allow(service).to receive(:update_command).and_return(update_command)
diff --git a/spec/services/clusters/applications/uninstall_service_spec.rb b/spec/services/clusters/applications/uninstall_service_spec.rb
index 16497d752b2..6d7f0478b20 100644
--- a/spec/services/clusters/applications/uninstall_service_spec.rb
+++ b/spec/services/clusters/applications/uninstall_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe Clusters::Applications::UninstallService, '#execute' do
let(:application) { create(:clusters_applications_prometheus, :scheduled) }
let(:service) { described_class.new(application) }
- let(:helm_client) { instance_double(Gitlab::Kubernetes::Helm::Api) }
+ let(:helm_client) { instance_double(Gitlab::Kubernetes::Helm::API) }
let(:worker_class) { Clusters::Applications::WaitForUninstallAppWorker }
before do
diff --git a/spec/services/clusters/applications/update_service_spec.rb b/spec/services/clusters/applications/update_service_spec.rb
index 2d299882af0..4676951faff 100644
--- a/spec/services/clusters/applications/update_service_spec.rb
+++ b/spec/services/clusters/applications/update_service_spec.rb
@@ -7,8 +7,9 @@ describe Clusters::Applications::UpdateService do
let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
let(:user) { create(:user) }
- let(:params) { { application: 'knative', hostname: 'udpate.example.com' } }
+ let(:params) { { application: 'knative', hostname: 'update.example.com', pages_domain_id: domain.id } }
let(:service) { described_class.new(cluster, user, params) }
+ let(:domain) { create(:pages_domain, :instance_serverless) }
subject { service.execute(test_request) }
@@ -51,6 +52,24 @@ describe Clusters::Applications::UpdateService do
subject
end
+
+ context 'knative application' do
+ let(:associate_domain_service) { double('AssociateDomainService') }
+
+ it 'executes AssociateDomainService' do
+ expect(Serverless::AssociateDomainService).to receive(:new) do |knative, args|
+ expect(knative.id).to eq(application.id)
+ expect(args[:pages_domain_id]).to eq(params[:pages_domain_id])
+ expect(args[:creator]).to eq(user)
+
+ associate_domain_service
+ end
+
+ expect(associate_domain_service).to receive(:execute)
+
+ subject
+ end
+ end
end
context 'application is not schedulable' do
diff --git a/spec/services/clusters/applications/upgrade_service_spec.rb b/spec/services/clusters/applications/upgrade_service_spec.rb
index 48fa79eeb97..86fb06375f1 100644
--- a/spec/services/clusters/applications/upgrade_service_spec.rb
+++ b/spec/services/clusters/applications/upgrade_service_spec.rb
@@ -7,7 +7,7 @@ describe Clusters::Applications::UpgradeService do
let(:application) { create(:clusters_applications_helm, :scheduled) }
let!(:install_command) { application.install_command }
let(:service) { described_class.new(application) }
- let(:helm_client) { instance_double(Gitlab::Kubernetes::Helm::Api) }
+ let(:helm_client) { instance_double(Gitlab::Kubernetes::Helm::API) }
before do
allow(service).to receive(:install_command).and_return(install_command)
diff --git a/spec/services/clusters/kubernetes/configure_istio_ingress_service_spec.rb b/spec/services/clusters/kubernetes/configure_istio_ingress_service_spec.rb
index 572e2b91187..9238f7debd0 100644
--- a/spec/services/clusters/kubernetes/configure_istio_ingress_service_spec.rb
+++ b/spec/services/clusters/kubernetes/configure_istio_ingress_service_spec.rb
@@ -194,4 +194,36 @@ describe Clusters::Kubernetes::ConfigureIstioIngressService, '#execute' do
)
end
end
+
+ context 'when there is an error' do
+ before do
+ cluster.application_knative = create(:clusters_applications_knative)
+
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:configure_passthrough).and_raise(error)
+ end
+ end
+
+ context 'Kubeclient::HttpError' do
+ let(:error) { Kubeclient::HttpError.new(404, nil, nil) }
+
+ it 'puts Knative into an errored state' do
+ subject
+
+ expect(cluster.application_knative).to be_errored
+ expect(cluster.application_knative.status_reason).to eq('Kubernetes error: 404')
+ end
+ end
+
+ context 'StandardError' do
+ let(:error) { RuntimeError.new('something went wrong') }
+
+ it 'puts Knative into an errored state' do
+ subject
+
+ expect(cluster.application_knative).to be_errored
+ expect(cluster.application_knative.status_reason).to eq('Failed to update.')
+ end
+ end
+ end
end
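The two error cases above differ only in how the status reason is built: Kubernetes API failures expose their HTTP code, anything else gets a generic message. A compact sketch of that mapping (the method name is assumed; the 404 in the spec comes from the Kubeclient error's code):
def knative_error_reason(error)
  case error
  when Kubeclient::HttpError
    "Kubernetes error: #{error.error_code}"
  else
    'Failed to update.'
  end
end

# knative_error_reason(Kubeclient::HttpError.new(404, nil, nil)) # => "Kubernetes error: 404"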
diff --git a/spec/services/clusters/update_service_spec.rb b/spec/services/clusters/update_service_spec.rb
index fdbed4fa5d8..d487edd8850 100644
--- a/spec/services/clusters/update_service_spec.rb
+++ b/spec/services/clusters/update_service_spec.rb
@@ -86,7 +86,7 @@ describe Clusters::UpdateService do
it 'rejects changes' do
is_expected.to eq(false)
- expect(cluster.errors.full_messages).to include('cannot modify during creation')
+ expect(cluster.errors.full_messages).to include('Cannot modify provider during creation')
end
end
end
diff --git a/spec/services/commits/cherry_pick_service_spec.rb b/spec/services/commits/cherry_pick_service_spec.rb
index ead1932c2d1..3b797b8ac02 100644
--- a/spec/services/commits/cherry_pick_service_spec.rb
+++ b/spec/services/commits/cherry_pick_service_spec.rb
@@ -61,18 +61,6 @@ describe Commits::CherryPickService do
expect(mr_notes.length).to eq(1)
expect(mr_notes[0].commit_id).to eq(result[:result])
end
-
- context 'when :track_mr_picking feature flag is disabled' do
- before do
- stub_feature_flags(track_mr_picking: false)
- end
-
- it 'does not add system notes' do
- expect do
- cherry_pick(merge_commit_sha, branch_name)
- end.not_to change { Note.count }
- end
- end
end
def find_cherry_pick_notes(noteable)
diff --git a/spec/services/deploy_tokens/create_service_spec.rb b/spec/services/deploy_tokens/create_service_spec.rb
deleted file mode 100644
index fbb66fe4cb7..00000000000
--- a/spec/services/deploy_tokens/create_service_spec.rb
+++ /dev/null
@@ -1,63 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe DeployTokens::CreateService do
- let(:project) { create(:project) }
- let(:user) { create(:user) }
- let(:deploy_token_params) { attributes_for(:deploy_token) }
-
- describe '#execute' do
- subject { described_class.new(project, user, deploy_token_params).execute }
-
- context 'when the deploy token is valid' do
- it 'creates a new DeployToken' do
- expect { subject }.to change { DeployToken.count }.by(1)
- end
-
- it 'creates a new ProjectDeployToken' do
- expect { subject }.to change { ProjectDeployToken.count }.by(1)
- end
-
- it 'returns a DeployToken' do
- expect(subject).to be_an_instance_of DeployToken
- end
- end
-
- context 'when expires at date is not passed' do
- let(:deploy_token_params) { attributes_for(:deploy_token, expires_at: '') }
-
- it 'sets Forever.date' do
- expect(subject.read_attribute(:expires_at)).to eq(Forever.date)
- end
- end
-
- context 'when username is empty string' do
- let(:deploy_token_params) { attributes_for(:deploy_token, username: '') }
-
- it 'converts it to nil' do
- expect(subject.read_attribute(:username)).to be_nil
- end
- end
-
- context 'when username is provided' do
- let(:deploy_token_params) { attributes_for(:deploy_token, username: 'deployer') }
-
- it 'keeps the provided username' do
- expect(subject.read_attribute(:username)).to eq('deployer')
- end
- end
-
- context 'when the deploy token is invalid' do
- let(:deploy_token_params) { attributes_for(:deploy_token, read_repository: false, read_registry: false) }
-
- it 'does not create a new DeployToken' do
- expect { subject }.not_to change { DeployToken.count }
- end
-
- it 'does not create a new ProjectDeployToken' do
- expect { subject }.not_to change { ProjectDeployToken.count }
- end
- end
- end
-end
diff --git a/spec/services/deployments/after_create_service_spec.rb b/spec/services/deployments/after_create_service_spec.rb
index 51c6de2c0b9..5a69ffd8b9c 100644
--- a/spec/services/deployments/after_create_service_spec.rb
+++ b/spec/services/deployments/after_create_service_spec.rb
@@ -49,7 +49,7 @@ describe Deployments::AfterCreateService do
it 'creates ref' do
expect_any_instance_of(Repository)
.to receive(:create_ref)
- .with(deployment.ref, deployment.send(:ref_path))
+ .with(deployment.sha, "refs/environments/production/deployments/#{deployment.iid}")
service.execute
end
@@ -177,6 +177,26 @@ describe Deployments::AfterCreateService do
it { is_expected.to eq('http://review/host') }
end
+ context 'when job variables are generated during runtime' do
+ let(:job) do
+ create(:ci_build,
+ :with_deployment,
+ pipeline: pipeline,
+ environment: 'review/$CI_COMMIT_REF_NAME',
+ project: project,
+ job_variables: [job_variable],
+ options: { environment: { name: 'review/$CI_COMMIT_REF_NAME', url: 'http://$DYNAMIC_ENV_URL' } })
+ end
+
+ let(:job_variable) do
+ build(:ci_job_variable, :dotenv_source, key: 'DYNAMIC_ENV_URL', value: 'abc.test.com')
+ end
+
+ it 'expands the environment URL from the dynamic variable' do
+ is_expected.to eq('http://abc.test.com')
+ end
+ end
+
context 'when yaml environment does not have url' do
let(:job) { create(:ci_build, :with_deployment, pipeline: pipeline, environment: 'staging', project: project) }
diff --git a/spec/services/deployments/link_merge_requests_service_spec.rb b/spec/services/deployments/link_merge_requests_service_spec.rb
index 605f2cfdc51..aa2cecbf897 100644
--- a/spec/services/deployments/link_merge_requests_service_spec.rb
+++ b/spec/services/deployments/link_merge_requests_service_spec.rb
@@ -133,51 +133,32 @@ describe Deployments::LinkMergeRequestsService do
expect(deploy.merge_requests).to include(mr1, picked_mr)
end
- context 'when :track_mr_picking feature flag is disabled' do
- before do
- stub_feature_flags(track_mr_picking: false)
- end
+ it "doesn't link the same merge_request twice" do
+ create(:merge_request, :merged, merge_commit_sha: mr1_merge_commit_sha,
+ source_project: project)
- it 'does not link picked merge requests' do
- environment = create(:environment, project: project)
- deploy =
- create(:deployment, :success, project: project, environment: environment)
-
- picked_mr = create(
- :merge_request,
- :merged,
- merge_commit_sha: '123abc',
- source_project: project,
- target_project: project
- )
+ picked_mr = create(:merge_request, :merged, merge_commit_sha: '123abc',
+ source_project: project)
- mr1 = create(
- :merge_request,
- :merged,
- merge_commit_sha: mr1_merge_commit_sha,
- source_project: project,
- target_project: project
- )
+ # the first MR includes c1c67abba which is a cherry-pick of the fake picked_mr merge request
+ create(:track_mr_picking_note, noteable: picked_mr, project: project, commit_id: 'c1c67abbaf91f624347bb3ae96eabe3a1b742478')
- # mr1 includes c1c67abba which is a cherry-pick of the fake picked_mr merge request
- create(:track_mr_picking_note, noteable: picked_mr, project: project, commit_id: 'c1c67abbaf91f624347bb3ae96eabe3a1b742478')
+ environment = create(:environment, project: project)
+ old_deploy =
+ create(:deployment, :success, project: project, environment: environment)
- mr2 = create(
- :merge_request,
- :merged,
- merge_commit_sha: mr2_merge_commit_sha,
- source_project: project,
- target_project: project
- )
+ # manually linking all the MRs to the old_deploy
+ old_deploy.link_merge_requests(project.merge_requests)
- described_class.new(deploy).link_merge_requests_for_range(
- first_deployment_sha,
- mr2_merge_commit_sha
- )
+ deploy =
+ create(:deployment, :success, project: project, environment: environment)
- expect(deploy.merge_requests).to include(mr1, mr2)
- expect(deploy.merge_requests).not_to include(picked_mr)
- end
+ described_class.new(deploy).link_merge_requests_for_range(
+ first_deployment_sha,
+ mr1_merge_commit_sha
+ )
+
+ expect(deploy.merge_requests).to be_empty
end
end
diff --git a/spec/services/error_tracking/issue_details_service_spec.rb b/spec/services/error_tracking/issue_details_service_spec.rb
index 9f217deda21..66b8988f8e3 100644
--- a/spec/services/error_tracking/issue_details_service_spec.rb
+++ b/spec/services/error_tracking/issue_details_service_spec.rb
@@ -27,7 +27,7 @@ describe ErrorTracking::IssueDetailsService do
create(:sentry_issue, issue: gitlab_issue, sentry_issue_identifier: detailed_error.id)
expect(result[:issue].gitlab_issue).to include(
- "http", "/#{project.full_path}/issues/#{gitlab_issue.iid}"
+ "http", "/#{project.full_path}/-/issues/#{gitlab_issue.iid}"
)
end
diff --git a/spec/services/error_tracking/issue_update_service_spec.rb b/spec/services/error_tracking/issue_update_service_spec.rb
index 78388328a22..ffb15901f80 100644
--- a/spec/services/error_tracking/issue_update_service_spec.rb
+++ b/spec/services/error_tracking/issue_update_service_spec.rb
@@ -43,6 +43,12 @@ describe ErrorTracking::IssueUpdateService do
update_service.execute
end
+ it 'clears the reactive cache' do
+ expect(error_tracking_setting).to receive(:expire_issues_cache)
+
+ result
+ end
+
context 'related issue and resolving' do
let(:issue) { create(:issue, project: project) }
let(:sentry_issue) { create(:sentry_issue, issue: issue) }
diff --git a/spec/services/git/branch_push_service_spec.rb b/spec/services/git/branch_push_service_spec.rb
index 8b4f45010ed..acd14005c69 100644
--- a/spec/services/git/branch_push_service_spec.rb
+++ b/spec/services/git/branch_push_service_spec.rb
@@ -129,6 +129,21 @@ describe Git::BranchPushService, services: true do
end
end
end
+
+ context 'when .gitlab-ci.yml file is invalid' do
+ before do
+ stub_ci_pipeline_yaml_file('invalid yaml file')
+ end
+
+ it 'persists an error pipeline' do
+ expect { subject }.to change { Ci::Pipeline.count }
+
+ pipeline = Ci::Pipeline.last
+ expect(pipeline).to be_push
+ expect(pipeline).to be_failed
+ expect(pipeline).to be_config_error
+ end
+ end
end
describe "Updates merge requests" do
@@ -186,7 +201,7 @@ describe Git::BranchPushService, services: true do
end
it "when pushing a branch for the first time with default branch protection disabled" do
- stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_NONE)
+ expect(project.namespace).to receive(:default_branch_protection).and_return(Gitlab::Access::PROTECTION_NONE)
expect(project).to receive(:execute_hooks)
expect(project.default_branch).to eq("master")
@@ -195,7 +210,7 @@ describe Git::BranchPushService, services: true do
end
it "when pushing a branch for the first time with default branch protection set to 'developers can push'" do
- stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_DEV_CAN_PUSH)
+ expect(project.namespace).to receive(:default_branch_protection).and_return(Gitlab::Access::PROTECTION_DEV_CAN_PUSH)
expect(project).to receive(:execute_hooks)
expect(project.default_branch).to eq("master")
@@ -208,7 +223,7 @@ describe Git::BranchPushService, services: true do
end
it "when pushing a branch for the first time with an existing branch permission configured" do
- stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_DEV_CAN_PUSH)
+ expect(project.namespace).to receive(:default_branch_protection).and_return(Gitlab::Access::PROTECTION_DEV_CAN_PUSH)
create(:protected_branch, :no_one_can_push, :developers_can_merge, project: project, name: 'master')
expect(project).to receive(:execute_hooks)
@@ -223,7 +238,7 @@ describe Git::BranchPushService, services: true do
end
it "when pushing a branch for the first time with default branch protection set to 'developers can merge'" do
- stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_DEV_CAN_MERGE)
+ expect(project.namespace).to receive(:default_branch_protection).and_return(Gitlab::Access::PROTECTION_DEV_CAN_MERGE)
expect(project).to receive(:execute_hooks)
expect(project.default_branch).to eq("master")
diff --git a/spec/services/git/process_ref_changes_service_spec.rb b/spec/services/git/process_ref_changes_service_spec.rb
index 35ddf95b5f6..fc5e379f51d 100644
--- a/spec/services/git/process_ref_changes_service_spec.rb
+++ b/spec/services/git/process_ref_changes_service_spec.rb
@@ -55,36 +55,14 @@ describe Git::ProcessRefChangesService do
stub_application_setting(push_event_hooks_limit: push_event_hooks_limit)
end
- context 'git_push_execute_all_project_hooks is disabled' do
- before do
- stub_feature_flags(git_push_execute_all_project_hooks: false)
- end
-
- it "calls #{push_service_class} with execute_project_hooks set to false" do
- expect(push_service_class)
- .to receive(:new)
- .with(project, project.owner, hash_including(execute_project_hooks: false))
- .exactly(changes.count).times
- .and_return(service)
-
- subject.execute
- end
- end
-
- context 'git_push_execute_all_project_hooks is enabled' do
- before do
- stub_feature_flags(git_push_execute_all_project_hooks: true)
- end
-
- it "calls #{push_service_class} with execute_project_hooks set to true" do
- expect(push_service_class)
- .to receive(:new)
- .with(project, project.owner, hash_including(execute_project_hooks: true))
- .exactly(changes.count).times
- .and_return(service)
+ it "calls #{push_service_class} with execute_project_hooks set to false" do
+ expect(push_service_class)
+ .to receive(:new)
+ .with(project, project.owner, hash_including(execute_project_hooks: false))
+ .exactly(changes.count).times
+ .and_return(service)
- subject.execute
- end
+ subject.execute
end
end
diff --git a/spec/services/groups/deploy_tokens/create_service_spec.rb b/spec/services/groups/deploy_tokens/create_service_spec.rb
new file mode 100644
index 00000000000..20c609bc828
--- /dev/null
+++ b/spec/services/groups/deploy_tokens/create_service_spec.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Groups::DeployTokens::CreateService do
+ it_behaves_like 'a deploy token creation service' do
+ let(:entity) { create(:group) }
+ let(:deploy_token_class) { GroupDeployToken }
+ end
+end
diff --git a/spec/services/groups/import_export/export_service_spec.rb b/spec/services/groups/import_export/export_service_spec.rb
index b1f76964722..0d7fa98e16b 100644
--- a/spec/services/groups/import_export/export_service_spec.rb
+++ b/spec/services/groups/import_export/export_service_spec.rb
@@ -7,7 +7,7 @@ describe Groups::ImportExport::ExportService do
let!(:user) { create(:user) }
let(:group) { create(:group) }
let(:shared) { Gitlab::ImportExport::Shared.new(group) }
- let(:export_path) { shared.export_path }
+ let(:archive_path) { shared.archive_path }
let(:service) { described_class.new(group: group, user: user, params: { shared: shared }) }
before do
@@ -15,11 +15,11 @@ describe Groups::ImportExport::ExportService do
end
after do
- FileUtils.rm_rf(export_path)
+ FileUtils.rm_rf(archive_path)
end
it 'saves the models' do
- expect(Gitlab::ImportExport::GroupTreeSaver).to receive(:new).and_call_original
+ expect(Gitlab::ImportExport::Group::TreeSaver).to receive(:new).and_call_original
service.execute
end
@@ -29,7 +29,7 @@ describe Groups::ImportExport::ExportService do
service.execute
expect(group.import_export_upload.export_file.file).not_to be_nil
- expect(File.directory?(export_path)).to eq(false)
+ expect(File.directory?(archive_path)).to eq(false)
expect(File.exist?(shared.archive_path)).to eq(false)
end
end
@@ -38,33 +38,69 @@ describe Groups::ImportExport::ExportService do
let!(:another_user) { create(:user) }
let(:service) { described_class.new(group: group, user: another_user, params: { shared: shared }) }
+ let(:expected_message) do
+ "User with ID: %s does not have required permissions for Group: %s with ID: %s" %
+ [another_user.id, group.name, group.id]
+ end
+
it 'fails' do
- expected_message =
- "User with ID: %s does not have permission to Group %s with ID: %s." %
- [another_user.id, group.name, group.id]
expect { service.execute }.to raise_error(Gitlab::ImportExport::Error).with_message(expected_message)
end
- end
- context 'when saving services fail' do
- before do
- allow(service).to receive_message_chain(:tree_exporter, :save).and_return(false)
+ it 'logs the error' do
+ expect(shared.logger).to receive(:error).with(
+ group_id: group.id,
+ group_name: group.name,
+ error: expected_message,
+ message: 'Group Import/Export: Export failed'
+ )
+
+ expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
end
- it 'removes the remaining exported data' do
- allow_any_instance_of(Gitlab::ImportExport::Saver).to receive(:compress_and_save).and_return(false)
+ it 'tracks the error' do
+ expect(shared).to receive(:error) { |param| expect(param.message).to eq expected_message }
expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
+ end
+ end
- expect(group.import_export_upload).to be_nil
- expect(File.directory?(export_path)).to eq(false)
- expect(File.exist?(shared.archive_path)).to eq(false)
+ context 'when export fails' do
+ context 'when file saver fails' do
+ it 'removes the remaining exported data' do
+ allow_next_instance_of(Gitlab::ImportExport::Saver) do |saver|
+ allow(saver).to receive(:save).and_return(false)
+ end
+
+ expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
+
+ expect(group.import_export_upload).to be_nil
+ expect(File.exist?(shared.archive_path)).to eq(false)
+ end
end
- it 'notifies logger' do
- expect_any_instance_of(Gitlab::Import::Logger).to receive(:error)
+ context 'when file compression fails' do
+ before do
+ allow(service).to receive_message_chain(:tree_exporter, :save).and_return(false)
+ end
- expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
+ it 'removes the remaining exported data' do
+ allow_next_instance_of(Gitlab::ImportExport::Saver) do |saver|
+ allow(saver).to receive(:compress_and_save).and_return(false)
+ end
+
+ expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
+
+ expect(group.import_export_upload).to be_nil
+ expect(File.exist?(shared.archive_path)).to eq(false)
+ end
+
+ it 'notifies logger' do
+ allow(service).to receive_message_chain(:tree_exporter, :save).and_return(false)
+ expect(shared.logger).to receive(:error)
+
+ expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
+ end
end
end
end
diff --git a/spec/services/groups/import_export/import_service_spec.rb b/spec/services/groups/import_export/import_service_spec.rb
index bac266d08da..49c786ef67f 100644
--- a/spec/services/groups/import_export/import_service_spec.rb
+++ b/spec/services/groups/import_export/import_service_spec.rb
@@ -9,6 +9,8 @@ describe Groups::ImportExport::ImportService do
let(:service) { described_class.new(group: group, user: user) }
let(:import_file) { fixture_file_upload('spec/fixtures/group_export.tar.gz') }
+ let(:import_logger) { instance_double(Gitlab::Import::Logger) }
+
subject { service.execute }
before do
@@ -25,13 +27,82 @@ describe Groups::ImportExport::ImportService do
expect(group.import_export_upload.import_file.file).to be_nil
end
+
+ it 'logs the import success' do
+ allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
+
+ expect(import_logger).to receive(:info).with(
+ group_id: group.id,
+ group_name: group.name,
+ message: 'Group Import/Export: Import succeeded'
+ )
+
+ subject
+ end
end
context 'when user does not have correct permissions' do
let(:user) { create(:user) }
- it 'raises exception' do
- expect { subject }.to raise_error(StandardError)
+ it 'logs the error and raises an exception' do
+ allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
+
+ expect(import_logger).to receive(:error).with(
+ group_id: group.id,
+ group_name: group.name,
+ message: a_string_including('Errors occurred')
+ )
+
+ expect { subject }.to raise_error(Gitlab::ImportExport::Error)
+ end
+
+ it 'tracks the error' do
+ shared = Gitlab::ImportExport::Shared.new(group)
+ allow(Gitlab::ImportExport::Shared).to receive(:new).and_return(shared)
+
+ expect(shared).to receive(:error) do |param|
+ expect(param.message).to include 'does not have required permissions for'
+ end
+
+ expect { subject }.to raise_error(Gitlab::ImportExport::Error)
+ end
+ end
+
+ context 'when there are errors with the import file' do
+ let(:import_file) { fixture_file_upload('spec/fixtures/symlink_export.tar.gz') }
+
+ before do
+ allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
+ end
+
+ it 'logs the error and raises an exception' do
+ expect(import_logger).to receive(:error).with(
+ group_id: group.id,
+ group_name: group.name,
+ message: a_string_including('Errors occurred')
+ )
+
+ expect { subject }.to raise_error(Gitlab::ImportExport::Error)
+ end
+ end
+
+ context 'when there are errors with the sub-relations' do
+ let(:import_file) { fixture_file_upload('spec/fixtures/group_export_invalid_subrelations.tar.gz') }
+
+ it 'successfully imports the group' do
+ expect(subject).to be_truthy
+ end
+
+ it 'logs the import success' do
+ allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
+
+ expect(import_logger).to receive(:info).with(
+ group_id: group.id,
+ group_name: group.name,
+ message: 'Group Import/Export: Import succeeded'
+ )
+
+ subject
end
end
end
diff --git a/spec/services/issuable/clone/attributes_rewriter_spec.rb b/spec/services/issuable/clone/attributes_rewriter_spec.rb
index 20bda6984bd..6bc0df8260b 100644
--- a/spec/services/issuable/clone/attributes_rewriter_spec.rb
+++ b/spec/services/issuable/clone/attributes_rewriter_spec.rb
@@ -75,5 +75,47 @@ describe Issuable::Clone::AttributesRewriter do
expect(new_issue.reload.milestone).to eq(milestone)
end
+
+ context 'with existing milestone events' do
+ let!(:milestone1_project1) { create(:milestone, title: 'milestone1', project: project1) }
+ let!(:milestone2_project1) { create(:milestone, title: 'milestone2', project: project1) }
+ let!(:milestone3_project1) { create(:milestone, title: 'milestone3', project: project1) }
+
+ let!(:milestone1_project2) { create(:milestone, title: 'milestone1', project: project2) }
+ let!(:milestone2_project2) { create(:milestone, title: 'milestone2', project: project2) }
+
+ before do
+ original_issue.update(milestone: milestone2_project1)
+
+ create_event(milestone1_project1)
+ create_event(milestone2_project1)
+ create_event(milestone1_project1, 'remove')
+ create_event(milestone3_project1)
+ end
+
+ it 'copies existing resource milestone events' do
+ subject.execute
+
+ new_issue_milestone_events = new_issue.reload.resource_milestone_events
+ expect(new_issue_milestone_events.count).to eq(3)
+
+ expect_milestone_event(new_issue_milestone_events.first, milestone: milestone1_project2, action: 'add', state: 'opened')
+ expect_milestone_event(new_issue_milestone_events.second, milestone: milestone2_project2, action: 'add', state: 'opened')
+ expect_milestone_event(new_issue_milestone_events.third, milestone: milestone1_project2, action: 'remove', state: 'opened')
+ end
+
+ def create_event(milestone, action = 'add')
+ create(:resource_milestone_event, issue: original_issue, milestone: milestone, action: action)
+ end
+
+ def expect_milestone_event(event, expected_attrs)
+ expect(event.milestone_id).to eq(expected_attrs[:milestone].id)
+ expect(event.action).to eq(expected_attrs[:action])
+ expect(event.state).to eq(expected_attrs[:state])
+
+ expect(event.reference).to be_nil
+ expect(event.reference_html).to be_nil
+ end
+ end
end
end
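
The new examples assert that resource milestone events are copied to the cloned issue, with each event's milestone re-resolved by title in the target project (milestone3_project1 has no counterpart in project2, so its event is dropped and only 3 of the 4 events survive). A hedged sketch of that remapping step; the method and its placement are illustrative, not the rewriter's real internals:

  # Illustrative sketch: copy milestone events, remapping milestones by title.
  def copy_resource_milestone_events(original_issue, new_issue, target_project)
    original_issue.resource_milestone_events.find_each do |event|
      matching = target_project.milestones.find_by_title(event.milestone.title)
      next unless matching # events whose milestone has no counterpart are skipped

      new_issue.resource_milestone_events.create!(
        milestone: matching,
        action:    event.action,
        state:     event.state
      )
    end
  end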
diff --git a/spec/services/issuable/common_system_notes_service_spec.rb b/spec/services/issuable/common_system_notes_service_spec.rb
index 7e40ac9ff4d..771e7ca42c9 100644
--- a/spec/services/issuable/common_system_notes_service_spec.rb
+++ b/spec/services/issuable/common_system_notes_service_spec.rb
@@ -3,8 +3,9 @@
require 'spec_helper'
describe Issuable::CommonSystemNotesService do
- let(:user) { create(:user) }
- let(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
let(:issuable) { create(:issue, project: project) }
context 'on issuable update' do
@@ -35,6 +36,8 @@ describe Issuable::CommonSystemNotesService do
before do
milestone = create(:milestone, project: project)
issuable.milestone_id = milestone.id
+
+ stub_feature_flags(track_resource_milestone_change_events: false)
end
it_behaves_like 'system note creation', {}, 'changed milestone'
@@ -97,12 +100,39 @@ describe Issuable::CommonSystemNotesService do
expect(event.user_id).to eq user.id
end
- it 'creates a system note for milestone set' do
- issuable.milestone = create(:milestone, project: project)
- issuable.save
+ context 'when milestone change event tracking is disabled' do
+ before do
+ stub_feature_flags(track_resource_milestone_change_events: false)
- expect { subject }.to change { issuable.notes.count }.from(0).to(1)
- expect(issuable.notes.last.note).to match('changed milestone')
+ issuable.milestone = create(:milestone, project: project)
+ issuable.save
+ end
+
+ it 'creates a system note for milestone set' do
+ expect { subject }.to change { issuable.notes.count }.from(0).to(1)
+ expect(issuable.notes.last.note).to match('changed milestone')
+ end
+
+ it 'does not create a milestone change event' do
+ expect { subject }.not_to change { ResourceMilestoneEvent.count }
+ end
+ end
+
+ context 'when milestone change event tracking is enabled' do
+ let_it_be(:milestone) { create(:milestone, project: project) }
+ let_it_be(:issuable) { create(:issue, project: project, milestone: milestone) }
+
+ before do
+ stub_feature_flags(track_resource_milestone_change_events: true)
+ end
+
+ it 'does not create a system note for milestone set' do
+ expect { subject }.not_to change { issuable.notes.count }
+ end
+
+ it 'creates a milestone change event' do
+ expect { subject }.to change { ResourceMilestoneEvent.count }.from(0).to(1)
+ end
end
it 'creates a system note for due_date set' do
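
Several of these contexts toggle the track_resource_milestone_change_events feature flag: when it is off, a 'changed milestone' system note is created; when it is on, a ResourceMilestoneEvent is recorded instead. A schematic of that branch, with hypothetical helper names:

  # Illustrative branch only; the helper names are hypothetical.
  def track_milestone_change(issuable)
    if Feature.enabled?(:track_resource_milestone_change_events)
      create_resource_milestone_event(issuable)  # records a ResourceMilestoneEvent row
    else
      create_milestone_change_note(issuable)     # creates the 'changed milestone' system note
    end
  end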
diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb
index dce62d1d20e..d8bcd8008ce 100644
--- a/spec/services/issues/close_service_spec.rb
+++ b/spec/services/issues/close_service_spec.rb
@@ -70,6 +70,38 @@ describe Issues::CloseService do
end
describe '#close_issue' do
+ context 'with external issue' do
+ context 'with an active external issue tracker supporting close_issue' do
+ let!(:external_issue_tracker) { create(:jira_service, project: project) }
+
+ it 'closes the issue on the external issue tracker' do
+ expect(project.external_issue_tracker).to receive(:close_issue)
+
+ described_class.new(project, user).close_issue(external_issue)
+ end
+ end
+
+      context 'with an inactive external issue tracker supporting close_issue' do
+ let!(:external_issue_tracker) { create(:jira_service, project: project, active: false) }
+
+ it 'does not close the issue on the external issue tracker' do
+ expect(project.external_issue_tracker).not_to receive(:close_issue)
+
+ described_class.new(project, user).close_issue(external_issue)
+ end
+ end
+
+ context 'with an active external issue tracker not supporting close_issue' do
+ let!(:external_issue_tracker) { create(:bugzilla_service, project: project) }
+
+ it 'does not close the issue on the external issue tracker' do
+ expect(project.external_issue_tracker).not_to receive(:close_issue)
+
+ described_class.new(project, user).close_issue(external_issue)
+ end
+ end
+ end
+
context "closed by a merge request", :sidekiq_might_not_need_inline do
it 'mentions closure via a merge request' do
perform_enqueued_jobs do
@@ -98,6 +130,53 @@ describe Issues::CloseService do
expect(body_text).not_to include(closing_merge_request.to_reference)
end
end
+
+ context 'updating `metrics.first_mentioned_in_commit_at`' do
+ subject { described_class.new(project, user).close_issue(issue, closed_via: closing_merge_request) }
+
+ context 'when `metrics.first_mentioned_in_commit_at` is not set' do
+ it 'uses the first commit authored timestamp' do
+ expected = closing_merge_request.commits.first.authored_date
+
+ subject
+
+ expect(issue.metrics.first_mentioned_in_commit_at).to eq(expected)
+ end
+ end
+
+ context 'when `metrics.first_mentioned_in_commit_at` is already set' do
+ before do
+ issue.metrics.update!(first_mentioned_in_commit_at: Time.now)
+ end
+
+ it 'does not update the metrics' do
+ expect { subject }.not_to change { issue.metrics.first_mentioned_in_commit_at }
+ end
+ end
+
+ context 'when merge request has no commits' do
+ let(:closing_merge_request) { create(:merge_request, :without_diffs, source_project: project) }
+
+ it 'does not update the metrics' do
+ subject
+
+ expect(issue.metrics.first_mentioned_in_commit_at).to be_nil
+ end
+ end
+
+ context 'when `store_first_mentioned_in_commit_on_issue_close` feature flag is off' do
+ before do
+ stub_feature_flags(store_first_mentioned_in_commit_on_issue_close: { enabled: false, thing: issue.project })
+ end
+
+ it 'does not update the metrics' do
+ subject
+
+ expect(described_class).not_to receive(:store_first_mentioned_in_commit_at)
+ expect(issue.metrics.first_mentioned_in_commit_at).to be_nil
+ end
+ end
+ end
end
context "closed by a commit", :sidekiq_might_not_need_inline do
@@ -131,35 +210,55 @@ describe Issues::CloseService do
end
context "valid params" do
- before do
+ def close_issue
perform_enqueued_jobs do
described_class.new(project, user).close_issue(issue)
end
end
it 'closes the issue' do
+ close_issue
+
expect(issue).to be_valid
expect(issue).to be_closed
end
it 'records closed user' do
+ close_issue
+
expect(issue.closed_by_id).to be(user.id)
end
it 'sends email to user2 about assign of new issue', :sidekiq_might_not_need_inline do
+ close_issue
+
email = ActionMailer::Base.deliveries.last
expect(email.to.first).to eq(user2.email)
expect(email.subject).to include(issue.title)
end
it 'creates system note about issue reassign' do
+ close_issue
+
note = issue.notes.last
expect(note.note).to include "closed"
end
it 'marks todos as done' do
+ close_issue
+
expect(todo.reload).to be_done
end
+
+ it 'deletes milestone issue counters cache' do
+ issue.update(milestone: create(:milestone, project: project))
+
+ expect_next_instance_of(Milestones::ClosedIssuesCountService, issue.milestone) do |service|
+ expect(service).to receive(:delete_cache).and_call_original
+ end
+
+ close_issue
+ end
end
context 'when issue is not confidential' do
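
The 'deletes milestone issue counters cache' example expects the next Milestones::ClosedIssuesCountService built for the issue's milestone to receive delete_cache. One way the service under test could satisfy that expectation, sketched here rather than quoted from Issues::CloseService:

  # Sketch: invalidate the closed-issues counter for the issue's milestone.
  def delete_milestone_closed_issue_counter_cache(milestone)
    return unless milestone

    Milestones::ClosedIssuesCountService.new(milestone).delete_cache
  end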
diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb
index c9701e5d194..09fff389cec 100644
--- a/spec/services/issues/create_service_spec.rb
+++ b/spec/services/issues/create_service_spec.rb
@@ -196,6 +196,14 @@ describe Issues::CreateService do
end
end
end
+
+ it 'deletes milestone issues count cache' do
+ expect_next_instance_of(Milestones::IssuesCountService, milestone) do |service|
+ expect(service).to receive(:delete_cache).and_call_original
+ end
+
+ issue
+ end
end
context 'issue create service' do
diff --git a/spec/services/issues/import_csv_service_spec.rb b/spec/services/issues/import_csv_service_spec.rb
index e7370407d4c..aa43892a36d 100644
--- a/spec/services/issues/import_csv_service_spec.rb
+++ b/spec/services/issues/import_csv_service_spec.rb
@@ -27,6 +27,29 @@ describe Issues::ImportCsvService do
end
end
+  context 'with a file generated by GitLab CSV export' do
+ let(:file) { fixture_file_upload('spec/fixtures/csv_gitlab_export.csv') }
+
+ it 'imports the CSV without errors' do
+ expect_next_instance_of(Notify) do |instance|
+ expect(instance).to receive(:import_issues_csv_email)
+ end
+
+ expect(subject[:success]).to eq(4)
+ expect(subject[:error_lines]).to eq([])
+ expect(subject[:parse_error]).to eq(false)
+ end
+
+ it 'correctly sets the issue attributes' do
+ expect { subject }.to change { project.issues.count }.by 4
+
+ expect(project.issues.reload.last).to have_attributes(
+ title: 'Test Title',
+ description: 'Test Description'
+ )
+ end
+ end
+
context 'comma delimited file' do
let(:file) { fixture_file_upload('spec/fixtures/csv_comma.csv') }
@@ -39,6 +62,15 @@ describe Issues::ImportCsvService do
expect(subject[:error_lines]).to eq([])
expect(subject[:parse_error]).to eq(false)
end
+
+ it 'correctly sets the issue attributes' do
+ expect { subject }.to change { project.issues.count }.by 3
+
+ expect(project.issues.reload.last).to have_attributes(
+ title: 'Title with quote"',
+ description: 'Description'
+ )
+ end
end
context 'tab delimited file with error row' do
@@ -53,6 +85,15 @@ describe Issues::ImportCsvService do
expect(subject[:error_lines]).to eq([3])
expect(subject[:parse_error]).to eq(false)
end
+
+ it 'correctly sets the issue attributes' do
+ expect { subject }.to change { project.issues.count }.by 2
+
+ expect(project.issues.reload.last).to have_attributes(
+ title: 'Hello',
+ description: 'World'
+ )
+ end
end
context 'semicolon delimited file with CRLF' do
@@ -67,6 +108,15 @@ describe Issues::ImportCsvService do
expect(subject[:error_lines]).to eq([4])
expect(subject[:parse_error]).to eq(false)
end
+
+ it 'correctly sets the issue attributes' do
+ expect { subject }.to change { project.issues.count }.by 3
+
+ expect(project.issues.reload.last).to have_attributes(
+ title: 'Hello',
+ description: 'World'
+ )
+ end
end
end
end
diff --git a/spec/services/issues/reopen_service_spec.rb b/spec/services/issues/reopen_service_spec.rb
index f04029e64aa..ca878ee947a 100644
--- a/spec/services/issues/reopen_service_spec.rb
+++ b/spec/services/issues/reopen_service_spec.rb
@@ -43,6 +43,16 @@ describe Issues::ReopenService do
.to change { project.open_issues_count }.from(0).to(1)
end
+ it 'deletes milestone issue counters cache' do
+ issue.update(milestone: create(:milestone, project: project))
+
+ expect_next_instance_of(Milestones::ClosedIssuesCountService, issue.milestone) do |service|
+ expect(service).to receive(:delete_cache).and_call_original
+ end
+
+ described_class.new(project, user).execute(issue)
+ end
+
context 'when issue is not confidential' do
it 'executes issue hooks' do
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :issue_hooks)
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index 888a63980f6..69e47d890a5 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -385,6 +385,10 @@ describe Issues::UpdateService, :mailer do
end
context 'when the milestone is removed' do
+ before do
+ stub_feature_flags(track_resource_milestone_change_events: false)
+ end
+
let!(:non_subscriber) { create(:user) }
let!(:subscriber) do
@@ -408,9 +412,28 @@ describe Issues::UpdateService, :mailer do
should_email(subscriber)
should_not_email(non_subscriber)
end
+
+ it 'clears milestone issue counters cache' do
+ issue.milestone = create(:milestone, project: project)
+
+ issue.save
+
+ expect_next_instance_of(Milestones::IssuesCountService, issue.milestone) do |service|
+ expect(service).to receive(:delete_cache).and_call_original
+ end
+ expect_next_instance_of(Milestones::ClosedIssuesCountService, issue.milestone) do |service|
+ expect(service).to receive(:delete_cache).and_call_original
+ end
+
+ update_issue(milestone_id: "")
+ end
end
- context 'when the milestone is changed' do
+ context 'when the milestone is assigned' do
+ before do
+ stub_feature_flags(track_resource_milestone_change_events: false)
+ end
+
let!(:non_subscriber) { create(:user) }
let!(:subscriber) do
@@ -436,6 +459,43 @@ describe Issues::UpdateService, :mailer do
should_email(subscriber)
should_not_email(non_subscriber)
end
+
+ it 'deletes issue counters cache for the milestone' do
+ milestone = create(:milestone, project: project)
+
+ expect_next_instance_of(Milestones::IssuesCountService, milestone) do |service|
+ expect(service).to receive(:delete_cache).and_call_original
+ end
+ expect_next_instance_of(Milestones::ClosedIssuesCountService, milestone) do |service|
+ expect(service).to receive(:delete_cache).and_call_original
+ end
+
+ update_issue(milestone: milestone)
+ end
+ end
+
+ context 'when the milestone is changed' do
+ it 'deletes issue counters cache for both milestones' do
+ old_milestone = create(:milestone, project: project)
+ new_milestone = create(:milestone, project: project)
+
+ issue.update!(milestone: old_milestone)
+
+ expect_next_instance_of(Milestones::IssuesCountService, old_milestone) do |service|
+ expect(service).to receive(:delete_cache).and_call_original
+ end
+ expect_next_instance_of(Milestones::ClosedIssuesCountService, old_milestone) do |service|
+ expect(service).to receive(:delete_cache).and_call_original
+ end
+ expect_next_instance_of(Milestones::IssuesCountService, new_milestone) do |service|
+ expect(service).to receive(:delete_cache).and_call_original
+ end
+ expect_next_instance_of(Milestones::ClosedIssuesCountService, new_milestone) do |service|
+ expect(service).to receive(:delete_cache).and_call_original
+ end
+
+ update_issue(milestone: new_milestone)
+ end
end
context 'when the labels change' do
diff --git a/spec/services/labels/transfer_service_spec.rb b/spec/services/labels/transfer_service_spec.rb
index e29c6aeef5b..a2a9c8dddf2 100644
--- a/spec/services/labels/transfer_service_spec.rb
+++ b/spec/services/labels/transfer_service_spec.rb
@@ -4,65 +4,101 @@ require 'spec_helper'
describe Labels::TransferService do
describe '#execute' do
- let(:user) { create(:admin) }
- let(:group_1) { create(:group) }
- let(:group_2) { create(:group) }
- let(:group_3) { create(:group) }
- let(:project_1) { create(:project, namespace: group_2) }
- let(:project_2) { create(:project, namespace: group_3) }
- let(:project_3) { create(:project, namespace: group_1) }
-
- let(:group_label_1) { create(:group_label, group: group_1, name: 'Group Label 1') }
- let(:group_label_2) { create(:group_label, group: group_1, name: 'Group Label 2') }
- let(:group_label_3) { create(:group_label, group: group_1, name: 'Group Label 3') }
- let(:group_label_4) { create(:group_label, group: group_2, name: 'Group Label 4') }
- let(:group_label_5) { create(:group_label, group: group_3, name: 'Group Label 5') }
- let(:project_label_1) { create(:label, project: project_1, name: 'Project Label 1') }
-
- subject(:service) { described_class.new(user, group_1, project_1) }
-
- before do
- create(:labeled_issue, project: project_1, labels: [group_label_1])
- create(:labeled_issue, project: project_1, labels: [group_label_4])
- create(:labeled_issue, project: project_1, labels: [project_label_1])
- create(:labeled_issue, project: project_2, labels: [group_label_5])
- create(:labeled_issue, project: project_3, labels: [group_label_1])
- create(:labeled_merge_request, source_project: project_1, labels: [group_label_1, group_label_2])
- create(:labeled_merge_request, source_project: project_2, labels: [group_label_5])
+ let_it_be(:user) { create(:admin) }
+
+ let_it_be(:old_group_ancestor) { create(:group) }
+ let_it_be(:old_group) { create(:group, parent: old_group_ancestor) }
+
+ let_it_be(:new_group) { create(:group) }
+
+ let_it_be(:project) { create(:project, :repository, group: new_group) }
+
+ subject(:service) { described_class.new(user, old_group, project) }
+
+ it 'recreates missing group labels at project level and assigns them to the issuables' do
+ old_group_label_1 = create(:group_label, group: old_group)
+ old_group_label_2 = create(:group_label, group: old_group)
+
+ labeled_issue = create(:labeled_issue, project: project, labels: [old_group_label_1])
+ labeled_merge_request = create(:labeled_merge_request, source_project: project, labels: [old_group_label_2])
+
+ expect { service.execute }.to change(project.labels, :count).by(2)
+ expect(labeled_issue.reload.labels).to contain_exactly(project.labels.find_by_title(old_group_label_1.title))
+ expect(labeled_merge_request.reload.labels).to contain_exactly(project.labels.find_by_title(old_group_label_2.title))
end
- it 'recreates the missing group labels at project level' do
- expect { service.execute }.to change(project_1.labels, :count).by(2)
+ it 'recreates missing ancestor group labels at project level and assigns them to the issuables' do
+ old_group_ancestor_label_1 = create(:group_label, group: old_group_ancestor)
+ old_group_ancestor_label_2 = create(:group_label, group: old_group_ancestor)
+
+ labeled_issue = create(:labeled_issue, project: project, labels: [old_group_ancestor_label_1])
+ labeled_merge_request = create(:labeled_merge_request, source_project: project, labels: [old_group_ancestor_label_2])
+
+ expect { service.execute }.to change(project.labels, :count).by(2)
+ expect(labeled_issue.reload.labels).to contain_exactly(project.labels.find_by_title(old_group_ancestor_label_1.title))
+ expect(labeled_merge_request.reload.labels).to contain_exactly(project.labels.find_by_title(old_group_ancestor_label_2.title))
end
it 'recreates label priorities related to the missing group labels' do
- create(:label_priority, project: project_1, label: group_label_1, priority: 1)
+ old_group_label = create(:group_label, group: old_group)
+ create(:labeled_issue, project: project, labels: [old_group_label])
+ create(:label_priority, project: project, label: old_group_label, priority: 1)
service.execute
- new_project_label = project_1.labels.find_by(title: group_label_1.title)
- expect(new_project_label.id).not_to eq group_label_1.id
+ new_project_label = project.labels.find_by(title: old_group_label.title)
+ expect(new_project_label.id).not_to eq old_group_label.id
expect(new_project_label.priorities).not_to be_empty
end
it 'does not recreate missing group labels that are not applied to issues or merge requests' do
+ old_group_label = create(:group_label, group: old_group)
+
service.execute
- expect(project_1.labels.where(title: group_label_3.title)).to be_empty
+ expect(project.labels.where(title: old_group_label.title)).to be_empty
end
it 'does not recreate missing group labels that already exist in the project group' do
+ old_group_label = create(:group_label, group: old_group)
+ labeled_issue = create(:labeled_issue, project: project, labels: [old_group_label])
+
+ new_group_label = create(:group_label, group: new_group, title: old_group_label.title)
+
service.execute
- expect(project_1.labels.where(title: group_label_4.title)).to be_empty
+ expect(project.labels.where(title: old_group_label.title)).to be_empty
+ expect(labeled_issue.reload.labels).to contain_exactly(new_group_label)
end
it 'updates only label links in the given project' do
+ old_group_label = create(:group_label, group: old_group)
+ other_project = create(:project, group: old_group)
+
+ labeled_issue = create(:labeled_issue, project: project, labels: [old_group_label])
+ other_project_labeled_issue = create(:labeled_issue, project: other_project, labels: [old_group_label])
+
service.execute
- targets = LabelLink.where(label_id: group_label_1.id).map(&:target)
+ expect(labeled_issue.reload.labels).not_to include(old_group_label)
+ expect(other_project_labeled_issue.reload.labels).to contain_exactly(old_group_label)
+ end
+
+ context 'when moving within the same ancestor group' do
+ let(:other_subgroup) { create(:group, parent: old_group_ancestor) }
+ let(:project) { create(:project, :repository, group: other_subgroup) }
+
+ it 'does not recreate ancestor group labels' do
+ old_group_ancestor_label_1 = create(:group_label, group: old_group_ancestor)
+ old_group_ancestor_label_2 = create(:group_label, group: old_group_ancestor)
+
+ labeled_issue = create(:labeled_issue, project: project, labels: [old_group_ancestor_label_1])
+ labeled_merge_request = create(:labeled_merge_request, source_project: project, labels: [old_group_ancestor_label_2])
- expect(targets).to eq(project_3.issues)
+ expect { service.execute }.not_to change(project.labels, :count)
+ expect(labeled_issue.reload.labels).to contain_exactly(old_group_ancestor_label_1)
+ expect(labeled_merge_request.reload.labels).to contain_exactly(old_group_ancestor_label_2)
+ end
end
end
end
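
Taken together, the rewritten examples describe the intended behaviour: group labels (including ancestor-group labels) applied to the transferred project's issuables are recreated as project labels and their label links are repointed, while labels already available in the new ancestry, or not applied at all, are left alone. A rough outline of that flow with hypothetical helper names; it is not the real Labels::TransferService body:

  # Illustrative outline only; applied_group_labels, label_available_in_new_ancestry?
  # and repoint_label_links are hypothetical helpers.
  def transfer_labels(old_group, project)
    applied_group_labels(old_group, project).each do |group_label|
      next if label_available_in_new_ancestry?(project, group_label)

      project_label = project.labels.find_or_create_by!(title: group_label.title)
      repoint_label_links(project, from: group_label, to: project_label)
    end
  end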
diff --git a/spec/services/merge_requests/after_create_service_spec.rb b/spec/services/merge_requests/after_create_service_spec.rb
new file mode 100644
index 00000000000..974f72fa376
--- /dev/null
+++ b/spec/services/merge_requests/after_create_service_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe MergeRequests::AfterCreateService do
+ let_it_be(:merge_request) { create(:merge_request) }
+
+ subject(:after_create_service) do
+ described_class.new(merge_request.target_project, merge_request.author)
+ end
+
+ describe '#execute' do
+ let(:event_service) { instance_double('EventCreateService', open_mr: true) }
+ let(:notification_service) { instance_double('NotificationService', new_merge_request: true) }
+
+ before do
+ allow(after_create_service).to receive(:event_service).and_return(event_service)
+ allow(after_create_service).to receive(:notification_service).and_return(notification_service)
+ end
+
+ it 'creates a merge request open event' do
+ expect(event_service)
+ .to receive(:open_mr).with(merge_request, merge_request.author)
+
+ after_create_service.execute(merge_request)
+ end
+
+ it 'creates a new merge request notification' do
+ expect(notification_service)
+ .to receive(:new_merge_request).with(merge_request, merge_request.author)
+
+ after_create_service.execute(merge_request)
+ end
+
+ it 'writes diffs to the cache' do
+ expect(merge_request)
+ .to receive_message_chain(:diffs, :write_cache)
+
+ after_create_service.execute(merge_request)
+ end
+
+ it 'creates cross references' do
+ expect(merge_request)
+ .to receive(:create_cross_references!).with(merge_request.author)
+
+ after_create_service.execute(merge_request)
+ end
+
+ it 'creates a pipeline and updates the HEAD pipeline' do
+ expect(after_create_service)
+ .to receive(:create_pipeline_for).with(merge_request, merge_request.author)
+ expect(merge_request).to receive(:update_head_pipeline)
+
+ after_create_service.execute(merge_request)
+ end
+
+ # https://gitlab.com/gitlab-org/gitlab/issues/208813
+ context 'when the create_merge_request_pipelines_in_sidekiq flag is disabled' do
+ before do
+ stub_feature_flags(create_merge_request_pipelines_in_sidekiq: false)
+ end
+
+ it 'does not create a pipeline or update the HEAD pipeline' do
+ expect(after_create_service).not_to receive(:create_pipeline_for)
+ expect(merge_request).not_to receive(:update_head_pipeline)
+
+ after_create_service.execute(merge_request)
+ end
+ end
+ end
+end
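
The new spec builds verified fakes of the event and notification collaborators with instance_double and injects them via allow(...). A standalone RSpec sketch of that pattern with a placeholder Mailer class, runnable outside GitLab:

  # Standalone sketch of the instance_double pattern; Mailer is a placeholder class.
  class Mailer
    def deliver(message); end
  end

  RSpec.describe 'stubbing a collaborator with a verified double' do
    it 'delegates to the collaborator' do
      mailer  = instance_double(Mailer, deliver: true)
      service = ->(msg) { mailer.deliver(msg) }

      expect(mailer).to receive(:deliver).with('hello')

      service.call('hello')
    end
  end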
diff --git a/spec/services/merge_requests/create_service_spec.rb b/spec/services/merge_requests/create_service_spec.rb
index aebead481ce..2514fda5053 100644
--- a/spec/services/merge_requests/create_service_spec.rb
+++ b/spec/services/merge_requests/create_service_spec.rb
@@ -129,7 +129,23 @@ describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
end
end
- context 'when head pipelines already exist for merge request source branch' do
+ # https://gitlab.com/gitlab-org/gitlab/issues/208813
+ context 'when the create_merge_request_pipelines_in_sidekiq flag is disabled' do
+ before do
+ stub_feature_flags(create_merge_request_pipelines_in_sidekiq: false)
+ end
+
+ it 'creates a pipeline and updates the HEAD pipeline' do
+ expect(service).to receive(:create_pipeline_for)
+ expect_next_instance_of(MergeRequest) do |merge_request|
+ expect(merge_request).to receive(:update_head_pipeline)
+ end
+
+ service.execute
+ end
+ end
+
+ context 'when head pipelines already exist for merge request source branch', :sidekiq_inline do
let(:shas) { project.repository.commits(opts[:source_branch], limit: 2).map(&:id) }
let!(:pipeline_1) { create(:ci_pipeline, project: project, ref: opts[:source_branch], project_id: project.id, sha: shas[1]) }
let!(:pipeline_2) { create(:ci_pipeline, project: project, ref: opts[:source_branch], project_id: project.id, sha: shas[0]) }
@@ -175,20 +191,20 @@ describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
end
end
- describe 'Pipelines for merge requests' do
+ describe 'Pipelines for merge requests', :sidekiq_inline do
before do
- stub_ci_pipeline_yaml_file(YAML.dump(config))
+ stub_ci_pipeline_yaml_file(config)
end
context "when .gitlab-ci.yml has merge_requests keywords" do
let(:config) do
- {
+ YAML.dump({
test: {
stage: 'test',
script: 'echo',
only: ['merge_requests']
}
- }
+ })
end
it 'creates a detached merge request pipeline and sets it as a head pipeline' do
@@ -216,7 +232,9 @@ describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
target_project.add_maintainer(user)
end
- it 'create legacy detached merge request pipeline for fork merge request', :sidekiq_might_not_need_inline do
+    it 'creates legacy detached merge request pipeline for fork merge request' do
+ merge_request.reload
+
expect(merge_request.actual_head_pipeline)
.to be_legacy_detached_merge_request_pipeline
end
@@ -228,6 +246,8 @@ describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
end
it 'create legacy detached merge request pipeline for non-fork merge request' do
+ merge_request.reload
+
expect(merge_request.actual_head_pipeline)
.to be_legacy_detached_merge_request_pipeline
end
@@ -262,6 +282,8 @@ describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
end
it 'sets the latest detached merge request pipeline as the head pipeline' do
+ merge_request.reload
+
expect(merge_request.actual_head_pipeline).to be_merge_request_event
end
end
@@ -269,12 +291,12 @@ describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
context "when .gitlab-ci.yml does not have merge_requests keywords" do
let(:config) do
- {
+ YAML.dump({
test: {
stage: 'test',
script: 'echo'
}
- }
+ })
end
it 'does not create a detached merge request pipeline' do
@@ -284,6 +306,19 @@ describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
expect(merge_request.pipelines_for_merge_request.count).to eq(0)
end
end
+
+ context 'when .gitlab-ci.yml is invalid' do
+ let(:config) { 'invalid yaml file' }
+
+ it 'persists a pipeline with config error' do
+ expect(merge_request).to be_persisted
+
+ merge_request.reload
+ expect(merge_request.pipelines_for_merge_request.count).to eq(1)
+ expect(merge_request.pipelines_for_merge_request.last).to be_failed
+ expect(merge_request.pipelines_for_merge_request.last).to be_config_error
+ end
+ end
end
it 'increments the usage data counter of create event' do
diff --git a/spec/services/merge_requests/merge_service_spec.rb b/spec/services/merge_requests/merge_service_spec.rb
index 496b08799f2..fa7f745d8a0 100644
--- a/spec/services/merge_requests/merge_service_spec.rb
+++ b/spec/services/merge_requests/merge_service_spec.rb
@@ -118,7 +118,7 @@ describe MergeRequests::MergeService do
it 'closes GitLab issue tracker issues' do
issue = create :issue, project: project
- commit = double('commit', safe_message: "Fixes #{issue.to_reference}")
+ commit = instance_double('commit', safe_message: "Fixes #{issue.to_reference}", date: Time.now, authored_date: Time.now)
allow(merge_request).to receive(:commits).and_return([commit])
merge_request.cache_merge_request_closes_issues!
@@ -158,7 +158,7 @@ describe MergeRequests::MergeService do
end
it 'does not close issue' do
- allow(jira_tracker).to receive_messages(jira_issue_transition_id: nil)
+ jira_tracker.update(jira_issue_transition_id: nil)
expect_any_instance_of(JiraService).not_to receive(:transition_issue)
diff --git a/spec/services/merge_requests/merge_to_ref_service_spec.rb b/spec/services/merge_requests/merge_to_ref_service_spec.rb
index 5c26e32bb22..596d46f3c43 100644
--- a/spec/services/merge_requests/merge_to_ref_service_spec.rb
+++ b/spec/services/merge_requests/merge_to_ref_service_spec.rb
@@ -91,6 +91,17 @@ describe MergeRequests::MergeToRefService do
it_behaves_like 'successfully evaluates pre-condition checks'
+ it 'returns an error when Gitlab::Git::CommandError is raised during merge' do
+ allow(project.repository).to receive(:merge_to_ref) do
+ raise Gitlab::Git::CommandError.new('Failed to create merge commit')
+ end
+
+ result = service.execute(merge_request)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Failed to create merge commit')
+ end
+
context 'commit history comparison with regular MergeService' do
before do
# The merge service needs an authorized user while merge-to-ref
diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb
index b67779a912d..4f052fa3edb 100644
--- a/spec/services/merge_requests/refresh_service_spec.rb
+++ b/spec/services/merge_requests/refresh_service_spec.rb
@@ -148,7 +148,7 @@ describe MergeRequests::RefreshService do
describe 'Pipelines for merge requests' do
before do
- stub_ci_pipeline_yaml_file(YAML.dump(config))
+ stub_ci_pipeline_yaml_file(config)
end
subject { service.new(project, @user).execute(@oldrev, @newrev, ref) }
@@ -158,13 +158,13 @@ describe MergeRequests::RefreshService do
context "when .gitlab-ci.yml has merge_requests keywords" do
let(:config) do
- {
+ YAML.dump({
test: {
stage: 'test',
script: 'echo',
only: ['merge_requests']
}
- }
+ })
end
it 'create detached merge request pipeline with commits' do
@@ -255,16 +255,28 @@ describe MergeRequests::RefreshService do
end.not_to change { @merge_request.pipelines_for_merge_request.count }
end
end
+
+ context 'when the pipeline should be skipped' do
+ it 'saves a skipped detached merge request pipeline' do
+ project.repository.create_file(@user, 'new-file.txt', 'A new file',
+ message: '[skip ci] This is a test',
+ branch_name: 'master')
+
+ expect { subject }
+ .to change { @merge_request.pipelines_for_merge_request.count }.by(1)
+ expect(@merge_request.pipelines_for_merge_request.last).to be_skipped
+ end
+ end
end
context "when .gitlab-ci.yml does not have merge_requests keywords" do
let(:config) do
- {
+ YAML.dump({
test: {
stage: 'test',
script: 'echo'
}
- }
+ })
end
it 'does not create a detached merge request pipeline' do
@@ -272,6 +284,40 @@ describe MergeRequests::RefreshService do
.not_to change { @merge_request.pipelines_for_merge_request.count }
end
end
+
+ context 'when .gitlab-ci.yml is invalid' do
+ let(:config) { 'invalid yaml file' }
+
+ it 'persists a pipeline with config error' do
+ expect { subject }
+ .to change { @merge_request.pipelines_for_merge_request.count }.by(1)
+ expect(@merge_request.pipelines_for_merge_request.last).to be_failed
+ expect(@merge_request.pipelines_for_merge_request.last).to be_config_error
+ end
+ end
+
+ context 'when .gitlab-ci.yml file is valid but has a logical error' do
+ let(:config) do
+ YAML.dump({
+ build: {
+ script: 'echo "Valid yaml syntax, but..."',
+ only: ['master']
+ },
+ test: {
+ script: 'echo "... I depend on build, which does not run."',
+ only: ['merge_request'],
+ needs: ['build']
+ }
+ })
+ end
+
+ it 'persists a pipeline with config error' do
+ expect { subject }
+ .to change { @merge_request.pipelines_for_merge_request.count }.by(1)
+ expect(@merge_request.pipelines_for_merge_request.last).to be_failed
+ expect(@merge_request.pipelines_for_merge_request.last).to be_config_error
+ end
+ end
end
context 'push to origin repo source branch' do
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index f295f3c4a81..dd5d90b2d07 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -367,6 +367,10 @@ describe MergeRequests::UpdateService, :mailer do
end
context 'when the milestone is removed' do
+ before do
+ stub_feature_flags(track_resource_milestone_change_events: false)
+ end
+
let!(:non_subscriber) { create(:user) }
let!(:subscriber) do
@@ -393,6 +397,10 @@ describe MergeRequests::UpdateService, :mailer do
end
context 'when the milestone is changed' do
+ before do
+ stub_feature_flags(track_resource_milestone_change_events: false)
+ end
+
let!(:non_subscriber) { create(:user) }
let!(:subscriber) do
diff --git a/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb b/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb
index 5f7279ee550..431b0db392a 100644
--- a/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb
+++ b/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb
@@ -29,7 +29,7 @@ describe Metrics::Dashboard::CloneDashboardService, :use_clean_rails_memory_stor
end
context 'user does not have push right to repository' do
- it_behaves_like 'misconfigured dashboard service response', :forbidden, %q(You can't commit to this project)
+ it_behaves_like 'misconfigured dashboard service response', :forbidden, %q(You are not allowed to push into this branch. Create another branch or open a merge request.)
end
context 'with rights to push to the repository' do
diff --git a/spec/services/metrics/dashboard/custom_metric_embed_service_spec.rb b/spec/services/metrics/dashboard/custom_metric_embed_service_spec.rb
index 744693dad15..2f03d18cd1f 100644
--- a/spec/services/metrics/dashboard/custom_metric_embed_service_spec.rb
+++ b/spec/services/metrics/dashboard/custom_metric_embed_service_spec.rb
@@ -35,12 +35,18 @@ describe Metrics::Dashboard::CustomMetricEmbedService do
it { is_expected.to be_truthy }
- context 'not embedded' do
+ context 'missing embedded' do
let(:params) { valid_params.except(:embedded) }
it { is_expected.to be_falsey }
end
+ context 'not embedded' do
+ let(:params) { valid_params.merge(embedded: 'false') }
+
+ it { is_expected.to be_falsey }
+ end
+
context 'non-system dashboard' do
let(:dashboard_path) { '.gitlab/dashboards/test.yml' }
diff --git a/spec/services/metrics/dashboard/default_embed_service_spec.rb b/spec/services/metrics/dashboard/default_embed_service_spec.rb
index 1b88276368c..8e32316433d 100644
--- a/spec/services/metrics/dashboard/default_embed_service_spec.rb
+++ b/spec/services/metrics/dashboard/default_embed_service_spec.rb
@@ -27,7 +27,7 @@ describe Metrics::Dashboard::DefaultEmbedService, :use_clean_rails_memory_store_
end
context 'not embedded' do
- let(:params) { { embedded: false } }
+ let(:params) { { embedded: 'false' } }
it { is_expected.to be_falsey }
end
diff --git a/spec/services/metrics/dashboard/dynamic_embed_service_spec.rb b/spec/services/metrics/dashboard/dynamic_embed_service_spec.rb
index c1ce9818f21..ee75284b4ce 100644
--- a/spec/services/metrics/dashboard/dynamic_embed_service_spec.rb
+++ b/spec/services/metrics/dashboard/dynamic_embed_service_spec.rb
@@ -35,12 +35,18 @@ describe Metrics::Dashboard::DynamicEmbedService, :use_clean_rails_memory_store_
it { is_expected.to be_truthy }
- context 'not embedded' do
+ context 'missing embedded' do
let(:params) { valid_params.except(:embedded) }
it { is_expected.to be_falsey }
end
+ context 'not embedded' do
+ let(:params) { valid_params.merge(embedded: 'false') }
+
+ it { is_expected.to be_falsey }
+ end
+
context 'undefined dashboard' do
let(:params) { valid_params.except(:dashboard_path) }
diff --git a/spec/services/metrics/dashboard/grafana_metric_embed_service_spec.rb b/spec/services/metrics/dashboard/grafana_metric_embed_service_spec.rb
index a772b911d8a..034d6aba5d6 100644
--- a/spec/services/metrics/dashboard/grafana_metric_embed_service_spec.rb
+++ b/spec/services/metrics/dashboard/grafana_metric_embed_service_spec.rb
@@ -28,12 +28,18 @@ describe Metrics::Dashboard::GrafanaMetricEmbedService do
it { is_expected.to be_truthy }
- context 'not embedded' do
+ context 'missing embedded' do
let(:params) { valid_params.except(:embedded) }
it { is_expected.to be_falsey }
end
+ context 'not embedded' do
+ let(:params) { valid_params.merge(embedded: 'false') }
+
+ it { is_expected.to be_falsey }
+ end
+
context 'undefined grafana_url' do
let(:params) { valid_params.except(:grafana_url) }
diff --git a/spec/services/metrics/dashboard/update_dashboard_service_spec.rb b/spec/services/metrics/dashboard/update_dashboard_service_spec.rb
new file mode 100644
index 00000000000..227041344d7
--- /dev/null
+++ b/spec/services/metrics/dashboard/update_dashboard_service_spec.rb
@@ -0,0 +1,141 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Metrics::Dashboard::UpdateDashboardService, :use_clean_rails_memory_store_caching do
+ include MetricsDashboardHelpers
+
+ set(:user) { create(:user) }
+ set(:project) { create(:project, :repository) }
+ set(:environment) { create(:environment, project: project) }
+
+ describe '#execute' do
+ subject(:service_call) { described_class.new(project, user, params).execute }
+
+ let(:commit_message) { 'test' }
+ let(:branch) { 'dashboard_new_branch' }
+ let(:dashboard) { 'config/prometheus/common_metrics.yml' }
+ let(:file_name) { 'custom_dashboard.yml' }
+ let(:file_content_hash) { YAML.safe_load(File.read(dashboard)) }
+ let(:params) do
+ {
+ file_name: file_name,
+ file_content: file_content_hash,
+ commit_message: commit_message,
+ branch: branch
+ }
+ end
+
+ context 'user does not have push right to repository' do
+ it 'returns an appropriate message and status code', :aggregate_failures do
+ result = service_call
+
+ expect(result.keys).to contain_exactly(:message, :http_status, :status, :last_step)
+ expect(result[:status]).to eq(:error)
+ expect(result[:http_status]).to eq(:forbidden)
+ expect(result[:message]).to eq("You are not allowed to push into this branch. Create another branch or open a merge request.")
+ end
+ end
+
+ context 'with rights to push to the repository' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ context 'path traversal attack attempt' do
+ context 'with a yml extension' do
+ let(:file_name) { 'config/prometheus/../database.yml' }
+
+ it 'returns an appropriate message and status code', :aggregate_failures do
+ result = service_call
+
+ expect(result.keys).to contain_exactly(:message, :http_status, :status, :last_step)
+ expect(result[:status]).to eq(:error)
+ expect(result[:http_status]).to eq(:bad_request)
+ expect(result[:message]).to eq("A file with this name doesn't exist")
+ end
+ end
+
+ context 'without a yml extension' do
+ let(:file_name) { '../../..../etc/passwd' }
+
+ it 'returns an appropriate message and status code', :aggregate_failures do
+ result = service_call
+
+ expect(result.keys).to contain_exactly(:message, :http_status, :status, :last_step)
+ expect(result[:status]).to eq(:error)
+ expect(result[:http_status]).to eq(:bad_request)
+ expect(result[:message]).to eq("The file name should have a .yml extension")
+ end
+ end
+ end
+
+ context 'valid parameters' do
+ it_behaves_like 'valid dashboard update process'
+ end
+
+ context 'selected branch already exists' do
+ let(:branch) { 'existing_branch' }
+
+ before do
+ project.repository.add_branch(user, branch, 'master')
+ end
+
+ it 'returns an appropriate message and status code', :aggregate_failures do
+ result = service_call
+
+ expect(result.keys).to contain_exactly(:message, :http_status, :status, :last_step)
+ expect(result[:status]).to eq(:error)
+ expect(result[:http_status]).to eq(:bad_request)
+ expect(result[:message]).to eq("There was an error updating the dashboard, branch named: existing_branch already exists.")
+ end
+ end
+
+ context 'Files::UpdateService success' do
+ before do
+ allow(::Files::UpdateService).to receive(:new).and_return(double(execute: { status: :success }))
+ end
+
+ it 'returns success', :aggregate_failures do
+ dashboard_details = {
+ path: '.gitlab/dashboards/custom_dashboard.yml',
+ display_name: 'custom_dashboard.yml',
+ default: false,
+ system_dashboard: false
+ }
+
+ expect(service_call[:status]).to be :success
+ expect(service_call[:http_status]).to be :created
+ expect(service_call[:dashboard]).to match dashboard_details
+ end
+
+ context 'with escaped characters in file name' do
+ let(:file_name) { "custom_dashboard%26copy.yml" }
+
+ it 'escapes the special characters', :aggregate_failures do
+ dashboard_details = {
+ path: '.gitlab/dashboards/custom_dashboard&copy.yml',
+ display_name: 'custom_dashboard&copy.yml',
+ default: false,
+ system_dashboard: false
+ }
+
+ expect(service_call[:status]).to be :success
+ expect(service_call[:http_status]).to be :created
+ expect(service_call[:dashboard]).to match dashboard_details
+ end
+ end
+ end
+
+ context 'Files::UpdateService fails' do
+ before do
+ allow(::Files::UpdateService).to receive(:new).and_return(double(execute: { status: :error }))
+ end
+
+ it 'returns error' do
+ expect(service_call[:status]).to be :error
+ end
+ end
+ end
+ end
+end
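
The service returns a plain result hash (:status, :http_status, :message, and :dashboard on success), so callers can branch on it directly. A hedged usage sketch of a controller-like caller consuming that hash; the render calls are illustrative, not GitLab's actual endpoint:

  # Illustrative caller; `project`, `current_user` and `params` come from the caller's context.
  result = ::Metrics::Dashboard::UpdateDashboardService.new(project, current_user, params).execute

  if result[:status] == :success
    render json: result[:dashboard], status: result[:http_status]  # e.g. :created
  else
    render json: { error: result[:message] }, status: result[:http_status]
  end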
diff --git a/spec/services/milestones/closed_issues_count_service_spec.rb b/spec/services/milestones/closed_issues_count_service_spec.rb
new file mode 100644
index 00000000000..b86eede2e22
--- /dev/null
+++ b/spec/services/milestones/closed_issues_count_service_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Milestones::ClosedIssuesCountService, :use_clean_rails_memory_store_caching do
+ let(:project) { create(:project) }
+ let(:milestone) { create(:milestone, project: project) }
+
+ before do
+ create(:issue, milestone: milestone, project: project)
+ create(:issue, :confidential, milestone: milestone, project: project)
+
+ create(:issue, :closed, milestone: milestone, project: project)
+ create(:issue, :closed, :confidential, milestone: milestone, project: project)
+ end
+
+ subject { described_class.new(milestone) }
+
+ it_behaves_like 'a counter caching service'
+
+ it 'counts closed issues including confidential' do
+ expect(subject.count).to eq(2)
+ end
+end
diff --git a/spec/services/milestones/issues_count_service_spec.rb b/spec/services/milestones/issues_count_service_spec.rb
new file mode 100644
index 00000000000..22aea884424
--- /dev/null
+++ b/spec/services/milestones/issues_count_service_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Milestones::IssuesCountService, :use_clean_rails_memory_store_caching do
+ let(:project) { create(:project) }
+ let(:milestone) { create(:milestone, project: project) }
+
+ before do
+ create(:issue, milestone: milestone, project: project)
+ create(:issue, :confidential, milestone: milestone, project: project)
+
+ create(:issue, :closed, milestone: milestone, project: project)
+ create(:issue, :closed, milestone: milestone, project: project)
+ end
+
+ subject { described_class.new(milestone) }
+
+ it_behaves_like 'a counter caching service'
+
+ it 'counts all issues including confidential' do
+ expect(subject.count).to eq(4)
+ end
+end
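
Both new count services share the 'a counter caching service' behaviour: a count read through the cache plus a delete_cache used by the issue services above to invalidate it. A standalone sketch of that shape built directly on Rails.cache, independent of GitLab's actual base count service:

  # Standalone sketch of a counter caching service (not GitLab's implementation).
  class CachedIssuesCount
    def initialize(milestone)
      @milestone = milestone
    end

    def count
      Rails.cache.fetch(cache_key) { @milestone.issues.count }
    end

    def delete_cache
      Rails.cache.delete(cache_key)
    end

    private

    def cache_key
      ['milestones', @milestone.id, 'issues_count']
    end
  end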
diff --git a/spec/services/milestones/transfer_service_spec.rb b/spec/services/milestones/transfer_service_spec.rb
index 711969ce504..9b087b07cea 100644
--- a/spec/services/milestones/transfer_service_spec.rb
+++ b/spec/services/milestones/transfer_service_spec.rb
@@ -40,6 +40,25 @@ describe Milestones::TransferService do
expect(new_milestone.project_milestone?).to be_truthy
end
+ it 'deletes milestone issue counters cache for both milestones' do
+ new_milestone = create(:milestone, project: project, title: group_milestone.title)
+
+ expect_next_instance_of(Milestones::IssuesCountService, group_milestone) do |service|
+ expect(service).to receive(:delete_cache).and_call_original
+ end
+ expect_next_instance_of(Milestones::ClosedIssuesCountService, group_milestone) do |service|
+ expect(service).to receive(:delete_cache).and_call_original
+ end
+ expect_next_instance_of(Milestones::IssuesCountService, new_milestone) do |service|
+ expect(service).to receive(:delete_cache).and_call_original
+ end
+ expect_next_instance_of(Milestones::ClosedIssuesCountService, new_milestone) do |service|
+ expect(service).to receive(:delete_cache).and_call_original
+ end
+
+ service.execute
+ end
+
it 'does not apply new project milestone to issues with project milestone' do
service.execute
diff --git a/spec/services/notification_recipient_service_spec.rb b/spec/services/notification_recipient_service_spec.rb
deleted file mode 100644
index 9c2283f555b..00000000000
--- a/spec/services/notification_recipient_service_spec.rb
+++ /dev/null
@@ -1,61 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe NotificationRecipientService do
- let(:service) { described_class }
- let(:assignee) { create(:user) }
- let(:project) { create(:project, :public) }
- let(:other_projects) { create_list(:project, 5, :public) }
-
- describe '#build_new_note_recipients' do
- let(:issue) { create(:issue, project: project, assignees: [assignee]) }
- let(:note) { create(:note_on_issue, noteable: issue, project_id: issue.project_id) }
-
- shared_examples 'no N+1 queries' do
- it 'avoids N+1 queries', :request_store do
- create_user
-
- service.build_new_note_recipients(note)
-
- control_count = ActiveRecord::QueryRecorder.new do
- service.build_new_note_recipients(note)
- end
-
- create_user
-
- expect { service.build_new_note_recipients(note) }.not_to exceed_query_limit(control_count)
- end
- end
-
- context 'when there are multiple watchers' do
- def create_user
- watcher = create(:user)
- create(:notification_setting, source: project, user: watcher, level: :watch)
-
- other_projects.each do |other_project|
- create(:notification_setting, source: other_project, user: watcher, level: :watch)
- end
- end
-
- include_examples 'no N+1 queries'
- end
-
- context 'when there are multiple subscribers' do
- def create_user
- subscriber = create(:user)
- issue.subscriptions.create(user: subscriber, project: project, subscribed: true)
- end
-
- include_examples 'no N+1 queries'
-
- context 'when the project is private' do
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
- end
-
- include_examples 'no N+1 queries'
- end
- end
- end
-end
diff --git a/spec/services/notification_recipients/build_service_spec.rb b/spec/services/notification_recipients/build_service_spec.rb
new file mode 100644
index 00000000000..2e848c2f04d
--- /dev/null
+++ b/spec/services/notification_recipients/build_service_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe NotificationRecipients::BuildService do
+ let(:service) { described_class }
+ let(:assignee) { create(:user) }
+ let(:project) { create(:project, :public) }
+ let(:other_projects) { create_list(:project, 5, :public) }
+
+ describe '#build_new_note_recipients' do
+ let(:issue) { create(:issue, project: project, assignees: [assignee]) }
+ let(:note) { create(:note_on_issue, noteable: issue, project_id: issue.project_id) }
+
+ shared_examples 'no N+1 queries' do
+ it 'avoids N+1 queries', :request_store do
+ create_user
+
+ service.build_new_note_recipients(note)
+
+ control_count = ActiveRecord::QueryRecorder.new do
+ service.build_new_note_recipients(note)
+ end
+
+ create_user
+
+ expect { service.build_new_note_recipients(note) }.not_to exceed_query_limit(control_count)
+ end
+ end
+
+ context 'when there are multiple watchers' do
+ def create_user
+ watcher = create(:user)
+ create(:notification_setting, source: project, user: watcher, level: :watch)
+
+ other_projects.each do |other_project|
+ create(:notification_setting, source: other_project, user: watcher, level: :watch)
+ end
+ end
+
+ include_examples 'no N+1 queries'
+ end
+
+ context 'when there are multiple subscribers' do
+ def create_user
+ subscriber = create(:user)
+ issue.subscriptions.create(user: subscriber, project: project, subscribed: true)
+ end
+
+ include_examples 'no N+1 queries'
+
+ context 'when the project is private' do
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ end
+
+ include_examples 'no N+1 queries'
+ end
+ end
+ end
+end
diff --git a/spec/services/notification_recipients/builder/default_spec.rb b/spec/services/notification_recipients/builder/default_spec.rb
new file mode 100644
index 00000000000..307ca40248e
--- /dev/null
+++ b/spec/services/notification_recipients/builder/default_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe NotificationRecipients::Builder::Default do
+ describe '#build!' do
+ let_it_be(:group) { create(:group, :public) }
+ let_it_be(:project) { create(:project, :public, group: group).tap { |p| p.add_developer(project_watcher) } }
+ let_it_be(:issue) { create(:issue, project: project) }
+
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:other_user) { create(:user) }
+ let_it_be(:participant) { create(:user) }
+ let_it_be(:group_watcher) { create(:user) }
+ let_it_be(:project_watcher) { create(:user) }
+
+ let_it_be(:notification_setting_project_w) { create(:notification_setting, source: project, user: project_watcher, level: 2) }
+ let_it_be(:notification_setting_group_w) { create(:notification_setting, source: group, user: group_watcher, level: 2) }
+
+ subject { described_class.new(issue, current_user, action: :new).tap { |s| s.build! } }
+
+ context 'participants and project watchers' do
+ before do
+ expect(issue).to receive(:participants).and_return([participant, current_user])
+ end
+
+ it 'adds all participants and watchers' do
+ expect(subject.recipients.map(&:user)).to include(participant, project_watcher, group_watcher)
+ expect(subject.recipients.map(&:user)).not_to include(other_user)
+ end
+ end
+
+ context 'subscribers' do
+ it 'adds all subscribers' do
+ subscriber = create(:user)
+ non_subscriber = create(:user)
+ create(:subscription, project: project, user: subscriber, subscribable: issue, subscribed: true)
+ create(:subscription, project: project, user: non_subscriber, subscribable: issue, subscribed: false)
+
+ expect(subject.recipients.map(&:user)).to include(subscriber)
+ end
+ end
+ end
+end
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 07a1be6c12b..96906b4ca3c 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -394,7 +394,7 @@ describe NotificationService, :mailer do
end
end
- context 'confidential issue note', :deliver_mails_inline do
+ context 'confidential issue note' do
let(:project) { create(:project, :public) }
let(:author) { create(:user) }
let(:assignee) { create(:user) }
@@ -406,23 +406,22 @@ describe NotificationService, :mailer do
let(:note) { create(:note_on_issue, noteable: confidential_issue, project: project, note: "#{author.to_reference} #{assignee.to_reference} #{non_member.to_reference} #{member.to_reference} #{admin.to_reference}") }
let(:guest_watcher) { create_user_with_notification(:watch, "guest-watcher-confidential") }
- it 'filters out users that can not read the issue' do
+ subject { notification.new_note(note) }
+
+ before do
project.add_developer(member)
project.add_guest(guest)
-
- expect(SentNotification).to receive(:record).with(confidential_issue, any_args).exactly(4).times
-
reset_delivered_emails!
+ end
- notification.new_note(note)
+ it 'filters out users that can not read the issue' do
+ subject
- should_not_email(non_member)
- should_not_email(guest)
- should_not_email(guest_watcher)
- should_email(author)
- should_email(assignee)
- should_email(member)
- should_email(admin)
+ expect_delivery_jobs_count(4)
+ expect_enqueud_email(author.id, note.id, "mentioned", mail: "note_issue_email")
+ expect_enqueud_email(assignee.id, note.id, "mentioned", mail: "note_issue_email")
+ expect_enqueud_email(member.id, note.id, "mentioned", mail: "note_issue_email")
+ expect_enqueud_email(admin.id, note.id, "mentioned", mail: "note_issue_email")
end
context 'on project that belongs to subgroup' do
@@ -442,10 +441,10 @@ describe NotificationService, :mailer do
end
it 'does not email guest user' do
- notification.new_note(note)
+ subject
- should_email(group_reporter)
- should_not_email(group_guest)
+ expect_enqueud_email(group_reporter.id, note.id, nil, mail: "note_issue_email")
+ expect_not_enqueud_email(group_guest.id, "mentioned", mail: "note_issue_email")
end
end
end
@@ -711,7 +710,7 @@ describe NotificationService, :mailer do
user_3 = create(:user)
recipient_1 = NotificationRecipient.new(user_1, :custom, custom_action: :new_release)
recipient_2 = NotificationRecipient.new(user_2, :custom, custom_action: :new_release)
- allow(NotificationRecipientService).to receive(:build_new_release_recipients).and_return([recipient_1, recipient_2])
+ allow(NotificationRecipients::BuildService).to receive(:build_new_release_recipients).and_return([recipient_1, recipient_2])
release
@@ -2315,6 +2314,7 @@ describe NotificationService, :mailer do
user = create_user_with_notification(:custom, 'custom_enabled')
update_custom_notification(:success_pipeline, user, resource: project)
update_custom_notification(:failed_pipeline, user, resource: project)
+ update_custom_notification(:fixed_pipeline, user, resource: project)
user
end
@@ -2322,6 +2322,7 @@ describe NotificationService, :mailer do
user = create_user_with_notification(:custom, 'custom_disabled')
update_custom_notification(:success_pipeline, user, resource: project, value: false)
update_custom_notification(:failed_pipeline, user, resource: project, value: false)
+ update_custom_notification(:fixed_pipeline, user, resource: project, value: false)
user
end
@@ -2514,6 +2515,85 @@ describe NotificationService, :mailer do
end
end
end
+
+ context 'with a fixed pipeline' do
+ let(:ref_status) { 'fixed' }
+
+ context 'when the creator has no custom notification set' do
+ let(:pipeline) { create_pipeline(u_member, :success) }
+
+ it 'emails only the creator' do
+ notification.pipeline_finished(pipeline, ref_status: ref_status)
+
+ should_only_email(u_member, kind: :bcc)
+ end
+
+ it_behaves_like 'project emails are disabled' do
+ let(:notification_target) { pipeline }
+ let(:notification_trigger) { notification.pipeline_finished(pipeline, ref_status: ref_status) }
+ end
+
+ context 'when the creator has group notification email set' do
+ let(:group_notification_email) { 'user+group@example.com' }
+
+ before do
+ group = create(:group)
+
+ project.update(group: group)
+ create(:notification_setting, user: u_member, source: group, notification_email: group_notification_email)
+ end
+
+ it 'sends to group notification email' do
+ notification.pipeline_finished(pipeline, ref_status: ref_status)
+
+ expect(email_recipients(kind: :bcc).first).to eq(group_notification_email)
+ end
+ end
+ end
+
+ context 'when the creator has watch set' do
+ before do
+ pipeline = create_pipeline(u_watcher, :success)
+ notification.pipeline_finished(pipeline, ref_status: ref_status)
+ end
+
+ it 'emails only the creator' do
+ should_only_email(u_watcher, kind: :bcc)
+ end
+ end
+
+ context 'when the creator has custom notifications, but without any set' do
+ before do
+ pipeline = create_pipeline(u_custom_notification_unset, :success)
+ notification.pipeline_finished(pipeline, ref_status: ref_status)
+ end
+
+ it 'emails only the creator' do
+ should_only_email(u_custom_notification_unset, kind: :bcc)
+ end
+ end
+
+ context 'when the creator has custom notifications disabled' do
+ before do
+ pipeline = create_pipeline(u_custom_notification_disabled, :success)
+ notification.pipeline_finished(pipeline, ref_status: ref_status)
+ end
+
+ it 'notifies nobody' do
+ should_not_email_anyone
+ end
+ end
+
+ context 'when the creator has custom notifications set' do
+ it 'emails only the creator' do
+ pipeline = create_pipeline(u_custom_notification_enabled, :success)
+
+ notification.pipeline_finished(pipeline, ref_status: ref_status)
+
+ should_only_email(u_custom_notification_enabled, kind: :bcc)
+ end
+ end
+ end
end
end
diff --git a/spec/services/pod_logs/base_service_spec.rb b/spec/services/pod_logs/base_service_spec.rb
new file mode 100644
index 00000000000..63ab8b29f78
--- /dev/null
+++ b/spec/services/pod_logs/base_service_spec.rb
@@ -0,0 +1,232 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ::PodLogs::BaseService do
+ include KubernetesHelpers
+
+ let_it_be(:cluster) { create(:cluster, :provided_by_gcp, environment_scope: '*') }
+ let(:namespace) { 'autodevops-deploy-9-production' }
+
+ let(:pod_name) { 'pod-1' }
+ let(:pod_name_2) { 'pod-2' }
+ let(:container_name) { 'container-0' }
+ let(:container_name_2) { 'foo-0' }
+ let(:params) { {} }
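+ # round-trip the kube_pod fixture hashes through JSON with object_class: OpenStruct so they respond to methods like real Kubeclient resources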
+ let(:raw_pods) do
+ JSON.parse([
+ kube_pod(name: pod_name),
+ kube_pod(name: pod_name_2, container_name: container_name_2)
+ ].to_json, object_class: OpenStruct)
+ end
+
+ subject { described_class.new(cluster, namespace, params: params) }
+
+ describe '#initialize' do
+ let(:params) do
+ {
+ 'container_name' => container_name,
+ 'another_param' => 'foo'
+ }
+ end
+
+ it 'filters the parameters' do
+ expect(subject.cluster).to eq(cluster)
+ expect(subject.namespace).to eq(namespace)
+ expect(subject.params).to eq({
+ 'container_name' => container_name
+ })
+ expect(subject.params.equal?(params)).to be(false)
+ end
+ end
+
+ describe '#check_arguments' do
+ context 'when cluster and namespace are provided' do
+ it 'returns success' do
+ result = subject.send(:check_arguments, {})
+
+ expect(result[:status]).to eq(:success)
+ end
+ end
+
+ context 'when cluster is nil' do
+ let(:cluster) { nil }
+
+ it 'returns an error' do
+ result = subject.send(:check_arguments, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Cluster does not exist')
+ end
+ end
+
+ context 'when namespace is nil' do
+ let(:namespace) { nil }
+
+ it 'returns an error' do
+ result = subject.send(:check_arguments, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Namespace is empty')
+ end
+ end
+
+ context 'when namespace is empty' do
+ let(:namespace) { '' }
+
+ it 'returns an error' do
+ result = subject.send(:check_arguments, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Namespace is empty')
+ end
+ end
+ end
+
+ describe '#check_param_lengths' do
+ context 'when pod_name and container_name are provided' do
+ let(:params) do
+ {
+ 'pod_name' => pod_name,
+ 'container_name' => container_name
+ }
+ end
+
+ it 'returns success' do
+ result = subject.send(:check_param_lengths, {})
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:pod_name]).to eq(pod_name)
+ expect(result[:container_name]).to eq(container_name)
+ end
+ end
+
+ context 'when pod_name is too long' do
+ let(:params) do
+ {
+ 'pod_name' => "a very long string." * 15
+ }
+ end
+
+ it 'returns an error' do
+ result = subject.send(:check_param_lengths, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('pod_name cannot be larger than 253 chars')
+ end
+ end
+
+ context 'when container_name is too long' do
+ let(:params) do
+ {
+ 'container_name' => "a very long string." * 15
+ }
+ end
+
+ it 'returns an error' do
+ result = subject.send(:check_param_lengths, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('container_name cannot be larger than 253 chars')
+ end
+ end
+ end
+
+ describe '#get_raw_pods' do
+ let(:service) { create(:cluster_platform_kubernetes, :configured) }
+
+ it 'returns success with passthrough k8s response' do
+ stub_kubeclient_pods(namespace)
+
+ result = subject.send(:get_raw_pods, {})
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:raw_pods].first).to be_a(Kubeclient::Resource)
+ end
+ end
+
+ describe '#get_pod_names' do
+ it 'returns success with a list of pods' do
+ result = subject.send(:get_pod_names, raw_pods: raw_pods)
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:pods]).to eq([pod_name, pod_name_2])
+ end
+ end
+
+ describe '#check_pod_name' do
+ it 'returns success if pod_name was specified' do
+ result = subject.send(:check_pod_name, pod_name: pod_name, pods: [pod_name])
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:pod_name]).to eq(pod_name)
+ end
+
+ it 'returns success if pod_name was not specified but there are pods' do
+ result = subject.send(:check_pod_name, pod_name: nil, pods: [pod_name])
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:pod_name]).to eq(pod_name)
+ end
+
+ it 'returns error if pod_name was not specified and there are no pods' do
+ result = subject.send(:check_pod_name, pod_name: nil, pods: [])
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('No pods available')
+ end
+
+ it 'returns error if pod_name was specified but does not exist' do
+ result = subject.send(:check_pod_name, pod_name: 'another_pod', pods: [pod_name])
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Pod does not exist')
+ end
+ end
+
+ describe '#check_container_name' do
+ it 'returns success if container_name was specified' do
+ result = subject.send(:check_container_name,
+ container_name: container_name,
+ pod_name: pod_name,
+ raw_pods: raw_pods
+ )
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:container_name]).to eq(container_name)
+ end
+
+ it 'returns success if container_name was not specified and there are containers' do
+ result = subject.send(:check_container_name,
+ pod_name: pod_name_2,
+ raw_pods: raw_pods
+ )
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:container_name]).to eq(container_name_2)
+ end
+
+ it 'returns error if container_name was not specified and there are no containers on the pod' do
+ raw_pods.first.spec.containers = []
+
+ result = subject.send(:check_container_name,
+ pod_name: pod_name,
+ raw_pods: raw_pods
+ )
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('No containers available')
+ end
+
+ it 'returns error if container_name was specified but does not exist' do
+ result = subject.send(:check_container_name,
+ container_name: 'foo',
+ pod_name: pod_name,
+ raw_pods: raw_pods
+ )
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Container does not exist')
+ end
+ end
+end
diff --git a/spec/services/pod_logs/elasticsearch_service_spec.rb b/spec/services/pod_logs/elasticsearch_service_spec.rb
new file mode 100644
index 00000000000..984a303e9e3
--- /dev/null
+++ b/spec/services/pod_logs/elasticsearch_service_spec.rb
@@ -0,0 +1,222 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ::PodLogs::ElasticsearchService do
+ let_it_be(:cluster) { create(:cluster, :provided_by_gcp, environment_scope: '*') }
+ let(:namespace) { 'autodevops-deploy-9-production' }
+
+ let(:pod_name) { 'pod-1' }
+ let(:container_name) { 'container-1' }
+ let(:search) { 'foo -bar' }
+ let(:start_time) { '2019-01-02T12:13:14+02:00' }
+ let(:end_time) { '2019-01-03T12:13:14+02:00' }
+ let(:cursor) { '9999934,1572449784442' }
+ let(:params) { {} }
+ let(:expected_logs) do
+ [
+ { message: "Log 1", timestamp: "2019-12-13T14:04:22.123456Z" },
+ { message: "Log 2", timestamp: "2019-12-13T14:04:23.123456Z" },
+ { message: "Log 3", timestamp: "2019-12-13T14:04:24.123456Z" }
+ ]
+ end
+
+ subject { described_class.new(cluster, namespace, params: params) }
+
+ describe '#check_times' do
+ context 'with start and end provided and valid' do
+ let(:params) do
+ {
+ 'start' => start_time,
+ 'end' => end_time
+ }
+ end
+
+ it 'returns success with times' do
+ result = subject.send(:check_times, {})
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:start]).to eq(start_time)
+ expect(result[:end]).to eq(end_time)
+ end
+ end
+
+ context 'with start and end not provided' do
+ let(:params) do
+ {}
+ end
+
+ it 'returns success with nothing else' do
+ result = subject.send(:check_times, {})
+
+ expect(result.keys.length).to eq(1)
+ expect(result[:status]).to eq(:success)
+ end
+ end
+
+ context 'with start valid and end invalid' do
+ let(:params) do
+ {
+ 'start' => start_time,
+ 'end' => 'invalid date'
+ }
+ end
+
+ it 'returns error' do
+ result = subject.send(:check_times, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Invalid start or end time format')
+ end
+ end
+
+ context 'with start invalid and end valid' do
+ let(:params) do
+ {
+ 'start' => 'invalid date',
+ 'end' => end_time
+ }
+ end
+
+ it 'returns error' do
+ result = subject.send(:check_times, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Invalid start or end time format')
+ end
+ end
+ end
+
+ describe '#check_search' do
+ context 'with search provided and valid' do
+ let(:params) do
+ {
+ 'search' => search
+ }
+ end
+
+ it 'returns success with search' do
+ result = subject.send(:check_search, {})
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:search]).to eq(search)
+ end
+ end
+
+ context 'with search not provided' do
+ let(:params) do
+ {}
+ end
+
+ it 'returns success with nothing else' do
+ result = subject.send(:check_search, {})
+
+ expect(result.keys.length).to eq(1)
+ expect(result[:status]).to eq(:success)
+ end
+ end
+ end
+
+ describe '#check_cursor' do
+ context 'with cursor provided and valid' do
+ let(:params) do
+ {
+ 'cursor' => cursor
+ }
+ end
+
+ it 'returns success with cursor' do
+ result = subject.send(:check_cursor, {})
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:cursor]).to eq(cursor)
+ end
+ end
+
+ context 'with cursor not provided' do
+ let(:params) do
+ {}
+ end
+
+ it 'returns success with nothing else' do
+ result = subject.send(:check_cursor, {})
+
+ expect(result.keys.length).to eq(1)
+ expect(result[:status]).to eq(:success)
+ end
+ end
+ end
+
+ describe '#pod_logs' do
+ let(:result_arg) do
+ {
+ pod_name: pod_name,
+ container_name: container_name,
+ search: search,
+ start: start_time,
+ end: end_time,
+ cursor: cursor
+ }
+ end
+ let(:expected_cursor) { '9999934,1572449784442' }
+
+ before do
+ create(:clusters_applications_elastic_stack, :installed, cluster: cluster)
+ end
+
+ it 'returns the logs' do
+ allow_any_instance_of(::Clusters::Applications::ElasticStack)
+ .to receive(:elasticsearch_client)
+ .and_return(Elasticsearch::Transport::Client.new)
+ allow_any_instance_of(::Gitlab::Elasticsearch::Logs)
+ .to receive(:pod_logs)
+ .with(namespace, pod_name, container_name: container_name, search: search, start_time: start_time, end_time: end_time, cursor: cursor)
+ .and_return({ logs: expected_logs, cursor: expected_cursor })
+
+ result = subject.send(:pod_logs, result_arg)
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:logs]).to eq(expected_logs)
+ expect(result[:cursor]).to eq(expected_cursor)
+ end
+
+ it 'returns an error when ES is unreachable' do
+ allow_any_instance_of(::Clusters::Applications::ElasticStack)
+ .to receive(:elasticsearch_client)
+ .and_return(nil)
+
+ result = subject.send(:pod_logs, result_arg)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Unable to connect to Elasticsearch')
+ end
+
+ it 'handles server errors from elasticsearch' do
+ allow_any_instance_of(::Clusters::Applications::ElasticStack)
+ .to receive(:elasticsearch_client)
+ .and_return(Elasticsearch::Transport::Client.new)
+ allow_any_instance_of(::Gitlab::Elasticsearch::Logs)
+ .to receive(:pod_logs)
+ .and_raise(Elasticsearch::Transport::Transport::Errors::ServiceUnavailable.new)
+
+ result = subject.send(:pod_logs, result_arg)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Elasticsearch returned status code: ServiceUnavailable')
+ end
+
+ it 'handles cursor errors from elasticsearch' do
+ allow_any_instance_of(::Clusters::Applications::ElasticStack)
+ .to receive(:elasticsearch_client)
+ .and_return(Elasticsearch::Transport::Client.new)
+ allow_any_instance_of(::Gitlab::Elasticsearch::Logs)
+ .to receive(:pod_logs)
+ .and_raise(::Gitlab::Elasticsearch::Logs::InvalidCursor.new)
+
+ result = subject.send(:pod_logs, result_arg)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Invalid cursor value provided')
+ end
+ end
+end
diff --git a/spec/services/pod_logs/kubernetes_service_spec.rb b/spec/services/pod_logs/kubernetes_service_spec.rb
new file mode 100644
index 00000000000..9fab88a14f6
--- /dev/null
+++ b/spec/services/pod_logs/kubernetes_service_spec.rb
@@ -0,0 +1,166 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ::PodLogs::KubernetesService do
+ include KubernetesHelpers
+
+ let_it_be(:cluster) { create(:cluster, :provided_by_gcp, environment_scope: '*') }
+ let(:namespace) { 'autodevops-deploy-9-production' }
+
+ let(:pod_name) { 'pod-1' }
+ let(:container_name) { 'container-1' }
+ let(:params) { {} }
+
+ let(:raw_logs) do
+ "2019-12-13T14:04:22.123456Z Log 1\n2019-12-13T14:04:23.123456Z Log 2\n" \
+ "2019-12-13T14:04:24.123456Z Log 3"
+ end
+
+ subject { described_class.new(cluster, namespace, params: params) }
+
+ describe '#pod_logs' do
+ let(:result_arg) do
+ {
+ pod_name: pod_name,
+ container_name: container_name
+ }
+ end
+
+ let(:expected_logs) { raw_logs }
+ let(:service) { create(:cluster_platform_kubernetes, :configured) }
+
+ it 'returns the logs' do
+ stub_kubeclient_logs(pod_name, namespace, container: container_name)
+
+ result = subject.send(:pod_logs, result_arg)
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:logs]).to eq(expected_logs)
+ end
+
+ it 'handles Not Found errors from k8s' do
+ allow_any_instance_of(Gitlab::Kubernetes::KubeClient)
+ .to receive(:get_pod_log)
+ .with(any_args)
+ .and_raise(Kubeclient::ResourceNotFoundError.new(404, 'Not Found', {}))
+
+ result = subject.send(:pod_logs, result_arg)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Pod not found')
+ end
+
+ it 'handles HTTP errors from k8s' do
+ allow_any_instance_of(Gitlab::Kubernetes::KubeClient)
+ .to receive(:get_pod_log)
+ .with(any_args)
+ .and_raise(Kubeclient::HttpError.new(500, 'Error', {}))
+
+ result = subject.send(:pod_logs, result_arg)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Kubernetes API returned status code: 500')
+ end
+ end
+
+ describe '#encode_logs_to_utf8', :aggregate_failures do
+ let(:service) { create(:cluster_platform_kubernetes, :configured) }
+ let(:expected_logs) { '2019-12-13T14:04:22.123456Z ✔ Started logging errors to Sentry' }
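+ # force ASCII-8BIT to mimic raw Kubernetes log output that has not yet been tagged as UTF-8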
+ let(:raw_logs) { expected_logs.dup.force_encoding(Encoding::ASCII_8BIT) }
+ let(:result) { subject.send(:encode_logs_to_utf8, result_arg) }
+
+ let(:result_arg) do
+ {
+ pod_name: pod_name,
+ container_name: container_name,
+ logs: raw_logs
+ }
+ end
+
+ it 'converts logs to utf-8' do
+ expect(result[:status]).to eq(:success)
+ expect(result[:logs]).to eq(expected_logs)
+ end
+
+ it 'returns error if output of encoding helper is blank' do
+ allow(Gitlab::EncodingHelper).to receive(:encode_utf8).and_return('')
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Unable to convert Kubernetes logs encoding to UTF-8')
+ end
+
+ it 'returns error if output of encoding helper is nil' do
+ allow(Gitlab::EncodingHelper).to receive(:encode_utf8).and_return(nil)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Unable to convert Kubernetes logs encoding to UTF-8')
+ end
+
+ it 'returns error if output of encoding helper is not UTF-8' do
+ allow(Gitlab::EncodingHelper).to receive(:encode_utf8)
+ .and_return(expected_logs.encode(Encoding::UTF_16BE))
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Unable to convert Kubernetes logs encoding to UTF-8')
+ end
+
+ context 'when logs are nil' do
+ let(:raw_logs) { nil }
+ let(:expected_logs) { nil }
+
+ it 'returns nil' do
+ expect(result[:status]).to eq(:success)
+ expect(result[:logs]).to eq(expected_logs)
+ end
+ end
+
+ context 'when logs are blank' do
+ let(:raw_logs) { (+'').force_encoding(Encoding::ASCII_8BIT) }
+ let(:expected_logs) { '' }
+
+ it 'returns blank string' do
+ expect(result[:status]).to eq(:success)
+ expect(result[:logs]).to eq(expected_logs)
+ end
+ end
+
+ context 'when logs are already in utf-8' do
+ let(:raw_logs) { expected_logs }
+
+ it 'does not fail' do
+ expect(result[:status]).to eq(:success)
+ expect(result[:logs]).to eq(expected_logs)
+ end
+ end
+ end
+
+ describe '#split_logs' do
+ let(:service) { create(:cluster_platform_kubernetes, :configured) }
+
+ let(:expected_logs) do
+ [
+ { message: "Log 1", timestamp: "2019-12-13T14:04:22.123456Z" },
+ { message: "Log 2", timestamp: "2019-12-13T14:04:23.123456Z" },
+ { message: "Log 3", timestamp: "2019-12-13T14:04:24.123456Z" }
+ ]
+ end
+
+ let(:result_arg) do
+ {
+ pod_name: pod_name,
+ container_name: container_name,
+ logs: raw_logs
+ }
+ end
+
+ it 'returns the logs' do
+ result = subject.send(:split_logs, result_arg)
+
+ aggregate_failures do
+ expect(result[:status]).to eq(:success)
+ expect(result[:logs]).to eq(expected_logs)
+ end
+ end
+ end
+end
diff --git a/spec/services/post_receive_service_spec.rb b/spec/services/post_receive_service_spec.rb
index 9b9200fd33e..b4f48696b15 100644
--- a/spec/services/post_receive_service_spec.rb
+++ b/spec/services/post_receive_service_spec.rb
@@ -5,8 +5,10 @@ require 'spec_helper'
describe PostReceiveService do
include Gitlab::Routing
- let_it_be(:project) { create(:project, :repository, :wiki_repo) }
let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository, :wiki_repo, namespace: user.namespace) }
+ let_it_be(:project_snippet) { create(:project_snippet, :repository, project: project, author: user) }
+ let_it_be(:personal_snippet) { create(:personal_snippet, :repository, author: user) }
let(:identifier) { 'key-123' }
let(:gl_repository) { "project-#{project.id}" }
@@ -14,6 +16,7 @@ describe PostReceiveService do
let(:secret_token) { Gitlab::Shell.secret_token }
let(:reference_counter) { double('ReferenceCounter') }
let(:push_options) { ['ci.skip', 'another push option'] }
+ let(:repository) { project.repository }
let(:changes) do
"#{Gitlab::Git::BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{branch_name}"
@@ -29,115 +32,192 @@ describe PostReceiveService do
}
end
- let(:response) { PostReceiveService.new(user, project, params).execute }
+ let(:service) { described_class.new(user, repository, project, params) }
+ let(:response) { service.execute }
subject { response.messages.as_json }
- it 'enqueues a PostReceive worker job' do
- expect(PostReceive).to receive(:perform_async)
- .with(gl_repository, identifier, changes, { ci: { skip: true } })
+ context 'when project is nil' do
+ let(:gl_repository) { "snippet-#{personal_snippet.id}" }
+ let(:project) { nil }
+ let(:repository) { personal_snippet.repository }
- subject
+ it 'does not return an error' do
+ expect(subject).to be_empty
+ end
end
- it 'decreases the reference counter and returns the result' do
- expect(Gitlab::ReferenceCounter).to receive(:new).with(gl_repository)
- .and_return(reference_counter)
- expect(reference_counter).to receive(:decrease).and_return(true)
+ context 'when repository is nil' do
+ let(:repository) { nil }
- expect(response.reference_counter_decreased).to be(true)
+ it 'does not return an error' do
+ expect(subject).to be_empty
+ end
end
- it 'returns link to create new merge request' do
- message = <<~MESSAGE.strip
- To create a merge request for #{branch_name}, visit:
- http://#{Gitlab.config.gitlab.host}/#{project.full_path}/-/merge_requests/new?merge_request%5Bsource_branch%5D=#{branch_name}
- MESSAGE
+ context 'when both repository and project are nil' do
+ let(:gl_repository) { "snippet-#{personal_snippet.id}" }
+ let(:project) { nil }
+ let(:repository) { nil }
- expect(subject).to include(build_basic_message(message))
+ it 'does not return an error' do
+ expect(subject).to be_empty
+ end
end
- it 'returns the link to an existing merge request when it exists' do
- merge_request = create(:merge_request, source_project: project, source_branch: branch_name, target_branch: 'master')
- message = <<~MESSAGE.strip
- View merge request for feature:
- #{project_merge_request_url(project, merge_request)}
- MESSAGE
+ shared_examples 'post_receive_service actions' do
+ it 'enqueues a PostReceive worker job' do
+ expect(PostReceive).to receive(:perform_async)
+ .with(gl_repository, identifier, changes, { ci: { skip: true } })
- expect(subject).to include(build_basic_message(message))
- end
+ subject
+ end
- context 'when printing_merge_request_link_enabled is false' do
- let(:project) { create(:project, printing_merge_request_link_enabled: false) }
+ it 'decreases the reference counter and returns the result' do
+ expect(Gitlab::ReferenceCounter).to receive(:new).with(gl_repository)
+ .and_return(reference_counter)
+ expect(reference_counter).to receive(:decrease).and_return(true)
- it 'returns no merge request messages' do
- expect(subject).to be_blank
+ expect(response.reference_counter_decreased).to be(true)
end
end
- it 'does not invoke MergeRequests::PushOptionsHandlerService' do
- expect(MergeRequests::PushOptionsHandlerService).not_to receive(:new)
+ context 'with Project' do
+ it_behaves_like 'post_receive_service actions'
- subject
- end
+ it 'returns link to create new merge request' do
+ message = <<~MESSAGE.strip
+ To create a merge request for #{branch_name}, visit:
+ http://#{Gitlab.config.gitlab.host}/#{project.full_path}/-/merge_requests/new?merge_request%5Bsource_branch%5D=#{branch_name}
+ MESSAGE
+
+ expect(subject).to include(build_basic_message(message))
+ end
+
+ it 'returns the link to an existing merge request when it exists' do
+ merge_request = create(:merge_request, source_project: project, source_branch: branch_name, target_branch: 'master')
+ message = <<~MESSAGE.strip
+ View merge request for feature:
+ #{project_merge_request_url(project, merge_request)}
+ MESSAGE
- context 'when there are merge_request push options' do
- let(:params) { super().merge(push_options: ['merge_request.create']) }
+ expect(subject).to include(build_basic_message(message))
+ end
- before do
- project.add_developer(user)
+ context 'when printing_merge_request_link_enabled is false' do
+ let(:project) { create(:project, printing_merge_request_link_enabled: false) }
+
+ it 'returns no merge request messages' do
+ expect(subject).to be_blank
+ end
end
- it 'invokes MergeRequests::PushOptionsHandlerService' do
- expect(MergeRequests::PushOptionsHandlerService).to receive(:new).and_call_original
+ it 'does not invoke MergeRequests::PushOptionsHandlerService' do
+ expect(MergeRequests::PushOptionsHandlerService).not_to receive(:new)
subject
end
- it 'creates a new merge request' do
- expect { Sidekiq::Testing.fake! { subject } }.to change(MergeRequest, :count).by(1)
- end
+ context 'when there are merge_request push options' do
+ let(:params) { super().merge(push_options: ['merge_request.create']) }
- it 'links to the newly created merge request' do
- message = <<~MESSAGE.strip
- View merge request for #{branch_name}:
- http://#{Gitlab.config.gitlab.host}/#{project.full_path}/-/merge_requests/1
- MESSAGE
+ before do
+ project.add_developer(user)
+ end
- expect(subject).to include(build_basic_message(message))
+ it 'invokes MergeRequests::PushOptionsHandlerService' do
+ expect(MergeRequests::PushOptionsHandlerService).to receive(:new).and_call_original
+
+ subject
+ end
+
+ it 'creates a new merge request' do
+ expect { Sidekiq::Testing.fake! { subject } }.to change(MergeRequest, :count).by(1)
+ end
+
+ it 'links to the newly created merge request' do
+ message = <<~MESSAGE.strip
+ View merge request for #{branch_name}:
+ http://#{Gitlab.config.gitlab.host}/#{project.full_path}/-/merge_requests/1
+ MESSAGE
+
+ expect(subject).to include(build_basic_message(message))
+ end
+
+ it 'adds errors on the service instance to warnings' do
+ expect_any_instance_of(
+ MergeRequests::PushOptionsHandlerService
+ ).to receive(:errors).at_least(:once).and_return(['my error'])
+
+ message = "WARNINGS:\nError encountered with push options 'merge_request.create': my error"
+
+ expect(subject).to include(build_alert_message(message))
+ end
+
+ it 'adds ActiveRecord errors on invalid MergeRequest records to warnings' do
+ invalid_merge_request = MergeRequest.new
+ invalid_merge_request.errors.add(:base, 'my error')
+ message = "WARNINGS:\nError encountered with push options 'merge_request.create': my error"
+
+ expect_any_instance_of(
+ MergeRequests::CreateService
+ ).to receive(:execute).and_return(invalid_merge_request)
+
+ expect(subject).to include(build_alert_message(message))
+ end
end
+ end
+
+ context 'with PersonalSnippet' do
+ let(:gl_repository) { "snippet-#{personal_snippet.id}" }
+ let(:repository) { personal_snippet.repository }
- it 'adds errors on the service instance to warnings' do
- expect_any_instance_of(
- MergeRequests::PushOptionsHandlerService
- ).to receive(:errors).at_least(:once).and_return(['my error'])
+ it_behaves_like 'post_receive_service actions'
- message = "WARNINGS:\nError encountered with push options 'merge_request.create': my error"
+ it 'does not return link to create new merge request' do
+ expect(subject).to be_empty
+ end
+
+ it 'does not return the link to an existing merge request when it exists' do
+ create(:merge_request, source_project: project, source_branch: branch_name, target_branch: 'master')
- expect(subject).to include(build_alert_message(message))
+ expect(subject).to be_empty
end
+ end
- it 'adds ActiveRecord errors on invalid MergeRequest records to warnings' do
- invalid_merge_request = MergeRequest.new
- invalid_merge_request.errors.add(:base, 'my error')
- message = "WARNINGS:\nError encountered with push options 'merge_request.create': my error"
+ context 'with ProjectSnippet' do
+ let(:gl_repository) { "snippet-#{project_snippet.id}" }
+ let(:repository) { project_snippet.repository }
- expect_any_instance_of(
- MergeRequests::CreateService
- ).to receive(:execute).and_return(invalid_merge_request)
+ it_behaves_like 'post_receive_service actions'
- expect(subject).to include(build_alert_message(message))
+ it 'does not return link to create new merge request' do
+ expect(subject).to be_empty
+ end
+
+ it 'does not return the link to an existing merge request when it exists' do
+ create(:merge_request, source_project: project, source_branch: branch_name, target_branch: 'master')
+
+ expect(subject).to be_empty
end
end
- context 'broadcast message exists' do
+ context 'broadcast message banner exists' do
it 'outputs a broadcast message' do
- broadcast_message = create(:broadcast_message, starts_at: 1.day.ago, ends_at: 1.day.from_now)
+ broadcast_message = create(:broadcast_message)
expect(subject).to include(build_alert_message(broadcast_message.message))
end
end
+ context 'broadcast message notification exists' do
+ it 'does not output a broadcast message' do
+ create(:broadcast_message, :notification)
+
+ expect(has_alert_messages?(subject)).to be_falsey
+ end
+ end
+
context 'broadcast message does not exist' do
it 'does not output a broadcast message' do
expect(has_alert_messages?(subject)).to be_falsey
@@ -154,7 +234,7 @@ describe PostReceiveService do
context 'with a redirected data' do
it 'returns redirected message on the response' do
- project_moved = Gitlab::Checks::ProjectMoved.new(project, user, 'http', 'foo/baz')
+ project_moved = Gitlab::Checks::ProjectMoved.new(project.repository, user, 'http', 'foo/baz')
project_moved.add_message
expect(subject).to include(build_basic_message(project_moved.message))
@@ -163,13 +243,57 @@ describe PostReceiveService do
context 'with new project data' do
it 'returns new project message on the response' do
- project_created = Gitlab::Checks::ProjectCreated.new(project, user, 'http')
+ project_created = Gitlab::Checks::ProjectCreated.new(project.repository, user, 'http')
project_created.add_message
expect(subject).to include(build_basic_message(project_created.message))
end
end
+ describe '#process_mr_push_options' do
+ context 'when repository belongs to a snippet' do
+ context 'with PersonalSnippet' do
+ let(:repository) { personal_snippet.repository }
+
+ it 'returns an error message' do
+ result = service.process_mr_push_options(push_options, changes)
+
+ expect(result).to match('Push options are only supported for projects')
+ end
+ end
+
+ context 'with ProjectSnippet' do
+ let(:repository) { project_snippet.repository }
+
+ it 'returns an error message' do
+ result = service.process_mr_push_options(push_options, changes)
+
+ expect(result).to match('Push options are only supported for projects')
+ end
+ end
+ end
+ end
+
+ describe '#merge_request_urls' do
+ context 'when repository belongs to a snippet' do
+ context 'with PersonalSnippet' do
+ let(:repository) { personal_snippet.repository }
+
+ it 'returns an empty array' do
+ expect(service.merge_request_urls).to be_empty
+ end
+ end
+
+ context 'with ProjectSnippet' do
+ let(:repository) { project_snippet.repository }
+
+ it 'returns an empty array' do
+ expect(service.merge_request_urls).to be_empty
+ end
+ end
+ end
+ end
+
def build_alert_message(message)
{ 'type' => 'alert', 'message' => message }
end
diff --git a/spec/services/projects/alerting/notify_service_spec.rb b/spec/services/projects/alerting/notify_service_spec.rb
index 925d323584e..289d812f498 100644
--- a/spec/services/projects/alerting/notify_service_spec.rb
+++ b/spec/services/projects/alerting/notify_service_spec.rb
@@ -25,7 +25,31 @@ describe Projects::Alerting::NotifyService do
end
end
- shared_examples 'does not process incident issues' do |http_status:|
+ shared_examples 'sends notification email' do
+ let(:notification_service) { spy }
+
+ it 'sends a notification for firing alerts only' do
+ expect(NotificationService)
+ .to receive(:new)
+ .and_return(notification_service)
+
+ expect(notification_service)
+ .to receive_message_chain(:async, :prometheus_alerts_fired)
+
+ expect(subject.status).to eq(:success)
+ end
+ end
+
+ shared_examples 'does not process incident issues' do
+ it 'does not process issues' do
+ expect(IncidentManagement::ProcessAlertWorker)
+ .not_to receive(:perform_async)
+
+ expect(subject.status).to eq(:success)
+ end
+ end
+
+ shared_examples 'does not process incident issues due to error' do |http_status:|
it 'does not process issues' do
expect(IncidentManagement::ProcessAlertWorker)
.not_to receive(:perform_async)
@@ -54,31 +78,50 @@ describe Projects::Alerting::NotifyService do
context 'with valid token' do
let(:token) { alerts_service.token }
+ let(:incident_management_setting) { double(send_email?: email_enabled, create_issue?: issue_enabled) }
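+ # stands in for the project's incident management settings; the email/issue flags below are toggled per context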
+ let(:email_enabled) { false }
+ let(:issue_enabled) { false }
+
+ before do
+ allow(service)
+ .to receive(:incident_management_setting)
+ .and_return(incident_management_setting)
+ end
+
+ it_behaves_like 'does not process incident issues'
+
+ context 'issue enabled' do
+ let(:issue_enabled) { true }
- context 'with a valid payload' do
it_behaves_like 'processes incident issues', 1
- end
- context 'with an invalid payload' do
- before do
- allow(Gitlab::Alerting::NotificationPayloadParser)
- .to receive(:call)
- .and_raise(Gitlab::Alerting::NotificationPayloadParser::BadPayloadError)
+ context 'with an invalid payload' do
+ before do
+ allow(Gitlab::Alerting::NotificationPayloadParser)
+ .to receive(:call)
+ .and_raise(Gitlab::Alerting::NotificationPayloadParser::BadPayloadError)
+ end
+
+ it_behaves_like 'does not process incident issues due to error', http_status: 400
end
+ end
+
+ context 'with emails turned on' do
+ let(:email_enabled) { true }
- it_behaves_like 'does not process incident issues', http_status: 400
+ it_behaves_like 'sends notification email'
end
end
context 'with invalid token' do
- it_behaves_like 'does not process incident issues', http_status: 401
+ it_behaves_like 'does not process incident issues due to error', http_status: 401
end
- end
- context 'with deactivated Alerts Service' do
- let!(:alerts_service) { create(:alerts_service, :inactive, project: project) }
+ context 'with deactivated Alerts Service' do
+ let!(:alerts_service) { create(:alerts_service, :inactive, project: project) }
- it_behaves_like 'does not process incident issues', http_status: 403
+ it_behaves_like 'does not process incident issues due to error', http_status: 403
+ end
end
end
end
diff --git a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
index ef7e9cda9e0..96cddef4628 100644
--- a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
@@ -48,25 +48,37 @@ describe Projects::ContainerRepository::CleanupTagsService do
end
context 'when regex matching everything is specified' do
+ shared_examples 'removes all matches' do
+ it 'does remove B* and C' do
+ # The :A cannot be removed as config is shared with :latest
+ # The :E cannot be removed as it does not have valid manifest
+
+ expect_delete('sha256:configB').twice
+ expect_delete('sha256:configC')
+ expect_delete('sha256:configD')
+
+ is_expected.to include(status: :success, deleted: %w(D Bb Ba C))
+ end
+ end
+
let(:params) do
- { 'name_regex' => '.*' }
+ { 'name_regex_delete' => '.*' }
end
- it 'does remove B* and C' do
- # The :A cannot be removed as config is shared with :latest
- # The :E cannot be removed as it does not have valid manifest
+ it_behaves_like 'removes all matches'
- expect_delete('sha256:configB').twice
- expect_delete('sha256:configC')
- expect_delete('sha256:configD')
+ context 'with deprecated name_regex param' do
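+ # the deprecated name_regex param should still select tags for deletion when name_regex_delete is absent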
+ let(:params) do
+ { 'name_regex' => '.*' }
+ end
- is_expected.to include(status: :success, deleted: %w(D Bb Ba C))
+ it_behaves_like 'removes all matches'
end
end
- context 'when regex matching specific tags is used' do
+ context 'when delete regex matching specific tags is used' do
let(:params) do
- { 'name_regex' => 'C|D' }
+ { 'name_regex_delete' => 'C|D' }
end
it 'does remove C and D' do
@@ -75,11 +87,37 @@ describe Projects::ContainerRepository::CleanupTagsService do
is_expected.to include(status: :success, deleted: %w(D C))
end
+
+ context 'with overriding allow regex' do
+ let(:params) do
+ { 'name_regex_delete' => 'C|D',
+ 'name_regex_keep' => 'C' }
+ end
+
+ it 'does not remove C' do
+ expect_delete('sha256:configD')
+
+ is_expected.to include(status: :success, deleted: %w(D))
+ end
+ end
+
+ context 'with name_regex_delete overriding deprecated name_regex' do
+ let(:params) do
+ { 'name_regex' => 'C|D',
+ 'name_regex_delete' => 'D' }
+ end
+
+ it 'does not remove C' do
+ expect_delete('sha256:configD')
+
+ is_expected.to include(status: :success, deleted: %w(D))
+ end
+ end
end
context 'when removing a tagged image that is used by another tag' do
let(:params) do
- { 'name_regex' => 'Ba' }
+ { 'name_regex_delete' => 'Ba' }
end
it 'does not remove the tag' do
@@ -89,9 +127,23 @@ describe Projects::ContainerRepository::CleanupTagsService do
end
end
+ context 'with allow regex value' do
+ let(:params) do
+ { 'name_regex_delete' => '.*',
+ 'name_regex_keep' => 'B.*' }
+ end
+
+ it 'does not remove B*' do
+ expect_delete('sha256:configC')
+ expect_delete('sha256:configD')
+
+ is_expected.to include(status: :success, deleted: %w(D C))
+ end
+ end
+
context 'when removing keeping only 3' do
let(:params) do
- { 'name_regex' => '.*',
+ { 'name_regex_delete' => '.*',
'keep_n' => 3 }
end
@@ -104,7 +156,7 @@ describe Projects::ContainerRepository::CleanupTagsService do
context 'when removing older than 1 day' do
let(:params) do
- { 'name_regex' => '.*',
+ { 'name_regex_delete' => '.*',
'older_than' => '1 day' }
end
@@ -118,7 +170,7 @@ describe Projects::ContainerRepository::CleanupTagsService do
context 'when combining all parameters' do
let(:params) do
- { 'name_regex' => '.*',
+ { 'name_regex_delete' => '.*',
'keep_n' => 1,
'older_than' => '1 day' }
end
@@ -136,7 +188,7 @@ describe Projects::ContainerRepository::CleanupTagsService do
context 'with valid container_expiration_policy param' do
let(:params) do
- { 'name_regex' => '.*',
+ { 'name_regex_delete' => '.*',
'keep_n' => 1,
'older_than' => '1 day',
'container_expiration_policy' => true }
@@ -152,7 +204,7 @@ describe Projects::ContainerRepository::CleanupTagsService do
context 'without container_expiration_policy param' do
let(:params) do
- { 'name_regex' => '.*',
+ { 'name_regex_delete' => '.*',
'keep_n' => 1,
'older_than' => '1 day' }
end
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index a8e7919dc81..3c9914e2a89 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -6,7 +6,6 @@ describe Projects::CreateService, '#execute' do
include ExternalAuthorizationServiceHelpers
include GitHelpers
- let(:gitlab_shell) { Gitlab::Shell.new }
let(:user) { create :user }
let(:opts) do
{
@@ -264,8 +263,6 @@ describe Projects::CreateService, '#execute' do
end
context 'when another repository already exists on disk' do
- let(:repository_storage) { 'default' }
-
let(:opts) do
{
name: 'Existing',
@@ -274,13 +271,15 @@ describe Projects::CreateService, '#execute' do
end
context 'with legacy storage' do
+ let(:fake_repo_path) { File.join(TestEnv.repos_path, user.namespace.full_path, 'existing.git') }
+
before do
stub_application_setting(hashed_storage_enabled: false)
- gitlab_shell.create_repository(repository_storage, "#{user.namespace.full_path}/existing", 'group/project')
+ TestEnv.create_bare_repository(fake_repo_path)
end
after do
- gitlab_shell.remove_repository(repository_storage, "#{user.namespace.full_path}/existing")
+ FileUtils.rm_rf(fake_repo_path)
end
it 'does not allow to create a project when path matches existing repository on disk' do
@@ -305,17 +304,15 @@ describe Projects::CreateService, '#execute' do
context 'with hashed storage' do
let(:hash) { '6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b' }
let(:hashed_path) { '@hashed/6b/86/6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b' }
+ let(:fake_repo_path) { File.join(TestEnv.repos_path, "#{hashed_path}.git") }
before do
allow(Digest::SHA2).to receive(:hexdigest) { hash }
- end
-
- before do
- gitlab_shell.create_repository(repository_storage, hashed_path, 'group/project')
+ TestEnv.create_bare_repository(fake_repo_path)
end
after do
- gitlab_shell.remove_repository(repository_storage, hashed_path)
+ FileUtils.rm_rf(fake_repo_path)
end
it 'does not allow to create a project when path matches existing repository on disk' do
@@ -344,7 +341,7 @@ describe Projects::CreateService, '#execute' do
context 'when there is an active service template' do
before do
- create(:service, project: nil, template: true, active: true)
+ create(:prometheus_service, project: nil, template: true, active: true)
end
it 'creates a service from this template' do
@@ -394,6 +391,67 @@ describe Projects::CreateService, '#execute' do
expect(rugged.config['gitlab.fullpath']).to eq project.full_path
end
+ context 'when project has access to shared service' do
+ context 'Prometheus application is shared via group cluster' do
+ let(:cluster) { create(:cluster, :group, groups: [group]) }
+ let(:group) do
+ create(:group).tap do |group|
+ group.add_owner(user)
+ end
+ end
+
+ before do
+ create(:clusters_applications_prometheus, :installed, cluster: cluster)
+ end
+
+ it 'creates PrometheusService record', :aggregate_failures do
+ project = create_project(user, opts.merge!(namespace_id: group.id))
+ service = project.prometheus_service
+
+ expect(service.active).to be true
+ expect(service.manual_configuration?).to be false
+ expect(service.persisted?).to be true
+ end
+ end
+
+ context 'Prometheus application is shared via instance cluster' do
+ let(:cluster) { create(:cluster, :instance) }
+
+ before do
+ create(:clusters_applications_prometheus, :installed, cluster: cluster)
+ end
+
+ it 'creates PrometheusService record', :aggregate_failures do
+ project = create_project(user, opts)
+ service = project.prometheus_service
+
+ expect(service.active).to be true
+ expect(service.manual_configuration?).to be false
+ expect(service.persisted?).to be true
+ end
+
+ it 'cleans invalid record and logs warning', :aggregate_failures do
+ invalid_service_record = build(:prometheus_service, properties: { api_url: nil, manual_configuration: true }.to_json)
+ allow_next_instance_of(Project) do |instance|
+ allow(instance).to receive(:build_prometheus_service).and_return(invalid_service_record)
+ end
+
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(an_instance_of(ActiveRecord::RecordInvalid), include(extra: { project_id: a_kind_of(Integer) }))
+ project = create_project(user, opts)
+
+ expect(project.prometheus_service).to be_nil
+ end
+ end
+
+ context 'shared Prometheus application is not available' do
+ it 'does not persist PrometheusService record', :aggregate_failures do
+ project = create_project(user, opts)
+
+ expect(project.prometheus_service).to be_nil
+ end
+ end
+ end
+
context 'with external authorization enabled' do
before do
enable_external_authorization_service_check
diff --git a/spec/services/projects/deploy_tokens/create_service_spec.rb b/spec/services/projects/deploy_tokens/create_service_spec.rb
new file mode 100644
index 00000000000..5c3ada8af4e
--- /dev/null
+++ b/spec/services/projects/deploy_tokens/create_service_spec.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::DeployTokens::CreateService do
+ it_behaves_like 'a deploy token creation service' do
+ let(:entity) { create(:project) }
+ let(:deploy_token_class) { ProjectDeployToken }
+ end
+end
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index 21a65f361a9..58c40d04fe9 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -124,7 +124,7 @@ describe Projects::DestroyService do
allow(project.repository).to receive(:before_delete).and_raise(::Gitlab::Git::CommandError)
allow(Gitlab::GitLogger).to receive(:warn).with(
class: Repositories::DestroyService.name,
- project_id: project.id,
+ container_id: project.id,
disk_path: project.disk_path,
message: 'Gitlab::Git::CommandError').and_call_original
end
@@ -338,6 +338,39 @@ describe Projects::DestroyService do
end
end
+ context 'snippets' do
+ let!(:snippet1) { create(:project_snippet, project: project, author: user) }
+ let!(:snippet2) { create(:project_snippet, project: project, author: user) }
+
+ it 'does not include snippets when deleting in batches' do
+ expect(project).to receive(:destroy_dependent_associations_in_batches).with({ exclude: [:container_repositories, :snippets] })
+
+ destroy_project(project, user)
+ end
+
+ it 'calls the bulk snippet destroy service' do
+ expect(project.snippets.count).to eq 2
+
+ expect(Snippets::BulkDestroyService).to receive(:new)
+ .with(user, project.snippets).and_call_original
+
+ expect do
+ destroy_project(project, user)
+ end.to change(Snippet, :count).by(-2)
+ end
+
+ context 'when an error is raised deleting snippets' do
+ it 'does not delete project' do
+ allow_next_instance_of(Snippets::BulkDestroyService) do |instance|
+ allow(instance).to receive(:execute).and_return(ServiceResponse.error(message: 'foo'))
+ end
+
+ expect(destroy_project(project, user)).to be_falsey
+ expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_truthy
+ end
+ end
+ end
+
def destroy_project(project, user, params = {})
described_class.new(project, user, params).public_send(async ? :async_execute : :execute)
end
diff --git a/spec/services/projects/fork_service_spec.rb b/spec/services/projects/fork_service_spec.rb
index e14f1abf018..55987c6fa0f 100644
--- a/spec/services/projects/fork_service_spec.rb
+++ b/spec/services/projects/fork_service_spec.rb
@@ -4,7 +4,6 @@ require 'spec_helper'
describe Projects::ForkService do
include ProjectForksHelper
- include Gitlab::ShellAdapter
shared_examples 'forks count cache refresh' do
it 'flushes the forks count cache of the source project', :clean_gitlab_redis_cache do
@@ -135,17 +134,16 @@ describe Projects::ForkService do
end
context 'repository in legacy storage already exists' do
- let(:repository_storage) { 'default' }
- let(:repository_storage_path) { Gitlab.config.repositories.storages[repository_storage].legacy_disk_path }
+ let(:fake_repo_path) { File.join(TestEnv.repos_path, @to_user.namespace.full_path, "#{@from_project.path}.git") }
let(:params) { { namespace: @to_user.namespace } }
before do
stub_application_setting(hashed_storage_enabled: false)
- gitlab_shell.create_repository(repository_storage, "#{@to_user.namespace.full_path}/#{@from_project.path}", "#{@to_user.namespace.full_path}/#{@from_project.path}")
+ TestEnv.create_bare_repository(fake_repo_path)
end
after do
- gitlab_shell.remove_repository(repository_storage, "#{@to_user.namespace.full_path}/#{@from_project.path}")
+ FileUtils.rm_rf(fake_repo_path)
end
subject { fork_project(@from_project, @to_user, params) }
@@ -275,6 +273,7 @@ describe Projects::ForkService do
context 'fork project for group when user not owner' do
it 'group developer fails to fork project into the group' do
to_project = fork_project(@project, @developer, @opts)
+
expect(to_project.errors[:namespace]).to eq(['is not valid'])
end
end
@@ -306,6 +305,31 @@ describe Projects::ForkService do
end
end
+ context 'when a project is already forked' do
+ it 'creates a new pool repository after the project is moved to a new shard' do
+ project = create(:project, :public, :repository)
+ fork_before_move = fork_project(project)
+
+ # Stub everything required to move a project to a Gitaly shard that does not exist
+ stub_storage_settings('test_second_storage' => { 'path' => TestEnv::SECOND_STORAGE_PATH })
+ allow_any_instance_of(Gitlab::Git::Repository).to receive(:create_repository)
+ .and_return(true)
+ allow_any_instance_of(Gitlab::Git::Repository).to receive(:replicate)
+ allow_any_instance_of(Gitlab::Git::Repository).to receive(:checksum)
+ .and_return(::Gitlab::Git::BLANK_SHA)
+
+ Projects::UpdateRepositoryStorageService.new(project).execute('test_second_storage')
+ fork_after_move = fork_project(project)
+ pool_repository_before_move = PoolRepository.joins(:shard)
+ .where(source_project: project, shards: { name: 'default' }).first
+ pool_repository_after_move = PoolRepository.joins(:shard)
+ .where(source_project: project, shards: { name: 'test_second_storage' }).first
+
+ expect(fork_before_move.pool_repository).to eq(pool_repository_before_move)
+ expect(fork_after_move.pool_repository).to eq(pool_repository_after_move)
+ end
+ end
+
context 'when forking with object pools' do
let(:fork_from_project) { create(:project, :public) }
let(:forker) { create(:user) }
@@ -336,7 +360,9 @@ describe Projects::ForkService do
context 'when linking fork to an existing project' do
let(:fork_from_project) { create(:project, :public) }
let(:fork_to_project) { create(:project, :public) }
- let(:user) { create(:user) }
+ let(:user) do
+ create(:user).tap { |u| fork_to_project.add_maintainer(u) }
+ end
subject { described_class.new(fork_from_project, user) }
@@ -387,4 +413,54 @@ describe Projects::ForkService do
end
end
end
+
+ describe '#valid_fork_targets' do
+ let(:finder_mock) { instance_double('ForkTargetsFinder', execute: ['finder_return_value']) }
+ let(:current_user) { instance_double('User') }
+ let(:project) { instance_double('Project') }
+
+ before do
+ allow(ForkTargetsFinder).to receive(:new).with(project, current_user).and_return(finder_mock)
+ end
+
+ it 'returns whatever finder returns' do
+ expect(described_class.new(project, current_user).valid_fork_targets).to eq ['finder_return_value']
+ end
+ end
+
+ describe '#valid_fork_target?' do
+ subject { described_class.new(project, user, params).valid_fork_target? }
+
+ let(:project) { Project.new }
+ let(:params) { {} }
+
+ context 'when current user is an admin' do
+ let(:user) { build(:user, :admin) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when current_user is not an admin' do
+ let(:user) { create(:user) }
+
+ let(:finder_mock) { instance_double('ForkTargetsFinder', execute: [user.namespace]) }
+ let(:project) { create(:project) }
+
+ before do
+ allow(ForkTargetsFinder).to receive(:new).with(project, user).and_return(finder_mock)
+ end
+
+ context 'when target namespace is in valid fork targets' do
+ let(:params) { { namespace: user.namespace } }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when target namespace is not in valid fork targets' do
+ let(:params) { { namespace: create(:group) } }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+ end
end
diff --git a/spec/services/projects/import_export/export_service_spec.rb b/spec/services/projects/import_export/export_service_spec.rb
index 906fef6edf5..1315ae26322 100644
--- a/spec/services/projects/import_export/export_service_spec.rb
+++ b/spec/services/projects/import_export/export_service_spec.rb
@@ -26,10 +26,28 @@ describe Projects::ImportExport::ExportService do
service.execute
end
- it 'saves the models' do
- expect(Gitlab::ImportExport::ProjectTreeSaver).to receive(:new).and_call_original
+ context 'when :streaming_serializer feature is enabled' do
+ before do
+ stub_feature_flags(streaming_serializer: true)
+ end
- service.execute
+ it 'saves the models' do
+ expect(Gitlab::ImportExport::Project::TreeSaver).to receive(:new).and_call_original
+
+ service.execute
+ end
+ end
+
+ context 'when :streaming_serializer feature is disabled' do
+ before do
+ stub_feature_flags(streaming_serializer: false)
+ end
+
+ it 'saves the models' do
+ expect(Gitlab::ImportExport::Project::LegacyTreeSaver).to receive(:new).and_call_original
+
+ service.execute
+ end
end
it 'saves the uploads' do
@@ -64,6 +82,14 @@ describe Projects::ImportExport::ExportService do
service.execute
end
+ it 'saves the snippets' do
+ expect_next_instance_of(Gitlab::ImportExport::SnippetsRepoSaver) do |instance|
+ expect(instance).to receive(:save).and_call_original
+ end
+
+ service.execute
+ end
+
context 'when all saver services succeed' do
before do
allow(service).to receive(:save_services).and_return(true)
@@ -91,10 +117,10 @@ describe Projects::ImportExport::ExportService do
end
it 'removes the remaining exported data' do
- allow(shared).to receive(:export_path).and_return('whatever')
+ allow(shared).to receive(:archive_path).and_return('whatever')
allow(FileUtils).to receive(:rm_rf)
- expect(FileUtils).to receive(:rm_rf).with(shared.export_path)
+ expect(FileUtils).to receive(:rm_rf).with(shared.archive_path)
end
it 'notifies the user' do
@@ -121,10 +147,10 @@ describe Projects::ImportExport::ExportService do
end
it 'removes the remaining exported data' do
- allow(shared).to receive(:export_path).and_return('whatever')
+ allow(shared).to receive(:archive_path).and_return('whatever')
allow(FileUtils).to receive(:rm_rf)
- expect(FileUtils).to receive(:rm_rf).with(shared.export_path)
+ expect(FileUtils).to receive(:rm_rf).with(shared.archive_path)
end
it 'notifies the user' do
@@ -142,6 +168,21 @@ describe Projects::ImportExport::ExportService do
end
end
+ context 'when one of the savers fail unexpectedly' do
+ let(:archive_path) { shared.archive_path }
+
+ before do
+ allow(service).to receive_message_chain(:uploads_saver, :save).and_return(false)
+ end
+
+ it 'removes the remaining exported data' do
+ expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
+
+ expect(project.import_export_upload).to be_nil
+ expect(File.exist?(shared.archive_path)).to eq(false)
+ end
+ end
+
context 'when user does not have admin_project permission' do
let!(:another_user) { create(:user) }
@@ -149,7 +190,7 @@ describe Projects::ImportExport::ExportService do
it 'fails' do
expected_message =
- "User with ID: %s does not have permission to Project %s with ID: %s." %
+ "User with ID: %s does not have required permissions for Project: %s with ID: %s" %
[another_user.id, project.name, project.id]
expect { service.execute }.to raise_error(Gitlab::ImportExport::Error).with_message(expected_message)
end
diff --git a/spec/services/projects/import_service_spec.rb b/spec/services/projects/import_service_spec.rb
index d9f9ede8ecd..1e9ac40128a 100644
--- a/spec/services/projects/import_service_spec.rb
+++ b/spec/services/projects/import_service_spec.rb
@@ -122,7 +122,7 @@ describe Projects::ImportService do
end
it 'succeeds if repository import is successful' do
- expect_any_instance_of(Gitlab::Shell).to receive(:import_repository).and_return(true)
+ expect(project.repository).to receive(:import_repository).and_return(true)
expect_any_instance_of(Gitlab::BitbucketImport::Importer).to receive(:execute).and_return(true)
expect_any_instance_of(Projects::LfsPointers::LfsImportService).to receive(:execute).and_return(status: :success)
@@ -132,7 +132,9 @@ describe Projects::ImportService do
end
it 'fails if repository import fails' do
- expect_any_instance_of(Gitlab::Shell).to receive(:import_repository).and_raise(Gitlab::Shell::Error.new('Failed to import the repository /a/b/c'))
+ expect(project.repository)
+ .to receive(:import_repository)
+ .and_raise(Gitlab::Git::CommandError, 'Failed to import the repository /a/b/c')
result = subject.execute
@@ -144,7 +146,7 @@ describe Projects::ImportService do
it 'logs the error' do
error_message = 'error message'
- expect_any_instance_of(Gitlab::Shell).to receive(:import_repository).and_return(true)
+ expect(project.repository).to receive(:import_repository).and_return(true)
expect_any_instance_of(Gitlab::BitbucketImport::Importer).to receive(:execute).and_return(true)
expect_any_instance_of(Projects::LfsPointers::LfsImportService).to receive(:execute).and_return(status: :error, message: error_message)
expect(Gitlab::AppLogger).to receive(:error).with("The Lfs import process failed. #{error_message}")
@@ -155,7 +157,7 @@ describe Projects::ImportService do
context 'when repository import scheduled' do
before do
- allow_any_instance_of(Gitlab::Shell).to receive(:import_repository).and_return(true)
+ expect(project.repository).to receive(:import_repository).and_return(true)
allow(subject).to receive(:import_data)
end
diff --git a/spec/services/projects/lfs_pointers/lfs_download_link_list_service_spec.rb b/spec/services/projects/lfs_pointers/lfs_download_link_list_service_spec.rb
index dabfd61d3f5..99d35fdc7f7 100644
--- a/spec/services/projects/lfs_pointers/lfs_download_link_list_service_spec.rb
+++ b/spec/services/projects/lfs_pointers/lfs_download_link_list_service_spec.rb
@@ -8,18 +8,27 @@ describe Projects::LfsPointers::LfsDownloadLinkListService do
let(:new_oids) { { 'oid1' => 123, 'oid2' => 125 } }
let(:remote_uri) { URI.parse(lfs_endpoint) }
- let(:objects_response) do
- body = new_oids.map do |oid, size|
+ let(:request_object) { HTTParty::Request.new(Net::HTTP::Post, '/') }
+ let(:parsed_block) { lambda {} }
+ let(:success_net_response) { Net::HTTPOK.new('', '', '') }
+ let(:response) { Gitlab::HTTP::Response.new(request_object, net_response, parsed_block) }
+
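+ # helpers that wrap stubbed Net::HTTP responses in real Gitlab::HTTP::Response objects so both success and error paths are exercised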
+ def objects_response(oids)
+ body = oids.map do |oid, size|
{
- 'oid' => oid,
- 'size' => size,
+ 'oid' => oid, 'size' => size,
'actions' => {
'download' => { 'href' => "#{import_url}/gitlab-lfs/objects/#{oid}" }
}
}
end
- Struct.new(:success?, :objects).new(true, body)
+ Struct.new(:success?, :objects).new(true, body).to_json
+ end
+
+ def custom_response(net_response, body = nil)
+ allow(net_response).to receive(:body).and_return(body)
+ Gitlab::HTTP::Response.new(request_object, net_response, parsed_block)
end
let(:invalid_object_response) do
@@ -33,9 +42,8 @@ describe Projects::LfsPointers::LfsDownloadLinkListService do
before do
allow(project).to receive(:lfs_enabled?).and_return(true)
- response = instance_double(Gitlab::HTTP::Response)
- allow(response).to receive(:body).and_return(objects_response.to_json)
- allow(response).to receive(:success?).and_return(true)
+
+ response = custom_response(success_net_response, objects_response(new_oids))
allow(Gitlab::HTTP).to receive(:post).and_return(response)
end
@@ -46,6 +54,102 @@ describe Projects::LfsPointers::LfsDownloadLinkListService do
end
end
+ context 'when lfs objects size is larger than the batch size' do
+ def stub_successful_request(batch)
+ response = custom_response(success_net_response, objects_response(batch))
+ stub_request(batch, response)
+ end
+
+ def stub_entity_too_large_error_request(batch)
+ entity_too_large_net_response = Net::HTTPRequestEntityTooLarge.new('', '', '')
+ response = custom_response(entity_too_large_net_response)
+ stub_request(batch, response)
+ end
+
+ def stub_request(batch, response)
+ expect(Gitlab::HTTP).to receive(:post).with(
+ remote_uri,
+ {
+ body: { operation: 'download', objects: batch.map { |k, v| { oid: k, size: v } } }.to_json,
+ headers: subject.send(:headers)
+ }
+ ).and_return(response)
+ end
+
+ let(:new_oids) { { 'oid1' => 123, 'oid2' => 125, 'oid3' => 126, 'oid4' => 127, 'oid5' => 128 } }
+
+ context 'when batch size' do
+ before do
+ stub_const("#{described_class.name}::REQUEST_BATCH_SIZE", 2)
+
+ data = new_oids.to_a
+ stub_successful_request([data[0], data[1]])
+ stub_successful_request([data[2], data[3]])
+ stub_successful_request([data[4]])
+ end
+
+ it 'retrieves them in batches' do
+ subject.execute(new_oids).each do |lfs_download_object|
+ expect(lfs_download_object.link).to eq "#{import_url}/gitlab-lfs/objects/#{lfs_download_object.oid}"
+ end
+ end
+ end
+
+ context 'when request fails with PayloadTooLarge error' do
+ let(:error_class) { described_class::DownloadLinksRequestEntityTooLargeError }
+
+ context 'when the smaller batch eventually works' do
+ before do
+ stub_const("#{described_class.name}::REQUEST_BATCH_SIZE", 5)
+
+ data = new_oids.to_a
+
+ # with the batch size of 5
+ stub_entity_too_large_error_request(data)
+
+ # with the batch size of 2
+ stub_successful_request([data[0], data[1]])
+ stub_successful_request([data[2], data[3]])
+ stub_successful_request([data[4]])
+ end
+
+ it 'retrieves them eventually and logs exceptions' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ an_instance_of(error_class), project_id: project.id, batch_size: 5, oids_count: 5
+ )
+
+ subject.execute(new_oids).each do |lfs_download_object|
+ expect(lfs_download_object.link).to eq "#{import_url}/gitlab-lfs/objects/#{lfs_download_object.oid}"
+ end
+ end
+ end
+
+ context 'when batch size cannot be any smaller' do
+ before do
+ stub_const("#{described_class.name}::REQUEST_BATCH_SIZE", 5)
+
+ data = new_oids.to_a
+
+ # with the batch size of 5
+ stub_entity_too_large_error_request(data)
+
+ # with the batch size of 2
+ stub_entity_too_large_error_request([data[0], data[1]])
+ end
+
+ it 'raises an error and logs exceptions' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ an_instance_of(error_class), project_id: project.id, batch_size: 5, oids_count: 5
+ )
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ an_instance_of(error_class), project_id: project.id, batch_size: 2, oids_count: 5
+ )
+ expect { subject.execute(new_oids) }.to raise_error(described_class::DownloadLinksError)
+ end
+ end
+ end
+ end
+
context 'credentials' do
context 'when the download link and the lfs_endpoint have the same host' do
context 'when lfs_endpoint has credentials' do
@@ -87,17 +191,22 @@ describe Projects::LfsPointers::LfsDownloadLinkListService do
end
describe '#get_download_links' do
- it 'raise error if request fails' do
- allow(Gitlab::HTTP).to receive(:post).and_return(Struct.new(:success?, :message).new(false, 'Failed request'))
+ context 'if request fails' do
+ before do
+ request_timeout_net_response = Net::HTTPRequestTimeout.new('', '', '')
+ response = custom_response(request_timeout_net_response)
+ allow(Gitlab::HTTP).to receive(:post).and_return(response)
+ end
- expect { subject.send(:get_download_links, new_oids) }.to raise_error(described_class::DownloadLinksError)
+ it 'raises an error' do
+ expect { subject.send(:get_download_links, new_oids) }.to raise_error(described_class::DownloadLinksError)
+ end
end
shared_examples 'JSON parse errors' do |body|
- it 'raises error' do
- response = instance_double(Gitlab::HTTP::Response)
+ it 'raises an error' do
+ response = custom_response(success_net_response)
allow(response).to receive(:body).and_return(body)
- allow(response).to receive(:success?).and_return(true)
allow(Gitlab::HTTP).to receive(:post).and_return(response)
expect { subject.send(:get_download_links, new_oids) }.to raise_error(described_class::DownloadLinksError)
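Illustrative only: the batching-and-retry shape the new specs above exercise — split the OIDs into fixed-size chunks, and when a chunk is rejected with HTTP 413 retry it at a smaller batch size until it either succeeds or can shrink no further. A minimal, self-contained Ruby sketch; the names and the stubbed post_batch helper are hypothetical, not the service's real API:

Response = Struct.new(:code, :links)

# Stub standing in for the real HTTP call: pretend any batch of more than
# two objects is rejected with 413 Payload Too Large.
def post_batch(batch)
  return Response.new(413, []) if batch.size > 2

  Response.new(200, batch.map { |oid, _size| "https://example.test/gitlab-lfs/objects/#{oid}" })
end

def download_links_in_batches(oids, batch_size)
  oids.each_slice(batch_size).flat_map do |batch|
    response = post_batch(batch)

    if response.code == 413
      raise 'batch size cannot be reduced any further' if batch_size <= 1

      # Retry the same objects with a smaller batch size.
      download_links_in_batches(batch, batch_size / 2)
    else
      response.links
    end
  end
end

oids = { 'oid1' => 123, 'oid2' => 125, 'oid3' => 126, 'oid4' => 127, 'oid5' => 128 }
puts download_links_in_batches(oids, 5)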
diff --git a/spec/services/projects/lfs_pointers/lfs_link_service_spec.rb b/spec/services/projects/lfs_pointers/lfs_link_service_spec.rb
index aca59079b3c..b64662f3782 100644
--- a/spec/services/projects/lfs_pointers/lfs_link_service_spec.rb
+++ b/spec/services/projects/lfs_pointers/lfs_link_service_spec.rb
@@ -60,8 +60,8 @@ describe Projects::LfsPointers::LfsLinkService do
stub_const("#{described_class}::BATCH_SIZE", 1)
oids = %w(one two)
- expect(LfsObject).to receive(:where).with(oid: %w(one)).once.and_call_original
- expect(LfsObject).to receive(:where).with(oid: %w(two)).once.and_call_original
+ expect(LfsObject).to receive(:for_oids).with(%w(one)).once.and_call_original
+ expect(LfsObject).to receive(:for_oids).with(%w(two)).once.and_call_original
subject.execute(oids)
end
diff --git a/spec/services/projects/lsif_data_service_spec.rb b/spec/services/projects/lsif_data_service_spec.rb
index 93579869d1d..4866f848121 100644
--- a/spec/services/projects/lsif_data_service_spec.rb
+++ b/spec/services/projects/lsif_data_service_spec.rb
@@ -7,9 +7,8 @@ describe Projects::LsifDataService do
let(:project) { build_stubbed(:project) }
let(:path) { 'main.go' }
let(:commit_id) { Digest::SHA1.hexdigest(SecureRandom.hex) }
- let(:params) { { path: path, commit_id: commit_id } }
- let(:service) { described_class.new(artifact.file, project, params) }
+ let(:service) { described_class.new(artifact.file, project, commit_id) }
describe '#execute' do
def highlighted_value(value)
@@ -18,7 +17,7 @@ describe Projects::LsifDataService do
context 'fetched lsif file', :use_clean_rails_memory_store_caching do
it 'is cached' do
- service.execute
+ service.execute(path)
cached_data = Rails.cache.fetch("project:#{project.id}:lsif:#{commit_id}")
@@ -30,7 +29,7 @@ describe Projects::LsifDataService do
let(:path_prefix) { "/#{project.full_path}/-/blob/#{commit_id}" }
it 'returns lsif ranges for the file' do
- expect(service.execute).to eq([
+ expect(service.execute(path)).to eq([
{
end_char: 9,
end_line: 6,
@@ -87,7 +86,7 @@ describe Projects::LsifDataService do
let(:path) { 'morestrings/reverse.go' }
it 'returns lsif ranges for the file' do
- expect(service.execute.first).to eq({
+ expect(service.execute(path).first).to eq({
end_char: 2,
end_line: 11,
start_char: 1,
@@ -102,7 +101,7 @@ describe Projects::LsifDataService do
let(:path) { 'unknown.go' }
it 'returns nil' do
- expect(service.execute).to eq(nil)
+ expect(service.execute(path)).to eq(nil)
end
end
end
@@ -120,9 +119,7 @@ describe Projects::LsifDataService do
end
it 'fetches the document with the shortest absolute path' do
- service.instance_variable_set(:@docs, docs)
-
- expect(service.__send__(:doc_id)).to eq(3)
+ expect(service.__send__(:find_doc_id, docs, path)).to eq(3)
end
end
end
diff --git a/spec/services/projects/operations/update_service_spec.rb b/spec/services/projects/operations/update_service_spec.rb
index 182906a3337..de028ecb693 100644
--- a/spec/services/projects/operations/update_service_spec.rb
+++ b/spec/services/projects/operations/update_service_spec.rb
@@ -298,55 +298,28 @@ describe Projects::Operations::UpdateService do
manual_configuration: "0"
})
end
- let(:prometheus_params) do
- {
- "type" => "PrometheusService",
- "title" => nil,
- "active" => true,
- "properties" => { "api_url" => "http://example.prometheus.com", "manual_configuration" => "0" },
- "push_events" => true,
- "issues_events" => true,
- "merge_requests_events" => true,
- "tag_push_events" => true,
- "note_events" => true,
- "category" => "monitoring",
- "default" => false,
- "wiki_page_events" => true,
- "pipeline_events" => true,
- "confidential_issues_events" => true,
- "commit_events" => true,
- "job_events" => true,
- "confidential_note_events" => true,
- "deployment_events" => false,
- "description" => nil,
- "comment_on_event_enabled" => true,
- "template" => false
- }
- end
let(:params) do
{
prometheus_integration_attributes: {
- api_url: 'http://new.prometheus.com',
- manual_configuration: '1'
+ 'api_url' => 'http://new.prometheus.com',
+ 'manual_configuration' => '1'
}
}
end
it 'uses Project#find_or_initialize_service to include instance defined defaults and pass them to Projects::UpdateService', :aggregate_failures do
project_update_service = double(Projects::UpdateService)
- prometheus_update_params = prometheus_params.merge('properties' => {
- 'api_url' => 'http://new.prometheus.com',
- 'manual_configuration' => '1'
- })
expect(project)
.to receive(:find_or_initialize_service)
.with('prometheus')
.and_return(prometheus_service)
- expect(Projects::UpdateService)
- .to receive(:new)
- .with(project, user, { prometheus_service_attributes: prometheus_update_params })
- .and_return(project_update_service)
+ expect(Projects::UpdateService).to receive(:new) do |project_arg, user_arg, update_params_hash|
+ expect(project_arg).to eq project
+ expect(user_arg).to eq user
+ expect(update_params_hash[:prometheus_service_attributes]).to include('properties' => { 'api_url' => 'http://new.prometheus.com', 'manual_configuration' => '1' })
+ expect(update_params_hash[:prometheus_service_attributes]).not_to include(*%w(id project_id created_at updated_at))
+ end.and_return(project_update_service)
expect(project_update_service).to receive(:execute)
subject.execute
diff --git a/spec/services/projects/protect_default_branch_service_spec.rb b/spec/services/projects/protect_default_branch_service_spec.rb
index c145b2c06c6..c0b819ab17b 100644
--- a/spec/services/projects/protect_default_branch_service_spec.rb
+++ b/spec/services/projects/protect_default_branch_service_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
describe Projects::ProtectDefaultBranchService do
let(:service) { described_class.new(project) }
- let(:project) { instance_spy(Project) }
+ let(:project) { create(:project) }
describe '#execute' do
before do
@@ -147,7 +147,7 @@ describe Projects::ProtectDefaultBranchService do
describe '#protect_branch?' do
context 'when default branch protection is disabled' do
it 'returns false' do
- allow(Gitlab::CurrentSettings)
+ allow(project.namespace)
.to receive(:default_branch_protection)
.and_return(Gitlab::Access::PROTECTION_NONE)
@@ -157,7 +157,7 @@ describe Projects::ProtectDefaultBranchService do
context 'when default branch protection is enabled' do
before do
- allow(Gitlab::CurrentSettings)
+ allow(project.namespace)
.to receive(:default_branch_protection)
.and_return(Gitlab::Access::PROTECTION_DEV_CAN_MERGE)
@@ -199,7 +199,7 @@ describe Projects::ProtectDefaultBranchService do
describe '#push_access_level' do
context 'when developers can push' do
it 'returns the DEVELOPER access level' do
- allow(Gitlab::CurrentSettings)
+ allow(project.namespace)
.to receive(:default_branch_protection)
.and_return(Gitlab::Access::PROTECTION_DEV_CAN_PUSH)
@@ -209,7 +209,7 @@ describe Projects::ProtectDefaultBranchService do
context 'when developers can not push' do
it 'returns the MAINTAINER access level' do
- allow(Gitlab::CurrentSettings)
+ allow(project.namespace)
.to receive(:default_branch_protection)
.and_return(Gitlab::Access::PROTECTION_DEV_CAN_MERGE)
@@ -221,7 +221,7 @@ describe Projects::ProtectDefaultBranchService do
describe '#merge_access_level' do
context 'when developers can merge' do
it 'returns the DEVELOPER access level' do
- allow(Gitlab::CurrentSettings)
+ allow(project.namespace)
.to receive(:default_branch_protection)
.and_return(Gitlab::Access::PROTECTION_DEV_CAN_MERGE)
@@ -231,7 +231,7 @@ describe Projects::ProtectDefaultBranchService do
context 'when developers can not merge' do
it 'returns the MAINTAINER access level' do
- allow(Gitlab::CurrentSettings)
+ allow(project.namespace)
.to receive(:default_branch_protection)
.and_return(Gitlab::Access::PROTECTION_DEV_CAN_PUSH)
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index fe31dafdd03..f17ddb22d22 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -5,7 +5,6 @@ require 'spec_helper'
describe Projects::TransferService do
include GitHelpers
- let(:gitlab_shell) { Gitlab::Shell.new }
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:project) { create(:project, :repository, :legacy_storage, namespace: user.namespace) }
@@ -104,7 +103,7 @@ describe Projects::TransferService do
it 'rolls back repo location' do
attempt_project_transfer
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{project.disk_path}.git")).to be(true)
+ expect(project.repository.raw.exists?).to be(true)
expect(original_path).to eq current_path
end
@@ -172,21 +171,18 @@ describe Projects::TransferService do
end
context 'namespace which contains orphan repository with same projects path name' do
- let(:repository_storage) { 'default' }
- let(:repository_storage_path) { Gitlab.config.repositories.storages[repository_storage].legacy_disk_path }
+ let(:fake_repo_path) { File.join(TestEnv.repos_path, group.full_path, "#{project.path}.git") }
before do
group.add_owner(user)
- unless gitlab_shell.create_repository(repository_storage, "#{group.full_path}/#{project.path}", project.full_path)
- raise 'failed to add repository'
- end
+ TestEnv.create_bare_repository(fake_repo_path)
@result = transfer_project(project, user, group)
end
after do
- gitlab_shell.remove_repository(repository_storage, "#{group.full_path}/#{project.path}")
+ FileUtils.rm_rf(fake_repo_path)
end
it { expect(@result).to eq false }
diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb
index 52ec80c252b..f561a303be4 100644
--- a/spec/services/projects/update_pages_service_spec.rb
+++ b/spec/services/projects/update_pages_service_spec.rb
@@ -82,6 +82,9 @@ describe Projects::UpdatePagesService do
expect(execute).not_to eq(:success)
expect(project.pages_metadatum).not_to be_deployed
+
+ expect(deploy_status).to be_failed
+ expect(deploy_status.description).to eq('build SHA is outdated for this ref')
end
context 'when using empty file' do
diff --git a/spec/services/projects/update_repository_storage_service_spec.rb b/spec/services/projects/update_repository_storage_service_spec.rb
new file mode 100644
index 00000000000..23ce6f9165d
--- /dev/null
+++ b/spec/services/projects/update_repository_storage_service_spec.rb
@@ -0,0 +1,127 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::UpdateRepositoryStorageService do
+ include Gitlab::ShellAdapter
+
+ subject { described_class.new(project) }
+
+ describe "#execute" do
+ let(:time) { Time.now }
+
+ before do
+ allow(Time).to receive(:now).and_return(time)
+ end
+
+ context 'without wiki and design repository' do
+ let(:project) { create(:project, :repository, repository_read_only: true, wiki_enabled: false) }
+ let!(:checksum) { project.repository.checksum }
+ let(:project_repository_double) { double(:repository) }
+
+ before do
+ allow(Gitlab::Git::Repository).to receive(:new).and_call_original
+ allow(Gitlab::Git::Repository).to receive(:new)
+ .with('test_second_storage', project.repository.raw.relative_path, project.repository.gl_repository, project.repository.full_path)
+ .and_return(project_repository_double)
+ end
+
+ context 'when the move succeeds' do
+ it 'moves the repository to the new storage and unmarks the repository as read only' do
+ old_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
+ project.repository.path_to_repo
+ end
+
+ expect(project_repository_double).to receive(:create_repository)
+ .and_return(true)
+ expect(project_repository_double).to receive(:replicate)
+ .with(project.repository.raw)
+ expect(project_repository_double).to receive(:checksum)
+ .and_return(checksum)
+
+ result = subject.execute('test_second_storage')
+
+ expect(result[:status]).to eq(:success)
+ expect(project).not_to be_repository_read_only
+ expect(project.repository_storage).to eq('test_second_storage')
+ expect(gitlab_shell.repository_exists?('default', old_path)).to be(false)
+ expect(project.project_repository.shard_name).to eq('test_second_storage')
+ end
+ end
+
+ context 'when the project is already on the target storage' do
+ it 'bails out and does nothing' do
+ result = subject.execute(project.repository_storage)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to match(/repository and source have the same storage/)
+ end
+ end
+
+ context 'when the move fails' do
+ it 'unmarks the repository as read-only without updating the repository storage' do
+ expect(project_repository_double).to receive(:create_repository)
+ .and_return(true)
+ expect(project_repository_double).to receive(:replicate)
+ .with(project.repository.raw)
+ .and_raise(Gitlab::Git::CommandError)
+ expect(GitlabShellWorker).not_to receive(:perform_async)
+
+ result = subject.execute('test_second_storage')
+
+ expect(result[:status]).to eq(:error)
+ expect(project).not_to be_repository_read_only
+ expect(project.repository_storage).to eq('default')
+ end
+ end
+
+ context 'when the checksum does not match' do
+ it 'unmarks the repository as read-only without updating the repository storage' do
+ expect(project_repository_double).to receive(:create_repository)
+ .and_return(true)
+ expect(project_repository_double).to receive(:replicate)
+ .with(project.repository.raw)
+ expect(project_repository_double).to receive(:checksum)
+ .and_return('not matching checksum')
+ expect(GitlabShellWorker).not_to receive(:perform_async)
+
+ result = subject.execute('test_second_storage')
+
+ expect(result[:status]).to eq(:error)
+ expect(project).not_to be_repository_read_only
+ expect(project.repository_storage).to eq('default')
+ end
+ end
+
+ context 'when an object pool was joined' do
+ let!(:pool) { create(:pool_repository, :ready, source_project: project) }
+
+ it 'leaves the pool' do
+ expect(project_repository_double).to receive(:create_repository)
+ .and_return(true)
+ expect(project_repository_double).to receive(:replicate)
+ .with(project.repository.raw)
+ expect(project_repository_double).to receive(:checksum)
+ .and_return(checksum)
+
+ result = subject.execute('test_second_storage')
+
+ expect(result[:status]).to eq(:success)
+ expect(project.repository_storage).to eq('test_second_storage')
+ expect(project.reload_pool_repository).to be_nil
+ end
+ end
+ end
+
+ context 'with wiki repository' do
+ include_examples 'moves repository to another storage', 'wiki' do
+ let(:project) { create(:project, :repository, repository_read_only: true, wiki_enabled: true) }
+ let(:repository) { project.wiki.repository }
+
+ before do
+ project.create_wiki
+ end
+ end
+ end
+ end
+end
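Illustrative only: the happy-path flow the new Projects::UpdateRepositoryStorageService specs drive — create the repository on the target shard, replicate from the source, compare checksums, and only then persist the new storage and clear the read-only flag. The objects below are plain stand-ins, not the real GitLab classes:

TargetRepo = Struct.new(:stored_checksum) do
  def create_repository
    true
  end

  def replicate(_source_repository)
    true
  end

  def checksum
    stored_checksum
  end
end

def move_repository(project, target_repo, target_storage)
  raise 'failed to create the target repository' unless target_repo.create_repository

  target_repo.replicate(project[:raw_repository])
  raise 'checksum mismatch' unless target_repo.checksum == project[:checksum]

  project.merge(repository_storage: target_storage, repository_read_only: false)
rescue StandardError => e
  # On any failure the project stays on its old storage but is unmarked read-only.
  project.merge(repository_read_only: false, error: e.message)
end

project = { raw_repository: :raw, checksum: 'abc123', repository_storage: 'default', repository_read_only: true }
p move_repository(project, TargetRepo.new('abc123'), 'test_second_storage')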
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index 90fb6b932ee..ce9765a36ba 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -12,7 +12,6 @@ describe Projects::UpdateService do
end
describe '#execute' do
- let(:gitlab_shell) { Gitlab::Shell.new }
let(:admin) { create(:admin) }
context 'when changing visibility level' do
@@ -303,18 +302,17 @@ describe Projects::UpdateService do
end
context 'when renaming a project' do
- let(:repository_storage) { 'default' }
- let(:repository_storage_path) { Gitlab.config.repositories.storages[repository_storage].legacy_disk_path }
+ let(:fake_repo_path) { File.join(TestEnv.repos_path, user.namespace.full_path, 'existing.git') }
context 'with legacy storage' do
let(:project) { create(:project, :legacy_storage, :repository, creator: user, namespace: user.namespace) }
before do
- gitlab_shell.create_repository(repository_storage, "#{user.namespace.full_path}/existing", user.namespace.full_path)
+ TestEnv.create_bare_repository(fake_repo_path)
end
after do
- gitlab_shell.remove_repository(repository_storage, "#{user.namespace.full_path}/existing")
+ FileUtils.rm_rf(fake_repo_path)
end
it 'does not allow renaming when new path matches existing repository on disk' do
@@ -613,6 +611,25 @@ describe Projects::UpdateService do
end
end
+ describe 'repository_storage' do
+ let(:admin) { create(:admin) }
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :repository) }
+ let(:opts) { { repository_storage: 'test_second_storage' } }
+
+ it 'calls the change repository storage method if the storage changed' do
+ expect(project).to receive(:change_repository_storage).with('test_second_storage')
+
+ update_project(project, admin, opts).inspect
+ end
+
+ it "doesn't call the change repository storage for non-admin users" do
+ expect(project).not_to receive(:change_repository_storage)
+
+ update_project(project, user, opts).inspect
+ end
+ end
+
def update_project(project, user, opts)
described_class.new(project, user, opts).execute
end
diff --git a/spec/services/releases/update_service_spec.rb b/spec/services/releases/update_service_spec.rb
index f6c70873540..7f1849e39a4 100644
--- a/spec/services/releases/update_service_spec.rb
+++ b/spec/services/releases/update_service_spec.rb
@@ -44,12 +44,6 @@ describe Releases::UpdateService do
it_behaves_like 'a failed update'
end
- context 'with an invalid update' do
- let(:new_description) { '' }
-
- it_behaves_like 'a failed update'
- end
-
context 'when a milestone is passed in' do
let(:milestone) { create(:milestone, project: project, title: 'v1.0') }
let(:params_with_milestone) { params.merge!({ milestones: [new_title] }) }
diff --git a/spec/services/resource_events/change_milestone_service_spec.rb b/spec/services/resource_events/change_milestone_service_spec.rb
new file mode 100644
index 00000000000..bc634fadadd
--- /dev/null
+++ b/spec/services/resource_events/change_milestone_service_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ResourceEvents::ChangeMilestoneService do
+ it_behaves_like 'a milestone events creator' do
+ let(:resource) { create(:issue) }
+ end
+
+ it_behaves_like 'a milestone events creator' do
+ let(:resource) { create(:merge_request) }
+ end
+end
diff --git a/spec/services/search_service_spec.rb b/spec/services/search_service_spec.rb
index 48065bf596a..10dafaebe85 100644
--- a/spec/services/search_service_spec.rb
+++ b/spec/services/search_service_spec.rb
@@ -10,13 +10,16 @@ describe SearchService do
let!(:group_member) { create(:group_member, group: accessible_group, user: user) }
let!(:accessible_project) { create(:project, :private, name: 'accessible_project') }
- let!(:inaccessible_project) { create(:project, :private, name: 'inaccessible_project') }
let(:note) { create(:note_on_issue, project: accessible_project) }
+ let!(:inaccessible_project) { create(:project, :private, name: 'inaccessible_project') }
+
let(:snippet) { create(:snippet, author: user) }
let(:group_project) { create(:project, group: accessible_group, name: 'group_project') }
let(:public_project) { create(:project, :public, name: 'public_project') }
+ subject(:search_service) { described_class.new(user, search: search, scope: scope, page: 1) }
+
before do
accessible_project.add_maintainer(user)
end
@@ -293,5 +296,70 @@ describe SearchService do
expect(search_objects.first).to eq public_project
end
end
+
+ context 'redacting search results' do
+ shared_examples 'it redacts incorrect results' do
+ before do
+ allow(Ability).to receive(:allowed?).and_return(allowed)
+ end
+
+ context 'when allowed' do
+ let(:allowed) { true }
+
+ it 'does nothing' do
+ expect(results).not_to be_empty
+ expect(results).to all(be_an(model_class))
+ end
+ end
+
+ context 'when disallowed' do
+ let(:allowed) { false }
+
+ it 'does nothing' do
+ expect(results).to be_empty
+ end
+ end
+ end
+
+ context 'issues' do
+ let(:issue) { create(:issue, project: accessible_project) }
+ let(:scope) { 'issues' }
+ let(:model_class) { Issue }
+ let(:ability) { :read_issue }
+ let(:search) { issue.title }
+ let(:results) { subject.search_objects }
+
+ it_behaves_like 'it redacts incorrect results'
+ end
+
+ context 'notes' do
+ let(:note) { create(:note_on_commit, project: accessible_project) }
+ let(:scope) { 'notes' }
+ let(:model_class) { Note }
+ let(:ability) { :read_note }
+ let(:search) { note.note }
+ let(:results) do
+ described_class.new(
+ user,
+ project_id: accessible_project.id,
+ scope: scope,
+ search: note.note
+ ).search_objects
+ end
+
+ it_behaves_like 'it redacts incorrect results'
+ end
+
+ context 'merge_requests' do
+ let(:scope) { 'merge_requests' }
+ let(:model_class) { MergeRequest }
+ let(:ability) { :read_merge_request }
+ let(:merge_request) { create(:merge_request, source_project: accessible_project, author: user) }
+ let(:search) { merge_request.title }
+ let(:results) { subject.search_objects }
+
+ it_behaves_like 'it redacts incorrect results'
+ end
+ end
end
end
diff --git a/spec/services/serverless/associate_domain_service_spec.rb b/spec/services/serverless/associate_domain_service_spec.rb
new file mode 100644
index 00000000000..3d1a878bcf5
--- /dev/null
+++ b/spec/services/serverless/associate_domain_service_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Serverless::AssociateDomainService do
+ subject { described_class.new(knative, pages_domain_id: pages_domain_id, creator: creator) }
+
+ let(:sdc) { create(:serverless_domain_cluster, pages_domain: create(:pages_domain, :instance_serverless)) }
+ let(:knative) { sdc.knative }
+ let(:creator) { sdc.creator }
+ let(:pages_domain_id) { sdc.pages_domain_id }
+
+ context 'when the domain is unchanged' do
+ let(:creator) { create(:user) }
+
+ it 'does not update creator' do
+ expect { subject.execute }.not_to change { sdc.reload.creator }
+ end
+ end
+
+ context 'when domain is changed to nil' do
+ let(:pages_domain_id) { nil }
+ let(:creator) { create(:user) }
+
+ it 'removes the association between knative and the domain' do
+ expect { subject.execute }.to change { knative.reload.pages_domain }.from(sdc.pages_domain).to(nil)
+ end
+
+ it 'does not attempt to update creator' do
+ expect { subject.execute }.not_to raise_error
+ end
+ end
+
+ context 'when a new domain is associated' do
+ let(:pages_domain_id) { create(:pages_domain, :instance_serverless).id }
+ let(:creator) { create(:user) }
+
+ it 'creates an association with the domain' do
+ expect { subject.execute }.to change { knative.pages_domain.id }.from(sdc.pages_domain.id).to(pages_domain_id)
+ end
+
+ it 'updates creator' do
+ expect { subject.execute }.to change { sdc.reload.creator }.from(sdc.creator).to(creator)
+ end
+ end
+
+ context 'when knative is not authorized to use the pages domain' do
+ let(:pages_domain_id) { create(:pages_domain).id }
+
+ before do
+ expect(knative).to receive(:available_domains).and_return(PagesDomain.none)
+ end
+
+ it 'sets pages_domain_id to nil' do
+ expect { subject.execute }.to change { knative.reload.pages_domain }.from(sdc.pages_domain).to(nil)
+ end
+ end
+
+ context 'when knative hostname is nil' do
+ let(:knative) { build(:clusters_applications_knative, hostname: nil) }
+
+ it 'sets hostname to a placeholder value' do
+ expect { subject.execute }.to change { knative.hostname }.to('example.com')
+ end
+ end
+
+ context 'when knative hostname exists' do
+ let(:knative) { build(:clusters_applications_knative, hostname: 'hostname.com') }
+
+ it 'does not change hostname' do
+ expect { subject.execute }.not_to change { knative.hostname }
+ end
+ end
+end
diff --git a/spec/services/snippets/bulk_destroy_service_spec.rb b/spec/services/snippets/bulk_destroy_service_spec.rb
new file mode 100644
index 00000000000..f03d7496f94
--- /dev/null
+++ b/spec/services/snippets/bulk_destroy_service_spec.rb
@@ -0,0 +1,161 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Snippets::BulkDestroyService do
+ let_it_be(:project) { create(:project) }
+ let(:user) { create(:user) }
+ let!(:personal_snippet) { create(:personal_snippet, :repository, author: user) }
+ let!(:project_snippet) { create(:project_snippet, :repository, project: project, author: user) }
+ let(:snippets) { user.snippets }
+ let(:gitlab_shell) { Gitlab::Shell.new }
+ let(:service_user) { user }
+
+ before do
+ project.add_developer(user)
+ end
+
+ subject { described_class.new(service_user, snippets) }
+
+ describe '#execute' do
+ it 'deletes the snippets in bulk' do
+ response = nil
+
+ expect(Repositories::ShellDestroyService).to receive(:new).with(personal_snippet.repository).and_call_original
+ expect(Repositories::ShellDestroyService).to receive(:new).with(project_snippet.repository).and_call_original
+
+ aggregate_failures do
+ expect do
+ response = subject.execute
+ end.to change(Snippet, :count).by(-2)
+
+ expect(response).to be_success
+ expect(repository_exists?(personal_snippet)).to be_falsey
+ expect(repository_exists?(project_snippet)).to be_falsey
+ end
+ end
+
+ context 'when snippets is empty' do
+ let(:snippets) { Snippet.none }
+
+ it 'returns a ServiceResponse success response' do
+ response = subject.execute
+
+ expect(response).to be_success
+ expect(response.message).to eq 'No snippets found.'
+ end
+ end
+
+ shared_examples 'error is raised' do
+ it 'returns error' do
+ response = subject.execute
+
+ aggregate_failures do
+ expect(response).to be_error
+ expect(response.message).to eq error_message
+ end
+ end
+
+ it 'does not delete any record' do
+ expect do
+ subject.execute
+ end.not_to change(Snippet, :count)
+ end
+ end
+
+ context 'when user does not have access to remove the snippet' do
+ let(:service_user) { create(:user) }
+
+ it_behaves_like 'error is raised' do
+ let(:error_message) { "You don't have access to delete these snippets." }
+ end
+ end
+
+ context 'when an error is raised deleting the repository' do
+ before do
+ allow_next_instance_of(Repositories::DestroyService) do |instance|
+ allow(instance).to receive(:execute).and_return({ status: :error })
+ end
+ end
+
+ it_behaves_like 'error is raised' do
+ let(:error_message) { 'Failed to delete snippet repositories.' }
+ end
+
+ it 'tries to rollback the repository' do
+ expect(subject).to receive(:attempt_rollback_repositories)
+
+ subject.execute
+ end
+ end
+
+ context 'when an error is raised deleting the records' do
+ before do
+ allow(snippets).to receive(:destroy_all).and_raise(ActiveRecord::ActiveRecordError)
+ end
+
+ it_behaves_like 'error is raised' do
+ let(:error_message) { 'Failed to remove snippets.' }
+ end
+
+ it 'tries to rollback the repository' do
+ expect(subject).to receive(:attempt_rollback_repositories)
+
+ subject.execute
+ end
+ end
+
+ context 'when snippet does not have a repository attached' do
+ let!(:snippet_without_repo) { create(:personal_snippet, author: user) }
+
+ it 'does not schedule anything for the snippet without a repository and returns success' do
+ response = nil
+
+ expect(Repositories::ShellDestroyService).to receive(:new).with(personal_snippet.repository).and_call_original
+ expect(Repositories::ShellDestroyService).to receive(:new).with(project_snippet.repository).and_call_original
+
+ expect do
+ response = subject.execute
+ end.to change(Snippet, :count).by(-3)
+
+ expect(response).to be_success
+ end
+ end
+ end
+
+ describe '#attempt_rollback_repositories' do
+ before do
+ Repositories::DestroyService.new(personal_snippet.repository).execute
+ end
+
+ it 'rolls back the repository' do
+ error_msg = personal_snippet.disk_path + "+#{personal_snippet.id}+deleted.git"
+ expect(repository_exists?(personal_snippet, error_msg)).to be_truthy
+
+ subject.__send__(:attempt_rollback_repositories)
+
+ aggregate_failures do
+ expect(repository_exists?(personal_snippet, error_msg)).to be_falsey
+ expect(repository_exists?(personal_snippet)).to be_truthy
+ end
+ end
+
+ context 'when an error is raised' do
+ before do
+ allow_next_instance_of(Repositories::DestroyRollbackService) do |instance|
+ allow(instance).to receive(:execute).and_return({ status: :error })
+ end
+ end
+
+ it 'logs the error' do
+ expect(Gitlab::AppLogger).to receive(:error).with(/\ARepository .* in path .* could not be rolled back\z/).twice
+
+ subject.__send__(:attempt_rollback_repositories)
+ end
+ end
+ end
+
+ def repository_exists?(snippet, path = snippet.disk_path + ".git")
+ gitlab_shell.repository_exists?(snippet.snippet_repository.shard_name, path)
+ end
+end
diff --git a/spec/services/snippets/create_service_spec.rb b/spec/services/snippets/create_service_spec.rb
index a1cbec6748a..ffad3c8b8e5 100644
--- a/spec/services/snippets/create_service_spec.rb
+++ b/spec/services/snippets/create_service_spec.rb
@@ -18,7 +18,7 @@ describe Snippets::CreateService do
let(:extra_opts) { {} }
let(:creator) { admin }
- subject { Snippets::CreateService.new(project, creator, opts).execute }
+ subject { described_class.new(project, creator, opts).execute }
let(:snippet) { subject.payload[:snippet] }
@@ -143,37 +143,100 @@ describe Snippets::CreateService do
end
end
- shared_examples 'creates repository' do
- it do
+ shared_examples 'creates repository and files' do
+ it 'creates repository' do
subject
- expect(snippet.repository_exists?).to be_truthy
+ expect(snippet.repository.exists?).to be_truthy
+ end
+
+ it 'commits the files to the repository' do
+ subject
+
+ blob = snippet.repository.blob_at('master', base_opts[:file_name])
+
+ expect(blob.data).to eq base_opts[:content]
+ end
+
+ context 'when repository creation action fails' do
+ before do
+ allow_next_instance_of(Snippet) do |instance|
+ allow(instance).to receive(:create_repository).and_return(nil)
+ end
+ end
+
+ it 'does not create the snippet' do
+ expect { subject }.not_to change { Snippet.count }
+ end
+
+ it 'returns the error' do
+ expect(snippet.errors.full_messages).to include('Repository could not be created')
+ end
+ end
+
+ context 'when the commit action fails' do
+ before do
+ allow_next_instance_of(SnippetRepository) do |instance|
+ allow(instance).to receive(:multi_files_action).and_raise(SnippetRepository::CommitError.new('foobar'))
+ end
+ end
+
+ it 'does not create the snippet' do
+ expect { subject }.not_to change { Snippet.count }
+ end
+
+ it 'destroys the created repository' do
+ expect_next_instance_of(Repository) do |instance|
+ expect(instance).to receive(:remove).and_call_original
+ end
+
+ subject
+ end
+
+ it 'returns the error' do
+ response = subject
+
+ expect(response).to be_error
+ expect(response.payload[:snippet].errors.full_messages).to eq ['foobar']
+ end
end
context 'when snippet creation fails' do
let(:extra_opts) { { content: nil } }
it 'does not create repository' do
- subject
+ expect do
+ subject
+ end.not_to change(Snippet, :count)
expect(snippet.repository_exists?).to be_falsey
end
end
context 'when feature flag :version_snippets is disabled' do
- it 'does not create snippet repository' do
+ before do
stub_feature_flags(version_snippets: false)
+ end
+ it 'does not create snippet repository' do
expect do
subject
end.to change(Snippet, :count).by(1)
expect(snippet.repository_exists?).to be_falsey
end
+
+ it 'does not try to commit files' do
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).not_to receive(:create_commit)
+ end
+
+ subject
+ end
end
end
- context 'when Project Snippet' do
+ context 'when ProjectSnippet' do
let_it_be(:project) { create(:project) }
before do
@@ -185,7 +248,7 @@ describe Snippets::CreateService do
it_behaves_like 'spam check is performed'
it_behaves_like 'snippet create data is tracked'
it_behaves_like 'an error service response when save fails'
- it_behaves_like 'creates repository'
+ it_behaves_like 'creates repository and files'
end
context 'when PersonalSnippet' do
@@ -196,7 +259,7 @@ describe Snippets::CreateService do
it_behaves_like 'spam check is performed'
it_behaves_like 'snippet create data is tracked'
it_behaves_like 'an error service response when save fails'
- it_behaves_like 'creates repository'
+ it_behaves_like 'creates repository and files'
end
end
end
diff --git a/spec/services/snippets/destroy_service_spec.rb b/spec/services/snippets/destroy_service_spec.rb
index bb035d275ab..840dc11a740 100644
--- a/spec/services/snippets/destroy_service_spec.rb
+++ b/spec/services/snippets/destroy_service_spec.rb
@@ -8,7 +8,7 @@ describe Snippets::DestroyService do
let_it_be(:other_user) { create(:user) }
describe '#execute' do
- subject { Snippets::DestroyService.new(user, snippet).execute }
+ subject { described_class.new(user, snippet).execute }
context 'when snippet is nil' do
let(:snippet) { nil }
@@ -30,7 +30,7 @@ describe Snippets::DestroyService do
shared_examples 'an unsuccessful destroy' do
it 'does not delete the snippet' do
- expect { subject }.to change { Snippet.count }.by(0)
+ expect { subject }.not_to change { Snippet.count }
end
it 'returns ServiceResponse error' do
@@ -38,8 +38,63 @@ describe Snippets::DestroyService do
end
end
+ shared_examples 'deletes the snippet repository' do
+ it 'removes the snippet repository' do
+ expect(snippet.repository.exists?).to be_truthy
+ expect(GitlabShellWorker).to receive(:perform_in)
+ expect_next_instance_of(Repositories::DestroyService) do |instance|
+ expect(instance).to receive(:execute).and_call_original
+ end
+
+ expect(subject).to be_success
+ end
+
+ context 'when the repository deletion service raises an error' do
+ before do
+ allow_next_instance_of(Repositories::DestroyService) do |instance|
+ allow(instance).to receive(:execute).and_return({ status: :error })
+ end
+ end
+
+ it_behaves_like 'an unsuccessful destroy'
+
+ it 'does not try to rollback repository' do
+ expect(Repositories::DestroyRollbackService).not_to receive(:new)
+
+ subject
+ end
+ end
+
+ context 'when a destroy error is raised' do
+ before do
+ allow(snippet).to receive(:destroy!).and_raise(ActiveRecord::ActiveRecordError)
+ end
+
+ it_behaves_like 'an unsuccessful destroy'
+
+ it 'attempts to rollback the repository' do
+ expect(Repositories::DestroyRollbackService).to receive(:new).and_call_original
+
+ subject
+ end
+ end
+
+ context 'when repository is nil' do
+ it 'does not schedule anything and returns success' do
+ allow(snippet).to receive(:repository).and_return(nil)
+
+ expect(GitlabShellWorker).not_to receive(:perform_in)
+ expect_next_instance_of(Repositories::DestroyService) do |instance|
+ expect(instance).to receive(:execute).and_call_original
+ end
+
+ expect(subject).to be_success
+ end
+ end
+ end
+
context 'when ProjectSnippet' do
- let!(:snippet) { create(:project_snippet, project: project, author: author) }
+ let!(:snippet) { create(:project_snippet, :repository, project: project, author: author) }
context 'when user is able to admin_project_snippet' do
let(:author) { user }
@@ -49,6 +104,7 @@ describe Snippets::DestroyService do
end
it_behaves_like 'a successful destroy'
+ it_behaves_like 'deletes the snippet repository'
end
context 'when user is not able to admin_project_snippet' do
@@ -59,12 +115,13 @@ describe Snippets::DestroyService do
end
context 'when PersonalSnippet' do
- let!(:snippet) { create(:personal_snippet, author: author) }
+ let!(:snippet) { create(:personal_snippet, :repository, author: author) }
context 'when user is able to admin_personal_snippet' do
let(:author) { user }
it_behaves_like 'a successful destroy'
+ it_behaves_like 'deletes the snippet repository'
end
context 'when user is not able to admin_personal_snippet' do
@@ -73,5 +130,21 @@ describe Snippets::DestroyService do
it_behaves_like 'an unsuccessful destroy'
end
end
+
+ context 'when the repository does not exist' do
+ let(:snippet) { create(:personal_snippet, author: user) }
+
+ it 'does not schedule anything and returns success' do
+ expect(snippet.repository).not_to be_nil
+ expect(snippet.repository.exists?).to be_falsey
+
+ expect(GitlabShellWorker).not_to receive(:perform_in)
+ expect_next_instance_of(Repositories::DestroyService) do |instance|
+ expect(instance).to receive(:execute).and_call_original
+ end
+
+ expect(subject).to be_success
+ end
+ end
end
end
diff --git a/spec/services/snippets/update_service_spec.rb b/spec/services/snippets/update_service_spec.rb
index b8215f9779d..3605d3f76da 100644
--- a/spec/services/snippets/update_service_spec.rb
+++ b/spec/services/snippets/update_service_spec.rb
@@ -16,14 +16,9 @@ describe Snippets::UpdateService do
}
end
let(:updater) { user }
+ let(:service) { Snippets::UpdateService.new(project, updater, options) }
- subject do
- Snippets::UpdateService.new(
- project,
- updater,
- options
- ).execute(snippet)
- end
+ subject { service.execute(snippet) }
shared_examples 'a service that updates a snippet' do
it 'updates a snippet with the provided attributes' do
@@ -98,9 +93,109 @@ describe Snippets::UpdateService do
end
end
+ shared_examples 'creates repository and creates file' do
+ it 'creates repository' do
+ expect(snippet.repository).not_to exist
+
+ subject
+
+ expect(snippet.repository).to exist
+ end
+
+ it 'commits the files to the repository' do
+ subject
+
+ expect(snippet.blobs.count).to eq 1
+
+ blob = snippet.repository.blob_at('master', options[:file_name])
+
+ expect(blob.data).to eq options[:content]
+ end
+
+ context 'when the repository does not exist' do
+ it 'does not try to commit file' do
+ allow(snippet).to receive(:repository_exists?).and_return(false)
+
+ expect(service).not_to receive(:create_commit)
+
+ subject
+ end
+ end
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(version_snippets: false)
+ end
+
+ it 'does not create repository' do
+ subject
+
+ expect(snippet.repository).not_to exist
+ end
+
+ it 'does not try to commit file' do
+ expect(service).not_to receive(:create_commit)
+
+ subject
+ end
+ end
+
+ it 'returns error when the commit action fails' do
+ allow_next_instance_of(SnippetRepository) do |instance|
+ allow(instance).to receive(:multi_files_action).and_raise(SnippetRepository::CommitError)
+ end
+
+ response = subject
+
+ expect(response).to be_error
+ expect(response.payload[:snippet].errors.full_messages).to eq ['Repository Error updating the snippet']
+ end
+ end
+
+ shared_examples 'updates repository content' do
+ it 'commits the files to the repository' do
+ blob = snippet.blobs.first
+ options[:file_name] = blob.path + '_new'
+
+ expect(blob.data).not_to eq(options[:content])
+
+ subject
+
+ blob = snippet.blobs.first
+
+ expect(blob.path).to eq(options[:file_name])
+ expect(blob.data).to eq(options[:content])
+ end
+
+ it 'returns error when the commit action fails' do
+ allow(snippet.snippet_repository).to receive(:multi_files_action).and_raise(SnippetRepository::CommitError)
+
+ response = subject
+
+ expect(response).to be_error
+ expect(response.payload[:snippet].errors.full_messages).to eq ['Repository Error updating the snippet']
+ end
+
+ it 'returns error if snippet does not have a snippet_repository' do
+ allow(snippet).to receive(:snippet_repository).and_return(nil)
+
+ expect(subject).to be_error
+ end
+
+ context 'when the repository does not exist' do
+ it 'does not try to commit file' do
+ allow(snippet).to receive(:repository_exists?).and_return(false)
+
+ expect(service).not_to receive(:create_commit)
+
+ subject
+ end
+ end
+ end
+
context 'when Project Snippet' do
let_it_be(:project) { create(:project) }
- let!(:snippet) { create(:project_snippet, author: user, project: project) }
+ let!(:snippet) { create(:project_snippet, :repository, author: user, project: project) }
before do
project.add_developer(user)
@@ -109,15 +204,29 @@ describe Snippets::UpdateService do
it_behaves_like 'a service that updates a snippet'
it_behaves_like 'public visibility level restrictions apply'
it_behaves_like 'snippet update data is tracked'
+ it_behaves_like 'updates repository content'
+
+ context 'when snippet does not have a repository' do
+ let!(:snippet) { create(:project_snippet, author: user, project: project) }
+
+ it_behaves_like 'creates repository and creates file'
+ end
end
context 'when PersonalSnippet' do
let(:project) { nil }
- let!(:snippet) { create(:personal_snippet, author: user) }
+ let!(:snippet) { create(:personal_snippet, :repository, author: user) }
it_behaves_like 'a service that updates a snippet'
it_behaves_like 'public visibility level restrictions apply'
it_behaves_like 'snippet update data is tracked'
+ it_behaves_like 'updates repository content'
+
+ context 'when snippet does not have a repository' do
+ let!(:snippet) { create(:personal_snippet, author: user, project: project) }
+
+ it_behaves_like 'creates repository and creates file'
+ end
end
end
end
diff --git a/spec/services/spam/mark_as_spam_service_spec.rb b/spec/services/spam/mark_as_spam_service_spec.rb
index cba9d6a39cb..9978005279a 100644
--- a/spec/services/spam/mark_as_spam_service_spec.rb
+++ b/spec/services/spam/mark_as_spam_service_spec.rb
@@ -7,7 +7,7 @@ describe Spam::MarkAsSpamService do
let(:spammable) { build(:issue, user_agent_detail: user_agent_detail) }
let(:fake_akismet_service) { double(:akismet_service, submit_spam: true) }
- subject { described_class.new(spammable: spammable) }
+ subject { described_class.new(target: spammable) }
describe '#execute' do
before do
diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb
index 3df620d1fea..5b87ec022ae 100644
--- a/spec/services/system_note_service_spec.rb
+++ b/spec/services/system_note_service_spec.rb
@@ -625,4 +625,14 @@ describe SystemNoteService do
described_class.discussion_lock(issuable, double)
end
end
+
+ describe '.auto_resolve_prometheus_alert' do
+ it 'calls IssuableService' do
+ expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
+ expect(service).to receive(:auto_resolve_prometheus_alert)
+ end
+
+ described_class.auto_resolve_prometheus_alert(noteable, project, author)
+ end
+ end
end
diff --git a/spec/services/system_notes/issuables_service_spec.rb b/spec/services/system_notes/issuables_service_spec.rb
index 228d69fda4e..477f9eae39e 100644
--- a/spec/services/system_notes/issuables_service_spec.rb
+++ b/spec/services/system_notes/issuables_service_spec.rb
@@ -598,8 +598,8 @@ describe ::SystemNotes::IssuablesService do
context 'when mentioner is not a MergeRequest' do
it 'is falsey' do
mentioner = noteable.dup
- expect(service.cross_reference_disallowed?(mentioner))
- .to be_falsey
+
+ expect(service.cross_reference_disallowed?(mentioner)).to be_falsey
end
end
@@ -609,24 +609,35 @@ describe ::SystemNotes::IssuablesService do
it 'is truthy when noteable is in commits' do
expect(mentioner).to receive(:commits).and_return([noteable])
- expect(service.cross_reference_disallowed?(mentioner))
- .to be_truthy
+
+ expect(service.cross_reference_disallowed?(mentioner)).to be_truthy
end
it 'is falsey when noteable is not in commits' do
expect(mentioner).to receive(:commits).and_return([])
- expect(service.cross_reference_disallowed?(mentioner))
- .to be_falsey
+
+ expect(service.cross_reference_disallowed?(mentioner)).to be_falsey
end
end
context 'when notable is an ExternalIssue' do
+ let(:project) { create(:project) }
let(:noteable) { ExternalIssue.new('EXT-1234', project) }
- it 'is truthy' do
- mentioner = noteable.dup
- expect(service.cross_reference_disallowed?(mentioner))
- .to be_truthy
+ it 'is false with issue tracker supporting referencing' do
+ create(:jira_service, project: project)
+
+ expect(service.cross_reference_disallowed?(noteable)).to be_falsey
+ end
+
+ it 'is true with issue tracker not supporting referencing' do
+ create(:bugzilla_service, project: project)
+
+ expect(service.cross_reference_disallowed?(noteable)).to be_truthy
+ end
+
+ it 'is true without issue tracker' do
+ expect(service.cross_reference_disallowed?(noteable)).to be_truthy
end
end
end
@@ -643,4 +654,16 @@ describe ::SystemNotes::IssuablesService do
.to eq('resolved the corresponding error and closed the issue.')
end
end
+
+ describe '#auto_resolve_prometheus_alert' do
+ subject { service.auto_resolve_prometheus_alert }
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'closed' }
+ end
+
+ it 'creates the expected system note' do
+ expect(subject.note).to eq('automatically closed this issue because the alert resolved.')
+ end
+ end
end
diff --git a/spec/services/system_notes/merge_requests_service_spec.rb b/spec/services/system_notes/merge_requests_service_spec.rb
index f5c071502f5..13d6367a585 100644
--- a/spec/services/system_notes/merge_requests_service_spec.rb
+++ b/spec/services/system_notes/merge_requests_service_spec.rb
@@ -253,7 +253,7 @@ describe ::SystemNotes::MergeRequestsService do
end
it "posts the 'picked merge request' system note" do
- expect(subject.note).to eq("picked this merge request into branch [`#{branch_name}`](/#{project.full_path}/-/tree/#{branch_name}) with commit #{commit_sha}")
+ expect(subject.note).to eq("picked the changes into the branch [`#{branch_name}`](/#{project.full_path}/-/tree/#{branch_name}) with commit #{commit_sha}")
end
it 'links the merge request and the cherry-pick commit' do
diff --git a/spec/services/users/destroy_service_spec.rb b/spec/services/users/destroy_service_spec.rb
index 2b658a93b0a..a664719783a 100644
--- a/spec/services/users/destroy_service_spec.rb
+++ b/spec/services/users/destroy_service_spec.rb
@@ -26,6 +26,12 @@ describe Users::DestroyService do
service.execute(user)
end
+ it 'does not include snippets when deleting in batches' do
+ expect(user).to receive(:destroy_dependent_associations_in_batches).with({ exclude: [:snippets] })
+
+ service.execute(user)
+ end
+
it 'will delete the project' do
expect_next_instance_of(Projects::DestroyService) do |destroy_service|
expect(destroy_service).to receive(:execute).once.and_return(true)
@@ -33,6 +39,54 @@ describe Users::DestroyService do
service.execute(user)
end
+
+ it 'calls the bulk snippet destroy service for the user personal snippets' do
+ repo1 = create(:personal_snippet, :repository, author: user).snippet_repository
+ repo2 = create(:project_snippet, :repository, author: user).snippet_repository
+ repo3 = create(:project_snippet, :repository, project: project, author: user).snippet_repository
+
+ aggregate_failures do
+ expect(gitlab_shell.repository_exists?(repo1.shard_name, repo1.disk_path + '.git')).to be_truthy
+ expect(gitlab_shell.repository_exists?(repo2.shard_name, repo2.disk_path + '.git')).to be_truthy
+ expect(gitlab_shell.repository_exists?(repo3.shard_name, repo3.disk_path + '.git')).to be_truthy
+ end
+
+ # Call made when destroying user personal projects
+ expect(Snippets::BulkDestroyService).to receive(:new)
+ .with(admin, project.snippets).and_call_original
+
+ # Call to remove user personal snippets and for
+ # project snippets where projects are not user personal
+ # ones
+ expect(Snippets::BulkDestroyService).to receive(:new)
+ .with(admin, user.snippets).and_call_original
+
+ service.execute(user)
+
+ aggregate_failures do
+ expect(gitlab_shell.repository_exists?(repo1.shard_name, repo1.disk_path + '.git')).to be_falsey
+ expect(gitlab_shell.repository_exists?(repo2.shard_name, repo2.disk_path + '.git')).to be_falsey
+ expect(gitlab_shell.repository_exists?(repo3.shard_name, repo3.disk_path + '.git')).to be_falsey
+ end
+ end
+
+ context 'when an error is raised deleting snippets' do
+ it 'does not delete user' do
+ snippet = create(:personal_snippet, :repository, author: user)
+
+ bulk_service = double
+ allow(Snippets::BulkDestroyService).to receive(:new).and_call_original
+ allow(Snippets::BulkDestroyService).to receive(:new).with(admin, user.snippets).and_return(bulk_service)
+ allow(bulk_service).to receive(:execute).and_return(ServiceResponse.error(message: 'foo'))
+
+ aggregate_failures do
+ expect { service.execute(user) }
+ .to raise_error(Users::DestroyService::DestroyError, 'foo' )
+ expect(snippet.reload).not_to be_nil
+ expect(gitlab_shell.repository_exists?(snippet.repository_storage, snippet.disk_path + '.git')).to be_truthy
+ end
+ end
+ end
end
context 'projects in pending_delete' do
diff --git a/spec/services/users/update_service_spec.rb b/spec/services/users/update_service_spec.rb
index 5cd6283ca96..24738a79045 100644
--- a/spec/services/users/update_service_spec.rb
+++ b/spec/services/users/update_service_spec.rb
@@ -64,6 +64,13 @@ describe Users::UpdateService do
end.not_to change { user.name }
end
+ it 'updates user detail with provided attributes' do
+ result = update_user(user, job_title: 'Backend Engineer')
+
+ expect(result).to eq(status: :success)
+ expect(user.job_title).to eq('Backend Engineer')
+ end
+
def update_user(user, opts)
described_class.new(user, opts.merge(user: user)).execute
end
diff --git a/spec/services/x509_certificate_revoke_service_spec.rb b/spec/services/x509_certificate_revoke_service_spec.rb
new file mode 100644
index 00000000000..ef76f616c93
--- /dev/null
+++ b/spec/services/x509_certificate_revoke_service_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe X509CertificateRevokeService do
+ describe '#execute' do
+ let(:service) { described_class.new }
+ let!(:x509_signature_1) { create(:x509_commit_signature, x509_certificate: x509_certificate, verification_status: :verified ) }
+ let!(:x509_signature_2) { create(:x509_commit_signature, x509_certificate: x509_certificate, verification_status: :verified ) }
+
+ context 'for revoked certificates' do
+ let(:x509_certificate) { create(:x509_certificate, certificate_status: :revoked ) }
+
+ it 'updates all commit signatures' do
+ expect do
+ service.execute(x509_certificate)
+
+ x509_signature_1.reload
+ x509_signature_2.reload
+ end
+ .to change(x509_signature_1, :verification_status).from('verified').to('unverified')
+ .and change(x509_signature_2, :verification_status).from('verified').to('unverified')
+ end
+ end
+
+ context 'for good certificates' do
+ RSpec::Matchers.define_negated_matcher :not_change, :change
+
+ let(:x509_certificate) { create(:x509_certificate) }
+
+ it 'does not update any commit signature' do
+ expect do
+ service.execute(x509_certificate)
+
+ x509_signature_1.reload
+ x509_signature_2.reload
+ end
+ .to not_change(x509_signature_1, :verification_status)
+ .and not_change(x509_signature_2, :verification_status)
+ end
+ end
+ end
+end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 35bf6846ab3..30524e4bbae 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -76,11 +76,21 @@ RSpec.configure do |config|
metadata[:level] = quality_level.level_for(location)
metadata[:api] = true if location =~ %r{/spec/requests/api/}
- # do not overwrite type if it's already set
- next if metadata.key?(:type)
+ # Do not overwrite migration if it's already set
+ unless metadata.key?(:migration)
+ metadata[:migration] = true if metadata[:level] == :migration
+ end
+
+ # Do not overwrite schema if it's already set
+ unless metadata.key?(:schema)
+ metadata[:schema] = :latest if quality_level.background_migration?(location)
+ end
- match = location.match(%r{/spec/([^/]+)/})
- metadata[:type] = match[1].singularize.to_sym if match
+ # Do not overwrite type if it's already set
+ unless metadata.key?(:type)
+ match = location.match(%r{/spec/([^/]+)/})
+ metadata[:type] = match[1].singularize.to_sym if match
+ end
end
config.include LicenseHelpers
@@ -119,6 +129,7 @@ RSpec.configure do |config|
config.include PolicyHelpers, type: :policy
config.include MemoryUsageHelper
config.include ExpectRequestWithStatus, type: :request
+ config.include IdempotentWorkerHelper, type: :worker
config.include RailsHelpers
if ENV['CI'] || ENV['RETRIES']
@@ -192,8 +203,10 @@ RSpec.configure do |config|
# expect(Gitlab::Git::KeepAround).to receive(:execute).and_call_original
allow(Gitlab::Git::KeepAround).to receive(:execute)
- # Clear thread cache and Sidekiq queues
- Gitlab::ThreadMemoryCache.cache_backend.clear
+ [Gitlab::ThreadMemoryCache, Gitlab::ProcessMemoryCache].each do |cache|
+ cache.cache_backend.clear
+ end
+
Sidekiq::Worker.clear_all
# Temporary patch to force admin mode to be active by default in tests when
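Illustrative only: how the derived metadata in the spec_helper.rb hunk above maps a spec's location onto a type, sketched with made-up paths (the real value comes from example.metadata[:location]):

require 'active_support/core_ext/string/inflections'

{
  './spec/services/projects/update_service_spec.rb' => :service,
  './spec/workers/post_receive_spec.rb' => :worker
}.each do |location, expected_type|
  match = location.match(%r{/spec/([^/]+)/})
  derived_type = match[1].singularize.to_sym if match

  raise "unexpected type #{derived_type}" unless derived_type == expected_type
end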
diff --git a/spec/support/caching.rb b/spec/support/caching.rb
index ecbe65f7e97..883d531550a 100644
--- a/spec/support/caching.rb
+++ b/spec/support/caching.rb
@@ -21,6 +21,21 @@ RSpec.configure do |config|
ActionController::Base.cache_store = caching_store
end
+ config.around(:each, :use_clean_rails_redis_caching) do |example|
+ original_null_store = Rails.cache
+ caching_config_hash = Gitlab::Redis::Cache.params
+ caching_config_hash[:namespace] = Gitlab::Redis::Cache::CACHE_NAMESPACE
+ Rails.cache = ActiveSupport::Cache::RedisCacheStore.new(caching_config_hash)
+
+ redis_cache_cleanup!
+
+ example.run
+
+ redis_cache_cleanup!
+
+ Rails.cache = original_null_store
+ end
+
config.around(:each, :use_sql_query_cache) do |example|
ActiveRecord::Base.cache do
example.run
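Illustrative usage of the :use_clean_rails_redis_caching hook added above — a sketch of a spec that opts into the Redis-backed Rails.cache instead of the default null store (the description and expectation are hypothetical):

describe 'Redis-backed caching', :use_clean_rails_redis_caching do
  it 'persists values written through Rails.cache within the example' do
    Rails.cache.write('greeting', 'hello')

    expect(Rails.cache.read('greeting')).to eq('hello')
  end
end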
diff --git a/spec/support/capybara.rb b/spec/support/capybara.rb
index 5ae042e4148..90adfb1a2ee 100644
--- a/spec/support/capybara.rb
+++ b/spec/support/capybara.rb
@@ -17,11 +17,24 @@ JS_CONSOLE_FILTER = Regexp.union([
'"[HMR] Waiting for update signal from WDS..."',
'"[WDS] Hot Module Replacement enabled."',
'"[WDS] Live Reloading enabled."',
- "Download the Vue Devtools extension"
+ 'Download the Vue Devtools extension',
+ 'Download the Apollo DevTools'
])
CAPYBARA_WINDOW_SIZE = [1366, 768].freeze
+# Run Workhorse on the given host and port, proxying to Puma on a UNIX socket,
+# for a closer-to-production experience
+Capybara.register_server :puma_via_workhorse do |app, port, host, **options|
+ file = Tempfile.new
+ socket_path = file.path
+ file.close! # We just want the filename
+
+ TestEnv.with_workhorse(TestEnv.workhorse_dir, host, port, socket_path) do
+ Capybara.servers[:puma].call(app, nil, socket_path, **options)
+ end
+end
+
Capybara.register_driver :chrome do |app|
capabilities = Selenium::WebDriver::Remote::Capabilities.chrome(
# This enables access to logs with `page.driver.manage.get_log(:browser)`
@@ -59,13 +72,18 @@ Capybara.register_driver :chrome do |app|
)
end
-Capybara.server = :webrick
+Capybara.server = :puma_via_workhorse
Capybara.javascript_driver = :chrome
Capybara.default_max_wait_time = timeout
Capybara.ignore_hidden_elements = true
Capybara.default_normalize_ws = true
Capybara.enable_aria_label = true
+Capybara::Screenshot.append_timestamp = false
+
+Capybara::Screenshot.register_filename_prefix_formatter(:rspec) do |example|
+ example.full_description.downcase.parameterize(separator: "_")[0..99]
+end
# Keep only the screenshots generated from the last failing test suite
Capybara::Screenshot.prune_strategy = :keep_last_run
# From https://github.com/mattheworiordan/capybara-screenshot/issues/84#issuecomment-41219326
@@ -100,6 +118,24 @@ RSpec.configure do |config|
end
end
+ # The :capybara_ignore_server_errors metadata means unhandled exceptions raised
+ # by the application under test will not necessarily fail the test. This is
+ # useful when testing conditions that are expected to raise a 500 error in
+ # production; it should not be used on the happy path.
+ config.around(:each, :capybara_ignore_server_errors) do |example|
+ Capybara.raise_server_errors = false
+
+ example.run
+
+ if example.metadata[:screenshot]
+ screenshot = example.metadata[:screenshot][:image] || example.metadata[:screenshot][:html]
+ example.metadata[:stdout] = %{[[ATTACHMENT|#{screenshot}]]}
+ end
+
+ ensure
+ Capybara.raise_server_errors = true
+ end
+
config.after(:example, :js) do |example|
# when a test fails, display any messages in the browser's console
# but fail don't add the message if the failure is a pending test that got
diff --git a/spec/support/helpers/filtered_search_helpers.rb b/spec/support/helpers/filtered_search_helpers.rb
index c8b7a9251a9..99a5e043825 100644
--- a/spec/support/helpers/filtered_search_helpers.rb
+++ b/spec/support/helpers/filtered_search_helpers.rb
@@ -26,7 +26,7 @@ module FilteredSearchHelpers
# Select a label clicking in the search dropdown instead
# of entering label names on the input.
def select_label_on_dropdown(label_title)
- input_filtered_search("label=", submit: false)
+ input_filtered_search("label:=", submit: false)
within('#js-dropdown-label') do
wait_for_requests
@@ -71,7 +71,7 @@ module FilteredSearchHelpers
end
def init_label_search
- filtered_search.set('label=')
+ filtered_search.set('label:=')
# This ensures the dropdown is shown
expect(find('#js-dropdown-label')).not_to have_css('.filter-dropdown-loading')
end
diff --git a/spec/support/helpers/graphql_helpers.rb b/spec/support/helpers/graphql_helpers.rb
index 35b1b802f35..370162b45f0 100644
--- a/spec/support/helpers/graphql_helpers.rb
+++ b/spec/support/helpers/graphql_helpers.rb
@@ -12,8 +12,8 @@ module GraphqlHelpers
end
# Run a loader's named resolver
- def resolve(resolver_class, obj: nil, args: {}, ctx: {})
- resolver_class.new(object: obj, context: ctx).resolve(args)
+ def resolve(resolver_class, obj: nil, args: {}, ctx: {}, field: nil)
+ resolver_class.new(object: obj, context: ctx, field: field).resolve(args)
end
# Eagerly run a loader's named resolver
diff --git a/spec/support/helpers/idempotent_worker_helper.rb b/spec/support/helpers/idempotent_worker_helper.rb
new file mode 100644
index 00000000000..b80758d10bd
--- /dev/null
+++ b/spec/support/helpers/idempotent_worker_helper.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module IdempotentWorkerHelper
+ WORKER_EXEC_TIMES = 2
+
+ def perform_multiple(args = [], worker: described_class.new, exec_times: WORKER_EXEC_TIMES)
+ Sidekiq::Testing.inline! do
+ job_args = args.nil? ? [nil] : Array.wrap(args)
+
+ expect(worker).to receive(:perform).exactly(exec_times).and_call_original
+
+ exec_times.times { worker.perform(*job_args) }
+ end
+ end
+end
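For reference, a minimal sketch of how a worker spec might exercise this helper once it is included for type: :worker; the worker class and factory below are placeholders rather than part of this change:

RSpec.describe ExpirePipelineCacheWorker, type: :worker do # hypothetical example worker
  describe '#perform' do
    let(:pipeline) { create(:ci_pipeline) }

    it 'can safely run more than once with the same arguments' do
      # perform_multiple runs the job WORKER_EXEC_TIMES (2) times via Sidekiq::Testing.inline!
      perform_multiple([pipeline.id])
    end
  end
end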
diff --git a/spec/support/helpers/kubernetes_helpers.rb b/spec/support/helpers/kubernetes_helpers.rb
index 427948bda96..ca910e47695 100644
--- a/spec/support/helpers/kubernetes_helpers.rb
+++ b/spec/support/helpers/kubernetes_helpers.rb
@@ -489,7 +489,7 @@ module KubernetesHelpers
# This is a partial response, it will have many more elements in reality but
# these are the ones we care about at the moment
- def kube_pod(name: "kube-pod", environment_slug: "production", namespace: "project-namespace", project_slug: "project-path-slug", status: "Running", track: nil)
+ def kube_pod(name: "kube-pod", container_name: "container-0", environment_slug: "production", namespace: "project-namespace", project_slug: "project-path-slug", status: "Running", track: nil)
{
"metadata" => {
"name" => name,
@@ -506,8 +506,8 @@ module KubernetesHelpers
},
"spec" => {
"containers" => [
- { "name" => "container-0" },
- { "name" => "container-1" }
+ { "name" => "#{container_name}" },
+ { "name" => "#{container_name}-1" }
]
},
"status" => { "phase" => status }
diff --git a/spec/support/helpers/ldap_helpers.rb b/spec/support/helpers/ldap_helpers.rb
index 0549c56c753..8154e3a4fc9 100644
--- a/spec/support/helpers/ldap_helpers.rb
+++ b/spec/support/helpers/ldap_helpers.rb
@@ -2,14 +2,14 @@
module LdapHelpers
def ldap_adapter(provider = 'ldapmain', ldap = double(:ldap))
- ::Gitlab::Auth::LDAP::Adapter.new(provider, ldap)
+ ::Gitlab::Auth::Ldap::Adapter.new(provider, ldap)
end
def user_dn(uid)
"uid=#{uid},ou=users,dc=example,dc=com"
end
- # Accepts a hash of Gitlab::Auth::LDAP::Config keys and values.
+ # Accepts a hash of Gitlab::Auth::Ldap::Config keys and values.
#
# Example:
# stub_ldap_config(
@@ -17,7 +17,7 @@ module LdapHelpers
# admin_group: 'my-admin-group'
# )
def stub_ldap_config(messages)
- allow_any_instance_of(::Gitlab::Auth::LDAP::Config).to receive_messages(messages)
+ allow_any_instance_of(::Gitlab::Auth::Ldap::Config).to receive_messages(messages)
end
def stub_ldap_setting(messages)
@@ -28,29 +28,29 @@ module LdapHelpers
# `entry` to simulate when an LDAP person is not found
#
# Example:
- # adapter = ::Gitlab::Auth::LDAP::Adapter.new('ldapmain', double(:ldap))
+ # adapter = ::Gitlab::Auth::Ldap::Adapter.new('ldapmain', double(:ldap))
# ldap_user_entry = ldap_user_entry('john_doe')
#
# stub_ldap_person_find_by_uid('john_doe', ldap_user_entry, adapter)
def stub_ldap_person_find_by_uid(uid, entry, provider = 'ldapmain')
- return_value = ::Gitlab::Auth::LDAP::Person.new(entry, provider) if entry.present?
+ return_value = ::Gitlab::Auth::Ldap::Person.new(entry, provider) if entry.present?
- allow(::Gitlab::Auth::LDAP::Person)
+ allow(::Gitlab::Auth::Ldap::Person)
.to receive(:find_by_uid).with(uid, any_args).and_return(return_value)
end
def stub_ldap_person_find_by_dn(entry, provider = 'ldapmain')
- person = ::Gitlab::Auth::LDAP::Person.new(entry, provider) if entry.present?
+ person = ::Gitlab::Auth::Ldap::Person.new(entry, provider) if entry.present?
- allow(::Gitlab::Auth::LDAP::Person)
+ allow(::Gitlab::Auth::Ldap::Person)
.to receive(:find_by_dn)
.and_return(person)
end
def stub_ldap_person_find_by_email(email, entry, provider = 'ldapmain')
- person = ::Gitlab::Auth::LDAP::Person.new(entry, provider) if entry.present?
+ person = ::Gitlab::Auth::Ldap::Person.new(entry, provider) if entry.present?
- allow(::Gitlab::Auth::LDAP::Person)
+ allow(::Gitlab::Auth::Ldap::Person)
.to receive(:find_by_email)
.with(email, anything)
.and_return(person)
@@ -66,8 +66,8 @@ module LdapHelpers
end
def raise_ldap_connection_error
- allow_any_instance_of(Gitlab::Auth::LDAP::Adapter)
- .to receive(:ldap_search).and_raise(Gitlab::Auth::LDAP::LDAPConnectionError)
+ allow_any_instance_of(Gitlab::Auth::Ldap::Adapter)
+ .to receive(:ldap_search).and_raise(Gitlab::Auth::Ldap::LdapConnectionError)
end
end
diff --git a/spec/support/helpers/login_helpers.rb b/spec/support/helpers/login_helpers.rb
index 1d42f26ad3e..6a4dcfcdb1e 100644
--- a/spec/support/helpers/login_helpers.rb
+++ b/spec/support/helpers/login_helpers.rb
@@ -51,7 +51,7 @@ module LoginHelpers
def gitlab_enable_admin_mode_sign_in(user)
visit new_admin_session_path
- fill_in 'password', with: user.password
+ fill_in 'user_password', with: user.password
click_button 'Enter Admin Mode'
end
@@ -62,6 +62,12 @@ module LoginHelpers
click_link provider
end
+ def gitlab_enable_admin_mode_sign_in_via(provider, user, uid, saml_response = nil)
+ mock_auth_hash_with_saml_xml(provider, uid, user.email, saml_response)
+ visit new_admin_session_path
+ click_link provider
+ end
+
# Requires Javascript driver.
def gitlab_sign_out
find(".header-user-dropdown-toggle").click
@@ -71,6 +77,11 @@ module LoginHelpers
expect(page).to have_button('Sign in')
end
+ # Requires Javascript driver.
+ def gitlab_disable_admin_mode
+ click_on 'Leave Admin Mode'
+ end
+
private
# Private: Login as the specified user
diff --git a/spec/support/helpers/metrics_dashboard_url_helpers.rb b/spec/support/helpers/metrics_dashboard_url_helpers.rb
new file mode 100644
index 00000000000..cb9f58753a3
--- /dev/null
+++ b/spec/support/helpers/metrics_dashboard_url_helpers.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+module MetricsDashboardUrlHelpers
+ # The url_helpers available in the test suite use the sample host,
+ # but the urls generated may need to point to the configured host
+ # in the :js trait
+ def urls
+ ::Gitlab::Routing.url_helpers
+ end
+
+ def clear_host_from_memoized_variables
+ [:metrics_regex, :grafana_regex, :clusters_regex, :alert_regex].each do |method_name|
+ Gitlab::Metrics::Dashboard::Url.clear_memoization(method_name)
+ end
+ end
+end
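A rough usage sketch, assuming a :js feature spec that overrides the configured host; the stubbed host value is illustrative only:

RSpec.describe 'Metrics dashboard links', :js do
  include MetricsDashboardUrlHelpers

  before do
    # Drop any host memoized from a previous example before stubbing a new one
    clear_host_from_memoized_variables
    stub_config_setting(host: 'gitlab.example.test') # hypothetical host override
  end

  after do
    clear_host_from_memoized_variables
  end
end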
diff --git a/spec/support/helpers/notification_helpers.rb b/spec/support/helpers/notification_helpers.rb
index aee76b8be4a..b3e0e7d811b 100644
--- a/spec/support/helpers/notification_helpers.rb
+++ b/spec/support/helpers/notification_helpers.rb
@@ -57,7 +57,7 @@ module NotificationHelpers
expect(ActionMailer::DeliveryJob).to have_been_enqueued.with(mailer, mail, delivery, *args)
end
- def expect_not_enqueud_email(*args, mailer: "Notify", mail: "", delivery: "deliver_now")
+ def expect_not_enqueud_email(*args, mailer: "Notify", mail: "")
expect(ActionMailer::DeliveryJob).not_to have_been_enqueued.with(mailer, mail, *args, any_args)
end
end
diff --git a/spec/support/helpers/project_forks_helper.rb b/spec/support/helpers/project_forks_helper.rb
index 90d0d1845fc..a32e39e52c8 100644
--- a/spec/support/helpers/project_forks_helper.rb
+++ b/spec/support/helpers/project_forks_helper.rb
@@ -59,7 +59,7 @@ module ProjectForksHelper
bare_repo: TestEnv.forked_repo_path_bare,
refs: TestEnv::FORKED_BRANCH_SHA
)
- forked_project.repository.after_import
+ forked_project.repository.expire_content_cache
forked_project
end
diff --git a/spec/support/helpers/test_env.rb b/spec/support/helpers/test_env.rb
index bd945fe6409..66c2faac2dd 100644
--- a/spec/support/helpers/test_env.rb
+++ b/spec/support/helpers/test_env.rb
@@ -61,6 +61,11 @@ module TestEnv
'merge-commit-analyze-before' => '1adbdef',
'merge-commit-analyze-side-branch' => '8a99451',
'merge-commit-analyze-after' => '646ece5',
+ 'snippet/single-file' => '43e4080',
+ 'snippet/multiple-files' => 'b80faa8',
+ 'snippet/rename-and-edit-file' => '220a1e4',
+ 'snippet/edit-file' => 'c2f074f',
+ 'snippet/no-files' => '671aaa8',
'2-mb-file' => 'bf12d25',
'before-create-delete-modify-move' => '845009f',
'between-create-delete-modify-move' => '3f5f443',
@@ -84,6 +89,7 @@ module TestEnv
TMP_TEST_PATH = Rails.root.join('tmp', 'tests', '**')
REPOS_STORAGE = 'default'.freeze
+ SECOND_STORAGE_PATH = Rails.root.join('tmp', 'tests', 'second_storage')
# Test environment
#
@@ -104,6 +110,9 @@ module TestEnv
setup_gitaly
+ # Feature specs are run through Workhorse
+ setup_workhorse
+
# Create repository for FactoryBot.create(:project)
setup_factory_repo
@@ -138,6 +147,7 @@ module TestEnv
end
FileUtils.mkdir_p(repos_path)
+ FileUtils.mkdir_p(SECOND_STORAGE_PATH)
FileUtils.mkdir_p(backup_path)
FileUtils.mkdir_p(pages_path)
FileUtils.mkdir_p(artifacts_path)
@@ -173,8 +183,6 @@ module TestEnv
return
end
- FileUtils.mkdir_p("tmp/tests/second_storage") unless File.exist?("tmp/tests/second_storage")
-
spawn_script = Rails.root.join('scripts/gitaly-test-spawn').to_s
Bundler.with_original_env do
unless system(spawn_script)
@@ -218,6 +226,52 @@ module TestEnv
ENV.fetch('GITALY_REPO_URL', nil)
end
+ def setup_workhorse
+ install_workhorse_args = [workhorse_dir, workhorse_url].compact.join(',')
+
+ component_timed_setup(
+ 'GitLab Workhorse',
+ install_dir: workhorse_dir,
+ version: Gitlab::Workhorse.version,
+ task: "gitlab:workhorse:install[#{install_workhorse_args}]"
+ )
+ end
+
+ def workhorse_dir
+ @workhorse_path ||= File.join('tmp', 'tests', 'gitlab-workhorse')
+ end
+
+ def with_workhorse(workhorse_dir, host, port, upstream, &blk)
+ host = "[#{host}]" if host.include?(':')
+ listen_addr = [host, port].join(':')
+
+ workhorse_pid = spawn(
+ File.join(workhorse_dir, 'gitlab-workhorse'),
+ '-authSocket', upstream,
+ '-documentRoot', Rails.root.join('public').to_s,
+ '-listenAddr', listen_addr,
+ '-secretPath', Gitlab::Workhorse.secret_path.to_s,
+ # TODO: Needed for workhorse + redis features.
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/209245
+ #
+ # '-config', '',
+ '-logFile', 'log/workhorse-test.log',
+ '-logFormat', 'structured',
+ '-developmentMode' # to serve assets and rich error messages
+ )
+
+ begin
+ yield
+ ensure
+ Process.kill('TERM', workhorse_pid)
+ Process.wait(workhorse_pid)
+ end
+ end
+
+ def workhorse_url
+ ENV.fetch('GITLAB_WORKHORSE_URL', nil)
+ end
+
def setup_factory_repo
setup_repo(factory_repo_path, factory_repo_path_bare, factory_repo_name,
BRANCH_SHA)
@@ -347,6 +401,8 @@ module TestEnv
gitlab-test_bare
gitlab-test-fork
gitlab-test-fork_bare
+ gitlab-workhorse
+ gitlab_workhorse_secret
]
end
diff --git a/spec/support/helpers/wait_for_requests.rb b/spec/support/helpers/wait_for_requests.rb
index d5483d0b0a7..52d1c59ab03 100644
--- a/spec/support/helpers/wait_for_requests.rb
+++ b/spec/support/helpers/wait_for_requests.rb
@@ -34,14 +34,14 @@ module WaitForRequests
# Wait for active Rack requests and client-side AJAX requests
def wait_for_all_requests
wait_for('pending requests complete') do
- finished_all_rack_reqiests? &&
+ finished_all_rack_requests? &&
finished_all_js_requests?
end
end
private
- def finished_all_rack_reqiests?
+ def finished_all_rack_requests?
Gitlab::Testing::RequestBlockerMiddleware.num_active_requests.zero?
end
diff --git a/spec/support/helpers/wiki_helpers.rb b/spec/support/helpers/wiki_helpers.rb
index 06cea728b42..86eb1793707 100644
--- a/spec/support/helpers/wiki_helpers.rb
+++ b/spec/support/helpers/wiki_helpers.rb
@@ -3,6 +3,11 @@
module WikiHelpers
extend self
+ def wait_for_svg_to_be_loaded(example = nil)
+ # Ensure the SVG is loaded first before clicking the button
+ find('.svg-content .js-lazy-loaded') if example.nil? || example.metadata.key?(:js)
+ end
+
def upload_file_to_wiki(project, user, file_name)
opts = {
file_name: file_name,
diff --git a/spec/support/helpers/workhorse_helpers.rb b/spec/support/helpers/workhorse_helpers.rb
index e0fba191deb..27d5083728d 100644
--- a/spec/support/helpers/workhorse_helpers.rb
+++ b/spec/support/helpers/workhorse_helpers.rb
@@ -32,12 +32,12 @@ module WorkhorseHelpers
# workhorse_finalize will transform file_key inside params as if it was the finalize call of an inline object storage upload.
# note that based on the content of the params it can simulate a disc acceleration or an object storage upload
- def workhorse_finalize(url, method: :post, file_key:, params:, headers: {})
+ def workhorse_finalize(url, method: :post, file_key:, params:, headers: {}, send_rewritten_field: false)
workhorse_request_with_file(method, url,
file_key: file_key,
params: params,
extra_headers: headers,
- send_rewritten_field: false
+ send_rewritten_field: send_rewritten_field
)
end
diff --git a/spec/support/import_export/common_util.rb b/spec/support/import_export/common_util.rb
index 912a8e0a2ab..9281937e4ba 100644
--- a/spec/support/import_export/common_util.rb
+++ b/spec/support/import_export/common_util.rb
@@ -34,13 +34,13 @@ module ImportExport
end
def get_project_restorer(project, import_path)
- Gitlab::ImportExport::ProjectTreeRestorer.new(
+ Gitlab::ImportExport::Project::TreeRestorer.new(
user: project.creator, shared: get_shared_env(path: import_path), project: project
)
end
def get_project_saver(project, export_path)
- Gitlab::ImportExport::ProjectTreeSaver.new(
+ Gitlab::ImportExport::Project::TreeSaver.new(
project: project, current_user: project.creator, shared: get_shared_env(path: export_path)
)
end
diff --git a/spec/support/import_export/configuration_helper.rb b/spec/support/import_export/configuration_helper.rb
index 27819b5201a..4fe619225bb 100644
--- a/spec/support/import_export/configuration_helper.rb
+++ b/spec/support/import_export/configuration_helper.rb
@@ -36,8 +36,8 @@ module ConfigurationHelper
end
def relation_class_for_name(relation_name)
- relation_name = Gitlab::ImportExport::ProjectRelationFactory.overrides[relation_name.to_sym] || relation_name
- Gitlab::ImportExport::ProjectRelationFactory.relation_class(relation_name)
+ relation_name = Gitlab::ImportExport::Project::RelationFactory.overrides[relation_name.to_sym] || relation_name
+ Gitlab::ImportExport::Project::RelationFactory.relation_class(relation_name)
end
def parsed_attributes(relation_name, attributes, config: Gitlab::ImportExport.config_file)
diff --git a/spec/support/matchers/exceed_query_limit.rb b/spec/support/matchers/exceed_query_limit.rb
index 40cf85eb8e5..f38ae44a577 100644
--- a/spec/support/matchers/exceed_query_limit.rb
+++ b/spec/support/matchers/exceed_query_limit.rb
@@ -1,6 +1,8 @@
# frozen_string_literal: true
module ExceedQueryLimitHelpers
+ MARGINALIA_ANNOTATION_REGEX = %r{\s*\/\*.*\*\/}.freeze
+
def with_threshold(threshold)
@threshold = threshold
self
@@ -41,8 +43,8 @@ module ExceedQueryLimitHelpers
def log_message
if expected.is_a?(ActiveRecord::QueryRecorder)
- counts = count_queries(expected.log)
- extra_queries = @recorder.log.reject { |query| counts[query] -= 1 unless counts[query].zero? }
+ counts = count_queries(strip_marginalia_annotations(expected.log))
+ extra_queries = strip_marginalia_annotations(@recorder.log).reject { |query| counts[query] -= 1 unless counts[query].zero? }
extra_queries_display = count_queries(extra_queries).map { |query, count| "[#{count}] #{query}" }
(['Extra queries:'] + extra_queries_display).join("\n\n")
@@ -65,6 +67,10 @@ module ExceedQueryLimitHelpers
counts = "#{expected_count}#{threshold_message}"
"Expected a maximum of #{counts} queries, got #{actual_count}:\n\n#{log_message}"
end
+
+ def strip_marginalia_annotations(logs)
+ logs.map { |log| log.sub(MARGINALIA_ANNOTATION_REGEX, '') }
+ end
end
RSpec::Matchers.define :exceed_all_query_limit do |expected|
diff --git a/spec/support/services/deploy_token_shared_examples.rb b/spec/support/services/deploy_token_shared_examples.rb
new file mode 100644
index 00000000000..b49f4743f7d
--- /dev/null
+++ b/spec/support/services/deploy_token_shared_examples.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'a deploy token creation service' do
+ let(:user) { create(:user) }
+ let(:deploy_token_params) { attributes_for(:deploy_token) }
+
+ describe '#execute' do
+ subject { described_class.new(entity, user, deploy_token_params).execute }
+
+ context 'when the deploy token is valid' do
+ it 'creates a new DeployToken' do
+ expect { subject }.to change { DeployToken.count }.by(1)
+ end
+
+ it 'creates a new ProjectDeployToken' do
+ expect { subject }.to change { deploy_token_class.count }.by(1)
+ end
+
+ it 'returns a DeployToken' do
+ expect(subject).to be_an_instance_of DeployToken
+ end
+ end
+
+ context 'when expires at date is not passed' do
+ let(:deploy_token_params) { attributes_for(:deploy_token, expires_at: '') }
+
+ it 'sets Forever.date' do
+ expect(subject.read_attribute(:expires_at)).to eq(Forever.date)
+ end
+ end
+
+ context 'when username is empty string' do
+ let(:deploy_token_params) { attributes_for(:deploy_token, username: '') }
+
+ it 'converts it to nil' do
+ expect(subject.read_attribute(:username)).to be_nil
+ end
+ end
+
+ context 'when username is provided' do
+ let(:deploy_token_params) { attributes_for(:deploy_token, username: 'deployer') }
+
+ it 'keeps the provided username' do
+ expect(subject.read_attribute(:username)).to eq('deployer')
+ end
+ end
+
+ context 'when the deploy token is invalid' do
+ let(:deploy_token_params) { attributes_for(:deploy_token, read_repository: false, read_registry: false) }
+
+ it 'does not create a new DeployToken' do
+ expect { subject }.not_to change { DeployToken.count }
+ end
+
+ it 'does not create a new ProjectDeployToken' do
+ expect { subject }.not_to change { deploy_token_class.count }
+ end
+ end
+ end
+end
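As an illustration, a service spec could hook into these shared examples roughly as below; the service class, entity, and token class names are assumptions, not taken from this patch:

RSpec.describe Projects::DeployTokens::CreateService do # hypothetical service spec
  it_behaves_like 'a deploy token creation service' do
    let(:entity) { create(:project) }
    let(:deploy_token_class) { ProjectDeployToken }
  end
end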
diff --git a/spec/support/shared_contexts/controllers/ldap_omniauth_callbacks_controller_shared_context.rb b/spec/support/shared_contexts/controllers/ldap_omniauth_callbacks_controller_shared_context.rb
index 4426d3af908..8635c9a8ff9 100644
--- a/spec/support/shared_contexts/controllers/ldap_omniauth_callbacks_controller_shared_context.rb
+++ b/spec/support/shared_contexts/controllers/ldap_omniauth_callbacks_controller_shared_context.rb
@@ -30,7 +30,7 @@ RSpec.shared_context 'Ldap::OmniauthCallbacksController' do
@original_env_config_omniauth_auth = mock_auth_hash(provider.to_s, uid, user.email)
stub_omniauth_provider(provider, context: request)
- allow(Gitlab::Auth::LDAP::Access).to receive(:allowed?).and_return(valid_login?)
+ allow(Gitlab::Auth::Ldap::Access).to receive(:allowed?).and_return(valid_login?)
end
after do
diff --git a/spec/support/shared_contexts/requests/api/graphql/group_and_project_boards_query_shared_context.rb b/spec/support/shared_contexts/requests/api/graphql/group_and_project_boards_query_shared_context.rb
new file mode 100644
index 00000000000..ca77c68c130
--- /dev/null
+++ b/spec/support/shared_contexts/requests/api/graphql/group_and_project_boards_query_shared_context.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'group and project boards query context' do
+ let_it_be(:user) { create :user }
+ let(:current_user) { user }
+ let(:params) { '' }
+ let(:board_parent_type) { board_parent.class.to_s.downcase }
+ let(:boards_data) { graphql_data[board_parent_type]['boards']['edges'] }
+ let(:board_data) { graphql_data[board_parent_type]['board'] }
+ let(:start_cursor) { graphql_data[board_parent_type]['boards']['pageInfo']['startCursor'] }
+ let(:end_cursor) { graphql_data[board_parent_type]['boards']['pageInfo']['endCursor'] }
+
+ def query(board_params = params)
+ graphql_query_for(
+ board_parent_type,
+ { 'fullPath' => board_parent.full_path },
+ <<~BOARDS
+ boards(#{board_params}) {
+ pageInfo {
+ startCursor
+ endCursor
+ }
+ edges {
+ node {
+ #{all_graphql_fields_for('boards'.classify)}
+ }
+ }
+ }
+ BOARDS
+ )
+ end
+
+ def query_single_board(board_params = params)
+ graphql_query_for(
+ board_parent_type,
+ { 'fullPath' => board_parent.full_path },
+ <<~BOARD
+ board(#{board_params}) {
+ #{all_graphql_fields_for('board'.classify)}
+ }
+ BOARD
+ )
+ end
+
+ def grab_names(data = boards_data)
+ data.map do |board|
+ board.dig('node', 'name')
+ end
+ end
+end
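A hypothetical request spec consuming this shared context might look like the following; the GraphQL request helpers used (post_graphql and friends) are assumed to be available via GraphqlHelpers:

RSpec.describe 'group boards query' do # illustrative request spec
  include GraphqlHelpers
  include_context 'group and project boards query context'

  let(:board_parent) { create(:group) }

  it 'returns the board names' do
    post_graphql(query, current_user: current_user)

    expect(grab_names).to all(be_a(String))
  end
end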
diff --git a/spec/support/shared_contexts/upload_type_check_shared_context.rb b/spec/support/shared_contexts/upload_type_check_shared_context.rb
index d29c498fd15..f168cad961c 100644
--- a/spec/support/shared_contexts/upload_type_check_shared_context.rb
+++ b/spec/support/shared_contexts/upload_type_check_shared_context.rb
@@ -2,32 +2,18 @@
# Construct an `uploader` variable that is configured to `check_upload_type`
# with `mime_types` and `extensions`.
-RSpec.shared_context 'uploader with type check' do
- let(:uploader_class) do
- Class.new(GitlabUploader) do
- include UploadTypeCheck::Concern
- storage :file
- end
- end
-
- let(:mime_types) { nil }
- let(:extensions) { nil }
- let(:uploader) do
- uploader_class.class_exec(mime_types, extensions) do |mime_types, extensions|
- check_upload_type mime_types: mime_types, extensions: extensions
- end
- uploader_class.new(build_stubbed(:user))
+# @param uploader [CarrierWave::Uploader::Base] uploader with extension_whitelist method.
+RSpec.shared_context 'ignore extension whitelist check' do
+ before do
+ allow(uploader).to receive(:extension_whitelist).and_return(nil)
end
end
-RSpec.shared_context 'stubbed MimeMagic mime type detection' do
- let(:mime_type) { '' }
- let(:magic_mime) { mime_type }
- let(:ext_mime) { mime_type }
+# This works with a content_type_whitelist and content_type_blacklist type check.
+# @param mime_type [String] mime type to forcibly detect.
+RSpec.shared_context 'force content type detection to mime_type' do
before do
- magic_mime_obj = MimeMagic.new(magic_mime)
- ext_mime_obj = MimeMagic.new(ext_mime)
+ magic_mime_obj = MimeMagic.new(mime_type)
allow(MimeMagic).to receive(:by_magic).with(anything).and_return(magic_mime_obj)
- allow(MimeMagic).to receive(:by_path).with(anything).and_return(ext_mime_obj)
end
end
diff --git a/spec/support/shared_examples/banzai/filters/inline_embeds_shared_examples.rb b/spec/support/shared_examples/banzai/filters/inline_embeds_shared_examples.rb
new file mode 100644
index 00000000000..599161abbfe
--- /dev/null
+++ b/spec/support/shared_examples/banzai/filters/inline_embeds_shared_examples.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+# Expects 2 attributes to be defined:
+# trigger_url - Url expected to trigger the insertion of a placeholder.
+# dashboard_url - Url expected to be present in the placeholder.
+RSpec.shared_examples 'a metrics embed filter' do
+ let(:input) { %(<a href="#{url}">example</a>) }
+ let(:doc) { filter(input) }
+
+ context 'when the document has an external link' do
+ let(:url) { 'https://foo.com' }
+
+ it 'leaves regular non-metrics links unchanged' do
+ expect(doc.to_s).to eq(input)
+ end
+ end
+
+ context 'when the document contains an embeddable link' do
+ let(:url) { trigger_url }
+
+ it 'leaves the original link unchanged' do
+ expect(unescape(doc.at_css('a').to_s)).to eq(input)
+ end
+
+ it 'appends a metrics charts placeholder' do
+ node = doc.at_css('.js-render-metrics')
+ expect(node).to be_present
+
+ expect(node.attribute('data-dashboard-url').to_s).to eq(dashboard_url)
+ end
+
+ context 'in a paragraph' do
+ let(:paragraph) { %(This is an <a href="#{url}">example</a> of metrics.) }
+ let(:input) { %(<p>#{paragraph}</p>) }
+
+ it 'appends a metrics charts placeholder after the enclosing paragraph' do
+ expect(unescape(doc.at_css('p').to_s)).to include(paragraph)
+ expect(doc.at_css('.js-render-metrics')).to be_present
+ end
+ end
+ end
+
+ # Nokogiri escapes the URLs, but we don't care about that
+ # distinction for the purposes of these filters
+ def unescape(html)
+ CGI.unescapeHTML(html)
+ end
+end
diff --git a/spec/support/shared_examples/banzai/filters/inline_metrics_redactor_shared_examples.rb b/spec/support/shared_examples/banzai/filters/inline_metrics_redactor_shared_examples.rb
new file mode 100644
index 00000000000..07abb86ceb5
--- /dev/null
+++ b/spec/support/shared_examples/banzai/filters/inline_metrics_redactor_shared_examples.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'redacts the embed placeholder' do
+ context 'no user is logged in' do
+ it 'redacts the placeholder' do
+ expect(doc.to_s).to be_empty
+ end
+ end
+
+ context 'the user does not have permission to see charts' do
+ let(:doc) { filter(input, current_user: build(:user)) }
+
+ it 'redacts the placeholder' do
+ expect(doc.to_s).to be_empty
+ end
+ end
+end
+
+RSpec.shared_examples 'retains the embed placeholder when applicable' do
+ context 'the user has requisite permissions' do
+ let(:user) { create(:user) }
+ let(:doc) { filter(input, current_user: user) }
+
+ it 'leaves the placeholder' do
+ project.add_maintainer(user)
+
+ expect(CGI.unescapeHTML(doc.to_s)).to eq(input)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/controllers/deploy_token_shared_examples.rb b/spec/support/shared_examples/controllers/deploy_token_shared_examples.rb
new file mode 100644
index 00000000000..791eb0b68e0
--- /dev/null
+++ b/spec/support/shared_examples/controllers/deploy_token_shared_examples.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'a created deploy token' do
+ let(:deploy_token_params) do
+ {
+ name: 'deployer_token',
+ expires_at: 1.month.from_now.to_date.to_s,
+ username: 'deployer',
+ read_repository: '1',
+ deploy_token_type: deploy_token_type
+ }
+ end
+
+ subject(:create_deploy_token) { post :create_deploy_token, params: create_entity_params.merge({ deploy_token: deploy_token_params }) }
+
+ it 'creates deploy token' do
+ expect { create_deploy_token }.to change { DeployToken.active.count }.by(1)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:show)
+ end
+end
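A controller spec would provide the remaining lets roughly as follows; the controller class and params shown here are assumptions for illustration:

RSpec.describe Projects::Settings::RepositoryController do # hypothetical controller spec
  let(:project) { create(:project, :repository) }

  before do
    sign_in(project.owner)
  end

  it_behaves_like 'a created deploy token' do
    let(:create_entity_params) { { namespace_id: project.namespace, project_id: project } }
    let(:deploy_token_type) { DeployToken.deploy_token_types[:project_type] }
  end
end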
diff --git a/spec/support/shared_examples/csp.rb b/spec/support/shared_examples/csp.rb
new file mode 100644
index 00000000000..c4a8c7df898
--- /dev/null
+++ b/spec/support/shared_examples/csp.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'setting CSP' do |rule_name|
+ let_it_be(:default_csp_values) { "'self' https://some-cdn.test" }
+
+ shared_context 'csp config' do |csp_rule|
+ before do
+ csp = ActionDispatch::ContentSecurityPolicy.new do |p|
+ p.send(csp_rule, default_csp_values) if csp_rule
+ end
+
+ expect_next_instance_of(extended_controller_class) do |controller|
+ expect(controller).to receive(:current_content_security_policy).at_least(:once).and_return(csp)
+ end
+ end
+ end
+
+ context 'when no CSP config' do
+ include_context 'csp config', nil
+
+ it 'does not add CSP directives' do
+ is_expected.to be_blank
+ end
+ end
+
+ describe "when a CSP config exists for #{rule_name}" do
+ include_context 'csp config', rule_name.parameterize.underscore.to_sym
+
+ context 'when feature is enabled' do
+ it "appends to #{rule_name}" do
+ is_expected.to eql("#{rule_name} #{default_csp_values} #{whitelisted_url}")
+ end
+ end
+
+ context 'when feature is disabled' do
+ include_context 'disable feature'
+
+ it "keeps original #{rule_name}" do
+ is_expected.to eql("#{rule_name} #{default_csp_values}")
+ end
+ end
+ end
+
+ describe "when a CSP config exists for default-src but not #{rule_name}" do
+ include_context 'csp config', :default_src
+
+ context 'when feature is enabled' do
+ it "uses default-src values in #{rule_name}" do
+ is_expected.to eql("default-src #{default_csp_values}; #{rule_name} #{default_csp_values} #{whitelisted_url}")
+ end
+ end
+
+ context 'when feature is disabled' do
+ include_context 'disable feature'
+
+ it "does not add #{rule_name}" do
+ is_expected.to eql("default-src #{default_csp_values}")
+ end
+ end
+ end
+
+ describe "when a CSP config exists for font-src but not #{rule_name}" do
+ include_context 'csp config', :font_src
+
+ context 'when feature is enabled' do
+ it "uses default-src values in #{rule_name}" do
+ is_expected.to eql("font-src #{default_csp_values}; #{rule_name} #{whitelisted_url}")
+ end
+ end
+
+ context 'when feature is disabled' do
+ include_context 'disable feature'
+
+ it "does not add #{rule_name}" do
+ is_expected.to eql("font-src #{default_csp_values}")
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/features/deploy_token_shared_examples.rb b/spec/support/shared_examples/features/deploy_token_shared_examples.rb
new file mode 100644
index 00000000000..f358615ee9e
--- /dev/null
+++ b/spec/support/shared_examples/features/deploy_token_shared_examples.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'a deploy token in ci/cd settings' do
+ it 'views deploy tokens' do
+ within('.deploy-tokens') do
+ expect(page).to have_content(deploy_token.name)
+ expect(page).to have_content('read_repository')
+ expect(page).to have_content('read_registry')
+ end
+ end
+
+ it 'adds a new deploy token' do
+ fill_in 'deploy_token_name', with: 'new_deploy_key'
+ fill_in 'deploy_token_expires_at', with: (Date.today + 1.month).to_s
+ fill_in 'deploy_token_username', with: 'deployer'
+ check 'deploy_token_read_repository'
+ check 'deploy_token_read_registry'
+ click_button 'Create deploy token'
+
+ expect(page).to have_content("Your new #{entity_type} deploy token has been created")
+
+ within('.created-deploy-token-container') do
+ expect(page).to have_selector("input[name='deploy-token-user'][value='deployer']")
+ expect(page).to have_selector("input[name='deploy-token'][readonly='readonly']")
+ end
+ end
+end
diff --git a/spec/support/shared_examples/features/error_tracking_shared_example.rb b/spec/support/shared_examples/features/error_tracking_shared_example.rb
index edc1f42f646..922d2627bce 100644
--- a/spec/support/shared_examples/features/error_tracking_shared_example.rb
+++ b/spec/support/shared_examples/features/error_tracking_shared_example.rb
@@ -50,17 +50,21 @@ end
shared_examples 'error tracking show page' do
it 'renders the error details' do
+ content = page.find(".content")
+ nav = page.find("nav.breadcrumbs")
+ header = page.find(".error-details-header")
+
release_short_version = issue_response['firstRelease']['shortVersion']
- expect(page).to have_content('1 month ago by raven.scripts.runner in main')
- expect(page).to have_content(issue_response['metadata']['title'])
- expect(page).to have_content('level: error')
- expect(page).to have_content('Error Details')
- expect(page).to have_content('GitLab Issue: https://gitlab.com/gitlab-org/gitlab/issues/1')
- expect(page).to have_content("Sentry event: https://sentrytest.gitlab.com/sentry-org/sentry-project/issues/#{issue_id}")
- expect(page).to have_content("First seen: 1 year ago (#{formatted_issue_seen}) Release: #{release_short_version}")
- expect(page).to have_content('Events: 1')
- expect(page).to have_content('Users: 0')
+ expect(header).to have_content('1 month ago by raven.scripts.runner in main')
+ expect(content).to have_content(issue_response['metadata']['title'])
+ expect(content).to have_content('level: error')
+ expect(nav).to have_content('Error Details')
+ expect(content).to have_content('GitLab Issue: https://gitlab.com/gitlab-org/gitlab/issues/1')
+ expect(content).to have_content("Sentry event: https://sentrytest.gitlab.com/sentry-org/sentry-project/issues/#{issue_id}")
+ expect(content).to have_content("First seen: 1 year ago (#{formatted_issue_seen}) Release: #{release_short_version}")
+ expect(content).to have_content('Events: 1')
+ expect(content).to have_content('Users: 0')
end
it 'renders the stack trace heading' do
diff --git a/spec/support/shared_examples/features/issuables_user_dropdown_behaviors_shared_examples.rb b/spec/support/shared_examples/features/issuables_user_dropdown_behaviors_shared_examples.rb
index e0d9b828992..1848b4fffd9 100644
--- a/spec/support/shared_examples/features/issuables_user_dropdown_behaviors_shared_examples.rb
+++ b/spec/support/shared_examples/features/issuables_user_dropdown_behaviors_shared_examples.rb
@@ -13,7 +13,7 @@ RSpec.shared_examples 'issuable user dropdown behaviors' do
it 'only includes members of the project/group' do
visit issuables_path
- filtered_search.set("#{dropdown}=")
+ filtered_search.set("#{dropdown}:=")
expect(find("#js-dropdown-#{dropdown} .filter-dropdown")).to have_content(user_in_dropdown.name)
expect(find("#js-dropdown-#{dropdown} .filter-dropdown")).not_to have_content(user_not_in_dropdown.name)
diff --git a/spec/support/shared_examples/features/wiki_file_attachments_shared_examples.rb b/spec/support/shared_examples/features/wiki_file_attachments_shared_examples.rb
index 36d91d323b5..d30e8241da0 100644
--- a/spec/support/shared_examples/features/wiki_file_attachments_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki_file_attachments_shared_examples.rb
@@ -20,7 +20,7 @@ RSpec.shared_examples 'wiki file attachments' do
end
end
- context 'uploading is in progress' do
+ context 'uploading is in progress', :capybara_ignore_server_errors do
it 'cancels uploading on clicking to "Cancel" button' do
slow_requests do
attach_with_dropzone
@@ -42,7 +42,7 @@ RSpec.shared_examples 'wiki file attachments' do
end
end
- context 'uploading is complete', :quarantine do
+ context 'uploading is complete' do
it 'shows "Attach a file" button on uploading complete' do
attach_with_dropzone
wait_for_requests
diff --git a/spec/support/shared_examples/lib/gitlab/background_migration/mentions_migration_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/background_migration/mentions_migration_shared_examples.rb
index f90e1a1ebab..a3f0c84bd1f 100644
--- a/spec/support/shared_examples/lib/gitlab/background_migration/mentions_migration_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/background_migration/mentions_migration_shared_examples.rb
@@ -26,16 +26,18 @@ shared_examples 'resource notes mentions migration' do |migration_class, resourc
note1.becomes(Note).save!
note2.becomes(Note).save!
note3.becomes(Note).save!
- # note4.becomes(Note).save(validate: false)
+ note4.becomes(Note).save!
+ note5.becomes(Note).save(validate: false)
end
it 'migrates mentions from note' do
join = migration_class::JOIN
conditions = migration_class::QUERY_CONDITIONS
- # there are 4 notes for each noteable_type, but one does not have mentions and
+ # there are 5 notes for each noteable_type, but two do not have mentions and
# another one's noteable_id points to an inexistent resource
- expect(notes.where(noteable_type: resource_class.to_s).count).to eq 4
+ expect(notes.where(noteable_type: resource_class.to_s).count).to eq 5
+ expect(user_mentions.count).to eq 0
expect do
subject.perform(resource_class.name, join, conditions, true, Note.minimum(:id), Note.maximum(:id))
@@ -63,18 +65,26 @@ shared_examples 'resource notes mentions migration' do |migration_class, resourc
end
shared_examples 'schedules resource mentions migration' do |resource_class, is_for_notes|
+ before do
+ stub_const("#{described_class.name}::BATCH_SIZE", 1)
+ end
+
it 'schedules background migrations' do
Sidekiq::Testing.fake! do
Timecop.freeze do
+ resource_count = is_for_notes ? Note.count : resource_class.count
+ expect(resource_count).to eq 5
+
migrate!
migration = described_class::MIGRATION
join = described_class::JOIN
conditions = described_class::QUERY_CONDITIONS
+ delay = described_class::DELAY
- expect(migration).to be_scheduled_delayed_migration(2.minutes, resource_class.name, join, conditions, is_for_notes, resource1.id, resource1.id)
- expect(migration).to be_scheduled_delayed_migration(4.minutes, resource_class.name, join, conditions, is_for_notes, resource2.id, resource2.id)
- expect(migration).to be_scheduled_delayed_migration(6.minutes, resource_class.name, join, conditions, is_for_notes, resource3.id, resource3.id)
+ expect(migration).to be_scheduled_delayed_migration(1 * delay, resource_class.name, join, conditions, is_for_notes, resource1.id, resource1.id)
+ expect(migration).to be_scheduled_delayed_migration(2 * delay, resource_class.name, join, conditions, is_for_notes, resource2.id, resource2.id)
+ expect(migration).to be_scheduled_delayed_migration(3 * delay, resource_class.name, join, conditions, is_for_notes, resource3.id, resource3.id)
expect(BackgroundMigrationWorker.jobs.size).to eq 3
end
end
diff --git a/spec/support/shared_examples/lib/gitlab/config/inheritable_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/config/inheritable_shared_examples.rb
index 556d81133bc..95772b1774a 100644
--- a/spec/support/shared_examples/lib/gitlab/config/inheritable_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/config/inheritable_shared_examples.rb
@@ -53,7 +53,7 @@ RSpec.shared_examples 'with inheritable CI config' do
let(:deps) do
if inheritable_key
- double('deps', inheritable_key => inheritable, '[]' => unspecified)
+ double('deps', "#{inheritable_key}_entry" => inheritable, '[]' => unspecified)
else
inheritable
end
@@ -68,7 +68,7 @@ RSpec.shared_examples 'with inheritable CI config' do
it 'does inherit value' do
expect(inheritable).to receive('[]').with(entry_key).and_return(specified)
- entry.compose!(deps)
+ entry.send(:inherit!, deps)
expect(entry[entry_key]).to eq(specified)
end
@@ -86,7 +86,7 @@ RSpec.shared_examples 'with inheritable CI config' do
expect do
# we ignore exceptions as `#overwrite_entry`
# can raise exception on duplicates
- entry.compose!(deps) rescue described_class::InheritError
+ entry.send(:inherit!, deps) rescue described_class::InheritError
end.not_to change { entry[entry_key] }
end
end
@@ -94,7 +94,7 @@ RSpec.shared_examples 'with inheritable CI config' do
context 'when inheritable does not specify' do
it 'does not inherit value' do
- entry.compose!(deps)
+ entry.send(:inherit!, deps)
expect(entry[entry_key]).to be_a(
Gitlab::Config::Entry::Undefined)
diff --git a/spec/support/shared_examples/lib/gitlab/import_export/project_tree_restorer_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/import_export/project_tree_restorer_shared_examples.rb
index 4ef9a9930f7..0e43f686327 100644
--- a/spec/support/shared_examples/lib/gitlab/import_export/project_tree_restorer_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/import_export/project_tree_restorer_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-# Shared examples for ProjectTreeRestorer (shared to allow the testing
+# Shared examples for Project::TreeRestorer (shared to allow the testing
# of EE-specific features)
RSpec.shared_examples 'restores project successfully' do |**results|
it 'restores the project' do
@@ -30,7 +30,16 @@ RSpec.shared_examples 'restores project successfully' do |**results|
expect(project.issues.size).to eq(results.fetch(:issues, 0))
end
- it 'does not set params that are excluded from import_export settings' do
+ it 'has ci pipelines' do
+ expect(project.ci_pipelines.size).to eq(results.fetch(:ci_pipelines, 0))
+ end
+
+ it 'has external pull requests' do
+ expect(project.external_pull_requests.size).to eq(results.fetch(:external_pull_requests, 0))
+ end
+
+ # This test is quarantined because the use of magic number 999 causes failure on CI
+ it 'does not set params that are excluded from import_export settings', quarantine: 'https://gitlab.com/gitlab-org/gitlab/issues/207932#note_293724442' do
expect(project.import_type).to be_nil
expect(project.creator_id).not_to eq 999
end
diff --git a/spec/support/shared_examples/models/application_setting_shared_examples.rb b/spec/support/shared_examples/models/application_setting_shared_examples.rb
index a43d2a75082..aed85a6630a 100644
--- a/spec/support/shared_examples/models/application_setting_shared_examples.rb
+++ b/spec/support/shared_examples/models/application_setting_shared_examples.rb
@@ -68,12 +68,12 @@ RSpec.shared_examples 'application settings examples' do
setting.outbound_local_requests_whitelist_raw = 'example.com'
expect(setting.outbound_local_requests_whitelist_arrays).to contain_exactly(
- [], ['example.com']
+ [], [an_object_having_attributes(domain: 'example.com')]
)
setting.outbound_local_requests_whitelist_raw = 'gitlab.com'
expect(setting.outbound_local_requests_whitelist_arrays).to contain_exactly(
- [], ['gitlab.com']
+ [], [an_object_having_attributes(domain: 'gitlab.com')]
)
end
end
@@ -81,15 +81,42 @@ RSpec.shared_examples 'application settings examples' do
context 'outbound_local_requests_whitelist_arrays' do
it 'separates the IPs and domains' do
setting.outbound_local_requests_whitelist = [
- '192.168.1.1', '127.0.0.0/28', 'www.example.com', 'example.com',
- '::ffff:a00:2', '1:0:0:0:0:0:0:0/124', 'subdomain.example.com'
+ '192.168.1.1',
+ '127.0.0.0/28',
+ '::ffff:a00:2',
+ '1:0:0:0:0:0:0:0/124',
+ 'example.com',
+ 'subdomain.example.com',
+ 'www.example.com',
+ '::',
+ '1::',
+ '::1',
+ '1:2:3:4:5::7:8',
+ '[1:2:3:4:5::7:8]',
+ '[2001:db8:85a3:8d3:1319:8a2e:370:7348]:443',
+ 'www.example2.com:8080',
+ 'example.com:8080'
]
ip_whitelist = [
- IPAddr.new('192.168.1.1'), IPAddr.new('127.0.0.0/8'),
- IPAddr.new('::ffff:a00:2'), IPAddr.new('1:0:0:0:0:0:0:0/124')
+ an_object_having_attributes(ip: IPAddr.new('192.168.1.1')),
+ an_object_having_attributes(ip: IPAddr.new('127.0.0.0/8')),
+ an_object_having_attributes(ip: IPAddr.new('::ffff:a00:2')),
+ an_object_having_attributes(ip: IPAddr.new('1:0:0:0:0:0:0:0/124')),
+ an_object_having_attributes(ip: IPAddr.new('::')),
+ an_object_having_attributes(ip: IPAddr.new('1::')),
+ an_object_having_attributes(ip: IPAddr.new('::1')),
+ an_object_having_attributes(ip: IPAddr.new('1:2:3:4:5::7:8')),
+ an_object_having_attributes(ip: IPAddr.new('[1:2:3:4:5::7:8]')),
+ an_object_having_attributes(ip: IPAddr.new('[2001:db8:85a3:8d3:1319:8a2e:370:7348]'), port: 443)
+ ]
+ domain_whitelist = [
+ an_object_having_attributes(domain: 'example.com'),
+ an_object_having_attributes(domain: 'subdomain.example.com'),
+ an_object_having_attributes(domain: 'www.example.com'),
+ an_object_having_attributes(domain: 'www.example2.com', port: 8080),
+ an_object_having_attributes(domain: 'example.com', port: 8080)
]
- domain_whitelist = ['www.example.com', 'example.com', 'subdomain.example.com']
expect(setting.outbound_local_requests_whitelist_arrays).to contain_exactly(
ip_whitelist, domain_whitelist
@@ -117,7 +144,7 @@ RSpec.shared_examples 'application settings examples' do
expect(setting.outbound_local_requests_whitelist_arrays).to contain_exactly(
[],
- ['example.com']
+ [an_object_having_attributes(domain: 'example.com')]
)
setting.add_to_outbound_local_requests_whitelist(
@@ -126,7 +153,7 @@ RSpec.shared_examples 'application settings examples' do
expect(setting.outbound_local_requests_whitelist_arrays).to contain_exactly(
[],
- ['example.com', 'gitlab.com']
+ [an_object_having_attributes(domain: 'example.com'), an_object_having_attributes(domain: 'gitlab.com')]
)
end
@@ -137,7 +164,7 @@ RSpec.shared_examples 'application settings examples' do
expect(setting.outbound_local_requests_whitelist).to contain_exactly('gitlab.com')
expect(setting.outbound_local_requests_whitelist_arrays).to contain_exactly(
- [], ['gitlab.com']
+ [], [an_object_having_attributes(domain: 'gitlab.com')]
)
end
diff --git a/spec/support/shared_examples/models/cluster_application_helm_cert_shared_examples.rb b/spec/support/shared_examples/models/cluster_application_helm_cert_shared_examples.rb
index d5c425dea51..fa6b0c3afdd 100644
--- a/spec/support/shared_examples/models/cluster_application_helm_cert_shared_examples.rb
+++ b/spec/support/shared_examples/models/cluster_application_helm_cert_shared_examples.rb
@@ -28,22 +28,46 @@ RSpec.shared_examples 'cluster application helm specs' do |application_name|
describe '#files' do
subject { application.files }
- context 'when the helm application does not have a ca_cert' do
+ context 'managed_apps_local_tiller feature flag is disabled' do
before do
- application.cluster.application_helm.ca_cert = nil
+ stub_feature_flags(managed_apps_local_tiller: false)
end
- it 'does not include cert files when there is no ca_cert entry' do
- expect(subject).not_to include(:'ca.pem', :'cert.pem', :'key.pem')
+ context 'when the helm application does not have a ca_cert' do
+ before do
+ application.cluster.application_helm.ca_cert = nil
+ end
+
+ it 'does not include cert files when there is no ca_cert entry' do
+ expect(subject).not_to include(:'ca.pem', :'cert.pem', :'key.pem')
+ end
+ end
+
+ it 'includes cert files when there is a ca_cert entry' do
+ expect(subject).to include(:'ca.pem', :'cert.pem', :'key.pem')
+ expect(subject[:'ca.pem']).to eq(application.cluster.application_helm.ca_cert)
+
+ cert = OpenSSL::X509::Certificate.new(subject[:'cert.pem'])
+ expect(cert.not_after).to be < 60.minutes.from_now
end
end
- it 'includes cert files when there is a ca_cert entry' do
- expect(subject).to include(:'ca.pem', :'cert.pem', :'key.pem')
- expect(subject[:'ca.pem']).to eq(application.cluster.application_helm.ca_cert)
+ context 'managed_apps_local_tiller feature flag is enabled' do
+ before do
+ stub_feature_flags(managed_apps_local_tiller: true)
+ end
+
+ it 'does not include cert files' do
+ expect(subject).not_to include(:'ca.pem', :'cert.pem', :'key.pem')
+ end
+
+ context 'when cluster does not have helm installed' do
+ let(:application) { create(application_name, :no_helm_installed) }
- cert = OpenSSL::X509::Certificate.new(subject[:'cert.pem'])
- expect(cert.not_after).to be < 60.minutes.from_now
+ it 'does not include cert files' do
+ expect(subject).not_to include(:'ca.pem', :'cert.pem', :'key.pem')
+ end
+ end
end
end
end
diff --git a/spec/support/shared_examples/models/cluster_application_status_shared_examples.rb b/spec/support/shared_examples/models/cluster_application_status_shared_examples.rb
index e4e49b94e42..37f1b33d455 100644
--- a/spec/support/shared_examples/models/cluster_application_status_shared_examples.rb
+++ b/spec/support/shared_examples/models/cluster_application_status_shared_examples.rb
@@ -48,14 +48,44 @@ RSpec.shared_examples 'cluster application status specs' do |application_name|
expect(subject).to be_installed
end
- it 'updates helm version' do
- subject.cluster.application_helm.update!(version: '1.2.3')
+ context 'managed_apps_local_tiller feature flag disabled' do
+ before do
+ stub_feature_flags(managed_apps_local_tiller: false)
+ end
- subject.make_installed!
+ it 'updates helm version' do
+ subject.cluster.application_helm.update!(version: '1.2.3')
+
+ subject.make_installed!
+
+ subject.cluster.application_helm.reload
+
+ expect(subject.cluster.application_helm.version).to eq(Gitlab::Kubernetes::Helm::HELM_VERSION)
+ end
+ end
+
+ context 'managed_apps_local_tiller feature flag enabled' do
+ before do
+ stub_feature_flags(managed_apps_local_tiller: true)
+ end
+
+ it 'does not update the helm version' do
+ subject.cluster.application_helm.update!(version: '1.2.3')
+
+ expect do
+ subject.make_installed!
- subject.cluster.application_helm.reload
+ subject.cluster.application_helm.reload
+ end.not_to change { subject.cluster.application_helm.version }
+ end
+
+ context 'the cluster has no helm installed' do
+ subject { create(application_name, :installing, :no_helm_installed) }
- expect(subject.cluster.application_helm.version).to eq(Gitlab::Kubernetes::Helm::HELM_VERSION)
+ it 'runs without errors' do
+ expect { subject.make_installed! }.not_to raise_error
+ end
+ end
end
it 'sets the correct version of the application' do
@@ -77,14 +107,44 @@ RSpec.shared_examples 'cluster application status specs' do |application_name|
expect(subject).to be_updated
end
- it 'updates helm version' do
- subject.cluster.application_helm.update!(version: '1.2.3')
+ context 'managed_apps_local_tiller feature flag disabled' do
+ before do
+ stub_feature_flags(managed_apps_local_tiller: false)
+ end
- subject.make_installed!
+ it 'updates helm version' do
+ subject.cluster.application_helm.update!(version: '1.2.3')
- subject.cluster.application_helm.reload
+ subject.make_installed!
- expect(subject.cluster.application_helm.version).to eq(Gitlab::Kubernetes::Helm::HELM_VERSION)
+ subject.cluster.application_helm.reload
+
+ expect(subject.cluster.application_helm.version).to eq(Gitlab::Kubernetes::Helm::HELM_VERSION)
+ end
+ end
+
+ context 'managed_apps_local_tiller feature flag enabled' do
+ before do
+ stub_feature_flags(managed_apps_local_tiller: true)
+ end
+
+ it 'does not update the helm version' do
+ subject.cluster.application_helm.update!(version: '1.2.3')
+
+ expect do
+ subject.make_installed!
+
+ subject.cluster.application_helm.reload
+ end.not_to change { subject.cluster.application_helm.version }
+ end
+
+ context 'the cluster has no helm installed' do
+ subject { create(application_name, :updating, :no_helm_installed) }
+
+ it 'runs without errors' do
+ expect { subject.make_installed! }.not_to raise_error
+ end
+ end
end
it 'updates the version of the application' do
@@ -204,6 +264,8 @@ RSpec.shared_examples 'cluster application status specs' do |application_name|
describe '#available?' do
using RSpec::Parameterized::TableSyntax
+ let_it_be(:cluster) { create(:cluster, :provided_by_gcp) }
+
where(:trait, :available) do
:not_installable | false
:installable | false
@@ -220,7 +282,7 @@ RSpec.shared_examples 'cluster application status specs' do |application_name|
end
with_them do
- subject { build(application_name, trait) }
+ subject { build(application_name, trait, cluster: cluster) }
if params[:available]
it { is_expected.to be_available }
diff --git a/spec/support/shared_examples/models/cluster_application_version_shared_examples.rb b/spec/support/shared_examples/models/cluster_application_version_shared_examples.rb
index e293467774e..cf7010c48c2 100644
--- a/spec/support/shared_examples/models/cluster_application_version_shared_examples.rb
+++ b/spec/support/shared_examples/models/cluster_application_version_shared_examples.rb
@@ -2,16 +2,18 @@
RSpec.shared_examples 'cluster application version specs' do |application_name|
describe 'update_available?' do
+ let_it_be(:cluster) { create(:cluster, :provided_by_gcp) }
+
let(:version) { '0.0.0' }
- subject { create(application_name, :installed, version: version).update_available? }
+ subject { build(application_name, :installed, version: version, cluster: cluster).update_available? }
context 'version is not the same as VERSION' do
it { is_expected.to be_truthy }
end
context 'version is the same as VERSION' do
- let(:application) { build(application_name) }
+ let(:application) { build(application_name, cluster: cluster) }
let(:version) { application.class.const_get(:VERSION, false) }
it { is_expected.to be_falsey }
diff --git a/spec/support/shared_examples/models/concerns/blob_replicator_strategy_shared_examples.rb b/spec/support/shared_examples/models/concerns/blob_replicator_strategy_shared_examples.rb
new file mode 100644
index 00000000000..ebe464735c5
--- /dev/null
+++ b/spec/support/shared_examples/models/concerns/blob_replicator_strategy_shared_examples.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+# Include these shared examples in specs of Replicators that include
+# BlobReplicatorStrategy.
+#
+# A let variable called model_record should be defined in the spec. It should be
+# a valid, unpersisted instance of the model class.
+#
+RSpec.shared_examples 'a blob replicator' do
+ include EE::GeoHelpers
+
+ let_it_be(:primary) { create(:geo_node, :primary) }
+ let_it_be(:secondary) { create(:geo_node) }
+
+ subject(:replicator) { model_record.replicator }
+
+ before do
+ stub_current_geo_node(primary)
+ end
+
+ describe '#handle_after_create_commit' do
+ it 'creates a Geo::Event' do
+ expect do
+ replicator.handle_after_create_commit
+ end.to change { ::Geo::Event.count }.by(1)
+
+ expect(::Geo::Event.last.attributes).to include(
+ "replicable_name" => replicator.replicable_name, "event_name" => "created", "payload" => { "model_record_id" => replicator.model_record.id })
+ end
+ end
+
+ describe '#consume_created_event' do
+ it 'invokes Geo::BlobDownloadService' do
+ service = double(:service)
+
+ expect(service).to receive(:execute)
+ expect(::Geo::BlobDownloadService).to receive(:new).with(replicator: replicator).and_return(service)
+
+ replicator.consume_created_event
+ end
+ end
+
+ describe '#carrierwave_uploader' do
+ it 'is implemented' do
+ expect do
+ replicator.carrierwave_uploader
+ end.not_to raise_error
+ end
+ end
+
+ describe '#model' do
+ let(:invoke_model) { replicator.send(:model) }
+
+ it 'is implemented' do
+ expect do
+ invoke_model
+ end.not_to raise_error
+ end
+
+ it 'is a Class' do
+ expect(invoke_model).to be_a(Class)
+ end
+
+ # For convenience (and reliability), instead of asking developers to include shared examples on each model spec as well
+ context 'replicable model' do
+ it 'defines #replicator' do
+ expect(model_record).to respond_to(:replicator)
+ end
+
+ it 'invokes replicator.handle_after_create_commit on create' do
+ expect(replicator).to receive(:handle_after_create_commit)
+
+ model_record.save!
+ end
+ end
+ end
+end
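A minimal sketch of a Replicator spec that would include these shared examples; the replicator class and factory are assumptions:

RSpec.describe Geo::PackageFileReplicator do # hypothetical replicator spec
  it_behaves_like 'a blob replicator' do
    # model_record must be a valid, unpersisted instance of the replicated model
    let(:model_record) { build(:package_file) }
  end
end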
diff --git a/spec/support/shared_examples/models/concerns/bulk_insert_safe_shared_examples.rb b/spec/support/shared_examples/models/concerns/bulk_insert_safe_shared_examples.rb
index 78d0945ea63..c6180a5a196 100644
--- a/spec/support/shared_examples/models/concerns/bulk_insert_safe_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/bulk_insert_safe_shared_examples.rb
@@ -35,4 +35,44 @@ RSpec.shared_examples 'a BulkInsertSafe model' do |klass|
expect { target_class.belongs_to(:other_record) }.not_to raise_error
end
end
+
+ describe '.bulk_insert!' do
+ context 'when all items are valid' do
+ it 'inserts them all' do
+ items = valid_items_for_bulk_insertion
+
+ expect(items).not_to be_empty
+ expect { target_class.bulk_insert!(items) }.to change { target_class.count }.by(items.size)
+ end
+
+ it 'returns true' do
+ items = valid_items_for_bulk_insertion
+
+ expect(items).not_to be_empty
+ expect(target_class.bulk_insert!(items)).to be true
+ end
+ end
+
+ context 'when some items are invalid' do
+ it 'does not insert any of them and raises an error' do
+ items = invalid_items_for_bulk_insertion
+
+ # it is not always possible to create invalid items
+ if items.any?
+ expect { target_class.bulk_insert!(items) }.to raise_error(ActiveRecord::RecordInvalid)
+ expect(target_class.count).to eq(0)
+ end
+ end
+
+ it 'inserts them anyway when bypassing validations' do
+ items = invalid_items_for_bulk_insertion
+
+ # it is not always possible to create invalid items
+ if items.any?
+ expect(target_class.bulk_insert!(items, validate: false)).to be(true)
+ expect(target_class.count).to eq(items.size)
+ end
+ end
+ end
+ end
end
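The new .bulk_insert! examples pull two lets from the including spec: valid_items_for_bulk_insertion and invalid_items_for_bulk_insertion. A hedged sketch of the including side, using a placeholder model and factory rather than a real BulkInsertSafe class:

    describe MyBulkInsertSafeRecord do  # placeholder model that includes BulkInsertSafe
      it_behaves_like 'a BulkInsertSafe model', MyBulkInsertSafeRecord do
        let(:valid_items_for_bulk_insertion)   { build_list(:my_bulk_insert_safe_record, 5) }
        # as the comment above notes, invalid items cannot always be built
        let(:invalid_items_for_bulk_insertion) { [] }
      end
    end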
diff --git a/spec/support/shared_examples/models/issue_tracker_service_shared_examples.rb b/spec/support/shared_examples/models/issue_tracker_service_shared_examples.rb
index 0a483fd30ba..b275d594792 100644
--- a/spec/support/shared_examples/models/issue_tracker_service_shared_examples.rb
+++ b/spec/support/shared_examples/models/issue_tracker_service_shared_examples.rb
@@ -21,4 +21,8 @@ RSpec.shared_examples 'allows project key on reference pattern' do |url_attr|
expect(described_class.reference_pattern.match('3EXT_EXT-1234')).to eq nil
expect(described_class.reference_pattern.match('EXT_EXT-1234')[0]).to eq 'EXT_EXT-1234'
end
+
+ it 'does not allow an issue number to end with a letter' do
+ expect(described_class.reference_pattern.match('EXT-123A')).to eq(nil)
+ end
end
diff --git a/spec/support/shared_examples/models/note_access_check_shared_examples.rb b/spec/support/shared_examples/models/note_access_check_shared_examples.rb
new file mode 100644
index 00000000000..3bafad202f6
--- /dev/null
+++ b/spec/support/shared_examples/models/note_access_check_shared_examples.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+shared_examples 'users with note access' do
+ it 'returns true' do
+ users.each do |user|
+ expect(note.system_note_with_references_visible_for?(user)).to be_truthy
+ expect(note.readable_by?(user)).to be_truthy
+ end
+ end
+end
+
+shared_examples 'users without note access' do
+ it 'returns false' do
+ users.each do |user|
+ expect(note.system_note_with_references_visible_for?(user)).to be_falsy
+ expect(note.readable_by?(user)).to be_falsy
+ end
+ end
+end
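Both groups read note and users from the including spec. A minimal sketch, assuming a public project, a system note on one of its issues, and project members as the users (all assumptions, not shown in this diff):

    describe Note do
      let(:project) { create(:project, :public) }
      let(:issue)   { create(:issue, project: project) }
      let(:note)    { create(:note, :system, project: project, noteable: issue) }

      it_behaves_like 'users with note access' do
        let(:users) { [project.owner, create(:user)] }
      end
    end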
diff --git a/spec/support/shared_examples/models/slack_mattermost_notifications_shared_examples.rb b/spec/support/shared_examples/models/slack_mattermost_notifications_shared_examples.rb
index 2b68e7bfa82..24ff57c8517 100644
--- a/spec/support/shared_examples/models/slack_mattermost_notifications_shared_examples.rb
+++ b/spec/support/shared_examples/models/slack_mattermost_notifications_shared_examples.rb
@@ -151,22 +151,14 @@ RSpec.shared_examples 'slack or mattermost notifications' do |service_name|
it 'uses the username as an option for slack when configured' do
allow(chat_service).to receive(:username).and_return(username)
- expect(Slack::Notifier).to receive(:new)
- .with(webhook_url, username: username, http_client: SlackService::Notifier::HTTPClient)
- .and_return(
- double(:slack_service).as_null_object
- )
+ expect(Slack::Messenger).to execute_with_options(username: username)
chat_service.execute(data)
end
it 'uses the channel as an option when it is configured' do
allow(chat_service).to receive(:channel).and_return(channel)
- expect(Slack::Notifier).to receive(:new)
- .with(webhook_url, channel: channel, http_client: SlackService::Notifier::HTTPClient)
- .and_return(
- double(:slack_service).as_null_object
- )
+ expect(Slack::Messenger).to execute_with_options(channel: [channel])
chat_service.execute(data)
end
@@ -174,11 +166,7 @@ RSpec.shared_examples 'slack or mattermost notifications' do |service_name|
it "uses the right channel for push event" do
chat_service.update(push_channel: "random")
- expect(Slack::Notifier).to receive(:new)
- .with(webhook_url, channel: "random", http_client: SlackService::Notifier::HTTPClient)
- .and_return(
- double(:slack_service).as_null_object
- )
+ expect(Slack::Messenger).to execute_with_options(channel: ['random'])
chat_service.execute(data)
end
@@ -186,11 +174,7 @@ RSpec.shared_examples 'slack or mattermost notifications' do |service_name|
it "uses the right channel for merge request event" do
chat_service.update(merge_request_channel: "random")
- expect(Slack::Notifier).to receive(:new)
- .with(webhook_url, channel: "random", http_client: SlackService::Notifier::HTTPClient)
- .and_return(
- double(:slack_service).as_null_object
- )
+ expect(Slack::Messenger).to execute_with_options(channel: ['random'])
chat_service.execute(@merge_sample_data)
end
@@ -198,11 +182,7 @@ RSpec.shared_examples 'slack or mattermost notifications' do |service_name|
it "uses the right channel for issue event" do
chat_service.update(issue_channel: "random")
- expect(Slack::Notifier).to receive(:new)
- .with(webhook_url, channel: "random", http_client: SlackService::Notifier::HTTPClient)
- .and_return(
- double(:slack_service).as_null_object
- )
+ expect(Slack::Messenger).to execute_with_options(channel: ['random'])
chat_service.execute(@issues_sample_data)
end
@@ -213,7 +193,7 @@ RSpec.shared_examples 'slack or mattermost notifications' do |service_name|
it "uses confidential issue channel" do
chat_service.update(confidential_issue_channel: 'confidential')
- expect(Slack::Notifier).to execute_with_options(channel: 'confidential')
+ expect(Slack::Messenger).to execute_with_options(channel: ['confidential'])
chat_service.execute(@issues_sample_data)
end
@@ -221,7 +201,7 @@ RSpec.shared_examples 'slack or mattermost notifications' do |service_name|
it 'falls back to issue channel' do
chat_service.update(issue_channel: 'fallback_channel')
- expect(Slack::Notifier).to execute_with_options(channel: 'fallback_channel')
+ expect(Slack::Messenger).to execute_with_options(channel: ['fallback_channel'])
chat_service.execute(@issues_sample_data)
end
@@ -230,11 +210,7 @@ RSpec.shared_examples 'slack or mattermost notifications' do |service_name|
it "uses the right channel for wiki event" do
chat_service.update(wiki_page_channel: "random")
- expect(Slack::Notifier).to receive(:new)
- .with(webhook_url, channel: "random", http_client: SlackService::Notifier::HTTPClient)
- .and_return(
- double(:slack_service).as_null_object
- )
+ expect(Slack::Messenger).to execute_with_options(channel: ['random'])
chat_service.execute(@wiki_page_sample_data)
end
@@ -249,11 +225,7 @@ RSpec.shared_examples 'slack or mattermost notifications' do |service_name|
note_data = Gitlab::DataBuilder::Note.build(issue_note, user)
- expect(Slack::Notifier).to receive(:new)
- .with(webhook_url, channel: "random", http_client: SlackService::Notifier::HTTPClient)
- .and_return(
- double(:slack_service).as_null_object
- )
+ expect(Slack::Messenger).to execute_with_options(channel: ['random'])
chat_service.execute(note_data)
end
@@ -268,7 +240,7 @@ RSpec.shared_examples 'slack or mattermost notifications' do |service_name|
note_data = Gitlab::DataBuilder::Note.build(issue_note, user)
- expect(Slack::Notifier).to execute_with_options(channel: 'confidential')
+ expect(Slack::Messenger).to execute_with_options(channel: ['confidential'])
chat_service.execute(note_data)
end
@@ -278,7 +250,7 @@ RSpec.shared_examples 'slack or mattermost notifications' do |service_name|
note_data = Gitlab::DataBuilder::Note.build(issue_note, user)
- expect(Slack::Notifier).to execute_with_options(channel: 'fallback_channel')
+ expect(Slack::Messenger).to execute_with_options(channel: ['fallback_channel'])
chat_service.execute(note_data)
end
diff --git a/spec/support/shared_examples/quick_actions/issue/move_quick_action_shared_examples.rb b/spec/support/shared_examples/quick_actions/issue/move_quick_action_shared_examples.rb
index 897a962fc56..32c46753006 100644
--- a/spec/support/shared_examples/quick_actions/issue/move_quick_action_shared_examples.rb
+++ b/spec/support/shared_examples/quick_actions/issue/move_quick_action_shared_examples.rb
@@ -50,6 +50,8 @@ RSpec.shared_examples 'move quick action' do
let(:bug) { create(:label, project: project, title: 'bug') }
let(:wontfix) { create(:label, project: project, title: 'wontfix') }
+ let!(:target_milestone) { create(:milestone, title: '1.0', project: target_project) }
+
before do
target_project.add_maintainer(user)
end
diff --git a/spec/support/shared_examples/requests/api/discussions_shared_examples.rb b/spec/support/shared_examples/requests/api/discussions_shared_examples.rb
index 939ea405724..3ad2263688b 100644
--- a/spec/support/shared_examples/requests/api/discussions_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/discussions_shared_examples.rb
@@ -55,6 +55,58 @@ RSpec.shared_examples 'with cross-reference system notes' do
end
RSpec.shared_examples 'discussions API' do |parent_type, noteable_type, id_name, can_reply_to_individual_notes: false|
+ shared_examples 'is_gitlab_employee attribute presence' do
+ subject { get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions", user) }
+
+ before do
+ allow(Gitlab).to receive(:com?).and_return(true)
+ user.update(email: email)
+ user.confirm
+ end
+
+ context 'when author is a gitlab employee' do
+ let(:email) { 'test@gitlab.com' }
+
+ it 'returns is_gitlab_employee as true' do
+ subject
+
+ expect(json_response.first["notes"].first["author"]['is_gitlab_employee']).to be true
+ end
+ end
+
+ shared_examples 'non inclusion of gitlab employee badge' do
+ it 'does not include is_gitlab_employee attribute' do
+ subject
+
+ expect(json_response.first["notes"].first["author"]).not_to have_key('is_gitlab_employee')
+ end
+ end
+
+ context 'when author is not a gitlab employee' do
+ let(:email) { 'test@example.com' }
+
+ it_behaves_like 'non inclusion of gitlab employee badge'
+ end
+
+ describe 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(gitlab_employee_badge: false)
+ end
+
+ context 'when author is a gitlab employee' do
+ let(:email) { 'test@gitlab.com' }
+
+ it_behaves_like 'non inclusion of gitlab employee badge'
+ end
+
+ context 'when author is not a gitlab employee' do
+ let(:email) { 'test@example.com' }
+
+ it_behaves_like 'non inclusion of gitlab employee badge'
+ end
+ end
+ end
+
describe "GET /#{parent_type}/:id/#{noteable_type}/:noteable_id/discussions" do
it "returns an array of discussions" do
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions", user)
@@ -78,6 +130,8 @@ RSpec.shared_examples 'discussions API' do |parent_type, noteable_type, id_name,
expect(response).to have_gitlab_http_status(:not_found)
end
+
+ it_behaves_like 'is_gitlab_employee attribute presence'
end
describe "GET /#{parent_type}/:id/#{noteable_type}/:noteable_id/discussions/:discussion_id" do
@@ -196,6 +250,8 @@ RSpec.shared_examples 'discussions API' do |parent_type, noteable_type, id_name,
end
end
end
+
+ it_behaves_like 'is_gitlab_employee attribute presence'
end
describe "POST /#{parent_type}/:id/#{noteable_type}/:noteable_id/discussions/:discussion_id/notes" do
diff --git a/spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb b/spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb
new file mode 100644
index 00000000000..90ac60a6fe7
--- /dev/null
+++ b/spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb
@@ -0,0 +1,112 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'group and project boards query' do
+ include GraphqlHelpers
+
+ it_behaves_like 'a working graphql query' do
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+ end
+
+ context 'when the user does not have access to the board parent' do
+ it 'returns nil' do
+ create(:board, resource_parent: board_parent, name: 'A')
+
+ post_graphql(query)
+
+ expect(graphql_data[board_parent_type]).to be_nil
+ end
+ end
+
+ context 'when no permission to read board' do
+ it 'does not return any boards' do
+ board_parent.add_guest(current_user)
+ board = create(:board, resource_parent: board_parent, name: 'A')
+
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?).with(user, :read_board, board).and_return(false)
+
+ post_graphql(query, current_user: current_user)
+
+ expect(boards_data).to be_empty
+ end
+ end
+
+ context 'when user can read the board parent' do
+ before do
+ board_parent.add_reporter(current_user)
+ end
+
+ it 'does not create a default board' do
+ post_graphql(query, current_user: current_user)
+
+ expect(boards_data).to be_empty
+ end
+
+ describe 'sorting and pagination' do
+ context 'when using default sorting' do
+ let!(:board_B) { create(:board, resource_parent: board_parent, name: 'B') }
+ let!(:board_C) { create(:board, resource_parent: board_parent, name: 'C') }
+ let!(:board_a) { create(:board, resource_parent: board_parent, name: 'a') }
+ let!(:board_A) { create(:board, resource_parent: board_parent, name: 'A') }
+
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+
+ it_behaves_like 'a working graphql query'
+
+ context 'when ascending' do
+ let(:boards) { [board_a, board_A, board_B, board_C] }
+ let(:expected_boards) do
+ if board_parent.multiple_issue_boards_available?
+ boards
+ else
+ [boards.first]
+ end
+ end
+
+ it 'sorts boards' do
+ expect(grab_names).to eq expected_boards.map(&:name)
+ end
+
+ context 'when paginating' do
+ let(:params) { 'first: 2' }
+
+ it 'sorts boards' do
+ expect(grab_names).to eq expected_boards.first(2).map(&:name)
+
+ cursored_query = query("after: \"#{end_cursor}\"")
+ post_graphql(cursored_query, current_user: current_user)
+
+ response_data = JSON.parse(response.body)['data'][board_parent_type]['boards']['edges']
+
+ expect(grab_names(response_data)).to eq expected_boards.drop(2).first(2).map(&:name)
+ end
+ end
+ end
+ end
+ end
+ end
+
+ context 'when querying for a single board' do
+ before do
+ board_parent.add_reporter(current_user)
+ end
+
+ it_behaves_like 'a working graphql query' do
+ before do
+ post_graphql(query_single_board, current_user: current_user)
+ end
+ end
+
+ it 'finds the correct board' do
+ board = create(:board, resource_parent: board_parent, name: 'A')
+
+ post_graphql(query_single_board("id: \"#{global_id_of(board)}\""), current_user: current_user)
+
+ expect(board_data['name']).to eq board.name
+ end
+ end
+end
diff --git a/spec/support/shared_examples/requests/api/time_tracking_shared_examples.rb b/spec/support/shared_examples/requests/api/time_tracking_shared_examples.rb
index 30ba8d9b436..53183ac89f8 100644
--- a/spec/support/shared_examples/requests/api/time_tracking_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/time_tracking_shared_examples.rb
@@ -109,7 +109,7 @@ RSpec.shared_examples 'time tracking endpoints' do |issuable_name|
end
expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['message']['time_spent'].first).to match(/exceeds the total time spent/)
+ expect(json_response['message']['base'].first).to eq(_('Time to subtract exceeds the total time spent'))
end
end
end
diff --git a/spec/support/shared_examples/requests/snippet_shared_examples.rb b/spec/support/shared_examples/requests/snippet_shared_examples.rb
new file mode 100644
index 00000000000..f2df97a35d9
--- /dev/null
+++ b/spec/support/shared_examples/requests/snippet_shared_examples.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'update with repository actions' do
+ context 'when the repository exists' do
+ it 'commits the changes to the repository' do
+ existing_blob = snippet.blobs.first
+ new_file_name = existing_blob.path + '_new'
+ new_content = 'New content'
+
+ update_snippet(params: { content: new_content, file_name: new_file_name })
+
+ aggregate_failures do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(snippet.repository.blob_at('master', existing_blob.path)).to be_nil
+
+ blob = snippet.repository.blob_at('master', new_file_name)
+ expect(blob).not_to be_nil
+ expect(blob.data).to eq(new_content)
+ end
+ end
+ end
+
+ context 'when the repository does not exist' do
+ let(:snippet) { snippet_without_repo }
+
+ it 'creates the repository' do
+ update_snippet(snippet_id: snippet.id, params: { title: 'foo' })
+
+ expect(snippet.repository).to exist
+ end
+
+ it 'commits the file to the repository' do
+ content = 'New Content'
+ file_name = 'file_name.rb'
+
+ update_snippet(snippet_id: snippet.id, params: { content: content, file_name: file_name })
+
+ blob = snippet.repository.blob_at('master', file_name)
+ expect(blob).not_to be_nil
+ expect(blob.data).to eq content
+ end
+ end
+end
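'update with repository actions' relies on the including request spec to define snippet, snippet_without_repo, and an update_snippet helper that performs the request. A hedged sketch of what that might look like for the personal snippets API (the endpoint and factory traits are assumptions):

    describe API::Snippets do
      let_it_be(:user) { create(:user) }

      let(:snippet)              { create(:personal_snippet, :repository, author: user) }
      let(:snippet_without_repo) { create(:personal_snippet, author: user) }

      def update_snippet(snippet_id: snippet.id, params: {})
        put api("/snippets/#{snippet_id}", user), params: params
      end

      it_behaves_like 'update with repository actions'
    end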
diff --git a/spec/support/shared_examples/resource_events.rb b/spec/support/shared_examples/resource_events.rb
new file mode 100644
index 00000000000..963453666c9
--- /dev/null
+++ b/spec/support/shared_examples/resource_events.rb
@@ -0,0 +1,117 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+shared_examples 'a resource event' do
+ let_it_be(:user1) { create(:user) }
+ let_it_be(:user2) { create(:user) }
+
+ let_it_be(:issue1) { create(:issue, author: user1) }
+ let_it_be(:issue2) { create(:issue, author: user1) }
+ let_it_be(:issue3) { create(:issue, author: user2) }
+
+ describe 'importable' do
+ it { is_expected.to respond_to(:importing?) }
+ it { is_expected.to respond_to(:imported?) }
+ end
+
+ describe 'validations' do
+ it { is_expected.not_to allow_value(nil).for(:user) }
+
+ context 'when importing' do
+ before do
+ allow(subject).to receive(:importing?).and_return(true)
+ end
+
+ it { is_expected.to allow_value(nil).for(:user) }
+ end
+ end
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:user) }
+ end
+
+ describe '.created_after' do
+ let!(:created_at1) { 1.day.ago }
+ let!(:created_at2) { 2.days.ago }
+ let!(:created_at3) { 3.days.ago }
+
+ let!(:event1) { create(described_class.name.underscore.to_sym, issue: issue1, created_at: created_at1) }
+ let!(:event2) { create(described_class.name.underscore.to_sym, issue: issue2, created_at: created_at2) }
+ let!(:event3) { create(described_class.name.underscore.to_sym, issue: issue2, created_at: created_at3) }
+
+ it 'returns the expected events' do
+ events = described_class.created_after(created_at3)
+
+ expect(events).to contain_exactly(event1, event2)
+ end
+
+ it 'returns no events if time is after last record time' do
+ events = described_class.created_after(1.minute.ago)
+
+ expect(events).to be_empty
+ end
+ end
+end
+
+shared_examples 'a resource event for issues' do
+ let_it_be(:user1) { create(:user) }
+ let_it_be(:user2) { create(:user) }
+
+ let_it_be(:issue1) { create(:issue, author: user1) }
+ let_it_be(:issue2) { create(:issue, author: user1) }
+ let_it_be(:issue3) { create(:issue, author: user2) }
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:issue) }
+ end
+
+ describe '.by_issue' do
+ let_it_be(:event1) { create(described_class.name.underscore.to_sym, issue: issue1) }
+ let_it_be(:event2) { create(described_class.name.underscore.to_sym, issue: issue2) }
+ let_it_be(:event3) { create(described_class.name.underscore.to_sym, issue: issue1) }
+
+ it 'returns the expected records for an issue with events' do
+ events = described_class.by_issue(issue1)
+
+ expect(events).to contain_exactly(event1, event3)
+ end
+
+ it 'returns the expected records for an issue with no events' do
+ events = described_class.by_issue(issue3)
+
+ expect(events).to be_empty
+ end
+ end
+end
+
+shared_examples 'a resource event for merge requests' do
+ let_it_be(:user1) { create(:user) }
+ let_it_be(:user2) { create(:user) }
+
+ let_it_be(:merge_request1) { create(:merge_request, author: user1) }
+ let_it_be(:merge_request2) { create(:merge_request, author: user1) }
+ let_it_be(:merge_request3) { create(:merge_request, author: user2) }
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:merge_request) }
+ end
+
+ describe '.by_merge_request' do
+ let_it_be(:event1) { create(described_class.name.underscore.to_sym, merge_request: merge_request1) }
+ let_it_be(:event2) { create(described_class.name.underscore.to_sym, merge_request: merge_request2) }
+ let_it_be(:event3) { create(described_class.name.underscore.to_sym, merge_request: merge_request1) }
+
+ it 'returns the expected records for a merge request with events' do
+ events = described_class.by_merge_request(merge_request1)
+
+ expect(events).to contain_exactly(event1, event3)
+ end
+
+ it 'returns the expected records for a merge request with no events' do
+ events = described_class.by_merge_request(merge_request3)
+
+ expect(events).to be_empty
+ end
+ end
+end
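These groups derive the factory name from described_class, so they are intended for the event model's own spec. A short sketch based on ResourceMilestoneEvent, which this diff exercises elsewhere; whether its spec includes all three groups is an assumption:

    describe ResourceMilestoneEvent do
      it_behaves_like 'a resource event'
      it_behaves_like 'a resource event for issues'
      it_behaves_like 'a resource event for merge requests'
    end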
diff --git a/spec/support/shared_examples/serializers/diff_file_entity_shared_examples.rb b/spec/support/shared_examples/serializers/diff_file_entity_shared_examples.rb
index db5c4b45b70..b6c4841dbd4 100644
--- a/spec/support/shared_examples/serializers/diff_file_entity_shared_examples.rb
+++ b/spec/support/shared_examples/serializers/diff_file_entity_shared_examples.rb
@@ -60,7 +60,7 @@ RSpec.shared_examples 'diff file entity' do
context 'when the `single_mr_diff_view` feature is disabled' do
before do
- stub_feature_flags(single_mr_diff_view: false)
+ stub_feature_flags(single_mr_diff_view: { enabled: false, thing: project })
end
it 'contains both kinds of diffs' do
diff --git a/spec/support/shared_examples/services/boards/issues_list_service_shared_examples.rb b/spec/support/shared_examples/services/boards/issues_list_service_shared_examples.rb
index ec1c58e5b67..756c4136059 100644
--- a/spec/support/shared_examples/services/boards/issues_list_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/boards/issues_list_service_shared_examples.rb
@@ -36,20 +36,22 @@ RSpec.shared_examples 'issues list service' do
expect(issues).to eq [opened_issue2, reopened_issue1, opened_issue1]
end
- it 'returns closed issues when listing issues from Closed' do
- params = { board_id: board.id, id: closed.id }
+ it 'returns opened issues that have label list applied when listing issues from a label list' do
+ params = { board_id: board.id, id: list1.id }
issues = described_class.new(parent, user, params).execute
- expect(issues).to eq [closed_issue4, closed_issue2, closed_issue5, closed_issue3, closed_issue1]
+ expect(issues).to eq [list1_issue3, list1_issue1, list1_issue2]
end
+ end
- it 'returns opened issues that have label list applied when listing issues from a label list' do
- params = { board_id: board.id, id: list1.id }
+ context 'issues are ordered by date of closing' do
+ it 'returns closed issues when listing issues from Closed' do
+ params = { board_id: board.id, id: closed.id }
issues = described_class.new(parent, user, params).execute
- expect(issues).to eq [list1_issue3, list1_issue1, list1_issue2]
+ expect(issues).to eq [closed_issue1, closed_issue2, closed_issue3, closed_issue4, closed_issue5]
end
end
diff --git a/spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb b/spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb
index 1f229d6b783..48e4b4a18fd 100644
--- a/spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb
+++ b/spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb
@@ -67,6 +67,7 @@ RSpec.shared_examples 'valid dashboard cloning process' do |dashboard_template,
it 'delegates commit creation to Files::CreateService', :aggregate_failures do
service_instance = instance_double(::Files::CreateService)
+ allow(::Gitlab::Metrics::Dashboard::Processor).to receive(:new).and_return(double(process: file_content_hash))
expect(::Files::CreateService).to receive(:new).with(project, user, dashboard_attrs).and_return(service_instance)
expect(service_instance).to receive(:execute).and_return(status: :success)
@@ -85,3 +86,24 @@ RSpec.shared_examples 'valid dashboard cloning process' do |dashboard_template,
end
end
end
+
+RSpec.shared_examples 'valid dashboard update process' do
+ let(:dashboard_attrs) do
+ {
+ commit_message: commit_message,
+ branch_name: branch,
+ start_branch: project.default_branch,
+ encoding: 'text',
+ file_path: ".gitlab/dashboards/#{file_name}",
+ file_content: ::PerformanceMonitoring::PrometheusDashboard.from_json(file_content_hash).to_yaml
+ }
+ end
+
+ it 'delegates commit creation to Files::UpdateService', :aggregate_failures do
+ service_instance = instance_double(::Files::UpdateService)
+ expect(::Files::UpdateService).to receive(:new).with(project, user, dashboard_attrs).and_return(service_instance)
+ expect(service_instance).to receive(:execute).and_return(status: :success)
+
+ service_call
+ end
+end
diff --git a/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb b/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb
new file mode 100644
index 00000000000..b22379b8b68
--- /dev/null
+++ b/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb
@@ -0,0 +1,145 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'moves repository to another storage' do |repository_type|
+ let(:project_repository_double) { double(:repository) }
+ let!(:project_repository_checksum) { project.repository.checksum }
+
+ let(:repository_double) { double(:repository) }
+ let(:repository_checksum) { repository.checksum }
+
+ before do
+ # Default stub for non-specified params
+ allow(Gitlab::Git::Repository).to receive(:new).and_call_original
+
+ allow(Gitlab::Git::Repository).to receive(:new)
+ .with('test_second_storage', project.repository.raw.relative_path, project.repository.gl_repository, project.repository.full_path)
+ .and_return(project_repository_double)
+
+ allow(Gitlab::Git::Repository).to receive(:new)
+ .with('test_second_storage', repository.raw.relative_path, repository.gl_repository, repository.full_path)
+ .and_return(repository_double)
+ end
+
+ context 'when the move succeeds', :clean_gitlab_redis_shared_state do
+ before do
+ allow(project_repository_double).to receive(:create_repository)
+ .and_return(true)
+ allow(project_repository_double).to receive(:replicate)
+ .with(project.repository.raw)
+ allow(project_repository_double).to receive(:checksum)
+ .and_return(project_repository_checksum)
+
+ allow(repository_double).to receive(:create_repository)
+ .and_return(true)
+ allow(repository_double).to receive(:replicate)
+ .with(repository.raw)
+ allow(repository_double).to receive(:checksum)
+ .and_return(repository_checksum)
+ end
+
+ it "moves the project and its #{repository_type} repository to the new storage and unmarks the repository as read only" do
+ old_project_repository_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
+ project.repository.path_to_repo
+ end
+
+ old_repository_path = repository.full_path
+
+ result = subject.execute('test_second_storage')
+
+ expect(result[:status]).to eq(:success)
+ expect(project).not_to be_repository_read_only
+ expect(project.repository_storage).to eq('test_second_storage')
+ expect(gitlab_shell.repository_exists?('default', old_project_repository_path)).to be(false)
+ expect(gitlab_shell.repository_exists?('default', old_repository_path)).to be(false)
+ end
+
+ context ':repack_after_shard_migration feature flag disabled' do
+ before do
+ stub_feature_flags(repack_after_shard_migration: false)
+ end
+
+ it 'does not enqueue a GC run' do
+ expect { subject.execute('test_second_storage') }
+ .not_to change(GitGarbageCollectWorker.jobs, :count)
+ end
+ end
+
+ context ':repack_after_shard_migration feature flag enabled' do
+ before do
+ stub_feature_flags(repack_after_shard_migration: true)
+ end
+
+ it 'does not enqueue a GC run if housekeeping is disabled' do
+ stub_application_setting(housekeeping_enabled: false)
+
+ expect { subject.execute('test_second_storage') }
+ .not_to change(GitGarbageCollectWorker.jobs, :count)
+ end
+
+ it 'enqueues a GC run' do
+ expect { subject.execute('test_second_storage') }
+ .to change(GitGarbageCollectWorker.jobs, :count).by(1)
+ end
+ end
+ end
+
+ context 'when the project is already on the target storage' do
+ it 'bails out and does nothing' do
+ result = subject.execute(project.repository_storage)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to match(/repository and source have the same storage/)
+ end
+ end
+
+ context "when the move of the #{repository_type} repository fails" do
+ it 'unmarks the repository as read-only without updating the repository storage' do
+ allow(project_repository_double).to receive(:create_repository)
+ .and_return(true)
+ allow(project_repository_double).to receive(:replicate)
+ .with(project.repository.raw)
+ allow(project_repository_double).to receive(:checksum)
+ .and_return(project_repository_checksum)
+
+ allow(repository_double).to receive(:create_repository)
+ .and_return(true)
+ allow(repository_double).to receive(:replicate)
+ .with(repository.raw)
+ .and_raise(Gitlab::Git::CommandError)
+
+ expect(GitlabShellWorker).not_to receive(:perform_async)
+
+ result = subject.execute('test_second_storage')
+
+ expect(result[:status]).to eq(:error)
+ expect(project).not_to be_repository_read_only
+ expect(project.repository_storage).to eq('default')
+ end
+ end
+
+ context "when the checksum of the #{repository_type} repository does not match" do
+ it 'unmarks the repository as read-only without updating the repository storage' do
+ allow(project_repository_double).to receive(:create_repository)
+ .and_return(true)
+ allow(project_repository_double).to receive(:replicate)
+ .with(project.repository.raw)
+ allow(project_repository_double).to receive(:checksum)
+ .and_return(project_repository_checksum)
+
+ allow(repository_double).to receive(:create_repository)
+ .and_return(true)
+ allow(repository_double).to receive(:replicate)
+ .with(repository.raw)
+ allow(repository_double).to receive(:checksum)
+ .and_return('not matching checksum')
+
+ expect(GitlabShellWorker).not_to receive(:perform_async)
+
+ result = subject.execute('test_second_storage')
+
+ expect(result[:status]).to eq(:error)
+ expect(project).not_to be_repository_read_only
+ expect(project.repository_storage).to eq('default')
+ end
+ end
+end
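The storage-move examples expect several lets from the including service spec: project, repository, gitlab_shell, and a subject wrapping the service instance. A hedged sketch for the wiki repository case; the factory traits and service constructor shown here are assumptions:

    describe Projects::UpdateRepositoryStorageService do
      let(:project)      { create(:project, :repository, :wiki_repo, repository_read_only: true) }
      let(:repository)   { project.wiki.repository }
      let(:gitlab_shell) { Gitlab::Shell.new }

      subject { described_class.new(project) }

      it_behaves_like 'moves repository to another storage', 'wiki'
    end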
diff --git a/spec/support/shared_examples/services/resource_events/change_milestone_service_shared_examples.rb b/spec/support/shared_examples/services/resource_events/change_milestone_service_shared_examples.rb
new file mode 100644
index 00000000000..77f64e5e8f8
--- /dev/null
+++ b/spec/support/shared_examples/services/resource_events/change_milestone_service_shared_examples.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+shared_examples 'a milestone events creator' do
+ let_it_be(:user) { create(:user) }
+
+ let(:created_at_time) { Time.utc(2019, 12, 30) }
+ let(:service) { described_class.new(resource, user, created_at: created_at_time) }
+
+ context 'when milestone is present' do
+ let_it_be(:milestone) { create(:milestone) }
+
+ before do
+ resource.milestone = milestone
+ end
+
+ it 'creates the expected event record' do
+ expect { service.execute }.to change { ResourceMilestoneEvent.count }.by(1)
+
+ expect_event_record(ResourceMilestoneEvent.last, action: 'add', milestone: milestone, state: 'opened')
+ end
+ end
+
+ context 'when milestone is not present' do
+ before do
+ resource.milestone = nil
+ end
+
+ it 'creates the expected event records' do
+ expect { service.execute }.to change { ResourceMilestoneEvent.count }.by(1)
+
+ expect_event_record(ResourceMilestoneEvent.last, action: 'remove', milestone: nil, state: 'opened')
+ end
+ end
+
+ def expect_event_record(event, expected_attrs)
+ expect(event.action).to eq(expected_attrs[:action])
+ expect(event.state).to eq(expected_attrs[:state])
+ expect(event.user).to eq(user)
+ expect(event.issue).to eq(resource) if resource.is_a?(Issue)
+ expect(event.issue).to be_nil unless resource.is_a?(Issue)
+ expect(event.merge_request).to eq(resource) if resource.is_a?(MergeRequest)
+ expect(event.merge_request).to be_nil unless resource.is_a?(MergeRequest)
+ expect(event.milestone).to eq(expected_attrs[:milestone])
+ expect(event.created_at).to eq(created_at_time)
+ end
+end
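'a milestone events creator' builds the service itself, so the including spec only has to provide resource. A minimal sketch, assuming the consumer is ResourceEvents::ChangeMilestoneService and that both issues and merge requests are exercised:

    describe ResourceEvents::ChangeMilestoneService do
      it_behaves_like 'a milestone events creator' do
        let(:resource) { create(:issue) }
      end

      it_behaves_like 'a milestone events creator' do
        let(:resource) { create(:merge_request) }
      end
    end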
diff --git a/spec/support/shared_examples/tasks/gitlab/import_export/import_measurement_shared_examples.rb b/spec/support/shared_examples/tasks/gitlab/import_export/import_measurement_shared_examples.rb
deleted file mode 100644
index e232f237df9..00000000000
--- a/spec/support/shared_examples/tasks/gitlab/import_export/import_measurement_shared_examples.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.shared_examples 'import measurement' do
- context 'when measurement is enabled' do
- let(:measurement_enabled) { true }
-
- it 'prints measurement results' do
- expect { subject }.to output(including('Measuring enabled...', 'Number of sql calls:', 'Total GC count:', 'Total GC count:')).to_stdout
- end
- end
-
- context 'when measurement is not enabled' do
- let(:measurement_enabled) { false }
-
- it 'does not output measurement results' do
- expect { subject }.not_to output(/Measuring enabled.../).to_stdout
- end
- end
-
- context 'when measurement is not provided' do
- let(:task_params) { [username, namespace_path, project_name, archive_path] }
-
- it 'does not output measurement results' do
- expect { subject }.not_to output(/Measuring enabled.../).to_stdout
- end
-
- it 'does not raise any exception' do
- expect { subject }.not_to raise_error
- end
- end
-end
diff --git a/spec/support/shared_examples/tasks/gitlab/import_export/measurable_shared_examples.rb b/spec/support/shared_examples/tasks/gitlab/import_export/measurable_shared_examples.rb
new file mode 100644
index 00000000000..45b2c5eac3a
--- /dev/null
+++ b/spec/support/shared_examples/tasks/gitlab/import_export/measurable_shared_examples.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'measurable' do
+ context 'when measurement is enabled' do
+ let(:measurement_enabled) { true }
+
+ it 'prints measurement results' do
+ expect { subject }.to output(including('Measuring enabled...', 'Number of sql calls:', 'Total GC count:', 'Total GC count:')).to_stdout
+ end
+ end
+
+ context 'when measurement is not enabled' do
+ let(:measurement_enabled) { false }
+
+ it 'does not output measurement results' do
+ expect { subject }.not_to output(/Measuring enabled.../).to_stdout
+ end
+ end
+
+ context 'when measurement is not provided' do
+ let(:measurement_enabled) { nil }
+
+ it 'does not output measurement results' do
+ expect { subject }.not_to output(/Measuring enabled.../).to_stdout
+ end
+
+ it 'does not raise any exception' do
+ expect { subject }.not_to raise_error
+ end
+ end
+end
diff --git a/spec/support/shared_examples/tasks/gitlab/uploads/migration_shared_examples.rb b/spec/support/shared_examples/tasks/gitlab/uploads/migration_shared_examples.rb
new file mode 100644
index 00000000000..b37a8059574
--- /dev/null
+++ b/spec/support/shared_examples/tasks/gitlab/uploads/migration_shared_examples.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+# Expects the calling spec to define:
+# - uploader_class
+# - model_class
+# - mounted_as
+RSpec.shared_examples 'enqueue upload migration jobs in batch' do |batch:|
+ def run(task)
+ args = [uploader_class.to_s, model_class.to_s, mounted_as].compact
+ run_rake_task(task, *args)
+ end
+
+ it 'migrates local storage to remote object storage' do
+ expect(ObjectStorage::MigrateUploadsWorker)
+ .to receive(:perform_async).exactly(batch).times
+ .and_return("A fake job.")
+
+ run('gitlab:uploads:migrate')
+ end
+
+ it 'migrates remote object storage to local storage' do
+ expect(Upload).to receive(:where).exactly(batch + 1).times { Upload.all }
+ expect(ObjectStorage::MigrateUploadsWorker)
+ .to receive(:perform_async)
+ .with(anything, model_class.name, mounted_as, ObjectStorage::Store::LOCAL)
+ .exactly(batch).times
+ .and_return("A fake job.")
+
+ run('gitlab:uploads:migrate_to_local')
+ end
+end
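For reference, the rake spec updated later in this diff consumes these examples by defining the three expected lets; condensed to the AvatarUploader/Project case it takes roughly this shape:

    describe 'gitlab:uploads:migrate and migrate_to_local rake tasks' do
      let(:uploader_class) { AvatarUploader }
      let(:model_class)    { Project }
      let(:mounted_as)     { :avatar }
      let!(:projects)      { create_list(:project, 10, :with_avatar) }

      it_behaves_like 'enqueue upload migration jobs in batch', batch: 4
    end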
diff --git a/spec/support/shared_examples/uploaders/upload_type_shared_examples.rb b/spec/support/shared_examples/uploaders/upload_type_shared_examples.rb
index 6e5075e135d..81ac6bd94db 100644
--- a/spec/support/shared_examples/uploaders/upload_type_shared_examples.rb
+++ b/spec/support/shared_examples/uploaders/upload_type_shared_examples.rb
@@ -1,65 +1,28 @@
# frozen_string_literal: true
-def check_content_matches_extension!(file = double(read: nil, path: ''))
- magic_file = UploadTypeCheck::MagicFile.new(file)
- uploader.check_content_matches_extension!(magic_file)
-end
-
-RSpec.shared_examples 'upload passes content type check' do
- it 'does not raise error' do
- expect { check_content_matches_extension! }.not_to raise_error
+# @param path [String] the path to the file to upload. E.g. File.join('spec', 'fixtures', 'sanitized.svg')
+# @param uploader [CarrierWave::Uploader::Base] uploader to handle the upload.
+shared_examples 'denied carrierwave upload' do
+ it 'will deny upload' do
+ fixture_file = fixture_file_upload(path)
+ expect { uploader.cache!(fixture_file) }.to raise_exception(CarrierWave::IntegrityError)
end
end
-RSpec.shared_examples 'upload fails content type check' do
- it 'raises error' do
- expect { check_content_matches_extension! }.to raise_error(CarrierWave::IntegrityError)
- end
-end
-
-def upload_type_checked_filenames(filenames)
- Array(filenames).each do |filename|
- # Feed the uploader "some" content.
- path = File.join('spec', 'fixtures', 'dk.png')
- file = File.new(path, 'r')
- # Rename the file with what we want.
- allow(file).to receive(:path).and_return(filename)
-
- # Force the content type to match the extension type.
- mime_type = MimeMagic.by_path(filename)
- allow(MimeMagic).to receive(:by_magic).and_return(mime_type)
-
- uploaded_file = Rack::Test::UploadedFile.new(file, original_filename: filename)
- uploader.cache!(uploaded_file)
- end
-end
+# @param path [String] the path to the file to upload. E.g. File.join('spec', 'fixtures', 'sanitized.svg')
+# @param uploader [CarrierWave::Uploader::Base] uploader to handle the upload.
+shared_examples 'accepted carrierwave upload' do
+ let(:fixture_file) { fixture_file_upload(path) }
-def upload_type_checked_fixtures(upload_fixtures)
- upload_fixtures = Array(upload_fixtures)
- upload_fixtures.each do |upload_fixture|
- path = File.join('spec', 'fixtures', upload_fixture)
- uploader.cache!(fixture_file_upload(path))
+ before do
+ uploader.remove!
end
-end
-
-RSpec.shared_examples 'type checked uploads' do |upload_fixtures = nil, filenames: nil|
- it 'check type' do
- upload_fixtures = Array(upload_fixtures)
- filenames = Array(filenames)
- times = upload_fixtures.length + filenames.length
- expect(uploader).to receive(:check_content_matches_extension!).exactly(times).times
-
- upload_type_checked_fixtures(upload_fixtures) unless upload_fixtures.empty?
- upload_type_checked_filenames(filenames) unless filenames.empty?
+ it 'will accept upload' do
+ expect { uploader.cache!(fixture_file) }.not_to raise_exception
end
-end
-
-RSpec.shared_examples 'skipped type checked uploads' do |upload_fixtures = nil, filenames: nil|
- it 'skip type check' do
- expect(uploader).not_to receive(:check_content_matches_extension!)
- upload_type_checked_fixtures(upload_fixtures) if upload_fixtures
- upload_type_checked_filenames(filenames) if filenames
+ it 'will cache uploaded file' do
+ expect { uploader.cache!(fixture_file) }.to change { uploader.file }.from(nil).to(kind_of(CarrierWave::SanitizedFile))
end
end
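The @param notes in the rewritten examples expect the including uploader spec to provide path and uploader. A hedged sketch pairing AvatarUploader with the dk.png and sanitized.svg fixtures seen elsewhere in this diff; the assumption that AvatarUploader accepts PNG but rejects SVG is mine, not something this diff states:

    describe AvatarUploader do
      let(:model)    { build_stubbed(:user) }
      let(:uploader) { described_class.new(model, :avatar) }

      context 'with a whitelisted image type' do
        let(:path) { File.join('spec', 'fixtures', 'dk.png') }

        it_behaves_like 'accepted carrierwave upload'
      end

      context 'with an SVG file' do
        let(:path) { File.join('spec', 'fixtures', 'sanitized.svg') }

        it_behaves_like 'denied carrierwave upload'
      end
    end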
diff --git a/spec/support/shared_examples/uploaders/workers/object_storage/migrate_uploads_shared_examples.rb b/spec/support/shared_examples/uploaders/workers/object_storage/migrate_uploads_shared_examples.rb
new file mode 100644
index 00000000000..f143cbc7165
--- /dev/null
+++ b/spec/support/shared_examples/uploaders/workers/object_storage/migrate_uploads_shared_examples.rb
@@ -0,0 +1,120 @@
+# frozen_string_literal: true
+
+# Expects the calling spec to define:
+# - model_class
+# - mounted_as
+# - to_store
+RSpec.shared_examples 'uploads migration worker' do
+ def perform(uploads, store = nil)
+ described_class.new.perform(uploads.ids, model_class.to_s, mounted_as, store || to_store)
+ rescue ObjectStorage::MigrateUploadsWorker::Report::MigrationFailures
+ # swallow
+ end
+
+ describe '.enqueue!' do
+ def enqueue!
+ described_class.enqueue!(uploads, model_class, mounted_as, to_store)
+ end
+
+ it 'is guarded by .sanity_check!' do
+ expect(described_class).to receive(:perform_async)
+ expect(described_class).to receive(:sanity_check!)
+
+ enqueue!
+ end
+
+ context 'sanity_check! fails' do
+ include_context 'sanity_check! fails'
+
+ it 'does not enqueue a job' do
+ expect(described_class).not_to receive(:perform_async)
+
+ expect { enqueue! }.to raise_error(described_class::SanityCheckError)
+ end
+ end
+ end
+
+ describe '.sanity_check!' do
+ shared_examples 'raises a SanityCheckError' do |expected_message|
+ let(:mount_point) { nil }
+
+ it do
+ expect { described_class.sanity_check!(uploads, model_class, mount_point) }
+ .to raise_error(described_class::SanityCheckError).with_message(expected_message)
+ end
+ end
+
+ context 'uploader types mismatch' do
+ let!(:outlier) { create(:upload, uploader: 'GitlabUploader') }
+
+ include_examples 'raises a SanityCheckError', /Multiple uploaders found/
+ end
+
+ context 'mount point not found' do
+ include_examples 'raises a SanityCheckError', /Mount point [a-z:]+ not found in/ do
+ let(:mount_point) { :potato }
+ end
+ end
+ end
+
+ describe '#perform' do
+ shared_examples 'outputs correctly' do |success: 0, failures: 0|
+ total = success + failures
+
+ if success > 0
+ it 'outputs the reports' do
+ expect(Rails.logger).to receive(:info).with(%r{Migrated #{success}/#{total} files})
+
+ perform(uploads)
+ end
+ end
+
+ if failures > 0
+ it 'outputs upload failures' do
+ expect(Rails.logger).to receive(:warn).with(/Error .* I am a teapot/)
+
+ perform(uploads)
+ end
+ end
+ end
+
+ it_behaves_like 'outputs correctly', success: 10
+
+ it 'migrates files to remote storage' do
+ perform(uploads)
+
+ expect(Upload.where(store: ObjectStorage::Store::LOCAL).count).to eq(0)
+ end
+
+ context 'reversed' do
+ let(:to_store) { ObjectStorage::Store::LOCAL }
+
+ before do
+ perform(uploads, ObjectStorage::Store::REMOTE)
+ end
+
+ it 'migrates files to local storage' do
+ expect(Upload.where(store: ObjectStorage::Store::REMOTE).count).to eq(10)
+
+ perform(uploads)
+
+ expect(Upload.where(store: ObjectStorage::Store::LOCAL).count).to eq(10)
+ end
+ end
+
+ context 'migration is unsuccessful' do
+ before do
+ allow_any_instance_of(ObjectStorage::Concern)
+ .to receive(:migrate!).and_raise(CarrierWave::UploadError, 'I am a teapot.')
+ end
+
+ it_behaves_like 'outputs correctly', failures: 10
+ end
+ end
+end
+
+RSpec.shared_context 'sanity_check! fails' do
+ before do
+ expect(described_class).to receive(:sanity_check!).and_raise(described_class::SanityCheckError)
+ end
+end
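As the header comment says, a worker spec including these examples must define model_class, mounted_as, and to_store; it also needs an uploads relation and ten migratable records to satisfy the count expectations. A hedged sketch (the Project/avatar pairing and the object storage stubbing are assumptions):

    describe ObjectStorage::MigrateUploadsWorker do
      let(:model_class) { Project }
      let(:mounted_as)  { :avatar }
      let(:to_store)    { ObjectStorage::Store::REMOTE }
      let(:uploads)     { Upload.all }

      let!(:projects) { create_list(:project, 10, :with_avatar) }

      before do
        stub_uploads_object_storage(AvatarUploader)
      end

      it_behaves_like 'uploads migration worker'
    end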
diff --git a/spec/support/shared_examples/views/pipeline_status_changes_email.rb b/spec/support/shared_examples/views/pipeline_status_changes_email.rb
new file mode 100644
index 00000000000..15b4ce9c44e
--- /dev/null
+++ b/spec/support/shared_examples/views/pipeline_status_changes_email.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+shared_examples 'pipeline status changes email' do
+ include Devise::Test::ControllerHelpers
+
+ let(:user) { create(:user, developer_projects: [project]) }
+ let(:project) { create(:project, :repository) }
+ let(:merge_request) { create(:merge_request, :simple, source_project: project) }
+
+ let(:pipeline) do
+ create(:ci_pipeline,
+ project: project,
+ user: user,
+ ref: project.default_branch,
+ sha: project.commit.sha,
+ status: status)
+ end
+
+ before do
+ assign(:project, project)
+ assign(:pipeline, pipeline)
+ assign(:merge_request, merge_request)
+ end
+
+ shared_examples_for 'renders the pipeline status changes email correctly' do
+ context 'pipeline with user' do
+ it 'renders the email correctly' do
+ render
+
+ expect(rendered).to have_content title
+ expect(rendered).to have_content pipeline.project.name
+ expect(rendered).to have_content pipeline.git_commit_message.truncate(50).gsub(/\s+/, ' ')
+ expect(rendered).to have_content pipeline.commit.author_name
+ expect(rendered).to have_content "##{pipeline.id}"
+ expect(rendered).to have_content pipeline.user.name
+
+ if status == :failed
+ expect(rendered).to have_content build.name
+ end
+ end
+
+ it_behaves_like 'correct pipeline information for pipelines for merge requests'
+ end
+
+ context 'pipeline without user' do
+ before do
+ pipeline.update_attribute(:user, nil)
+ end
+
+ it 'renders the email correctly' do
+ render
+
+ expect(rendered).to have_content title
+ expect(rendered).to have_content pipeline.project.name
+ expect(rendered).to have_content pipeline.git_commit_message.truncate(50).gsub(/\s+/, ' ')
+ expect(rendered).to have_content pipeline.commit.author_name
+ expect(rendered).to have_content "##{pipeline.id}"
+ expect(rendered).to have_content "by API"
+
+ if status == :failed
+ expect(rendered).to have_content build.name
+ end
+ end
+ end
+ end
+
+ context 'when the pipeline contains a failed job' do
+ let!(:build) { create(:ci_build, status: status, pipeline: pipeline, project: pipeline.project) }
+
+ it_behaves_like 'renders the pipeline status changes email correctly'
+ end
+
+ context 'when the latest failed job is a bridge job' do
+ let!(:build) { create(:ci_bridge, status: status, pipeline: pipeline, project: pipeline.project) }
+
+ it_behaves_like 'renders the pipeline status changes email correctly'
+ end
+end
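The email examples read status and title from the including view spec. A minimal sketch for the failed-pipeline notification view; the view path and title wording are assumptions:

    describe 'notify/pipeline_failed_email.html.haml' do
      let(:title)  { "Pipeline ##{pipeline.id} has failed!" }
      let(:status) { :failed }

      it_behaves_like 'pipeline status changes email'
    end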
diff --git a/spec/support/shared_examples/workers/idempotency_shared_examples.rb b/spec/support/shared_examples/workers/idempotency_shared_examples.rb
new file mode 100644
index 00000000000..9d9b371d61a
--- /dev/null
+++ b/spec/support/shared_examples/workers/idempotency_shared_examples.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+# This shared example requires the following variables:
+# - job_args (if not given, will fallback to call perform without arguments)
+#
+# Usage:
+#
+# include_examples 'an idempotent worker' do
+# it 'checks the side-effects for multiple calls' do
+# # it'll call the job's perform method 3 times
+# # by default.
+# subject
+#
+# expect(model.state).to eq('state')
+# end
+# end
+#
+RSpec.shared_examples 'an idempotent worker' do
+ let(:worker_exec_times) { IdempotentWorkerHelper::WORKER_EXEC_TIMES }
+
+ # Avoid stubbing calls for a more accurate run.
+ subject do
+ defined?(job_args) ? perform_multiple(job_args) : perform_multiple
+ end
+
+ it 'is labeled as idempotent' do
+ expect(described_class).to be_idempotent
+ end
+
+ it 'performs multiple times sequentially without raising an exception' do
+ expect { subject }.not_to raise_error
+ end
+end
diff --git a/spec/support_specs/matchers/exceed_query_limit_helpers_spec.rb b/spec/support_specs/matchers/exceed_query_limit_helpers_spec.rb
new file mode 100644
index 00000000000..3b15d804d7c
--- /dev/null
+++ b/spec/support_specs/matchers/exceed_query_limit_helpers_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ExceedQueryLimitHelpers do
+ class TestQueries < ActiveRecord::Base
+ self.table_name = 'schema_migrations'
+ end
+
+ class TestMatcher
+ include ExceedQueryLimitHelpers
+
+ def expected
+ ActiveRecord::QueryRecorder.new do
+ 2.times { TestQueries.count }
+ end
+ end
+ end
+
+ it 'does not contain marginalia annotations' do
+ test_matcher = TestMatcher.new
+ test_matcher.verify_count do
+ 2.times { TestQueries.count }
+ TestQueries.first
+ end
+
+ aggregate_failures do
+ expect(test_matcher.log_message)
+ .to match(%r{ORDER BY.*#{TestQueries.table_name}.*LIMIT 1})
+ expect(test_matcher.log_message)
+ .not_to match(%r{\/\*.*correlation_id.*\*\/})
+ end
+ end
+end
diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb
index e58919c8688..5e15ade492b 100644
--- a/spec/tasks/gitlab/backup_rake_spec.rb
+++ b/spec/tasks/gitlab/backup_rake_spec.rb
@@ -14,6 +14,14 @@ describe 'gitlab:app namespace rake task' do
tars_glob.first
end
+ def backup_files
+ %w(backup_information.yml artifacts.tar.gz builds.tar.gz lfs.tar.gz pages.tar.gz)
+ end
+
+ def backup_directories
+ %w(db repositories)
+ end
+
before(:all) do
Rake.application.rake_require 'tasks/gitlab/helpers'
Rake.application.rake_require 'tasks/gitlab/backup'
@@ -28,12 +36,16 @@ describe 'gitlab:app namespace rake task' do
before do
stub_env('force', 'yes')
FileUtils.rm(tars_glob, force: true)
+ FileUtils.rm(backup_files, force: true)
+ FileUtils.rm_rf(backup_directories, secure: true)
reenable_backup_sub_tasks
stub_container_registry_config(enabled: enable_registry)
end
after do
FileUtils.rm(tars_glob, force: true)
+ FileUtils.rm(backup_files, force: true)
+ FileUtils.rm_rf(backup_directories, secure: true)
end
def run_rake_task(task_name)
@@ -62,15 +74,6 @@ describe 'gitlab:app namespace rake task' do
let(:gitlab_version) { Gitlab::VERSION }
- it 'fails on mismatch' do
- allow(YAML).to receive(:load_file)
- .and_return({ gitlab_version: "not #{gitlab_version}" })
-
- expect do
- expect { run_rake_task('gitlab:backup:restore') }.to output.to_stdout
- end.to raise_error(SystemExit)
- end
-
context 'restore with matching gitlab version' do
before do
allow(YAML).to receive(:load_file)
@@ -241,7 +244,7 @@ describe 'gitlab:app namespace rake task' do
)
expect(exit_status).to eq(0)
- expect(tar_contents).to match('db/')
+ expect(tar_contents).to match('db')
expect(tar_contents).to match('uploads.tar.gz')
expect(tar_contents).to match('repositories/')
expect(tar_contents).to match('builds.tar.gz')
@@ -379,6 +382,50 @@ describe 'gitlab:app namespace rake task' do
end
end
+ describe 'skipping tar archive creation' do
+ before do
+ stub_env('SKIP', 'tar')
+ end
+
+ it 'creates files with backup content and no tar archive' do
+ expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout
+
+ dir_contents = Dir.children(Gitlab.config.backup.path)
+
+ expect(dir_contents).to contain_exactly(
+ 'backup_information.yml',
+ 'db',
+ 'uploads.tar.gz',
+ 'builds.tar.gz',
+ 'artifacts.tar.gz',
+ 'lfs.tar.gz',
+ 'pages.tar.gz',
+ 'registry.tar.gz',
+ 'repositories',
+ 'tmp'
+ )
+ end
+
+ it 'can restore from those component files' do
+ expect { run_rake_task("gitlab:backup:create") }.to output.to_stdout
+
+ allow(Rake::Task['gitlab:shell:setup'])
+ .to receive(:invoke).and_return(true)
+
+ expect(Rake::Task['gitlab:db:drop_tables']).to receive :invoke
+ expect(Rake::Task['gitlab:backup:db:restore']).to receive :invoke
+ expect(Rake::Task['gitlab:backup:repo:restore']).to receive :invoke
+ expect(Rake::Task['gitlab:backup:uploads:restore']).to receive :invoke
+ expect(Rake::Task['gitlab:backup:builds:restore']).to receive :invoke
+ expect(Rake::Task['gitlab:backup:artifacts:restore']).to receive :invoke
+ expect(Rake::Task['gitlab:backup:pages:restore']).to receive :invoke
+ expect(Rake::Task['gitlab:backup:lfs:restore']).to receive :invoke
+ expect(Rake::Task['gitlab:backup:registry:restore']).to receive :invoke
+ expect(Rake::Task['gitlab:shell:setup']).to receive :invoke
+ expect { run_rake_task("gitlab:backup:restore") }.to output.to_stdout
+ end
+ end
+
describe "Human Readable Backup Name" do
it 'name has human readable time' do
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout
diff --git a/spec/tasks/gitlab/check_rake_spec.rb b/spec/tasks/gitlab/check_rake_spec.rb
index b3c8ca03aec..e3e2a22add9 100644
--- a/spec/tasks/gitlab/check_rake_spec.rb
+++ b/spec/tasks/gitlab/check_rake_spec.rb
@@ -68,8 +68,8 @@ describe 'check.rake' do
context 'when LDAP is not enabled' do
it 'does not attempt to bind or search for users' do
- expect(Gitlab::Auth::LDAP::Config).not_to receive(:providers)
- expect(Gitlab::Auth::LDAP::Adapter).not_to receive(:open)
+ expect(Gitlab::Auth::Ldap::Config).not_to receive(:providers)
+ expect(Gitlab::Auth::Ldap::Adapter).not_to receive(:open)
subject
end
@@ -80,12 +80,12 @@ describe 'check.rake' do
let(:adapter) { ldap_adapter('ldapmain', ldap) }
before do
- allow(Gitlab::Auth::LDAP::Config)
+ allow(Gitlab::Auth::Ldap::Config)
.to receive_messages(
enabled?: true,
providers: ['ldapmain']
)
- allow(Gitlab::Auth::LDAP::Adapter).to receive(:open).and_yield(adapter)
+ allow(Gitlab::Auth::Ldap::Adapter).to receive(:open).and_yield(adapter)
allow(adapter).to receive(:users).and_return([])
end
diff --git a/spec/tasks/gitlab/import_export/import_rake_spec.rb b/spec/tasks/gitlab/import_export/import_rake_spec.rb
deleted file mode 100644
index 3a819d23299..00000000000
--- a/spec/tasks/gitlab/import_export/import_rake_spec.rb
+++ /dev/null
@@ -1,91 +0,0 @@
-# frozen_string_literal: true
-
-require 'rake_helper'
-
-describe 'gitlab:import_export:import rake task' do
- let(:username) { 'root' }
- let(:namespace_path) { username }
- let!(:user) { create(:user, username: username) }
- let(:measurement_enabled) { false }
- let(:task_params) { [username, namespace_path, project_name, archive_path, measurement_enabled] }
- let(:project) { Project.find_by_full_path("#{namespace_path}/#{project_name}") }
-
- before do
- Rake.application.rake_require('tasks/gitlab/import_export/import')
- allow(Settings.uploads.object_store).to receive(:[]=).and_call_original
- allow_any_instance_of(GitlabProjectImport).to receive(:exit)
- .and_raise(RuntimeError, 'exit not handled')
- end
-
- around do |example|
- old_direct_upload_setting = Settings.uploads.object_store['direct_upload']
- old_background_upload_setting = Settings.uploads.object_store['background_upload']
-
- Settings.uploads.object_store['direct_upload'] = true
- Settings.uploads.object_store['background_upload'] = true
-
- example.run
-
- Settings.uploads.object_store['direct_upload'] = old_direct_upload_setting
- Settings.uploads.object_store['background_upload'] = old_background_upload_setting
- end
-
- subject { run_rake_task('gitlab:import_export:import', task_params) }
-
- context 'when project import is valid' do
- let(:project_name) { 'import_rake_test_project' }
- let(:archive_path) { 'spec/fixtures/gitlab/import_export/lightweight_project_export.tar.gz' }
-
- it 'performs project import successfully' do
- expect { subject }.to output(/Done!/).to_stdout
- expect { subject }.not_to raise_error
-
- expect(project.merge_requests.count).to be > 0
- expect(project.issues.count).to be > 0
- expect(project.milestones.count).to be > 0
- expect(project.import_state.status).to eq('finished')
- end
-
- it 'disables direct & background upload only during project creation' do
- expect_next_instance_of(Projects::GitlabProjectsImportService) do |service|
- expect(service).to receive(:execute).and_wrap_original do |m|
- expect(Settings.uploads.object_store['background_upload']).to eq(false)
- expect(Settings.uploads.object_store['direct_upload']).to eq(false)
-
- m.call
- end
- end
-
- expect_next_instance_of(GitlabProjectImport) do |importer|
- expect(importer).to receive(:execute_sidekiq_job).and_wrap_original do |m|
- expect(Settings.uploads.object_store['background_upload']).to eq(true)
- expect(Settings.uploads.object_store['direct_upload']).to eq(true)
- expect(Settings.uploads.object_store).not_to receive(:[]=).with('backgroud_upload', false)
- expect(Settings.uploads.object_store).not_to receive(:[]=).with('direct_upload', false)
-
- m.call
- end
- end
-
- subject
- end
-
- it_behaves_like 'import measurement'
- end
-
- context 'when project import is invalid' do
- let(:project_name) { 'import_rake_invalid_test_project' }
- let(:archive_path) { 'spec/fixtures/gitlab/import_export/corrupted_project_export.tar.gz' }
- let(:not_imported_message) { /Total number of not imported relations: 1/ }
- let(:error) { /Validation failed: Notes is invalid/ }
-
- it 'performs project import successfully' do
- expect { subject }.to output(not_imported_message).to_stdout
- expect { subject }.not_to raise_error
-
- expect(project.merge_requests).to be_empty
- expect(project.import_state.last_error).to be_nil
- expect(project.import_state.status).to eq('finished')
- end
- end
-end
diff --git a/spec/tasks/gitlab/uploads/migrate_rake_spec.rb b/spec/tasks/gitlab/uploads/migrate_rake_spec.rb
index 2f773bdfeec..8ea0a98a1dc 100644
--- a/spec/tasks/gitlab/uploads/migrate_rake_spec.rb
+++ b/spec/tasks/gitlab/uploads/migrate_rake_spec.rb
@@ -16,32 +16,6 @@ describe 'gitlab:uploads:migrate and migrate_to_local rake tasks' do
allow(ObjectStorage::MigrateUploadsWorker).to receive(:perform_async)
end
- def run(task)
- args = [uploader_class.to_s, model_class.to_s, mounted_as].compact
- run_rake_task(task, *args)
- end
-
- shared_examples 'enqueue jobs in batch' do |batch:|
- it 'migrates local storage to remote object storage' do
- expect(ObjectStorage::MigrateUploadsWorker)
- .to receive(:perform_async).exactly(batch).times
- .and_return("A fake job.")
-
- run('gitlab:uploads:migrate')
- end
-
- it 'migrates remote object storage to local storage' do
- expect(Upload).to receive(:where).exactly(batch + 1).times { Upload.all }
- expect(ObjectStorage::MigrateUploadsWorker)
- .to receive(:perform_async)
- .with(anything, model_class.name, mounted_as, ObjectStorage::Store::LOCAL)
- .exactly(batch).times
- .and_return("A fake job.")
-
- run('gitlab:uploads:migrate_to_local')
- end
- end
-
context "for AvatarUploader" do
let(:uploader_class) { AvatarUploader }
let(:mounted_as) { :avatar }
@@ -50,7 +24,7 @@ describe 'gitlab:uploads:migrate and migrate_to_local rake tasks' do
let(:model_class) { Project }
let!(:projects) { create_list(:project, 10, :with_avatar) }
- it_behaves_like 'enqueue jobs in batch', batch: 4
+ it_behaves_like 'enqueue upload migration jobs in batch', batch: 4
end
context "for Group" do
@@ -60,7 +34,7 @@ describe 'gitlab:uploads:migrate and migrate_to_local rake tasks' do
create_list(:group, 10, :with_avatar)
end
- it_behaves_like 'enqueue jobs in batch', batch: 4
+ it_behaves_like 'enqueue upload migration jobs in batch', batch: 4
end
context "for User" do
@@ -70,7 +44,7 @@ describe 'gitlab:uploads:migrate and migrate_to_local rake tasks' do
create_list(:user, 10, :with_avatar)
end
- it_behaves_like 'enqueue jobs in batch', batch: 4
+ it_behaves_like 'enqueue upload migration jobs in batch', batch: 4
end
end
@@ -85,7 +59,7 @@ describe 'gitlab:uploads:migrate and migrate_to_local rake tasks' do
create_list(:note, 10, :with_attachment)
end
- it_behaves_like 'enqueue jobs in batch', batch: 4
+ it_behaves_like 'enqueue upload migration jobs in batch', batch: 4
end
context "for Appearance" do
@@ -97,7 +71,7 @@ describe 'gitlab:uploads:migrate and migrate_to_local rake tasks' do
end
%i(logo header_logo).each do |mount|
- it_behaves_like 'enqueue jobs in batch', batch: 1 do
+ it_behaves_like 'enqueue upload migration jobs in batch', batch: 1 do
let(:mounted_as) { mount }
end
end
@@ -115,7 +89,7 @@ describe 'gitlab:uploads:migrate and migrate_to_local rake tasks' do
end
end
- it_behaves_like 'enqueue jobs in batch', batch: 4
+ it_behaves_like 'enqueue upload migration jobs in batch', batch: 4
end
context "for PersonalFileUploader" do
@@ -129,7 +103,7 @@ describe 'gitlab:uploads:migrate and migrate_to_local rake tasks' do
end
end
- it_behaves_like 'enqueue jobs in batch', batch: 4
+ it_behaves_like 'enqueue upload migration jobs in batch', batch: 4
end
context "for NamespaceFileUploader" do
@@ -143,6 +117,6 @@ describe 'gitlab:uploads:migrate and migrate_to_local rake tasks' do
end
end
- it_behaves_like 'enqueue jobs in batch', batch: 4
+ it_behaves_like 'enqueue upload migration jobs in batch', batch: 4
end
end
diff --git a/spec/tasks/gitlab/web_hook_rake_spec.rb b/spec/tasks/gitlab/web_hook_rake_spec.rb
index be31507000d..ee3c7e034f3 100644
--- a/spec/tasks/gitlab/web_hook_rake_spec.rb
+++ b/spec/tasks/gitlab/web_hook_rake_spec.rb
@@ -3,11 +3,10 @@
require 'rake_helper'
describe 'gitlab:web_hook namespace rake tasks' do
- set(:group) { create(:group) }
-
- set(:project1) { create(:project, namespace: group) }
- set(:project2) { create(:project, namespace: group) }
- set(:other_group_project) { create(:project) }
+ let_it_be(:group, refind: true) { create(:group) }
+ let_it_be(:project1, reload: true) { create(:project, namespace: group) }
+ let_it_be(:project2, reload: true) { create(:project, namespace: group) }
+ let_it_be(:other_group_project, reload: true) { create(:project) }
let(:url) { 'http://example.com' }
let(:hook_urls) { (project1.hooks + project2.hooks).map(&:url) }
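For context, the `refind:` and `reload:` options used above belong to the test-prof gem's `let_it_be`, which replaces the removed `set` helper. A short sketch of their documented semantics, with illustrative names only:

# let_it_be creates the record once per example group rather than once per example.
RSpec.describe 'let_it_be modifiers (illustrative)' do
  # refind: true re-fetches a fresh instance with Model.find before each example,
  # discarding in-memory changes left over from previous examples.
  let_it_be(:group, refind: true) { create(:group) }

  # reload: true keeps the same instance but calls #reload before each example,
  # refreshing its attributes from the database.
  let_it_be(:project, reload: true) { create(:project, namespace: group) }
end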
diff --git a/spec/uploaders/avatar_uploader_spec.rb b/spec/uploaders/avatar_uploader_spec.rb
index 669f75b2ee8..142ee557afa 100644
--- a/spec/uploaders/avatar_uploader_spec.rb
+++ b/spec/uploaders/avatar_uploader_spec.rb
@@ -47,15 +47,29 @@ describe AvatarUploader do
end
end
- context 'upload type check' do
- AvatarUploader::SAFE_IMAGE_EXT.each do |ext|
- context "#{ext} extension" do
- it_behaves_like 'type checked uploads', filenames: "image.#{ext}"
- end
- end
+ context 'accept whitelist file content type' do
+ # We need to feed through a valid path, but we force the parsed mime type
+ # in a stub below so we can set any path.
+ let_it_be(:path) { File.join('spec', 'fixtures', 'video_sample.mp4') }
+
+ where(:mime_type) { described_class::MIME_WHITELIST }
+
+ with_them do
+ include_context 'force content type detection to mime_type'
- context 'skip image/svg+xml integrity check' do
- it_behaves_like 'skipped type checked uploads', filenames: 'image.svg'
+ it_behaves_like 'accepted carrierwave upload'
end
end
+
+ context 'upload non-whitelisted file content type' do
+ let_it_be(:path) { File.join('spec', 'fixtures', 'sanitized.svg') }
+
+ it_behaves_like 'denied carrierwave upload'
+ end
+
+ context 'upload misnamed non-whitelisted file content type' do
+ let_it_be(:path) { File.join('spec', 'fixtures', 'not_a_png.png') }
+
+ it_behaves_like 'denied carrierwave upload'
+ end
end
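The `include_context 'force content type detection to mime_type'` lines above rely on a shared context defined in a support file that is not part of this diff. A minimal sketch of what such a context could look like, assuming detection is stubbed on `CarrierWave::SanitizedFile#content_type` (an illustration only, not necessarily how the real helper is implemented):

RSpec.shared_context 'force content type detection to mime_type' do
  before do
    # Make every cached file report the MIME type under test, regardless of
    # which fixture path is actually fed through the uploader.
    allow_any_instance_of(CarrierWave::SanitizedFile)
      .to receive(:content_type).and_return(mime_type)
  end
end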
diff --git a/spec/uploaders/content_type_whitelist_spec.rb b/spec/uploaders/content_type_whitelist_spec.rb
new file mode 100644
index 00000000000..be519ead1c8
--- /dev/null
+++ b/spec/uploaders/content_type_whitelist_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ContentTypeWhitelist do
+ class DummyUploader < CarrierWave::Uploader::Base
+ include ContentTypeWhitelist::Concern
+
+ def content_type_whitelist
+ %w[image/png image/jpeg]
+ end
+ end
+
+ let_it_be(:model) { build_stubbed(:user) }
+ let_it_be(:uploader) { DummyUploader.new(model, :dummy) }
+
+ context 'upload whitelisted file content type' do
+ let(:path) { File.join('spec', 'fixtures', 'rails_sample.jpg') }
+
+ it_behaves_like 'accepted carrierwave upload'
+ end
+
+ context 'upload non-whitelisted file content type' do
+ let(:path) { File.join('spec', 'fixtures', 'sanitized.svg') }
+
+ it_behaves_like 'denied carrierwave upload'
+ end
+
+ context 'upload misnamed non-whitelisted file content type' do
+ let(:path) { File.join('spec', 'fixtures', 'not_a_png.png') }
+
+ it_behaves_like 'denied carrierwave upload'
+ end
+end
diff --git a/spec/uploaders/favicon_uploader_spec.rb b/spec/uploaders/favicon_uploader_spec.rb
index 4d6c849883a..0f5941b3f0a 100644
--- a/spec/uploaders/favicon_uploader_spec.rb
+++ b/spec/uploaders/favicon_uploader_spec.rb
@@ -6,19 +6,35 @@ describe FaviconUploader do
let_it_be(:model) { build_stubbed(:user) }
let_it_be(:uploader) { described_class.new(model, :favicon) }
- context 'upload type check' do
- FaviconUploader::EXTENSION_WHITELIST.each do |ext|
- context "#{ext} extension" do
- it_behaves_like 'type checked uploads', filenames: "image.#{ext}"
- end
+ context 'accept whitelist file content type' do
+ include_context 'ignore extension whitelist check'
+
+ # We need to feed through a valid path, but we force the parsed mime type
+ # in a stub below so we can set any path.
+ let_it_be(:path) { File.join('spec', 'fixtures', 'video_sample.mp4') }
+
+ where(:mime_type) { described_class::MIME_WHITELIST }
+
+ with_them do
+ include_context 'force content type detection to mime_type'
+
+ it_behaves_like 'accepted carrierwave upload'
end
end
- context 'upload non-whitelisted file extensions' do
- it 'will deny upload' do
- path = File.join('spec', 'fixtures', 'banana_sample.gif')
- fixture_file = fixture_file_upload(path)
- expect { uploader.cache!(fixture_file) }.to raise_exception(CarrierWave::IntegrityError)
- end
+ context 'upload non-whitelisted file content type' do
+ include_context 'ignore extension whitelist check'
+
+ let_it_be(:path) { File.join('spec', 'fixtures', 'sanitized.svg') }
+
+ it_behaves_like 'denied carrierwave upload'
+ end
+
+ context 'upload misnamed non-whitelisted file content type' do
+ include_context 'ignore extension whitelist check'
+
+ let_it_be(:path) { File.join('spec', 'fixtures', 'not_a_png.png') }
+
+ it_behaves_like 'denied carrierwave upload'
end
end
diff --git a/spec/uploaders/object_storage_spec.rb b/spec/uploaders/object_storage_spec.rb
index 2f2ed28891a..06116bd4737 100644
--- a/spec/uploaders/object_storage_spec.rb
+++ b/spec/uploaders/object_storage_spec.rb
@@ -272,7 +272,7 @@ describe ObjectStorage do
end
it "to raise an error" do
- expect { subject }.to raise_error(/Object Storage is not enabled/)
+ expect { subject }.to raise_error(/Object Storage is not enabled for JobArtifactUploader/)
end
end
diff --git a/spec/uploaders/upload_type_check_spec.rb b/spec/uploaders/upload_type_check_spec.rb
deleted file mode 100644
index a4895f6a956..00000000000
--- a/spec/uploaders/upload_type_check_spec.rb
+++ /dev/null
@@ -1,124 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe UploadTypeCheck do
- include_context 'uploader with type check'
-
- def upload_fixture(filename)
- fixture_file_upload(File.join('spec', 'fixtures', filename))
- end
-
- describe '#check_content_matches_extension! callback using file upload' do
- context 'when extension matches contents' do
- it 'does not raise an error on upload' do
- expect { uploader.cache!(upload_fixture('banana_sample.gif')) }.not_to raise_error
- end
- end
-
- context 'when extension does not match contents' do
- it 'raises an error' do
- expect { uploader.cache!(upload_fixture('not_a_png.png')) }.to raise_error(CarrierWave::IntegrityError)
- end
- end
- end
-
- describe '#check_content_matches_extension! callback using stubs' do
- include_context 'stubbed MimeMagic mime type detection'
-
- context 'when no extension and with ambiguous/text content' do
- let(:magic_mime) { '' }
- let(:ext_mime) { '' }
-
- it_behaves_like 'upload passes content type check'
- end
-
- context 'when no extension and with non-text content' do
- let(:magic_mime) { 'image/gif' }
- let(:ext_mime) { '' }
-
- it_behaves_like 'upload fails content type check'
- end
-
- # Most text files will exhibit this behaviour.
- context 'when ambiguous content with text extension' do
- let(:magic_mime) { '' }
- let(:ext_mime) { 'text/plain' }
-
- it_behaves_like 'upload passes content type check'
- end
-
- context 'when text content with text extension' do
- let(:magic_mime) { 'text/plain' }
- let(:ext_mime) { 'text/plain' }
-
- it_behaves_like 'upload passes content type check'
- end
-
- context 'when ambiguous content with non-text extension' do
- let(:magic_mime) { '' }
- let(:ext_mime) { 'application/zip' }
-
- it_behaves_like 'upload fails content type check'
- end
-
- # These are the types when uploading a .dmg
- context 'when content and extension do not match' do
- let(:magic_mime) { 'application/x-bzip' }
- let(:ext_mime) { 'application/x-apple-diskimage' }
-
- it_behaves_like 'upload fails content type check'
- end
- end
-
- describe '#check_content_matches_extension! mime_type filtering' do
- context 'without mime types' do
- let(:mime_types) { nil }
-
- it_behaves_like 'type checked uploads', %w[doc_sample.txt rails_sample.jpg]
- end
-
- context 'with mime types string' do
- let(:mime_types) { 'text/plain' }
-
- it_behaves_like 'type checked uploads', %w[doc_sample.txt]
- it_behaves_like 'skipped type checked uploads', %w[dk.png]
- end
-
- context 'with mime types regex' do
- let(:mime_types) { [/image\/(gif|png)/] }
-
- it_behaves_like 'type checked uploads', %w[banana_sample.gif dk.png]
- it_behaves_like 'skipped type checked uploads', %w[doc_sample.txt]
- end
-
- context 'with mime types array' do
- let(:mime_types) { ['text/plain', /image\/png/] }
-
- it_behaves_like 'type checked uploads', %w[doc_sample.txt dk.png]
- it_behaves_like 'skipped type checked uploads', %w[audio_sample.wav]
- end
- end
-
- describe '#check_content_matches_extension! extensions filtering' do
- context 'without extensions' do
- let(:extensions) { nil }
-
- it_behaves_like 'type checked uploads', %w[doc_sample.txt dk.png]
- end
-
- context 'with extensions string' do
- let(:extensions) { 'txt' }
-
- it_behaves_like 'type checked uploads', %w[doc_sample.txt]
- it_behaves_like 'skipped type checked uploads', %w[rails_sample.jpg]
- end
-
- context 'with extensions array of strings' do
- let(:extensions) { %w[txt png] }
-
- it_behaves_like 'type checked uploads', %w[doc_sample.txt dk.png]
- it_behaves_like 'skipped type checked uploads', %w[audio_sample.wav]
- end
- end
-end
diff --git a/spec/uploaders/workers/object_storage/migrate_uploads_worker_spec.rb b/spec/uploaders/workers/object_storage/migrate_uploads_worker_spec.rb
index 89a1fa80943..fcb8f4e51b5 100644
--- a/spec/uploaders/workers/object_storage/migrate_uploads_worker_spec.rb
+++ b/spec/uploaders/workers/object_storage/migrate_uploads_worker_spec.rb
@@ -3,12 +3,6 @@
require 'spec_helper'
describe ObjectStorage::MigrateUploadsWorker do
- shared_context 'sanity_check! fails' do
- before do
- expect(described_class).to receive(:sanity_check!).and_raise(described_class::SanityCheckError)
- end
- end
-
let(:model_class) { Project }
let(:uploads) { Upload.all }
let(:to_store) { ObjectStorage::Store::REMOTE }
@@ -19,109 +13,6 @@ describe ObjectStorage::MigrateUploadsWorker do
# swallow
end
- shared_examples "uploads migration worker" do
- describe '.enqueue!' do
- def enqueue!
- described_class.enqueue!(uploads, Project, mounted_as, to_store)
- end
-
- it 'is guarded by .sanity_check!' do
- expect(described_class).to receive(:perform_async)
- expect(described_class).to receive(:sanity_check!)
-
- enqueue!
- end
-
- context 'sanity_check! fails' do
- include_context 'sanity_check! fails'
-
- it 'does not enqueue a job' do
- expect(described_class).not_to receive(:perform_async)
-
- expect { enqueue! }.to raise_error(described_class::SanityCheckError)
- end
- end
- end
-
- describe '.sanity_check!' do
- shared_examples 'raises a SanityCheckError' do |expected_message|
- let(:mount_point) { nil }
-
- it do
- expect { described_class.sanity_check!(uploads, model_class, mount_point) }
- .to raise_error(described_class::SanityCheckError).with_message(expected_message)
- end
- end
-
- context 'uploader types mismatch' do
- let!(:outlier) { create(:upload, uploader: 'GitlabUploader') }
-
- include_examples 'raises a SanityCheckError', /Multiple uploaders found/
- end
-
- context 'mount point not found' do
- include_examples 'raises a SanityCheckError', /Mount point [a-z:]+ not found in/ do
- let(:mount_point) { :potato }
- end
- end
- end
-
- describe '#perform' do
- shared_examples 'outputs correctly' do |success: 0, failures: 0|
- total = success + failures
-
- if success > 0
- it 'outputs the reports' do
- expect(Rails.logger).to receive(:info).with(%r{Migrated #{success}/#{total} files})
-
- perform(uploads)
- end
- end
-
- if failures > 0
- it 'outputs upload failures' do
- expect(Rails.logger).to receive(:warn).with(/Error .* I am a teapot/)
-
- perform(uploads)
- end
- end
- end
-
- it_behaves_like 'outputs correctly', success: 10
-
- it 'migrates files to remote storage' do
- perform(uploads)
-
- expect(Upload.where(store: ObjectStorage::Store::LOCAL).count).to eq(0)
- end
-
- context 'reversed' do
- let(:to_store) { ObjectStorage::Store::LOCAL }
-
- before do
- perform(uploads, ObjectStorage::Store::REMOTE)
- end
-
- it 'migrates files to local storage' do
- expect(Upload.where(store: ObjectStorage::Store::REMOTE).count).to eq(10)
-
- perform(uploads)
-
- expect(Upload.where(store: ObjectStorage::Store::LOCAL).count).to eq(10)
- end
- end
-
- context 'migration is unsuccessful' do
- before do
- allow_any_instance_of(ObjectStorage::Concern)
- .to receive(:migrate!).and_raise(CarrierWave::UploadError, "I am a teapot.")
- end
-
- it_behaves_like 'outputs correctly', failures: 10
- end
- end
- end
-
context "for AvatarUploader" do
let!(:projects) { create_list(:project, 10, :with_avatar) }
let(:mounted_as) { :avatar }
diff --git a/spec/views/admin/sessions/new.html.haml_spec.rb b/spec/views/admin/sessions/new.html.haml_spec.rb
index b3208296c80..05601e5471e 100644
--- a/spec/views/admin/sessions/new.html.haml_spec.rb
+++ b/spec/views/admin/sessions/new.html.haml_spec.rb
@@ -15,7 +15,7 @@ describe 'admin/sessions/new.html.haml' do
render
expect(rendered).to have_css('#login-pane.active')
- expect(rendered).to have_selector('input[name="password"]')
+ expect(rendered).to have_selector('input[name="user[password]"]')
end
it 'warns authentication not possible if password not set' do
diff --git a/spec/views/admin/sessions/two_factor.html.haml_spec.rb b/spec/views/admin/sessions/two_factor.html.haml_spec.rb
new file mode 100644
index 00000000000..2c061c7707b
--- /dev/null
+++ b/spec/views/admin/sessions/two_factor.html.haml_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'admin/sessions/two_factor.html.haml' do
+ before do
+ allow(view).to receive(:current_user).and_return(user)
+ end
+
+ context 'user has no two factor auth' do
+ let(:user) { create(:admin) }
+
+ it 'shows tab' do
+ render
+
+ expect(rendered).to have_no_field('user[otp_attempt]')
+ expect(rendered).to have_no_field('user[device_response]')
+ end
+ end
+
+ context 'user has otp active' do
+ let(:user) { create(:admin, :two_factor) }
+
+ it 'shows enter otp form' do
+ render
+
+ expect(rendered).to have_css('#login-pane.active')
+ expect(rendered).to have_field('user[otp_attempt]')
+ end
+ end
+
+ context 'user has u2f active' do
+ let(:user) { create(:admin, :two_factor_via_u2f) }
+
+ it 'shows enter u2f form' do
+ render
+
+ expect(rendered).to have_css('#js-login-2fa-device.btn')
+ end
+ end
+end
diff --git a/spec/views/import/gitlab_projects/new.html.haml_spec.rb b/spec/views/import/gitlab_projects/new.html.haml_spec.rb
index 953fcc6dc51..17636c99cbc 100644
--- a/spec/views/import/gitlab_projects/new.html.haml_spec.rb
+++ b/spec/views/import/gitlab_projects/new.html.haml_spec.rb
@@ -5,17 +5,18 @@ require 'spec_helper'
describe 'import/gitlab_projects/new.html.haml' do
include Devise::Test::ControllerHelpers
- let(:user) { build_stubbed(:user, namespace: build_stubbed(:namespace)) }
+ let(:namespace) { build_stubbed(:namespace) }
+ let(:user) { build_stubbed(:user, namespace: namespace) }
before do
allow(view).to receive(:current_user).and_return(user)
end
context 'when the user has no other namespaces' do
- it 'shows a namespace_id hidden field tag' do
+ it 'adds a namespace_id hidden field tag with the namespace id as value' do
render
- expect(rendered).to have_css('input[name="namespace_id"]', count: 1, visible: false)
+ expect(rendered).to have_css("input[name='namespace_id'][value='#{namespace.id}']", count: 1, visible: false)
end
end
diff --git a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
index 6ca8fa2bc5c..cd622807c09 100644
--- a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
+++ b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
@@ -166,14 +166,13 @@ describe 'layouts/nav/sidebar/_project' do
before do
allow(view).to receive(:can?).with(nil, :read_cycle_analytics, project).and_return(read_cycle_analytics)
- stub_feature_flags(analytics_pages_under_project_analytics_sidebar: { enabled: false, thing: project })
end
describe 'when value stream analytics is enabled' do
it 'shows the value stream analytics entry' do
render
- expect(rendered).to have_link('Value Stream Analytics', href: project_cycle_analytics_path(project))
+ expect(rendered).to have_link('Value Stream', href: project_cycle_analytics_path(project))
end
end
@@ -183,7 +182,7 @@ describe 'layouts/nav/sidebar/_project' do
it 'does not show the value stream analytics entry' do
render
- expect(rendered).not_to have_link('Value Stream Analytics', href: project_cycle_analytics_path(project))
+ expect(rendered).not_to have_link('Value Stream', href: project_cycle_analytics_path(project))
end
end
end
diff --git a/spec/views/notify/pipeline_failed_email.html.haml_spec.rb b/spec/views/notify/pipeline_failed_email.html.haml_spec.rb
index a540a53c91d..80dc14b523d 100644
--- a/spec/views/notify/pipeline_failed_email.html.haml_spec.rb
+++ b/spec/views/notify/pipeline_failed_email.html.haml_spec.rb
@@ -3,72 +3,8 @@
require 'spec_helper'
describe 'notify/pipeline_failed_email.html.haml' do
- include Devise::Test::ControllerHelpers
-
- let(:user) { create(:user, developer_projects: [project]) }
- let(:project) { create(:project, :repository) }
- let(:merge_request) { create(:merge_request, :simple, source_project: project) }
-
- let(:pipeline) do
- create(:ci_pipeline,
- project: project,
- user: user,
- ref: project.default_branch,
- sha: project.commit.sha,
- status: :failed)
- end
-
- before do
- assign(:project, project)
- assign(:pipeline, pipeline)
- assign(:merge_request, merge_request)
- end
-
- shared_examples_for 'renders the pipeline failed email correctly' do
- context 'pipeline with user' do
- it 'renders the email correctly' do
- render
-
- expect(rendered).to have_content "Your pipeline has failed"
- expect(rendered).to have_content pipeline.project.name
- expect(rendered).to have_content pipeline.git_commit_message.truncate(50).gsub(/\s+/, ' ')
- expect(rendered).to have_content pipeline.commit.author_name
- expect(rendered).to have_content "##{pipeline.id}"
- expect(rendered).to have_content pipeline.user.name
- expect(rendered).to have_content build.name
- end
-
- it_behaves_like 'correct pipeline information for pipelines for merge requests'
- end
-
- context 'pipeline without user' do
- before do
- pipeline.update_attribute(:user, nil)
- end
-
- it 'renders the email correctly' do
- render
-
- expect(rendered).to have_content "Your pipeline has failed"
- expect(rendered).to have_content pipeline.project.name
- expect(rendered).to have_content pipeline.git_commit_message.truncate(50).gsub(/\s+/, ' ')
- expect(rendered).to have_content pipeline.commit.author_name
- expect(rendered).to have_content "##{pipeline.id}"
- expect(rendered).to have_content "by API"
- expect(rendered).to have_content build.name
- end
- end
- end
-
- context 'when the pipeline contains a failed job' do
- let!(:build) { create(:ci_build, :failed, pipeline: pipeline, project: pipeline.project) }
-
- it_behaves_like 'renders the pipeline failed email correctly'
- end
-
- context 'when the latest failed job is a bridge job' do
- let!(:build) { create(:ci_bridge, status: :failed, pipeline: pipeline, project: pipeline.project) }
-
- it_behaves_like 'renders the pipeline failed email correctly'
+ it_behaves_like 'pipeline status changes email' do
+ let(:title) { 'Your pipeline has failed' }
+ let(:status) { :failed }
end
end
diff --git a/spec/views/notify/pipeline_fixed_email.html.haml_spec.rb b/spec/views/notify/pipeline_fixed_email.html.haml_spec.rb
new file mode 100644
index 00000000000..382fc5ecdd3
--- /dev/null
+++ b/spec/views/notify/pipeline_fixed_email.html.haml_spec.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'notify/pipeline_fixed_email.html.haml' do
+ it_behaves_like 'pipeline status changes email' do
+ let(:title) { 'Your pipeline has been fixed!' }
+ let(:status) { :success }
+ end
+end
diff --git a/spec/views/notify/pipeline_fixed_email.text.erb_spec.rb b/spec/views/notify/pipeline_fixed_email.text.erb_spec.rb
new file mode 100644
index 00000000000..ec540dc3f77
--- /dev/null
+++ b/spec/views/notify/pipeline_fixed_email.text.erb_spec.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'notify/pipeline_fixed_email.text.erb' do
+ it_behaves_like 'pipeline status changes email' do
+ let(:title) { 'Your pipeline has been fixed!' }
+ let(:status) { :success }
+ end
+end
diff --git a/spec/views/notify/pipeline_success_email.html.haml_spec.rb b/spec/views/notify/pipeline_success_email.html.haml_spec.rb
index fbf33b7ec35..417909fd67b 100644
--- a/spec/views/notify/pipeline_success_email.html.haml_spec.rb
+++ b/spec/views/notify/pipeline_success_email.html.haml_spec.rb
@@ -3,56 +3,8 @@
require 'spec_helper'
describe 'notify/pipeline_success_email.html.haml' do
- include Devise::Test::ControllerHelpers
-
- let(:user) { create(:user, developer_projects: [project]) }
- let(:project) { create(:project, :repository) }
- let(:merge_request) { create(:merge_request, :simple, source_project: project) }
-
- let(:pipeline) do
- create(:ci_pipeline,
- project: project,
- user: user,
- ref: project.default_branch,
- sha: project.commit.sha,
- status: :success)
- end
-
- before do
- assign(:project, project)
- assign(:pipeline, pipeline)
- assign(:merge_request, merge_request)
- end
-
- context 'pipeline with user' do
- it 'renders the email correctly' do
- render
-
- expect(rendered).to have_content "Your pipeline has passed"
- expect(rendered).to have_content pipeline.project.name
- expect(rendered).to have_content pipeline.git_commit_message.truncate(50).gsub(/\s+/, ' ')
- expect(rendered).to have_content pipeline.commit.author_name
- expect(rendered).to have_content "##{pipeline.id}"
- expect(rendered).to have_content pipeline.user.name
- end
-
- it_behaves_like 'correct pipeline information for pipelines for merge requests'
- end
-
- context 'pipeline without user' do
- before do
- pipeline.update_attribute(:user, nil)
- end
-
- it 'renders the email correctly' do
- render
-
- expect(rendered).to have_content "Your pipeline has passed"
- expect(rendered).to have_content pipeline.project.name
- expect(rendered).to have_content pipeline.git_commit_message.truncate(50).gsub(/\s+/, ' ')
- expect(rendered).to have_content pipeline.commit.author_name
- expect(rendered).to have_content "##{pipeline.id}"
- expect(rendered).to have_content "by API"
- end
+ it_behaves_like 'pipeline status changes email' do
+ let(:title) { 'Your pipeline has passed' }
+ let(:status) { :success }
end
end
diff --git a/spec/views/notify/pipeline_success_email.text.erb_spec.rb b/spec/views/notify/pipeline_success_email.text.erb_spec.rb
index ba4633bc346..4a914cab85e 100644
--- a/spec/views/notify/pipeline_success_email.text.erb_spec.rb
+++ b/spec/views/notify/pipeline_success_email.text.erb_spec.rb
@@ -3,24 +3,8 @@
require 'spec_helper'
describe 'notify/pipeline_success_email.text.erb' do
- let(:user) { create(:user, developer_projects: [project]) }
- let(:project) { create(:project, :repository) }
- let(:merge_request) { create(:merge_request, :simple, source_project: project) }
-
- let(:pipeline) do
- create(:ci_pipeline,
- :success,
- project: project,
- user: user,
- ref: project.default_branch,
- sha: project.commit.sha)
- end
-
- before do
- assign(:project, project)
- assign(:pipeline, pipeline)
- assign(:merge_request, merge_request)
+ it_behaves_like 'pipeline status changes email' do
+ let(:title) { 'Your pipeline has passed' }
+ let(:status) { :success }
end
-
- it_behaves_like 'correct pipeline information for pipelines for merge requests'
end
diff --git a/spec/views/profiles/preferences/show.html.haml_spec.rb b/spec/views/profiles/preferences/show.html.haml_spec.rb
index e3eb822b045..16e4bd9c6d1 100644
--- a/spec/views/profiles/preferences/show.html.haml_spec.rb
+++ b/spec/views/profiles/preferences/show.html.haml_spec.rb
@@ -56,7 +56,7 @@ describe 'profiles/preferences/show' do
expect(rendered).not_to have_sourcegraph_field
end
- it 'does not display integrations settings' do
+ it 'does not display Integration Settings' do
expect(rendered).not_to have_integrations_section
end
end
diff --git a/spec/views/projects/settings/operations/show.html.haml_spec.rb b/spec/views/projects/settings/operations/show.html.haml_spec.rb
index ece9c16650f..7d6faae0f5a 100644
--- a/spec/views/projects/settings/operations/show.html.haml_spec.rb
+++ b/spec/views/projects/settings/operations/show.html.haml_spec.rb
@@ -26,7 +26,7 @@ describe 'projects/settings/operations/show' do
context 'Settings page ' do
it 'renders the Operations Settings page' do
- render
+ render template: "projects/settings/operations/show", locals: { prometheus_service: project.find_or_initialize_service('prometheus') }
expect(rendered).to have_content _('Error Tracking')
expect(rendered).to have_content _('To link Sentry to GitLab, enter your Sentry URL and Auth Token')
diff --git a/spec/views/shared/milestones/_issuable.html.haml_spec.rb b/spec/views/shared/milestones/_issuable.html.haml_spec.rb
index 3c2b7c6305a..6e81fec79d4 100644
--- a/spec/views/shared/milestones/_issuable.html.haml_spec.rb
+++ b/spec/views/shared/milestones/_issuable.html.haml_spec.rb
@@ -3,19 +3,38 @@
require 'spec_helper'
describe 'shared/milestones/_issuable.html.haml' do
- let(:project) { create(:project) }
- let(:user) { create(:user) }
- let(:milestone) { create(:milestone, project: project) }
- let(:issuable) { create(:issue, project: project, assignees: [user]) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:milestone) { create(:milestone, project: project) }
before do
assign(:project, project)
assign(:milestone, milestone)
end
- it 'avatar links to issues page' do
- render 'shared/milestones/issuable', issuable: issuable, show_project_name: true
+ subject(:rendered) { render 'shared/milestones/issuable', issuable: issuable, show_project_name: true }
- expect(rendered).to have_css("a[href='#{project_issues_path(project, milestone_title: milestone.title, assignee_id: user.id, state: 'all')}']")
+ context 'issue' do
+ let(:issuable) { create(:issue, project: project, assignees: [user]) }
+
+ it 'links to the page for the issue' do
+ expect(rendered).to have_css("a[href='#{project_issue_path(project, issuable)}']", class: 'issue-link')
+ end
+
+ it 'links to issues page for user' do
+ expect(rendered).to have_css("a[href='#{project_issues_path(project, milestone_title: milestone.title, assignee_id: user.id, state: 'all')}']")
+ end
+ end
+
+ context 'merge request' do
+ let(:issuable) { create(:merge_request, source_project: project, target_project: project, assignees: [user]) }
+
+ it 'links to merge requests page for user' do
+ expect(rendered).to have_css("a[href='#{project_merge_requests_path(project, milestone_title: milestone.title, assignee_id: user.id, state: 'all')}']")
+ end
+
+ it 'links to the page for the merge request' do
+ expect(rendered).to have_css("a[href='#{project_merge_request_path(project, issuable)}']", class: 'issue-link')
+ end
end
end
diff --git a/spec/views/shared/milestones/_top.html.haml_spec.rb b/spec/views/shared/milestones/_top.html.haml_spec.rb
index 944dfc8a27b..1e209ad6f3f 100644
--- a/spec/views/shared/milestones/_top.html.haml_spec.rb
+++ b/spec/views/shared/milestones/_top.html.haml_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe 'shared/milestones/_top.html.haml' do
- set(:group) { create(:group) }
+ let_it_be(:group) { create(:group) }
let(:project) { create(:project, group: group) }
let(:milestone) { create(:milestone, project: project) }
diff --git a/spec/workers/authorized_keys_worker_spec.rb b/spec/workers/authorized_keys_worker_spec.rb
new file mode 100644
index 00000000000..2aaa6fb2ddf
--- /dev/null
+++ b/spec/workers/authorized_keys_worker_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe AuthorizedKeysWorker do
+ let(:worker) { described_class.new }
+
+ describe '#perform' do
+ context 'authorized_keys is enabled' do
+ before do
+ stub_application_setting(authorized_keys_enabled: true)
+ end
+
+ describe '#add_key' do
+ it 'delegates to Gitlab::AuthorizedKeys' do
+ expect_next_instance_of(Gitlab::AuthorizedKeys) do |instance|
+ expect(instance).to receive(:add_key).with('foo', 'bar')
+ end
+
+ worker.perform(:add_key, 'foo', 'bar')
+ end
+ end
+
+ describe '#remove_key' do
+ it 'delegates to Gitlab::AuthorizedKeys' do
+ expect_next_instance_of(Gitlab::AuthorizedKeys) do |instance|
+ expect(instance).to receive(:remove_key).with('foo', 'bar')
+ end
+
+ worker.perform(:remove_key, 'foo', 'bar')
+ end
+ end
+
+ describe 'all other commands' do
+ it 'does nothing' do
+ expect(Gitlab::AuthorizedKeys).not_to receive(:new)
+
+ worker.perform(:foo, 'bar', 'baz')
+ end
+ end
+ end
+
+ context 'authorized_keys is disabled' do
+ before do
+ stub_application_setting(authorized_keys_enabled: false)
+ end
+
+ it 'does nothing' do
+ expect(Gitlab::AuthorizedKeys).not_to receive(:new)
+
+ worker.perform(:add_key, 'foo', 'bar')
+ end
+ end
+ end
+end
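The worker class itself is not included in this excerpt; below is a minimal sketch consistent with the behaviour specced above. The `Gitlab::CurrentSettings.authorized_keys_enabled?` check and the guard-clause structure are assumptions made for illustration; only the `add_key`/`remove_key` calls are taken directly from the spec.

# Forwards only :add_key and :remove_key to Gitlab::AuthorizedKeys, and does
# nothing when the authorized_keys application setting is disabled.
class AuthorizedKeysWorker
  include ApplicationWorker

  def perform(action, *args)
    return unless Gitlab::CurrentSettings.authorized_keys_enabled?

    case action.to_s
    when 'add_key'
      Gitlab::AuthorizedKeys.new.add_key(*args)
    when 'remove_key'
      Gitlab::AuthorizedKeys.new.remove_key(*args)
    end
  end
end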
diff --git a/spec/workers/authorized_projects_worker_spec.rb b/spec/workers/authorized_projects_worker_spec.rb
index 4c02278de64..8ce0d4edd4f 100644
--- a/spec/workers/authorized_projects_worker_spec.rb
+++ b/spec/workers/authorized_projects_worker_spec.rb
@@ -21,5 +21,22 @@ describe AuthorizedProjectsWorker do
job.perform(-1)
end
end
+
+ it_behaves_like "an idempotent worker" do
+ let(:job_args) { user.id }
+
+ it "does not change authorizations when run twice" do
+ group = create(:group)
+ create(:project, namespace: group)
+ group.add_developer(user)
+
+ # Delete the authorization created by the after save hook of the member
+ # created above.
+ user.project_authorizations.delete_all
+
+ expect { job.perform(user.id) }.to change { user.project_authorizations.reload.size }.by(1)
+ expect { job.perform(user.id) }.not_to change { user.project_authorizations.reload.size }
+ end
+ end
end
end
diff --git a/spec/workers/background_migration_worker_spec.rb b/spec/workers/background_migration_worker_spec.rb
index aae6fa02a0c..2b2ffc9f5b9 100644
--- a/spec/workers/background_migration_worker_spec.rb
+++ b/spec/workers/background_migration_worker_spec.rb
@@ -11,7 +11,7 @@ describe BackgroundMigrationWorker, :clean_gitlab_redis_shared_state do
end
end
- describe '.perform' do
+ describe '#perform' do
it 'performs a background migration' do
expect(Gitlab::BackgroundMigration)
.to receive(:perform)
@@ -52,6 +52,14 @@ describe BackgroundMigrationWorker, :clean_gitlab_redis_shared_state do
worker.perform('Foo', [10, 20])
end
+
+ it 'sets the class that will be executed as the caller_id' do
+ expect(Gitlab::BackgroundMigration).to receive(:perform) do
+ expect(Labkit::Context.current.to_h).to include('meta.caller_id' => 'Foo')
+ end
+
+ worker.perform('Foo', [10, 20])
+ end
end
describe '#healthy_database?' do
diff --git a/spec/workers/concerns/application_worker_spec.rb b/spec/workers/concerns/application_worker_spec.rb
index ae5244e2f62..2fbaaf1131f 100644
--- a/spec/workers/concerns/application_worker_spec.rb
+++ b/spec/workers/concerns/application_worker_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe ApplicationWorker do
- let(:worker) do
+ let_it_be(:worker) do
Class.new do
def self.name
'Gitlab::Foo::Bar::DummyWorker'
@@ -13,12 +13,51 @@ describe ApplicationWorker do
end
end
+ let(:instance) { worker.new }
+
describe 'Sidekiq options' do
it 'sets the queue name based on the class name' do
expect(worker.sidekiq_options['queue']).to eq('foo_bar_dummy')
end
end
+ describe '#structured_payload' do
+ let(:payload) { {} }
+
+ subject(:result) { instance.structured_payload(payload) }
+
+ it 'adds worker related payload' do
+ instance.jid = 'a jid'
+
+ expect(result).to include(
+ 'class' => worker.class,
+ 'job_status' => 'running',
+ 'queue' => worker.queue,
+ 'jid' => instance.jid
+ )
+ end
+
+ it 'adds labkit context' do
+ user = build_stubbed(:user, username: 'jane-doe')
+
+ instance.with_context(user: user) do
+ expect(result).to include('meta.user' => user.username)
+ end
+ end
+
+ it 'adds custom payload converting stringified keys' do
+ payload[:message] = 'some message'
+
+ expect(result).to include('message' => payload[:message])
+ end
+
+ it 'does not override predefined context keys with custom payload' do
+ payload['class'] = 'custom value'
+
+ expect(result).to include('class' => worker.class)
+ end
+ end
+
describe '.queue_namespace' do
it 'sets the queue name based on the class name' do
worker.queue_namespace :some_namespace
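The new `#structured_payload` examples above suggest how a worker would use the helper when emitting structured logs. A small hedged sketch follows; the worker name and the log call site are illustrative, not part of this diff:

class ExampleLoggingWorker # hypothetical worker, for illustration only
  include ApplicationWorker

  def perform(project_id)
    # structured_payload merges worker metadata ('class', 'jid', 'queue',
    # 'job_status') and the current Labkit context into the given hash;
    # predefined keys win over custom keys with the same name.
    Sidekiq.logger.info(structured_payload(message: 'starting work', project_id: project_id))
  end
end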
diff --git a/spec/workers/concerns/gitlab/github_import/notify_upon_death_spec.rb b/spec/workers/concerns/gitlab/github_import/notify_upon_death_spec.rb
deleted file mode 100644
index 200cdffd560..00000000000
--- a/spec/workers/concerns/gitlab/github_import/notify_upon_death_spec.rb
+++ /dev/null
@@ -1,51 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Gitlab::GithubImport::NotifyUponDeath do
- let(:worker_class) do
- Class.new do
- include Sidekiq::Worker
- include Gitlab::GithubImport::NotifyUponDeath
- end
- end
-
- describe '.sidekiq_retries_exhausted' do
- it 'notifies the JobWaiter when 3 arguments are given and the last is a String' do
- job = { 'args' => [12, {}, '123abc'], 'jid' => '123' }
-
- expect(Gitlab::JobWaiter)
- .to receive(:notify)
- .with('123abc', '123')
-
- worker_class.sidekiq_retries_exhausted_block.call(job)
- end
-
- it 'does not notify the JobWaiter when only 2 arguments are given' do
- job = { 'args' => [12, {}], 'jid' => '123' }
-
- expect(Gitlab::JobWaiter)
- .not_to receive(:notify)
-
- worker_class.sidekiq_retries_exhausted_block.call(job)
- end
-
- it 'does not notify the JobWaiter when only 1 argument is given' do
- job = { 'args' => [12], 'jid' => '123' }
-
- expect(Gitlab::JobWaiter)
- .not_to receive(:notify)
-
- worker_class.sidekiq_retries_exhausted_block.call(job)
- end
-
- it 'does not notify the JobWaiter when the last argument is not a String' do
- job = { 'args' => [12, {}, 40], 'jid' => '123' }
-
- expect(Gitlab::JobWaiter)
- .not_to receive(:notify)
-
- worker_class.sidekiq_retries_exhausted_block.call(job)
- end
- end
-end
diff --git a/spec/workers/concerns/gitlab/notify_upon_death_spec.rb b/spec/workers/concerns/gitlab/notify_upon_death_spec.rb
new file mode 100644
index 00000000000..1c75ac99227
--- /dev/null
+++ b/spec/workers/concerns/gitlab/notify_upon_death_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::NotifyUponDeath do
+ let(:worker_class) do
+ Class.new do
+ include Sidekiq::Worker
+ include Gitlab::NotifyUponDeath
+ end
+ end
+
+ describe '.sidekiq_retries_exhausted' do
+ it 'notifies the JobWaiter when 3 arguments are given and the last is a String' do
+ job = { 'args' => [12, {}, '123abc'], 'jid' => '123' }
+
+ expect(Gitlab::JobWaiter)
+ .to receive(:notify)
+ .with('123abc', '123')
+
+ worker_class.sidekiq_retries_exhausted_block.call(job)
+ end
+
+ it 'does not notify the JobWaiter when only 2 arguments are given' do
+ job = { 'args' => [12, {}], 'jid' => '123' }
+
+ expect(Gitlab::JobWaiter)
+ .not_to receive(:notify)
+
+ worker_class.sidekiq_retries_exhausted_block.call(job)
+ end
+
+ it 'does not notify the JobWaiter when only 1 argument is given' do
+ job = { 'args' => [12], 'jid' => '123' }
+
+ expect(Gitlab::JobWaiter)
+ .not_to receive(:notify)
+
+ worker_class.sidekiq_retries_exhausted_block.call(job)
+ end
+
+ it 'does not notify the JobWaiter when the last argument is not a String' do
+ job = { 'args' => [12, {}, 40], 'jid' => '123' }
+
+ expect(Gitlab::JobWaiter)
+ .not_to receive(:notify)
+
+ worker_class.sidekiq_retries_exhausted_block.call(job)
+ end
+ end
+end
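The concern under test is not shown in this diff; the following is a sketch matching the behaviour described by these examples. The module name and hook wiring are assumptions; `sidekiq_retries_exhausted` and `Gitlab::JobWaiter.notify` are taken from the spec.

module NotifyUponDeathSketch
  extend ActiveSupport::Concern

  included do
    # When Sidekiq exhausts the retries for a job, notify the JobWaiter
    # identified by the trailing String argument so callers waiting on the
    # job do not block forever. Jobs with a different argument shape are ignored.
    sidekiq_retries_exhausted do |job|
      args = job['args']

      if args.length == 3 && args.last.is_a?(String)
        Gitlab::JobWaiter.notify(args.last, job['jid'])
      end
    end
  end
end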
diff --git a/spec/workers/concerns/project_export_options_spec.rb b/spec/workers/concerns/project_export_options_spec.rb
new file mode 100644
index 00000000000..985afaaf11e
--- /dev/null
+++ b/spec/workers/concerns/project_export_options_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ProjectExportOptions do
+ let(:project) { create(:project) }
+ let(:project_export_job) { create(:project_export_job, project: project, jid: '123', status: 1) }
+ let(:job) { { 'args' => [project.owner.id, project.id, nil, nil], 'jid' => '123' } }
+ let(:worker_class) do
+ Class.new do
+ include Sidekiq::Worker
+ include ProjectExportOptions
+ end
+ end
+
+ it 'sets default retry limit' do
+ expect(worker_class.sidekiq_options['retry']).to eq(ProjectExportOptions::EXPORT_RETRY_COUNT)
+ end
+
+ it 'sets default status expiration' do
+ expect(worker_class.sidekiq_options['status_expiration']).to eq(StuckExportJobsWorker::EXPORT_JOBS_EXPIRATION)
+ end
+
+ describe '.sidekiq_retries_exhausted' do
+ it 'marks status as failed' do
+ expect { worker_class.sidekiq_retries_exhausted_block.call(job) }.to change { project_export_job.reload.status }.from(1).to(3)
+ end
+
+ context 'when status update fails' do
+ before do
+ project_export_job.update(status: 2)
+ end
+
+ it 'logs an error' do
+ expect(Sidekiq.logger).to receive(:error).with("Failed to set Job #{job['jid']} for project #{project.id} to failed state")
+
+ worker_class.sidekiq_retries_exhausted_block.call(job)
+ end
+ end
+ end
+end
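As with the other concerns in this patch, the implementation is not part of the excerpt. A rough sketch consistent with the expectations above follows; the `failed!` transition, the argument positions, and the module name are assumptions made purely for illustration.

module ProjectExportOptionsSketch
  extend ActiveSupport::Concern

  EXPORT_RETRY_COUNT = 3

  included do
    sidekiq_options retry: EXPORT_RETRY_COUNT,
                    status_expiration: StuckExportJobsWorker::EXPORT_JOBS_EXPIRATION

    # Once all retries are exhausted, mark the persisted export job as failed;
    # if the state transition cannot be made, log an error instead.
    sidekiq_retries_exhausted do |job|
      project_id = job['args'][1]
      export_job = ProjectExportJob.find_by(jid: job['jid'])

      unless export_job&.failed!
        Sidekiq.logger.error("Failed to set Job #{job['jid']} for project #{project_id} to failed state")
      end
    end
  end
end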
diff --git a/spec/workers/detect_repository_languages_worker_spec.rb b/spec/workers/detect_repository_languages_worker_spec.rb
index 755eb8dbf6b..84af49050d4 100644
--- a/spec/workers/detect_repository_languages_worker_spec.rb
+++ b/spec/workers/detect_repository_languages_worker_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe DetectRepositoryLanguagesWorker do
- set(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
subject { described_class.new }
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index f3ee1dc8435..d6e867ee407 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -71,30 +71,30 @@ describe 'Every Sidekiq worker' do
# concurrency, so that each job can consume a large amount of memory. For this reason, on
# GitLab.com, when a large number of memory-bound jobs arrive at once, we let them queue up
# rather than scaling the hardware to meet the SLO. For this reason, memory-bound,
- # latency-sensitive jobs are explicitly discouraged and disabled.
- it 'is (exclusively) memory-bound or latency-sentitive, not both', :aggregate_failures do
- latency_sensitive_workers = workers_without_defaults
- .select(&:latency_sensitive_worker?)
+ # high urgency jobs are explicitly discouraged and disabled.
+ it 'is (exclusively) memory-bound or high urgency, not both', :aggregate_failures do
+ high_urgency_workers = workers_without_defaults
+ .select { |worker| worker.get_urgency == :high }
- latency_sensitive_workers.each do |worker|
- expect(worker.get_worker_resource_boundary).not_to eq(:memory), "#{worker.inspect} cannot be both memory-bound and latency sensitive"
+ high_urgency_workers.each do |worker|
+ expect(worker.get_worker_resource_boundary).not_to eq(:memory), "#{worker.inspect} cannot be both memory-bound and high urgency"
end
end
- # In high traffic installations, such as GitLab.com, `latency_sensitive` workers run in a
- # dedicated fleet. In order to ensure short queue times, `latency_sensitive` jobs have strict
+ # In high traffic installations, such as GitLab.com, `urgency :high` workers run in a
+ # dedicated fleet. In order to ensure short queue times, `urgency :high` jobs have strict
# SLOs in order to ensure throughput. However, when a worker depends on an external service,
# such as a user's k8s cluster or a third-party internet service, we cannot guarantee latency,
# and therefore throughput. An outage of a 3rd-party service could therefore impact throughput
- # on other latency_sensitive jobs, leading to degradation through the GitLab application.
- # Please see doc/development/sidekiq_style_guide.md#Jobs-with-External-Dependencies for more
+ # on other high urgency jobs, leading to degradation through the GitLab application.
+ # Please see doc/development/sidekiq_style_guide.md#jobs-with-external-dependencies for more
# details.
- it 'has (exclusively) external dependencies or is latency-sentitive, not both', :aggregate_failures do
- latency_sensitive_workers = workers_without_defaults
- .select(&:latency_sensitive_worker?)
+ it 'has (exclusively) external dependencies or is high urgency, not both', :aggregate_failures do
+ high_urgency_workers = workers_without_defaults
+ .select { |worker| worker.get_urgency == :high }
- latency_sensitive_workers.each do |worker|
- expect(worker.worker_has_external_dependencies?).to be_falsey, "#{worker.inspect} cannot have both external dependencies and be latency sensitive"
+ high_urgency_workers.each do |worker|
+ expect(worker.worker_has_external_dependencies?).to be_falsey, "#{worker.inspect} cannot have both external dependencies and be high urgency"
end
end
end
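The renamed expectations above correspond to the worker attributes DSL. A brief sketch of how a worker declares these attributes follows; the example class is hypothetical, and the macro names are inferred from the `get_urgency` / `get_worker_resource_boundary` readers used in the spec:

class QuickStatusWorker # hypothetical example, not part of this diff
  include ApplicationWorker

  urgency :high                 # runs on the dedicated low-latency fleet with strict SLOs
  worker_resource_boundary :cpu # must not be :memory when urgency is :high
end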
diff --git a/spec/workers/expire_build_instance_artifacts_worker_spec.rb b/spec/workers/expire_build_instance_artifacts_worker_spec.rb
index 39f676f1057..335fa5e6c98 100644
--- a/spec/workers/expire_build_instance_artifacts_worker_spec.rb
+++ b/spec/workers/expire_build_instance_artifacts_worker_spec.rb
@@ -31,7 +31,7 @@ describe ExpireBuildInstanceArtifactsWorker do
end
context 'with not yet expired artifacts' do
- set(:build) do
+ let_it_be(:build) do
create(:ci_build, :artifacts, artifacts_expire_at: Time.now + 7.days)
end
diff --git a/spec/workers/expire_job_cache_worker_spec.rb b/spec/workers/expire_job_cache_worker_spec.rb
index eeab304d926..2776624f14c 100644
--- a/spec/workers/expire_job_cache_worker_spec.rb
+++ b/spec/workers/expire_job_cache_worker_spec.rb
@@ -3,23 +3,35 @@
require 'spec_helper'
describe ExpireJobCacheWorker do
- set(:pipeline) { create(:ci_empty_pipeline) }
+ let_it_be(:pipeline) { create(:ci_empty_pipeline) }
let(:project) { pipeline.project }
- subject { described_class.new }
-
describe '#perform' do
context 'with a job in the pipeline' do
let(:job) { create(:ci_build, pipeline: pipeline) }
+ let(:job_args) { job.id }
+
+ include_examples 'an idempotent worker' do
+ it 'invalidates Etag caching for the job path' do
+ pipeline_path = "/#{project.full_path}/pipelines/#{pipeline.id}.json"
+ job_path = "/#{project.full_path}/builds/#{job.id}.json"
+
+ spy_store = Gitlab::EtagCaching::Store.new
+
+ allow(Gitlab::EtagCaching::Store).to receive(:new) { spy_store }
- it 'invalidates Etag caching for the job path' do
- pipeline_path = "/#{project.full_path}/pipelines/#{pipeline.id}.json"
- job_path = "/#{project.full_path}/builds/#{job.id}.json"
+ expect(spy_store).to receive(:touch)
+ .exactly(worker_exec_times).times
+ .with(pipeline_path)
+ .and_call_original
- expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(pipeline_path)
- expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(job_path)
+ expect(spy_store).to receive(:touch)
+ .exactly(worker_exec_times).times
+ .with(job_path)
+ .and_call_original
- subject.perform(job.id)
+ subject
+ end
end
end
@@ -27,7 +39,7 @@ describe ExpireJobCacheWorker do
it 'does not change the etag store' do
expect(Gitlab::EtagCaching::Store).not_to receive(:new)
- subject.perform(9999)
+ perform_multiple(9999)
end
end
end
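The `include_examples 'an idempotent worker'` block above depends on the worker opting in to idempotency and on the shared example invoking `#perform` several times (`worker_exec_times`, `perform_multiple`). A hedged sketch of the opt-in side, with illustrative class and model names:

class ExpireSomethingCacheWorker # hypothetical worker, for illustration only
  include ApplicationWorker

  # Declares that running the job more than once with the same arguments is
  # safe, which is what the 'an idempotent worker' shared example verifies.
  idempotent!

  def perform(record_id)
    record = SomeModel.find_by_id(record_id) # SomeModel is a placeholder
    return unless record

    # Re-running simply touches the same Etag keys again, so repeated
    # executions leave the system in the same state.
    record.expire_etag_cache
  end
end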
diff --git a/spec/workers/gitlab_shell_worker_spec.rb b/spec/workers/gitlab_shell_worker_spec.rb
index 5dedf5be9fa..f5884e5e8f8 100644
--- a/spec/workers/gitlab_shell_worker_spec.rb
+++ b/spec/workers/gitlab_shell_worker_spec.rb
@@ -5,12 +5,35 @@ require 'spec_helper'
describe GitlabShellWorker do
let(:worker) { described_class.new }
- describe '#perform with add_key' do
- it 'calls add_key on Gitlab::Shell' do
- expect_next_instance_of(Gitlab::Shell) do |instance|
- expect(instance).to receive(:add_key).with('foo', 'bar')
+ describe '#perform' do
+ describe '#add_key' do
+ it 'delegates to Gitlab::AuthorizedKeys' do
+ expect_next_instance_of(Gitlab::AuthorizedKeys) do |instance|
+ expect(instance).to receive(:add_key).with('foo', 'bar')
+ end
+
+ worker.perform(:add_key, 'foo', 'bar')
+ end
+ end
+
+ describe '#remove_key' do
+ it 'delegates to Gitlab::AuthorizedKeys' do
+ expect_next_instance_of(Gitlab::AuthorizedKeys) do |instance|
+ expect(instance).to receive(:remove_key).with('foo', 'bar')
+ end
+
+ worker.perform(:remove_key, 'foo', 'bar')
+ end
+ end
+
+ describe 'all other commands' do
+ it 'delegates them to Gitlab::Shell' do
+ expect_next_instance_of(Gitlab::Shell) do |instance|
+ expect(instance).to receive(:foo).with('bar', 'baz')
+ end
+
+ worker.perform(:foo, 'bar', 'baz')
end
- worker.perform(:add_key, 'foo', 'bar')
end
end
end
diff --git a/spec/workers/mail_scheduler/notification_service_worker_spec.rb b/spec/workers/mail_scheduler/notification_service_worker_spec.rb
index 3c023e713ed..ac95a6d7e43 100644
--- a/spec/workers/mail_scheduler/notification_service_worker_spec.rb
+++ b/spec/workers/mail_scheduler/notification_service_worker_spec.rb
@@ -6,7 +6,7 @@ describe MailScheduler::NotificationServiceWorker do
let(:worker) { described_class.new }
let(:method) { 'new_key' }
- set(:key) { create(:personal_key) }
+ let_it_be(:key) { create(:personal_key) }
def serialize(*args)
ActiveJob::Arguments.serialize(args)
diff --git a/spec/workers/namespaces/root_statistics_worker_spec.rb b/spec/workers/namespaces/root_statistics_worker_spec.rb
index 6bbdfe03ceb..45e75c9b0da 100644
--- a/spec/workers/namespaces/root_statistics_worker_spec.rb
+++ b/spec/workers/namespaces/root_statistics_worker_spec.rb
@@ -42,7 +42,7 @@ describe Namespaces::RootStatisticsWorker, '#perform' do
allow_any_instance_of(Namespace::AggregationSchedule)
.to receive(:schedule_root_storage_statistics).and_return(nil)
- expect(Gitlab::SidekiqLogger).to receive(:error).once
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).once
worker.perform(group.id)
end
diff --git a/spec/workers/namespaces/schedule_aggregation_worker_spec.rb b/spec/workers/namespaces/schedule_aggregation_worker_spec.rb
index be722f451e0..a2ce9891b5f 100644
--- a/spec/workers/namespaces/schedule_aggregation_worker_spec.rb
+++ b/spec/workers/namespaces/schedule_aggregation_worker_spec.rb
@@ -48,7 +48,7 @@ describe Namespaces::ScheduleAggregationWorker, '#perform', :clean_gitlab_redis_
context 'when namespace does not exist' do
it 'logs the error' do
- expect(Gitlab::SidekiqLogger).to receive(:error).once
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).once
worker.perform(12345)
end
diff --git a/spec/workers/pipeline_notification_worker_spec.rb b/spec/workers/pipeline_notification_worker_spec.rb
index 98b0f139fe2..5defd3d5bd7 100644
--- a/spec/workers/pipeline_notification_worker_spec.rb
+++ b/spec/workers/pipeline_notification_worker_spec.rb
@@ -3,13 +3,16 @@
require 'spec_helper'
describe PipelineNotificationWorker, :mailer do
- let(:pipeline) { create(:ci_pipeline) }
+ let_it_be(:pipeline) { create(:ci_pipeline) }
describe '#execute' do
it 'calls NotificationService#pipeline_finished when the pipeline exists' do
- expect(NotificationService).to receive_message_chain(:new, :pipeline_finished)
+ notification_service_double = double
+ expect(notification_service_double).to receive(:pipeline_finished)
+ .with(pipeline, ref_status: 'success', recipients: ['test@gitlab.com'])
+ expect(NotificationService).to receive(:new).and_return(notification_service_double)
- subject.perform(pipeline.id)
+ subject.perform(pipeline.id, ref_status: 'success', recipients: ['test@gitlab.com'])
end
it 'does nothing when the pipeline does not exist' do
diff --git a/spec/workers/pipeline_schedule_worker_spec.rb b/spec/workers/pipeline_schedule_worker_spec.rb
index 4926c14a6ab..758cfa82d5d 100644
--- a/spec/workers/pipeline_schedule_worker_spec.rb
+++ b/spec/workers/pipeline_schedule_worker_spec.rb
@@ -7,8 +7,8 @@ describe PipelineScheduleWorker do
subject { described_class.new.perform }
- set(:project) { create(:project, :repository) }
- set(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
let!(:pipeline_schedule) do
create(:ci_pipeline_schedule, :nightly, project: project, owner: user)
diff --git a/spec/workers/pipeline_update_ci_ref_status_worker_service_spec.rb b/spec/workers/pipeline_update_ci_ref_status_worker_service_spec.rb
new file mode 100644
index 00000000000..7228de4f895
--- /dev/null
+++ b/spec/workers/pipeline_update_ci_ref_status_worker_service_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe PipelineUpdateCiRefStatusWorker do
+ let(:worker) { described_class.new }
+ let(:pipeline) { create(:ci_pipeline) }
+
+ describe '#perform' do
+ it 'updates the ci_ref status' do
+ expect(Ci::UpdateCiRefStatusService).to receive(:new)
+ .with(pipeline)
+ .and_return(double(call: true))
+
+ worker.perform(pipeline.id)
+ end
+ end
+end
diff --git a/spec/workers/post_receive_spec.rb b/spec/workers/post_receive_spec.rb
index 34aaa9bb1e9..ac17919d7f0 100644
--- a/spec/workers/post_receive_spec.rb
+++ b/spec/workers/post_receive_spec.rb
@@ -34,6 +34,31 @@ describe PostReceive do
expect(Gitlab::GitLogger).to receive(:error).with("POST-RECEIVE: #{error_message}")
expect(perform).to be(false)
end
+
+ context 'with PersonalSnippet' do
+ let(:gl_repository) { "snippet-#{snippet.id}" }
+ let(:snippet) { create(:personal_snippet, author: project.owner) }
+
+ it 'does not log an error' do
+ expect(Gitlab::GitLogger).not_to receive(:error)
+ expect(Gitlab::GitPostReceive).to receive(:new).and_call_original
+ expect_any_instance_of(described_class) do |instance|
+ expect(instance).to receive(:process_snippet_changes)
+ end
+
+ perform
+ end
+ end
+
+ context 'with ProjectSnippet' do
+ let(:gl_repository) { "snippet-#{snippet.id}" }
+ let(:snippet) { create(:snippet, type: 'ProjectSnippet', project: nil, author: project.owner) }
+
+ it 'returns false and logs an error' do
+ expect(Gitlab::GitLogger).to receive(:error).with("POST-RECEIVE: #{error_message}")
+ expect(perform).to be(false)
+ end
+ end
end
describe "#process_project_changes" do
@@ -44,7 +69,7 @@ describe PostReceive do
before do
allow_any_instance_of(Gitlab::GitPostReceive).to receive(:identify).and_return(empty_project.owner)
# Need to mock here so we can expect calls on project
- allow(Gitlab::GlRepository).to receive(:parse).and_return([empty_project, Gitlab::GlRepository::PROJECT])
+ allow(Gitlab::GlRepository).to receive(:parse).and_return([empty_project, empty_project, Gitlab::GlRepository::PROJECT])
end
it 'expire the status cache' do
@@ -97,7 +122,7 @@ describe PostReceive do
before do
allow_any_instance_of(Gitlab::GitPostReceive).to receive(:identify).and_return(project.owner)
- allow(Gitlab::GlRepository).to receive(:parse).and_return([project, Gitlab::GlRepository::PROJECT])
+ allow(Gitlab::GlRepository).to receive(:parse).and_return([project, project, Gitlab::GlRepository::PROJECT])
end
shared_examples 'updating remote mirrors' do
@@ -176,7 +201,7 @@ describe PostReceive do
end
before do
- expect(Gitlab::GlRepository).to receive(:parse).and_return([project, Gitlab::GlRepository::PROJECT])
+ expect(Gitlab::GlRepository).to receive(:parse).and_return([project, project, Gitlab::GlRepository::PROJECT])
end
it 'does not expire branches cache' do
@@ -256,7 +281,7 @@ describe PostReceive do
before do
# Need to mock here so we can expect calls on project
- allow(Gitlab::GlRepository).to receive(:parse).and_return([project, Gitlab::GlRepository::WIKI])
+ allow(Gitlab::GlRepository).to receive(:parse).and_return([project, project, Gitlab::GlRepository::WIKI])
end
it 'updates project activity' do
@@ -333,4 +358,82 @@ describe PostReceive do
perform
end
end
+
+ describe '#process_snippet_changes' do
+ let(:gl_repository) { "snippet-#{snippet.id}" }
+
+ before do
+ # Need to mock here so we can expect calls on project
+ allow(Gitlab::GlRepository).to receive(:parse).and_return([snippet, snippet.project, Gitlab::GlRepository::SNIPPET])
+ end
+
+ shared_examples 'snippet changes actions' do
+ context 'unidentified user' do
+ let!(:key_id) { '' }
+
+ it 'returns false' do
+ expect(perform).to be false
+ end
+ end
+
+ context 'with changes' do
+ context 'branches' do
+ let(:changes) do
+ <<~EOF
+ 123456 789012 refs/heads/tést1
+ 123456 789012 refs/heads/tést2
+ EOF
+ end
+
+ it 'expires the branches cache' do
+ expect(snippet.repository).to receive(:expire_branches_cache).once
+
+ perform
+ end
+
+ it 'expires the status cache' do
+ expect(snippet.repository).to receive(:empty?).and_return(true)
+ expect(snippet.repository).to receive(:expire_status_cache)
+
+ perform
+ end
+ end
+
+ context 'tags' do
+ let(:changes) do
+ <<~EOF
+ 654321 210987 refs/tags/tag1
+ 654322 210986 refs/tags/tag2
+ 654323 210985 refs/tags/tag3
+ EOF
+ end
+
+ it 'does not expire branches cache' do
+ expect(snippet.repository).not_to receive(:expire_branches_cache)
+
+ perform
+ end
+
+ it 'only invalidates tags once' do
+ expect(snippet.repository).to receive(:expire_caches_for_tags).once.and_call_original
+ expect(snippet.repository).to receive(:expire_tags_cache).once.and_call_original
+
+ perform
+ end
+ end
+ end
+ end
+
+ context 'with PersonalSnippet' do
+ let!(:snippet) { create(:personal_snippet, :repository, author: project.owner) }
+
+ it_behaves_like 'snippet changes actions'
+ end
+
+ context 'with ProjectSnippet' do
+ let!(:snippet) { create(:project_snippet, :repository, project: project, author: project.owner) }
+
+ it_behaves_like 'snippet changes actions'
+ end
+ end
end
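Several of the worker specs in this diff lean on GitLab's expect_next_instance_of spec helper rather than expect_any_instance_of. As a rough, hedged illustration only (the helper name is taken from the specs above; the module name and body below are an assumed sketch built on plain rspec-mocks, not GitLab's actual implementation), such a helper can be written with and_wrap_original when included in an RSpec example group:

module NextInstanceOfSketch
  # Sketch: intercept the next Klass.new call, yield the freshly built instance
  # so the caller can attach expectations to it, then return it unchanged.
  def expect_next_instance_of(klass)
    expect(klass).to receive(:new).and_wrap_original do |original, *args, &blk|
      original.call(*args, &blk).tap { |instance| yield(instance) }
    end
  end
end

# Usage, mirroring the pattern in the specs above:
#   expect_next_instance_of(Projects::UpdateRepositoryStorageService) do |service|
#     expect(service).to receive(:execute).with('new_storage')
#   end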
diff --git a/spec/workers/project_export_worker_spec.rb b/spec/workers/project_export_worker_spec.rb
index 8065087796c..d0d52e0df2d 100644
--- a/spec/workers/project_export_worker_spec.rb
+++ b/spec/workers/project_export_worker_spec.rb
@@ -9,21 +9,59 @@ describe ProjectExportWorker do
subject { described_class.new }
describe '#perform' do
+ before do
+ allow_next_instance_of(described_class) do |job|
+ allow(job).to receive(:jid).and_return(SecureRandom.hex(8))
+ end
+ end
+
context 'when it succeeds' do
it 'calls the ExportService' do
expect_any_instance_of(::Projects::ImportExport::ExportService).to receive(:execute)
subject.perform(user.id, project.id, { 'klass' => 'Gitlab::ImportExport::AfterExportStrategies::DownloadNotificationStrategy' })
end
+
+ context 'export job' do
+ before do
+ allow_any_instance_of(::Projects::ImportExport::ExportService).to receive(:execute)
+ end
+
+ it 'creates an export job record for the project' do
+ expect { subject.perform(user.id, project.id, {}) }.to change { project.export_jobs.count }.from(0).to(1)
+ end
+
+ it 'sets the export job status to started' do
+ expect_next_instance_of(ProjectExportJob) do |job|
+ expect(job).to receive(:start)
+ end
+
+ subject.perform(user.id, project.id, {})
+ end
+
+ it 'sets the export job status to finished' do
+ expect_next_instance_of(ProjectExportJob) do |job|
+ expect(job).to receive(:finish)
+ end
+
+ subject.perform(user.id, project.id, {})
+ end
+ end
end
context 'when it fails' do
- it 'raises an exception when params are invalid' do
+ it 'does not raise an exception when strategy is invalid' do
expect_any_instance_of(::Projects::ImportExport::ExportService).not_to receive(:execute)
- expect { subject.perform(1234, project.id, {}) }.to raise_exception(ActiveRecord::RecordNotFound)
- expect { subject.perform(user.id, 1234, {}) }.to raise_exception(ActiveRecord::RecordNotFound)
- expect { subject.perform(user.id, project.id, { 'klass' => 'Whatever' }) }.to raise_exception(Gitlab::ImportExport::AfterExportStrategyBuilder::StrategyNotFoundError)
+ expect { subject.perform(user.id, project.id, { 'klass' => 'Whatever' }) }.not_to raise_error
+ end
+
+ it 'does not raise error when project cannot be found' do
+ expect { subject.perform(user.id, -234, {}) }.not_to raise_error
+ end
+
+ it 'does not raise error when user cannot be found' do
+ expect { subject.perform(-863, project.id, {}) }.not_to raise_error
end
end
end
diff --git a/spec/workers/project_update_repository_storage_worker_spec.rb b/spec/workers/project_update_repository_storage_worker_spec.rb
new file mode 100644
index 00000000000..4cc44281a69
--- /dev/null
+++ b/spec/workers/project_update_repository_storage_worker_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ProjectUpdateRepositoryStorageWorker do
+ let(:project) { create(:project, :repository) }
+
+ subject { described_class.new }
+
+ describe "#perform" do
+ it "calls the update repository storage service" do
+ expect_next_instance_of(Projects::UpdateRepositoryStorageService) do |instance|
+ expect(instance).to receive(:execute).with('new_storage')
+ end
+
+ subject.perform(project.id, 'new_storage')
+ end
+ end
+end
diff --git a/spec/workers/remote_mirror_notification_worker_spec.rb b/spec/workers/remote_mirror_notification_worker_spec.rb
index 5182f67b4af..c9321fd7c56 100644
--- a/spec/workers/remote_mirror_notification_worker_spec.rb
+++ b/spec/workers/remote_mirror_notification_worker_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe RemoteMirrorNotificationWorker, :mailer do
- set(:project) { create(:project, :repository, :remote_mirror) }
- set(:mirror) { project.remote_mirrors.first }
+ let_it_be(:project) { create(:project, :repository, :remote_mirror) }
+ let_it_be(:mirror) { project.remote_mirrors.first }
describe '#execute' do
it 'calls NotificationService#remote_mirror_update_failed when the mirror exists' do
diff --git a/spec/workers/repository_fork_worker_spec.rb b/spec/workers/repository_fork_worker_spec.rb
index 01104049404..7209c40646f 100644
--- a/spec/workers/repository_fork_worker_spec.rb
+++ b/spec/workers/repository_fork_worker_spec.rb
@@ -13,7 +13,6 @@ describe RepositoryForkWorker do
describe "#perform" do
let(:project) { create(:project, :public, :repository) }
- let(:shell) { Gitlab::Shell.new }
let(:forked_project) { create(:project, :repository, :import_scheduled) }
before do
@@ -21,12 +20,17 @@ describe RepositoryForkWorker do
end
shared_examples 'RepositoryForkWorker performing' do
- before do
- allow(subject).to receive(:gitlab_shell).and_return(shell)
- end
-
- def expect_fork_repository
- expect(shell).to receive(:fork_repository).with(project, forked_project)
+ def expect_fork_repository(success:)
+ allow(::Gitlab::GitalyClient::RepositoryService).to receive(:new).and_call_original
+ expect_next_instance_of(::Gitlab::GitalyClient::RepositoryService, forked_project.repository.raw) do |svc|
+ exp = expect(svc).to receive(:fork_repository).with(project.repository.raw)
+
+ if success
+ exp.and_return(true)
+ else
+ exp.and_raise(GRPC::BadStatus, 'Fork failed in tests')
+ end
+ end
end
describe 'when a worker was reset without cleanup' do
@@ -35,20 +39,20 @@ describe RepositoryForkWorker do
it 'creates a new repository from a fork' do
allow(subject).to receive(:jid).and_return(jid)
- expect_fork_repository.and_return(true)
+ expect_fork_repository(success: true)
perform!
end
end
it "creates a new repository from a fork" do
- expect_fork_repository.and_return(true)
+ expect_fork_repository(success: true)
perform!
end
it 'protects the default branch' do
- expect_fork_repository.and_return(true)
+ expect_fork_repository(success: true)
perform!
@@ -56,7 +60,7 @@ describe RepositoryForkWorker do
end
it 'flushes various caches' do
- expect_fork_repository.and_return(true)
+ expect_fork_repository(success: true)
# Works around https://github.com/rspec/rspec-mocks/issues/910
expect(Project).to receive(:find).with(forked_project.id).and_return(forked_project)
@@ -75,15 +79,15 @@ describe RepositoryForkWorker do
it 'handles bad fork' do
error_message = "Unable to fork project #{forked_project.id} for repository #{project.disk_path} -> #{forked_project.disk_path}: Failed to create fork repository"
- expect_fork_repository.and_return(false)
+ expect_fork_repository(success: false)
expect { perform! }.to raise_error(StandardError, error_message)
end
it 'calls Projects::LfsPointers::LfsLinkService#execute with OIDs of source project LFS objects' do
- expect_fork_repository.and_return(true)
+ expect_fork_repository(success: true)
expect_next_instance_of(Projects::LfsPointers::LfsLinkService) do |service|
- expect(service).to receive(:execute).with(project.lfs_objects_oids)
+ expect(service).to receive(:execute).with(project.all_lfs_objects_oids)
end
perform!
@@ -92,7 +96,7 @@ describe RepositoryForkWorker do
it "handles LFS objects link failure" do
error_message = "Unable to fork project #{forked_project.id} for repository #{project.disk_path} -> #{forked_project.disk_path}: Source project has too many LFS objects"
- expect_fork_repository.and_return(true)
+ expect_fork_repository(success: true)
expect_next_instance_of(Projects::LfsPointers::LfsLinkService) do |service|
expect(service).to receive(:execute).and_raise(Projects::LfsPointers::LfsLinkService::TooManyOidsError)
end
diff --git a/spec/workers/run_pipeline_schedule_worker_spec.rb b/spec/workers/run_pipeline_schedule_worker_spec.rb
index 14364194b44..afeee4bac73 100644
--- a/spec/workers/run_pipeline_schedule_worker_spec.rb
+++ b/spec/workers/run_pipeline_schedule_worker_spec.rb
@@ -4,9 +4,9 @@ require 'spec_helper'
describe RunPipelineScheduleWorker do
describe '#perform' do
- set(:project) { create(:project) }
- set(:user) { create(:user) }
- set(:pipeline_schedule) { create(:ci_pipeline_schedule, :nightly, project: project ) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:pipeline_schedule) { create(:ci_pipeline_schedule, :nightly, project: project ) }
let(:worker) { described_class.new }
context 'when a project not found' do
diff --git a/spec/workers/stuck_export_jobs_worker_spec.rb b/spec/workers/stuck_export_jobs_worker_spec.rb
new file mode 100644
index 00000000000..fc5758fdadf
--- /dev/null
+++ b/spec/workers/stuck_export_jobs_worker_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe StuckExportJobsWorker do
+ let(:worker) { described_class.new }
+
+ shared_examples 'project export job detection' do
+ context 'when the job has completed' do
+ context 'when the export status was already updated' do
+ before do
+ allow(Gitlab::SidekiqStatus).to receive(:completed_jids) do
+ project_export_job.start
+ project_export_job.finish
+
+ [project_export_job.jid]
+ end
+ end
+
+ it 'does not mark the export as failed' do
+ worker.perform
+
+ expect(project_export_job.reload.finished?).to be true
+ end
+ end
+
+ context 'when the export status was not updated' do
+ before do
+ allow(Gitlab::SidekiqStatus).to receive(:completed_jids) do
+ project_export_job.start
+
+ [project_export_job.jid]
+ end
+ end
+
+ it 'marks the project export job as failed' do
+ worker.perform
+
+ expect(project_export_job.reload.failed?).to be true
+ end
+ end
+
+ context 'when the job is not in the queue and the DB record is still in the queued state' do
+ before do
+ allow(Gitlab::SidekiqStatus).to receive(:completed_jids).and_return([project_export_job.jid])
+ end
+
+ it 'marks the project export job as failed' do
+ expect(project_export_job.queued?).to be true
+
+ worker.perform
+
+ expect(project_export_job.reload.failed?).to be true
+ end
+ end
+ end
+
+ context 'when the job is running in Sidekiq' do
+ before do
+ allow(Gitlab::SidekiqStatus).to receive(:completed_jids).and_return([])
+ end
+
+ it 'does not mark the project export as failed' do
+ expect { worker.perform }.not_to change { project_export_job.reload.status }
+ end
+ end
+ end
+
+ describe 'with started export status' do
+ it_behaves_like 'project export job detection' do
+ let(:project) { create(:project) }
+ let!(:project_export_job) { create(:project_export_job, project: project, jid: '123') }
+ end
+ end
+end
diff --git a/spec/workers/update_external_pull_requests_worker_spec.rb b/spec/workers/update_external_pull_requests_worker_spec.rb
index f3956bb3514..8930a36ceb8 100644
--- a/spec/workers/update_external_pull_requests_worker_spec.rb
+++ b/spec/workers/update_external_pull_requests_worker_spec.rb
@@ -4,8 +4,8 @@ require 'spec_helper'
describe UpdateExternalPullRequestsWorker do
describe '#perform' do
- set(:project) { create(:project, import_source: 'tanuki/repository') }
- set(:user) { create(:user) }
+ let_it_be(:project) { create(:project, import_source: 'tanuki/repository') }
+ let_it_be(:user) { create(:user) }
let(:worker) { described_class.new }
before do
diff --git a/spec/workers/update_merge_requests_worker_spec.rb b/spec/workers/update_merge_requests_worker_spec.rb
index 486dade454a..14a363f9d59 100644
--- a/spec/workers/update_merge_requests_worker_spec.rb
+++ b/spec/workers/update_merge_requests_worker_spec.rb
@@ -26,17 +26,5 @@ describe UpdateMergeRequestsWorker do
perform
end
-
- context 'when slow' do
- before do
- stub_const("UpdateMergeRequestsWorker::LOG_TIME_THRESHOLD", -1)
- end
-
- it 'logs debug info' do
- expect(Rails.logger).to receive(:info).with(a_string_matching(/\AUpdateMergeRequestsWorker#perform.*project_id=#{project.id},user_id=#{user.id},oldrev=#{oldrev},newrev=#{newrev},ref=#{ref}/))
-
- perform
- end
- end
end
end
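The set -> let_it_be replacements in remote_mirror_notification_worker_spec.rb, run_pipeline_schedule_worker_spec.rb and update_external_pull_requests_worker_spec.rb above swap the deprecated set helper for let_it_be. A minimal standalone sketch of the pattern, assuming the test-prof gem provides let_it_be and that FactoryBot is set up as in the surrounding suite (ExampleRecord and its factory are hypothetical stand-ins):

require 'test_prof/recipes/rspec/let_it_be'

RSpec.describe 'let_it_be usage sketch' do
  # Built once before all examples in this group and reused, instead of being
  # recreated for every example the way a plain let! would be.
  let_it_be(:record) { create(:example_record) }

  it 'reuses the persisted record across examples' do
    expect(record).to be_persisted
  end
end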
diff --git a/spec/workers/x509_certificate_revoke_worker_spec.rb b/spec/workers/x509_certificate_revoke_worker_spec.rb
new file mode 100644
index 00000000000..1e0cbf61267
--- /dev/null
+++ b/spec/workers/x509_certificate_revoke_worker_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe X509CertificateRevokeWorker do
+ describe '#perform' do
+ context 'with a revoked certificate' do
+ subject { described_class.new }
+
+ let(:x509_certificate) { create(:x509_certificate, certificate_status: :revoked) }
+ let(:job_args) { x509_certificate.id }
+
+ include_examples 'an idempotent worker' do
+ it 'executes the revoke service' do
+ spy_service = X509CertificateRevokeService.new
+
+ allow(X509CertificateRevokeService).to receive(:new) { spy_service }
+
+ expect(spy_service).to receive(:execute)
+ .exactly(IdempotentWorkerHelper::WORKER_EXEC_TIMES).times
+ .with(x509_certificate)
+ .and_call_original
+
+ subject
+ end
+ end
+
+ it 'executes the revoke service' do
+ spy_service = X509CertificateRevokeService.new
+
+ allow(X509CertificateRevokeService).to receive(:new) { spy_service }
+
+ expect_next_instance_of(X509CertificateRevokeService) do |service|
+ expect(service).to receive(:execute).with(x509_certificate)
+ end
+
+ subject
+ end
+ end
+ end
+end
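The x509_certificate_revoke_worker_spec.rb examples above stub the service constructor and assert on a spy. A stripped-down, self-contained sketch of the same pattern (RevokeServiceSketch and the :certificate argument are hypothetical stand-ins, not GitLab classes):

class RevokeServiceSketch
  def execute(certificate); end
end

RSpec.describe 'constructor-stubbing with a spy' do
  it 'asserts the collaborator receives #execute' do
    spy_service = instance_double(RevokeServiceSketch)

    # Every RevokeServiceSketch.new in the code under test now returns the spy,
    # so the expectation below verifies the delegation.
    allow(RevokeServiceSketch).to receive(:new).and_return(spy_service)
    expect(spy_service).to receive(:execute).with(:certificate)

    RevokeServiceSketch.new.execute(:certificate)
  end
end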
diff --git a/vendor/gitignore/C++.gitignore b/vendor/gitignore/C++.gitignore
index 259148fa18f..259148fa18f 100755..100644
--- a/vendor/gitignore/C++.gitignore
+++ b/vendor/gitignore/C++.gitignore
diff --git a/vendor/gitignore/Java.gitignore b/vendor/gitignore/Java.gitignore
index a1c2a238a96..a1c2a238a96 100755..100644
--- a/vendor/gitignore/Java.gitignore
+++ b/vendor/gitignore/Java.gitignore
diff --git a/vendor/ingress/values.yaml b/vendor/ingress/values.yaml
index 0baaccedc4e..681d10d938a 100644
--- a/vendor/ingress/values.yaml
+++ b/vendor/ingress/values.yaml
@@ -6,3 +6,8 @@ controller:
podAnnotations:
prometheus.io/scrape: "true"
prometheus.io/port: "10254"
+ service:
+ clusterIP: "-"
+defaultBackend:
+ service:
+ clusterIP: "-"
diff --git a/vendor/project_templates/gatsby.tar.gz b/vendor/project_templates/gatsby.tar.gz
new file mode 100644
index 00000000000..d9025978126
--- /dev/null
+++ b/vendor/project_templates/gatsby.tar.gz
Binary files differ
diff --git a/yarn.lock b/yarn.lock
index 94aa81c7aa0..a443fb11287 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -2,656 +2,712 @@
# yarn lockfile v1
-"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.5.5":
- version "7.5.5"
- resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.5.5.tgz#bc0782f6d69f7b7d49531219699b988f669a8f9d"
- integrity sha512-27d4lZoomVyo51VegxI20xZPuSHusqbQag/ztrBC7wegWoQ1nLREPVSKSW8byhTlzTKyNE4ifaTA6lCp7JjpFw==
- dependencies:
- "@babel/highlight" "^7.0.0"
-
-"@babel/core@>=7.2.2", "@babel/core@^7.1.0", "@babel/core@^7.6.2":
- version "7.6.2"
- resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.6.2.tgz#069a776e8d5e9eefff76236bc8845566bd31dd91"
- integrity sha512-l8zto/fuoZIbncm+01p8zPSDZu/VuuJhAfA7d/AbzM09WR7iVhavvfNDYCNpo1VvLk6E6xgAoP9P+/EMJHuRkQ==
- dependencies:
- "@babel/code-frame" "^7.5.5"
- "@babel/generator" "^7.6.2"
- "@babel/helpers" "^7.6.2"
- "@babel/parser" "^7.6.2"
- "@babel/template" "^7.6.0"
- "@babel/traverse" "^7.6.2"
- "@babel/types" "^7.6.0"
- convert-source-map "^1.1.0"
+"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.8.3.tgz#33e25903d7481181534e12ec0a25f16b6fcf419e"
+ integrity sha512-a9gxpmdXtZEInkCSHUJDLHZVBgb1QS0jhss4cPP93EW7s+uC5bikET2twEF3KV+7rDblJcmNvTR7VJejqd2C2g==
+ dependencies:
+ "@babel/highlight" "^7.8.3"
+
+"@babel/compat-data@^7.8.4":
+ version "7.8.5"
+ resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.8.5.tgz#d28ce872778c23551cbb9432fc68d28495b613b9"
+ integrity sha512-jWYUqQX/ObOhG1UiEkbH5SANsE/8oKXiQWjj7p7xgj9Zmnt//aUvyz4dBkK0HNsS8/cbyC5NmmH87VekW+mXFg==
+ dependencies:
+ browserslist "^4.8.5"
+ invariant "^2.2.4"
+ semver "^5.5.0"
+
+"@babel/core@>=7.2.2", "@babel/core@^7.1.0", "@babel/core@^7.8.4":
+ version "7.8.4"
+ resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.8.4.tgz#d496799e5c12195b3602d0fddd77294e3e38e80e"
+ integrity sha512-0LiLrB2PwrVI+a2/IEskBopDYSd8BCb3rOvH7D5tzoWd696TBEduBvuLVm4Nx6rltrLZqvI3MCalB2K2aVzQjA==
+ dependencies:
+ "@babel/code-frame" "^7.8.3"
+ "@babel/generator" "^7.8.4"
+ "@babel/helpers" "^7.8.4"
+ "@babel/parser" "^7.8.4"
+ "@babel/template" "^7.8.3"
+ "@babel/traverse" "^7.8.4"
+ "@babel/types" "^7.8.3"
+ convert-source-map "^1.7.0"
debug "^4.1.0"
+ gensync "^1.0.0-beta.1"
json5 "^2.1.0"
lodash "^4.17.13"
resolve "^1.3.2"
semver "^5.4.1"
source-map "^0.5.0"
-"@babel/generator@^7.4.0", "@babel/generator@^7.6.2":
- version "7.6.2"
- resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.6.2.tgz#dac8a3c2df118334c2a29ff3446da1636a8f8c03"
- integrity sha512-j8iHaIW4gGPnViaIHI7e9t/Hl8qLjERI6DcV9kEpAIDJsAOrcnXqRS7t+QbhL76pwbtqP+QCQLL0z1CyVmtjjQ==
+"@babel/generator@^7.4.0", "@babel/generator@^7.8.4":
+ version "7.8.4"
+ resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.8.4.tgz#35bbc74486956fe4251829f9f6c48330e8d0985e"
+ integrity sha512-PwhclGdRpNAf3IxZb0YVuITPZmmrXz9zf6fH8lT4XbrmfQKr6ryBzhv593P5C6poJRciFCL/eHGW2NuGrgEyxA==
dependencies:
- "@babel/types" "^7.6.0"
+ "@babel/types" "^7.8.3"
jsesc "^2.5.1"
lodash "^4.17.13"
source-map "^0.5.0"
-"@babel/helper-annotate-as-pure@^7.0.0":
- version "7.0.0"
- resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.0.0.tgz#323d39dd0b50e10c7c06ca7d7638e6864d8c5c32"
- integrity sha512-3UYcJUj9kvSLbLbUIfQTqzcy5VX7GRZ/CCDrnOaZorFFM01aXp1+GJwuFGV4NDDoAS+mOUyHcO6UD/RfqOks3Q==
+"@babel/helper-annotate-as-pure@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.8.3.tgz#60bc0bc657f63a0924ff9a4b4a0b24a13cf4deee"
+ integrity sha512-6o+mJrZBxOoEX77Ezv9zwW7WV8DdluouRKNY/IR5u/YTMuKHgugHOzYWlYvYLpLA9nPsQCAAASpCIbjI9Mv+Uw==
dependencies:
- "@babel/types" "^7.0.0"
+ "@babel/types" "^7.8.3"
-"@babel/helper-builder-binary-assignment-operator-visitor@^7.1.0":
- version "7.1.0"
- resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.1.0.tgz#6b69628dfe4087798e0c4ed98e3d4a6b2fbd2f5f"
- integrity sha512-qNSR4jrmJ8M1VMM9tibvyRAHXQs2PmaksQF7c1CGJNipfe3D8p+wgNwgso/P2A2r2mdgBWAXljNWR0QRZAMW8w==
+"@babel/helper-builder-binary-assignment-operator-visitor@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.8.3.tgz#c84097a427a061ac56a1c30ebf54b7b22d241503"
+ integrity sha512-5eFOm2SyFPK4Rh3XMMRDjN7lBH0orh3ss0g3rTYZnBQ+r6YPj7lgDyCvPphynHvUrobJmeMignBr6Acw9mAPlw==
dependencies:
- "@babel/helper-explode-assignable-expression" "^7.1.0"
- "@babel/types" "^7.0.0"
+ "@babel/helper-explode-assignable-expression" "^7.8.3"
+ "@babel/types" "^7.8.3"
-"@babel/helper-call-delegate@^7.4.4":
- version "7.4.4"
- resolved "https://registry.yarnpkg.com/@babel/helper-call-delegate/-/helper-call-delegate-7.4.4.tgz#87c1f8ca19ad552a736a7a27b1c1fcf8b1ff1f43"
- integrity sha512-l79boDFJ8S1c5hvQvG+rc+wHw6IuH7YldmRKsYtpbawsxURu/paVy57FZMomGK22/JckepaikOkY0MoAmdyOlQ==
- dependencies:
- "@babel/helper-hoist-variables" "^7.4.4"
- "@babel/traverse" "^7.4.4"
- "@babel/types" "^7.4.4"
-
-"@babel/helper-create-class-features-plugin@^7.5.5", "@babel/helper-create-class-features-plugin@^7.6.0":
- version "7.6.0"
- resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.6.0.tgz#769711acca889be371e9bc2eb68641d55218021f"
- integrity sha512-O1QWBko4fzGju6VoVvrZg0RROCVifcLxiApnGP3OWfWzvxRZFCoBD81K5ur5e3bVY2Vf/5rIJm8cqPKn8HUJng==
- dependencies:
- "@babel/helper-function-name" "^7.1.0"
- "@babel/helper-member-expression-to-functions" "^7.5.5"
- "@babel/helper-optimise-call-expression" "^7.0.0"
- "@babel/helper-plugin-utils" "^7.0.0"
- "@babel/helper-replace-supers" "^7.5.5"
- "@babel/helper-split-export-declaration" "^7.4.4"
-
-"@babel/helper-define-map@^7.5.5":
- version "7.5.5"
- resolved "https://registry.yarnpkg.com/@babel/helper-define-map/-/helper-define-map-7.5.5.tgz#3dec32c2046f37e09b28c93eb0b103fd2a25d369"
- integrity sha512-fTfxx7i0B5NJqvUOBBGREnrqbTxRh7zinBANpZXAVDlsZxYdclDp467G1sQ8VZYMnAURY3RpBUAgOYT9GfzHBg==
- dependencies:
- "@babel/helper-function-name" "^7.1.0"
- "@babel/types" "^7.5.5"
+"@babel/helper-call-delegate@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/helper-call-delegate/-/helper-call-delegate-7.8.3.tgz#de82619898aa605d409c42be6ffb8d7204579692"
+ integrity sha512-6Q05px0Eb+N4/GTyKPPvnkig7Lylw+QzihMpws9iiZQv7ZImf84ZsZpQH7QoWN4n4tm81SnSzPgHw2qtO0Zf3A==
+ dependencies:
+ "@babel/helper-hoist-variables" "^7.8.3"
+ "@babel/traverse" "^7.8.3"
+ "@babel/types" "^7.8.3"
+
+"@babel/helper-compilation-targets@^7.8.4":
+ version "7.8.4"
+ resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.8.4.tgz#03d7ecd454b7ebe19a254f76617e61770aed2c88"
+ integrity sha512-3k3BsKMvPp5bjxgMdrFyq0UaEO48HciVrOVF0+lon8pp95cyJ2ujAh0TrBHNMnJGT2rr0iKOJPFFbSqjDyf/Pg==
+ dependencies:
+ "@babel/compat-data" "^7.8.4"
+ browserslist "^4.8.5"
+ invariant "^2.2.4"
+ levenary "^1.1.1"
+ semver "^5.5.0"
+
+"@babel/helper-create-class-features-plugin@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.8.3.tgz#5b94be88c255f140fd2c10dd151e7f98f4bff397"
+ integrity sha512-qmp4pD7zeTxsv0JNecSBsEmG1ei2MqwJq4YQcK3ZWm/0t07QstWfvuV/vm3Qt5xNMFETn2SZqpMx2MQzbtq+KA==
+ dependencies:
+ "@babel/helper-function-name" "^7.8.3"
+ "@babel/helper-member-expression-to-functions" "^7.8.3"
+ "@babel/helper-optimise-call-expression" "^7.8.3"
+ "@babel/helper-plugin-utils" "^7.8.3"
+ "@babel/helper-replace-supers" "^7.8.3"
+ "@babel/helper-split-export-declaration" "^7.8.3"
+
+"@babel/helper-create-regexp-features-plugin@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.8.3.tgz#c774268c95ec07ee92476a3862b75cc2839beb79"
+ integrity sha512-Gcsm1OHCUr9o9TcJln57xhWHtdXbA2pgQ58S0Lxlks0WMGNXuki4+GLfX0p+L2ZkINUGZvfkz8rzoqJQSthI+Q==
+ dependencies:
+ "@babel/helper-regex" "^7.8.3"
+ regexpu-core "^4.6.0"
+
+"@babel/helper-define-map@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/helper-define-map/-/helper-define-map-7.8.3.tgz#a0655cad5451c3760b726eba875f1cd8faa02c15"
+ integrity sha512-PoeBYtxoZGtct3md6xZOCWPcKuMuk3IHhgxsRRNtnNShebf4C8YonTSblsK4tvDbm+eJAw2HAPOfCr+Q/YRG/g==
+ dependencies:
+ "@babel/helper-function-name" "^7.8.3"
+ "@babel/types" "^7.8.3"
lodash "^4.17.13"
-"@babel/helper-explode-assignable-expression@^7.1.0":
- version "7.1.0"
- resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.1.0.tgz#537fa13f6f1674df745b0c00ec8fe4e99681c8f6"
- integrity sha512-NRQpfHrJ1msCHtKjbzs9YcMmJZOg6mQMmGRB+hbamEdG5PNpaSm95275VD92DvJKuyl0s2sFiDmMZ+EnnvufqA==
+"@babel/helper-explode-assignable-expression@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.8.3.tgz#a728dc5b4e89e30fc2dfc7d04fa28a930653f982"
+ integrity sha512-N+8eW86/Kj147bO9G2uclsg5pwfs/fqqY5rwgIL7eTBklgXjcOJ3btzS5iM6AitJcftnY7pm2lGsrJVYLGjzIw==
dependencies:
- "@babel/traverse" "^7.1.0"
- "@babel/types" "^7.0.0"
+ "@babel/traverse" "^7.8.3"
+ "@babel/types" "^7.8.3"
-"@babel/helper-function-name@^7.1.0":
- version "7.1.0"
- resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.1.0.tgz#a0ceb01685f73355d4360c1247f582bfafc8ff53"
- integrity sha512-A95XEoCpb3TO+KZzJ4S/5uW5fNe26DjBGqf1o9ucyLyCmi1dXq/B3c8iaWTfBk3VvetUxl16e8tIrd5teOCfGw==
+"@babel/helper-function-name@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.8.3.tgz#eeeb665a01b1f11068e9fb86ad56a1cb1a824cca"
+ integrity sha512-BCxgX1BC2hD/oBlIFUgOCQDOPV8nSINxCwM3o93xP4P9Fq6aV5sgv2cOOITDMtCfQ+3PvHp3l689XZvAM9QyOA==
dependencies:
- "@babel/helper-get-function-arity" "^7.0.0"
- "@babel/template" "^7.1.0"
- "@babel/types" "^7.0.0"
+ "@babel/helper-get-function-arity" "^7.8.3"
+ "@babel/template" "^7.8.3"
+ "@babel/types" "^7.8.3"
-"@babel/helper-get-function-arity@^7.0.0":
- version "7.0.0"
- resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.0.0.tgz#83572d4320e2a4657263734113c42868b64e49c3"
- integrity sha512-r2DbJeg4svYvt3HOS74U4eWKsUAMRH01Z1ds1zx8KNTPtpTL5JAsdFv8BNyOpVqdFhHkkRDIg5B4AsxmkjAlmQ==
+"@babel/helper-get-function-arity@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.8.3.tgz#b894b947bd004381ce63ea1db9f08547e920abd5"
+ integrity sha512-FVDR+Gd9iLjUMY1fzE2SR0IuaJToR4RkCDARVfsBBPSP53GEqSFjD8gNyxg246VUyc/ALRxFaAK8rVG7UT7xRA==
dependencies:
- "@babel/types" "^7.0.0"
+ "@babel/types" "^7.8.3"
-"@babel/helper-hoist-variables@^7.4.4":
- version "7.4.4"
- resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.4.4.tgz#0298b5f25c8c09c53102d52ac4a98f773eb2850a"
- integrity sha512-VYk2/H/BnYbZDDg39hr3t2kKyifAm1W6zHRfhx8jGjIHpQEBv9dry7oQ2f3+J703TLu69nYdxsovl0XYfcnK4w==
+"@babel/helper-hoist-variables@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.8.3.tgz#1dbe9b6b55d78c9b4183fc8cdc6e30ceb83b7134"
+ integrity sha512-ky1JLOjcDUtSc+xkt0xhYff7Z6ILTAHKmZLHPxAhOP0Nd77O+3nCsd6uSVYur6nJnCI029CrNbYlc0LoPfAPQg==
dependencies:
- "@babel/types" "^7.4.4"
+ "@babel/types" "^7.8.3"
-"@babel/helper-member-expression-to-functions@^7.5.5":
- version "7.5.5"
- resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.5.5.tgz#1fb5b8ec4453a93c439ee9fe3aeea4a84b76b590"
- integrity sha512-5qZ3D1uMclSNqYcXqiHoA0meVdv+xUEex9em2fqMnrk/scphGlGgg66zjMrPJESPwrFJ6sbfFQYUSa0Mz7FabA==
+"@babel/helper-member-expression-to-functions@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.8.3.tgz#659b710498ea6c1d9907e0c73f206eee7dadc24c"
+ integrity sha512-fO4Egq88utkQFjbPrSHGmGLFqmrshs11d46WI+WZDESt7Wu7wN2G2Iu+NMMZJFDOVRHAMIkB5SNh30NtwCA7RA==
dependencies:
- "@babel/types" "^7.5.5"
+ "@babel/types" "^7.8.3"
-"@babel/helper-module-imports@^7.0.0", "@babel/helper-module-imports@^7.0.0-beta.49":
+"@babel/helper-module-imports@^7.0.0-beta.49", "@babel/helper-module-imports@^7.8.3":
version "7.8.3"
resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.8.3.tgz#7fe39589b39c016331b6b8c3f441e8f0b1419498"
integrity sha512-R0Bx3jippsbAEtzkpZ/6FIiuzOURPcMjHp+Z6xPe6DtApDJx+w7UYyOLanZqO8+wKR9G10s/FmHXvxaMd9s6Kg==
dependencies:
"@babel/types" "^7.8.3"
-"@babel/helper-module-transforms@^7.1.0", "@babel/helper-module-transforms@^7.4.4":
- version "7.5.5"
- resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.5.5.tgz#f84ff8a09038dcbca1fd4355661a500937165b4a"
- integrity sha512-jBeCvETKuJqeiaCdyaheF40aXnnU1+wkSiUs/IQg3tB85up1LyL8x77ClY8qJpuRJUcXQo+ZtdNESmZl4j56Pw==
+"@babel/helper-module-transforms@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.8.3.tgz#d305e35d02bee720fbc2c3c3623aa0c316c01590"
+ integrity sha512-C7NG6B7vfBa/pwCOshpMbOYUmrYQDfCpVL/JCRu0ek8B5p8kue1+BCXpg2vOYs7w5ACB9GTOBYQ5U6NwrMg+3Q==
dependencies:
- "@babel/helper-module-imports" "^7.0.0"
- "@babel/helper-simple-access" "^7.1.0"
- "@babel/helper-split-export-declaration" "^7.4.4"
- "@babel/template" "^7.4.4"
- "@babel/types" "^7.5.5"
+ "@babel/helper-module-imports" "^7.8.3"
+ "@babel/helper-simple-access" "^7.8.3"
+ "@babel/helper-split-export-declaration" "^7.8.3"
+ "@babel/template" "^7.8.3"
+ "@babel/types" "^7.8.3"
lodash "^4.17.13"
-"@babel/helper-optimise-call-expression@^7.0.0":
- version "7.0.0"
- resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.0.0.tgz#a2920c5702b073c15de51106200aa8cad20497d5"
- integrity sha512-u8nd9NQePYNQV8iPWu/pLLYBqZBa4ZaY1YWRFMuxrid94wKI1QNt67NEZ7GAe5Kc/0LLScbim05xZFWkAdrj9g==
+"@babel/helper-optimise-call-expression@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.8.3.tgz#7ed071813d09c75298ef4f208956006b6111ecb9"
+ integrity sha512-Kag20n86cbO2AvHca6EJsvqAd82gc6VMGule4HwebwMlwkpXuVqrNRj6CkCV2sKxgi9MyAUnZVnZ6lJ1/vKhHQ==
dependencies:
- "@babel/types" "^7.0.0"
+ "@babel/types" "^7.8.3"
-"@babel/helper-plugin-utils@^7.0.0":
- version "7.0.0"
- resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.0.0.tgz#bbb3fbee98661c569034237cc03967ba99b4f250"
- integrity sha512-CYAOUCARwExnEixLdB6sDm2dIJ/YgEAKDM1MOeMeZu9Ld/bDgVo8aiWrXwcY7OBh+1Ea2uUcVRcxKk0GJvW7QA==
+"@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.8.3.tgz#9ea293be19babc0f52ff8ca88b34c3611b208670"
+ integrity sha512-j+fq49Xds2smCUNYmEHF9kGNkhbet6yVIBp4e6oeQpH1RUs/Ir06xUKzDjDkGcaaokPiTNs2JBWHjaE4csUkZQ==
-"@babel/helper-regex@^7.0.0", "@babel/helper-regex@^7.4.4":
- version "7.5.5"
- resolved "https://registry.yarnpkg.com/@babel/helper-regex/-/helper-regex-7.5.5.tgz#0aa6824f7100a2e0e89c1527c23936c152cab351"
- integrity sha512-CkCYQLkfkiugbRDO8eZn6lRuR8kzZoGXCg3149iTk5se7g6qykSpy3+hELSwquhu+TgHn8nkLiBwHvNX8Hofcw==
+"@babel/helper-regex@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/helper-regex/-/helper-regex-7.8.3.tgz#139772607d51b93f23effe72105b319d2a4c6965"
+ integrity sha512-BWt0QtYv/cg/NecOAZMdcn/waj/5P26DR4mVLXfFtDokSR6fyuG0Pj+e2FqtSME+MqED1khnSMulkmGl8qWiUQ==
dependencies:
lodash "^4.17.13"
-"@babel/helper-remap-async-to-generator@^7.1.0":
- version "7.1.0"
- resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.1.0.tgz#361d80821b6f38da75bd3f0785ece20a88c5fe7f"
- integrity sha512-3fOK0L+Fdlg8S5al8u/hWE6vhufGSn0bN09xm2LXMy//REAF8kDCrYoOBKYmA8m5Nom+sV9LyLCwrFynA8/slg==
+"@babel/helper-remap-async-to-generator@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.8.3.tgz#273c600d8b9bf5006142c1e35887d555c12edd86"
+ integrity sha512-kgwDmw4fCg7AVgS4DukQR/roGp+jP+XluJE5hsRZwxCYGg+Rv9wSGErDWhlI90FODdYfd4xG4AQRiMDjjN0GzA==
dependencies:
- "@babel/helper-annotate-as-pure" "^7.0.0"
- "@babel/helper-wrap-function" "^7.1.0"
- "@babel/template" "^7.1.0"
- "@babel/traverse" "^7.1.0"
- "@babel/types" "^7.0.0"
+ "@babel/helper-annotate-as-pure" "^7.8.3"
+ "@babel/helper-wrap-function" "^7.8.3"
+ "@babel/template" "^7.8.3"
+ "@babel/traverse" "^7.8.3"
+ "@babel/types" "^7.8.3"
-"@babel/helper-replace-supers@^7.5.5":
- version "7.5.5"
- resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.5.5.tgz#f84ce43df031222d2bad068d2626cb5799c34bc2"
- integrity sha512-XvRFWrNnlsow2u7jXDuH4jDDctkxbS7gXssrP4q2nUD606ukXHRvydj346wmNg+zAgpFx4MWf4+usfC93bElJg==
+"@babel/helper-replace-supers@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.8.3.tgz#91192d25f6abbcd41da8a989d4492574fb1530bc"
+ integrity sha512-xOUssL6ho41U81etpLoT2RTdvdus4VfHamCuAm4AHxGr+0it5fnwoVdwUJ7GFEqCsQYzJUhcbsN9wB9apcYKFA==
dependencies:
- "@babel/helper-member-expression-to-functions" "^7.5.5"
- "@babel/helper-optimise-call-expression" "^7.0.0"
- "@babel/traverse" "^7.5.5"
- "@babel/types" "^7.5.5"
+ "@babel/helper-member-expression-to-functions" "^7.8.3"
+ "@babel/helper-optimise-call-expression" "^7.8.3"
+ "@babel/traverse" "^7.8.3"
+ "@babel/types" "^7.8.3"
-"@babel/helper-simple-access@^7.1.0":
- version "7.1.0"
- resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.1.0.tgz#65eeb954c8c245beaa4e859da6188f39d71e585c"
- integrity sha512-Vk+78hNjRbsiu49zAPALxTb+JUQCz1aolpd8osOF16BGnLtseD21nbHgLPGUwrXEurZgiCOUmvs3ExTu4F5x6w==
+"@babel/helper-simple-access@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.8.3.tgz#7f8109928b4dab4654076986af575231deb639ae"
+ integrity sha512-VNGUDjx5cCWg4vvCTR8qQ7YJYZ+HBjxOgXEl7ounz+4Sn7+LMD3CFrCTEU6/qXKbA2nKg21CwhhBzO0RpRbdCw==
dependencies:
- "@babel/template" "^7.1.0"
- "@babel/types" "^7.0.0"
+ "@babel/template" "^7.8.3"
+ "@babel/types" "^7.8.3"
-"@babel/helper-split-export-declaration@^7.4.4":
- version "7.4.4"
- resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.4.4.tgz#ff94894a340be78f53f06af038b205c49d993677"
- integrity sha512-Ro/XkzLf3JFITkW6b+hNxzZ1n5OQ80NvIUdmHspih1XAhtN3vPTuUFT4eQnela+2MaZ5ulH+iyP513KJrxbN7Q==
+"@babel/helper-split-export-declaration@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.8.3.tgz#31a9f30070f91368a7182cf05f831781065fc7a9"
+ integrity sha512-3x3yOeyBhW851hroze7ElzdkeRXQYQbFIb7gLK1WQYsw2GWDay5gAJNw1sWJ0VFP6z5J1whqeXH/WCdCjZv6dA==
dependencies:
- "@babel/types" "^7.4.4"
+ "@babel/types" "^7.8.3"
-"@babel/helper-wrap-function@^7.1.0":
- version "7.2.0"
- resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.2.0.tgz#c4e0012445769e2815b55296ead43a958549f6fa"
- integrity sha512-o9fP1BZLLSrYlxYEYyl2aS+Flun5gtjTIG8iln+XuEzQTs0PLagAGSXUcqruJwD5fM48jzIEggCKpIfWTcR7pQ==
+"@babel/helper-wrap-function@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.8.3.tgz#9dbdb2bb55ef14aaa01fe8c99b629bd5352d8610"
+ integrity sha512-LACJrbUET9cQDzb6kG7EeD7+7doC3JNvUgTEQOx2qaO1fKlzE/Bf05qs9w1oXQMmXlPO65lC3Tq9S6gZpTErEQ==
dependencies:
- "@babel/helper-function-name" "^7.1.0"
- "@babel/template" "^7.1.0"
- "@babel/traverse" "^7.1.0"
- "@babel/types" "^7.2.0"
+ "@babel/helper-function-name" "^7.8.3"
+ "@babel/template" "^7.8.3"
+ "@babel/traverse" "^7.8.3"
+ "@babel/types" "^7.8.3"
-"@babel/helpers@^7.6.2":
- version "7.6.2"
- resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.6.2.tgz#681ffe489ea4dcc55f23ce469e58e59c1c045153"
- integrity sha512-3/bAUL8zZxYs1cdX2ilEE0WobqbCmKWr/889lf2SS0PpDcpEIY8pb1CCyz0pEcX3pEb+MCbks1jIokz2xLtGTA==
+"@babel/helpers@^7.8.4":
+ version "7.8.4"
+ resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.8.4.tgz#754eb3ee727c165e0a240d6c207de7c455f36f73"
+ integrity sha512-VPbe7wcQ4chu4TDQjimHv/5tj73qz88o12EPkO2ValS2QiQS/1F2SsjyIGNnAD0vF/nZS6Cf9i+vW6HIlnaR8w==
dependencies:
- "@babel/template" "^7.6.0"
- "@babel/traverse" "^7.6.2"
- "@babel/types" "^7.6.0"
+ "@babel/template" "^7.8.3"
+ "@babel/traverse" "^7.8.4"
+ "@babel/types" "^7.8.3"
-"@babel/highlight@^7.0.0":
- version "7.5.0"
- resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.5.0.tgz#56d11312bd9248fa619591d02472be6e8cb32540"
- integrity sha512-7dV4eu9gBxoM0dAnj/BCFDW9LFU0zvTrkq0ugM7pnHEgguOEeOz1so2ZghEdzviYzQEED0r4EAgpsBChKy1TRQ==
+"@babel/highlight@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.8.3.tgz#28f173d04223eaaa59bc1d439a3836e6d1265797"
+ integrity sha512-PX4y5xQUvy0fnEVHrYOarRPXVWafSjTW9T0Hab8gVIawpl2Sj0ORyrygANq+KjcNlSSTw0YCLSNA8OyZ1I4yEg==
dependencies:
chalk "^2.0.0"
esutils "^2.0.2"
js-tokens "^4.0.0"
-"@babel/parser@^7.0.0", "@babel/parser@^7.1.0", "@babel/parser@^7.4.3", "@babel/parser@^7.6.0", "@babel/parser@^7.6.2":
- version "7.6.2"
- resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.6.2.tgz#205e9c95e16ba3b8b96090677a67c9d6075b70a1"
- integrity sha512-mdFqWrSPCmikBoaBYMuBulzTIKuXVPtEISFbRRVNwMWpCms/hmE2kRq0bblUHaNRKrjRlmVbx1sDHmjmRgD2Xg==
+"@babel/parser@^7.0.0", "@babel/parser@^7.1.0", "@babel/parser@^7.4.3", "@babel/parser@^7.8.3", "@babel/parser@^7.8.4":
+ version "7.8.4"
+ resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.8.4.tgz#d1dbe64691d60358a974295fa53da074dd2ce8e8"
+ integrity sha512-0fKu/QqildpXmPVaRBoXOlyBb3MC+J0A66x97qEfLOMkn3u6nfY5esWogQwi/K0BjASYy4DbnsEWnpNL6qT5Mw==
+
+"@babel/plugin-proposal-async-generator-functions@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.8.3.tgz#bad329c670b382589721b27540c7d288601c6e6f"
+ integrity sha512-NZ9zLv848JsV3hs8ryEh7Uaz/0KsmPLqv0+PdkDJL1cJy0K4kOCFa8zc1E3mp+RHPQcpdfb/6GovEsW4VDrOMw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.3"
+ "@babel/helper-remap-async-to-generator" "^7.8.3"
+ "@babel/plugin-syntax-async-generators" "^7.8.0"
-"@babel/plugin-proposal-async-generator-functions@^7.2.0":
- version "7.2.0"
- resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.2.0.tgz#b289b306669dce4ad20b0252889a15768c9d417e"
- integrity sha512-+Dfo/SCQqrwx48ptLVGLdE39YtWRuKc/Y9I5Fy0P1DDBB9lsAHpjcEJQt+4IifuSOSTLBKJObJqMvaO1pIE8LQ==
+"@babel/plugin-proposal-class-properties@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.8.3.tgz#5e06654af5cd04b608915aada9b2a6788004464e"
+ integrity sha512-EqFhbo7IosdgPgZggHaNObkmO1kNUe3slaKu54d5OWvy+p9QIKOzK1GAEpAIsZtWVtPXUHSMcT4smvDrCfY4AA==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
- "@babel/helper-remap-async-to-generator" "^7.1.0"
- "@babel/plugin-syntax-async-generators" "^7.2.0"
+ "@babel/helper-create-class-features-plugin" "^7.8.3"
+ "@babel/helper-plugin-utils" "^7.8.3"
-"@babel/plugin-proposal-class-properties@^7.5.5":
- version "7.5.5"
- resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.5.5.tgz#a974cfae1e37c3110e71f3c6a2e48b8e71958cd4"
- integrity sha512-AF79FsnWFxjlaosgdi421vmYG6/jg79bVD0dpD44QdgobzHKuLZ6S3vl8la9qIeSwGi8i1fS0O1mfuDAAdo1/A==
+"@babel/plugin-proposal-dynamic-import@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.8.3.tgz#38c4fe555744826e97e2ae930b0fb4cc07e66054"
+ integrity sha512-NyaBbyLFXFLT9FP+zk0kYlUlA8XtCUbehs67F0nnEg7KICgMc2mNkIeu9TYhKzyXMkrapZFwAhXLdnt4IYHy1w==
dependencies:
- "@babel/helper-create-class-features-plugin" "^7.5.5"
- "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/helper-plugin-utils" "^7.8.3"
+ "@babel/plugin-syntax-dynamic-import" "^7.8.0"
-"@babel/plugin-proposal-dynamic-import@^7.5.0":
- version "7.5.0"
- resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.5.0.tgz#e532202db4838723691b10a67b8ce509e397c506"
- integrity sha512-x/iMjggsKTFHYC6g11PL7Qy58IK8H5zqfm9e6hu4z1iH2IRyAp9u9dL80zA6R76yFovETFLKz2VJIC2iIPBuFw==
+"@babel/plugin-proposal-json-strings@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.8.3.tgz#da5216b238a98b58a1e05d6852104b10f9a70d6b"
+ integrity sha512-KGhQNZ3TVCQG/MjRbAUwuH+14y9q0tpxs1nWWs3pbSleRdDro9SAMMDyye8HhY1gqZ7/NqIc8SKhya0wRDgP1Q==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
- "@babel/plugin-syntax-dynamic-import" "^7.2.0"
+ "@babel/helper-plugin-utils" "^7.8.3"
+ "@babel/plugin-syntax-json-strings" "^7.8.0"
-"@babel/plugin-proposal-json-strings@^7.2.0":
- version "7.2.0"
- resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.2.0.tgz#568ecc446c6148ae6b267f02551130891e29f317"
- integrity sha512-MAFV1CA/YVmYwZG0fBQyXhmj0BHCB5egZHCKWIFVv/XCxAeVGIHfos3SwDck4LvCllENIAg7xMKOG5kH0dzyUg==
+"@babel/plugin-proposal-nullish-coalescing-operator@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.8.3.tgz#e4572253fdeed65cddeecfdab3f928afeb2fd5d2"
+ integrity sha512-TS9MlfzXpXKt6YYomudb/KU7nQI6/xnapG6in1uZxoxDghuSMZsPb6D2fyUwNYSAp4l1iR7QtFOjkqcRYcUsfw==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
- "@babel/plugin-syntax-json-strings" "^7.2.0"
+ "@babel/helper-plugin-utils" "^7.8.3"
+ "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.0"
-"@babel/plugin-proposal-object-rest-spread@^7.6.2":
- version "7.6.2"
- resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.6.2.tgz#8ffccc8f3a6545e9f78988b6bf4fe881b88e8096"
- integrity sha512-LDBXlmADCsMZV1Y9OQwMc0MyGZ8Ta/zlD9N67BfQT8uYwkRswiu2hU6nJKrjrt/58aH/vqfQlR/9yId/7A2gWw==
+"@babel/plugin-proposal-object-rest-spread@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.8.3.tgz#eb5ae366118ddca67bed583b53d7554cad9951bb"
+ integrity sha512-8qvuPwU/xxUCt78HocNlv0mXXo0wdh9VT1R04WU8HGOfaOob26pF+9P5/lYjN/q7DHOX1bvX60hnhOvuQUJdbA==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
- "@babel/plugin-syntax-object-rest-spread" "^7.2.0"
+ "@babel/helper-plugin-utils" "^7.8.3"
+ "@babel/plugin-syntax-object-rest-spread" "^7.8.0"
-"@babel/plugin-proposal-optional-catch-binding@^7.2.0":
- version "7.2.0"
- resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.2.0.tgz#135d81edb68a081e55e56ec48541ece8065c38f5"
- integrity sha512-mgYj3jCcxug6KUcX4OBoOJz3CMrwRfQELPQ5560F70YQUBZB7uac9fqaWamKR1iWUzGiK2t0ygzjTScZnVz75g==
+"@babel/plugin-proposal-optional-catch-binding@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.8.3.tgz#9dee96ab1650eed88646ae9734ca167ac4a9c5c9"
+ integrity sha512-0gkX7J7E+AtAw9fcwlVQj8peP61qhdg/89D5swOkjYbkboA2CVckn3kiyum1DE0wskGb7KJJxBdyEBApDLLVdw==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
- "@babel/plugin-syntax-optional-catch-binding" "^7.2.0"
+ "@babel/helper-plugin-utils" "^7.8.3"
+ "@babel/plugin-syntax-optional-catch-binding" "^7.8.0"
-"@babel/plugin-proposal-optional-chaining@^7.7.5":
- version "7.7.5"
- resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.7.5.tgz#f0835f044cef85b31071a924010a2a390add11d4"
- integrity sha512-sOwFqT8JSchtJeDD+CjmWCaiFoLxY4Ps7NjvwHC/U7l4e9i5pTRNt8nDMIFSOUL+ncFbYSwruHM8WknYItWdXw==
+"@babel/plugin-proposal-optional-chaining@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.8.3.tgz#ae10b3214cb25f7adb1f3bc87ba42ca10b7e2543"
+ integrity sha512-QIoIR9abkVn+seDE3OjA08jWcs3eZ9+wJCKSRgo3WdEU2csFYgdScb+8qHB3+WXsGJD55u+5hWCISI7ejXS+kg==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
- "@babel/plugin-syntax-optional-chaining" "^7.7.4"
+ "@babel/helper-plugin-utils" "^7.8.3"
+ "@babel/plugin-syntax-optional-chaining" "^7.8.0"
-"@babel/plugin-proposal-private-methods@^7.6.0":
- version "7.6.0"
- resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.6.0.tgz#19ddc493c7b5d47afdd4291e740c609a83c9fae4"
- integrity sha512-G/qjXbdYXxQfDPPu2h7L48SkgO2H2MXoFkCPg+9nFFyMXRCN+ASm/iOT8o/2i3+YhnjFiz5tie+F5Xswvfm/LQ==
+"@babel/plugin-proposal-private-methods@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.8.3.tgz#01248c6c8dc292116b3b4ebd746150f4f0728bab"
+ integrity sha512-ysLAper960yy1TVXa2lMYdCQIGqtUXo8sVb+zYE7UTiZSLs6/wbZ0PrrXEKESJcK3SgFWrF8WpsaDzdslhuoZA==
dependencies:
- "@babel/helper-create-class-features-plugin" "^7.6.0"
- "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/helper-create-class-features-plugin" "^7.8.3"
+ "@babel/helper-plugin-utils" "^7.8.3"
-"@babel/plugin-proposal-unicode-property-regex@^7.6.2":
- version "7.6.2"
- resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.6.2.tgz#05413762894f41bfe42b9a5e80919bd575dcc802"
- integrity sha512-NxHETdmpeSCtiatMRYWVJo7266rrvAC3DTeG5exQBIH/fMIUK7ejDNznBbn3HQl/o9peymRRg7Yqkx6PdUXmMw==
+"@babel/plugin-proposal-unicode-property-regex@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.8.3.tgz#b646c3adea5f98800c9ab45105ac34d06cd4a47f"
+ integrity sha512-1/1/rEZv2XGweRwwSkLpY+s60za9OZ1hJs4YDqFHCw0kYWYwL5IFljVY1MYBL+weT1l9pokDO2uhSTLVxzoHkQ==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
- "@babel/helper-regex" "^7.4.4"
- regexpu-core "^4.6.0"
+ "@babel/helper-create-regexp-features-plugin" "^7.8.3"
+ "@babel/helper-plugin-utils" "^7.8.3"
-"@babel/plugin-syntax-async-generators@^7.2.0":
- version "7.2.0"
- resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.2.0.tgz#69e1f0db34c6f5a0cf7e2b3323bf159a76c8cb7f"
- integrity sha512-1ZrIRBv2t0GSlcwVoQ6VgSLpLgiN/FVQUzt9znxo7v2Ov4jJrs8RY8tv0wvDmFN3qIdMKWrmMMW6yZ0G19MfGg==
+"@babel/plugin-syntax-async-generators@^7.8.0":
+ version "7.8.4"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d"
+ integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/helper-plugin-utils" "^7.8.0"
-"@babel/plugin-syntax-dynamic-import@^7.2.0":
- version "7.2.0"
- resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.2.0.tgz#69c159ffaf4998122161ad8ebc5e6d1f55df8612"
- integrity sha512-mVxuJ0YroI/h/tbFTPGZR8cv6ai+STMKNBq0f8hFxsxWjl94qqhsb+wXbpNMDPU3cfR1TIsVFzU3nXyZMqyK4w==
+"@babel/plugin-syntax-dynamic-import@^7.8.0":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz#62bf98b2da3cd21d626154fc96ee5b3cb68eacb3"
+ integrity sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/helper-plugin-utils" "^7.8.0"
-"@babel/plugin-syntax-import-meta@^7.2.0":
- version "7.2.0"
- resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.2.0.tgz#2333ef4b875553a3bcd1e93f8ebc09f5b9213a40"
- integrity sha512-Hq6kFSZD7+PHkmBN8bCpHR6J8QEoCuEV/B38AIQscYjgMZkGlXB7cHNFzP5jR4RCh5545yP1ujHdmO7hAgKtBA==
+"@babel/plugin-syntax-import-meta@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.8.3.tgz#230afff79d3ccc215b5944b438e4e266daf3d84d"
+ integrity sha512-vYiGd4wQ9gx0Lngb7+bPCwQXGK/PR6FeTIJ+TIOlq+OfOKG/kCAOO2+IBac3oMM9qV7/fU76hfcqxUaLKZf1hQ==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/helper-plugin-utils" "^7.8.3"
-"@babel/plugin-syntax-json-strings@^7.2.0":
- version "7.2.0"
- resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.2.0.tgz#72bd13f6ffe1d25938129d2a186b11fd62951470"
- integrity sha512-5UGYnMSLRE1dqqZwug+1LISpA403HzlSfsg6P9VXU6TBjcSHeNlw4DxDx7LgpF+iKZoOG/+uzqoRHTdcUpiZNg==
+"@babel/plugin-syntax-json-strings@^7.8.0":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a"
+ integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/helper-plugin-utils" "^7.8.0"
-"@babel/plugin-syntax-object-rest-spread@^7.0.0", "@babel/plugin-syntax-object-rest-spread@^7.2.0":
- version "7.2.0"
- resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.2.0.tgz#3b7a3e733510c57e820b9142a6579ac8b0dfad2e"
- integrity sha512-t0JKGgqk2We+9may3t0xDdmneaXmyxq0xieYcKHxIsrJO64n1OiMWNUtc5gQK1PA0NpdCRrtZp4z+IUaKugrSA==
+"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.0":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9"
+ integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/helper-plugin-utils" "^7.8.0"
-"@babel/plugin-syntax-optional-catch-binding@^7.2.0":
- version "7.2.0"
- resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.2.0.tgz#a94013d6eda8908dfe6a477e7f9eda85656ecf5c"
- integrity sha512-bDe4xKNhb0LI7IvZHiA13kff0KEfaGX/Hv4lMA9+7TEc63hMNvfKo6ZFpXhKuEp+II/q35Gc4NoMeDZyaUbj9w==
+"@babel/plugin-syntax-object-rest-spread@^7.0.0", "@babel/plugin-syntax-object-rest-spread@^7.8.0":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871"
+ integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/helper-plugin-utils" "^7.8.0"
-"@babel/plugin-syntax-optional-chaining@^7.7.4":
- version "7.7.4"
- resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.7.4.tgz#c91fdde6de85d2eb8906daea7b21944c3610c901"
- integrity sha512-2MqYD5WjZSbJdUagnJvIdSfkb/ucOC9/1fRJxm7GAxY6YQLWlUvkfxoNbUPcPLHJyetKUDQ4+yyuUyAoc0HriA==
+"@babel/plugin-syntax-optional-catch-binding@^7.8.0":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1"
+ integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/helper-plugin-utils" "^7.8.0"
-"@babel/plugin-transform-arrow-functions@^7.2.0":
- version "7.2.0"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.2.0.tgz#9aeafbe4d6ffc6563bf8f8372091628f00779550"
- integrity sha512-ER77Cax1+8/8jCB9fo4Ud161OZzWN5qawi4GusDuRLcDbDG+bIGYY20zb2dfAFdTRGzrfq2xZPvF0R64EHnimg==
+"@babel/plugin-syntax-optional-chaining@^7.8.0":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a"
+ integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/helper-plugin-utils" "^7.8.0"
-"@babel/plugin-transform-async-to-generator@^7.5.0":
- version "7.5.0"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.5.0.tgz#89a3848a0166623b5bc481164b5936ab947e887e"
- integrity sha512-mqvkzwIGkq0bEF1zLRRiTdjfomZJDV33AH3oQzHVGkI2VzEmXLpKKOBvEVaFZBJdN0XTyH38s9j/Kiqr68dggg==
+"@babel/plugin-syntax-top-level-await@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.8.3.tgz#3acdece695e6b13aaf57fc291d1a800950c71391"
+ integrity sha512-kwj1j9lL/6Wd0hROD3b/OZZ7MSrZLqqn9RAZ5+cYYsflQ9HZBIKCUkr3+uL1MEJ1NePiUbf98jjiMQSv0NMR9g==
dependencies:
- "@babel/helper-module-imports" "^7.0.0"
- "@babel/helper-plugin-utils" "^7.0.0"
- "@babel/helper-remap-async-to-generator" "^7.1.0"
+ "@babel/helper-plugin-utils" "^7.8.3"
-"@babel/plugin-transform-block-scoped-functions@^7.2.0":
- version "7.2.0"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.2.0.tgz#5d3cc11e8d5ddd752aa64c9148d0db6cb79fd190"
- integrity sha512-ntQPR6q1/NKuphly49+QiQiTN0O63uOwjdD6dhIjSWBI5xlrbUFh720TIpzBhpnrLfv2tNH/BXvLIab1+BAI0w==
+"@babel/plugin-transform-arrow-functions@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.8.3.tgz#82776c2ed0cd9e1a49956daeb896024c9473b8b6"
+ integrity sha512-0MRF+KC8EqH4dbuITCWwPSzsyO3HIWWlm30v8BbbpOrS1B++isGxPnnuq/IZvOX5J2D/p7DQalQm+/2PnlKGxg==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/helper-plugin-utils" "^7.8.3"
-"@babel/plugin-transform-block-scoping@^7.6.2":
- version "7.6.2"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.6.2.tgz#96c33ab97a9ae500cc6f5b19e04a7e6553360a79"
- integrity sha512-zZT8ivau9LOQQaOGC7bQLQOT4XPkPXgN2ERfUgk1X8ql+mVkLc4E8eKk+FO3o0154kxzqenWCorfmEXpEZcrSQ==
+"@babel/plugin-transform-async-to-generator@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.8.3.tgz#4308fad0d9409d71eafb9b1a6ee35f9d64b64086"
+ integrity sha512-imt9tFLD9ogt56Dd5CI/6XgpukMwd/fLGSrix2httihVe7LOGVPhyhMh1BU5kDM7iHD08i8uUtmV2sWaBFlHVQ==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/helper-module-imports" "^7.8.3"
+ "@babel/helper-plugin-utils" "^7.8.3"
+ "@babel/helper-remap-async-to-generator" "^7.8.3"
+
+"@babel/plugin-transform-block-scoped-functions@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.8.3.tgz#437eec5b799b5852072084b3ae5ef66e8349e8a3"
+ integrity sha512-vo4F2OewqjbB1+yaJ7k2EJFHlTP3jR634Z9Cj9itpqNjuLXvhlVxgnjsHsdRgASR8xYDrx6onw4vW5H6We0Jmg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.3"
+
+"@babel/plugin-transform-block-scoping@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.8.3.tgz#97d35dab66857a437c166358b91d09050c868f3a"
+ integrity sha512-pGnYfm7RNRgYRi7bids5bHluENHqJhrV4bCZRwc5GamaWIIs07N4rZECcmJL6ZClwjDz1GbdMZFtPs27hTB06w==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.3"
lodash "^4.17.13"
-"@babel/plugin-transform-classes@^7.5.5":
- version "7.5.5"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.5.5.tgz#d094299d9bd680a14a2a0edae38305ad60fb4de9"
- integrity sha512-U2htCNK/6e9K7jGyJ++1p5XRU+LJjrwtoiVn9SzRlDT2KubcZ11OOwy3s24TjHxPgxNwonCYP7U2K51uVYCMDg==
- dependencies:
- "@babel/helper-annotate-as-pure" "^7.0.0"
- "@babel/helper-define-map" "^7.5.5"
- "@babel/helper-function-name" "^7.1.0"
- "@babel/helper-optimise-call-expression" "^7.0.0"
- "@babel/helper-plugin-utils" "^7.0.0"
- "@babel/helper-replace-supers" "^7.5.5"
- "@babel/helper-split-export-declaration" "^7.4.4"
+"@babel/plugin-transform-classes@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.8.3.tgz#46fd7a9d2bb9ea89ce88720477979fe0d71b21b8"
+ integrity sha512-SjT0cwFJ+7Rbr1vQsvphAHwUHvSUPmMjMU/0P59G8U2HLFqSa082JO7zkbDNWs9kH/IUqpHI6xWNesGf8haF1w==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.8.3"
+ "@babel/helper-define-map" "^7.8.3"
+ "@babel/helper-function-name" "^7.8.3"
+ "@babel/helper-optimise-call-expression" "^7.8.3"
+ "@babel/helper-plugin-utils" "^7.8.3"
+ "@babel/helper-replace-supers" "^7.8.3"
+ "@babel/helper-split-export-declaration" "^7.8.3"
globals "^11.1.0"
-"@babel/plugin-transform-computed-properties@^7.2.0":
- version "7.2.0"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.2.0.tgz#83a7df6a658865b1c8f641d510c6f3af220216da"
- integrity sha512-kP/drqTxY6Xt3NNpKiMomfgkNn4o7+vKxK2DDKcBG9sHj51vHqMBGy8wbDS/J4lMxnqs153/T3+DmCEAkC5cpA==
+"@babel/plugin-transform-computed-properties@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.8.3.tgz#96d0d28b7f7ce4eb5b120bb2e0e943343c86f81b"
+ integrity sha512-O5hiIpSyOGdrQZRQ2ccwtTVkgUDBBiCuK//4RJ6UfePllUTCENOzKxfh6ulckXKc0DixTFLCfb2HVkNA7aDpzA==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/helper-plugin-utils" "^7.8.3"
-"@babel/plugin-transform-destructuring@^7.6.0":
- version "7.6.0"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.6.0.tgz#44bbe08b57f4480094d57d9ffbcd96d309075ba6"
- integrity sha512-2bGIS5P1v4+sWTCnKNDZDxbGvEqi0ijeqM/YqHtVGrvG2y0ySgnEEhXErvE9dA0bnIzY9bIzdFK0jFA46ASIIQ==
+"@babel/plugin-transform-destructuring@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.8.3.tgz#20ddfbd9e4676906b1056ee60af88590cc7aaa0b"
+ integrity sha512-H4X646nCkiEcHZUZaRkhE2XVsoz0J/1x3VVujnn96pSoGCtKPA99ZZA+va+gK+92Zycd6OBKCD8tDb/731bhgQ==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/helper-plugin-utils" "^7.8.3"
-"@babel/plugin-transform-dotall-regex@^7.6.2":
- version "7.6.2"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.6.2.tgz#44abb948b88f0199a627024e1508acaf8dc9b2f9"
- integrity sha512-KGKT9aqKV+9YMZSkowzYoYEiHqgaDhGmPNZlZxX6UeHC4z30nC1J9IrZuGqbYFB1jaIGdv91ujpze0exiVK8bA==
+"@babel/plugin-transform-dotall-regex@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.8.3.tgz#c3c6ec5ee6125c6993c5cbca20dc8621a9ea7a6e"
+ integrity sha512-kLs1j9Nn4MQoBYdRXH6AeaXMbEJFaFu/v1nQkvib6QzTj8MZI5OQzqmD83/2jEM1z0DLilra5aWO5YpyC0ALIw==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
- "@babel/helper-regex" "^7.4.4"
- regexpu-core "^4.6.0"
+ "@babel/helper-create-regexp-features-plugin" "^7.8.3"
+ "@babel/helper-plugin-utils" "^7.8.3"
-"@babel/plugin-transform-duplicate-keys@^7.5.0":
- version "7.5.0"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.5.0.tgz#c5dbf5106bf84cdf691222c0974c12b1df931853"
- integrity sha512-igcziksHizyQPlX9gfSjHkE2wmoCH3evvD2qR5w29/Dk0SMKE/eOI7f1HhBdNhR/zxJDqrgpoDTq5YSLH/XMsQ==
+"@babel/plugin-transform-duplicate-keys@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.8.3.tgz#8d12df309aa537f272899c565ea1768e286e21f1"
+ integrity sha512-s8dHiBUbcbSgipS4SMFuWGqCvyge5V2ZeAWzR6INTVC3Ltjig/Vw1G2Gztv0vU/hRG9X8IvKvYdoksnUfgXOEQ==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/helper-plugin-utils" "^7.8.3"
-"@babel/plugin-transform-exponentiation-operator@^7.2.0":
- version "7.2.0"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.2.0.tgz#a63868289e5b4007f7054d46491af51435766008"
- integrity sha512-umh4hR6N7mu4Elq9GG8TOu9M0bakvlsREEC+ialrQN6ABS4oDQ69qJv1VtR3uxlKMCQMCvzk7vr17RHKcjx68A==
+"@babel/plugin-transform-exponentiation-operator@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.8.3.tgz#581a6d7f56970e06bf51560cd64f5e947b70d7b7"
+ integrity sha512-zwIpuIymb3ACcInbksHaNcR12S++0MDLKkiqXHl3AzpgdKlFNhog+z/K0+TGW+b0w5pgTq4H6IwV/WhxbGYSjQ==
dependencies:
- "@babel/helper-builder-binary-assignment-operator-visitor" "^7.1.0"
- "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/helper-builder-binary-assignment-operator-visitor" "^7.8.3"
+ "@babel/helper-plugin-utils" "^7.8.3"
-"@babel/plugin-transform-for-of@^7.4.4":
- version "7.4.4"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.4.4.tgz#0267fc735e24c808ba173866c6c4d1440fc3c556"
- integrity sha512-9T/5Dlr14Z9TIEXLXkt8T1DU7F24cbhwhMNUziN3hB1AXoZcdzPcTiKGRn/6iOymDqtTKWnr/BtRKN9JwbKtdQ==
+"@babel/plugin-transform-for-of@^7.8.4":
+ version "7.8.4"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.8.4.tgz#6fe8eae5d6875086ee185dd0b098a8513783b47d"
+ integrity sha512-iAXNlOWvcYUYoV8YIxwS7TxGRJcxyl8eQCfT+A5j8sKUzRFvJdcyjp97jL2IghWSRDaL2PU2O2tX8Cu9dTBq5A==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/helper-plugin-utils" "^7.8.3"
-"@babel/plugin-transform-function-name@^7.4.4":
- version "7.4.4"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.4.4.tgz#e1436116abb0610c2259094848754ac5230922ad"
- integrity sha512-iU9pv7U+2jC9ANQkKeNF6DrPy4GBa4NWQtl6dHB4Pb3izX2JOEvDTFarlNsBj/63ZEzNNIAMs3Qw4fNCcSOXJA==
+"@babel/plugin-transform-function-name@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.8.3.tgz#279373cb27322aaad67c2683e776dfc47196ed8b"
+ integrity sha512-rO/OnDS78Eifbjn5Py9v8y0aR+aSYhDhqAwVfsTl0ERuMZyr05L1aFSCJnbv2mmsLkit/4ReeQ9N2BgLnOcPCQ==
dependencies:
- "@babel/helper-function-name" "^7.1.0"
- "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/helper-function-name" "^7.8.3"
+ "@babel/helper-plugin-utils" "^7.8.3"
-"@babel/plugin-transform-literals@^7.2.0":
- version "7.2.0"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.2.0.tgz#690353e81f9267dad4fd8cfd77eafa86aba53ea1"
- integrity sha512-2ThDhm4lI4oV7fVQ6pNNK+sx+c/GM5/SaML0w/r4ZB7sAneD/piDJtwdKlNckXeyGK7wlwg2E2w33C/Hh+VFCg==
+"@babel/plugin-transform-literals@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.8.3.tgz#aef239823d91994ec7b68e55193525d76dbd5dc1"
+ integrity sha512-3Tqf8JJ/qB7TeldGl+TT55+uQei9JfYaregDcEAyBZ7akutriFrt6C/wLYIer6OYhleVQvH/ntEhjE/xMmy10A==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/helper-plugin-utils" "^7.8.3"
-"@babel/plugin-transform-member-expression-literals@^7.2.0":
- version "7.2.0"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.2.0.tgz#fa10aa5c58a2cb6afcf2c9ffa8cb4d8b3d489a2d"
- integrity sha512-HiU3zKkSU6scTidmnFJ0bMX8hz5ixC93b4MHMiYebmk2lUVNGOboPsqQvx5LzooihijUoLR/v7Nc1rbBtnc7FA==
+"@babel/plugin-transform-member-expression-literals@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.8.3.tgz#963fed4b620ac7cbf6029c755424029fa3a40410"
+ integrity sha512-3Wk2EXhnw+rP+IDkK6BdtPKsUE5IeZ6QOGrPYvw52NwBStw9V1ZVzxgK6fSKSxqUvH9eQPR3tm3cOq79HlsKYA==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/helper-plugin-utils" "^7.8.3"
-"@babel/plugin-transform-modules-amd@^7.5.0":
- version "7.5.0"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.5.0.tgz#ef00435d46da0a5961aa728a1d2ecff063e4fb91"
- integrity sha512-n20UsQMKnWrltocZZm24cRURxQnWIvsABPJlw/fvoy9c6AgHZzoelAIzajDHAQrDpuKFFPPcFGd7ChsYuIUMpg==
+"@babel/plugin-transform-modules-amd@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.8.3.tgz#65606d44616b50225e76f5578f33c568a0b876a5"
+ integrity sha512-MadJiU3rLKclzT5kBH4yxdry96odTUwuqrZM+GllFI/VhxfPz+k9MshJM+MwhfkCdxxclSbSBbUGciBngR+kEQ==
dependencies:
- "@babel/helper-module-transforms" "^7.1.0"
- "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/helper-module-transforms" "^7.8.3"
+ "@babel/helper-plugin-utils" "^7.8.3"
babel-plugin-dynamic-import-node "^2.3.0"
-"@babel/plugin-transform-modules-commonjs@^7.2.0", "@babel/plugin-transform-modules-commonjs@^7.5.0", "@babel/plugin-transform-modules-commonjs@^7.6.0":
- version "7.6.0"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.6.0.tgz#39dfe957de4420445f1fcf88b68a2e4aa4515486"
- integrity sha512-Ma93Ix95PNSEngqomy5LSBMAQvYKVe3dy+JlVJSHEXZR5ASL9lQBedMiCyVtmTLraIDVRE3ZjTZvmXXD2Ozw3g==
+"@babel/plugin-transform-modules-commonjs@^7.2.0", "@babel/plugin-transform-modules-commonjs@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.8.3.tgz#df251706ec331bd058a34bdd72613915f82928a5"
+ integrity sha512-JpdMEfA15HZ/1gNuB9XEDlZM1h/gF/YOH7zaZzQu2xCFRfwc01NXBMHHSTT6hRjlXJJs5x/bfODM3LiCk94Sxg==
dependencies:
- "@babel/helper-module-transforms" "^7.4.4"
- "@babel/helper-plugin-utils" "^7.0.0"
- "@babel/helper-simple-access" "^7.1.0"
+ "@babel/helper-module-transforms" "^7.8.3"
+ "@babel/helper-plugin-utils" "^7.8.3"
+ "@babel/helper-simple-access" "^7.8.3"
babel-plugin-dynamic-import-node "^2.3.0"
-"@babel/plugin-transform-modules-systemjs@^7.5.0":
- version "7.5.0"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.5.0.tgz#e75266a13ef94202db2a0620977756f51d52d249"
- integrity sha512-Q2m56tyoQWmuNGxEtUyeEkm6qJYFqs4c+XyXH5RAuYxObRNz9Zgj/1g2GMnjYp2EUyEy7YTrxliGCXzecl/vJg==
+"@babel/plugin-transform-modules-systemjs@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.8.3.tgz#d8bbf222c1dbe3661f440f2f00c16e9bb7d0d420"
+ integrity sha512-8cESMCJjmArMYqa9AO5YuMEkE4ds28tMpZcGZB/jl3n0ZzlsxOAi3mC+SKypTfT8gjMupCnd3YiXCkMjj2jfOg==
dependencies:
- "@babel/helper-hoist-variables" "^7.4.4"
- "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/helper-hoist-variables" "^7.8.3"
+ "@babel/helper-module-transforms" "^7.8.3"
+ "@babel/helper-plugin-utils" "^7.8.3"
babel-plugin-dynamic-import-node "^2.3.0"
-"@babel/plugin-transform-modules-umd@^7.2.0":
- version "7.2.0"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.2.0.tgz#7678ce75169f0877b8eb2235538c074268dd01ae"
- integrity sha512-BV3bw6MyUH1iIsGhXlOK6sXhmSarZjtJ/vMiD9dNmpY8QXFFQTj+6v92pcfy1iqa8DeAfJFwoxcrS/TUZda6sw==
+"@babel/plugin-transform-modules-umd@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.8.3.tgz#592d578ce06c52f5b98b02f913d653ffe972661a"
+ integrity sha512-evhTyWhbwbI3/U6dZAnx/ePoV7H6OUG+OjiJFHmhr9FPn0VShjwC2kdxqIuQ/+1P50TMrneGzMeyMTFOjKSnAw==
dependencies:
- "@babel/helper-module-transforms" "^7.1.0"
- "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/helper-module-transforms" "^7.8.3"
+ "@babel/helper-plugin-utils" "^7.8.3"
-"@babel/plugin-transform-named-capturing-groups-regex@^7.6.2":
- version "7.6.2"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.6.2.tgz#c1ca0bb84b94f385ca302c3932e870b0fb0e522b"
- integrity sha512-xBdB+XOs+lgbZc2/4F5BVDVcDNS4tcSKQc96KmlqLEAwz6tpYPEvPdmDfvVG0Ssn8lAhronaRs6Z6KSexIpK5g==
+"@babel/plugin-transform-named-capturing-groups-regex@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.8.3.tgz#a2a72bffa202ac0e2d0506afd0939c5ecbc48c6c"
+ integrity sha512-f+tF/8UVPU86TrCb06JoPWIdDpTNSGGcAtaD9mLP0aYGA0OS0j7j7DHJR0GTFrUZPUU6loZhbsVZgTh0N+Qdnw==
dependencies:
- regexpu-core "^4.6.0"
+ "@babel/helper-create-regexp-features-plugin" "^7.8.3"
-"@babel/plugin-transform-new-target@^7.4.4":
- version "7.4.4"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.4.4.tgz#18d120438b0cc9ee95a47f2c72bc9768fbed60a5"
- integrity sha512-r1z3T2DNGQwwe2vPGZMBNjioT2scgWzK9BCnDEh+46z8EEwXBq24uRzd65I7pjtugzPSj921aM15RpESgzsSuA==
+"@babel/plugin-transform-new-target@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.8.3.tgz#60cc2ae66d85c95ab540eb34babb6434d4c70c43"
+ integrity sha512-QuSGysibQpyxexRyui2vca+Cmbljo8bcRckgzYV4kRIsHpVeyeC3JDO63pY+xFZ6bWOBn7pfKZTqV4o/ix9sFw==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/helper-plugin-utils" "^7.8.3"
-"@babel/plugin-transform-object-super@^7.5.5":
- version "7.5.5"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.5.5.tgz#c70021df834073c65eb613b8679cc4a381d1a9f9"
- integrity sha512-un1zJQAhSosGFBduPgN/YFNvWVpRuHKU7IHBglLoLZsGmruJPOo6pbInneflUdmq7YvSVqhpPs5zdBvLnteltQ==
+"@babel/plugin-transform-object-super@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.8.3.tgz#ebb6a1e7a86ffa96858bd6ac0102d65944261725"
+ integrity sha512-57FXk+gItG/GejofIyLIgBKTas4+pEU47IXKDBWFTxdPd7F80H8zybyAY7UoblVfBhBGs2EKM+bJUu2+iUYPDQ==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
- "@babel/helper-replace-supers" "^7.5.5"
+ "@babel/helper-plugin-utils" "^7.8.3"
+ "@babel/helper-replace-supers" "^7.8.3"
-"@babel/plugin-transform-parameters@^7.4.4":
- version "7.4.4"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.4.4.tgz#7556cf03f318bd2719fe4c922d2d808be5571e16"
- integrity sha512-oMh5DUO1V63nZcu/ZVLQFqiihBGo4OpxJxR1otF50GMeCLiRx5nUdtokd+u9SuVJrvvuIh9OosRFPP4pIPnwmw==
+"@babel/plugin-transform-parameters@^7.8.4":
+ version "7.8.4"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.8.4.tgz#1d5155de0b65db0ccf9971165745d3bb990d77d3"
+ integrity sha512-IsS3oTxeTsZlE5KqzTbcC2sV0P9pXdec53SU+Yxv7o/6dvGM5AkTotQKhoSffhNgZ/dftsSiOoxy7evCYJXzVA==
dependencies:
- "@babel/helper-call-delegate" "^7.4.4"
- "@babel/helper-get-function-arity" "^7.0.0"
- "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/helper-call-delegate" "^7.8.3"
+ "@babel/helper-get-function-arity" "^7.8.3"
+ "@babel/helper-plugin-utils" "^7.8.3"
-"@babel/plugin-transform-property-literals@^7.2.0":
- version "7.2.0"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.2.0.tgz#03e33f653f5b25c4eb572c98b9485055b389e905"
- integrity sha512-9q7Dbk4RhgcLp8ebduOpCbtjh7C0itoLYHXd9ueASKAG/is5PQtMR5VJGka9NKqGhYEGn5ITahd4h9QeBMylWQ==
+"@babel/plugin-transform-property-literals@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.8.3.tgz#33194300d8539c1ed28c62ad5087ba3807b98263"
+ integrity sha512-uGiiXAZMqEoQhRWMK17VospMZh5sXWg+dlh2soffpkAl96KAm+WZuJfa6lcELotSRmooLqg0MWdH6UUq85nmmg==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/helper-plugin-utils" "^7.8.3"
-"@babel/plugin-transform-regenerator@^7.4.5":
- version "7.4.5"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.4.5.tgz#629dc82512c55cee01341fb27bdfcb210354680f"
- integrity sha512-gBKRh5qAaCWntnd09S8QC7r3auLCqq5DI6O0DlfoyDjslSBVqBibrMdsqO+Uhmx3+BlOmE/Kw1HFxmGbv0N9dA==
+"@babel/plugin-transform-regenerator@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.8.3.tgz#b31031e8059c07495bf23614c97f3d9698bc6ec8"
+ integrity sha512-qt/kcur/FxrQrzFR432FGZznkVAjiyFtCOANjkAKwCbt465L6ZCiUQh2oMYGU3Wo8LRFJxNDFwWn106S5wVUNA==
dependencies:
regenerator-transform "^0.14.0"
-"@babel/plugin-transform-reserved-words@^7.2.0":
- version "7.2.0"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.2.0.tgz#4792af87c998a49367597d07fedf02636d2e1634"
- integrity sha512-fz43fqW8E1tAB3DKF19/vxbpib1fuyCwSPE418ge5ZxILnBhWyhtPgz8eh1RCGGJlwvksHkyxMxh0eenFi+kFw==
- dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
-
-"@babel/plugin-transform-shorthand-properties@^7.2.0":
- version "7.2.0"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.2.0.tgz#6333aee2f8d6ee7e28615457298934a3b46198f0"
- integrity sha512-QP4eUM83ha9zmYtpbnyjTLAGKQritA5XW/iG9cjtuOI8s1RuL/3V6a3DeSHfKutJQ+ayUfeZJPcnCYEQzaPQqg==
+"@babel/plugin-transform-reserved-words@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.8.3.tgz#9a0635ac4e665d29b162837dd3cc50745dfdf1f5"
+ integrity sha512-mwMxcycN3omKFDjDQUl+8zyMsBfjRFr0Zn/64I41pmjv4NJuqcYlEtezwYtw9TFd9WR1vN5kiM+O0gMZzO6L0A==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/helper-plugin-utils" "^7.8.3"
-"@babel/plugin-transform-spread@^7.6.2":
- version "7.6.2"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.6.2.tgz#fc77cf798b24b10c46e1b51b1b88c2bf661bb8dd"
- integrity sha512-DpSvPFryKdK1x+EDJYCy28nmAaIMdxmhot62jAXF/o99iA33Zj2Lmcp3vDmz+MUh0LNYVPvfj5iC3feb3/+PFg==
+"@babel/plugin-transform-shorthand-properties@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.8.3.tgz#28545216e023a832d4d3a1185ed492bcfeac08c8"
+ integrity sha512-I9DI6Odg0JJwxCHzbzW08ggMdCezoWcuQRz3ptdudgwaHxTjxw5HgdFJmZIkIMlRymL6YiZcped4TTCB0JcC8w==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/helper-plugin-utils" "^7.8.3"
-"@babel/plugin-transform-sticky-regex@^7.2.0":
- version "7.2.0"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.2.0.tgz#a1e454b5995560a9c1e0d537dfc15061fd2687e1"
- integrity sha512-KKYCoGaRAf+ckH8gEL3JHUaFVyNHKe3ASNsZ+AlktgHevvxGigoIttrEJb8iKN03Q7Eazlv1s6cx2B2cQ3Jabw==
+"@babel/plugin-transform-spread@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.8.3.tgz#9c8ffe8170fdfb88b114ecb920b82fb6e95fe5e8"
+ integrity sha512-CkuTU9mbmAoFOI1tklFWYYbzX5qCIZVXPVy0jpXgGwkplCndQAa58s2jr66fTeQnA64bDox0HL4U56CFYoyC7g==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
- "@babel/helper-regex" "^7.0.0"
+ "@babel/helper-plugin-utils" "^7.8.3"
-"@babel/plugin-transform-template-literals@^7.4.4":
- version "7.4.4"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.4.4.tgz#9d28fea7bbce637fb7612a0750989d8321d4bcb0"
- integrity sha512-mQrEC4TWkhLN0z8ygIvEL9ZEToPhG5K7KDW3pzGqOfIGZ28Jb0POUkeWcoz8HnHvhFy6dwAT1j8OzqN8s804+g==
+"@babel/plugin-transform-sticky-regex@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.8.3.tgz#be7a1290f81dae767475452199e1f76d6175b100"
+ integrity sha512-9Spq0vGCD5Bb4Z/ZXXSK5wbbLFMG085qd2vhL1JYu1WcQ5bXqZBAYRzU1d+p79GcHs2szYv5pVQCX13QgldaWw==
dependencies:
- "@babel/helper-annotate-as-pure" "^7.0.0"
- "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/helper-plugin-utils" "^7.8.3"
+ "@babel/helper-regex" "^7.8.3"
-"@babel/plugin-transform-typeof-symbol@^7.2.0":
- version "7.2.0"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.2.0.tgz#117d2bcec2fbf64b4b59d1f9819894682d29f2b2"
- integrity sha512-2LNhETWYxiYysBtrBTqL8+La0jIoQQnIScUJc74OYvUGRmkskNY4EzLCnjHBzdmb38wqtTaixpo1NctEcvMDZw==
+"@babel/plugin-transform-template-literals@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.8.3.tgz#7bfa4732b455ea6a43130adc0ba767ec0e402a80"
+ integrity sha512-820QBtykIQOLFT8NZOcTRJ1UNuztIELe4p9DCgvj4NK+PwluSJ49we7s9FB1HIGNIYT7wFUJ0ar2QpCDj0escQ==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/helper-annotate-as-pure" "^7.8.3"
+ "@babel/helper-plugin-utils" "^7.8.3"
-"@babel/plugin-transform-unicode-regex@^7.6.2":
- version "7.6.2"
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.6.2.tgz#b692aad888a7e8d8b1b214be6b9dc03d5031f698"
- integrity sha512-orZI6cWlR3nk2YmYdb0gImrgCUwb5cBUwjf6Ks6dvNVvXERkwtJWOQaEOjPiu0Gu1Tq6Yq/hruCZZOOi9F34Dw==
+"@babel/plugin-transform-typeof-symbol@^7.8.4":
+ version "7.8.4"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.8.4.tgz#ede4062315ce0aaf8a657a920858f1a2f35fc412"
+ integrity sha512-2QKyfjGdvuNfHsb7qnBBlKclbD4CfshH2KvDabiijLMGXPHJXGxtDzwIF7bQP+T0ysw8fYTtxPafgfs/c1Lrqg==
dependencies:
- "@babel/helper-plugin-utils" "^7.0.0"
- "@babel/helper-regex" "^7.4.4"
- regexpu-core "^4.6.0"
+ "@babel/helper-plugin-utils" "^7.8.3"
-"@babel/preset-env@^7.6.2":
- version "7.6.2"
- resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.6.2.tgz#abbb3ed785c7fe4220d4c82a53621d71fc0c75d3"
- integrity sha512-Ru7+mfzy9M1/YTEtlDS8CD45jd22ngb9tXnn64DvQK3ooyqSw9K4K9DUWmYknTTVk4TqygL9dqCrZgm1HMea/Q==
- dependencies:
- "@babel/helper-module-imports" "^7.0.0"
- "@babel/helper-plugin-utils" "^7.0.0"
- "@babel/plugin-proposal-async-generator-functions" "^7.2.0"
- "@babel/plugin-proposal-dynamic-import" "^7.5.0"
- "@babel/plugin-proposal-json-strings" "^7.2.0"
- "@babel/plugin-proposal-object-rest-spread" "^7.6.2"
- "@babel/plugin-proposal-optional-catch-binding" "^7.2.0"
- "@babel/plugin-proposal-unicode-property-regex" "^7.6.2"
- "@babel/plugin-syntax-async-generators" "^7.2.0"
- "@babel/plugin-syntax-dynamic-import" "^7.2.0"
- "@babel/plugin-syntax-json-strings" "^7.2.0"
- "@babel/plugin-syntax-object-rest-spread" "^7.2.0"
- "@babel/plugin-syntax-optional-catch-binding" "^7.2.0"
- "@babel/plugin-transform-arrow-functions" "^7.2.0"
- "@babel/plugin-transform-async-to-generator" "^7.5.0"
- "@babel/plugin-transform-block-scoped-functions" "^7.2.0"
- "@babel/plugin-transform-block-scoping" "^7.6.2"
- "@babel/plugin-transform-classes" "^7.5.5"
- "@babel/plugin-transform-computed-properties" "^7.2.0"
- "@babel/plugin-transform-destructuring" "^7.6.0"
- "@babel/plugin-transform-dotall-regex" "^7.6.2"
- "@babel/plugin-transform-duplicate-keys" "^7.5.0"
- "@babel/plugin-transform-exponentiation-operator" "^7.2.0"
- "@babel/plugin-transform-for-of" "^7.4.4"
- "@babel/plugin-transform-function-name" "^7.4.4"
- "@babel/plugin-transform-literals" "^7.2.0"
- "@babel/plugin-transform-member-expression-literals" "^7.2.0"
- "@babel/plugin-transform-modules-amd" "^7.5.0"
- "@babel/plugin-transform-modules-commonjs" "^7.6.0"
- "@babel/plugin-transform-modules-systemjs" "^7.5.0"
- "@babel/plugin-transform-modules-umd" "^7.2.0"
- "@babel/plugin-transform-named-capturing-groups-regex" "^7.6.2"
- "@babel/plugin-transform-new-target" "^7.4.4"
- "@babel/plugin-transform-object-super" "^7.5.5"
- "@babel/plugin-transform-parameters" "^7.4.4"
- "@babel/plugin-transform-property-literals" "^7.2.0"
- "@babel/plugin-transform-regenerator" "^7.4.5"
- "@babel/plugin-transform-reserved-words" "^7.2.0"
- "@babel/plugin-transform-shorthand-properties" "^7.2.0"
- "@babel/plugin-transform-spread" "^7.6.2"
- "@babel/plugin-transform-sticky-regex" "^7.2.0"
- "@babel/plugin-transform-template-literals" "^7.4.4"
- "@babel/plugin-transform-typeof-symbol" "^7.2.0"
- "@babel/plugin-transform-unicode-regex" "^7.6.2"
- "@babel/types" "^7.6.0"
- browserslist "^4.6.0"
- core-js-compat "^3.1.1"
+"@babel/plugin-transform-unicode-regex@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.8.3.tgz#0cef36e3ba73e5c57273effb182f46b91a1ecaad"
+ integrity sha512-+ufgJjYdmWfSQ+6NS9VGUR2ns8cjJjYbrbi11mZBTaWm+Fui/ncTLFF28Ei1okavY+xkojGr1eJxNsWYeA5aZw==
+ dependencies:
+ "@babel/helper-create-regexp-features-plugin" "^7.8.3"
+ "@babel/helper-plugin-utils" "^7.8.3"
+
+"@babel/preset-env@^7.8.4":
+ version "7.8.4"
+ resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.8.4.tgz#9dac6df5f423015d3d49b6e9e5fa3413e4a72c4e"
+ integrity sha512-HihCgpr45AnSOHRbS5cWNTINs0TwaR8BS8xIIH+QwiW8cKL0llV91njQMpeMReEPVs+1Ao0x3RLEBLtt1hOq4w==
+ dependencies:
+ "@babel/compat-data" "^7.8.4"
+ "@babel/helper-compilation-targets" "^7.8.4"
+ "@babel/helper-module-imports" "^7.8.3"
+ "@babel/helper-plugin-utils" "^7.8.3"
+ "@babel/plugin-proposal-async-generator-functions" "^7.8.3"
+ "@babel/plugin-proposal-dynamic-import" "^7.8.3"
+ "@babel/plugin-proposal-json-strings" "^7.8.3"
+ "@babel/plugin-proposal-nullish-coalescing-operator" "^7.8.3"
+ "@babel/plugin-proposal-object-rest-spread" "^7.8.3"
+ "@babel/plugin-proposal-optional-catch-binding" "^7.8.3"
+ "@babel/plugin-proposal-optional-chaining" "^7.8.3"
+ "@babel/plugin-proposal-unicode-property-regex" "^7.8.3"
+ "@babel/plugin-syntax-async-generators" "^7.8.0"
+ "@babel/plugin-syntax-dynamic-import" "^7.8.0"
+ "@babel/plugin-syntax-json-strings" "^7.8.0"
+ "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.0"
+ "@babel/plugin-syntax-object-rest-spread" "^7.8.0"
+ "@babel/plugin-syntax-optional-catch-binding" "^7.8.0"
+ "@babel/plugin-syntax-optional-chaining" "^7.8.0"
+ "@babel/plugin-syntax-top-level-await" "^7.8.3"
+ "@babel/plugin-transform-arrow-functions" "^7.8.3"
+ "@babel/plugin-transform-async-to-generator" "^7.8.3"
+ "@babel/plugin-transform-block-scoped-functions" "^7.8.3"
+ "@babel/plugin-transform-block-scoping" "^7.8.3"
+ "@babel/plugin-transform-classes" "^7.8.3"
+ "@babel/plugin-transform-computed-properties" "^7.8.3"
+ "@babel/plugin-transform-destructuring" "^7.8.3"
+ "@babel/plugin-transform-dotall-regex" "^7.8.3"
+ "@babel/plugin-transform-duplicate-keys" "^7.8.3"
+ "@babel/plugin-transform-exponentiation-operator" "^7.8.3"
+ "@babel/plugin-transform-for-of" "^7.8.4"
+ "@babel/plugin-transform-function-name" "^7.8.3"
+ "@babel/plugin-transform-literals" "^7.8.3"
+ "@babel/plugin-transform-member-expression-literals" "^7.8.3"
+ "@babel/plugin-transform-modules-amd" "^7.8.3"
+ "@babel/plugin-transform-modules-commonjs" "^7.8.3"
+ "@babel/plugin-transform-modules-systemjs" "^7.8.3"
+ "@babel/plugin-transform-modules-umd" "^7.8.3"
+ "@babel/plugin-transform-named-capturing-groups-regex" "^7.8.3"
+ "@babel/plugin-transform-new-target" "^7.8.3"
+ "@babel/plugin-transform-object-super" "^7.8.3"
+ "@babel/plugin-transform-parameters" "^7.8.4"
+ "@babel/plugin-transform-property-literals" "^7.8.3"
+ "@babel/plugin-transform-regenerator" "^7.8.3"
+ "@babel/plugin-transform-reserved-words" "^7.8.3"
+ "@babel/plugin-transform-shorthand-properties" "^7.8.3"
+ "@babel/plugin-transform-spread" "^7.8.3"
+ "@babel/plugin-transform-sticky-regex" "^7.8.3"
+ "@babel/plugin-transform-template-literals" "^7.8.3"
+ "@babel/plugin-transform-typeof-symbol" "^7.8.4"
+ "@babel/plugin-transform-unicode-regex" "^7.8.3"
+ "@babel/types" "^7.8.3"
+ browserslist "^4.8.5"
+ core-js-compat "^3.6.2"
invariant "^2.2.2"
- js-levenshtein "^1.1.3"
+ levenary "^1.1.1"
semver "^5.5.0"
"@babel/standalone@^7.0.0":
@@ -659,31 +715,31 @@
resolved "https://registry.yarnpkg.com/@babel/standalone/-/standalone-7.8.3.tgz#0674730a8c5fbb9352de5342bf0c0c040d658380"
integrity sha512-WRYZUuGBYpmfUL50f2h3Cvw7s1F4wTVT5iIeT01tHo+LyB9QwrTJ6GF5J6YrtJHQqxMxt8zEl1d7I0Uhyz9NyQ==
-"@babel/template@^7.1.0", "@babel/template@^7.4.0", "@babel/template@^7.4.4", "@babel/template@^7.6.0":
- version "7.6.0"
- resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.6.0.tgz#7f0159c7f5012230dad64cca42ec9bdb5c9536e6"
- integrity sha512-5AEH2EXD8euCk446b7edmgFdub/qfH1SN6Nii3+fyXP807QRx9Q73A2N5hNwRRslC2H9sNzaFhsPubkS4L8oNQ==
+"@babel/template@^7.4.0", "@babel/template@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.8.3.tgz#e02ad04fe262a657809327f578056ca15fd4d1b8"
+ integrity sha512-04m87AcQgAFdvuoyiQ2kgELr2tV8B4fP/xJAVUL3Yb3bkNdMedD3d0rlSQr3PegP0cms3eHjl1F7PWlvWbU8FQ==
dependencies:
- "@babel/code-frame" "^7.0.0"
- "@babel/parser" "^7.6.0"
- "@babel/types" "^7.6.0"
-
-"@babel/traverse@^7.0.0", "@babel/traverse@^7.1.0", "@babel/traverse@^7.4.3", "@babel/traverse@^7.4.4", "@babel/traverse@^7.5.5", "@babel/traverse@^7.6.2":
- version "7.6.2"
- resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.6.2.tgz#b0e2bfd401d339ce0e6c05690206d1e11502ce2c"
- integrity sha512-8fRE76xNwNttVEF2TwxJDGBLWthUkHWSldmfuBzVRmEDWOtu4XdINTgN7TDWzuLg4bbeIMLvfMFD9we5YcWkRQ==
- dependencies:
- "@babel/code-frame" "^7.5.5"
- "@babel/generator" "^7.6.2"
- "@babel/helper-function-name" "^7.1.0"
- "@babel/helper-split-export-declaration" "^7.4.4"
- "@babel/parser" "^7.6.2"
- "@babel/types" "^7.6.0"
+ "@babel/code-frame" "^7.8.3"
+ "@babel/parser" "^7.8.3"
+ "@babel/types" "^7.8.3"
+
+"@babel/traverse@^7.0.0", "@babel/traverse@^7.1.0", "@babel/traverse@^7.4.3", "@babel/traverse@^7.8.3", "@babel/traverse@^7.8.4":
+ version "7.8.4"
+ resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.8.4.tgz#f0845822365f9d5b0e312ed3959d3f827f869e3c"
+ integrity sha512-NGLJPZwnVEyBPLI+bl9y9aSnxMhsKz42so7ApAv9D+b4vAFPpY013FTS9LdKxcABoIYFU52HcYga1pPlx454mg==
+ dependencies:
+ "@babel/code-frame" "^7.8.3"
+ "@babel/generator" "^7.8.4"
+ "@babel/helper-function-name" "^7.8.3"
+ "@babel/helper-split-export-declaration" "^7.8.3"
+ "@babel/parser" "^7.8.4"
+ "@babel/types" "^7.8.3"
debug "^4.1.0"
globals "^11.1.0"
lodash "^4.17.13"
-"@babel/types@^7.0.0", "@babel/types@^7.0.0-beta.49", "@babel/types@^7.2.0", "@babel/types@^7.3.0", "@babel/types@^7.4.0", "@babel/types@^7.4.4", "@babel/types@^7.5.5", "@babel/types@^7.6.0", "@babel/types@^7.8.3":
+"@babel/types@^7.0.0", "@babel/types@^7.0.0-beta.49", "@babel/types@^7.3.0", "@babel/types@^7.4.0", "@babel/types@^7.8.3":
version "7.8.3"
resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.8.3.tgz#5a383dffa5416db1b73dedffd311ffd0788fb31c"
integrity sha512-jBD+G8+LWpMBBWvVcdr4QysjUE4mU/syrhN17o1u3gx0/WzJB1kwiVZAXRtWbsIPOwW8pF/YJV5+nmetPzepXg==
@@ -740,15 +796,15 @@
dependencies:
vue-eslint-parser "^7.0.0"
-"@gitlab/svgs@^1.96.0":
- version "1.96.0"
- resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-1.96.0.tgz#1d32730389e94358dc245e8336912523446d1269"
- integrity sha512-mhg6kndxDhwjWChKhs5utO6PowlOyFdaCXUrkkxxe2H3cd8DYa40QOEcJeUrSIhkmgIMVesUawesx5tt4Bnnnw==
+"@gitlab/svgs@^1.113.0":
+ version "1.113.0"
+ resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-1.113.0.tgz#0ea9cb3122a479f3ed4bb22943d68a9a38f69948"
+ integrity sha512-upT+sKEnnwZDU7vzI5VSyg2l6IOd/0Icm/MLae6po/nOGbf2vftkUVfbalzISAmk9eYTuJQ1sGmRWdKXPGy1cw==
-"@gitlab/ui@^9.8.0":
- version "9.8.0"
- resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-9.8.0.tgz#b1a0b5f1f6ac9fdb19b64d74f0f729e3ec182495"
- integrity sha512-0VjSTjCCtevdoeByxf5o/OimzV3zt1MMH5DlZSqakML38uoOM0WpgXI/4xAipzfYwiKUW+IWbuyZGJ3ucaJnhQ==
+"@gitlab/ui@^9.29.0":
+ version "9.29.0"
+ resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-9.29.0.tgz#2308aec1d3d837392eaaf9d441aecec4b695ed73"
+ integrity sha512-zIY/aChWaU4UBlAjZ95PpBxgUd9VrGiXs9ujVU6Gbi+ZsHbpDvPlBHsHEG9isnUVBE2AD4GPqMLO8K9i+B9O4A==
dependencies:
"@babel/standalone" "^7.0.0"
"@gitlab/vue-toasted" "^1.3.0"
@@ -763,6 +819,7 @@
url-search-params-polyfill "^5.0.0"
vue "^2.6.10"
vue-loader "^15.4.2"
+ vue-runtime-helpers "^1.1.2"
"@gitlab/visual-review-tools@1.5.1":
version "1.5.1"
@@ -994,10 +1051,10 @@
"@sentry/types" "5.10.0"
tslib "^1.9.3"
-"@sourcegraph/code-host-integration@0.0.30":
- version "0.0.30"
- resolved "https://registry.yarnpkg.com/@sourcegraph/code-host-integration/-/code-host-integration-0.0.30.tgz#85f52eca0f8fd5efb1526a7ec6a09d261ab43bda"
- integrity sha512-5zBN0/oa1c0lY0+MPb2kEs9NqefvOg0NevDQXqQpLHDOx+TtMzC2uEOQiBnyHm2bWcCl/RFatjvNlEV+reGgnA==
+"@sourcegraph/code-host-integration@0.0.31":
+ version "0.0.31"
+ resolved "https://registry.yarnpkg.com/@sourcegraph/code-host-integration/-/code-host-integration-0.0.31.tgz#c4d6c7adaaf937e4b8a143c206020e110ba73e25"
+ integrity sha512-b0WQ1CKlEx9S+IHRs1YNRO7CcwW06ulQU6D+W9cQlfjJu+qQVTAvkyv1xjySkfrCNK8IcfVd8WZzWIhP16VVfw==
"@types/anymatch@*":
version "1.3.0"
@@ -1693,7 +1750,7 @@ array-union@^1.0.1, array-union@^1.0.2:
dependencies:
array-uniq "^1.0.1"
-array-uniq@^1.0.1, array-uniq@^1.0.2:
+array-uniq@^1.0.1:
version "1.0.3"
resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6"
integrity sha1-r2rId6Jcx/dOBYiUdThY39sk/bY=
@@ -1774,11 +1831,6 @@ async-limiter@~1.0.0:
resolved "https://registry.yarnpkg.com/async-limiter/-/async-limiter-1.0.0.tgz#78faed8c3d074ab81f22b4e985d79e8738f720f8"
integrity sha512-jp/uFnooOiO+L211eZOoSyzpOITMXx1rBITauYykG3BRYPu8h0UcxsPNB04RR5vo4Tyz3+ay17tR6JVf9qzYWg==
-async@^1.5.2:
- version "1.5.2"
- resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a"
- integrity sha1-7GphrlZIDAw8skHJVhjiCJL5Zyo=
-
async@^2.6.2:
version "2.6.3"
resolved "https://registry.yarnpkg.com/async/-/async-2.6.3.tgz#d72625e2344a3656e3a3ad4fa749fa83299d82ff"
@@ -1809,19 +1861,19 @@ autoprefixer@^9.5.1:
postcss "^7.0.17"
postcss-value-parser "^4.0.0"
-autosize@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/autosize/-/autosize-4.0.0.tgz#7a0599b1ba84d73bd7589b0d9da3870152c69237"
- integrity sha1-egWZsbqE1zvXWJsNnaOHAVLGkjc=
+autosize@^4.0.2:
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/autosize/-/autosize-4.0.2.tgz#073cfd07c8bf45da4b9fd153437f5bafbba1e4c9"
+ integrity sha512-jnSyH2d+qdfPGpWlcuhGiHmqBJ6g3X+8T+iRwFrHPLVcdoGJE/x6Qicm6aDHfTsbgZKxyV8UU/YB2p4cjKDRRA==
-aws-sdk@^2.526.0:
- version "2.526.0"
- resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.526.0.tgz#e0f899be59edb7d50eb8cca7978bcd401a5d48c2"
- integrity sha512-ZZqf8AnD9A8ZJd/4oU711R8taxm8sV7wcAOvT0HhrZxv8zASAzoz2lpZ19QAil6uJ52IOkq4ij/zGy7VBXEgPA==
+aws-sdk@^2.637.0:
+ version "2.637.0"
+ resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.637.0.tgz#810e25e53acf2250d35fc74498f9d4492e154217"
+ integrity sha512-e7EYX5rNtQyEaleQylUtLSNKXOmvOwfifQ4bYkfF80mFsVI3DSydczLHXrqPzXoEJaS/GI/9HqVnlQcPs6Q3ew==
dependencies:
buffer "4.9.1"
events "1.1.1"
- ieee754 "1.1.8"
+ ieee754 "1.1.13"
jmespath "0.15.0"
querystring "0.2.0"
sax "1.2.1"
@@ -2227,14 +2279,14 @@ browserify-zlib@^0.2.0:
dependencies:
pako "~1.0.5"
-browserslist@^4.6.0, browserslist@^4.6.2, browserslist@^4.6.3:
- version "4.6.6"
- resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.6.6.tgz#6e4bf467cde520bc9dbdf3747dafa03531cec453"
- integrity sha512-D2Nk3W9JL9Fp/gIcWei8LrERCS+eXu9AM5cfXA8WEZ84lFks+ARnZ0q/R69m2SV3Wjma83QDDPxsNKXUwdIsyA==
+browserslist@^4.6.3, browserslist@^4.8.3, browserslist@^4.8.5:
+ version "4.8.7"
+ resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.8.7.tgz#ec8301ff415e6a42c949d0e66b405eb539c532d0"
+ integrity sha512-gFOnZNYBHrEyUML0xr5NJ6edFaaKbTFX9S9kQHlYfCP0Rit/boRIz4G+Avq6/4haEKJXdGGUnoolx+5MWW2BoA==
dependencies:
- caniuse-lite "^1.0.30000984"
- electron-to-chromium "^1.3.191"
- node-releases "^1.1.25"
+ caniuse-lite "^1.0.30001027"
+ electron-to-chromium "^1.3.349"
+ node-releases "^1.1.49"
bs-logger@0.x:
version "0.2.6"
@@ -2445,10 +2497,10 @@ camelcase@^5.0.0:
resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320"
integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==
-caniuse-lite@^1.0.30000980, caniuse-lite@^1.0.30000984:
- version "1.0.30001025"
- resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001025.tgz#30336a8aca7f98618eb3cf38e35184e13d4e5fe6"
- integrity sha512-SKyFdHYfXUZf5V85+PJgLYyit27q4wgvZuf8QTOk1osbypcROihMBlx9GRar2/pIcKH2r4OehdlBr9x6PXetAQ==
+caniuse-lite@^1.0.30000980, caniuse-lite@^1.0.30001027:
+ version "1.0.30001030"
+ resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001030.tgz#78076c4c6d67d3e41d6eb9399853fb27fe6e44ee"
+ integrity sha512-QGK0W4Ft/Ac+zTjEiRJfwDNATvS3fodDczBXrH42784kcfqcDKpEPfN08N0HQjrAp8He/Jw8QiSS9QRn7XAbUw==
capture-exit@^2.0.0:
version "2.0.0"
@@ -2727,10 +2779,10 @@ code-point-at@^1.0.0:
resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77"
integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=
-codesandbox-api@^0.0.20:
- version "0.0.20"
- resolved "https://registry.yarnpkg.com/codesandbox-api/-/codesandbox-api-0.0.20.tgz#174bcd76c9f31521175c6bceabc37da6b1fbc30b"
- integrity sha512-jhxZzAmjCKBZad8QWMeueiQVFE87igK6F2DBOEVFFJO6jgTXT8qjuzGYepr+B8bjgo/icN7bc/2xmEMBA63s2w==
+codesandbox-api@0.0.23:
+ version "0.0.23"
+ resolved "https://registry.yarnpkg.com/codesandbox-api/-/codesandbox-api-0.0.23.tgz#bf650a21b5f3c2369e03f0c19d10b4e2ba255b4f"
+ integrity sha512-fFGBkIghDkQILh7iHYlpZU5sfWncCDb92FQSFE4rR3VBcTfUsD5VZgpQi+JjZQuwWIdfl4cOhcIFrUYwshUezA==
codesandbox-import-util-types@^1.2.11:
version "1.2.11"
@@ -2967,10 +3019,10 @@ content-type@~1.0.4:
resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b"
integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==
-convert-source-map@^1.1.0, convert-source-map@^1.4.0:
- version "1.6.0"
- resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.6.0.tgz#51b537a8c43e0f04dec1993bffcdd504e758ac20"
- integrity sha512-eFu7XigvxdZ1ETfbgPBohgyQ/Z++C0eEhTor0qRwBw9unw+L0/6V8wkSuGgzdThkiS5lSpdptOQPD8Ak40a+7A==
+convert-source-map@^1.4.0, convert-source-map@^1.7.0:
+ version "1.7.0"
+ resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.7.0.tgz#17a2cb882d7f77d3490585e2ce6c524424a3a442"
+ integrity sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA==
dependencies:
safe-buffer "~5.1.1"
@@ -3031,24 +3083,18 @@ copy-webpack-plugin@^5.0.5:
serialize-javascript "^2.1.2"
webpack-log "^2.0.0"
-core-js-compat@^3.1.1:
- version "3.1.4"
- resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.1.4.tgz#e4d0c40fbd01e65b1d457980fe4112d4358a7408"
- integrity sha512-Z5zbO9f1d0YrJdoaQhphVAnKPimX92D6z8lCGphH89MNRxlL1prI9ExJPqVwP0/kgkQCv8c4GJGT8X16yUncOg==
+core-js-compat@^3.6.2:
+ version "3.6.4"
+ resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.6.4.tgz#938476569ebb6cda80d339bcf199fae4f16fff17"
+ integrity sha512-zAa3IZPvsJ0slViBQ2z+vgyyTuhd3MFn1rBQjZSKVEgB0UMYhUkCj9jJUVPgGTGqWvsBVmfnruXgTcNyTlEiSA==
dependencies:
- browserslist "^4.6.2"
- core-js-pure "3.1.4"
- semver "^6.1.1"
-
-core-js-pure@3.1.4:
- version "3.1.4"
- resolved "https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.1.4.tgz#5fa17dc77002a169a3566cc48dc774d2e13e3769"
- integrity sha512-uJ4Z7iPNwiu1foygbcZYJsJs1jiXrTTCvxfLDXNhI/I+NHbSIEyr548y4fcsCEyWY0XgfAG/qqaunJ1SThHenA==
+ browserslist "^4.8.3"
+ semver "7.0.0"
-core-js@^3.1.3, core-js@^3.2.1:
- version "3.2.1"
- resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.2.1.tgz#cd41f38534da6cc59f7db050fe67307de9868b09"
- integrity sha512-Qa5XSVefSVPRxy2XfUC13WbvqkxhkwB3ve+pgCQveNgYzbM/UxZeu1dcOX/xr4UmfUd+muuvsaxilQzCyUurMw==
+core-js@^3.1.3, core-js@^3.6.4:
+ version "3.6.4"
+ resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.6.4.tgz#440a83536b458114b9cb2ac1580ba377dc470647"
+ integrity sha512-4paDGScNgZP2IXXilaffL9X7968RuvwlkK3xWtZRVqgd8SYNiVKRJvkFd1aqqEuPfN7E68ZHEp9hDj6lHj4Hyw==
core-js@~2.3.0:
version "2.3.0"
@@ -3205,13 +3251,13 @@ css-selector-parser@^1.3:
integrity sha1-XxrUPi2O77/cME/NOaUhZklD4+s=
css-selector-tokenizer@^0.7.0:
- version "0.7.1"
- resolved "https://registry.yarnpkg.com/css-selector-tokenizer/-/css-selector-tokenizer-0.7.1.tgz#a177271a8bca5019172f4f891fc6eed9cbf68d5d"
- integrity sha512-xYL0AMZJ4gFzJQsHUKa5jiWWi2vH77WVNg7JYRyewwj6oPh4yb/y6Y9ZCw9dsj/9UauMhtuxR+ogQd//EdEVNA==
+ version "0.7.2"
+ resolved "https://registry.yarnpkg.com/css-selector-tokenizer/-/css-selector-tokenizer-0.7.2.tgz#11e5e27c9a48d90284f22d45061c303d7a25ad87"
+ integrity sha512-yj856NGuAymN6r8bn8/Jl46pR+OC3eEvAhfGYDUe7YPtTPAYrSSw4oAniZ9Y8T5B92hjhwTBLUen0/vKPxf6pw==
dependencies:
- cssesc "^0.1.0"
- fastparse "^1.1.1"
- regexpu-core "^1.0.0"
+ cssesc "^3.0.0"
+ fastparse "^1.1.2"
+ regexpu-core "^4.6.0"
css@^2.1.0:
version "2.2.4"
@@ -3223,11 +3269,6 @@ css@^2.1.0:
source-map-resolve "^0.5.2"
urix "^0.1.0"
-cssesc@^0.1.0:
- version "0.1.0"
- resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-0.1.0.tgz#c814903e45623371a0477b40109aaafbeeaddbb4"
- integrity sha1-yBSQPkViM3GgR3tAEJqq++6t27Q=
-
cssesc@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-2.0.0.tgz#3b13bd1bb1cb36e1bcb5a4dcd27f54c5dcb35703"
@@ -3657,7 +3698,7 @@ debug@=3.1.0, debug@~3.1.0:
dependencies:
ms "2.0.0"
-debug@^3.1.0, debug@^3.2.5, debug@^3.2.6:
+debug@^3.1.0, debug@^3.1.1, debug@^3.2.5, debug@^3.2.6:
version "3.2.6"
resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b"
integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==
@@ -3900,12 +3941,12 @@ doctrine@^3.0.0:
dependencies:
esutils "^2.0.2"
-document-register-element@1.13.1:
- version "1.13.1"
- resolved "https://registry.yarnpkg.com/document-register-element/-/document-register-element-1.13.1.tgz#dad8cb7be38e04ee3f56842e6cf81af46c1249ba"
- integrity sha512-92ZyLDKg9j4rOll//NNXj25f+8rAzOkYsGJonhugKwXfeqH7bzs8Ucpvey0WzZ2ZzKdrvW9RnUw3UyOZ/uhBFw==
+document-register-element@1.14.3:
+ version "1.14.3"
+ resolved "https://registry.yarnpkg.com/document-register-element/-/document-register-element-1.14.3.tgz#3335d4578df6a1536a34595b91cca36dd5db61d7"
+ integrity sha512-SbJTzoQXLTcYxnpdDNRZXu/gwsGSShemXpvj6Pa6ujRwJFpJ41siil4tk4y+cQXnqylS6mc2Rtxp/PkMzfkqyQ==
dependencies:
- lightercollective "^0.1.0"
+ lightercollective "^0.3.0"
dom-event-types@^1.0.0:
version "1.0.0"
@@ -3922,13 +3963,13 @@ dom-serialize@^2.2.0:
extend "^3.0.0"
void-elements "^2.0.0"
-dom-serializer@0:
- version "0.1.0"
- resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-0.1.0.tgz#073c697546ce0780ce23be4a28e293e40bc30c82"
- integrity sha1-BzxpdUbOB4DOI75KKOKT5AvDDII=
+dom-serializer@0, dom-serializer@^0.2.1:
+ version "0.2.2"
+ resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-0.2.2.tgz#1afb81f533717175d478655debc5e332d9f9bb51"
+ integrity sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g==
dependencies:
- domelementtype "~1.1.1"
- entities "~1.1.1"
+ domelementtype "^2.0.1"
+ entities "^2.0.0"
domain-browser@^1.1.1:
version "1.1.7"
@@ -3940,10 +3981,10 @@ domelementtype@1, domelementtype@^1.3.0:
resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-1.3.0.tgz#b17aed82e8ab59e52dd9c19b1756e0fc187204c2"
integrity sha1-sXrtguirWeUt2cGbF1bg/BhyBMI=
-domelementtype@~1.1.1:
- version "1.1.3"
- resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-1.1.3.tgz#bd28773e2642881aec51544924299c5cd822185b"
- integrity sha1-vSh3PiZCiBrsUVRJJCmcXNgiGFs=
+domelementtype@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.0.1.tgz#1f8bdfe91f5a78063274e803b4bdcedf6e94f94d"
+ integrity sha512-5HOHUDsYZWV8FGWN0Njbr/Rn7f/eWSQi1v7+HsUVwXgn8nWWlL64zKDkS0n8ZmQ3mlWOMuXOnR+7Nx/5tMO5AQ==
domexception@^1.0.1:
version "1.0.1"
@@ -3959,6 +4000,13 @@ domhandler@^2.3.0:
dependencies:
domelementtype "1"
+domhandler@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-3.0.0.tgz#51cd13efca31da95bbb0c5bee3a48300e333b3e9"
+ integrity sha512-eKLdI5v9m67kbXQbJSNn1zjh0SDzvzWVWtX+qEI3eMjZw8daH9k8rlj1FZY9memPwjiskQFbe7vHVVJIAqoEhw==
+ dependencies:
+ domelementtype "^2.0.1"
+
domutils@^1.5.1:
version "1.6.2"
resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.6.2.tgz#1958cc0b4c9426e9ed367fb1c8e854891b0fa3ff"
@@ -3967,6 +4015,15 @@ domutils@^1.5.1:
dom-serializer "0"
domelementtype "1"
+domutils@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/domutils/-/domutils-2.0.0.tgz#15b8278e37bfa8468d157478c58c367718133c08"
+ integrity sha512-n5SelJ1axbO636c2yUtOGia/IcJtVtlhQbFiVDBZHKV5ReJO1ViX7sFEemtuyoAnBxk5meNSYgA8V4s0271efg==
+ dependencies:
+ dom-serializer "^0.2.1"
+ domelementtype "^2.0.1"
+ domhandler "^3.0.0"
+
dot-prop@^4.1.0, dot-prop@^4.1.1:
version "4.2.0"
resolved "https://registry.yarnpkg.com/dot-prop/-/dot-prop-4.2.0.tgz#1f19e0c2e1aa0e32797c49799f2837ac6af69c57"
@@ -4039,10 +4096,10 @@ ejs@^2.6.1:
resolved "https://registry.yarnpkg.com/ejs/-/ejs-2.6.1.tgz#498ec0d495655abc6f23cd61868d926464071aa0"
integrity sha512-0xy4A/twfrRCnkhfk8ErDi5DqdAsAqeGxht4xkCUrsvhhbQNs7E+4jV0CN7+NKIY0aHE72+XvqtBIXzD31ZbXQ==
-electron-to-chromium@^1.3.191:
- version "1.3.199"
- resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.199.tgz#f9a62a74cda77854310a2abffde8b75591ea09a1"
- integrity sha512-gachlDdHSK47s0N2e58GH9HMC6Z4ip0SfmYUa5iEbE50AKaOUXysaJnXMfKj0xB245jWbYcyFSH+th3rqsF8hA==
+electron-to-chromium@^1.3.349:
+ version "1.3.360"
+ resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.360.tgz#1db9cb8d43f4c772546d94ea9be8b677a8ecb483"
+ integrity sha512-RE1pv2sjQiDRRN1nI0fJ0eQHZ9le4oobu16OArnwEUV5ycAU5SNjFyvzjZ1gPUAqBa2Ud1XagtW8j3ZXfHuQHA==
elliptic@^6.0.0:
version "6.4.0"
@@ -4077,6 +4134,11 @@ emojis-list@^2.0.0:
resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-2.1.0.tgz#4daa4d9db00f9819880c79fa457ae5b09a1fd389"
integrity sha1-TapNnbAPmBmIDHn6RXrlsJof04k=
+emojis-list@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-3.0.0.tgz#5570662046ad29e2e916e71aae260abdff4f6a78"
+ integrity sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==
+
encodeurl@~1.0.1, encodeurl@~1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59"
@@ -4157,6 +4219,11 @@ entities@^1.1.1, entities@~1.1.1:
resolved "https://registry.yarnpkg.com/entities/-/entities-1.1.1.tgz#6e5c2d0a5621b5dadaecef80b90edfb5cd7772f0"
integrity sha1-blwtClYhtdra7O+AuQ7ftc13cvA=
+entities@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/entities/-/entities-2.0.0.tgz#68d6084cab1b079767540d80e56a39b423e4abf4"
+ integrity sha512-D9f7V0JSRwIxlRI2mjMqufDrRDnx8p+eEOz7aUM9SuvF8gsBzra0/6tbjl1m8eQHrZlYj6PxqE00hZ1SAIKPLw==
+
errno@^0.1.3, errno@~0.1.7:
version "0.1.7"
resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.7.tgz#4684d71779ad39af177e3f007996f7c67c852618"
@@ -4464,9 +4531,9 @@ estraverse@^4.0.0, estraverse@^4.1.0, estraverse@^4.1.1, estraverse@^4.2.0:
integrity sha1-De4/7TH81GlhjOc0IJn8GvoL2xM=
esutils@^2.0.2:
- version "2.0.2"
- resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.2.tgz#0abf4f1caa5bcb1f7a9d8acc6dea4faaa04bac9b"
- integrity sha1-Cr9PHKpbyx96nYrMbepPqqBLrJs=
+ version "2.0.3"
+ resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64"
+ integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==
etag@~1.8.1:
version "1.8.1"
@@ -4714,7 +4781,7 @@ fast-levenshtein@~2.0.6:
resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917"
integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=
-fastparse@^1.1.1:
+fastparse@^1.1.2:
version "1.1.2"
resolved "https://registry.yarnpkg.com/fastparse/-/fastparse-1.1.2.tgz#91728c5a5942eced8531283c79441ee4122c35a9"
integrity sha512-483XLLxTVIwWK3QTrMGRqUfUpoOs/0hbQrl2oz4J0pAcm3A3bu84wxTFqGqkJzewCLdME38xJLJAxBABfQT8sQ==
@@ -4766,13 +4833,13 @@ file-entry-cache@^5.0.1:
dependencies:
flat-cache "^2.0.1"
-file-loader@^4.2.0:
- version "4.2.0"
- resolved "https://registry.yarnpkg.com/file-loader/-/file-loader-4.2.0.tgz#5fb124d2369d7075d70a9a5abecd12e60a95215e"
- integrity sha512-+xZnaK5R8kBJrHK0/6HRlrKNamvVS5rjyuju+rnyxRGuwUJwpAMsVzUl5dz6rK8brkzjV6JpcFNjp6NqV0g1OQ==
+file-loader@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.yarnpkg.com/file-loader/-/file-loader-5.1.0.tgz#cb56c070efc0e40666424309bd0d9e45ac6f2bb8"
+ integrity sha512-u/VkLGskw3Ue59nyOwUwXI/6nuBCo7KBkniB/l7ICwr/7cPNGsL1WCXUp3GB0qgOOKU1TiP49bv4DZF/LJqprg==
dependencies:
- loader-utils "^1.2.3"
- schema-utils "^2.0.0"
+ loader-utils "^1.4.0"
+ schema-utils "^2.5.0"
fileset@^2.0.3:
version "2.0.3"
@@ -4946,10 +5013,10 @@ format@^0.2.2:
resolved "https://registry.yarnpkg.com/format/-/format-0.2.2.tgz#d6170107e9efdc4ed30c9dc39016df942b5cb58b"
integrity sha1-1hcBB+nv3E7TDJ3DkBbflCtctYs=
-formdata-polyfill@^3.0.11:
- version "3.0.11"
- resolved "https://registry.yarnpkg.com/formdata-polyfill/-/formdata-polyfill-3.0.11.tgz#c82b4b4bea3356c0a6752219e54ce1edb2a7fb5b"
- integrity sha512-lDyjdlptnGL1Fk7q+hketv31EN9rWaVC/SLz1tRaUktGrsCijyueIcjn7Tw3xKEdCjS5SeBrWp5aNLWUQq+QLg==
+formdata-polyfill@^3.0.19:
+ version "3.0.19"
+ resolved "https://registry.yarnpkg.com/formdata-polyfill/-/formdata-polyfill-3.0.19.tgz#72f517db3a646a5dd8c31af0edf111fd8f1e4cee"
+ integrity sha512-nRSp8nniopIOCLZOUE2omwnUvmRH6VEdKm52rLTne8XBsW7hMMBUiOjuxUPoBsiK0CatKmxArh+Svt2s7R66JQ==
forwarded@~0.1.2:
version "0.1.2"
@@ -5047,10 +5114,10 @@ functional-red-black-tree@^1.0.1:
resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327"
integrity sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=
-fuzzaldrin-plus@^0.5.0:
- version "0.5.0"
- resolved "https://registry.yarnpkg.com/fuzzaldrin-plus/-/fuzzaldrin-plus-0.5.0.tgz#ef5f26f0c2fc7e9e9a16ea149a802d6cb4804b1e"
- integrity sha1-718m8ML8fp6aFuoUmoAtbLSASx4=
+fuzzaldrin-plus@^0.6.0:
+ version "0.6.0"
+ resolved "https://registry.yarnpkg.com/fuzzaldrin-plus/-/fuzzaldrin-plus-0.6.0.tgz#832f6489fbe876769459599c914a670ec22947ee"
+ integrity sha1-gy9kifvodnaUWVmckUpnDsIpR+4=
gauge@~2.7.3:
version "2.7.4"
@@ -5073,6 +5140,11 @@ gaze@^1.0.0:
dependencies:
globule "^1.0.0"
+gensync@^1.0.0-beta.1:
+ version "1.0.0-beta.1"
+ resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.1.tgz#58f4361ff987e5ff6e1e7a210827aa371eaac269"
+ integrity sha512-r8EC6NO1sngH/zdD9fiRDLdcgnbayXah+mLgManTaIZJqEC1MZstmnox8KpnI2/fxQwrp5OpCOYWLp4rBl4Jcg==
+
get-caller-file@^1.0.1:
version "1.0.3"
resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.3.tgz#f978fa4c90d1dfe7ff2d6beda2a515e713bdcf4a"
@@ -5174,10 +5246,10 @@ glob-to-regexp@^0.4.0:
resolved "https://registry.yarnpkg.com/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz#c75297087c851b9a578bd217dd59a92f59fe546e"
integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==
-"glob@5 - 7", glob@^7.0.0, glob@^7.0.3, glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@~7.1.1, glob@~7.1.2:
- version "7.1.4"
- resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.4.tgz#aa608a2f6c577ad357e1ae5a5c26d9a8d1969255"
- integrity sha512-hkLPepehmnKk41pUGm3sYxoFs/umurYfYJCerbXEyFIWcAzvpipAgVkBqqT9RBKMGjnq6kMuyYwha6csxbiM1A==
+"glob@5 - 7", glob@^7.0.0, glob@^7.0.3, glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6, glob@~7.1.1, glob@~7.1.2:
+ version "7.1.6"
+ resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6"
+ integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==
dependencies:
fs.realpath "^1.0.0"
inflight "^1.0.4"
@@ -5418,6 +5490,11 @@ has-flag@^3.0.0:
resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd"
integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0=
+has-flag@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b"
+ integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==
+
has-symbols@^1.0.0, has-symbols@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.1.tgz#9f5214758a44196c406d9bd76cebf81ec2dd31e8"
@@ -5552,6 +5629,11 @@ html-entities@^1.2.1:
resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-1.2.1.tgz#0df29351f0721163515dfb9e5543e5f6eed5162f"
integrity sha1-DfKTUfByEWNRXfueVUPl9u7VFi8=
+html-escaper@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.0.tgz#71e87f931de3fe09e56661ab9a29aadec707b491"
+ integrity sha512-a4u9BeERWGu/S8JiWEAQcdrg9v4QArtP9keViQjGMdff20fBdd8waotXaNmODqBe6uZ3Nafi7K/ho4gCQHV3Ig==
+
html-minifier@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/html-minifier/-/html-minifier-4.0.0.tgz#cca9aad8bce1175e02e17a8c33e46d8988889f56"
@@ -5582,6 +5664,16 @@ htmlparser2@^3.10.0:
inherits "^2.0.1"
readable-stream "^3.0.6"
+htmlparser2@^4.1.0:
+ version "4.1.0"
+ resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-4.1.0.tgz#9a4ef161f2e4625ebf7dfbe6c0a2f52d18a59e78"
+ integrity sha512-4zDq1a1zhE4gQso/c5LP1OtrhYTncXNSpvJYtWJBtXAETPlMfi3IFNjGuQbYLuVY4ZR0QMqRVvo4Pdy9KLyP8Q==
+ dependencies:
+ domelementtype "^2.0.1"
+ domhandler "^3.0.0"
+ domutils "^2.0.0"
+ entities "^2.0.0"
+
http-deceiver@^1.2.7:
version "1.2.7"
resolved "https://registry.yarnpkg.com/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87"
@@ -5608,7 +5700,7 @@ http-errors@~1.6.2:
setprototypeof "1.0.3"
statuses ">= 1.3.1 < 2"
-http-proxy-middleware@^0.19.1:
+http-proxy-middleware@0.19.1:
version "0.19.1"
resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-0.19.1.tgz#183c7dc4aa1479150306498c210cdaf96080a43a"
integrity sha512-yHYTgWMQO8VvwNS22eLLloAkvungsKdKTLO8AJlftYIKNfJr3GK3zK0ZCfzDDGUBttdGc8xFy1mCitvNKQtC3Q==
@@ -5660,10 +5752,10 @@ icss-utils@^2.1.0:
dependencies:
postcss "^6.0.1"
-ieee754@1.1.8, ieee754@^1.1.4:
- version "1.1.8"
- resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.8.tgz#be33d40ac10ef1926701f6f08a2d86fbfd1ad3e4"
- integrity sha1-vjPUCsEO8ZJnAfbwii2G+/0a0+Q=
+ieee754@1.1.13, ieee754@^1.1.4:
+ version "1.1.13"
+ resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.13.tgz#ec168558e95aa181fd87d37f55c32bbcb6708b84"
+ integrity sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==
iferr@^0.1.5:
version "0.1.5"
@@ -5878,10 +5970,10 @@ ipaddr.js@1.9.0, ipaddr.js@^1.9.0:
resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.0.tgz#37df74e430a0e47550fe54a2defe30d8acd95f65"
integrity sha512-M4Sjn6N/+O6/IXSJseKqHoFc+5FdGJ22sXqnjTpdZweHK64MzEPAyQZyEU3R/KRv2GLoa7nNtg/C2Ev6m7z+eA==
-is-absolute-url@^3.0.2:
- version "3.0.2"
- resolved "https://registry.yarnpkg.com/is-absolute-url/-/is-absolute-url-3.0.2.tgz#554f2933e7385cc46e94351977ca2081170a206e"
- integrity sha512-+5g/wLlcm1AcxSP7014m6GvbPHswDx980vD/3bZaap8aGV9Yfs7Q6y6tfaupgZ5O74Byzc8dGrSCJ+bFXx0KdA==
+is-absolute-url@^3.0.3:
+ version "3.0.3"
+ resolved "https://registry.yarnpkg.com/is-absolute-url/-/is-absolute-url-3.0.3.tgz#96c6a22b6a23929b11ea0afb1836c36ad4a5d698"
+ integrity sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q==
is-accessor-descriptor@^0.1.6:
version "0.1.6"
@@ -6283,6 +6375,11 @@ istanbul-lib-coverage@^2.0.2, istanbul-lib-coverage@^2.0.5:
resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.5.tgz#675f0ab69503fad4b1d849f736baaca803344f49"
integrity sha512-8aXznuEPCJvGnMSRft4udDRDtb1V3pkQkMMI5LI+6HuQz5oQ4J2UFn1H82raA3qJtyOLkkwVqICBQkjnGtn5mA==
+istanbul-lib-coverage@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.0.0.tgz#f5944a37c70b550b02a78a5c3b2055b280cec8ec"
+ integrity sha512-UiUIqxMgRDET6eR+o5HbfRYP1l0hqkWOs7vNxC/mggutCMUIhWMm8gAHb8tHlyfD3/l6rlgNA5cKdDzEAf6hEg==
+
istanbul-lib-hook@^2.0.7:
version "2.0.7"
resolved "https://registry.yarnpkg.com/istanbul-lib-hook/-/istanbul-lib-hook-2.0.7.tgz#c95695f383d4f8f60df1f04252a9550e15b5b133"
@@ -6312,6 +6409,15 @@ istanbul-lib-report@^2.0.4, istanbul-lib-report@^2.0.8:
make-dir "^2.1.0"
supports-color "^6.1.0"
+istanbul-lib-report@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6"
+ integrity sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==
+ dependencies:
+ istanbul-lib-coverage "^3.0.0"
+ make-dir "^3.0.0"
+ supports-color "^7.1.0"
+
istanbul-lib-source-maps@^3.0.1, istanbul-lib-source-maps@^3.0.6:
version "3.0.6"
resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-3.0.6.tgz#284997c48211752ec486253da97e3879defba8c8"
@@ -6330,6 +6436,14 @@ istanbul-reports@^2.1.1, istanbul-reports@^2.2.4:
dependencies:
handlebars "^4.1.2"
+istanbul-reports@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.0.0.tgz#d4d16d035db99581b6194e119bbf36c963c5eb70"
+ integrity sha512-2osTcC8zcOSUkImzN2EWQta3Vdi4WjjKw99P2yWx5mLnigAM0Rd5uYFn1cf2i/Ois45GkNjaoTqc5CxgMSX80A==
+ dependencies:
+ html-escaper "^2.0.0"
+ istanbul-lib-report "^3.0.0"
+
istextorbinary@^2.2.1:
version "2.2.1"
resolved "https://registry.yarnpkg.com/istextorbinary/-/istextorbinary-2.2.1.tgz#a5231a08ef6dd22b268d0895084cf8d58b5bec53"
@@ -6784,15 +6898,10 @@ js-beautify@^1.6.12, js-beautify@^1.8.8:
mkdirp "~0.5.1"
nopt "~4.0.1"
-js-cookie@^2.1.3:
- version "2.1.3"
- resolved "https://registry.yarnpkg.com/js-cookie/-/js-cookie-2.1.3.tgz#48071625217ac9ecfab8c343a13d42ec09ff0526"
- integrity sha1-SAcWJSF6yez6uMNDoT1C7An/BSY=
-
-js-levenshtein@^1.1.3:
- version "1.1.4"
- resolved "https://registry.yarnpkg.com/js-levenshtein/-/js-levenshtein-1.1.4.tgz#3a56e3cbf589ca0081eb22cd9ba0b1290a16d26e"
- integrity sha512-PxfGzSs0ztShKrUYPIn5r0MtyAhYcCwmndozzpz8YObbPnD1jFxzlBGbRnX2mIu6Z13xN6+PTu05TQFnZFlzow==
+js-cookie@^2.2.1:
+ version "2.2.1"
+ resolved "https://registry.yarnpkg.com/js-cookie/-/js-cookie-2.2.1.tgz#69e106dc5d5806894562902aa5baec3744e9b2b8"
+ integrity sha512-HvdH2LzI/EAZcUwA8+0nKNtWHqS+ZmijLA30RwZA0bo7ToCckjK5MkGhjED9KoRcXO6BaGI3I9UIzSA1FKFPOQ==
"js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0:
version "4.0.0"
@@ -7144,6 +7253,13 @@ leven@^3.1.0:
resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2"
integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==
+levenary@^1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/levenary/-/levenary-1.1.1.tgz#842a9ee98d2075aa7faeedbe32679e9205f46f77"
+ integrity sha512-mkAdOIt79FD6irqjYSs4rdbnlT5vRonMEvBVPVb3XmevfS8kgRXwfes0dhPdEtzTWD/1eNE/Bm/G1iRt6DcnQQ==
+ dependencies:
+ leven "^3.1.0"
+
levn@^0.3.0, levn@~0.3.0:
version "0.3.0"
resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee"
@@ -7159,10 +7275,10 @@ lie@~3.1.0:
dependencies:
immediate "~3.0.5"
-lightercollective@^0.1.0:
- version "0.1.0"
- resolved "https://registry.yarnpkg.com/lightercollective/-/lightercollective-0.1.0.tgz#70df102c530dcb8d0ccabfe6175a8d00d5f61300"
- integrity sha512-J9tg5uraYoQKaWbmrzDDexbG6hHnMcWS1qLYgJSWE+mpA3U5OCSeMUhb+K55otgZJ34oFdR0ECvdIb3xuO5JOQ==
+lightercollective@^0.3.0:
+ version "0.3.0"
+ resolved "https://registry.yarnpkg.com/lightercollective/-/lightercollective-0.3.0.tgz#1f07638642ec645d70bdb69ab2777676f35a28f0"
+ integrity sha512-RFOLSUVvwdK3xA0P8o6G7QGXLIyy1L2qv5caEI7zXN5ciaEjbAriRF182kbsoJ1S1TgvpyGcN485fMky6qxOPw==
linkify-it@^2.0.0:
version "2.1.0"
@@ -7207,7 +7323,7 @@ loader-runner@^2.4.0:
resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-2.4.0.tgz#ed47066bfe534d7e84c4c7b9998c2a75607d9357"
integrity sha512-Jsmr89RcXGIwivFY21FcRrisYZfvLMTWx5kOLc+JTxtpBOG6xML0vzbc6SEQG2FO9/4Fc3wW4LVcB5DmGflaRw==
-loader-utils@1.2.3, loader-utils@^1.0.0, loader-utils@^1.0.2, loader-utils@^1.1.0, loader-utils@^1.2.3:
+loader-utils@1.2.3:
version "1.2.3"
resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.2.3.tgz#1ff5dc6911c9f0a062531a4c04b609406108c2c7"
integrity sha512-fkpz8ejdnEMG3s37wGL07iSBDg99O9D5yflE9RGNH3hRdx9SOwYfnGYdZOUIZitN8E+E2vkq3MUMYMvPYl5ZZA==
@@ -7216,6 +7332,15 @@ loader-utils@1.2.3, loader-utils@^1.0.0, loader-utils@^1.0.2, loader-utils@^1.1.
emojis-list "^2.0.0"
json5 "^1.0.1"
+loader-utils@^1.0.0, loader-utils@^1.0.2, loader-utils@^1.1.0, loader-utils@^1.2.3, loader-utils@^1.4.0:
+ version "1.4.0"
+ resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.4.0.tgz#c579b5e34cb34b1a74edc6c1fb36bfa371d5a613"
+ integrity sha512-qH0WSMBtn/oHuwjy/NucEgbx5dbxxnxup9s4PVXJUDHZBQY+s0NWA9rJf53RBnQZxfch7euUui7hpoAPvALZdA==
+ dependencies:
+ big.js "^5.2.2"
+ emojis-list "^3.0.0"
+ json5 "^1.0.1"
+
locate-path@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e"
@@ -7334,10 +7459,10 @@ log4js@^4.0.0:
rfdc "^1.1.4"
streamroller "^1.0.6"
-loglevel@^1.6.4:
- version "1.6.4"
- resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.6.4.tgz#f408f4f006db8354d0577dcf6d33485b3cb90d56"
- integrity sha512-p0b6mOGKcGa+7nnmKbpzR6qloPbrgLcnio++E+14Vo/XffOGwZtRpUhr8dTH/x2oCMmEoIU0Zwm3ZauhvYD17g==
+loglevel@^1.6.6:
+ version "1.6.7"
+ resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.6.7.tgz#b3e034233188c68b889f5b862415306f565e2c56"
+ integrity sha512-cY2eLFrQSAfVPhCgH1s7JI73tMbg9YC3v3+ZHVW67sBS7UxWzNEk/ZBbSfLykBWHp33dqqtOv82gjhKEi81T/A==
longest-streak@^2.0.1:
version "2.0.2"
@@ -7641,10 +7766,10 @@ merge2@^1.2.3:
resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.2.3.tgz#7ee99dbd69bb6481689253f018488a1b902b0ed5"
integrity sha512-gdUU1Fwj5ep4kplwcmftruWofEFt6lfpkkr3h860CXbAB9c3hGb55EOL2ali0Td5oebvW0E1+3Sr+Ur7XfKpRA==
-mermaid@^8.4.5:
- version "8.4.5"
- resolved "https://registry.yarnpkg.com/mermaid/-/mermaid-8.4.5.tgz#48d5722cbc72be2ad01002795835d7ca1b48e000"
- integrity sha512-oJWgZBtT2rvAdmqHvKjDwb3tOut1+ksfgDdZrVhhNcdzNibzGPjCsmMPpVXjkFYzKZCVunIbAkfxltSuaGIhaw==
+mermaid@^8.4.8:
+ version "8.4.8"
+ resolved "https://registry.yarnpkg.com/mermaid/-/mermaid-8.4.8.tgz#8adcfdbc505d6bca52df167cff690427c9727b60"
+ integrity sha512-sumTNBFwMX7oMQgogdr3NhgTeQOiwcEsm23rQ4KHGW7tpmvMwER1S+1gjCSSnqlmM/zw7Ga7oesYCYicKboRwQ==
dependencies:
"@braintree/sanitize-url" "^3.1.0"
crypto-random-string "^3.0.1"
@@ -7845,7 +7970,7 @@ mixin-deep@^1.2.0:
for-in "^1.0.2"
is-extendable "^1.0.1"
-mkdirp@0.5.x, mkdirp@0.x, "mkdirp@>=0.5 0", mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp@~0.5.1:
+mkdirp@0.x, "mkdirp@>=0.5 0", mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp@~0.5.1:
version "0.5.1"
resolved "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903"
integrity sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=
@@ -7987,10 +8112,10 @@ node-fetch@^2.6.0:
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.0.tgz#e633456386d4aa55863f676a7ab0daa8fdecb0fd"
integrity sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==
-node-forge@0.8.2:
- version "0.8.2"
- resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.8.2.tgz#b4bcc59fb12ce77a8825fc6a783dfe3182499c5a"
- integrity sha512-mXQ9GBq1N3uDCyV1pdSzgIguwgtVpM7f5/5J4ipz12PKWElmPpVWLDuWl8iXmhysr21+WmX/OJ5UKx82wjomgg==
+node-forge@0.9.0:
+ version "0.9.0"
+ resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.9.0.tgz#d624050edbb44874adca12bb9a52ec63cb782579"
+ integrity sha512-7ASaDa3pD+lJ3WvXFsxekJQelBKRpne+GOVbLbtHYdd7pFspyeuJHnWfLplGf3SwKGbfs/aYl5V/JCIaHVUKKQ==
node-gyp@^3.8.0:
version "3.8.0"
@@ -8076,12 +8201,12 @@ node-pre-gyp@^0.12.0:
semver "^5.3.0"
tar "^4"
-node-releases@^1.1.25:
- version "1.1.25"
- resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.25.tgz#0c2d7dbc7fed30fbe02a9ee3007b8c90bf0133d3"
- integrity sha512-fI5BXuk83lKEoZDdH3gRhtsNgh05/wZacuXkgbiYkceE7+QIMXOg98n9ZV7mz27B+kFHnqHcUpscZZlGRSmTpQ==
+node-releases@^1.1.49:
+ version "1.1.50"
+ resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.50.tgz#803c40d2c45db172d0410e4efec83aa8c6ad0592"
+ integrity sha512-lgAmPv9eYZ0bGwUYAKlr8MG6K4CvWliWqnkcT2P8mMAgVrH3lqfBPorFlxiG1pHQnqmavJZ9vbMXUTNyMLbrgQ==
dependencies:
- semver "^5.3.0"
+ semver "^6.3.0"
node-sass@^4.12.0:
version "4.12.0"
@@ -8827,24 +8952,24 @@ pofile@^1:
resolved "https://registry.yarnpkg.com/pofile/-/pofile-1.0.11.tgz#35aff58c17491d127a07336d5522ebc9df57c954"
integrity sha512-Vy9eH1dRD9wHjYt/QqXcTz+RnX/zg53xK+KljFSX30PvdDMb2z+c6uDUeblUGqqJgz3QFsdlA0IJvHziPmWtQg==
-popper.js@^1.14.7, popper.js@^1.16.0:
- version "1.16.0"
- resolved "https://registry.yarnpkg.com/popper.js/-/popper.js-1.16.0.tgz#2e1816bcbbaa518ea6c2e15a466f4cb9c6e2fbb3"
- integrity sha512-+G+EkOPoE5S/zChTpmBSSDYmhXJ5PsW8eMhH8cP/CQHMFPBG/kC9Y5IIw6qNYgdJ+/COf0ddY2li28iHaZRSjw==
+popper.js@^1.16.0, popper.js@^1.16.1:
+ version "1.16.1"
+ resolved "https://registry.yarnpkg.com/popper.js/-/popper.js-1.16.1.tgz#2a223cb3dc7b6213d740e40372be40de43e65b1b"
+ integrity sha512-Wb4p1J4zyFTbM+u6WuO4XstYx4Ky9Cewe4DWrel7B0w6VVICvPwdOpotjzcf6eD8TsckVnIMNONQyPIUFOUbCQ==
portal-vue@^2.1.6:
version "2.1.6"
resolved "https://registry.yarnpkg.com/portal-vue/-/portal-vue-2.1.6.tgz#a7d4790b14a79af7fd159a60ec88c30cddc6c639"
integrity sha512-lvCF85D4e8whd0nN32D8FqKwwkk7nYUI3Ku8UAEx4Z1reomu75dv5evRUTZNaj1EalxxWNXiNl0EHRq36fG8WA==
-portfinder@^1.0.24:
- version "1.0.24"
- resolved "https://registry.yarnpkg.com/portfinder/-/portfinder-1.0.24.tgz#11efbc6865f12f37624b6531ead1d809ed965cfa"
- integrity sha512-ekRl7zD2qxYndYflwiryJwMioBI7LI7rVXg3EnLK3sjkouT5eOuhS3gS255XxBksa30VG8UPZYZCdgfGOfkSUg==
+portfinder@^1.0.25:
+ version "1.0.25"
+ resolved "https://registry.yarnpkg.com/portfinder/-/portfinder-1.0.25.tgz#254fd337ffba869f4b9d37edc298059cb4d35eca"
+ integrity sha512-6ElJnHBbxVA1XSLgBp7G1FiCkQdlqGzuF7DswL5tcea+E8UpuvPU7beVAjjRwCioTS9ZluNbu+ZyRvgTsmqEBg==
dependencies:
- async "^1.5.2"
- debug "^2.2.0"
- mkdirp "0.5.x"
+ async "^2.6.2"
+ debug "^3.1.1"
+ mkdirp "^0.5.1"
posix-character-classes@^0.1.0:
version "0.1.1"
@@ -9004,10 +9129,10 @@ postcss@^6.0.1, postcss@^6.0.23:
source-map "^0.6.1"
supports-color "^5.4.0"
-postcss@^7.0.0, postcss@^7.0.1, postcss@^7.0.14, postcss@^7.0.17, postcss@^7.0.2, postcss@^7.0.5, postcss@^7.0.7:
- version "7.0.21"
- resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.21.tgz#06bb07824c19c2021c5d056d5b10c35b989f7e17"
- integrity sha512-uIFtJElxJo29QC753JzhidoAhvp/e/Exezkdhfmt8AymWT6/5B7W1WmponYWkHk2eg6sONyTch0A3nkMPun3SQ==
+postcss@^7.0.0, postcss@^7.0.1, postcss@^7.0.14, postcss@^7.0.17, postcss@^7.0.2, postcss@^7.0.27, postcss@^7.0.7:
+ version "7.0.27"
+ resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.27.tgz#cc67cdc6b0daa375105b7c424a85567345fc54d9"
+ integrity sha512-WuQETPMcW9Uf1/22HWUWP9lgsIC+KEHg2kozMflKjbeUtw9ujvFX6QmIfozaErDkmLWS9WEnEdEe6Uo9/BNTdQ==
dependencies:
chalk "^2.4.2"
source-map "^0.6.1"
@@ -9369,13 +9494,13 @@ raw-body@2.4.0:
iconv-lite "0.4.24"
unpipe "1.0.0"
-raw-loader@^3.1.0:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/raw-loader/-/raw-loader-3.1.0.tgz#5e9d399a5a222cc0de18f42c3bc5e49677532b3f"
- integrity sha512-lzUVMuJ06HF4rYveaz9Tv0WRlUMxJ0Y1hgSkkgg+50iEdaI0TthyEDe08KIHb0XsF6rn8WYTqPCaGTZg3sX+qA==
+raw-loader@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/raw-loader/-/raw-loader-4.0.0.tgz#d639c40fb9d72b5c7f8abc1fb2ddb25b29d3d540"
+ integrity sha512-iINUOYvl1cGEmfoaLjnZXt4bKfT2LJnZZib5N/LLyAphC+Dd11vNP9CNVb38j+SAJpFI1uo8j9frmih53ASy7Q==
dependencies:
- loader-utils "^1.1.0"
- schema-utils "^2.0.1"
+ loader-utils "^1.2.3"
+ schema-utils "^2.5.0"
rc@^1.0.1, rc@^1.1.6, rc@^1.2.7, rc@~1.2.7:
version "1.2.8"
@@ -9539,7 +9664,7 @@ regenerate-unicode-properties@^8.1.0:
dependencies:
regenerate "^1.4.0"
-regenerate@^1.2.1, regenerate@^1.4.0:
+regenerate@^1.4.0:
version "1.4.0"
resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.0.tgz#4a856ec4b56e4077c557589cae85e7a4c8869a11"
integrity sha512-1G6jJVDWrt0rK99kBjvEtziZNCICAuvIPkSiUFIQxVP06RCVpq3dmDo2oi6ABpYaDYaTRr67BEhL8r1wgEZZKg==
@@ -9564,15 +9689,6 @@ regexpp@^2.0.1:
resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-2.0.1.tgz#8d19d31cf632482b589049f8281f93dbcba4d07f"
integrity sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw==
-regexpu-core@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-1.0.0.tgz#86a763f58ee4d7c2f6b102e4764050de7ed90c6b"
- integrity sha1-hqdj9Y7k18L2sQLkdkBQ3n7ZDGs=
- dependencies:
- regenerate "^1.2.1"
- regjsgen "^0.2.0"
- regjsparser "^0.1.4"
-
regexpu-core@^4.6.0:
version "4.6.0"
resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-4.6.0.tgz#2037c18b327cfce8a6fea2a4ec441f2432afb8b6"
@@ -9600,23 +9716,11 @@ registry-url@^3.0.3:
dependencies:
rc "^1.0.1"
-regjsgen@^0.2.0:
- version "0.2.0"
- resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.2.0.tgz#6c016adeac554f75823fe37ac05b92d5a4edb1f7"
- integrity sha1-bAFq3qxVT3WCP+N6wFuS1aTtsfc=
-
regjsgen@^0.5.0:
version "0.5.0"
resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.5.0.tgz#a7634dc08f89209c2049adda3525711fb97265dd"
integrity sha512-RnIrLhrXCX5ow/E5/Mh2O4e/oa1/jW0eaBKTSy3LaCj+M3Bqvm97GWDp2yUtzIs4LEn65zR2yiYGFqb2ApnzDA==
-regjsparser@^0.1.4:
- version "0.1.5"
- resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.1.5.tgz#7ee8f84dc6fa792d3fd0ae228d24bd949ead205c"
- integrity sha1-fuj4Tcb6eS0/0K4ijSS9lJ6tIFw=
- dependencies:
- jsesc "~0.5.0"
-
regjsparser@^0.6.0:
version "0.6.0"
resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.6.0.tgz#f1e6ae8b7da2bae96c99399b868cd6c933a2ba9c"
@@ -9950,20 +10054,20 @@ sane@^4.0.3:
minimist "^1.1.1"
walker "~1.0.5"
-sanitize-html@^1.20.0:
- version "1.20.1"
- resolved "https://registry.yarnpkg.com/sanitize-html/-/sanitize-html-1.20.1.tgz#f6effdf55dd398807171215a62bfc21811bacf85"
- integrity sha512-txnH8TQjaQvg2Q0HY06G6CDJLVYCpbnxrdO0WN8gjCKaU5J0KbyGYhZxx5QJg3WLZ1lB7XU9kDkfrCXUozqptA==
+sanitize-html@^1.22.0:
+ version "1.22.0"
+ resolved "https://registry.yarnpkg.com/sanitize-html/-/sanitize-html-1.22.0.tgz#9df779c53cf5755adb2322943c21c1c1dffca7bf"
+ integrity sha512-3RPo65mbTKpOAdAYWU496MSty1YbB3Y5bjwL5OclgaSSMtv65xvM7RW/EHRumzaZ1UddEJowCbSdK0xl5sAu0A==
dependencies:
chalk "^2.4.1"
- htmlparser2 "^3.10.0"
+ htmlparser2 "^4.1.0"
lodash.clonedeep "^4.5.0"
lodash.escaperegexp "^4.1.2"
lodash.isplainobject "^4.0.6"
lodash.isstring "^4.0.1"
lodash.mergewith "^4.6.1"
- postcss "^7.0.5"
- srcset "^1.0.0"
+ postcss "^7.0.27"
+ srcset "^2.0.1"
xtend "^4.0.1"
sass-graph@^2.2.4:
@@ -10003,10 +10107,10 @@ schema-utils@^1.0.0:
ajv-errors "^1.0.0"
ajv-keywords "^3.1.0"
-schema-utils@^2.0.0, schema-utils@^2.0.1, schema-utils@^2.6.1:
- version "2.6.1"
- resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-2.6.1.tgz#eb78f0b945c7bcfa2082b3565e8db3548011dc4f"
- integrity sha512-0WXHDs1VDJyo+Zqs9TKLKyD/h7yDpHUhEFsM2CzkICFdoX1av+GBq/J2xRTFfsQO5kBfhZzANf2VcIm84jqDbg==
+schema-utils@^2.0.0, schema-utils@^2.5.0, schema-utils@^2.6.1, schema-utils@^2.6.4:
+ version "2.6.4"
+ resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-2.6.4.tgz#a27efbf6e4e78689d91872ee3ccfa57d7bdd0f53"
+ integrity sha512-VNjcaUxVnEeun6B2fiiUDjXXBtD4ZSH7pdbfIu1pOFwgptDPLMo/z9jr4sUfsjFVPqDCEin/F7IYlq7/E6yDbQ==
dependencies:
ajv "^6.10.2"
ajv-keywords "^3.4.1"
@@ -10043,12 +10147,12 @@ select@^1.1.2:
resolved "https://registry.yarnpkg.com/select/-/select-1.1.2.tgz#0e7350acdec80b1108528786ec1d4418d11b396d"
integrity sha1-DnNQrN7ICxEIUoeG7B1EGNEbOW0=
-selfsigned@^1.10.6:
- version "1.10.6"
- resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-1.10.6.tgz#7b3cd37ed9c2034261a173af1a1aae27d8169b67"
- integrity sha512-i3+CeqxL7DpAazgVpAGdKMwHuL63B5nhJMh9NQ7xmChGkA3jNFflq6Jyo1LLJYcr3idWiNOPWHCrm4zMayLG4w==
+selfsigned@^1.10.7:
+ version "1.10.7"
+ resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-1.10.7.tgz#da5819fd049d5574f28e88a9bcc6dbc6e6f3906b"
+ integrity sha512-8M3wBCzeWIJnQfl43IKwOmC4H/RAp50S8DF60znzjW5GVqTcSe2vWclt7hmYVPkKPlHWOu5EaWOMZ2Y6W8ZXTA==
dependencies:
- node-forge "0.8.2"
+ node-forge "0.9.0"
semver-diff@^2.0.0:
version "2.1.0"
@@ -10062,7 +10166,12 @@ semver-diff@^2.0.0:
resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7"
integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==
-semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0:
+semver@7.0.0:
+ version "7.0.0"
+ resolved "https://registry.yarnpkg.com/semver/-/semver-7.0.0.tgz#5f3ca35761e47e05b206c6daff2cf814f0316b8e"
+ integrity sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==
+
+semver@^6.0.0, semver@^6.1.2, semver@^6.3.0:
version "6.3.0"
resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d"
integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==
@@ -10225,12 +10334,12 @@ slugify@^1.3.1:
resolved "https://registry.yarnpkg.com/slugify/-/slugify-1.3.1.tgz#f572127e8535329fbc6c1edb74ab856b61ad7de2"
integrity sha512-6BwyhjF5tG5P8s+0DPNyJmBSBePG6iMyhjvIW5zGdA3tFik9PtK+yNkZgTeiroCRGZYgkHftFA62tGVK1EI9Kw==
-smooshpack@^0.0.54:
- version "0.0.54"
- resolved "https://registry.yarnpkg.com/smooshpack/-/smooshpack-0.0.54.tgz#9044358b85052d348b801f385678c8a0c76f2bb6"
- integrity sha512-yIwEWb17hqoW5IaWyzO6O6nxY89I5UdRoGIZy5hihoqXP9OYcoMbBTxKwS57MeXSKdNA2rtk86rlCcOgAYIgrA==
+smooshpack@^0.0.62:
+ version "0.0.62"
+ resolved "https://registry.yarnpkg.com/smooshpack/-/smooshpack-0.0.62.tgz#cb31b9f808f73de3146b050f84d044eb353b5503"
+ integrity sha512-lFuJV2f504/U78sifWy0V2FyoE/8mTgOXM4DL918ncNxAxbtu236XSCLAH3SQwXZWn0JdmRnWs/XU4+sIUVVmQ==
dependencies:
- codesandbox-api "^0.0.20"
+ codesandbox-api "0.0.23"
codesandbox-import-utils "^1.2.3"
lodash.isequal "^4.5.0"
@@ -10330,10 +10439,10 @@ sockjs@0.3.19:
faye-websocket "^0.10.0"
uuid "^3.0.1"
-sortablejs@^1.10.0, sortablejs@^1.9.0:
- version "1.10.0"
- resolved "https://registry.yarnpkg.com/sortablejs/-/sortablejs-1.10.0.tgz#0ebc054acff2486569194a2f975b2b145dd5e7d6"
- integrity sha512-+e0YakK1BxgEZpf9l9UiFaiQ8ZOBn1p/4qkkXr8QDVmYyCrUDTyDRRGm0AgW4E4cD0wtgxJ6yzIRkSPUwqhuhg==
+sortablejs@^1.10.2, sortablejs@^1.9.0:
+ version "1.10.2"
+ resolved "https://registry.yarnpkg.com/sortablejs/-/sortablejs-1.10.2.tgz#6e40364d913f98b85a14f6678f92b5c1221f5290"
+ integrity sha512-YkPGufevysvfwn5rfdlGyrGjt7/CRHwvRPogD/lC+TnvcN29jDpCifKP+rBqf+LRldfXSTh+0CGLcSg0VIxq3A==
source-list-map@^2.0.0:
version "2.0.0"
@@ -10453,13 +10562,10 @@ sql.js@^0.4.0:
resolved "https://registry.yarnpkg.com/sql.js/-/sql.js-0.4.0.tgz#23be9635520eb0ff43a741e7e830397266e88445"
integrity sha1-I76WNVIOsP9Dp0Hn6DA5cmbohEU=
-srcset@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/srcset/-/srcset-1.0.0.tgz#a5669de12b42f3b1d5e83ed03c71046fc48f41ef"
- integrity sha1-pWad4StC87HV6D7QPHEEb8SPQe8=
- dependencies:
- array-uniq "^1.0.2"
- number-is-nan "^1.0.0"
+srcset@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/srcset/-/srcset-2.0.1.tgz#8f842d357487eb797f413d9c309de7a5149df5ac"
+ integrity sha512-00kZI87TdRKwt+P8jj8UZxbfp7mK2ufxcIMWvhAOZNJTRROimpHeruWrGvCZneiuVDLqdyHefVp748ECTnyUBQ==
sshpk@^1.7.0:
version "1.15.2"
@@ -10531,10 +10637,10 @@ stealthy-require@^1.1.0:
resolved "https://registry.yarnpkg.com/stealthy-require/-/stealthy-require-1.1.1.tgz#35b09875b4ff49f26a777e509b3090a3226bf24b"
integrity sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks=
-stickyfilljs@^2.0.5:
- version "2.0.5"
- resolved "https://registry.yarnpkg.com/stickyfilljs/-/stickyfilljs-2.0.5.tgz#d229e372d2199ddf5d283bbe34ac1f7d2529c2fc"
- integrity sha512-KGKdqKbv1jXit54ltFPIWw/XVeuSrJmTUS8viT1Pmdpp1Jyv3SMpFmhvPBdddX9FHDlHbm9s8cPAhPviBaBVpA==
+stickyfilljs@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/stickyfilljs/-/stickyfilljs-2.1.0.tgz#46dabb599d8275d185bdb97db597f86a2e3afa7b"
+ integrity sha512-LkG0BXArL5HbW2O09IAXfnBQfpScgGqJuUDUrI3Ire5YKjRz/EhakIZEJogHwgXeQ4qnTicM9sK9uYfWN11qKg==
stream-browserify@^2.0.1:
version "2.0.1"
@@ -10727,13 +10833,13 @@ strip-json-comments@~2.0.1:
resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a"
integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo=
-style-loader@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-1.0.0.tgz#1d5296f9165e8e2c85d24eee0b7caf9ec8ca1f82"
- integrity sha512-B0dOCFwv7/eY31a5PCieNwMgMhVGFe9w+rh7s/Bx8kfFkrth9zfTZquoYvdw8URgiqxObQKcpW51Ugz1HjfdZw==
+style-loader@^1.1.3:
+ version "1.1.3"
+ resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-1.1.3.tgz#9e826e69c683c4d9bf9db924f85e9abb30d5e200"
+ integrity sha512-rlkH7X/22yuwFYK357fMN/BxYOorfnfq0eD7+vqlemSK4wEcejFF1dg4zxP0euBW8NrYx2WZzZ8PPFevr7D+Kw==
dependencies:
loader-utils "^1.2.3"
- schema-utils "^2.0.1"
+ schema-utils "^2.6.4"
style-search@^0.1.0:
version "0.1.0"
@@ -10836,6 +10942,13 @@ supports-color@^5.2.0, supports-color@^5.3.0, supports-color@^5.4.0:
dependencies:
has-flag "^3.0.0"
+supports-color@^7.1.0:
+ version "7.1.0"
+ resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.1.0.tgz#68e32591df73e25ad1c4b49108a2ec507962bfd1"
+ integrity sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==
+ dependencies:
+ has-flag "^4.0.0"
+
svg-tags@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/svg-tags/-/svg-tags-1.0.0.tgz#58f71cee3bd519b59d4b2a843b6c7de64ac04764"
@@ -11002,10 +11115,10 @@ thunky@^0.1.0:
resolved "https://registry.yarnpkg.com/thunky/-/thunky-0.1.0.tgz#bf30146824e2b6e67b0f2d7a4ac8beb26908684e"
integrity sha1-vzAUaCTituZ7Dy16Ssi+smkIaE4=
-timeago.js@^4.0.1:
- version "4.0.1"
- resolved "https://registry.yarnpkg.com/timeago.js/-/timeago.js-4.0.1.tgz#4be4aa19565ceaeb0da31fe14e01ce6ca4742da6"
- integrity sha512-ePzZuMoJqUc44hJbUYtY1qtzU7IammxooDCcFKogLkS5Nj+iCabR0ZlmNOFX8Dm1r5EpvR5q/PotOJli/mEPew==
+timeago.js@^4.0.2:
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/timeago.js/-/timeago.js-4.0.2.tgz#724e8c8833e3490676c7bb0a75f5daf20e558028"
+ integrity sha512-a7wPxPdVlQL7lqvitHGGRsofhdwtkoSXPGATFuSOA2i1ZNQEPLrGnj68vOp2sOJTCFAQVXPeNMX/GctBaO9L2w==
timed-out@^4.0.0:
version "4.0.1"
@@ -11171,6 +11284,11 @@ tr46@^1.0.1:
dependencies:
punycode "^2.1.0"
+tributejs@4.1.3:
+ version "4.1.3"
+ resolved "https://registry.yarnpkg.com/tributejs/-/tributejs-4.1.3.tgz#2e1be7d9a1e403ed4c394f91d859812267e4691c"
+ integrity sha512-+VUqyi8p7tCdaqCINCWHf95E2hJFMIML180BhplTpXNooz3E2r96AONXI9qO2Ru6Ugp7MsMPJjB+rnBq+hAmzA==
+
trim-newlines@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-1.0.0.tgz#5887966bb582a4503a41eb524f7d35011815a613"
@@ -11341,16 +11459,21 @@ underscore@1.6.0, underscore@~1.6.0:
resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.6.0.tgz#8b38b10cacdef63337b8b24e4ff86d45aea529a8"
integrity sha1-izixDKze9jM3uLJOT/htRa6lKag=
-underscore@^1.9.0:
- version "1.9.0"
- resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.9.0.tgz#31dbb314cfcc88f169cd3692d9149d81a00a73e4"
- integrity sha512-4IV1DSSxC1QK48j9ONFK1MoIAKKkbE8i7u55w2R6IqBqbT7A/iG7aZBCR2Bi8piF0Uz+i/MG1aeqLwl/5vqF+A==
+underscore@^1.9.2:
+ version "1.9.2"
+ resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.9.2.tgz#0c8d6f536d6f378a5af264a72f7bec50feb7cf2f"
+ integrity sha512-D39qtimx0c1fI3ya1Lnhk3E9nONswSKhnffBI0gME9C99fYOkNi04xs8K6pePLhvl1frbDemkaBQ5ikWllR2HQ==
underscore@~1.8.3:
version "1.8.3"
resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.8.3.tgz#4f3fb53b106e6097fcf9cb4109f2a5e9bdfa5022"
integrity sha1-Tz+1OxBuYJf8+ctBCfKl6b36UCI=
+unfetch@^4.1.0:
+ version "4.1.0"
+ resolved "https://registry.yarnpkg.com/unfetch/-/unfetch-4.1.0.tgz#6ec2dd0de887e58a4dee83a050ded80ffc4137db"
+ integrity sha512-crP/n3eAPUJxZXM9T80/yv0YhkTEx2K1D3h7D1AJM6fzsWZrxdyRuLN0JH/dkZh1LNH8LxCnBzoPFCPbb2iGpg==
+
unherit@^1.0.4:
version "1.1.1"
resolved "https://registry.yarnpkg.com/unherit/-/unherit-1.1.1.tgz#132748da3e88eab767e08fabfbb89c5e9d28628c"
@@ -11531,14 +11654,14 @@ urix@^0.1.0:
resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72"
integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI=
-url-loader@^2.1.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/url-loader/-/url-loader-2.1.0.tgz#bcc1ecabbd197e913eca23f5e0378e24b4412961"
- integrity sha512-kVrp/8VfEm5fUt+fl2E0FQyrpmOYgMEkBsv8+UDP1wFhszECq5JyGF33I7cajlVY90zRZ6MyfgKXngLvHYZX8A==
+url-loader@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/url-loader/-/url-loader-3.0.0.tgz#9f1f11b371acf6e51ed15a50db635e02eec18368"
+ integrity sha512-a84JJbIA5xTFTWyjjcPdnsu+41o/SNE8SpXMdUvXs6Q+LuhCD9E2+0VCiuDWqgo3GGXVlFHzArDmBpj9PgWn4A==
dependencies:
loader-utils "^1.2.3"
mime "^2.4.4"
- schema-utils "^2.0.0"
+ schema-utils "^2.5.0"
url-parse-lax@^1.0.0:
version "1.0.0"
@@ -11626,16 +11749,11 @@ uuid@3.3.2, uuid@^3.0.1, uuid@^3.3.2:
resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.2.tgz#1b4af4955eb3077c501c23872fc6513811587131"
integrity sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==
-v8-compile-cache@2.0.3:
+v8-compile-cache@2.0.3, v8-compile-cache@^2.0.3:
version "2.0.3"
resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.0.3.tgz#00f7494d2ae2b688cfe2899df6ed2c54bef91dbe"
integrity sha512-CNmdbwQMBjwr9Gsmohvm0pbL954tJrNzf6gWL3K+QMQf00PF7ERGrEiLgjuU3mKreLC2MeGhUsNV9ybTbLgd3w==
-v8-compile-cache@^2.0.3:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.1.0.tgz#e14de37b31a6d194f5690d67efc4e7f6fc6ab30e"
- integrity sha512-usZBT3PW+LOjM25wbqIlZwPeJV+3OSz3M1k1Ws8snlW39dZyYL9lOGC5FgPVHfk0jKmjiDV8Z0mIbVQPiwFs7g==
-
validate-npm-package-license@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.1.tgz#2804babe712ad3379459acfbe24746ab2c303fbc"
@@ -11737,10 +11855,10 @@ vue-jest@^4.0.0-beta.2:
source-map "^0.5.6"
ts-jest "^23.10.5"
-vue-loader@^15.4.2, vue-loader@^15.7.1:
- version "15.8.3"
- resolved "https://registry.yarnpkg.com/vue-loader/-/vue-loader-15.8.3.tgz#857cb9e30eb5fc25e66db48dce7e4f768602a23c"
- integrity sha512-yFksTFbhp+lxlm92DrKdpVIWMpranXnTEuGSc0oW+Gk43M9LWaAmBTnfj5+FCdve715mTHvo78IdaXf5TbiTJg==
+vue-loader@^15.4.2, vue-loader@^15.9.0:
+ version "15.9.0"
+ resolved "https://registry.yarnpkg.com/vue-loader/-/vue-loader-15.9.0.tgz#5d4b0378a4606188fc83e587ed23c94bc3a10998"
+ integrity sha512-FeDHvTSpwyLeF7LIV1PYkvqUQgTJ8UmOxhSlCyRSxaXCKk+M6NF4tDQsLsPPNeDPyR7TfRQ8MLg6v+8PsDV9xQ==
dependencies:
"@vue/component-compiler-utils" "^3.1.0"
hash-sum "^1.0.2"
@@ -11753,6 +11871,11 @@ vue-router@^3.0.2:
resolved "https://registry.yarnpkg.com/vue-router/-/vue-router-3.0.2.tgz#dedc67afe6c4e2bc25682c8b1c2a8c0d7c7e56be"
integrity sha512-opKtsxjp9eOcFWdp6xLQPLmRGgfM932Tl56U9chYTnoWqKxQ8M20N7AkdEbM5beUh6wICoFGYugAX9vQjyJLFg==
+vue-runtime-helpers@^1.1.2:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/vue-runtime-helpers/-/vue-runtime-helpers-1.1.2.tgz#446b7b820888ab0c5264d2c3a32468e72e4100f3"
+ integrity sha512-pZfGp+PW/IXEOyETE09xQHR1CKkR9HfHZdnMD/FVLUNI+HxYTa82evx5WrF6Kz4s82qtqHvMZ8MZpbk2zT2E1Q==
+
vue-style-loader@^4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/vue-style-loader/-/vue-style-loader-4.1.0.tgz#7588bd778e2c9f8d87bfc3c5a4a039638da7a863"
@@ -11836,10 +11959,10 @@ webidl-conversions@^4.0.2:
resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-4.0.2.tgz#a855980b1f0b6b359ba1d5d9fb39ae941faa63ad"
integrity sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==
-webpack-bundle-analyzer@^3.5.1:
- version "3.5.1"
- resolved "https://registry.yarnpkg.com/webpack-bundle-analyzer/-/webpack-bundle-analyzer-3.5.1.tgz#84aabb1547178d842ebb4ccc7324084b6c3b0ea9"
- integrity sha512-CDdaT3TTu4F9X3tcDq6PNJOiNGgREOM0WdN2vVAoUUn+M6NLB5kJ543HImCWbrDwOpbpGARSwU8r+u0Pl367kA==
+webpack-bundle-analyzer@^3.6.0:
+ version "3.6.0"
+ resolved "https://registry.yarnpkg.com/webpack-bundle-analyzer/-/webpack-bundle-analyzer-3.6.0.tgz#39b3a8f829ca044682bc6f9e011c95deb554aefd"
+ integrity sha512-orUfvVYEfBMDXgEKAKVvab5iQ2wXneIEorGNsyuOyVYpjYrI7CUOhhXNDd3huMwQ3vNNWWlGP+hzflMFYNzi2g==
dependencies:
acorn "^6.0.7"
acorn-walk "^6.1.1"
@@ -11855,10 +11978,10 @@ webpack-bundle-analyzer@^3.5.1:
opener "^1.5.1"
ws "^6.0.0"
-webpack-cli@^3.3.9:
- version "3.3.9"
- resolved "https://registry.yarnpkg.com/webpack-cli/-/webpack-cli-3.3.9.tgz#79c27e71f94b7fe324d594ab64a8e396b9daa91a"
- integrity sha512-xwnSxWl8nZtBl/AFJCOn9pG7s5CYUYdZxmmukv+fAHLcBIHM36dImfpQg3WfShZXeArkWlf6QRw24Klcsv8a5A==
+webpack-cli@^3.3.11:
+ version "3.3.11"
+ resolved "https://registry.yarnpkg.com/webpack-cli/-/webpack-cli-3.3.11.tgz#3bf21889bf597b5d82c38f215135a411edfdc631"
+ integrity sha512-dXlfuml7xvAFwYUPsrtQAA9e4DOe58gnzSxhgrO/ZM/gyXTBowrsYeubyN4mqGhYdpXMFNyQ6emjJS9M7OBd4g==
dependencies:
chalk "2.4.2"
cross-spawn "6.0.5"
@@ -11872,10 +11995,10 @@ webpack-cli@^3.3.9:
v8-compile-cache "2.0.3"
yargs "13.2.4"
-webpack-dev-middleware@^3.7.0, webpack-dev-middleware@^3.7.1:
- version "3.7.1"
- resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-3.7.1.tgz#1167aea02afa034489869b8368fe9fed1aea7d09"
- integrity sha512-5MWu9SH1z3hY7oHOV6Kbkz5x7hXbxK56mGHNqHTe6d+ewxOwKUxoUJBs7QIaJb33lPjl9bJZ3X0vCoooUzC36A==
+webpack-dev-middleware@^3.7.0, webpack-dev-middleware@^3.7.2:
+ version "3.7.2"
+ resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-3.7.2.tgz#0019c3db716e3fa5cecbf64f2ab88a74bab331f3"
+ integrity sha512-1xC42LxbYoqLNAhV6YzTYacicgMZQTqRd27Sim9wn5hJrX3I5nxYy1SxSd4+gjUFsz1dQFj+yEe6zEVmSkeJjw==
dependencies:
memory-fs "^0.4.1"
mime "^2.4.4"
@@ -11883,10 +12006,10 @@ webpack-dev-middleware@^3.7.0, webpack-dev-middleware@^3.7.1:
range-parser "^1.2.1"
webpack-log "^2.0.0"
-webpack-dev-server@^3.8.1:
- version "3.8.1"
- resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-3.8.1.tgz#485b64c4aadc23f601e72114b40c1b1fea31d9f1"
- integrity sha512-9F5DnfFA9bsrhpUCAfQic/AXBVHvq+3gQS+x6Zj0yc1fVVE0erKh2MV4IV12TBewuTrYeeTIRwCH9qLMvdNvTw==
+webpack-dev-server@^3.10.3:
+ version "3.10.3"
+ resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-3.10.3.tgz#f35945036813e57ef582c2420ef7b470e14d3af0"
+ integrity sha512-e4nWev8YzEVNdOMcNzNeCN947sWJNd43E5XvsJzbAL08kGc2frm1tQ32hTJslRS+H65LCb/AaUCYU7fjHCpDeQ==
dependencies:
ansi-html "0.0.7"
bonjour "^3.5.0"
@@ -11897,18 +12020,18 @@ webpack-dev-server@^3.8.1:
del "^4.1.1"
express "^4.17.1"
html-entities "^1.2.1"
- http-proxy-middleware "^0.19.1"
+ http-proxy-middleware "0.19.1"
import-local "^2.0.0"
internal-ip "^4.3.0"
ip "^1.1.5"
- is-absolute-url "^3.0.2"
+ is-absolute-url "^3.0.3"
killable "^1.0.1"
- loglevel "^1.6.4"
+ loglevel "^1.6.6"
opn "^5.5.0"
p-retry "^3.0.1"
- portfinder "^1.0.24"
+ portfinder "^1.0.25"
schema-utils "^1.0.0"
- selfsigned "^1.10.6"
+ selfsigned "^1.10.7"
semver "^6.3.0"
serve-index "^1.9.1"
sockjs "0.3.19"
@@ -11917,7 +12040,7 @@ webpack-dev-server@^3.8.1:
strip-ansi "^3.0.1"
supports-color "^6.1.0"
url "^0.11.0"
- webpack-dev-middleware "^3.7.1"
+ webpack-dev-middleware "^3.7.2"
webpack-log "^2.0.0"
ws "^6.2.1"
yargs "12.0.5"
@@ -11938,15 +12061,15 @@ webpack-sources@^1.0.1, webpack-sources@^1.4.0, webpack-sources@^1.4.1:
source-list-map "^2.0.0"
source-map "~0.6.1"
-webpack-stats-plugin@^0.3.0:
- version "0.3.0"
- resolved "https://registry.yarnpkg.com/webpack-stats-plugin/-/webpack-stats-plugin-0.3.0.tgz#6952f63feb9a5393a328d774fb3eccac78d2f51b"
- integrity sha512-4a6mEl9HLtMukVjEPY8QPCSmtX2EDFJNhDTX5ZE2CLch2adKAZf53nUrpG6m7NattwigS0AodNcwNxlu9kMSDQ==
+webpack-stats-plugin@^0.3.1:
+ version "0.3.1"
+ resolved "https://registry.yarnpkg.com/webpack-stats-plugin/-/webpack-stats-plugin-0.3.1.tgz#1103c39a305a4e6ba15d5078db84bc0b35447417"
+ integrity sha512-pxqzFE055NlNTlNyfDG3xlB2QwT1EWdm/CF5dCJI/e+rRHVxrWhWg1rf1lfsWhI1/EePv8gi/A36YxO/+u0FgQ==
-webpack@^4.41.5:
- version "4.41.5"
- resolved "https://registry.yarnpkg.com/webpack/-/webpack-4.41.5.tgz#3210f1886bce5310e62bb97204d18c263341b77c"
- integrity sha512-wp0Co4vpyumnp3KlkmpM5LWuzvZYayDwM2n17EHFr4qxBBbRokC7DJawPJC7TfSFZ9HZ6GsdH40EBj4UV0nmpw==
+webpack@^4.42.0:
+ version "4.42.0"
+ resolved "https://registry.yarnpkg.com/webpack/-/webpack-4.42.0.tgz#b901635dd6179391d90740a63c93f76f39883eb8"
+ integrity sha512-EzJRHvwQyBiYrYqhyjW9AqM90dE4+s1/XtCfn7uWg6cS72zH+2VPFAlsnW0+W0cDi0XRjNKUMoJtpSi50+Ph6w==
dependencies:
"@webassemblyjs/ast" "1.8.5"
"@webassemblyjs/helper-module-context" "1.8.5"